# jseabold/statsmodels | statsmodels/examples/ex_misc_tarma.py
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 03 23:01:44 2013
Author: Josef Perktold
"""
import numpy as np
import matplotlib.pyplot as plt
from statsmodels.tsa.arima_process import arma_generate_sample, ArmaProcess
from statsmodels.miscmodels.tmodel import TArma
from statsmodels.tsa.arima_model import ARMA
from statsmodels.tsa.arma_mle import Arma
nobs = 500
ar = [1, -0.6, -0.1]
ma = [1, 0.7]
dist = lambda n: np.random.standard_t(3, size=n)
np.random.seed(8659567)
x = arma_generate_sample(ar, ma, nobs, scale=1, distrvs=dist,
burnin=500)
mod = TArma(x)
order = (2, 1)
res = mod.fit(order=order)
res2 = mod.fit_mle(order=order, start_params=np.r_[res[0], 5, 1], method='nm')
print(res[0])
proc = ArmaProcess.from_coeffs(res[0][:order[0]], res[0][order[0]:])  # MA coefficients follow the AR coefficients in res[0]
print(ar, ma)
proc.nobs = nobs
# TODO: bug nobs is None, not needed ?, used in ArmaProcess.__repr__
print(proc.ar, proc.ma)
print(proc.ar_roots(), proc.ma_roots())
modn = Arma(x)
resn = modn.fit_mle(order=order)
moda = ARMA(x, order=order)
resa = moda.fit(trend='nc')
print('\nparameter estimates')
print('ls ', res[0])
print('norm', resn.params)
print('t ', res2.params)
print('A ', resa.params)
print('\nstandard deviation of parameter estimates')
#print 'ls ', res[0] #TODO: not available yet
print('norm', resn.bse)
print('t ', res2.bse)
print('A ', resa.bse)
print('A/t-1', resa.bse / res2.bse[:3] - 1)
print('other bse')
print(resn.bsejac)
print(resn.bsejhj)
print(res2.bsejac)
print(res2.bsejhj)
print(res2.t_test(np.eye(len(res2.params))))
# TArma has no fittedvalues and resid
# TODO: check if lag is correct or if fitted `x-resid` is shifted
resid = res2.model.geterrors(res2.params)
fv = res[2]['fvec'] #resid returned from leastsq?
plt.plot(x, 'o', alpha=0.5)
plt.plot(x-resid)
plt.plot(x-fv)
#plt.show()
# ftfarias/PySubsim | old/ship.py
# -*- coding: utf-8 -*-
# class Ship(MovableNewtonObject):
# def __init__(self, drag_factor, max_turn_per_hour, max_acceleration):
# super(Ship, self).__init__()
# self._rudder = 0
# self.max_turn_per_hour = max_turn_per_hour
# self.drag_factor = drag_factor
# self.frontal_drag_factor = drag_factor
# self.drag_factor = drag_factor
# self.drag_force = Point(0, 0)
# self.turbine_acceleration = Point(0, 0)
# self.turbine = Turbine(self, max_acceleration)
# return self._velocity.angle
# def set_course(self, angle):
# '''
# :param angle: new angle in radians
# :return: none
# '''
# angle = normalize_angle_2pi(angle)
# self._velocity.angle = angle
# self._acceleration.angle = self._velocity.angle # assumes the rotation also changes the acceleration
# def __str__(self):
# return "pos:{p} vel:{v}({vt:.1f};{va:.0f}˚) accel:{a}({at:.1f};{aa:.0f}˚) rudder:{rudder}".format(
# p=self._position,
# v=self._velocity,
# vt=self._velocity.angle,
# va=self._velocity.bearing,
# a=self._acceleration,
# at=self._acceleration.angle,
# aa=self._acceleration.bearing,
# rudder=self.rudder)
#
# def debug(self):
# return "pos:{p} vel:{v}({vt:.1f};{va:.0f}˚) accel:{a}({at:.1f};{aa:.0f}˚)".format(
# p=self._position,
# v=self._velocity,
# vt=self._velocity.angle,
# va=self._velocity.bearing,
# a=self._acceleration,
# at=self._acceleration.angle,
# aa=self._acceleration.bearing)
#
#
# Azure/azure-sdk-for-python | sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/aio/operations/_cross_region_restore_operations.py
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class CrossRegionRestoreOperations:
"""CrossRegionRestoreOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservicesbackup.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _trigger_initial(
self,
azure_region: str,
parameters: "_models.CrossRegionRestoreRequest",
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-20"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._trigger_initial.metadata['url'] # type: ignore
path_format_arguments = {
'azureRegion': self._serialize.url("azure_region", azure_region, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CrossRegionRestoreRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.NewErrorResponseAutoGenerated, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_trigger_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.RecoveryServices/locations/{azureRegion}/backupCrossRegionRestore'} # type: ignore
async def begin_trigger(
self,
azure_region: str,
parameters: "_models.CrossRegionRestoreRequest",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Restores the specified backed up data in a different region as compared to where the data is backed up.
Restores the specified backed up data in a different region as compared to where the data is
backed up.
:param azure_region: Azure region to hit Api.
:type azure_region: str
:param parameters: resource cross region restore request.
:type parameters: ~azure.mgmt.recoveryservicesbackup.models.CrossRegionRestoreRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._trigger_initial(
azure_region=azure_region,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'azureRegion': self._serialize.url("azure_region", azure_region, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.RecoveryServices/locations/{azureRegion}/backupCrossRegionRestore'} # type: ignore
# lkhomenk/integration_tests | artifactor/plugins/filedump.py
""" FileDump plugin for Artifactor
Add a stanza to the artifactor config like this,
artifactor:
log_dir: /home/username/outdir
per_run: test #test, run, None
overwrite: True
plugins:
filedump:
enabled: True
plugin: filedump
"""
from artifactor import ArtifactorBasePlugin
import base64
import os
import re
from cfme.utils import normalize_text, safe_string
import six
class Filedump(ArtifactorBasePlugin):
def plugin_initialize(self):
self.register_plugin_hook('filedump', self.filedump)
self.register_plugin_hook('sanitize', self.sanitize)
self.register_plugin_hook('pre_start_test', self.start_test)
self.register_plugin_hook('finish_test', self.finish_test)
def configure(self):
self.configured = True
def start_test(self, artifact_path, test_name, test_location, slaveid):
if not slaveid:
slaveid = "Master"
self.store[slaveid] = {
"artifact_path": artifact_path,
"test_name": test_name,
"test_location": test_location
}
def finish_test(self, artifact_path, test_name, test_location, slaveid):
if not slaveid:
slaveid = "Master"
@ArtifactorBasePlugin.check_configured
def filedump(self, description, contents, slaveid=None, mode="w", contents_base64=False,
display_type="primary", display_glyph=None, file_type=None,
dont_write=False, os_filename=None, group_id=None, test_name=None,
test_location=None):
if not slaveid:
slaveid = "Master"
test_ident = "{}/{}".format(self.store[slaveid]['test_location'],
self.store[slaveid]['test_name'])
artifacts = []
if os_filename is None:
safe_name = re.sub(r"\s+", "_", normalize_text(safe_string(description)))
os_filename = self.ident + "-" + safe_name
os_filename = os.path.join(self.store[slaveid]['artifact_path'], os_filename)
if file_type is not None and "screenshot" in file_type:
os_filename = os_filename + ".png"
elif file_type is not None and (
"_tb" in file_type or "traceback" in file_type or file_type == "log"):
os_filename = os_filename + ".log"
elif file_type is not None and file_type == "html":
os_filename = os_filename + ".html"
elif file_type is not None and file_type == "video":
os_filename = os_filename + ".ogv"
else:
os_filename = os_filename + ".txt"
artifacts.append({
"file_type": file_type,
"display_type": display_type,
"display_glyph": display_glyph,
"description": description,
"os_filename": os_filename,
"group_id": group_id,
})
if not dont_write:
if os.path.isfile(os_filename):
os.remove(os_filename)
with open(os_filename, mode) as f:
if contents_base64:
contents = base64.b64decode(contents)
f.write(contents)
return None, {'artifacts': {test_ident: {'files': artifacts}}}
@ArtifactorBasePlugin.check_configured
def sanitize(self, test_location, test_name, artifacts, words):
test_ident = "{}/{}".format(test_location, test_name)
filename = None
try:
for f in artifacts[test_ident]['files']:
if f["file_type"] not in {
"traceback", "short_tb", "rbac", "soft_traceback",
"soft_short_tb"}:
continue
filename = f["os_filename"]
with open(filename) as f:
data = f.read()
for word in words:
if not isinstance(word, six.string_types):
word = str(word)
data = data.replace(word, "*" * len(word))
with open(filename, "w") as f:
f.write(data)
except KeyError:
pass
# dziadu/gitbrowser | settings/base.py
"""
Django settings for gitbrowser project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.core.urlresolvers import reverse_lazy
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = None
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.sitemaps',
'gitbrowser',
'bootstrap3'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'gitbrowser.middlewares.LoginRequiredMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'gitbrowser.middlewares.InterceptGitwebMiddleware',
'gitbrowser.middlewares.ContentSecurityPolicyMiddleware',
)
ROOT_URLCONF = 'gitbrowser.urls'
WSGI_APPLICATION = 'gitbrowser.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
'KEY_FUNCTION': os.path.join(BASE_DIR, 'utils.cache.gen_cache_key'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = reverse_lazy('list')
LOGOUT_URL = LOGIN_REDIRECT_URL
LOGIN_EXEMPT_URLS = [
'^robots\.txt$',
]
# A single string or a list of strings will be appended to all CSPs.
# A dictionary will be merged with the default CSPs.
CONTENT_SECURITY_POLICY_URLS = None
# python-recsys/mrec | mrec/base_recommender.py
class BaseRecommender(object):
"""
Minimal interface to be implemented by recommenders.
"""
def get_similar_items(self,j,max_similar_items=30):
"""
Get the most similar items to a supplied item.
Parameters
==========
j : int
Index of item for which to get similar items.
max_similar_items : int
Maximum number of similar items to return.
Returns
=======
sims : list
Sorted list of similar items, best first. Each entry is
a tuple of the form (i,score).
"""
pass
def recommend_items(self,dataset,u,max_items=10,return_scores=True):
"""
Recommend new items for a user.
Parameters
==========
dataset : scipy.sparse.csr_matrix
User-item matrix containing known items.
u : int
Index of user for which to make recommendations.
max_items : int
Maximum number of recommended items to return.
return_scores : bool
If true return a score along with each recommended item.
Returns
=======
recs : list
List of (idx,score) pairs if return_scores is True, else
just a list of idxs.
"""
pass
def batch_recommend_items(self,dataset,max_items=10,return_scores=True,show_progress=False):
"""
Recommend new items for all users in the training dataset.
Parameters
==========
dataset : scipy.sparse.csr_matrix
User-item matrix containing known items.
max_items : int
Maximum number of recommended items to return.
return_scores : bool
If true return a score along with each recommended item.
show_progress: bool
If true print something to stdout to show progress.
Returns
=======
recs : list of lists
Each entry is a list of (idx,score) pairs if return_scores is True,
else just a list of idxs.
"""
# default implementation, you may be able to optimize this for some recommenders.
recs = []
for u in xrange(self.num_users):
if show_progress and u%1000 == 0:
print u,'..',
recs.append(self.recommend_items(dataset,u,max_items,return_scores))
if show_progress:
print
return recs
def range_recommend_items(self,dataset,user_start,user_end,max_items=10,return_scores=True):
"""
Recommend new items for a range of users in the training dataset.
Assumes you've already called fit() to learn the similarity matrix.
Parameters
==========
dataset : scipy.sparse.csr_matrix
User-item matrix containing known items.
user_start : int
Index of first user in the range to recommend.
user_end : int
Index one beyond last user in the range to recommend.
max_items : int
Maximum number of recommended items to return.
return_scores : bool
If true return a score along with each recommended item.
Returns
=======
recs : list of lists
Each entry is a list of (idx,score) pairs if return_scores is True,
else just a list of idxs.
"""
# default implementation, you may be able to optimize this for some recommenders.
recs = []
for u in xrange(user_start,user_end):
recs.append(self.recommend_items(dataset,u,max_items,return_scores))
return recs
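# --- Illustrative sketch (an addition, not part of mrec) --------------------
# A tiny popularity-based implementation of the interface above, assuming a
# scipy.sparse.csr_matrix dataset as documented in the method docstrings. All
# names below (PopularityRecommender, item_counts) are hypothetical.
import numpy as np

class PopularityRecommender(BaseRecommender):

    def fit(self, dataset):
        # count how many users interacted with each item
        self.num_users = dataset.shape[0]
        self.item_counts = np.asarray((dataset > 0).sum(axis=0)).ravel()

    def recommend_items(self, dataset, u, max_items=10, return_scores=True):
        known = set(dataset[u].indices)  # items the user already has
        ranked = np.argsort(-self.item_counts)
        recs = [(int(i), float(self.item_counts[i])) for i in ranked
                if i not in known][:max_items]
        return recs if return_scores else [i for i, s in recs]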
# wisechengyi/pants | contrib/python/src/python/pants/contrib/python/checks/checker/variable_names.py
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import ast
import keyword
import re
from functools import wraps
import six
from pants.contrib.python.checks.checker.common import CheckstylePlugin
ALL_LOWER_CASE_RE = re.compile(r"^[a-z][a-z\d]*$")
ALL_UPPER_CASE_RE = re.compile(r"^[A-Z][A-Z\d]+$")
LOWER_SNAKE_RE = re.compile(r"^([a-z][a-z\d]*)(_[a-z\d]+)*$")
UPPER_SNAKE_RE = re.compile(r"^([A-Z][A-Z\d]*)(_[A-Z\d]+)*$")
UPPER_CAMEL_RE = re.compile(r"^([A-Z][a-z\d]*)+$")
RESERVED_NAMES = frozenset(keyword.kwlist)
BUILTIN_NAMES = dir(six.moves.builtins)
def allow_underscores(num):
def wrap(function):
@wraps(function)
def wrapped_function(name):
if name.startswith("_" * (num + 1)):
return False
return function(name.lstrip("_"))
return wrapped_function
return wrap
@allow_underscores(1)
def is_upper_camel(name):
"""UpperCamel, AllowingHTTPAbbrevations, _WithUpToOneUnderscoreAllowable."""
return bool(UPPER_CAMEL_RE.match(name) and not ALL_UPPER_CASE_RE.match(name))
@allow_underscores(2)
def is_lower_snake(name):
"""lower_snake_case, _with, __two_underscores_allowable."""
return LOWER_SNAKE_RE.match(name) is not None
def is_reserved_name(name):
return name in BUILTIN_NAMES or name in RESERVED_NAMES
def is_reserved_with_trailing_underscore(name):
"""For example, super_, id_, type_"""
if name.endswith("_") and not name.endswith("__"):
return is_reserved_name(name[:-1])
return False
def is_builtin_name(name):
"""For example, __foo__ or __bar__."""
if name.startswith("__") and name.endswith("__"):
return ALL_LOWER_CASE_RE.match(name[2:-2]) is not None
return False
@allow_underscores(2)
def is_constant(name):
return UPPER_SNAKE_RE.match(name) is not None
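# A few illustrative evaluations of the predicates above (values checked
# against the regexes; this comment block is an addition, not part of the
# plugin):
#
#   is_upper_camel("HttpServer")   -> True
#   is_upper_camel("HTTPSERVER")   -> False  (all-caps names are rejected)
#   is_lower_snake("__cache_key")  -> True   (up to two leading underscores)
#   is_builtin_name("__init__")    -> True
#   is_constant("_MAX_RETRIES")    -> True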
class PEP8VariableNames(CheckstylePlugin):
"""Enforces PEP8 recommendations for variable names.
Specifically:
UpperCamel class names
lower_snake / _lower_snake / __lower_snake function names
lower_snake expression variable names
CLASS_LEVEL_CONSTANTS = {}
GLOBAL_LEVEL_CONSTANTS = {}
"""
@classmethod
def name(cls):
return "variable-names"
CLASS_GLOBAL_BUILTINS = frozenset({"__slots__", "__metaclass__",})
def iter_class_methods(self, class_node):
for node in class_node.body:
if isinstance(node, ast.FunctionDef):
yield node
def iter_class_globals(self, class_node):
for node in class_node.body:
# TODO(wickman) Occasionally you have the pattern where you set methods equal to each other
# which should be allowable, for example:
# class Foo(object):
# def bar(self):
# pass
# alt_bar = bar
if isinstance(node, ast.Assign):
for name in node.targets:
if isinstance(name, ast.Name):
yield name
def nits(self):
class_methods = set()
all_methods = {
function_def
for function_def in ast.walk(self.python_file.tree)
if isinstance(function_def, ast.FunctionDef)
}
for class_def in self.iter_ast_types(ast.ClassDef):
if not is_upper_camel(class_def.name):
yield self.error("T000", "Classes must be UpperCamelCased", class_def)
for class_global in self.iter_class_globals(class_def):
if (
not is_constant(class_global.id)
and class_global.id not in self.CLASS_GLOBAL_BUILTINS
):
yield self.error(
"T001", "Class globals must be UPPER_SNAKE_CASED", class_global
)
if not class_def.bases or all(
isinstance(base, ast.Name) and base.id == "object" for base in class_def.bases
):
class_methods.update(self.iter_class_methods(class_def))
else:
# If the class is inheriting from anything that is potentially a bad actor, rely
# upon checking that bad actor out of band. Fixes PANTS-172.
for method in self.iter_class_methods(class_def):
all_methods.discard(method)
for function_def in all_methods - class_methods:
if is_reserved_name(function_def.name):
yield self.error("T801", "Method name overrides a builtin.", function_def)
# TODO(wickman) Only enforce this for classes that derive from object. If they
# don't derive object, it's possible that the superclass naming is out of its
# control.
for function_def in all_methods:
if not any(
(
is_lower_snake(function_def.name),
is_builtin_name(function_def.name),
is_reserved_with_trailing_underscore(function_def.name),
)
):
yield self.error("T002", "Method names must be lower_snake_cased", function_def)
# hckrtst/learnpython | py3_essential_training/19 Projects/testimonials/bwCGI.py
#!/usr/bin/python3
# bwCGI.py by Bill Weinman <http://bw.org/contact/>
# Copyright (c) 1995-2010 The BearHeart Group, LLC
#
from cgi import FieldStorage
import cgitb
import os
__version__ = '0.3.2'
_cookie_var = 'HTTP_COOKIE'
class bwCGI:
''' handy cgi stuff '''
_header_state = False # True after header has been sent
cgi_cookies = dict()
cgi_headers = dict()
def __init__(self, **kwargs):
self.set_header('Content-type', kwargs.get('content_type', 'text/html'))
if _cookie_var in os.environ:
self.parse_cookies()
def set_header(self, k, v):
'''
set a header
use str for single value, list for multiples values
'''
if k in self.cgi_headers:
if isinstance(self.cgi_headers[k], list): self.cgi_headers[k].append(v)
else: self.cgi_headers[k] = [ self.cgi_headers[k], v ]
else:
self.cgi_headers[k] = str(v)
return v
def get_header(self, k):
return self.cgi_headers.get(k, None)
def send_header(self):
''' send the header(s), only once '''
if self._header_state: return
for k in self.cgi_headers:
value = self.cgi_headers[k]
if isinstance(value, list):
for v in value: print('{}: {}'.format(k, v))
else:
print('{}: {}'.format(k, value))
print()
self._header_state = True
cgitb.enable() # only after the header has been sent
def set_cookie(self, key, value, **kwargs):
''' kwargs can include expires, path, or domain
'''
cookie = '{}={}'.format(str(key), str(value))
if kwargs.keys():
for k in kwargs.keys():
cookie = '{}; {}={}'.format(cookie, k, kwargs[k])
self.set_header('Set-Cookie', cookie)
def parse_cookies(self):
for ck in os.environ[_cookie_var].split(';'):
lhs, rhs = ck.strip().split('=')
self.cgi_cookies[lhs.strip()] = rhs.strip()
def get_cookies(self):
        return self.cgi_cookies
def get_cookie(self, key):
return self.cgi_cookies.get(key, None)
def linkback(self):
''' return a relative URI for use as a linkback to this script '''
for e in ( 'REQUEST_URI', 'SCRIPT_NAME' ):
if e in os.environ:
l = os.environ[e]
break
else: return '*** cannot make linkback ***'
if '?' in l: l = l[0:l.find('?')]
return os.path.basename(l)
def vars(self):
return FieldStorage()
# utility methods
def entity_encode(self, s):
''' convert unicode to XML entities
returns encoded string
'''
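        # Example (added illustration): entity_encode('Ω') returns '&#937;';
        # plain ASCII characters pass through unchanged.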
outbytes = bytearray()
for c in s:
if ord(c) > 127:
outbytes += bytes('&#{:d};'.format(ord(c)), encoding = 'utf_8')
else: outbytes.append(ord(c))
return str(outbytes, encoding = 'utf_8')
def test():
if _cookie_var not in os.environ:
os.environ[_cookie_var] = 'one=1; two=2; three=3'
cgi = bwCGI(content_type='text/plain')
cgi.set_header('X-bwCGI', __version__)
cgi.set_header('X-number', 42)
cgi.set_cookie('one', 1)
cgi.set_cookie('two', 2)
cgi.set_cookie('three', 3, path='/', expires='31-Dec-2010 23:59:59 GMT', domain='.bw.org')
cgi.set_cookie('five', 5)
cgi.send_header() # should only see one set of headers
cgi.send_header()
cgi.send_header()
print('Hello, CGI')
print('header X-bwCGI:', cgi.get_header('X-bwCGI'))
print('header Eggs:', cgi.get_header('Eggs'))
print('Cookies:')
print(sorted(cgi.get_cookies()))
print('cookie one:', cgi.get_cookie('one'))
print('cookie seven:', cgi.get_cookie('seven'))
if __name__ == '__main__': test()
# MMaus/mutils | mmnotebooks/bslip.py
from libshai import integro
from pylab import (norm, pi, hstack, vstack, array, sign, sin, cos, arctan2,
sqrt, zeros,
figure, subplot, plot, legend, xlabel, ylabel)
from numpy import float64
from copy import deepcopy
import mutils.io as mio
import fastode # local!
class SimulationError(Exception):
pass
class BSLIP(mio.saveable):
""" Class of the bipedal walking SLIP """
def __init__(self, params=None, IC=None):
"""
The BSLIP is a bipedal walking SLIP model.
        params (mutils.misc.Struct): parameters of the model
IC (array): initial conditions. [x, y, z, vx, vy, vz]
*NOTE* the system starts in single stance and *must* have
positive vertical velocity ("vy > 0")
"""
super(BSLIP, self).__init__()
self.params = deepcopy(params)
self.state = deepcopy(IC)
self.odd_step = True # leg 1 or leg two on ground?
self.dt = .01
self.odess = fastode.FastODE('bslipss')
self.odeds = fastode.FastODE('bslipds')
self.buf = zeros((2000, self.odess.WIDTH), dtype=float64)
self.t = 0
self.t_td = 0
self.t_to = 0
self.singleStance = True
self.failed = False
self.skip_forces = False
self.ErrMsg = ""
# storage for ode solutions
self.feet1_seq = []
self.feet2_seq = []
self.t_ss_seq = []
self.t_ds_seq = []
self.y_ss_seq = []
self.y_ds_seq = []
self.forces_ss_seq = []
self.forces_ds_seq = []
self.DEBUG = False
if self.params is not None:
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
def init_ode(self):
""" re-initialize the ODE solver """
self.ode = integro.odeDP5(self.dy_Stance, pars=self.params)
self.ode.ODE_RTOL = 1e-9
def restore(self, filename):
"""
update the restore procedure: re-initialize the ODE solver!
:args:
filename (str): the filename where the model information is stored
"""
super(BSLIP, self).restore(filename)
self.ode = integro.odeDP5(self.dy_Stance, pars=self.params)
self.ode.ODE_RTOL = 1e-9
def legfunc1(self, t, y, pars):
"""
Force (scalar) function of leg 1: Here, spring function
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (dict): parameters of the model. Must include
'foot1' (3x float) foot1 position
'lp1' (4x float) parameters of leg 1
:returns:
            f (float): the axial leg force ["f = k * (l0 - l)"]
NOTE: Overwrite this function to get different models.
The signature must not change.
"""
#DEBUG:
#print 'pf1: ', pars['foot1']
l1 = norm(array(y[:3]) - array(pars['foot1']))
return -pars['lp1'][0] * (l1 - pars['lp1'][1])
def legfunc2(self, t, y, pars):
"""
leg function of leg 2: a spring function
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (dict): parameters of the model. Must include
'foot1' (3x float) foot1 position
'lp1' (4x float) parameters of leg 1
:returns:
            f (float): the axial leg force ["f = k * (l0 - l)"]
NOTE: Overwrite this function to get different models.
The signature must not change.
"""
l2 = norm(array(y[:3]) - array(pars['foot2']))
return -pars['lp2'][0] * (l2 - pars['lp2'][1])
def evt_vy0(self, t, states, traj, p):
"""
triggers the vy=0 event
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
:returns:
(bool) vy=0 detected? (both directions)
"""
return sign(states[0][4]) * sign(states[1][4]) != 1
def update_params_ss(self):
"""
Updates the model parameters in the single stance vy=0 event.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_ds(self):
"""
Updates the model parameters in the double stance vy=0 event.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_td(self):
"""
Updates the model parameters at touchdown events.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_to(self):
"""
Updates the model parameters at takeoff events.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def takeoff_event(self, t, states, traj, pars, legfun):
"""
triggers the take off of a leg
Hint: use a lambda function to adapt the call signature
This function is force-triggered. The parameter format (pars) must
be the same as for legfun (which is called from here!)
*NOTE* you can overwrite this method for derived models. However,
this is not required if the takeoff condition is "zero force".
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
pars (<any>): the leg functions parameters
legfun (function of (t, y, pars) ): the leg force function.
:returns:
(bool) takeoff detected? (force has falling zero crossing)
"""
F0 = legfun(t[0], states[0], pars)
F1 = legfun(t[1], states[1], pars)
return F0 > 0 and F1 <= 0
def touchdown_event(self, t, states, traj, pars):
"""
triggers the touchdown of the leading leg.
Hint: use a lambda function to adapt the call signature
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
            pars (4x float): the leg functions parameters. Format:
                [l0, alpha, beta, floorlevel]
        :returns:
            (bool) touchdown detected? (foot height has a falling zero crossing)
"""
def zfoot(state, pars):
foot = state[1] - pars[0] * sin(pars[1])
return foot - pars[3]
return zfoot(states[0], pars) > 0 and zfoot(states[1], pars) <= 0
def touchdown_event_refine(self, t, state, pars):
"""
The touchdown event function for refinement of touchdown detection.
The zero-crossing of the output is defined as instant of the event.
Hint: use a lambda function to adapt the call signature
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (4x float): the leg functions parameters. Format:
[l0, alpha, beta, floorlevel]
:returns:
            f (float): the height of the foot above the ground (its zero
            crossing defines the touchdown event)
"""
foot = state.squeeze()[1] - pars[0] * sin(pars[1])
return foot - pars[3] # foot - ground level
def dy_Stance(self, t, y, pars, return_force = False):
"""
        This is the ODE function that is passed to the solver. Internally, it calls:
legfunc1 - force of leg 1 (overwrite for new models)
legfunc2 - force of leg 2 (overwrite for new models)
:args:
t (float): simulation time
y (6x float): CoM state
pars (dict): parameters, will be passed to legfunc1 and legfunc2.
must also include 'foot1' (3x float), 'foot2' (3x float), 'm' (float)
and 'g' (3x float) indicating the feet positions, mass and direction of
gravity, respectively.
return_force (bool, default: False): return [F_leg1, F_leg2] (6x
float) instead of dy/dt.
"""
f1 = max(self.legfunc1(t, y, pars), 0) # only push
l1 = norm(array(y[:3]) - array(pars['foot1']))
f1_vec = (array(y[:3]) - array(pars['foot1'])) / l1 * f1
f2 = max(self.legfunc2(t, y, pars), 0) # only push
l2 = norm(array(y[:3]) - array(pars['foot2']))
f2_vec = (array(y[:3]) - array(pars['foot2'])) / l2 * f2
if return_force:
return hstack([f1_vec, f2_vec])
return hstack([y[3:], (f1_vec + f2_vec) / pars['m'] + pars['g']])
def get_touchdown(self, t, y, params):
"""
Compute the touchdown position of the leg. Overwrite this for different leg parameters!
:args:
t (float): time
y (6x float): state of the CoM
params (4x float): leg parameter: stiffness, l0, alpha, beta
:returns:
[xFoot, yFoot, zFoot] the position of the leg tip
"""
k, l0, alpha, beta = params
xf = y[0] + l0 * cos(alpha) * cos(beta)
yf = y[1] - l0 * sin(alpha)
zf = y[2] - l0 * cos(alpha) * sin(beta)
return array([xf, yf, zf])
def checkSim(self):
"""
Raises an error if the model failed.
Overwrite in derived classes to avoid raised errors.
"""
if self.failed:
raise SimulationError("simulation failed!")
def do_step(self):
"""
Performs a step from the current state, using the current parameters.
The simulation results are also stored in self.[y|t]_[s|d]s_seq,
the states and times of single and double support phases.
*requires*:
self.
- params (dict): model and leg function parameters
- odd_step (bool): whether or not to trigger contact of leg2 (leg1 otherwise)
- state (6x float): the initial state
:args:
(None)
:returns:
t_ss, y_ss, t_ds, y_ds: time and simulation results for single stance and double stance
phases
:raises:
TypeError - invalid IC or parameter
SimulationError - if the simulation fails.
"""
        # test initial conditions:
        # test whether there is a current state and current parameters
if self.params is None:
raise TypeError("parameters not set")
if self.state is None:
raise TypeError("state (initial condition) not set")
if self.failed:
raise SimulationError("Simulation failed previously.")
#demo_p_reduced = [13100, 12900, 68.5 * pi / 180., -.05] # [k1, k2, alpha, beta]
#demo_p = { 'foot1' : [0, 0, 0],
# 'foot2' : [-1.5, 0, 0],
# 'm' : 80,
# 'g' : [0, -9.81, 0],
# 'lp1' : [13100, 1, 68.5 * pi / 180, -0.05], # leg params: stiffness, l0, alpha, beta
# 'lp2' : [12900, 1, 68.5 * pi / 180, 0.1],
# 'delta_beta' : .05
# }
p = self.params # shortcut
leadingleg = 1. if self.odd_step else 2.
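        # layout of the flat parameter vector passed to the compiled ODE
        # (documented here for readability; derived from the code below):
        #   pars[0:2]   = k1, k2 (leg stiffnesses)
        #   pars[2:4]   = alpha1, alpha2 (leg angles of attack)
        #   pars[4:6]   = l01, l02 (leg rest lengths)
        #   pars[6:8]   = beta1, beta2 (lateral leg angles)
        #   pars[8]     = m (mass), pars[9] = g_y (vertical gravity)
        #   pars[10:13] = foot1 position, pars[13:16] = foot2 position
        #   pars[16]    = leading leg (1. or 2.)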
pars = [p['lp1'][0],
p['lp2'][0],
p['lp1'][2],
p['lp2'][2],
p['lp1'][1],
p['lp2'][1],
p['lp1'][3],
p['lp2'][3],
p['m'],
p['g'][1],
p['foot1'][0],
p['foot1'][1],
p['foot1'][2],
p['foot2'][0],
p['foot2'][1],
p['foot2'][2],
leadingleg]
# maximal time for simulation of single stance or double stance (each)
max_T = 1.
# run single stance
self.buf[0, 1:] = array(self.state) #.copy()
N = self.odess.odeOnce(self.buf, self.t + max_T, dt=1e-3, pars = pars)
self.state = self.buf[N,1:].copy()
self.y_ss_seq.append(self.buf[:N+1, 1:].copy())
self.t_ss_seq.append(self.buf[:N+1,0].copy())
# quick sanity check: simulation time not exceeded?
if self.buf[N,0] - self.t >= max_T - 1e-2:
self.failed=True
print "N=", N
raise SimulationError("Maximal simulation time (single stance) reached!")
self.t = self.buf[N,0]
# touchdown detected:
# update foot parameters
# (1) foot2 = foot1
# (2) foot1 = [NEW]
# (3) leading_leg = ~leading_leg
# update leg positions; change trailing leg
y = self.state # shortcut
vx, vz = y[3], y[5]
a_v_com = -arctan2(vz, vx) # correct with our coordinate system
pars[13] = pars[10]
pars[15] = pars[12]
if pars[16] == 1.:
# stance leg is leg 1 -> update leg 2 params
pars[10] = y[0] + cos(pars[3]) * cos(pars[7] + a_v_com) * pars[5]
pars[12] = y[2] - cos(pars[3]) * sin(pars[7] + a_v_com) * pars[5]
#pars[13] = res[N, 1] + cos(pars[3])*cos(pars[7])*pars[5]
#pars[15] = res[N, 3] + cos(pars[3])*sin(pars[7])*pars[5]
pars[16] = 2.;
else:
pars[10] = y[0] + cos(pars[2]) * cos(pars[6] + a_v_com) * pars[4]
pars[12] = y[2] - cos(pars[2]) * sin(pars[6] + a_v_com) * pars[4]
#pars[10] = res[N, 1] + cos(pars[2])*cos(pars[6])*pars[4]
#pars[12] = res[N, 3] + cos(pars[2])*sin(pars[6])*pars[4]
pars[16] = 1.;
self.params['foot1'] = pars[10:13][:]
self.params['foot2'] = pars[13:16][:]
# run double stance
self.buf[0, 1:] = array(self.state) #.copy()
N = self.odeds.odeOnce(self.buf, self.t + max_T, dt=1e-3, pars = pars)
self.state = self.buf[N,1:].copy()
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
self.y_ds_seq.append(self.buf[:N+1, 1:].copy())
self.t_ds_seq.append(self.buf[:N+1,0].copy())
# quick sanity check: simulation time not exceeded?
if self.buf[N,0] - self.t >= max_T - 1e-2:
self.failed=True
raise SimulationError("Maximal simulation time (double stance) reached!")
self.t = self.buf[N,0]
#self.y_ds_seq.append(y2)
#self.t_ds_seq.append(t2)
self.odd_step = not self.odd_step
return self.t_ss_seq[-1], self.y_ss_seq[-1], self.t_ds_seq[-1], self.y_ds_seq[-1]
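        # NOTE: everything below this return is unreachable legacy code from
        # the earlier pure-Python ODE implementation (it still references the
        # removed local variable `ground`); it is kept only for reference.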
if self.odd_step:
td_pars = self.params['lp2'][1:] + [ground, ] # set touchdown parameters
td_pars_2 = self.params['lp2'] # another format of touchdown parameters (for get_touchdown)
newfoot = 'foot2' # which foot position to update?
to_evt_fun = self.legfunc1 # force generation for takeoff trigger in double support
to_evt_ds_refine = self.legfunc1 # function for refinement of DS
self.odd_step = False # next step is "even": leg "2" in single stance on ground
else:
td_pars = self.params['lp1'][1:] + [ground, ] # set touchdown parameters
td_pars_2 = self.params['lp1'] # another format of touchdown parameters (for get_touchdown)
newfoot = 'foot1' # which foot position to update?
to_evt_fun = self.legfunc2 # force generation for takeoff trigger in double support
to_evt_ds_refine = self.legfunc2 # function for refinement of DS
self.odd_step = True # next step is "odd": leg "1" in single stance on ground
# stage 1a: simulate until vy=0
self.singleStance = True
self.ode.event = self.evt_vy0
if self.state[4] <= 0:
self.failed = True
self.ErrMsg = ("initial vertical velocity < 0: single " +
"stance apex cannot be reached!")
t0 = self.t
tE = t0 + max_T
t_a, y_a = self.ode(self.state, t0, tE, dt=self.dt)
#d_pars_l2 = self.params['lp2'][1:] + [ground, ]
if self.DEBUG:
print "finished stage 1 (raw)"
if t_a[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. simulation time exceeded - " +
"this often indicates simulation failure")
else:
tt1, yy1 = self.ode.refine(lambda tf, yf: yf[4])
if self.DEBUG:
print "finished stage 1 (fine)"
self.state = yy1
# compute forces
if not self.skip_forces:
forces_ss = [self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t_a, y_a)]
#self.forces_ss_seq.append()
t = [] # dummy, if next step is not executed
y = array([[]])
if not self.failed:
self.update_params_ss()
# stage 1b: simulate until touchdown of leading leg
# touchdown event of leading leg
self.ode.event = lambda t,states,traj,p: self.touchdown_event(t, states, traj, td_pars)
t0 = tt1
tE = t0 + max_T
t, y = self.ode(self.state, t0, tE, dt=self.dt)
if self.DEBUG:
print "finished stage 2 (raw)"
if t[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. sim time exceeded in single stance - no "
+ "touchdown occurred")
else:
#d_pars_l2 = self.params['lp2'][1:] + [ground, ]
tt, yy = self.ode.refine(lambda tf, yf: self.touchdown_event_refine(tf, yf, td_pars))
if self.DEBUG:
print "finished stage 2 (fine)"
self.state = yy
forces_ss.extend([self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t[1:], y[1:, :])])
if not self.skip_forces:
self.forces_ss_seq.append(vstack(forces_ss))
if not self.failed:
# allow application of control law
self.t_td = tt
self.singleStance = False
self.update_params_td()
# accumulate results from stage 1a and stage 1b
if not self.failed:
t = hstack([t_a, t[1:]])
y = vstack([y_a, y[1:, :]])
# stage 2: double support
# compute leg 2 touchdown position
t2_a = []
y2_a = array([[]])
if not self.failed:
xf, yf, zf = self.get_touchdown(tt, yy, td_pars_2)
self.params[newfoot] = [xf, yf, zf]
# stage 2a: simulate until vy=0
self.ode.event = self.evt_vy0
t0 = tt
tE = t0 + max_T
t2_a, y2_a = self.ode(self.state, t0, tE, dt=self.dt)
if t2_a[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. sim time exceeded - no nadir event " +
"detected in double stance")
if self.DEBUG:
print "finished stage 3 (raw)"
else:
tt2, yy2 = self.ode.refine(lambda tf, yf: yf[4])
if self.DEBUG:
print "finished stage 3 (fine)"
self.state = yy2
if not self.skip_forces:
forces_ds = [self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t2_a, y2_a)]
if not self.failed:
# allow application of control law
self.update_params_ds()
# stage 2b: double stance - simulate until takeoff of trailing leg
# define and solve double stance ode
#ode = integro.odeDP5(self.dy_Stance, pars=self.params)
# event is takeoff of leg 1
t2_b = []
y2_b = array([[]])
if not self.failed:
self.ode.event = lambda t,states,traj,p: self.takeoff_event(t,
states, traj, p, legfun=to_evt_fun)
t0 = tt2
tE = t0 + max_T
t2_b, y2_b = self.ode(self.state, t0, tE, dt=self.dt)
if t2_b[-1] >= tE:
self.failed = True
self.ErrMsg = ("sim. time exeeded - takeoff of trailing leg " +
"not detected")
if self.DEBUG:
print "finished stage 4 (raw)"
else:
# refinement: force reaches zero
tt, yy = self.ode.refine(lambda tf, yf: to_evt_ds_refine(tf, yf, self.params))
if self.DEBUG:
print "finished stage 4 (fine)"
self.state = yy
if not self.skip_forces:
forces_ds.extend([self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t2_b[1:], y2_b[1:, :])])
self.forces_ds_seq.append(vstack(forces_ds))
# allow application of control law
self.t_to = tt
self.singleStance = True
self.update_params_to()
# accumulate results from stage 1a and stage 1b
if not self.failed:
t2 = hstack([t2_a, t2_b[1:]])
y2 = vstack([y2_a, y2_b[1:, :]])
#store simulation results
if not self.failed:
self.y_ss_seq.append(y)
self.y_ds_seq.append(y2)
self.t_ss_seq.append(t)
self.t_ds_seq.append(t2)
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
if not self.failed:
if len(t2) > 0:
self.t = t2[-1]
if self.failed:
raise SimulationError(self.ErrMsg)
return t, y, t2, y2
class BSLIP_newTD(BSLIP):
""" derived from BSLIP. The get_touchdown function is overwritten
such that the leg placement is w.r.t. walking direction.
*NOTE* This is also a show-case how to use inheritance for modelling here.
"""
def get_touchdown(self, t, y, params):
"""
Compute the touchdown position of the leg w.r.t. CoM velocity
:args:
t (float): time
y (6x float): state of the CoM
params (4x float): leg parameter: stiffness, l0, alpha, beta
:returns:
[xFoot, yFoot, zFoot] the position of the leg tip
"""
k, l0, alpha, beta = params
vx, vz = y[3], y[5]
a_v_com = -arctan2(vz, vx) # correct with our coordinate system
#for debugging
#print "v_com_angle:", a_v_com * 180. / pi
xf = y[0] + l0 * cos(alpha) * cos(beta + a_v_com)
yf = y[1] - l0 * sin(alpha)
zf = y[2] - l0 * cos(alpha) * sin(beta + a_v_com)
#for debugging
#print "foot: %2.3f,%2.3f,%2.3f," % ( xf,yf, zf)
return array([xf, yf, zf])
def ICeuklid_to_ICcircle(IC):
"""
    converts from IC in Euclidean space to IC in circle parameters (rotationally invariant).
The formats are:
IC_euklid: [x, y, z, vx, vy, vz]
IC_circle: [y, vy, |v|, |l|, phiv], where |v| is the magnitude of CoM velocity, |l|
is the distance from leg1 (assumed to be at [0,0,0]) to CoM, and phiv the angle
of the velocity in horizontal plane wrt x-axis
*NOTE* for re-conversion, the leg position is additionally required
:args:
IC (6x float): the initial conditions in euklidean space
:returns:
IC (5x float): the initial conditions in circular coordinates
"""
x,y,z,vx,vy,vz = IC
v = sqrt(vx**2 + vy**2 + vz**2)
l = sqrt(x**2 + y**2 + z**2)
#phiv = arctan2(vz, vx)
#phiv = arctan2(-vz, vx)
phiv = -arctan2(-vz, vx)
#phix = arctan2(-z, -x)
phix = arctan2(z, -x)
# warnings.warn('TODO: fix phi_x (add)')
# print "phix:", phix * 180 / pi
return [y, vy, v, l, phiv + phix]
def ICcircle_to_ICeuklid(IC):
"""
    converts from IC in circle parameters to IC in Euclidean space (rotationally invariant).
The formats are:
IC_euklid: [x, y, z, vx, vy, vz]
IC_circle: [y, vy, |v|, |l|, phiv], where |v| is the magnitude of CoM velocity, |l|
is the distance from leg1 (assumed to be at [0,0,0]) to CoM, and phiv the angle
of the velocity in horizontal plane wrt x-axis
*NOTE* for re-conversion, the leg position is additionally required, assumed to be [0,0,0]
Further, it is assumed that the axis foot-CoM points in x-axis
:args:
IC (5x float): the initial conditions in circular coordinates
:returns:
IC (6x float): the initial conditions in euklidean space
"""
y, vy, v, l, phiv = IC
z = 0
xsq = l**2 - y**2
if xsq < 0:
raise RuntimeError('Error in initial conditions: y > l!')
x = -sqrt(xsq)
vhsq = v**2 - vy**2
if vhsq < 0:
raise RuntimeError('Error in initial conditions: |vy| > |v|!')
v_horiz = sqrt(vhsq)
vx = v_horiz * cos(phiv)
#vz = v_horiz * sin(phiv)
vz = v_horiz * sin(phiv)
return [x, y, z, vx, vy, vz]
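# A minimal round-trip sketch (an added illustration, not part of the original
# module): converting a Euclidean state to circle coordinates and back should
# reproduce it up to the conventions noted above (foot1 at the origin, the
# foot-CoM axis along -x). The numbers are made up for demonstration.
#
#   >>> IC = [-0.15, 0.93, 0., 1.17, 0.59, -0.05]
#   >>> circ = ICeuklid_to_ICcircle(IC)     # [y, vy, |v|, |l|, phiv]
#   >>> back = ICcircle_to_ICeuklid(circ)
#   >>> # y, vy and the magnitudes |v|, |l| are preserved exactly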
def circ2normal_param(fixParams, P):
"""
    converts the set (fixParams, P) to a full parameter set for
    a BSLIP model.
:args:
fixParams (dict): set of parameters for BSLIP, plus "delta_beta" key
P [4x float]: step parameters k1, k2, alpha, beta (last two: for both legs)
"""
k1, k2, alpha, beta = P
par = deepcopy(fixParams)
par['foot1'] = [0, 0, 0]
par['foot2'] = [-2*par['lp2'][1], 0, 0] # set x to some very negative value
par['lp1'][0] = k1
par['lp2'][0] = k2
par['lp1'][2] = par['lp2'][2] = alpha
par['lp1'][3] = beta
par['lp2'][3] = -beta + par['delta_beta']
return par
def pred_to_p(baseParams, P):
"""
    converts the set (fixParams, P) to a full parameter set for
    a BSLIP model.
:args:
fixParams (dict): set of parameters for BSLIP
P [8x float]: step parameters k1, k2, alpha1, alpha2, beta1, beta2,
l01, l02
"""
k1, k2, a1, a2, b1, b2, l01, l02 = P
par = deepcopy(baseParams)
par['foot1'] = [0, 0, 0]
par['foot2'] = [-2*par['lp2'][1], 0, 0] # set x to some very negative value
par['lp1'][0] = k1
par['lp2'][0] = k2
par['lp1'][1] = l01
par['lp2'][1] = l02
par['lp1'][2] = a1
par['lp2'][2] = a2
par['lp1'][3] = b1
par['lp2'][3] = b2
return par
def new_stridefunction(fixParams):
""" returns a function that maps [IC, P] -> [FS],
in the BSLIP_newTD model
where IC: (reduced) initial conditions
P: reduced parameter vector (4x float)
FS: final state
"""
model = BSLIP_newTD(fixParams,[0,0,0,0,0,0])
    model.skip_forces = True  # speed up simulation a little bit
def stridefun(IC, P):
""" performs a stride of the given model.
:args:
IC: (reduced) initial conditions: [y, vy, v, l, phiv]
P: (reduced) parameter set: [k1, k2, alpha, beta]
:returns:
FS: final state, same format as initial conditions
"""
full_IC = ICcircle_to_ICeuklid(IC)
par = circ2normal_param(fixParams, P)
model.state = full_IC
model.params = par
model.init_ode()
model.do_step()
model.do_step()
fs = model.state.copy() # final state of simulation
fs[:3] -= model.params['foot1'] # set origin to location of foot1 (which is on ground)
return array(ICeuklid_to_ICcircle(fs))
return stridefun
def stridefunction(fixParams):
""" returns a function that maps [IC, P] -> [FS],
in the BSLIP_newTD model
where IC: (reduced) initial conditions
P: reduced parameter vector (8x float): k1, k2, a1, a2, b1, b2, l01,
l02
FS: final state
"""
model = BSLIP_newTD(fixParams,[0,0,0,0,0,0])
    model.skip_forces = True  # speed up simulation a little bit
def stridefun2(IC, P):
""" performs a stride of the given model.
:args:
IC: (reduced) initial conditions: [y, vy, v, l, phiv]
P: (reduced) parameter set: (k1, k2, a1, a2, b1, b2, l01, l02)
:returns:
FS: final state, same format as initial conditions
"""
full_IC = ICcircle_to_ICeuklid(IC)
par = pred_to_p(fixParams, P)
model.state = full_IC
model.params = par
model.init_ode()
model.do_step()
model.do_step()
fs = model.state.copy() # final state of simulation
fs[:3] -= model.params['foot1'] # set origin to location of foot1 (which is on ground)
return array(ICeuklid_to_ICcircle(fs))
return stridefun2
def vis_sim(mdl):
"""
quick hack that visualizes the simulation results from a model
:args:
mdl (BSLIP): model that has run some steps
"""
# visualize
fig = figure(figsize=(18,8))
fig.clf()
subplot(1,2,1)
rep = 0
for ys, yd, f1, f2 in zip(mdl.y_ss_seq, mdl.y_ds_seq, mdl.feet1_seq[1:], mdl.feet2_seq[1:]):
label1 = label2 = label3 = label4 = None
if rep == 0:
label1 = 'single stance'
label2 = 'double stance'
label3 = 'foot leg#1'
label4 = 'foot leg#2'
plot(ys[:, 0], ys[:, 1], 'b-', linewidth=1, label=label1)
plot(yd[:, 0], yd[: ,1], 'g-', linewidth=3, label=label2)
plot(f1[0], f1[1], 'kd', label=label3)
plot(f2[0], f2[1], 'cd', label=label4)
rep += 1
legend(loc='best')
xlabel('horizontal position [m]')
ylabel('vertical position [m]')
subplot(1,2,2)
rep = 0
for ys, yd, f1, f2 in zip(mdl.y_ss_seq, mdl.y_ds_seq, mdl.feet1_seq[1:], mdl.feet2_seq[1:]):
label1 = label2 = label3 = label4 = None
if rep == 0:
label1 = 'single stance'
label2 = 'double stance'
label3 = 'foot leg#1'
label4 = 'foot leg#2'
plot(ys[:, 0], ys[:, 2], 'r-', linewidth=1, label=label1)
plot(yd[:, 0], yd[: ,2], 'm-', linewidth=3, label=label2)
plot(f1[0], f1[2], 'kd', label=label3)
plot(f2[0], f2[2], 'cd', label=label4)
rep += 1
legend(loc='best')
#axis('equal')
xlabel('horizontal position [m]')
ylabel('lateral position [m]')
return fig
# define some example values
demo_p_reduced = [13100, 12900, 68.5 * pi / 180., -.05] # [k1, k2, alpha, beta]
demo_p = { 'foot1' : [0, 0, 0],
'foot2' : [-1.5, 0, 0],
'm' : 80,
'g' : [0, -9.81, 0],
'lp1' : [13100, 1, 68.5 * pi / 180, -0.05], # leg params: stiffness, l0, alpha, beta
'lp2' : [12900, 1, 68.5 * pi / 180, 0.1],
'delta_beta' : .05
}
demo_IC = array([-0.153942, 0.929608, 0, 1.16798, 0.593798, -0.045518])
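# A minimal usage sketch (added illustration; assumes the compiled 'bslipss'
# and 'bslipds' fastode modules are available). It runs two steps of the demo
# model and plots them:
#
#   mdl = BSLIP_newTD(demo_p, demo_IC)
#   for _ in range(2):
#       mdl.do_step()
#   fig = vis_sim(mdl)
#
# The reduced stride map can be evaluated the same way:
#
#   stridefun = new_stridefunction(demo_p)
#   FS = stridefun(ICeuklid_to_ICcircle(demo_IC), demo_p_reduced)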
# hyphaltip/cndtools | util/runGeneid.py
#!/usr/bin/env python
# Copyright (c) 2006
# Colin Dewey (University of Wisconsin-Madison)
# [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
import os
from optparse import OptionParser
import FASTA
import GFF
usage = "usage: %prog [options] < fastaInput > gffOutput"
optparser = OptionParser(usage)
optparser.add_option("-b", "--binary", dest="bin",
help="Path to Geneid binary",
default="geneid")
optparser.add_option("-s", "--seglength", type="int", dest="segLength",
help="break large sequences into SEGLENGTH size pieces",
default=100000000)
optparser.add_option("-p", "--paramfile", dest="paramFilename",
help="use PARAMFILE for parameters",
default="/usr/local/apps/geneid/param/human1iso.param",
metavar="PARAMFILE")
(options, args) = optparser.parse_args()
if len(args) != 0:
optparser.error("incorrect number of arguments")
def runGeneid(rec,
param="/usr/local/apps/geneid/param/human1iso.param",
segLength=100000000,
bin="geneid",
options=None):
if not options:
options = []
if "-G" not in options:
options.append("-G")
optString = ' '.join(options)
seqFilename = os.tmpnam()
cmd = "%(bin)s %(optString)s -P %(param)s %(seqFilename)s" % vars()
gffRecs = []
subrec = FASTA.Record()
subrec.title = rec.title
for i in range(((len(rec.sequence) - 1)/ segLength) + 1):
subrec.sequence = rec.sequence[i * segLength: (i + 1) * segLength]
seqFile = file(seqFilename, 'w')
seqFile.write(str(subrec))
seqFile.close()
for line in os.popen(cmd):
if not line or line.startswith('#'):
continue
fields = line.rstrip().split('\t')
cdsRec = GFF.Record(seqname=rec.title,
source="geneid",
feature="CDS",
start=int(fields[3]) + i * segLength,
end=int(fields[4]) + i * segLength,
score=float(fields[5]),
strand=fields[6],
frame=fields[7],
attributes={"gene_id": [fields[8]],
"transcript_id": [fields[8] + ".1"]})
exonType = fields[2]
if exonType in ["First", "Single"]:
startCodonRec = cdsRec.copy()
startCodonRec.feature = "start_codon"
startCodonRec.score = None
startCodonRec.frame = None
if cdsRec.strand == '+':
startCodonRec.end = startCodonRec.start + 2
else:
startCodonRec.start = startCodonRec.end - 2
gffRecs.append(startCodonRec)
exonRec = cdsRec.copy()
exonRec.feature = "exon"
exonRec.frame = None
gffRecs.append(exonRec)
gffRecs.append(cdsRec)
if exonType in ["Terminal", "Single"]:
stopCodonRec = cdsRec.copy()
stopCodonRec.feature = "stop_codon"
stopCodonRec.score = None
stopCodonRec.frame = None
if cdsRec.strand == '+':
stopCodonRec.start = stopCodonRec.end - 2
else:
stopCodonRec.end = stopCodonRec.start + 2
gffRecs.append(stopCodonRec)
try:
os.remove(seqFilename)
except OSError, e:
sys.stderr.write("Could not delete temporary file %s: %s" % \
(seqFilename, str(e)))
return gffRecs
for rec in FASTA.Iterator(sys.stdin):
print >>sys.stderr, rec.title
gffRecs = runGeneid(rec,
bin=options.bin,
param=options.paramFilename,
segLength=options.segLength)
for gffRec in gffRecs:
print gffRec
# burz/simcom | src/parser.py
import symbol_table
import syntax_tree
import interpreter
negated_relation = { '=' : '#', '#' : '=', '<' : '>=', '>' : '<=', '<=' : '>', '>=' : '<' }
class Parser_error(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return "error: {}".format(self.error)
class Parser(object):
def parse_tokens(self, tokens):
self.tokens = tokens
self.symbol_table = symbol_table.Symbol_table()
self.position = 0
self.in_expression = 0
instructions = self.Program()
if not instructions:
raise Parser_error("There is no 'PROGRAM' declared")
return instructions, self.symbol_table
def token(self):
if self.position >= len(self.tokens):
return False
return self.tokens[self.position]
def token_type(self):
if self.position >= len(self.tokens):
return False
return self.tokens[self.position].token_type
def token_line(self):
if self.position >= len(self.tokens):
return False
return self.tokens[self.position].line
def next_token(self):
self.position += 1
def type_check_binary_operation(self, operator, expression_left, expression_right, line):
if not type(expression_right.type_object) in [symbol_table.Integer, symbol_table.Constant]:
raise Parser_error("The expression to the left of the '{}' on line {} is not an INTEGER".
format(operator, line))
if not type(expression_left.type_object) in [symbol_table.Integer, symbol_table.Constant]:
raise Parser_error("The expression to the right of the '{}' on line {} is not an INTEGER".
format(operator, line))
def Program(self):
if not self.token_type() == 'PROGRAM':
return False
line = self.token_line()
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The 'PROGRAM' on line {} is not followed by an identifier".format(line))
if not self.token_type() == ';':
raise Parser_error("PROGRAM '{}' on line {} is not followed by a ';'".format(
identifier.data, identifier.line))
self.next_token()
self.Declarations()
instructions = False
if self.token_type() == 'BEGIN':
begin_line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'BEGIN' on line {} is not followed by any Instructions".format(
begin_line))
if not self.token_type() == 'END':
raise Parser_error("The 'PROGRAM' on line {} is not terminated by an 'END'".format(line))
end_line = self.token_line()
self.next_token()
final_id = self.identifier()
if not final_id:
raise Parser_error("The 'END' on line {} is not followed by a program name to close".format(
end_line))
if not final_id.data == identifier.data:
raise Parser_error(
"The name of the program on line {} does not match the name of it's closing on line {}".
format(identifier.line, final_id.line))
if not self.token_type() == '.':
raise Parser_error("The program closing on line {} is not followed by a '.'".format(end_line))
self.next_token()
return syntax_tree.Syntax_tree(instructions)
def Declarations(self):
self.in_procedure = False
self.forward_declarations = {}
self.call_type_checks = []
self.argument_number_checks = []
while self.ConstDecl() or self.TypeDecl() or self.VarDecl() or self.ProcDecl():
pass
if self.forward_declarations:
error = ''
for name, call in self.forward_declarations:
error += " The function '{}' on line {} has not been defined\n".format(name, call.line)
raise Parser_error(error[7:-1])
for check in self.call_type_checks:
if not type(check.type_object) is symbol_table.Integer:
raise Parser_error("The call to '{}' on line {} must result in an INTEGER".format(
check.definition.name, check.line))
def ConstDecl(self):
if not self.token_type() == 'CONST':
return False
self.next_token()
while True:
identifier = self.identifier()
if not identifier:
return True
if not self.token_type() == '=':
raise Parser_error("The constant declaration of '{}' on line {} is not followed by a '='".
format(identifier.data, identifier.line))
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error(
"The constant declaration of '{}' on line {} is not followed by an Expression".
format(identifier.data, identifier.line))
if not type(expression.type_object) is symbol_table.Integer:
raise Parser_error(
"The expression following the constant declaration of '{}' on line {} is not an INTEGER".
format(identifier.data, identifier.line))
value = interpreter.Interpreter.evaluate_expression(interpreter.Interpreter(), expression)
if not self.token_type() == ';':
raise Parser_error("The constant declaration of '{}' on line {} is not followed by a ';'".
format(identifier.data, identifier.line))
self.next_token()
constant = symbol_table.Constant(self.symbol_table.integer_singleton, value, expression.line)
if not self.symbol_table.insert(identifier.data, constant):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error("The constant delaration of '{}' on line {} ".format(
identifier.data, identifier.line) +
"conflicts with the previous declaration on line {}".format(
previous_definition.line))
return True
def TypeDecl(self):
if not self.token_type() == 'TYPE':
return False
self.next_token()
while True:
identifier = self.identifier()
if not identifier:
return True
if not self.token_type() == '=':
raise Parser_error("The type declaration of '{}' on line {} is not followed by a '='".
format(identifier.data, identifier.line))
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The type declaration of '{}' on line {} is not followed by a Type".
format(identifier.data, identifier.line))
if not self.token_type() == ';':
raise Parser_error("The type declaration of '{}' on line {} is not followed by a ';'".
format(identifier.data, identifier.line))
self.next_token()
if not self.symbol_table.insert(identifier.data, type_object):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error(
"The type delaration of '{}' on line {} conflicts with the previous declaration on line {}".
format(identifier.data, identifier.line, previous_definition.line))
return True
def VarDecl(self):
if not self.token_type() == 'VAR':
return False
self.next_token()
while True:
identifiers = self.IdentifierList()
if not identifiers:
return True
if not self.token_type() == ':':
if len(identifiers) is 1:
raise Parser_error("The variable declaration of '{}' on line {} is not followed by a ':'".
format(identifiers[0].data, identifiers[0].line))
else:
error = "The variable declarations of:\n"
for identifier in identifiers:
error += " '{}' on line '{}'\n".format(identifier.data, identifier.line)
raise Parser_error(error + " are not follwed by a ':'")
self.next_token()
type_object = self.Type()
if not type_object:
if len(identifiers) is 1:
raise Parser_error("The variable declaration of '{}' on line {} is not followed by a Type".
format(identifiers[0].data, identifiers[0].line))
else:
error = "The variable declarations of:\n"
for identifier in identifiers:
error += " '{}' on line '{}'\n".format(identifier.data, identifier.line)
raise Parser_error(error + " are not follwed by a Type")
if not self.token_type() == ';':
if len(identifiers) is 1:
raise Parser_error("The variable declaration of '{}' on line {} is not followed by a ';'".
format(identifiers[0].data, identifiers[0].line))
else:
error = "The variable declarations of:\n"
for identifier in identifiers:
error += " '{}' on line '{}'\n".format(identifier.data, identifier.line)
raise Parser_error(error + " are not follwed by a ';'")
self.next_token()
for identifier in identifiers:
if not self.symbol_table.insert(identifier.data, type_object):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error("The variable declaration of '{}' on line {} ".format(
identifier.data, identifier.line) +
"conflicts with the previous declaration at {}".format(
previous_definition.line))
return True
def ProcDecl(self):
if not self.token_type() == 'PROCEDURE':
return False
self.in_procedure = True
line = self.token_line()
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The 'PROCEDURE' on line {} is not followed by an identifier".format(line))
if not self.token_type() == '(':
raise Parser_error("The procedure declaration of '{}' on line {} is not followed by a '('".
format(identifier.data, line))
par_line = self.token_line()
self.next_token()
self.symbol_table.push_scope()
formals = self.Formals()
if not self.token_type() == ')':
raise Parser_error("The '(' on line {} is not terminated by a ')'".format(par_line))
self.next_token()
return_type_object = False
if self.token_type() == ':':
return_type_line = self.token_line()
self.next_token()
return_type_object = self.Type()
if not return_type_object:
raise Parser_error("The ':' on line {} is not followed by a Type".format(return_type_line))
if not self.token_type() == ';':
raise Parser_error("The procedure declaration of '{}' on line {} is not followed by a ';'".
format(identifier.data, line))
self.next_token()
while self.VarDecl():
pass
instructions = False
if self.token_type() == 'BEGIN':
begin_line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'BEGIN' on line {} is not followed by any Instructions".format(
begin_line))
return_expression = False
return_line = False
if self.token_type() == 'RETURN':
return_line = self.token_line()
self.next_token()
return_expression = self.Expression()
if not return_expression:
raise Parser_error("The 'RETURN' on line {} is not followed by an Expression".format(
return_line))
if not return_expression.type_object is return_type_object:
raise Parser_error(
"The return type defined for '{}' on line {} does not match the type of the".
format(identifier.data, line) +
"return expression on line {}".format(return_line))
elif return_type_object:
raise Parser_error(
"Expected a return statement in the procedure declaration of '{}' on line {}".
format(identifier.data, line))
if not self.token_type() == 'END':
raise Parser_error("The procedure declaration of '{}' on line {} is not followed by an 'END'".
format(identifier.data, line))
end_line = self.token_line()
self.next_token()
closing_name = self.identifier()
if not closing_name:
raise Parser_error("The 'END' on line {} is not followed by a procedure name to close".format(
end_line))
if not closing_name.data == identifier.data:
raise Parser_error("Expected a closing of procedure '{}'; got '{}' on line {}".format(
identifier.data, closing_name.data, closing_name.line))
if not self.token_type() == ';':
raise Parser_error("Expected a ';' following the closing of the procedure '{}' on line {}".
format(closing_name.data, closing_name.line))
self.next_token()
scope = self.symbol_table.pop_scope()
procedure = symbol_table.Procedure(identifier.data, formals, scope, return_type_object,
instructions, return_expression, line)
if not self.symbol_table.insert(identifier.data, procedure):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error("The procedure definition of '{}' on line {} ".format(
identifier.data, line) +
"conflicts with the previous declaration on line {}".format(
previous_definition.line))
self.in_procedure = False
if self.forward_declarations:
delete = []
for name, calls in self.forward_declarations.iteritems():
if name == identifier.data:
for call in calls:
call.definition = procedure
call.type_object = return_type_object
delete.append(name)
for name in delete:
del self.forward_declarations[name]
return True
def Type(self):
identifier = self.identifier()
if identifier:
definition = self.symbol_table.find(identifier.data)
if not type(definition) in [symbol_table.Integer, symbol_table.Array, symbol_table.Record]:
raise Parser_error("The identifier '{}' on line {} does not name a type".format(
identifier.data, identifier.line))
return definition
if self.token_type() == 'ARRAY':
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The 'ARRAY' on line {} is not followed by an Expression".format(line))
if not type(expression.type_object) is symbol_table.Integer:
raise Parser_error("The Expression following the 'ARRAY' on line {} must be an INTEGER".
format(expression.line))
size = interpreter.Interpreter.evaluate_expression(interpreter.Interpreter(), expression)
if not self.token_type() == 'OF':
raise Parser_error("The 'ARRAY' on line {} is not followed by a 'OF'".format(line))
of_line = self.token_line()
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The 'OF' on line {} is not followed by a Type".format(of_line))
return symbol_table.Array(type_object, size, line)
if self.token_type() == 'RECORD':
line = self.token_line()
self.next_token()
self.symbol_table.push_scope()
while True:
identifiers = self.IdentifierList()
if not identifiers:
break
if not self.token_type() == ':':
raise Parser_error(
"The IdentifierList following the 'RECORD' on line {} is not followed by a ':'".
format(identifiers[0].line))
col_line = self.token_line()
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The ':' on line {} is not followed by a Type".format(col_line))
if not self.token_type() == ';':
raise Parser_error("The field declarations on line {} are not followed by a ';'".
format(col_line))
self.next_token()
for ident in identifiers:
if not self.symbol_table.insert(ident.data, type_object):
previous_definition = self.symbol_table.find(ident.data)
raise Parser_error(
"The definition of '{}' on line {} conflicts with the previous definition at {}".
format(ident.data, ident.line, previous_definition.line))
if not self.token_type() == 'END':
raise Parser_error(
"The definition of the 'RECORD' on line {} was not terminated by an 'END'".
format(line))
self.next_token()
scope = self.symbol_table.pop_scope()
return symbol_table.Record(scope, line)
return False
def Expression(self):
self.in_expression += 1
if self.token_type() == '+':
line = self.token_line()
self.next_token()
term = self.Term()
if not term:
raise Parser_error("The '+' on line {} is not followed by a Term".format(line))
elif self.token_type() == '-':
line = self.token_line()
self.next_token()
term = self.Term()
if not term:
raise Parser_error("The '-' on line {} is not followed by a Term".format(line))
constant = symbol_table.Constant(self.symbol_table.integer_singleton, 0, line)
number = syntax_tree.Number(constant, constant.line)
expression = syntax_tree.Expression(number, constant.type_object, number.line)
self.type_check_binary_operation('-', expression, term, line)
binary = syntax_tree.Binary('-', expression, term, line)
term = syntax_tree.Expression(binary, constant.type_object, binary.line)
else:
line = self.token_line
term = self.Term()
if not term:
self.in_expression -= 1
return False
while self.token_type() in ['+', '-']:
op_line = self.token_line()
operator = self.token_type()
self.next_token()
new_term = self.Term()
if not new_term:
raise Parser_error("The '{}' on line {} is not followed by a Term".format(operator, op_line))
self.type_check_binary_operation(operator, term, new_term, op_line)
if type(term.child) is syntax_tree.Number and type(new_term.child) is syntax_tree.Number:
interp = interpreter.Interpreter()
term_result = interp.evaluate_expression(term)
new_term_result = interp.evaluate_expression(new_term)
if operator == '+':
result = term_result + new_term_result
else: # -
result = term_result - new_term_result
constant = symbol_table.Constant(self.symbol_table.integer_singleton, result, op_line)
child = syntax_tree.Number(constant, constant.line)
else:
child = syntax_tree.Binary(operator, term, new_term, op_line)
term = syntax_tree.Expression(child, self.symbol_table.integer_singleton, child.line)
self.in_expression -= 1
return term
def Term(self):
factor = self.Factor()
if not factor:
return False
while self.token_type() in ['*', 'DIV', 'MOD']:
line = self.token_line()
operator = self.token_type()
self.next_token()
new_factor = self.Factor()
if not new_factor:
raise Parser_error("The '{}' on line {} is not followed by a Factor".format(operator, line))
self.type_check_binary_operation(operator, factor, new_factor, line)
if type(factor.child) is syntax_tree.Number and type(new_factor.child) is syntax_tree.Number:
interp = interpreter.Interpreter()
factor_result = interp.evaluate_expression(factor)
new_factor_result = interp.evaluate_expression(new_factor)
if operator == '*':
result = factor_result * new_factor_result
elif operator == 'DIV':
if new_factor_result is 0:
raise Parser_error("The right side of the 'DIV' on line {} evaluated to 0".format(line))
result = factor_result / new_factor_result
else: # MOD
if new_factor_result is 0:
raise Parser_error("The right side of the 'MOD' on line {} evaluated to 0".format(line))
result = factor_result % new_factor_result
constant = symbol_table.Constant(self.symbol_table.integer_singleton, result, line)
child = syntax_tree.Number(constant, constant.line)
else:
child = syntax_tree.Binary(operator, factor, new_factor, line)
factor = syntax_tree.Expression(child, self.symbol_table.integer_singleton, child.line)
return factor
def Factor(self):
integer = self.integer()
if integer:
return integer
designator = self.Designator()
if designator:
if type(designator) is syntax_tree.Number:
return syntax_tree.Expression(designator, self.symbol_table.integer_singleton,
designator.line)
return syntax_tree.Expression(designator, designator.type_object, designator.line)
if self.token_type() == '(':
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The '(' on line {} is not followed by an Expression".format(line))
if not self.token_type() == ')':
raise Parser_error("The '(' on line {} is not terminated by a ')'".format(line))
self.next_token()
return expression
call = self.Call()
if call:
return syntax_tree.Expression(call, call.type_object, call.line)
return False
def Instructions(self):
instruction = self.Instruction()
if not instruction:
return False
instructions = [instruction]
while self.token_type() == ';':
line = self.token_line()
self.next_token()
instruction = self.Instruction()
if not instruction:
raise Parser_error("The ';' on line {} is not followed by any instructions".format(line))
instructions.append(instruction)
return syntax_tree.Instructions(instructions, instructions[0].line)
def Instruction(self):
instruction = (self.Assign() or self.If() or self.Repeat() or self.While() or self.Read() or
self.Write() or self.Call())
if not instruction:
return False
return syntax_tree.Instruction(instruction, instruction.line)
def Assign(self):
starting_position = self.position
location = self.Designator()
if not location:
return False
if not self.token_type() == ':=':
self.position = starting_position
return False
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The ':=' on line {} is not followed by an Expression".format(line))
if not type(location.type_object) is type(expression.type_object):
raise Parser_error("The types of the location and expression for ':=' on line {} do not match".
format(line))
return syntax_tree.Assign(location, expression, line)
def If(self):
if not self.token_type() == 'IF':
return False
line = self.token_line()
self.next_token()
condition = self.Condition()
if not condition:
raise Parser_error("The 'IF' on line {} is not followed by a Condition".format(line))
if not self.token_type() == 'THEN':
raise Parser_error("The 'IF' on line {} is not followed by a 'THEN'".format(line))
then_line = self.token_line()
self.next_token()
instructions_true = self.Instructions()
if not instructions_true:
raise Parser_error("The 'THEN' on line {} is not followed by any Instructions".format(
then_line))
instructions_false = False
if self.token_type() == 'ELSE':
else_line = self.token_line()
self.next_token()
instructions_false = self.Instructions()
if not instructions_false:
raise Parser_error("The 'ELSE' on line {} is not followed by any Instructions".format(
else_line))
if not self.token_type() == 'END':
raise Parser_exception("The 'IF' on line {} is not followed by an 'END'".format(line))
self.next_token()
return syntax_tree.If(condition, instructions_true, instructions_false, line)
def Repeat(self):
if not self.token_type() == 'REPEAT':
return False
line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'REPEAT' on line {} is not followed by any Instructions".format(line))
if not self.token_type() == 'UNTIL':
raise Parser_error("The 'REPEAT' on line {} is not followed by an 'UNTIL'".format(line))
until_line = self.token_line()
self.next_token()
condition = self.Condition()
if not condition:
raise Parser_error("The 'UNTIL' on line {} is not followed by a Condition".format(until_line))
if not self.token_type() == 'END':
raise Parser_error("The 'REPEAT' on line {} is not terminated by an 'END'".format(line))
self.next_token()
return syntax_tree.Repeat(condition, instructions, line)
def While(self):
if not self.token_type() == 'WHILE':
return False
line = self.token_line()
self.next_token()
condition = self.Condition()
if not condition:
raise Parser_error("The 'WHILE' on line {} is not followed by a Condition".format(line))
if not self.token_type() == 'DO':
raise Parser_error("The 'WHILE' on line {} is not followed by a 'DO'".format(line))
do_line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'DO' on line {} is not followed by any Instructions".format(do_line))
if not self.token_type() == 'END':
raise Parser_error("The 'WHILE' on line {} is not teminated by an 'END'".format(line))
self.next_token()
repeat_relation = negated_relation[condition.relation]
repeat_condition = syntax_tree.Condition(repeat_relation, condition.expression_left,
condition.expression_right, condition.line)
repeat = syntax_tree.Repeat(repeat_condition, instructions, repeat_condition.line)
instruction = syntax_tree.Instruction(repeat, repeat.line)
instructions = syntax_tree.Instructions([instruction], instruction.line)
return syntax_tree.If(condition, instructions, False, line)
def Condition(self):
starting_position = self.position
expression_left = self.Expression()
if not expression_left:
return False
relation = self.token()
if not relation.data in ['=', '#', '<', '>', '<=', '>=']:
self.position = starting_position
return False
self.next_token()
expression_right = self.Expression()
if not expression_right:
raise Parser_error("There is no Expression following the '{}' on line {}".format(
operator.data, operator.line))
self.type_check_binary_operation(relation.data, expression_left, expression_right, relation.line)
return syntax_tree.Condition(relation.data, expression_left, expression_right, relation.line)
def Write(self):
if not self.token_type() == 'WRITE':
return False
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The 'WRITE' on line {} is not followed by an Expression".format(line))
if not type(expression.type_object) is symbol_table.Integer:
raise Parser_error("The Expression on line {} must result in an INTEGER".format(
expression.line))
return syntax_tree.Write(expression, line)
def Read(self):
if not self.token_type() == 'READ':
return False
line = self.token_line()
self.next_token()
designator = self.Designator()
if not designator:
raise Parser_error("The 'READ' on line {} is not followed by a Designator".format(line))
return syntax_tree.Read(designator, line)
def Call(self):
starting_position = self.position
identifier = self.identifier()
if not identifier:
return False
definition = self.symbol_table.find(identifier.data)
if not self.token_type() == '(':
self.position = starting_position
return False
forward = False
if not definition:
if not self.in_procedure:
raise Parser_error("The Procedure '{}' on line {} has not been defined".format(
identifier.data, identifier.line))
forward = True
elif not type(definition) is symbol_table.Procedure:
raise Parser_error("'{}' on line {} is not a Procedure".format(
identifier.data, identifier.line))
line = self.token_line()
self.next_token()
actuals = self.Actuals()
if forward:
if self.in_expression:
return_type = self.symbol_table.integer_singleton
else:
return_type = False
else:
return_type = definition.type_object
call = syntax_tree.Call(definition, actuals, return_type, identifier.line)
if not forward:
length = len(actuals) if actuals else 0
definition_length = len(definition.formals) if definition.formals else 0
if length != definition_length:
raise Parser_error(
"The call to '{}' on line {} does not have the correct number of arguments ({} for {})".
format(identifier.data, identifier.line, length, definition_length))
else:
self.argument_number_checks.append(call)
if not identifier.data in self.forward_declarations:
self.forward_declarations[identifier.data] = [call]
else:
self.forward_declarations[identifier.data].append(call)
if self.in_expression:
self.call_type_checks.append(call)
if not self.token_type() == ')':
raise Parser_error("The '(' on line {} is not terminated by a ')'".format(line))
self.next_token()
return call
def Designator(self):
starting_position = self.position
identifier = self.identifier()
if not identifier:
return False
if self.token_type() == '(':
self.position = starting_position
return False
table_entry = self.symbol_table.find(identifier.data)
if not table_entry:
self.position = starting_position
return False
if type(table_entry) is symbol_table.Constant:
return syntax_tree.Number(table_entry, identifier.line)
selectors = self.Selector()
variable = syntax_tree.Variable(identifier.data, table_entry, identifier.line)
location = syntax_tree.Location(variable, table_entry, variable.line)
for selector in selectors:
if type(location.child) == syntax_tree.Variable:
definition = location.child.table_entry
else:
definition = location.child.type_object
if type(selector) is syntax_tree.Expression:
if not type(definition) is symbol_table.Array:
raise Parser_error("The index on line {} does not follow an Array".format(selector.line))
index = syntax_tree.Index(location, selector, definition.type_object, selector.line)
location = syntax_tree.Location(index, index.type_object, index.line)
else:
if not type(definition) is symbol_table.Record:
raise Parser_error("The field '{}' on line {} does not follow a Record".format(
selector.data, selector.line))
table_entry = definition.scope.find(selector.data)
if not table_entry:
raise Parser_error("The field '{}' on line {} has not been defined".format(
selector.data, selector.line))
variable = syntax_tree.Variable(selector.data, table_entry, selector.line)
field = syntax_tree.Field(location, variable, table_entry, variable.line)
location = syntax_tree.Location(field, table_entry, field.line)
return location
def Formals(self):
formal = self.Formal()
if not formal:
return False
formals = []
formals += formal
while self.token_type() == ';':
line = self.token_line()
self.next_token()
formal = self.Formal()
if not formal:
raise Parser_error("The ';' on line {} is not followed by a Formal".format(line))
formals += formal
return formals
def Formal(self):
line = self.token_line()
identifiers = self.IdentifierList()
if not identifiers:
return False
if not self.token_type() == ':':
raise Parser_error("The IdentifierList on line {} is not followed by a ':'".format(line))
line = self.token_line()
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The ':' on line {} is not followed by a Type".format(line))
definitions = []
for identifier in identifiers:
self.symbol_table.insert(identifier.data, type_object)
definitions.append(identifier.data)
return definitions
def Actuals(self):
return self.ExpressionList()
def Selector(self):
selectors = []
while True:
if self.token_type() == '[':
line = self.token_line()
self.next_token()
expr_list = self.ExpressionList()
if not expr_list:
raise Parser_error("The '[' on line {} is not followed by an ExpressionList".format(line))
if not self.token_type() == ']':
raise Parser_error("The '[' on line {} is not closed by a ']'".format(line))
self.next_token()
selectors += expr_list
elif self.token_type() == '.':
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The '.' on line {} is not followed by an identifier".format(
self.last_line()))
selectors.append(identifier)
else:
break
return selectors
def IdentifierList(self):
identifier = self.identifier()
if not identifier:
return False
identifiers = [identifier]
while self.token_type() == ',':
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The ',' on line {} is not followed by an identifier".format(
self.last_line()))
identifiers.append(identifier)
return identifiers
def ExpressionList(self):
expression = self.Expression()
if not expression:
return False
expressions = [expression]
while self.token_type() == ',':
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The ',' on line {} is not followed by an expression".format(
self.last_line()))
expressions.append(expression)
return expressions
def identifier(self):
if not self.token_type() == 'identifier':
return False
identifier = self.token()
self.next_token()
return identifier
def integer(self):
if not self.token_type() == 'integer':
return False
constant = symbol_table.Constant(self.symbol_table.integer_singleton, int(self.token().data),
self.token_line())
number = syntax_tree.Number(constant, constant.line)
self.next_token()
return syntax_tree.Expression(number, constant.type_object, number.line)
| mit | 1,540,278,318,245,075,000 | 43.785354 | 101 | 0.617931 | false |
schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Plugins/Extensions/MediaPortal/resources/update.py | 1 | 7746 | # -*- coding: utf-8 -*-
###############################################################################################
#
# MediaPortal for Dreambox OS
#
# Coded by MediaPortal Team (c) 2013-2017
#
# This plugin is open source but it is NOT free software.
#
# This plugin may only be distributed to and executed on hardware which
# is licensed by Dream Property GmbH. This includes commercial distribution.
# In other words:
# It's NOT allowed to distribute any parts of this plugin or its source code in ANY way
# to hardware which is NOT licensed by Dream Property GmbH.
# It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way
# on hardware which is NOT licensed by Dream Property GmbH.
#
# This applies to the source code as a whole as well as to parts of it, unless
# explicitely stated otherwise.
#
# If you want to use or modify the code or parts of it,
# you have to keep OUR license and inform us about the modifications, but it may NOT be
# commercially distributed other than under the conditions noted above.
#
# As an exception regarding execution on hardware, you are permitted to execute this plugin on VU+ hardware
# which is licensed by satco europe GmbH, if the VTi image is used on that hardware.
#
# As an exception regarding modifcations, you are NOT permitted to remove
# any copy protections implemented in this plugin or change them for means of disabling
# or working around the copy protections, unless the change has been explicitly permitted
# by the original authors. Also decompiling and modification of the closed source
# parts is NOT permitted.
#
# Advertising with this plugin is NOT allowed.
# For other uses, permission from the authors is necessary.
#
###############################################################################################
from Plugins.Extensions.MediaPortal.plugin import _
from imports import *
import mp_globals
from messageboxext import MessageBoxExt
from twagenthelper import twAgentGetPage
import random
gLogFile = None
class checkupdate:
def __init__(self, session):
self.session = session
def checkforupdate(self):
update_agent = getUserAgent()
update_url = getUpdateUrl()
twAgentGetPage(update_url, agent=update_agent, timeout=60).addCallback(self.gotUpdateInfo).addErrback(self.gotError)
def gotError(self, error=""):
printl(error,self,"E")
return
def gotUpdateInfo(self, html):
if re.search(".*?<html", html):
return
self.html = html
tmp_infolines = html.splitlines()
remoteversion_ipk = re.sub('\D', '', tmp_infolines[0])
remoteversion_deb = re.sub('\D', '', tmp_infolines[2])
try:
mirrors = self.updateurl = tmp_infolines[5].split(';')
mirror_rand = random.choice(mirrors)
except:
mirror_rand = None
if mp_globals.isDreamOS:
self.updateurl = tmp_infolines[3]
remoteversion = remoteversion_deb
else:
self.updateurl = tmp_infolines[1]
remoteversion = remoteversion_ipk
if mirror_rand:
mirror_replace = re.search('(sourceforge.net.*)', self.updateurl)
if mirror_replace:
self.updateurl = 'http://' + mirror_rand + '.dl.' + mirror_replace.group(1)
if int(config.mediaportal.version.value) < int(remoteversion):
if mirror_rand:
printl('Random update mirror selected: %s' % mirror_rand,self,'A')
printl('Found update url: %s' % self.updateurl,self,'A')
if mirror_replace:
printl('Generated update url: %s' % self.updateurl,self,'A')
self.session.openWithCallback(self.startUpdate,MessageBoxExt,_("An update is available for the MediaPortal Plugin!\nDo you want to download and install it now?"), MessageBoxExt.TYPE_YESNO, timeout=15, default=False)
return
else:
return
def startUpdate(self,answer):
if answer is True:
self.session.open(MPUpdateScreen,self.updateurl,self.html)
else:
return
class MPUpdateScreen(Screen):
def __init__(self, session, updateurl, html):
self.session = session
self.updateurl = updateurl
self.html = html
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/MP_Update.xml" % (self.skin_path, mp_globals.currentskin)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/MP_Update.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
self.ml = MenuList([])
self['mplog'] = self.ml
self.list = []
Screen.__init__(self, session)
self['title'] = Label("MediaPortal Update")
self.setTitle("MediaPortal Update")
self.onLayoutFinish.append(self.__onLayoutFinished)
def __onLayoutFinished(self):
height = self['mplog'].l.getItemSize().height()
try:
self.ml.l.setFont(gFont(mp_globals.font, height - 2 * mp_globals.sizefactor))
except:
pass
self.list.append(_("Starting update, please wait..."))
self.ml.setList(self.list)
self.ml.moveToIndex(len(self.list)-1)
self.ml.selectionEnabled(False)
self.startPluginUpdate()
def startPluginUpdate(self):
self.container=eConsoleAppContainer()
if mp_globals.isDreamOS:
self.container.appClosed_conn = self.container.appClosed.connect(self.finishedPluginUpdate)
self.container.stdoutAvail_conn = self.container.stdoutAvail.connect(self.mplog)
f = open("/etc/apt/apt.conf", "r")
arch = ''.join(f.readlines()).strip()
arch = re.findall('"(.*?)";', arch, re.S)[0]
tmp_infolines = self.html.splitlines()
files = ''
for i in range(0, len(tmp_infolines)):
if re.match(".*?/update/",tmp_infolines[i], re.S):
file = "wget -q -O /tmp/mediaportal/update/%s %s" % (tmp_infolines[i].split('/update/')[-1].replace('&&ARCH&&', arch), tmp_infolines[i].replace('&&ARCH&&', arch))
files = files + ' && ' + file
download = files.strip(' && ')
self.container.execute("mkdir -p /tmp/mediaportal/update && %s && cd /tmp/mediaportal/update/ && dpkg-scanpackages . | gzip -1c > Packages.gz && echo deb file:/tmp/mediaportal/update ./ > /etc/apt/sources.list.d/mediaportal.list && apt-get update && apt-get install -y --force-yes enigma2-plugin-extensions-mediaportal && rm -r /tmp/mediaportal/update && rm /etc/apt/sources.list.d/mediaportal.list" % download)
else:
self.container.appClosed.append(self.finishedPluginUpdate)
self.container.stdoutAvail.append(self.mplog)
self.container.execute("opkg update ; opkg install " + str(self.updateurl))
def finishedPluginUpdate(self,retval):
self.container.kill()
if retval == 0:
config.mediaportal.filter.value = "ALL"
config.mediaportal.filter.save()
configfile.save()
self.session.openWithCallback(self.restartGUI, MessageBoxExt, _("MediaPortal successfully updated!\nDo you want to restart the Enigma2 GUI now?"), MessageBoxExt.TYPE_YESNO)
else:
self.session.openWithCallback(self.returnGUI, MessageBoxExt, _("MediaPortal update failed! Check the update log carefully!"), MessageBoxExt.TYPE_ERROR)
def restartGUI(self, answer):
if answer is True:
self.session.open(TryQuitMainloop, 3)
self.close()
def returnGUI(self, answer):
self.close()
def mplog(self,str):
self.list.append(str)
self.ml.setList(self.list)
self.ml.moveToIndex(len(self.list)-1)
self.ml.selectionEnabled(False)
self.writeToLog(str)
def writeToLog(self, log):
global gLogFile
if gLogFile is None:
self.openLogFile()
now = datetime.datetime.now()
gLogFile.write(str(log) + "\n")
gLogFile.flush()
def openLogFile(self):
global gLogFile
baseDir = "/tmp"
logDir = baseDir + "/mediaportal"
now = datetime.datetime.now()
try:
os.makedirs(baseDir)
except OSError, e:
pass
try:
os.makedirs(logDir)
except OSError, e:
pass
gLogFile = open(logDir + "/MediaPortal_update_%04d%02d%02d_%02d%02d.log" % (now.year, now.month, now.day, now.hour, now.minute, ), "w") | gpl-2.0 | 1,712,898,901,760,975,400 | 35.023256 | 414 | 0.695894 | false |
Wolkabout/WolkConnect-Python- | wolk/__init__.py | 1 | 3048 | # coding=utf-8
"""
.. module:: wolk
This module provides connection to WolkAbout IoT Platform.
To start publishing data to the platform
create an instance of Device class with credentials obtained from the platform
and pass it to an instance of WolkConnect class.
For more information about module features visit:
https://github.com/Wolkabout/WolkConnect-Python/tree/master/examples/full_feature_set
"""
from .models.ActuatorCommand import ActuatorCommand
from .models.ActuatorCommandType import ActuatorCommandType
from .models.ActuatorState import ActuatorState
from .models.ActuatorStatus import ActuatorStatus
from .models.Alarm import Alarm
from .models.ConfigurationCommand import ConfigurationCommand
from .models.ConfigurationCommandType import ConfigurationCommandType
from .models.Device import Device
from .models.FileTransferPacket import FileTransferPacket
from .models.FirmwareCommand import FirmwareCommand
from .models.FirmwareCommandType import FirmwareCommandType
from .models.FirmwareErrorType import FirmwareErrorType
from .models.FirmwareStatus import FirmwareStatus
from .models.FirmwareStatusType import FirmwareStatusType
from .models.FirmwareUpdateStateType import FirmwareUpdateStateType
from .models.InboundMessage import InboundMessage
from .models.OutboundMessage import OutboundMessage
from .models.Protocol import Protocol
from .models.SensorReading import SensorReading
from .interfaces.ActuationHandler import ActuationHandler
from .interfaces.ActuatorStatusProvider import ActuatorStatusProvider
from .interfaces.ConfigurationHandler import ConfigurationHandler
from .interfaces.ConfigurationProvider import ConfigurationProvider
from .interfaces.ConnectivityService import ConnectivityService
from .interfaces.FirmwareInstaller import FirmwareInstaller
from .interfaces.FirmwareURLDownloadHandler import FirmwareURLDownloadHandler
from .interfaces.InboundMessageDeserializer import InboundMessageDeserializer
from .interfaces.OutboundMessageFactory import OutboundMessageFactory
from .interfaces.OutboundMessageQueue import OutboundMessageQueue
from .FileSystemFirmwareHandler import FileSystemFirmwareHandler
from .LoggerFactory import logging_config
from .WolkConnect import WolkConnect
__all__ = [
"ActuatorCommand",
"ActuatorCommandType",
"ActuatorState",
"ActuatorStatus",
"Alarm",
"ConfigurationCommand",
"ConfigurationCommandType",
"Device",
"FileTransferPacket",
"FirmwareCommand",
"FirmwareCommandType",
"FirmwareErrorType",
"FirmwareStatus",
"FirmwareStatusType",
"FirmwareUpdateStateType",
"InboundMessage",
"OutboundMessage",
"Protocol",
"SensorReading",
"ActuationHandler",
"ActuatorStatusProvider",
"ConfigurationHandler",
"ConfigurationProvider",
"ConnectivityService",
"FileSystemFirmwareHandler",
"FirmwareInstaller",
"FirmwareURLDownloadHandler",
"logging_config",
"InboundMessageDeserializer",
"OutboundMessageFactory",
"OutboundMessageQueue",
"WolkConnect",
]
| apache-2.0 | -7,907,046,333,640,627,000 | 36.62963 | 85 | 0.82185 | false |
census-instrumentation/opencensus-python | opencensus/common/http_handler/__init__.py | 1 | 1391 | # Copyright 2018, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
# For Python 3.0 and later
from urllib.request import urlopen, Request
from urllib.error import HTTPError, URLError
except ImportError:
# Fall back to Python 2's urllib2
from urllib2 import urlopen, Request
from urllib2 import HTTPError, URLError
import socket
_REQUEST_TIMEOUT = 2 # in secs
def get_request(request_url, request_headers=dict()):
"""Execute http get request on given request_url with optional headers
"""
request = Request(request_url)
for key, val in request_headers.items():
request.add_header(key, val)
try:
response = urlopen(request, timeout=_REQUEST_TIMEOUT)
response_content = response.read()
except (HTTPError, URLError, socket.timeout):
response_content = None
return response_content
| apache-2.0 | -4,971,914,294,319,869,000 | 31.348837 | 74 | 0.723221 | false |
unicef/un-partner-portal | backend/unpp_api/apps/partner/migrations/0053_auto_20180115_0834.py | 1 | 2538 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-01-15 08:34
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('common', '0007_auto_20171031_0715'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('partner', '0052_merge_20180115_0938'),
]
operations = [
migrations.CreateModel(
name='PartnerCapacityAssessment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('assessment_type', models.TextField(blank=True, choices=[('HAC', 'HACT micro-assessment'), ('OCH', 'OCHA CBPF (Country-Based Pooled Fund) capacity assessment'), ('UNH', 'UNHCR procurement pre-qualification assessment '), ('DFI', 'DFID pre-grant due diligence assessment'), ('EUE', 'EU/ECHO Framework Partnership Agreement (FPA) assessment'), ('Oth', 'Other formal capacity assessment')], null=True)),
('report_url', models.URLField(blank=True, null=True)),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='capacity_assessments', to=settings.AUTH_USER_MODEL)),
('partner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='capacity_assessments', to='partner.Partner')),
('report_file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='partner_capacity_assessments', to='common.CommonFile')),
],
options={
'ordering': ['id'],
},
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='assessment_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='assessments',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='capacity_assessment',
),
]
| apache-2.0 | 7,876,868,366,261,741,000 | 50.795918 | 417 | 0.646178 | false |
ClydeSpace-GroundStation/GroundStation | Utilities/Supporting_Libraries/gr-bruninga-master/python/ax25_fsk_mod.py | 1 | 5030 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# TODO: Pickle -> JSON
import pickle
import numpy as np
import Queue
import time
from gnuradio import gr
from bruninga import packet
import pmt
class ax25_fsk_mod(gr.sync_block):
"""
A continuous phase FSK modulator for AX25 packets.
When given an AX25 Packet, this block converts it to an audio stream with
the given configured parameters. Two in question:
- Flag Count: How many flags to put before and after the packet
- Preamble Len (ms): How long to transmit a clock signal (01010101)
The default values for the mark, space, and baud rate are configurable to
allow for further experimentation. v.23 modems, for example, use 1300/2100
tones to generate 1200 baud signals.
"""
def __init__(self, samp_rate, preamble_len_ms, flag_count, mark_freq,
space_freq, baud_rate):
gr.sync_block.__init__(self,
name="ax25_fsk_mod",
in_sig=None,
out_sig=[np.float32])
self.samp_rate = samp_rate
self.flag_count = flag_count
self.mark_freq = mark_freq
self.space_freq = space_freq
self.baud_rate = baud_rate
self.preamble_len_bits = int((preamble_len_ms / 1000.0) * baud_rate / 2)
self.sps = int(1.0 * self.samp_rate / self.baud_rate)
self.outbox = Queue.Queue()
self.output_buffer = None
self.opb_idx = 0
self.message_port_register_in(pmt.intern('in'))
self.set_msg_handler(pmt.intern('in'), self.handle_msg)
def handle_msg(self, msg_pmt):
msg = pmt.to_python(msg_pmt)
if not (isinstance(msg, tuple) and len(msg) == 2):
print 'Invalid Message: Expected tuple of len 2'
print 'Dropping msg of type %s' % type(msg)
return
try:
msg = pickle.loads(msg[1])
except StandardError as e:
print 'Bad format: Expected pickled AX25Packet'
print str(e)
return
# TODO: Take list of AX25 packets VVVV
if not isinstance(msg, packet.AX25Packet):
print 'Expected AX25Packet, got %s' % type(msg)
return
self.outbox.put(msg)
def ax25_to_fsk(self, msg):
# TODO: Allow multiple messages to be strung together with
# one preamble
# Generate message
msg_bits = [0, 1] * self.preamble_len_bits
msg_bits += msg.hdlc_wrap(self.flag_count, self.flag_count)
# Calculate phase increments
mark_pinc = 2 * np.pi * self.mark_freq / self.samp_rate
space_pinc = 2 * np.pi * self.space_freq / self.samp_rate
phase = 0
opb = np.empty(len(msg_bits) * self.sps)
for i, bit in enumerate(msg_bits):
pinc = (mark_pinc if bit is 1 else space_pinc)
phase += pinc
tmp = np.arange(self.sps) * pinc + phase
opb[i*self.sps:(i+1)*self.sps] = np.sin(tmp)
phase = tmp[-1]
return opb
def work(self, input_items, output_items):
out = output_items[0]
idx = 0
# TODO: Transmit cooldown period
if self.output_buffer is None:
if self.outbox.empty():
# TODO: This is a bit of a hack to work around the ALSA Audio
# Sink being unhappy with underflows
out[0:] = 0
return len(out)
self.output_buffer = self.ax25_to_fsk(self.outbox.get())
self.opb_idx = 0
# How many samples do we have left for each buffer?
opb_left = len(self.output_buffer) - self.opb_idx
out_left = len(out) - idx
# Take the minimum, and copy them to out
cnt = min(opb_left, out_left)
out[idx:idx+cnt] = self.output_buffer[self.opb_idx:self.opb_idx+cnt]
# Update counters
idx += cnt
self.opb_idx += cnt
# If we run out of samples in the output buffer, we're done
if self.opb_idx >= len(self.output_buffer):
self.output_buffer = None
# Fill the remaining buffer with zeros. Hack to help the ALSA audio sink
# be happy.
if idx < len(out):
out[idx:] = 0
return len(out)
| mit | 7,835,768,773,258,997,000 | 31.662338 | 80 | 0.603777 | false |
TylerSandman/mopy | tests/nim_test.py | 1 | 2169 | from mopy.impl.nim.state import NimState
from mopy.impl.nim.action import NimAction
from mopy.impl.nim.game import NimGame
import pytest
@pytest.fixture
def game(scope="module"):
return NimGame()
@pytest.fixture
def new_state(game):
return game.new_game()
@pytest.fixture
def mid_state():
return NimState([2, 0, 1], 0)
@pytest.fixture
def completed_state():
return NimState([0, 0, 0], 0)
def test_legal_first_actions(game, new_state):
all_actions = game.get_legal_actions(new_state)
# Should be able to take 1-3 from heap 0,
# 1-4 from heap 1, and 1-5 from heap 2
for n in range(1, 4):
assert NimAction(0, n) in all_actions
for n in range(1, 5):
assert NimAction(1, n) in all_actions
for n in range(1, 6):
assert NimAction(2, n) in all_actions
def test_legal_intermediate_actions(game, mid_state):
all_actions = game.get_legal_actions(mid_state)
# Should be able to take 1-2 from heap 0,
# 0 from heap 1, 1 from heap 2
assert NimAction(0, 1) in all_actions
assert NimAction(0, 2) in all_actions
assert NimAction(2, 1) in all_actions
for n in range(3, 4):
assert NimAction(0, n) not in all_actions
for n in range(1, 5):
assert NimAction(1, n) not in all_actions
for n in range(2, 6):
assert NimAction(2, n) not in all_actions
def test_action(game, new_state):
game.do_action(new_state, NimAction(0, 3))
assert new_state.current_player == 1
assert new_state.heaps[0] == 0
assert new_state.heaps[1] == 4
assert new_state.heaps[2] == 5
game.do_action(new_state, NimAction(1, 2))
assert new_state.current_player == 0
assert new_state.heaps[0] == 0
assert new_state.heaps[1] == 2
assert new_state.heaps[2] == 5
def test_game_over(game, new_state, mid_state, completed_state):
assert game.is_over(completed_state)
assert game.get_result(completed_state) == 1
with pytest.raises(Exception):
game.get_result(mid_state)
with pytest.raises(Exception):
game.get_result(new_state)
| mit | -83,457,380,437,558,980 | 24.777778 | 64 | 0.632089 | false |
nmishkin/tosca-vcloud-plugin | manager_blueprint/scripts/configure.py | 1 | 1565 | import tempfile
import json
import fabric
import vcloud_plugin_common
from cloudify import ctx
PROVIDER_CONTEXT_RUNTIME_PROPERTY = 'provider_context'
def configure(vcloud_config):
_copy_vsphere_configuration_to_manager(vcloud_config)
_install_docker()
_save_context()
def _copy_vsphere_configuration_to_manager(vcloud_config):
tmp = tempfile.mktemp()
with open(tmp, 'w') as f:
json.dump(vcloud_config, f)
fabric.api.put(tmp,
vcloud_plugin_common.Config.VCLOUD_CONFIG_PATH_DEFAULT)
def _install_docker():
distro = fabric.api.run(
'python -c "import platform; print platform.dist()[0]"')
kernel_version = fabric.api.run(
'python -c "import platform; print platform.release()"')
if kernel_version.startswith("3.13") and 'Ubuntu' in distro:
fabric.api.run("wget -qO- https://get.docker.com/ | sudo sh")
def _save_context():
resources = dict()
node_instances = ctx._endpoint.storage.get_node_instances()
nodes_by_id = \
{node.id: node for node in ctx._endpoint.storage.get_nodes()}
for node_instance in node_instances:
run_props = node_instance.runtime_properties
props = nodes_by_id[node_instance.node_id].properties
if "management_network" == node_instance.node_id:
resources['int_network'] = {
"name": props.get('resource_id')
}
provider = {
'resources': resources
}
ctx.instance.runtime_properties[PROVIDER_CONTEXT_RUNTIME_PROPERTY] = \
provider
| apache-2.0 | -6,348,438,394,776,755,000 | 26.45614 | 74 | 0.647923 | false |
SanaMobile/sana.protocol_builder | src-django/api/tests/test_model_concept.py | 1 | 2306 | from django.test import TestCase
from django.db import IntegrityError
from nose.tools import raises, assert_equals, assert_is_not_none
from api.models import Concept
from utils import factories
class ConceptTest(TestCase):
def setUp(self):
self.name = 'SX SITE SWELLING'
self.display_name = 'Swelling at surgical site'
def test_concept_required(self):
concept = Concept.objects.create(
name=self.name,
display_name=self.display_name
)
assert_equals(len(concept.uuid.hex), 32)
assert_equals(concept.name, self.name)
assert_equals(concept.display_name, self.display_name)
def test_concepts_all_properties(self):
Concept.objects.create(
name=self.name,
display_name=self.display_name,
description='Swelling observed at surgical site post procedure',
data_type='string',
mime_type='text/plain',
constraint='yes;no'
)
concept = Concept.objects.get(name='SX SITE SWELLING')
assert_equals(len(concept.uuid.hex), 32)
assert_is_not_none(concept.created)
assert_is_not_none(concept.last_modified)
assert_equals(concept.name, self.name)
assert_equals(concept.display_name, self.display_name)
assert_equals(concept.description, 'Swelling observed at surgical site post procedure')
assert_equals(concept.data_type, 'string')
assert_equals(concept.mime_type, 'text/plain')
assert_equals(concept.constraint, 'yes;no')
@raises(IntegrityError)
def test_concept_type_invalid(self):
Concept.objects.create(
name=self.name,
display_name=self.display_name,
data_type='bad'
)
def test_data_type_string(self):
self.assert_data_type_valid('string')
def test_data_type_boolean(self):
self.assert_data_type_valid('boolean')
def test_data_type_number(self):
self.assert_data_type_valid('number')
def test_data_type_complex(self):
self.assert_data_type_valid('complex')
# HELPERS
def assert_data_type_valid(self, data_type):
concept = factories.ConceptFactory(
data_type=data_type
)
assert_is_not_none(concept)
| bsd-3-clause | -2,954,011,861,640,709,000 | 31.027778 | 95 | 0.641804 | false |
ollitapa/MMP-TracerApi | Tests/MeshTests/meshConeTest.py | 1 | 1598 | #
# Copyright 2015 Olli Tapaninen, VTT Technical Research Center of Finland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from meshpy.geometry import generate_extrusion
from matplotlib import pylab as plt
from mpl_toolkits.mplot3d import Axes3D
from meshpy.tet import MeshInfo, build
rz = [(0, 0), (1, 0), (1.5, 0.5), (2, 1), (0, 1)]
base = []
for theta in np.linspace(0, 2 * np.pi, 40):
x = np.sin(theta)
y = np.cos(theta)
base.extend([(x, y)])
(points, facets,
facet_holestarts, markers) = generate_extrusion(rz_points=rz, base_shape=base)
p_array = np.array(points)
xs = p_array[:, 0]
ys = p_array[:, 1]
zs = p_array[:, 2]
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(xs, ys, zs)
for f in facets:
plt.plot(xs[list(f[0])], ys[list(f[0])], zs[list(f[0])])
plt.show()
for i_facet, poly_list in enumerate(facets):
print(poly_list)
mesh_info = MeshInfo()
mesh_info.set_points(points)
mesh_info.set_facets_ex(facets, facet_holestarts, markers)
mesh = build(mesh_info)
print(mesh.elements)
mesh.write_vtk('test.vtk')
| apache-2.0 | 9,024,554,759,309,677,000 | 25.633333 | 79 | 0.702128 | false |
dabrahams/zeroinstall | tests/testalias.py | 1 | 3191 | #!/usr/bin/env python
from basetest import BaseTest
import sys, tempfile
from StringIO import StringIO
import unittest
sys.path.insert(0, '..')
from zeroinstall import alias
expected_script = """#!/bin/sh
exec 0launch 'http://example.com/foo.xml' "$@"
"""
old_script = """#!/bin/sh
if [ "$*" = "--versions" ]; then
exec 0launch -gd 'http://example.com/foo.xml' "$@"
else
exec 0launch 'http://example.com/foo.xml' "$@"
fi
"""
expected_script_main = """#!/bin/sh
exec 0launch --main 'a'\\'''\\''\\test' 'http://example.com/foo.xml' "$@"
"""
expected_script_command = """#!/bin/sh
exec 0launch --command 'a'\\'''\\''\\test' 'http://example.com/foo.xml' "$@"
"""
old_script_main = """#!/bin/sh
if [ "$*" = "--versions" ]; then
exec 0launch -gd 'http://example.com/foo.xml' "$@"
else
exec 0launch --main 'a'\\'''\\''\\test' 'http://example.com/foo.xml' "$@"
fi
"""
class TestAlias(BaseTest):
def setUp(self):
BaseTest.setUp(self)
def testWrite(self):
buf = StringIO()
alias.write_script(buf, 'http://example.com/foo.xml', None)
self.assertEqual(expected_script, buf.getvalue())
buf = StringIO()
alias.write_script(buf, 'http://example.com/foo.xml', 'a\'\'\\test')
self.assertEqual(expected_script_main, buf.getvalue())
buf = StringIO()
alias.write_script(buf, 'http://example.com/foo.xml', command = 'a\'\'\\test')
self.assertEqual(expected_script_command, buf.getvalue())
def testParse(self):
tmp = tempfile.NamedTemporaryFile()
tmp.write(expected_script)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual(None, main)
tmp = tempfile.NamedTemporaryFile()
tmp.write(expected_script_main)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual('a\'\'\\test', main)
tmp = tempfile.NamedTemporaryFile()
tmp.write(expected_script_command)
tmp.flush()
tmp.seek(0)
info = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', info.uri)
self.assertEqual('a\'\'\\test', info.command)
self.assertEqual(None, info.main)
def testParseOld(self):
tmp = tempfile.NamedTemporaryFile()
tmp.write(old_script)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual(None, main)
tmp = tempfile.NamedTemporaryFile()
tmp.write(old_script_main)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual('a\'\'\\test', main)
def testParseException(self):
tmp = tempfile.NamedTemporaryFile()
tmp.write('hi' + expected_script)
tmp.flush()
tmp.seek(0)
try:
alias.parse_script(tmp.name)
assert False
except alias.NotAnAliasScript:
pass
tmp = tempfile.NamedTemporaryFile()
tmp.write(expected_script_command.replace('command', 'bob'))
tmp.flush()
tmp.seek(0)
try:
alias.parse_script(tmp.name)
assert False
except alias.NotAnAliasScript, ex:
assert 'bob' in str(ex)
pass
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | -7,761,561,276,122,753,000 | 25.155738 | 80 | 0.666562 | false |
bjoernricks/python-quilt | quilt/cli/series.py | 1 | 1199 | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 - 2017 Björn Ricks <[email protected]>
#
# See LICENSE comming with the source of python-quilt for details.
from quilt.cli.meta import Command
from quilt.cli.parser import OptionArgument
from quilt.db import Db, Series
class SeriesCommand(Command):
name = "series"
help = "Print the names of all patches in the series file."
v = OptionArgument(action="store_true", help="""indicate applied (+)
and topmost (=) patches""")
def run(self, args):
series = Series(self.get_patches_dir())
if args.v:
applied = Db(self.get_pc_dir()).patches()
for patch in applied[:-1]:
print("+ " + patch.get_name())
if applied:
print("= " + applied[-1].get_name())
patches = series.patches_after(applied[-1])
else:
patches = series.patches()
for patch in patches:
print(" " + patch.get_name())
else:
for patch in series.patches():
print(patch.get_name())
| mit | 6,167,082,070,145,650,000 | 31.378378 | 72 | 0.582638 | false |
w0pke/oppgavegenerator | oppgavegen/generation_folder/generation.py | 1 | 16870 | """
Handles task generation from templates.
"""
from random import uniform, shuffle, choice
import json
from sympy import sympify
from sympy.parsing.sympy_parser import (parse_expr, standard_transformations,
implicit_multiplication_application, convert_xor)
from oppgavegen.parsing.latex_translator import latex_to_sympy
from oppgavegen.models import Level
from oppgavegen.generation_folder.multifill import multifill
from oppgavegen.generation_folder.fill_in import fill_in_the_blanks
from oppgavegen.parsing.parenthesis_removal import *
from oppgavegen.utility.utility import *
from oppgavegen.generation_folder.calculate_parse_solution import parse_solution
from oppgavegen.generation_folder.get_question import get_question, get_level_question
@Debugger
def generate_task(user, template_extra, desired_type=''):
"""Makes a valid math question at the correct rating from a template in the database.
:param user: The user requesting a template
:param template_extra: (optional) A id used for requesting a specific template.
:param desired_type: (optional) A string for requesting a specific template type.
:return: Returns a complete math question with generated numbers.
"""
if template_extra == "":
get_question_dict = get_question(user, '') # Gets a question from the DB
else:
get_question_dict = get_question(user, template_extra)
q = get_question_dict['template']
if desired_type == '':
desired_type = get_question_dict['type']
if desired_type != 'normal':
if (desired_type == 'multiple' or desired_type == 'multifill') and not q.multiple_support:
return {'question': 'error'}
if desired_type == 'blanks' and not q.fill_in:
return {'question': 'error'}
# The domain of random numbers that can be generated for the question
random_domain_list = q.random_domain
task = str(q.question_text)
task = task.replace('\\\\', '\\') # Replaces double \\ with \
task = task.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
task = task.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_type = desired_type
choices = q.choices.replace('\\\\', '\\')
choices = choices.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
choices = choices.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
conditions = q.conditions.replace('\\\\', '\\')
dictionary = q.dictionary
answer = q.answer.replace('\\\\', '\\')
primary_key = q.pk
fill_in = q.fill_in.replace('\\\\', '\\')
fill_in = fill_in.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
fill_in = fill_in.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_specific = "" # A variable that holds the extra values for a given type. ie. choices for multiple.
variables_used = "" # Sends a splitable string since dictionaries can't be passed between layers.
replacing_words = '' # The words that got replaced, and the words that replaced them
graph = q.graph # took out .replace('\\\\', '\\')
if graph:
graph = json.loads(graph)
#task = add_phantom_minus(task)
#answer = add_phantom_minus(answer)
#choices = add_phantom_minus(choices)
new_choices = ''
new_task = ''
new_answer = ''
variable_dict = ''
valid_solution = False
while valid_solution is False: # Loop until we get a form of the task that has a valid solution
variable_dict = generate_valid_numbers(task, random_domain_list, conditions, False)
variables_used = dict_to_string(variable_dict) # Get a string with the variables used
new_task = string_replace(task, variable_dict)
new_answer = string_replace(answer, variable_dict)
new_choices = string_replace(choices, variable_dict)
for x in range(0, len(graph)):
graph[x] = string_replace(graph[x], variable_dict)
graph[x] = parse_solution(graph[x], q.random_domain)
if new_answer == 'error':
continue # Retry if the current template resulted in a error.
valid_solution = True
if template_type.lower() == 'multiple':
new_choices = new_choices.split('§')
for x in range(len(new_choices)):
new_choices[x] = parse_solution(new_choices[x], q.random_domain)
new_choices.append(parse_solution(new_answer, q.random_domain).replace('§', 'og'))
shuffle(new_choices) # Shuffles the choices so that the answer is not always in the same place.
new_choices = '§'.join(new_choices)
new_choices = parenthesis_removal(new_choices)
template_specific = new_choices
#template_specific = remove_pm_and_add_parenthesis(template_specific)
elif template_type == 'blanks':
fill_in_dict = fill_in_the_blanks(fill_in)
# new_task = new_task + '\n' + fill_in_dict['fill_in'].replace('\\n', '\n')
new_task = new_task + '§' + fill_in_dict['fill_in']
new_task = replace_variables_from_array(variables_used.split('§'), new_task)
new_task = parse_solution(new_task, q.random_domain)
template_specific = fill_in_dict['hole_positions']
elif template_type == 'multifill':
new_choices = choices + '§' + answer.replace('§', 'og')
new_choices = parenthesis_removal(new_choices)
template_specific = multifill(new_choices, variable_dict)
if dictionary is not None:
replace_words_dict = replace_words(new_task, dictionary)
new_task = replace_words_dict['sentence']
replacing_words = replace_words_dict['replace_string']
number_of_answers = len(new_answer.split('§'))
if graph != None and graph != '': # to prevent error if none
graph = json.dumps(graph)
new_task = parse_solution(new_task, q.random_domain)
#new_task = remove_pm_and_add_parenthesis(new_task)
new_task = parenthesis_removal(new_task)
return_dict = {'question': new_task,
'variable_dictionary': variables_used, 'template_type': template_type,
'template_specific': template_specific, 'primary_key': primary_key,
'number_of_answers': number_of_answers, 'replacing_words': replacing_words,
'graph': graph, 'graph_settings': q.graph_settings, 'graph_color': q.graph_color}
return return_dict
@Debugger
def generate_level(user, level_id):
"""Makes a valid math question at the correct rating from a template in the database.
:param user: The user requesting a template
:param template_extra: (optional) A id used for requesting a specific template.
:param desired_type: (optional) A string for requesting a specific template type.
:return: Returns a complete math question with generated numbers.
"""
level = Level.objects.get(pk=level_id)
get_question_dict = get_level_question(user, level) # Gets a template from the DB
q = get_question_dict['template']
desired_type = get_question_dict['type']
# The domain of random numbers that can be generated for the question
random_domain_list = q.random_domain
task = str(q.question_text)
task = task.replace('\\\\', '\\') # Replaces double \\ with \
task = task.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
task = task.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_type = desired_type
choices = q.choices.replace('\\\\', '\\')
choices = choices.replace('(', '+parenthesisleft+')
choices = choices.replace(')', '+parenthesisright+')
conditions = q.conditions.replace('\\\\', '\\')
dictionary = q.dictionary
answer = q.answer.replace('\\\\', '\\')
primary_key = q.pk
fill_in = q.fill_in.replace('\\\\', '\\')
fill_in = fill_in.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
fill_in = fill_in.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_specific = "" # A variable that holds the extra values for a given type. ie. choices for multiple.
variables_used = ""
replacing_words = '' # The words that got replaced, and the words that replaced them
#task = add_phantom_minus(task)
#answer = add_phantom_minus(answer)
#choices = add_phantom_minus(choices)
new_choices = ''
new_task = ''
new_answer = ''
variable_dict = ''
graph = q.graph # took out .replace('\\\\', '\\')
if graph:
graph = json.loads(graph)
valid_solution = False
while valid_solution is False: # Loop until we get a form of the task that has a valid solution
variable_dict = generate_valid_numbers(task, random_domain_list, conditions, False)
variables_used = dict_to_string(variable_dict) # Get a string with the variables used
new_task = string_replace(task, variable_dict)
new_answer = string_replace(answer, variable_dict)
new_choices = string_replace(choices, variable_dict)
for x in range(0, len(graph)):
graph[x] = string_replace(graph[x], variable_dict)
graph[x] = parse_solution(graph[x], q.random_domain)
if new_answer == 'error':
continue # Retry if the current template resulted in a error.
valid_solution = True
if template_type.lower() == 'multiple':
new_choices = new_choices.split('§')
for x in range(len(new_choices)):
new_choices[x] = parse_solution(new_choices[x], q.random_domain)
new_choices.append(parse_solution(new_answer, q.random_domain).replace('§', 'og'))
shuffle(new_choices) # Shuffles the choices so that the answer is not always in the same place.
new_choices = '§'.join(new_choices)
new_choices = parenthesis_removal(new_choices)
template_specific = new_choices
#template_specific = remove_pm_and_add_parenthesis(template_specific)
elif template_type == 'blanks':
fill_in_dict = fill_in_the_blanks(fill_in)
# new_task = new_task + '\n' + fill_in_dict['fill_in'].replace('\\n', '\n')
new_task = new_task + '§' + fill_in_dict['fill_in']
new_task = replace_variables_from_array(variables_used.split('§'), new_task)
new_task = parse_solution(new_task, q.random_domain)
template_specific = fill_in_dict['hole_positions']
elif template_type == 'multifill':
new_choices = choices + '§' + answer.replace('§', 'og')
template_specific = multifill(new_choices, variable_dict)
if dictionary is not None:
replace_words_dict = replace_words(new_task, dictionary)
new_task = replace_words_dict['sentence']
replacing_words = replace_words_dict['replace_string']
number_of_answers = len(new_answer.split('§'))
if graph != None and graph != '': # to prevent error if none
graph = json.dumps(graph)
new_task = parse_solution(new_task, q.random_domain)
# new_task = remove_pm_and_add_parenthesis(new_task)
new_task = parenthesis_removal(new_task)
return_dict = {'question': new_task, 'variable_dictionary': variables_used, 'template_type': template_type,
'template_specific': template_specific, 'primary_key': primary_key,
'number_of_answers': number_of_answers, 'replacing_words': replacing_words,
'graph': graph, 'graph_settings': q.graph_settings, 'graph_color': q.graph_color}
return return_dict
@Debugger
def generate_valid_numbers(template, random_domain, conditions, test):
"""Generates valid numbers using each variables random domain.
Also makes sure all variables follows the given conditions.
:param template: The template used.
:param random_domain: dict used for random domains
:param conditions: The conditions the variable have to follow.
:param test: If true the function returns the domain_dict instead of variable_dict.
:return: The current generated variables used in the template.
"""
hardcoded_variables = ['R22R', 'R21R', 'R20R', 'R19R', 'R18R', 'R17R', 'R16R', 'R15R', 'R14R', 'R13R', 'R12R',
'R11R', 'R10R', 'R9R', 'R8R', 'R7R', 'R6R', 'R3R', 'R2R', 'R1R', 'R0R']
domain_dict = {}
domain_list = {}
variable_dict = {}
try:
random_domain = json.loads(random_domain)
# Loops through all possible variable names, and generate a random number for it.
# Adds the variables names and numbers to the 2 dictionaries and the string
for key in random_domain:
if random_domain[key][1]:
random_number = str(make_number_from_list(random_domain[key][0]))
else:
random_number = str(make_number(random_domain[key][0]))
domain_dict[key] = random_domain[key][0]
domain_list[key] = random_domain[key][1]
variable_dict[key] = random_number
except ValueError:
pass
if len(conditions) > 1:
variable_dict = check_conditions(conditions, variable_dict, domain_dict, domain_list)
# lesser_than('R0 * 2 < 3', domain_dict, variable_dict) #for testing purposes
if test:
return domain_dict
return variable_dict
@Debugger
def check_conditions(conditions, variable_dict, domain_dict, domain_list):
"""A function that checks if the generated variables pass the conditions and generates new ones until they do.
:param conditions: The conditions of the template.
:param variable_dict: List of variables.
:param domain_dict: the domain of the variables.
:param domain_list: a dict with the domain list.
:return: List of variables that pass the conditions of the given template.
"""
conditions = remove_unnecessary(conditions)
# Check conditions --> if false: change a variable -> check conditions
inserted_conditions = string_replace(conditions, variable_dict)
while not parse_expr(latex_to_sympy(inserted_conditions), transformations=standard_transformations +
(convert_xor, implicit_multiplication_application,), global_dict=None, evaluate=True):
variable_to_change = choice(list(variable_dict.keys())) # Chose a random key from variable_dict
if domain_list[variable_to_change]:
variable_dict[variable_to_change] = make_number_from_list(domain_dict[variable_to_change])
else:
variable_dict[variable_to_change] = new_random_value(variable_to_change, domain_dict)
inserted_conditions = string_replace(conditions, variable_dict)
return variable_dict
@Debugger
def get_variables_used(string, variable_dict):
"""Returns what variables are used in the given string as a list."""
used_variables = []
for key in variable_dict:
temp_string = string.replace(key, "")
if temp_string != string:
used_variables.append(key)
string = temp_string
return used_variables
@Debugger
def new_random_value(value, domain_dict, bonus=0, extra=''):
"""Creates a new random value for a given variable using its domain.
:param value: The value to change.
:param domain_dict: Domain of the variables, decides what range of the variable and number of decimals.
:param bonus: Used for limiting the domain for the variable further if needed.
:param extra: argument for different configurations of what approach to use for the new variable
:return: New value.
"""
domain = domain_dict[value]
# If bonus isn't between the domain values, changing the value won't fix the condition.
bonus -= 1 # This is because we use smaller than and not <=..
if extra == 'left': # Approach to use if on the left side of lesser than (<)
if int(domain[0]) <= bonus <= int(domain[1]):
domain[1] = bonus
new_value = randint(int(domain[0]), int(domain[1]))
elif extra == 'right': # Approach to use if on the right side of lesser than (<)
if int(domain[0]) <= bonus <= int(domain[1]):
domain[0] = bonus
new_value = randint(int(domain[0]), int(domain[1]))
else:
new_value = randint(int(domain[0]), int(domain[1]))
return new_value
def make_number_from_list(domain):
return sympify(latex_to_sympy(choice(domain)))
@Debugger
def make_number(domain):
"""Returns a random number within the range and decimal point of the domain given."""
number = uniform(float(domain[0]), float(domain[1]))
try:
number = round(number, int(domain[2]))
if number.is_integer():
number = round(number)
except IndexError:
number = round(number)
return number
| bsd-3-clause | -6,944,604,632,999,622,000 | 46.210084 | 114 | 0.654563 | false |
dinhkhanh/trac | sample-plugins/Timestamp.py | 1 | 1219 | """Inserts the current time (in seconds) into the wiki page."""
revision = "$Rev: 10617 $"
url = "$URL: https://svn.edgewall.org/repos/trac/tags/trac-1.0/sample-plugins/Timestamp.py $"
#
# The following shows the code for macro, old-style.
#
# The `execute` function serves no purpose other than to illustrate
# the example, it will not be used anymore.
#
# ---- (ignore in your own macro) ----
# --
import time # Trac before version 0.11 was using `time` module
def execute(hdf, txt, env):
t = time.localtime()
return "<b>%s</b>" % time.strftime('%c', t)
# --
# ---- (ignore in your own macro) ----
#
# The following is the converted new-style macro
#
# ---- (reuse for your own macro) ----
# --
from datetime import datetime
# Note: since Trac 0.11, datetime objects are used internally
from genshi.builder import tag
from trac.util.datefmt import format_datetime, utc
from trac.wiki.macros import WikiMacroBase
class TimestampMacro(WikiMacroBase):
_description = "Inserts the current time (in seconds) into the wiki page."
def expand_macro(self, formatter, name, args):
t = datetime.now(utc)
return tag.b(format_datetime(t, '%c'))
# --
# ---- (reuse for your own macro) ----
| bsd-3-clause | -3,748,584,367,646,097,400 | 27.348837 | 93 | 0.673503 | false |
botswana-harvard/bcpp-subject | bcpp_subject/admin/postitive_participant_admin.py | 1 | 2114 | from django.contrib import admin
from django.utils.safestring import mark_safe
from edc_base.modeladmin_mixins import audit_fieldset_tuple
from ..admin_site import bcpp_subject_admin
from ..forms import PositiveParticipantForm
from ..models import PositiveParticipant
from .modeladmin_mixins import CrfModelAdminMixin
@admin.register(PositiveParticipant, site=bcpp_subject_admin)
class PositiveParticipantAdmin(CrfModelAdminMixin, admin.ModelAdmin):
form = PositiveParticipantForm
fieldsets = (
(None, {
'fields': (
'subject_visit',
'internalize_stigma',
'internalized_stigma',
'friend_stigma',
'family_stigma',
'enacted_talk_stigma',
'enacted_respect_stigma',
'enacted_jobs_tigma')}),
audit_fieldset_tuple,
)
radio_fields = {
'internalize_stigma': admin.VERTICAL,
'internalized_stigma': admin.VERTICAL,
'friend_stigma': admin.VERTICAL,
'family_stigma': admin.VERTICAL,
'enacted_talk_stigma': admin.VERTICAL,
'enacted_respect_stigma': admin.VERTICAL,
'enacted_jobs_tigma': admin.VERTICAL, }
additional_instructions = mark_safe(
'<h5>Note to Interviewer</h5>'
'Note The following supplemental questions '
'are only asked for respondents with known HIV infection. '
'SKIP for respondents without known HIV infection. '
'<H5><span style="color:orange;">Read to Participant</span></H5>'
'You let us know earlier that you '
'are HIV positive. I would now like to ask you a few '
'questions about your experiences living with HIV. '
'Please remember this interview and your responses '
'are private and confidential.In this section, '
'I\'m going to read you statements '
'about how you may feel about yourself and your '
'HIV/AIDS infection. I would like you to tell me '
'if you strongly agree, agree, disagree or strongly '
'disagree with each statement?')
| gpl-3.0 | -1,579,892,250,554,721,500 | 37.436364 | 73 | 0.644749 | false |
walac/build-mozharness | mozharness/mozilla/proxxy.py | 1 | 6748 | """Proxxy module. Defines a Proxxy element that fetches files using local
proxxy instances (if available). The goal of Proxxy is to lower the traffic
from the cloud to internal servers.
"""
import urlparse
import socket
from mozharness.base.log import INFO, ERROR, LogMixin
from mozharness.base.script import ScriptMixin
# Proxxy {{{1
class Proxxy(ScriptMixin, LogMixin):
"""
Support downloading files from HTTP caching proxies
Current supports 'proxxy' instances, in which the caching proxy at
proxxy.domain.com will cache requests for ftp.mozilla.org when passed requests to
http://ftp.mozilla.org.proxxy.domain.com/...
self.config['proxxy']['urls'] defines the list of backend hosts we are currently caching, and
the hostname prefix to use for proxxy
self.config['proxxy']['instances'] lists current hostnames for proxxy instances. wildcard DNS
is set up so that *.proxxy.domain.com is a CNAME to the proxxy instance
"""
# Default configuration. Can be overridden via self.config
PROXXY_CONFIG = {
"urls": [
('http://ftp.mozilla.org', 'ftp.mozilla.org'),
('https://ftp.mozilla.org', 'ftp.mozilla.org'),
('https://ftp-ssl.mozilla.org', 'ftp.mozilla.org'),
('http://pvtbuilds.pvt.build.mozilla.org', 'pvtbuilds.mozilla.org'),
# tooltool
('http://tooltool.pvt.build.mozilla.org', 'tooltool.pvt.build.mozilla.org'),
# pypi
('http://pypi.pvt.build.mozilla.org', 'pypi.pvt.build.mozilla.org'),
('http://pypi.pub.build.mozilla.org', 'pypi.pub.build.mozilla.org'),
# taskcluster stuff
('https://queue.taskcluster.net', 'queue.taskcluster.net'),
],
"instances": [
'proxxy1.srv.releng.use1.mozilla.com',
'proxxy1.srv.releng.usw2.mozilla.com',
'proxxy1.srv.releng.scl3.mozilla.com',
],
"regions": [".use1.", ".usw2.", ".scl3"],
}
def __init__(self, config, log_obj):
# proxxy does not need the need the full configuration,
# just the 'proxxy' element
# if configuration has no 'proxxy' section use the default
# configuration instead
self.config = config.get('proxxy', self.PROXXY_CONFIG)
self.log_obj = log_obj
def get_proxies_for_url(self, url):
"""Maps url to its proxxy urls
Args:
url (str): url to be proxxied
Returns:
list: of proxy URLs to try, in sorted order.
please note that url is NOT included in this list.
"""
config = self.config
urls = []
self.info("proxxy config: %s" % config)
proxxy_urls = config.get('urls', [])
proxxy_instances = config.get('instances', [])
url_parts = urlparse.urlsplit(url)
url_path = url_parts.path
if url_parts.query:
url_path += "?" + url_parts.query
if url_parts.fragment:
url_path += "#" + url_parts.fragment
for prefix, target in proxxy_urls:
if url.startswith(prefix):
self.info("%s matches %s" % (url, prefix))
for instance in proxxy_instances:
if not self.query_is_proxxy_local(instance):
continue
new_url = "http://%s.%s%s" % (target, instance, url_path)
urls.append(new_url)
for url in urls:
self.info("URL Candidate: %s" % url)
return urls
def get_proxies_and_urls(self, urls):
"""Gets a list of urls and returns a list of proxied urls, the list
of input urls is appended at the end of the return values
Args:
urls (list, tuple): urls to be mapped to proxxy urls
Returns:
list: proxxied urls and urls. urls are appended to the proxxied
urls list and they are the last elements of the list.
"""
proxxy_list = []
for url in urls:
# get_proxies_for_url returns always a list...
proxxy_list.extend(self.get_proxies_for_url(url))
proxxy_list.extend(urls)
return proxxy_list
def query_is_proxxy_local(self, url):
"""Checks is url is 'proxxable' for the local instance
Args:
url (string): url to check
Returns:
bool: True if url maps to a usable proxxy,
False in any other case
"""
fqdn = socket.getfqdn()
config = self.config
regions = config.get('regions', [])
return any(r in fqdn and r in url for r in regions)
def download_proxied_file(self, url, file_name, parent_dir=None,
create_parent_dir=True, error_level=ERROR,
exit_code=3):
"""
Wrapper around BaseScript.download_file that understands proxies
retry dict is set to 3 attempts, sleeping time 30 seconds.
Args:
url (string): url to fetch
file_name (string, optional): output filename, defaults to None
if file_name is not defined, the output name is taken from
the url.
parent_dir (string, optional): name of the parent directory
create_parent_dir (bool, optional): if True, creates the parent
directory. Defaults to True
error_level (mozharness log level, optional): log error level
defaults to ERROR
exit_code (int, optional): return code to log if file_name
is not defined and it cannot be determined from the url
Returns:
string: file_name if the download has succeded, None in case of
error. In case of error, if error_level is set to FATAL,
this method interrupts the execution of the script
"""
urls = self.get_proxies_and_urls([url])
for url in urls:
self.info("trying %s" % url)
retval = self.download_file(
url, file_name=file_name, parent_dir=parent_dir,
create_parent_dir=create_parent_dir, error_level=ERROR,
exit_code=exit_code,
retry_config=dict(
attempts=3,
sleeptime=30,
error_level=INFO,
))
if retval:
return retval
self.log("Failed to download from all available URLs, aborting",
level=error_level, exit_code=exit_code)
return retval
| mpl-2.0 | -458,509,179,348,131,500 | 38.232558 | 97 | 0.566835 | false |
laijingtao/landlab | landlab/plot/imshow.py | 1 | 20927 | #! /usr/bin/env python
"""
Methods to plot data defined on Landlab grids.
Plotting functions
++++++++++++++++++
.. autosummary::
:toctree: generated/
~landlab.plot.imshow.imshow_grid
~landlab.plot.imshow.imshow_grid_at_cell
~landlab.plot.imshow.imshow_grid_at_node
"""
import numpy as np
import inspect
from landlab.field.scalar_data_fields import FieldError
try:
import matplotlib.pyplot as plt
except ImportError:
import warnings
warnings.warn('matplotlib not found', ImportWarning)
from landlab.grid import CLOSED_BOUNDARY
from landlab.grid.raster import RasterModelGrid
from landlab.grid.voronoi import VoronoiDelaunayGrid
from landlab.utils.decorators import deprecated
def imshow_grid_at_node(grid, values, **kwds):
"""Prepare a map view of data over all nodes in the grid.
Data is plotted as cells shaded with the value at the node at its center.
Outer edges of perimeter cells are extrapolated. Closed elements are
colored uniformly (default black, overridden with kwd 'color_for_closed');
other open boundary nodes get their actual values.
*values* can be a field name, a regular array, or a masked array. If a
masked array is provided, masked entries will be treated as if they were
Landlab CLOSED_BOUNDARYs. Used together with the color_at_closed=None
keyword (i.e., "transparent"), this can allow for construction of overlay
layers in a figure (e.g., only defining values in a river network, and
overlaying it on another landscape).
Use matplotlib functions like xlim, ylim to modify your plot after calling
:func:`imshow_grid`, as desired.
This function happily works with both regular and irregular grids.
Construction ::
imshow_grid_at_node(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=None,
symmetric_cbar=False, cmap='pink',
limits=(values.min(), values.max()),
vmin=values.min(), vmax=values.max(),
allow_colorbar=True,
norm=[linear], shrink=1.,
color_for_closed='black',
color_for_background=None,
show_elements=False, output=None)
Parameters
----------
grid : ModelGrid
Grid containing the field to plot, or describing the geometry of the
provided array.
values : array_like, masked_array, or str
Node values, or a field name as a string from which to draw the data.
plot_name : str, optional
String to put as the plot title.
var_name : str, optional
Variable name, to use as a colorbar label.
var_units : str, optional
Units for the variable being plotted, for the colorbar.
grid_units : tuple of str, optional
Units for y, and x dimensions. If None, component will look to the
gri property `axis_units` for this information. If no units are
specified there, no entry is made.
symmetric_cbar : bool
Make the colormap symetric about 0.
cmap : str
Name of a colormap
limits : tuple of float
Minimum and maximum of the colorbar.
vmin, vmax: floats
Alternatives to limits.
allow_colorbar : bool
If True, include the colorbar.
colorbar_label : str or None
The string with which to label the colorbar.
norm : matplotlib.colors.Normalize
The normalizing object which scales data, typically into the interval
[0, 1]. Ignore in most cases.
shrink : float
Fraction by which to shrink the colorbar.
color_for_closed : str or None
Color to use for closed nodes (default 'black'). If None, closed
(or masked) nodes will be transparent.
color_for_background : color str or other color declaration, or None
Color to use for closed elements (default None). If None, the
background will be transparent, and appear white.
show_elements : bool
If True, and grid is a Voronoi, the faces will be plotted in black
along with just the colour of the cell, defining the cell outlines
(defaults False).
output : None, string, or bool
If None (or False), the image is sent to the imaging buffer to await
an explicit call to show() or savefig() from outside this function.
If a string, the string should be the path to a save location, and the
filename (with file extension). The function will then call
plt.savefig([string]) itself. If True, the function will call
plt.show() itself once plotting is complete.
"""
if isinstance(values, str):
values_at_node = grid.at_node[values]
else:
values_at_node = values
if values_at_node.size != grid.number_of_nodes:
raise ValueError('number of values does not match number of nodes')
values_at_node = np.ma.masked_where(
grid.status_at_node == CLOSED_BOUNDARY, values_at_node)
try:
shape = grid.shape
except AttributeError:
shape = (-1, )
_imshow_grid_values(grid, values_at_node.reshape(shape), **kwds)
if isinstance(values, str):
plt.title(values)
@deprecated(use='imshow_grid_at_node', version='0.5')
def imshow_node_grid(grid, values, **kwds):
imshow_grid_at_node(grid, values, **kwds)
def imshow_grid_at_cell(grid, values, **kwds):
"""Map view of grid data over all grid cells.
Prepares a map view of data over all cells in the grid.
Method can take any of the same ``**kwds`` as :func:`imshow_grid_at_node`.
Construction ::
imshow_grid_at_cell(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=None,
symmetric_cbar=False, cmap='pink',
limits=(values.min(), values.max()),
vmin=values.min(), vmax=values.max(),
allow_colorbar=True, colorbar_label=None,
norm=[linear], shrink=1.,
color_for_closed='black',
color_for_background=None,
show_elements=False, output=None)
Parameters
----------
grid : ModelGrid
Grid containing the field to plot, or describing the geometry of the
provided array.
values : array_like, masked_array, or str
Values at the cells on the grid. Alternatively, can be a field name
(string) from which to draw the data from the grid.
plot_name : str, optional
String to put as the plot title.
var_name : str, optional
Variable name, to use as a colorbar label.
var_units : str, optional
Units for the variable being plotted, for the colorbar.
grid_units : tuple of str, optional
Units for y, and x dimensions. If None, component will look to the
gri property `axis_units` for this information. If no units are
specified there, no entry is made.
symmetric_cbar : bool
Make the colormap symetric about 0.
cmap : str
Name of a colormap
limits : tuple of float
Minimum and maximum of the colorbar.
vmin, vmax: floats
Alternatives to limits.
allow_colorbar : bool
If True, include the colorbar.
colorbar_label : str or None
The string with which to label the colorbar.
norm : matplotlib.colors.Normalize
The normalizing object which scales data, typically into the interval
[0, 1]. Ignore in most cases.
shrink : float
Fraction by which to shrink the colorbar.
color_for_closed : str or None
Color to use for closed elements (default 'black'). If None, closed
(or masked) elements will be transparent.
color_for_background : color str or other color declaration, or None
Color to use for closed elements (default None). If None, the
background will be transparent, and appear white.
show_elements : bool
If True, and grid is a Voronoi, the faces will be plotted in black
along with just the colour of the cell, defining the cell outlines
(defaults False).
output : None, string, or bool
If None (or False), the image is sent to the imaging buffer to await
an explicit call to show() or savefig() from outside this function.
If a string, the string should be the path to a save location, and the
filename (with file extension). The function will then call
plt.savefig([string]) itself. If True, the function will call
plt.show() itself once plotting is complete.
Raises
------
ValueError
If input grid is not uniform rectilinear.
"""
if isinstance(values, str):
try:
values_at_cell = grid.at_cell[values]
except FieldError:
values_at_cell = grid.at_node[values]
else:
values_at_cell = values
if values_at_cell.size == grid.number_of_nodes:
values_at_cell = values_at_cell[grid.node_at_cell]
if values_at_cell.size != grid.number_of_cells:
raise ValueError('number of values must match number of cells or '
'number of nodes')
values_at_cell = np.ma.asarray(values_at_cell)
values_at_cell.mask = True
values_at_cell.mask[grid.core_cells] = False
myimage = _imshow_grid_values(grid,
values_at_cell.reshape(grid.cell_grid_shape),
**kwds)
if isinstance(values, str):
plt.title(values)
return myimage
@deprecated(use='imshow_grid_at_cell', version='0.5')
def imshow_cell_grid(grid, values, **kwds):
imshow_grid_at_cell(grid, values, **kwds)
def _imshow_grid_values(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=(None, None),
symmetric_cbar=False, cmap='pink', limits=None,
allow_colorbar=True, vmin=None, vmax=None,
norm=None, shrink=1., color_for_closed='black',
color_for_background=None, show_elements=False,
output=None):
gridtypes = inspect.getmro(grid.__class__)
cmap = plt.get_cmap(cmap)
if color_for_closed is not None:
cmap.set_bad(color=color_for_closed)
else:
cmap.set_bad(alpha=0.)
if isinstance(grid, RasterModelGrid):
if values.ndim != 2:
raise ValueError('values must have ndim == 2')
y = np.arange(values.shape[0] + 1) * grid.dy - grid.dy * .5
x = np.arange(values.shape[1] + 1) * grid.dx - grid.dx * .5
kwds = dict(cmap=cmap)
(kwds['vmin'], kwds['vmax']) = (values.min(), values.max())
if (limits is None) and ((vmin is None) and (vmax is None)):
if symmetric_cbar:
(var_min, var_max) = (values.min(), values.max())
limit = max(abs(var_min), abs(var_max))
(kwds['vmin'], kwds['vmax']) = (- limit, limit)
elif limits is not None:
(kwds['vmin'], kwds['vmax']) = (limits[0], limits[1])
else:
if vmin is not None:
kwds['vmin'] = vmin
if vmax is not None:
kwds['vmax'] = vmax
if np.isclose(grid.dx, grid.dy):
if values.size == grid.number_of_nodes:
myimage = plt.imshow(
values.reshape(grid.shape), origin='lower',
extent=(x[0], x[-1], y[0], y[-1]), **kwds)
else: # this is a cell grid, and has been reshaped already...
myimage = plt.imshow(values, origin='lower',
extent=(x[0], x[-1], y[0], y[-1]), **kwds)
myimage = plt.pcolormesh(x, y, values, **kwds)
plt.gca().set_aspect(1.)
plt.autoscale(tight=True)
if allow_colorbar:
cb = plt.colorbar(norm=norm, shrink=shrink)
elif VoronoiDelaunayGrid in gridtypes:
# This is still very much ad-hoc, and needs prettifying.
# We should save the modifications needed to plot color all the way
# to the diagram edge *into* the grid, for faster plotting.
# (see http://stackoverflow.com/questions/20515554/...
# colorize-voronoi-diagram)
# (This technique is not implemented yet)
from scipy.spatial import voronoi_plot_2d
import matplotlib.colors as colors
import matplotlib.cm as cmx
cm = plt.get_cmap(cmap)
if (limits is None) and ((vmin is None) and (vmax is None)):
# only want to work with NOT CLOSED nodes
open_nodes = grid.status_at_node != 4
if symmetric_cbar:
(var_min, var_max) = (values.flat[
open_nodes].min(), values.flat[open_nodes].max())
limit = max(abs(var_min), abs(var_max))
(vmin, vmax) = (- limit, limit)
else:
(vmin, vmax) = (values.flat[
open_nodes].min(), values.flat[open_nodes].max())
elif limits is not None:
(vmin, vmax) = (limits[0], limits[1])
else:
open_nodes = grid.status_at_node != 4
if vmin is None:
vmin = values.flat[open_nodes].min()
if vmax is None:
vmax = values.flat[open_nodes].max()
cNorm = colors.Normalize(vmin, vmax)
scalarMap = cmx.ScalarMappable(norm=cNorm, cmap=cm)
colorVal = scalarMap.to_rgba(values)
if show_elements:
myimage = voronoi_plot_2d(grid.vor, show_vertices=False,
show_points=False)
# show_points to be supported in scipy0.18, but harmless for now
mycolors = (i for i in colorVal)
for order in grid.vor.point_region:
region = grid.vor.regions[order]
colortouse = next(mycolors)
if -1 not in region:
polygon = [grid.vor.vertices[i] for i in region]
plt.fill(*zip(*polygon), color=colortouse)
plt.gca().set_aspect(1.)
# plt.autoscale(tight=True)
# Tempting though it is to move the boundary outboard of the outermost
# nodes (e.g., to the outermost corners), this is a bad idea, as the
# outermost cells tend to have highly elongated shapes which make the
# plot look stupid
plt.xlim((np.min(grid.node_x), np.max(grid.node_x)))
plt.ylim((np.min(grid.node_y), np.max(grid.node_y)))
scalarMap.set_array(values)
if allow_colorbar:
cb = plt.colorbar(scalarMap, shrink=shrink)
if grid_units[1] is None and grid_units[0] is None:
grid_units = grid.axis_units
if grid_units[1] == '-' and grid_units[0] == '-':
plt.xlabel('X')
plt.ylabel('Y')
else:
plt.xlabel('X (%s)' % grid_units[1])
plt.ylabel('Y (%s)' % grid_units[0])
else:
plt.xlabel('X (%s)' % grid_units[1])
plt.ylabel('Y (%s)' % grid_units[0])
if plot_name is not None:
plt.title('%s' % (plot_name))
if var_name is not None or var_units is not None:
if var_name is not None:
assert type(var_name) is str
if var_units is not None:
assert type(var_units) is str
colorbar_label = var_name + ' (' + var_units + ')'
else:
colorbar_label = var_name
else:
assert type(var_units) is str
colorbar_label = '(' + var_units + ')'
assert type(colorbar_label) is str
assert allow_colorbar
cb.set_label(colorbar_label)
if color_for_background is not None:
plt.gca().set_axis_bgcolor(color_for_background)
if output is not None:
if type(output) is str:
plt.savefig(output)
plt.clf()
elif output:
plt.show()
def imshow_grid(grid, values, **kwds):
"""Prepare a map view of data over all nodes or cells in the grid.
Data is plotted as colored cells. If at='node', the surrounding cell is
shaded with the value at the node at its center. If at='cell', the cell
is shaded with its own value. Outer edges of perimeter cells are
extrapolated. Closed elements are colored uniformly (default black,
overridden with kwd 'color_for_closed'); other open boundary nodes get
their actual values.
*values* can be a field name, a regular array, or a masked array. If a
masked array is provided, masked entries will be treated as if they were
Landlab CLOSED_BOUNDARYs. Used together with the color_at_closed=None
keyword (i.e., "transparent"), this can allow for construction of overlay
layers in a figure (e.g., only defining values in a river network, and
overlaying it on another landscape).
Use matplotlib functions like xlim, ylim to modify your plot after calling
:func:`imshow_grid`, as desired.
This function happily works with both regular and irregular grids.
Construction ::
imshow_grid(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=None,
symmetric_cbar=False, cmap='pink',
limits=(values.min(), values.max()),
vmin=values.min(), vmax=values.max(),
allow_colorbar=True, colorbar_label=None,
norm=[linear], shrink=1.,
color_for_closed='black',
color_for_background=None,
show_elements=False)
Parameters
----------
grid : ModelGrid
Grid containing the field to plot, or describing the geometry of the
provided array.
values : array_like, masked_array, or str
Node or cell values, or a field name as a string from which to draw
the data.
at : str, {'node', 'cell'}
Tells plotter where values are defined.
plot_name : str, optional
String to put as the plot title.
var_name : str, optional
Variable name, to use as a colorbar label.
var_units : str, optional
Units for the variable being plotted, for the colorbar.
grid_units : tuple of str, optional
Units for y, and x dimensions. If None, component will look to the
gri property `axis_units` for this information. If no units are
specified there, no entry is made.
symmetric_cbar : bool
Make the colormap symetric about 0.
cmap : str
Name of a colormap
limits : tuple of float
Minimum and maximum of the colorbar.
vmin, vmax: floats
Alternatives to limits.
allow_colorbar : bool
If True, include the colorbar.
colorbar_label : str or None
The string with which to label the colorbar.
norm : matplotlib.colors.Normalize
The normalizing object which scales data, typically into the interval
[0, 1]. Ignore in most cases.
shrink : float
Fraction by which to shrink the colorbar.
color_for_closed : str or None
Color to use for closed elements (default 'black'). If None, closed
(or masked) elements will be transparent.
color_for_background : color str or other color declaration, or None
Color to use for closed elements (default None). If None, the
background will be transparent, and appear white.
show_elements : bool
If True, and grid is a Voronoi, the faces will be plotted in black
along with just the colour of the cell, defining the cell outlines
(defaults False).
output : None, string, or bool
If None (or False), the image is sent to the imaging buffer to await
an explicit call to show() or savefig() from outside this function.
If a string, the string should be the path to a save location, and the
filename (with file extension). The function will then call
plt.savefig([string]) itself. If True, the function will call
plt.show() itself once plotting is complete.
"""
show = kwds.pop('show', False)
values_at = kwds.pop('values_at', 'node')
values_at = kwds.pop('at', values_at)
if isinstance(values, str):
values = grid.field_values(values_at, values)
if isinstance(values, str):
values = grid.field_values(values_at, values)
if values_at == 'node':
imshow_grid_at_node(grid, values, **kwds)
elif values_at == 'cell':
imshow_grid_at_cell(grid, values, **kwds)
else:
raise TypeError('value location %s not understood' % values_at)
# retained for backwards compatibility:
if show:
plt.show()
| mit | -8,240,677,285,092,095,000 | 39.321773 | 79 | 0.60711 | false |
OCA/l10n-spain | l10n_es_facturae/models/res_partner.py | 1 | 1829 | # © 2015 Omar Castiñeira (Comunitea)
# © 2017 Creu Blanca
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api, exceptions, _
class ResPartner(models.Model):
_inherit = "res.partner"
facturae = fields.Boolean('Factura electrónica')
facturae_version = fields.Selection([
('3_2', '3.2'),
('3_2_1', '3.2.1'),
('3_2_2', '3.2.2'),
])
organo_gestor = fields.Char('Órgano gestor', size=10)
unidad_tramitadora = fields.Char('Unidad tramitadora', size=10)
oficina_contable = fields.Char('Oficina contable', size=10)
organo_proponente = fields.Char('Órgano proponente', size=10)
invoice_integration_method_ids = fields.Many2many(
comodel_name='account.invoice.integration.method',
string="Integration Methods"
)
attach_invoice_as_annex = fields.Boolean()
def get_facturae_residence(self):
if not self.country_id:
return 'E'
if self.country_id.code == 'ES':
return 'R'
for group in self.country_id.country_group_ids:
if group.name == 'Europe':
return 'U'
return 'E'
@api.constrains('facturae', 'vat', 'state_id', 'country_id')
def check_facturae(self):
for record in self:
if record.facturae:
if not record.vat:
raise exceptions.ValidationError(_('Vat must be defined'))
if not record.country_id:
raise exceptions.ValidationError(
_('Country must be defined'))
if record.country_id.code_alpha3 == 'ESP':
if not record.state_id:
raise exceptions.ValidationError(
_('State must be defined'))
| agpl-3.0 | 8,327,318,882,392,049,000 | 36.204082 | 78 | 0.575425 | false |
PyListener/CF401-Project-1---PyListener | pylistener/scripts/initializedb.py | 1 | 5166 | import os
import sys
import transaction
import json
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from pylistener.models.meta import Base
from pylistener.models import (
get_engine,
get_session_factory,
get_tm_session,
)
from pylistener.models import User, AddressBook, Attribute, Category, UserAttributeLink
from passlib.apps import custom_app_context as pwd_context
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
settings["sqlalchemy.url"] = os.environ["DATABASE_URL"]
engine = get_engine(settings)
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)
session_factory = get_session_factory(engine)
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'data.json')) as data:
json_data = data.read()
j_data = json.loads(json_data)
with open(os.path.join(here, 'contacts.json')) as contacts:
test_contacts = contacts.read()
j_test_contacts = json.loads(test_contacts)
with transaction.manager:
dbsession = get_tm_session(session_factory, transaction.manager)
test_user = create_user_object("testted", "password", "Tedley Lamar")
test_user2 = create_user_object("Nurse Jackie", "password1234", "Charlie")
dbsession.add(test_user)
dbsession.add(test_user2)
u_id = dbsession.query(User).first().id
u_id2 = dbsession.query(User).filter(User.username == "Nurse Jackie").first().id
for i in range(3):
add_row = create_address_object(
j_test_contacts[i]["name"],
j_test_contacts[i]["phone"],
j_test_contacts[i]["email"],
u_id,
get_picture_binary(os.path.join(here, j_test_contacts[i]["picture"])),
j_test_contacts[i]["pic_mime"]
)
add_row2 = create_address_object(
j_test_contacts[i + 3]["name"],
j_test_contacts[i + 3]["phone"],
j_test_contacts[i + 3]["email"],
u_id2,
get_picture_binary(os.path.join(here, j_test_contacts[i + 3]["picture"])),
j_test_contacts[i]["pic_mime"]
)
dbsession.add(add_row)
dbsession.add(add_row2)
for category in j_data:
cat_row = create_cat_object(
category["label"],
category["desc"],
get_picture_binary(os.path.join(here, category["picture"])),
j_data[i]["pic_mime"]
)
dbsession.add(cat_row)
cat_id_query = dbsession.query(Category)
cat_id = cat_id_query.filter(Category.label == category["label"]).first()
for attribute in category["attributes"]:
attr_row = create_att_object(
attribute["label"],
attribute["desc"],
get_picture_binary(os.path.join(here, attribute["picture"])),
attribute["pic_mime"],
cat_id.id
)
dbsession.add(attr_row)
attr_id = dbsession.query(Attribute).filter(Attribute.label == attribute["label"]).first().id
link_row = create_user_att_link_object(u_id, attr_id)
link_row2 = create_user_att_link_object(u_id2, attr_id)
dbsession.add(link_row)
dbsession.add(link_row2)
def get_picture_binary(path):
"""Open an image to save binary data."""
with open(path, "rb") as pic_data:
return pic_data.read()
def create_cat_object(lbl, des, pic, pic_mime):
"""Return a Category object with necessary information."""
return Category(
label=lbl,
desc=des,
picture=pic,
pic_mime=pic_mime,
)
def create_att_object(lbl, des, pic, pic_mime, c_id):
"""Return an Attribute object with given information."""
return Attribute(
label=lbl,
desc=des,
picture=pic,
pic_mime=pic_mime,
cat_id=c_id
)
def create_user_object(uname, psswd,sub_u):
"""Return a User object with given information."""
return User(
username=uname,
password=pwd_context.hash(psswd),
sub_user=sub_u
)
def create_address_object(nme, phn, eml, u, pic, pic_mime):
"""Return an AddressBook object with given information."""
return AddressBook(
name=nme,
phone=phn,
email=eml,
user=u,
picture=pic,
pic_mime=pic_mime,
)
def create_user_att_link_object(u, att):
"""Return a UserAttributeLink object with given information."""
return UserAttributeLink(
user_id=u,
attr_id=att
)
| mit | 5,939,428,013,816,041,000 | 30.888889 | 109 | 0.576849 | false |
Chuban/moose | python/TestHarness/schedulers/Job.py | 1 | 4761 | import re
from timeit import default_timer as clock
class Timer(object):
"""
A helper class for testers to track the time it takes to run.
Every call to the start method must be followed by a call to stop.
"""
def __init__(self):
self.starts = []
self.ends = []
def start(self):
""" starts the timer clock """
self.starts.append(clock())
def stop(self):
""" stop/pauses the timer clock """
self.ends.append(clock())
def cumulativeDur(self):
""" returns the total/cumulative time taken by the timer """
diffs = [end - start for start, end in zip(self.starts, self.ends)]
return sum(diffs)
def avgerageDur(self):
return self.cumulativeDur() / len(self.starts)
def nRuns(self):
return len(self.starts)
def reset(self):
self.starts = []
self.ends = []
class Job(object):
"""
The Job class is a simple container for the tester and its associated output file object, the DAG,
the process object, the exit codes, and the start and end times.
"""
def __init__(self, tester, tester_dag, options):
self.options = options
self.__tester = tester
self.timer = Timer()
self.__dag = tester_dag
self.__dag_clone = None
self.__outfile = None
self.__start_time = clock()
self.__end_time = None
self.__std_out = ''
self.report_timer = None
def getTester(self):
""" Return the tester object """
return self.__tester
def getDAG(self):
""" Return the DAG object """
return self.__dag
def getOriginalDAG(self):
"""
Retreive the DAG object from the state it was when setOriginalDAG was called or the current
state it is in now, if setOriginalDAG was never called.
"""
return self.setOriginalDAG()
def setOriginalDAG(self):
"""
Create a soft clone of the working DAG for what ever state it is currently in. This method
should only be called once, and once the working DAG is properly set up.
This is to protect the DAG from further tampering.
"""
if self.__dag_clone == None:
self.__dag_clone = self.__dag.clone()
return self.__dag_clone
def getTestName(self):
""" Wrapper method to return the testers test name """
return self.__tester.getTestName()
def run(self):
"""
A blocking method to handle the exit status of the process object while keeping track of the
time the process was active. When the process exits, read the output and close the file.
"""
self.__tester.prepare(self.options)
if self.options.dry_run or not self.__tester.shouldExecute():
self.__tester.setStatus(self.__tester.getSuccessMessage(), self.__tester.bucket_success)
return
self.timer.reset()
self.__tester.run(self.timer, self.options)
self.__start_time = self.timer.starts[0]
self.__end_time = self.timer.ends[-1]
self.__std_out = self.__tester.std_out
def killProcess(self):
""" Kill remaining process that may be running """
self.__tester.killCommand()
def getStartTime(self):
""" Return the time the process started """
return self.__start_time
def getEndTime(self):
""" Return the time the process exited """
return self.__end_time
def getOutput(self):
""" Return the contents of output """
return self.__std_out
def setOutput(self, output):
""" Method to allow testers to overwrite the output if certain conditions are met """
if self.__tester.outfile is not None and not self.__tester.outfile.closed:
return
self.__std_out = output
def getActiveTime(self):
""" Return active time """
m = re.search(r"Active time=(\S+)", self.__std_out)
if m != None:
return m.group(1)
def getSolveTime(self):
""" Return solve time """
m = re.search(r"solve().*", self.__std_out)
if m != None:
return m.group().split()[5]
def getTiming(self):
""" Return active time if available, if not return a comparison of start and end time """
if self.getActiveTime():
return self.getActiveTime()
elif self.getEndTime() and self.getStartTime():
return self.timer.cumulativeDur()
elif self.getStartTime() and self.__tester.isPending():
# If the test is still running, return current run time instead
return max(0.0, clock() - self.getStartTime())
else:
return 0.0
| lgpl-2.1 | -2,639,172,414,935,538,000 | 32.293706 | 102 | 0.592523 | false |
kylefrost/budget | sql.py | 1 | 1688 | import MySQLdb
import config
def add(table_name, data_tuple):
db = MySQLdb.connect(host=config.database.HOST, user=config.database.USER, passwd=config.database.PASSWD, db=config.database.DB)
cur = db.cursor()
if table_name == "spending":
cur.execute("INSERT INTO spending (spend_description, spend_date, spend_amount, account_used) VALUES (%s, %s, %s, %s)", data_tuple)
db.commit()
cur.close()
elif table_name == "bills":
cur.execute("INSERT INTO bills (bill_name, pay_date, pay_amount) VALUES (%s, %s, %s)", data_tuple)
db.commit()
cur.close()
elif table_name == "accounts":
cur.execute("INSERT INTO accounts (account_name, account_balance) VALUES (%s, %s)", data_tuple)
db.commit()
cur.close()
db.close()
def edit():
db = MySQLdb.connect(host=config.database.HOST, user=config.database.USER, passwd=config.database.PASSWD, db=config.database.DB)
pass
def delete():
db = MySQLdb.connect(host=config.database.HOST, user=config.database.USER, passwd=config.database.PASSWD, db=config.database.DB)
pass
def select(table_name):
db = MySQLdb.connect(host=config.database.HOST, user=config.database.USER, passwd=config.database.PASSWD, db=config.database.DB)
cur = db.cursor()
if table_name == "spending":
cur.execute("SELECT * FROM spending;")
db.close()
return cur.fetchall()
elif table_name == "bills":
cur.execute("SELECT * FROM bills;")
db.close()
return cur.fetchall()
elif table_name == "accounts":
cur.execute("SELECT * FROM accounts;")
db.close()
return cur.fetchall()
| gpl-3.0 | 6,148,542,761,144,439,000 | 32.76 | 139 | 0.64218 | false |
jriguera/photoplace | photoplace/lib/PhotoPlace/UserInterface/commandUI.py | 1 | 5140 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# commandUI.py
#
# Copyright 2010-2015 Jose Riguera Lopez <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A command line implementation for a user interface.
"""
__program__ = "photoplace"
__author__ = "Jose Riguera Lopez <[email protected]>"
__version__ = "0.6.1"
__date__ = "Dec 2014"
__license__ = "Apache 2.0"
__copyright__ ="(c) Jose Riguera"
import os
import sys
from PhotoPlace.definitions import *
from PhotoPlace.observerHandler import *
from PhotoPlace.stateHandler import *
from PhotoPlace.userFacade import *
from PhotoPlace.Plugins.Interface import *
from Interface import InterfaceUI
class PhotoPlaceCOM(InterfaceUI):
"""
GTK GUI for PhotoPlace
"""
_instance = None
# Singleton
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(PhotoPlaceCOM, cls).__new__(cls)
return cls._instance
def __init__(self, resourcedir=None):
InterfaceUI.__init__(self, resourcedir)
def init(self, userfacade):
self.userfacade = userfacade
self.plugins = dict()
self.plugins_error = []
self.num_photos_process = 0
# Make a new state
try:
self.userfacade.init()
except Error as e:
print(e)
self.userfacade.init(True)
def loadPlugins(self):
errors = self.userfacade.load_plugins()
for p, e in errors.iteritems():
print(e)
self.plugins_error = []
for p in self.userfacade.addons :
if not p in errors:
try:
error = self.userfacade.activate_plugin(p, None)
except Error as e:
self.plugins_error.append(p)
print(e)
else:
if error != None:
self.plugins_error.append(p)
print(error)
else:
self.plugins_error.append(p)
def unloadPlugins(self):
pass
def activate_plugins(self):
for plg, plgobj in self.userfacade.list_plugins().iteritems():
if plg in self.plugins or plg in self.plugins_error:
continue
if not plgobj.capabilities['UI']:
# Active all plugins
try:
self.userfacade.init_plugin(plg, '*', None)
except Error as e:
print(e)
self.plugins[plg] = (plgobj)
def deactivate_plugins(self):
for plg in self.plugins.keys():
plgobj = self.plugins[plg]
try:
self.userfacade.end_plugin(plg)
except Error as e:
print(e)
del self.plugins[plg]
self.plugins = dict()
def start(self, load_files=True):
self.activate_plugins()
if self.action_loadtemplates():
if self.action_loadphotos():
if self.userfacade.state['gpxinputfile']:
if self.action_readgpx():
self.action_geolocate()
try:
self.userfacade.goprocess(True)
except Error as e:
print(e)
self.deactivate_plugins()
def action_loadtemplates(self):
try:
loadtemplates = self.userfacade.DoTemplates()
if loadtemplates:
loadtemplates.run()
return True
else:
return False
except Error as e:
print(e)
return False
def action_loadphotos(self, directory=None):
try:
loadphotos = self.userfacade.LoadPhotos(directory)
if loadphotos:
loadphotos.run()
return True
else:
return False
except Error as e:
print(e)
return False
def action_readgpx(self, filename=None):
try:
readgpx = self.userfacade.ReadGPX(filename)
if readgpx:
readgpx.run()
return True
else:
return False
except Error as e:
print(e)
return False
def action_geolocate(self):
try:
geolocate = self.userfacade.Geolocate()
if geolocate:
geolocate.run()
else:
return False
except Error as e:
print(e)
return False
return True
# EOF
| apache-2.0 | -2,436,587,359,322,661,400 | 26.486631 | 76 | 0.542412 | false |
lightbase/WSCacicNeo | wscacicneo/views/orgaos.py | 1 | 11498 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'eduardo'
import requests
import json
import datetime
from pyramid.response import Response
from pyramid.httpexceptions import HTTPFound, HTTPNotFound
from pyramid.view import view_config, forbidden_view_config
from wscacicneo.model import orgao as model_orgao
from wscacicneo.utils.utils import Utils
from wscacicneo.model.orgao import Orgao
from ..model import atividade
from liblightbase.lbutils import conv
from .. import config
from .. import search
import uuid
import ast
from pyramid.session import check_csrf_token
class Orgaos(object):
"""
    Views for managing organisations (órgãos)
"""
def __init__(self, request):
"""
        Constructor.
        :param request: the incoming request
"""
self.request = request
self.usuario_autenticado = Utils.retorna_usuario_autenticado(
self.request.session.get('userid'))
def listorgao(self):
orgao_obj = Utils.create_orgao_obj()
search = orgao_obj.search_list_orgaos()
return {'orgao_doc': search.results,
'usuario_autenticado': self.usuario_autenticado
}
    def get_orgao_initial(self):
        if Utils.check_has_orgao():  # an organisation already exists
            return HTTPFound(location=self.request.route_url('login'))
        return {'api_key': uuid.uuid4()}

    def post_orgao_initial(self):
        if Utils.check_has_orgao():  # an organisation already exists
            return HTTPFound(location=self.request.route_url('login'))
        return self.post_orgao()
def config_orgao(self):
sigla = self.request.matchdict['sigla']
search_obj = search.orgao.SearchOrgao(
param=sigla
)
orgao_obj = search_obj.search_by_name()
saida = orgao_obj.orgao_to_dict()
        # Fall back to the application URL when none is stored
if saida.get('url') is None:
saida['url'] = self.request.application_url
saida['usuario_autenticado'] = self.usuario_autenticado
return saida
def editorgao(self):
sigla = self.request.matchdict['sigla']
search_obj = search.orgao.SearchOrgao(
param=sigla
)
orgao_obj = search_obj.search_by_name()
saida = orgao_obj.orgao_to_dict()
if saida.get('url') is None:
saida['url'] = self.request.application_url
saida['usuario_autenticado'] = self.usuario_autenticado
return saida
def post_orgao(self):
"""
        Create an organisation (órgão) document from the POSTed JSON
"""
rest_url = config.REST_URL
orgaobase = model_orgao.OrgaoBase().lbbase
doc = self.request.json_body
nome_base = Utils.format_name(doc.get('sigla'))
orgao_obj = Orgao(
nome=nome_base,
pretty_name=doc.get('pretty_name'),
siorg=doc.get('siorg'),
cargo=doc.get('cargo'),
gestor=doc.get('gestor'),
coleta=int(doc.get('coleta')),
sigla=doc.get('sigla'),
endereco=doc.get('end'),
email=doc.get('email'),
telefone=doc.get('telefone'),
url=doc.get('url'),
habilitar_bot=ast.literal_eval(doc.get('habilitar_bot')),
api_key=doc.get('api_key')
)
try:
if self.usuario_autenticado is None:
user = 'Sistema'
else:
user = self.usuario_autenticado.nome
except IndexError:
user = 'Sistema'
at = atividade.Atividade(
tipo='insert',
usuario=user,
descricao='Cadastrou o órgão ' + nome_base,
data=datetime.datetime.now()
)
at.create_atividade()
id_doc = orgao_obj.create_orgao()
session = self.request.session
        session.flash('Órgão cadastrado com sucesso', queue="success")
return Response(str(id_doc))
def put_orgao(self):
"""
        Update an organisation document, looked up by its id (sigla)
"""
doc = self.request.json_body
sigla = doc['id']
nome_base = Utils.format_name(doc.get('sigla'))
orgao_obj = Orgao(
nome=nome_base,
pretty_name=doc.get('pretty_name'),
siorg=doc.get('siorg'),
gestor=doc.get('gestor'),
cargo=doc.get('cargo'),
coleta=int(doc.get('coleta')),
sigla=nome_base,
endereco=doc.get('end'),
email=doc.get('email'),
telefone=doc.get('telefone'),
url=doc.get('url'),
habilitar_bot=ast.literal_eval(doc.get('habilitar_bot')),
api_key=doc.get('api_key')
)
at = atividade.Atividade(
tipo='put',
usuario=self.usuario_autenticado.nome,
descricao='Alterou o órgão ' + nome_base,
data=datetime.datetime.now()
)
at.create_atividade()
orgao = orgao_obj.orgao_to_dict()
search = orgao_obj.search_orgao(sigla)
id = search.results[0]._metadata.id_doc
doc = json.dumps(orgao)
print(doc)
print(sigla)
edit = orgao_obj.edit_orgao(id, doc)
session = self.request.session
        session.flash('Alteração realizada com sucesso', queue="success")
return Response(str(id))
def delete_orgao(self):
"""
        Delete an organisation document, looked up by its sigla
"""
session = self.request.session
doc = self.request.params
sigla = self.request.matchdict['sigla']
orgao_obj = Utils.create_orgao_obj()
user_obj = Utils.create_user_obj()
at = atividade.Atividade(
tipo='delete',
usuario=self.usuario_autenticado.nome,
descricao='Removeu o órgão '+ sigla,
data=datetime.datetime.now()
)
at.create_atividade()
search = orgao_obj.search_orgao(sigla)
id = search.results[0]._metadata.id_doc
orgao_name = search.results[0].nome
lista_usuarios = Utils.verifica_orgaos(orgao_name)
list_admins = Utils.verifica_admin(lista_usuarios)
        # List the names of the admin users of the system
list_admins_names = []
for x in list_admins:
list_admins_names.append(x.nome)
        # Remove the organisation and its users when no administrators are linked to it.
if not list_admins:
for id_users in lista_usuarios:
delete_user = user_obj.delete_user(id_users)
delete_orgao = orgao_obj.delete_orgao(id)
if delete_orgao:
                session.flash('Sucesso ao apagar o órgão ' + search.results[0].pretty_name + ' e os usuários ligados a ele', queue="success")
else:
session.flash('Ocorreu um erro ao apagar o órgão '+search.results[0].pretty_name, queue="error")
return HTTPFound(location=self.request.route_url('listorgao'))
else:
if len(list_admins) > 1:
session.flash('O órgão '+search.results[0].pretty_name+' não pode ser removido pois ainda há administradores ligados a ele.', queue="error")
session.flash('Os administradores ligados ao órgão '+search.results[0].pretty_name+' são: '+str(list_admins_names).strip("[]"), queue="error")
else:
session.flash('O órgão '+search.results[0].pretty_name+' não pode ser removido pois ainda há um administrador ligado a ele.', queue="error")
session.flash('O administrador ligado ao órgão '+search.results[0].pretty_name+' é: '+str(list_admins_names).strip("[]"), queue="error")
return HTTPFound(location=self.request.route_url('listorgao'))
    # Organisation (órgão) views
def orgao(self):
return {
'usuario_autenticado': self.usuario_autenticado,
'api_key': uuid.uuid4()
}
def valida_orgao(self):
"""
        Validate a new organisation registration.
        :return: JSON in the following format
        {
            'result': True/False,
            'message': 'error message, if any',
            'element': 'id of the form element that failed'
        }
"""
orgao = self.request.json_body
        # 1 - Check the organisation sigla
        exists = search.orgao.orgao_base.element_exists('nome', orgao['sigla'])
        if exists:
            # An organisation with this sigla already exists
            return {
                'result': False,
                'message': 'Já existe um órgão com essa sigla',
                'element': 'sigla'
            }
        # 2 - The normalised name must be unique
        nome_base = Utils.format_name(orgao['sigla'])
        exists = search.orgao.orgao_base.element_exists('nome', nome_base)
        if exists:
            # Normalised name already registered
            return {
                'result': False,
                'message': 'Nome de órgão já cadastrado. '
                           'Números e caracteres especiais são desconsiderados',
                'element': 'sigla'
            }
        # 3 - Check the e-mail address
        exists = search.orgao.orgao_base.element_exists('email', orgao['email'])
        if exists:
            # E-mail already registered
            return {
                'result': False,
                'message': 'E-mail já cadastrado',
                'element': 'email'
            }
        # All checks passed
        return {
            'result': True
        }
def valida_put_orgao(self):
"""
        Validate an organisation update (PUT).
        :return: JSON in the following format
        {
            'result': True/False,
            'message': 'error message, if any',
            'element': 'id of the form element that failed'
        }
"""
orgao = self.request.json_body
        # 1 - Look the organisation up by its current id/sigla
        search_obj = search.orgao.SearchOrgao(
            param=orgao['id']
        )
        orgao_obj = search_obj.search_by_name()
        if orgao_obj is None:
            # Organisation not found
            return {
                'result': False,
                'message': 'Órgão não encontrado',
                'element': 'sigla'
            }
        # 2 - The normalised name must be unique among the other organisations
        nome_base = Utils.format_name(orgao['sigla'])
        exists = search.orgao.orgao_base.element_exists('nome', nome_base, orgao_obj.nome)
        if exists:
            # Normalised name already registered
            return {
                'result': False,
                'message': 'Nome de órgão já cadastrado. '
                           'Números e caracteres especiais são desconsiderados',
                'element': 'sigla'
            }
        # 3 - Check the e-mail address
        exists = search.orgao.orgao_base.element_exists('email', orgao['email'], orgao_obj.nome)
        if exists:
            # E-mail already registered
            return {
                'result': False,
                'message': 'E-mail já cadastrado',
                'element': 'email'
            }
        # 4 - The sigla itself must also be unique
        exists = search.orgao.orgao_base.element_exists('sigla', orgao['sigla'], orgao_obj.nome)
        if exists:
            # Sigla already registered
            return {
                'result': False,
                'message': 'Sigla já cadastrada. '
                           'Números e caracteres especiais são desconsiderados',
                'element': 'sigla'
            }
        # All checks passed
        return {
            'result': True
        }
| gpl-2.0 | -7,502,199,000,167,618,000 | 34.02454 | 158 | 0.547644 | false |
noam09/kodi | xmlgen.py | 1 | 4509 | #!/usr/bin/env python
# *
# * Copyright (C) 2012-2013 Garrett Brown
# * Copyright (C) 2010 j48antialias
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with XBMC; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# *
# * Based on code by j48antialias:
# * https://anarchintosh-projects.googlecode.com/files/addons_xml_generator.py
""" addons.xml generator """
import os
import sys
# Compatibility with 3.0, 3.1 and 3.2 not supporting u"" literals
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
class Generator:
"""
    Generates a new addons.xml file from each add-on's addon.xml file
    and a new addons.xml.md5 hash file. Must be run from the root of
    the checked-out repo. Only handles a single-depth folder structure.
"""
def __init__( self ):
# generate files
self._generate_addons_file()
self._generate_md5_file()
# notify user
print("Finished updating addons xml and md5 files")
def _generate_addons_file( self ):
# addon list
addons = os.listdir( "." )
excludedFolders = {'.svn':'.svn','.git':'.git','repo': 'repo', 'plugin.video.moviexil': 'plugin.video.moviexil',
'plugin.video.themarker.video':'plugin.video.themarker.video'
}
# final addons text
addons_xml = u("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n<addons>\n")
# loop thru and add each addons addon.xml file
for addon in addons:
try:
# skip any file or .svn folder or .git folder
if ( not os.path.isdir( addon ) or addon in excludedFolders): continue
# create path
_path = os.path.join( addon, "addon.xml" )
# split lines for stripping
xml_lines = open( _path, "r" ).read().splitlines()
# new addon
addon_xml = ""
# loop thru cleaning each line
for line in xml_lines:
# skip encoding format line
if ( line.find( "<?xml" ) >= 0 ): continue
# add line
if sys.version < '3':
addon_xml += unicode( line.rstrip() + "\n", "UTF-8" )
else:
addon_xml += line.rstrip() + "\n"
# we succeeded so add to our final addons.xml text
addons_xml += addon_xml.rstrip() + "\n\n"
except Exception as e:
# missing or poorly formatted addon.xml
print("Excluding %s for %s" % ( _path, e ))
# clean and add closing tag
addons_xml = addons_xml.strip() + u("\n</addons>\n")
# save file
self._save_file( addons_xml.encode( "UTF-8" ), file="addons.xml" )
def _generate_md5_file( self ):
# create a new md5 hash
try:
import md5
m = md5.new( open( "addons.xml", "r" ).read() ).hexdigest()
except ImportError:
import hashlib
m = hashlib.md5( open( "addons.xml", "r", encoding="UTF-8" ).read().encode( "UTF-8" ) ).hexdigest()
# save file
try:
self._save_file( m.encode( "UTF-8" ), file="addons.xml.md5" )
except Exception as e:
# oops
print("An error occurred creating addons.xml.md5 file!\n%s" % e)
def _save_file( self, data, file ):
try:
# write data to the file (use b for Python 3)
open( file, "wb" ).write( data )
except Exception as e:
# oops
print("An error occurred saving %s file!\n%s" % ( file, e ))
if ( __name__ == "__main__" ):
# start
Generator()
| gpl-3.0 | -7,378,184,051,172,303,000 | 36.890756 | 120 | 0.554447 | false |
ricardonhuang/blog | app/auth/views.py | 1 | 6229 | #coding=utf-8
'''
Created on 2016-10-20
@author: huangning
'''
from flask import render_template, redirect, request, url_for, flash
from flask_login import login_user, logout_user, login_required, \
current_user
from . import auth
from .. import db
from ..models import User
from ..email import send_email
from .forms import LoginForm, RegistrationForm, ChangePasswordForm,\
PasswordResetRequestForm, PasswordResetForm, ChangeEmailForm
@auth.before_app_request
def before_request():
if current_user.is_authenticated:
current_user.ping()
if not current_user.confirmed and request.endpoint[:5] != 'auth.':
return redirect(url_for('auth.unconfirmed'))
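# The hook above runs before every request: it calls ping() on the
# authenticated user and redirects unconfirmed accounts away from all
# non-auth endpoints.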
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous or current_user.confirmed:
return redirect(url_for('main.index'))
return render_template('auth/unconfirmed.html')
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
token = user.generate_confirmation_token()
send_email(user.email, 'Confirm Your Account',
'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent to you by email.')
return redirect(url_for('auth.login'))
return render_template('auth/register.html', form=form)
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
send_email(current_user.email, 'Confirm Your Account',
'auth/email/confirm', user=current_user, token=token)
flash('A new confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
@auth.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
form = ChangePasswordForm()
if form.validate_on_submit():
if current_user.verify_password(form.old_password.data):
current_user.password = form.password.data
db.session.add(current_user)
flash('Your password has been updated.')
return redirect(url_for('main.index'))
else:
flash('Invalid password.')
return render_template("auth/change_password.html", form=form)
@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
if not current_user.is_anonymous:
return redirect(url_for('main.index'))
form = PasswordResetRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
token = user.generate_reset_token()
send_email(user.email, 'Reset Your Password',
'auth/email/reset_password',
user=user, token=token,
next=request.args.get('next'))
flash('An email with instructions to reset your password has been '
'sent to you.')
return redirect(url_for('auth.login'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if not current_user.is_anonymous:
return redirect(url_for('main.index'))
form = PasswordResetForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is None:
return redirect(url_for('main.index'))
if user.reset_password(token, form.password.data):
flash('Your password has been updated.')
return redirect(url_for('auth.login'))
else:
return redirect(url_for('main.index'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/change-email', methods=['GET', 'POST'])
@login_required
def change_email_request():
form = ChangeEmailForm()
if form.validate_on_submit():
if current_user.verify_password(form.password.data):
new_email = form.email.data
token = current_user.generate_email_change_token(new_email)
send_email(new_email, 'Confirm your email address',
'auth/email/change_email',
user=current_user, token=token)
flash('An email with instructions to confirm your new email '
'address has been sent to you.')
return redirect(url_for('main.index'))
else:
flash('Invalid email or password.')
return render_template("auth/change_email.html", form=form)
@auth.route('/change-email/<token>')
@login_required
def change_email(token):
if current_user.change_email(token):
flash('Your email address has been updated.')
else:
flash('Invalid request.')
return redirect(url_for('main.index'))
| gpl-3.0 | 5,823,037,768,428,690,000 | 34.145349 | 78 | 0.623774 | false |
YuxingZhang/prescription | rnn_model/batch.py | 1 | 10241 | import numpy as np
import random
import cPickle as pkl
from collections import OrderedDict
class Batch():
def __init__(self, lhs, rel, rhs, batch_size=128):
self.lhs = lhs
self.rel = rel
self.rhs = rhs
self.batch_size = batch_size
self.prepare()
self.reset()
def prepare(self):
self.indices = np.arange(len(self.lhs))
self.curr_indices = np.random.permutation(self.indices)
def reset(self):
self.curr_indices = np.random.permutation(self.indices)
self.curr_pos = 0
self.curr_remaining = len(self.indices)
def next(self):
if self.curr_pos >= len(self.indices):
self.reset()
raise StopIteration()
# current batch size
curr_batch_size = np.minimum(self.batch_size, self.curr_remaining)
# indices for current batch
curr_indices = self.curr_indices[self.curr_pos:self.curr_pos+curr_batch_size]
self.curr_pos += curr_batch_size
self.curr_remaining -= curr_batch_size
# data and targets for current batch
lhs_batch = [self.lhs[ii] for ii in curr_indices]
rel_batch = [self.rel[ii] for ii in curr_indices]
rhs_batch = [self.rhs[ii] for ii in curr_indices]
return lhs_batch, rel_batch, rhs_batch
def __iter__(self):
return self
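
# Illustrative usage of Batch (assumes lhs/rel/rhs are parallel lists of
# triples, e.g. as returned by load_labeled_entities below):
#     batches = Batch(lhs, rel, rhs, batch_size=128)
#     for lhs_b, rel_b, rhs_b in batches:
#         ...  # one mini-batch; iteration stops after a full epoch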
def prepare_data(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars, use_beos=False):
"""
    Prepare a training batch: index characters, build masks and sample negative triples.
"""
batch_size = len(lhs_b)
lhs_list = lhs_dict.keys()
rand_idx = np.random.choice(len(lhs_list), batch_size)
    lhsn_b = []
    for i in range(batch_size):
        # Sample a corrupted (negative) left-hand side, re-drawing once if it
        # collides with the true entity. Append the string itself -- not a
        # one-element list -- so prepare_lhs can split it into characters.
        lhsn_b.append(lhs_list[rand_idx[i]] if lhs_b[i] != lhs_list[rand_idx[i]] else lhs_list[np.random.randint(len(lhs_list))])
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
lhsn_in, lhsn_mask = prepare_lhs(lhsn_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] for yy in rhs_b] # convert each right hand side to its index
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
# random index as the negative triples
rhsn_in = np.random.randint(len(rhs_dict), size=batch_size).astype('int32')
return lhs_in, lhs_mask, lhsn_in, lhsn_mask, rel_in, rhs_in, rhsn_in
def prepare_data_nn(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars, use_beos=False):
"""
    Prepare a training batch with both character-level inputs and entity-embedding indices, plus sampled negative triples.
"""
batch_size = len(lhs_b)
lhs_list = lhs_dict.keys()
rand_idx = np.random.choice(len(lhs_list), batch_size)
    lhsn_b = []
    for i in range(batch_size):
        # As in prepare_data: append the sampled string itself so that
        # prepare_lhs receives a word, not a one-element list.
        lhsn_b.append(lhs_list[rand_idx[i]] if lhs_b[i] != lhs_list[rand_idx[i]] else lhs_list[np.random.randint(len(lhs_list))])
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
lhsn_in, lhsn_mask = prepare_lhs(lhsn_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] for yy in rhs_b] # convert each right hand side to its index
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
# random index as the negative triples
rhsn_in = np.random.randint(len(rhs_dict), size=batch_size).astype('int32')
lhsn_emb_in = np.random.randint(len(lhs_dict) + 1, size=batch_size).astype('int32')
return lhs_in, lhs_mask, lhsn_in, lhsn_mask, lhs_emb_in, lhsn_emb_in, rel_in, rhs_in, rhsn_in
def prepare_data_tr(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars, use_beos=False):
"""
    Prepare a training batch of entity/relation indices (no character-level input), plus sampled negative triples.
"""
batch_size = len(lhs_b)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] for yy in rhs_b] # convert each right hand side to its index
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
# random index as the negative triples
rhsn_in = np.random.randint(len(rhs_dict), size=batch_size).astype('int32')
lhsn_emb_in = np.random.randint(len(lhs_dict) + 1, size=batch_size).astype('int32')
return lhs_emb_in, lhsn_emb_in, rel_in, rhs_in, rhsn_in
def prepare_vs_tr(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars):
'''
prepare data without generating negative triples, used for validation and testing
'''
batch_size = len(lhs_b)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] if yy in rhs_dict else 0 for yy in rhs_b] # convert each right hand side to its index, 0 if not in dict
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
return lhs_emb_in, rel_in, rhs_in
def prepare_vs_nn(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars):
'''
prepare data without generating negative triples, used for validation and testing
'''
batch_size = len(lhs_b)
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] if yy in rhs_dict else 0 for yy in rhs_b] # convert each right hand side to its index, 0 if not in dict
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
return lhs_in, lhs_mask, lhs_emb_in, rel_in, rhs_in
def prepare_vs(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars):
'''
prepare data without generating negative triples, used for validation and testing
'''
batch_size = len(lhs_b)
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] if yy in rhs_dict else 0 for yy in rhs_b] # convert each right hand side to its index, 0 if not in dict
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
return lhs_in, lhs_mask, rel_in, rhs_in
def prepare_lhs(lhs_b, chardict, n_chars):
'''
prepare left hand side (or negative left hand side) given a list of words, used as a subroutine of prepare_data
'''
lhs_idx = []
for cc in lhs_b:
current = list(cc)
lhs_idx.append([chardict[c] if c in chardict and chardict[c] <= n_chars else 0 for c in current])
len_lhs = [len(s) for s in lhs_idx]
max_length = max(len_lhs)
n_samples = len(lhs_idx)
# positive lhs
lhs_in = np.zeros((n_samples,max_length)).astype('int32')
lhs_mask = np.zeros((n_samples,max_length)).astype('float32')
for idx, lhs_idx_i in enumerate(lhs_idx):
lhs_in[idx,:len_lhs[idx]] = lhs_idx_i
lhs_mask[idx,:len_lhs[idx]] = 1.
return lhs_in, lhs_mask
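
# Shape note: prepare_lhs returns
#   lhs_in   -- int32 (n_samples, max_length) character indices (0 = rare/unknown)
#   lhs_mask -- float32 of the same shape, 1.0 over real characters, 0.0 padding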
def build_char_dictionary(text):
"""
Build a character dictionary
"""
charcount = OrderedDict()
for cc in text:
chars = list(cc)
for c in chars:
if c not in charcount:
charcount[c] = 0
charcount[c] += 1
chars = charcount.keys()
freqs = charcount.values()
sorted_idx = np.argsort(freqs)[::-1]
chardict = OrderedDict()
for idx, sidx in enumerate(sorted_idx):
chardict[chars[sidx]] = idx + 1
return chardict, charcount
def build_entity_dictionary(targets):
"""
Build a label dictionary
"""
labelcount = OrderedDict()
for l in targets:
if l not in labelcount:
labelcount[l] = 0
labelcount[l] += 1
labels = labelcount.keys()
freqs = labelcount.values()
sorted_idx = np.argsort(freqs)[::-1]
labeldict = OrderedDict()
for idx, sidx in enumerate(sorted_idx):
labeldict[labels[sidx]] = idx
return labeldict, labelcount
def save_dictionary(worddict, wordcount, loc):
"""
Save a dictionary to the specified location
"""
with open(loc, 'w') as f:
pkl.dump(worddict, f)
pkl.dump(wordcount, f)
def load_labeled_entities(f): # split each line into lhs, rel and rhs
lhs = []
rel = []
rhs = []
for line in f:
entities = line.rstrip().split('\t')
if len(entities) != 3:
continue
lhs.append(entities[0])
rel.append(entities[1])
rhs.append(entities[2])
return lhs, rel, rhs
| bsd-3-clause | 157,359,887,290,219,680 | 36.375912 | 131 | 0.620252 | false |
ustclug/lug-vpn-web | scripts/migrate.py | 1 | 1829 | #!/usr/bin/env python3
# encoding: utf-8
import MySQLdb
import random
import hashlib
import string
db = MySQLdb.connect(host=input('host:'),
user=input('user:'),
passwd=input('password:'),
db=input('db:'))
db.autocommit(True)
cur = db.cursor()
cur.execute("rename table `user` to `user_bak`")
cur.execute("""
CREATE TABLE `user` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`email` varchar(63) DEFAULT NULL,
`passwordhash` varchar(127) NOT NULL,
`salt` varchar(127) NOT NULL,
`active` tinyint(1) DEFAULT NULL,
`admin` tinyint(1) DEFAULT NULL,
`status` enum('none','applying','pass','reject','banned') DEFAULT NULL,
`name` varchar(127) DEFAULT NULL,
`studentno` varchar(127) DEFAULT NULL,
`phone` varchar(127) DEFAULT NULL,
`reason` text,
`applytime` datetime DEFAULT NULL,
`vpnpassword` varchar(127) DEFAULT NULL,
`rejectreason` text,
`banreason` text,
PRIMARY KEY (`id`),
UNIQUE KEY `email` (`email`)
) CHARSET=utf8
""")
cur.execute("""
insert into user
(`id`,`email`,`active`,`admin`,`status`,`name`,`studentno`,`phone`,`reason`,`applytime`,`vpnpassword`,`passwordhash`,`salt`)
select `id`,`email`,`active`,`admin`,`apply`,`name`,`studentno`,`phone`,`reason`,`applytime`,
(select `value` from `radcheck` where username=user_bak.email),'',''
from user_bak
where 1
""")
cur.execute('select id,password from user_bak')
for row in cur.fetchall():
id = row[0]
p = row[1]
salt = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for i in range(10))
s = hashlib.sha256()
s.update(p.encode('utf-8'))
s.update(salt.encode('utf-8'))
passwordhash = s.hexdigest()
cur.execute('update user set passwordhash=%s,salt=%s where id=%s', (passwordhash, salt, id))
db.close()
| agpl-3.0 | -2,951,162,330,318,354,400 | 28.031746 | 124 | 0.651722 | false |
goujonpa/chateaumagondeau | website/magondeau/settings.py | 1 | 2870 | """
Django settings for magondeau project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'i)v6vy5a8*c%ndm3)3%0knp-a#tg7iyczh^7muntb-%qbrb(d9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'home',
'news',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'magondeau.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'magondeau.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'magondeau',
'USER': 'Polo',
'PASSWORD': '',
'HOST': '',
'PORT': '5432',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'fr-FR'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
| mit | 8,085,411,588,434,776,000 | 24.175439 | 71 | 0.672125 | false |
amkusmec/snptools | src/filter.py | 1 | 10883 | # -*- coding: utf-8 -*-
"""
Created on Wed May 27 14:47:00 2015
@author: aaron
"""
import argparse
import textwrap
import timeit
import os
from snptools import *
#########################################################
#### Need to add retention list filtering for DSF and PED
#########################################################
###############################################################################
def version():
v0 = """
############################################################################
filter V1.1
(c) 2015 Aaron Kusmec
N.B. VCF functionality is experimental. Use at your own risk.
Filter SNPs based on missing rates/minor allele frequencies.
Input modes,
1 = .dsf
2 = .hmp.txt
3 = .ped (PLINK)
4 = .vcf
Usage: python3 filter.py -s example.stat -i example.dsf -o filtered -mi 1 -n 0.6 -f 0.05
NOTE1: Retaining SNPs through a SNP list is currently only supported for HMP
files.
NOTE2: Using a SNP list cannot currently be combined with MAF/miss filtering.
############################################################################
"""
return v0
#############################################################################
def get_parser():
parser = argparse.ArgumentParser(
formatter_class = argparse.RawDescriptionHelpFormatter,
description = textwrap.dedent(version()))
parser.add_argument('-p', '--path', help = 'Path of the input file', \
nargs = '?', default = os.getcwd())
parser.add_argument('-s', '--stat', help = 'Stat file', type = str)
parser.add_argument('-i', '--input', help = 'Input file', type = str)
parser.add_argument('-o', '--output', help = 'Output file (no ext)', type = str)
parser.add_argument('-mi', '--modei', help = 'Input (and output) mode', type = int)
parser.add_argument('-n', '--miss', help = 'Max missing rate', \
type = float, default = 1.0)
parser.add_argument('-f', '--maf', help = 'Minimum minor allele frequency',\
type = float, default = 0.0)
parser.add_argument('-ht', '--het', help = 'Maximum heterozygosity', type = float, default = 1.0)
parser.add_argument('-r', '--retain', help = 'List of SNPs to retain', type = str, default = None)
return parser
###############################################################################
def getStats(filename):
print("Reading [ ", filename, " ].")
stats = {}
with open(filename, 'r') as infile:
header = infile.readline()
for line in infile:
line = line.split()
stats[line[0]] = [float(line[5]), float(line[6]), float(line[7])]
return stats
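
# Each stats entry maps SNP id -> [missing_rate, minor_allele_freq,
# heterozygosity], taken from fields 5-7 (0-based) of each .stat row.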
###############################################################################
def filterDsf(inname, outname, stats, miss, maf, het):
print("Filtering [ ", inname, " ].")
infile = open(inname, 'r')
keepfile = open(outname + ".dsf", 'w')
filtfile = open(outname + "_filtered.dsf", 'w')
header = infile.readline().split()
keepfile.write('\t'.join(header) + '\n')
filtfile.write('\t'.join(header) + '\n')
kept = filt = counter = 0
for snp in infile:
snp = snp.split()
if snp[0] not in stats:
warning(snp[0] + " is not present in .stat file.")
# Filter or keep
if stats[snp[0]][0] <= miss and stats[snp[0]][1] >= maf and stats[snp[0]][2] <= het:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
infile.close()
keepfile.close()
filtfile.close()
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".dsf", " ].")
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.dsf", " ].")
###############################################################################
def filterHmp(inname, outname, stats, miss, maf, het, retain):
print("Filtering [ ", inname, " ].")
infile = open(inname, 'r')
keepfile = open(outname + ".hmp.txt", 'w')
filtfile = open(outname + "_filtered.hmp.txt", 'w')
header = infile.readline().split()
keepfile.write('\t'.join(header) + '\n')
filtfile.write('\t'.join(header) + '\n')
kept = filt = counter = 0
for snp in infile:
snp = snp.split()
if snp[0] not in stats:
warning(snp[0] + " is not present in .stat file.")
if retain is not None:
if snp[0] in retain:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
else:
# Filter or keep
if stats[snp[0]][0] <= miss and stats[snp[0]][1] >= maf and stats[snp[0]][2] <= het:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
infile.close()
keepfile.close()
filtfile.close()
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".hmp.txt", " ].")
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.hmp.txt", " ].")
###############################################################################
def filterPed(inname, outname, stats, miss, maf, het):
# Read the .map file and verify that it contains the same SNPs
# as the .stat file.
mapname = inname.split('.')[0] + ".map"
print("Verifying [ ", mapname, " ].")
smap = []
with open(mapname, 'r') as mapfile:
for line in mapfile:
line = line.split()
if line[1] in stats:
smap.append(line)
else:
warning(line[1] + " is not present in .stat file.")
# Read the entire .ped file into memory and transpose
snps = []
print("Reading [ ", inname, " ].")
with open(inname, 'r') as infile:
for line in infile:
snps.append(line.strip().split('\t'))
snps = zip(*snps)
# Setup the output lists and process the metadata
ksnps = []; kmap = []
fsnps = []; fmap = []
for _ in range(6):
m = next(snps)
ksnps.append(m)
fsnps.append(m)
# Filter or keep
kept = filt = counter = 0
for index, value in enumerate(snps):
if stats[smap[index][1]][0] <= miss and stats[smap[index][1]][1] >= maf and stats[smap[index][1]][2] <= het:
ksnps.append(value)
kmap.append(smap[index])
kept += 1
else:
fsnps.append(value)
fmap.append(smap[index])
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
# Report the results and write the output
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".ped", " ].")
ksnps = zip(*ksnps)
with open(outname + ".ped", 'w') as outfile:
for k in ksnps:
outfile.write('\t'.join(k) + '\n')
with open(outname + ".map", 'w') as outfile:
for k in kmap:
outfile.write('\t'.join(k) + '\n')
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.ped", " ].")
fsnps = zip(*fsnps)
with open(outname + "_filtered.ped", 'w') as outfile:
for f in fsnps:
outfile.write('\t'.join(f) + '\n')
with open(outname + "_filtered.map", 'w') as outfile:
for f in fmap:
outfile.write('\t'.join(f) + '\n')
###############################################################################
def filterVcf(inname, outname, stats, miss, maf, het):
print("Filtering [ ", inname, " ].")
infile = open(inname, 'r')
keepfile = open(outname + ".vcf", 'w')
filtfile = open(outname + "_filtered.vcf", 'w')
kept = filt = counter = 0
for snp in infile:
snp = snp.strip()
if snp[0] == "#":
keepfile.write(snp + '\n')
filtfile.write(snp + '\n')
continue
# Filter or keep
snp = snp.split()
if snp[2] not in stats:
warning(snp[2] + " is not present in .stat file.")
if stats[snp[2]][0] <= miss and stats[snp[2]][1] >= maf and stats[snp[2]][2] <= het:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
infile.close()
keepfile.close()
filtfile.close()
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".vcf", " ].")
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.vcf", " ].")
###############################################################################
def getRetain(filename):
retain = {}
with open(filename, 'r') as infile:
for line in infile:
retain[line.strip()] = True
return retain
###############################################################################
if __name__ == '__main__':
parser = get_parser()
args = vars(parser.parse_args())
# Change the working directory if necessary
if args['path'] is not None:
os.chdir(args['path'])
if args['input'] is None:
warning("No input file.")
if args['output'] is None:
warning("No output file.")
print(version())
st = timeit.default_timer()
# Check input file
checkFile(args['input'], args['modei'])
stats = getStats(args['stat'])
if args['retain'] is not None:
retain = getRetain(args['retain'])
else:
retain = None
if args['modei'] == 1:
filterDsf(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'])
elif args['modei'] == 2:
filterHmp(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'], retain)
elif args['modei'] == 3:
filterPed(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'])
elif args['modei'] == 4:
filterVcf(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'])
else:
warning("Unrecognized input mode.")
et = timeit.default_timer()
print("Filtering finished.")
print("Time: %.2f min." % ((et - st)/60))
| mit | -6,806,972,596,291,616,000 | 32.798137 | 116 | 0.473307 | false |
dsnopek/anki-sync-server | tests/test_rest_app.py | 1 | 22180 | # -*- coding: utf-8 -*-
import os
import shutil
import tempfile
import unittest
import logging
import time
from pprint import pprint
import mock
from mock import MagicMock
import AnkiServer
from AnkiServer.collection import CollectionManager
from AnkiServer.apps.rest_app import RestApp, RestHandlerRequest, CollectionHandler, ImportExportHandler, NoteHandler, ModelHandler, DeckHandler, CardHandler
from CollectionTestBase import CollectionTestBase
from webob.exc import *
import anki
import anki.storage
class RestAppTest(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.collection_manager = CollectionManager()
self.rest_app = RestApp(self.temp_dir, collection_manager=self.collection_manager)
# disable all but critical errors!
logging.disable(logging.CRITICAL)
def tearDown(self):
self.collection_manager.shutdown()
self.collection_manager = None
self.rest_app = None
shutil.rmtree(self.temp_dir)
def test_list_collections(self):
os.mkdir(os.path.join(self.temp_dir, 'test1'))
os.mkdir(os.path.join(self.temp_dir, 'test2'))
with open(os.path.join(self.temp_dir, 'test1', 'collection.anki2'), 'wt') as fd:
fd.write('Testing!')
self.assertEqual(self.rest_app.list_collections(), ['test1'])
def test_parsePath(self):
tests = [
('collection/user', ('collection', 'index', ['user'])),
('collection/user/handler', ('collection', 'handler', ['user'])),
('collection/user/note/123', ('note', 'index', ['user', '123'])),
('collection/user/note/123/handler', ('note', 'handler', ['user', '123'])),
('collection/user/deck/name', ('deck', 'index', ['user', 'name'])),
('collection/user/deck/name/handler', ('deck', 'handler', ['user', 'name'])),
#('collection/user/deck/name/card/123', ('card', 'index', ['user', 'name', '123'])),
#('collection/user/deck/name/card/123/handler', ('card', 'handler', ['user', 'name', '123'])),
('collection/user/card/123', ('card', 'index', ['user', '123'])),
('collection/user/card/123/handler', ('card', 'handler', ['user', '123'])),
# the leading slash should make no difference!
('/collection/user', ('collection', 'index', ['user'])),
]
for path, result in tests:
self.assertEqual(self.rest_app._parsePath(path), result)
def test_parsePath_not_found(self):
tests = [
'bad',
'bad/oaeu',
'collection',
'collection/user/handler/bad',
'',
'/',
]
for path in tests:
self.assertRaises(HTTPNotFound, self.rest_app._parsePath, path)
def test_getCollectionPath(self):
def fullpath(collection_id):
return os.path.normpath(os.path.join(self.temp_dir, collection_id, 'collection.anki2'))
# This is simple and straight forward!
self.assertEqual(self.rest_app._getCollectionPath('user'), fullpath('user'))
# These are dangerous - the user is trying to hack us!
dangerous = ['../user', '/etc/passwd', '/tmp/aBaBaB', '/root/.ssh/id_rsa']
for collection_id in dangerous:
self.assertRaises(HTTPBadRequest, self.rest_app._getCollectionPath, collection_id)
def test_getHandler(self):
def handlerOne():
pass
def handlerTwo():
pass
handlerTwo.hasReturnValue = False
self.rest_app.add_handler('collection', 'handlerOne', handlerOne)
self.rest_app.add_handler('deck', 'handlerTwo', handlerTwo)
(handler, hasReturnValue) = self.rest_app._getHandler('collection', 'handlerOne')
self.assertEqual(handler, handlerOne)
self.assertEqual(hasReturnValue, True)
(handler, hasReturnValue) = self.rest_app._getHandler('deck', 'handlerTwo')
self.assertEqual(handler, handlerTwo)
self.assertEqual(hasReturnValue, False)
# try some bad handler names and types
self.assertRaises(HTTPNotFound, self.rest_app._getHandler, 'collection', 'nonExistantHandler')
self.assertRaises(HTTPNotFound, self.rest_app._getHandler, 'nonExistantType', 'handlerOne')
def test_parseRequestBody(self):
req = MagicMock()
req.body = '{"key":"value"}'
data = self.rest_app._parseRequestBody(req)
self.assertEqual(data, {'key': 'value'})
self.assertEqual(data.keys(), ['key'])
self.assertEqual(type(data.keys()[0]), str)
# test some bad data
req.body = '{aaaaaaa}'
self.assertRaises(HTTPBadRequest, self.rest_app._parseRequestBody, req)
class CollectionHandlerTest(CollectionTestBase):
def setUp(self):
super(CollectionHandlerTest, self).setUp()
self.handler = CollectionHandler()
def execute(self, name, data):
ids = ['collection_name']
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
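    # The helper above mirrors RestApp's dispatch: it wraps the payload in a
    # RestHandlerRequest and calls the handler method directly on the test
    # collection.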
def test_list_decks(self):
data = {}
ret = self.execute('list_decks', data)
# It contains only the 'Default' deck
self.assertEqual(len(ret), 1)
self.assertEqual(ret[0]['name'], 'Default')
def test_select_deck(self):
data = {'deck': 1}
ret = self.execute('select_deck', data)
self.assertEqual(ret, None);
def test_create_dynamic_deck_simple(self):
self.add_default_note(5)
data = {
'name': 'Dyn deck',
'mode': 'random',
'count': 2,
'query': "deck:\"Default\" (tag:'Tag1' or tag:'Tag2') (-tag:'Tag3')",
}
ret = self.execute('create_dynamic_deck', data)
self.assertEqual(ret['name'], 'Dyn deck')
self.assertEqual(ret['dyn'], True)
cards = self.collection.findCards('deck:"Dyn deck"')
self.assertEqual(len(cards), 2)
def test_list_models(self):
data = {}
ret = self.execute('list_models', data)
# get a sorted name list that we can actually check
names = [model['name'] for model in ret]
names.sort()
# These are the default models created by Anki in a new collection
default_models = [
'Basic',
'Basic (and reversed card)',
'Basic (optional reversed card)',
'Cloze'
]
self.assertEqual(names, default_models)
def test_find_model_by_name(self):
data = {'model': 'Basic'}
ret = self.execute('find_model_by_name', data)
self.assertEqual(ret['name'], 'Basic')
def test_find_notes(self):
ret = self.execute('find_notes', {})
self.assertEqual(ret, [])
# add a note programatically
self.add_default_note()
# get the id for the one note on this collection
note_id = self.collection.findNotes('')[0]
ret = self.execute('find_notes', {})
self.assertEqual(ret, [{'id': note_id}])
ret = self.execute('find_notes', {'query': 'tag:Tag1'})
self.assertEqual(ret, [{'id': note_id}])
ret = self.execute('find_notes', {'query': 'tag:TagX'})
self.assertEqual(ret, [])
ret = self.execute('find_notes', {'preload': True})
self.assertEqual(len(ret), 1)
self.assertEqual(ret[0]['id'], note_id)
self.assertEqual(ret[0]['model']['name'], 'Basic')
def test_add_note(self):
# make sure there are no notes (yet)
self.assertEqual(self.collection.findNotes(''), [])
# add a note programatically
note = {
'model': 'Basic',
'fields': {
'Front': 'The front',
'Back': 'The back',
},
'tags': "Tag1 Tag2",
}
self.execute('add_note', note)
notes = self.collection.findNotes('')
self.assertEqual(len(notes), 1)
note_id = notes[0]
note = self.collection.getNote(note_id)
self.assertEqual(note.model()['name'], 'Basic')
self.assertEqual(note['Front'], 'The front')
self.assertEqual(note['Back'], 'The back')
self.assertEqual(note.tags, ['Tag1', 'Tag2'])
def test_list_tags(self):
ret = self.execute('list_tags', {})
self.assertEqual(ret, [])
self.add_default_note()
ret = self.execute('list_tags', {})
ret.sort()
self.assertEqual(ret, ['Tag1', 'Tag2'])
def test_set_language(self):
import anki.lang
self.assertEqual(anki.lang._('Again'), 'Again')
try:
data = {'code': 'pl'}
self.execute('set_language', data)
self.assertEqual(anki.lang._('Again'), u'Znowu')
finally:
# return everything to normal!
anki.lang.setLang('en')
def test_reset_scheduler(self):
self.add_default_note(3)
ret = self.execute('reset_scheduler', {'deck': 'Default'})
self.assertEqual(ret, {
'new_cards': 3,
'learning_cards': 0,
'review_cards': 0,
})
def test_next_card(self):
ret = self.execute('next_card', {})
self.assertEqual(ret, None)
# add a note programatically
self.add_default_note()
# get the id for the one card and note on this collection
note_id = self.collection.findNotes('')[0]
card_id = self.collection.findCards('')[0]
self.collection.sched.reset()
ret = self.execute('next_card', {})
self.assertEqual(ret['id'], card_id)
self.assertEqual(ret['nid'], note_id)
self.assertEqual(ret['css'], '<style>.card {\n font-family: arial;\n font-size: 20px;\n text-align: center;\n color: black;\n background-color: white;\n}\n</style>')
self.assertEqual(ret['question'], 'The front')
self.assertEqual(ret['answer'], 'The front\n\n<hr id=answer>\n\nThe back')
self.assertEqual(ret['answer_buttons'], [
{'ease': 1,
'label': 'Again',
'string_label': 'Again',
'interval': 60,
'string_interval': '<1 minute'},
{'ease': 2,
'label': 'Good',
'string_label': 'Good',
'interval': 600,
'string_interval': '<10 minutes'},
{'ease': 3,
'label': 'Easy',
'string_label': 'Easy',
'interval': 345600,
'string_interval': '4 days'}])
def test_next_card_translation(self):
# add a note programatically
self.add_default_note()
# get the card in Polish so we can test translation too
anki.lang.setLang('pl')
try:
ret = self.execute('next_card', {})
finally:
anki.lang.setLang('en')
self.assertEqual(ret['answer_buttons'], [
{'ease': 1,
'label': 'Again',
'string_label': u'Znowu',
'interval': 60,
'string_interval': '<1 minuta'},
{'ease': 2,
'label': 'Good',
'string_label': u'Dobra',
'interval': 600,
'string_interval': '<10 minut'},
{'ease': 3,
'label': 'Easy',
'string_label': u'Łatwa',
'interval': 345600,
'string_interval': '4 dni'}])
def test_next_card_five_times(self):
self.add_default_note(5)
for idx in range(0, 5):
ret = self.execute('next_card', {})
self.assertTrue(ret is not None)
def test_answer_card(self):
import time
self.add_default_note()
# instantiate a deck handler to get the card
card = self.execute('next_card', {})
self.assertEqual(card['reps'], 0)
self.execute('answer_card', {'id': card['id'], 'ease': 2, 'timerStarted': time.time()})
# reset the scheduler and try to get the next card again - there should be none!
self.collection.sched.reset()
card = self.execute('next_card', {})
self.assertEqual(card['reps'], 1)
def test_suspend_cards(self):
# add a note programatically
self.add_default_note()
# get the id for the one card on this collection
card_id = self.collection.findCards('')[0]
# suspend it
self.execute('suspend_cards', {'ids': [card_id]})
# test that getting the next card will be None
card = self.collection.sched.getCard()
self.assertEqual(card, None)
# unsuspend it
self.execute('unsuspend_cards', {'ids': [card_id]})
# test that now we're getting the next card!
self.collection.sched.reset()
card = self.collection.sched.getCard()
self.assertEqual(card.id, card_id)
def test_cards_recent_ease(self):
self.add_default_note()
card_id = self.collection.findCards('')[0]
# answer the card
self.collection.reset()
card = self.collection.sched.getCard()
card.startTimer()
# answer multiple times to see that we only get the latest!
self.collection.sched.answerCard(card, 1)
self.collection.sched.answerCard(card, 3)
self.collection.sched.answerCard(card, 2)
# pull the latest revision
ret = self.execute('cards_recent_ease', {})
self.assertEqual(ret[0]['id'], card_id)
self.assertEqual(ret[0]['ease'], 2)
class ImportExportHandlerTest(CollectionTestBase):
export_rows = [
['Card front 1', 'Card back 1', 'Tag1 Tag2'],
['Card front 2', 'Card back 2', 'Tag1 Tag3'],
]
def setUp(self):
super(ImportExportHandlerTest, self).setUp()
self.handler = ImportExportHandler()
def execute(self, name, data):
ids = ['collection_name']
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def generate_text_export(self):
# Create a simple export file
export_data = ''
for row in self.export_rows:
export_data += '\t'.join(row) + '\n'
export_path = os.path.join(self.temp_dir, 'export.txt')
with file(export_path, 'wt') as fd:
fd.write(export_data)
return (export_data, export_path)
def check_import(self):
note_ids = self.collection.findNotes('')
notes = [self.collection.getNote(note_id) for note_id in note_ids]
self.assertEqual(len(notes), len(self.export_rows))
for index, test_data in enumerate(self.export_rows):
self.assertEqual(notes[index]['Front'], test_data[0])
self.assertEqual(notes[index]['Back'], test_data[1])
self.assertEqual(' '.join(notes[index].tags), test_data[2])
def test_import_text_data(self):
(export_data, export_path) = self.generate_text_export()
data = {
'filetype': 'text',
'data': export_data,
}
ret = self.execute('import_file', data)
self.check_import()
def test_import_text_url(self):
(export_data, export_path) = self.generate_text_export()
data = {
'filetype': 'text',
'url': 'file://' + os.path.realpath(export_path),
}
ret = self.execute('import_file', data)
self.check_import()
class NoteHandlerTest(CollectionTestBase):
def setUp(self):
super(NoteHandlerTest, self).setUp()
self.handler = NoteHandler()
def execute(self, name, data, note_id):
ids = ['collection_name', note_id]
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def test_index(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
ret = self.execute('index', {}, note_id)
self.assertEqual(ret['id'], note_id)
self.assertEqual(len(ret['fields']), 2)
self.assertEqual(ret['flags'], 0)
self.assertEqual(ret['model']['name'], 'Basic')
self.assertEqual(ret['tags'], ['Tag1', 'Tag2'])
self.assertEqual(ret['string_tags'], 'Tag1 Tag2')
self.assertEqual(ret['usn'], -1)
def test_update(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
data = self.execute('index', {}, note_id)
data['fields']['Front'] = 'The new front'
data['fields']['Back'] = 'The new back'
data['tags'] = ['new1', 'new2']
self.execute('update', data, note_id)
note = self.collection.getNote(note_id)
self.assertEqual(note['Front'], data['fields']['Front'])
self.assertEqual(note['Back'], data['fields']['Back'])
self.assertEqual(note.tags, data['tags'])
def test_delete(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
res = self.collection.findNotes('nid:%s' % note_id)
self.assertNotEqual(res, [])
self.execute('delete', {}, note_id)
res = self.collection.findNotes('nid:%s' % note_id)
self.assertEqual(res, [])
def test_add_tags(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertFalse('NT1' in note.tags)
self.assertFalse('NT2' in note.tags)
time.sleep(1)
self.execute('add_tags', {'tags': ['NT1', 'NT2']}, note_id)
note = self.collection.getNote(note_id)
self.assertTrue('NT1' in note.tags)
self.assertTrue('NT2' in note.tags)
self.assertTrue(note.mod > old_mod)
def test_add_tags_no_mod_update(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertFalse('NT1' in note.tags)
self.assertFalse('NT2' in note.tags)
time.sleep(1)
self.execute('add_tags', {'tags': ['NT1', 'NT2'], 'update_mod': False}, note_id)
note = self.collection.getNote(note_id)
self.assertTrue('NT1' in note.tags)
self.assertTrue('NT2' in note.tags)
self.assertEqual(note.mod, old_mod)
def test_remove_tags(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertTrue('Tag1' in note.tags)
self.assertTrue('Tag2' in note.tags)
time.sleep(1)
self.execute('remove_tags', {'tags': ['Tag1', 'Tag2']}, note_id)
note = self.collection.getNote(note_id)
self.assertFalse('Tag1' in note.tags)
self.assertFalse('Tag2' in note.tags)
self.assertTrue(note.mod > old_mod)
def test_remove_tags_no_mod_update(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertTrue('Tag1' in note.tags)
self.assertTrue('Tag2' in note.tags)
time.sleep(1)
self.execute('remove_tags', {'tags': ['Tag1', 'Tag2'], 'update_mod': False}, note_id)
note = self.collection.getNote(note_id)
self.assertFalse('Tag1' in note.tags)
self.assertFalse('Tag2' in note.tags)
self.assertEqual(note.mod, old_mod)
class DeckHandlerTest(CollectionTestBase):
def setUp(self):
super(DeckHandlerTest, self).setUp()
self.handler = DeckHandler()
def execute(self, name, data):
ids = ['collection_name', '1']
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def test_index(self):
ret = self.execute('index', {})
#pprint(ret)
self.assertEqual(ret['name'], 'Default')
self.assertEqual(ret['id'], 1)
self.assertEqual(ret['dyn'], False)
def test_next_card(self):
self.mock_app.execute_handler.return_value = None
ret = self.execute('next_card', {})
self.assertEqual(ret, None)
self.mock_app.execute_handler.assert_called_with('collection', 'next_card', self.collection, RestHandlerRequest(self.mock_app, {'deck': '1'}, ['collection_name'], {}))
def test_get_conf(self):
ret = self.execute('get_conf', {})
#pprint(ret)
self.assertEqual(ret['name'], 'Default')
self.assertEqual(ret['id'], 1)
self.assertEqual(ret['dyn'], False)
class CardHandlerTest(CollectionTestBase):
def setUp(self):
super(CardHandlerTest, self).setUp()
self.handler = CardHandler()
def execute(self, name, data, card_id):
ids = ['collection_name', card_id]
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def test_index_simple(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
card_id = self.collection.findCards('')[0]
ret = self.execute('index', {}, card_id)
self.assertEqual(ret['id'], card_id)
self.assertEqual(ret['nid'], note_id)
self.assertEqual(ret['did'], 1)
        self.assertNotIn('note', ret)
        self.assertNotIn('deck', ret)
def test_index_load(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
card_id = self.collection.findCards('')[0]
ret = self.execute('index', {'load_note': 1, 'load_deck': 1}, card_id)
self.assertEqual(ret['id'], card_id)
self.assertEqual(ret['nid'], note_id)
self.assertEqual(ret['did'], 1)
self.assertEqual(ret['note']['id'], note_id)
self.assertEqual(ret['note']['model']['name'], 'Basic')
self.assertEqual(ret['deck']['name'], 'Default')
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | 4,325,671,205,340,881,400 | 33.546729 | 175 | 0.579061 | false |
chrislit/abydos | abydos/distance/_damerau_levenshtein.py | 1 | 7982 | # Copyright 2014-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.distance._damerau_levenshtein.
Damerau-Levenshtein distance
"""
from sys import maxsize
from typing import Any, Callable, List, Tuple, cast
from numpy import int_ as np_int
from numpy import zeros as np_zeros
from ._distance import _Distance
__all__ = [
'DamerauLevenshtein',
]
class DamerauLevenshtein(_Distance):
"""Damerau-Levenshtein distance.
This computes the Damerau-Levenshtein distance :cite:`Damerau:1964`.
Damerau-Levenshtein code is based on Java code by Kevin L. Stern
:cite:`Stern:2014`, under the MIT license:
https://github.com/KevinStern/software-and-algorithms/blob/master/src/main/java/blogspot/software_and_algorithms/stern_library/string/DamerauLevenshteinAlgorithm.java
"""
def __init__(
self,
cost: Tuple[float, float, float, float] = (1, 1, 1, 1),
normalizer: Callable[[List[float]], float] = max,
**kwargs: Any
):
"""Initialize Levenshtein instance.
Parameters
----------
cost : tuple
A 4-tuple representing the cost of the four possible edits:
inserts, deletes, substitutions, and transpositions, respectively
(by default: (1, 1, 1, 1))
normalizer : function
A function that takes an list and computes a normalization term
by which the edit distance is divided (max by default). Another
good option is the sum function.
**kwargs
Arbitrary keyword arguments
.. versionadded:: 0.4.0
"""
super(DamerauLevenshtein, self).__init__(**kwargs)
self._cost = cost
self._normalizer = normalizer
def dist_abs(self, src: str, tar: str) -> float:
"""Return the Damerau-Levenshtein distance between two strings.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
Returns
-------
int (may return a float if cost has float values)
The Damerau-Levenshtein distance between src & tar
Raises
------
ValueError
Unsupported cost assignment; the cost of two transpositions must
not be less than the cost of an insert plus a delete.
Examples
--------
>>> cmp = DamerauLevenshtein()
>>> cmp.dist_abs('cat', 'hat')
1
>>> cmp.dist_abs('Niall', 'Neil')
3
>>> cmp.dist_abs('aluminum', 'Catalan')
7
>>> cmp.dist_abs('ATCG', 'TAGC')
2
.. versionadded:: 0.1.0
.. versionchanged:: 0.3.6
Encapsulated in class
"""
ins_cost, del_cost, sub_cost, trans_cost = self._cost
if src == tar:
return 0
if not src:
return len(tar) * ins_cost
if not tar:
return len(src) * del_cost
if 2 * trans_cost < ins_cost + del_cost:
raise ValueError(
'Unsupported cost assignment; the cost of two transpositions '
+ 'must not be less than the cost of an insert plus a delete.'
)
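        # For example, cost=(1, 1, 1, 0.4) is rejected here: two
        # transpositions (0.8) would then undercut an insert plus a delete
        # (2.0), breaking the optimality argument behind the algorithm.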
d_mat = np_zeros((len(src), len(tar)), dtype=np_int)
if src[0] != tar[0]:
d_mat[0, 0] = min(sub_cost, ins_cost + del_cost)
src_index_by_character = {src[0]: 0}
for i in range(1, len(src)):
del_distance = d_mat[i - 1, 0] + del_cost
ins_distance = (i + 1) * del_cost + ins_cost
match_distance = i * del_cost + (
0 if src[i] == tar[0] else sub_cost
)
d_mat[i, 0] = min(del_distance, ins_distance, match_distance)
for j in range(1, len(tar)):
del_distance = (j + 1) * ins_cost + del_cost
ins_distance = d_mat[0, j - 1] + ins_cost
match_distance = j * ins_cost + (
0 if src[0] == tar[j] else sub_cost
)
d_mat[0, j] = min(del_distance, ins_distance, match_distance)
for i in range(1, len(src)):
max_src_letter_match_index = 0 if src[i] == tar[0] else -1
for j in range(1, len(tar)):
candidate_swap_index = (
-1
if tar[j] not in src_index_by_character
else src_index_by_character[tar[j]]
)
j_swap = max_src_letter_match_index
del_distance = d_mat[i - 1, j] + del_cost
ins_distance = d_mat[i, j - 1] + ins_cost
match_distance = d_mat[i - 1, j - 1]
if src[i] != tar[j]:
match_distance += sub_cost
else:
max_src_letter_match_index = j
if candidate_swap_index != -1 and j_swap != -1:
i_swap = candidate_swap_index
if i_swap == 0 and j_swap == 0:
pre_swap_cost = 0
else:
pre_swap_cost = d_mat[
max(0, i_swap - 1), max(0, j_swap - 1)
]
swap_distance = (
pre_swap_cost
+ (i - i_swap - 1) * del_cost
+ (j - j_swap - 1) * ins_cost
+ trans_cost
)
else:
swap_distance = maxsize
d_mat[i, j] = min(
del_distance, ins_distance, match_distance, swap_distance
)
src_index_by_character[src[i]] = i
return cast(float, d_mat[len(src) - 1, len(tar) - 1])
def dist(self, src: str, tar: str) -> float:
"""Return the Damerau-Levenshtein similarity of two strings.
Damerau-Levenshtein distance normalized to the interval [0, 1].
The Damerau-Levenshtein distance is normalized by dividing the
Damerau-Levenshtein distance by the greater of
the number of characters in src times the cost of a delete and
the number of characters in tar times the cost of an insert.
For the case in which all operations have :math:`cost = 1`, this is
equivalent to the greater of the length of the two strings src & tar.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
Returns
-------
float
The normalized Damerau-Levenshtein distance
Examples
--------
>>> cmp = DamerauLevenshtein()
>>> round(cmp.dist('cat', 'hat'), 12)
0.333333333333
>>> round(cmp.dist('Niall', 'Neil'), 12)
0.6
>>> cmp.dist('aluminum', 'Catalan')
0.875
>>> cmp.dist('ATCG', 'TAGC')
0.5
.. versionadded:: 0.1.0
.. versionchanged:: 0.3.6
Encapsulated in class
"""
if src == tar:
return 0.0
ins_cost, del_cost = self._cost[:2]
return self.dist_abs(src, tar) / (
self._normalizer([len(src) * del_cost, len(tar) * ins_cost])
)
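# Usage sketch with non-default costs (illustrative): with substitutions twice
# as expensive as inserts/deletes, dist_abs('cat', 'hat') is still 2, since the
# lone substitution (cost 2) now ties with a delete plus an insert:
#
#     DamerauLevenshtein(cost=(1, 1, 2, 1)).dist_abs('cat', 'hat')  # -> 2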
if __name__ == '__main__':
import doctest
doctest.testmod()
| gpl-3.0 | -4,152,594,237,314,092,000 | 31.713115 | 170 | 0.534578 | false |
yeming233/rally | rally/task/trigger.py | 1 | 2283 | # Copyright 2016: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from rally.common.i18n import _
from rally.common import logging
from rally.common.plugin import plugin
from rally.common import validation
configure = plugin.configure
LOG = logging.getLogger(__name__)
@validation.add_default("jsonschema")
@plugin.base()
@six.add_metaclass(abc.ABCMeta)
class Trigger(plugin.Plugin, validation.ValidatablePluginMixin):
"""Factory for trigger classes."""
CONFIG_SCHEMA = {"type": "null"}
def __init__(self, context, task, hook_cls):
self.context = context
self.config = self.context["trigger"]["args"]
self.task = task
self.hook_cls = hook_cls
self._runs = []
@abc.abstractmethod
def get_listening_event(self):
"""Returns event type to listen."""
def on_event(self, event_type, value=None):
"""Launch hook on specified event."""
LOG.info(_("Hook %s is triggered for Task %s by %s=%s")
% (self.hook_cls.__name__, self.task["uuid"],
event_type, value))
hook = self.hook_cls(self.task, self.context.get("args", {}),
{"event_type": event_type, "value": value})
hook.run_async()
self._runs.append(hook)
def get_results(self):
results = {"config": self.context,
"results": [],
"summary": {}}
for hook in self._runs:
hook_result = hook.result()
results["results"].append(hook_result)
results["summary"].setdefault(hook_result["status"], 0)
results["summary"][hook_result["status"]] += 1
return results
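# Illustrative sketch (hypothetical, not shipped with Rally): a concrete
# trigger that fires its hook on the iterations listed in the plugin config.
#
#     @configure(name="example_list_trigger")
#     class ListTrigger(Trigger):
#         CONFIG_SCHEMA = {"type": "array",
#                          "items": {"type": "integer", "minimum": 1}}
#
#         def get_listening_event(self):
#             return "iteration"
#
#         def on_event(self, event_type, value=None):
#             if value in self.config:
#                 super(ListTrigger, self).on_event(event_type, value)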
| apache-2.0 | 4,417,696,180,981,200,400 | 32.573529 | 78 | 0.624617 | false |
Ghini/ghini.desktop | bauble/error.py | 1 | 2187 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2005,2006,2007,2008,2009 Brett Adams <[email protected]>
# Copyright (c) 2012-2015 Mario Frasca <[email protected]>
#
# This file is part of ghini.desktop.
#
# ghini.desktop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ghini.desktop is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ghini.desktop. If not, see <http://www.gnu.org/licenses/>.
#
# all bauble exceptions and errors
#
class BaubleError(Exception):
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
if self.msg is None:
return str(type(self).__name__)
else:
return '%s: %s' % (type(self).__name__, self.msg)
class CommitException(Exception):
def __init__(self, exc, row):
self.row = row # the model we were trying to commit
self.exc = exc # the exception thrown while committing
def __str__(self):
return str(self.exc)
class NoResultException(BaubleError):
## use this exception if the caller should return None
pass
class DatabaseError(BaubleError):
pass
class EmptyDatabaseError(DatabaseError):
pass
class MetaTableError(DatabaseError):
pass
class TimestampError(DatabaseError):
pass
class RegistryError(DatabaseError):
pass
class VersionError(DatabaseError):
def __init__(self, version):
super().__init__()
self.version = version
class SQLAlchemyVersionError(BaubleError):
pass
class CheckConditionError(BaubleError):
pass
def check(condition, msg=None):
"""
Check that condition is true. If not then raise
CheckConditionError(msg)
"""
if not condition:
raise CheckConditionError(msg)
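# Typical usage (illustrative): guard a precondition and fail loudly.
#
#     check(connection is not None, 'no database connection')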
| gpl-2.0 | -2,004,390,259,977,985,500 | 22.516129 | 78 | 0.683128 | false |
point97/hapifis | server/apps/survey/migrations/0058_auto__add_field_question_skip_condition.py | 1 | 10477 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Question.skip_condition'
db.add_column(u'survey_question', 'skip_condition',
self.gf('django.db.models.fields.CharField')(max_length=254, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Question.skip_condition'
db.delete_column(u'survey_question', 'skip_condition')
models = {
u'survey.location': {
'Meta': {'object_name': 'Location'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '7'}),
'lng': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '7'}),
'respondant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Respondant']", 'null': 'True', 'blank': 'True'}),
'response': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Response']"})
},
u'survey.locationanswer': {
'Meta': {'object_name': 'LocationAnswer'},
'answer': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Location']"})
},
u'survey.multianswer': {
'Meta': {'object_name': 'MultiAnswer'},
'answer_label': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'answer_text': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'response': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Response']"})
},
u'survey.option': {
'Meta': {'object_name': 'Option'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rows': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'integer'", 'max_length': '20'})
},
u'survey.page': {
'Meta': {'ordering': "['survey', 'question__order']", 'object_name': 'Page'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Question']", 'null': 'True', 'blank': 'True'}),
'survey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Survey']", 'null': 'True', 'blank': 'True'})
},
u'survey.question': {
'Meta': {'ordering': "['order']", 'object_name': 'Question'},
'allow_other': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cols': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'filterBy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'filter_questions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'filter_questions_rel_+'", 'null': 'True', 'to': u"orm['survey.Question']"}),
'foreach_question': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'foreach'", 'null': 'True', 'to': u"orm['survey.Question']"}),
'grid_cols': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'grid_cols'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['survey.Option']"}),
'hoist_answers': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'hoisted'", 'null': 'True', 'to': u"orm['survey.Question']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'integer_max': ('django.db.models.fields.IntegerField', [], {'default': '365', 'null': 'True', 'blank': 'True'}),
'integer_min': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'lat': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'lng': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'min_zoom': ('django.db.models.fields.IntegerField', [], {'default': '10', 'null': 'True', 'blank': 'True'}),
'modalQuestion': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modal_question'", 'null': 'True', 'to': u"orm['survey.Question']"}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['survey.Option']", 'null': 'True', 'blank': 'True'}),
'options_from_previous_answer': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'options_json': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'randomize_groups': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'report_type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '20', 'null': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rows': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'skip_condition': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'term_condition': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'}),
'visualize': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'zoom': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'survey.respondant': {
'Meta': {'object_name': 'Respondant'},
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'county': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'default': 'None', 'max_length': '254', 'null': 'True', 'blank': 'True'}),
'last_question': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'locations': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'responses': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'responses'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['survey.Response']"}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'survey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Survey']"}),
'ts': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 2, 0, 0)'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'01496547-962e-4773-a38d-bd6dacdc25ca'", 'max_length': '36', 'primary_key': 'True'})
},
u'survey.response': {
'Meta': {'object_name': 'Response'},
'answer': ('django.db.models.fields.TextField', [], {}),
'answer_raw': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Question']"}),
'respondant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Respondant']", 'null': 'True', 'blank': 'True'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 2, 0, 0)'})
},
u'survey.survey': {
'Meta': {'object_name': 'Survey'},
'anon': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'offline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'questions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['survey.Question']", 'null': 'True', 'through': u"orm['survey.Page']", 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '254'}),
'states': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
    complete_apps = ['survey']
| gpl-3.0 | -4,969,207,650,817,675,000 | 80.224806 | 207 | 0.549203 | false |
MadeiraCloud/salt | sources/salt/modules/pkgutil.py | 1 | 9536 | # -*- coding: utf-8 -*-
'''
Pkgutil support for Solaris
'''
# Import python libs
import copy
# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError, MinionError
def __virtual__():
'''
Set the virtual pkg module if the os is Solaris
'''
if 'os' in __grains__ and __grains__['os'] == 'Solaris':
return 'pkgutil'
return False
def refresh_db():
'''
Updates the pkgutil repo database (pkgutil -U)
CLI Example:
.. code-block:: bash
salt '*' pkgutil.refresh_db
'''
return __salt__['cmd.retcode']('/opt/csw/bin/pkgutil -U > /dev/null 2>&1') == 0
def upgrade_available(name):
'''
Check if there is an upgrade available for a certain package
CLI Example:
.. code-block:: bash
salt '*' pkgutil.upgrade_available CSWpython
'''
version_num = None
cmd = '/opt/csw/bin/pkgutil -c --parse --single {0} 2>/dev/null'.format(
name)
out = __salt__['cmd.run_stdout'](cmd)
if out:
version_num = out.split()[2].strip()
if version_num:
if version_num == "SAME":
return ''
else:
return version_num
return ''
def list_upgrades(refresh=True):
'''
List all available package upgrades on this system
CLI Example:
.. code-block:: bash
salt '*' pkgutil.list_upgrades
'''
if salt.utils.is_true(refresh):
refresh_db()
upgrades = {}
lines = __salt__['cmd.run_stdout'](
'/opt/csw/bin/pkgutil -A --parse').splitlines()
for line in lines:
comps = line.split('\t')
if comps[2] == "SAME":
continue
if comps[2] == "not installed":
continue
upgrades[comps[0]] = comps[1]
return upgrades
def upgrade(refresh=True, **kwargs):
'''
Upgrade all of the packages to the latest available version.
Returns a dict containing the changes::
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
CLI Example:
.. code-block:: bash
salt '*' pkgutil.upgrade
'''
if salt.utils.is_true(refresh):
refresh_db()
old = list_pkgs()
    # Upgrade every installed package to the latest available version
    # (pkgutil -yu with no package arguments upgrades everything).
cmd = '/opt/csw/bin/pkgutil -yu'
__salt__['cmd.run_all'](cmd)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
def list_pkgs(versions_as_list=False, **kwargs):
'''
List the packages currently installed as a dict::
{'<package_name>': '<version>'}
CLI Example:
.. code-block:: bash
salt '*' pkg.list_pkgs
salt '*' pkg.list_pkgs versions_as_list=True
'''
versions_as_list = salt.utils.is_true(versions_as_list)
# 'removed' not yet implemented or not applicable
if salt.utils.is_true(kwargs.get('removed')):
return {}
if 'pkg.list_pkgs' in __context__:
if versions_as_list:
return __context__['pkg.list_pkgs']
else:
ret = copy.deepcopy(__context__['pkg.list_pkgs'])
__salt__['pkg_resource.stringify'](ret)
return ret
ret = {}
cmd = '/usr/bin/pkginfo -x'
    # Package information is returned as two lines per package. On
    # even-offset lines, the package name is in the first column. On
    # odd-offset lines, the package version is in the second column.
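    # Illustrative two-line record from `pkginfo -x` (spacing approximate,
    # package name assumed):
    #   CSWpython              Python - the scripting language
    #             (i386) 2.6.9,REV=2014.02.24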
lines = __salt__['cmd.run'](cmd).splitlines()
for index, line in enumerate(lines):
if index % 2 == 0:
name = line.split()[0].strip()
if index % 2 == 1:
version_num = line.split()[1].strip()
__salt__['pkg_resource.add_pkg'](ret, name, version_num)
__salt__['pkg_resource.sort_pkglist'](ret)
__context__['pkg.list_pkgs'] = copy.deepcopy(ret)
if not versions_as_list:
__salt__['pkg_resource.stringify'](ret)
return ret
def version(*names, **kwargs):
'''
Returns a version if the package is installed, else returns an empty string
CLI Example:
.. code-block:: bash
salt '*' pkgutil.version CSWpython
'''
return __salt__['pkg_resource.version'](*names, **kwargs)
def latest_version(*names, **kwargs):
'''
Return the latest version of the named package available for upgrade or
installation. If more than one package name is specified, a dict of
name/version pairs is returned.
If the latest version of a given package is already installed, an empty
string will be returned for that package.
CLI Example:
.. code-block:: bash
salt '*' pkgutil.latest_version CSWpython
salt '*' pkgutil.latest_version <package1> <package2> <package3> ...
'''
refresh = salt.utils.is_true(kwargs.pop('refresh', True))
if not names:
return ''
ret = {}
# Initialize the dict with empty strings
for name in names:
ret[name] = ''
# Refresh before looking for the latest version available
if refresh:
refresh_db()
pkgs = list_pkgs()
cmd = '/opt/csw/bin/pkgutil -a --parse {0}'.format(' '.join(names))
output = __salt__['cmd.run_all'](cmd).get('stdout', '').splitlines()
for line in output:
try:
name, version_rev = line.split()[1:3]
except ValueError:
continue
if name in names:
cver = pkgs.get(name, '')
nver = version_rev.split(',')[0]
if not cver or salt.utils.compare_versions(ver1=cver,
oper='<',
ver2=nver):
# Remove revision for version comparison
ret[name] = version_rev
# Return a string if only one package name passed
if len(names) == 1:
return ret[names[0]]
return ret
# available_version is being deprecated
available_version = latest_version
def install(name=None, refresh=False, version=None, pkgs=None, **kwargs):
'''
Install packages using the pkgutil tool.
CLI Example:
.. code-block:: bash
salt '*' pkg.install <package_name>
salt '*' pkg.install SMClgcc346
Multiple Package Installation Options:
pkgs
A list of packages to install from OpenCSW. Must be passed as a python
list.
CLI Example:
.. code-block:: bash
salt '*' pkg.install pkgs='["foo", "bar"]'
salt '*' pkg.install pkgs='["foo", {"bar": "1.2.3"}]'
Returns a dict containing the new package names and versions::
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
'''
if refresh:
refresh_db()
try:
# Ignore 'sources' argument
pkg_params = __salt__['pkg_resource.parse_targets'](name,
pkgs,
**kwargs)[0]
except MinionError as exc:
raise CommandExecutionError(exc)
if pkg_params is None or len(pkg_params) == 0:
return {}
if pkgs is None and version and len(pkg_params) == 1:
pkg_params = {name: version}
targets = []
for param, pkgver in pkg_params.iteritems():
if pkgver is None:
targets.append(param)
else:
targets.append('{0}-{1}'.format(param, pkgver))
cmd = '/opt/csw/bin/pkgutil -yu {0}'.format(' '.join(targets))
old = list_pkgs()
__salt__['cmd.run_all'](cmd)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
def remove(name=None, pkgs=None, **kwargs):
'''
Remove a package and all its dependencies which are not in use by other
packages.
name
The name of the package to be deleted.
Multiple Package Options:
pkgs
A list of packages to delete. Must be passed as a python list. The
``name`` parameter will be ignored if this option is passed.
.. versionadded:: 0.16.0
Returns a dict containing the changes.
CLI Example:
.. code-block:: bash
salt '*' pkg.remove <package name>
salt '*' pkg.remove <package1>,<package2>,<package3>
salt '*' pkg.remove pkgs='["foo", "bar"]'
'''
try:
pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs)[0]
except MinionError as exc:
raise CommandExecutionError(exc)
old = list_pkgs()
targets = [x for x in pkg_params if x in old]
if not targets:
return {}
cmd = '/opt/csw/bin/pkgutil -yr {0}'.format(' '.join(targets))
__salt__['cmd.run_all'](cmd)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
def purge(name=None, pkgs=None, **kwargs):
'''
Package purges are not supported, this function is identical to
``remove()``.
name
The name of the package to be deleted.
Multiple Package Options:
pkgs
A list of packages to delete. Must be passed as a python list. The
``name`` parameter will be ignored if this option is passed.
.. versionadded:: 0.16.0
Returns a dict containing the changes.
CLI Example:
.. code-block:: bash
salt '*' pkg.purge <package name>
salt '*' pkg.purge <package1>,<package2>,<package3>
salt '*' pkg.purge pkgs='["foo", "bar"]'
'''
return remove(name=name, pkgs=pkgs)
| apache-2.0 | 7,205,687,913,548,600,000 | 25.197802 | 83 | 0.570889 | false |
tensorflow/federated | tensorflow_federated/python/common_libs/golden_test.py | 1 | 2657 | # Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for `golden` library."""
from absl.testing import absltest
from absl.testing import flagsaver
from tensorflow_federated.python.common_libs import golden
class GoldenTest(absltest.TestCase):
def test_check_string_succeeds(self):
golden.check_string('test_check_string_succeeds.expected',
'foo\nbar\nbaz\nfizzbuzz')
def test_check_string_fails(self):
with self.assertRaises(golden.MismatchedGoldenError):
golden.check_string('test_check_string_fails.expected',
'not\nwhat\nyou\nexpected')
def test_check_string_updates(self):
filename = 'test_check_string_updates.expected'
golden_path = golden._filename_to_golden_path(filename)
old_contents = 'old\ndata\n'
new_contents = 'new\ndata\n'
# Attempt to reset the contents of the file to their checked-in state.
try:
with open(golden_path, 'w') as f:
f.write(old_contents)
except (OSError, PermissionError):
# We're running without `--test_strategy=local`, and so can't test
# updates properly because these files are read-only.
return
# Check for a mismatch when `--update_goldens` isn't passed.
with self.assertRaises(golden.MismatchedGoldenError):
golden.check_string(filename, new_contents)
# Rerun with `--update_goldens`.
with flagsaver.flagsaver(update_goldens=True):
golden.check_string(filename, new_contents)
# Check again without `--update_goldens` now that they have been updated.
try:
golden.check_string(filename, new_contents)
except golden.MismatchedGoldenError as e:
self.fail(f'Unexpected mismatch after update: {e}')
# Reset the contents of the file to their checked-in state.
with open(golden_path, 'w') as f:
f.write(old_contents)
def test_check_raises_traceback(self):
with golden.check_raises_traceback('test_check_raises_traceback.expected',
RuntimeError):
raise RuntimeError()
if __name__ == '__main__':
absltest.main()
| apache-2.0 | -5,081,523,066,329,102,000 | 38.073529 | 78 | 0.697779 | false |
cgpotts/pypragmods | embeddedscalars/fragment.py | 1 | 7029 | #!/usr/bin/env python
"""
The logical grammar (base lexicon) used throughout the paper. The
code in grammar.py messes with the namespace that it establishes, in
order to implement lexical uncertainty in an intuitive way.
"""
__author__ = "Christopher Potts"
__version__ = "2.0"
__license__ = "GNU general public license, version 3"
__maintainer__ = "Christopher Potts"
__email__ = "See the author's website"
from itertools import product
import sys
from pypragmods.embeddedscalars.settings import a, b, c, s1, s2
from pypragmods.utils import powerset
######################################################################
def define_lexicon(player=[], shot=[], worlds=[]):
D_et = powerset(player+shot)
lex = {
# Concessions to tractability -- these are defined extensionally (invariant across worlds):
"some": [[X, Y] for X, Y in product(D_et, repeat=2) if len(set(X) & set(Y)) > 0],
"exactly_one": [[X, Y] for X, Y in product(D_et, repeat=2) if len(set(X) & set(Y)) == 1],
"every": [[X, Y] for X, Y in product(D_et, repeat=2) if set(X) <= set(Y)],
"no": [[X, Y] for X, Y in product(D_et, repeat=2) if len(set(X) & set(Y)) == 0],
"PlayerA": [X for X in powerset(player) if a in X],
"PlayerB": [X for X in powerset(player) if b in X],
"PlayerC": [X for X in powerset(player) if c in X],
# Tempting to intensionalize these, but that means using intensional quantifiers,
# which are intractable on this set-theoretic formulation. Our goal is to understand
# refinement and lexical uncertainty, which we can study using verbs and extensional
# quantifiers, so this limitation seems well worth it.
"player": player,
"shot": shot,
# Intensional predicates:
"scored": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) > 0],
"aced": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) > 1],
"missed": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) == 0],
"hit" : [[w, x, y] for w, x, y in product(worlds, player, shot) if y in shot[: w[player.index(x)]]],
# More concessions to tractability -- we'll refine these rather than the determiners;
# this should have no effect because of the limited class of predicates -- no predicate
# is true of both players and shots, and player and shot have the same extensions in all
# worlds.
"some_player": [Y for Y in powerset(player) if len(set(player) & set(Y)) > 0],
"some_shot": [Y for Y in powerset(shot) if len(set(shot) & set(Y)) > 0],
"exactly_one_player": [Y for Y in powerset(player) if len(set(player) & set(Y)) == 1],
"exactly_one_shot": [Y for Y in D_et if len(set(shot) & set(Y)) == 1],
"every_player": [Y for Y in D_et if set(player) <= set(Y)],
"every_shot": [Y for Y in D_et if set(shot) <= set(Y)],
"no_player": [Y for Y in D_et if len(set(player) & set(Y)) == 0],
"no_shot": [Y for Y in D_et if len(set(shot) & set(Y)) == 0],
# Mainly for specifying refinements:
"not_every_player": [Y for Y in D_et if not(set(player) <= set(Y))],
"not_every_shot": [Y for Y in D_et if not(set(shot) <= set(Y))],
"scored_not_aced": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) == 1],
"only_PlayerA": [X for X in powerset(player) if a in X and len(X) == 1],
"only_PlayerB": [X for X in powerset(player) if b in X and len(X) == 1],
"only_PlayerC": [X for X in powerset(player) if c in X and len(X) == 1],
# For disjunctive examples (limited compositionality to keep the examples tractable):
"hit_shot1": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] in (1, 3)],
"hit_shot2": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] in (2, 3)],
"hit_shot1_or_shot2": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] != 0],
"hit_shot1_and_shot2": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] == 3]
}
return lex
def fa(A, b):
"""Muskens-like function application -- in a list [(x,y), ...], we get
back the second projection limited to the pairs where the first is b."""
return [y for x, y in A if x == b]
def iv(Q, X):
"""Returns a proposition as function true of a world w iff the set of
entities X-at-w is a member of the quantifier (set of sets) Q."""
return (lambda w : fa(X, w) in Q)
def tv(V, Q, worlds, subjects):
"""Funcion composition taking the intensional relation on entities V
and combining it with the set of sets Q to return an intensional
property. The dependence on worlds and subjects is unfortunate but
I don't see how to avoid it."""
return [[w,x] for w, x in product(worlds, subjects)
if [y for w_prime, x_prime, y in V if w_prime == w and x_prime == x] in Q]
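# How the combinators compose (illustrative, with the names bound in __main__
# below): fa(scored, w) is the list of players who scored at world w, and
# iv(some_player, scored)(w) tests whether that list belongs to the quantifier
# some_player -- that is, whether somebody scored at w.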
def coord(f, X, Y):
for x, y, z in f:
if x==X and y==Y:
return z
return []
######################################################################
def get_worlds(basic_states=(0,1,2), length=3, increasing=False):
worlds = list(product(basic_states, repeat=length))
    # Remove sequences in which the elements don't appear in
    # increasing order. We don't care about order, so this is just one
    # way of removing conceptual duplicates.
if increasing:
worlds = [w for w in worlds if tuple(sorted(w)) == w]
return worlds
def worldname(w):
return "".join(["NSA"[i] for i in w])
######################################################################
if __name__ == '__main__':
# Domain set up:
player = [a, b, c]
shot = [s1, s2]
worlds = get_worlds((0,1,2), length=len(player), increasing=True)
lex = define_lexicon(player=player, shot=shot, worlds=worlds)
# Import the lexicon into this namespace:
for word, sem in list(lex.items()):
setattr(sys.modules[__name__], word, sem)
# Examples:
for d1, d2 in product(("some", "exactly_one", "every", "no"), repeat=2):
msg = "%s(player)(hit(%s(shot)))" % (d1, d2)
formula = "iv(fa(%s, player), tv(hit, fa(%s, shot), worlds, player))" % (d1, d2)
print(msg, [worldname(w) for w in worlds if eval(formula)(w)])
# Examples:
for pn, pred in product(('PlayerA', 'PlayerB', 'PlayerC'), ("missed", "scored", "aced")):
msg = "%s(%s)" % (pn, pred)
formula = "iv(%s, %s)" % (pn, pred)
print(msg, [worldname(w) for w in worlds if eval(formula)(w)])
| gpl-3.0 | -2,607,856,894,522,690,000 | 49.568345 | 115 | 0.565372 | false |
luzheqi1987/nova-annotation | nova/tests/unit/api/openstack/compute/contrib/test_admin_actions.py | 1 | 29452 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.serialization import jsonutils
from oslo.utils import timeutils
import webob
from nova.api.openstack import common
from nova.api.openstack.compute.contrib import admin_actions as \
admin_actions_v2
from nova.api.openstack.compute.plugins.v3 import admin_actions as \
admin_actions_v21
from nova.compute import vm_states
import nova.context
from nova import exception
from nova import objects
from nova.openstack.common import uuidutils
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
class CommonMixin(object):
admin_actions = None
fake_url = None
def _make_request(self, url, body):
req = webob.Request.blank(self.fake_url + url)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.content_type = 'application/json'
return req.get_response(self.app)
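    # The helpers below stub the compute API with mox and then drive each
    # action through the full WSGI app, so every test also exercises routing,
    # request deserialization and HTTP error mapping.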
def _stub_instance_get(self, uuid=None):
if uuid is None:
uuid = uuidutils.generate_uuid()
instance = fake_instance.fake_db_instance(
id=1, uuid=uuid, vm_state=vm_states.ACTIVE,
task_state=None, launched_at=timeutils.utcnow())
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), instance)
self.compute_api.get(self.context, uuid, expected_attrs=None,
want_objects=True).AndReturn(instance)
return instance
def _stub_instance_get_failure(self, exc_info, uuid=None):
if uuid is None:
uuid = uuidutils.generate_uuid()
self.compute_api.get(self.context, uuid, expected_attrs=None,
want_objects=True).AndRaise(exc_info)
return uuid
def _test_non_existing_instance(self, action, body_map=None):
uuid = uuidutils.generate_uuid()
self._stub_instance_get_failure(
exception.InstanceNotFound(instance_id=uuid), uuid=uuid)
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % uuid,
{action: body_map.get(action)})
self.assertEqual(404, res.status_int)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_action(self, action, body=None, method=None):
if method is None:
method = action
instance = self._stub_instance_get()
getattr(self.compute_api, method)(self.context, instance)
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % instance['uuid'],
{action: None})
self.assertEqual(202, res.status_int)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_invalid_state(self, action, method=None, body_map=None,
compute_api_args_map=None):
if method is None:
method = action
if body_map is None:
body_map = {}
if compute_api_args_map is None:
compute_api_args_map = {}
instance = self._stub_instance_get()
args, kwargs = compute_api_args_map.get(action, ((), {}))
getattr(self.compute_api, method)(self.context, instance,
*args, **kwargs).AndRaise(
exception.InstanceInvalidState(
attr='vm_state', instance_uuid=instance['uuid'],
state='foo', method=method))
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % instance['uuid'],
{action: body_map.get(action)})
self.assertEqual(409, res.status_int)
self.assertIn("Cannot \'%(action)s\' instance %(id)s"
% {'id': instance['uuid'], 'action': action}, res.body)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_locked_instance(self, action, method=None, body_map=None,
compute_api_args_map=None):
if method is None:
method = action
instance = self._stub_instance_get()
args, kwargs = (), {}
act = None
if compute_api_args_map:
args, kwargs = compute_api_args_map.get(action, ((), {}))
act = body_map.get(action)
getattr(self.compute_api, method)(self.context, instance,
*args, **kwargs).AndRaise(
exception.InstanceIsLocked(instance_uuid=instance['uuid']))
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % instance['uuid'],
{action: act})
self.assertEqual(409, res.status_int)
self.assertIn('Instance %s is locked' % instance['uuid'], res.body)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
class AdminActionsTestV21(CommonMixin, test.NoDBTestCase):
admin_actions = admin_actions_v21
fake_url = '/v2/fake'
def setUp(self):
super(AdminActionsTestV21, self).setUp()
self.controller = self.admin_actions.AdminActionsController()
self.compute_api = self.controller.compute_api
self.context = nova.context.RequestContext('fake', 'fake')
def _fake_controller(*args, **kwargs):
return self.controller
self.stubs.Set(self.admin_actions, 'AdminActionsController',
_fake_controller)
self.app = self._get_app()
self.mox.StubOutWithMock(self.compute_api, 'get')
def _get_app(self):
return fakes.wsgi_app_v21(init_only=('servers',
'os-admin-actions'),
fake_auth_context=self.context)
def test_actions(self):
actions = ['resetNetwork', 'injectNetworkInfo']
method_translations = {'resetNetwork': 'reset_network',
'injectNetworkInfo': 'inject_network_info'}
for action in actions:
method = method_translations.get(action)
self.mox.StubOutWithMock(self.compute_api, method or action)
self._test_action(action, method=method)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def test_actions_with_non_existed_instance(self):
actions = ['resetNetwork', 'injectNetworkInfo', 'os-resetState']
body_map = {'os-resetState': {'state': 'active'}}
for action in actions:
self._test_non_existing_instance(action,
body_map=body_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def test_actions_with_locked_instance(self):
actions = ['resetNetwork', 'injectNetworkInfo']
method_translations = {'resetNetwork': 'reset_network',
'injectNetworkInfo': 'inject_network_info'}
for action in actions:
method = method_translations.get(action)
self.mox.StubOutWithMock(self.compute_api, method or action)
self._test_locked_instance(action, method=method)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
class AdminActionsTestV2(AdminActionsTestV21):
admin_actions = admin_actions_v2
def setUp(self):
super(AdminActionsTestV2, self).setUp()
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Admin_actions'])
def _get_app(self):
return fakes.wsgi_app(init_only=('servers',),
fake_auth_context=self.context)
def test_actions(self):
actions = ['pause', 'unpause', 'suspend', 'resume', 'migrate',
'resetNetwork', 'injectNetworkInfo', 'lock',
'unlock']
method_translations = {'migrate': 'resize',
'resetNetwork': 'reset_network',
'injectNetworkInfo': 'inject_network_info'}
for action in actions:
method = method_translations.get(action)
self.mox.StubOutWithMock(self.compute_api, method or action)
self._test_action(action, method=method)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def test_actions_raise_conflict_on_invalid_state(self):
actions = ['pause', 'unpause', 'suspend', 'resume', 'migrate',
'os-migrateLive']
method_translations = {'migrate': 'resize',
'os-migrateLive': 'live_migrate'}
body_map = {'os-migrateLive':
{'host': 'hostname',
'block_migration': False,
'disk_over_commit': False}}
args_map = {'os-migrateLive': ((False, False, 'hostname'), {})}
for action in actions:
method = method_translations.get(action)
self.mox.StubOutWithMock(self.compute_api, method or action)
self._test_invalid_state(action, method=method, body_map=body_map,
compute_api_args_map=args_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def test_actions_with_non_existed_instance(self):
actions = ['pause', 'unpause', 'suspend', 'resume',
'resetNetwork', 'injectNetworkInfo', 'lock',
'unlock', 'os-resetState', 'migrate', 'os-migrateLive']
body_map = {'os-resetState': {'state': 'active'},
'os-migrateLive':
{'host': 'hostname',
'block_migration': False,
'disk_over_commit': False}}
for action in actions:
self._test_non_existing_instance(action,
body_map=body_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def test_actions_with_locked_instance(self):
actions = ['pause', 'unpause', 'suspend', 'resume', 'migrate',
'resetNetwork', 'injectNetworkInfo', 'os-migrateLive']
method_translations = {'migrate': 'resize',
'resetNetwork': 'reset_network',
'injectNetworkInfo': 'inject_network_info',
'os-migrateLive': 'live_migrate'}
args_map = {'os-migrateLive': ((False, False, 'hostname'), {})}
body_map = {'os-migrateLive': {'host': 'hostname',
'block_migration': False,
'disk_over_commit': False}}
for action in actions:
method = method_translations.get(action)
self.mox.StubOutWithMock(self.compute_api, method or action)
self._test_locked_instance(action, method=method,
body_map=body_map,
compute_api_args_map=args_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def _test_migrate_exception(self, exc_info, expected_result):
self.mox.StubOutWithMock(self.compute_api, 'resize')
instance = self._stub_instance_get()
self.compute_api.resize(self.context, instance).AndRaise(exc_info)
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % instance['uuid'],
{'migrate': None})
self.assertEqual(expected_result, res.status_int)
def _test_migrate_live_succeeded(self, param):
self.mox.StubOutWithMock(self.compute_api, 'live_migrate')
instance = self._stub_instance_get()
self.compute_api.live_migrate(self.context, instance, False,
False, 'hostname')
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % instance['uuid'],
{'os-migrateLive': param})
self.assertEqual(202, res.status_int)
def test_migrate_live_enabled(self):
param = {'host': 'hostname',
'block_migration': False,
'disk_over_commit': False}
self._test_migrate_live_succeeded(param)
def test_migrate_live_enabled_with_string_param(self):
param = {'host': 'hostname',
'block_migration': "False",
'disk_over_commit': "False"}
self._test_migrate_live_succeeded(param)
def test_migrate_live_missing_dict_param(self):
body = {'os-migrateLive': {'dummy': 'hostname',
'block_migration': False,
'disk_over_commit': False}}
res = self._make_request('/servers/FAKE/action', body)
self.assertEqual(400, res.status_int)
def test_migrate_live_with_invalid_block_migration(self):
body = {'os-migrateLive': {'host': 'hostname',
'block_migration': "foo",
'disk_over_commit': False}}
res = self._make_request('/servers/FAKE/action', body)
self.assertEqual(400, res.status_int)
def test_migrate_live_with_invalid_disk_over_commit(self):
body = {'os-migrateLive': {'host': 'hostname',
'block_migration': False,
'disk_over_commit': "foo"}}
res = self._make_request('/servers/FAKE/action', body)
self.assertEqual(400, res.status_int)
def _test_migrate_live_failed_with_exception(self, fake_exc,
uuid=None):
self.mox.StubOutWithMock(self.compute_api, 'live_migrate')
instance = self._stub_instance_get(uuid=uuid)
self.compute_api.live_migrate(self.context, instance, False,
False, 'hostname').AndRaise(fake_exc)
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % instance.uuid,
{'os-migrateLive':
{'host': 'hostname',
'block_migration': False,
'disk_over_commit': False}})
self.assertEqual(400, res.status_int)
self.assertIn(unicode(fake_exc), res.body)
def test_migrate_live_compute_service_unavailable(self):
self._test_migrate_live_failed_with_exception(
exception.ComputeServiceUnavailable(host='host'))
def test_migrate_live_invalid_hypervisor_type(self):
self._test_migrate_live_failed_with_exception(
exception.InvalidHypervisorType())
def test_migrate_live_invalid_cpu_info(self):
self._test_migrate_live_failed_with_exception(
exception.InvalidCPUInfo(reason=""))
def test_migrate_live_unable_to_migrate_to_self(self):
uuid = uuidutils.generate_uuid()
self._test_migrate_live_failed_with_exception(
exception.UnableToMigrateToSelf(instance_id=uuid,
host='host'),
uuid=uuid)
def test_migrate_live_destination_hypervisor_too_old(self):
self._test_migrate_live_failed_with_exception(
exception.DestinationHypervisorTooOld())
def test_migrate_live_no_valid_host(self):
self._test_migrate_live_failed_with_exception(
exception.NoValidHost(reason=''))
def test_migrate_live_invalid_local_storage(self):
self._test_migrate_live_failed_with_exception(
exception.InvalidLocalStorage(path='', reason=''))
def test_migrate_live_invalid_shared_storage(self):
self._test_migrate_live_failed_with_exception(
exception.InvalidSharedStorage(path='', reason=''))
def test_migrate_live_hypervisor_unavailable(self):
self._test_migrate_live_failed_with_exception(
exception.HypervisorUnavailable(host=""))
def test_migrate_live_instance_not_running(self):
self._test_migrate_live_failed_with_exception(
exception.InstanceNotRunning(instance_id=""))
def test_migrate_live_migration_pre_check_error(self):
self._test_migrate_live_failed_with_exception(
exception.MigrationPreCheckError(reason=''))
def test_unlock_not_authorized(self):
self.mox.StubOutWithMock(self.compute_api, 'unlock')
instance = self._stub_instance_get()
self.compute_api.unlock(self.context, instance).AndRaise(
exception.PolicyNotAuthorized(action='unlock'))
self.mox.ReplayAll()
res = self._make_request('/servers/%s/action' % instance['uuid'],
{'unlock': None})
self.assertEqual(403, res.status_int)
class CreateBackupTestsV2(CommonMixin, test.NoDBTestCase):
fake_url = '/v2/fake'
def setUp(self):
super(CreateBackupTestsV2, self).setUp()
self.controller = admin_actions_v2.AdminActionsController()
self.compute_api = self.controller.compute_api
self.context = nova.context.RequestContext('fake', 'fake')
def _fake_controller(*args, **kwargs):
return self.controller
self.stubs.Set(admin_actions_v2, 'AdminActionsController',
_fake_controller)
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Admin_actions'])
self.app = fakes.wsgi_app(init_only=('servers',),
fake_auth_context=self.context)
self.mox.StubOutWithMock(self.compute_api, 'get')
self.mox.StubOutWithMock(common,
'check_img_metadata_properties_quota')
self.mox.StubOutWithMock(self.compute_api,
'backup')
def _make_url(self, uuid):
return '/servers/%s/action' % uuid
def test_create_backup_with_metadata(self):
metadata = {'123': 'asdf'}
body = {
'createBackup': {
'name': 'Backup 1',
'backup_type': 'daily',
'rotation': 1,
'metadata': metadata,
},
}
image = dict(id='fake-image-id', status='ACTIVE', name='Backup 1',
properties=metadata)
common.check_img_metadata_properties_quota(self.context, metadata)
instance = self._stub_instance_get()
self.compute_api.backup(self.context, instance, 'Backup 1',
'daily', 1,
extra_properties=metadata).AndReturn(image)
self.mox.ReplayAll()
res = self._make_request(self._make_url(instance['uuid']), body=body)
self.assertEqual(202, res.status_int)
self.assertIn('fake-image-id', res.headers['Location'])
def test_create_backup_no_name(self):
# Name is required for backups.
body = {
'createBackup': {
'backup_type': 'daily',
'rotation': 1,
},
}
res = self._make_request(self._make_url('fake'), body=body)
self.assertEqual(400, res.status_int)
def test_create_backup_no_rotation(self):
# Rotation is required for backup requests.
body = {
'createBackup': {
'name': 'Backup 1',
'backup_type': 'daily',
},
}
res = self._make_request(self._make_url('fake'), body=body)
self.assertEqual(400, res.status_int)
def test_create_backup_negative_rotation(self):
"""Rotation must be greater than or equal to zero
for backup requests
"""
body = {
'createBackup': {
'name': 'Backup 1',
'backup_type': 'daily',
'rotation': -1,
},
}
res = self._make_request(self._make_url('fake'), body=body)
self.assertEqual(400, res.status_int)
def test_create_backup_no_backup_type(self):
# Backup Type (daily or weekly) is required for backup requests.
body = {
'createBackup': {
'name': 'Backup 1',
'rotation': 1,
},
}
res = self._make_request(self._make_url('fake'), body=body)
self.assertEqual(400, res.status_int)
def test_create_backup_bad_entity(self):
body = {'createBackup': 'go'}
res = self._make_request(self._make_url('fake'), body=body)
self.assertEqual(400, res.status_int)
def test_create_backup_rotation_is_zero(self):
# The happy path for creating backups if rotation is zero.
body = {
'createBackup': {
'name': 'Backup 1',
'backup_type': 'daily',
'rotation': 0,
},
}
image = dict(id='fake-image-id', status='ACTIVE', name='Backup 1',
properties={})
common.check_img_metadata_properties_quota(self.context, {})
instance = self._stub_instance_get()
self.compute_api.backup(self.context, instance, 'Backup 1',
'daily', 0,
extra_properties={}).AndReturn(image)
self.mox.ReplayAll()
res = self._make_request(self._make_url(instance['uuid']), body=body)
self.assertEqual(202, res.status_int)
self.assertNotIn('Location', res.headers)
def test_create_backup_rotation_is_positive(self):
# The happy path for creating backups if rotation is positive.
body = {
'createBackup': {
'name': 'Backup 1',
'backup_type': 'daily',
'rotation': 1,
},
}
image = dict(id='fake-image-id', status='ACTIVE', name='Backup 1',
properties={})
common.check_img_metadata_properties_quota(self.context, {})
instance = self._stub_instance_get()
self.compute_api.backup(self.context, instance, 'Backup 1',
'daily', 1,
extra_properties={}).AndReturn(image)
self.mox.ReplayAll()
res = self._make_request(self._make_url(instance['uuid']), body=body)
self.assertEqual(202, res.status_int)
self.assertIn('fake-image-id', res.headers['Location'])
def test_create_backup_raises_conflict_on_invalid_state(self):
body_map = {
'createBackup': {
'name': 'Backup 1',
'backup_type': 'daily',
'rotation': 1,
},
}
args_map = {
'createBackup': (
('Backup 1', 'daily', 1), {'extra_properties': {}}
),
}
common.check_img_metadata_properties_quota(self.context, {})
self._test_invalid_state('createBackup', method='backup',
body_map=body_map,
compute_api_args_map=args_map)
def test_create_backup_with_non_existed_instance(self):
body_map = {
'createBackup': {
'name': 'Backup 1',
'backup_type': 'daily',
'rotation': 1,
},
}
common.check_img_metadata_properties_quota(self.context, {})
self._test_non_existing_instance('createBackup',
body_map=body_map)
def test_create_backup_with_invalid_createBackup(self):
body = {
'createBackupup': {
'name': 'Backup 1',
'backup_type': 'daily',
'rotation': 1,
},
}
res = self._make_request(self._make_url('fake'), body=body)
self.assertEqual(400, res.status_int)
class ResetStateTestsV21(test.NoDBTestCase):
admin_act = admin_actions_v21
bad_request = exception.ValidationError
fake_url = '/servers'
def setUp(self):
super(ResetStateTestsV21, self).setUp()
self.uuid = uuidutils.generate_uuid()
self.admin_api = self.admin_act.AdminActionsController()
self.compute_api = self.admin_api.compute_api
url = '%s/%s/action' % (self.fake_url, self.uuid)
self.request = self._get_request(url)
self.context = self.request.environ['nova.context']
def _get_request(self, url):
return fakes.HTTPRequest.blank(url)
def test_no_state(self):
self.assertRaises(self.bad_request,
self.admin_api._reset_state,
self.request, self.uuid,
body={"os-resetState": None})
def test_bad_state(self):
self.assertRaises(self.bad_request,
self.admin_api._reset_state,
self.request, self.uuid,
body={"os-resetState": {"state": "spam"}})
def test_no_instance(self):
self.mox.StubOutWithMock(self.compute_api, 'get')
exc = exception.InstanceNotFound(instance_id='inst_ud')
self.compute_api.get(self.context, self.uuid, expected_attrs=None,
want_objects=True).AndRaise(exc)
self.mox.ReplayAll()
self.assertRaises(webob.exc.HTTPNotFound,
self.admin_api._reset_state,
self.request, self.uuid,
body={"os-resetState": {"state": "active"}})
def _setup_mock(self, expected):
instance = objects.Instance()
instance.uuid = self.uuid
instance.vm_state = 'fake'
instance.task_state = 'fake'
instance.obj_reset_changes()
self.mox.StubOutWithMock(instance, 'save')
self.mox.StubOutWithMock(self.compute_api, 'get')
def check_state(admin_state_reset=True):
self.assertEqual(set(expected.keys()),
instance.obj_what_changed())
for k, v in expected.items():
self.assertEqual(v, getattr(instance, k),
"Instance.%s doesn't match" % k)
instance.obj_reset_changes()
self.compute_api.get(self.context, instance.uuid, expected_attrs=None,
want_objects=True).AndReturn(instance)
instance.save(admin_state_reset=True).WithSideEffects(check_state)
def test_reset_active(self):
self._setup_mock(dict(vm_state=vm_states.ACTIVE,
task_state=None))
self.mox.ReplayAll()
body = {"os-resetState": {"state": "active"}}
result = self.admin_api._reset_state(self.request, self.uuid,
body=body)
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if isinstance(self.admin_api,
admin_actions_v21.AdminActionsController):
status_int = self.admin_api._reset_state.wsgi_code
else:
status_int = result.status_int
self.assertEqual(202, status_int)
def test_reset_error(self):
self._setup_mock(dict(vm_state=vm_states.ERROR,
task_state=None))
self.mox.ReplayAll()
body = {"os-resetState": {"state": "error"}}
result = self.admin_api._reset_state(self.request, self.uuid,
body=body)
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if isinstance(self.admin_api,
admin_actions_v21.AdminActionsController):
status_int = self.admin_api._reset_state.wsgi_code
else:
status_int = result.status_int
self.assertEqual(202, status_int)
class ResetStateTestsV2(ResetStateTestsV21):
admin_act = admin_actions_v2
bad_request = webob.exc.HTTPBadRequest
fake_url = '/fake/servers'
| apache-2.0 | -1,109,982,764,369,733,400 | 39.125341 | 78 | 0.559996 | false |
ageneau/fishnet | test.py | 1 | 3617 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the lichess.org fishnet client.
# Copyright (C) 2016-2017 Niklas Fiekas <[email protected]>
# See LICENSE.txt for licensing information.
import fishnet
import argparse
import unittest
import logging
import sys
import multiprocessing
try:
import configparser
except ImportError:
import ConfigParser as configparser
STARTPOS = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"
class WorkerTest(unittest.TestCase):
def setUp(self):
conf = configparser.ConfigParser()
conf.add_section("Fishnet")
conf.set("Fishnet", "Key", "testkey")
fishnet.get_stockfish_command(conf, update=True)
self.worker = fishnet.Worker(conf,
threads=multiprocessing.cpu_count(),
memory=32,
progress_reporter=None)
self.worker.start_stockfish()
def tearDown(self):
self.worker.stop()
def test_bestmove(self):
job = {
"work": {
"type": "move",
"id": "abcdefgh",
"level": 8,
},
"game_id": "hgfedcba",
"variant": "standard",
"position": STARTPOS,
"moves": "f2f3 e7e6 g2g4",
}
response = self.worker.bestmove(job)
self.assertEqual(response["move"]["bestmove"], "d8h4")
def test_zh_bestmove(self):
job = {
"work": {
"type": "move",
"id": "hihihihi",
"level": 1,
},
"game_id": "ihihihih",
"variant": "crazyhouse",
"position": "rnbqk1nr/ppp2ppp/3b4/3N4/4p1PP/5P2/PPPPP3/R1BQKBNR/P b KQkq - 9 5",
"moves": "d6g3",
}
response = self.worker.bestmove(job)
self.assertEqual(response["move"]["bestmove"], "P@f2") # only move
def test_3check_bestmove(self):
job = {
"work": {
"type": "move",
"id": "3c3c3c3c",
"level": 8,
},
"game_id": "c3c3c3c3",
"variant": "threecheck",
"position": "r1b1kbnr/pppp1ppp/2n2q2/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 4 4 +2+0",
"moves": "f1c4 d7d6",
}
response = self.worker.bestmove(job)
self.assertEqual(response["move"]["bestmove"], "c4f7")
def test_analysis(self):
job = {
"work": {
"type": "analysis",
"id": "12345678",
},
"game_id": "87654321",
"variant": "standard",
"position": STARTPOS,
"moves": "f2f3 e7e6 g2g4 d8h4",
"skipPositions": [1],
}
response = self.worker.analysis(job)
result = response["analysis"]
self.assertTrue(0 <= result[0]["score"]["cp"] <= 90)
self.assertTrue(result[1]["skipped"])
self.assertEqual(result[3]["score"]["mate"], 1)
self.assertTrue(result[3]["pv"].startswith("d8h4"))
self.assertEqual(result[4]["score"]["mate"], 0)
class UnitTests(unittest.TestCase):
def test_parse_bool(self):
self.assertEqual(fishnet.parse_bool("yes"), True)
self.assertEqual(fishnet.parse_bool("no"), False)
self.assertEqual(fishnet.parse_bool(""), False)
self.assertEqual(fishnet.parse_bool("", default=True), True)
if __name__ == "__main__":
if "-v" in sys.argv or "--verbose" in sys.argv:
fishnet.setup_logging(3)
else:
fishnet.setup_logging(0)
unittest.main()
| gpl-3.0 | 584,502,897,233,279,600 | 26.610687 | 96 | 0.535803 | false |
joaormatos/anaconda | Chowdren/chowdren/shaderheader.py | 1 | 3599 | import sys
sys.path.append('..')
from chowdren.shaders import SHADERS
from mmfparser.gperf import get_hash_function
from chowdren.common import get_method_name, get_base_path
from chowdren.codewriter import CodeWriter
import os
def write_shader_param():
header = CodeWriter(os.path.join(get_base_path(), 'shaderparam.h'))
code = CodeWriter(os.path.join(get_base_path(), 'shaderparam.cpp'))
parameters = []
for shader in SHADERS:
for param in shader.uniforms:
parameters.append(param[0])
if shader.tex_param:
parameters.append(shader.tex_param)
parameters = list(set(parameters))
hash_data = get_hash_function('hash_shader_parameter', parameters,
False)
code.putln(hash_data.code.replace('inline ', ''))
header.start_guard('CHOWDREN_SHADERPARAM_H')
header.putln('unsigned int hash_shader_parameter(const char * str, '
'unsigned int len);')
header.putln('')
for k, v in hash_data.strings.iteritems():
name = 'SHADER_PARAM_%s' % get_method_name(k).upper()
header.putdefine(name, v)
header.close_guard('CHOWDREN_SHADERPARAM_H')
header.close()
code.close()
def write_shaders():
code = CodeWriter(os.path.join(get_base_path(), 'shaders.cpp'))
for shader in SHADERS:
shader_name = '%sShader' % shader.name
code.putlnc('class %s : public BaseShader', shader_name)
code.start_brace()
code.put_access('public')
for uniform in shader.uniforms:
code.putlnc('static int %s;', uniform[0])
if shader.uniforms:
code.putln('')
asset_name = 'SHADER_%s' % shader.asset_name.upper()
args = [asset_name]
options = []
if shader.has_back:
options.append('SHADER_HAS_BACK')
if shader.has_tex_size:
options.append('SHADER_HAS_TEX_SIZE')
if not options:
if shader.tex_param:
args.append('0')
else:
args.append(' | '.join(options))
if shader.tex_param:
args.append('"%s"' % shader.tex_param)
code.putlnc('%s()', shader_name)
code.putlnc(': BaseShader(%s)', ', '.join(args))
code.start_brace()
code.end_brace()
code.putln('')
code.putmeth('void initialize_parameters')
for uniform in shader.uniforms:
code.putlnc('%s = get_uniform(%r);', uniform[0], uniform[0],
cpp=False)
code.end_brace()
code.putln('')
code.putmeth('static void set_parameters', 'FrameObject * instance')
for uniform in shader.uniforms:
param = 'SHADER_PARAM_%s' % uniform[0].upper()
code.putlnc('BaseShader::set_%s(instance, %s, %s);', uniform[1],
param, uniform[0])
if shader.tex_param:
param = 'SHADER_PARAM_%s' % shader.tex_param.upper()
code.putlnc('BaseShader::set_image(instance, %s);', param)
code.end_brace()
code.end_brace(True)
for uniform in shader.uniforms:
code.putlnc('int %s::%s;', shader_name, uniform[0])
code.putln('')
# write static init code
for shader in SHADERS:
shader_type = '%sShader' % shader.name
shader_name = '%s_shader' % shader.name.lower()
code.putlnc('%s %s;', shader_type, shader_name)
code.close()
def main():
write_shader_param()
write_shaders()
if __name__ == '__main__':
main() | gpl-3.0 | -1,006,155,459,375,083,800 | 29.508475 | 76 | 0.572103 | false |
dhocker/athomepowerlineserver | CommandHandler.py | 1 | 6289 | #
# AtHomePowerlineServer - networked server for CM11/CM11A/XTB-232 X10 controllers
# Copyright © 2014, 2021 Dave Hocker
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# See the LICENSE file for more details.
#
import json
import datetime
import logging
import commands.ServerCommand
import commands.ServerCommand
import commands.StatusRequest
import commands.DeviceOn
import commands.DeviceOff
import commands.DeviceDim
import commands.DeviceBright
import commands.DeviceAllUnitsOff
import commands.DeviceAllLightsOff
import commands.DeviceAllLightsOn
import commands.GetTime
import commands.SetTime
import commands.GetSunData
import commands.define_device
import commands.query_devices
import commands.update_device
import commands.define_program
import commands.update_program
import commands.query_device_programs
import commands.query_device_program
import commands.delete_device
import commands.delete_device_program
import commands.all_devices_on
import commands.all_devices_off
import commands.query_available_devices
import commands.discover_devices
import commands.query_available_programs
import commands.query_programs
import commands.assign_program
import commands.delete_program
import commands.query_action_groups
import commands.query_action_group
import commands.query_action_group_devices
import commands.update_action_group
import commands.define_group
import commands.group_on
import commands.group_off
import commands.delete_group
import commands.query_available_group_devices
import commands.assign_device
import commands.delete_group_device
import commands.assign_program_to_group
logger = logging.getLogger("server")
class CommandHandler:
call_sequence = 1
# Error codes
NotImplemented = 404
UnhandledException = 405
COMMAND_HANDLER_LIST = {
"deviceon": commands.DeviceOn.DeviceOn,
"on": commands.DeviceOn.DeviceOn,
"deviceoff": commands.DeviceOff.DeviceOff,
"off": commands.DeviceOff.DeviceOff,
"dim": commands.DeviceDim.DeviceDim,
"bright": commands.DeviceBright.DeviceBright,
"statusrequest": commands.StatusRequest.StatusRequest,
"gettime": commands.GetTime.GetTime,
"settime": commands.SetTime.SetTime,
"getsundata": commands.GetSunData.GetSunData,
"definedevice": commands.define_device.DefineDevice,
"querydevices": commands.query_devices.QueryDevices,
"queryavailabledevices": commands.query_available_devices.QueryAvailableDevices,
"discoverdevices": commands.discover_devices.DiscoverDevices,
"queryavailableprograms": commands.query_available_programs.QueryAvailablePrograms,
"updatedevice": commands.update_device.UpdateDevice,
"deletedevice": commands.delete_device.DeleteDevice,
"queryprograms": commands.query_programs.QueryPrograms,
"defineprogram": commands.define_program.DefineProgram,
"updateprogram": commands.update_program.UpdateProgram,
"deleteprogram": commands.delete_program.DeleteProgram,
"deletedeviceprogram": commands.delete_device_program.DeleteDeviceProgram,
"querydeviceprograms": commands.query_device_programs.QueryDevicePrograms,
"querydeviceprogram": commands.query_device_program.QueryDeviceProgram,
"assignprogram": commands.assign_program.AssignProgram,
"assignprogramtogroup": commands.assign_program_to_group.AssignProgramToGroup,
"defineactiongroup": commands.define_group.DefineGroup,
"deleteactiongroup": commands.delete_group.DeleteGroup,
"queryactiongroups": commands.query_action_groups.QueryActionGroups,
"queryactiongroup": commands.query_action_group.QueryActionGroup,
"updateactiongroup": commands.update_action_group.UpdateActionGroup,
"queryactiongroupdevices": commands.query_action_group_devices.QueryActionGroupDevices,
"queryavailablegroupdevices": commands.query_available_group_devices.QueryAvailableGroupDevices,
"assigndevice": commands.assign_device.AssignDevice,
"deleteactiongroupdevice": commands.delete_group_device.DeleteActionGroupDevice,
"groupon": commands.group_on.GroupOn,
"groupoff": commands.group_off.GroupOff,
"alldeviceson": commands.all_devices_on.AllDevicesOn,
"alldevicesoff": commands.all_devices_off.AllDevicesOff
}
def GetHandler(self, command):
"""
Return an instance of the handler for a given command
:param command: API command as a string
:return: Instance of class that executes the command
"""
logger.info("GetHandler for command: %s", command)
ci_command = command.lower()
if ci_command in self.COMMAND_HANDLER_LIST.keys():
handler = self.COMMAND_HANDLER_LIST[ci_command]()
else:
handler = None
return handler
#######################################################################
# Execute the command specified by the incoming request
def Execute(self, request):
handler = self.GetHandler(request["request"])
if handler is not None:
response = handler.Execute(request)
response['call-sequence'] = CommandHandler.call_sequence
else:
logger.error("No handler for command: %s", request["request"])
response = CommandHandler.CreateErrorResponse(request["request"], CommandHandler.NotImplemented,
"Command is not recognized or implemented", "")
CommandHandler.call_sequence += 1
return response
@classmethod
def CreateErrorResponse(cls, request_command, result_code, error_msg, extra_data):
r = commands.ServerCommand.ServerCommand.CreateResponse(request_command)
r['result-code'] = result_code
r['error'] = error_msg
r['call-sequence'] = cls.call_sequence
r['data'] = extra_data
return r
| gpl-3.0 | -6,893,993,220,547,688,000 | 39.92 | 108 | 0.708651 | false |
Nolski/airmozilla | airmozilla/roku/tests/test_views.py | 1 | 8166 | import datetime
from django.conf import settings
from django.utils import timezone
from django.core.files import File
from funfactory.urlresolvers import reverse
from nose.tools import eq_, ok_
from airmozilla.main.models import (
Event,
Channel,
Template,
Picture,
EventHitStats,
Approval,
)
from airmozilla.base.tests.testbase import DjangoTestCase
class TestRoku(DjangoTestCase):
"""These tests are deliberately very UN-thorough.
That's because this whole app is very much an experiment.
"""
fixtures = ['airmozilla/manage/tests/main_testdata.json']
main_image = 'airmozilla/manage/tests/firefox.png'
def test_categories_feed(self):
url = reverse('roku:categories_feed')
main_channel = Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)
main_url = reverse('roku:channel_feed', args=(main_channel.slug,))
trending_url = reverse('roku:trending_feed')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(main_url in response.content)
ok_(trending_url in response.content)
def test_categories_feed_live_events(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('roku:categories_feed')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
now = timezone.now()
event.start_time = now - datetime.timedelta(seconds=3600)
event.archive_time = None
event.save()
assert not event.archive_time
assert event in Event.objects.live()
edgecast_hls = Template.objects.create(
content='something {{ file }}',
name='EdgeCast hls'
)
event.template = edgecast_hls
event.template_environment = {'file': 'abc123'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
# but it really has to have that 'file' attribute
event.template_environment = {'something': 'else'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
def test_channel_feed(self):
main_channel = Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)
main_url = reverse('roku:channel_feed', args=(main_channel.slug,))
response = self.client.get(main_url)
eq_(response.status_code, 200)
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
ok_(event.title not in response.content)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
# if the *needs* approval, it shouldn't appear
app = Approval.objects.create(event=event)
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
app.processed = True
app.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
app.approved = True
app.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
def test_channel_feed_with_no_placeholder(self):
main_channel = Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)
main_url = reverse('roku:channel_feed', args=(main_channel.slug,))
event = Event.objects.get(title='Test event')
with open(self.main_image) as fp:
picture = Picture.objects.create(file=File(fp))
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.picture = picture
event.placeholder_img = None
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
def test_event_feed(self):
event = Event.objects.get(title='Test event')
start_time = event.start_time
start_time = start_time.replace(year=2014)
start_time = start_time.replace(month=9)
start_time = start_time.replace(day=13)
event.start_time = start_time
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
event = Event.objects.get(title='Test event')
ok_(event.title not in response.content)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('%s - Sep 13 2014' % event.title in response.content)
def test_event_feed_escape_description(self):
event = Event.objects.get(title='Test event')
event.description = (
'Check out <a href="http://peterbe.com">peterbe</a> '
"and <script>alert('xss')</script> this."
)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Check out peterbe and' in response.content)
ok_('alert('xss') this' in response.content)
def test_event_duration(self):
event = Event.objects.get(title='Test event')
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<runtime>3600</runtime>' in response.content)
event.duration = 12
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<runtime>12</runtime>' in response.content)
def test_trending_feed(self):
url = reverse('roku:trending_feed')
response = self.client.get(url)
eq_(response.status_code, 200)
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
ok_(event.title not in response.content)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
# because it's not trending
ok_(event.title not in response.content)
EventHitStats.objects.create(
event=event,
total_hits=1000,
)
# This save will trigger to disrupt the cache used inside
# get_featured_events() since it'll change the modified time.
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
| bsd-3-clause | 1,651,720,650,497,076,500 | 34.350649 | 78 | 0.605927 | false |
RedhawkSDR/integration-gnuhawk | components/file_sink_c/tests/test_file_sink_c.py | 1 | 4065 | #!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify is under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
"""Test for all component implementations in file_sink_c"""
def testScaBasicBehavior(self):
#######################################################################
# Launch the component with the default execparams
execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
self.launch(execparams)
#######################################################################
# Verify the basic state of the component
self.assertNotEqual(self.comp, None)
self.assertEqual(self.comp.ref._non_existent(), False)
self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
#######################################################################
# Validate that query returns all expected parameters
# Query of '[]' should return the following set of properties
expectedProps = []
expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
props = self.comp.query([])
props = dict((x.id, any.from_any(x.value)) for x in props)
# Query may return more than expected, but not less
for expectedProp in expectedProps:
self.assertEquals(props.has_key(expectedProp.id), True)
#######################################################################
# Verify that all expected ports are available
for port in self.scd.get_componentfeatures().get_ports().get_uses():
port_obj = self.comp.getPort(str(port.get_usesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
for port in self.scd.get_componentfeatures().get_ports().get_provides():
port_obj = self.comp.getPort(str(port.get_providesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a(port.get_repid()), True)
#######################################################################
# Make sure start and stop can be called without throwing exceptions
self.comp.start()
self.comp.stop()
#######################################################################
# Simulate regular component shutdown
self.comp.releaseObject()
# TODO Add additional tests here
#
# See:
# ossie.utils.bulkio.bulkio_helpers,
# ossie.utils.bluefile.bluefile_helpers
# for modules that will assist with testing components with BULKIO ports
if __name__ == "__main__":
ossie.utils.testing.main("../file_sink_c.spd.xml") # By default tests all implementations
| gpl-3.0 | -1,006,469,763,631,082,900 | 46.823529 | 133 | 0.591636 | false |
sergey-dryabzhinsky/dedupsqlfs | dedupsqlfs/db/mysql/table/subvolume.py | 1 | 7299 | # -*- coding: utf8 -*-
__author__ = 'sergey'
import hashlib
from time import time
from dedupsqlfs.db.mysql.table import Table
class TableSubvolume( Table ):
_table_name = "subvolume"
def create( self ):
c = self.getCursor()
# Create table
c.execute(
"CREATE TABLE IF NOT EXISTS `%s` (" % self.getName()+
"`id` INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, "+
"`hash` BINARY(16) NOT NULL, "+
"`name` BLOB NOT NULL, "+
"`stats` TEXT, "+
"`root_diff` TEXT, "+
"`readonly` TINYINT UNSIGNED NOT NULL DEFAULT 0, "+
"`stats_at` INT UNSIGNED, "+
"`root_diff_at` INT UNSIGNED, "+
"`created_at` INT UNSIGNED NOT NULL, "+
"`mounted_at` INT UNSIGNED, "+
"`updated_at` INT UNSIGNED"+
")"+
self._getCreationAppendString()
)
self.createIndexIfNotExists('hash', ('hash',), True)
return
def insert( self, name, created_at, mounted_at=None, updated_at=None, stats_at=None, stats=None, root_diff_at=None, root_diff=None ):
"""
:param name: str - name for subvolume/snapshot
:param created_at: int - creation time
:param mounted_at: int|None - subvolume mounted
:param updated_at: int|None - subvolume updated
:return: int
"""
self.startTimer()
cur = self.getCursor()
digest = hashlib.new('md5', name).digest()
cur.execute(
"INSERT INTO `%s` " % self.getName()+
" (`hash`,`name`,`created_at`, `mounted_at`, `updated_at`, `stats_at`, `stats`, `root_diff_at`, `root_diff`) "+
"VALUES (X%(hash)s, X%(name)s, %(created)s, %(mounted)s, %(updated)s, %(statsed)s, %(stats)s, %(diffed)s, %(root_diff)s)",
{
"hash": digest.hex(),
"name": name.hex(),
"created": int(created_at),
"mounted": mounted_at,
"updated": updated_at,
"statsed": stats_at,
"stats": stats,
"diffed": root_diff_at,
"root_diff": root_diff
}
)
item = cur.lastrowid
self.stopTimer('insert')
return item
def get_count(self):
self.startTimer()
cur = self.getCursor()
cur.execute("SELECT COUNT(1) as `cnt` FROM `%s`" % self.getName())
item = cur.fetchone()
if item:
item = item["cnt"]
else:
item = 0
self.stopTimer('get_count')
return item
def readonly(self, subvol_id, flag=True):
self.startTimer()
if flag:
flag = 1
else:
flag = 0
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `readonly`=%(readonly)s WHERE `id`=%(id)s",
{
"readonly": flag,
"id": subvol_id
}
)
self.stopTimer('readonly')
return cur.rowcount
def mount_time(self, subvol_id, mtime=None):
self.startTimer()
if mtime is None:
mtime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `mounted_at`=%(mounted)s WHERE `id`=%(id)s",
{
"mounted": int(mtime),
"id": subvol_id
}
)
self.stopTimer('mount_time')
return cur.rowcount
def update_time(self, subvol_id, utime=None):
self.startTimer()
if utime is None:
utime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `updated_at`=%(updated)s WHERE `id`=%(id)s",
{
"updated": int(utime),
"id": subvol_id
}
)
self.stopTimer('update_time')
return cur.rowcount
def stats_time(self, subvol_id, stime=None):
self.startTimer()
if stime is None:
stime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `stats_at`=%(stime)s WHERE `id`=%(id)s",
{
"stime": int(stime),
"id": subvol_id
}
)
self.stopTimer('stats_time')
return cur.rowcount
def set_stats(self, subvol_id, stats):
self.startTimer()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `stats`=%(stats)s WHERE `id`=%(id)s",
{
"stats": stats,
"id": subvol_id
}
)
self.stopTimer('set_stats')
return cur.rowcount
def root_diff_time(self, subvol_id, rtime=None):
self.startTimer()
if rtime is None:
rtime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `root_diff_at`=%(rtime)s WHERE `id`=%(id)s",
{
"rtime": int(rtime),
"id": subvol_id
}
)
self.stopTimer('stats_time')
return cur.rowcount
def set_root_diff(self, subvol_id, root_diff):
self.startTimer()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `root_diff`=%(rdiff)s WHERE `id`=%(id)s",
{
"rdiff": root_diff,
"id": subvol_id
}
)
self.stopTimer('set_stats')
return cur.rowcount
def delete(self, subvol_id):
self.startTimer()
cur = self.getCursor()
cur.execute(
"DELETE FROM `%s` " % self.getName()+
" WHERE `id`=%(id)s",
{
"id": subvol_id
}
)
item = cur.rowcount
self.stopTimer('delete')
return item
def get(self, subvol_id):
self.startTimer()
cur = self.getCursor()
cur.execute(
"SELECT * FROM `%s` " % self.getName()+
" WHERE `id`=%(id)s",
{
"id": int(subvol_id)
}
)
item = cur.fetchone()
self.stopTimer('get')
return item
def find(self, name):
self.startTimer()
cur = self.getCursor()
digest = hashlib.new('md5', name).digest()
cur.execute(
"SELECT * FROM `%s` " % self.getName()+
" WHERE `hash`=X%(hash)s",
{
"hash": digest.hex()
}
)
item = cur.fetchone()
self.stopTimer('find')
return item
def get_ids(self, order_by=None, order_dir="ASC"):
self.startTimer()
cur = self.getCursor()
sql = "SELECT id FROM `%s`" % self.getName()
if order_by:
sql += " ORDER BY `%s` %s" % (order_by, order_dir,)
cur.execute(sql)
items = (item["id"] for item in cur.fetchall())
self.stopTimer('get_ids')
return items
pass
| mit | 8,267,020,845,199,836,000 | 28.550607 | 137 | 0.461981 | false |
abutcher/openshift-ansible | roles/lib_openshift/library/oc_obj.py | 1 | 60056 | #!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import fcntl
import json
import time
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/obj -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_obj
short_description: Generic interface to openshift objects
description:
- Manage openshift objects programmatically.
options:
state:
description:
    - The state of the object. One of present, absent, or list.
required: true
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: False
aliases: []
name:
description:
- Name of the object that is being queried.
required: false
default: None
aliases: []
namespace:
description:
- The namespace where the object lives.
required: false
    default: default
aliases: []
all_namespaces:
description:
- Search in all namespaces for the object.
required: false
default: false
aliases: []
kind:
description:
- The kind attribute of the object. e.g. dc, bc, svc, route. May be a comma-separated list, e.g. "dc,po,svc".
required: True
default: None
aliases: []
files:
description:
- A list of files provided for object
required: false
default: None
aliases: []
delete_after:
description:
- Whether or not to delete the files after processing them.
required: false
default: false
aliases: []
content:
description:
- Content of the object being managed.
required: false
default: None
aliases: []
force:
description:
- Whether or not to force the operation
required: false
default: None
aliases: []
selector:
description:
- Selector that gets added to the query.
required: false
default: None
aliases: []
field_selector:
description:
- Field selector that gets added to the query.
required: false
default: None
aliases: []
author:
- "Kenny Woodson <[email protected]>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
oc_obj:
kind: dc
name: router
namespace: default
register: router_output
'''
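# A second, illustrative invocation (the selector value is an assumption,
# not a default). With state 'list' the module only queries; 'present'
# applies the given 'content' or 'files', and 'absent' deletes the object:
#
#   oc_obj:
#     state: list
#     kind: dc,svc
#     namespace: default
#     selector: app=router
#   register: router_objects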
# -*- -*- -*- End included fragment: doc/obj -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception): # pragma: no cover
''' Exception class for Yedit '''
pass
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class Yedit(object): # pragma: no cover
''' Class to modify yaml files '''
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z{}/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
filename=None,
content=None,
content_type='yaml',
separator='.',
backup_ext=None,
backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
if backup_ext is None:
self.backup_ext = ".{}".format(time.strftime("%Y%m%dT%H%M%S"))
else:
self.backup_ext = backup_ext
self.load(content_type=self.content_type)
if self.__yaml_dict is None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for separator '''
return self._separator
@separator.setter
def separator(self, inc_sep):
''' setter method for separator '''
self._separator = inc_sep
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
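    # Illustrative sketch of the return shape (assuming the default '.'
    # separator): parse_key yields (array_index, dict_key) tuples, e.g.
    #   Yedit.parse_key('a.b[0]')  ->  [('', 'a'), ('', 'b'), ('0', '')]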
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
return False
return True
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def remove_entry(data, key, index=None, value=None, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
if value is not None:
data.pop(value)
elif index is not None:
raise YeditException("remove_entry for a dictionary does not have an index {}".format(index))
else:
data.clear()
return True
elif key == '' and isinstance(data, list):
ind = None
if value is not None:
try:
ind = data.index(value)
except ValueError:
return False
elif index is not None:
ind = index
else:
del data[:]
if ind is not None:
data.pop(ind)
return True
if not (key and Yedit.valid_key(key, sep)) and \
isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
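    # Example (illustrative): removing a list element by index mutates the
    # dict in place:
    #   data = {'a': [1, 2, 3]}
    #   Yedit.remove_entry(data, 'a[1]')   # -> True; data is now {'a': [1, 3]}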
@staticmethod
def add_entry(data, key, item=None, sep='.'):
        ''' Add an item to a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
            key = a#b (here '#' is the separator)
            sets d['a']['b'] = item
        '''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding to object at path: {}".format(key))
return data
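    # Example (illustrative): add_entry mutates 'data' in place and returns a
    # non-None value on success:
    #   data = {'a': {}}
    #   Yedit.add_entry(data, 'a.b.c', 'd')   # data is now {'a': {'b': {'c': 'd'}}}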
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
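    # Example (illustrative): dict keys and list indexes can be mixed:
    #   Yedit.get_entry({'a': {'b': ['c', 'd']}}, 'a.b[1]')   # -> 'd'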
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
fcntl.flock(yfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
yfd.write(contents)
fcntl.flock(yfd, fcntl.LOCK_UN)
os.rename(tmp_filename, filename)
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, '{}{}'.format(self.filename, self.backup_ext))
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripDumper if supported.
if self.content_type == 'yaml':
try:
Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except AttributeError:
Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
elif self.content_type == 'json':
Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
else:
            raise YeditException('Unsupported content_type: {}. '.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. {}'.format(err))
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if key_or_item in entry:
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path, index=None, value=None):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
'value=[{}] type=[{}]'.format(value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index is not None:
ind = index
if ind is not None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
# already exists, return
if ind is not None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry == value:
return (False, self.yaml_dict)
        # A plain deepcopy loses ruamel.yaml's formatting metadata, so
        # round-trip through ruamel.yaml when available and fall back to
        # copy.deepcopy under plain pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is None:
return (False, self.yaml_dict)
# When path equals "" it is a special case.
# "" refers to the root of the document
# Only update the root path (entire document) when its a list or dict
if path == '':
if isinstance(result, list) or isinstance(result, dict):
self.yaml_dict = result
return (True, self.yaml_dict)
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
            # A plain deepcopy loses ruamel.yaml's formatting metadata, so
            # round-trip through ruamel.yaml when available and fall back to
            # copy.deepcopy under plain pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is not None:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
curr_value = yaml.safe_load(str(invalue))
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
# It came in as a string but you didn't specify value_type as string
# we will convert to bool if it matches any of the above cases
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
        # Special case: yaml.safe_load('') returns None, so leave an empty string as-is
if isinstance(inc_value, str) and inc_value == '':
pass
# If vtype is not str then go ahead and attempt to yaml load it.
elif isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.safe_load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming value. ' +
'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
return inc_value
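    # Examples (illustrative): with the default vtype, boolean-looking strings
    # are YAML-loaded; passing 'str' preserves them:
    #   Yedit.parse_value('yes')          # -> True
    #   Yedit.parse_value('yes', 'str')   # -> 'yes'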
@staticmethod
def process_edits(edits, yamlfile):
'''run through a list of edits and process them one-by-one'''
results = []
for edit in edits:
value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
if edit.get('action') == 'update':
# pylint: disable=line-too-long
curr_value = Yedit.get_curr_value(
Yedit.parse_value(edit.get('curr_value')),
edit.get('curr_value_format'))
rval = yamlfile.update(edit['key'],
value,
edit.get('index'),
curr_value)
elif edit.get('action') == 'append':
rval = yamlfile.append(edit['key'], value)
else:
rval = yamlfile.put(edit['key'], value)
if rval[0]:
results.append({'key': edit['key'], 'edit': rval[1]})
return {'changed': len(results) > 0, 'results': results}
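    # Illustrative edits payload (only 'key' and 'value' are required; 'action'
    # selects update/append, otherwise put is used):
    #   yed = Yedit(content={'spec': {}})
    #   Yedit.process_edits([{'key': 'spec.replicas', 'value': 3}], yed)
    #   # -> {'changed': True, 'results': [...]}; yed.yaml_dict is
    #   #    {'spec': {'replicas': 3}}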
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params):
'''perform the idempotent crud operations'''
yamlfile = Yedit(filename=params['src'],
backup=params['backup'],
content_type=params['content_type'],
backup_ext=params['backup_ext'],
separator=params['separator'])
state = params['state']
if params['src']:
rval = yamlfile.load()
if yamlfile.yaml_dict is None and state != 'present':
return {'failed': True,
'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
'file exists, that it is has correct permissions, and is valid yaml.'}
if state == 'list':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['key']:
rval = yamlfile.get(params['key'])
return {'changed': False, 'result': rval, 'state': state}
elif state == 'absent':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['update']:
rval = yamlfile.pop(params['key'], params['value'])
else:
rval = yamlfile.delete(params['key'], params['index'], params['value'])
if rval[0] and params['src']:
yamlfile.write()
return {'changed': rval[0], 'result': rval[1], 'state': state}
elif state == 'present':
# check if content is different than what is in the file
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
# We had no edits to make and the contents are the same
if yamlfile.yaml_dict == content and \
params['value'] is None:
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
yamlfile.yaml_dict = content
            # If we were passed a key and value, encapsulate them in a
            # single-edit list and process them the same way as 'edits'
edits = []
_edit = {}
if params['value'] is not None:
_edit['value'] = params['value']
_edit['value_type'] = params['value_type']
_edit['key'] = params['key']
if params['update']:
_edit['action'] = 'update'
_edit['curr_value'] = params['curr_value']
_edit['curr_value_format'] = params['curr_value_format']
_edit['index'] = params['index']
elif params['append']:
_edit['action'] = 'append'
edits.append(_edit)
elif params['edits'] is not None:
edits = params['edits']
if edits:
results = Yedit.process_edits(edits, yamlfile)
# if there were changes and a src provided to us we need to write
if results['changed'] and params['src']:
yamlfile.write()
return {'changed': results['changed'], 'result': results['results'], 'state': state}
# no edits to make
if params['src']:
# pylint: disable=redefined-variable-type
rval = yamlfile.write()
return {'changed': rval[0],
'result': rval[1],
'state': state}
# We were passed content but no src, key or value, or edits. Return contents in memory
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
        return {'failed': True, 'msg': 'Unknown state passed'}
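    # Illustrative params for run_ansible; every key below must be present
    # (values shown are assumptions for a simple in-memory query):
    #   params = dict(src=None, backup=False, backup_ext=None, content_type='yaml',
    #                 separator='.', state='list', content={'a': {'b': 1}}, key='a.b',
    #                 update=False, append=False, index=None, value=None,
    #                 value_type='', curr_value=None, curr_value_format=None, edits=None)
    #   Yedit.run_ansible(params)   # -> {'changed': False, 'result': 1, 'state': 'list'}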
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
updated = False
if content is not None:
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
updated = True
elif edits is not None:
results = Yedit.process_edits(edits, yed)
if results['changed']:
updated = True
if updated:
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
        template_data: the incoming template's data, passed in place of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-p')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None, field_selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
if field_selector is not None:
cmd.append('--field-selector={}'.format(field_selector))
# Name cannot be used with selector or field_selector.
if selector is None and field_selector is None and name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']:  # E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
class Utils(object): # pragma: no cover
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(str(contents))
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
Utils._write(tmpfile, open(inc_file).read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
        # horrible hack to get the openshift version in OpenShift 3.2:
        # by default, "oc version" in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
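    # Example (assumed input): "oc v3.6.0\nkubernetes v1.6.1" would yield
    # {'oc': 'v3.6.0', 'kubernetes': 'v1.6.1', 'openshift': 'v3.6.0'}.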
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
# "v3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = version[1:4]
return versions_dict
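    # Example: {'oc': 'v3.3.0.33-1'} yields
    # {'oc_numeric': '3.3.0.33', 'oc_short': '3.3'}.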
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import rpm
transaction_set = rpm.TransactionSet()
rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
return rpmquery.count() > 0
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
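    # Example: check_def_equal({'spec': {'replicas': 1}},
    # {'spec': {'replicas': 1}, 'status': {'phase': 'Active'}}) returns True,
    # because 'status' (like 'metadata') is skipped by default.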
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self, ascommalist=''):
'''return all options as a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs'''
return self.stringify(ascommalist)
def stringify(self, ascommalist=''):
''' return the options hash as cli params in a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs '''
rval = []
for key in sorted(self.config_options.keys()):
data = self.config_options[key]
if data['include'] \
and (data['value'] is not None or isinstance(data['value'], int)):
if key == ascommalist:
val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())])
else:
val = data['value']
rval.append('--{}={}'.format(key.replace('_', '-'), val))
return rval
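    # Example (hypothetical options): {'labels': {'include': True,
    # 'value': {'a': '1', 'b': '2'}}} with ascommalist='labels'
    # stringifies to ['--labels=a=1,b=2'].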
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_obj.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class OCObject(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
# pylint allows 5. we need 6
# pylint: disable=too-many-arguments
def __init__(self,
kind,
namespace,
name=None,
selector=None,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False,
field_selector=None):
''' Constructor for OpenshiftOC '''
super(OCObject, self).__init__(namespace, kubeconfig=kubeconfig, verbose=verbose,
all_namespaces=all_namespaces)
self.kind = kind
self.name = name
self.selector = selector
self.field_selector = field_selector
def get(self):
'''return a kind by name '''
results = self._get(self.kind, name=self.name, selector=self.selector, field_selector=self.field_selector)
if (results['returncode'] != 0 and 'stderr' in results and
'\"{}\" not found'.format(self.name) in results['stderr']):
results['returncode'] = 0
return results
def delete(self):
'''delete the object'''
results = self._delete(self.kind, name=self.name, selector=self.selector)
if (results['returncode'] != 0 and 'stderr' in results and
'\"{}\" not found'.format(self.name) in results['stderr']):
results['returncode'] = 0
return results
def create(self, files=None, content=None):
'''
Create a config
        NOTE: This creates the first file OR the first content.
TODO: Handle all files and content passed in
'''
if files:
return self._create(files[0])
# pylint: disable=no-member
# The purpose of this change is twofold:
# - we need a check to only use the ruamel specific dumper if ruamel is loaded
# - the dumper or the flow style change is needed so openshift is able to parse
# the resulting yaml, at least until gopkg.in/yaml.v2 is updated
if hasattr(yaml, 'RoundTripDumper'):
content['data'] = yaml.dump(content['data'], Dumper=yaml.RoundTripDumper)
else:
content['data'] = yaml.safe_dump(content['data'], default_flow_style=False)
content_file = Utils.create_tmp_files_from_contents(content)[0]
return self._create(content_file['path'])
# pylint: disable=too-many-function-args
def update(self, files=None, content=None, force=False):
'''update a current openshift object
This receives a list of file names or content
and takes the first and calls replace.
TODO: take an entire list
'''
if files:
return self._replace(files[0], force)
if content and 'data' in content:
content = content['data']
return self.update_content(content, force)
def update_content(self, content, force=False):
'''update an object through using the content param'''
return self._replace_content(self.kind, self.name, content, force=force)
def needs_update(self, files=None, content=None, content_type='yaml'):
''' check to see if we need to update '''
objects = self.get()
if objects['returncode'] != 0:
return objects
data = None
if files:
data = Utils.get_resource_file(files[0], content_type)
elif content and 'data' in content:
data = content['data']
else:
data = content
# if equal then no need. So not equal is True
return not Utils.check_def_equal(data, objects['results'][0], skip_keys=None, debug=False)
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params, check_mode=False):
'''perform the ansible idempotent code'''
ocobj = OCObject(params['kind'],
params['namespace'],
params['name'],
params['selector'],
kubeconfig=params['kubeconfig'],
verbose=params['debug'],
all_namespaces=params['all_namespaces'],
field_selector=params['field_selector'])
state = params['state']
api_rval = ocobj.get()
#####
# Get
#####
if state == 'list':
if api_rval['returncode'] != 0:
return {'changed': False, 'failed': True, 'msg': api_rval}
return {'changed': False, 'results': api_rval, 'state': state}
########
# Delete
########
if state == 'absent':
        # verify it's not in our results
if (params['name'] is not None or params['selector'] is not None) and \
(len(api_rval['results']) == 0 or \
('items' in api_rval['results'][0] and len(api_rval['results'][0]['items']) == 0)):
return {'changed': False, 'state': state}
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a delete'}
api_rval = ocobj.delete()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
# create/update: Must define a name beyond this point
if not params['name']:
return {'failed': True, 'msg': 'Please specify a name when state is present.'}
if state == 'present':
########
# Create
########
if not Utils.exists(api_rval['results'], params['name']):
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a create'}
# Create it here
api_rval = ocobj.create(params['files'], params['content'])
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = ocobj.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# Remove files
if params['files'] and params['delete_after']:
Utils.cleanup(params['files'])
return {'changed': True, 'results': api_rval, 'state': state}
########
# Update
########
# if a file path is passed, use it.
update = ocobj.needs_update(params['files'], params['content'])
if not isinstance(update, bool):
return {'failed': True, 'msg': update}
# No changes
if not update:
if params['files'] and params['delete_after']:
Utils.cleanup(params['files'])
return {'changed': False, 'results': api_rval['results'][0], 'state': state}
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed an update.'}
api_rval = ocobj.update(params['files'],
params['content'],
params['force'])
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = ocobj.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
# -*- -*- -*- End included fragment: class/oc_obj.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_obj.py -*- -*- -*-
# pylint: disable=too-many-branches
def main():
'''
ansible oc module for services
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
debug=dict(default=False, type='bool'),
namespace=dict(default='default', type='str'),
            all_namespaces=dict(default=False, type='bool'),
name=dict(default=None, type='str'),
files=dict(default=None, type='list'),
kind=dict(required=True, type='str'),
delete_after=dict(default=False, type='bool'),
content=dict(default=None, type='dict'),
force=dict(default=False, type='bool'),
selector=dict(default=None, type='str'),
field_selector=dict(default=None, type='str'),
),
mutually_exclusive=[["content", "files"], ["selector", "name"], ["field_selector", "name"]],
supports_check_mode=True,
)
rval = OCObject.run_ansible(module.params, module.check_mode)
if 'failed' in rval:
module.fail_json(**rval)
module.exit_json(**rval)
if __name__ == '__main__':
main()
# -*- -*- -*- End included fragment: ansible/oc_obj.py -*- -*- -*-
| apache-2.0 | 9,208,056,315,244,281,000 | 33.396334 | 118 | 0.53247 | false |
luwei0917/awsemmd_script | script/CalcRg.py | 1 | 2726 | #!/usr/bin/python
# ----------------------------------------------------------------------
# Copyright (2010) Aram Davtyan and Garegin Papoian
# Papoian's Group, University of Maryland at Collage Park
# http://papoian.chem.umd.edu/
# Last Update: 03/04/2011
# ----------------------------------------------------------------------
import sys
from VectorAlgebra import *
atom_type = {'1' : 'C', '2' : 'N', '3' : 'O', '4' : 'C', '5' : 'H', '6' : 'C'}
atom_desc = {'1' : 'C-Alpha', '2' : 'N', '3' : 'O', '4' : 'C-Beta', '5' : 'H-Beta', '6' : 'C-Prime'}
PDB_type = {'1' : 'CA', '2' : 'N', '3' : 'O', '4' : 'CB', '5' : 'HB', '6' : 'C' }
class Atom:
No = 0
ty = ''
x = 0.0
y = 0.0
z = 0.0
desc = ''
def __init__(self, No, ty, No_m, x, y, z, desc=''):
self.No = No
self.ty = ty
self.No_m = No_m
self.x = x
self.y = y
self.z = z
self.desc = desc
def write_(self, f):
f.write(str(self.No))
f.write(' ')
f.write(PDB_type[self.No_m])
f.write(' ')
f.write(str(round(self.x,8)))
f.write(' ')
f.write(str(round(self.y,8)))
f.write(' ')
f.write(str(round(self.z,8)))
f.write(' ')
f.write(self.desc)
f.write('\n')
if len(sys.argv)!=3:
print "\nCalcQValue.py Input_file Output_file\n"
sys.exit()
input_file = sys.argv[1]
output_file = ""
if len(sys.argv)>2: output_file = sys.argv[2]
n_atoms = 0
i_atom = 0
item = ''
step = 0
ca_atoms = []
box = []
A = []
out = open(output_file, 'w')
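# computeRg below uses the pairwise identity for the radius of gyration:
# Rg^2 = (1/N) * sum_i |r_i - r_cm|^2 = (1/N^2) * sum_{i<j} |r_i - r_j|^2,
# so the center of mass never has to be computed explicitly.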
def computeRg():
if len(ca_atoms)==0:
print "Error. Empty snapshot"
exit()
N = len(ca_atoms)
Rg = 0.0
for ia in range(0, N):
for ja in range(ia+1, N):
rv = vector(ca_atoms[ia], ca_atoms[ja])
rsq = pow(rv[0],2)+pow(rv[1],2)+pow(rv[2],2)
Rg = Rg + rsq
Rg = sqrt(Rg/N/N)
return Rg
lfile = open(input_file)
for l in lfile:
l = l.strip()
if l[:5]=="ITEM:":
item = l[6:]
else:
if item == "TIMESTEP":
if len(ca_atoms)>0:
rg = computeRg()
out.write(str(round(rg,5)))
out.write(' ')
n_atoms = len(ca_atoms)
step = int(l)
ca_atoms = []
box = []
A = []
elif item == "NUMBER OF ATOMS":
n_atoms = int(l)
elif item[:10] == "BOX BOUNDS":
box.append(l)
l = l.split()
A.append([float(l[0]), float(l[1])])
elif item[:5] == "ATOMS":
l = l.split()
i_atom = l[0]
x = float(l[2])
y = float(l[3])
z = float(l[4])
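            # the dump stores scaled (fractional) coordinates, as in LAMMPS
            # dumps; map them back to absolute coordinates via the box bounds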
x = (A[0][1] - A[0][0])*x + A[0][0]
y = (A[1][1] - A[1][0])*y + A[1][0]
z = (A[2][1] - A[2][0])*z + A[2][0]
desc = atom_desc[l[1]]
if desc=='C-Alpha':
# atom = Atom(i_atom, atom_type[l[1]], l[1], x, y, z, desc)
atom = [x,y,z]
ca_atoms.append(atom)
lfile.close()
if len(ca_atoms)>0:
rg = computeRg()
out.write(str(round(rg,5)))
out.write(' ')
n_atoms = len(ca_atoms)
out.close()
| mit | -197,910,826,636,064,500 | 20.296875 | 100 | 0.495231 | false |
yuvipanda/jupyterhub-kubernetes-spawner | tests/test_spawner.py | 1 | 18995 | import json
import os
import time
from unittest.mock import Mock
import pytest
from jupyterhub.objects import Hub
from jupyterhub.objects import Server
from jupyterhub.orm import Spawner
from kubernetes.client.models import V1Capabilities
from kubernetes.client.models import V1Container
from kubernetes.client.models import V1PersistentVolumeClaim
from kubernetes.client.models import V1Pod
from kubernetes.client.models import V1SecurityContext
from traitlets.config import Config
from kubespawner import KubeSpawner
class MockUser(Mock):
name = 'fake'
server = Server()
def __init__(self, **kwargs):
super().__init__()
for key, value in kwargs.items():
setattr(self, key, value)
@property
def escaped_name(self):
return self.name
@property
def url(self):
return self.server.url
class MockOrmSpawner(Mock):
name = 'server'
server = None
def test_deprecated_config():
"""Deprecated config is handled correctly"""
with pytest.warns(DeprecationWarning):
c = Config()
# both set, non-deprecated wins
c.KubeSpawner.singleuser_fs_gid = 5
c.KubeSpawner.fs_gid = 10
# only deprecated set, should still work
c.KubeSpawner.hub_connect_ip = '10.0.1.1'
c.KubeSpawner.singleuser_extra_pod_config = extra_pod_config = {"key": "value"}
c.KubeSpawner.image_spec = 'abc:123'
c.KubeSpawner.image_pull_secrets = 'k8s-secret-a'
spawner = KubeSpawner(hub=Hub(), config=c, _mock=True)
assert spawner.hub.connect_ip == '10.0.1.1'
assert spawner.fs_gid == 10
assert spawner.extra_pod_config == extra_pod_config
# deprecated access gets the right values, too
assert spawner.singleuser_fs_gid == spawner.fs_gid
assert spawner.singleuser_extra_pod_config == spawner.extra_pod_config
assert spawner.image == 'abc:123'
assert spawner.image_pull_secrets[0]["name"] == 'k8s-secret-a'
def test_deprecated_runtime_access():
"""Runtime access/modification of deprecated traits works"""
spawner = KubeSpawner(_mock=True)
spawner.singleuser_uid = 10
assert spawner.uid == 10
assert spawner.singleuser_uid == 10
spawner.uid = 20
assert spawner.uid == 20
assert spawner.singleuser_uid == 20
spawner.image_spec = 'abc:latest'
assert spawner.image_spec == 'abc:latest'
assert spawner.image == 'abc:latest'
spawner.image = 'abc:123'
assert spawner.image_spec == 'abc:123'
assert spawner.image == 'abc:123'
spawner.image_pull_secrets = 'k8s-secret-a'
assert spawner.image_pull_secrets[0]["name"] == 'k8s-secret-a'
def test_spawner_values():
"""Spawner values are set correctly"""
spawner = KubeSpawner(_mock=True)
def set_id(spawner):
return 1
assert spawner.uid == None
spawner.uid = 10
assert spawner.uid == 10
spawner.uid = set_id
assert spawner.uid == set_id
spawner.uid = None
assert spawner.uid == None
assert spawner.gid == None
spawner.gid = 20
assert spawner.gid == 20
spawner.gid = set_id
assert spawner.gid == set_id
spawner.gid = None
assert spawner.gid == None
assert spawner.fs_gid == None
spawner.fs_gid = 30
assert spawner.fs_gid == 30
spawner.fs_gid = set_id
assert spawner.fs_gid == set_id
spawner.fs_gid = None
assert spawner.fs_gid == None
def check_up(url, ssl_ca=None, ssl_client_cert=None, ssl_client_key=None):
"""Check that a url responds with a non-error code
For use in exec_python_in_pod,
which means imports need to be in the function
Uses stdlib only because requests isn't always available in the target pod
"""
from urllib import request
import ssl
if ssl_ca:
context = ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH, cafile=ssl_ca
)
if ssl_client_cert:
context.load_cert_chain(certfile=ssl_client_cert, keyfile=ssl_client_key)
else:
context = None
# disable redirects (this would be easier if we ran exec in an image with requests)
class NoRedirect(request.HTTPRedirectHandler):
def redirect_request(self, req, fp, code, msg, headers, newurl):
return None
opener = request.build_opener(NoRedirect, request.HTTPSHandler(context=context))
try:
u = opener.open(url)
except request.HTTPError as e:
if e.status >= 400:
raise
u = e
print(u.status)
@pytest.mark.asyncio
async def test_spawn_start(
kube_ns,
kube_client,
config,
hub,
exec_python,
):
spawner = KubeSpawner(
hub=hub,
user=MockUser(name="start"),
config=config,
api_token="abc123",
oauth_client_id="unused",
)
# empty spawner isn't running
status = await spawner.poll()
assert isinstance(status, int)
pod_name = spawner.pod_name
# start the spawner
url = await spawner.start()
# verify the pod exists
pods = kube_client.list_namespaced_pod(kube_ns).items
pod_names = [p.metadata.name for p in pods]
assert pod_name in pod_names
# pod should be running when start returns
pod = kube_client.read_namespaced_pod(namespace=kube_ns, name=pod_name)
assert pod.status.phase == "Running"
# verify poll while running
status = await spawner.poll()
assert status is None
# make sure spawn url is correct
r = exec_python(check_up, {"url": url}, _retries=3)
assert r == "302"
# stop the pod
await spawner.stop()
# verify pod is gone
pods = kube_client.list_namespaced_pod(kube_ns).items
pod_names = [p.metadata.name for p in pods]
assert pod_name not in pod_names
# verify exit status
status = await spawner.poll()
assert isinstance(status, int)
@pytest.mark.asyncio
async def test_spawn_internal_ssl(
kube_ns,
kube_client,
ssl_app,
hub_pod_ssl,
hub_ssl,
config,
exec_python_pod,
):
hub_pod_name = hub_pod_ssl.metadata.name
spawner = KubeSpawner(
config=config,
hub=hub_ssl,
user=MockUser(name="ssl"),
api_token="abc123",
oauth_client_id="unused",
internal_ssl=True,
internal_trust_bundles=ssl_app.internal_trust_bundles,
internal_certs_location=ssl_app.internal_certs_location,
)
# initialize ssl config
hub_paths = await spawner.create_certs()
spawner.cert_paths = await spawner.move_certs(hub_paths)
# start the spawner
url = await spawner.start()
pod_name = "jupyter-%s" % spawner.user.name
# verify the pod exists
pods = kube_client.list_namespaced_pod(kube_ns).items
pod_names = [p.metadata.name for p in pods]
assert pod_name in pod_names
# verify poll while running
status = await spawner.poll()
assert status is None
# verify service and secret exist
secret_name = spawner.secret_name
secrets = kube_client.list_namespaced_secret(kube_ns).items
secret_names = [s.metadata.name for s in secrets]
assert secret_name in secret_names
service_name = pod_name
services = kube_client.list_namespaced_service(kube_ns).items
service_names = [s.metadata.name for s in services]
assert service_name in service_names
# resolve internal-ssl paths in hub-ssl pod
# these are in /etc/jupyterhub/internal-ssl
hub_ssl_dir = "/etc/jupyterhub"
hub_ssl_ca = os.path.join(hub_ssl_dir, ssl_app.internal_trust_bundles["hub-ca"])
# use certipy to resolve these?
hub_internal = os.path.join(hub_ssl_dir, "internal-ssl", "hub-internal")
hub_internal_cert = os.path.join(hub_internal, "hub-internal.crt")
hub_internal_key = os.path.join(hub_internal, "hub-internal.key")
r = exec_python_pod(
hub_pod_name,
check_up,
{
"url": url,
"ssl_ca": hub_ssl_ca,
"ssl_client_cert": hub_internal_cert,
"ssl_client_key": hub_internal_key,
},
_retries=3,
)
assert r == "302"
# stop the pod
await spawner.stop()
# verify pod is gone
pods = kube_client.list_namespaced_pod(kube_ns).items
pod_names = [p.metadata.name for p in pods]
assert "jupyter-%s" % spawner.user.name not in pod_names
# verify service and secret are gone
# it may take a little while for them to get cleaned up
for i in range(5):
secrets = kube_client.list_namespaced_secret(kube_ns).items
secret_names = {s.metadata.name for s in secrets}
services = kube_client.list_namespaced_service(kube_ns).items
service_names = {s.metadata.name for s in services}
if secret_name in secret_names or service_name in service_names:
time.sleep(1)
else:
break
assert secret_name not in secret_names
assert service_name not in service_names
@pytest.mark.asyncio
async def test_spawn_progress(kube_ns, kube_client, config, hub_pod, hub):
spawner = KubeSpawner(
hub=hub,
user=MockUser(name="progress"),
config=config,
)
# empty spawner isn't running
status = await spawner.poll()
assert isinstance(status, int)
# start the spawner
start_future = spawner.start()
# check progress events
messages = []
async for progress in spawner.progress():
assert 'progress' in progress
assert isinstance(progress['progress'], int)
assert 'message' in progress
assert isinstance(progress['message'], str)
messages.append(progress['message'])
# ensure we can serialize whatever we return
with open(os.devnull, "w") as devnull:
json.dump(progress, devnull)
assert 'Started container' in '\n'.join(messages)
await start_future
# stop the pod
await spawner.stop()
@pytest.mark.asyncio
async def test_get_pod_manifest_tolerates_mixed_input():
"""
Test that the get_pod_manifest function can handle a either a dictionary or
an object both representing V1Container objects and that the function
returns a V1Pod object containing V1Container objects.
"""
c = Config()
dict_model = {
'name': 'mock_name_1',
'image': 'mock_image_1',
'command': ['mock_command_1'],
}
object_model = V1Container(
name="mock_name_2",
image="mock_image_2",
command=['mock_command_2'],
security_context=V1SecurityContext(
privileged=True,
run_as_user=0,
capabilities=V1Capabilities(add=['NET_ADMIN']),
),
)
c.KubeSpawner.init_containers = [dict_model, object_model]
spawner = KubeSpawner(config=c, _mock=True)
# this test ensures the following line doesn't raise an error
manifest = await spawner.get_pod_manifest()
# and tests the return value's types
assert isinstance(manifest, V1Pod)
assert isinstance(manifest.spec.init_containers[0], V1Container)
assert isinstance(manifest.spec.init_containers[1], V1Container)
_test_profiles = [
{
'display_name': 'Training Env - Python',
'slug': 'training-python',
'default': True,
'kubespawner_override': {
'image': 'training/python:label',
'cpu_limit': 1,
'mem_limit': 512 * 1024 * 1024,
},
},
{
'display_name': 'Training Env - Datascience',
'slug': 'training-datascience',
'kubespawner_override': {
'image': 'training/datascience:label',
'cpu_limit': 4,
'mem_limit': 8 * 1024 * 1024 * 1024,
},
},
]
@pytest.mark.asyncio
async def test_user_options_set_from_form():
spawner = KubeSpawner(_mock=True)
spawner.profile_list = _test_profiles
# render the form
await spawner.get_options_form()
spawner.user_options = spawner.options_from_form(
{'profile': [_test_profiles[1]['slug']]}
)
assert spawner.user_options == {
'profile': _test_profiles[1]['slug'],
}
# nothing should be loaded yet
assert spawner.cpu_limit is None
await spawner.load_user_options()
for key, value in _test_profiles[1]['kubespawner_override'].items():
assert getattr(spawner, key) == value
@pytest.mark.asyncio
async def test_user_options_api():
spawner = KubeSpawner(_mock=True)
spawner.profile_list = _test_profiles
# set user_options directly (e.g. via api)
spawner.user_options = {'profile': _test_profiles[1]['slug']}
# nothing should be loaded yet
assert spawner.cpu_limit is None
await spawner.load_user_options()
for key, value in _test_profiles[1]['kubespawner_override'].items():
assert getattr(spawner, key) == value
@pytest.mark.asyncio
async def test_default_profile():
spawner = KubeSpawner(_mock=True)
spawner.profile_list = _test_profiles
spawner.user_options = {}
# nothing should be loaded yet
assert spawner.cpu_limit is None
await spawner.load_user_options()
for key, value in _test_profiles[0]['kubespawner_override'].items():
assert getattr(spawner, key) == value
def test_pod_name_no_named_servers():
c = Config()
c.JupyterHub.allow_named_servers = False
user = Config()
user.name = "user"
orm_spawner = Spawner()
spawner = KubeSpawner(config=c, user=user, orm_spawner=orm_spawner, _mock=True)
assert spawner.pod_name == "jupyter-user"
def test_pod_name_named_servers():
c = Config()
c.JupyterHub.allow_named_servers = True
user = Config()
user.name = "user"
orm_spawner = Spawner()
orm_spawner.name = "server"
spawner = KubeSpawner(config=c, user=user, orm_spawner=orm_spawner, _mock=True)
assert spawner.pod_name == "jupyter-user--server"
def test_pod_name_escaping():
c = Config()
c.JupyterHub.allow_named_servers = True
user = Config()
user.name = "some_user"
orm_spawner = Spawner()
orm_spawner.name = "test-server!"
spawner = KubeSpawner(config=c, user=user, orm_spawner=orm_spawner, _mock=True)
assert spawner.pod_name == "jupyter-some-5fuser--test-2dserver-21"
def test_pod_name_custom_template():
user = MockUser()
user.name = "some_user"
pod_name_template = "prefix-{username}-suffix"
spawner = KubeSpawner(user=user, pod_name_template=pod_name_template, _mock=True)
assert spawner.pod_name == "prefix-some-5fuser-suffix"
def test_pod_name_collision():
user1 = MockUser()
user1.name = "user-has-dash"
orm_spawner1 = Spawner()
orm_spawner1.name = ""
user2 = MockUser()
user2.name = "user-has"
orm_spawner2 = Spawner()
orm_spawner2.name = "2ddash"
spawner = KubeSpawner(user=user1, orm_spawner=orm_spawner1, _mock=True)
assert spawner.pod_name == "jupyter-user-2dhas-2ddash"
assert spawner.pvc_name == "claim-user-2dhas-2ddash"
named_spawner = KubeSpawner(user=user2, orm_spawner=orm_spawner2, _mock=True)
assert named_spawner.pod_name == "jupyter-user-2dhas--2ddash"
assert spawner.pod_name != named_spawner.pod_name
assert named_spawner.pvc_name == "claim-user-2dhas--2ddash"
assert spawner.pvc_name != named_spawner.pvc_name
def test_spawner_can_use_list_of_image_pull_secrets():
secrets = ["ecr", "regcred", "artifactory"]
c = Config()
c.KubeSpawner.image_spec = "private.docker.registry/jupyter:1.2.3"
c.KubeSpawner.image_pull_secrets = secrets
spawner = KubeSpawner(hub=Hub(), config=c, _mock=True)
assert spawner.image_pull_secrets == secrets
secrets = [dict(name=secret) for secret in secrets]
c = Config()
c.KubeSpawner.image_spec = "private.docker.registry/jupyter:1.2.3"
c.KubeSpawner.image_pull_secrets = secrets
spawner = KubeSpawner(hub=Hub(), config=c, _mock=True)
assert spawner.image_pull_secrets == secrets
@pytest.mark.asyncio
async def test_pod_connect_ip(kube_ns, kube_client, config, hub_pod, hub):
config.KubeSpawner.pod_connect_ip = (
"jupyter-{username}--{servername}.foo.example.com"
)
user = MockUser(name="connectip")
# w/o servername
spawner = KubeSpawner(hub=hub, user=user, config=config)
# start the spawner
res = await spawner.start()
# verify the pod IP and port
assert res == "http://jupyter-connectip.foo.example.com:8888"
await spawner.stop()
# w/ servername
spawner = KubeSpawner(
hub=hub,
user=user,
config=config,
orm_spawner=MockOrmSpawner(),
)
# start the spawner
res = await spawner.start()
# verify the pod IP and port
assert res == "http://jupyter-connectip--server.foo.example.com:8888"
await spawner.stop()
def test_get_pvc_manifest():
c = Config()
c.KubeSpawner.pvc_name_template = "user-{username}"
c.KubeSpawner.storage_extra_labels = {"user": "{username}"}
c.KubeSpawner.storage_selector = {"matchLabels": {"user": "{username}"}}
spawner = KubeSpawner(config=c, _mock=True)
manifest = spawner.get_pvc_manifest()
assert isinstance(manifest, V1PersistentVolumeClaim)
assert manifest.metadata.name == "user-mock-5fname"
assert manifest.metadata.labels == {
"user": "mock-5fname",
"hub.jupyter.org/username": "mock-5fname",
"app": "jupyterhub",
"component": "singleuser-storage",
"heritage": "jupyterhub",
}
assert manifest.spec.selector == {"matchLabels": {"user": "mock-5fname"}}
@pytest.mark.asyncio
async def test_url_changed(kube_ns, kube_client, config, hub_pod, hub):
user = MockUser(name="url")
config.KubeSpawner.pod_connect_ip = (
"jupyter-{username}--{servername}.foo.example.com"
)
spawner = KubeSpawner(hub=hub, user=user, config=config)
spawner.db = Mock()
# start the spawner
res = await spawner.start()
pod_host = "http://jupyter-url.foo.example.com:8888"
assert res == pod_host
# Mock an incorrect value in the db
# Can occur e.g. by interrupting a launch with a hub restart
# or possibly weird network things in kubernetes
spawner.server = Server.from_url(res + "/users/url/")
spawner.server.ip = "1.2.3.4"
spawner.server.port = 0
assert spawner.server.host == "http://1.2.3.4:0"
assert spawner.server.base_url == "/users/url/"
# poll checks the url, and should restore the correct value
await spawner.poll()
# verify change noticed and persisted to db
assert spawner.server.host == pod_host
assert spawner.db.commit.call_count == 1
# base_url should be left alone
assert spawner.server.base_url == "/users/url/"
previous_commit_count = spawner.db.commit.call_count
# run it again, to make sure we aren't incorrectly detecting and committing
# changes on every poll
await spawner.poll()
assert spawner.db.commit.call_count == previous_commit_count
await spawner.stop()
| bsd-3-clause | -1,392,545,702,530,550,500 | 29.198728 | 87 | 0.650066 | false |
jasondunsmore/heat | heat/tests/openstack/cinder/test_volume_type_encryption.py | 1 | 4129 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from heat.engine import stack
from heat.engine import template
from heat.tests import common
from heat.tests import utils
cinder_volume_type_encryption = {
'heat_template_version': '2015-04-30',
'resources': {
'my_encrypted_vol_type': {
'type': 'OS::Cinder::EncryptedVolumeType',
'properties': {
'provider': 'nova.volume.encryptors.luks.LuksEncryptor',
'control_location': 'front-end',
'cipher': 'aes-xts-plain64',
'key_size': '512',
'volume_type': '01bd581d-33fe-4d6d-bd7b-70ae076d39fb'
}
}
}
}
class CinderEncryptedVolumeTypeTest(common.HeatTestCase):
def setUp(self):
super(CinderEncryptedVolumeTypeTest, self).setUp()
self.ctx = utils.dummy_context()
self.stack = stack.Stack(
self.ctx, 'cinder_vol_type_encryption_test_stack',
template.Template(cinder_volume_type_encryption)
)
self.my_encrypted_vol_type = self.stack['my_encrypted_vol_type']
cinder = mock.MagicMock()
self.cinderclient = mock.MagicMock()
self.my_encrypted_vol_type.client = cinder
cinder.return_value = self.cinderclient
self.volume_encryption_types = (
self.cinderclient.volume_encryption_types)
def test_handle_create(self):
value = mock.MagicMock()
volume_type_id = '01bd581d-33fe-4d6d-bd7b-70ae076d39fb'
value.volume_type_id = volume_type_id
self.volume_encryption_types.create.return_value = value
with mock.patch.object(self.my_encrypted_vol_type.client_plugin(),
'get_volume_type') as mock_get_volume_type:
mock_get_volume_type.return_value = volume_type_id
self.my_encrypted_vol_type.handle_create()
mock_get_volume_type.assert_called_once_with(volume_type_id)
specs = {
'control_location': 'front-end',
'cipher': 'aes-xts-plain64',
'key_size': 512,
'provider': 'nova.volume.encryptors.luks.LuksEncryptor'
}
self.volume_encryption_types.create.assert_called_once_with(
volume_type=volume_type_id, specs=specs)
self.assertEqual(volume_type_id,
self.my_encrypted_vol_type.resource_id)
def test_handle_update(self):
update_args = {
'control_location': 'back-end',
'key_size': 256,
'cipher': 'aes-cbc-essiv',
'provider':
'nova.volume.encryptors.cryptsetup.CryptsetupEncryptor'
}
volume_type_id = '01bd581d-33fe-4d6d-bd7b-70ae076d39fb'
self.my_encrypted_vol_type.resource_id = volume_type_id
self.my_encrypted_vol_type.handle_update(json_snippet=None,
tmpl_diff=None,
prop_diff=update_args)
self.volume_encryption_types.update.assert_called_once_with(
volume_type=volume_type_id, specs=update_args)
def test_volume_type_show_resource(self):
volume_type_id = '01bd581d-33fe-4d6d-bd7b-70ae076d39fb'
self.my_encrypted_vol_type.resource_id = volume_type_id
volume_type = mock.Mock()
volume_type._info = {'vtype': 'info'}
self.volume_encryption_types.get.return_value = volume_type
self.assertEqual({'vtype': 'info'},
self.my_encrypted_vol_type.FnGetAtt('show'))
| apache-2.0 | -3,264,372,015,145,327,000 | 38.701923 | 78 | 0.610559 | false |
Aloomaio/googleads-python-lib | examples/adwords/v201802/extensions/add_site_links.py | 1 | 5277 | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adds sitelinks to a campaign using the CampaignExtensionSettingService.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from datetime import datetime
from googleads import adwords
from googleads import errors
from pytz import timezone
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
def main(client, campaign_id):
# Initialize appropriate services.
campaign_extension_setting_service = client.GetService(
'CampaignExtensionSettingService', version='v201802')
customer_service = client.GetService('CustomerService', version='v201802')
# Find the matching customer and its time zone. The getCustomers method will
# return a single Customer object corresponding to the configured
# clientCustomerId.
customer = customer_service.getCustomers()[0]
customer_tz = timezone(customer['dateTimeZone'])
time_fmt = '%s %s' % ('%Y%m%d %H%M%S', customer_tz)
print ('Found customer ID %d with time zone "%s".'
% (customer['customerId'], customer['dateTimeZone']))
# Create the sitelinks
sitelink1 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Store Hours',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/storehours']}
}
# Show the Thanksgiving specials link only from 20 - 27 Nov.
sitelink2 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Thanksgiving Specials',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/thanksgiving']},
# The time zone of the start and end date/times must match the time zone
# of the customer.
'startTime': datetime(datetime.now().year, 11, 20, 0, 0, 0, 0,
customer_tz).strftime(time_fmt),
'endTime': datetime(datetime.now().year, 11, 27, 23, 59, 59, 59,
customer_tz).strftime(time_fmt),
# Target this sitelink for United States only. For valid geolocation
# codes, see:
# https://developers.google.com/adwords/api/docs/appendix/geotargeting
'geoTargeting': {'id': 2840},
# Restrict targeting only to people physically within the United States.
# Otherwise, this could also show to people interested in the United
# States, but not physically located there.
'geoTargetingRestriction': {
'geoRestriction': 'LOCATION_OF_PRESENCE'
}
}
# Show the wifi details primarily for high end mobile users.
sitelink3 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Wifi Available',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/mobile/wifi']},
# See https://developers.google.com/adwords/api/docs/appendix/platforms
# for device criteria IDs.
'devicePreference': {'devicePreference': '30001'},
# Target this sitelink only when the ad is triggered by the keyword
# "free wifi."
'keywordTargeting': {
'text': 'free wifi',
'matchType': 'BROAD'
}
}
# Show the happy hours link only during Mon - Fri 6PM to 9PM.
sitelink4 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Happy hours',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/happyhours']},
'scheduling': {
'feedItemSchedules': [
{
'dayOfWeek': day,
'startHour': '18',
'startMinute': 'ZERO',
'endHour': '21',
'endMinute': 'ZERO'
} for day in ['MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY',
'FRIDAY']
]
}
}
# Create your Campaign Extension Settings. This associates the sitelinks
# to your campaign.
campaign_extension_setting = {
'campaignId': campaign_id,
'extensionType': 'SITELINK',
'extensionSetting': {
'extensions': [sitelink1, sitelink2, sitelink3, sitelink4]
}
}
operation = {
'operator': 'ADD',
'operand': campaign_extension_setting
}
# Add the extensions.
response = campaign_extension_setting_service.mutate([operation])
if 'value' in response:
print ('Extension setting with type "%s" was added to campaignId "%d".' %
(response['value'][0]['extensionType'],
response['value'][0]['campaignId']))
else:
raise errors.GoogleAdsError('No extension settings were added.')
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, CAMPAIGN_ID)
| apache-2.0 | 1,125,957,785,789,636,000 | 35.143836 | 78 | 0.653781 | false |
rodrigoancavil/repy | parseurl.py | 1 | 1217 | #!/bin/python
# example 1: parse a url with the re module
# this takes a string with a url and splits out the protocol, ip or nameserver, and port
# if the port doesn't exist and the protocol is http, the port is 80.
import re
url = raw_input('url : ')
# check if the url has the form protocol://hostname:port
# protocol is character set [a-zA-Z]{2,}: ftp, http, mongodb, ssh, etc.
# hostname is character set [a-zA-Z0-9\\.\-] www.server.com, 127.0.0.1, etc.
# port is numeric [0-9] 80, 8080, 21, 22, 25, 27001, etc. If you omit the port, assume 80 if protocol is http.
# ToDO:
# - If protocol is ftp and you omit the port, set port number 21
# - If protocol is ssh and you omit the port, set port number 22
# Etc...
parser = re.compile('[a-zA-Z]{2,}://[a-zA-Z0-9\\.\-]+(?::[0-9]{2,}|:?)$')
if bool(parser.search(url)):
print 'The url is valid << %s >>'%url
protocol = re.search('\w+(?=://)',url).group()
hostname = re.search('(?<=://)[a-zA-Z0-9\\.\-]+(?=:)*',url).group()
port = re.search('(?<=:)[0-9]{2,}',url)
print protocol
print hostname
if port != None:
print port.group()
else:
if protocol == 'http': print '80'
else:
print 'The url is invalid << %s >>'%url
| gpl-2.0 | -8,859,035,382,673,495,000 | 33.771429 | 110 | 0.601479 | false |
Queens-Applied-Sustainability/RTMSuite | example/real_example/go.py | 1 | 1868 | from collections import Counter
from numpy import nan
from rtm import SMARTS
import rtms
import pdb
CONFIG_FILE = open('config.yaml')
DATA_FILE = open('time-series.csv')
print "Importing configuration...",
site_info, csv_map, run_config = rtms.importer.config(CONFIG_FILE)
print "done."
print "Importing data...",
timeseries_in = rtms.importer.data(DATA_FILE, csv_map)
print "imported {} rows.".format(len(timeseries_in))
print "Selecting clear days...",
selector = rtms.Selector(site_info['latitude'], site_info['longitude'])
time_irrad_clear = selector.select(timeseries_in[['time', 'irradiance']])
print "selected {} ({:.1%}) clear points.".format(
*((lambda c: [c, float(c)/len(timeseries_in)])
(Counter(time_irrad_clear['clear'])[True])))
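# (the lambda above packs [clear_count, clear_count/total_points], which the
#  * operator unpacks into the two format placeholders)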
print "Preparing clear days for aerosol optical depth optimization...",
# make a list of dictionaries of settings and targets
names = time_irrad_clear.dtype.names
config_dicts = [dict(zip(names, record)) for record in time_irrad_clear]
optimizeable = []
for config_dict, clear in zip(config_dicts, time_irrad_clear['clear']):
if clear:
config = dict(config_dict)
config.pop('clear')
target = config.pop('irradiance')
optimizeable.append({
'settings': config,
'target': target,
})
print "done."
print "Submitting to optimizer for SMARTS..."
aods = rtms.optimize(optimizeable, site_info, SMARTS, 'angstroms_coefficient')
print "optimzed {} points ({:.1%}) of {} selected clear points.".format(
*((lambda l:((lambda s,t:[s,float(s)/t,t])(l - Counter(aods)[nan],l))
)(len(aods))))
print "Interpolating AOD between successful optimizations...",
optimes = [o['settings']['time'] for o in optimizeable]
time_aod = [[t, aod] for t, aod in zip(optimes, aods)]
interp_aods = rtms.interpolate(time_aod)
print "done."
pdb.set_trace() | gpl-3.0 | -8,564,920,868,122,504,000 | 31.789474 | 78 | 0.680407 | false |
Asurada2015/TFAPI_translation | array_ops/tf_dynamic_partition.py | 1 | 1609 | """tf.dynamic_partition(data, partitions, num_partitions, name = None) -- dynamic partitioning
Explanation: splits data into num_partitions pieces according to the indices taken from partitions.
For each index tuple js of size partitions.ndim, the slice data[js, ...] becomes part of the output outputs[partitions[js]].
We arrange the js in lexicographic order, i.e. the partition values appear as (0, 0, ..., 1, 1, ..., 2, 2, ..., ..., num_partitions - 1, num_partitions - 1, ...).
A slice with partitions[js] = i is placed into outputs[i]; the first dimension of outputs[i] corresponds to the positions where partitions.values == i.
"""
"""# Scalar partitions
partitions = 1
num_partitions = 2
data = [10, 20]
outputs[0] = [] # Empty with shape [0, 2]
outputs[1] = [[10, 20]]
# Vector partitions
partitions = [0, 0, 1, 1, 0]  # the partition index for each element
num_partitions = 2  # split the data into two pieces
data = [10, 20, 30, 40, 50]
outputs[0] = [10, 20, 50]
outputs[1] = [30, 40]
"""
import tensorflow as tf
sess = tf.Session()
num_partitions = 2
partitions = [0, 0, 1, 1, 0]
data = [10, 20, 30, 40, 50]
output = tf.dynamic_partition(data, partitions=partitions, num_partitions=num_partitions)
print(sess.run(output))
# [array([10, 20, 50]), array([30, 40])]
sess.close()
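# For completeness, here is the scalar-partition case from the docstring above,
# as runnable code (a minimal sketch against the same TF 1.x API; the expected
# output below is an assumption based on the documented behavior).
sess = tf.Session()
scalar_output = tf.dynamic_partition([10, 20], partitions=1, num_partitions=2)
print(sess.run(scalar_output))
# expected: [array([], shape=(0, 2), dtype=int32), array([[10, 20]], dtype=int32)]
sess.close()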
"""输入参数:
● data: 一个Tensor。
● partitions: 一个Tensor,数据类型必须是int32。任意数据维度,但其中的值必须是在范围[0, num_partitions)。
● num_partitions: 一个int,其值必须不小于1。输出的切片个数。
● name:(可选)为这个操作取一个名字。
输出参数:
● 一个数组Tensor,数据类型和data相同。"""
| apache-2.0 | 7,023,399,171,701,598,000 | 30 | 105 | 0.677419 | false |
ninewires/discover | notes/python/discover/recon/person.py | 1 | 1495 | #!/usr/bin/env python
import os
import sys
import time
import webbrowser
# variables
colorBlue = "\033[01;34m{0}\033[00m"
##############################################################################################################
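# runlocally(), banner(), and error() are assumed to be provided elsewhere by
# the discover framework; they are not defined in this fragment.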
runlocally()
os.system("clear")
banner()
print colorBlue.format("RECON")
print
firstName = raw_input("First name: ")
if firstName == "":
error()
lastName = raw_input("Last name: ")
if lastName == "":
error()
webbrowser.open("http://www.411.com/name/"+firstName+"-"+lastName+"/")
time.sleep(2)
uripath="http://www.advancedbackgroundchecks.com/search/results.aspx?type=&fn=${"+firstName+"}&mi=&ln=${"+lastName+"}&age=&city=&state="
webbrowser.open(uripath)
time.sleep(2)
webbrowser.open("https://www.linkedin.com/pub/dir/"+firstName+"/"+lastName)
time.sleep(2)
webbrowser.open("http://www.peekyou.com/"+firstName+"%5f"+lastName)
time.sleep(2)
webbrowser.open("http://phonenumbers.addresses.com/people/"+firstName+"+"+lastName)
time.sleep(2)
webbrowser.open("https://pipl.com/search/?q="+firstName+"+"+lastName)
time.sleep(2)
webbrowser.open("http://www.spokeo.com/"+firstName+"-"+lastName)
time.sleep(2)
webbrowser.open("https://twitter.com/search?q=%22"+firstName+"%20"+lastName+"%22")
time.sleep(2)
webbrowser.open("https://www.youtube.com/results?search_query="+firstName+"+"+lastName)
time.sleep(2)
webbrowser.open("http://www.zabasearch.com/query1_zaba.php?sname="+firstName+"%20"+lastName+"&state=ALL&ref=$ref&se=$se&doby=&city=&name_style=1&tm=&tmr=")
print
print
sys.exit(0)
| mit | -8,426,199,026,342,802,000 | 28.9 | 155 | 0.652843 | false |
izolight/django-seoultransport | busgokr/models.py | 1 | 2624 | from django.db import models
class RouteType(models.Model):
id = models.IntegerField(primary_key=True)
name = models.CharField(max_length=20)
color = models.CharField(max_length=10)
def __str__(self):
return self.name
class Location(models.Model):
name = models.CharField(max_length=30)
def __str__(self):
return self.name
class BusStation(models.Model):
id = models.IntegerField(primary_key=True)
arsid = models.IntegerField(unique=True, null=True)
name = models.ForeignKey(Location, null=True)
latitude = models.DecimalField(max_digits=18, decimal_places=15, null=True)
longitude = models.DecimalField(max_digits=18, decimal_places=15, null=True)
def __str__(self):
if self.name:
return str(self.name)
return str(self.id)
class Corporation(models.Model):
name = models.CharField(max_length=30)
def __str__(self):
return self.name
class BusRoute(models.Model):
id = models.IntegerField(primary_key=True)
name = models.CharField(max_length=20)
length = models.DecimalField(max_digits=4, decimal_places=1)
route_type = models.ForeignKey(RouteType)
first_time = models.DateTimeField()
last_time = models.DateTimeField()
first_station = models.ForeignKey('BusStation', related_name='first_station', null=True)
last_station = models.ForeignKey('BusStation', related_name='last_station', null=True)
interval = models.IntegerField()
first_low_time = models.DateTimeField(null=True)
last_low_time = models.DateTimeField(null=True)
corporation = models.ForeignKey('Corporation')
def __str__(self):
return self.name
class SearchedLive(models.Model):
busroute = models.ForeignKey(BusRoute)
def __str__(self):
return str(self.busroute)
class Section(models.Model):
id = models.IntegerField(primary_key=True)
distance = models.DecimalField(max_digits=8, decimal_places=3)
speed = models.IntegerField()
def __str__(self):
return str(self.id)
class Sequence(models.Model):
number = models.IntegerField()
section = models.ForeignKey('Section', null=True)
turnstation = models.ForeignKey('BusStation', related_name='turnstation')
station = models.ForeignKey('BusStation')
is_turnstation = models.BooleanField(default=False)
route = models.ForeignKey('BusRoute')
direction = models.ForeignKey(Location, null=True)
first_time = models.TimeField(null=True)
last_time = models.TimeField(null=True)
def __str__(self):
return str(self.route) + '-' + str(self.number) | bsd-2-clause | 1,073,920,046,047,536,800 | 29.523256 | 92 | 0.6875 | false |
serein7/openag_brain | src/openag_brain/software_modules/video_writer.py | 1 | 6194 | #!/usr/bin/env python
"""
Stitches together the images from a recipe run and stores them as a video on
the recipe_start data point
"""
import os
import time
import rospy
import tempfile
import subprocess
from openag.cli.config import config as cli_config
from openag.couch import Server
from openag.db_names import ENVIRONMENTAL_DATA_POINT
from openag.var_types import RECIPE_START, RECIPE_END, AERIAL_IMAGE
class VideoWriter(object):
def __init__(self, server, environment, variable):
self.image_dir = tempfile.mkdtemp()
self.data_db = server[ENVIRONMENTAL_DATA_POINT]
self.environment = environment
self.variable = variable
self.start_doc = None
self.end_doc = None
# Initialize change feeds
self.last_seq_by_var = {}
last_db_seq = self.data_db.changes(
limit=1, descending=True
)['last_seq']
for var in [RECIPE_START, RECIPE_END, self.variable]:
self.last_seq_by_var[var] = last_db_seq
# Figure out when the most recent recipe started
start_view = self.data_db.view("openag/by_variable", startkey=[
self.environment, "desired", RECIPE_START.name
], endkey=[
self.environment, "desired", RECIPE_START.name, {}
], group_level=3)
if len(start_view) == 0:
# No recipe has ever been run on this machine
return
self.start_doc = start_view.rows[0].value
# Make sure the recipe hasn't ended yet
end_view = self.data_db.view("openag/by_variable", startkey=[
self.environment, "desired", RECIPE_END.name
], endkey=[
self.environment, "desired", RECIPE_END.name, {}
], group_level=3)
if len(end_view):
self.end_doc = end_view.rows[0].value
if (self.end_doc["timestamp"] > self.start_doc["timestamp"]):
return
# Download all of the images from the recipe run so far
image_view = self.data_db.view("openag/by_variable", startkey=[
self.environment, "measured", AERIAL_IMAGE.name,
self.start_doc["timestamp"]
], endkey=[
self.environment, "measured", AERIAL_IMAGE.name, {}
], group_level=4)
for row in image_view:
self.download_image(row.value)
self.update_video()
def __del__(self):
import shutil
shutil.rmtree(self.image_dir)
def run(self):
while True:
time.sleep(5)
if rospy.is_shutdown():
break
if self.start_doc and (not self.end_doc or self.start_doc["timestamp"] > self.end_doc["timestamp"]):
# A recipe is running
# Check if it has ended
end_docs = self.get_variable_changes(RECIPE_END)
for end_doc in end_docs:
                    if self.end_doc is None or end_doc["timestamp"] > self.end_doc["timestamp"]:
                        self.end_doc = end_doc
# Update the timelapse
res = self.get_variable_changes(self.variable)
should_update_video = False
for img in res:
if img["timestamp"] > self.start_doc["timestamp"]:
self.download_image(img)
should_update_video = True
if should_update_video:
self.update_video()
else:
# No recipe is running
# Check if a recipe has started
res = self.get_variable_changes(RECIPE_START)
if len(res):
self.start_doc = res[-1]
def get_variable_changes(self, variable):
"""
Get a list of all new environmental data points of the given variable
since the last time this function was called with that variable
"""
res = self.data_db.changes(
since=self.last_seq_by_var.get(variable, 0),
filter="openag/by_variable", variables=[variable],
include_docs=True
)
self.last_seq_by_var[variable] = res["last_seq"]
return [x["doc"] for x in res["results"]]
def download_image(self, doc):
"""
        Downloads the image stored as an attachment on the given document and
stores it in the folder with the rest of the images for the current
recipe run
"""
image = self.data_db.get_attachment(doc, "image")
if image is None:
# We might see the document before the attachment is uploaded. Wait
# a little while and try again
time.sleep(1)
image = self.data_db.get_attachment(doc, "image")
file_name = str(int(doc["timestamp"])) + ".png"
file_path = os.path.join(self.image_dir, file_name)
with open(file_path, "w+") as f:
f.write(image.read())
def update_video(self):
"""
Constructs a video from the images already downloaded and stores it in
the RECIPE_START document for the current recipe run
"""
out_file = os.path.join(self.image_dir, "out.mp4")
if os.path.isfile(out_file):
os.remove(out_file)
if subprocess.call([
"ffmpeg", "-framerate", "1", "-pattern_type", "glob", "-i",
"*.png", "-c:v", "libx264", "out.mp4"
], cwd=self.image_dir):
raise RuntimeError("Failed to update video")
with open(out_file) as f:
print self.data_db.put_attachment(
self.start_doc, f, "timelapse", "video/mp4"
)
if __name__ == '__main__':
db_server = cli_config["local_server"]["url"]
if not db_server:
raise RuntimeError("No local database specified")
server = Server(db_server)
rospy.init_node("video_writer")
namespace = rospy.get_namespace()
if namespace == '/':
raise RuntimeError(
"Video writer cannot be run in the global namespace. Please "
"designate an environment for this module."
)
environment = namespace.split('/')[-2]
mod = VideoWriter(server, environment, AERIAL_IMAGE)
mod.run()
| gpl-3.0 | 2,755,527,196,217,202,700 | 37.7125 | 112 | 0.570552 | false |
HybridF5/jacket | jacket/compute/conf/vnc.py | 1 | 7868 | # Copyright (c) 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
vnc_group = cfg.OptGroup(
'vnc',
title='VNC options',
help="""
Virtual Network Computer (VNC) can be used to provide remote desktop
console access to instances for tenants and/or administrators.""")
enabled = cfg.BoolOpt(
'enabled',
default=True,
deprecated_group='DEFAULT',
deprecated_name='vnc_enabled',
help="""Enable VNC related features.
Guests will get created with graphical devices to support this. Clients
(for example Horizon) can then establish a VNC connection to the guest.
Possible values:
* True: Enables the feature
* False: Disables the feature
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
keymap = cfg.StrOpt(
'keymap',
default='en-us',
deprecated_group='DEFAULT',
deprecated_name='vnc_keymap',
help="""Keymap for VNC.
The keyboard mapping (keymap) determines which keyboard layout a VNC
session should use by default.
Possible values:
* A keyboard layout which is supported by the underlying hypervisor on
this node. This is usually an 'IETF language tag' (for example
'en-us'). If you use QEMU as hypervisor, you should find the list
of supported keyboard layouts at ``/usr/share/qemu/keymaps``.
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
# TODO(sfinucan): This should be an IPOpt
vncserver_listen = cfg.StrOpt(
'vncserver_listen',
default='127.0.0.1',
deprecated_group='DEFAULT',
help="""
The IP address on which an instance should listen to for incoming VNC
connection requests on this node.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
# TODO(sfinucan): This should be an IPOpt
vncserver_proxyclient_address = cfg.StrOpt(
'vncserver_proxyclient_address',
default='127.0.0.1',
deprecated_group='DEFAULT',
help="""
Private, internal address of VNC console proxy.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients.
This option sets the private address to which proxy clients, such as
``compute-xvpvncproxy``, should connect to.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
# TODO(sfinucan): This should be an IPOpt
novncproxy_host = cfg.StrOpt(
'novncproxy_host',
default='0.0.0.0',
deprecated_group='DEFAULT',
help="""
IP address that the noVNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. noVNC provides
VNC support through a websocket-based client.
This option sets the private address to which the noVNC console proxy
service should bind to.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* novncproxy_port
* novncproxy_base_url
""")
# TODO(sfinucan): This should be a PortOpt
novncproxy_port = cfg.IntOpt(
'novncproxy_port',
default=6080,
min=1,
max=65535,
deprecated_group='DEFAULT',
help="""
Port that the noVNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. noVNC provides
VNC support through a websocket-based client.
This option sets the private port to which the noVNC console proxy
service should bind to.
Possible values:
* A port number
Services which consume this:
* ``compute-compute``
Related options:
* novncproxy_host
* novncproxy_base_url
""")
novncproxy_base_url = cfg.StrOpt(
'novncproxy_base_url',
default='http://127.0.0.1:6080/vnc_auto.html',
deprecated_group='DEFAULT',
help="""
Public address of noVNC VNC console proxy.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. noVNC provides
VNC support through a websocket-based client.
This option sets the public base URL to which client systems will
connect. noVNC clients can use this address to connect to the noVNC
instance and, by extension, the VNC sessions.
Possible values:
* A URL
Services which consume this:
* ``compute-compute``
Related options:
* novncproxy_host
* novncproxy_port
""")
# TODO(sfinucan): This should be an IPOpt
xvpvncproxy_host = cfg.StrOpt(
'xvpvncproxy_host',
default='0.0.0.0',
deprecated_group='DEFAULT',
help="""
IP address that the XVP VNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. Xen provides
the Xenserver VNC Proxy, or XVP, as an alternative to the
websocket-based noVNC proxy used by Libvirt. In contrast to noVNC,
XVP clients are Java-based.
This option sets the private address to which the XVP VNC console proxy
service should bind to.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* xvpvncproxy_port
* xvpvncproxy_base_url
""")
# TODO(sfinucan): This should be a PortOpt
xvpvncproxy_port = cfg.IntOpt(
'xvpvncproxy_port',
default=6081,
min=1,
max=65535,
deprecated_group='DEFAULT',
help="""
Port that the XVP VNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. Xen provides
the Xenserver VNC Proxy, or XVP, as an alternative to the
websocket-based noVNC proxy used by Libvirt. In contrast to noVNC,
XVP clients are Java-based.
This option sets the private port to which the XVP VNC console proxy
service should bind to.
Possible values:
* A port number
Services which consume this:
* ``compute-compute``
Related options:
* xvpvncproxy_host
* xvpvncproxy_base_url
""")
xvpvncproxy_base_url = cfg.StrOpt(
'xvpvncproxy_base_url',
default='http://127.0.0.1:6081/console',
deprecated_group='DEFAULT',
help="""
Public address of XVP VNC console proxy.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. Xen provides
the Xenserver VNC Proxy, or XVP, as an alternative to the
websocket-based noVNC proxy used by Libvirt. In contrast to noVNC,
XVP clients are Java-based.
This option sets the public base URL to which client systems will
connect. XVP clients can use this address to connect to the XVP
instance and, by extension, the VNC sessions.
Possible values:
* A URL
Services which consume this:
* ``compute-compute``
Related options:
* xvpvncproxy_host
* xvpvncproxy_port
""")
ALL_OPTS = [
enabled,
keymap,
vncserver_listen,
vncserver_proxyclient_address,
novncproxy_host,
novncproxy_port,
novncproxy_base_url,
xvpvncproxy_host,
xvpvncproxy_port,
xvpvncproxy_base_url]
CLI_OPTS = [
novncproxy_host,
novncproxy_port]
def register_opts(conf):
conf.register_group(vnc_group)
conf.register_opts(ALL_OPTS, group=vnc_group)
def register_cli_opts(conf):
conf.register_cli_opts(CLI_OPTS, group=vnc_group)
def list_opts():
return {vnc_group: ALL_OPTS}
| apache-2.0 | 3,083,081,730,921,231,400 | 22.486567 | 78 | 0.734875 | false |
thpmacedo/granjaSucker | granjaUpdateStatistics.py | 1 | 11815 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
import logging
import time
import sqlite3
from os.path import basename
################################################################################
# STATIC DEF
################################################################################
PATH_GRANJA_DB = 'sqlite/granjaResult.sqlite'
################################################################################
# GLOBAL DEF
################################################################################
################################################################################
################################################################################
def updateStatistics():
func_name = sys._getframe().f_code.co_name
logger = logging.getLogger(func_name)
logger.debug(PATH_GRANJA_DB)
dbConnection = sqlite3.connect(PATH_GRANJA_DB)
dbCursor = dbConnection.cursor()
####################
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KGV RACE TRACKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KGV RACE TRANCKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KVG RACE TRACKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KGV RACE TRANKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "CIRUITO", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "CIRCUITO", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "CRICUITO", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "-", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = trim(trackConfig);''')
dbCursor.execute('''UPDATE RACES SET trackConfig = ltrim(trackConfig, '0');''')
dbCursor.execute('''UPDATE RACES SET trackConfig = trim(trackConfig);''')
dbConnection.commit()
####################
dbCursor.execute('''DROP TABLE IF EXISTS LAST_RACES;''')
dbCursor.execute('''CREATE TABLE LAST_RACES AS
SELECT raceId,driverClass,trackConfig,COUNT(kartNumber) AS gridSize
FROM races GROUP BY raceId ORDER BY raceId DESC LIMIT 100;''')
dbCursor.execute('''DROP VIEW IF EXISTS VIEW_LAST_RACES;''')
dbCursor.execute('''CREATE VIEW VIEW_LAST_RACES AS
SELECT driverClass,COUNT(raceId) AS qtRaces,MAX(raceId) as lastRaceId
FROM LAST_RACES GROUP BY driverClass;''')
dbCursor.execute('''DROP VIEW IF EXISTS VIEW_LAST_RACES_PER_TRACK;''')
dbCursor.execute('''CREATE VIEW VIEW_LAST_RACES_PER_TRACK AS
SELECT driverClass,trackConfig,COUNT(raceId) AS qtRaces,MAX(raceId) as lastRaceId
FROM LAST_RACES GROUP BY driverClass,trackConfig;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS INDOOR_RANKING_LAPTIME_C_MODA;''')
dbCursor.execute('''CREATE TABLE INDOOR_RANKING_LAPTIME_C_MODA AS
SELECT kartNumber, driverName, MIN(bestLapTime) AS 'BEST_LAP', AVG(bestLapTime) AS 'AVG_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass = 'INDOOR'
AND trackConfig IN (SELECT trackConfig FROM VIEW_LAST_RACES_PER_TRACK WHERE driverClass = 'INDOOR' ORDER BY qtRaces DESC LIMIT 1)
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber
ORDER BY BEST_LAP;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS PAROLIN_RANKING_LAPTIME_C_MODA;''')
dbCursor.execute('''CREATE TABLE PAROLIN_RANKING_LAPTIME_C_MODA AS
SELECT kartNumber, driverName, MIN(bestLapTime) AS 'BEST_LAP', AVG(bestLapTime) AS 'AVG_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass = 'PAROLIN'
AND trackConfig IN (SELECT trackConfig FROM VIEW_LAST_RACES_PER_TRACK WHERE driverClass = 'PAROLIN' ORDER BY qtRaces DESC LIMIT 1)
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber
ORDER BY BEST_LAP;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS GERAL_RANKING_LAPTIME_C_MODA;''')
dbCursor.execute('''CREATE TABLE GERAL_RANKING_LAPTIME_C_MODA AS
SELECT driverClass, driverName, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE
        trackConfig IN (SELECT trackConfig FROM (SELECT trackConfig,COUNT(*) AS qt FROM RACES GROUP BY trackConfig ORDER BY qt DESC LIMIT 1))
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY driverClass
ORDER BY BEST_LAP;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS GERAL_RANKING_LAPTIME;''')
dbCursor.execute('''CREATE TABLE GERAL_RANKING_LAPTIME AS
SELECT trackConfig, driverName, driverClass, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE
(driverClass='INDOOR' OR driverClass='PAROLIN')
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY trackConfig;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS ALLTIME_RANKING_LAPTIME;''')
dbCursor.execute('''CREATE TABLE ALLTIME_RANKING_LAPTIME AS
SELECT trackConfig, driverName, driverClass, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
GROUP BY trackConfig;''')
dbCursor.execute('''DROP TABLE IF EXISTS ALLTIME_RANKING_LAPTIME_INDOOR;''')
dbCursor.execute('''CREATE TABLE ALLTIME_RANKING_LAPTIME_INDOOR AS
SELECT trackConfig, driverName, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass='INDOOR'
GROUP BY trackConfig;''')
dbCursor.execute('''DROP TABLE IF EXISTS ALLTIME_RANKING_LAPTIME_PAROLIN;''')
dbCursor.execute('''CREATE TABLE ALLTIME_RANKING_LAPTIME_PAROLIN AS
SELECT trackConfig, driverName, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass='PAROLIN'
GROUP BY trackConfig;''')
dbConnection.commit()
####################
# CKC_BI_INDOOR
####################
dbCursor.execute('''DROP TABLE IF EXISTS INDOOR_KART_POS_FINISH;''')
dbCursor.execute('''CREATE TABLE INDOOR_KART_POS_FINISH AS
SELECT kartNumber, positionFinish, COUNT(*) AS posCount
FROM races
WHERE driverClass='INDOOR' AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber, positionFinish;''')
dbCursor.execute('''DROP TABLE IF EXISTS INDOOR_RANKING_PODIUM;''')
dbCursor.execute('''CREATE TABLE INDOOR_RANKING_PODIUM AS
SELECT
*
,(0.40*ifnull(qt1,0) + 0.25*ifnull(qt2,0) + 0.15*ifnull(qt3,0) + 0.10*ifnull(qt4,0) + 0.07*ifnull(qt5,0) + 0.03*ifnull(qt6,0)) / qtRaces AS PODIUM_RATE
,ifnull(1.0*qt1,0) / qtRaces AS p1ratio
,ifnull(1.0*qt2,0) / qtRaces AS p2ratio
,ifnull(1.0*qt3,0) / qtRaces AS p3ratio
,ifnull(1.0*qt4,0) / qtRaces AS p4ratio
,ifnull(1.0*qt5,0) / qtRaces AS p5ratio
,ifnull(1.0*qt6,0) / qtRaces AS p6ratio
FROM (
SELECT kartNumber,
SUM(posCount) AS qtRaces
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=1) AS qt1
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=2) AS qt2
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=3) AS qt3
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=4) AS qt4
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=5) AS qt5
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=6) AS qt6
FROM INDOOR_KART_POS_FINISH e
GROUP BY kartNumber
)
WHERE qtRaces > 30
ORDER BY PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_INDOOR_RANKING_PODIUM AS
SELECT * FROM INDOOR_RANKING_PODIUM A ORDER BY A.PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_INDOOR_RANKING_LAPTIME_C_MODA AS
SELECT * FROM INDOOR_RANKING_LAPTIME_C_MODA A ORDER BY A.BEST_LAP ASC;''')
dbCursor.execute('''DROP TABLE IF EXISTS CKC_BI_INDOOR;''')
dbCursor.execute('''CREATE TABLE CKC_BI_INDOOR AS
SELECT P.kartNumber
,P.qt1,P.qt2,P.qt3,P.qt4,P.qt5,P.qt6,P.qtRaces
,P.PODIUM_RATE
,P.rowid AS RANK_PODIUM
,T.BEST_LAP
,T.AVG_LAP
,T.rowid AS RANK_LAPTIME
,0.0125 * (P.rowid + T.rowid) AS SCORE
FROM TEMP_INDOOR_RANKING_PODIUM P,TEMP_INDOOR_RANKING_LAPTIME_C_MODA T
WHERE P.kartNumber=T.kartNumber
GROUP BY P.kartNumber
ORDER BY SCORE;''')
#,0.0125 * (P.rowid + T.rowid) AS SCORE
#,0.00625 * (P.rowid + 3 * T.rowid) AS SCORE
# 1/(40+40) = .0125
# 1/(40+3*40) = .00625
dbConnection.commit()
####################
# CKC_BI_PAROLIN
####################
dbCursor.execute('''DROP TABLE IF EXISTS PAROLIN_KART_POS_FINISH;''')
dbCursor.execute('''CREATE TABLE PAROLIN_KART_POS_FINISH AS
SELECT kartNumber, positionFinish, COUNT(*) AS posCount
FROM races
WHERE driverClass='PAROLIN' AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber, positionFinish;''')
dbCursor.execute('''DROP TABLE IF EXISTS PAROLIN_RANKING_PODIUM;''')
dbCursor.execute('''CREATE TABLE PAROLIN_RANKING_PODIUM AS
SELECT *,(0.28*ifnull(qt1,0) + 0.20*ifnull(qt2,0) + 0.17*ifnull(qt3,0) + 0.14*ifnull(qt4,0) + 0.11*ifnull(qt5,0) + 0.09*ifnull(qt6,0)) / qtRaces AS PODIUM_RATE
FROM (
SELECT kartNumber,
SUM(posCount) AS qtRaces
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=1) AS qt1
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=2) AS qt2
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=3) AS qt3
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=4) AS qt4
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=5) AS qt5
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=6) AS qt6
FROM PAROLIN_KART_POS_FINISH e
GROUP BY kartNumber
)
WHERE qtRaces > 30
ORDER BY PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_PAROLIN_RANKING_PODIUM AS
SELECT * FROM PAROLIN_RANKING_PODIUM A ORDER BY A.PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_PAROLIN_RANKING_LAPTIME_C_MODA AS
SELECT * FROM PAROLIN_RANKING_LAPTIME_C_MODA A ORDER BY A.BEST_LAP ASC;''')
dbCursor.execute('''DROP TABLE IF EXISTS CKC_BI_PAROLIN;''')
dbCursor.execute('''CREATE TABLE CKC_BI_PAROLIN AS
SELECT P.kartNumber
,P.qt1,P.qt2,P.qt3,P.qt4,P.qt5,P.qt6,P.qtRaces
,P.PODIUM_RATE
,P.rowid AS RANK_PODIUM
,T.BEST_LAP
,T.AVG_LAP
,T.rowid AS RANK_LAPTIME
,0.00625 * (P.rowid + 3 * T.rowid) AS SCORE
FROM TEMP_PAROLIN_RANKING_PODIUM P,TEMP_PAROLIN_RANKING_LAPTIME_C_MODA T
WHERE P.kartNumber=T.kartNumber
GROUP BY P.kartNumber
ORDER BY SCORE;''')
dbConnection.commit()
####################
####################
dbConnection.execute('''VACUUM;''')
dbConnection.commit()
####################
dbConnection.close()
###
logger.debug("DONE")
################################################################################
# MAIN
################################################################################
def main():
appName = sys.argv[0]
logging.basicConfig(
# filename = './log/' + appName + '_' + time.strftime("%Y%m%d_%H%M%S") + '.log',
        datefmt = '%Y-%m-%d %H:%M:%S',
format = '%(asctime)s | %(levelname)s | %(name)s | %(message)s',
level = logging.INFO
)
func_name = sys._getframe().f_code.co_name
logger = logging.getLogger(func_name)
logger.info('Started')
###
updateStatistics()
###
logger.info('Finished')
################################################################################
################################################################################
if __name__ == '__main__':
main()
| gpl-3.0 | 7,718,151,535,844,989,000 | 43.417293 | 161 | 0.653915 | false |
ilogue/pyrsa | tests/test_noise_ceiling.py | 1 | 1800 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 18 16:59:33 2020
@author: heiko
"""
import unittest
import numpy as np
from parameterized import parameterized
class TestNoiseCeiling(unittest.TestCase):
def test_cv_noise_ceiling(self):
from pyrsa.inference import cv_noise_ceiling
from pyrsa.inference import sets_k_fold_rdm
from pyrsa.rdm import RDMs
        dis = np.random.rand(11, 10)  # 11 RDMs over 5 conditions (10 pairwise dissimilarities each)
mes = "Euclidean"
des = {'subj': 0}
rdm_des = {'session': np.array([1, 1, 2, 2, 4, 5, 6, 7, 7, 7, 7])}
pattern_des = {'type': np.array([0, 1, 2, 2, 4])}
rdms = RDMs(
dissimilarities=dis,
rdm_descriptors=rdm_des,
pattern_descriptors=pattern_des,
dissimilarity_measure=mes,
descriptors=des
)
_, test_set, ceil_set = sets_k_fold_rdm(rdms, k_rdm=3, random=False)
_, _ = cv_noise_ceiling(rdms, ceil_set, test_set, method='cosine')
@parameterized.expand([
['cosine'],
['rho-a'],
['tau-a'],
['spearman'],
['corr'],
])
def test_boot_noise_ceiling_runs_for_method(self, method):
from pyrsa.inference import boot_noise_ceiling
from pyrsa.rdm import RDMs
        dis = np.random.rand(11, 10)  # 11 RDMs over 5 conditions (10 pairwise dissimilarities each)
mes = "Euclidean"
des = {'subj': 0}
rdm_des = {'session': np.array([1, 1, 2, 2, 4, 5, 6, 7, 7, 7, 7])}
pattern_des = {'type': np.array([0, 1, 2, 2, 4])}
rdms = RDMs(
dissimilarities=dis,
rdm_descriptors=rdm_des,
pattern_descriptors=pattern_des,
dissimilarity_measure=mes,
descriptors=des
)
_, _ = boot_noise_ceiling(rdms, method=method)
| lgpl-3.0 | 5,800,685,468,620,511,000 | 30.578947 | 76 | 0.552222 | false |
pantuza/art-gallery | src/triangle.py | 1 | 1522 | # -*- coding:utf-8 -*-
from point import Point
from side import Side
class Triangle(object):
""" Class representing a Triangle that is composed by
three Point objects
"""
def __init__(self, u, v, w):
if not all(isinstance(point, Point) for point in (u, v, w)):
raise TypeError("u, v, w must be Point objects", (u, v, w))
self.u, self.v, self.w = u, v, w
def __repr__(self):
return "[(%s, %s), (%s, %s), (%s, %s)]" \
% (self.u.x, self.u.y, self.v.x, self.v.y, self.w.x, self.w.y)
def __iter__(self):
yield self.u
yield self.v
yield self.w
def sides(self):
return (Side(self.u, self.v), Side(self.v, self.w), Side(self.w, self.u))
def opposite(self, side):
if self.u == side.p0:
if self.v == side.p1:
return self.w
else:
return self.v
elif self.u == side.p1:
if self.v == side.p0:
return self.w
else:
return self.v
return self.u
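    # Example: for t = Triangle(u, v, w), t.opposite(Side(u, v)) is w, the
    # vertex that does not belong to the given side.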
# Testing class
if __name__ == "__main__":
u = Point(0, 2)
v = Point(2, 0)
w = Point(5, 5)
triangle = Triangle(u, v, w)
print triangle
print "Point u = %s" % str(triangle.u)
print "Point v = %s" % str(triangle.v)
print "Point w = %s" % str(triangle.w)
# Testing class iterability
for point in triangle:
print point
    # Testing the exception
    try:
        Triangle(None, None, None)
    except TypeError as error:
        print error
| gpl-2.0 | -6,126,048,377,337,462,000 | 23.95082 | 81 | 0.507884 | false |
kidscancode/gamedev | tutorials/tilemap/part 18/sprites.py | 1 | 8372 | import pygame as pg
from random import uniform, choice, randint, random
from settings import *
from tilemap import collide_hit_rect
import pytweening as tween
vec = pg.math.Vector2
def collide_with_walls(sprite, group, dir):
if dir == 'x':
hits = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
if hits:
if hits[0].rect.centerx > sprite.hit_rect.centerx:
sprite.pos.x = hits[0].rect.left - sprite.hit_rect.width / 2
if hits[0].rect.centerx < sprite.hit_rect.centerx:
sprite.pos.x = hits[0].rect.right + sprite.hit_rect.width / 2
sprite.vel.x = 0
sprite.hit_rect.centerx = sprite.pos.x
if dir == 'y':
hits = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
if hits:
if hits[0].rect.centery > sprite.hit_rect.centery:
sprite.pos.y = hits[0].rect.top - sprite.hit_rect.height / 2
if hits[0].rect.centery < sprite.hit_rect.centery:
sprite.pos.y = hits[0].rect.bottom + sprite.hit_rect.height / 2
sprite.vel.y = 0
sprite.hit_rect.centery = sprite.pos.y
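# Usage pattern (as in the update methods below): move along one axis, then
# resolve that axis before moving along the other, e.g.
#   sprite.hit_rect.centerx = sprite.pos.x
#   collide_with_walls(sprite, game.walls, 'x')
# Resolving x and y separately is what lets sprites slide along walls.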
class Player(pg.sprite.Sprite):
def __init__(self, game, x, y):
self._layer = PLAYER_LAYER
self.groups = game.all_sprites
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.player_img
self.rect = self.image.get_rect()
self.rect.center = (x, y)
self.hit_rect = PLAYER_HIT_RECT
self.hit_rect.center = self.rect.center
self.vel = vec(0, 0)
self.pos = vec(x, y)
self.rot = 0
self.last_shot = 0
self.health = PLAYER_HEALTH
def get_keys(self):
self.rot_speed = 0
self.vel = vec(0, 0)
keys = pg.key.get_pressed()
if keys[pg.K_LEFT] or keys[pg.K_a]:
self.rot_speed = PLAYER_ROT_SPEED
if keys[pg.K_RIGHT] or keys[pg.K_d]:
self.rot_speed = -PLAYER_ROT_SPEED
if keys[pg.K_UP] or keys[pg.K_w]:
self.vel = vec(PLAYER_SPEED, 0).rotate(-self.rot)
if keys[pg.K_DOWN] or keys[pg.K_s]:
self.vel = vec(-PLAYER_SPEED / 2, 0).rotate(-self.rot)
if keys[pg.K_SPACE]:
now = pg.time.get_ticks()
if now - self.last_shot > BULLET_RATE:
self.last_shot = now
dir = vec(1, 0).rotate(-self.rot)
pos = self.pos + BARREL_OFFSET.rotate(-self.rot)
Bullet(self.game, pos, dir)
self.vel = vec(-KICKBACK, 0).rotate(-self.rot)
choice(self.game.weapon_sounds['gun']).play()
MuzzleFlash(self.game, pos)
def update(self):
self.get_keys()
self.rot = (self.rot + self.rot_speed * self.game.dt) % 360
self.image = pg.transform.rotate(self.game.player_img, self.rot)
self.rect = self.image.get_rect()
self.rect.center = self.pos
self.pos += self.vel * self.game.dt
self.hit_rect.centerx = self.pos.x
collide_with_walls(self, self.game.walls, 'x')
self.hit_rect.centery = self.pos.y
collide_with_walls(self, self.game.walls, 'y')
self.rect.center = self.hit_rect.center
def add_health(self, amount):
self.health += amount
if self.health > PLAYER_HEALTH:
self.health = PLAYER_HEALTH
class Mob(pg.sprite.Sprite):
def __init__(self, game, x, y):
self._layer = MOB_LAYER
self.groups = game.all_sprites, game.mobs
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.mob_img.copy()
self.rect = self.image.get_rect()
self.rect.center = (x, y)
self.hit_rect = MOB_HIT_RECT.copy()
self.hit_rect.center = self.rect.center
self.pos = vec(x, y)
self.vel = vec(0, 0)
self.acc = vec(0, 0)
self.rect.center = self.pos
self.rot = 0
self.health = MOB_HEALTH
self.speed = choice(MOB_SPEEDS)
self.target = game.player
def avoid_mobs(self):
for mob in self.game.mobs:
if mob != self:
dist = self.pos - mob.pos
if 0 < dist.length() < AVOID_RADIUS:
self.acc += dist.normalize()
def update(self):
target_dist = self.target.pos - self.pos
if target_dist.length_squared() < DETECT_RADIUS**2:
if random() < 0.002:
choice(self.game.zombie_moan_sounds).play()
self.rot = target_dist.angle_to(vec(1, 0))
self.image = pg.transform.rotate(self.game.mob_img, self.rot)
self.rect.center = self.pos
self.acc = vec(1, 0).rotate(-self.rot)
self.avoid_mobs()
self.acc.scale_to_length(self.speed)
self.acc += self.vel * -1
self.vel += self.acc * self.game.dt
self.pos += self.vel * self.game.dt + 0.5 * self.acc * self.game.dt ** 2
self.hit_rect.centerx = self.pos.x
collide_with_walls(self, self.game.walls, 'x')
self.hit_rect.centery = self.pos.y
collide_with_walls(self, self.game.walls, 'y')
self.rect.center = self.hit_rect.center
if self.health <= 0:
choice(self.game.zombie_hit_sounds).play()
self.kill()
def draw_health(self):
if self.health > 60:
col = GREEN
elif self.health > 30:
col = YELLOW
else:
col = RED
width = int(self.rect.width * self.health / MOB_HEALTH)
self.health_bar = pg.Rect(0, 0, width, 7)
if self.health < MOB_HEALTH:
pg.draw.rect(self.image, col, self.health_bar)
class Bullet(pg.sprite.Sprite):
def __init__(self, game, pos, dir):
self._layer = BULLET_LAYER
self.groups = game.all_sprites, game.bullets
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.bullet_img
self.rect = self.image.get_rect()
self.hit_rect = self.rect
self.pos = vec(pos)
self.rect.center = pos
spread = uniform(-GUN_SPREAD, GUN_SPREAD)
self.vel = dir.rotate(spread) * BULLET_SPEED
self.spawn_time = pg.time.get_ticks()
def update(self):
self.pos += self.vel * self.game.dt
self.rect.center = self.pos
if pg.sprite.spritecollideany(self, self.game.walls):
self.kill()
if pg.time.get_ticks() - self.spawn_time > BULLET_LIFETIME:
self.kill()
class Obstacle(pg.sprite.Sprite):
def __init__(self, game, x, y, w, h):
self.groups = game.walls
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.rect = pg.Rect(x, y, w, h)
self.hit_rect = self.rect
self.x = x
self.y = y
self.rect.x = x
self.rect.y = y
class MuzzleFlash(pg.sprite.Sprite):
def __init__(self, game, pos):
self._layer = EFFECTS_LAYER
self.groups = game.all_sprites
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
size = randint(20, 50)
self.image = pg.transform.scale(choice(game.gun_flashes), (size, size))
self.rect = self.image.get_rect()
self.pos = pos
self.rect.center = pos
self.spawn_time = pg.time.get_ticks()
def update(self):
if pg.time.get_ticks() - self.spawn_time > FLASH_DURATION:
self.kill()
class Item(pg.sprite.Sprite):
def __init__(self, game, pos, type):
self._layer = ITEMS_LAYER
self.groups = game.all_sprites, game.items
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.item_images[type]
self.rect = self.image.get_rect()
self.type = type
self.pos = pos
self.rect.center = pos
self.tween = tween.easeInOutSine
self.step = 0
self.dir = 1
def update(self):
# bobbing motion
offset = BOB_RANGE * (self.tween(self.step / BOB_RANGE) - 0.5)
self.rect.centery = self.pos.y + offset * self.dir
self.step += BOB_SPEED
if self.step > BOB_RANGE:
self.step = 0
self.dir *= -1
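        # Worked example (assuming BOB_RANGE = 15 and the easeInOutSine tween):
        # at step = 7.5 the tween returns 0.5, so offset = 15 * (0.5 - 0.5) = 0;
        # at the ends of the cycle the tween gives 0.0 and 1.0, i.e. offsets of
        # -7.5 and +7.5 pixels around the item's rest position.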
| mit | 4,963,443,543,521,539,000 | 36.711712 | 84 | 0.556976 | false |
bes422/JDI | Python/JDI/core/logger/jdi_logger.py | 1 | 1215 | import logging
from JDI.core.logger.log_levels import LogLevels
class JDILogger(object):
def __init__(self, name="JDI Logger"):
self.logger = logging.getLogger(name)
self.__basic_settings()
log_level = {LogLevels.INFO, LogLevels.FATAL}
def info(self, log_msg):
if LogLevels.INFO in self.log_level:
self.logger.info(log_msg)
def debug(self, log_msg):
if LogLevels.DEBUG in self.log_level:
self.logger.debug(log_msg)
def fatal(self, log_msg):
if LogLevels.FATAL in self.log_level:
self.logger.fatal(log_msg)
def warning(self, log_msg):
if LogLevels.WARNING in self.log_level:
self.logger.warning(log_msg)
def error(self, log_msg):
if LogLevels.ERROR in self.log_level:
self.logger.error(log_msg)
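    # Usage sketch: JDILogger("suite").info("started") reaches jdi.log because
    # LogLevels.INFO is in log_level; debug("...") is dropped by the check
    # above until LogLevels.DEBUG is added to that set.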
def __basic_settings(self):
        # Set the level once and keep the underlying logger permissive (DEBUG);
        # per-method filtering is handled by the log_level set above.
        self.logger.setLevel(LogLevels.DEBUG.value[0])
hdlr = logging.FileHandler('jdi.log')
hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
self.logger.addHandler(hdlr)
| gpl-3.0 | 395,174,346,117,769,300 | 28.634146 | 85 | 0.634568 | false |
beav/pulp | server/pulp/plugins/conduits/upload.py | 2 | 1171 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from pulp.plugins.conduits.mixins import (
AddUnitMixin, SingleRepoUnitsMixin, SearchUnitsMixin,
ImporterConduitException)
class UploadConduit(AddUnitMixin, SingleRepoUnitsMixin, SearchUnitsMixin):
def __init__(self, repo_id, importer_id, association_owner_type,
association_owner_id):
AddUnitMixin.__init__(self, repo_id, importer_id,
association_owner_type, association_owner_id)
SingleRepoUnitsMixin.__init__(self, repo_id, ImporterConduitException)
SearchUnitsMixin.__init__(self, ImporterConduitException)
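# Plugin-side sketch (hypothetical; the method names come from the mixins' API
# and should be treated as an assumption here):
#   unit = conduit.init_unit(type_id, unit_key, metadata, relative_path)
#   conduit.save_unit(unit)
# Each mixin contributes one capability (adding units, reading the repo's
# units, searching), wired to the same repo_id.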
| gpl-2.0 | -1,919,100,708,639,992,800 | 45.8 | 78 | 0.723932 | false |
alunduil/fig | compose/project.py | 1 | 12689 | from __future__ import absolute_import
from __future__ import unicode_literals
import logging
from functools import reduce
from docker.errors import APIError
from .config import ConfigurationError
from .config import get_service_name_from_net
from .const import DEFAULT_TIMEOUT
from .const import LABEL_ONE_OFF
from .const import LABEL_PROJECT
from .const import LABEL_SERVICE
from .container import Container
from .legacy import check_for_legacy_containers
from .service import Service
from .utils import parallel_execute
log = logging.getLogger(__name__)
def sort_service_dicts(services):
# Topological sort (Cormen/Tarjan algorithm).
unmarked = services[:]
temporary_marked = set()
sorted_services = []
def get_service_names(links):
return [link.split(':')[0] for link in links]
def get_service_dependents(service_dict, services):
name = service_dict['name']
return [
service for service in services
if (name in get_service_names(service.get('links', [])) or
name in service.get('volumes_from', []) or
name == get_service_name_from_net(service.get('net')))
]
def visit(n):
if n['name'] in temporary_marked:
if n['name'] in get_service_names(n.get('links', [])):
raise DependencyError('A service can not link to itself: %s' % n['name'])
if n['name'] in n.get('volumes_from', []):
raise DependencyError('A service can not mount itself as volume: %s' % n['name'])
else:
raise DependencyError('Circular import between %s' % ' and '.join(temporary_marked))
if n in unmarked:
temporary_marked.add(n['name'])
for m in get_service_dependents(n, services):
visit(m)
temporary_marked.remove(n['name'])
unmarked.remove(n)
sorted_services.insert(0, n)
while unmarked:
visit(unmarked[-1])
return sorted_services
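# Worked example (hypothetical service dicts):
#   db = {'name': 'db'}
#   web = {'name': 'web', 'links': ['db:database']}
# sort_service_dicts([web, db]) returns [db, web]: dependencies come out first,
# and self-links or circular references raise DependencyError instead.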
class Project(object):
"""
A collection of services.
"""
def __init__(self, name, services, client):
self.name = name
self.services = services
self.client = client
def labels(self, one_off=False):
return [
'{0}={1}'.format(LABEL_PROJECT, self.name),
'{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False"),
]
@classmethod
def from_dicts(cls, name, service_dicts, client):
"""
Construct a ServiceCollection from a list of dicts representing services.
"""
project = cls(name, [], client)
for service_dict in sort_service_dicts(service_dicts):
links = project.get_links(service_dict)
volumes_from = project.get_volumes_from(service_dict)
net = project.get_net(service_dict)
project.services.append(Service(client=client, project=name, links=links, net=net,
volumes_from=volumes_from, **service_dict))
return project
@property
def service_names(self):
return [service.name for service in self.services]
def get_service(self, name):
"""
Retrieve a service by name. Raises NoSuchService
if the named service does not exist.
"""
for service in self.services:
if service.name == name:
return service
raise NoSuchService(name)
def validate_service_names(self, service_names):
"""
Validate that the given list of service names only contains valid
services. Raises NoSuchService if one of the names is invalid.
"""
valid_names = self.service_names
for name in service_names:
if name not in valid_names:
raise NoSuchService(name)
def get_services(self, service_names=None, include_deps=False):
"""
Returns a list of this project's services filtered
by the provided list of names, or all services if service_names is None
or [].
If include_deps is specified, returns a list including the dependencies for
service_names, in order of dependency.
Preserves the original order of self.services where possible,
reordering as needed to resolve dependencies.
Raises NoSuchService if any of the named services do not exist.
"""
if service_names is None or len(service_names) == 0:
return self.get_services(
service_names=self.service_names,
include_deps=include_deps
)
else:
unsorted = [self.get_service(name) for name in service_names]
services = [s for s in self.services if s in unsorted]
if include_deps:
services = reduce(self._inject_deps, services, [])
uniques = []
[uniques.append(s) for s in services if s not in uniques]
return uniques
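    # Example (hypothetical): if 'web' links to 'db', then
    #   project.get_services(['web'], include_deps=True)
    # returns [db_service, web_service]: dependencies first, duplicates removed.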
def get_links(self, service_dict):
links = []
if 'links' in service_dict:
for link in service_dict.get('links', []):
if ':' in link:
service_name, link_name = link.split(':', 1)
else:
service_name, link_name = link, None
try:
links.append((self.get_service(service_name), link_name))
except NoSuchService:
raise ConfigurationError('Service "%s" has a link to service "%s" which does not exist.' % (service_dict['name'], service_name))
del service_dict['links']
return links
def get_volumes_from(self, service_dict):
volumes_from = []
if 'volumes_from' in service_dict:
for volume_name in service_dict.get('volumes_from', []):
try:
service = self.get_service(volume_name)
volumes_from.append(service)
except NoSuchService:
try:
container = Container.from_id(self.client, volume_name)
volumes_from.append(container)
except APIError:
raise ConfigurationError('Service "%s" mounts volumes from "%s", which is not the name of a service or container.' % (service_dict['name'], volume_name))
del service_dict['volumes_from']
return volumes_from
def get_net(self, service_dict):
if 'net' in service_dict:
net_name = get_service_name_from_net(service_dict.get('net'))
if net_name:
try:
net = self.get_service(net_name)
except NoSuchService:
try:
net = Container.from_id(self.client, net_name)
except APIError:
raise ConfigurationError('Service "%s" is trying to use the network of "%s", which is not the name of a service or container.' % (service_dict['name'], net_name))
else:
net = service_dict['net']
del service_dict['net']
else:
net = None
return net
def start(self, service_names=None, **options):
for service in self.get_services(service_names):
service.start(**options)
def stop(self, service_names=None, **options):
parallel_execute(
objects=self.containers(service_names),
obj_callable=lambda c: c.stop(**options),
msg_index=lambda c: c.name,
msg="Stopping"
)
def pause(self, service_names=None, **options):
for service in reversed(self.get_services(service_names)):
service.pause(**options)
def unpause(self, service_names=None, **options):
for service in self.get_services(service_names):
service.unpause(**options)
def kill(self, service_names=None, **options):
parallel_execute(
objects=self.containers(service_names),
obj_callable=lambda c: c.kill(**options),
msg_index=lambda c: c.name,
msg="Killing"
)
def remove_stopped(self, service_names=None, **options):
all_containers = self.containers(service_names, stopped=True)
stopped_containers = [c for c in all_containers if not c.is_running]
parallel_execute(
objects=stopped_containers,
obj_callable=lambda c: c.remove(**options),
msg_index=lambda c: c.name,
msg="Removing"
)
def restart(self, service_names=None, **options):
for service in self.get_services(service_names):
service.restart(**options)
def build(self, service_names=None, no_cache=False):
for service in self.get_services(service_names):
if service.can_be_built():
service.build(no_cache)
else:
log.info('%s uses an image, skipping' % service.name)
def up(self,
service_names=None,
start_deps=True,
allow_recreate=True,
force_recreate=False,
do_build=True,
timeout=DEFAULT_TIMEOUT):
if force_recreate and not allow_recreate:
raise ValueError("force_recreate and allow_recreate are in conflict")
services = self.get_services(service_names, include_deps=start_deps)
for service in services:
service.remove_duplicate_containers()
plans = self._get_convergence_plans(
services,
allow_recreate=allow_recreate,
force_recreate=force_recreate,
)
return [
container
for service in services
for container in service.execute_convergence_plan(
plans[service.name],
do_build=do_build,
timeout=timeout
)
]
def _get_convergence_plans(self,
services,
allow_recreate=True,
force_recreate=False):
plans = {}
for service in services:
updated_dependencies = [
name
for name in service.get_dependency_names()
if name in plans
and plans[name].action == 'recreate'
]
if updated_dependencies and allow_recreate:
log.debug(
'%s has upstream changes (%s)',
service.name, ", ".join(updated_dependencies),
)
plan = service.convergence_plan(
allow_recreate=allow_recreate,
force_recreate=True,
)
else:
plan = service.convergence_plan(
allow_recreate=allow_recreate,
force_recreate=force_recreate,
)
plans[service.name] = plan
return plans
def pull(self, service_names=None):
for service in self.get_services(service_names, include_deps=True):
service.pull()
def containers(self, service_names=None, stopped=False, one_off=False):
if service_names:
self.validate_service_names(service_names)
else:
service_names = self.service_names
containers = list(filter(None, [
Container.from_ps(self.client, container)
for container in self.client.containers(
all=stopped,
filters={'label': self.labels(one_off=one_off)})]))
def matches_service_names(container):
return container.labels.get(LABEL_SERVICE) in service_names
if not containers:
check_for_legacy_containers(
self.client,
self.name,
self.service_names,
)
return [c for c in containers if matches_service_names(c)]
def _inject_deps(self, acc, service):
dep_names = service.get_dependency_names()
if len(dep_names) > 0:
dep_services = self.get_services(
service_names=list(set(dep_names)),
include_deps=True
)
else:
dep_services = []
dep_services.append(service)
return acc + dep_services
class NoSuchService(Exception):
def __init__(self, name):
self.name = name
self.msg = "No such service: %s" % self.name
def __str__(self):
return self.msg
class DependencyError(ConfigurationError):
pass
| apache-2.0 | 1,203,699,659,850,815,200 | 33.294595 | 186 | 0.563953 | false |
asya-bergal/led-matrix-server | CPW.py | 1 | 8468 | import datetime
import urllib
import json
import leds
import time
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
eventMessage = ""
eventMessage2 = ""
eventMessage3 = ""
def CPW(day,hour,minute):
    intro = "Next Event In Random Hall:"
    intro2 = ""
global eventMessage
global eventMessage2
global eventMessage3
eventMessage = ""
eventMessage2 = ""
eventMessage3 = ""
if(day == 16):
if(hour < 16 or (hour == 16 and minute <=17)):
eventMessage = "Knitting Circle Taqueria 14:17 - 16:17 BMF "
elif(hour <19 or (hour == 19 and minute <=17)):
eventMessage = "Clam Olympics 18:17 - 19:17 Clam "
elif (hour <20 or (hour == 20 and minute <=17)):
eventMessage = "Chemistry and Cake w/ LN2 ice cream continued 19:17 - 20:17 Pecker "
elif(hour <23 or (hour == 23 and minute <=59)):
eventMessage = "BBQ and Spinning on the Roofdeck w/ Giga Curry 21:47 - 24:47 Black Hole+Roofdeck "
eventMessage2 = "Five SCPs at Freddy's 22:47 - 24:47 Destiny "
eventMessage3 = "Crazy Cat Lady Make-A-Thon 23:47 - 24:47 Loop "
if(day == 17):
if (hour == 0 and minute <=47):
eventMessage = "BBQ and Spinning on the Roofdeck w/ Giga Curry 21:47 - 24:47 Black Hole+Roofdeck"
eventMessage2 = "Five SCPs at Freddy's 22:47 - 24:47 Destiny"
eventMessage3 = "Crazy Cat Lady Make-A-Thon 23:47 - 24:47 Loop"
elif (hour<12 or (hour == 12 and minute <=17)):
eventMessage = "Nerf Chess 11:17 - 12:17 Foo"
elif (hour<14 or (hour == 14 and minute <=47)):
eventMessage = "Physics and Coffee 2:17 PM - 16:17 Pecker"
eventMessage2 = "Dumpling Hylomorphisms 12:17 PM - 2:47 PM Black Hole"
elif (hour<14 or (hour == 14 and minute <=17)):
eventMessage = "Physics and Coffee 2:17 PM - 16:17 Pecker"
eventMessage2 = "Rocky Horrible's Nerdy Singalong Blog w/ LN2 ice cream 3:47 PM - 17:47 AIW "
elif (hour<17 or (hour == 17 and minute <=47)):
eventMessage = "Rocky Horrible's Nerdy Singalong Blog w/ LN2 ice cream 3:47 PM - 17:47 AIW "
eventMessage2 = "mitBEEF 17:00 - 18:00 Foo "
elif (hour<18 or (hour == 18 and minute <=1)):
eventMessage = "mitBEEF 17:00 - 18:00 Foo"
eventMessage2 = "Math and Tea 17:47 - 20:47 Pecker "
elif (hour<20 or (hour == 20 and minute <=47)):
eventMessage = "Math and Tea 17:47 - 20:47 Pecker "
elif (hour<22 or (hour == 22 and minute <=17)):
eventMessage = "Star Trek on Infinite Loop 20:47 - 22:47 Loop"
eventMessage2 = "Duct Tape Construction w/ Cookies by Committee 21:47 - 11:47 PM Black Hole"
elif (hour<23 or (hour == 23 and minute <=47)):
eventMessage = "Duct Tape Construction w/ Cookies by Committee 21:47 - 11:47 PM Black Hole"
eventMessage2 = "PowerPoint Karaoke + Latte Art 10:47 PM - 12:47 PM Foo "
elif (hour<23 or (hour == 23 and minute <=59)):
eventMessage = "PowerPoint Karaoke + Latte Art 22:47 - 24:47 Foo "
if(day == 18):
if (hour == 0 and minute <= 47):
eventMessage = "PowerPoint Karaoke + Latte Art 10:47 - 24:47 Foo"
elif (hour< 11 or (hour == 11 and minute <= 47)):
eventMessage = "Saturday Morning Breakfast Cartoons w/ Ceiling Tile Painting 9:47 AM - 11:47 AM Loop"
elif (hour< 13 or (hour == 13 and minute <= 17)):
eventMessage = "Epic Mealtime of Destiny 11:47 - 13:17 Destiny"
elif (hour< 15 or (hour == 15 and minute <= 47)):
eventMessage = "Carbonated Fruit! 13:47 - 15:47 Black Hole"
elif (hour< 17 or (hour == 17 and minute <= 17)):
eventMessage = "Storytime with Cruft w/ Liquid Nitrogen Ice Cream and Truffles 15:17 - 17:17 Foo "
eventMessage2 = "Random Plays Randomly + Smash! 16:17 - 17:47 AIW"
elif (hour< 17 or (hour == 17 and minute <= 47)):
eventMessage = "Random Plays Randomly + Smash! 16:17 - 17:47 AIW"
elif (hour< 21 or (hour == 21 and minute <= 47)):
eventMessage = "InterDorm Potluck Event 19:30 - 21:47 Foo "
eventMessage2 = "Chainmail w/ Experimental Smoothies 20:47 - 21:47 Destiny"
elif (hour< 23 or (hour == 23 and minute <= 59)):
eventMessage = "Pecker Board Game Night + Teach You Tichu 21:47 - 24:47 Pecker "
eventMessage2 = "(Almost) Life-Sized Settlers of Catan 21:47 - 24:47 Foo "
if (day == 19):
if (hour == 0 and minute <= 47):
eventMessage = "Pecker Board Game Night + Teach You Tichu 21:47 - 24:47 Pecker"
eventMessage2 = "(Almost) Life-Sized Settlers of Catan 21:47 - 24:47 Foo"
else:
eventMessage = "Tea Time with Teddy 11:47 - 12:47 BMF"
print("1" + eventMessage + "\n 2" + eventMessage2 + "\n 3" + eventMessage3)
font = ImageFont.truetype("/usr/share/fonts/pixelmix.ttf", 8)
widthIntro, ignore = font.getsize(intro)
widthMessage, ignore = font.getsize(intro2 + eventMessage+eventMessage2+eventMessage3)
currentEvents = Image.new("RGB", (widthMessage + 10, 16), "black")
introText = [("Next event in ",(127,63,0)), (eventMessage, (118,13,13)) ]
text = [("R",(127,0,0)), ("a",(127,63,0)),("n",(127,127,0)),("d",(14,86,60)),("o",(10,81,102)),("m",(79,0,127)), (" Hall: ",(127,63,0)), (eventMessage2,(53,45,103)),(eventMessage3,(0,101,44))]
x = 0
for element in introText:
drawIntro = ImageDraw.Draw(currentEvents).text((x,0),element[0], element[1], font=font)
x = x + font.getsize(element[0])[0]
x = 0
count = 0
for element in text:
count += 1
drawCurrentEvents = ImageDraw.Draw(currentEvents).text((x,8),element[0],element[1],font=font)
x = x + font.getsize(element[0])[0]
if count == 7:
x = x + font.getsize("Next event in ")[0] - font.getsize("Random Hall: ")[0]
currentEvents = currentEvents.rotate(180)
currentEvents.save("currentEvents.ppm")
leds.uploadPPM("currentEvents.ppm")
def Weather ():
try:
weatherURL = urllib.URLopener().retrieve("http://api.wunderground.com/api/efb7f164a8ddf6f5/conditions/forecast/q/pws:KMACAMBR9.json","weather.json")
with open("weather.json",'r') as weatherData:
weather = json.load(weatherData)
    except Exception:  # network or parse failure; leave the display unchanged
return None
current = "Currently: " + str(weather["current_observation"]["temp_c"]) + u'\N{DEGREE SIGN}' + "C | " + str(weather["current_observation"]["weather"])
current1 = current.split("|")[0] + "|"
current2 = current.split("|")[1]
forecastFor = "Forecast for " + str(weather["forecast"]["simpleforecast"]["forecastday"][0]["date"]["weekday_short"]) + ": "
forecastHi = "Hi:" + str(weather["forecast"]["simpleforecast"]["forecastday"][0]["high"]["celsius"]) + u'\N{DEGREE SIGN}' + "C "
forecastLo = "Lo:" + str(weather["forecast"]["simpleforecast"]["forecastday"][0]["low"]["celsius"]) + u'\N{DEGREE SIGN}' + "C "
forecastConditions = str(weather["forecast"]["simpleforecast"]["forecastday"][0]["conditions"])
text = [(forecastFor ,(127,63,0)),(forecastHi,(100,4,10)),(forecastLo,(35,82,90)),(forecastConditions,(101,80,80))]
forecast = forecastFor + forecastHi + forecastLo + forecastConditions
font = ImageFont.truetype("/usr/share/fonts/pixelmix.ttf", 8)
widthCurrent, ignore = font.getsize(current)
widthForecast, ignore = font.getsize(forecast)
currentWeather = Image.new("RGB", (widthForecast + 10, 16), "black")
drawCurrentWeather = ImageDraw.Draw(currentWeather).text((0,0),current1, (127,63,0), font=font)
drawCurrentWeather = ImageDraw.Draw(currentWeather).text((font.getsize(current1)[0],0),current2, (101,80,80), font=font)
x = 0
for element in text:
drawCurrentForecast = ImageDraw.Draw(currentWeather).text((x,8),element[0],element[1],font=font)
x = x + font.getsize(element[0])[0]
currentWeather = currentWeather.rotate(180)
currentWeather.save("currentWeather.ppm")
'''
widthHello, ignore = font.getsize('Welcome to Random Hall')
welcome = Image.new('RGB',(widthHello + 32,16),'black')
drawWelcome = ImageDraw.Draw(welcome).text((0,0),'Welcome to Random Hall',(256,126,0),font=font)
welcome = welcome.rotate(180)
welcome.save('welcome.ppm')
leds.uploadPPM("welcome.ppm")
'''
leds.uploadPPM("currentWeather.ppm")
print(current + "\n" + forecastFor + forecastHi + forecastLo + forecastConditions )
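# Fields consumed from the Wunderground response above (values hypothetical):
#   weather["current_observation"]["temp_c"]   e.g. 21
#   weather["current_observation"]["weather"]  e.g. "Clear"
#   weather["forecast"]["simpleforecast"]["forecastday"][0]["high"]["celsius"]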
def updateWeather():
leds.uploadPPM("currentWeather.ppm")
if __name__ == '__main__':
while (True):
Weather()
for i in range(50):
now = datetime.datetime.now()
CPW(now.day,now.hour + 1,now.minute)
sleepTime = (len("Random Hall: ") + len(eventMessage2) + len(eventMessage3))/5 +6
time.sleep(sleepTime)
updateWeather()
time.sleep(10)
| gpl-2.0 | -1,864,003,059,169,031,000 | 47.113636 | 193 | 0.658243 | false |
sindhus/hasjob | hasjob/views/location.py | 2 | 2094 | # -*- coding: utf-8 -*-
from collections import OrderedDict
from datetime import datetime
from flask import redirect, abort
from baseframe.forms import render_form
from ..models import db, agelimit, Location, JobLocation, JobPost, POSTSTATUS
from ..forms import NewLocationForm, EditLocationForm
from .. import app, lastuser
from .helper import location_geodata
@app.route('/in/new', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def location_new():
now = datetime.utcnow()
geonames = OrderedDict([(r.geonameid, None) for r in
db.session.query(JobLocation.geonameid, db.func.count(JobLocation.geonameid).label('count')).join(
JobPost).filter(JobPost.status.in_(POSTSTATUS.LISTED), JobPost.datetime > now - agelimit,
~JobLocation.geonameid.in_(db.session.query(Location.id))
).group_by(JobLocation.geonameid).order_by(db.text('count DESC')).limit(100)])
data = location_geodata(geonames.keys())
for row in data.values():
geonames[row['geonameid']] = row
    choices = [('%s/%s' % (row['geonameid'], row['name']), row['picker_title']) for row in geonames.values() if row]
form = NewLocationForm()
form.geoname.choices = choices
if form.validate_on_submit():
geonameid, name = form.geoname.data.split('/', 1)
geonameid = int(geonameid)
title = geonames[geonameid]['use_title']
location = Location(id=geonameid, name=name, title=title)
db.session.add(location)
db.session.commit()
return redirect(location.url_for('edit'), code=303)
return render_form(form=form, title="Add a location")
@app.route('/in/<name>/edit', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def location_edit(name):
location = Location.get(name)
if not location:
abort(404)
form = EditLocationForm(obj=location)
if form.validate_on_submit():
form.populate_obj(location)
db.session.commit()
return redirect(location.url_for(), code=303)
return render_form(form=form, title="Edit location")
| agpl-3.0 | -9,207,969,869,731,752,000 | 38.509434 | 109 | 0.677173 | false |
emeric254/gala-stri-website | Handlers/ListingHandler.py | 1 | 1504 | # -*- coding: utf-8 -*-
import json
import logging
from tornado.web import authenticated
from Handlers.BaseHandler import BaseHandler
from Tools import PostgreSQL
logger = logging.getLogger(__name__)
class ListingHandler(BaseHandler):
"""Listing Handler which require a connected user"""
@authenticated
def get(self, path_request):
if path_request == 'inscrits':
self.write(json.dumps(PostgreSQL.get_all_inscrit()))
return
elif path_request == 'accompagnants':
self.write(json.dumps(PostgreSQL.get_all_accompagnants()))
return
elif path_request.startswith('inscrits') and '/' in path_request:
(_, id) = path_request.rsplit('/', 1)
try:
id = int(id)
if id < 0:
raise ValueError
except ValueError:
self.send_error(status_code=400)
return
self.write(json.dumps(PostgreSQL.get_all_accompagnants_inscrit(id)))
return
self.send_error(status_code=400)
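    # Route sketch (hypothetical mapping in the application setup):
    #   (r"/listing/(.*)", ListingHandler)
    # so an authenticated GET /listing/inscrits/7 returns attendee 7's
    # companions as JSON, and any other path answers 400.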
@authenticated
def delete(self, path_request):
if path_request.startswith('inscrits/'):
PostgreSQL.supprimer_inscrit(path_request[9:])
self.write({})
return
elif path_request.startswith('accompagnants/'):
PostgreSQL.supprimer_accompagnant(path_request[14:])
self.write({})
return
self.send_error(status_code=400)
| mit | -2,530,221,862,486,792,000 | 31.695652 | 80 | 0.59109 | false |
BlakeTeam/VHDLCodeGenerator | lib/System.py | 1 | 7406 | #-------------------------------------------------------------------------------
# PROJECT: VHDL Code Generator
# NAME: System
#
# LICENSE: GNU-GPL V3
#-------------------------------------------------------------------------------
__author__ = "BlakeTeam"
import lib.signature
from lib import *
from .Block import Block as _Block
from lib.Connection import Connection as _Connection
IN = 1
OUT = 0
class System:
def __init__(self,name,input_info,output_info):
""" Structure that handles an abstract system
:String name: Name of the system (Name of the project)
            :(String, Int)[] input_info: List of (name, size) tuples for the input ports of the system
            :(String, Int)[] output_info: List of (name, size) tuples for the output ports of the system
"""
self.name = name # The name of the system
self.block_name = set() # The name of all blocks on the system
self.conn_name = set() # The name of all connections on the system
self.block = [] # Block list of the system
self.connections = {} # Connection dictionary of the system <Abstract Connection: QGraphicsLineItem>
self.system_input = _Block((),[size for name,size in input_info],self)
        # Setting names to the system's input ports (output ports of the input block)
for i in range(len(input_info)):
self.system_input.output_ports[i].name = input_info[i][0]
self.system_input.screenPos = (-50,0)
self.system_input.setName("SystemInput")
self.system_output = _Block([size for name,size in output_info],(),self)
        # Setting names to the system's output ports (input ports of the output block)
for i in range(len(output_info)):
self.system_output.input_ports[i].name = output_info[i][0]
self.system_output.screenPos = (50,0)
self.system_output.setName("SystemOutput")
self.input_info = input_info
self.output_info = output_info
self.input_names = [name for name,size in input_info]
self.output_names = [name for name,size in output_info]
        self.includedLibrary = ["ieee.std_logic_1164.all"] # TODO: Review this, it needs to be changed
def buildVHDLCode(self):
""" Building the code that will be generated.
"""
fileText = lib.signature.signature()
# Including libraries
fileText += "-- Including libraries\nLIBRARY ieee;\n"
for i in self.includedLibrary:
fileText += "USE %s;\n"%i
fileText += "\n"
fileText += "ENTITY %s IS\n"%self.name
fileText += "-- Generating ports\n"
fileText += "PORT (\n"
# Generating input ports
for i in self.system_input.output_ports:
fileText += "%s: IN std_logic%s;\n"%(i.name,"" if i.size == 1 else "_vector(%d downto 0)"%(i.size - 1)) #TODO: Aqui cambie
# Generating output ports
for i in self.system_output.input_ports:
fileText += "%s: OUT std_logic%s;\n"%(i.name,"" if i.size == 1 else "_vector(%d downto 0)"%(i.size - 1)) #TODO: Aqui cambie
fileText = fileText[:-2]
fileText += ");\n"
fileText += "END %s;\n"%self.name
# Architecture Implementation
fileText += "\n-- Architecture Implementation\n"
fileText += "ARCHITECTURE Arq_%s OF %s IS\n"%(self.name,self.name)
fileText += "BEGIN\n"
# Port declaration
fileText += "-- Port declaration\n"
# TODO: Overrated RAM
for i in self.block:
signals = i.getSignals()
inputSig = []
outputSig = []
tempSig = []
for name,size,mode in signals:
if mode == IN:
inputSig.append((name,size))
elif mode == OUT:
outputSig.append((name,size))
else:
tempSig.append((name,size))
fileText += "\n-- Declaring %s's ports%s\n"%(i.name," & temporary signals" if len(tempSig) != 0 else "") #TODO: Aqui cambie y moví la linea de lugar
fileText += "-- Input ports\n"
for name,size in inputSig:
fileText += "signal %s__%s: std_logic%s;\n"%(i.name,name,"" if size == 1 else "_vector(%d downto 0)"%(size - 1)) #TODO: Aqui cambie
fileText += "\n-- Output ports\n"
for name,size in outputSig:
fileText += "signal %s__%s: std_logic%s;\n"%(i.name,name,"" if size == 1 else "_vector(%d downto 0)"%(size - 1)) #TODO: Aqui cambie
if len(tempSig) != 0: #TODO: Aqui cambie
fileText += "\n-- Temporary signals\n"
for name,size in tempSig:
fileText += "signal %s__%s: std_logic%s;\n"%(i.name,name,"" if size == 1 else "_vector(%d downto 0)"%(size - 1)) #TODO: Aqui cambie
# Defining connections
fileText += "\n-- Defining connections\n"
for i in self.block:
for port_inp in i.input_ports:
receiver = i.name + "__" + port_inp.name
if self.system_input == port_inp.connection.out_block:
sender = port_inp.connection.out_block.output_ports[port_inp.connection.ind_output].name
else:
sender = port_inp.connection.out_block.name + "__" + port_inp.connection.out_block.output_ports[port_inp.connection.ind_output].name
fileText += "%s <= %s;\n"%(receiver, sender)
fileText += "\n"
# Block implementations
fileText += "\n-- Blocks implementation\n"
for i in self.block:
fileText += "-- Implementation of %s block\n"%i.name
fileText += i.generate()
fileText += "\n"
# Connecting outputs
fileText += "-- Connecting outputs\n"
for i in self.system_output.input_ports:
fileText += "%s <= %s__%s;\n"%(i.name,i.connection.out_block.name,i.connection.out_block.output_ports[i.connection.ind_output].name)
fileText += "END Arq_%s;\n"%self.name
# print("\nGENERATED CODE\n")
# print(fileText)
return fileText
def __getitem__(self, name):
""" Find a port for his name.
This function starts for input ports.
If the port exist it returns the reference to the port & mode(IN/OUT)
Else it returns -1
:String name: The name of the wanted port/
"""
try:
pos = self.input_names.index(name)
return pos,IN
except ValueError:
try:
pos = self.output_names.index(name)
return pos,OUT
except ValueError:
return -1
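    # Illustration (hedged, not part of the original class): system['clk']
    # returns (index, IN) when 'clk' names an input port, (index, OUT) when it
    # names an output port, and -1 when no port has that name.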
def connect(self,output_block,ind_output,input_block,ind_input,visualConnection = None):
"""
:param output_block:
:param ind_output:
:param input_block:
:param ind_input:
"""
conn = _Connection(output_block,ind_output,input_block,ind_input,self) # Creating the connection between 2 blocks
output_block.output_ports[ind_output].connection.append(conn) # Linking the connection with the output block
input_block.input_ports[ind_input].connection = conn # Linking the connection with the input block
self.connections.update({conn:visualConnection}) # Adding the connection to the connection list (on the system)
return conn | gpl-3.0 | 2,981,545,971,807,642,600 | 39.692308 | 160 | 0.559352 | false |
NickShaffner/rhea | test/test_system/test_memmap_command_bridge.py | 1 | 4179 |
"""
Test and verify the memory-mapped command bridge (memmap_command_bridge).
Unlike the actual designs this test does not use a generic peripheral
but instead a specific peripheral / slave is used for each bus type,
other tests verify the generic ability.
"""
from __future__ import print_function, division
from random import randint
import traceback
import myhdl
from myhdl import (Signal, intbv, always_seq, always_comb,
instance, delay, StopSimulation,)
from rhea import Global, Clock, Reset, Signals
from rhea.system import Barebone, FIFOBus
from rhea.cores.memmap import command_bridge
from rhea.cores.fifo import fifo_fast
from rhea.utils import CommandPacket
from rhea.utils.test import run_testbench, tb_args, tb_default_args
@myhdl.block
def memmap_peripheral_bb(clock, reset, bb):
""" Emulate Barebone memory-mapped reads and writes"""
assert isinstance(bb, Barebone)
mem = {}
@always_seq(clock.posedge, reset=reset)
def beh_writes():
addr = int(bb.address)
bb.done.next = not (bb.write or bb.read)
if bb.write:
mem[addr] = int(bb.write_data)
@always_comb
def beh_reads():
addr = int(bb.address)
if bb.read:
if addr not in mem:
mem[addr] = 0
bb.read_data.next = mem[addr]
else:
bb.read_data.next = 0
return beh_writes, beh_reads
def test_memmap_command_bridge(args=None):
nloops = 37
args = tb_default_args(args)
clock = Clock(0, frequency=50e6)
reset = Reset(0, active=1, async=False)
glbl = Global(clock, reset)
fifobus = FIFOBus()
memmap = Barebone(glbl, data_width=32, address_width=28)
fifobus.clock = clock
@myhdl.block
def bench_command_bridge():
tbclk = clock.gen()
tbdut = command_bridge(glbl, fifobus, memmap)
readpath, writepath = FIFOBus(), FIFOBus()
readpath.clock = writepath.clock = clock
tbmap = fifobus.assign_read_write_paths(readpath, writepath)
tbftx = fifo_fast(glbl, writepath) # user write path
tbfrx = fifo_fast(glbl, readpath) # user read path
# @todo: add other bus types
tbmem = memmap_peripheral_bb(clock, reset, memmap)
# save the data read ...
read_value = []
@instance
def tbstim():
yield reset.pulse(32)
fifobus.read.next = False
fifobus.write.next = False
assert not fifobus.full
assert fifobus.empty
assert fifobus.read_data == 0
fifobus.write_data.next = 0
try:
# test a single address
pkt = CommandPacket(True, 0x0000)
yield pkt.put(readpath)
yield pkt.get(writepath, read_value, [0])
pkt = CommandPacket(False, 0x0000, [0x5555AAAA])
yield pkt.put(readpath)
yield pkt.get(writepath, read_value, [0x5555AAAA])
# test a bunch of random addresses
for ii in range(nloops):
randaddr = randint(0, (2**20)-1)
randdata = randint(0, (2**32)-1)
pkt = CommandPacket(False, randaddr, [randdata])
yield pkt.put(readpath)
yield pkt.get(writepath, read_value, [randdata])
except Exception as err:
print("Error: {}".format(str(err)))
traceback.print_exc()
yield delay(2000)
raise StopSimulation
wp_read, wp_valid = Signals(bool(0), 2)
wp_read_data = Signal(intbv(0)[8:])
wp_empty, wp_full = Signals(bool(0), 2)
@always_comb
def tbmon():
wp_read.next = writepath.read
wp_read_data.next = writepath.read_data
wp_valid.next = writepath.read_valid
wp_full.next = writepath.full
wp_empty.next = writepath.empty
return tbclk, tbdut, tbmap, tbftx, tbfrx, tbmem, tbstim, tbmon
run_testbench(bench_command_bridge, args=args)
if __name__ == '__main__':
test_memmap_command_bridge(tb_args())
| mit | 908,277,074,828,410,000 | 30.186567 | 73 | 0.592008 | false |
zmughal/pygments-mirror | tests/test_latex_formatter.py | 1 | 1504 | # -*- coding: utf-8 -*-
"""
Pygments LaTeX formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import os
import unittest
import tempfile
from pygments.formatters import LatexFormatter
from pygments.lexers import PythonLexer
import support
TESTFILE, TESTDIR = support.location(__file__)
class LatexFormatterTest(unittest.TestCase):
def test_valid_output(self):
with open(TESTFILE) as fp:
tokensource = list(PythonLexer().get_tokens(fp.read()))
fmt = LatexFormatter(full=True, encoding='latin1')
handle, pathname = tempfile.mkstemp('.tex')
# place all output files in /tmp too
old_wd = os.getcwd()
os.chdir(os.path.dirname(pathname))
tfile = os.fdopen(handle, 'wb')
fmt.format(tokensource, tfile)
tfile.close()
try:
import subprocess
po = subprocess.Popen(['latex', '-interaction=nonstopmode',
pathname], stdout=subprocess.PIPE)
ret = po.wait()
output = po.stdout.read()
po.stdout.close()
except OSError:
# latex not available
pass
else:
if ret:
print(output)
self.assertFalse(ret, 'latex run reported errors')
os.unlink(pathname)
os.chdir(old_wd)
| bsd-2-clause | -1,551,528,746,510,471,000 | 26.851852 | 71 | 0.584441 | false |
jelly/calibre | src/calibre/ebooks/docx/to_html.py | 1 | 32913 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import sys, os, re, math, errno, uuid
from collections import OrderedDict, defaultdict
from lxml import html
from lxml.html.builder import (
HTML, HEAD, TITLE, BODY, LINK, META, P, SPAN, BR, DIV, SUP, A, DT, DL, DD, H1)
from calibre import guess_type
from calibre.ebooks.docx.container import DOCX, fromstring
from calibre.ebooks.docx.names import XML, generate_anchor
from calibre.ebooks.docx.styles import Styles, inherit, PageProperties
from calibre.ebooks.docx.numbering import Numbering
from calibre.ebooks.docx.fonts import Fonts
from calibre.ebooks.docx.images import Images
from calibre.ebooks.docx.tables import Tables
from calibre.ebooks.docx.footnotes import Footnotes
from calibre.ebooks.docx.cleanup import cleanup_markup
from calibre.ebooks.docx.theme import Theme
from calibre.ebooks.docx.toc import create_toc
from calibre.ebooks.docx.fields import Fields
from calibre.ebooks.docx.settings import Settings
from calibre.ebooks.metadata.opf2 import OPFCreator
from calibre.utils.localization import canonicalize_lang, lang_as_iso639_1
NBSP = '\xa0'
class Text:
def __init__(self, elem, attr, buf):
self.elem, self.attr, self.buf = elem, attr, buf
def add_elem(self, elem):
setattr(self.elem, self.attr, ''.join(self.buf))
self.elem, self.attr, self.buf = elem, 'tail', []
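# Illustration (hedged, not from the original source): Text buffers character
# data in 'buf'; when add_elem() appends a child element, the buffered text is
# flushed into the current attribute ('text' or 'tail') and collection resumes
# on the new element's tail, mirroring how lxml stores mixed content.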
def html_lang(docx_lang):
lang = canonicalize_lang(docx_lang)
if lang and lang != 'und':
lang = lang_as_iso639_1(lang)
if lang:
return lang
class Convert(object):
def __init__(self, path_or_stream, dest_dir=None, log=None, detect_cover=True, notes_text=None, notes_nopb=False, nosupsub=False):
self.docx = DOCX(path_or_stream, log=log)
self.namespace = self.docx.namespace
self.ms_pat = re.compile(r'\s{2,}')
self.ws_pat = re.compile(r'[\n\r\t]')
self.log = self.docx.log
self.detect_cover = detect_cover
self.notes_text = notes_text or _('Notes')
self.notes_nopb = notes_nopb
self.nosupsub = nosupsub
self.dest_dir = dest_dir or os.getcwdu()
self.mi = self.docx.metadata
self.body = BODY()
self.theme = Theme(self.namespace)
self.settings = Settings(self.namespace)
self.tables = Tables(self.namespace)
self.fields = Fields(self.namespace)
self.styles = Styles(self.namespace, self.tables)
self.images = Images(self.namespace, self.log)
self.object_map = OrderedDict()
self.html = HTML(
HEAD(
META(charset='utf-8'),
TITLE(self.mi.title or _('Unknown')),
LINK(rel='stylesheet', type='text/css', href='docx.css'),
),
self.body
)
self.html.text='\n\t'
self.html[0].text='\n\t\t'
self.html[0].tail='\n'
for child in self.html[0]:
child.tail = '\n\t\t'
self.html[0][-1].tail = '\n\t'
self.html[1].text = self.html[1].tail = '\n'
lang = html_lang(self.mi.language)
if lang:
self.html.set('lang', lang)
self.doc_lang = lang
else:
self.doc_lang = None
def __call__(self):
doc = self.docx.document
relationships_by_id, relationships_by_type = self.docx.document_relationships
self.fields(doc, self.log)
self.read_styles(relationships_by_type)
self.images(relationships_by_id)
self.layers = OrderedDict()
self.framed = [[]]
self.frame_map = {}
self.framed_map = {}
self.anchor_map = {}
self.link_map = defaultdict(list)
self.link_source_map = {}
self.toc_anchor = None
self.block_runs = []
paras = []
self.log.debug('Converting Word markup to HTML')
self.read_page_properties(doc)
self.current_rels = relationships_by_id
for wp, page_properties in self.page_map.iteritems():
self.current_page = page_properties
if wp.tag.endswith('}p'):
p = self.convert_p(wp)
self.body.append(p)
paras.append(wp)
self.read_block_anchors(doc)
self.styles.apply_contextual_spacing(paras)
self.mark_block_runs(paras)
# Apply page breaks at the start of every section, except the first
# section (since that will be the start of the file)
self.styles.apply_section_page_breaks(self.section_starts[1:])
notes_header = None
orig_rid_map = self.images.rid_map
if self.footnotes.has_notes:
self.body.append(H1(self.notes_text))
notes_header = self.body[-1]
notes_header.set('class', 'notes-header')
for anchor, text, note in self.footnotes:
dl = DL(id=anchor)
dl.set('class', 'footnote')
self.body.append(dl)
dl.append(DT('[', A('←' + text, href='#back_%s' % anchor, title=text)))
dl[-1][0].tail = ']'
dl.append(DD())
paras = []
self.images.rid_map = self.current_rels = note.rels[0]
for wp in note:
if wp.tag.endswith('}tbl'):
self.tables.register(wp, self.styles)
self.page_map[wp] = self.current_page
else:
p = self.convert_p(wp)
dl[-1].append(p)
paras.append(wp)
self.styles.apply_contextual_spacing(paras)
self.mark_block_runs(paras)
for p, wp in self.object_map.iteritems():
if len(p) > 0 and not p.text and len(p[0]) > 0 and not p[0].text and p[0][0].get('class', None) == 'tab':
# Paragraph uses tabs for indentation, convert to text-indent
parent = p[0]
tabs = []
for child in parent:
if child.get('class', None) == 'tab':
tabs.append(child)
if child.tail:
break
else:
break
indent = len(tabs) * self.settings.default_tab_stop
style = self.styles.resolve(wp)
if style.text_indent is inherit or (hasattr(style.text_indent, 'endswith') and style.text_indent.endswith('pt')):
if style.text_indent is not inherit:
indent = float(style.text_indent[:-2]) + indent
style.text_indent = '%.3gpt' % indent
parent.text = tabs[-1].tail or ''
map(parent.remove, tabs)
self.images.rid_map = orig_rid_map
self.resolve_links()
self.styles.cascade(self.layers)
self.tables.apply_markup(self.object_map, self.page_map)
numbered = []
for html_obj, obj in self.object_map.iteritems():
raw = obj.get('calibre_num_id', None)
if raw is not None:
lvl, num_id = raw.partition(':')[0::2]
try:
lvl = int(lvl)
except (TypeError, ValueError):
lvl = 0
numbered.append((html_obj, num_id, lvl))
self.numbering.apply_markup(numbered, self.body, self.styles, self.object_map, self.images)
self.apply_frames()
if len(self.body) > 0:
self.body.text = '\n\t'
for child in self.body:
child.tail = '\n\t'
self.body[-1].tail = '\n'
self.log.debug('Converting styles to CSS')
self.styles.generate_classes()
for html_obj, obj in self.object_map.iteritems():
style = self.styles.resolve(obj)
if style is not None:
css = style.css
if css:
cls = self.styles.class_name(css)
if cls:
html_obj.set('class', cls)
for html_obj, css in self.framed_map.iteritems():
cls = self.styles.class_name(css)
if cls:
html_obj.set('class', cls)
if notes_header is not None:
for h in self.namespace.children(self.body, 'h1', 'h2', 'h3'):
notes_header.tag = h.tag
cls = h.get('class', None)
if cls and cls != 'notes-header':
notes_header.set('class', '%s notes-header' % cls)
break
self.fields.polish_markup(self.object_map)
self.log.debug('Cleaning up redundant markup generated by Word')
self.cover_image = cleanup_markup(self.log, self.html, self.styles, self.dest_dir, self.detect_cover, self.namespace.XPath)
return self.write(doc)
def read_page_properties(self, doc):
current = []
self.page_map = OrderedDict()
self.section_starts = []
for p in self.namespace.descendants(doc, 'w:p', 'w:tbl'):
if p.tag.endswith('}tbl'):
self.tables.register(p, self.styles)
current.append(p)
continue
sect = tuple(self.namespace.descendants(p, 'w:sectPr'))
if sect:
pr = PageProperties(self.namespace, sect)
paras = current + [p]
for x in paras:
self.page_map[x] = pr
self.section_starts.append(paras[0])
current = []
else:
current.append(p)
if current:
self.section_starts.append(current[0])
last = self.namespace.XPath('./w:body/w:sectPr')(doc)
pr = PageProperties(self.namespace, last)
for x in current:
self.page_map[x] = pr
def read_styles(self, relationships_by_type):
def get_name(rtype, defname):
name = relationships_by_type.get(rtype, None)
if name is None:
cname = self.docx.document_name.split('/')
cname[-1] = defname
if self.docx.exists('/'.join(cname)):
                    name = '/'.join(cname)
if name and name.startswith('word/word') and not self.docx.exists(name):
name = name.partition('/')[2]
return name
nname = get_name(self.namespace.names['NUMBERING'], 'numbering.xml')
sname = get_name(self.namespace.names['STYLES'], 'styles.xml')
sename = get_name(self.namespace.names['SETTINGS'], 'settings.xml')
fname = get_name(self.namespace.names['FONTS'], 'fontTable.xml')
tname = get_name(self.namespace.names['THEMES'], 'theme1.xml')
foname = get_name(self.namespace.names['FOOTNOTES'], 'footnotes.xml')
enname = get_name(self.namespace.names['ENDNOTES'], 'endnotes.xml')
numbering = self.numbering = Numbering(self.namespace)
footnotes = self.footnotes = Footnotes(self.namespace)
fonts = self.fonts = Fonts(self.namespace)
foraw = enraw = None
forel, enrel = ({}, {}), ({}, {})
if sename is not None:
try:
seraw = self.docx.read(sename)
except KeyError:
self.log.warn('Settings %s do not exist' % sename)
except EnvironmentError as e:
if e.errno != errno.ENOENT:
raise
self.log.warn('Settings %s file missing' % sename)
else:
self.settings(fromstring(seraw))
if foname is not None:
try:
foraw = self.docx.read(foname)
except KeyError:
self.log.warn('Footnotes %s do not exist' % foname)
else:
forel = self.docx.get_relationships(foname)
if enname is not None:
try:
enraw = self.docx.read(enname)
except KeyError:
self.log.warn('Endnotes %s do not exist' % enname)
else:
enrel = self.docx.get_relationships(enname)
footnotes(fromstring(foraw) if foraw else None, forel, fromstring(enraw) if enraw else None, enrel)
if fname is not None:
embed_relationships = self.docx.get_relationships(fname)[0]
try:
raw = self.docx.read(fname)
except KeyError:
self.log.warn('Fonts table %s does not exist' % fname)
else:
fonts(fromstring(raw), embed_relationships, self.docx, self.dest_dir)
if tname is not None:
try:
raw = self.docx.read(tname)
except KeyError:
                self.log.warn('Theme %s does not exist' % tname)
else:
self.theme(fromstring(raw))
styles_loaded = False
if sname is not None:
try:
raw = self.docx.read(sname)
except KeyError:
self.log.warn('Styles %s do not exist' % sname)
else:
self.styles(fromstring(raw), fonts, self.theme)
styles_loaded = True
if not styles_loaded:
self.styles(None, fonts, self.theme)
if nname is not None:
try:
raw = self.docx.read(nname)
except KeyError:
self.log.warn('Numbering styles %s do not exist' % nname)
else:
numbering(fromstring(raw), self.styles, self.docx.get_relationships(nname)[0])
self.styles.resolve_numbering(numbering)
def write(self, doc):
toc = create_toc(doc, self.body, self.resolved_link_map, self.styles, self.object_map, self.log, self.namespace)
raw = html.tostring(self.html, encoding='utf-8', doctype='<!DOCTYPE html>')
with lopen(os.path.join(self.dest_dir, 'index.html'), 'wb') as f:
f.write(raw)
css = self.styles.generate_css(self.dest_dir, self.docx, self.notes_nopb, self.nosupsub)
if css:
with lopen(os.path.join(self.dest_dir, 'docx.css'), 'wb') as f:
f.write(css.encode('utf-8'))
opf = OPFCreator(self.dest_dir, self.mi)
opf.toc = toc
opf.create_manifest_from_files_in([self.dest_dir])
for item in opf.manifest:
if item.media_type == 'text/html':
item.media_type = guess_type('a.xhtml')[0]
opf.create_spine(['index.html'])
if self.cover_image is not None:
opf.guide.set_cover(self.cover_image)
def process_guide(E, guide):
if self.toc_anchor is not None:
guide.append(E.reference(
href='index.html#' + self.toc_anchor, title=_('Table of Contents'), type='toc'))
toc_file = os.path.join(self.dest_dir, 'toc.ncx')
with lopen(os.path.join(self.dest_dir, 'metadata.opf'), 'wb') as of, open(toc_file, 'wb') as ncx:
opf.render(of, ncx, 'toc.ncx', process_guide=process_guide)
if os.path.getsize(toc_file) == 0:
os.remove(toc_file)
return os.path.join(self.dest_dir, 'metadata.opf')
def read_block_anchors(self, doc):
doc_anchors = frozenset(self.namespace.XPath('./w:body/w:bookmarkStart[@w:name]')(doc))
if doc_anchors:
current_bm = set()
rmap = {v:k for k, v in self.object_map.iteritems()}
for p in self.namespace.descendants(doc, 'w:p', 'w:bookmarkStart[@w:name]'):
if p.tag.endswith('}p'):
if current_bm and p in rmap:
para = rmap[p]
if 'id' not in para.attrib:
para.set('id', generate_anchor(next(iter(current_bm)), frozenset(self.anchor_map.itervalues())))
for name in current_bm:
self.anchor_map[name] = para.get('id')
current_bm = set()
elif p in doc_anchors:
anchor = self.namespace.get(p, 'w:name')
if anchor:
current_bm.add(anchor)
def convert_p(self, p):
dest = P()
self.object_map[dest] = p
style = self.styles.resolve_paragraph(p)
self.layers[p] = []
self.frame_map[p] = style.frame
self.add_frame(dest, style.frame)
current_anchor = None
current_hyperlink = None
hl_xpath = self.namespace.XPath('ancestor::w:hyperlink[1]')
def p_parent(x):
# Ensure that nested <w:p> tags are handled. These can occur if a
# textbox is present inside a paragraph.
while True:
x = x.getparent()
try:
if x.tag.endswith('}p'):
return x
except AttributeError:
break
for x in self.namespace.descendants(p, 'w:r', 'w:bookmarkStart', 'w:hyperlink', 'w:instrText'):
if p_parent(x) is not p:
continue
if x.tag.endswith('}r'):
span = self.convert_run(x)
if current_anchor is not None:
(dest if len(dest) == 0 else span).set('id', current_anchor)
current_anchor = None
if current_hyperlink is not None:
try:
hl = hl_xpath(x)[0]
self.link_map[hl].append(span)
self.link_source_map[hl] = self.current_rels
x.set('is-link', '1')
except IndexError:
current_hyperlink = None
dest.append(span)
self.layers[p].append(x)
elif x.tag.endswith('}bookmarkStart'):
anchor = self.namespace.get(x, 'w:name')
if anchor and anchor not in self.anchor_map and anchor != '_GoBack':
# _GoBack is a special bookmark inserted by Word 2010 for
# the return to previous edit feature, we ignore it
old_anchor = current_anchor
self.anchor_map[anchor] = current_anchor = generate_anchor(anchor, frozenset(self.anchor_map.itervalues()))
if old_anchor is not None:
# The previous anchor was not applied to any element
for a, t in tuple(self.anchor_map.iteritems()):
if t == old_anchor:
self.anchor_map[a] = current_anchor
elif x.tag.endswith('}hyperlink'):
current_hyperlink = x
elif x.tag.endswith('}instrText') and x.text and x.text.strip().startswith('TOC '):
old_anchor = current_anchor
anchor = str(uuid.uuid4())
self.anchor_map[anchor] = current_anchor = generate_anchor('toc', frozenset(self.anchor_map.itervalues()))
self.toc_anchor = current_anchor
if old_anchor is not None:
# The previous anchor was not applied to any element
for a, t in tuple(self.anchor_map.iteritems()):
if t == old_anchor:
self.anchor_map[a] = current_anchor
if current_anchor is not None:
# This paragraph had no <w:r> descendants
dest.set('id', current_anchor)
current_anchor = None
m = re.match(r'heading\s+(\d+)$', style.style_name or '', re.IGNORECASE)
if m is not None:
n = min(6, max(1, int(m.group(1))))
dest.tag = 'h%d' % n
if style.bidi is True:
dest.set('dir', 'rtl')
border_runs = []
common_borders = []
for span in dest:
run = self.object_map[span]
style = self.styles.resolve_run(run)
if not border_runs or border_runs[-1][1].same_border(style):
border_runs.append((span, style))
elif border_runs:
if len(border_runs) > 1:
common_borders.append(border_runs)
border_runs = []
for border_run in common_borders:
spans = []
bs = {}
for span, style in border_run:
style.get_border_css(bs)
style.clear_border_css()
spans.append(span)
if bs:
cls = self.styles.register(bs, 'text_border')
wrapper = self.wrap_elems(spans, SPAN())
wrapper.set('class', cls)
if not dest.text and len(dest) == 0 and not style.has_visible_border():
# Empty paragraph add a non-breaking space so that it is rendered
# by WebKit
dest.text = NBSP
# If the last element in a block is a <br> the <br> is not rendered in
# HTML, unless it is followed by a trailing space. Word, on the other
# hand inserts a blank line for trailing <br>s.
if len(dest) > 0 and not dest[-1].tail:
if dest[-1].tag == 'br':
dest[-1].tail = NBSP
elif len(dest[-1]) > 0 and dest[-1][-1].tag == 'br' and not dest[-1][-1].tail:
dest[-1][-1].tail = NBSP
return dest
def wrap_elems(self, elems, wrapper):
p = elems[0].getparent()
idx = p.index(elems[0])
p.insert(idx, wrapper)
wrapper.tail = elems[-1].tail
elems[-1].tail = None
for elem in elems:
try:
p.remove(elem)
except ValueError:
# Probably a hyperlink that spans multiple
                # paragraphs; theoretically we should break this up into
# multiple hyperlinks, but I can't be bothered.
elem.getparent().remove(elem)
wrapper.append(elem)
return wrapper
def resolve_links(self):
self.resolved_link_map = {}
for hyperlink, spans in self.link_map.iteritems():
relationships_by_id = self.link_source_map[hyperlink]
span = spans[0]
if len(spans) > 1:
span = self.wrap_elems(spans, SPAN())
span.tag = 'a'
self.resolved_link_map[hyperlink] = span
tgt = self.namespace.get(hyperlink, 'w:tgtFrame')
if tgt:
span.set('target', tgt)
tt = self.namespace.get(hyperlink, 'w:tooltip')
if tt:
span.set('title', tt)
rid = self.namespace.get(hyperlink, 'r:id')
if rid and rid in relationships_by_id:
span.set('href', relationships_by_id[rid])
continue
anchor = self.namespace.get(hyperlink, 'w:anchor')
if anchor and anchor in self.anchor_map:
span.set('href', '#' + self.anchor_map[anchor])
continue
self.log.warn('Hyperlink with unknown target (rid=%s, anchor=%s), ignoring' %
(rid, anchor))
# hrefs that point nowhere give epubcheck a hernia. The element
# should be styled explicitly by Word anyway.
# span.set('href', '#')
rmap = {v:k for k, v in self.object_map.iteritems()}
for hyperlink, runs in self.fields.hyperlink_fields:
spans = [rmap[r] for r in runs if r in rmap]
if not spans:
continue
span = spans[0]
if len(spans) > 1:
span = self.wrap_elems(spans, SPAN())
span.tag = 'a'
tgt = hyperlink.get('target', None)
if tgt:
span.set('target', tgt)
tt = hyperlink.get('title', None)
if tt:
span.set('title', tt)
url = hyperlink.get('url', None)
if url is None:
anchor = hyperlink.get('anchor', None)
if anchor in self.anchor_map:
span.set('href', '#' + self.anchor_map[anchor])
continue
self.log.warn('Hyperlink field with unknown anchor: %s' % anchor)
else:
if url in self.anchor_map:
span.set('href', '#' + self.anchor_map[url])
continue
span.set('href', url)
for img, link, relationships_by_id in self.images.links:
parent = img.getparent()
idx = parent.index(img)
a = A(img)
a.tail, img.tail = img.tail, None
parent.insert(idx, a)
tgt = link.get('target', None)
if tgt:
a.set('target', tgt)
tt = link.get('title', None)
if tt:
a.set('title', tt)
rid = link['id']
if rid in relationships_by_id:
dest = relationships_by_id[rid]
if dest.startswith('#'):
if dest[1:] in self.anchor_map:
a.set('href', '#' + self.anchor_map[dest[1:]])
else:
a.set('href', dest)
def convert_run(self, run):
ans = SPAN()
self.object_map[ans] = run
text = Text(ans, 'text', [])
for child in run:
if self.namespace.is_tag(child, 'w:t'):
if not child.text:
continue
space = child.get(XML('space'), None)
preserve = False
ctext = child.text
if space != 'preserve':
# Remove leading and trailing whitespace. Word ignores
# leading and trailing whitespace without preserve
ctext = ctext.strip(' \n\r\t')
# Only use a <span> with white-space:pre-wrap if this element
# actually needs it, i.e. if it has more than one
# consecutive space or it has newlines or tabs.
multi_spaces = self.ms_pat.search(ctext) is not None
preserve = multi_spaces or self.ws_pat.search(ctext) is not None
if preserve:
text.add_elem(SPAN(ctext, style="white-space:pre-wrap"))
ans.append(text.elem)
else:
text.buf.append(ctext)
elif self.namespace.is_tag(child, 'w:cr'):
text.add_elem(BR())
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:br'):
typ = self.namespace.get(child, 'w:type')
if typ in {'column', 'page'}:
br = BR(style='page-break-after:always')
else:
clear = child.get('clear', None)
if clear in {'all', 'left', 'right'}:
br = BR(style='clear:%s'%('both' if clear == 'all' else clear))
else:
br = BR()
text.add_elem(br)
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:drawing') or self.namespace.is_tag(child, 'w:pict'):
for img in self.images.to_html(child, self.current_page, self.docx, self.dest_dir):
text.add_elem(img)
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:footnoteReference') or self.namespace.is_tag(child, 'w:endnoteReference'):
anchor, name = self.footnotes.get_ref(child)
if anchor and name:
l = A(SUP(name, id='back_%s' % anchor), href='#' + anchor, title=name)
l.set('class', 'noteref')
text.add_elem(l)
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:tab'):
spaces = int(math.ceil((self.settings.default_tab_stop / 36) * 6))
text.add_elem(SPAN(NBSP * spaces))
ans.append(text.elem)
ans[-1].set('class', 'tab')
elif self.namespace.is_tag(child, 'w:noBreakHyphen'):
text.buf.append(u'\u2011')
elif self.namespace.is_tag(child, 'w:softHyphen'):
text.buf.append(u'\u00ad')
if text.buf:
setattr(text.elem, text.attr, ''.join(text.buf))
style = self.styles.resolve_run(run)
if style.vert_align in {'superscript', 'subscript'}:
ans.tag = 'sub' if style.vert_align == 'subscript' else 'sup'
if style.lang is not inherit:
lang = html_lang(style.lang)
if lang is not None and lang != self.doc_lang:
ans.set('lang', lang)
if style.rtl is True:
ans.set('dir', 'rtl')
return ans
def add_frame(self, html_obj, style):
last_run = self.framed[-1]
if style is inherit:
if last_run:
self.framed.append([])
return
if last_run:
if last_run[-1][1] == style:
last_run.append((html_obj, style))
else:
self.framed[-1].append((html_obj, style))
else:
last_run.append((html_obj, style))
def apply_frames(self):
for run in filter(None, self.framed):
style = run[0][1]
paras = tuple(x[0] for x in run)
parent = paras[0].getparent()
idx = parent.index(paras[0])
frame = DIV(*paras)
parent.insert(idx, frame)
self.framed_map[frame] = css = style.css(self.page_map[self.object_map[paras[0]]])
self.styles.register(css, 'frame')
if not self.block_runs:
return
rmap = {v:k for k, v in self.object_map.iteritems()}
for border_style, blocks in self.block_runs:
paras = tuple(rmap[p] for p in blocks)
parent = paras[0].getparent()
if parent.tag in ('ul', 'ol'):
ul = parent
parent = ul.getparent()
idx = parent.index(ul)
frame = DIV(ul)
else:
idx = parent.index(paras[0])
frame = DIV(*paras)
parent.insert(idx, frame)
self.framed_map[frame] = css = border_style.css
self.styles.register(css, 'frame')
def mark_block_runs(self, paras):
def process_run(run):
max_left = max_right = 0
has_visible_border = None
for p in run:
style = self.styles.resolve_paragraph(p)
if has_visible_border is None:
has_visible_border = style.has_visible_border()
max_left, max_right = max(style.margin_left, max_left), max(style.margin_right, max_right)
if has_visible_border:
style.margin_left = style.margin_right = inherit
if p is not run[0]:
style.padding_top = 0
else:
border_style = style.clone_border_styles()
if has_visible_border:
border_style.margin_top, style.margin_top = style.margin_top, inherit
if p is not run[-1]:
style.padding_bottom = 0
else:
if has_visible_border:
border_style.margin_bottom, style.margin_bottom = style.margin_bottom, inherit
style.clear_borders()
if p is not run[-1]:
style.apply_between_border()
if has_visible_border:
border_style.margin_left, border_style.margin_right = max_left,max_right
self.block_runs.append((border_style, run))
run = []
for p in paras:
if run and self.frame_map.get(p) == self.frame_map.get(run[-1]):
style = self.styles.resolve_paragraph(p)
last_style = self.styles.resolve_paragraph(run[-1])
if style.has_identical_borders(last_style):
run.append(p)
continue
if len(run) > 1:
process_run(run)
run = [p]
if len(run) > 1:
process_run(run)
if __name__ == '__main__':
import shutil
from calibre.utils.logging import default_log
default_log.filter_level = default_log.DEBUG
dest_dir = os.path.join(os.getcwdu(), 'docx_input')
if os.path.exists(dest_dir):
shutil.rmtree(dest_dir)
os.mkdir(dest_dir)
Convert(sys.argv[-1], dest_dir=dest_dir, log=default_log)()
| gpl-3.0 | -1,772,661,482,108,892,200 | 40.554293 | 134 | 0.520009 | false |
ltilve/chromium | tools/telemetry/telemetry/core/browser_options.py | 1 | 15478 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import logging
import optparse
import os
import shlex
import socket
import sys
from telemetry.core import browser_finder
from telemetry.core import browser_finder_exceptions
from telemetry.core import device_finder
from telemetry.core import platform
from telemetry.core.platform.profiler import profiler_finder
from telemetry.core import profile_types
from telemetry.core import util
from telemetry.core import wpr_modes
util.AddDirToPythonPath(
util.GetChromiumSrcDir(), 'third_party', 'webpagereplay')
import net_configs # pylint: disable=F0401
class BrowserFinderOptions(optparse.Values):
"""Options to be used for discovering a browser."""
def __init__(self, browser_type=None):
optparse.Values.__init__(self)
self.browser_type = browser_type
self.browser_executable = None
self.chrome_root = None
self.device = None
self.cros_ssh_identity = None
self.extensions_to_load = []
# If set, copy the generated profile to this path on exit.
self.output_profile_path = None
self.cros_remote = None
self.profiler = None
self.verbosity = 0
self.browser_options = BrowserOptions()
self.output_file = None
self.android_rndis = False
self.no_performance_mode = False
def __repr__(self):
return str(sorted(self.__dict__.items()))
def Copy(self):
return copy.deepcopy(self)
def CreateParser(self, *args, **kwargs):
parser = optparse.OptionParser(*args, **kwargs)
# Selection group
group = optparse.OptionGroup(parser, 'Which browser to use')
group.add_option('--browser',
dest='browser_type',
default=None,
help='Browser type to run, '
'in order of priority. Supported values: list,%s' %
','.join(browser_finder.FindAllBrowserTypes(self)))
group.add_option('--browser-executable',
dest='browser_executable',
help='The exact browser to run.')
group.add_option('--chrome-root',
dest='chrome_root',
        help='Where to look for chrome builds. '
        'Defaults to searching parent dirs.')
group.add_option('--device',
dest='device',
        help='The device ID to use. '
        'If not specified, only 0 or 1 connected devices are supported. If '
        'specified as "android", all available Android devices are used.')
group.add_option('--target-arch',
dest='target_arch',
help='The target architecture of the browser. Options available are: '
'x64, x86_64, arm, arm64 and mips. '
'Defaults to the default architecture of the platform if omitted.')
group.add_option(
'--remote',
dest='cros_remote',
help='The hostname of a remote ChromeOS device to use.')
group.add_option(
'--remote-ssh-port',
type=int,
default=socket.getservbyname('ssh'),
dest='cros_remote_ssh_port',
help='The SSH port of the remote ChromeOS device (requires --remote).')
identity = None
testing_rsa = os.path.join(
util.GetChromiumSrcDir(),
'third_party', 'chromite', 'ssh_keys', 'testing_rsa')
if os.path.exists(testing_rsa):
identity = testing_rsa
group.add_option('--identity',
dest='cros_ssh_identity',
default=identity,
help='The identity file to use when ssh\'ing into the ChromeOS device')
parser.add_option_group(group)
# Debugging options
group = optparse.OptionGroup(parser, 'When things go wrong')
profiler_choices = profiler_finder.GetAllAvailableProfilers()
group.add_option(
'--profiler', default=None, type='choice',
choices=profiler_choices,
help='Record profiling data using this tool. Supported values: ' +
', '.join(profiler_choices))
group.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed)')
group.add_option('--print-bootstrap-deps',
action='store_true',
help='Output bootstrap deps list.')
parser.add_option_group(group)
# Platform options
group = optparse.OptionGroup(parser, 'Platform options')
group.add_option('--no-performance-mode', action='store_true',
help='Some platforms run on "full performance mode" where the '
'test is executed at maximum CPU speed in order to minimize noise '
'(specially important for dashboards / continuous builds). '
'This option prevents Telemetry from tweaking such platform settings.')
group.add_option('--android-rndis', dest='android_rndis', default=False,
action='store_true', help='Use RNDIS forwarding on Android.')
group.add_option('--no-android-rndis', dest='android_rndis',
action='store_false', help='Do not use RNDIS forwarding on Android.'
' [default]')
parser.add_option_group(group)
# Browser options.
self.browser_options.AddCommandLineArgs(parser)
real_parse = parser.parse_args
def ParseArgs(args=None):
defaults = parser.get_default_values()
for k, v in defaults.__dict__.items():
if k in self.__dict__ and self.__dict__[k] != None:
continue
self.__dict__[k] = v
ret = real_parse(args, self) # pylint: disable=E1121
if self.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif self.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
if self.device == 'list':
devices = device_finder.GetDevicesMatchingOptions(self)
print 'Available devices:'
for device in devices:
print ' ', device.name
sys.exit(0)
if self.browser_executable and not self.browser_type:
self.browser_type = 'exact'
if self.browser_type == 'list':
devices = device_finder.GetDevicesMatchingOptions(self)
if not devices:
sys.exit(0)
browser_types = {}
for device in devices:
try:
possible_browsers = browser_finder.GetAllAvailableBrowsers(self,
device)
browser_types[device.name] = sorted(
[browser.browser_type for browser in possible_browsers])
except browser_finder_exceptions.BrowserFinderException as ex:
print >> sys.stderr, 'ERROR: ', ex
sys.exit(1)
print 'Available browsers:'
if len(browser_types) == 0:
print ' No devices were found.'
for device_name in sorted(browser_types.keys()):
print ' ', device_name
for browser_type in browser_types[device_name]:
print ' ', browser_type
sys.exit(0)
# Parse browser options.
self.browser_options.UpdateFromParseResults(self)
return ret
parser.parse_args = ParseArgs
return parser
def AppendExtraBrowserArgs(self, args):
self.browser_options.AppendExtraBrowserArgs(args)
def MergeDefaultValues(self, defaults):
for k, v in defaults.__dict__.items():
self.ensure_value(k, v)
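# Hedged usage sketch (not part of the original module); the argument values
# are illustrative assumptions:
#   options = BrowserFinderOptions()
#   parser = options.CreateParser(usage='%prog [options]')
#   parser.parse_args(['--browser=release', '-v'])
#   # parse_args fills 'options' in place and populates
#   # options.browser_options via BrowserOptions.UpdateFromParseResults().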
class BrowserOptions(object):
"""Options to be used for launching a browser."""
def __init__(self):
self.browser_type = None
self.show_stdout = False
# When set to True, the browser will use the default profile. Telemetry
# will not provide an alternate profile directory.
self.dont_override_profile = False
self.profile_dir = None
self.profile_type = None
self._extra_browser_args = set()
self.extra_wpr_args = []
self.wpr_mode = wpr_modes.WPR_OFF
self.netsim = None
# The amount of time Telemetry should wait for the browser to start.
# This property is not exposed as a command line option.
self._browser_startup_timeout = 30
self.disable_background_networking = True
self.no_proxy_server = False
self.browser_user_agent_type = None
self.clear_sytem_cache_for_browser_and_profile_on_start = False
self.startup_url = 'about:blank'
# Background pages of built-in component extensions can interfere with
# performance measurements.
self.disable_component_extensions_with_background_pages = True
# Disable default apps.
self.disable_default_apps = True
# Whether to use the new code path for choosing an ephemeral port for
# DevTools. The bots set this to true. When Chrome 37 reaches stable,
# remove this setting and the old code path. http://crbug.com/379980
self.use_devtools_active_port = False
def __repr__(self):
return str(sorted(self.__dict__.items()))
def IsCrosBrowserOptions(self):
return False
@classmethod
def AddCommandLineArgs(cls, parser):
############################################################################
# Please do not add any more options here without first discussing with #
# a telemetry owner. This is not the right place for platform-specific #
# options. #
############################################################################
group = optparse.OptionGroup(parser, 'Browser options')
profile_choices = profile_types.GetProfileTypes()
group.add_option('--profile-type',
dest='profile_type',
type='choice',
default='clean',
choices=profile_choices,
help=('The user profile to use. A clean profile is used by default. '
'Supported values: ' + ', '.join(profile_choices)))
group.add_option('--profile-dir',
dest='profile_dir',
help='Profile directory to launch the browser with. '
'A clean profile is used by default')
group.add_option('--extra-browser-args',
dest='extra_browser_args_as_string',
help='Additional arguments to pass to the browser when it starts')
group.add_option('--extra-wpr-args',
dest='extra_wpr_args_as_string',
help=('Additional arguments to pass to Web Page Replay. '
'See third_party/webpagereplay/replay.py for usage.'))
group.add_option('--netsim', default=None, type='choice',
choices=net_configs.NET_CONFIG_NAMES,
help=('Run benchmark under simulated network conditions. '
'Will prompt for sudo. Supported values: ' +
', '.join(net_configs.NET_CONFIG_NAMES)))
group.add_option('--show-stdout',
action='store_true',
help='When possible, will display the stdout of the process')
# This hidden option is to be removed, and the older code path deleted,
# once Chrome 37 reaches Stable. http://crbug.com/379980
group.add_option('--use-devtools-active-port',
action='store_true',
help=optparse.SUPPRESS_HELP)
parser.add_option_group(group)
group = optparse.OptionGroup(parser, 'Compatibility options')
group.add_option('--gtest_output',
help='Ignored argument for compatibility with runtest.py harness')
parser.add_option_group(group)
group = optparse.OptionGroup(parser, 'Synthetic gesture options')
synthetic_gesture_source_type_choices = ['default', 'mouse', 'touch']
group.add_option('--synthetic-gesture-source-type',
dest='synthetic_gesture_source_type',
default='default', type='choice',
choices=synthetic_gesture_source_type_choices,
        help='Specify the source type for synthetic gestures. Note that some ' +
'actions only support a specific source type. ' +
'Supported values: ' +
', '.join(synthetic_gesture_source_type_choices))
parser.add_option_group(group)
def UpdateFromParseResults(self, finder_options):
"""Copies our options from finder_options"""
browser_options_list = [
'extra_browser_args_as_string',
'extra_wpr_args_as_string',
'netsim',
'profile_dir',
'profile_type',
'show_stdout',
'synthetic_gesture_source_type',
'use_devtools_active_port',
]
for o in browser_options_list:
a = getattr(finder_options, o, None)
if a is not None:
setattr(self, o, a)
delattr(finder_options, o)
self.browser_type = finder_options.browser_type
if hasattr(self, 'extra_browser_args_as_string'): # pylint: disable=E1101
tmp = shlex.split(
self.extra_browser_args_as_string) # pylint: disable=E1101
self.AppendExtraBrowserArgs(tmp)
delattr(self, 'extra_browser_args_as_string')
if hasattr(self, 'extra_wpr_args_as_string'): # pylint: disable=E1101
tmp = shlex.split(
self.extra_wpr_args_as_string) # pylint: disable=E1101
self.extra_wpr_args.extend(tmp)
delattr(self, 'extra_wpr_args_as_string')
if self.profile_type == 'default':
self.dont_override_profile = True
if self.profile_dir and self.profile_type != 'clean':
logging.critical(
"It's illegal to specify both --profile-type and --profile-dir.\n"
"For more information see: http://goo.gl/ngdGD5")
sys.exit(1)
if self.profile_dir and not os.path.isdir(self.profile_dir):
logging.critical(
"Directory specified by --profile-dir (%s) doesn't exist "
"or isn't a directory.\n"
"For more information see: http://goo.gl/ngdGD5" % self.profile_dir)
sys.exit(1)
if not self.profile_dir:
self.profile_dir = profile_types.GetProfileDir(self.profile_type)
# This deferred import is necessary because browser_options is imported in
# telemetry/telemetry/__init__.py.
finder_options.browser_options = CreateChromeBrowserOptions(self)
@property
def extra_browser_args(self):
return self._extra_browser_args
@property
def browser_startup_timeout(self):
return self._browser_startup_timeout
@browser_startup_timeout.setter
def browser_startup_timeout(self, value):
self._browser_startup_timeout = value
def AppendExtraBrowserArgs(self, args):
if isinstance(args, list):
self._extra_browser_args.update(args)
else:
self._extra_browser_args.add(args)
def CreateChromeBrowserOptions(br_options):
browser_type = br_options.browser_type
if (platform.GetHostPlatform().GetOSName() == 'chromeos' or
(browser_type and browser_type.startswith('cros'))):
return CrosBrowserOptions(br_options)
return br_options
class ChromeBrowserOptions(BrowserOptions):
"""Chrome-specific browser options."""
def __init__(self, br_options):
super(ChromeBrowserOptions, self).__init__()
# Copy to self.
self.__dict__.update(br_options.__dict__)
class CrosBrowserOptions(ChromeBrowserOptions):
"""ChromeOS-specific browser options."""
def __init__(self, br_options):
super(CrosBrowserOptions, self).__init__(br_options)
# Create a browser with oobe property.
self.create_browser_with_oobe = False
# Clear enterprise policy before logging in.
self.clear_enterprise_policy = True
# Disable GAIA/enterprise services.
self.disable_gaia_services = True
self.auto_login = True
self.gaia_login = False
self.username = '[email protected]'
self.password = ''
def IsCrosBrowserOptions(self):
return True
| bsd-3-clause | 3,075,533,926,595,646,500 | 35.764846 | 80 | 0.645238 | false |
redreamality/broca | broca/tokenize/keyword/pos.py | 1 | 2250 | """
A naive keyword extractor which just pulls out nouns and noun phrases.
Was using the PerceptronTagger is _way_ faster than NLTK's default tagger, and more accurate to boot.
See <http://stevenloria.com/tutorial-state-of-the-art-part-of-speech-tagging-in-textblob/>.
However, it complicates the library's installation, and the spacy tagger is quite fast and good too.
"""
from broca.common.shared import spacy
from broca.tokenize import Tokenizer
from broca.tokenize.util import prune
CFG = {
('NNP', 'NNP'): 'NNP',
('NN', 'NN'): 'NNI',
('NNI', 'NN'): 'NNI',
('JJ', 'JJ'): 'JJ',
('JJ', 'NN'): 'NNI',
}
class POS(Tokenizer):
def tokenize(self, docs):
tags = ['NN', 'NNS', 'NNP', 'NNPS']
keywords = []
for doc in docs:
toks = spacy(doc, tag=True, parse=False, entity=False)
tagged = [(t.lower_.strip(), t.tag_) for t in toks]
kws = [t for t, tag in tagged if tag in tags]
kws += extract_noun_phrases(tagged)
keywords.append(kws)
return prune(keywords)
def extract_noun_phrases(tagged_doc):
"""
(From textblob)
"""
tags = _normalize_tags(tagged_doc)
merge = True
while merge:
merge = False
for x in range(0, len(tags) - 1):
t1 = tags[x]
t2 = tags[x + 1]
key = t1[1], t2[1]
value = CFG.get(key, '')
if value:
merge = True
tags.pop(x)
tags.pop(x)
match = '%s %s' % (t1[0], t2[0])
pos = value
tags.insert(x, (match, pos))
break
matches = [t[0] for t in tags if t[1] in ['NNP', 'NNI']]
return matches
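# Illustration (hedged, not from the original source): given tagged input
# [('web', 'NN'), ('page', 'NN')], the ('NN', 'NN') rule in CFG merges the
# pair into ('web page', 'NNI'), which passes the final NNP/NNI filter and is
# returned as a noun phrase.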
def _normalize_tags(chunk):
"""
(From textblob)
Normalize the corpus tags.
("NN", "NN-PL", "NNS") -> "NN"
"""
ret = []
for word, tag in chunk:
if tag == 'NP-TL' or tag == 'NP':
ret.append((word, 'NNP'))
continue
if tag.endswith('-TL'):
ret.append((word, tag[:-3]))
continue
if tag.endswith('S'):
ret.append((word, tag[:-1]))
continue
ret.append((word, tag))
return ret
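# Hedged usage sketch (not part of the original module); the printed result is
# an assumption and depends on the loaded spacy model and on prune():
#   keywords = POS().tokenize(['The new assembly plant opened in Detroit.'])
#   # -> one keyword list per input document, e.g. [['assembly plant', 'detroit']]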
| mit | -5,785,700,199,775,519,000 | 26.439024 | 101 | 0.519556 | false |
emfoundation/asset-manager | asset_manager/file_manager/tests/test_assets.py | 1 | 9922 | from django.conf import settings
from django.test import TestCase
from django.core.files.base import ContentFile
from file_manager.models import Asset, Folder
from file_manager import s3_utils
import logging
logging.basicConfig(
filename=settings.LOGFILE,
level=logging.INFO,
format=' %(asctime)s - %(levelname)s - %(message)s'
)
# logging.disable(logging.CRITICAL)
# Create your tests here.
class AssetModelFileNameTests(TestCase):
# Create tests for correct Asset filename
@classmethod
def setUpTestData(cls):
"""
Setup initial conditions for class test methods
"""
cls.test_folder, created = Folder.objects.get_or_create(name='Test Folder')
cls.test_folder.save()
def clear_models(self):
logging.info('Clearing models...')
for folder in Folder.objects.filter(parent=self.test_folder):
folder.delete()
def tearDown(self):
self.clear_models()
# ------------ Tests ------------ #
def test_filename_on_create_asset_without_file(self):
"""
Tests filename on create Asset without file
"""
logging.info('Test filename on create asset without a file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a = Asset(name='a', parent=f)
a.save()
self.assertEqual(a.file.name, '')
# should fail pre #200 fix
def test_filename_on_edit_parent_without_file(self):
"""
Test filename on edit parent without file
"""
logging.info('Test filename on edit parent without file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.save()
a.parent=f2
a.save()
self.assertEqual(a.file.name, '')
# should fail pre #200 fix
def test_filename_on_reverse_edit_parent_without_file(self):
"""
Test filename on reverse edit parent without file
"""
logging.info('Test filename on reverse edit parent without file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.save()
a.parent=f2
a.save()
a.parent=f
a.save()
self.assertEqual(a.file.name, '')
def test_filename_on_add_file(self):
"""
Test filename on add file
"""
logging.info('Test filename on add file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
self.assertEqual(a.file.name, str(f.id) + '/file.txt')
def test_filename_on_edit_parent_and_add_file(self):
"""
Test filename on edit parent and add file
"""
logging.info('Test filename on edit parent and add file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.save()
a.parent = f2
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
self.assertEqual(a.file.name, str(f2.id) + '/file.txt')
def test_filename_on_create_asset_with_file(self):
"""
Tests filename on create Asset
"""
logging.info('Test filename on create asset...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
self.assertEqual(a.file.name, str(f.id) + '/file.txt')
def test_filename_on_edit_parent_with_file(self):
"""
Test filename on edit parent with file
"""
logging.info('Test filename on edit parent with file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.parent = f2
a.save()
self.assertEqual(a.file.name, str(f2.id) + '/file.txt')
def test_filename_on_reverse_edit_parent_with_file(self):
"""
Test filename on reverse edit parent with file
"""
logging.info('Test filename on reverse edit parent with file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.parent = f2
a.save()
a.parent = f
a.save()
self.assertEqual(a.file.name, str(f.id) + '/file.txt')
def test_filename_on_remove_file(self):
"""
Test filename on remove file
"""
logging.info('Test filename on remove file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.file.delete()
self.assertEqual(a.file.name, None)
def test_filename_on_edit_parent_and_remove_file(self):
"""
Test filename on edit parent and remove file
"""
logging.info('Test filename on edit parent and remove file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.parent = f2
a.file.delete()
self.assertEqual(a.file.name, None)
class AssetModelTests(TestCase):
@classmethod
def setUpTestData(cls):
"""
Setup initial conditions for class test methods
"""
cls.test_folder, created = Folder.objects.get_or_create(name='Test Folder')
cls.test_folder.save()
def get_assets(self):
"""
        Returns a comma-separated string of all Asset file paths
"""
asset_list = []
assets = Asset.objects.all()
for asset in assets:
asset_list.append(asset.file.name)
return ','.join(asset_list)
def clear_models(self):
logging.info('Clearing models...')
for folder in Folder.objects.filter(parent=self.test_folder):
folder.delete()
def tearDown(self):
self.clear_models()
# ------------ Tests ------------ #
def test_create_asset(self):
"""
Tests Asset creation
"""
logging.info('Test create asset...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
self.assertEqual(self.get_assets(), str(a.parent.id) + '/file.txt')
def test_update_asset_file(self):
"""
Test update Asset file only
"""
logging.info('Test update asset file...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.file.save('file2.txt', ContentFile('Content2'.encode('utf-8')))
self.assertEqual(self.get_assets(), str(a.parent.id) + '/file2.txt')
def test_update_asset_parent(self):
"""
Test update Asset parent only
"""
logging.info('Test update asset parent...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.parent = f2
a.save()
self.assertEqual(self.get_assets(), str(a.parent.id) + '/file.txt')
def test_update_asset_file_and_parent(self):
"""
Test update Asset file and parent simultaneously
"""
logging.info('Test update asset file and parent simultaneously...')
f = Folder(name='f', parent=self.test_folder)
f.save()
f2 = Folder(name='f2', parent=self.test_folder)
f2.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.parent = f2
a.file.save('file2.txt', ContentFile('Content2'.encode('utf-8')))
self.assertEqual(self.get_assets(), str(a.parent.id) + '/file2.txt')
def test_delete_asset(self):
"""
Test delete Asset
"""
logging.info('Test delete asset...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a = Asset(name='a', parent=f)
a.file.save('file.txt', ContentFile('Content'.encode('utf-8')))
a.delete()
self.assertEqual(self.get_assets(), '')
def test_delete_folder(self):
"""
        Test delete Folder; contained Assets should be deleted along with it
"""
logging.info('Test delete folder...')
f = Folder(name='f', parent=self.test_folder)
f.save()
a1 = Asset(name='a1', parent=f)
a1.file.save('file1.txt', ContentFile('Content1'.encode('utf-8')))
a2 = Asset(name='a2', parent=f)
a2.file.save('file2.txt', ContentFile('Content2'.encode('utf-8')))
a3 = Asset(name='a3', parent=f)
a3.file.save('file3.txt', ContentFile('Content3'.encode('utf-8')))
f.delete()
self.assertEqual(self.get_assets(), '')
| gpl-3.0 | -2,933,353,532,974,946,300 | 27.59366 | 83 | 0.566015 | false |
psiinon/addons-server | src/olympia/discovery/management/commands/extract_content_strings.py | 1 | 3587 | # -*- coding: utf-8 -*-
import json
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import requests
import olympia.core.logger
log = olympia.core.logger.getLogger('z.discovery.extract_content_strings')
class BaseAPIParser():
def get_results_content(self):
results = self.fetch_strings_from_api()
log.info(f'Building "{self.l10n_comment}" strings.')
return '\n'.join(
self.build_output_for_item(item) for item in results)
def fetch_strings_from_api(self):
log.info(f'Fetching {self.l10n_comment} from the API.')
response = requests.get(self.api)
if response.status_code != 200:
raise CommandError(f'Fetching {self.l10n_comment} failed.')
return json.loads(response.content)['results']
def _get_item(self, item, field):
# A sub field is selected with "." e.g. addon.authors.name
fields = field.split('.', maxsplit=1)
sub_item = item.get(fields[0])
if len(fields) == 1 or not sub_item:
# Easy case, no subfields or empty/missing already.
return sub_item
if isinstance(sub_item, list):
# It's a list, but we're selecting sub fields so iterate through.
values = []
for sub_sub in sub_item:
value = self._get_item(sub_sub, fields[1])
# we don't want lists of lists, so flatten along the way
if isinstance(value, list):
values.extend(value)
else:
values.append(value)
return values
else:
# We just need to select the item from a sub field.
return self._get_item(sub_item, fields[1])
def build_output_for_item(self, item):
output = []
for field in self.fields:
values = self._get_item(item, field)
if not isinstance(values, list):
values = [values]
for value in values:
if value:
output.append(self.build_output_for_single_value(value))
return ''.join(output)
def build_output_for_single_value(self, value):
output = (
'{# L10n: %s #}\n'
'{%% trans %%}%s{%% endtrans %%}\n' % (self.l10n_comment, value))
return output
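# Hedged illustration of the dotted-field traversal implemented by
# BaseAPIParser._get_item: dicts are descended and lists are mapped across
# and flattened. The item payload below is made up for the example.
def _example_get_item_traversal():
    parser = BaseAPIParser()
    item = {'addon': {'authors': [{'name': 'A'}, {'name': 'B'}]}}
    assert parser._get_item(item, 'addon.authors.name') == ['A', 'B']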
class DiscoItemAPIParser(BaseAPIParser):
api = settings.DISCOVERY_EDITORIAL_CONTENT_API
l10n_comment = 'editorial content for the discovery pane.'
fields = ('custom_heading', 'custom_description')
class SecondaryHeroShelfAPIParser(BaseAPIParser):
api = settings.SECONDARY_HERO_EDITORIAL_CONTENT_API
l10n_comment = 'editorial content for the secondary hero shelves.'
fields = ('headline', 'description', 'cta.text', 'modules.description',
'modules.cta.text')
class Command(BaseCommand):
help = ('Extract editorial disco pane and secondary hero shelf content '
'that need to be translated.')
def handle(self, *args, **options):
disco = DiscoItemAPIParser()
secondary_hero = SecondaryHeroShelfAPIParser()
results_content = (
disco.get_results_content() + '\n' +
secondary_hero.get_results_content())
self.generate_file_from_api_results(results_content)
def generate_file_from_api_results(self, results_content):
log.info('Writing Editorial content strings file.')
with open(settings.EDITORIAL_CONTENT_FILENAME, 'wb') as f:
f.write(results_content.encode('utf-8'))
| bsd-3-clause | -8,826,200,749,400,742,000 | 35.979381 | 77 | 0.609702 | false |
plamut/ggrc-core | test/selenium/src/lib/constants/url.py | 2 | 1194 | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Constants for URLs construction."""
# pylint: disable=wildcard-import
# pylint: disable=unused-wildcard-import
from lib.constants.objects import * # noqa; the names are later exported
# URL parts for objects
API = "api"
DASHBOARD = "dashboard"
ADMIN_DASHBOARD = "admin"
AUDIT = AUDITS + "/{0}"
RELATIONSHIPS = "relationships"
OBJECT_OWNERS = "object_owners"
CONTACTS = "contacts"
QUERY = "query"
# URL parts for user
DEFAULT_EMAIL_DOMAIN = "example.com"
DEFAULT_USER_EMAIL = "user@" + DEFAULT_EMAIL_DOMAIN
DEFAULT_USER_HREF = "/".join([API, PEOPLE, str(1)])
class Widget(object):
"""URL's parts for widgets."""
# pylint: disable=too-few-public-methods
# admin dashboard page
CUSTOM_ATTRIBUTES = "#custom_attribute_widget"
EVENTS = "#events_list_widget"
ROLES = "#roles_list_widget"
PEOPLE = "#people_list_widget"
# widgets
INFO = "#info_widget"
AUDITS = "#audit_widget"
ASSESSMENTS = "#assessment_widget"
ASSESSMENT_TEMPLATES = "#assessment_template_widget"
CONTROLS = "#control_widget"
ISSUES = "#issue_widget"
PROGRAMS = "#program_widget"
| apache-2.0 | -1,714,638,477,626,481,400 | 28.121951 | 78 | 0.706868 | false |
pytest-dev/py | py/_path/common.py | 4 | 14818 | """
"""
import warnings
import os
import sys
import posixpath
import fnmatch
import py
# Moved from local.py.
iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
try:
# FileNotFoundError might happen in py34, and is not available with py27.
import_errors = (ImportError, FileNotFoundError)
except NameError:
import_errors = (ImportError,)
try:
from os import fspath
except ImportError:
def fspath(path):
"""
Return the string representation of the path.
If str or bytes is passed in, it is returned unchanged.
This code comes from PEP 519, modified to support earlier versions of
python.
This is required for python < 3.6.
"""
if isinstance(path, (py.builtin.text, py.builtin.bytes)):
return path
# Work from the object's type to match method resolution of other magic
# methods.
path_type = type(path)
try:
return path_type.__fspath__(path)
except AttributeError:
if hasattr(path_type, '__fspath__'):
raise
try:
import pathlib
except import_errors:
pass
else:
if isinstance(path, pathlib.PurePath):
return py.builtin.text(path)
raise TypeError("expected str, bytes or os.PathLike object, not "
+ path_type.__name__)
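# Usage sketch for the fspath backport above (path values are hypothetical):
# plain strings pass through unchanged, while objects implementing
# __fspath__ (such as the path classes below) are unwrapped.
#
#     fspath('/tmp/x') == '/tmp/x'
#     fspath(py.path.local('/tmp/x')) == '/tmp/x'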
class Checkers:
_depend_on_existence = 'exists', 'link', 'dir', 'file'
def __init__(self, path):
self.path = path
def dir(self):
raise NotImplementedError
def file(self):
raise NotImplementedError
def dotfile(self):
return self.path.basename.startswith('.')
def ext(self, arg):
if not arg.startswith('.'):
arg = '.' + arg
return self.path.ext == arg
def exists(self):
raise NotImplementedError
def basename(self, arg):
return self.path.basename == arg
def basestarts(self, arg):
return self.path.basename.startswith(arg)
def relto(self, arg):
return self.path.relto(arg)
def fnmatch(self, arg):
return self.path.fnmatch(arg)
def endswith(self, arg):
return str(self.path).endswith(arg)
def _evaluate(self, kw):
for name, value in kw.items():
invert = False
meth = None
try:
meth = getattr(self, name)
except AttributeError:
if name[:3] == 'not':
invert = True
try:
meth = getattr(self, name[3:])
except AttributeError:
pass
if meth is None:
raise TypeError(
"no %r checker available for %r" % (name, self.path))
try:
if py.code.getrawcode(meth).co_argcount > 1:
if (not meth(value)) ^ invert:
return False
else:
if bool(value) ^ bool(meth()) ^ invert:
return False
except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
# EBUSY feels not entirely correct,
            # but it's kind of necessary since ENOMEDIUM
# is not accessible in python
for name in self._depend_on_existence:
if name in kw:
if kw.get(name):
return False
name = 'not' + name
if name in kw:
if not kw.get(name):
return False
return True
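# Note on the checker protocol above: a 'not' prefix inverts any checker,
# so for an existing path, path.check(notdir=1) behaves like
# 'not path.check(dir=1)', and path.check(file=1, link=1) requires both.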
class NeverRaised(Exception):
pass
class PathBase(object):
""" shared implementation for filesystem path objects."""
Checkers = Checkers
def __div__(self, other):
return self.join(fspath(other))
__truediv__ = __div__ # py3k
def basename(self):
""" basename part of path. """
return self._getbyspec('basename')[0]
basename = property(basename, None, None, basename.__doc__)
def dirname(self):
""" dirname part of path. """
return self._getbyspec('dirname')[0]
dirname = property(dirname, None, None, dirname.__doc__)
def purebasename(self):
""" pure base name of the path."""
return self._getbyspec('purebasename')[0]
purebasename = property(purebasename, None, None, purebasename.__doc__)
def ext(self):
""" extension of the path (including the '.')."""
return self._getbyspec('ext')[0]
ext = property(ext, None, None, ext.__doc__)
def dirpath(self, *args, **kwargs):
""" return the directory path joined with any given path arguments. """
return self.new(basename='').join(*args, **kwargs)
def read_binary(self):
""" read and return a bytestring from reading the path. """
with self.open('rb') as f:
return f.read()
def read_text(self, encoding):
""" read and return a Unicode string from reading the path. """
with self.open("r", encoding=encoding) as f:
return f.read()
def read(self, mode='r'):
""" read and return a bytestring from reading the path. """
with self.open(mode) as f:
return f.read()
def readlines(self, cr=1):
""" read and return a list of lines from the path. if cr is False, the
newline will be removed from the end of each line. """
if sys.version_info < (3, ):
mode = 'rU'
else: # python 3 deprecates mode "U" in favor of "newline" option
mode = 'r'
if not cr:
content = self.read(mode)
return content.split('\n')
else:
f = self.open(mode)
try:
return f.readlines()
finally:
f.close()
def load(self):
""" (deprecated) return object unpickled from self.read() """
f = self.open('rb')
try:
import pickle
return py.error.checked_call(pickle.load, f)
finally:
f.close()
def move(self, target):
""" move this path to target. """
if target.relto(self):
raise py.error.EINVAL(
target,
"cannot move path into a subdirectory of itself")
try:
self.rename(target)
except py.error.EXDEV: # invalid cross-device link
self.copy(target)
self.remove()
def __repr__(self):
""" return a string representation of this path. """
return repr(str(self))
def check(self, **kw):
""" check a path for existence and properties.
Without arguments, return True if the path exists, otherwise False.
valid checkers::
file=1 # is a file
file=0 # is not a file (may not even exist)
dir=1 # is a dir
link=1 # is a link
exists=1 # exists
You can specify multiple checker definitions, for example::
path.check(file=1, link=1) # a link pointing to a file
"""
if not kw:
kw = {'exists': 1}
return self.Checkers(self)._evaluate(kw)
def fnmatch(self, pattern):
"""return true if the basename/fullname matches the glob-'pattern'.
valid pattern characters::
* matches everything
? matches any single character
[seq] matches any character in seq
[!seq] matches any char not in seq
If the pattern contains a path-separator then the full path
is used for pattern matching and a '*' is prepended to the
pattern.
if the pattern doesn't contain a path-separator the pattern
is only matched against the basename.
"""
return FNMatcher(pattern)(self)
def relto(self, relpath):
""" return a string which is the relative part of the path
to the given 'relpath'.
"""
if not isinstance(relpath, (str, PathBase)):
raise TypeError("%r: not a string or path object" %(relpath,))
strrelpath = str(relpath)
if strrelpath and strrelpath[-1] != self.sep:
strrelpath += self.sep
#assert strrelpath[-1] == self.sep
#assert strrelpath[-2] != self.sep
strself = self.strpath
if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
if os.path.normcase(strself).startswith(
os.path.normcase(strrelpath)):
return strself[len(strrelpath):]
elif strself.startswith(strrelpath):
return strself[len(strrelpath):]
return ""
def ensure_dir(self, *args):
""" ensure the path joined with args is a directory. """
return self.ensure(*args, **{"dir": True})
def bestrelpath(self, dest):
""" return a string which is a relative path from self
(assumed to be a directory) to dest such that
            self.join(bestrelpath) == dest; if no such
            path can be determined, return dest.
"""
try:
if self == dest:
return os.curdir
base = self.common(dest)
if not base: # can be the case on windows
return str(dest)
self2base = self.relto(base)
reldest = dest.relto(base)
if self2base:
n = self2base.count(self.sep) + 1
else:
n = 0
l = [os.pardir] * n
if reldest:
l.append(reldest)
target = dest.sep.join(l)
return target
except AttributeError:
return str(dest)
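    # Hedged illustration with hypothetical paths: bestrelpath from
    # local('/a/b') to local('/a/c/d') finds the common base '/a' and
    # returns '../c/d'; if there is no common base (e.g. across Windows
    # drives), the destination is returned as a plain string.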
def exists(self):
return self.check()
def isdir(self):
return self.check(dir=1)
def isfile(self):
return self.check(file=1)
def parts(self, reverse=False):
""" return a root-first list of all ancestor directories
plus the path itself.
"""
current = self
l = [self]
while 1:
last = current
current = current.dirpath()
if last == current:
break
l.append(current)
if not reverse:
l.reverse()
return l
def common(self, other):
""" return the common part shared with the other path
or None if there is no common part.
"""
last = None
for x, y in zip(self.parts(), other.parts()):
if x != y:
return last
last = x
return last
def __add__(self, other):
""" return new path object with 'other' added to the basename"""
return self.new(basename=self.basename+str(other))
def __cmp__(self, other):
""" return sort value (-1, 0, +1). """
try:
return cmp(self.strpath, other.strpath)
except AttributeError:
return cmp(str(self), str(other)) # self.path, other.path)
def __lt__(self, other):
try:
return self.strpath < other.strpath
except AttributeError:
return str(self) < str(other)
def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
""" yields all paths below the current one
fil is a filter (glob pattern or callable), if not matching the
path will not be yielded, defaulting to None (everything is
returned)
rec is a filter (glob pattern or callable) that controls whether
a node is descended, defaulting to None
            ignore is an Exception class that is ignored when calling listdir()
on any of the paths (by default, all exceptions are reported)
bf if True will cause a breadthfirst search instead of the
default depthfirst. Default: False
sort if True will sort entries within each directory level.
"""
for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
yield x
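    # Usage sketch (paths are hypothetical): yield every .py file below
    # /src, skipping hidden directories, in sorted breadth-first order:
    #
    #     for p in py.path.local('/src').visit(
    #             fil='*.py',
    #             rec=lambda d: not d.basename.startswith('.'),
    #             bf=True, sort=True):
    #         print(p)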
def _sortlist(self, res, sort):
if sort:
if hasattr(sort, '__call__'):
warnings.warn(DeprecationWarning(
"listdir(sort=callable) is deprecated and breaks on python3"
), stacklevel=3)
res.sort(sort)
else:
res.sort()
def samefile(self, other):
""" return True if other refers to the same stat object as self. """
return self.strpath == str(other)
def __fspath__(self):
return self.strpath
class Visitor:
def __init__(self, fil, rec, ignore, bf, sort):
if isinstance(fil, py.builtin._basestring):
fil = FNMatcher(fil)
if isinstance(rec, py.builtin._basestring):
self.rec = FNMatcher(rec)
elif not hasattr(rec, '__call__') and rec:
self.rec = lambda path: True
else:
self.rec = rec
self.fil = fil
self.ignore = ignore
self.breadthfirst = bf
self.optsort = sort and sorted or (lambda x: x)
def gen(self, path):
try:
entries = path.listdir()
except self.ignore:
return
rec = self.rec
dirs = self.optsort([p for p in entries
if p.check(dir=1) and (rec is None or rec(p))])
if not self.breadthfirst:
for subdir in dirs:
for p in self.gen(subdir):
yield p
for p in self.optsort(entries):
if self.fil is None or self.fil(p):
yield p
if self.breadthfirst:
for subdir in dirs:
for p in self.gen(subdir):
yield p
class FNMatcher:
def __init__(self, pattern):
self.pattern = pattern
def __call__(self, path):
pattern = self.pattern
if (pattern.find(path.sep) == -1 and
iswin32 and
pattern.find(posixpath.sep) != -1):
# Running on Windows, the pattern has no Windows path separators,
# and the pattern has one or more Posix path separators. Replace
# the Posix path separators with the Windows path separator.
pattern = pattern.replace(posixpath.sep, path.sep)
if pattern.find(path.sep) == -1:
name = path.basename
else:
name = str(path) # path.strpath # XXX svn?
if not os.path.isabs(pattern):
pattern = '*' + path.sep + pattern
return fnmatch.fnmatch(name, pattern)
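# Illustrative behaviour of FNMatcher (paths are hypothetical): a pattern
# without a separator is matched against basenames only, so
# FNMatcher('*.txt')(local('/any/dir/notes.txt')) is True; a pattern with a
# separator such as 'docs/*.txt' is matched against the full path with '*'
# prepended, so it accepts local('/repo/docs/notes.txt').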
| mit | -7,689,056,048,696,364,000 | 31.283224 | 82 | 0.537522 | false |
KeepSafe/zendesk-helpcenter-cms | src/test/fixtures/__init__.py | 1 | 1176 | import model
def simple_category():
category = model.Category('category', 'category desc', 'category')
category.meta = {'id': 'category id', 'webtranslateit_ids': {'content': 'category translate id'}}
section = model.Section(category, 'section', 'section desc', 'section')
section.meta = {'id': 'section id', 'webtranslateit_ids': {'content': 'section translate id'}}
article = model.Article(section, 'article', 'body', 'article')
article.meta = {'id': 'article id',
'webtranslateit_ids': {'body': 'body translate id', 'content': 'article translate id'}}
category.sections.append(section)
section.articles.append(article)
return category
def category_with_translations():
category = simple_category()
group_translation = model.GroupTranslation('pl', 'dummy translation name', 'dummy translation description')
category.translations.append(group_translation)
category.sections[0].translations.append(group_translation)
article_translation = model.ArticleTranslation('pl', 'dummy name', 'dummy body')
category.sections[0].articles[0].translations.append(article_translation)
return category
| apache-2.0 | 9,108,598,873,475,510,000 | 48 | 111 | 0.69983 | false |
Ray1235/CoDMayaTools | CoDMayaTools.py | 1 | 161584 | # Copyright 2016, Ray1235
# CoDMayaTools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------------------
#
# Change log now available on Github!
#
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------- Customization (You can change these values!) ----------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# Maximum number of warnings to show per export
MAX_WARNINGS_SHOWN = 1
# Number of slots in the export windows
EXPORT_WINDOW_NUMSLOTS = 100
# To export any black vertices as white, set to 'True'. Otherwise, set to 'False'.
CONVERT_BLACK_VERTS_TO_WHITE = False
# Enable Support for ExportX/Export2Bin
USE_EXPORT_X = False
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------- Global ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
import os
import maya.cmds as cmds
import maya.mel as mel
import math
import sys
import datetime
import os.path
import traceback
import maya.OpenMaya as OpenMaya
import maya.OpenMayaAnim as OpenMayaAnim
import urllib2
import socket
import subprocess
import webbrowser
import Queue
import _winreg as reg
import time
import struct
import shutil
import zipfile
import re
import json
from PyCoD import xmodel as xModel
from PyCoD import xanim as xAnim
from array import array
from subprocess import Popen, PIPE, STDOUT
WarningsDuringExport = 0 # Number of warnings shown during current export
CM_TO_INCH = 0.3937007874015748031496062992126 # 1cm = 50/127in
M_PI = 3.14159265359
FILE_VERSION = 2.9
VERSION_CHECK_URL = "https://raw.githubusercontent.com/Ray1235/CoDMayaTools/master/version"
# Registry path for global data storage
GLOBAL_STORAGE_REG_KEY = (reg.HKEY_CURRENT_USER, "Software\\CoDMayaTools")
# name : control code name, control friendly name, data storage node name, refresh function, export function
OBJECT_NAMES = {'menu' : ["CoDMayaToolsMenu", "Call of Duty Tools", None, None, None],
'progress' : ["CoDMayaToolsProgressbar", "Progress", None, None, None],
'xmodel': ["CoDMayaXModelExportWindow", "Export XModel", "XModelExporterInfo", "RefreshXModelWindow", "ExportXModel"],
'xanim' : ["CoDMayaXAnimExportWindow", "Export XAnim", "XAnimExporterInfo", "RefreshXAnimWindow", "ExportXAnim"],
'xcam' : ["CoDMayaXCamExportWindow", "Export XCam", "XCamExporterInfo", "RefreshXCamWindow", "ExportXCam"]}
# Working Directory
WORKING_DIR = os.path.dirname(os.path.realpath(__file__))
# Current Game
currentGame = "none"
# Format (JOINT, PARENTNAME) : NEWNAME
# Leave parent as None to rename regardless of the parent.
RENAME_DICTONARY = {("tag_weapon", "tag_torso") : "tag_weapon_right",
("tag_weapon1", "tag_torso") : "tag_weapon_left",
("j_gun", None) : "tag_weapon",
("j_gun1", None) : "tag_weapon_le",
("tag_flash1", "j_gun1") : "tag_flash_le",
("tag_brass1", None) : "tag_brass_le",
}
# Tags to attach
GUN_BASE_TAGS = ["j_gun", "j_gun1", "j_gun", "j_gun1", "tag_weapon", "tag_weapon1"]
VIEW_HAND_TAGS = ["t7:tag_weapon_right", "t7:tag_weapon_left", "tag_weapon", "tag_weapon1", "tag_weapon_right", "tag_weapon_left"]
# Supported xModel Versions for importing.
SUPPORTED_XMODELS = [25, 62]
# xModel Versions based off games
XMODEL_VERSION = {
"CoD1" : 5,
"CoD2" : 6,
"CoD4" : 6,
"CoD5" : 6,
"CoD7" : 6,
"CoD12": 7
}
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------ Init ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateMenu():
cmds.setParent(mel.eval("$temp1=$gMainWindow"))
if cmds.control(OBJECT_NAMES['menu'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['menu'][0], menu=True)
menu = cmds.menu(OBJECT_NAMES['menu'][0],
label=OBJECT_NAMES["menu"][1],tearOff=True)
# Export tools
cmds.menuItem(label=OBJECT_NAMES['xmodel'][1]+"...",
command="CoDMayaTools.ShowWindow('xmodel')")
cmds.menuItem(label=OBJECT_NAMES['xanim'][1]+"...",
command="CoDMayaTools.ShowWindow('xanim')")
cmds.menuItem(label=OBJECT_NAMES['xcam'][1]+"...",
command="CoDMayaTools.ShowWindow('xcam')")
# Import tools
cmds.menuItem(divider=True)
cmds.menuItem(label="Import XModel...",
subMenu=True)
cmds.menuItem(label="...from CoD7",
command="CoDMayaTools.ImportXModel('CoD7')")
cmds.menuItem(label="...from CoD5",
command="CoDMayaTools.ImportXModel('CoD5')")
cmds.menuItem(label="...from CoD4",
command="CoDMayaTools.ImportXModel('CoD4')")
cmds.setParent(menu,
menu=True)
cmds.menuItem(divider=True)
# Utilities Menu
util_menu = cmds.menuItem(label="Utilities",
subMenu=True)
cmds.menuItem(divider=True)
    # Ray's Camera Animation Toolkit
cmds.menuItem(label="Ray's Camera Animation Toolkit",
subMenu=True)
cmds.menuItem(label="Mark as camera",
command="CoDMayaTools.setObjectAlias('camera')")
cmds.menuItem(label="Mark as weapon",
command="CoDMayaTools.setObjectAlias('weapon')")
cmds.menuItem(divider=True)
cmds.menuItem(label="Generate camera animation",
command="CoDMayaTools.GenerateCamAnim()")
cmds.menuItem(divider=True)
cmds.menuItem(label="Remove camera animation in current range",
command=RemoveCameraKeys)
cmds.menuItem(label="Reset camera",
command=RemoveCameraAnimData)
cmds.setParent(util_menu,
menu=True)
# Attach Weapon To Rig
cmds.menuItem(divider=True)
cmds.menuItem(label="Attach Weapon to Rig", command=lambda x:WeaponBinder())
# IWIxDDS
cmds.menuItem(divider=True)
cmds.menuItem(label="Convert IWI to DDS",
command=lambda x:IWIToDDSUser())
# Viewmodel Tools
cmds.menuItem(label="ViewModel Tools", subMenu=True)
cmds.menuItem(label="Create New Gunsleeve Maya File",
command=lambda x:CreateNewGunsleeveMayaFile())
cmds.menuItem(label="Create New ViewModel Rig File",
command=lambda x:CreateNewViewmodelRigFile())
cmds.menuItem(label="Switch Gun in Current Rig File",
command=lambda x:SwitchGunInCurrentRigFile())
cmds.setParent(menu, menu=True)
# Settings
cmds.menuItem(divider=True)
settings_menu = cmds.menuItem(label="Settings", subMenu=True)
# Game/Game Folder Settings
cmds.menuItem(label="Game Settings", subMenu=True)
cmds.menuItem(label="Set CoD 1 Root Folder", command=lambda x:SetRootFolder(None, 'CoD1'))
cmds.menuItem(label="Set CoD 2 Root Folder", command=lambda x:SetRootFolder(None, 'CoD2'))
cmds.menuItem(label="Set MW Root Folder", command=lambda x:SetRootFolder(None, 'CoD4'))
cmds.menuItem(label="Set WaW Root Folder", command=lambda x:SetRootFolder(None, 'CoD5'))
cmds.menuItem(label="Set Bo1 Root Folder", command=lambda x:SetRootFolder(None, 'CoD7'))
cmds.menuItem(label="Set Bo3 Root Folder", command=lambda x:SetRootFolder(None, 'CoD12'))
cmds.menuItem(divider=True)
cmds.radioMenuItemCollection()
games = GetCurrentGame(True)
cmds.menuItem( label="Current Game:")
cmds.menuItem( label='CoD 1', radioButton=games["CoD1"], command=lambda x:SetCurrentGame("CoD1"))
cmds.menuItem( label='CoD 2', radioButton=games["CoD2"], command=lambda x:SetCurrentGame("CoD2"))
cmds.menuItem( label='CoD MW', radioButton=games["CoD4"], command=lambda x:SetCurrentGame("CoD4"))
cmds.menuItem( label='CoD WaW', radioButton=games["CoD5"], command=lambda x:SetCurrentGame("CoD5"))
cmds.menuItem( label='CoD Bo1', radioButton=games["CoD7"], command=lambda x:SetCurrentGame("CoD7"))
cmds.menuItem( label='CoD Bo3', radioButton=games["CoD12"] , command=lambda x:SetCurrentGame("CoD12"))
cmds.setParent(settings_menu, menu=True)
# ExportX/Export2Bin Options (Deprecated)
if(USE_EXPORT_X):
cmds.menuItem("E2B", label='Use ExportX', checkBox=QueryToggableOption('E2B'), command=lambda x:SetToggableOption('E2B') )
cmds.menuItem(label="Set Path to ExportX", command=lambda x:SetExport2Bin())
# Misc. Options.
cmds.menuItem(divider=True)
cmds.menuItem("AutomaticRename", label='Automatically rename joints (J_GUN, etc.)', checkBox=QueryToggableOption('AutomaticRename'), command=lambda x:SetToggableOption('AutomaticRename') )
cmds.menuItem("PrefixNoteType", label='Automatically prefix notetracks with type (sndnt# or rmbnt#)', checkBox=QueryToggableOption('PrefixNoteType'), command=lambda x:SetToggableOption('PrefixNoteType') )
cmds.menuItem("MeshMerge", label='Merge Meshes on export', checkBox=QueryToggableOption('MeshMerge'), command=lambda x:SetToggableOption('MeshMerge') )
cmds.menuItem("AutoUpdate", label='Auto Updates', checkBox=QueryToggableOption('AutoUpdate'), command=lambda x:SetToggableOption('AutoUpdate') )
# cmds.menuItem("PrintExport", label='Print xmodel_export information.', checkBox=QueryToggableOption('PrintExport'), command=lambda x:SetToggableOption('PrintExport')" )
cmds.setParent(menu, menu=True)
cmds.menuItem(divider=True)
# For easy script updating
cmds.menuItem(label="Reload Script", command="reload(CoDMayaTools)")
# Tools Info
cmds.menuItem(label="About", command=lambda x:AboutWindow())
def SetCurrentGame(game=None):
if game is None:
return
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
except WindowsError:
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
reg.SetValueEx(storageKey, "CurrentGame", 0, reg.REG_SZ, game )
def GetCurrentGame(return_dict=False):
games = {
"CoD1" : False,
"CoD2" : False,
"CoD4" : False,
"CoD5" : False,
"CoD7" : False,
"CoD12" : False
}
    # Try to get the currently set game.
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
game = reg.QueryValueEx(storageKey, "CurrentGame")[0]
except WindowsError:
# Failed, create it and fall back to Bo3
        storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
        try:
            reg.SetValueEx(storageKey, "CurrentGame", 0, reg.REG_SZ, "CoD12")
game = reg.QueryValueEx(storageKey, "CurrentGame")[0]
except:
# Fall back to Black Ops III if a game isn't set, and we can't create one.
game = "CoD12"
games[game] = True
    # Return dictionary for radio buttons
if return_dict:
return games
# Return current game for everything else
else:
return game
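# Illustrative sketch (not part of the original tool): the expected round
# trip through the registry helpers above. "CoD5" is an arbitrary example;
# calling this overwrites the stored setting.
def ExampleCurrentGameRoundTrip():
    SetCurrentGame("CoD5")
    assert GetCurrentGame() == "CoD5"
    assert GetCurrentGame(return_dict=True)["CoD5"]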
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------- Import Common --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ImportFileSelectDialog(codRootPath, type):
print(codRootPath)
importFrom = None
if cmds.about(version=True)[:4] == "2012": # There is a bug in later versions of Maya with the file browser dialog and files with no extension
importFrom = cmds.fileDialog2(fileMode=1, fileFilter="%s Files (*)" % type, caption="Import %s" % type, startingDirectory=os.path.join(codRootPath, "raw/%s/" % type.lower()))
else:
importFrom = cmds.fileDialog2(fileMode=1, dialogStyle=1, fileFilter="%s Files (*)" % type, caption="Import %s" % type, startingDirectory=os.path.join(codRootPath, "raw/%s/" % type.lower()))
if importFrom == None or len(importFrom) == 0 or importFrom[0].strip() == "":
return None
path = importFrom[0].strip()
pathSplit = os.path.splitext(path) # Fix bug with Maya 2013
if pathSplit[1] == ".*":
        path = pathSplit[0]
return path
def UnitQuaternionToDegrees(x, y, z):
w = math.sqrt(1 - x*x - y*y - z*z) # The 4th component of a quaternion can be found from the other 3 components in unit quaternions
euler = OpenMaya.MQuaternion(x, y, z, w).asEulerRotation()
return (math.degrees(euler.x), math.degrees(euler.y), math.degrees(euler.z))
def ReadJointRotation(f):
rot = struct.unpack('<hhh', f.read(6))
# Rotation is stored as a unit quaternion, but only the X, Y, and Z components are given, as integers scaled to -32768 to 32767
rot = UnitQuaternionToDegrees(rot[0] / 32768.0, rot[1] / 32768.0, rot[2] / 32768.0)
return rot
def ReadNullTerminatedString(f):
byte = f.read(1)
string = ""
while struct.unpack('B', byte)[0] != 0:
string += byte
byte = f.read(1)
return string
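# Hedged sketch of the wire format ReadNullTerminatedString expects,
# exercised against an in-memory buffer instead of a real asset file.
def ExampleReadNullTerminatedString():
    import io
    f = io.BytesIO(b"tag_origin\x00")
    assert ReadNullTerminatedString(f) == "tag_origin"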
def AutoCapsJointName(name):
if name.startswith("tag"):
return name.upper()
name = name.capitalize()
name = name.replace("_le_", "_LE_")
name = name.replace("_ri_", "_RI_")
if name[-3:] == "_le":
name = name[:-3] + "_LE"
if name[-3:] == "_ri":
name = name[:-3] + "_RI"
# Capitalize the letter after each underscore
indices = set([m.start() for m in re.finditer("_", name)])
return "".join(c.upper() if (i-1) in indices else c for i, c in enumerate(name))
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Import XAnim --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ImportXAnim(game):
codRootPath = GetRootFolder(None, game) # Only call this once, because it might create a dialog box
xanimPath = ImportFileSelectDialog(codRootPath, "XAnim")
if not xanimPath:
return
print("Importing XAnim '%s'" % os.path.basename(xanimPath))
with open(xanimPath, "rb") as f:
# Check file version
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] != 17:
MessageBox("ERROR: Not a valid XAnim file")
return
# Header
numFrames = struct.unpack('<H', f.read(2))[0]
numJoints = struct.unpack('<H', f.read(2))[0]
fileInfoBitfield = struct.unpack('<H', f.read(2))[0]
framerate = struct.unpack('<H', f.read(2))[0]
# Get anim type as string
animType = "absolute"
if fileInfoBitfield & 2:
animType = "delta"
elif fileInfoBitfield & 256:
animType = "relative"
elif fileInfoBitfield & 1024:
animType = "additive"
# ???
if animType == "absolute":
f.read(2) # ???
else:
print("Cannot read anim type '%s'" % animType)
return
# Read joint names
joints = []
for i in range(numJoints):
joints.append(ReadNullTerminatedString(f))
print joints
# Read joint frame data
for i in range(numJoints):
numRotations = struct.unpack('<H', f.read(2))[0]
for j in range(numRotations):
rot = ReadJointRotation(f)
numPositions = struct.unpack('<H', f.read(2))[0]
for j in range(numPositions):
pos = struct.unpack('<fff', f.read(12))
print pos
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Import XModel -------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ImportXModel(game):
codRootPath = GetRootFolder(None, game) # Only call this once, because it might create a dialog box
xmodelPath = ImportFileSelectDialog(codRootPath, "XModel")
if not xmodelPath:
return
# Show progress bar
if cmds.control("w"+OBJECT_NAMES['progress'][0], exists=True):
cmds.deleteUI("w"+OBJECT_NAMES['progress'][0])
progressWindow = cmds.window("w"+OBJECT_NAMES['progress'][0], title=OBJECT_NAMES['progress'][1], width=302, height=22)
cmds.columnLayout()
progressControl = cmds.progressBar(OBJECT_NAMES['progress'][0], width=300, progress=0)
cmds.showWindow(progressWindow)
cmds.refresh() # Force the progress bar to be drawn
try:
print("Importing XModel '%s'" % os.path.basename(xmodelPath))
with open(xmodelPath, "rb") as f:
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] not in SUPPORTED_XMODELS:
MessageBox("ERROR: Not a valid XModel file")
print("")
if game == "CoD4":
f.read(25) # ???
ReadNullTerminatedString(f)
elif game == "CoD5":
f.read(26) # ???
ReadNullTerminatedString(f)
elif game == "CoD7":
f.read(28) # ???
ReadNullTerminatedString(f)
ReadNullTerminatedString(f)
f.read(5)
print(f.tell())
lods = []
for i in range(4): # 4 is possible number of lods
someInt = struct.unpack('<I', f.read(4))
lodFileName = ReadNullTerminatedString(f)
if lodFileName != "":
lods.append({"name":lodFileName})
if len(lods) == 0:
MessageBox("ERROR: This XModel has no data (no LOD files)!")
return
f.read(4) # Spacer if next int isn't 0, otherwise ???
count = struct.unpack('<I', f.read(4))[0]
print(count)
for i in range(count):
subcount = struct.unpack('<I', f.read(4))[0]
f.read((subcount * 48) + 36) # ???
for lod in lods:
materials = []
numMaterials = struct.unpack('<H', f.read(2))[0]
for i in range(numMaterials):
materials.append(ReadNullTerminatedString(f))
lod["materials"] = materials
# Load joint data (24 bytes per joint) ???
lodToLoad = lods[0]
if len(lods) > 1:
buttons = []
lodDict = {}
for lod in lods:
buttons.append(lod["name"])
lodDict[lod["name"]] = lod
buttons.sort()
result = cmds.confirmDialog(title="Select LOD level", message="This model has more than one LOD level. Select which one to import:", button=buttons, defaultButton=buttons[0], dismissString="EXIT")
if result in lodDict:
lodToLoad = lodDict[result]
lodToLoad["transformGroup"] = cmds.group(empty=True, name=lodToLoad["name"])
lodToLoad["materialMaps"] = LoadMaterials(lodToLoad, codRootPath)
lodToLoad["joints"] = LoadJoints(lodToLoad, codRootPath)
LoadSurfaces(lodToLoad, codRootPath, game)
AutoIKHandles(lodToLoad)
cmds.select(lodToLoad["transformGroup"], replace=True)
finally:
# Delete progress bar
cmds.deleteUI(progressWindow, window=True)
def LoadSurfaces(lod, codRootPath, game):
print("Loading XModel surface '%s'" % lod["name"])
with open(os.path.join(codRootPath, "raw/xmodelsurfs/%s" % lod["name"]), "rb") as f:
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] not in SUPPORTED_XMODELS:
MessageBox("ERROR: Not a valid XModel surface file")
return
numMeshes = struct.unpack('<H', f.read(2))[0]
if numMeshes != len(lod["materials"]):
MessageBox("ERROR: Different number of meshes and materials on LOD '%s'" % lod["name"])
return
meshesCreated = []
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=numMeshes*5+1, progress=0)
for i in range(numMeshes):
cmds.window("w"+OBJECT_NAMES['progress'][0], edit=True, title="Loading mesh %i..." % i)
# Read mesh header
a = struct.unpack('<B', f.read(1))[0] # ???
b = struct.unpack('<H', f.read(2))[0] # ???
numVerts = struct.unpack('<H', f.read(2))[0]
numTris = struct.unpack('<H', f.read(2))[0]
numVerts2 = struct.unpack('<H', f.read(2))[0]
physiqued = numVerts != numVerts2
if physiqued:
f.read(2) # ???
print("\tMesh %i is physiqued... this may not load correctly" % i)
if numVerts2 != 0:
while struct.unpack('H', f.read(2))[0] != 0: # Search for next 0 short ???
pass
f.read(2) # ???
else:
                # If a mesh is influenced by only one joint,
                # it just stores that joint's index with an
                # implicit weight of 1.0.
bone = struct.unpack('<I', f.read(4)) # ???
single_joint_bind = lod["joints"][bone[0]]["name"]
vertexArray = OpenMaya.MFloatPointArray()
uArray = OpenMaya.MFloatArray()
vArray = OpenMaya.MFloatArray()
polygonCounts = OpenMaya.MIntArray(numTris, 3)
polygonConnects = OpenMaya.MIntArray()
normals = []
vertsWeights = []
ProgressBarStep()
bones = []
# Read vertices
for j in range(numVerts):
# f.read(12) # ???
normal = struct.unpack('<fff', f.read(12)) # ???
normal = OpenMaya.MVector(normal[0], normal[1], normal[2])
color = struct.unpack('<BBBB', f.read(4))
uv = struct.unpack('<ff', f.read(8))
if game == "CoD7":
f.read(28)
else:
f.read(24)
numWeights = 0
finalBoneNumber = 0
if physiqued:
numWeights = struct.unpack('<B', f.read(1))[0]
finalBoneNumber = struct.unpack('<H', f.read(2))[0]
pos = struct.unpack('<fff', f.read(12))
totalWeight = 0
weights = []
for k in range(numWeights):
weight = struct.unpack('<HH', f.read(4)) # [0] = bone number, [1] = weight mapped to integer (range 0-(2^16))
totalWeight += weight[1] / 65536.0
joint = lod["joints"][weight[0]]["name"]
weights.append((joint, weight[1] / 65536.0))
weights.append((lod["joints"][finalBoneNumber]["name"], 1 - totalWeight)) # Final bone gets remaining weight
vertsWeights.append(weights)
vertexArray.append(pos[0]/CM_TO_INCH, pos[1]/CM_TO_INCH, pos[2]/CM_TO_INCH)
normals.append(normal)
uArray.append(uv[0])
vArray.append(1-uv[1])
# Read face indices
tris_list = OpenMaya.MIntArray()
vert_list = OpenMaya.MIntArray()
_normals = OpenMaya.MVectorArray()
for j in range(numTris):
face = struct.unpack('<HHH', f.read(6))
tris_list.append(j)
tris_list.append(j)
tris_list.append(j)
polygonConnects.append(face[0])
polygonConnects.append(face[2])
polygonConnects.append(face[1])
vert_list.append(face[0])
vert_list.append(face[2])
vert_list.append(face[1])
_normals.append(normals[face[0]])
_normals.append(normals[face[2]])
_normals.append(normals[face[1]])
ProgressBarStep()
# Create mesh
mesh = OpenMaya.MFnMesh()
transform = mesh.create(numVerts, numTris, vertexArray, polygonCounts, polygonConnects)
mesh.setFaceVertexNormals(_normals, tris_list, vert_list)
# UV map
mesh.setUVs(uArray, vArray)
mesh.assignUVs(polygonCounts, polygonConnects)
# Rename mesh
transformDagPath = OpenMaya.MDagPath()
OpenMaya.MDagPath.getAPathTo(transform, transformDagPath)
newPath = cmds.parent(transformDagPath.fullPathName(), lod["transformGroup"])
newPath = cmds.rename(newPath, "mesh%i" % i)
meshesCreated.append(newPath)
ProgressBarStep()
# Joint weights
            # TODO: Optimize; calling skinPercent once per vertex is very slow.
if physiqued:
skin = cmds.skinCluster(lod["joints"][0]["name"], newPath)[0] # Bind the mesh to the root joint for now
for j, vertWeights in enumerate(vertsWeights):
cmds.skinPercent(skin, "%s.vtx[%i]" % (newPath, j), zeroRemainingInfluences=True, transformValue=vertWeights)
else:
skin = cmds.skinCluster(single_joint_bind, newPath,tsb=True, mi=1)[0]
ProgressBarStep()
# Apply textures
shader = cmds.shadingNode("lambert", name=lod["materials"][i], asShader=True)
cmds.select(newPath)
cmds.hyperShade(assign=shader)
colorMap = cmds.shadingNode("file", name=lod["materials"][i] + "_colorMap", asTexture=True)
cmds.connectAttr("%s.outColor" % colorMap, "%s.color" % shader)
if "colorMap" in lod["materialMaps"][lod["materials"][i]]:
cmds.setAttr("%s.fileTextureName" % colorMap, os.path.join(codRootPath, "raw/images/%s/%s.dds" % (lod["name"], lod["materialMaps"][lod["materials"][i]]["colorMap"])), type="string")
# Merge duplicates
mel.eval("polyMergeVertex -d 0.01 -am 1 -ch 0 %s;" % newPath) # Merge duplicate verts
mel.eval("polyMergeUV -d 0.01 -ch 0 %s;" % newPath) # Merge duplicate UVs
ProgressBarStep()
if len(f.read(1)) != 0: # Check if it's at the end of the file
MessageBox("The export completed, however it's quite likely that at least one of the meshes did not import correctly. See the Script Editor output for more information.")
ProgressBarStep()
def LoadJoints(lod, codRootPath):
print("Loading XModel joints '%s'" % lod["name"])
cmds.window("w"+OBJECT_NAMES['progress'][0], edit=True, title="Loading joints...")
joints = []
if not os.path.exists(os.path.join(codRootPath, "raw/xmodelparts/%s" % lod["name"])):
# cmds.joint("tag_origin", orientation=(0,0,0), position=(0,0,0), relative=True)
return
with open(os.path.join(codRootPath, "raw/xmodelparts/%s" % lod["name"]), "rb") as f:
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] not in SUPPORTED_XMODELS:
MessageBox("ERROR: Not a valid XModel parts file")
return
# Number of bones
numJoints = struct.unpack('<H', f.read(2))[0]
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=numJoints*2+1, progress=0)
if numJoints == 0: # Special case
joints.append({"parent": -1, "pos": (0.0,0.0,0.0), "rot": (0.0,0.0,0.0), "name": "TAG_ORIGIN"})
cmds.select(clear=True)
cmds.joint(name=joints[0]["name"], orientation=(0.0,0.0,0.0), position=(0.0,0.0,0.0), relative=True)
ProgressBarStep()
return joints
f.read(2) # ???
# Joint data
joints.append({"parent": -1, "pos": (0.0,0.0,0.0), "rot": (0.0,0.0,0.0)}) # parent joint
for i in range(numJoints):
parentJoint = struct.unpack('<B', f.read(1))[0]
pos = struct.unpack('<fff', f.read(12))
rot = ReadJointRotation(f)
joints.append({"parent": parentJoint, "pos": pos, "rot": rot})
ProgressBarStep()
for i in range(numJoints+1):
joints[i]["name"] = ReadNullTerminatedString(f).lower()
for joint in joints:
if joint["parent"] >= 0: # Select parent
cmds.select(joints[joint["parent"]]["name"], replace=True)
else:
cmds.select(clear=True)
cmds.joint(name=joint["name"], orientation=joint["rot"], position=(joint["pos"][0]/CM_TO_INCH, joint["pos"][1]/CM_TO_INCH, joint["pos"][2]/CM_TO_INCH), relative=True)
ProgressBarStep()
ProgressBarStep()
return joints
def LoadMaterials(lod, codRootPath):
noDupMaterials = list(set(lod["materials"]))
cmds.window("w"+OBJECT_NAMES['progress'][0], edit=True, title="Loading materials...")
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=len(noDupMaterials)*2+1, progress=0)
iwdImages = LoadMainIWDImages(codRootPath)
ProgressBarStep()
# Create output folder
if not os.path.exists(os.path.join(codRootPath, "raw/images/%s/" % lod["name"])):
os.makedirs(os.path.join(codRootPath, "raw/images/%s/" % lod["name"]))
# Create material info file
infofile = open(os.path.join(codRootPath, "raw/images/%s/%s" % (lod["name"], "%s Material Info.txt" % lod["name"])), "w")
# Load materials
outMaterialList = {}
for material in noDupMaterials:
materialMaps = {}
# http://www.diegologic.net/diegologic/Programming/CoD4%20Material.html
path = os.path.join(codRootPath, "raw/materials/%s" % material)
path = os.path.normpath(path)
print("Loading material '%s'" % material)
if not os.path.exists(path):
print("Failed loading material, path does not exist.")
continue
with open(path, "rb") as f:
f.read(48) # Skip start of header
numMaps = struct.unpack('<H', f.read(2))[0]
f.read(14) # Skip the rest of header
for i in range(numMaps):
mapTypeOffset = struct.unpack('<I', f.read(4))[0]
f.read(4) # Skip
mapNameOffset = struct.unpack('<I', f.read(4))[0]
current = f.tell()
f.seek(mapTypeOffset)
mapType = ReadNullTerminatedString(f)
f.seek(mapNameOffset)
mapName = ReadNullTerminatedString(f)
f.seek(current)
materialMaps[mapType] = mapName
infofile.write("Material: %s\n" % material)
for type, mapName in materialMaps.items():
infofile.write("\t%s: %s\n" % (type, mapName))
infofile.write("\n")
outMaterialList[material] = materialMaps
ProgressBarStep()
# Gather .iwis
rawImages = os.listdir(os.path.join(codRootPath, "raw/images/"))
for type, mapName in materialMaps.items():
outPath = os.path.join(codRootPath, "raw/images/%s/%s%s" % (lod["name"], mapName, ".iwi"))
if os.path.exists(outPath):
continue
if (mapName + ".iwi") in rawImages:
shutil.copy(os.path.join(codRootPath, "raw/images/%s%s" % (mapName, ".iwi")), os.path.join(codRootPath, "raw/images/%s/" % lod["name"]))
elif (mapName + ".iwi") in iwdImages:
iwdName = iwdImages[mapName + ".iwi"]
zip = zipfile.ZipFile(os.path.join(codRootPath, "main/%s" % iwdName), "r")
# Extract from zip
source = zip.open("images/%s%s" % (mapName, ".iwi"))
target = file(outPath, "wb")
shutil.copyfileobj(source, target)
source.close()
target.close()
if type == "colorMap":
try:
IWIToDDS(outPath)
except:
print(traceback.format_exc())
ProgressBarStep()
infofile.close()
return outMaterialList
def AutoIKHandles(lod):
if len(lod["joints"]) < 2:
return
result = cmds.confirmDialog(title="Auto IK Handles", message="Is this a character (player or AI) model?", button=["Yes", "No"], defaultButton="No", dismissString="No")
if result == "Yes":
# Arms
SafeIKHandle("IK_Arm_LE", "J_Shoulder_LE", "J_Wrist_LE")
SafeIKHandle("IK_Arm_RI", "J_Shoulder_RI", "J_Wrist_RI")
# Left hand
SafeIKHandle("IK_Index_LE", "J_Index_LE_1", "J_Index_LE_3")
SafeIKHandle("IK_Mid_LE", "J_Mid_LE_1", "J_Mid_LE_3")
SafeIKHandle("IK_Ring_LE", "J_Ring_LE_1", "J_Ring_LE_3")
SafeIKHandle("IK_Pinky_LE", "J_Pinky_LE_1", "J_Pinky_LE_3")
SafeIKHandle("IK_Thumb_LE", "J_Thumb_LE_1", "J_Thumb_LE_3")
# Right hand
SafeIKHandle("IK_Index_RI", "J_Index_RI_1", "J_Index_RI_3")
SafeIKHandle("IK_Mid_RI", "J_Mid_RI_1", "J_Mid_RI_3")
SafeIKHandle("IK_Ring_RI", "J_Ring_RI_1", "J_Ring_RI_3")
SafeIKHandle("IK_Pinky_RI", "J_Pinky_RI_1", "J_Pinky_RI_3")
SafeIKHandle("IK_Thumb_RI", "J_Thumb_RI_1", "J_Thumb_RI_3")
# Legs
SafeIKHandle("IK_Leg_LE", "J_Hip_LE", "J_Ankle_LE")
SafeIKHandle("IK_Leg_RI", "J_Hip_RI", "J_Ankle_RI")
def SafeIKHandle(name, joint1, joint2):
# Only apply the IK Handle if both joints exist
if cmds.objExists(joint1) and cmds.nodeType(joint1) == 'joint' and cmds.objExists(joint2) and cmds.nodeType(joint2) == 'joint':
cmds.ikHandle(name=name, startJoint=joint1, endEffector=joint2)
def LoadMainIWDImages(codRootPath):
iwdImages = {}
if not os.path.exists(os.path.join(codRootPath, "main/")):
return iwdImages
iwds = os.listdir(os.path.join(codRootPath, "main/"))
for iwd in iwds:
if not iwd.endswith(".iwd"):
continue
zip = zipfile.ZipFile(os.path.join(codRootPath, "main/") + iwd, "r")
images = zip.namelist()
images = [x for x in images if x.startswith("images/")]
for i in range(len(images)):
imageName = images[i][7:]
if len(imageName) > 0:
iwdImages[imageName] = iwd
return iwdImages
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# --------------------------------------------------------------------------- IWI to DDS ---------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def IWIToDDS(inIWIPath):
splitPath = os.path.splitext(inIWIPath)
outDDSPath = splitPath[0] + ".dds"
supported_headers = [6, 13]
print("Converting %s to DDS" % os.path.basename(inIWIPath))
iwi_data = {}
# Offsets are different for V13 IWIs
iwi_data[6] = [8, 7]
iwi_data[13] = [9, 8]
if not os.path.exists(inIWIPath):
return False
with open(inIWIPath, 'rb') as inf:
# http://www.matejtomcik.com/Public/Projects/IWIExtractor/
if inf.read(3) != "IWi": # Read file identifier
print("\tERROR: Not a valid IWI file")
return False
header = struct.unpack('<BBBHHBBIIII', inf.read(25))
print("Header Version: %i" % header[0])
if header[0] not in supported_headers: # Make sure it's V6 or V13 IWI
print("\tERROR: Unsupported IWI version")
return False
imageType = None
if header[1] == 0xB: # DXT1
imageType = "DXT1"
elif header[1] == 0xC: # DXT3
imageType = "DXT3"
elif header[1] == 0xD: # DXT5
imageType = "DXT5"
else:
print("\tERROR: Unknown image format")
return False
print("Writing_DDS")
with open(outDDSPath, 'wb') as outf:
# http://msdn.microsoft.com/en-us/library/windows/desktop/bb943991(v=vs.85).aspx
outf.write("DDS ") # File indentifier
print("Written that stuff1")
# DDS_HEADER size, flags, height, width, pitch, depth, mipmap count
outf.write(struct.pack('<7I', 124, 659463, header[4], header[3], 0, 0, 1))
outf.write(struct.pack('<11I', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)) # Reserved
# DDS_PIXELFORMAT size, flags, type, masks
outf.write(struct.pack('II4s5I', 32, 4, imageType, 0, 0, 0, 0, 0))
# DDS_HEADER caps1
outf.write(struct.pack('5I', 4198408, 0, 0, 0, 0))
print("Written that stuff")
# Copy Images
# MIPMAP 0
inf.seek(header[iwi_data[header[0]][0]])
outf.write(inf.read(header[iwi_data[header[0]][1]] - header[iwi_data[header[0]][0]]))
# # MIPMAP 1
# inf.seek(header[9])
# outf.write(inf.read(header[8] - header[9]))
# # MIPMAP 2
# inf.seek(header[10])
# outf.write(inf.read(header[9] - header[10]))
return True
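# Usage sketch with a hypothetical path: convert a single IWI in place,
# writing weapon_ak47_col.dds next to the source file.
#
#     IWIToDDS("C:/cod4/raw/images/weapon_ak47_col.iwi")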
def IWIToDDSUser():
codRootPath = GetRootFolder() # Only call this once, because it might create a dialog box
files = cmds.fileDialog2(fileMode=4, fileFilter="IWI Images (*.iwi)", caption="Select IWI file", startingDirectory=os.path.join(codRootPath, "raw/images/"))
if files == None or len(files) == 0 or files[0].strip() == "":
return
success = True
for file in files:
if not IWIToDDS(file):
success = False
if not success:
MessageBox("One or more of the IWIs failed to convert. See the Script Editor output for more information.")
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------- Export Joints (XModel and XAnim) ----------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def GetJointList(export_type=None):
# Joints list.
joints = []
# Try get the cosmetic bone.
if export_type == "xmodel":
try:
# Get it.
cosmeticBone = cmds.getAttr(OBJECT_NAMES["xmodel"][2]+ ".Cosmeticbone").split("|")[-1].split(":")[-1]
# Does it exist in scene?
if not cmds.objExists(cosmeticBone):
# If it doesn't, don't assign a cosmetic bone.
cosmeticBone = None
else:
cosmeticBone = cosmeticBone.split("|")[-1].split(":")[-1]
except:
# No cosmetic set.
cosmeticBone = None
# Cosmetic Bones List
cosmetic_list = []
# Cosmetic Bone ID (for xmodel_export)
cosmetic_id = None
else:
# No cosmetic set.
cosmeticBone = None
# Cosmetic Bones List
cosmetic_list = []
# Cosmetic Bone ID (for xmodel_export)
cosmetic_id = None
# Get selected objects
selectedObjects = OpenMaya.MSelectionList()
OpenMaya.MGlobal.getActiveSelectionList(selectedObjects)
for i in range(selectedObjects.length()):
# Get object path and node
dagPath = OpenMaya.MDagPath()
selectedObjects.getDagPath(i, dagPath)
dagNode = OpenMaya.MFnDagNode(dagPath)
        # Ignore nodes that aren't joints or aren't top-level
if not dagPath.hasFn(OpenMaya.MFn.kJoint) or not RecursiveCheckIsTopNode(selectedObjects, dagNode):
continue
# Breadth first search of joint tree
searchQueue = Queue.Queue(0)
        searchQueue.put((-1, dagNode, True)) # (parent joint index, node, node is a selected joint)
while not searchQueue.empty():
node = searchQueue.get()
index = len(joints)
if node[2]:
# Don't use main root bone.
if node[0] > -1:
                    # Name of the bone's parent (None for the root bone). Split to remove the DAG path separator and namespace.
bone_parentname = joints[node[0]][1].split("|")[-1].split(":")[-1]
else:
# Skip.
bone_parentname = None
                # Name of the bone. Split to remove the DAG path separator and namespace.
bone_name = node[1].partialPathName().split("|")[-1].split(":")[-1]
# Check for automatic rename.
if QueryToggableOption("AutomaticRename"):
                    # Run over the dictionary of joints that may need renaming.
for potjoints, new_name in RENAME_DICTONARY.iteritems():
# Found one
if bone_name == potjoints[0]:
# Check if it's a child bone of what we want, None to rename regardless.
if potjoints[1] is None or potjoints[1] == bone_parentname:
bone_name = new_name
# Check if we have cosmetic bone.
if cosmeticBone is not None and bone_parentname == cosmeticBone:
# Append it.
cosmetic_list.append((node[0], bone_name, node[1]))
else:
# Not a cosmetic, add it to normal joints.
joints.append((node[0], bone_name, node[1]))
# Our cosmetic parent.
if bone_name == cosmeticBone:
cosmetic_id = index
else:
index = node[0]
for i in range(node[1].childCount()):
dagPath = OpenMaya.MDagPath()
childNode = OpenMaya.MFnDagNode(node[1].child(i))
childNode.getPath(dagPath)
searchQueue.put((index, childNode, selectedObjects.hasItem(dagPath) and dagPath.hasFn(OpenMaya.MFn.kJoint)))
# Cosmetic bones must be at the end, so append them AFTER we've added other bones.
joints = joints + cosmetic_list
return joints, cosmetic_list, cosmetic_id
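# Shape of GetJointList's return value, sketched for a hypothetical
# two-joint selection with no cosmetic bone set:
#
#     joints        = [(-1, 'tag_origin', <MFnDagNode>),
#                      ( 0, 'tag_weapon', <MFnDagNode>)]
#     cosmetic_list = []
#     cosmetic_id   = None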
def GetCameraList():
cameras = []
# Get selected objects
selectedObjects = OpenMaya.MSelectionList()
OpenMaya.MGlobal.getActiveSelectionList(selectedObjects)
for i in range(selectedObjects.length()):
# Get object path and node
dagPath = OpenMaya.MDagPath()
selectedObjects.getDagPath(i, dagPath)
dagNode = OpenMaya.MFnDagNode(dagPath)
        # Ignore nodes that aren't cameras or aren't top-level
if not dagPath.hasFn(OpenMaya.MFn.kCamera):
ProgressBarStep()
continue
# Breadth first search of camera tree
searchQueue = Queue.Queue(0)
        searchQueue.put((-1, dagNode, True)) # (parent index, node, node is a selected camera)
while not searchQueue.empty():
node = searchQueue.get()
index = len(cameras)
if node[2]:
cameras.append((node[0], node[1]))
else:
index = node[0]
for i in range(node[1].childCount()):
dagPath = OpenMaya.MDagPath()
childNode = OpenMaya.MFnDagNode(node[1].child(i))
childNode.getPath(dagPath)
searchQueue.put((index, childNode, selectedObjects.hasItem(dagPath) and dagPath.hasFn(OpenMaya.MFn.kCamera)))
ProgressBarStep()
return cameras
def RecursiveCheckIsTopNode(cSelectionList, currentNode): # Checks if the given node has ANY selected parent, grandparent, etc joints
if currentNode.parentCount() == 0:
return True
for i in range(currentNode.parentCount()):
parentDagPath = OpenMaya.MDagPath()
parentNode = OpenMaya.MFnDagNode(currentNode.parent(i))
parentNode.getPath(parentDagPath)
if not parentDagPath.hasFn(OpenMaya.MFn.kJoint): # Not a joint, but still check parents
if not RecursiveCheckIsTopNode(cSelectionList, parentNode):
return False # A parent joint is selected, we're done
else:
continue # No parent joints are selected, ignore this node
if cSelectionList.hasItem(parentDagPath):
return False
else:
if not RecursiveCheckIsTopNode(cSelectionList, parentNode):
return False
return True
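# Example: with a joint chain root -> A -> B where only A is selected,
# A is a top node (it has no selected ancestors) while B is not, because
# its parent A is in the selection list.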
def GetJointData(jointNode, frame=0):
# Get the joint's transform
path = OpenMaya.MDagPath()
jointNode.getPath(path)
transform = OpenMaya.MFnTransform(path)
# Get joint position
pos = transform.getTranslation(OpenMaya.MSpace.kWorld)
# Get scale (almost always 1)
scaleUtil = OpenMaya.MScriptUtil()
scaleUtil.createFromList([1,1,1], 3)
scalePtr = scaleUtil.asDoublePtr()
transform.getScale(scalePtr)
scale = [OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 0), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 1), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 2)]
    # Get rotation matrix (mat is a 4x4, but the last row and column aren't needed)
rotQuaternion = OpenMaya.MQuaternion()
transform.getRotation(rotQuaternion, OpenMaya.MSpace.kWorld)
mat = rotQuaternion.asMatrix()
# Debug info: as euler rotation
#eulerRotation = rotQuaternion.asEulerRotation()
#eulerRotation.reorderIt(OpenMaya.MEulerRotation.kXYZ)
# Instead of writing it return it to Export function.
joint_offset = (pos.x*CM_TO_INCH, pos.y*CM_TO_INCH, pos.z*CM_TO_INCH)
joint_matrix = [(mat(0,0), mat(0,1), mat(0,2)),
(mat(1,0), mat(1,1), mat(1,2)),
(mat(2,0), mat(2,1), mat(2,2))]
joint_scale = (scale[0], scale[1], scale[2])
return joint_offset, joint_matrix, joint_scale
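# Example return value for a joint at the origin with identity rotation
# and unit scale:
#   ((0.0, 0.0, 0.0), [(1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0)], (1.0, 1.0, 1.0))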
def WriteNodeFloat(f, name, value, no_p=False):
if no_p:
f.write("\"%s\" : %f \n" % (name, value))
else:
f.write("\"%s\" : %f ,\n" % (name, value))
def WriteCameraData(initial, outJSON, cameraNode):
# Get the camera's transform
path = OpenMaya.MDagPath()
cameraNode.getPath(path)
cam = OpenMaya.MFnCamera(path)
transform = OpenMaya.MFnTransform(path)
#print fov
#fov = 40
aspectRatio = cam.aspectRatio()
flen = cam.focalLength() * (CM_TO_INCH*1.732050807568877)
fov = cam.verticalFieldOfView() * (180 / M_PI) * 1.571428571428571
fdist = cam.focusDistance() * CM_TO_INCH
fstop = cam.fStop()
lense = 10
# Get camera position
pos = transform.getTranslation(OpenMaya.MSpace.kWorld)
# Get scale (almost always 1)
scaleUtil = OpenMaya.MScriptUtil()
scaleUtil.createFromList([1,1,1], 3)
scalePtr = scaleUtil.asDoublePtr()
transform.getScale(scalePtr)
scale = [OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 0), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 1), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 2)]
    # Get rotation matrix (mat is a 4x4, but the last row and column aren't needed)
rotQuaternion = OpenMaya.MQuaternion()
transform.getRotation(rotQuaternion, OpenMaya.MSpace.kWorld)
mat = rotQuaternion.asMatrix()
# Debug info: as euler rotation
eulerRotation = rotQuaternion.asEulerRotation()
eulerRotation.reorderIt(OpenMaya.MEulerRotation.kXYZ)
# euler rotation is in radians, not degrees
eulerRotation.x = eulerRotation.x - (3.141/2)
eulerRotation.z = eulerRotation.z - (3.141/2)
#eulerRotation.y = eulerRotation.y + (3.141/2)
#print ("%f %f %f" % (eulerRotation.x*180/3.141, eulerRotation.y*180/3.141, eulerRotation.z*180/3.141))
mat = eulerRotation.asMatrix()
# Write
if(initial):
outJSON.update({
"aspectratio" : aspectRatio
})
outJSON.update({
"origin" : [pos.y * CM_TO_INCH, pos.x * -CM_TO_INCH, pos.z * CM_TO_INCH],
"dir" : [mat(1,0), mat(1,1), mat(1,2)],
"up" : [mat(2,0), mat(2,1), mat(2,2)],
"right" : [mat(0,0), mat(0,1), mat(0,2)],
"flen" : flen,
"fov" : fov,
"fdist" : fdist,
"fstop" : fstop,
"lense" : lense
})
#outJSON["origin"] = [pos.y * CM_TO_INCH, pos.x * -CM_TO_INCH, pos.z * CM_TO_INCH]
#float(pos.x*-CM_TO_INCH), float(pos.z*CM_TO_INCH)])
#outJSON["dir"] = [mat(1,0), mat(1,1), mat(1,2)]
#outJSON["up"] = [mat(2,0), mat(2,1), mat(2,2)]
#outJSON["right"] = [mat(0,0), mat(0,1), mat(0,2)]
#outJSON["flen"] = flen
#outJSON["fov"] = fov
#outJSON["fdist"] = fdist
#outJSON["fstop"] = fstop
#outJSON["lense"] = lense
# Get count for progress bar. No impact on export speed.
def GetNumInfo(selectedObjects):
# Mesh array to check for duplicates.
meshes = []
maxValue = 0
maxValue += len(cmds.ls(selection = True, type = "joint"))
for i in range(0, selectedObjects.length()):
# Grab mesh.
object = OpenMaya.MObject()
dagPath = OpenMaya.MDagPath()
selectedObjects.getDependNode(i, object)
selectedObjects.getDagPath(i, dagPath)
# Check it's a mesh.
if not dagPath.hasFn(OpenMaya.MFn.kMesh):
continue
dagPath.extendToShape()
# Check for duplicate.
if dagPath.partialPathName() in meshes:
continue
# Append.
meshes.append(dagPath.partialPathName())
# Get vert count for this mesh.
maxValue += OpenMaya.MItMeshVertex(dagPath).count()
        # Get face count for this mesh.
maxValue += OpenMaya.MItMeshPolygon(dagPath).count()
    # The progress bar maximum is the sum of selected joints plus each mesh's vertex and face counts.
return maxValue
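# Example: 3 selected joints plus one mesh with 100 verts and 50 faces
# gives a progress bar maximum of 3 + 100 + 50 = 153 steps.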
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Export XModel -------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ExportXModel(filePath, make_gdt=True):
# Get number of objects selected.
numSelectedObjects = len(cmds.ls(selection=True))
# Check if nothing is selected
if numSelectedObjects == 0:
return "Error: No objects selected for export"
# Check if we want to merge meshes into 1.
merge_mesh = QueryToggableOption('MeshMerge')
    # Get the xModel version based on the current game.
version = XMODEL_VERSION[GetCurrentGame()]
# Create Directory/ies
try:
directory = os.path.dirname(filePath)
if not os.path.exists(directory):
os.makedirs(directory)
except OSError as e:
typex, value, traceback = sys.exc_info()
return "Unable to create file:\n\n%s" % value.strerror
# Create new xModel Object
xmodel = xModel.Model()
# Get list of joints including cosmetics
joints = GetJointList("xmodel")
# Export Mesh Information
ExportMeshData(joints[0], xmodel, merge_mesh)
# Export Joints
if joints[0]:
# Run through joints
for i, joint in enumerate(joints[0]):
# Get Data for this joint
boneData = GetJointData(joint[2])
            # Create the xModel bone.
            bone = xModel.Bone(joint[1], joint[0])
            # Check for cosmetic (bones parented to the cosmetic parent).
            if(joint[0] == joints[2]):
                bone.cosmetic = True
# Offset
bone.offset = boneData[0]
# Rotation
bone.matrix = boneData[1]
# Scale
bone.scale = boneData[2]
# Append it.
xmodel.bones.append(bone)
# No bones selected, export just TAG_ORIGIN
else:
dummy_bone = xModel.Bone("TAG_ORIGIN", -1)
dummy_bone.offset = (0, 0, 0)
dummy_bone.matrix = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
xmodel.bones.append(dummy_bone)
# Get Extension to determine export type.
extension = os.path.splitext(filePath)[-1].lower()
# Write xModel
if(extension == ".xmodel_bin"):
xmodel.WriteFile_Bin(filePath, version)
else:
xmodel.WriteFile_Raw(filePath, version)
    # Separate conversion via Export2Bin/ExportX.
    # Change the variable in the config at the top to enable.
if USE_EXPORT_X:
if QueryToggableOption('E2B'):
try:
RunExport2Bin(filePath)
except:
MessageBox("The xmodel exported successfully however Export2Bin/ExportX failed to run, the model will need to be converted manually.\n\nPlease check your paths.")
def GetMaterialsFromMesh(mesh, dagPath):
textures = {}
# http://rabidsquirrelgames.googlecode.com/svn/trunk/Maya%20plugin/fileExportCmd.py
# The code below gets a dictionary of [material name: material file name], ex: [a_material: a_material.dds]
shaders = OpenMaya.MObjectArray()
shaderIndices = OpenMaya.MIntArray()
mesh.getConnectedShaders(dagPath.instanceNumber(), shaders, shaderIndices)
for i in range(shaders.length()):
shaderNode = OpenMaya.MFnDependencyNode(shaders[i])
shaderPlug = shaderNode.findPlug("surfaceShader")
material = OpenMaya.MPlugArray()
shaderPlug.connectedTo(material, 1, 0);
for j in range(material.length()):
materialNode = OpenMaya.MFnDependencyNode(material[j].node())
colorPlug = materialNode.findPlug("color")
dgIt = OpenMaya.MItDependencyGraph(
colorPlug,
OpenMaya.MFn.kFileTexture,
OpenMaya.MItDependencyGraph.kUpstream,
OpenMaya.MItDependencyGraph.kBreadthFirst,
OpenMaya.MItDependencyGraph.kNodeLevel)
texturePath = ""
try: # If there is no texture, this part can throw an exception
dgIt.disablePruningOnFilter()
textureNode = OpenMaya.MFnDependencyNode(dgIt.currentItem())
texturePlug = textureNode.findPlug("fileTextureName")
texturePath = os.path.basename(texturePlug.asString())
except Exception:
pass
textures[i] = (materialNode.name(), texturePath)
texturesToFaces = []
for i in range(shaderIndices.length()):
if shaderIndices[i] in textures:
texturesToFaces.append(textures[shaderIndices[i]])
else:
texturesToFaces.append(None)
return texturesToFaces
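# Example (hypothetical scene): a two-face mesh with a single Lambert shader
# returns [("lambert1", "diffuse.dds"), ("lambert1", "diffuse.dds")], one
# (material name, texture file) tuple per face; faces with no material map to None.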
# Converts a set of vertices (toConvertVertexIndices) from object-relative IDs to face-relative IDs
# vertexIndices is a list of object-relative vertex indices in face order (from polyIter.getVertices)
# toConvertVertexIndices is any set of vertices from the same faces as vertexIndices, not necessarily the same length
# Returns false if a vertex index is unable to be converted (= bad vertex values)
def VerticesObjRelToLocalRel(vertexIndices, toConvertVertexIndices):
# http://svn.gna.org/svn/cal3d/trunk/cal3d/plugins/cal3d_maya_exporter/MayaMesh.cpp
localVertexIndices = OpenMaya.MIntArray()
for i in range(toConvertVertexIndices.length()):
found = False
for j in range(vertexIndices.length()):
if toConvertVertexIndices[i] == vertexIndices[j]:
localVertexIndices.append(j)
found = True
break
if not found:
return False
return localVertexIndices
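# Example: for a quad whose object-relative vertices in face order are
# [10, 11, 12, 13], converting the triangle [10, 12, 13] yields the
# face-relative indices [0, 2, 3].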
def ExportMeshData(joints, xmodel, merge_mesh = True):
meshes = []
verts = []
tris = []
materialDict = {}
materials = []
# xModel
# Convert the joints to a dictionary, for simple searching for joint indices
jointDict = {}
for i, joint in enumerate(joints):
jointDict[joint[2].partialPathName()] = i
# Get all selected objects
selectedObjects = OpenMaya.MSelectionList()
OpenMaya.MGlobal.getActiveSelectionList(selectedObjects)
# The global vert index at the start of each object
currentStartingVertIndex = 0
global_mesh = xModel.Mesh("GlobalMesh")
progressInfo = GetNumInfo(selectedObjects)
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue = progressInfo)
# Loop through all objects
for i in range(0, selectedObjects.length()):
# Get data on object
object = OpenMaya.MObject()
dagPath = OpenMaya.MDagPath()
selectedObjects.getDependNode(i, object)
selectedObjects.getDagPath(i, dagPath)
# Ignore dag nodes that aren't shapes or shape transforms
if not dagPath.hasFn(OpenMaya.MFn.kMesh):
continue
# Lower path to shape node
        # Selecting either a shape or its transform resolves to the same shape dagPath here
dagPath.extendToShape()
# Check for duplicates
if dagPath.partialPathName() in meshes:
continue
# Add shape to list
meshes.append(dagPath.partialPathName())
# Create new xMesh
xmesh = xModel.Mesh(dagPath.partialPathName())
# Get Maya Mesh
mesh = OpenMaya.MFnMesh(dagPath)
# Get skin cluster
clusterName = mel.eval("findRelatedSkinCluster " + dagPath.partialPathName())
# Check for skin
hasSkin = False
if clusterName != None and clusterName != "" and not clusterName.isspace():
hasSkin = True
selList = OpenMaya.MSelectionList()
selList.add(clusterName)
clusterNode = OpenMaya.MObject()
selList.getDependNode(0, clusterNode)
skin = OpenMayaAnim.MFnSkinCluster(clusterNode)
# Get vertices
vertIter = OpenMaya.MItMeshVertex(dagPath)
# Loop until we're done iterating over vertices
while not vertIter.isDone():
# Create Vertex
vertex = xModel.Vertex(
(
vertIter.position(OpenMaya.MSpace.kWorld).x*CM_TO_INCH,
vertIter.position(OpenMaya.MSpace.kWorld).y*CM_TO_INCH,
vertIter.position(OpenMaya.MSpace.kWorld).z*CM_TO_INCH
)
)
# Check for influences
if hasSkin:
# Get weight values
weightValues = OpenMaya.MDoubleArray()
numWeights = OpenMaya.MScriptUtil() # Need this because getWeights crashes without being passed a count
skin.getWeights(dagPath, vertIter.currentItem(), weightValues, numWeights.asUintPtr())
# Get weight names
weightJoints = OpenMaya.MDagPathArray()
skin.influenceObjects(weightJoints)
# Make sure the list of weight values and names match
if weightValues.length() != weightJoints.length():
PrintWarning("Failed to retrieve vertex weight list on '%s.vtx[%d]'; using default joints." %
(dagPath.partialPathName(), vertIter.index()))
# Remove weights of value 0 or weights from unexported joints
finalWeights = []
weightsSize = 0
for i in range(0, weightJoints.length()):
# 0.000001 is the smallest decimal in xmodel exports
if weightValues[i] < 0.000001:
continue
jointName = weightJoints[i].partialPathName()
# Check for unexported joints.
if not jointName in jointDict:
PrintWarning("Unexported joint %s is influencing vertex '%s.vtx[%d]' by %f%%\n" %
(("'%s'" % jointName).ljust(15), dagPath.partialPathName(), vertIter.index(), weightValues[i]*100))
else:
finalWeights.append([jointDict[jointName], weightValues[i]])
weightsSize += weightValues[i]
# Make sure the total weight adds up to 1
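                # e.g. surviving weights 0.6 and 0.2 sum to 0.8, so each is
                # scaled by 1/0.8, giving normalized weights 0.75 and 0.25.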
if weightsSize > 0:
weightMultiplier = 1 / weightsSize
for weight in finalWeights:
weight[1] *= weightMultiplier
vertex.weights.append((weight[0], weight[1]))
            # Check if no weights were written (can happen due to deformers)
if not len(vertex.weights):
vertex.weights.append((0, 1.0))
# Add to mesh
xmesh.verts.append(vertex)
# Next vert
ProgressBarStep()
vertIter.next()
# Get materials used by this mesh
meshMaterials = GetMaterialsFromMesh(mesh, dagPath)
# Loop through all faces
polyIter = OpenMaya.MItMeshPolygon(dagPath)
# Loop until we're done iterating over polygons
while not polyIter.isDone():
# Get this poly's material
polyMaterial = meshMaterials[polyIter.index()]
# Every face must have a material
if polyMaterial == None:
PrintWarning("Found no material on face '%s.f[%d]'; ignoring face" %
(dagPath.partialPathName(), polyIter.index()))
polyIter.next()
continue
# Add this poly's material to the global list of used materials
if not polyMaterial[0] in materialDict:
materialDict[polyMaterial[0]] = len(materials)
materials.append(polyMaterial)
# Get vertex indices of this poly, and the vertex indices of this poly's triangles
trianglePoints = OpenMaya.MPointArray()
triangleIndices = OpenMaya.MIntArray()
vertexIndices = OpenMaya.MIntArray()
polyIter.getTriangles(trianglePoints, triangleIndices)
polyIter.getVertices(vertexIndices)
            # localTriangleIndices is the same as triangleIndices,
            # except each vertex is listed as the face-relative index
            # instead of the object-relative index
localTriangleIndices = VerticesObjRelToLocalRel(vertexIndices, triangleIndices)
if localTriangleIndices == False:
return ("Failed to convert object-relative vertices to face-relative on poly '%s.f[%d]'" %
(dagPath.partialPathName(), polyIter.index()))
# Note: UVs, normals, and colors, are "per-vertex per face", because even though two faces may share
# a vertex, they might have different UVs, colors, or normals. So, each face has to contain this info
            # for each of its vertices instead of each vertex alone
# UVs
Us = OpenMaya.MFloatArray()
Vs = OpenMaya.MFloatArray()
# Normals
normals = OpenMaya.MVectorArray()
# Attempt to get UVs
try:
polyIter.getUVs(Us, Vs)
except:
PrintWarning("Failed to aquire UVs on face '%s.f[%d]'; ignoring face" %
(dagPath.partialPathName(), polyIter.index()))
polyIter.next()
continue
# Attempt to get Normals
try:
polyIter.getNormals(normals, OpenMaya.MSpace.kWorld)
except:
PrintWarning("Failed to aquire Normals on face '%s.f[%d]'; ignoring face" %
(dagPath.partialPathName(), polyIter.index()))
polyIter.next()
continue
# Loop indices
            # triangleIndices contains 3 vertex indices per triangle
for i in range(triangleIndices.length()/3):
# New xModel Face
xface = xModel.Face(0 if merge_mesh else len(meshes)-1 , materialDict[polyMaterial[0]])
# Put local indices into an array for easy access
faceIndices = [
localTriangleIndices[i*3],
localTriangleIndices[i*3+1],
localTriangleIndices[i*3+2]
]
# Vertex Colors
vertColors = [
OpenMaya.MColor(),
OpenMaya.MColor(),
OpenMaya.MColor()
]
# Grab colors
polyIter.getColor(vertColors[0], faceIndices[0])
polyIter.getColor(vertColors[1], faceIndices[1])
polyIter.getColor(vertColors[2], faceIndices[2])
# Face Order
face_order = [0, 2, 1]
# Export Face-Vertex Data
for e in range(3):
xface.indices[face_order[e]] = xModel.FaceVertex(
# Vertex
currentStartingVertIndex + triangleIndices[i*3 + e],
# Normal (XYZ)
(
normals[faceIndices[e]].x,
normals[faceIndices[e]].y,
normals[faceIndices[e]].z
),
# Color (RGBA)
(
vertColors[e].r,
vertColors[e].g,
vertColors[e].b,
vertColors[e].a,
),
# UV (UV)
(Us[faceIndices[e]], 1-Vs[faceIndices[e]]))
# Append face
xmesh.faces.append(xface)
# Next poly
ProgressBarStep()
polyIter.next()
# Update starting vertex index
currentStartingVertIndex = len(verts)
xmodel.meshes.append(xmesh)
# Add Materials
for material in materials:
xmodel.materials.append(
xModel.Material(material[0].split(":")[-1],
"Lambert",
{"color_map" : material[1].split(":")[-1]})
)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Export XAnim --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ExportXAnim(filePath):
# Progress bar
numSelectedObjects = len(cmds.ls(selection=True))
if numSelectedObjects == 0:
return "Error: No objects selected for export"
# Get data
joints = GetJointList()
if len(joints[0]) == 0:
return "Error: No joints selected for export"
# Get settings
frameStart = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", query=True, value=True)
frameEnd = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", query=True, value=True)
fps = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FPSField", query=True, value=True)
QMultiplier = math.pow(2,cmds.intField(OBJECT_NAMES['xanim'][0]+"_qualityField", query=True, value=True))
multiplier = 1/QMultiplier
fps = fps/multiplier
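    # Example: quality 1 gives QMultiplier = 2 and multiplier = 0.5, so a
    # 30 FPS animation is sampled every half frame and exported at 60 FPS.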
# Reverse Bool
reverse = cmds.checkBox("CoDMAYA_ReverseAnim", query=True, value=True)
# Export Tag Align
write_tag_align = cmds.checkBox("CoDMAYA_TAGALIGN", query=True, value=True)
# Frame Range
frame_range = range(int(frameStart/multiplier), int((frameEnd+1)/multiplier))
# Check if we want to reverse this anim.
if reverse:
frame_range = list(reversed(frame_range))
if frameStart < 0 or frameStart > frameEnd:
return "Error: Invalid frame range (start < 0 or start > end)"
if fps <= 0:
return "Error: Invalid FPS (fps < 0)"
if multiplier <= 0 or multiplier > 1:
return "Error: Invalid multiplier (multiplier < 0 && multiplier >= 1)"
# Set Progress bar to our frame length
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=len(frame_range) + 1)
# Create Directory/ies
try:
directory = os.path.dirname(filePath)
if not os.path.exists(directory):
os.makedirs(directory)
except OSError as e:
typex, value, traceback = sys.exc_info()
return "Unable to create file:\n\n%s" % value.strerror
# Create new xAnim
xanim = xAnim.Anim()
# Set Frame Rate
xanim.framerate = fps
# Add Joints
for i, joint in enumerate(joints[0]):
xanim.parts.append(xAnim.PartInfo(joint[1]))
# Export Tag Align (required for some anims)
if write_tag_align:
xanim.parts.append(xAnim.PartInfo("TAG_ALIGN"))
# Loop through frames
for n, i in enumerate(frame_range):
# Jump to frame
cmds.currentTime(i)
# Create Frame
frame = xAnim.Frame(n)
# Loop through joints
for j, joint in enumerate(joints[0]):
# Create Frame Part
frame_bone = xAnim.FramePart()
# Grab joint data for this part.
boneData = GetJointData(joint[2])
# Offset
frame_bone.offset = boneData[0]
# Rotation
frame_bone.matrix = boneData[1]
# Append it.
frame.parts.append(frame_bone)
# Export Tag Align (required for some anims)
if write_tag_align:
frame_bone = xAnim.FramePart()
frame_bone.offset = (0, 0, 0)
frame_bone.matrix = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
frame.parts.append(frame_bone)
# Add Frame
xanim.frames.append(frame)
# Increment Progress
ProgressBarStep()
# Get Notetracks for this Slot
slotIndex = cmds.optionMenu(OBJECT_NAMES['xanim'][0]+"_SlotDropDown", query=True, select=True)
noteList = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".notetracks[%i]" % slotIndex)) or ""
    # Notes (separated by commas)
notes = noteList.split(",")
# Run through note list
for note in notes:
# Split (frame : note string)
parts = note.split(":")
# Check for empty/bad string
if note.strip() == "" or len(parts) < 2:
continue
        # Keep only alphanumeric/underscore characters (preserving rumble/sound note prefixes)
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#")
if name == "":
continue
# Get Frame and attempt to parse it
frame=0
try:
frame = int(parts[1]) - frameStart
if(reverse):
frame = (len(frame_range) - 1) - frame
except ValueError:
continue
# Add to our notes list.
xanim.notes.append(xAnim.Note(frame, name))
# Get Extension
extension = os.path.splitext(filePath)[-1].lower()
# Export Bin
if(extension == ".xanim_bin"):
xanim.WriteFile_Bin(filePath, 3)
    # Export Raw (xanim_export)
else:
xanim.WriteFile_Raw(filePath, 3)
# Refresh
cmds.refresh()
    # Separate conversion via Export2Bin/ExportX.
    # Change the variable in the config at the top to enable.
if USE_EXPORT_X:
if QueryToggableOption('E2B'):
try:
RunExport2Bin(filePath)
except:
MessageBox("The animation exported successfully however Export2Bin/ExportX failed to run, the animation will need to be converted manually.\n\nPlease check your paths.")
def WriteDummyTargetModelBoneRoot(outJSON, numframes):
# f.write("""
# "targetModelBoneRoots" : [
# {
# "name" : "TAG_ORIGIN",
# "animation" : [
# """)
outJSON["targetModelBoneRoots"] = [{
"name" : "TAG_ORIGIN",
"animation" : [],
},
{
"name" : "TAG_ALIGN",
"animation" : [],
}
]
for i in range(0,numframes):
outJSON["targetModelBoneRoots"][0]["animation"].append({
"frame" : i,
"offset" : [0.0,0.0,0.0],
"axis" : {
"x" : [0.0, -1.0, 0.0],
"y" : [1.0, 0.0, 0.0],
"z" : [0.0, 0.0, 1.0]
}
})
for i in range(0, numframes):
outJSON["targetModelBoneRoots"][1]["animation"].append({
"frame": i,
"offset": [0.0, 0.0, 0.0],
"axis": {
"x": [0.0, -1.0, 0.0],
"y": [1.0, 0.0, 0.0],
"z": [0.0, 0.0, 1.0]
}
})
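# Example of one emitted animation entry (frame 0):
#   {"frame": 0, "offset": [0.0, 0.0, 0.0],
#    "axis": {"x": [0.0, -1.0, 0.0], "y": [1.0, 0.0, 0.0], "z": [0.0, 0.0, 1.0]}}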
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Export XCam --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ExportXCam(filePath):
# Progress bar
numSelectedObjects = len(cmds.ls(selection=True))
if numSelectedObjects == 0:
return "Error: No objects selected for export"
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=numSelectedObjects+1)
# Get data
cameras = GetCameraList()
if len(cameras) == 0:
return "Error: No cameras selected for export"
# Get settings
frameStart = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", query=True, value=True)
frameEnd = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", query=True, value=True)
fps = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FPSField", query=True, value=True)
# QMultiplier = math.pow(2,cmds.intField(OBJECT_NAMES['xcam'][0]+"_qualityField", query=True, value=True))
#multiplier = 1/QMultiplier
multiplier = 1
fps = fps/multiplier
if frameStart < 0 or frameStart > frameEnd:
return "Error: Invalid frame range (start < 0 or start > end)"
if fps <= 0:
return "Error: Invalid FPS (fps < 0)"
if multiplier <= 0 or multiplier > 1:
return "Error: Invalid multiplier (multiplier < 0 && multiplier >= 1)"
# Open file
f = None
try:
# Create export directory if it doesn't exist
directory = os.path.dirname(filePath)
if not os.path.exists(directory):
os.makedirs(directory)
# Create files
f = open(filePath, 'w')
except (IOError, OSError) as e:
typex, value, traceback = sys.exc_info()
return "Unable to create files:\n\n%s" % value.strerror
fLength = ((frameEnd-frameStart+1) / multiplier)
# Write header
outputJSON = {
"version" : 1,
"framerate" : fps,
"numframes" : fLength
}
outputJSON["scene"] = os.path.normpath(os.path.abspath(cmds.file(query=True, sceneName=True))).encode('ascii', 'ignore').replace('\\','/')
outputJSON["align"] = {
"tag" : "tag_align",
"offset" : [0.0000, 0.0000, 0.0000],
"axis" : {
"x" : [0.0, -1.0, 0.0],
"y" : [1.0, 0.0, 0.0],
"z" : [0.0, 0.0, 1.0]
}
}
WriteDummyTargetModelBoneRoot(outputJSON, fLength)
# Write parts
outputJSON["cameras"] = []
currentFrame = cmds.currentTime(query=True)
for i, camera in enumerate(cameras):
name = camera[1].partialPathName().split("|")
name = name[len(name)-1].split(":") # Remove namespace prefixes
name = name[len(name)-1]
outputJSON["cameras"].append({
"name" : name,
"index" : i,
"type" : "Perspective",
"aperture" : "FOCAL_LENGTH"
        })
WriteCameraData(True, outputJSON["cameras"][i], camera[1])
#outputJSON["cameras"][i]["aspectratio"] = 16.0/9.0
outputJSON["cameras"][i]["nearz"] = 4
outputJSON["cameras"][i]["farz"] = 4000
outputJSON["cameras"][i]["animation"] = []
for j in range(int(frameStart), int((frameEnd+1))):
cmds.currentTime(j)
outputJSON["cameras"][i]["animation"].append({
"frame" : j
})
WriteCameraData(False, outputJSON["cameras"][i]["animation"][j-frameStart], camera[1])
outputJSON["cameraSwitch"] = []
cmds.currentTime(currentFrame)
ProgressBarStep()
# Write notetrack
slotIndex = cmds.optionMenu(OBJECT_NAMES['xcam'][0]+"_SlotDropDown", query=True, select=True)
noteList = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
cleanNotes = []
for note in notes:
parts = note.split(":")
if note.strip() == "" or len(parts) < 2:
continue
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"])
if name == "":
continue
frame=0
try:
frame = int(parts[1])
except ValueError:
continue
cleanNotes.append((name, frame))
outputJSON["notetracks"] = []
for note in cleanNotes:
outputJSON["notetracks"].append({
"name" : note[0],
"frame" : note[1]
})
#f.write("{\n \"name\" : \"%s\",\n \"frame\" : %d\n},\n" % (note[0], note[1]))
json.dump(outputJSON, f, indent=4)
f.close()
ProgressBarStep()
cmds.refresh()
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------ Viewmodel Tools -------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def DoesObjectExist(name, type):
if not cmds.objExists(name):
MessageBox("Error: Missing %s '%s'" % (type, name))
return False
return True
def CreateNewGunsleeveMayaFile():
global WarningsDuringExport
# Save reminder
if not SaveReminder(False):
return
# Get paths
filePath = cmds.file(query=True, sceneName=True)
split1 = os.path.split(filePath)
split2 = os.path.splitext(split1[1])
exportPath = os.path.join(split1[0], "gunsleeves_" + split2[0] + ".xmodel_export")
# Create a new file and import models
cmds.file(force=True, newFile=True)
cmds.file(os.path.join(GetRootFolder(), "bin/maya/rigs/viewmodel/ViewModel_DefMesh.mb"), i=True, type="mayaBinary")
cmds.file(filePath, i=True, type="mayaBinary")
# Check to make sure objects exist
if not DoesObjectExist("J_Gun", "joint"): return
if not DoesObjectExist("tag_weapon", "tag"): return
if not DoesObjectExist("GunExport", "object set"): return
if not DoesObjectExist("DefViewSkeleton", "object set"): return
if not DoesObjectExist("tag_view", "tag"): return
if not cmds.objExists("viewmodelSleeves_OpForce") and not cmds.objExists("viewmodelSleeves_Marines"):
MessageBox("Error: Missing viewsleeves 'viewmodelSleeves_OpForce' or 'viewmodelSleeves_Marines'")
return
# Attach gun to rig
cmds.select("J_Gun", replace=True)
cmds.select("tag_weapon", add=True)
cmds.parent()
# Select things to export
cmds.select("GunExport", replace=True)
cmds.select("DefViewSkeleton", toggle=True)
cmds.select("tag_view", toggle=True)
if cmds.objExists("viewmodelSleeves_OpForce"):
cmds.select("viewmodelSleeves_OpForce", toggle=True, hierarchy=True)
else:
cmds.select("viewmodelSleeves_Marines", toggle=True, hierarchy=True)
# Export
if cmds.control("w"+OBJECT_NAMES['progress'][0], exists=True):
cmds.deleteUI("w"+OBJECT_NAMES['progress'][0])
progressWindow = cmds.window("w"+OBJECT_NAMES['progress'][0], title=OBJECT_NAMES['progress'][1], width=302, height=22, sizable=False)
cmds.columnLayout()
progressControl = cmds.progressBar(OBJECT_NAMES['progress'][0], width=300)
cmds.showWindow(progressWindow)
cmds.refresh() # Force the progress bar to be drawn
# Export
WarningsDuringExport = 0
response = None
try:
response = ExportXModel(exportPath)
except Exception as e:
response = "An unhandled error occurred during export:\n\n" + traceback.format_exc()
# Delete progress bar
cmds.deleteUI(progressWindow, window=True)
# Handle response
if type(response) == str or type(response) == unicode:
MessageBox(response)
elif WarningsDuringExport > 0:
MessageBox("Warnings occurred during export. Check the script editor output for more details.")
if type(response) != str and type(response) != unicode:
MessageBox("Export saved to\n\n" + os.path.normpath(exportPath))
def CreateNewViewmodelRigFile():
# Save reminder
if not SaveReminder(False):
return
# Get path
filePath = cmds.file(query=True, sceneName=True)
# Create a new file and import models
cmds.file(force=True, newFile=True)
cmds.file(os.path.join(GetRootFolder(), "bin/maya/rigs/viewmodel/ViewModel_Rig.mb"), reference=True, type="mayaBinary", namespace="rig", options="v=0")
cmds.file(filePath, reference=True, type="mayaBinary", namespace="VM_Gun")
# Check to make sure objects exist
if not DoesObjectExist("VM_Gun:J_Gun", "joint"): return
if not cmds.objExists("rig:DefMesh:tag_weapon") and not cmds.objExists("ConRig:DefMesh:tag_weapon"):
MessageBox("Error: Missing viewsleeves 'rig:DefMesh:tag_weapon' or 'ConRig:DefMesh:tag_weapon'")
return
# Connect gun to rig
if cmds.objExists("rig:DefMesh:tag_weapon"):
cmds.select("rig:DefMesh:tag_weapon", replace=True)
else:
cmds.select("ConRig:DefMesh:tag_weapon", replace=True)
cmds.select("VM_Gun:J_Gun", toggle=True)
cmds.parentConstraint(weight=1, name="VMParentConstraint")
cmds.select(clear=True)
def SwitchGunInCurrentRigFile():
# Save reminder
if not SaveReminder():
return
# Make sure the rig is correct
if not cmds.objExists("rig:DefMesh:tag_weapon") and not cmds.objExists("ConRig:DefMesh:tag_weapon"):
MessageBox("Error: Missing rig:DefMesh:tag_weapon' or 'ConRig:DefMesh:tag_weapon'")
return
if not DoesObjectExist("VM_Gun:J_Gun", "joint"): return
# Prompt user to select a new gun file
gunPath = cmds.fileDialog2(fileMode=1, fileFilter="Maya Files (*.ma *.mb)", caption="Select a New Gun File", startingDirectory=GetRootFolder())
if gunPath == None or len(gunPath) == 0 or gunPath[0].strip() == "":
return
gunPath = gunPath[0].strip()
# Delete the constraint
cmds.delete("VMParentConstraint")
# Delete any hand attachments
if cmds.objExists("rig:Hand_Extra_RI_GRP.Parent"):
parentRI = cmds.getAttr("rig:Hand_Extra_RI_GRP.Parent")
if parentRI != "":
cmds.delete(parentRI)
if cmds.objExists("rig:Hand_Extra_LE_GRP.Parent"):
parentLE = cmds.getAttr("rig:Hand_Extra_LE_GRP.Parent")
if parentLE != "":
cmds.delete(parentLE)
# Switch guns
    cmds.file(gunPath, loadReference="VM_GunRN")
# Connect gun to rig
if cmds.objExists("rig:DefMesh:tag_weapon"):
cmds.select("rig:DefMesh:tag_weapon", replace=True)
else:
cmds.select("ConRig:DefMesh:tag_weapon", replace=True)
cmds.select("VM_Gun:J_Gun", toggle=True)
cmds.parentConstraint(weight=1, name="VMParentConstraint")
cmds.select(clear=True)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------- XModel Export Window ----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateXModelWindow():
# Create window
if cmds.control(OBJECT_NAMES['xmodel'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['xmodel'][0])
cmds.window(OBJECT_NAMES['xmodel'][0], title=OBJECT_NAMES['xmodel'][1], width=340, height=1, retain=True, maximizeButton=False)
form = cmds.formLayout(OBJECT_NAMES['xmodel'][0]+"_Form")
# Controls
    slotDropDown = cmds.optionMenu(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown", changeCommand="CoDMayaTools.RefreshXModelWindow()", annotation="Each slot contains a different export path, settings, and saved selection")
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
cmds.menuItem(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown"+("_s%i" % i), label="Slot %i" % i)
separator1 = cmds.separator(style='in', height=16)
separator2 = cmds.separator(style='in')
saveToLabel = cmds.text(label="Save to:", annotation="This is where the .xmodel_export is saved to")
saveToField = cmds.textField(OBJECT_NAMES['xmodel'][0]+"_SaveToField", height=21, changeCommand="CoDMayaTools.GeneralWindow_SaveToField('xmodel')", annotation="This is where the .xmodel_export is saved to")
fileBrowserButton = cmds.button(label="...", height=21, command="CoDMayaTools.GeneralWindow_FileBrowser('xmodel')", annotation="Open a file browser dialog")
exportSelectedButton = cmds.button(label="Export Selected", command="CoDMayaTools.GeneralWindow_ExportSelected('xmodel', False)", annotation="Export all currently selected objects from the scene (current frame)\nWarning: Will automatically overwrite if the export path if it already exists")
saveSelectionButton = cmds.button(label="Save Selection", command="CoDMayaTools.GeneralWindow_SaveSelection('xmodel')", annotation="Save the current object selection")
getSavedSelectionButton = cmds.button(label="Get Saved Selection", command="CoDMayaTools.GeneralWindow_GetSavedSelection('xmodel')", annotation="Reselect the saved selection")
exportMultipleSlotsButton = cmds.button(label="Export Multiple Slots", command="CoDMayaTools.GeneralWindow_ExportMultiple('xmodel')", annotation="Automatically export multiple slots at once, using each slot's saved selection")
    exportInMultiExportCheckbox = cmds.checkBox(OBJECT_NAMES['xmodel'][0]+"_UseInMultiExportCheckBox", label="Use current slot for Export Multiple", changeCommand="CoDMayaTools.GeneralWindow_ExportInMultiExport('xmodel')", annotation="Check this to make the 'Export Multiple Slots' button export this slot")
setCosmeticParentbone = cmds.button(OBJECT_NAMES['xmodel'][0]+"_MarkCosmeticParent", label="Set selected as Cosmetic Parent", command="CoDMayaTools.SetCosmeticParent('xmodel')", annotation="Set this bone as our cosmetic parent. All bones under this will be cosmetic.")
RemoveCosmeticParent = cmds.button(OBJECT_NAMES['xmodel'][0]+"_ClearCosmeticParent", label="Clear Cosmetic Parent", command="CoDMayaTools.ClearCosmeticParent('xmodel')", annotation="Remove the cosmetic parent.")
# Setup form
cmds.formLayout(form, edit=True,
attachForm=[(slotDropDown, 'top', 6), (slotDropDown, 'left', 10), (slotDropDown, 'right', 10),
(separator1, 'left', 0), (separator1, 'right', 0),
(separator2, 'left', 0), (separator2, 'right', 0),
(saveToLabel, 'left', 12),
(fileBrowserButton, 'right', 10),
(exportMultipleSlotsButton, 'bottom', 6), (exportMultipleSlotsButton, 'left', 10),
(exportInMultiExportCheckbox, 'bottom', 9), (exportInMultiExportCheckbox, 'right', 6),
(exportSelectedButton, 'left', 10),
(saveSelectionButton, 'right', 10),
(setCosmeticParentbone, 'left', 10),
(RemoveCosmeticParent, 'left', 10)],
#(exportSelectedButton, 'bottom', 6), (exportSelectedButton, 'left', 10),
#(saveSelectionButton, 'bottom', 6), (saveSelectionButton, 'right', 10),
#(getSavedSelectionButton, 'bottom', 6)],
attachControl=[ (separator1, 'top', 0, slotDropDown),
(saveToLabel, 'bottom', 9, exportSelectedButton),
(saveToField, 'bottom', 5, exportSelectedButton), (saveToField, 'left', 5, saveToLabel), (saveToField, 'right', 5, fileBrowserButton),
(fileBrowserButton, 'bottom', 5, exportSelectedButton),
(exportSelectedButton, 'bottom', 5, separator2),
(saveSelectionButton, 'bottom', 5, separator2),
(setCosmeticParentbone, 'bottom', 5, separator2),
(RemoveCosmeticParent, 'bottom', 5, separator2),
(saveSelectionButton, 'bottom', 5, setCosmeticParentbone),
(exportSelectedButton, 'bottom', 5, setCosmeticParentbone),
(setCosmeticParentbone, 'bottom', 5, RemoveCosmeticParent),
(getSavedSelectionButton, 'bottom', 5, separator2), (getSavedSelectionButton, 'right', 10, saveSelectionButton),
(getSavedSelectionButton, 'bottom', 5, setCosmeticParentbone),
(separator2, 'bottom', 5, exportMultipleSlotsButton)])
def RefreshXModelWindow():
# Refresh/create node
if len(cmds.ls(OBJECT_NAMES['xmodel'][2])) == 0:
cmds.createNode("renderLayer", name=OBJECT_NAMES['xmodel'][2], skipSelect=True)
cmds.lockNode(OBJECT_NAMES['xmodel'][2], lock=False)
if not cmds.attributeQuery("slot", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="slot", attributeType='short', defaultValue=1)
if not cmds.attributeQuery("paths", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="paths", multi=True, dataType='string')
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".paths", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("selections", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="selections", multi=True, dataType='stringArray')
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".selections", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("useinmultiexport", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="useinmultiexport", multi=True, attributeType='bool', defaultValue=False)
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".useinmultiexport", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("Cosmeticbone", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="Cosmeticbone", dataType="string")
cmds.lockNode(OBJECT_NAMES['xmodel'][2], lock=True)
# Set values
slotIndex = cmds.optionMenu(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown", query=True, select=True)
path = cmds.getAttr(OBJECT_NAMES['xmodel'][2]+(".paths[%i]" % slotIndex))
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".slot", slotIndex)
cmds.textField(OBJECT_NAMES['xmodel'][0]+"_SaveToField", edit=True, fileName=path)
useInMultiExport = cmds.getAttr(OBJECT_NAMES['xmodel'][2]+(".useinmultiexport[%i]" % slotIndex))
cmds.checkBox(OBJECT_NAMES['xmodel'][0]+"_UseInMultiExportCheckBox", edit=True, value=useInMultiExport)
def SetCosmeticParent(reqarg):
selection = cmds.ls(selection = True, type = "joint")
if(len(selection) > 1):
MessageBox("Only 1 Cosmetic Parent is allowed.")
return
elif(len(selection) == 0):
MessageBox("No joint selected.")
return
cmds.setAttr(OBJECT_NAMES['xmodel'][2] + ".Cosmeticbone", selection[0], type="string")
MessageBox("\"%s\" has now been set as the cosmetic parent." % str(selection[0]))
def ClearCosmeticParent(reqarg):
cosmetic_bone = cmds.getAttr(OBJECT_NAMES["xmodel"][2]+ ".Cosmeticbone")
if cosmetic_bone is None:
cmds.error("No cosmetic bone set.")
cosmetic_bone = cosmetic_bone.split("|")[-1].split(":")[-1]
if cosmetic_bone != "" or cosmetic_bone is not None:
cmds.setAttr(OBJECT_NAMES['xmodel'][2] + ".Cosmeticbone", "", type="string")
MessageBox("Cosmetic Parent \"%s\" has now been removed." % cosmetic_bone)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------- XAnim Export Window ----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateXAnimWindow():
# Create window
if cmds.control(OBJECT_NAMES['xanim'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['xanim'][0])
cmds.window(OBJECT_NAMES['xanim'][0], title=OBJECT_NAMES['xanim'][1], width=1, height=1, retain=True, maximizeButton=False)
form = cmds.formLayout(OBJECT_NAMES['xanim'][0]+"_Form")
# Controls
    slotDropDown = cmds.optionMenu(OBJECT_NAMES['xanim'][0]+"_SlotDropDown", changeCommand="CoDMayaTools.RefreshXAnimWindow()", annotation="Each slot contains a different export path, frame range, notetrack, and saved selection")
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
        cmds.menuItem(OBJECT_NAMES['xanim'][0]+"_SlotDropDown"+("_s%i" % i), label="Slot %i" % i)
separator1 = cmds.separator(style='in')
separator2 = cmds.separator(style='in')
separator3 = cmds.separator(style='in')
framesLabel = cmds.text(label="Frames:", annotation="Range of frames to export")
framesStartField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xanim')", annotation="Starting frame to export (inclusive)")
framesToLabel = cmds.text(label="to")
framesEndField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xanim')", annotation="Ending frame to export (inclusive)")
GrabFrames = cmds.button(label="Grab Frames", width=75, command="CoDMayaTools.SetFrames('xanim')", annotation="Get frame end and start from scene.")
fpsLabel = cmds.text(label="FPS:")
fpsField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FPSField", height=21, width=35, value=1, minValue=1, changeCommand="CoDMayaTools.UpdateFramerate('xanim')", annotation="Animation FPS")
qualityLabel = cmds.text(label="Quality (0-10)", annotation="Quality of the animation, higher values result in less jitter but produce larger files. Default is 0")
qualityField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_qualityField", height=21, width=35, value=0, minValue=0, maxValue=10, step=1, changeCommand="CoDMayaTools.UpdateMultiplier('xanim')", annotation="Quality of the animation, higher values result in less jitter but produce larger files.")
notetracksLabel = cmds.text(label="Notetrack:", annotation="Notetrack info for the animation")
noteList = cmds.textScrollList(OBJECT_NAMES['xanim'][0]+"_NoteList", allowMultiSelection=False, selectCommand="CoDMayaTools.SelectNote('xanim')", annotation="List of notes in the notetrack")
addNoteButton = cmds.button(label="Add Note", width=75, command="CoDMayaTools.AddNote('xanim')", annotation="Add a note to the notetrack")
ReadNotesButton = cmds.button(label="Grab Notes", width=75, command="CoDMayaTools.ReadNotetracks('xanim')", annotation="Grab Notes from Notetrack in Outliner")
ClearNotes = cmds.button(label="Clear Notes", width=75, command="CoDMayaTools.ClearNotes('xanim')", annotation="Clear ALL notetracks.")
RenameNoteTrack = cmds.button(label="Rename Note", command="CoDMayaTools.RenameNotes('xanim')", annotation="Rename the currently selected note.")
removeNoteButton = cmds.button(label="Remove Note", command="CoDMayaTools.RemoveNote('xanim')", annotation="Remove the currently selected note from the notetrack")
noteFrameLabel = cmds.text(label="Frame:", annotation="The frame the currently selected note is applied to")
noteFrameField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_NoteFrameField", changeCommand="CoDMayaTools.UpdateNoteFrame('xanim')", height=21, width=30, minValue=0, annotation="The frame the currently selected note is applied to")
saveToLabel = cmds.text(label="Save to:", annotation="This is where .xanim_export is saved to")
saveToField = cmds.textField(OBJECT_NAMES['xanim'][0]+"_SaveToField", height=21, changeCommand="CoDMayaTools.GeneralWindow_SaveToField('xanim')", annotation="This is where .xanim_export is saved to")
fileBrowserButton = cmds.button(label="...", height=21, command="CoDMayaTools.GeneralWindow_FileBrowser('xanim', \"XAnim Intermediate File (*.xanim_export)\")", annotation="Open a file browser dialog")
    exportSelectedButton = cmds.button(label="Export Selected", command="CoDMayaTools.GeneralWindow_ExportSelected('xanim', False)", annotation="Export all currently selected joints from the scene (specified frames)\nWarning: Will automatically overwrite the export path if it already exists")
saveSelectionButton = cmds.button(label="Save Selection", command="CoDMayaTools.GeneralWindow_SaveSelection('xanim')", annotation="Save the current object selection")
getSavedSelectionButton = cmds.button(label="Get Saved Selection", command="CoDMayaTools.GeneralWindow_GetSavedSelection('xanim')", annotation="Reselect the saved selection")
exportMultipleSlotsButton = cmds.button(label="Export Multiple Slots", command="CoDMayaTools.GeneralWindow_ExportMultiple('xanim')", annotation="Automatically export multiple slots at once, using each slot's saved selection")
    exportInMultiExportCheckbox = cmds.checkBox(OBJECT_NAMES['xanim'][0]+"_UseInMultiExportCheckBox", label="Use current slot for Export Multiple", changeCommand="CoDMayaTools.GeneralWindow_ExportInMultiExport('xanim')", annotation="Check this to make the 'Export Multiple Slots' button export this slot")
    ReverseAnimation = cmds.checkBox("CoDMAYA_ReverseAnim", label="Export Animation Reversed", annotation="Check this if you want to export the animation reversed. Useful for making opposite sprints, etc.", value=False)
TagAlignExport = cmds.checkBox("CoDMAYA_TAGALIGN", label="Export TAG_ALIGN", annotation="Check this if you want to export TAG_ALIGN with the animation, required for some animations (Not needed for Viewmodel Animations)", value=False)
# Setup form
cmds.formLayout(form, edit=True,
attachForm=[(slotDropDown, 'top', 6), (slotDropDown, 'left', 10), (slotDropDown, 'right', 10),
(separator1, 'left', 0), (separator1, 'right', 0),
(framesLabel, 'left', 10),
(fpsLabel, 'left', 10),
(qualityLabel, 'left', 10),
(notetracksLabel, 'left', 10),
(noteList, 'left', 10),
(ReverseAnimation, 'left', 10),
(TagAlignExport, 'left', 10),
(addNoteButton, 'right', 10),
(ReadNotesButton, 'right', 10),
(RenameNoteTrack, 'right', 10),
(ClearNotes, 'right', 10),
(removeNoteButton, 'right', 10),
(noteFrameField, 'right', 10),
(separator2, 'left', 0), (separator2, 'right', 0),
(saveToLabel, 'left', 12),
(fileBrowserButton, 'right', 10),
(exportMultipleSlotsButton, 'bottom', 6), (exportMultipleSlotsButton, 'left', 10),
(exportInMultiExportCheckbox, 'bottom', 9), (exportInMultiExportCheckbox, 'right', 6),
(exportSelectedButton, 'left', 10),
(saveSelectionButton, 'right', 10),
(separator3, 'left', 0), (separator3, 'right', 0)],
attachControl=[ (separator1, 'top', 6, slotDropDown),
(framesLabel, 'top', 8, separator1),
(framesStartField, 'top', 5, separator1), (framesStartField, 'left', 4, framesLabel),
(framesToLabel, 'top', 8, separator1), (framesToLabel, 'left', 4+35+4, framesLabel),
(framesEndField, 'top', 5, separator1), (framesEndField, 'left', 4, framesToLabel),
(GrabFrames, 'top', 5, separator1), (GrabFrames, 'left', 4, framesEndField),
(fpsLabel, 'top', 8, framesStartField),
(fpsField, 'top', 5, framesStartField), (fpsField, 'left', 21, fpsLabel),
(qualityLabel, 'top', 8, fpsField),
(qualityField, 'top', 5, fpsField), (qualityField, 'left', 21, qualityLabel),
(notetracksLabel, 'top', 5, qualityLabel),
(noteList, 'top', 5, notetracksLabel), (noteList, 'right', 10, removeNoteButton), (noteList, 'bottom', 60, separator2),
(ReverseAnimation, 'top', 10, noteList), (ReverseAnimation, 'right', 10, removeNoteButton),
(TagAlignExport, 'top', 5, ReverseAnimation),
(addNoteButton, 'top', 5, notetracksLabel),
(ReadNotesButton, 'top', 5, addNoteButton),
(RenameNoteTrack, 'top', 5, ReadNotesButton),
(ClearNotes, 'top', 5, RenameNoteTrack),
(removeNoteButton, 'top', 5, ClearNotes),
(noteFrameField, 'top', 5, removeNoteButton),
(noteFrameLabel, 'top', 8, removeNoteButton), (noteFrameLabel, 'right', 4, noteFrameField),
(separator2, 'bottom', 5, fileBrowserButton),
(saveToLabel, 'bottom', 10, exportSelectedButton),
(saveToField, 'bottom', 5, exportSelectedButton), (saveToField, 'left', 5, saveToLabel), (saveToField, 'right', 5, fileBrowserButton),
(fileBrowserButton, 'bottom', 5, exportSelectedButton),
(exportSelectedButton, 'bottom', 5, separator3),
(saveSelectionButton, 'bottom', 5, separator3),
(getSavedSelectionButton, 'bottom', 5, separator3), (getSavedSelectionButton, 'right', 10, saveSelectionButton),
(separator3, 'bottom', 5, exportMultipleSlotsButton)
])
def RefreshXAnimWindow():
# Refresh/create node
if len(cmds.ls(OBJECT_NAMES['xanim'][2])) == 0:
cmds.createNode("renderLayer", name=OBJECT_NAMES['xanim'][2], skipSelect=True)
cmds.lockNode(OBJECT_NAMES['xanim'][2], lock=False)
if not cmds.attributeQuery("slot", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="slot", attributeType='short', defaultValue=1)
if not cmds.attributeQuery("paths", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="paths", multi=True, dataType='string')
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".paths", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("selections", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="selections", multi=True, dataType='stringArray')
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".selections", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("frameRanges", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="frameRanges", multi=True, dataType='long2')
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".frameRanges", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("framerate", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="framerate", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".framerate", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("multiplier", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="multiplier", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".multiplier", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("notetracks", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="notetracks", multi=True, dataType='string') # Formatted as "<name>:<frame>,<name>:<frame>,..."
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".notetracks", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("useinmultiexport", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="useinmultiexport", multi=True, attributeType='bool', defaultValue=False)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".useinmultiexport", size=EXPORT_WINDOW_NUMSLOTS)
cmds.lockNode(OBJECT_NAMES['xanim'][2], lock=True)
# Set values
slotIndex = cmds.optionMenu(OBJECT_NAMES['xanim'][0]+"_SlotDropDown", query=True, select=True)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".slot", slotIndex)
path = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".paths[%i]" % slotIndex))
cmds.textField(OBJECT_NAMES['xanim'][0]+"_SaveToField", edit=True, fileName=path)
frameRange = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".frameRanges[%i]" % slotIndex))
if frameRange == None:
cmds.setAttr(OBJECT_NAMES['xanim'][2]+(".frameRanges[%i]" % slotIndex), 0, 0, type='long2')
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", edit=True, value=0)
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", edit=True, value=0)
else:
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", edit=True, value=frameRange[0][0])
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", edit=True, value=frameRange[0][1])
framerate = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".framerate[%i]" % slotIndex))
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FPSField", edit=True, value=framerate)
noteFrameField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_NoteFrameField", edit=True, value=0)
cmds.textScrollList(OBJECT_NAMES['xanim'][0]+"_NoteList", edit=True, removeAll=True)
noteList = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
for note in notes:
parts = note.split(":")
if note.strip() == "" or len(parts) == 0:
continue
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#")
if name == "":
continue
cmds.textScrollList(OBJECT_NAMES['xanim'][0]+"_NoteList", edit=True, append=name)
useInMultiExport = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".useinmultiexport[%i]" % slotIndex))
cmds.checkBox(OBJECT_NAMES['xanim'][0]+"_UseInMultiExportCheckBox", edit=True, value=useInMultiExport)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------- XCam Export Window -----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateXCamWindow():
# Create window
if cmds.control(OBJECT_NAMES['xcam'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['xcam'][0])
cmds.window(OBJECT_NAMES['xcam'][0], title=OBJECT_NAMES['xcam'][1], width=1, height=1, retain=True, maximizeButton=False)
form = cmds.formLayout(OBJECT_NAMES['xcam'][0]+"_Form")
# Controls
    slotDropDown = cmds.optionMenu(OBJECT_NAMES['xcam'][0]+"_SlotDropDown", changeCommand="CoDMayaTools.RefreshXCamWindow()", annotation="Each slot contains a different export path, frame range, notetrack, and saved selection")
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
        cmds.menuItem(OBJECT_NAMES['xcam'][0]+"_SlotDropDown"+("_s%i" % i), label="Slot %i" % i)
separator1 = cmds.separator(style='in')
separator2 = cmds.separator(style='in')
separator3 = cmds.separator(style='in')
framesLabel = cmds.text(label="Frames:", annotation="Range of frames to export")
framesStartField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xcam')", annotation="Starting frame to export (inclusive)")
framesToLabel = cmds.text(label="to")
framesEndField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xcam')", annotation="Ending frame to export (inclusive)")
fpsLabel = cmds.text(label="FPS:")
fpsField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FPSField", height=21, width=35, value=1, minValue=1, changeCommand="CoDMayaTools.UpdateFramerate('xcam')", annotation="Animation FPS")
#qualityLabel = cmds.text(label="Quality (0-10)", annotation="Quality of the animation, higher values result in less jitter but produce larger files. Default is 0")
#qualityField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_qualityField", height=21, width=35, value=0, minValue=0, maxValue=10, step=1, changeCommand=XCamWindow_UpdateMultiplier, annotation="Quality of the animation, higher values result in less jitter but produce larger files.")
notetracksLabel = cmds.text(label="Notetrack:", annotation="Notetrack info for the animation")
noteList = cmds.textScrollList(OBJECT_NAMES['xcam'][0]+"_NoteList", allowMultiSelection=False, selectCommand="CoDMayaTools.SelectNote('xcam')", annotation="List of notes in the notetrack")
addNoteButton = cmds.button(label="Add Note", width=75, command="CoDMayaTools.AddNote('xcam')", annotation="Add a note to the notetrack")
ReadNotesButton = cmds.button(label="Grab Notes", width=75, command="CoDMayaTools.ReadNotetracks('xcam')", annotation="Grab Notes from Notetrack in Outliner")
RenameNoteTrack = cmds.button(label="Rename Note", command="CoDMayaTools.RenameNotes('xcam')", annotation="Rename the currently selected note.")
removeNoteButton = cmds.button(label="Remove Note", command="CoDMayaTools.RemoveNote('xcam')", annotation="Remove the currently selected note from the notetrack")
noteFrameLabel = cmds.text(label="Frame:", annotation="The frame the currently selected note is applied to")
noteFrameField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_NoteFrameField", changeCommand="CoDMayaTools.UpdateNoteFrame('xcam')", height=21, width=30, minValue=0, annotation="The frame the currently selected note is applied to")
GrabFrames = cmds.button(label="Grab Frames", width=75, command="CoDMayaTools.SetFrames('xcam')", annotation="Get frame end and start from scene.")
ClearNotes = cmds.button(label="Clear Notes", width=75, command="CoDMayaTools.ClearNotes('xcam')", annotation="Clear ALL notetracks.")
saveToLabel = cmds.text(label="Save to:", annotation="This is where .xcam_export is saved to")
saveToField = cmds.textField(OBJECT_NAMES['xcam'][0]+"_SaveToField", height=21, changeCommand="CoDMayaTools.GeneralWindow_SaveToField('xcam')", annotation="This is where .xcam_export is saved to")
fileBrowserButton = cmds.button(label="...", height=21, command="CoDMayaTools.GeneralWindow_FileBrowser('xcam', \"XCam Intermediate File (*.xcam_export)\")", annotation="Open a file browser dialog")
exportSelectedButton = cmds.button(label="Export Selected", command="CoDMayaTools.GeneralWindow_ExportSelected('xcam', False)", annotation="Export all currently selected joints from the scene (specified frames)\nWarning: Will automatically overwrite the export path if it already exists")
saveSelectionButton = cmds.button(label="Save Selection", command="CoDMayaTools.GeneralWindow_SaveSelection('xcam')", annotation="Save the current object selection")
getSavedSelectionButton = cmds.button(label="Get Saved Selection", command="CoDMayaTools.GeneralWindow_GetSavedSelection('xcam')", annotation="Reselect the saved selection")
exportMultipleSlotsButton = cmds.button(label="Export Multiple Slots", command="CoDMayaTools.GeneralWindow_ExportMultiple('xcam')", annotation="Automatically export multiple slots at once, using each slot's saved selection")
exportInMultiExportCheckbox = cmds.checkBox(OBJECT_NAMES['xcam'][0]+"_UseInMultiExportCheckBox", label="Use current slot for Export Multiple", changeCommand="CoDMayaTools.GeneralWindow_ExportInMultiExport('xcam')", annotation="Check this make the 'Export Multiple Slots' button export this slot")
#ReverseAnimation = cmds.checkBox("CoDMAYA_ReverseAnim", label="Export Animation Reversed", annotation="Check this if you want to export the anim. backwards. Useful for reversing to make opposite sprints, etc.", value=False)
# Setup form
cmds.formLayout(form, edit=True,
attachForm=[(slotDropDown, 'top', 6), (slotDropDown, 'left', 10), (slotDropDown, 'right', 10),
(separator1, 'left', 0), (separator1, 'right', 0),
(framesLabel, 'left', 10),
(fpsLabel, 'left', 10),
#(qualityLabel, 'left', 10),
(notetracksLabel, 'left', 10),
(noteList, 'left', 10),
#(ReverseAnimation, 'left', 10),
(addNoteButton, 'right', 10),
(ReadNotesButton, 'right', 10),
(RenameNoteTrack, 'right', 10),
(ClearNotes, 'right', 10),
(removeNoteButton, 'right', 10),
(noteFrameField, 'right', 10),
(separator2, 'left', 0), (separator2, 'right', 0),
(saveToLabel, 'left', 12),
(fileBrowserButton, 'right', 10),
(exportMultipleSlotsButton, 'bottom', 6), (exportMultipleSlotsButton, 'left', 10),
(exportInMultiExportCheckbox, 'bottom', 9), (exportInMultiExportCheckbox, 'right', 6),
(exportSelectedButton, 'left', 10),
(saveSelectionButton, 'right', 10),
(separator3, 'left', 0), (separator3, 'right', 0)],
attachControl=[ (separator1, 'top', 6, slotDropDown),
(framesLabel, 'top', 8, separator1),
(framesStartField, 'top', 5, separator1), (framesStartField, 'left', 4, framesLabel),
(framesToLabel, 'top', 8, separator1), (framesToLabel, 'left', 4+35+4, framesLabel),
(framesEndField, 'top', 5, separator1), (framesEndField, 'left', 4, framesToLabel),
(GrabFrames, 'top', 5, separator1), (GrabFrames, 'left', 4, framesEndField),
(fpsLabel, 'top', 8, framesStartField),
(fpsField, 'top', 5, framesStartField), (fpsField, 'left', 21, fpsLabel),
#(qualityLabel, 'top', 8, fpsField),
#(qualityField, 'top', 5, fpsField), (qualityField, 'left', 21, qualityLabel),
(notetracksLabel, 'top', 5, fpsField),
(noteList, 'top', 5, notetracksLabel), (noteList, 'right', 10, removeNoteButton), (noteList, 'bottom', 60, separator2),
#(ReverseAnimation, 'top', 10, noteList), (ReverseAnimation, 'right', 10, removeNoteButton),
(addNoteButton, 'top', 5, notetracksLabel),
(ReadNotesButton, 'top', 5, addNoteButton),
(RenameNoteTrack, 'top', 5, ReadNotesButton),
(ClearNotes, 'top', 5, RenameNoteTrack),
(removeNoteButton, 'top', 5, ClearNotes),
(noteFrameField, 'top', 5, removeNoteButton),
(noteFrameLabel, 'top', 8, removeNoteButton), (noteFrameLabel, 'right', 4, noteFrameField),
(separator2, 'bottom', 5, fileBrowserButton),
(saveToLabel, 'bottom', 10, exportSelectedButton),
(saveToField, 'bottom', 5, exportSelectedButton), (saveToField, 'left', 5, saveToLabel), (saveToField, 'right', 5, fileBrowserButton),
(fileBrowserButton, 'bottom', 5, exportSelectedButton),
(exportSelectedButton, 'bottom', 5, separator3),
(saveSelectionButton, 'bottom', 5, separator3),
(getSavedSelectionButton, 'bottom', 5, separator3), (getSavedSelectionButton, 'right', 10, saveSelectionButton),
(separator3, 'bottom', 5, exportMultipleSlotsButton)
])
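# Usage sketch (assumption: mirrors the xAnim/xModel windows): the window is built once with
# retain=True and stays hidden; ShowWindow('xcam'), defined further below, refreshes and shows it.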
def RefreshXCamWindow():
# Refresh/create node
if len(cmds.ls(OBJECT_NAMES['xcam'][2])) == 0:
cmds.createNode("renderLayer", name=OBJECT_NAMES['xcam'][2], skipSelect=True)
cmds.lockNode(OBJECT_NAMES['xcam'][2], lock=False)
if not cmds.attributeQuery("slot", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="slot", attributeType='short', defaultValue=1)
if not cmds.attributeQuery("paths", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="paths", multi=True, dataType='string')
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".paths", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("selections", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="selections", multi=True, dataType='stringArray')
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".selections", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("frameRanges", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="frameRanges", multi=True, dataType='long2')
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".frameRanges", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("framerate", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="framerate", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".framerate", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("multiplier", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="multiplier", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".multiplier", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("notetracks", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="notetracks", multi=True, dataType='string') # Formatted as "<name>:<frame>,<name>:<frame>,..."
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".notetracks", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("useinmultiexport", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="useinmultiexport", multi=True, attributeType='bool', defaultValue=False)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".useinmultiexport", size=EXPORT_WINDOW_NUMSLOTS)
cmds.lockNode(OBJECT_NAMES['xcam'][2], lock=True)
# Set values
slotIndex = cmds.optionMenu(OBJECT_NAMES['xcam'][0]+"_SlotDropDown", query=True, select=True)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".slot", slotIndex)
path = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".paths[%i]" % slotIndex))
cmds.textField(OBJECT_NAMES['xcam'][0]+"_SaveToField", edit=True, fileName=path)
frameRange = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".frameRanges[%i]" % slotIndex))
if frameRange == None:
cmds.setAttr(OBJECT_NAMES['xcam'][2]+(".frameRanges[%i]" % slotIndex), 0, 0, type='long2')
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", edit=True, value=0)
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", edit=True, value=0)
else:
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", edit=True, value=frameRange[0][0])
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", edit=True, value=frameRange[0][1])
framerate = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".framerate[%i]" % slotIndex))
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FPSField", edit=True, value=framerate)
noteFrameField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_NoteFrameField", edit=True, value=0)
cmds.textScrollList(OBJECT_NAMES['xcam'][0]+"_NoteList", edit=True, removeAll=True)
noteList = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
for note in notes:
parts = note.split(":")
if note.strip() == "" or len(parts) == 0:
continue
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"])
if name == "":
continue
cmds.textScrollList(OBJECT_NAMES['xcam'][0]+"_NoteList", edit=True, append=name)
useInMultiExport = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".useinmultiexport[%i]" % slotIndex))
cmds.checkBox(OBJECT_NAMES['xcam'][0]+"_UseInMultiExportCheckBox", edit=True, value=useInMultiExport)
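# Storage sketch: each slot's ".notetracks[i]" attribute is a flat string of "name:frame," entries,
# e.g. "sndnt#fire:0,rmbnt#reload_start:5," (illustrative values), which the loop above splits apart.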
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------- xAnim/xCam Export Data --------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def SetFrames(windowID):
"""
Queries the start and end frames and sets them for the window given by windowID.
"""
start = cmds.playbackOptions(minTime=True, query=True)
end = cmds.playbackOptions(maxTime=True, query=True) # Query the timeline's start and end frames.
cmds.intField(OBJECT_NAMES[windowID][0] + "_FrameStartField", edit=True, value=start)
cmds.intField(OBJECT_NAMES[windowID][0] + "_FrameEndField", edit=True, value=end)
UpdateFrameRange(windowID)
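# Usage sketch (assumes the export window and its fields already exist):
#   import CoDMayaTools
#   CoDMayaTools.SetFrames('xcam')   # copy the current timeline range into the window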
def UpdateFrameRange(windowID):
"""
Updates start and end frame when set by user or by other means.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
start = cmds.intField(OBJECT_NAMES[windowID][0]+"_FrameStartField", query=True, value=True)
end = cmds.intField(OBJECT_NAMES[windowID][0]+"_FrameEndField", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".frameRanges[%i]" % slotIndex), start, end, type='long2')
def UpdateFramerate(windowID):
"""
Updates framerate when set by user or by other means.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
fps = cmds.intField(OBJECT_NAMES[windowID][0]+"_FPSField", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".framerate[%i]" % slotIndex), fps)
def UpdateMultiplier(windowID):
"""
Updates multiplier when set by user or by other means.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
fps = cmds.intField(OBJECT_NAMES[windowID][0]+"_qualityField", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".multiplier[%i]" % slotIndex), fps)
def AddNote(windowID):
"""
Add notetrack to window and attribute when user creates one.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
if cmds.promptDialog(title="Add Note to Slot %i's Notetrack" % slotIndex, message="Enter the note's name:\t\t ") != "Confirm":
return
userInput = cmds.promptDialog(query=True, text=True)
noteName = "".join([c for c in userInput if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#") # Remove all non-alphanumeric characters
if noteName == "":
MessageBox("Invalid note name")
return
existingItems = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, allItems=True)
if existingItems != None and noteName in existingItems:
MessageBox("A note with this name already exists")
return
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
noteList += "%s:%i," % (noteName, cmds.currentTime(query=True))
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, append=noteName, selectIndexedItem=len((existingItems or []))+1)
SelectNote(windowID)
def __get_notetracks__():
"""Loads all the notetracks in the scene"""
if not cmds.objExists("SENotes"):
cmds.rename(cmds.spaceLocator(), "SENotes")
if not cmds.objExists("SENotes.Notetracks"):
cmds.addAttr("SENotes", longName="Notetracks",
dataType="string", storable=True)
cmds.setAttr("SENotes.Notetracks", "{}", type="string")
# Load the existing notetracks buffer, then ensure we have this notetrack
return json.loads(cmds.getAttr("SENotes.Notetracks"))
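# Data sketch: SENotes.Notetracks holds a JSON object mapping note name -> list of frames,
# e.g. '{"reload_start": [5, 120]}' (illustrative values, typically written at animation import time).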
def ReadNotetracks(windowID):
"""
Read notetracks from imported animations.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
existingItems = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, allItems=True)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
# Get Notetracks
notetracks = __get_notetracks__()
# Add notetrack type prefix automatically
write_note_type = QueryToggableOption('PrefixNoteType')
for note, frames in notetracks.iteritems():
# Ignore end/loop_end
if note == "end" or note == "loop_end":
continue
# Check if we want to write notetype
# and if note is not already prefixed.
if(write_note_type and not "nt#" in note):
# Set Sound Note as Standard
note_type = "sndnt"
# Split notetrack's name
notesplit = note.split("_")
# Check if this is a rumble note (first word will be viewmodel/reload)
if(notesplit[0] == "viewmodel" or notesplit[0] == "reload"):
note_type = "rmbnt"
note = note.replace("viewmodel", "reload")
# Append
note = "#".join((note_type, note))
# Loop through note frames
for frame in frames:
# Append to list and scroll list
noteList += "%s:%i," % (note, frame)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, append=note, selectIndexedItem=len((existingItems or []))+1)
# Set selected note
SelectNote(windowID)
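# Prefixing sketch (illustrative inputs), with the 'PrefixNoteType' option enabled:
#   "fire"            -> "sndnt#fire"
#   "viewmodel_shake" -> "rmbnt#reload_shake"   (viewmodel is swapped for reload first)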
def RenameNotes(windowID):
"""
Rename selected notetrack.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
if cmds.promptDialog(title="Rename NoteTrack in slot", message="Enter new notetrack name:\t\t ") != "Confirm":
return
userInput = cmds.promptDialog(query=True, text=True)
noteName = "".join([c for c in userInput if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#") # Remove all non-alphanumeric characters
if noteName == "":
MessageBox("Invalid note name")
return
currentIndex = currentIndex[0]
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
noteInfo = notes[currentIndex-1].split(":")
note = int(noteInfo[1])
NoteTrack = noteName # use the sanitized name, not the raw input
# REMOVE NOTE
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, removeIndexedItem=currentIndex)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
del notes[currentIndex-1]
noteList = ",".join(notes)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
# RE-ADD NOTE (insert it again under the new name)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
noteList += "%s:%i," % (NoteTrack, note) # Add Notes to Aidan's list.
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, append=NoteTrack, selectIndexedItem=currentIndex)
SelectNote(windowID)
def RemoveNote(windowID):
"""
Remove Note
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
currentIndex = currentIndex[0]
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, removeIndexedItem=currentIndex)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
del notes[currentIndex-1]
noteList = ",".join(notes)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
SelectNote(windowID)
def ClearNotes(windowID):
"""
Clear ALL notetracks.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
notes = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, allItems=True)
if notes is None:
return
for note in notes:
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, removeItem=note)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notetracks = noteList.split(",")
del notetracks
noteList = ""
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
SelectNote(windowID)
def UpdateNoteFrame(windowID):
"""
Update notetrack information.
"""
newFrame = cmds.intField(OBJECT_NAMES[windowID][0] + "_NoteFrameField", query = True, value = True)
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
currentIndex = currentIndex[0]
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
parts = notes[currentIndex-1].split(":")
if len(parts) < 2:
error("Error parsing notetrack string (A) at %i: %s" % (currentIndex, noteList))
notes[currentIndex-1] = "%s:%i" % (parts[0], newFrame)
noteList = ",".join(notes)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
def SelectNote(windowID):
"""
Select notetrack
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
currentIndex = currentIndex[0]
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
parts = notes[currentIndex-1].split(":")
if len(parts) < 2:
error("Error parsing notetrack string (B) at %i: %s" % (currentIndex, noteList))
frame=0
try:
frame = int(parts[1])
except ValueError:
pass
noteFrameField = cmds.intField(OBJECT_NAMES[windowID][0]+"_NoteFrameField", edit=True, value=frame)
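# Minimal parsing sketch for the "name:frame," strings handled above (hypothetical helper,
# not called by the tool itself; shown only to document the format):
def _parse_notetrack_string(noteList):
    notes = []
    for entry in noteList.split(","):
        parts = entry.split(":")
        if len(parts) == 2 and parts[0].strip() != "":
            try:
                notes.append((parts[0], int(parts[1])))  # (note name, frame number)
            except ValueError:
                pass  # skip entries with a non-numeric frame
    return notes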
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------- General Export Window ---------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# GeneralWindow_... are callback functions that are used by both export windows
def GeneralWindow_SaveToField(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
filePath = cmds.textField(OBJECT_NAMES[windowID][0]+"_SaveToField", query=True, fileName=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".paths[%i]" % slotIndex), filePath, type='string')
def GeneralWindow_FileBrowser(windowID, formatExtension="*"):
current_game = GetCurrentGame()
defaultFolder = GetRootFolder(None, current_game)
if windowID == 'xanim':
defaultFolder = defaultFolder + 'xanim_export/'
# Switch these around depending on title user has selected.
# and whether we're using ExportX
formatExtension = (
"XAnim Binary File (.xanim_bin) (*.xanim_bin);;"
"XAnim ASCII File (.xanim_export) (*.xanim_export)"
if GetCurrentGame() == "CoD12" and not USE_EXPORT_X else
"XAnim ASCII File (.xanim_export) (*.xanim_export);;"
"XAnim Binary File (.xanim_bin) (*.xanim_bin)")
elif windowID == 'xcam':
defaultFolder = defaultFolder + 'xanim_export/'
elif windowID == 'xmodel':
defaultFolder = defaultFolder + 'model_export/'
# Switch these around depending on title user has selected.
# and whether we're using ExportX
formatExtension = (
"XModel Binary File (.xmodel_bin) (*.xmodel_bin);;"
"XModel ASCII File (.xmodel_export) (*.xmodel_export)"
if GetCurrentGame() == "CoD12" and not USE_EXPORT_X else
"XModel ASCII File (.xmodel_export) (*.xmodel_export);;"
"XModel Binary File (.xmodel_bin) (*.xmodel_bin)")
saveTo = cmds.fileDialog2(fileMode=0, fileFilter=formatExtension, caption="Export To", startingDirectory=defaultFolder)
if saveTo == None or len(saveTo) == 0 or saveTo[0].strip() == "":
return
saveTo = saveTo[0].strip()
cmds.textField(OBJECT_NAMES[windowID][0]+"_SaveToField", edit=True, fileName=saveTo)
GeneralWindow_SaveToField(windowID)
def GeneralWindow_SaveSelection(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
selection = cmds.ls(selection=True)
if selection == None or len(selection) == 0:
return
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".selections[%i]" % slotIndex), len(selection), *selection, type='stringArray')
def GeneralWindow_GetSavedSelection(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
selection = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".selections[%i]" % slotIndex))
validSelection = []
for obj in selection:
if cmds.objExists(obj):
validSelection.append(obj)
# Remove non-existing objects from the saved list
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".selections[%i]" % slotIndex), len(validSelection), *validSelection, type='stringArray')
if validSelection == None or len(validSelection) == 0:
MessageBox("No selection saved to slot %i" % slotIndex)
return False
cmds.select(validSelection)
return True
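# Usage sketch: save the current selection into the active slot, then recall it later:
#   GeneralWindow_SaveSelection('xcam')
#   GeneralWindow_GetSavedSelection('xcam')   # returns False if nothing valid was saved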
def GeneralWindow_ExportSelected(windowID, exportingMultiple):
global WarningsDuringExport
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
# Get path
filePath = cmds.textField(OBJECT_NAMES[windowID][0]+"_SaveToField", query=True, fileName=True)
if filePath.strip() == "":
if exportingMultiple:
MessageBox("Invalid path on slot %i:\n\nPath is empty." % slotIndex)
else:
MessageBox("Invalid path:\n\nPath is empty.")
return
if os.path.isdir(filePath):
if exportingMultiple:
MessageBox("Invalid path on slot %i:\n\nPath points to an existing directory." % slotIndex)
else:
MessageBox("Invalid path:\n\nPath points to an existing directory.")
return
# Save reminder
if not exportingMultiple and not SaveReminder():
return
# Progress bar
if cmds.control("w"+OBJECT_NAMES['progress'][0], exists=True):
cmds.deleteUI("w"+OBJECT_NAMES['progress'][0])
progressWindow = cmds.window("w"+OBJECT_NAMES['progress'][0], title=OBJECT_NAMES['progress'][1], width=302, height=22, sizeable=False)
cmds.columnLayout()
progressControl = cmds.progressBar(OBJECT_NAMES['progress'][0], width=300)
if QueryToggableOption("PrintExport") and windowID == "xmodel":
cmds.scrollField("ExportLog", editable=False, wordWrap=False, width = 300)
cmds.showWindow(progressWindow)
cmds.refresh() # Force the progress bar to be drawn
# Export
if not exportingMultiple:
WarningsDuringExport = 0
response = None
try:
exec("response = %s(\"%s\")" % (OBJECT_NAMES[windowID][4], filePath))
except Exception as e:
response = "An unhandled error occurred during export:\n\n" + traceback.format_exc()
cmds.deleteUI(progressWindow, window=True)
# Handle response
if type(response) == str or type(response) == unicode:
if exportingMultiple:
MessageBox("Slot %i\n\n%s" % (slotIndex, response))
else:
MessageBox(response)
elif WarningsDuringExport > 0 and not exportingMultiple:
MessageBox("Warnings occurred during export. Check the script editor output for more details.")
def GeneralWindow_ExportMultiple(windowID):
global WarningsDuringExport
originalSlotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
anySlotEnabled = False
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
useInMultiExport = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".useinmultiexport[%i]" % i))
if useInMultiExport:
anySlotEnabled = True
break
if not anySlotEnabled:
MessageBox("No slots set to export.")
return
if not SaveReminder():
return
WarningsDuringExport = 0
originalSelection = cmds.ls(selection=True)
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
useInMultiExport = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".useinmultiexport[%i]" % i))
if useInMultiExport:
print "Exporting slot %i in multiexport" % i
cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", edit=True, select=i)
exec(OBJECT_NAMES[windowID][3] + "()") # Refresh window
if GeneralWindow_GetSavedSelection(windowID):
GeneralWindow_ExportSelected(windowID, True)
if originalSelection == None or len(originalSelection) == 0:
cmds.select(clear=True)
else:
cmds.select(originalSelection)
if WarningsDuringExport > 0:
MessageBox("Warnings occurred during export. Check the script editor output for more details.")
# Reset slot
cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", edit=True, select=originalSlotIndex)
exec(OBJECT_NAMES[windowID][3] + "()") # Refresh window
def GeneralWindow_ExportInMultiExport(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
useInMultiExport = cmds.checkBox(OBJECT_NAMES[windowID][0]+"_UseInMultiExportCheckBox", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".useinmultiexport[%i]" % slotIndex), useInMultiExport)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# --------------------------------------------------------------------------- General GUI --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def SaveReminder(allowUnsaved=True):
if cmds.file(query=True, modified=True):
if cmds.file(query=True, exists=True):
result = cmds.confirmDialog(message="Save changes to %s?" % cmds.file(query=True, sceneName=True), button=["Yes", "No", "Cancel"], defaultButton="Yes", title="Save Changes")
if result == "Yes":
cmds.file(save=True)
elif result != "No":
return False
else: # The file has never been saved (has no name)
if allowUnsaved:
result = cmds.confirmDialog(message="The current scene is not saved. Continue?", button=["Yes", "No"], defaultButton="Yes", title="Save Changes")
if result != "Yes":
return False
else:
MessageBox("The scene needs to be saved first")
return False
return True
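# Usage sketch: gate an export on the scene being saved (or the user opting to continue):
#   if SaveReminder():
#       ...proceed with the export...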
def PrintWarning(message):
global WarningsDuringExport
if WarningsDuringExport < MAX_WARNINGS_SHOWN:
print "WARNING: %s" % message
WarningsDuringExport += 1
elif WarningsDuringExport == MAX_WARNINGS_SHOWN:
print "More warnings not shown because printing text is slow...\n"
WarningsDuringExport = MAX_WARNINGS_SHOWN+1
def MessageBox(message):
cmds.confirmDialog(message=message, button='OK', defaultButton='OK', title=OBJECT_NAMES['menu'][1])
def ShowWindow(windowID):
exec(OBJECT_NAMES[windowID][3] + "()") # Refresh window
cmds.showWindow(OBJECT_NAMES[windowID][0])
def ProgressBarStep():
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, step=1)
def LogExport(text, isWarning = False):
if QueryToggableOption("PrintExport"):
if isWarning:
global WarningsDuringExport
if WarningsDuringExport < MAX_WARNINGS_SHOWN:
cmds.scrollField("ExportLog", edit = True, insertText = text)
WarningsDuringExport += 1
elif WarningsDuringExport == MAX_WARNINGS_SHOWN:
cmds.scrollField("ExportLog", edit = True, insertText = "More warnings not shown because printing text is slow...\n")
WarningsDuringExport = MAX_WARNINGS_SHOWN+1
else:
cmds.scrollField("ExportLog", edit = True, insertText = text)
def AboutWindow():
result = cmds.confirmDialog(message="Call of Duty Tools for Maya, created by Aidan Shafran (with assistance from The Internet).\nMaintained by Ray1235 (Maciej Zaremba) & Scobalula\n\nThis script is under the GNU General Public License. You may modify or redistribute this script, however it comes with no warranty. Go to http://www.gnu.org/licenses/ for more details.\n\nVersion: %.2f" % FILE_VERSION, button=['OK', 'Visit Github Repo', 'CoD File Formats'], defaultButton='OK', title="About " + OBJECT_NAMES['menu'][1])
if result == "Visit Github Repo":
webbrowser.open("https://github.com/Ray1235/CoDMayaTools")
elif result == "CoD File Formats":
webbrowser.open("http://aidanshafran.com/codmayatools/codformats.html")
def LegacyWindow():
result = cmds.confirmDialog(message="""CoD1 mode exports models that are compatible with CoD1.
When this mode is disabled, the plugin will export models that are compatible with CoD2 and newer.
""", button=['OK'], defaultButton='OK', title="Legacy options")
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------- Get/Set Root Folder ----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def SetRootFolder(msg=None, game="none"):
#if game == "none":
# game = currentGame
#if game == "none":
# res = cmds.confirmDialog(message="Please select the game you're working with", button=['OK'], defaultButton='OK', title="WARNING")
# return None
# Get current root folder (this also makes sure the reg key exists)
codRootPath = GetRootFolder(False, game)
# Open input box
#if cmds.promptDialog(title="Set Root Path", message=msg or "Change your root path:\t\t\t", text=codRootPath) != "Confirm":
# return None
rootPaths = cmds.fileDialog2(fileMode=3, dialogStyle=2)
if not rootPaths: # user cancelled the folder dialog
return None
codRootPath = rootPaths[0] + "/"
# Check to make sure the path exists
if not os.path.isdir(codRootPath):
MessageBox("Given root path does not exist")
return None
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_SET_VALUE)
reg.SetValueEx(storageKey, "%sRootPath" % game, 0, reg.REG_SZ, codRootPath)
reg.CloseKey(storageKey)
return codRootPath
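# Registry sketch (assumed layout): the root is stored per game under GLOBAL_STORAGE_REG_KEY
# as a "<game>RootPath" string value, e.g. "CoD12RootPath" -> "C:/t7/" (illustrative path).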
def GetRootFolder(firstTimePrompt=False, game="none"):
codRootPath = ""
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
codRootPath = reg.QueryValueEx(storageKey, "%sRootPath" % game)[0]
reg.CloseKey(storageKey)
except WindowsError:
print(traceback.format_exc())
# First time, create key
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
reg.SetValueEx(storageKey, "RootPath", 0, reg.REG_SZ, "")
reg.CloseKey(storageKey)
if not os.path.isdir(codRootPath):
codRootPath = ""
# First-time prompt
if firstTimePrompt:
result = SetRootFolder("Your root folder path hasn't been confirmed yet. If the following is not\ncorrect, please fix it:", game)
if result:
codRootPath = result
return codRootPath
def RunExport2Bin(file):
p = GetExport2Bin()
directory = os.path.dirname(os.path.realpath(file))
if os.path.splitext(os.path.basename(p))[0] == "export2bin":
p = subprocess.Popen([p, "*"], cwd=directory)
elif os.path.splitext(os.path.basename(p))[0] == "exportx":
p = subprocess.Popen([p, "-f %s" % file])
p.wait()
if(QueryToggableOption('DeleteExport')):
os.remove(file)
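# Invocation sketch, matching the branches above: export2bin runs as "export2bin *" from the
# file's folder (converting everything in it), while exportx runs as "exportx -f <file>".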
def SetExport2Bin():
binPaths = cmds.fileDialog2(fileMode=1, dialogStyle=2)
if not binPaths: # user cancelled the file dialog
return ""
export2binpath = binPaths[0]
# Check to make sure the path exists
if not os.path.isfile(export2binpath):
MessageBox("Given path does not exist")
return ""
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_SET_VALUE)
reg.SetValueEx(storageKey, "Export2BinPath", 0, reg.REG_SZ, export2binpath)
reg.CloseKey(storageKey)
return export2binpath
def GetExport2Bin(skipSet=True):
export2binpath = ""
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
export2binpath = reg.QueryValueEx(storageKey, "Export2BinPath")[0]
reg.CloseKey(storageKey)
except WindowsError:
# First time, create key
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
reg.SetValueEx(storageKey, "Export2BinPath", 0, reg.REG_SZ, "")
reg.CloseKey(storageKey)
if not os.path.isfile(export2binpath):
export2binpath = ""
if not skipSet:
result = SetExport2Bin()
if result:
export2binpath = result
return export2binpath
def CheckForUpdatesEXE():
# Check if we want updates
if QueryToggableOption("AutoUpdate"):
# Try run application
try:
p = ("%s -name %s -version %f -version_info_url %s"
% (os.path.join(WORKING_DIR, "autoUpdate.exe"),
"CoDMayaTools.py",
FILE_VERSION,
VERSION_CHECK_URL))
subprocess.Popen("%s %f" % (os.path.join(WORKING_DIR, "Updater.exe"), FILE_VERSION))
except:
# Failed, exit.
return
else:
return
#def SetGame(name):
# currentGame = name
##########################################################
# Ray's Animation Toolkit #
# #
# Credits: #
# Aidan - teaching me how to make plugins like this :) #
##########################################################
def GenerateCamAnim(reqarg=""):
useDefMesh = False
if (cmds.objExists(getObjectByAlias("camera")) == False):
print "Camera doesn't exist"
return
if (cmds.objExists(getObjectByAlias("weapon")) == False):
print "Weapon doesn't exist"
return
animStart = cmds.playbackOptions(query=True, minTime=True)
animEnd = cmds.playbackOptions(query=True, maxTime=True)
jointGun = cmds.xform(getObjectByAlias("weapon"), query=True, rotation=True)
jointGunPos = cmds.xform(getObjectByAlias("weapon"), query=True, translation=True)
GunMoveXorig = jointGunPos[0]*-0.025
GunRotYAddorig = jointGunPos[0]*-0.5
GunRotXAddorig = jointGunPos[1]*-0.25
progressW = cmds.progressWindow(minValue=animStart,maxValue=animEnd)
for i in range(int(animStart),int(animEnd+1)):
cmds.currentTime(i)
jointGun = cmds.xform(getObjectByAlias("weapon"), query=True, rotation=True)
jointGunPos = cmds.xform(getObjectByAlias("weapon"), query=True, translation=True)
GunMoveX = jointGunPos[0]*-0.025
GunRotYAdd = jointGunPos[0]*-0.5
GunRotXAdd = jointGunPos[1]*-0.25
GunRot = jointGun
GunRot[0] = jointGun[0]
GunRot[0] = GunRot[0] * 0.025
GunRot[1] = jointGun[1]
GunRot[1] = GunRot[1] * 0.025
GunRot[2] = jointGun[2]
GunRot[2] = GunRot[2] * 0.025
print GunRot
print jointGun
cmds.select(getObjectByAlias("camera"), replace=True)
# cmds.rotate(GunRot[0], GunRot[1], GunRot[2], rotateXYZ=True)
cmds.setKeyframe(v=(GunMoveX-GunMoveXorig),at='translateX')
cmds.setKeyframe(v=GunRot[0]+(GunRotXAdd-GunRotXAddorig),at='rotateX')
cmds.setKeyframe(v=(GunRot[1]+(GunRotYAdd-GunRotYAddorig)),at='rotateY')
cmds.setKeyframe(v=GunRot[2],at='rotateZ')
cmds.progressWindow(edit=True,step=1)
cmds.progressWindow(edit=True,endProgress=True)
def RemoveCameraKeys(reqarg=""):
if (cmds.objExists(getObjectByAlias("camera")) == False):
print "ERROR: Camera doesn't exist"
return
else:
print "Camera exists!"
jointCamera = cmds.joint(getObjectByAlias("camera"), query=True)
animStart = cmds.playbackOptions(query=True, minTime=True)
animEnd = cmds.playbackOptions(query=True, maxTime=True)
cmds.select(getObjectByAlias("camera"), replace=True)
#cmds.setAttr('tag_camera.translateX',0)
#cmds.setAttr('tag_camera.translateY',0)
#cmds.setAttr('tag_camera.translateZ',0)
#cmds.setAttr('tag_camera.rotateX',0)
#cmds.setAttr('tag_camera.rotateY',0)
#cmds.setAttr('tag_camera.rotateZ',0)
# cmds.rotate(GunRot[0], GunRot[1], GunRot[2], rotateXYZ=True)
cmds.cutKey(clear=True,time=(animStart,animEnd+1))
def RemoveCameraAnimData(reqarg=""):
if (cmds.objExists(getObjectByAlias("camera")) == False):
print "ERROR: Camera doesn't exist"
return
else:
print "Camera exists!"
jointCamera = cmds.joint(getObjectByAlias("camera"), query=True)
animStart = cmds.playbackOptions(query=True, animationStartTime=True)
animEnd = cmds.playbackOptions(query=True, animationEndTime=True)
cmds.cutKey(clear=True,time=(animStart,animEnd+1))
cmds.select(getObjectByAlias("camera"), replace=True)
cmds.setAttr(getObjectByAlias("camera")+'.translateX',0)
cmds.setAttr(getObjectByAlias("camera")+'.translateY',0)
cmds.setAttr(getObjectByAlias("camera")+'.translateZ',0)
cmds.setAttr(getObjectByAlias("camera")+'.rotateX',0)
cmds.setAttr(getObjectByAlias("camera")+'.rotateY',0)
cmds.setAttr(getObjectByAlias("camera")+'.rotateZ',0)
def setObjectAlias(aname):
if len(cmds.ls("CoDMayaTools")) == 0:
cmds.createNode("renderLayer", name="CoDMayaTools", skipSelect=True)
if not cmds.attributeQuery("objAlias%s" % aname, node="CoDMayaTools", exists=True):
cmds.addAttr("CoDMayaTools", longName="objAlias%s" % aname, dataType='string')
objects = cmds.ls(selection=True)
if len(objects) == 1:
print "Marking selected object as %s" % aname
else:
print "Selected more than 1 object or none at all"
return
obj = objects[0]
cmds.setAttr("CoDMayaTools.objAlias%s" % aname, obj, type='string')
def getObjectByAlias(aname):
if len(cmds.ls("CoDMayaTools")) == 0:
cmds.createNode("renderLayer", name="CoDMayaTools", skipSelect=True)
if not cmds.attributeQuery("objAlias%s" % aname, node="CoDMayaTools", exists=True):
return ""
return cmds.getAttr("CoDMayaTools.objAlias%s" % aname) or ""
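# Alias sketch: select the camera joint once, tag it, then resolve it anywhere:
#   setObjectAlias("camera")
#   getObjectByAlias("camera")   # -> e.g. "tag_camera" (illustrative joint name)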
# Bind the weapon to hands
def WeaponBinder():
# Call of Duty specific
for x in xrange(0, len(GUN_BASE_TAGS)):
try:
# Select both tags and parent them
cmds.select(GUN_BASE_TAGS[x], replace = True)
cmds.select(VIEW_HAND_TAGS[x], toggle = True)
# Connect
cmds.connectJoint(connectMode = True)
# Parent
mel.eval("parent " + GUN_BASE_TAGS[x] + " " + VIEW_HAND_TAGS[x])
# Reset the positions of both bones
cmds.setAttr(GUN_BASE_TAGS[x] + ".t", 0, 0, 0)
cmds.setAttr(GUN_BASE_TAGS[x] + ".jo", 0, 0, 0)
cmds.setAttr(GUN_BASE_TAGS[x] + ".rotate", 0, 0, 0)
# Reset the rotation of the parent tag
cmds.setAttr(VIEW_HAND_TAGS[x] + ".jo", 0, 0, 0)
cmds.setAttr(VIEW_HAND_TAGS[x] + ".rotate", 0, 0, 0)
# Remove
cmds.select(clear = True)
except:
pass
def SetToggableOption(name="", val=0):
if not val:
val = int(cmds.menuItem(name, query=True, checkBox=True ))
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
except WindowsError:
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
reg.SetValueEx(storageKey, "Setting_%s" % name, 0, reg.REG_DWORD, val )
def QueryToggableOption(name=""):
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
reg.QueryValueEx(storageKey, "Setting_%s" % name)[0] # probe only; raises WindowsError if the setting does not exist yet
except WindowsError:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
try:
reg.SetValueEx(storageKey, "Setting_%s" % name, 0, reg.REG_DWORD , 0 )
except:
return 1
return reg.QueryValueEx(storageKey, "Setting_%s" % name)[0]
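# Settings sketch: toggles persist as "Setting_<name>" REG_DWORD values, e.g.:
#   SetToggableOption("AutoUpdate", 1)
#   if QueryToggableOption("AutoUpdate"): ...   # truthy when the toggle is on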
# ---- Create windows ----
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
except WindowsError:
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1]) # The key may not exist yet (the open above then fails), so create it here as a fallback.
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
codRootPath = reg.QueryValueEx(storageKey, "RootPath")[0]
reg.CloseKey(storageKey)
except WindowsError:
cmds.confirmDialog(message="It looks like this is your first time running CoD Maya Tools.\nYou will be asked to choose your game's root path.", button=['OK'], defaultButton='OK', title="First time configuration") #MessageBox("Please set your root path before starting to work with CoD Maya Tools")
result = cmds.confirmDialog(message="Which Game will you be working with? (Can be changed in settings)\n\nCoD4 = MW, CoD5 = WaW, CoD7 = BO1, CoD12 = Bo3", button=['CoD1', 'CoD2', 'CoD4', "CoD5", "CoD7", "CoD12"], defaultButton='OK', title="First time configuration") #MessageBox("Please set your root path before starting to work with CoD Maya Tools")
SetCurrentGame(result)
SetRootFolder(None, result)
res = cmds.confirmDialog(message="Enable Automatic Updates?", button=['Yes', 'No'], defaultButton='No', title="First time configuration")
if res == "Yes":
SetToggableOption(name="AutoUpdate", val=1)
else:
SetToggableOption(name="AutoUpdate", val=0)
cmds.confirmDialog(message="You're set! You can now export models and anims to any CoD!")
CheckForUpdatesEXE()
CreateMenu()
CreateXAnimWindow()
CreateXModelWindow()
CreateXCamWindow()
print "CoDMayaTools initialized."
| gpl-3.0 | 279,508,431,598,202,340 | 48.054038 | 523 | 0.586234 | false |
shlomif/patool | patoolib/programs/unalz.py | 1 | 1039 | # -*- coding: utf-8 -*-
# Copyright (C) 2010-2012 Bastian Kleineidam
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Archive commands for the unalz program."""
def extract_alzip (archive, compression, cmd, **kwargs):
"""Extract a ALZIP archive."""
return [cmd, '-d', kwargs['outdir'], archive]
def list_alzip (archive, compression, cmd, **kwargs):
"""List a ALZIP archive."""
return [cmd, '-l', archive]
test_alzip = list_alzip
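# Example (illustrative arguments): extract_alzip("a.alz", "alz", "unalz", outdir="out")
# returns ["unalz", "-d", "out", "a.alz"], the command line patool then executes.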
| gpl-3.0 | -4,237,572,598,105,218,600 | 37.481481 | 71 | 0.717998 | false |
pbanaszkiewicz/amy | amy/workshops/migrations/0012_auto_20150612_0807.py | 1 | 2658 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
TRANSLATE_NAMES = {
'Git': ['swc/git'],
'Make': ['swc/make'],
'Matlab': ['swc/matlab'],
'Mercurial': ['swc/hg'],
'Python': ['swc/python', 'dc/python'],
'R': ['swc/r', 'dc/r'],
'Regexp': ['swc/regexp'],
'SQL': ['swc/sql', 'dc/sql'],
'Subversion': ['swc/svn'],
'Unix': ['swc/shell', 'dc/shell'],
None: ['dc/spreadsheet', 'dc/cloud']
}
EXTRA_LEGACY_NAMES = ['MATLAB']
def add_new_lesson_names(apps, schema_editor):
'''Add Lesson instances for the new slash-style lesson names.'''
Lesson = apps.get_model('workshops', 'Lesson')
for (old_name, new_names) in TRANSLATE_NAMES.items():
for name in new_names:
Lesson.objects.create(name=name)
def fix_duplicate_names(apps, schema_editor):
'''Fix Qualification references that differ only by case ('MATLAB' vs 'Matlab').'''
Lesson = apps.get_model('workshops', 'Lesson')
Qualification = apps.get_model('workshops', 'Qualification')
try:
right_lesson = Lesson.objects.get(name='Matlab')
wrong_lesson = Lesson.objects.get(name='MATLAB')
Qualification.objects.filter(lesson=wrong_lesson) \
.update(lesson=right_lesson)
except Lesson.DoesNotExist:
pass
def replace_qualifications(apps, schema_editor):
'''Add qualification entries with new lesson names and delete old ones.'''
Lesson = apps.get_model('workshops', 'Lesson')
Qualification = apps.get_model('workshops', 'Qualification')
for q in Qualification.objects.all():
old_name = q.lesson.name
new_names = TRANSLATE_NAMES[old_name]
for name in new_names:
lesson = Lesson.objects.get(name=name)
Qualification.objects.create(lesson=lesson,
person=q.person)
q.delete()
def remove_old_skill_names(apps, schema_editor):
'''Remove legacy instances of Lesson named after skills.'''
Lesson = apps.get_model('workshops', 'Lesson')
for (old_name, new_names) in TRANSLATE_NAMES.items():
if old_name:
Lesson.objects.filter(name=old_name).delete()
for old_name in EXTRA_LEGACY_NAMES:
Lesson.objects.filter(name=old_name).delete()
class Migration(migrations.Migration):
dependencies = [
('workshops', '0011_auto_20150612_0803'),
]
operations = [
migrations.RunPython(add_new_lesson_names),
migrations.RunPython(fix_duplicate_names),
migrations.RunPython(replace_qualifications),
migrations.RunPython(remove_old_skill_names)
]
| mit | 4,028,732,690,800,716,000 | 32.64557 | 78 | 0.624153 | false |
google/shaka-player | build/checkversion.py | 1 | 2659 | #!/usr/bin/env python
#
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Checks that all the versions match."""
from __future__ import print_function
import logging
import os
import re
import shakaBuildHelpers
def player_version():
"""Gets the version of the library from player.js."""
path = os.path.join(shakaBuildHelpers.get_source_base(), 'lib', 'player.js')
with shakaBuildHelpers.open_file(path, 'r') as f:
match = re.search(r'shaka\.Player\.version = \'(.*)\'', f.read())
return match.group(1) if match else ''
def changelog_version():
"""Gets the version of the library from the CHANGELOG."""
path = os.path.join(shakaBuildHelpers.get_source_base(), 'CHANGELOG.md')
with shakaBuildHelpers.open_file(path, 'r') as f:
match = re.search(r'## (.*) \(', f.read())
return match.group(1) if match else ''
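# Example (illustrative): a CHANGELOG heading such as "## 2.0.0 (2016-09-07)" yields "2.0.0",
# which main() below compares against the "v2.0.0" git tag.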
def main(_):
"""Checks that all the versions in the library match."""
changelog = changelog_version()
player = player_version()
git = shakaBuildHelpers.git_version()
npm = shakaBuildHelpers.npm_version()
print('git version: ' + git)
print('npm version: ' + npm)
print('player version: ' + player)
print('changelog version: ' + changelog)
ret = 0
if 'dirty' in git:
logging.error('Git version is dirty.')
ret = 1
elif 'unknown' in git:
logging.error('Git version is not a tag.')
ret = 1
elif not re.match(r'^v[0-9]+\.[0-9]+\.[0-9]+(?:-[a-z0-9]+)?$', git):
logging.error('Git version is a malformed release version.')
logging.error('It should be a \'v\', followed by three numbers')
logging.error('separated by dots, optionally followed by a hyphen')
logging.error('and a pre-release identifier. See http://semver.org/')
ret = 1
if 'v' + npm != git:
logging.error('NPM version does not match git version.')
ret = 1
if player != git + '-uncompiled':
logging.error('Player version does not match git version.')
ret = 1
if 'v' + changelog != git:
logging.error('Changelog version does not match git version.')
ret = 1
return ret
if __name__ == '__main__':
shakaBuildHelpers.run_main(main)
| apache-2.0 | 7,412,155,136,251,303,000 | 30.654762 | 78 | 0.67469 | false |
Ambuj-UF/ConCat-1.0 | src/Utils/Bio/SearchIO/_model/_base.py | 1 | 2621 | # Copyright 2012 by Wibowo Arindrarto. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Abstract base classes for the SearchIO object model."""
import sys
# Add path to Bio
sys.path.append('../../..')
from Bio._utils import getattr_str, trim_str
__docformat__ = "restructuredtext en"
class _BaseSearchObject(object):
"""Abstract class for SearchIO objects."""
_NON_STICKY_ATTRS = ()
def _transfer_attrs(self, obj):
"""Transfer instance attributes to the given object.
This method is used to transfer attributes set externally (for example
using `setattr`) to a new object created from this one (for example
from slicing).
The reason this method is necessary is because different parsers will
set different attributes for each QueryResult, Hit, HSP, or HSPFragment
objects, depending on the attributes they found in the search output
file. Ideally, we want these attributes to 'stick' with any new instance
object created from the original one.
"""
# skip attribute names we don't want to transfer (listed in _NON_STICKY_ATTRS)
for attr in self.__dict__:
if attr not in self._NON_STICKY_ATTRS:
setattr(obj, attr, self.__dict__[attr])
class _BaseHSP(_BaseSearchObject):
"""Abstract base class for HSP objects."""
def _str_hsp_header(self):
"""Prints the alignment header info."""
lines = []
# set query id line
qid_line = trim_str(' Query: %s %s' %
(self.query_id, self.query_description), 80, '...')
# set hit id line
hid_line = trim_str(' Hit: %s %s' %
(self.hit_id, self.hit_description), 80, '...')
lines.append(qid_line)
lines.append(hid_line)
# coordinates
query_start = getattr_str(self, 'query_start')
query_end = getattr_str(self, 'query_end')
hit_start = getattr_str(self, 'hit_start')
hit_end = getattr_str(self, 'hit_end')
# strands
try:
qstrand = self.query_strand
hstrand = self.hit_strand
except ValueError:
qstrand = self.query_strand_all[0]
hstrand = self.hit_strand_all[0]
lines.append('Query range: [%s:%s] (%r)' % (query_start, query_end,
qstrand))
lines.append(' Hit range: [%s:%s] (%r)' % (hit_start, hit_end,
hstrand))
return '\n'.join(lines)
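# Example output (illustrative values):
#  Query: gi|1234 example query description
#    Hit: gi|5678 example hit description
# Query range: [0:60] (1)
#   Hit range: [10:70] (1)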
| gpl-2.0 | 8,958,869,091,809,492,000 | 33.038961 | 80 | 0.604349 | false |
jck/uhdl | uhdl/helpers.py | 1 | 1634 | import functools
import wrapt
from myhdl import SignalType, ResetSignal, delay, always, instance, Simulation
class Clock(SignalType):
"""Clock class for use in simulations"""
def __init__(self, period=2):
self.period = period
if period % 2 != 0:
raise ValueError("period must be divisible by 2")
super(Clock, self).__init__(False)
def gen(self):
@always(delay(self.period/2))
def _clock():
self.next = not self
return _clock
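# Usage sketch: drive a simulation with the generated clock (run_sim is defined below;
# some_dut is a hypothetical design function):
#   clk = Clock(period=10)
#   run_sim(clk.gen(), some_dut(clk), duration=100)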
class Reset(ResetSignal):
"""Reset class for use in simulations"""
def __init__(self, val=0, active=0, async=True):
super(Reset, self).__init__(val, active, async)
def pulse(self, time=5):
@instance
def _reset():
self.next = self.active
yield delay(time)
self.next = not self.active
return _reset
def run_sim(*args, **kwargs):
return Simulation(*args).run(**kwargs)
def sim(wrapped=None, duration=None, quiet=False):
"""Decorator which simplifies running a :class:`myhdl.Simulation`
Usage:
.. code-block:: python
@sim
def function_which_returns_generators(...):
...
@sim(duration=n, quiet=False)
def function_which_returns_generators(...):
...
"""
if wrapped is None:
return functools.partial(sim, duration=duration, quiet=quiet)
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
return run_sim(wrapped(*args, **kwargs), duration=duration, quiet=quiet)
return wrapper(wrapped)
| bsd-3-clause | -5,990,776,191,799,713,000 | 24.53125 | 80 | 0.588739 | false |
vandenheuvel/tribler | Tribler/Test/Community/Triblerchain/test_community.py | 1 | 24938 | from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue, Deferred
from twisted.internet.task import deferLater
from twisted.internet.threads import blockingCallFromThread
from Tribler.Test.Community.Trustchain.test_community import BaseTestTrustChainCommunity
from Tribler.Test.Community.Trustchain.test_trustchain_utilities import TrustChainTestCase
from Tribler.community.triblerchain.block import TriblerChainBlock
from Tribler.community.triblerchain.community import TriblerChainCommunity, PendingBytes, TriblerChainCommunityCrawler
from Tribler.community.trustchain.community import HALF_BLOCK, CRAWL
from Tribler.community.tunnel.routing import Circuit
from Tribler.dispersy.requestcache import IntroductionRequestCache
from Tribler.dispersy.tests.dispersytestclass import DispersyTestFunc
from Tribler.dispersy.util import blocking_call_on_reactor_thread
class TestPendingBytes(TrustChainTestCase):
"""
This class contains tests for the PendingBytes object
"""
def test_add_pending_bytes(self):
"""
Test adding to pending bytes
"""
pending_bytes = PendingBytes(20, 30)
self.assertTrue(pending_bytes.add(20, 30))
self.assertFalse(pending_bytes.add(-100, -100))
class TestTriblerChainCommunity(BaseTestTrustChainCommunity):
"""
Class that tests the TriblerChainCommunity at the integration level.
"""
@staticmethod
def set_expectation(node, req, up, down):
node.community.pending_bytes[req.community.my_member.public_key] = PendingBytes(down, up)
@blocking_call_on_reactor_thread
@inlineCallbacks
def create_nodes(self, *args, **kwargs):
nodes = yield DispersyTestFunc.create_nodes(self, *args, community_class=TriblerChainCommunity,
memory_database=False, **kwargs)
for outer in nodes:
for inner in nodes:
if outer != inner:
outer.send_identity(inner)
returnValue(nodes)
@blocking_call_on_reactor_thread
@inlineCallbacks
def test_cleanup_pending_bytes(self):
"""
Test cleaning of pending bytes
"""
node, = yield self.create_nodes(1)
node.community.pending_bytes['a'] = 1234
self.assertIn('a', node.community.pending_bytes)
node.community.cleanup_pending('a')
self.assertNotIn('a', node.community.pending_bytes)
@blocking_call_on_reactor_thread
@inlineCallbacks
def test_on_tunnel_remove(self):
"""
Test the on_tunnel_remove handler function for a circuit
"""
# Arrange
node, other = yield self.create_nodes(2)
tunnel_node = Circuit(long(0), 0)
tunnel_other = Circuit(long(0), 0)
tunnel_node.bytes_up = tunnel_other.bytes_down = 12 * 1024 * 1024
tunnel_node.bytes_down = tunnel_other.bytes_up = 14 * 1024 * 1024
# Act
node.call(node.community.on_tunnel_remove, None, None, tunnel_node, self._create_target(node, other))
other.call(other.community.on_tunnel_remove, None, None, tunnel_other, self._create_target(other, node))
yield deferLater(reactor, 5.1, lambda: None)
# Assert
_, signature_request = node.receive_message(names=[HALF_BLOCK]).next()
node.give_message(signature_request, other)
yield deferLater(reactor, 0.1, lambda: None)
_, signature_request = other.receive_message(names=[HALF_BLOCK]).next()
other.give_message(signature_request, node)
yield deferLater(reactor, 0.1, lambda: None)
self.assertBlocksInDatabase(node, 2)
self.assertBlocksInDatabase(other, 2)
self.assertBlocksAreEqual(node, other)
@blocking_call_on_reactor_thread
@inlineCallbacks
def test_on_tunnel_remove_small(self):
"""
Test the on_tunnel_remove handler function for a circuit
"""
# Arrange
node, other = yield self.create_nodes(2)
tunnel_node = Circuit(long(0), 0)
tunnel_other = Circuit(long(0), 0)
tunnel_node.bytes_up = tunnel_other.bytes_down = 1024
tunnel_node.bytes_down = tunnel_other.bytes_up = 2 * 1024
# Act
node.call(node.community.on_tunnel_remove, None, None, tunnel_node, self._create_target(node, other))
other.call(other.community.on_tunnel_remove, None, None, tunnel_other, self._create_target(other, node))
yield deferLater(reactor, 5.1, lambda: None)
# Assert
with self.assertRaises(StopIteration):
self.assertFalse(node.receive_message(names=[HALF_BLOCK]).next())
with self.assertRaises(StopIteration):
self.assertFalse(other.receive_message(names=[HALF_BLOCK]).next())
self.assertBlocksInDatabase(node, 0)
self.assertBlocksInDatabase(other, 0)
@blocking_call_on_reactor_thread
@inlineCallbacks
def test_on_tunnel_remove_append_pending(self):
"""
Test the on_tunnel_remove handler function for a circuit
"""
# Arrange
node, other = yield self.create_nodes(2)
tunnel_node = Circuit(long(0), 0)
tunnel_node.bytes_up = 12 * 1024 * 1024
tunnel_node.bytes_down = 14 * 1024 * 1024
# Act
node.call(node.community.on_tunnel_remove, None, None, tunnel_node, self._create_target(node, other))
node.call(node.community.on_tunnel_remove, None, None, tunnel_node, self._create_target(node, other))
yield deferLater(reactor, 5.1, lambda: None)
self.assertEqual(node.community.pending_bytes[other.community.my_member.public_key].up, 2*tunnel_node.bytes_up)
self.assertEqual(node.community.pending_bytes[other.community.my_member.public_key].down,
2*tunnel_node.bytes_down)
def test_receive_request_invalid(self):
"""
Test the community to receive a request message.
"""
# Arrange
node, other = self.create_nodes(2)
target_other = self._create_target(node, other)
TestTriblerChainCommunity.set_expectation(other, node, 10, 5)
transaction = {"up": 10, "down": 5}
node.call(node.community.sign_block, target_other, other.my_member.public_key, transaction)
_, block_req = other.receive_message(names=[HALF_BLOCK]).next()
# Act
# construct faked block
block = block_req.payload.block
block.transaction["up"] += 10
block.transaction["total_up"] = block.transaction["up"]
block_req = node.community.get_meta_message(HALF_BLOCK).impl(
authentication=tuple(),
distribution=(node.community.claim_global_time(),),
destination=(target_other,),
payload=(block,))
other.give_message(block_req, node)
# Assert
self.assertBlocksInDatabase(other, 0)
self.assertBlocksInDatabase(node, 1)
with self.assertRaises(StopIteration):
# No signature responses, or crawl requests should have been sent
node.receive_message(names=[HALF_BLOCK, CRAWL]).next()
def test_receive_request_twice(self):
"""
        Test that receiving the same request message twice does not create extra blocks or responses.
"""
# Arrange
node, other = self.create_nodes(2)
target_other = self._create_target(node, other)
transaction = {"up": 10, "down": 5}
TestTriblerChainCommunity.set_expectation(node, other, 50, 50)
TestTriblerChainCommunity.set_expectation(other, node, 50, 50)
TestTriblerChainCommunity.create_block(node, other, target_other, transaction)
# construct faked block
block = node.call(node.community.persistence.get_latest, node.my_member.public_key)
block_req = node.community.get_meta_message(HALF_BLOCK).impl(
authentication=tuple(),
distribution=(node.community.claim_global_time(),),
destination=(target_other,),
payload=(block,))
other.give_message(block_req, node)
# Assert
self.assertBlocksInDatabase(other, 2)
self.assertBlocksInDatabase(node, 2)
with self.assertRaises(StopIteration):
# No signature responses, or crawl requests should have been sent
node.receive_message(names=[HALF_BLOCK, CRAWL]).next()
def test_receive_request_too_much(self):
"""
        Test that the community does not countersign a request that claims more than we are prepared to sign
"""
# Arrange
node, other = self.create_nodes(2)
target_other = self._create_target(node, other)
TestTriblerChainCommunity.set_expectation(other, node, 3, 3)
transaction = {"up": 10, "down": 5}
node.call(node.community.sign_block, target_other, other.my_member.public_key, transaction)
# Act
other.give_message(other.receive_message(names=[HALF_BLOCK]).next()[1], node)
# Assert
self.assertBlocksInDatabase(other, 1)
self.assertBlocksInDatabase(node, 1)
with self.assertRaises(StopIteration):
# No signature responses, or crawl requests should have been sent
node.receive_message(names=[HALF_BLOCK, CRAWL]).next()
def test_receive_request_unknown_pend(self):
"""
        Test that the community does not countersign a request making claims about a peer we know nothing about
"""
# Arrange
node, other = self.create_nodes(2)
target_other = self._create_target(node, other)
transaction = {"up": 10, "down": 5}
node.call(node.community.sign_block, target_other, other.my_member.public_key, transaction)
# Act
other.give_message(other.receive_message(names=[HALF_BLOCK]).next()[1], node)
# Assert
self.assertBlocksInDatabase(other, 1)
self.assertBlocksInDatabase(node, 1)
with self.assertRaises(StopIteration):
# No signature responses, or crawl requests should have been sent
node.receive_message(names=[HALF_BLOCK, CRAWL]).next()
def test_block_values(self):
"""
        If a block is created between two nodes, both
        should end up with the correct total_up and total_down values.
"""
# Arrange
node, other = self.create_nodes(2)
TestTriblerChainCommunity.set_expectation(node, other, 50, 50)
TestTriblerChainCommunity.set_expectation(other, node, 50, 50)
transaction = {"up": 10, "down": 5}
# Act
TestTriblerChainCommunity.create_block(node, other, self._create_target(node, other), transaction)
# Assert
block = node.call(TriblerChainBlock.create, transaction, node.community.persistence,
node.community.my_member.public_key)
self.assertEqual(20, block.transaction["total_up"])
self.assertEqual(10, block.transaction["total_down"])
block = other.call(TriblerChainBlock.create, transaction, other.community.persistence,
other.community.my_member.public_key)
self.assertEqual(15, block.transaction["total_up"])
self.assertEqual(15, block.transaction["total_down"])
def test_block_values_after_request(self):
"""
After a request is sent, a node should update its totals.
"""
# Arrange
node, other = self.create_nodes(2)
transaction = {"up": 10, "down": 5}
node.call(node.community.sign_block, self._create_target(node, other), other.my_member.public_key, transaction)
# Assert
block = node.call(TriblerChainBlock.create, transaction, node.community.persistence,
node.community.my_member.public_key)
self.assertEqual(20, block.transaction["total_up"])
self.assertEqual(10, block.transaction["total_down"])
def test_crawler_on_introduction_received(self):
"""
Test the crawler takes a step when an introduction is made by the walker
"""
# Arrange
TriblerChainCommunityCrawler.CrawlerDelay = 10000000
crawler = DispersyTestFunc.create_nodes(self, 1, community_class=TriblerChainCommunityCrawler,
memory_database=False)[0]
node, = self.create_nodes(1)
node._community.cancel_pending_task("take fast steps")
node._community.cancel_pending_task("take step")
node._community.cancel_pending_task("start_walking")
target_node_from_crawler = self._create_target(node, crawler)
        # When we call on_introduction_response it is going to forward the argument to its super
        # implementation. Dispersy will error if it does not expect this, and the target code will
        # not be tested. So we pick at Dispersy's brains to make it accept the intro response.
intro_request_info = crawler.call(IntroductionRequestCache, crawler.community, None)
intro_response = node.create_introduction_response(target_node_from_crawler, node.lan_address, node.wan_address,
node.lan_address, node.wan_address,
u"unknown", False, intro_request_info.number)
intro_response._candidate = target_node_from_crawler
crawler.community.request_cache._identifiers[
crawler.community.request_cache._create_identifier(intro_request_info.number, u"introduction-request")
] = intro_request_info
        # We don't actually want to send the crawl request since the counterparty is fake; just count how often it runs
counter = [0]
def on_crawl_request(cand, pk, sequence_number=None):
# Ignore live edge request
if sequence_number != -1:
counter[0] += 1
crawler.community.send_crawl_request = on_crawl_request
# Act
crawler.call(crawler.community.on_introduction_response, [intro_response])
# Assert
self.assertEqual(counter[0], 1)
def test_get_statistics_no_blocks(self):
"""
Test the get_statistics method where last block is none
"""
node, = self.create_nodes(1)
statistics = node.community.get_statistics()
assert isinstance(statistics, dict), type(statistics)
assert len(statistics) > 0
def test_get_statistics_with_previous_block(self):
"""
Test the get_statistics method where a last block exists
"""
# Arrange
node, other = self.create_nodes(2)
transaction = {"up": 10, "down": 5}
TestTriblerChainCommunity.create_block(node, other, self._create_target(node, other), transaction)
# Get statistics
statistics = node.community.get_statistics()
assert isinstance(statistics, dict), type(statistics)
assert len(statistics) > 0
def test_get_statistics_for_not_self(self):
"""
        Test the get_statistics method for a public key other than our own
"""
# Arrange
node, other = self.create_nodes(2)
transaction = {"up": 10, "down": 5}
TestTriblerChainCommunity.create_block(node, other, self._create_target(node, other), transaction)
# Get statistics
statistics = node.community.get_statistics(public_key=other.community.my_member.public_key)
assert isinstance(statistics, dict), type(statistics)
assert len(statistics) > 0
def test_get_trust(self):
"""
Test that the trust nodes have for each other is the upload + the download total of all blocks.
"""
# Arrange
node, other = self.create_nodes(2)
transaction = {'up': 10, 'down': 5, 'total_up': 10, 'total_down': 5}
TestTriblerChainCommunity.create_block(node, other, self._create_target(node, other), transaction)
TestTriblerChainCommunity.create_block(other, node, self._create_target(other, node), transaction)
# Get statistics
node_trust = blockingCallFromThread(reactor, node.community.get_trust, other.community.my_member)
other_trust = blockingCallFromThread(reactor, other.community.get_trust, node.community.my_member)
self.assertEqual(node_trust, 15)
self.assertEqual(other_trust, 15)
def test_get_default_trust(self):
"""
Test that the trust between nodes without blocks is 1.
"""
# Arrange
node, other = self.create_nodes(2)
# Get statistics
node_trust = blockingCallFromThread(reactor, node.community.get_trust, other.community.my_member)
other_trust = blockingCallFromThread(reactor, other.community.get_trust, node.community.my_member)
self.assertEqual(node_trust, 1)
self.assertEqual(other_trust, 1)
def test_get_node_empty(self):
"""
Check whether get_node returns the correct node if no past data is given.
"""
node, = self.create_nodes(1)
self.assertEqual({"total_up": 3, "total_down": 5, "total_neighbors": 2},
node.community.get_node("test", [], 3, 5, 2))
def test_get_node_maximum(self):
"""
Check whether get_node returns the maximum of total_up and total_down.
"""
node, = self.create_nodes(1)
nodes = {"test": {"total_up": 1, "total_down": 10, "total_neighbors": 2}}
self.assertEqual({"total_up": 3, "total_down": 10, "total_neighbors": 2},
node.community.get_node("test", nodes, 3, 5, 1))
def test_get_node_request_total_traffic(self):
"""
        Check whether get_node falls back to the persistence total_traffic data if no total_up and total_down are given.
"""
node, = self.create_nodes(1)
node.community.persistence.total_traffic = lambda _: [5, 6, 2]
self.assertEqual({"total_up": 5, "total_down": 6, "total_neighbors": 2},
node.community.get_node('74657374', []))
def test_format_edges(self):
"""
Verify whether format_edges returns the correct nodes and edges
"""
node, = self.create_nodes(1)
edge_list = [
# [pk_a, pk_b, a->b, b->a, a_up, a_down, a_neighbors]
("aa", "bb", 10, 15, 10, 15, 1),
("bb", "cc", 8, 3, 23, 13, 2)
]
node.community.persistence.total_traffic = lambda pk: (0, 0, 1)
expected_nodes = {
"aa": {"total_up": 10, "total_down": 15, "total_neighbors": 1},
"bb": {"total_up": 23, "total_down": 13, "total_neighbors": 2},
"cc": {"total_up": 0, "total_down": 0, "total_neighbors": 1}
}
expected_edges = {
"aa": [("bb", 10, 15)],
"bb": [("aa", 15, 10), ("cc", 8, 3)],
"cc": [("bb", 3, 8)]
}
nodes, edges = node.community.format_edges(edge_list, "aa")
self.assertDictEqual(nodes, expected_nodes)
self.assertDictEqual(expected_edges, edges)
def test_build_graph_no_edges(self):
"""
Verify whether get_graph returns a correct result if no edges are present.
"""
node, = self.create_nodes(1)
nodes = {
"aa": {"total_up": 0, "total_down": 0, "total_neighbors": 0}
}
edges = {}
expected_nodes = [
{"public_key": "aa", "total_up": 0, "total_down": 0, "total_neighbors": 0, "score": 0.5}
]
expected_edges = []
actual_nodes, actual_edges = node.community.build_graph((nodes, edges), "aa", 2, 0, [])
self.assertListEqual(expected_nodes, actual_nodes)
self.assertListEqual(expected_edges, actual_edges)
def test_build_graph(self):
"""
Verify whether get_graph returns a correct list of nodes and edges
"""
node, = self.create_nodes(1)
nodes = {
"aa": {"total_up": 0, "total_down": 0, "total_neighbors": 2},
"bb": {"total_up": 1, "total_down": 1, "total_neighbors": 5},
"cc": {"total_up": 2, "total_down": 2, "total_neighbors": 2},
"dd": {"total_up": 3, "total_down": 3, "total_neighbors": 1},
"ee": {"total_up": 4, "total_down": 4, "total_neighbors": 1},
"ff": {"total_up": 5, "total_down": 5, "total_neighbors": 1}
}
edges = {
"aa": [("bb", 0, 0), ("cc", 0, 0), ("ff", 0, 0)],
"bb": [("aa", 0, 0), ("cc", 0, 0), ("ee", 0, 0), ("ff", 0, 0), ("dd", 0, 0)],
"cc": [("aa", 0, 0), ("bb", 0, 0)],
}
expected_nodes = [
{"public_key": "aa", "total_up": 0, "total_down": 0, "total_neighbors": 2, "score": 0.5},
{"public_key": "bb", "total_up": 1, "total_down": 1, "total_neighbors": 5, "score": 0.5},
{"public_key": "cc", "total_up": 2, "total_down": 2, "total_neighbors": 2, "score": 0.5},
]
expected_edges = [
{"from": "aa", "to": "bb", "amount": 0},
{"from": "bb", "to": "aa", "amount": 0},
{"from": "aa", "to": "cc", "amount": 0},
{"from": "cc", "to": "aa", "amount": 0},
{"from": "bb", "to": "cc", "amount": 0},
{"from": "cc", "to": "bb", "amount": 0},
]
actual_nodes, actual_edges = node.community.build_graph((nodes, edges), "aa", 1, 1, ["cc"])
self.assertItemsEqual(expected_nodes, actual_nodes)
self.assertItemsEqual(expected_edges, actual_edges)
def test_get_graph_circular(self):
"""
        Verify whether get_graph returns a correct list of nodes and edges when the graph is circular
"""
node, = self.create_nodes(1)
nodes = {
"aa": {"total_up": 0, "total_down": 0, "total_neighbors": 2},
"bb": {"total_up": 1, "total_down": 1, "total_neighbors": 5},
"cc": {"total_up": 2, "total_down": 2, "total_neighbors": 2},
}
edges = {
"aa": [("bb", 0, 0), ("cc", 0, 0)],
"bb": [("aa", 0, 0), ("cc", 0, 0)],
"cc": [("aa", 0, 0), ("bb", 0, 0)]
}
expected_nodes = [
{"public_key": "aa", "total_up": 0, "total_down": 0, "total_neighbors": 2, "score": 0.5},
{"public_key": "bb", "total_up": 1, "total_down": 1, "total_neighbors": 5, "score": 0.5},
{"public_key": "cc", "total_up": 2, "total_down": 2, "total_neighbors": 2, "score": 0.5},
]
expected_edges = [
{"from": "aa", "to": "bb", "amount": 0},
{"from": "bb", "to": "aa", "amount": 0},
{"from": "aa", "to": "cc", "amount": 0},
{"from": "cc", "to": "aa", "amount": 0},
{"from": "bb", "to": "cc", "amount": 0},
{"from": "cc", "to": "bb", "amount": 0},
]
def verify_result((actual_nodes, actual_edges)):
self.assertItemsEqual(expected_nodes, actual_nodes)
self.assertItemsEqual(expected_edges, actual_edges)
node, = self.create_nodes(1)
node.community.persistence.get_graph_edges = lambda _1, _2: Deferred()
node.community.format_edges = lambda _1, _2: (nodes, edges)
d = node.community.get_graph("aa", 1, 2, [])
d.addCallback(verify_result)
d.callback("test")
return d
def test_get_graph(self):
"""
Verify whether the get_graph method adds the two callbacks correctly
"""
test_result = "test_1"
test_public_key = "test_2"
test_neighbor_level = "test_3"
test_max_neighbors = "test_4"
test_mandatory_nodes = "test_5"
test_nodes_edges = ("test_6", "test_7")
test_final_result = "test_8"
def mock_format(result, public_key):
self.assertEqual(result, test_result)
self.assertEqual(public_key, test_public_key)
return test_nodes_edges
def mock_build((nodes, edges), public_key, neighbor_level, max_neighbors, mandatory_nodes):
self.assertEqual(nodes, test_nodes_edges[0])
self.assertEqual(edges, test_nodes_edges[1])
self.assertEqual(public_key, test_public_key)
self.assertEqual(neighbor_level, test_neighbor_level)
self.assertEqual(max_neighbors, test_max_neighbors)
self.assertEqual(mandatory_nodes, test_mandatory_nodes)
return test_final_result
node, = self.create_nodes(1)
node.community.persistence.get_graph_edges = lambda _1, _2: Deferred()
node.community.format_edges = mock_format
node.community.build_graph = mock_build
d = node.community.get_graph(test_public_key, test_neighbor_level, test_max_neighbors, test_mandatory_nodes)
d.addCallback(self.assertEqual, test_final_result)
d.callback(test_result)
return d
| lgpl-3.0 | -8,858,512,603,026,695,000 | 42.220104 | 120 | 0.604619 | false |
omniti-labs/circus | src/circus/module/activate_metrics.py | 1 | 2178 | #!/usr/bin/env python
__cmdname__ = 'activate_metrics'
__cmdopts__ = ''
import sys
import log
import util
class Module(object):
def __init__(self, api, account):
self.api = api
self.account = account
def command(self, opts, pattern, *metrics_to_enable):
"""Activate metrics for checks
Arguments:
pattern -- Pattern for checks
metrics_to_enable -- List of metrics to enable
"""
checks, groups = util.find_checks(self.api, pattern)
already_enabled = {}
# Pick only one check per check bundle
bundles = {}
for c in checks:
if c['bundle_id'] in bundles:
continue
bundles[c['bundle_id']] = c
log.msg("Retrieving metrics for checks")
count = 0
for c in bundles.values():
count += 1
print "\r%s/%s" % (count, len(bundles)),
sys.stdout.flush()
rv = self.api.list_metrics(check_id=c['check_id'])
already_enabled[c['check_id']] = []
for metric in sorted(rv):
if metric['enabled']:
already_enabled[c['check_id']].append(metric['name'])
log.msg("Metrics to enable: %s" % (', '.join(metrics_to_enable)))
log.msg("About to enable metrics for the following checks")
for c in bundles.values():
log.msg(" %s (%s)" % (c['name'],
', '.join(already_enabled[c['check_id']])))
if util.confirm():
for c in bundles.values():
# Enable metrics here
log.msgnb("%s..." % c['name'])
all_metrics = set(already_enabled[c['check_id']]) \
| set(metrics_to_enable)
if all_metrics != set(already_enabled[c['check_id']]):
# The set of metrics has changed, apply the edit
self.api.edit_check_bundle(
bundle_id=c['bundle_id'],
metric_name=list(all_metrics))
log.msgnf("Done")
else:
log.msgnf("No changes")
| isc | -3,565,853,606,667,211,000 | 33.571429 | 73 | 0.495409 | false |
calico/basenji | bin/basenji_sad_ref_multi.py | 1 | 6256 | #!/usr/bin/env python
# Copyright 2017 Calico LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from optparse import OptionParser
import glob
import os
import pickle
import shutil
import subprocess
import sys
import h5py
import numpy as np
try:
import zarr
except ImportError:
pass
import slurm
from basenji_sad_multi import collect_h5
"""
basenji_sad_ref_multi.py
Compute SNP expression difference scores for variants in a VCF file,
using multiple processes.
"""
################################################################################
# main
################################################################################
def main():
usage = 'usage: %prog [options] <params_file> <model_file> <vcf_file>'
parser = OptionParser(usage)
# sad
parser.add_option('-c', dest='center_pct',
default=0.25, type='float',
      help='Require clustered SNPs to lie in the center region [Default: %default]')
parser.add_option('-f', dest='genome_fasta',
default='%s/data/hg19.fa' % os.environ['BASENJIDIR'],
help='Genome FASTA for sequences [Default: %default]')
parser.add_option('--flip', dest='flip_ref',
default=False, action='store_true',
help='Flip reference/alternate alleles when simple [Default: %default]')
parser.add_option('-n', dest='norm_file',
default=None,
help='Normalize SAD scores')
parser.add_option('-o',dest='out_dir',
default='sad',
help='Output directory for tables and plots [Default: %default]')
parser.add_option('--pseudo', dest='log_pseudo',
default=1, type='float',
help='Log2 pseudocount [Default: %default]')
parser.add_option('--rc', dest='rc',
default=False, action='store_true',
help='Average forward and reverse complement predictions [Default: %default]')
parser.add_option('--shifts', dest='shifts',
default='0', type='str',
help='Ensemble prediction shifts [Default: %default]')
parser.add_option('--stats', dest='sad_stats',
default='SAD',
help='Comma-separated list of stats to save. [Default: %default]')
parser.add_option('-t', dest='targets_file',
default=None, type='str',
help='File specifying target indexes and labels in table format')
parser.add_option('--ti', dest='track_indexes',
default=None, type='str',
help='Comma-separated list of target indexes to output BigWig tracks')
parser.add_option('--threads', dest='threads',
default=False, action='store_true',
help='Run CPU math and output in a separate thread [Default: %default]')
parser.add_option('-u', dest='penultimate',
default=False, action='store_true',
help='Compute SED in the penultimate layer [Default: %default]')
# multi
parser.add_option('-e', dest='conda_env',
default='tf2.2-gpu',
help='Anaconda environment [Default: %default]')
parser.add_option('--name', dest='name',
default='sad', help='SLURM name prefix [Default: %default]')
parser.add_option('--max_proc', dest='max_proc',
default=None, type='int',
help='Maximum concurrent processes [Default: %default]')
parser.add_option('-p', dest='processes',
default=None, type='int',
help='Number of processes, passed by multi script')
parser.add_option('-q', dest='queue',
default='gtx1080ti',
help='SLURM queue on which to run the jobs [Default: %default]')
parser.add_option('-r', dest='restart',
default=False, action='store_true',
help='Restart a partially completed job [Default: %default]')
(options, args) = parser.parse_args()
if len(args) != 3:
parser.error('Must provide parameters and model files and VCF file')
else:
params_file = args[0]
model_file = args[1]
vcf_file = args[2]
#######################################################
# prep work
if os.path.isdir(options.out_dir):
if not options.restart:
print('Please remove %s' % options.out_dir, file=sys.stderr)
exit(1)
else:
os.mkdir(options.out_dir)
# pickle options
options_pkl_file = '%s/options.pkl' % options.out_dir
options_pkl = open(options_pkl_file, 'wb')
pickle.dump(options, options_pkl)
options_pkl.close()
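  # each worker launched below receives this pickle file as its first
  # argument and reloads the options from it (see basenji_sad_ref.py)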
#######################################################
# launch worker threads
jobs = []
for pi in range(options.processes):
if not options.restart or not job_completed(options, pi):
cmd = '. /home/drk/anaconda3/etc/profile.d/conda.sh;'
cmd += ' conda activate %s;' % options.conda_env
cmd += ' basenji_sad_ref.py %s %s %d' % (
options_pkl_file, ' '.join(args), pi)
name = '%s_p%d' % (options.name, pi)
outf = '%s/job%d.out' % (options.out_dir, pi)
errf = '%s/job%d.err' % (options.out_dir, pi)
j = slurm.Job(cmd, name,
outf, errf,
queue=options.queue, gpu=1,
mem=22000, time='14-0:0:0')
jobs.append(j)
slurm.multi_run(jobs, max_proc=options.max_proc, verbose=True,
launch_sleep=10, update_sleep=60)
#######################################################
# collect output
collect_h5('sad.h5', options.out_dir, options.processes)
# for pi in range(options.processes):
# shutil.rmtree('%s/job%d' % (options.out_dir,pi))
def job_completed(options, pi):
"""Check whether a specific job has generated its
output file."""
out_file = '%s/job%d/sad.h5' % (options.out_dir, pi)
return os.path.isfile(out_file) or os.path.isdir(out_file)
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
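# Illustrative invocation (file names and option values are assumptions):
#
#   basenji_sad_ref_multi.py -p 8 -q gtx1080ti --name sad -o sad_out \
#       params.json model.h5 variants.vcf
#
# pickles the options, submits 8 single-GPU SLURM jobs that each run
# basenji_sad_ref.py on one shard of the work, waits for them to finish,
# and merges the per-job sad.h5 outputs with collect_h5().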
| apache-2.0 | 9,131,499,811,372,269,000 | 34.146067 | 84 | 0.598465 | false |
ecell/ecell3 | ecell/frontend/session-monitor/ecell/ui/osogo/Window.py | 1 | 7162 | #::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
# This file is part of the E-Cell System
#
# Copyright (C) 1996-2016 Keio University
# Copyright (C) 2008-2016 RIKEN
# Copyright (C) 2005-2009 The Molecular Sciences Institute
#
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
#
# E-Cell System is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# E-Cell System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with E-Cell System -- see the file COPYING.
# If not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#END_HEADER
#
#'Design: Kenta Hashimoto <[email protected]>',
#'Design and application Framework: Koichi Takahashi <[email protected]>',
#'Programming: Yuki Fujita',
# 'Yoshiya Matsubara',
# 'Yuusuke Saito'
#
# modified by Masahiro Sugimoto <[email protected]> at
# E-Cell Project, Lab. for Bioinformatics, Keio University.
#
import os
import gtk
import gtk.gdk
from ecell.ui.osogo.config import *
import ecell.ui.osogo.glade_compat as glade
class Window:
"""The super class of Window class.
    [Note]: This class is not a Window widget itself, but holds the widget instances.
"""
def __init__( self, gladeFile=None, rootWidget=None ):
"""Constructor
gladeFile -- a glade file name (str:absolute path/relative path)
rootWidget -- a root widget (str or None)
"""
self.gladeFile = gladeFile # glade file name
self.rootWidget = rootWidget # a root property
self.widgets = None # widgets instance
# Default title is classname of this class.
self.theTitle = self.__class__.__name__
def openWindow( self ):
"""
loads Glade file
Returns None
        [Note]: Raises an IOError if the Glade file cannot be read.
"""
# ------------------------------------------------
# checks and loads glade file
# ------------------------------------------------
if os.access( self.gladeFile, os.R_OK ):
if self.rootWidget != None:
self.widgets = glade.XML( self.gladeFile, root= str( self.rootWidget ) )
else:
self.widgets = glade.XML( self.gladeFile, root= None )
else:
raise IOError( "can't read %s." %self.gladeFile )
def addHandlers( self, aHandlers ):
"""sets handlers
aHandlers -- a signal handler map (dict)
Returns None
"""
        if not isinstance(aHandlers, dict):
raise TypeError("%s must be dict." %str(aHandlers) )
self.widgets.signal_autoconnect( aHandlers )
def __getitem__( self, aKey ):
"""returns wiget specified by the key
aKey -- a widget name (str)
Returns a widget (gtk.Widget)
[Note]:When this window has not the widget specified by the key,
throws an exception.
"""
return self.widgets.get_widget( aKey )
def getWidget( self, aKey ):
"""returns wiget specified by the key
aKey -- a widget name (str)
Returns a widget (gtk.Widget)
[Note]:This method is same as __getitem__ method.
"""
return self[ aKey ]
def setIconList( self, anIconFile16, anIconFile32 ):
"""sets the window icon according to icon size
anIconFile16 --- icon 16x16 filename
anIconFile32 --- icon 32x32 filename
"""
aPixbuf16 = gtk.gdk.pixbuf_new_from_file(anIconFile16)
aPixbuf32 = gtk.gdk.pixbuf_new_from_file(anIconFile32)
theWidget=self[ self.__class__.__name__ ]
if theWidget!=None:
theWidget.set_icon_list( aPixbuf16, aPixbuf32 )
def editTitle( self, aTitle ):
"""edits and saves title
aTitle -- a title to save (str)
Returns None
"""
# save title
# Although self.theTitle looks verbose, self.getTitle() method
# returns self.theTitle. See the comment of getTitle() method
self.theTitle = aTitle
# get window widget ( The name of window widget is class name )
theWidget=self[ self.__class__.__name__ ]
# There are some cases theWidget is None.
# - When this method is called after 'destroy' signal.
# - When this window is attached other Window.
# In those cases, do not change title.
if theWidget!=None:
theWidget.set_title( self.theTitle )
def getTitle( self ):
"""gets title of this Window
Returns a title (str)
[Note]: This method returs not the title of widget but self.theTitle.
Because when this method is called after 'destroy' signal,
all widgets are None.
"""
return self.theTitle
def getParent( self ):
"""Returns a Parent Window (Window) # Not gtk.Window
"""
if self.rootWidget == None:
return self
else:
return self.__getParent( self.rootWidget )
def __getParent( self, *arg ):
"""Returns a Parent Window (Window) # Not gtk.Window
"""
if arg[0].rootWidget == None:
return arg[0]
else:
return arg[0].__getParent( self.rootWidget )
def getAllChildren( self ):
"""Returns all widget on this Window (list of widget)
Other windows in same glade file are not included.
"""
aChildren = self[self.__class__.__name__].get_children()
return self.__getChildren( aChildren )
def __getChildren( self, aChildren ):
"""Returns all widget on this Window (list of widget)
Other windows in same glade file are not included.
"""
aChildrenList = [] # list of widget
for aChild in aChildren:
            # when aChild is not a container (no get_children method), append it to the list.
try:
aChild.get_children()
except AttributeError:
aChildrenList.append( aChild )
else:
# when aChild has no children, append it to list.
if len(aChild.get_children()) == 0:
aChildrenList.append( aChild )
else:
# when aChild has children, call this method.
aChildrenList += self.__getChildren( aChild.get_children() )
return aChildren + aChildrenList
def show_all( self ):
"""shows all widgets of this window
Returns None
"""
self[self.__class__.__name__].show_all()
# end of Window
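# ---------------------------------------------------------------------
# Illustrative sketch (not part of the original module): a minimal
# subclass. 'MainWindow.glade' and the widget names 'MainWindow' and
# 'ok_button' are assumptions for the example; by convention the glade
# file's top-level window widget is named after the subclass.
class MainWindow( Window ):
    def __init__( self ):
        Window.__init__( self, gladeFile='MainWindow.glade' )
    def openWindow( self ):
        # load the glade file, then wire the button to its handler
        Window.openWindow( self )
        self.addHandlers( { 'on_ok_button_clicked': self.onOK } )
        self.editTitle( 'Main Window' )
    def onOK( self, *arg ):
        print 'OK pressed on %s' % self.getTitle()
# end of MainWindow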
| lgpl-3.0 | 2,354,077,049,409,439,000 | 32.46729 | 88 | 0.573164 | false |
quadrismegistus/prosodic | prosodic/lib/MeterPosition.py | 1 | 2254 | import string
from copy import copy
from Parse import Parse
class MeterPosition(Parse):
def __init__(self, meter, meterVal): # meterVal represents whether the position is 's' or 'w'
self.slots=[]
self.children=self.slots
self.meter = meter
self.constraintScores = {}
for constraint in meter.constraints:
self.constraintScores[constraint] = 0
self.meterVal = meterVal
for slot in self.slots:
slot.meter=meterVal
self.feat('prom.meter',(meterVal=='s'))
#self.feat('meter',self.meterVal2)
#self.token = ""
def __copy__(self):
other = MeterPosition(self.meter, self.meterVal)
other.slots = self.slots[:]
for k,v in list(self.constraintScores.items()):
other.constraintScores[k]=copy(v)
return other
@property
def has_viol(self):
return bool(sum(self.constraintScores.values()))
@property
def violated(self):
viold=[]
for c,viol in list(self.constraintScores.items()):
if viol:
viold+=[c]
return viold
@property
def isStrong(self):
return self.meterVal.startswith("s")
def append(self,slot):
#self.token = ""
self.slots.append(slot)
@property
def meterVal2(self):
return ''.join([self.meterVal for x in self.slots])
@property
def mstr(self):
return ''.join([self.meterVal for n in range(len(self.slots))])
def posfeats(self):
posfeats={'prom.meter':[]}
for slot in self.slots:
for k,v in list(slot.feats.items()):
if (not k in posfeats):
posfeats[k]=[]
posfeats[k]+=[v]
posfeats['prom.meter']+=[self.meterVal]
for k,v in list(posfeats.items()):
posfeats[k]=tuple(v)
return posfeats
#
# def __repr__(self):
#
# if not self.token:
# slotTokens = []
#
# for slot in self.slots:
# #slotTokens.append(self.u2s(slot.token))
# slotTokens.append(slot.token)
#
# self.token = '.'.join(slotTokens)
#
# if self.meterVal == 's':
# self.token = self.token.upper()
# else:
# self.token = self.token.lower()
# return self.token
def __repr__(self):
return self.token
@property
def token(self):
if not hasattr(self,'_token') or not self._token:
token = '.'.join([slot.token for slot in self.slots])
token=token.upper() if self.meterVal=='s' else token.lower()
self._token=token
return self._token
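# Illustrative usage sketch (not part of the original module): "meter"
# and the slot objects stand for prosodic's real Meter/Slot instances,
# assumed here to expose .constraints and .token/.feats respectively.
#
#   pos = MeterPosition(meter, 's')   # a strong position
#   pos.append(slot_with)             # fill it with syllable slots
#   pos.append(slot_in)
#   pos.token                         # -> 'WITH.IN' (upper-cased for 's')
#   pos.mstr                          # -> 'ss', one meterVal per slot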
| gpl-3.0 | 392,770,631,571,258,000 | 22.479167 | 94 | 0.661934 | false |
lundjordan/releasewarrior-2.0 | releasewarrior/balrog.py | 1 | 6332 | import logging
import re
import requests
from copy import deepcopy
from mozilla_version.balrog import BalrogReleaseName
BALROG_API_ROOT = 'https://aus5.mozilla.org/api/v1'
log = logging.getLogger(name=__name__)
class BalrogError(Exception):
pass
class TooManyBlobsFoundError(BalrogError):
def __init__(self, blob_name, found_blobs):
super().__init__('Multiple blobs found for "{}": {}'.format(blob_name, found_blobs))
class NoBlobFoundError(BalrogError):
def __init__(self, blob_name):
super().__init__('No blob found for "{}"'.format(blob_name))
def get_release_blob(blob_name):
url = '{}/releases/{}'.format(BALROG_API_ROOT, blob_name)
req = requests.get(url, verify=True, timeout=4)
req.raise_for_status()
return req.json()
def get_releases(blob_name, name_prefix=None):
url = '{}/releases'.format(BALROG_API_ROOT)
params = {
'product': extract_product_from_blob_name(blob_name),
'name_prefix': blob_name if name_prefix is None else name_prefix,
'names_only': True
}
req = requests.get(url, verify=True, params=params, timeout=4)
req.raise_for_status()
return req.json()['names']
def extract_product_from_blob_name(blob_name):
return blob_name.split('-')[0]
def ensure_blob_name_exists_on_balrog(blob_name):
releases = get_releases(blob_name)
if len(releases) > 1:
raise TooManyBlobsFoundError(blob_name, releases)
if len(releases) < 1:
raise NoBlobFoundError(blob_name)
def craft_wnp_blob(orig_blob, wnp_url, for_channels, for_locales=None, for_version=None):
blob_name = orig_blob['name']
for_channels = [channel.strip() for channel in for_channels.split(',')]
for_locales = get_for_locales(blob_name, for_locales)
for_version = get_for_version(blob_name, for_version)
new_blob = deepcopy(orig_blob)
update_rules = new_blob.setdefault('updateLine', [])
existing_wnp_rules = [
rule for rule in update_rules if rule.get('fields', {}).get('actions', '') == "showURL"
]
number_of_existing_rules = len(existing_wnp_rules)
if number_of_existing_rules > 1:
raise NotImplementedError('Cannot handle releases that have more than 1 WNP rule')
elif number_of_existing_rules == 1:
existing_wnp_rule = existing_wnp_rules[0]
        log.warning('replacing existing rule: {}'.format(existing_wnp_rule))
update_rules.remove(existing_wnp_rule)
wnp_rule = {
'fields': {
'actions': 'showURL',
'openURL': wnp_url,
},
'for': {
'channels': for_channels,
'locales': for_locales,
'versions': [for_version],
},
}
update_rules.append(wnp_rule)
log.info('New updateLine rules: {}'.format(update_rules))
return new_blob
def get_for_locales(blob_name, for_locales=None):
if for_locales is None:
product = extract_product_from_blob_name(blob_name)
all_releases_names_for_product = get_releases(blob_name, name_prefix=product)
previous_release = find_previous_release(blob_name, all_releases_names_for_product)
previous_release_blob = get_release_blob(blob_name=previous_release)
for_locales = _get_locales_from_blob(previous_release_blob, previous_release)
        log.info('for_locales derived from previous release "{}": {}'.format(previous_release, for_locales))
else:
for_locales = [locale.strip() for locale in for_locales.split(',')]
log.info('Using for_locales from command line: {}'.format(for_locales))
if not isinstance(for_locales, list):
raise BalrogError('{} is not a list'.format(for_locales))
return for_locales
_ENDS_WITH_BUILD_REGEX = re.compile(r'build\d+$')
def find_previous_release(blob_name, all_releases_names_for_product):
original_release = BalrogReleaseName.parse(blob_name)
# ends_with_build strips out nightly blobs and the ones that were created manually
ends_with_build = [
release
for release in all_releases_names_for_product
if _ENDS_WITH_BUILD_REGEX.search(release)
]
balrog_releases = [BalrogReleaseName.parse(release) for release in ends_with_build]
same_type = [
release
for release in balrog_releases
if release.version.version_type == original_release.version.version_type
]
if original_release.version.is_release:
same_type = [
release for release in same_type if release.version.is_release
] # strips ESR out
elif original_release.version.is_esr:
same_type = [
release for release in same_type if release.version.is_esr
] # strips release out
sorted_releases = same_type
sorted_releases.sort(reverse=True)
for release in sorted_releases:
if release < original_release:
previous_release = str(release)
log.info('Previous release was: {}'.format(previous_release))
return previous_release
raise BalrogError('Could not find a version smaller than {}'.format(original_release))
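# Illustrative example (blob names are assumptions): for blob_name
# 'Firefox-62.0-build2' with candidates ['Firefox-62.0-build1',
# 'Firefox-61.0.2-build1', 'Firefox-60.2.0esr-build1'], the ESR blob is
# filtered out and 'Firefox-62.0-build1' -- the largest release below
# build2 -- is returned.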
def _get_locales_from_blob(blob, blob_name):
locales = []
for rule in blob.get('updateLine', []):
candidate_locales = rule.get('for', {}).get('locales', [])
if candidate_locales:
if locales:
raise BalrogError(
'Too many locales defined in blob "{}". Found {} and {}'.format(
blob_name, candidate_locales, locales
)
)
locales = candidate_locales
if not locales:
raise BalrogError('No locales found in blob "{}"'.format(blob_name))
return locales
_FOR_VERSION_PATTERN = re.compile(r'<\d+\.0')
def get_for_version(blob_name, for_version=None):
if for_version is None:
balrog_release = BalrogReleaseName.parse(blob_name)
for_version = '<{}.0'.format(balrog_release.version.major_number)
log.info('for_version build from original blob: {}'.format(for_version))
else:
log.info('Using for_version from command line: {}'.format(for_version))
if _FOR_VERSION_PATTERN.match(for_version) is None:
raise BalrogError('{} does not match a valid for_version pattern'.format(for_version))
return for_version
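# Illustrative, hedged usage sketch (not part of the original module;
# the blob name, channels, and URL below are made-up examples).
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    example_blob = 'Firefox-62.0-build1'
    ensure_blob_name_exists_on_balrog(example_blob)
    wnp_blob = craft_wnp_blob(
        get_release_blob(example_blob),
        wnp_url='https://www.mozilla.org/%LOCALE%/firefox/whatsnew/',
        for_channels='release-localtest, release-cdntest',
    )
    log.info('Crafted WNP blob for %s', wnp_blob['name'])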
| mpl-2.0 | -8,396,954,852,463,452,000 | 32.680851 | 99 | 0.643399 | false |
ImTheLucKyOne/check_mk_emcunity | emcunity300/perfometer/emcunity_lun.py | 1 | 2232 | #!/usr/bin/env python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
#
# Written / Edited by Philipp Näther
# [email protected]
# Perf-O-Meters for Check_MK's checks
#
# They are called with:
# 1. row -> a dictionary of the data row with at least the
# keys "service_perf_data", "service_state" and "service_check_command"
# 2. The check command (might be extracted from the performance data
# in a PNP-like manner, e.g if perfdata is "value=10.5;0;100.0;20;30 [check_disk]
# 3. The parsed performance data as a list of 7-tuples of
# (varname, value, unit, warn, crit, min, max)
def perfometer_emcunity_lun(row, check_command, perf_data):
used_mb = perf_data[0][1]
maxx = perf_data[0][-1]
# perf data might be incomplete, if trending perfdata is off...
uncommitted_mb = 0
for entry in perf_data:
if entry[0] == "uncommitted":
uncommitted_mb = entry[1]
break
perc_used = 100 * (float(used_mb) / float(maxx))
perc_uncommitted = 100 * (float(uncommitted_mb) / float(maxx))
perc_totally_free = 100 - perc_used - perc_uncommitted
h = '<table><tr>'
if perc_used + perc_uncommitted <= 100:
        # Regular handling, no overcommit
h += perfometer_td(perc_used, "#00ffc6")
h += perfometer_td(perc_uncommitted, "#eeccff")
h += perfometer_td(perc_totally_free, "white")
else:
        # Visualize overcommitted space by scaling to the total overcommitment value
# and drawing the capacity as red line in the perfometer
total = perc_used + perc_uncommitted
perc_used_bar = perc_used * 100 / total
perc_uncommitted_bar = perc_uncommitted * 100 / total
perc_free = (100 - perc_used) * 100 / total
h += perfometer_td(perc_used_bar, "#00ffc6")
        h += perfometer_td(perc_free, "white")
h += perfometer_td(1, "red") # This line visualizes the capacity
h += perfometer_td(perc_uncommitted - perc_free, "#eeccff")
h += "</tr></table>"
legend = "%0.2f%%" % perc_used
if uncommitted_mb:
legend += " (+%0.2f%%)" % perc_uncommitted
return legend, h
perfometers["check_mk-emcunity_lun"] = perfometer_emcunity_lun
| gpl-3.0 | 892,500,301,874,742,900 | 38.839286 | 84 | 0.628418 | false |