repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
owlabs/incubator-airflow | airflow/utils/tests.py | 1 | 4852 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
import unittest
import attr
from airflow.models import TaskInstance
from airflow.models.baseoperator import BaseOperator, BaseOperatorLink
from airflow.utils.decorators import apply_defaults
def skipUnlessImported(module, obj):
import importlib
try:
m = importlib.import_module(module)
except ImportError:
m = None
return unittest.skipUnless(
obj in dir(m),
"Skipping test because {} could not be imported from {}".format(
obj, module))
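# Illustrative usage (not part of the original file; module and class names
# are hypothetical): skipUnlessImported can decorate a test case so it is
# skipped when an optional dependency is missing, e.g.:
#
#   @skipUnlessImported('airflow.hooks.hive_hooks', 'HiveCliHook')
#   class HiveTests(unittest.TestCase):
#       ...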
def assertEqualIgnoreMultipleSpaces(case, first, second, msg=None):
def _trim(s):
return re.sub(r"\s+", " ", s.strip())
return case.assertEqual(_trim(first), _trim(second), msg)
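# Illustrative example (not part of the original file): the helper collapses
# runs of whitespace before comparing, so these two strings compare equal:
#
#   assertEqualIgnoreMultipleSpaces(case, "SELECT  *\n  FROM t", "SELECT * FROM t")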
# Custom Operator and extra Operator Links used for Tests in tests_views.py
class AirflowLink(BaseOperatorLink):
"""
Operator Link for Apache Airflow Website
"""
name = 'airflow'
def get_link(self, operator, dttm):
return 'should_be_overridden'
class Dummy2TestOperator(BaseOperator):
"""
Example of an Operator that has an extra operator link
and will be overridden by the one defined in tests/plugins/test_plugin.py
"""
operator_extra_links = (
AirflowLink(),
)
class Dummy3TestOperator(BaseOperator):
"""
Example of an operator that has no extra Operator link.
An operator link would be added to this operator via Airflow plugin
"""
operator_extra_links = ()
@attr.s
class CustomBaseIndexOpLink(BaseOperatorLink):
index = attr.ib(type=int) # type: int
@property
def name(self):
return 'BigQuery Console #{index}'.format(index=self.index + 1)
def get_link(self, operator, dttm):
ti = TaskInstance(task=operator, execution_date=dttm)
search_queries = ti.xcom_pull(task_ids=operator.task_id, key='search_query')
if not search_queries:
return None
if len(search_queries) < self.index:
return None
search_query = search_queries[self.index]
return 'https://console.cloud.google.com/bigquery?j={}'.format(search_query)
class CustomOpLink(BaseOperatorLink):
"""
Operator Link for Apache Airflow Website
"""
name = 'Google Custom'
def get_link(self, operator, dttm):
ti = TaskInstance(task=operator, execution_date=dttm)
search_query = ti.xcom_pull(task_ids=operator.task_id, key='search_query')
return 'http://google.com/custom_base_link?search={}'.format(search_query)
class CustomOperator(BaseOperator):
template_fields = ['bash_command']
@property
def operator_extra_links(self):
"""
Return operator extra links
"""
if isinstance(self.bash_command, str) or self.bash_command is None:
return (
CustomOpLink(),
)
return (
CustomBaseIndexOpLink(i) for i, _ in enumerate(self.bash_command)
)
@apply_defaults
def __init__(self, bash_command=None, *args, **kwargs):
super(CustomOperator, self).__init__(*args, **kwargs)
self.bash_command = bash_command
def execute(self, context):
self.log.info("Hello World!")
context['task_instance'].xcom_push(key='search_query', value="dummy_value")
class GoogleLink(BaseOperatorLink):
"""
Operator Link for Apache Airflow Website for Google
"""
name = 'google'
operators = [Dummy3TestOperator, CustomOperator]
def get_link(self, operator, dttm):
return 'https://www.google.com'
class AirflowLink2(BaseOperatorLink):
"""
Operator Link for Apache Airflow Website for 1.10.5
"""
name = 'airflow'
operators = [Dummy2TestOperator, Dummy3TestOperator]
def get_link(self, operator, dttm):
return 'https://airflow.apache.org/1.10.5/'
class GithubLink(BaseOperatorLink):
"""
Operator Link for Apache Airflow Github
"""
name = 'github'
def get_link(self, operator, dttm):
return 'https://github.com/apache/airflow'
| apache-2.0 | 8,383,117,429,937,916,000 | 28.585366 | 84 | 0.670033 | false |
wehriam/awspider | awspider/aws/sdb.py | 1 | 29905 | import base64
import hmac
import hashlib
import urllib
import xml.etree.cElementTree as ET
from datetime import datetime
import time
import dateutil.parser
import logging
from twisted.internet.defer import DeferredList
from ..requestqueuer import RequestQueuer
from .lib import etree_to_dict, safe_quote_tuple
LOGGER = logging.getLogger("main")
SDB_NAMESPACE = "{http://sdb.amazonaws.com/doc/2009-04-15/}"
def base10toN(num,n):
"""Change a to a base-n number.
Up to base-36 is supported without special notation."""
num_rep={10:'a',
11:'b',
12:'c',
13:'d',
14:'e',
15:'f',
16:'g',
17:'h',
18:'i',
19:'j',
20:'k',
21:'l',
22:'m',
23:'n',
24:'o',
25:'p',
26:'q',
27:'r',
28:'s',
29:'t',
30:'u',
31:'v',
32:'w',
33:'x',
34:'y',
35:'z'}
new_num_string=''
current=num
while current!=0:
remainder=current%n
if 36>remainder>9:
remainder_string=num_rep[remainder]
elif remainder>=36:
remainder_string='('+str(remainder)+')'
else:
remainder_string=str(remainder)
new_num_string=remainder_string+new_num_string
current=current/n
return new_num_string
def base10to36(i):
return base10toN(i, 36)
def base36to10(s):
return int(s, 36)
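# Illustrative examples (not in the original source); note that base10toN
# returns an empty string for 0:
#   base10to36(35)  -> 'z'
#   base10to36(36)  -> '10'
#   base36to10('z') -> 35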
def sdb_now(offset=0):
"""Return an 11 character, zero padded string with the current Unixtime.
**Keyword arguments:**
* *offset* -- Offset in seconds. (Default 0)
"""
return str(int(offset + time.time())).zfill(11)
def sdb_now_add(seconds, offset=0):
"""Return an 11 character, zero padded string with the current Unixtime
plus an integer.
**Arguments:**
* *seconds* -- Seconds to add to the current time.
**Keyword arguments:**
* *offset* -- Offset in seconds. (Default 0)
"""
return str(int(offset + time.time() + seconds)).zfill(11)
def sdb_parse_time(date_string, offset=0):
"""Parse a date string, then return an 11 character, zero padded
string with the current Unixtime plus an integer.
**Arguments:**
* *date_string* -- Date string
**Keyword arguments:**
* *offset* -- Offset in seconds. (Default 0)
"""
parsed_time = time.mktime(dateutil.parser.parse(date_string).timetuple())
return str(int(offset + parsed_time)).zfill(11)
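# Illustrative note (not in the original source): the 11-character,
# zero-padded strings sort lexicographically in timestamp order, which is why
# SimpleDB values are stored this way, e.g. sdb_now() -> '01300000000' and
# sdb_now_add(3600) -> the same format, one hour later.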
def sdb_latitude(latitude):
"""Return an 8 character, zero padded string version of the
latitude parameter.
**Arguments:**
* *latitude* -- Latitude.
"""
adjusted = (90 + float(latitude)) * 100000
return str(int(adjusted)).zfill(8)
def sdb_longitude(longitude):
"""Return an 8 character, zero padded string version of the
longitude parameter.
**Arguments:**
* *longitude* -- Longitude.
"""
adjusted = (180 + float(longitude)) * 100000
return str(int(adjusted)).zfill(8)
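# Illustrative examples (not in the original source): the fixed offsets keep
# the values non-negative so the zero-padded strings sort correctly as text:
#   sdb_latitude("-90")    -> '00000000'
#   sdb_latitude("40.77")  -> '13077000'
#   sdb_longitude("-73.9") -> '10610000'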
class AmazonSDB:
"""
Amazon Simple Database API.
"""
host = "sdb.amazonaws.com"
box_usage = 0.0
def __init__(self, aws_access_key_id, aws_secret_access_key, rq=None):
"""
**Arguments:**
* *aws_access_key_id* -- Amazon AWS access key ID
* *aws_secret_access_key* -- Amazon AWS secret access key
**Keyword arguments:**
* *rq* -- Optional RequestQueuer object.
"""
if rq is None:
self.rq = RequestQueuer()
else:
self.rq = rq
self.aws_access_key_id = aws_access_key_id
self.aws_secret_access_key = aws_secret_access_key
self.rq.setHostMaxRequestsPerSecond(self.host, 0)
self.rq.setHostMaxSimultaneousRequests(self.host, 0)
def copyDomain(self, source_domain, destination_domain):
"""
Copy all elements of a source domain to a destination domain.
**Arguments:**
* *source_domain* -- Source domain name
* *destination_domain* -- Destination domain name
"""
d = self.checkAndCreateDomain(destination_domain)
d.addCallback(self._copyDomainCallback, source_domain,
destination_domain)
return d
def _copyDomainCallback(self, data, source_domain, destination_domain):
return self._copyDomainCallback2(source_domain, destination_domain)
def _copyDomainCallback2(self, source_domain, destination_domain,
next_token=None, total_box_usage=0):
parameters = {}
parameters["Action"] = "Select"
parameters["SelectExpression"] = "SELECT * FROM `%s`" % source_domain
if next_token is not None:
parameters["NextToken"] = next_token
d = self._request(parameters)
d.addCallback(self._copyDomainCallback3,
source_domain=source_domain,
destination_domain=destination_domain,
total_box_usage=total_box_usage)
d.addErrback(self._genericErrback)
return d
def _copyDomainCallback3(self, data, source_domain, destination_domain,
total_box_usage=0):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
total_box_usage += box_usage
next_token_element = xml.find(".//%sNextToken" % SDB_NAMESPACE)
if next_token_element is not None:
next_token = next_token_element.text
else:
next_token = None
items = xml.findall(".//%sItem" % SDB_NAMESPACE)
results = {}
for item in items:
key = item.find("./%sName" % SDB_NAMESPACE).text
attributes = item.findall("%sAttribute" % SDB_NAMESPACE)
attribute_dict = {}
for attribute in attributes:
attr_name = attribute.find("./%sName" % SDB_NAMESPACE).text
attr_value = attribute.find("./%sValue" % SDB_NAMESPACE).text
if attr_name in attribute_dict:
attribute_dict[attr_name].append(attr_value)
else:
attribute_dict[attr_name] = [attr_value]
results[key] = attribute_dict
deferreds = []
for key in results:
d = self.putAttributes(destination_domain, key, results[key])
d.addErrback(self._copyPutAttributesErrback, destination_domain, key, results[key])
deferreds.append(d)
d = DeferredList(deferreds, consumeErrors=True)
d.addCallback(self._copyDomainCallback4, source_domain,
destination_domain, next_token=next_token, total_box_usage=total_box_usage)
return d
def _copyDomainCallback4(self, data, source_domain, destination_domain,
next_token=None, total_box_usage=0):
for row in data:
if row[0] == False:
raise row[1]
if next_token is not None:
return self._copyDomainCallback2(
source_domain=source_domain,
destination_domain=destination_domain,
next_token=next_token,
total_box_usage=total_box_usage)
LOGGER.debug("""CopyDomain:\n%s -> %s\nBox usage: %s""" % (
source_domain,
destination_domain,
total_box_usage))
return True
def _copyPutAttributesErrback(self, error, destination_domain, key, attributes, count=0):
if count < 3:
d = self.putAttributes(destination_domain, key, attributes)
d.addErrback(self._copyPutAttributesErrback, destination_domain, key, attributes, count=count + 1)
return d
return error
def checkAndCreateDomain(self, domain):
"""
Check for a SimpleDB domain's existence. If it does not exist,
create it.
**Arguments:**
* *domain* -- Domain name
"""
d = self.domainMetadata(domain)
d.addErrback(self._checkAndCreateDomainErrback, domain)
return d
def _checkAndCreateDomainErrback(self, error, domain):
if hasattr(error, "value") and hasattr(error.value, "status"):
if int(error.value.status) == 400:
d = self.createDomain(domain)
d.addErrback(self._checkAndCreateDomainErrback2, domain)
return d
message = "Could not find or create domain '%s'." % domain
raise Exception(message)
def _checkAndCreateDomainErrback2(self, error, domain):
message = "Could not create domain '%s'" % domain
raise Exception(message)
def createDomain(self, domain):
"""
Create a SimpleDB domain.
**Arguments:**
* *domain* -- Domain name
"""
parameters = {
"Action":"CreateDomain",
"DomainName":domain
}
d = self._request(parameters)
d.addCallback(self._createDomainCallback, domain)
d.addErrback(self._genericErrback)
return d
def _createDomainCallback(self, data, domain):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
LOGGER.debug("Created SimpleDB domain '%s'. Box usage: %s" % (domain,
box_usage))
return True
def deleteDomain(self, domain):
"""
Delete a SimpleDB domain.
**Arguments:**
* *domain* -- Domain name
"""
parameters = {}
parameters["Action"] = "DeleteDomain"
parameters["DomainName"] = domain
d = self._request(parameters)
d.addCallback(self._deleteDomainCallback, domain)
d.addErrback(self._genericErrback)
return d
def _deleteDomainCallback(self, data, domain):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
LOGGER.debug("Deleted SimpleDB domain '%s'. Box usage: %s" % (domain,
box_usage))
return True
def listDomains(self):
"""
List SimpleDB domains associated with an account.
"""
return self._listDomains()
def _listDomains(self,
next_token=None,
previous_results=None,
total_box_usage=0):
parameters = {}
parameters["Action"] = "ListDomains"
if next_token is not None:
parameters["NextToken"] = next_token
d = self._request(parameters)
d.addCallback(self._listDomainsCallback,
previous_results=previous_results,
total_box_usage=total_box_usage)
d.addErrback(self._genericErrback)
return d
def _listDomainsCallback(self,
data,
previous_results=None,
total_box_usage=0):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
total_box_usage += box_usage
xml_response = etree_to_dict(xml, namespace=SDB_NAMESPACE)
if "DomainName" in xml_response["ListDomainsResult"][0]:
results = xml_response["ListDomainsResult"][0]["DomainName"]
else:
results = []
if previous_results is not None:
results.extend(previous_results)
if "NextToken" in xml_response["ListDomainsResult"]:
next_token = xml_response["ListDomainsResult"][0]["NextToken"][0]
return self._listDomains(next_token=next_token,
previous_results=results,
total_box_usage=total_box_usage)
LOGGER.debug("Listed domains. Box usage: %s" % total_box_usage)
return results
def domainMetadata(self, domain):
"""
Return meta-information about a domain.
**Arguments:**
* *domain* -- Domain name
"""
parameters = {}
parameters["Action"] = "DomainMetadata"
parameters["DomainName"] = domain
d = self._request(parameters)
d.addCallback(self._domainMetadataCallback, domain)
d.addErrback(self._genericErrback)
return d
def _domainMetadataCallback(self, data, domain):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
LOGGER.debug("Got SimpleDB domain '%s' metadata. Box usage: %s" % (
domain,
box_usage))
xml_response = etree_to_dict(xml, namespace=SDB_NAMESPACE)
return xml_response["DomainMetadataResult"][0]
def batchPutAttributes(self, domain, attributes_by_item_name,
replace_by_item_name=None):
"""
Batch put attributes into domain.
**Arguments:**
* *domain* -- Domain name
* *attributes_by_item_name* -- Dictionary of dictionaries. First
level keys are the item names, value is a dictionary of key/value
pairs. Example: ``{"item_name":{"attribute_name":"value"}}``
**Keyword arguments:**
* *replace_by_item_name* -- Dictionary of lists. First level keys
are the item names, value is a list of attributes that should
be overwritten. ``{"item_name":["attribute_name"]}`` (Default
empty dictionary)
"""
if replace_by_item_name is None:
replace_by_item_name = {}
if len(attributes_by_item_name) > 25:
raise Exception("Too many items in batchPutAttributes. Up to 25 items per call allowed.")
for item_name in replace_by_item_name:
if not isinstance(replace_by_item_name[item_name], list):
raise Exception("Replace argument '%s' must be a list." % item_name)
for item_name in attributes_by_item_name:
if not isinstance(attributes_by_item_name[item_name], dict):
raise Exception("Attributes argument '%s' must be a dictionary." % item_name)
parameters = {}
parameters["Action"] = "BatchPutAttributes"
parameters["DomainName"] = domain
i = 0
for item_name in attributes_by_item_name:
parameters["Item.%s.ItemName" % i] = item_name
attributes_list = []
for attribute in attributes_by_item_name[item_name].items():
# If the attribute is a list, split into multiple attributes.
if isinstance(attribute[1], list):
for value in attribute[1]:
attributes_list.append((attribute[0], value))
else:
attributes_list.append(attribute)
j = 0
for attribute in attributes_list:
parameters["Item.%s.Attribute.%s.Name" % (i,j)] = attribute[0]
parameters["Item.%s.Attribute.%s.Value" % (i,j)] = attribute[1]
if item_name in replace_by_item_name:
if attribute[0] in replace_by_item_name[item_name]:
parameters["Item.%s.Attribute.%s.Replace" % (i,j)] = "true"
j += 1
i += 1
d = self._request(parameters)
d.addCallback(
self._batchPutAttributesCallback,
domain,
attributes_by_item_name)
d.addErrback(self._genericErrback)
return d
def _batchPutAttributesCallback(self,
data,
domain,
attributes_by_item_name):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
LOGGER.debug("""Batch put attributes %s in SimpleDB domain '%s'. Box usage: %s""" % (
attributes_by_item_name,
domain,
box_usage))
return True
def putAttributes(self, domain, item_name, attributes, replace=None):
"""
Put attributes into domain at item_name.
**Arguments:**
* *domain* -- Domain name
* *item_name* -- Item name
* *attributes* -- Dictionary of attributes
**Keyword arguments:**
* *replace* -- List of attributes that should be overwritten
(Default empty list)
"""
if replace is None:
replace = []
if not isinstance(replace, list):
raise Exception("Replace argument must be a list.")
if not isinstance(attributes, dict):
raise Exception("Attributes argument must be a dictionary.")
parameters = {}
parameters["Action"] = "PutAttributes"
parameters["DomainName"] = domain
parameters["ItemName"] = item_name
attributes_list = []
for attribute in attributes.items():
# If the attribute is a list, split into multiple attributes.
if isinstance(attribute[1], list):
for value in attribute[1]:
attributes_list.append((attribute[0], value))
else:
attributes_list.append(attribute)
i = 0
for attribute in attributes_list:
parameters["Attribute.%s.Name" % i] = attribute[0]
parameters["Attribute.%s.Value" % i] = attribute[1]
if attribute[0] in replace:
parameters["Attribute.%s.Replace" % i] = "true"
i += 1
d = self._request(parameters)
d.addCallback(self._putAttributesCallback, domain, item_name, attributes)
d.addErrback(self._genericErrback)
return d
def _putAttributesCallback(self, data, domain, item_name, attributes):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
LOGGER.debug("""Put attributes %s on '%s' in SimpleDB domain '%s'. Box usage: %s""" % (
attributes,
item_name,
domain,
box_usage))
return True
def getAttributes(self, domain, item_name, attribute_name=None):
"""
Get one or all attributes from domain at item_name.
**Arguments:**
* *domain* -- Domain name
* *item_name* -- Item name
**Keyword arguments:**
* *attribute_name* -- Name of specific attribute to get (Default None)
"""
parameters = {}
parameters["Action"] = "GetAttributes"
parameters["DomainName"] = domain
parameters["ItemName"] = item_name
if attribute_name is not None:
parameters["AttributeName"] = attribute_name
d = self._request(parameters)
d.addCallback(self._getAttributesCallback, domain, item_name)
d.addErrback(self._genericErrback)
return d
def _getAttributesCallback(self, data, domain, item_name):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
LOGGER.debug("""Got attributes from '%s' in SimpleDB domain '%s'. Box usage: %s""" % (
item_name,
domain,
box_usage))
xml_response = etree_to_dict(xml, namespace=SDB_NAMESPACE)
attributes = {}
if xml_response["GetAttributesResult"][0] is None:
raise Exception("Item does not exist.")
for attribute in xml_response["GetAttributesResult"][0]['Attribute']:
if attribute["Name"][0] not in attributes:
attributes[attribute["Name"][0]] = []
attributes[attribute["Name"][0]].extend(attribute["Value"])
return attributes
def delete(self, domain, item_name):
"""
Delete all attributes from domain at item_name.
**Arguments:**
* *domain* -- Domain name
* *item_name* -- Item name
"""
return self.deleteAttributes(domain, item_name)
def deleteAttributes(self, domain, item_name, attributes=None):
"""
Delete one or all attributes from domain at item_name.
**Arguments:**
* *domain* -- Domain name
* *item_name* -- Item name
**Keyword arguments:**
* *attributes* -- List of attribute names, or dictionary of
attribute name / value pairs. (Default empty dict)
"""
if attributes is None:
attributes = {}
if not isinstance(attributes, dict) and \
not isinstance(attributes, list):
message = "Attributes parameter must be a dictionary or a list."
raise Exception(message)
parameters = {}
parameters["Action"] = "DeleteAttributes"
parameters["DomainName"] = domain
parameters["ItemName"] = item_name
if isinstance(attributes, dict):
attr_count = 1
for key in attributes:
parameters["Attribute.%s.Name" % attr_count] = key
parameters["Attribute.%s.Value" % attr_count] = attributes[key]
attr_count += 1
if isinstance(attributes, list):
attr_count = 0
for key in attributes:
parameters["Attribute.%s.Name" % attr_count] = key
attr_count += 1
d = self._request(parameters)
d.addCallback(self._deleteAttributesCallback, domain, item_name)
d.addErrback(self._genericErrback)
return d
def _deleteAttributesCallback(self, data, domain, item_name):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
LOGGER.debug("""Deleted attributes from '%s' in SimpleDB domain '%s'. Box usage: %s""" % (
item_name,
domain,
box_usage))
return True
def select(self, select_expression, max_results=0):
"""
Run a select query
**Arguments:**
* *select_expression* -- Select expression
"""
if "count(" in select_expression.lower():
return self._selectCount(select_expression)
return self._select(select_expression, max_results=max_results)
def _selectCount(self, select_expression, next_token=None,
previous_count=0,
total_box_usage=0):
parameters = {}
parameters["Action"] = "Select"
parameters["SelectExpression"] = select_expression
if next_token is not None:
parameters["NextToken"] = next_token
d = self._request(parameters)
d.addCallback(self._selectCountCallback,
select_expression=select_expression,
previous_count=previous_count,
total_box_usage=total_box_usage)
d.addErrback(self._genericErrback)
return d
def _selectCountCallback(self, data, select_expression=None,
previous_count=0,
total_box_usage=0):
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
total_box_usage += box_usage
next_token_element = xml.find(".//%sNextToken" % SDB_NAMESPACE)
if next_token_element is not None:
next_token = next_token_element.text
else:
next_token = None
count = previous_count + int(xml.find(".//%sValue" % SDB_NAMESPACE).text)
if next_token is not None:
return self._selectCount(select_expression, next_token=next_token,
previous_count=count,
total_box_usage=total_box_usage)
LOGGER.debug("""Select:\n'%s'\nBox usage: %s""" % (
select_expression,
total_box_usage))
return count
def _select(self, select_expression, next_token=None,
previous_results=None,
total_box_usage=0,
max_results=0):
parameters = {}
parameters["Action"] = "Select"
parameters["SelectExpression"] = select_expression
if next_token is not None:
parameters["NextToken"] = next_token
d = self._request(parameters)
d.addCallback(self._selectCallback,
select_expression=select_expression,
previous_results=previous_results,
total_box_usage=total_box_usage,
max_results=max_results)
d.addErrback(self._genericErrback)
return d
def _selectCallback(self, data, select_expression=None,
previous_results=None,
total_box_usage=0,
max_results=0):
if previous_results is not None:
results = previous_results
else:
results = {}
xml = ET.fromstring(data["response"])
box_usage = float(xml.find(".//%sBoxUsage" % SDB_NAMESPACE).text)
self.box_usage += box_usage
total_box_usage += box_usage
next_token_element = xml.find(".//%sNextToken" % SDB_NAMESPACE)
if next_token_element is not None:
next_token = next_token_element.text
else:
next_token = None
items = xml.findall(".//%sItem" % SDB_NAMESPACE)
for item in items:
key = item.find("./%sName" % SDB_NAMESPACE).text
attributes = item.findall("%sAttribute" % SDB_NAMESPACE)
attribute_dict = {}
for attribute in attributes:
attr_name = attribute.find("./%sName" % SDB_NAMESPACE).text
attr_value = attribute.find("./%sValue" % SDB_NAMESPACE).text
if attr_name in attribute_dict:
attribute_dict[attr_name].append(attr_value)
else:
attribute_dict[attr_name] = [attr_value]
results[key] = attribute_dict
if next_token is not None:
if max_results == 0 or len(results) < max_results:
return self._select(select_expression, next_token=next_token,
previous_results=results,
total_box_usage=total_box_usage,
max_results=max_results)
LOGGER.debug("""Select:\n'%s'\nBox usage: %s""" % (
select_expression,
total_box_usage))
return results
def _request(self, parameters):
"""
Add authentication parameters and make request to Amazon.
**Arguments:**
* *parameters* -- Key value pairs of parameters
"""
parameters = self._getAuthorization("GET", parameters)
query_string = urllib.urlencode(parameters)
url = "https://%s/?%s" % (self.host, query_string)
if len(url) > 4096:
del parameters['Signature']
parameters = self._getAuthorization("POST", parameters)
query_string = urllib.urlencode(parameters)
url = "https://%s" % (self.host)
d = self.rq.getPage(url, method="POST", postdata=query_string)
return d
else:
d = self.rq.getPage(url, method="GET")
return d
def _canonicalize(self, parameters):
"""
Canonicalize parameters for use with AWS Authorization.
**Arguments:**
* *parameters* -- Key value pairs of parameters
**Returns:**
* A safe-quoted string representation of the parameters.
"""
parameters = parameters.items()
parameters.sort(lambda x, y:cmp(x[0], y[0]))
return "&".join([safe_quote_tuple(x) for x in parameters])
def _getAuthorization(self, method, parameters):
"""
Create authentication parameters.
**Arguments:**
* *method* -- HTTP method of the request
* *parameters* -- Key value pairs of parameters
**Returns:**
* A dictionary of authorization parameters
"""
signature_parameters = {
"AWSAccessKeyId":self.aws_access_key_id,
"SignatureVersion":"2",
"SignatureMethod":"HmacSHA256",
'Timestamp':datetime.utcnow().isoformat()[0:19]+"+00:00",
"AWSAccessKeyId":self.aws_access_key_id,
"Version":"2009-04-15"
}
signature_parameters.update(parameters)
query_string = self._canonicalize(signature_parameters)
string_to_sign = "%(method)s\n%(host)s\n%(resource)s\n%(qs)s" % {
"method":method,
"host":self.host.lower(),
"resource":"/",
"qs":query_string,
}
args = [self.aws_secret_access_key, string_to_sign, hashlib.sha256]
signature = base64.encodestring(hmac.new(*args).digest()).strip()
signature_parameters.update({'Signature': signature})
return signature_parameters
def _genericErrback(self, error):
if hasattr(error, "value"):
if hasattr(error.value, "response"):
xml = ET.XML(error.value.response)
try:
LOGGER.debug(xml.find(".//Message").text)
except Exception, e:
pass
return error
| mit | 3,535,296,104,513,814,500 | 36.664987 | 110 | 0.562281 | false |
marcino239/reactor_arm | reactor_controller/scripts/joystick_control.py | 1 | 4438 | import gflags
import dynamixel
import time
import sys
import pygame
SERVO_STEP = 10
FLAGS = gflags.FLAGS
gflags.DEFINE_string( 'port', '/dev/ttyUSB0', 'dynamixel port' )
gflags.DEFINE_integer( 'baud', 1000000, 'baud rate' )
gflags.DEFINE_integer( 'min_id', 1, 'lowest dynamixel ID' )
gflags.DEFINE_integer( 'max_id', 8, 'highest dynamixel ID' )
gflags.DEFINE_enum( 'command', '', [ '', 'position', 'torque_on', 'torque_off', 'control' ], 'command to execute' )
if __name__ == '__main__':
flags = FLAGS( sys.argv )
serial = dynamixel.SerialStream( port=FLAGS.port,
baudrate=FLAGS.baud,
timeout=1 )
# Instantiate network object
net = dynamixel.DynamixelNetwork( serial )
# Populate network with dynamixel objects
for servoId in range( FLAGS.min_id, FLAGS.max_id + 1 ):
newDynamixel = dynamixel.Dynamixel( servoId, net )
net._dynamixel_map[ servoId ] = newDynamixel
servos = net.get_dynamixels()
print( 'network initialised' )
if FLAGS.command == '' or FLAGS.command == 'torque_off':
for d in servos:
d.torque_enable = False
elif FLAGS.command == 'position':
while True:
pos = [ d.current_position for d in servos ]
pos_txt = [ '{:4d}'.format( p ) for p in pos ]
print( ''.join( pos_txt ) )
time.sleep( 0.25 )
elif FLAGS.command == 'torque_on':
for d in servos:
d.torque_enable = True
elif FLAGS.command == 'control':
pygame.init()
pygame.joystick.init()
js = pygame.joystick.Joystick( 0 )
js.init()
# torque on
for d in servos:
d.moving_speed = 50
d.torque_enable = True
d.torque_limit = 800
d.max_torque = 800
d.goal_position = 512
# Send all the commands to the servos.
net.synchronize()
print( 'moving to default position' )
time.sleep( 5 )
print( 'done' )
# get initial positions
servo_pos = [ d.current_position for d in servos ]
clip = lambda x: int( min( 1023.0, max( 0.0, x ) ) )
while True:
pygame.event.pump()
axis = [ js.get_axis( a ) for a in range( 27 ) ]
servo_pos[0] = clip( servo_pos[0] - axis[0] * SERVO_STEP )
servo_pos[1] = clip( servo_pos[1] + axis[1] * SERVO_STEP )
servo_pos[2] = clip( servo_pos[2] + axis[3] * SERVO_STEP )
servo_pos[3] = clip( servo_pos[3] + axis[2] * SERVO_STEP )
servo_pos[4] = clip( servo_pos[4] - (axis[12] + 1.0) * SERVO_STEP / 2 + (axis[13] + 1.0) * SERVO_STEP / 2 )
servo_pos[5] = clip( servo_pos[5] - (axis[14] + 1.0) * SERVO_STEP / 2 + (axis[15] + 1.0) * SERVO_STEP / 2 )
if axis[0] != 0.0:
# shoulder yaw
servos[0].goal_position = servo_pos[0]
else:
if abs( servos[0].current_position - servo_pos[0] ) > SERVO_STEP:
servo_pos[0] = servos[0].current_position
if axis[1] != 0.0:
# shoulder pitch - coupling
servos[1].goal_position = servo_pos[1]
servos[2].goal_position = 1024 - servo_pos[1]
else:
if abs( servos[1].current_position - servo_pos[1] ) > SERVO_STEP:
servo_pos[1] = servos[1].current_position
if axis[3] != 0.0:
# elbow pitch - coupling
servos[3].goal_position = servo_pos[2]
servos[4].goal_position = 1024 - servo_pos[2]
else:
if abs( servos[3].current_position - servo_pos[2] ) > SERVO_STEP:
servo_pos[2] = servos[3].current_position
if axis[2] != 0.0:
# wrist pitch
servos[5].goal_position = servo_pos[3]
else:
if abs( servos[5].current_position - servo_pos[3] ) > SERVO_STEP:
servo_pos[3] = servos[5].current_position
# wrist roll
servos[6].goal_position = servo_pos[4]
# gripper
servos[7].goal_position = servo_pos[5]
# show desired position
# print( ''.join( [ '{:4d}'.format( p ) for p in servo_pos ] ) )
# current position
# print( ''.join( [ '{:5d}'.format( d.current_position ) for d in servos ] ) )
# goal position
# print( ''.join( [ '{:5d}'.format( d.goal_position ) for d in servos ] ) )
# diff position
# print( 'diff: ' + ''.join( [ '{:5d}'.format( d.current_position - d.goal_position ) for d in servos ] ) )
# current temperature
# print( ''.join( [ '{:3d}'.format( d.current_temperature ) for d in servos ] ) )
# current load
# print( ''.join( [ '{:5d}'.format( d.current_load ) for d in servos ] ) )
# current load and temperature
print( ''.join( [ '{:5d},{:3d} '.format( d.current_load, d.current_temperature ) for d in servos ] ) )
# Send all the commands to the servos.
net.synchronize()
time.sleep( 0.05 )
| gpl-2.0 | 807,746,683,606,955,400 | 28.785235 | 115 | 0.614241 | false |
XianwuLin/block_games | snake-ai/snake.py | 1 | 10849 | # coding: utf-8
import curses
from curses import KEY_RIGHT, KEY_LEFT, KEY_UP, KEY_DOWN
from random import randint
# Width and height of the field the snake moves on
HEIGHT = 10
WIDTH = 20
FIELD_SIZE = HEIGHT * WIDTH
# The snake's head is always the first element of the snake array
HEAD = 0
# Numbers used to mark different kinds of cells. Because every free cell on
# the board gets overwritten with its path length to the food, these three
# values need gaps larger than HEIGHT*WIDTH between them.
FOOD = 0
UNDEFINED = (HEIGHT + 1) * (WIDTH + 1)
SNAKE = 2 * UNDEFINED
# snake is a flat (1-D) array, so adding one of these offsets to an index moves it in one of the four directions
LEFT = -1
RIGHT = 1
UP = -WIDTH
DOWN = WIDTH
# Error code
ERR = -1111
# One-dimensional arrays are used to represent the 2-D structures.
# board is the rectangular field the snake moves on.
# The head starts at (1, 1); the border rows and columns (row 0, the last
# row, column 0 and the last column) are walls and unusable.
# The initial snake length is 1.
board = [0] * FIELD_SIZE
snake = [0] * (FIELD_SIZE+1)
snake[HEAD] = 1*WIDTH+1
snake_size = 1
# Temporary counterparts of the variables above, used for the snake's trial (virtual) moves
tmpboard = [0] * FIELD_SIZE
tmpsnake = [0] * (FIELD_SIZE+1)
tmpsnake[HEAD] = 1*WIDTH+1
tmpsnake_size = 1
# food: position of the food (0 ~ FIELD_SIZE-1), initially at (3, 3)
# best_move: direction of movement
food = 3 * WIDTH + 3
best_move = ERR
# Array of movement directions
mov = [LEFT, RIGHT, UP, DOWN]
# Last key received, and the score
key = KEY_RIGHT
score = 1  # the score also equals the snake's length
# Check whether a cell is covered by the snake's body; if not, it is free and True is returned
def is_cell_free(idx, psize, psnake):
return not (idx in psnake[:psize])
# Check whether position idx can move in direction move
def is_move_possible(idx, move):
flag = False
if move == LEFT:
flag = True if idx%WIDTH > 1 else False
elif move == RIGHT:
flag = True if idx%WIDTH < (WIDTH-2) else False
elif move == UP:
flag = True if idx > (2*WIDTH-1) else False # i.e. idx/WIDTH > 1
elif move == DOWN:
flag = True if idx < (FIELD_SIZE-2*WIDTH) else False # i.e. idx/WIDTH < HEIGHT-2
return flag
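# Illustrative example (not in the original source): with WIDTH = 20, index
# 22 is cell (1, 2), so is_move_possible(22, LEFT) is True (column 2 > 1),
# while index 21 at column 1 cannot move LEFT because column 0 is the wall.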
# Reset the board.
# After board_refresh, the UNDEFINED values have become path lengths to the
# food, so the board has to be reset before it can be reused.
def board_reset(psnake, psize, pboard):
for i in xrange(FIELD_SIZE):
if i == food:
pboard[i] = FOOD
elif is_cell_free(i, psize, psnake): # this cell is free
pboard[i] = UNDEFINED
else: # this cell is snake body
pboard[i] = SNAKE
# Breadth-first search over the whole board: compute, for every non-SNAKE
# cell, the length of the path from that cell to the food.
def board_refresh(pfood, psnake, pboard):
queue = []
queue.append(pfood)
inqueue = [0] * FIELD_SIZE
found = False
# When the while loop ends, every cell other than the snake's body holds
# the length of the path from it to the food.
while len(queue)!=0:
idx = queue.pop(0)
if inqueue[idx] == 1: continue
inqueue[idx] = 1
for i in xrange(4):
if is_move_possible(idx, mov[i]):
if idx + mov[i] == psnake[HEAD]:
found = True
if pboard[idx+mov[i]] < SNAKE: # if this cell is not snake body
if pboard[idx+mov[i]] > pboard[idx]+1:
pboard[idx+mov[i]] = pboard[idx] + 1
if inqueue[idx+mov[i]] == 0:
queue.append(idx+mov[i])
return found
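# Illustrative note (not in the original source): board_refresh is a
# breadth-first flood fill that starts at the food cell; afterwards board[i]
# holds the BFS distance from cell i to the food for every reachable
# non-snake cell.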
# Starting from the snake's head, use the board values to choose, among the
# four cells around the head, the move with the shortest path to the food.
def choose_shortest_safe_move(psnake, pboard):
best_move = ERR
min = SNAKE
for i in xrange(4):
if is_move_possible(psnake[HEAD], mov[i]) and pboard[psnake[HEAD]+mov[i]]<min:
min = pboard[psnake[HEAD]+mov[i]]
best_move = mov[i]
return best_move
# Starting from the snake's head, use the board values to choose, among the
# four cells around the head, the move with the longest path.
def choose_longest_safe_move(psnake, pboard):
best_move = ERR
max = -1
for i in xrange(4):
if is_move_possible(psnake[HEAD], mov[i]) and pboard[psnake[HEAD]+mov[i]]<UNDEFINED and pboard[psnake[HEAD]+mov[i]]>max:
max = pboard[psnake[HEAD]+mov[i]]
best_move = mov[i]
return best_move
# Check whether the head can chase the tail, i.e. whether a path exists
# between head and tail; this keeps the head out of dead ends.
# A virtual operation, performed on tmpboard and tmpsnake.
def is_tail_inside():
global tmpboard, tmpsnake, food, tmpsnake_size
tmpboard[tmpsnake[tmpsnake_size-1]] = 0 # virtually turn the tail into food (virtual, hence done on tmpsnake and tmpboard)
tmpboard[food] = SNAKE # treat the real food cell as snake body
result = board_refresh(tmpsnake[tmpsnake_size-1], tmpsnake, tmpboard) # path length from every cell to the tail
for i in xrange(4): # if head and tail are directly adjacent, return False: following the tail is not possible
if is_move_possible(tmpsnake[HEAD], mov[i]) and tmpsnake[HEAD]+mov[i]==tmpsnake[tmpsnake_size-1] and tmpsnake_size>3:
result = False
return result
# Make the snake's head run one step toward the tail, heading for the tail
# regardless of the body being in the way.
def follow_tail():
global tmpboard, tmpsnake, food, tmpsnake_size
tmpsnake_size = snake_size
tmpsnake = snake[:]
board_reset(tmpsnake, tmpsnake_size, tmpboard) # reset the virtual board
tmpboard[tmpsnake[tmpsnake_size-1]] = FOOD # make the tail the food
tmpboard[food] = SNAKE # treat the real food cell as snake body
board_refresh(tmpsnake[tmpsnake_size-1], tmpsnake, tmpboard) # path length from every cell to the tail
tmpboard[tmpsnake[tmpsnake_size-1]] = SNAKE # restore the tail
return choose_longest_safe_move(tmpsnake, tmpboard) # return the direction (the head moves one step)
# When every other plan fails, pick any feasible direction and take one step.
def any_possible_move():
global food , snake, snake_size, board
best_move = ERR
board_reset(snake, snake_size, board)
board_refresh(food, snake, board)
min = SNAKE
for i in xrange(4):
if is_move_possible(snake[HEAD], mov[i]) and board[snake[HEAD]+mov[i]]<min:
min = board[snake[HEAD]+mov[i]]
best_move = mov[i]
return best_move
def shift_array(arr, size):
for i in xrange(size, 0, -1):
arr[i] = arr[i-1]
def new_food():
global food, snake_size
cell_free = False
while not cell_free:
w = randint(1, WIDTH-2)
h = randint(1, HEIGHT-2)
food = h * WIDTH + w
cell_free = is_cell_free(food, snake_size, snake)
win.addch(food/WIDTH, food%WIDTH, '@')
# The real snake moves here: one step in direction pbest_move.
def make_move(pbest_move):
global key, snake, board, snake_size, score
shift_array(snake, snake_size)
snake[HEAD] += pbest_move
# Esc quits; getch also keeps the drawing fluid (without it, only the final frame would be visible).
win.timeout(10)
event = win.getch()
key = key if event == -1 else event
if key == 27: return
p = snake[HEAD]
win.addch(p/WIDTH, p%WIDTH, '*')
# If the newly added head lands on the food's position: grow the snake by
# one, spawn new food, and reset the board (the old path lengths are no
# longer useful).
if snake[HEAD] == food:
board[snake[HEAD]] = SNAKE # the new head
snake_size += 1
score += 1
if snake_size < FIELD_SIZE: new_food()
else: # the newly added head is not on the food
board[snake[HEAD]] = SNAKE # the new head
board[snake[snake_size]] = UNDEFINED # the tail cell becomes empty
win.addch(snake[snake_size]/WIDTH, snake[snake_size]%WIDTH, ' ')
# Run one virtual pass; the caller then checks whether that run is feasible
# and only moves for real when it is. After the virtual snake has eaten the
# food, we know where the snake would sit on the board.
def virtual_shortest_move():
global snake, board, snake_size, tmpsnake, tmpboard, tmpsnake_size, food
tmpsnake_size = snake_size
tmpsnake = snake[:] # tmpsnake = snake would make both names point at the same list
tmpboard = board[:] # board already holds each cell's path length to the food, no need to recompute
board_reset(tmpsnake, tmpsnake_size, tmpboard)
food_eated = False
while not food_eated:
board_refresh(food, tmpsnake, tmpboard)
move = choose_shortest_safe_move(tmpsnake, tmpboard)
shift_array(tmpsnake, tmpsnake_size)
tmpsnake[HEAD] += move # prepend a new position as the head
# If the newly added head is exactly on the food's position: grow by one,
# reset the board, and the food cell becomes part of the snake (SNAKE).
if tmpsnake[HEAD] == food:
tmpsnake_size += 1
board_reset(tmpsnake, tmpsnake_size, tmpboard) # the snake's board position after the virtual run (label101010)
tmpboard[food] = SNAKE
food_eated = True
else: # the head is not on the food: the new cell becomes the head, the last one becomes empty
tmpboard[tmpsnake[HEAD]] = SNAKE
tmpboard[tmpsnake[tmpsnake_size]] = UNDEFINED
# Called when a path exists between the snake and the food.
def find_safe_way():
global snake, board
safe_move = ERR
# Run one virtual pass; a path between snake and food is already guaranteed,
# so the run is valid. It yields the virtual snake's position on the board,
# i.e. tmpboard, see label101010.
virtual_shortest_move() # the only call site of this function
if is_tail_inside(): # if head and tail stay connected after the virtual run, take the shortest path (one step)
return choose_shortest_safe_move(snake, board)
safe_move = follow_tail() # otherwise virtually follow the tail for one step, if that can be done
return safe_move
curses.initscr()
win = curses.newwin(HEIGHT, WIDTH, 0, 0)
win.keypad(1)
curses.noecho()
curses.curs_set(0)
win.border(0)
win.nodelay(1)
win.addch(food/WIDTH, food%WIDTH, '@')
while key != 27:
win.border(0)
win.addstr(0, 2, 'S:' + str(score) + ' ')
win.timeout(10)
# Read keyboard input; this also keeps the display fluid.
event = win.getch()
key = key if event == -1 else event
# Reset the board
board_reset(snake, snake_size, board)
# If the snake can reach the food, board_refresh returns True, and every
# element of board other than the body (=SNAKE) is the shortest path length
# from that cell to the food.
if board_refresh(food, snake, board):
best_move = find_safe_way() # the only call site of find_safe_way
else:
best_move = follow_tail()
if best_move == ERR:
best_move = any_possible_move()
# Each round of thinking above yields one direction; run a single step.
if best_move != ERR: make_move(best_move)
else: break
curses.endwin()
print("\nScore - " + str(score))
| mit | -5,874,944,903,512,969,000 | 28.52069 | 128 | 0.625044 | false |
rk700/rbook | rbook/r_pdf.py | 1 | 6074 | #!/usr/bin/env python
#-*- coding: utf8 -*-
#
# Copyright (C) 2012 Ruikai Liu <[email protected]>
#
# This file is part of rbook.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with rbook. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import wx
import doc_scroll
import fitz
class DocScroll(doc_scroll.DocScroll):
def __init__(self, parent, current_page_idx):
#self.ctx = parent.ctx
#self.currentPage = parent.document.loadPage(current_page_idx)
#self.width = parent.document.load_page(current_page_idx).bound_page().get_width()
self.width = parent.document.loadPage(current_page_idx).bound().width
doc_scroll.DocScroll.__init__(self, parent, current_page_idx)
self.panel.Bind(wx.EVT_MOTION, self.on_motion)
self.panel.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
def on_motion(self, event):
cx, cy = event.GetPositionTuple()
mouse_on_link = False
link = self.links
while link:
rect = fitz.Rect(link.rect).transform(self.trans)
if cx >= rect.x0 and cx <= rect.x1 and \
cy >= rect.y0 and cy <= rect.y1:
mouse_on_link = True
break
link = link.next
if mouse_on_link:
self.panel.SetCursor(wx.StockCursor(wx.CURSOR_HAND))
self.link_context = (link.dest.kind, \
link.dest.page, \
link.dest.flags, \
link.dest.lt, \
link.dest.uri)
else:
self.panel.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
self.link_context = None
def on_left_down(self, event):
if not self.link_context is None:
if self.link_context[0] == fitz.LINK_GOTO:
# after change page, link_context becomes None,
# so we need to record the pos
pos = self.link_context[3]
flag = self.link_context[2]
self.parent.change_page(self.link_context[1])
if flag & fitz.LINK_FLAG_T_VALID:
pos = fitz.Point(pos).transform(self.trans)
self.Scroll(-1, (self.height-pos.y)/self.scroll_unit)
elif self.link_context[0] == fitz.LINK_URI:
subprocess.Popen(('xdg-open', self.link_context[4]))
event.Skip()
def set_page_size(self):
self.trans = fitz.Matrix(self.scale, self.scale)
self.rect = fitz.Rect(self.page_rect).transform(self.trans) #page_rect is the unscaled one
self.irect = self.rect.round()
self.width = self.irect.width
self.height = self.irect.height
def do_drawing(self):
self.buffer = wx.BitmapFromBuffer(self.width,
self.height,
self.pix.samples)
dc = wx.BufferedDC(wx.ClientDC(self.panel),
self.buffer)
def set_current_page(self, current_page_idx, draw=True, scroll=None, scale=None):
self.hitbbox = []
if scale:
self.scale = scale
current_page = self.parent.document.loadPage(current_page_idx)
self.page_rect = current_page.bound()
#self.orig_width = self.page_rect.width
self.set_page_size()
self.text_sheet = fitz.TextSheet()
self.text_page = fitz.TextPage(self.page_rect)
self.display_list = fitz.DisplayList(self.page_rect)
current_page.run(fitz.Device(self.display_list), self.trans)
self.links = current_page.loadLinks()
self.link_context = None
self.display_list.run(fitz.Device(self.text_sheet, self.text_page), fitz.Identity, self.rect)
if draw:
self.setup_drawing(scroll=scroll)
def setup_drawing(self, hitbbox=None, scroll=None):
doc_scroll.DocScroll.setup_drawing(self, hitbbox, scroll)
self.pix = fitz.Pixmap(fitz.Colorspace(fitz.CS_RGB), self.irect)
self.pix.clearWith(255);
self.display_list.run(fitz.Device(self.pix, None), fitz.Identity, self.rect)
if hitbbox:
for rect in hitbbox:
self.pix.invertIRect(rect.round())
self.do_drawing()
def new_scale_setup_drawing(self):
try:
#hitbbox = self.hitbbox[self.parent.hit]
self.setup_drawing()
except IndexError:
self.setup_drawing()
def scroll_to_next_found(self, hit):
trans_hitbbox = self.trans.transform_irect(self.hitbbox[hit][0])
self.setup_drawing(self.hitbbox[hit],
(trans_hitbbox.x0/self.scroll_unit,
trans_hitbbox.y0/self.scroll_unit))
def get_hitbbox(self, s):
return self.text_page.search(s, self.parent.main_frame.settings['ic'])
def search_in_current(self, newhit):
old_hitbbox = self.hitbbox[self.parent.hit]
for bbox in old_hitbbox:
self.pix.invert_pixmap(self.trans.transform_irect(bbox))
new_hitbbox = self.hitbbox[newhit]
for bbox in new_hitbbox:
self.pix.invert_pixmap(self.trans.transform_irect(bbox))
self.do_drawing()
self.Scroll(new_hitbbox[0].x0/self.scroll_unit,
new_hitbbox[0].y0/self.scroll_unit)
def on_refresh(self):
self.parent.document = fitz.Document(self.parent.filepath)
self.parent.n_pages = self.parent.document.pageCount
| gpl-3.0 | 3,068,230,262,222,277,600 | 36.9625 | 101 | 0.596806 | false |
fy0/my-leetcode | 1028. Recover a Tree From Preorder Traversal/main.py | 1 | 1267 |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
import re
class Solution:
def recoverFromPreorder(self, S: str) -> TreeNode:
items = list(map(lambda x: [len(x[0]), x[1]], re.findall(r'(-*)(\d+)', S)))
if not items:
return None
if items[0][0] != 0:
raise
nodes = [TreeNode(items[0][1])]
cur_depth = 0
def node_add(cur_node, v):
n = TreeNode(v)
if not cur_node.left:
cur_node.left = n
else:
cur_node.right = n
nodes.append(n)
for d, v in items[1:]:
if d > cur_depth:
# depth increased: add a child node
cur_depth += 1
node_add(nodes[-1], v)
elif d < cur_depth:
# depth decreased: pop back to the target depth, then add a child node
nodes = nodes[:d]
cur_depth = d
node_add(nodes[-1], v)
else:
# same depth: add a sibling node
nodes.pop()
node_add(nodes[-1], v)
return nodes[0]
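# Illustrative usage (not part of the original solution), assuming the
# TreeNode definition above:
#   root = Solution().recoverFromPreorder("1-2--3--4-5--6--7")
#   # root.val == 1, root.left.val == 2, root.right.val == 5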
| apache-2.0 | 4,045,531,576,449,108,000 | 22.66 | 83 | 0.426881 | false |
jwhitlock/kuma | kuma/scrape/tests/test_source.py | 1 | 9151 | # -*- coding: utf-8 -*-
"""Tests for the Source class."""
from __future__ import unicode_literals
import mock
import pytest
from kuma.scrape.sources import Source
from . import mock_requester, mock_storage
class FakeSource(Source):
"""A Fake source for testing shared Source functionality."""
PARAM_NAME = 'name'
OPTIONS = {
'pressed': ('bool', False),
'length': ('int', 0),
'unbounded': ('int_all', 0),
'flavor': ('text', ''),
}
def test_init_param():
"""Omitted Source parameters are initialized to defaults."""
source = FakeSource('param')
assert source.name == 'param'
assert source.length == 0
assert source.pressed is False
assert source.unbounded == 0
assert source.flavor == ''
@pytest.mark.parametrize(
'option,value',
(('pressed', True),
('length', 1),
('unbounded', 'all'),
('flavor', 'curry'),
), ids=('bool', 'int', 'int_all', 'text'))
def test_init_options(option, value):
"""Source parameters are initialized by name."""
source = FakeSource('popcorn', **{option: value})
assert source.name == 'popcorn'
assert getattr(source, option) == value
def test_init_invalid_option():
"""An invalid parameter name raises an exception."""
with pytest.raises(Exception):
FakeSource('param', unknown=1)
def test_merge_none():
"""An empty merge does not change the Source state."""
source = FakeSource('merge')
source.state = source.STATE_PREREQ
assert source.merge_options() == {}
assert source.state == source.STATE_PREREQ
@pytest.mark.parametrize(
'option,lesser_value,greater_value',
(('pressed', False, True),
('length', 1, 2),
('unbounded', 2, 3),
), ids=('bool', 'int', 'int_all'))
def test_merge_less(option, lesser_value, greater_value):
"""A merge to smaller parameters keeps the current values and state."""
source = FakeSource('merge', **{option: greater_value})
source.state = source.STATE_PREREQ
assert source.merge_options(**{option: lesser_value}) == {}
assert getattr(source, option) == greater_value
assert source.state == source.STATE_PREREQ
@pytest.mark.parametrize(
'option,value',
(('pressed', True),
('length', 2),
('unbounded', 1),
('flavor', 'country'),
), ids=('bool', 'int', 'int_all', 'text'))
def test_merge_same(option, value):
"""A merge with the current values keeps the current state."""
source = FakeSource('merge', **{option: value})
source.state = source.STATE_PREREQ
assert source.merge_options(**{option: value}) == {}
assert getattr(source, option) == value
assert source.state == source.STATE_PREREQ
@pytest.mark.parametrize(
'option,lesser_value,greater_value',
(('pressed', False, True),
('length', 1, 2),
('unbounded', 2, 3),
), ids=('bool', 'int', 'int_all'))
def test_merge_upgrade(option, lesser_value, greater_value):
"""An updating merge updates the values and resets the state."""
source = FakeSource('merge', **{option: lesser_value})
source.state = source.STATE_PREREQ
result = source.merge_options(**{option: greater_value})
assert result == {option: greater_value}
assert getattr(source, option) == greater_value
assert source.state == source.STATE_INIT
def test_merge_more_multiple():
"""Multiple parameters can be updated in one merge call."""
source = FakeSource('merge')
res = source.merge_options(
length=1, pressed=True, unbounded=1, flavor='salty')
assert res == {
'length': 1, 'pressed': True, 'unbounded': 1, 'flavor': 'salty'}
def test_merge_int_all():
"""For the 'int_all' parameter type, 'all' is a valid and maximum value."""
source = FakeSource('merge')
assert source.merge_options(unbounded='all') == {'unbounded': 'all'}
assert source.merge_options(unbounded='all') == {}
def test_merge_text():
"""For the 'text' parameter type, any non-empty change is an update."""
source = FakeSource('merge')
assert source.merge_options(flavor='sweet') == {'flavor': 'sweet'}
assert source.merge_options(flavor='sour') == {'flavor': 'sour'}
assert source.merge_options(flavor='sour') == {}
assert source.merge_options(flavor='sweet') == {'flavor': 'sweet'}
assert source.merge_options(flavor='') == {}
def test_current_options_default():
"""current_options returns empty dict for default options."""
source = FakeSource('default')
assert source.current_options() == {}
@pytest.mark.parametrize(
'option,value',
(('pressed', True),
('length', 1),
('unbounded', 'all'),
('flavor', 'curry'),
), ids=('bool', 'int', 'int_all', 'text'))
def test_current_options_nondefault(option, value):
"""current_options returns the non-default options as a dict."""
source = FakeSource('default', **{option: value})
assert source.current_options() == {option: value}
@pytest.mark.parametrize(
'option_type,option,bad_value',
(('bool', 'pressed', 1),
('int', 'length', '0'),
('int_all', 'unbounded', '1'),
('text', 'flavor', 1),
), ids=('bool', 'int', 'int_all', 'text'))
def test_invalid_values(option_type, option, bad_value):
"""Invalid parameter values raise a ValueError."""
with pytest.raises(ValueError) as err:
FakeSource('fails', **{option: bad_value})
assert option_type in str(err.value)
@pytest.mark.parametrize(
"href,decoded", [
(b'binary', u'binary'),
(b'%E7%A7%BB%E8%A1%8C%E4%BA%88%E5%AE%9A', u'移行予定'),
(u'Slug#Anchor_\u2014_With_Dash', u'Slug#Anchor_\u2014_With_Dash'),
])
def test_decode_href(href, decoded):
"""Source.decode_href() turns URL-encoded hrefs into unicode strings."""
source = FakeSource('conversions')
assert decoded == source.decode_href(href)
def test_source_error_str():
"""The Source.Error exception can be turned into a string."""
error1 = Source.SourceError('A simple error')
assert "%s" % error1 == 'A simple error'
error2 = Source.SourceError('A formatted error, like "%s" and %d.',
"a string", 123)
assert "%s" % error2 == 'A formatted error, like "a string" and 123.'
def test_gather_done_is_done():
"""A source that is done can still be gathered."""
source = FakeSource('existing')
source.state = source.STATE_DONE
assert source.gather(mock_requester(), mock_storage()) == []
assert source.state == source.STATE_DONE
assert source.freshness == source.FRESH_UNKNOWN
def test_gather_load_storage_existing():
"""A source that is already in storage loads quickly."""
source = FakeSource('existing')
source.load_and_validate_existing = mock.Mock(
return_value=(True, ['next']))
ret = source.gather(mock_requester(), mock_storage())
assert ret == ['next']
assert source.state == source.STATE_DONE
assert source.freshness == source.FRESH_NO
def test_gather_load_storage_error():
"""A source can raise an error when loading from storage."""
source = FakeSource('existing')
source.load_and_validate_existing = mock.Mock(
side_effect=source.SourceError('Storage complained.'))
ret = source.gather(mock_requester(), mock_storage())
assert ret == []
assert source.state == source.STATE_ERROR
assert source.freshness == source.FRESH_UNKNOWN
def test_gather_load_prereqs_more_needed():
"""A source can request other sources as prerequisites."""
source = FakeSource('needs_prereqs')
data = {'needs': ['bonus']}
source.load_prereqs = mock.Mock(return_value=(False, data))
ret = source.gather(mock_requester(), mock_storage())
assert ret == ['bonus']
assert source.state == source.STATE_PREREQ
assert source.freshness == source.FRESH_UNKNOWN
def test_gather_load_prereqs_error():
"""A source may raise an error when loading prerequisites."""
source = FakeSource('bad_prereqs')
source.load_prereqs = mock.Mock(side_effect=source.SourceError('bad'))
ret = source.gather(mock_requester(), mock_storage())
assert ret == []
assert source.state == source.STATE_ERROR
assert source.freshness == source.FRESH_UNKNOWN
def test_gather_save_data_error():
"""A source can fail when saving the data."""
source = FakeSource('needs_prereqs')
source.load_prereqs = mock.Mock(return_value=(True, {}))
source.save_data = mock.Mock(side_effect=source.SourceError('failed'))
ret = source.gather(mock_requester(), mock_storage())
assert ret == []
assert source.state == source.STATE_ERROR
assert source.freshness == source.FRESH_YES
def test_gather_success_with_more_sources():
"""A source with all prereqs can request further sources."""
source = FakeSource('needs_prereqs')
source.load_prereqs = mock.Mock(return_value=(True, {}))
source.save_data = mock.Mock(return_value=['bonus'])
ret = source.gather(mock_requester(), mock_storage())
assert ret == ['bonus']
assert source.state == source.STATE_DONE
assert source.freshness == source.FRESH_YES
| mpl-2.0 | 7,577,405,861,420,672,000 | 33.896947 | 79 | 0.643224 | false |
TeamPurple/Cyber | yowsup_dev/src/whatspy_flask.py | 1 | 2908 | import sys, os, time, shutil
from Yowsup.connectionmanager import YowsupConnectionManager
from Yowsup.Common.debugger import Debugger
USERNAME = '972583340860'
PASSWORD = 'jag6FSF6MicZmp9M8lrsSqoXYo8='.decode('base64')
phase = None
cm = None
signals_interface = None
methods_interface = None
# Utils
def phone_number2jid(phone_number):
return phone_number + '@s.whatsapp.net'
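# Illustrative example (not in the original source):
#   phone_number2jid('15551234567') -> '[email protected]'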
# Login
def cb_auth_success(username):
global phase
print 'Authed %s' % username
methods_interface.call('ready')
phase = True
def cb_auth_fail(username, reason):
global phase
print 'Auth Fail!', username, reason
phase = False
def login():
methods_interface.call('auth_login', (USERNAME, PASSWORD))
# Presence
def cb_presence_updated_once(jid, last_seen):
''' TODO: save the time to something and then use the web app to load it'''
global time_got
print 'Hello'
print 'Last seen @', time.ctime(time.time() - last_seen)
time_got = True
# Contacts
def cb_contact_gotProfilePicture_once(jid, picture_id, image_path):
''' for eric to use for web app
TODO: modify the path so that it goes to where you want
'''
global photo_got
phone_number = jid.split('@')[0]
print 'Got', phone_number
shutil.copyfile(image_path, os.path.join(args.t, phone_number + '.jpg'))
photo_got = True
# Misc
def cb_disconnected(reason):
print 'Disconnected because %s' % reason
sys.exit(0)
# Main
def setup():
global cm, signals_interface, methods_interface
Debugger.enabled = False
cm = YowsupConnectionManager()
cm.setAutoPong(True)
signals_interface = cm.getSignalsInterface()
methods_interface = cm.getMethodsInterface()
signals_interface.registerListener('auth_success', cb_auth_success)
signals_interface.registerListener('auth_fail', cb_auth_fail)
signals_interface.registerListener('disconnected', cb_disconnected)
def get_photo_time(phone_number):
''' phone number includes the country code
'''
global photo_got, time_got
setup()
login()
photo_got = False
time_got = False
while phase is None:
time.sleep(0.5)
signals_interface.registerListener('contact_gotProfilePicture', cb_contact_gotProfilePicture_once)
signals_interface.registerListener('presence_updated', cb_presence_updated_once)
jid = phone_number2jid(phone_number)
methods_interface.call('presence_request', (jid,))
methods_interface.call('contact_getProfilePicture', (jid,))
timeout = 0
while not (photo_got and time_got) and timeout < 1:
# TODO: Time out the request for both photo and time depending on what's available
print photo_got, time_got
time.sleep(0.25)
timeout += 0.25
# methods_interface.call('disconnect', ('closed!',))
get_photo_time('16094755004')
get_photo_time('16094755004')
| gpl-3.0 | 2,017,714,758,593,760,500 | 26.17757 | 102 | 0.686039 | false |
RedhawkSDR/integration-gnuhawk | components/regenerate_bb/tests/test_regenerate_bb.py | 1 | 4069 | #!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify is under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
"""Test for all component implementations in regenerate_bb"""
def testScaBasicBehavior(self):
#######################################################################
# Launch the component with the default execparams
execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
self.launch(execparams)
#######################################################################
# Verify the basic state of the component
self.assertNotEqual(self.comp, None)
self.assertEqual(self.comp.ref._non_existent(), False)
self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
#######################################################################
# Validate that query returns all expected parameters
# Query of '[]' should return the following set of properties
expectedProps = []
expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
props = self.comp.query([])
props = dict((x.id, any.from_any(x.value)) for x in props)
# Query may return more than expected, but not less
for expectedProp in expectedProps:
            self.assertIn(expectedProp.id, props)
#######################################################################
# Verify that all expected ports are available
for port in self.scd.get_componentfeatures().get_ports().get_uses():
port_obj = self.comp.getPort(str(port.get_usesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
for port in self.scd.get_componentfeatures().get_ports().get_provides():
port_obj = self.comp.getPort(str(port.get_providesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a(port.get_repid()), True)
#######################################################################
# Make sure start and stop can be called without throwing exceptions
self.comp.start()
self.comp.stop()
#######################################################################
# Simulate regular component shutdown
self.comp.releaseObject()
# TODO Add additional tests here
#
# See:
# ossie.utils.bulkio.bulkio_helpers,
# ossie.utils.bluefile.bluefile_helpers
# for modules that will assist with testing components with BULKIO ports
if __name__ == "__main__":
ossie.utils.testing.main("../regenerate_bb.spd.xml") # By default tests all implementations
| gpl-3.0 | 4,217,778,358,626,160,600 | 46.870588 | 133 | 0.592529 | false |
USGSDenverPychron/pychron | pychron/headless_config_loadable.py | 1 | 1222 | #!/usr/bin/python
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.base_config_loadable import BaseConfigLoadable
from pychron.headless_loggable import HeadlessLoggable
class HeadlessConfigLoadable(BaseConfigLoadable, HeadlessLoggable):
"""
"""
# ============= EOF =============================================
| apache-2.0 | 4,949,899,419,939,401,000 | 38.419355 | 81 | 0.555646 | false |
blaze/dask | dask/array/slicing.py | 1 | 40834 | from itertools import product
import math
from numbers import Integral, Number
from operator import add, getitem, itemgetter
import warnings
import functools
import bisect
import numpy as np
from tlz import memoize, merge, pluck, concat, accumulate
from .. import core
from .. import config
from .. import utils
from ..highlevelgraph import HighLevelGraph
from ..base import tokenize, is_dask_collection
colon = slice(None, None, None)
def _sanitize_index_element(ind):
"""Sanitize a one-element index."""
if isinstance(ind, Number):
ind2 = int(ind)
if ind2 != ind:
raise IndexError("Bad index. Must be integer-like: %s" % ind)
else:
return ind2
elif ind is None:
return None
else:
raise TypeError("Invalid index type", type(ind), ind)
def sanitize_index(ind):
"""Sanitize the elements for indexing along one axis
>>> sanitize_index([2, 3, 5])
array([2, 3, 5])
>>> sanitize_index([True, False, True, False])
array([0, 2])
>>> sanitize_index(np.array([1, 2, 3]))
array([1, 2, 3])
>>> sanitize_index(np.array([False, True, True]))
array([1, 2])
>>> type(sanitize_index(np.int32(0)))
<class 'int'>
>>> sanitize_index(1.0)
1
>>> sanitize_index(0.5)
Traceback (most recent call last):
...
IndexError: Bad index. Must be integer-like: 0.5
"""
if ind is None:
return None
elif isinstance(ind, slice):
return slice(
_sanitize_index_element(ind.start),
_sanitize_index_element(ind.stop),
_sanitize_index_element(ind.step),
)
elif isinstance(ind, Number):
return _sanitize_index_element(ind)
elif is_dask_collection(ind):
return ind
index_array = np.asanyarray(ind)
if index_array.dtype == bool:
nonzero = np.nonzero(index_array)
if len(nonzero) == 1:
# If a 1-element tuple, unwrap the element
nonzero = nonzero[0]
return np.asanyarray(nonzero)
elif np.issubdtype(index_array.dtype, np.integer):
return index_array
elif np.issubdtype(index_array.dtype, np.floating):
int_index = index_array.astype(np.intp)
if np.allclose(index_array, int_index):
return int_index
else:
check_int = np.isclose(index_array, int_index)
first_err = index_array.ravel()[np.flatnonzero(~check_int)[0]]
raise IndexError("Bad index. Must be integer-like: %s" % first_err)
else:
raise TypeError("Invalid index type", type(ind), ind)
def slice_array(out_name, in_name, blockdims, index, itemsize):
"""
Master function for array slicing
This function makes a new dask that slices blocks along every
dimension and aggregates (via cartesian product) each dimension's
slices so that the resulting block slices give the same results
as the original slice on the original structure
Index must be a tuple. It may contain the following types
int, slice, list (at most one list), None
Parameters
----------
in_name - string
This is the dask variable name that will be used as input
out_name - string
This is the dask variable output name
    blockdims - iterable of tuples of chunk sizes, one tuple per dimension
index - iterable of integers, slices, lists, or None
itemsize : int
The number of bytes required for each element of the array.
Returns
-------
Dict where the keys are tuples of
(out_name, dim_index[, dim_index[, ...]])
and the values are
(function, (in_name, dim_index, dim_index, ...),
(slice(...), [slice()[,...]])
Also new blockdims with shapes of each block
((10, 10, 10, 10), (20, 20))
Examples
--------
>>> dsk, blockdims = slice_array('y', 'x', [(20, 20, 20, 20, 20)],
... (slice(10, 35),)) # doctest: +SKIP
>>> dsk # doctest: +SKIP
{('y', 0): (getitem, ('x', 0), (slice(10, 20),)),
('y', 1): (getitem, ('x', 1), (slice(0, 15),))}
>>> blockdims # doctest: +SKIP
((10, 15),)
See Also
--------
This function works by successively unwrapping cases and passing down
through a sequence of functions.
slice_with_newaxis : handle None/newaxis case
slice_wrap_lists : handle fancy indexing with lists
slice_slices_and_integers : handle everything else
"""
blockdims = tuple(map(tuple, blockdims))
# x[:, :, :] - Punt and return old value
if all(
isinstance(index, slice) and index == slice(None, None, None) for index in index
):
suffixes = product(*[range(len(bd)) for bd in blockdims])
dsk = dict(((out_name,) + s, (in_name,) + s) for s in suffixes)
return dsk, blockdims
# Add in missing colons at the end as needed. x[5] -> x[5, :, :]
not_none_count = sum(i is not None for i in index)
missing = len(blockdims) - not_none_count
index += (slice(None, None, None),) * missing
# Pass down to next function
dsk_out, bd_out = slice_with_newaxes(out_name, in_name, blockdims, index, itemsize)
bd_out = tuple(map(tuple, bd_out))
return dsk_out, bd_out
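# --- Illustrative sketch (added; not part of the upstream module) ---
# Verifies the +SKIP doctest above with concrete values: a 25-element slice
# over 20-element chunks touches only the first two blocks.
def _demo_slice_array():
    dsk, bd = slice_array("y", "x", [(20, 20, 20, 20, 20)], (slice(10, 35),), 8)
    assert bd == ((10, 15),)
    assert dsk[("y", 0)] == (getitem, ("x", 0), (slice(10, 20, 1),))
    assert dsk[("y", 1)] == (getitem, ("x", 1), (slice(0, 15, 1),))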
def slice_with_newaxes(out_name, in_name, blockdims, index, itemsize):
"""
Handle indexing with Nones
Strips out Nones then hands off to slice_wrap_lists
"""
# Strip Nones from index
index2 = tuple([ind for ind in index if ind is not None])
where_none = [i for i, ind in enumerate(index) if ind is None]
where_none_orig = list(where_none)
for i, x in enumerate(where_none):
n = sum(isinstance(ind, Integral) for ind in index[:x])
if n:
where_none[i] -= n
# Pass down and do work
dsk, blockdims2 = slice_wrap_lists(out_name, in_name, blockdims, index2, itemsize)
if where_none:
expand = expander(where_none)
expand_orig = expander(where_none_orig)
# Insert ",0" into the key: ('x', 2, 3) -> ('x', 0, 2, 0, 3)
dsk2 = {
(out_name,) + expand(k[1:], 0): (v[:2] + (expand_orig(v[2], None),))
for k, v in dsk.items()
if k[0] == out_name
}
# Add back intermediate parts of the dask that weren't the output
dsk3 = merge(dsk2, {k: v for k, v in dsk.items() if k[0] != out_name})
# Insert (1,) into blockdims: ((2, 2), (3, 3)) -> ((2, 2), (1,), (3, 3))
blockdims3 = expand(blockdims2, (1,))
return dsk3, blockdims3
else:
return dsk, blockdims2
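# --- Illustrative sketch (added; not part of the upstream module) ---
# A None in the index inserts a length-1 dimension: both the keys and the
# chunks gain an extra axis, and the None is threaded into each getitem task.
def _demo_slice_with_newaxes():
    dsk, bd = slice_array("y", "x", [(5, 5)], (None, slice(None, None, None)), 8)
    assert bd == ((1,), (5, 5))
    assert dsk[("y", 0, 0)] == (getitem, ("x", 0), (None, slice(None, None, None)))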
def slice_wrap_lists(out_name, in_name, blockdims, index, itemsize):
"""
Fancy indexing along blocked array dasks
Handles index of type list. Calls slice_slices_and_integers for the rest
See Also
--------
take : handle slicing with lists ("fancy" indexing)
slice_slices_and_integers : handle slicing with slices and integers
"""
assert all(isinstance(i, (slice, list, Integral, np.ndarray)) for i in index)
if not len(blockdims) == len(index):
raise IndexError("Too many indices for array")
# Do we have more than one list in the index?
where_list = [
i for i, ind in enumerate(index) if isinstance(ind, np.ndarray) and ind.ndim > 0
]
if len(where_list) > 1:
raise NotImplementedError("Don't yet support nd fancy indexing")
# Is the single list an empty list? In this case just treat it as a zero
# length slice
if where_list and not index[where_list[0]].size:
index = list(index)
index[where_list.pop()] = slice(0, 0, 1)
index = tuple(index)
# No lists, hooray! just use slice_slices_and_integers
if not where_list:
return slice_slices_and_integers(out_name, in_name, blockdims, index)
# Replace all lists with full slices [3, 1, 0] -> slice(None, None, None)
index_without_list = tuple(
slice(None, None, None) if isinstance(i, np.ndarray) else i for i in index
)
# lists and full slices. Just use take
if all(isinstance(i, np.ndarray) or i == slice(None, None, None) for i in index):
axis = where_list[0]
blockdims2, dsk3 = take(
out_name, in_name, blockdims, index[where_list[0]], itemsize, axis=axis
)
# Mixed case. Both slices/integers and lists. slice/integer then take
else:
# Do first pass without lists
tmp = "slice-" + tokenize((out_name, in_name, blockdims, index))
dsk, blockdims2 = slice_slices_and_integers(
tmp, in_name, blockdims, index_without_list
)
# After collapsing some axes due to int indices, adjust axis parameter
axis = where_list[0]
axis2 = axis - sum(
1 for i, ind in enumerate(index) if i < axis and isinstance(ind, Integral)
)
        # Do work (use the caller's itemsize; the original passed a hardcoded
        # 8 here, which appears to be a stale placeholder for itemsize)
        blockdims2, dsk2 = take(out_name, tmp, blockdims2, index[axis], itemsize, axis=axis2)
dsk3 = merge(dsk, dsk2)
return dsk3, blockdims2
def slice_slices_and_integers(out_name, in_name, blockdims, index):
"""
Dask array indexing with slices and integers
See Also
--------
_slice_1d
"""
from .core import unknown_chunk_message
shape = tuple(cached_cumsum(dim, initial_zero=True)[-1] for dim in blockdims)
for dim, ind in zip(shape, index):
if np.isnan(dim) and ind != slice(None, None, None):
raise ValueError(
"Arrays chunk sizes are unknown: %s%s" % (shape, unknown_chunk_message)
)
assert all(isinstance(ind, (slice, Integral)) for ind in index)
assert len(index) == len(blockdims)
# Get a list (for each dimension) of dicts{blocknum: slice()}
block_slices = list(map(_slice_1d, shape, blockdims, index))
sorted_block_slices = [sorted(i.items()) for i in block_slices]
# (in_name, 1, 1, 2), (in_name, 1, 1, 4), (in_name, 2, 1, 2), ...
in_names = list(product([in_name], *[pluck(0, s) for s in sorted_block_slices]))
# (out_name, 0, 0, 0), (out_name, 0, 0, 1), (out_name, 0, 1, 0), ...
out_names = list(
product(
[out_name],
*[
range(len(d))[::-1] if i.step and i.step < 0 else range(len(d))
for d, i in zip(block_slices, index)
if not isinstance(i, Integral)
]
)
)
all_slices = list(product(*[pluck(1, s) for s in sorted_block_slices]))
dsk_out = {
out_name: (getitem, in_name, slices)
for out_name, in_name, slices in zip(out_names, in_names, all_slices)
}
new_blockdims = [
new_blockdim(d, db, i)
for d, i, db in zip(shape, index, blockdims)
if not isinstance(i, Integral)
]
return dsk_out, new_blockdims
def _slice_1d(dim_shape, lengths, index):
"""Returns a dict of {blocknum: slice}
This function figures out where each slice should start in each
block for a single dimension. If the slice won't return any elements
in the block, that block will not be in the output.
Parameters
----------
dim_shape - the number of elements in this dimension.
This should be a positive, non-zero integer
    lengths - the sizes of the blocks along this dimension
      This should be a sequence of positive, non-zero integers
index - a description of the elements in this dimension that we want
This might be an integer, a slice(), or an Ellipsis
Returns
-------
dictionary where the keys are the integer index of the blocks that
should be sliced and the values are the slices
Examples
--------
Trivial slicing
>>> _slice_1d(100, [60, 40], slice(None, None, None))
{0: slice(None, None, None), 1: slice(None, None, None)}
100 length array cut into length 20 pieces, slice 0:35
>>> _slice_1d(100, [20, 20, 20, 20, 20], slice(0, 35))
{0: slice(None, None, None), 1: slice(0, 15, 1)}
Support irregular blocks and various slices
>>> _slice_1d(100, [20, 10, 10, 10, 25, 25], slice(10, 35))
{0: slice(10, 20, 1), 1: slice(None, None, None), 2: slice(0, 5, 1)}
Support step sizes
>>> _slice_1d(100, [15, 14, 13], slice(10, 41, 3))
{0: slice(10, 15, 3), 1: slice(1, 14, 3), 2: slice(2, 12, 3)}
>>> _slice_1d(100, [20, 20, 20, 20, 20], slice(0, 100, 40)) # step > blocksize
{0: slice(0, 20, 40), 2: slice(0, 20, 40), 4: slice(0, 20, 40)}
Also support indexing single elements
>>> _slice_1d(100, [20, 20, 20, 20, 20], 25)
{1: 5}
And negative slicing
>>> _slice_1d(100, [20, 20, 20, 20, 20], slice(100, 0, -3)) # doctest: +NORMALIZE_WHITESPACE
{4: slice(-1, -21, -3),
3: slice(-2, -21, -3),
2: slice(-3, -21, -3),
1: slice(-1, -21, -3),
0: slice(-2, -20, -3)}
>>> _slice_1d(100, [20, 20, 20, 20, 20], slice(100, 12, -3)) # doctest: +NORMALIZE_WHITESPACE
{4: slice(-1, -21, -3),
3: slice(-2, -21, -3),
2: slice(-3, -21, -3),
1: slice(-1, -21, -3),
0: slice(-2, -8, -3)}
>>> _slice_1d(100, [20, 20, 20, 20, 20], slice(100, -12, -3))
{4: slice(-1, -12, -3)}
"""
chunk_boundaries = cached_cumsum(lengths)
if isinstance(index, Integral):
# use right-side search to be consistent with previous result
i = bisect.bisect_right(chunk_boundaries, index)
if i > 0:
# the very first chunk has no relative shift
ind = index - chunk_boundaries[i - 1]
else:
ind = index
return {int(i): int(ind)}
assert isinstance(index, slice)
if index == colon:
return {k: colon for k in range(len(lengths))}
step = index.step or 1
if step > 0:
start = index.start or 0
stop = index.stop if index.stop is not None else dim_shape
else:
start = index.start if index.start is not None else dim_shape - 1
start = dim_shape - 1 if start >= dim_shape else start
stop = -(dim_shape + 1) if index.stop is None else index.stop
# posify start and stop
if start < 0:
start += dim_shape
if stop < 0:
stop += dim_shape
d = dict()
if step > 0:
istart = bisect.bisect_right(chunk_boundaries, start)
istop = bisect.bisect_left(chunk_boundaries, stop)
# the bound is not exactly tight; make it tighter?
istop = min(istop + 1, len(lengths))
# jump directly to istart
if istart > 0:
start = start - chunk_boundaries[istart - 1]
stop = stop - chunk_boundaries[istart - 1]
for i in range(istart, istop):
length = lengths[i]
if start < length and stop > 0:
d[i] = slice(start, min(stop, length), step)
start = (start - length) % step
else:
start = start - length
stop -= length
else:
rstart = start # running start
istart = bisect.bisect_left(chunk_boundaries, start)
istop = bisect.bisect_right(chunk_boundaries, stop)
# the bound is not exactly tight; make it tighter?
istart = min(istart + 1, len(chunk_boundaries) - 1)
istop = max(istop - 1, -1)
for i in range(istart, istop, -1):
chunk_stop = chunk_boundaries[i]
# create a chunk start and stop
if i == 0:
chunk_start = 0
else:
chunk_start = chunk_boundaries[i - 1]
# if our slice is in this chunk
if (chunk_start <= rstart < chunk_stop) and (rstart > stop):
d[i] = slice(
rstart - chunk_stop,
max(chunk_start - chunk_stop - 1, stop - chunk_stop),
step,
)
# compute the next running start point,
offset = (rstart - (chunk_start - 1)) % step
rstart = chunk_start + offset - 1
# replace 0:20:1 with : if appropriate
for k, v in d.items():
if v == slice(0, lengths[k], 1):
d[k] = slice(None, None, None)
if not d: # special case x[:0]
d[0] = slice(0, 0, 1)
return d
def partition_by_size(sizes, seq):
"""
>>> partition_by_size([10, 20, 10], [1, 5, 9, 12, 29, 35])
[array([1, 5, 9]), array([ 2, 19]), array([5])]
"""
seq = np.asanyarray(seq)
left = np.empty(len(sizes) + 1, dtype=int)
left[0] = 0
right = np.cumsum(sizes, out=left[1:])
locations = np.empty(len(sizes) + 1, dtype=int)
locations[0] = 0
locations[1:] = np.searchsorted(seq, right)
return [(seq[j:k] - l) for j, k, l in zip(locations[:-1], locations[1:], left)]
def issorted(seq):
"""Is sequence sorted?
>>> issorted([1, 2, 3])
True
>>> issorted([3, 1, 2])
False
"""
if len(seq) == 0:
return True
return np.all(seq[:-1] <= seq[1:])
def slicing_plan(chunks, index):
"""Construct a plan to slice chunks with the given index
Parameters
----------
chunks : Tuple[int]
One dimensions worth of chunking information
index : np.ndarray[int]
The index passed to slice on that dimension
Returns
-------
out : List[Tuple[int, np.ndarray]]
A list of chunk/sub-index pairs corresponding to each output chunk
"""
index = np.asanyarray(index)
cum_chunks = cached_cumsum(chunks)
chunk_locations = np.searchsorted(cum_chunks, index, side="right")
where = np.where(np.diff(chunk_locations))[0] + 1
where = np.concatenate([[0], where, [len(chunk_locations)]])
out = []
for i in range(len(where) - 1):
sub_index = index[where[i] : where[i + 1]]
chunk = chunk_locations[where[i]]
if chunk > 0:
sub_index = sub_index - cum_chunks[chunk - 1]
out.append((chunk, sub_index))
return out
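# --- Illustrative check (added; not part of the upstream module) ---
# Out-of-order indices are grouped by the chunk they fall in, and each
# sub-index is shifted to be relative to its chunk's start.
def _demo_slicing_plan():
    plan = slicing_plan((20, 20, 20), np.array([5, 1, 47]))
    assert [(int(c), list(s)) for c, s in plan] == [(0, [5, 1]), (2, [7])]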
def take(outname, inname, chunks, index, itemsize, axis=0):
"""Index array with an iterable of index
Handles a single index by a single list
Mimics ``np.take``
>>> chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [5, 1, 47, 3], 8, axis=0)
>>> chunks
((2, 1, 1),)
>>> dsk # doctest: +SKIP
{('y', 0): (getitem, (np.concatenate, [(getitem, ('x', 0), ([1, 3, 5],)),
(getitem, ('x', 2), ([7],))],
0),
(2, 0, 4, 1))}
When list is sorted we retain original block structure
>>> chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [1, 3, 5, 47], 8, axis=0)
>>> chunks
((3, 1),)
>>> dsk # doctest: +SKIP
{('y', 0): (getitem, ('x', 0), ([1, 3, 5],)),
('y', 2): (getitem, ('x', 2), ([7],))}
    When any indexed blocks would otherwise grow larger than the
    ``array.chunk-size`` config value, we might split them, depending on
    the value of the ``array.slicing.split-large-chunks`` option.
>>> import dask
>>> with dask.config.set({"array.slicing.split-large-chunks": True}):
... chunks, dsk = take('y', 'x', [(1, 1, 1), (1000, 1000), (1000, 1000)],
... [0] + [1] * 6 + [2], axis=0, itemsize=8)
>>> chunks
((1, 3, 3, 1), (1000, 1000), (1000, 1000))
"""
from .core import PerformanceWarning
plan = slicing_plan(chunks[axis], index)
if len(plan) >= len(chunks[axis]) * 10:
factor = math.ceil(len(plan) / len(chunks[axis]))
warnings.warn(
"Slicing with an out-of-order index is generating %d "
"times more chunks" % factor,
PerformanceWarning,
stacklevel=6,
)
index = np.asarray(index)
# Check for chunks from the plan that would violate the user's
# configured chunk size.
nbytes = utils.parse_bytes(config.get("array.chunk-size"))
other_chunks = [chunks[i] for i in range(len(chunks)) if i != axis]
other_numel = np.prod([sum(x) for x in other_chunks])
if math.isnan(other_numel):
warnsize = maxsize = math.inf
else:
maxsize = math.ceil(nbytes / (other_numel * itemsize))
warnsize = maxsize * 5
split = config.get("array.slicing.split-large-chunks", None)
# Warn only when the default is not specified.
warned = split is not None
for _, index_list in plan:
if not warned and len(index_list) > warnsize:
msg = (
"Slicing is producing a large chunk. To accept the large\n"
"chunk and silence this warning, set the option\n"
" >>> with dask.config.set(**{'array.slicing.split_large_chunks': False}):\n"
" ... array[indexer]\n\n"
"To avoid creating the large chunks, set the option\n"
" >>> with dask.config.set(**{'array.slicing.split_large_chunks': True}):\n"
" ... array[indexer]"
)
warnings.warn(msg, PerformanceWarning, stacklevel=6)
warned = True
where_index = []
index_lists = []
for where_idx, index_list in plan:
index_length = len(index_list)
if split and index_length > maxsize:
index_sublist = np.array_split(
index_list, math.ceil(index_length / maxsize)
)
index_lists.extend(index_sublist)
where_index.extend([where_idx] * len(index_sublist))
else:
index_lists.append(np.array(index_list))
where_index.append(where_idx)
dims = [range(len(bd)) for bd in chunks]
indims = list(dims)
indims[axis] = list(range(len(where_index)))
keys = list(product([outname], *indims))
outdims = list(dims)
outdims[axis] = where_index
slices = [[colon] * len(bd) for bd in chunks]
slices[axis] = index_lists
slices = list(product(*slices))
inkeys = list(product([inname], *outdims))
values = [(getitem, inkey, slc) for inkey, slc in zip(inkeys, slices)]
chunks2 = list(chunks)
chunks2[axis] = tuple(map(len, index_lists))
dsk = dict(zip(keys, values))
return tuple(chunks2), dsk
def posify_index(shape, ind):
"""Flip negative indices around to positive ones
>>> posify_index(10, 3)
3
>>> posify_index(10, -3)
7
>>> posify_index(10, [3, -3])
array([3, 7])
>>> posify_index((10, 20), (3, -3))
(3, 17)
>>> posify_index((10, 20), (3, [3, 4, -3])) # doctest: +NORMALIZE_WHITESPACE
(3, array([ 3, 4, 17]))
"""
if isinstance(ind, tuple):
return tuple(map(posify_index, shape, ind))
if isinstance(ind, Integral):
if ind < 0 and not math.isnan(shape):
return ind + shape
else:
return ind
if isinstance(ind, (np.ndarray, list)) and not math.isnan(shape):
ind = np.asanyarray(ind)
return np.where(ind < 0, ind + shape, ind)
return ind
@memoize
def _expander(where):
if not where:
def expand(seq, val):
return seq
return expand
else:
decl = """def expand(seq, val):
return ({left}) + tuple({right})
"""
left = []
j = 0
for i in range(max(where) + 1):
if i in where:
left.append("val, ")
else:
left.append("seq[%d], " % j)
j += 1
right = "seq[%d:]" % j
left = "".join(left)
decl = decl.format(**locals())
ns = {}
exec(compile(decl, "<dynamic>", "exec"), ns, ns)
return ns["expand"]
def expander(where):
"""Create a function to insert value at many locations in sequence.
>>> expander([0, 2])(['a', 'b', 'c'], 'z')
('z', 'a', 'z', 'b', 'c')
"""
return _expander(tuple(where))
def new_blockdim(dim_shape, lengths, index):
"""
>>> new_blockdim(100, [20, 10, 20, 10, 40], slice(0, 90, 2))
[10, 5, 10, 5, 15]
>>> new_blockdim(100, [20, 10, 20, 10, 40], [5, 1, 30, 22])
[4]
>>> new_blockdim(100, [20, 10, 20, 10, 40], slice(90, 10, -2))
[16, 5, 10, 5, 4]
"""
if index == slice(None, None, None):
return lengths
if isinstance(index, list):
return [len(index)]
assert not isinstance(index, Integral)
pairs = sorted(_slice_1d(dim_shape, lengths, index).items(), key=itemgetter(0))
slices = [
slice(0, lengths[i], 1) if slc == slice(None, None, None) else slc
for i, slc in pairs
]
if isinstance(index, slice) and index.step and index.step < 0:
slices = slices[::-1]
return [int(math.ceil((1.0 * slc.stop - slc.start) / slc.step)) for slc in slices]
def replace_ellipsis(n, index):
"""Replace ... with slices, :, : ,:
>>> replace_ellipsis(4, (3, Ellipsis, 2))
(3, slice(None, None, None), slice(None, None, None), 2)
>>> replace_ellipsis(2, (Ellipsis, None))
(slice(None, None, None), slice(None, None, None), None)
"""
    # Careful about using the `in` operator or .index() here, because index may contain arrays
isellipsis = [i for i, ind in enumerate(index) if ind is Ellipsis]
if not isellipsis:
return index
else:
loc = isellipsis[0]
extra_dimensions = n - (len(index) - sum(i is None for i in index) - 1)
return (
index[:loc] + (slice(None, None, None),) * extra_dimensions + index[loc + 1 :]
)
def normalize_slice(idx, dim):
"""Normalize slices to canonical form
Parameters
----------
idx: slice or other index
dim: dimension length
Examples
--------
>>> normalize_slice(slice(0, 10, 1), 10)
slice(None, None, None)
"""
if isinstance(idx, slice):
if math.isnan(dim):
return idx
start, stop, step = idx.indices(dim)
if step > 0:
if start == 0:
start = None
if stop >= dim:
stop = None
if step == 1:
step = None
if stop is not None and start is not None and stop < start:
stop = start
elif step < 0:
if start >= dim - 1:
start = None
if stop < 0:
stop = None
return slice(start, stop, step)
return idx
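# --- Illustrative checks (added; not part of the upstream module) ---
# Redundant bounds collapse to None, and an empty forward slice is
# normalized to a zero-length slice anchored at its start.
def _demo_normalize_slice():
    assert normalize_slice(slice(9, None, -1), 10) == slice(None, None, -1)
    assert normalize_slice(slice(3, 1, 1), 10) == slice(3, 3, None)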
def normalize_index(idx, shape):
"""Normalize slicing indexes
1. Replaces ellipses with many full slices
2. Adds full slices to end of index
3. Checks bounding conditions
4. Replace multidimensional numpy arrays with dask arrays
5. Replaces numpy arrays with lists
6. Posify's integers and lists
7. Normalizes slices to canonical form
Examples
--------
>>> normalize_index(1, (10,))
(1,)
>>> normalize_index(-1, (10,))
(9,)
>>> normalize_index([-1], (10,))
(array([9]),)
>>> normalize_index(slice(-3, 10, 1), (10,))
(slice(7, None, None),)
>>> normalize_index((Ellipsis, None), (10,))
(slice(None, None, None), None)
>>> normalize_index(np.array([[True, False], [False, True], [True, True]]), (3, 2))
(dask.array<array, shape=(3, 2), dtype=bool, chunksize=(3, 2), chunktype=numpy.ndarray>,)
"""
from .core import from_array
if not isinstance(idx, tuple):
idx = (idx,)
# if a > 1D numpy.array is provided, cast it to a dask array
if len(idx) > 0 and len(shape) > 1:
i = idx[0]
if isinstance(i, np.ndarray) and i.shape == shape:
idx = (from_array(i), *idx[1:])
idx = replace_ellipsis(len(shape), idx)
n_sliced_dims = 0
for i in idx:
if hasattr(i, "ndim") and i.ndim >= 1:
n_sliced_dims += i.ndim
elif i is None:
continue
else:
n_sliced_dims += 1
idx = idx + (slice(None),) * (len(shape) - n_sliced_dims)
if len([i for i in idx if i is not None]) > len(shape):
raise IndexError("Too many indices for array")
none_shape = []
i = 0
for ind in idx:
if ind is not None:
none_shape.append(shape[i])
i += 1
else:
none_shape.append(None)
for i, d in zip(idx, none_shape):
if d is not None:
check_index(i, d)
idx = tuple(map(sanitize_index, idx))
idx = tuple(map(normalize_slice, idx, none_shape))
idx = posify_index(none_shape, idx)
return idx
def check_index(ind, dimension):
"""Check validity of index for a given dimension
Examples
--------
>>> check_index(3, 5)
>>> check_index(5, 5)
Traceback (most recent call last):
...
IndexError: Index is not smaller than dimension 5 >= 5
>>> check_index(6, 5)
Traceback (most recent call last):
...
IndexError: Index is not smaller than dimension 6 >= 5
>>> check_index(-1, 5)
>>> check_index(-6, 5)
Traceback (most recent call last):
...
IndexError: Negative index is not greater than negative dimension -6 <= -5
>>> check_index([1, 2], 5)
>>> check_index([6, 3], 5)
Traceback (most recent call last):
...
IndexError: Index out of bounds 5
>>> check_index(slice(0, 3), 5)
>>> check_index([True], 1)
>>> check_index([True, True], 3)
Traceback (most recent call last):
...
IndexError: Boolean array length 2 doesn't equal dimension 3
>>> check_index([True, True, True], 1)
Traceback (most recent call last):
...
IndexError: Boolean array length 3 doesn't equal dimension 1
"""
# unknown dimension, assumed to be in bounds
if np.isnan(dimension):
return
elif isinstance(ind, (list, np.ndarray)):
x = np.asanyarray(ind)
if x.dtype == bool:
if x.size != dimension:
raise IndexError(
"Boolean array length %s doesn't equal dimension %s"
% (x.size, dimension)
)
elif (x >= dimension).any() or (x < -dimension).any():
raise IndexError("Index out of bounds %s" % dimension)
elif isinstance(ind, slice):
return
elif is_dask_collection(ind):
return
elif ind is None:
return
elif ind >= dimension:
raise IndexError(
"Index is not smaller than dimension %d >= %d" % (ind, dimension)
)
elif ind < -dimension:
msg = "Negative index is not greater than negative dimension %d <= -%d"
raise IndexError(msg % (ind, dimension))
def slice_with_int_dask_array(x, index):
"""Slice x with at most one 1D dask arrays of ints.
This is a helper function of :meth:`Array.__getitem__`.
Parameters
----------
x: Array
index: tuple with as many elements as x.ndim, among which there are
one or more Array's with dtype=int
Returns
-------
tuple of (sliced x, new index)
where the new index is the same as the input, but with slice(None)
replaced to the original slicer where a 1D filter has been applied and
one less element where a zero-dimensional filter has been applied.
"""
from .core import Array
assert len(index) == x.ndim
fancy_indexes = [
isinstance(idx, (tuple, list))
or (isinstance(idx, (np.ndarray, Array)) and idx.ndim > 0)
for idx in index
]
if sum(fancy_indexes) > 1:
raise NotImplementedError("Don't yet support nd fancy indexing")
out_index = []
dropped_axis_cnt = 0
for in_axis, idx in enumerate(index):
out_axis = in_axis - dropped_axis_cnt
if isinstance(idx, Array) and idx.dtype.kind in "iu":
if idx.ndim == 0:
idx = idx[np.newaxis]
x = slice_with_int_dask_array_on_axis(x, idx, out_axis)
x = x[tuple(0 if i == out_axis else slice(None) for i in range(x.ndim))]
dropped_axis_cnt += 1
elif idx.ndim == 1:
x = slice_with_int_dask_array_on_axis(x, idx, out_axis)
out_index.append(slice(None))
else:
raise NotImplementedError(
"Slicing with dask.array of ints only permitted when "
"the indexer has zero or one dimensions"
)
else:
out_index.append(idx)
return x, tuple(out_index)
def slice_with_int_dask_array_on_axis(x, idx, axis):
"""Slice a ND dask array with a 1D dask arrays of ints along the given
axis.
This is a helper function of :func:`slice_with_int_dask_array`.
"""
from .core import Array, blockwise, from_array
from . import chunk
assert 0 <= axis < x.ndim
if np.isnan(x.chunks[axis]).any():
raise NotImplementedError(
"Slicing an array with unknown chunks with "
"a dask.array of ints is not supported"
)
# Calculate the offset at which each chunk starts along axis
# e.g. chunks=(..., (5, 3, 4), ...) -> offset=[0, 5, 8]
offset = np.roll(np.cumsum(x.chunks[axis]), 1)
offset[0] = 0
offset = from_array(offset, chunks=1)
# Tamper with the declared chunks of offset to make blockwise align it with
# x[axis]
offset = Array(offset.dask, offset.name, (x.chunks[axis],), offset.dtype)
# Define axis labels for blockwise
x_axes = tuple(range(x.ndim))
idx_axes = (x.ndim,) # arbitrary index not already in x_axes
offset_axes = (axis,)
p_axes = x_axes[: axis + 1] + idx_axes + x_axes[axis + 1 :]
y_axes = x_axes[:axis] + idx_axes + x_axes[axis + 1 :]
# Calculate the cartesian product of every chunk of x vs every chunk of idx
p = blockwise(
chunk.slice_with_int_dask_array,
p_axes,
x,
x_axes,
idx,
idx_axes,
offset,
offset_axes,
x_size=x.shape[axis],
axis=axis,
dtype=x.dtype,
)
# Aggregate on the chunks of x along axis
y = blockwise(
chunk.slice_with_int_dask_array_aggregate,
y_axes,
idx,
idx_axes,
p,
p_axes,
concatenate=True,
x_chunks=x.chunks[axis],
axis=axis,
dtype=x.dtype,
)
return y
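# --- Hedged usage sketch (added; not part of the upstream module) ---
# Requires a working dask installation; this is the machinery that backs
# x[idx] when idx is a 1D integer dask array.
def _demo_slice_with_int_dask_array_on_axis():
    import dask.array as da
    x = da.arange(10, chunks=5)
    idx = da.from_array(np.array([2, 7]), chunks=2)
    y = slice_with_int_dask_array_on_axis(x, idx, axis=0)
    assert list(y.compute()) == [2, 7]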
def slice_with_bool_dask_array(x, index):
"""Slice x with one or more dask arrays of bools
This is a helper function of `Array.__getitem__`.
Parameters
----------
x: Array
index: tuple with as many elements as x.ndim, among which there are
one or more Array's with dtype=bool
Returns
-------
tuple of (sliced x, new index)
where the new index is the same as the input, but with slice(None)
replaced to the original slicer when a filter has been applied.
Note: The sliced x will have nan chunks on the sliced axes.
"""
from .core import Array, blockwise, elemwise
out_index = [
slice(None) if isinstance(ind, Array) and ind.dtype == bool else ind
for ind in index
]
if len(index) == 1 and index[0].ndim == x.ndim:
if not np.isnan(x.shape).any() and not np.isnan(index[0].shape).any():
x = x.ravel()
index = tuple(i.ravel() for i in index)
elif x.ndim > 1:
warnings.warn(
"When slicing a Dask array of unknown chunks with a boolean mask "
"Dask array, the output array may have a different ordering "
"compared to the equivalent NumPy operation. This will raise an "
"error in a future release of Dask.",
stacklevel=3,
)
y = elemwise(getitem, x, *index, dtype=x.dtype)
name = "getitem-" + tokenize(x, index)
dsk = {(name, i): k for i, k in enumerate(core.flatten(y.__dask_keys__()))}
chunks = ((np.nan,) * y.npartitions,)
graph = HighLevelGraph.from_collections(name, dsk, dependencies=[y])
return Array(graph, name, chunks, x.dtype), out_index
if any(
isinstance(ind, Array) and ind.dtype == bool and ind.ndim != 1 for ind in index
):
raise NotImplementedError(
"Slicing with dask.array of bools only permitted when "
"the indexer has only one dimension or when "
"it has the same dimension as the sliced "
"array"
)
indexes = [
ind if isinstance(ind, Array) and ind.dtype == bool else slice(None)
for ind in index
]
arginds = []
i = 0
for ind in indexes:
if isinstance(ind, Array) and ind.dtype == bool:
new = (ind, tuple(range(i, i + ind.ndim)))
i += x.ndim
else:
new = (slice(None), None)
i += 1
arginds.append(new)
arginds = list(concat(arginds))
out = blockwise(
getitem_variadic,
tuple(range(x.ndim)),
x,
tuple(range(x.ndim)),
*arginds,
dtype=x.dtype
)
chunks = []
for ind, chunk in zip(index, out.chunks):
if isinstance(ind, Array) and ind.dtype == bool:
chunks.append((np.nan,) * len(chunk))
else:
chunks.append(chunk)
out._chunks = tuple(chunks)
return out, tuple(out_index)
def getitem_variadic(x, *index):
return x[index]
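# Illustrative check (added; not part of the upstream module): the helper
# above re-packs variadic positional indices into a tuple before indexing.
def _demo_getitem_variadic():
    assert list(getitem_variadic(np.arange(10), slice(2, 5))) == [2, 3, 4]
    assert getitem_variadic(np.arange(10), 0, None).shape == (1,)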
def make_block_sorted_slices(index, chunks):
"""Generate blockwise-sorted index pairs for shuffling an array.
Parameters
----------
index : ndarray
An array of index positions.
chunks : tuple
Chunks from the original dask array
Returns
-------
index2 : ndarray
Same values as `index`, but each block has been sorted
index3 : ndarray
The location of the values of `index` in `index2`
Examples
--------
>>> index = np.array([6, 0, 4, 2, 7, 1, 5, 3])
>>> chunks = ((4, 4),)
>>> a, b = make_block_sorted_slices(index, chunks)
Notice that the first set of 4 items are sorted, and the
second set of 4 items are sorted.
>>> a
array([0, 2, 4, 6, 1, 3, 5, 7])
>>> b
array([3, 0, 2, 1, 7, 4, 6, 5])
"""
from .core import slices_from_chunks
slices = slices_from_chunks(chunks)
if len(slices[0]) > 1:
slices = [slice_[0] for slice_ in slices]
offsets = np.roll(np.cumsum(chunks[0]), 1)
offsets[0] = 0
index2 = np.empty_like(index)
index3 = np.empty_like(index)
for slice_, offset in zip(slices, offsets):
a = index[slice_]
b = np.sort(a)
c = offset + np.argsort(b.take(np.argsort(a)))
index2[slice_] = b
index3[slice_] = c
return index2, index3
def shuffle_slice(x, index):
"""A relatively efficient way to shuffle `x` according to `index`.
Parameters
----------
x : Array
index : ndarray
This should be an ndarray the same length as `x` containing
each index position in ``range(0, len(x))``.
Returns
-------
Array
"""
from .core import PerformanceWarning
chunks1 = chunks2 = x.chunks
if x.ndim > 1:
chunks1 = (chunks1[0],)
index2, index3 = make_block_sorted_slices(index, chunks1)
with warnings.catch_warnings():
warnings.simplefilter("ignore", PerformanceWarning)
return x[index2].rechunk(chunks2)[index3]
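# --- Hedged usage sketch (added; not part of the upstream module) ---
# Requires a working dask installation; the two blockwise-sorted passes
# are equivalent to plain fancy indexing x[index].
def _demo_shuffle_slice():
    import dask.array as da
    x = da.arange(8, chunks=4)
    index = np.array([6, 0, 4, 2, 7, 1, 5, 3])
    assert list(shuffle_slice(x, index).compute()) == [6, 0, 4, 2, 7, 1, 5, 3]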
class _HashIdWrapper(object):
"""Hash and compare a wrapped object by identity instead of value"""
def __init__(self, wrapped):
self.wrapped = wrapped
def __eq__(self, other):
if not isinstance(other, _HashIdWrapper):
return NotImplemented
return self.wrapped is other.wrapped
def __ne__(self, other):
if not isinstance(other, _HashIdWrapper):
return NotImplemented
return self.wrapped is not other.wrapped
def __hash__(self):
return id(self.wrapped)
@functools.lru_cache()
def _cumsum(seq, initial_zero):
if isinstance(seq, _HashIdWrapper):
seq = seq.wrapped
if initial_zero:
return tuple(accumulate(add, seq, 0))
else:
return tuple(accumulate(add, seq))
def cached_cumsum(seq, initial_zero=False):
"""Compute :meth:`toolz.accumulate` with caching.
Caching is by the identify of `seq` rather than the value. It is thus
important that `seq` is a tuple of immutable objects, and this function
is intended for use where `seq` is a value that will persist (generally
block sizes).
Parameters
----------
seq : tuple
Values to cumulatively sum.
initial_zero : bool, optional
If true, the return value is prefixed with a zero.
Returns
-------
tuple
"""
if isinstance(seq, tuple):
# Look up by identity first, to avoid a linear-time __hash__
# if we've seen this tuple object before.
result = _cumsum(_HashIdWrapper(seq), initial_zero)
else:
# Construct a temporary tuple, and look up by value.
result = _cumsum(tuple(seq), initial_zero)
return result
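# --- Illustrative checks (added; not part of the upstream module) ---
# Repeated calls with the same tuple object are served from the identity
# cache; non-tuple sequences fall back to a value-keyed lru_cache lookup.
def _demo_cached_cumsum():
    assert cached_cumsum((5, 3, 4)) == (5, 8, 12)
    assert cached_cumsum((5, 3, 4), initial_zero=True) == (0, 5, 8, 12)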
| bsd-3-clause | -1,367,151,266,620,555,500 | 30.147216 | 97 | 0.568938 | false |
eReuse/DeviceHub | ereuse_devicehub/resources/event/device/allocate/settings.py | 1 | 1485 | from ereuse_devicehub.resources.account.settings import unregistered_user, unregistered_user_doc
from ereuse_devicehub.resources.event.device.settings import EventWithDevices, \
EventSubSettingsMultipleDevices, materialized_components
class Allocate(EventWithDevices):
to = {
        'type': ['objectid', 'dict', 'string'],  # We should not need to add 'string' here, but validation does not work otherwise...
'data_relation': {
'resource': 'accounts',
'field': '_id',
'embeddable': True,
},
'schema': unregistered_user,
'doc': 'The user the devices are allocated to. ' + unregistered_user_doc,
'get_from_data_relation_or_create': 'email',
'required': True,
'sink': 2
}
toOrganization = {
'type': 'string',
'readonly': True,
'materialized': True,
'doc': 'Materialization of the organization that, by the time of the allocation, the user worked in.'
}
components = materialized_components
class AllocateSettings(EventSubSettingsMultipleDevices):
_schema = Allocate
fa = 'fa-hand-o-right'
sink = -5
extra_response_fields = EventSubSettingsMultipleDevices.extra_response_fields + ['to']
short_description = 'Assign the devices to someone, so that person \'owns\' the device'
# Receiver OR ReceiverEmail. We need to hook this in a required field so it is always executed
# And @type is an always required field so we can happily hook on it
| agpl-3.0 | -407,023,383,197,654,600 | 39.135135 | 109 | 0.66532 | false |
kubernetes-client/python | kubernetes/client/models/v1_priority_class.py | 1 | 11062 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.18
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1PriorityClass(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'description': 'str',
'global_default': 'bool',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'preemption_policy': 'str',
'value': 'int'
}
attribute_map = {
'api_version': 'apiVersion',
'description': 'description',
'global_default': 'globalDefault',
'kind': 'kind',
'metadata': 'metadata',
'preemption_policy': 'preemptionPolicy',
'value': 'value'
}
def __init__(self, api_version=None, description=None, global_default=None, kind=None, metadata=None, preemption_policy=None, value=None, local_vars_configuration=None): # noqa: E501
"""V1PriorityClass - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._description = None
self._global_default = None
self._kind = None
self._metadata = None
self._preemption_policy = None
self._value = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if description is not None:
self.description = description
if global_default is not None:
self.global_default = global_default
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if preemption_policy is not None:
self.preemption_policy = preemption_policy
self.value = value
@property
def api_version(self):
"""Gets the api_version of this V1PriorityClass. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1PriorityClass. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1PriorityClass.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1PriorityClass. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def description(self):
"""Gets the description of this V1PriorityClass. # noqa: E501
description is an arbitrary string that usually provides guidelines on when this priority class should be used. # noqa: E501
:return: The description of this V1PriorityClass. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this V1PriorityClass.
description is an arbitrary string that usually provides guidelines on when this priority class should be used. # noqa: E501
:param description: The description of this V1PriorityClass. # noqa: E501
:type: str
"""
self._description = description
@property
def global_default(self):
"""Gets the global_default of this V1PriorityClass. # noqa: E501
globalDefault specifies whether this PriorityClass should be considered as the default priority for pods that do not have any priority class. Only one PriorityClass can be marked as `globalDefault`. However, if more than one PriorityClasses exists with their `globalDefault` field set to true, the smallest value of such global default PriorityClasses will be used as the default priority. # noqa: E501
:return: The global_default of this V1PriorityClass. # noqa: E501
:rtype: bool
"""
return self._global_default
@global_default.setter
def global_default(self, global_default):
"""Sets the global_default of this V1PriorityClass.
globalDefault specifies whether this PriorityClass should be considered as the default priority for pods that do not have any priority class. Only one PriorityClass can be marked as `globalDefault`. However, if more than one PriorityClasses exists with their `globalDefault` field set to true, the smallest value of such global default PriorityClasses will be used as the default priority. # noqa: E501
:param global_default: The global_default of this V1PriorityClass. # noqa: E501
:type: bool
"""
self._global_default = global_default
@property
def kind(self):
"""Gets the kind of this V1PriorityClass. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1PriorityClass. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1PriorityClass.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1PriorityClass. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1PriorityClass. # noqa: E501
:return: The metadata of this V1PriorityClass. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1PriorityClass.
:param metadata: The metadata of this V1PriorityClass. # noqa: E501
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def preemption_policy(self):
"""Gets the preemption_policy of this V1PriorityClass. # noqa: E501
PreemptionPolicy is the Policy for preempting pods with lower priority. One of Never, PreemptLowerPriority. Defaults to PreemptLowerPriority if unset. This field is alpha-level and is only honored by servers that enable the NonPreemptingPriority feature. # noqa: E501
:return: The preemption_policy of this V1PriorityClass. # noqa: E501
:rtype: str
"""
return self._preemption_policy
@preemption_policy.setter
def preemption_policy(self, preemption_policy):
"""Sets the preemption_policy of this V1PriorityClass.
PreemptionPolicy is the Policy for preempting pods with lower priority. One of Never, PreemptLowerPriority. Defaults to PreemptLowerPriority if unset. This field is alpha-level and is only honored by servers that enable the NonPreemptingPriority feature. # noqa: E501
:param preemption_policy: The preemption_policy of this V1PriorityClass. # noqa: E501
:type: str
"""
self._preemption_policy = preemption_policy
@property
def value(self):
"""Gets the value of this V1PriorityClass. # noqa: E501
The value of this priority class. This is the actual priority that pods receive when they have the name of this class in their pod spec. # noqa: E501
:return: The value of this V1PriorityClass. # noqa: E501
:rtype: int
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this V1PriorityClass.
The value of this priority class. This is the actual priority that pods receive when they have the name of this class in their pod spec. # noqa: E501
:param value: The value of this V1PriorityClass. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and value is None: # noqa: E501
raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501
self._value = value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1PriorityClass):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1PriorityClass):
return True
return self.to_dict() != other.to_dict()
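# --- Illustrative usage (added; not part of the generated client file) ---
# Field values below are arbitrary demo assumptions, not API requirements.
def _demo_v1_priority_class():
    pc = V1PriorityClass(value=1000000, description="demo", global_default=False)
    assert pc.value == 1000000
    assert pc.to_dict()["global_default"] is False
    assert pc == V1PriorityClass(value=1000000, description="demo", global_default=False)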
| apache-2.0 | 1,955,843,117,827,839,500 | 37.276817 | 411 | 0.644549 | false |
yittg/Snipping | snipping/prompt_toolkit/layout.py | 1 | 5507 | """snipping.prompt_toolkit.layout
wrappers for layout
"""
from prompt_toolkit.key_binding import vi_state
from prompt_toolkit.layout import containers
from prompt_toolkit.layout import controls
from prompt_toolkit.layout import dimension
from prompt_toolkit.layout import highlighters
from prompt_toolkit.layout import margins
from prompt_toolkit.layout import processors
from prompt_toolkit.layout import screen
from prompt_toolkit.layout import toolbars
from snipping.prompt_toolkit import style
from snipping.prompt_toolkit import buffers
class NumberredMargin(margins.NumberredMargin):
""" A simple and customized `create_margin` of origin `NumberredMargin`
"""
def create_margin(self, cli, wr_info, width, height):
visible_line_to_input_line = wr_info.visible_line_to_input_line
token = style.Token.LineNumber
token_error = style.ErrorLineNo
result = []
app = cli.application
snippet = buffers.get_content(app)
cp = app.engine.compile(snippet)
for y in range(wr_info.window_height):
line_number = visible_line_to_input_line.get(y)
if line_number is not None:
if cp is not None and line_number + 1 == cp:
result.append((token_error,
('%i ' % (line_number + 1)).rjust(width)))
else:
result.append((token,
('%i ' % (line_number + 1)).rjust(width)))
result.append((style.Token, '\n'))
return result
def dim(min_=None, max_=None, exact=None):
if exact is not None:
return dimension.LayoutDimension.exact(exact)
return dimension.LayoutDimension(min=min_, max=max_)
def horizontal_line(min_width=None, max_width=None, char=' '):
height = dim(exact=1)
width = dim(min_=min_width, max_=max_width)
content = controls.FillControl(char, token=style.Line)
return containers.Window(width=width, height=height, content=content)
def vertical_line(min_height=None, max_height=None, char=' '):
width = dim(exact=1)
height = dim(min_=min_height, max_=max_height)
content = controls.FillControl(char, token=style.Line)
return containers.Window(width=width, height=height, content=content)
def text_window_bar(name=None, key_binding_manager=None):
def get_tokens(cli):
text_style = style.Bar.Text
display_text, read_only = buffers.buffer_display(cli.application, name)
if not read_only and cli.current_buffer_name == name:
vi_mode = key_binding_manager.get_vi_state(cli).input_mode
if vi_mode == vi_state.InputMode.INSERT:
text_style = style.Bar.Hl_Text
tokens = [(text_style, display_text),
(text_style, u' \u2022 ')]
if vi_mode == vi_state.InputMode.INSERT:
tokens.append((text_style, 'INSERT'))
elif vi_mode == vi_state.InputMode.NAVIGATION:
tokens.append((text_style, 'NORMAL'))
else:
tokens.append((text_style, '[ ]'))
return tokens
else:
return [(text_style, display_text)]
return toolbars.TokenListToolbar(
get_tokens, default_char=screen.Char(' ', style.Bar.Text))
def normal_text_window(name=None, lang=None, lineno=False,
leading_space=False, trailing_space=False,
width=None, height=None):
if name is None:
name = buffers.DEFAULT_BUFFER
bf_attrs = {'buffer_name': name,
'lexer': style.get_lexer_by_lang(lang),
'highlighters': [highlighters.SelectionHighlighter()]}
input_processors = []
if leading_space:
input_processors.append(processors.ShowLeadingWhiteSpaceProcessor())
if trailing_space:
input_processors.append(processors.ShowTrailingWhiteSpaceProcessor())
if input_processors:
bf_attrs['input_processors'] = input_processors
win_attrs = {}
left_margins = []
if lineno:
left_margins.append(NumberredMargin(name))
if left_margins:
win_attrs['left_margins'] = left_margins
if height is not None:
win_attrs['height'] = height
if width is not None:
win_attrs['width'] = width
content = controls.BufferControl(**bf_attrs)
return containers.Window(content=content, **win_attrs)
def horizontal_tokenlist_window(get_tokens, align='left'):
tlc_attrs = {}
if align == 'center':
tlc_attrs['align_center'] = True
if align == 'right':
tlc_attrs['align_right'] = True
height = dim(exact=1)
content = controls.TokenListControl(get_tokens, **tlc_attrs)
return containers.Window(height=height, content=content)
def window_rows(windows):
return containers.HSplit(windows)
def window_columns(windows):
return containers.VSplit(windows)
def text_window_with_bar(name=None, lang=None, lineno=False,
leading_space=False, trailing_space=False,
width=None, height=None, key_binding_manager=None):
if name is None:
name = buffers.DEFAULT_BUFFER
return window_rows([
normal_text_window(
name=name, lang=lang, lineno=lineno,
leading_space=leading_space, trailing_space=trailing_space,
width=width, height=height),
text_window_bar(name=name, key_binding_manager=key_binding_manager),
])
| mit | -1,226,969,072,952,198,400 | 33.85443 | 79 | 0.632831 | false |
thecarebot/carebot | tests/test_spreadsheet.py | 1 | 3336 | #!/usr/bin/env python
import datetime
from mock import patch
try:
import unittest2 as unittest
except ImportError:
import unittest
import app_config
app_config.DATABASE_NAME = 'carebot_test.db'
app_config.date_cutoff = datetime.date(1997, 1, 1)
from scrapers.npr_spreadsheet import SpreadsheetScraper
from util.config import Config
from util.models import Story
from tests.test_util.db import clear_stories
class TestSpreadsheet(unittest.TestCase):
source = {
'doc_key': 'foo-bar-baz'
}
def test_scrape_spreadsheet(self):
"""
Make sure we grab the right data from spreadsheets
"""
scraper = SpreadsheetScraper(self.source)
stories = scraper.scrape_spreadsheet('tests/data/stories.xlsx')
self.assertEqual(len(stories), 4)
self.assertEqual(stories[0]['date'], '42467') # Crappy excel date format
self.assertEqual(stories[0]['graphic_slug'], 'voting-wait-20160404')
self.assertEqual(stories[0]['graphic_type'], 'Graphic')
self.assertEqual(stories[0]['story_headline'], 'What Keeps Election Officials Up At Night? Fear Of Long Lines At The Polls')
self.assertEqual(stories[0]['story_url'], 'http://www.npr.org/2016/04/07/473293026/what-keeps-election-officials-up-at-night-fear-of-long-lines-at-the-polls')
self.assertEqual(stories[0]['contact'], 'Alyson Hurt')
self.assertEqual(stories[0]['date'], '42467')
self.assertEqual(stories[3]['graphic_slug'], 'seed-market-20160405')
self.assertEqual(stories[3]['graphic_type'], 'Graphic')
self.assertEqual(stories[3]['story_headline'], 'Big Seed: Consolidation Is Shrinking The Industry Even Further')
self.assertEqual(stories[3]['story_url'], 'http://www.npr.org/sections/thesalt/2016/04/06/472960018/big-seed-consolidation-is-shrinking-the-industry-even-further')
self.assertEqual(stories[3]['contact'], 'Alyson Hurt')
@patch('util.s3.Uploader.upload', return_value='http://image-url-here')
def test_write_spreadsheet(self, mock_upload):
"""
Make sure we save the stories to the database when scraping from a
spreadsheet
"""
clear_stories()
scraper = SpreadsheetScraper(self.source)
stories = scraper.scrape_spreadsheet('tests/data/stories.xlsx')
scraper.write(stories)
results = Story.select()
self.assertEqual(len(results), 4)
for idx, story in enumerate(stories):
self.assertEqual(results[idx].name, story['story_headline'])
self.assertEqual(results[idx].url, story['story_url'])
@patch('util.s3.Uploader.upload')
def test_write_spreadsheet_duplicates(self, mock_upload):
"""
Make sure stories don't get inserted more than once
"""
mock_upload.return_value = 'http://image-url-here'
clear_stories()
scraper = SpreadsheetScraper(self.source)
stories = scraper.scrape_spreadsheet('tests/data/stories.xlsx')
# Insert the stories
scraper.write(stories)
results = Story.select()
self.assertEqual(len(results), 4)
# Now insert them again and make sure we don't have duplicates
scraper.write(stories)
results = Story.select()
self.assertEqual(len(results), 4)
| mit | 1,149,627,926,656,244,500 | 37.344828 | 171 | 0.667866 | false |
infrae/moai | moai/metadata.py | 1 | 9916 |
from lxml.builder import ElementMaker
XSI_NS = 'http://www.w3.org/2001/XMLSchema-instance'
class OAIDC(object):
"""The standard OAI Dublin Core metadata format.
Every OAI feed should at least provide this format.
It is registered under the name 'oai_dc'
"""
def __init__(self, prefix, config, db):
self.prefix = prefix
self.config = config
self.db = db
self.ns = {'oai_dc': 'http://www.openarchives.org/OAI/2.0/oai_dc/',
'dc':'http://purl.org/dc/elements/1.1/'}
self.schemas = {'oai_dc': 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd'}
def get_namespace(self):
return self.ns[self.prefix]
def get_schema_location(self):
return self.schemas[self.prefix]
def __call__(self, element, metadata):
data = metadata.record
OAI_DC = ElementMaker(namespace=self.ns['oai_dc'],
nsmap =self.ns)
DC = ElementMaker(namespace=self.ns['dc'])
oai_dc = OAI_DC.dc()
oai_dc.attrib['{%s}schemaLocation' % XSI_NS] = '%s %s' % (
self.ns['oai_dc'],
self.schemas['oai_dc'])
for field in ['title', 'creator', 'subject', 'description',
'publisher', 'contributor', 'type', 'format',
'identifier', 'source', 'language', 'date',
'relation', 'coverage', 'rights']:
el = getattr(DC, field)
for value in data['metadata'].get(field, []):
if field == 'identifier' and data['metadata'].get('url'):
value = data['metadata']['url'][0]
oai_dc.append(el(value))
element.append(oai_dc)
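# A minimal usage sketch for OAIDC (illustrative only; DummyMetadata and the
# shape of its 'record' attribute are assumptions inferred from the attribute
# access above, not part of moai's public API):
#
#     from lxml import etree
#
#     class DummyMetadata(object):
#         record = {'metadata': {'title': ['An Example Title'],
#                                'creator': ['Someone']}}
#
#     writer = OAIDC('oai_dc', config=None, db=None)
#     root = etree.Element('root')
#     writer(root, DummyMetadata())
#     # root now holds an <oai_dc:dc> element with <dc:title> and <dc:creator>.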
class MODS(object):
"""This is the minimods formats as defined by DARE.
It is registered as prefix 'mods'.'
"""
def __init__(self, prefix, config, db):
self.prefix = prefix
self.config = config
self.db = db
self.ns = {'mods': 'http://www.loc.gov/mods/v3',
'xml':'http://www.w3.org/XML/1998/namespace',
'dai': 'info:eu-repo/dai'}
self.schemas = {
'mods': 'http://www.loc.gov/standards/mods/v3/mods-3-2.xsd'}
def get_namespace(self):
return self.ns[self.prefix]
def get_schema_location(self):
return self.schemas[self.prefix]
def __call__(self, element, metadata):
data = metadata.record
MODS = ElementMaker(namespace=self.ns['mods'], nsmap=self.ns)
DAI = ElementMaker(namespace=self.ns['dai'], nsmap=self.ns)
mods = MODS.mods(version="3.2")
if data['metadata'].get('identifier'):
mods.append(MODS.identifier(data['metadata']['identifier'][0],
type="uri"))
if data['metadata'].get('url'):
mods.append(MODS.location(MODS.url(data['metadata']['url'][0])))
if data['metadata'].get('title'):
titleInfo = MODS.titleInfo(
MODS.title(data['metadata']['title'][0])
)
titleInfo.attrib['{%s}lang' % self.ns['xml']] = data['metadata'].get(
'language', ['en'])[0]
mods.append(titleInfo)
if data['metadata'].get('description'):
mods.append(MODS.abstract(data['metadata']['description'][0]))
for ctype in ['author', 'editor', 'advisor']:
contributor_data = []
for id in data['metadata'].get('%s_rel' % ctype, []):
contributor = self.db.get_metadata(id)
contributor['id'] = id
contributor_data.append(contributor)
if data['metadata'].get('%s_data' % ctype):
contributor_data = [s for s in data['metadata'][
'%s_data' % ctype]]
if not contributor_data:
contributor_data = [{'name':[a]} for a in data[
'metadata'].get(ctype, [])]
dai_list = []
for contributor in contributor_data:
unique_id = data['record']['id'] + '_' + contributor.get(
'id', contributor['name'][0])
if unique_id[0].isdigit():
unique_id = '_'+unique_id
unique_id = unique_id.replace(':', '')
name = MODS.name(
MODS.displayForm(contributor['name'][0]),
type='personal',
ID=unique_id
)
surname = contributor.get('surname')
if surname:
name.append(MODS.namePart(surname[0], type="family"))
firstname = contributor.get('firstname')
if firstname:
name.append(MODS.namePart(firstname[0], type="given"))
role = contributor.get('role')
if role:
role = role[0]
else:
roles = {'author': 'aut', 'editor': 'edt', 'advisor':'ths'}
role = roles[ctype]
name.append(
MODS.role(
MODS.roleTerm(role,
type='code',
authority='marcrelator')
))
mods.append(name)
dai = contributor.get('dai')
if dai:
dai_list.append((unique_id, dai))
if dai_list:
daiList = DAI.daiList()
for id, dai in dai_list:
daiList.append(DAI.identifier(
dai[0],
IDref=id,
authority='info:eu-repo/dai/nl'))
mods.append(MODS.extension(daiList))
dgg = data['metadata'].get('degree_grantor')
if dgg:
mods.append(MODS.name(
MODS.namePart(dgg[0]),
MODS.role(
MODS.roleTerm('dgg',
authority="marcrelator",
type="code")
),
type="corporate"))
if data['metadata'].get('language'):
mods.append(MODS.language(
MODS.languageTerm(data['metadata']['language'][0],
type="code",
authority="rfc3066")))
for host in ['journal', 'series']:
title = data['metadata'].get('%s_title' % host)
part_type = {'journal': 'host'}.get(host, host)
relitem = MODS.relatedItem(type=part_type)
if title:
relitem.append(MODS.titleInfo(MODS.title(title[0])))
else:
continue
issn = data['metadata'].get('%s_issn' % host)
if issn:
relitem.append(
MODS.identifier('urn:issn:%s' % issn[0],
type="uri"))
volume = data['metadata'].get('%s_volume' % host)
issue = data['metadata'].get('%s_issue' % host)
start_page = data['metadata'].get('%s_start_page' % host)
end_page = data['metadata'].get('%s_end_page' % host)
if volume or issue or end_page or start_page:
part = MODS.part()
if volume:
part.append(MODS.detail(MODS.number(volume[0]),
type="volume"))
if issue:
part.append(MODS.detail(MODS.number(issue[0]),
type="issue"))
if start_page or end_page:
extent = MODS.extent(unit="page")
if start_page:
extent.append(MODS.start(start_page[0]))
if end_page:
extent.append(MODS.end(end_page[0]))
part.append(extent)
relitem.append(part)
if data['metadata'].get('%s_publisher' % host):
relitem.append(
MODS.originInfo(
MODS.publisher(
data['metadata']['%s_publisher' % host][0])))
mods.append(relitem)
origin = MODS.originInfo()
mods.append(origin)
if data['metadata'].get('publisher'):
origin.append(MODS.publisher(data['metadata']['publisher'][0]))
if data['metadata'].get('date'):
origin.append(MODS.dateIssued(data['metadata']['date'][0],
encoding='iso8601'))
mods.append(MODS.typeOfResource('text'))
if data['metadata'].get('dare_type'):
mods.append(MODS.genre(data['metadata']['dare_type'][0]))
classifications = data['metadata'].get('classification', [])
for classification in classifications:
if classification.count('#') == 1:
authority, value = classification.split('#')
mods.append(MODS.classification(value, authority=authority))
else:
mods.append(MODS.classification(classification))
subjects = data['metadata'].get('subject', [])
if subjects:
s_el = MODS.subject()
for subject in subjects:
s_el.append(MODS.topic(subject))
mods.append(s_el)
if data['metadata'].get('rights'):
mods.append(MODS.accessCondition(data['metadata']['rights'][0]))
mods.attrib['{%s}schemaLocation' % XSI_NS] = '%s %s' % (
self.ns['mods'],
self.schemas['mods'])
element.append(mods)
| bsd-3-clause | 2,232,625,122,930,430,000 | 36.992337 | 83 | 0.469242 | false |
deepmind/acme | acme/wrappers/action_repeat.py | 1 | 1582 | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper that implements action repeats."""
from acme import types
from acme.wrappers import base
import dm_env
class ActionRepeatWrapper(base.EnvironmentWrapper):
"""Action repeat wrapper."""
def __init__(self, environment: dm_env.Environment, num_repeats: int = 1):
super().__init__(environment)
self._num_repeats = num_repeats
def step(self, action: types.NestedArray) -> dm_env.TimeStep:
# Initialize accumulated reward and discount.
reward = 0.
discount = 1.
# Step the environment by repeating action.
for _ in range(self._num_repeats):
timestep = self._environment.step(action)
# Accumulate reward and discount.
reward += timestep.reward * discount
discount *= timestep.discount
# Don't go over episode boundaries.
if timestep.last():
break
# Replace the final timestep's reward and discount.
return timestep._replace(reward=reward, discount=discount)
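# A minimal usage sketch (illustrative; make_my_env is a hypothetical factory,
# not part of acme):
#
#     env = ActionRepeatWrapper(make_my_env(), num_repeats=4)
#     timestep = env.step(action)
#
# Here timestep.reward is the discount-weighted sum over up to four inner
# steps and timestep.discount is the product of the inner discounts.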
| apache-2.0 | 64,054,636,518,076,696 | 31.958333 | 76 | 0.719343 | false |
treycucco/pxp | pxp/stdlib/operator.py | 1 | 10810 | from decimal import Decimal
from pxp.exception import OperatorError
from pxp.function import FunctionArg, FunctionList, InjectedFunction
from pxp.stdlib.types import number_t, string_t, boolean_t
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Number Operators
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def op_number_add(resolver, left, right):
"""Returns the sum of two numbers."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval + rval
def op_number_subtract(resolver, left, right):
"""Returns the difference of two numbers."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval - rval
def op_number_multiply(resolver, left, right):
"""Returns the product of two numbers."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval * rval
def op_number_divide(resolver, left, right):
"""Returns the quotient of two numbers."""
rval = resolver.resolve(right)
if rval == Decimal(0):
raise OperatorError("Divide by 0")
lval = resolver.resolve(left)
return lval / rval
def op_number_modulus(resolver, left, right):
"""Returns the remainder from left / right."""
rval = resolver.resolve(right)
if rval == Decimal(0):
raise OperatorError("Divide by 0")
lval = resolver.resolve(left)
return lval % rval
def op_number_exponentiate(resolver, left, right):
"""Returns the value of left raised to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval ** rval
def op_number_negate(resolver, arg):
"""Returns the negation of arg."""
aval = resolver.resolve(arg)
return -1 * aval
def op_number_null_coalesce(resolver, left, right):
"""Returns the left if left is not null, otherwise right. Right is not resolved until it is
determined that left is null.
"""
lval = resolver.resolve(left, none_ok=True)
if lval is not None:
return lval
else:
rval = resolver.resolve(right, none_ok=True)
return rval
def op_number_cmp_equal(resolver, left, right):
"""Returns True if left is equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval == rval
def op_number_cmp_not_equal(resolver, left, right):
"""Returns True if left is not equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval != rval
def op_number_cmp_greater_than_or_equal(resolver, left, right):
"""Returns True if left is greater than or equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval >= rval
def op_number_cmp_less_than_or_equal(resolver, left, right):
"""Returns True if left is less than or equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval <= rval
def op_number_cmp_greater_than(resolver, left, right):
"""Returns True if left is strictly greater than right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval > rval
def op_number_cmp_less_than(resolver, left, right):
"""Returns True if left is strictly less than right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval < rval
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# String operators
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def op_string_add(resolver, left, right):
"""Returns the concatenation of left and right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval + rval
def op_string_null_coalesce(resolver, left, right):
"""Returns the left if left is not null, otherwise right. Right is not resolved until it is
determined that left is null.
"""
lval = resolver.resolve(left, none_ok=True)
if lval is not None:
return lval
else:
rval = resolver.resolve(right, none_ok=True)
return rval
def op_string_cmp_equal(resolver, left, right):
"""Returns True if left is lexicographically equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval == rval
def op_string_cmp_not_equal(resolver, left, right):
"""Returns True if left is not lexicographically equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval != rval
def op_string_cmp_greater_than_or_equal(resolver, left, right):
"""Returns True if left is lexicographically greater than or equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval >= rval
def op_string_cmp_less_than_or_equal(resolver, left, right):
"""Returns True if left is lexicographically less than or equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval <= rval
def op_string_cmp_greater_than(resolver, left, right):
"""Returns True if left is lexicographically strictly greater than right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval > rval
def op_string_cmp_less_than(resolver, left, right):
"""Returns True if left is lexicographically strictly less than right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval < rval
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Boolean Operators
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def op_boolean_null_coalesce(resolver, left, right):
"""Returns the left if left is not null, otherwise right. Right is not resolved until it is
determined that left is null.
"""
lval = resolver.resolve(left, none_ok=True)
if lval is not None:
return lval
else:
rval = resolver.resolve(right, none_ok=True)
return rval
def op_boolean_cmp_equal(resolver, left, right):
"""Returns True if left is equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval == rval
def op_boolean_cmp_not_equal(resolver, left, right):
"""Returns True if left is not equal to right."""
lval = resolver.resolve(left)
rval = resolver.resolve(right)
return lval != rval
def op_boolean_logical_not(resolver, arg):
"""Returns the negation of arg. If arg is True, False is returned. If arg is False, True is
returned.
"""
aval = resolver.resolve(arg)
return not aval
def op_boolean_logical_and(resolver, left, right):
"""Returns True if both left and right evaluate to True, False otherwise.
If left is not True, the value of right doesn't matter, so right will not be evaluated.
"""
# Short circuit
lval = resolver.resolve(left)
if not lval:
return False
rval = resolver.resolve(right)
if rval:
return True
return False
def op_boolean_logical_or(resolver, left, right):
"""Returns True if left or right evaluate to True, False otherwise.
If left is True, the value of right doesn't matter, so right will not be evaluated.
"""
# Short circuit
lval = resolver.resolve(left)
if lval:
return True
rval = resolver.resolve(right)
if rval:
return True
return False
operator_functions = FunctionList((
InjectedFunction("operator+", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), number_t, op_number_add),
InjectedFunction("operator-", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), number_t, op_number_subtract),
InjectedFunction("operator*", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), number_t, op_number_multiply),
InjectedFunction("operator/", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), number_t, op_number_divide),
InjectedFunction("operator%", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), number_t, op_number_modulus),
InjectedFunction("operator^", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), number_t, op_number_exponentiate),
InjectedFunction("operatorunary-", (FunctionArg(number_t, "arg"), ), number_t, op_number_negate),
InjectedFunction("operator?", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), number_t, op_number_null_coalesce),
InjectedFunction("operator=", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), boolean_t, op_number_cmp_equal),
InjectedFunction("operator!=", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), boolean_t, op_number_cmp_not_equal),
InjectedFunction("operator>=", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), boolean_t, op_number_cmp_greater_than_or_equal),
InjectedFunction("operator<=", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), boolean_t, op_number_cmp_less_than_or_equal),
InjectedFunction("operator>", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), boolean_t, op_number_cmp_greater_than),
InjectedFunction("operator<", (FunctionArg(number_t, "left"), FunctionArg(number_t, "right")), boolean_t, op_number_cmp_less_than),
InjectedFunction("operator+", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), string_t, op_string_add),
InjectedFunction("operator?", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), string_t, op_string_null_coalesce),
InjectedFunction("operator=", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), boolean_t, op_string_cmp_equal),
InjectedFunction("operator!=", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), boolean_t, op_string_cmp_not_equal),
InjectedFunction("operator>=", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), boolean_t, op_string_cmp_greater_than_or_equal),
InjectedFunction("operator<=", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), boolean_t, op_string_cmp_less_than_or_equal),
InjectedFunction("operator>", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), boolean_t, op_string_cmp_greater_than),
InjectedFunction("operator<", (FunctionArg(string_t, "left"), FunctionArg(string_t, "right")), boolean_t, op_string_cmp_less_than),
InjectedFunction("operator?", (FunctionArg(boolean_t, "left"), FunctionArg(boolean_t, "right")), boolean_t, op_boolean_null_coalesce),
InjectedFunction("operator=", (FunctionArg(boolean_t, "left"), FunctionArg(boolean_t, "right")), boolean_t, op_boolean_cmp_equal),
InjectedFunction("operator!=", (FunctionArg(boolean_t, "left"), FunctionArg(boolean_t, "right")), boolean_t, op_boolean_cmp_not_equal),
InjectedFunction("operatorunary!", (FunctionArg(boolean_t, "arg"), ), boolean_t, op_boolean_logical_not),
InjectedFunction("operator&", (FunctionArg(boolean_t, "left"), FunctionArg(boolean_t, "right")), boolean_t, op_boolean_logical_and),
InjectedFunction("operator|", (FunctionArg(boolean_t, "left"), FunctionArg(boolean_t, "right")), boolean_t, op_boolean_logical_or)
))
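# Illustrative calling convention for the operator functions above. The
# resolver supplies operand resolution; TrivialResolver is a hypothetical
# stand-in for pxp's real resolver, shown only to make the contract concrete:
#
#     class TrivialResolver(object):
#         def resolve(self, value, none_ok=False):
#             return value
#
#     op_number_add(TrivialResolver(), Decimal(2), Decimal(3))  # Decimal('5')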
| bsd-3-clause | -3,144,274,327,464,263,700 | 36.797203 | 146 | 0.684736 | false |
gerard256/script.trakt | rating.py | 1 | 7947 | # -*- coding: utf-8 -*-
"""Module used to launch rating dialogues and send ratings to Trakt"""
import xbmc
import xbmcaddon
import xbmcgui
import utilities as utils
import globals
import logging
logger = logging.getLogger(__name__)
__addon__ = xbmcaddon.Addon("script.trakt")
def ratingCheck(media_type, summary_info, watched_time, total_time, playlist_length):
"""Check if a video should be rated and if so launches the rating dialog"""
logger.debug("Rating Check called for '%s'" % media_type)
if not utils.getSettingAsBool("rate_%s" % media_type):
logger.debug("'%s' is configured to not be rated." % media_type)
return
if summary_info is None:
logger.debug("Summary information is empty, aborting.")
return
    watched = (float(watched_time) / total_time) * 100  # float() guards against Python 2 integer division
if watched >= utils.getSettingAsFloat("rate_min_view_time"):
if (playlist_length <= 1) or utils.getSettingAsBool("rate_each_playlist_item"):
rateMedia(media_type, summary_info)
else:
logger.debug("Rate each playlist item is disabled.")
else:
logger.debug("'%s' does not meet minimum view time for rating (watched: %0.2f%%, minimum: %0.2f%%)" % (media_type, watched, utils.getSettingAsFloat("rate_min_view_time")))
def rateMedia(media_type, itemsToRate, unrate=False, rating=None):
"""Launches the rating dialog"""
for summary_info in itemsToRate:
if not utils.isValidMediaType(media_type):
logger.debug("Not a valid media type")
return
elif 'user' not in summary_info:
logger.debug("No user data")
return
s = utils.getFormattedItemName(media_type, summary_info)
logger.debug("Summary Info %s" % summary_info)
if unrate:
rating = None
if summary_info['user']['ratings']['rating'] > 0:
rating = 0
            if rating is not None:
logger.debug("'%s' is being unrated." % s)
__rateOnTrakt(rating, media_type, summary_info, unrate=True)
else:
logger.debug("'%s' has not been rated, so not unrating." % s)
return
rerate = utils.getSettingAsBool('rate_rerate')
if rating is not None:
if summary_info['user']['ratings']['rating'] == 0:
logger.debug("Rating for '%s' is being set to '%d' manually." % (s, rating))
__rateOnTrakt(rating, media_type, summary_info)
else:
if rerate:
if not summary_info['user']['ratings']['rating'] == rating:
logger.debug("Rating for '%s' is being set to '%d' manually." % (s, rating))
__rateOnTrakt(rating, media_type, summary_info)
else:
utils.notification(utils.getString(32043), s)
logger.debug("'%s' already has a rating of '%d'." % (s, rating))
else:
utils.notification(utils.getString(32041), s)
logger.debug("'%s' is already rated." % s)
return
if summary_info['user']['ratings'] and summary_info['user']['ratings']['rating']:
if not rerate:
logger.debug("'%s' has already been rated." % s)
utils.notification(utils.getString(32041), s)
return
else:
logger.debug("'%s' is being re-rated." % s)
xbmc.executebuiltin('Dialog.Close(all, true)')
gui = RatingDialog(
"script-trakt-RatingDialog.xml",
__addon__.getAddonInfo('path'),
media_type=media_type,
media=summary_info,
rerate=rerate
)
gui.doModal()
if gui.rating:
rating = gui.rating
if rerate:
rating = gui.rating
if summary_info['user']['ratings'] and summary_info['user']['ratings']['rating'] > 0 and rating == summary_info['user']['ratings']['rating']:
rating = 0
if rating == 0 or rating == "unrate":
__rateOnTrakt(rating, gui.media_type, gui.media, unrate=True)
else:
__rateOnTrakt(rating, gui.media_type, gui.media)
else:
logger.debug("Rating dialog was closed with no rating.")
del gui
        # Reset rating and unrate flags for multi-part episodes
        unrate = False
        rating = None
def __rateOnTrakt(rating, media_type, media, unrate=False):
logger.debug("Sending rating (%s) to Trakt.tv" % rating)
params = media
if utils.isMovie(media_type):
key = 'movies'
params['rating'] = rating
elif utils.isShow(media_type):
key = 'shows'
params['rating'] = rating
elif utils.isSeason(media_type):
key = 'shows'
params['seasons'] = [{'rating': rating, 'number': media['season']}]
elif utils.isEpisode(media_type):
key = 'episodes'
params['rating'] = rating
else:
return
root = {key: [params]}
if not unrate:
data = globals.traktapi.addRating(root)
else:
data = globals.traktapi.removeRating(root)
if data:
s = utils.getFormattedItemName(media_type, media)
if 'not_found' in data and not data['not_found']['movies'] and not data['not_found']['episodes'] and not data['not_found']['shows']:
if not unrate:
utils.notification(utils.getString(32040), s)
else:
utils.notification(utils.getString(32042), s)
else:
utils.notification(utils.getString(32044), s)
class RatingDialog(xbmcgui.WindowXMLDialog):
buttons = {
11030: 1,
11031: 2,
11032: 3,
11033: 4,
11034: 5,
11035: 6,
11036: 7,
11037: 8,
11038: 9,
11039: 10
}
focus_labels = {
11030: 32028,
11031: 32029,
11032: 32030,
11033: 32031,
11034: 32032,
11035: 32033,
11036: 32034,
11037: 32035,
11038: 32036,
11039: 32027
}
def __init__(self, xmlFile, resourcePath, forceFallback=False, media_type=None, media=None, rerate=False):
self.media_type = media_type
self.media = media
self.rating = None
self.rerate = rerate
self.default_rating = utils.getSettingAsInt('rating_default')
def onInit(self):
s = utils.getFormattedItemName(self.media_type, self.media)
self.getControl(10012).setLabel(s)
rateID = 11029 + self.default_rating
if self.rerate and self.media['user']['ratings'] and int(self.media['user']['ratings']['rating']) > 0:
rateID = 11029 + int(self.media['user']['ratings']['rating'])
self.setFocus(self.getControl(rateID))
def onClick(self, controlID):
if controlID in self.buttons:
self.rating = self.buttons[controlID]
self.close()
def onFocus(self, controlID):
if controlID in self.focus_labels:
s = utils.getString(self.focus_labels[controlID])
if self.rerate:
if self.media['user']['ratings'] and self.media['user']['ratings']['rating'] == self.buttons[controlID]:
if utils.isMovie(self.media_type):
s = utils.getString(32037)
elif utils.isShow(self.media_type):
s = utils.getString(32038)
elif utils.isEpisode(self.media_type):
s = utils.getString(32039)
elif utils.isSeason(self.media_type):
s = utils.getString(32132)
else:
pass
self.getControl(10013).setLabel(s)
else:
self.getControl(10013).setLabel('')
| gpl-2.0 | -5,166,443,607,900,316,000 | 34.959276 | 179 | 0.556436 | false |
oblique-labs/pyVM | rpython/memory/gctypelayout.py | 1 | 22275 | from rpython.rtyper.lltypesystem import lltype, llmemory, llarena, llgroup
from rpython.rtyper import rclass
from rpython.rtyper.lltypesystem.lloperation import llop
from rpython.rlib.debug import ll_assert
from rpython.rlib.rarithmetic import intmask
from rpython.tool.identity_dict import identity_dict
class GCData(object):
"""The GC information tables, and the query functions that the GC
calls to decode their content. The encoding of this information
is done by encode_type_shape(). These two places should be in sync,
obviously, but in principle no other code should depend on the
details of the encoding in TYPE_INFO.
"""
_alloc_flavor_ = 'raw'
OFFSETS_TO_GC_PTR = lltype.Array(lltype.Signed)
# A CUSTOM_FUNC is either a destructor, or a custom tracer.
# A destructor is called when the object is about to be freed.
# A custom tracer (CT) enumerates the addresses that contain GCREFs.
# Both are called with the address of the object as only argument.
CUSTOM_FUNC = lltype.FuncType([llmemory.Address], lltype.Void)
CUSTOM_FUNC_PTR = lltype.Ptr(CUSTOM_FUNC)
# structure describing the layout of a typeid
TYPE_INFO = lltype.Struct("type_info",
("infobits", lltype.Signed), # combination of the T_xxx consts
("customfunc", CUSTOM_FUNC_PTR),
("fixedsize", lltype.Signed),
("ofstoptrs", lltype.Ptr(OFFSETS_TO_GC_PTR)),
hints={'immutable': True},
)
VARSIZE_TYPE_INFO = lltype.Struct("varsize_type_info",
("header", TYPE_INFO),
("varitemsize", lltype.Signed),
("ofstovar", lltype.Signed),
("ofstolength", lltype.Signed),
("varofstoptrs", lltype.Ptr(OFFSETS_TO_GC_PTR)),
hints={'immutable': True},
)
TYPE_INFO_PTR = lltype.Ptr(TYPE_INFO)
VARSIZE_TYPE_INFO_PTR = lltype.Ptr(VARSIZE_TYPE_INFO)
def __init__(self, type_info_group):
assert isinstance(type_info_group, llgroup.group)
self.type_info_group = type_info_group
self.type_info_group_ptr = type_info_group._as_ptr()
def get(self, typeid):
res = llop.get_group_member(GCData.TYPE_INFO_PTR,
self.type_info_group_ptr,
typeid)
_check_valid_type_info(res)
return res
def get_varsize(self, typeid):
res = llop.get_group_member(GCData.VARSIZE_TYPE_INFO_PTR,
self.type_info_group_ptr,
typeid)
_check_valid_type_info_varsize(res)
return res
def q_is_varsize(self, typeid):
infobits = self.get(typeid).infobits
return (infobits & T_IS_VARSIZE) != 0
def q_has_gcptr_in_varsize(self, typeid):
infobits = self.get(typeid).infobits
return (infobits & T_HAS_GCPTR_IN_VARSIZE) != 0
def q_has_gcptr(self, typeid):
infobits = self.get(typeid).infobits
return (infobits & T_HAS_GCPTR) != 0
def q_is_gcarrayofgcptr(self, typeid):
infobits = self.get(typeid).infobits
return (infobits & T_IS_GCARRAY_OF_GCPTR) != 0
def q_cannot_pin(self, typeid):
typeinfo = self.get(typeid)
ANY = (T_HAS_GCPTR | T_IS_WEAKREF)
return (typeinfo.infobits & ANY) != 0 or bool(typeinfo.customfunc)
def q_finalizer_handlers(self):
adr = self.finalizer_handlers # set from framework.py or gcwrapper.py
return llmemory.cast_adr_to_ptr(adr, lltype.Ptr(FIN_HANDLER_ARRAY))
def q_destructor_or_custom_trace(self, typeid):
return self.get(typeid).customfunc
def q_is_old_style_finalizer(self, typeid):
typeinfo = self.get(typeid)
return (typeinfo.infobits & T_HAS_OLDSTYLE_FINALIZER) != 0
def q_offsets_to_gc_pointers(self, typeid):
return self.get(typeid).ofstoptrs
def q_fixed_size(self, typeid):
return self.get(typeid).fixedsize
def q_varsize_item_sizes(self, typeid):
return self.get_varsize(typeid).varitemsize
def q_varsize_offset_to_variable_part(self, typeid):
return self.get_varsize(typeid).ofstovar
def q_varsize_offset_to_length(self, typeid):
return self.get_varsize(typeid).ofstolength
def q_varsize_offsets_to_gcpointers_in_var_part(self, typeid):
return self.get_varsize(typeid).varofstoptrs
def q_weakpointer_offset(self, typeid):
infobits = self.get(typeid).infobits
if infobits & T_IS_WEAKREF:
return weakptr_offset
return -1
def q_member_index(self, typeid):
infobits = self.get(typeid).infobits
return infobits & T_MEMBER_INDEX
def q_is_rpython_class(self, typeid):
infobits = self.get(typeid).infobits
return infobits & T_IS_RPYTHON_INSTANCE != 0
def q_has_custom_trace(self, typeid):
infobits = self.get(typeid).infobits
return infobits & T_HAS_CUSTOM_TRACE != 0
def q_fast_path_tracing(self, typeid):
# return True if none of the flags T_HAS_GCPTR_IN_VARSIZE,
# T_IS_GCARRAY_OF_GCPTR or T_HAS_CUSTOM_TRACE is set
T_ANY_SLOW_FLAG = (T_HAS_GCPTR_IN_VARSIZE |
T_IS_GCARRAY_OF_GCPTR |
T_HAS_CUSTOM_TRACE)
infobits = self.get(typeid).infobits
return infobits & T_ANY_SLOW_FLAG == 0
def set_query_functions(self, gc):
gc.set_query_functions(
self.q_is_varsize,
self.q_has_gcptr_in_varsize,
self.q_is_gcarrayofgcptr,
self.q_finalizer_handlers,
self.q_destructor_or_custom_trace,
self.q_is_old_style_finalizer,
self.q_offsets_to_gc_pointers,
self.q_fixed_size,
self.q_varsize_item_sizes,
self.q_varsize_offset_to_variable_part,
self.q_varsize_offset_to_length,
self.q_varsize_offsets_to_gcpointers_in_var_part,
self.q_weakpointer_offset,
self.q_member_index,
self.q_is_rpython_class,
self.q_has_custom_trace,
self.q_fast_path_tracing,
self.q_has_gcptr,
self.q_cannot_pin)
def _has_got_custom_trace(self, typeid):
type_info = self.get(typeid)
type_info.infobits |= (T_HAS_CUSTOM_TRACE | T_HAS_GCPTR)
# the lowest 16bits are used to store group member index
T_MEMBER_INDEX = 0xffff
T_IS_VARSIZE = 0x010000
T_HAS_GCPTR_IN_VARSIZE = 0x020000
T_IS_GCARRAY_OF_GCPTR = 0x040000
T_IS_WEAKREF = 0x080000
T_IS_RPYTHON_INSTANCE = 0x100000 # the type is a subclass of OBJECT
T_HAS_CUSTOM_TRACE = 0x200000
T_HAS_OLDSTYLE_FINALIZER = 0x400000
T_HAS_GCPTR = 0x1000000
T_KEY_MASK = intmask(0xFE000000) # bug detection only
T_KEY_VALUE = intmask(0x5A000000) # bug detection only
def _check_valid_type_info(p):
ll_assert(p.infobits & T_KEY_MASK == T_KEY_VALUE, "invalid type_id")
def _check_valid_type_info_varsize(p):
ll_assert(p.header.infobits & (T_KEY_MASK | T_IS_VARSIZE) ==
(T_KEY_VALUE | T_IS_VARSIZE),
"invalid varsize type_id")
def check_typeid(typeid):
# xxx does not perform a full check of validity, just checks for nonzero
ll_assert(llop.is_group_member_nonzero(lltype.Bool, typeid),
"invalid type_id")
def encode_type_shape(builder, info, TYPE, index):
"""Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
offsets = offsets_to_gc_pointers(TYPE)
infobits = index
info.ofstoptrs = builder.offsets2table(offsets, TYPE)
if len(offsets) > 0:
infobits |= T_HAS_GCPTR
#
fptrs = builder.special_funcptr_for_type(TYPE)
if fptrs:
if "destructor" in fptrs:
info.customfunc = fptrs["destructor"]
if "old_style_finalizer" in fptrs:
info.customfunc = fptrs["old_style_finalizer"]
infobits |= T_HAS_OLDSTYLE_FINALIZER
#
if not TYPE._is_varsize():
info.fixedsize = llarena.round_up_for_allocation(
llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
# note about round_up_for_allocation(): in the 'info' table
# we put a rounded-up size only for fixed-size objects. For
# varsize ones, the GC must anyway compute the size at run-time
# and round up that result.
else:
infobits |= T_IS_VARSIZE
varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
info.fixedsize = llmemory.sizeof(TYPE, 0)
if isinstance(TYPE, lltype.Struct):
ARRAY = TYPE._flds[TYPE._arrayfld]
ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
else:
assert isinstance(TYPE, lltype.GcArray)
ARRAY = TYPE
if (isinstance(ARRAY.OF, lltype.Ptr)
and ARRAY.OF.TO._gckind == 'gc'):
infobits |= T_IS_GCARRAY_OF_GCPTR
varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
assert isinstance(ARRAY, lltype.Array)
if ARRAY.OF != lltype.Void:
offsets = offsets_to_gc_pointers(ARRAY.OF)
else:
offsets = ()
if len(offsets) > 0:
infobits |= T_HAS_GCPTR_IN_VARSIZE | T_HAS_GCPTR
varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
if builder.is_weakref_type(TYPE):
infobits |= T_IS_WEAKREF
if is_subclass_of_object(TYPE):
infobits |= T_IS_RPYTHON_INSTANCE
info.infobits = infobits | T_KEY_VALUE
# ____________________________________________________________
class TypeLayoutBuilder(object):
can_add_new_types = True
can_encode_type_shape = True # set to False initially by the JIT
size_of_fixed_type_info = llmemory.sizeof(GCData.TYPE_INFO)
def __init__(self, GCClass, lltype2vtable=None):
self.GCClass = GCClass
self.lltype2vtable = lltype2vtable
self.make_type_info_group()
self.id_of_type = {} # {LLTYPE: type_id}
self.iseen_roots = identity_dict()
        # the following are lists of addresses of gc pointers living inside the
        # prebuilt structures. They should list all the locations that could
        # possibly point to a GC heap object.
        # this list contains pointers in GcStructs and GcArrays
        self.addresses_of_static_ptrs = []
        # this list contains pointers in raw Structs and Arrays
self.addresses_of_static_ptrs_in_nongc = []
# for debugging, the following list collects all the prebuilt
# GcStructs and GcArrays
self.all_prebuilt_gc = []
self._special_funcptrs = {}
self.offsettable_cache = {}
def make_type_info_group(self):
self.type_info_group = llgroup.group("typeinfo")
# don't use typeid 0, may help debugging
DUMMY = lltype.Struct("dummy", ('x', lltype.Signed))
dummy = lltype.malloc(DUMMY, immortal=True, zero=True)
self.type_info_group.add_member(dummy)
def get_type_id(self, TYPE):
try:
return self.id_of_type[TYPE]
except KeyError:
assert self.can_add_new_types
assert isinstance(TYPE, (lltype.GcStruct, lltype.GcArray))
# Record the new type_id description as a TYPE_INFO structure.
# build the TYPE_INFO structure
if not TYPE._is_varsize():
fullinfo = lltype.malloc(GCData.TYPE_INFO,
immortal=True, zero=True)
info = fullinfo
else:
fullinfo = lltype.malloc(GCData.VARSIZE_TYPE_INFO,
immortal=True, zero=True)
info = fullinfo.header
type_id = self.type_info_group.add_member(fullinfo)
if self.can_encode_type_shape:
encode_type_shape(self, info, TYPE, type_id.index)
else:
self._pending_type_shapes.append((info, TYPE, type_id.index))
# store it
self.id_of_type[TYPE] = type_id
self.add_vtable_after_typeinfo(TYPE)
return type_id
def add_vtable_after_typeinfo(self, TYPE):
# if gcremovetypeptr is False, then lltype2vtable is None and it
# means that we don't have to store the vtables in type_info_group.
if self.lltype2vtable is None:
return
# does the type have a vtable?
vtable = self.lltype2vtable.get(TYPE, None)
if vtable is not None:
# yes. check that in this case, we are not varsize
assert not TYPE._is_varsize()
vtable = lltype.normalizeptr(vtable)
self.type_info_group.add_member(vtable)
else:
# no vtable from lltype2vtable -- double-check to be sure
# that it's not a subclass of OBJECT.
assert not is_subclass_of_object(TYPE)
def get_info(self, type_id):
res = llop.get_group_member(GCData.TYPE_INFO_PTR,
self.type_info_group._as_ptr(),
type_id)
_check_valid_type_info(res)
return res
def get_info_varsize(self, type_id):
res = llop.get_group_member(GCData.VARSIZE_TYPE_INFO_PTR,
self.type_info_group._as_ptr(),
type_id)
_check_valid_type_info_varsize(res)
return res
def is_weakref_type(self, TYPE):
return TYPE == WEAKREF
def encode_type_shapes_now(self):
if not self.can_encode_type_shape:
self.can_encode_type_shape = True
for info, TYPE, index in self._pending_type_shapes:
encode_type_shape(self, info, TYPE, index)
del self._pending_type_shapes
def delay_encoding(self):
# used by the JIT
self._pending_type_shapes = []
self.can_encode_type_shape = False
def offsets2table(self, offsets, TYPE):
if len(offsets) == 0:
TYPE = lltype.Void # we can share all zero-length arrays
try:
return self.offsettable_cache[TYPE]
except KeyError:
cachedarray = lltype.malloc(GCData.OFFSETS_TO_GC_PTR,
len(offsets), immortal=True)
for i, value in enumerate(offsets):
cachedarray[i] = value
self.offsettable_cache[TYPE] = cachedarray
return cachedarray
def close_table(self):
# make sure we no longer add members to the type_info_group.
self.can_add_new_types = False
self.offsettable_cache = None
return self.type_info_group
def special_funcptr_for_type(self, TYPE):
if TYPE in self._special_funcptrs:
return self._special_funcptrs[TYPE]
fptr1, is_lightweight = self.make_destructor_funcptr_for_type(TYPE)
fptr2 = self.make_custom_trace_funcptr_for_type(TYPE)
result = {}
if fptr1:
if is_lightweight:
result["destructor"] = fptr1
else:
result["old_style_finalizer"] = fptr1
if fptr2:
result["custom_trace"] = fptr2
self._special_funcptrs[TYPE] = result
return result
def make_destructor_funcptr_for_type(self, TYPE):
# must be overridden for proper destructor support
return None, False
def make_custom_trace_funcptr_for_type(self, TYPE):
# must be overridden for proper custom tracer support
return None
def initialize_gc_query_function(self, gc):
gcdata = GCData(self.type_info_group)
gcdata.set_query_functions(gc)
return gcdata
def consider_constant(self, TYPE, value, gc):
if value is not lltype.top_container(value):
return
if value in self.iseen_roots:
return
self.iseen_roots[value] = True
if isinstance(TYPE, lltype.GcOpaqueType):
self.consider_constant(lltype.typeOf(value.container),
value.container, gc)
return
if isinstance(TYPE, (lltype.GcStruct, lltype.GcArray)):
typeid = self.get_type_id(TYPE)
hdr = gc.gcheaderbuilder.new_header(value)
adr = llmemory.cast_ptr_to_adr(hdr)
gc.init_gc_object_immortal(adr, typeid)
self.all_prebuilt_gc.append(value)
# The following collects the addresses of all the fields that have
# a GC Pointer type, inside the current prebuilt object. All such
# fields are potential roots: unless the structure is immutable,
# they could be changed later to point to GC heap objects.
adr = llmemory.cast_ptr_to_adr(value._as_ptr())
if TYPE._gckind == "gc":
if gc.prebuilt_gc_objects_are_static_roots or gc.DEBUG:
appendto = self.addresses_of_static_ptrs
else:
return
else:
appendto = self.addresses_of_static_ptrs_in_nongc
for a in gc_pointers_inside(value, adr, mutable_only=True):
appendto.append(a)
# ____________________________________________________________
#
# Helpers to discover GC pointers inside structures
def offsets_to_gc_pointers(TYPE):
offsets = []
if isinstance(TYPE, lltype.Struct):
for name in TYPE._names:
FIELD = getattr(TYPE, name)
if isinstance(FIELD, lltype.Array):
continue # skip inlined array
baseofs = llmemory.offsetof(TYPE, name)
suboffsets = offsets_to_gc_pointers(FIELD)
for s in suboffsets:
try:
knownzero = s == 0
except TypeError:
knownzero = False
if knownzero:
offsets.append(baseofs)
else:
offsets.append(baseofs + s)
# sanity check
#ex = lltype.Ptr(TYPE)._example()
#adr = llmemory.cast_ptr_to_adr(ex)
#for off in offsets:
# (adr + off)
elif isinstance(TYPE, lltype.Ptr) and TYPE.TO._gckind == 'gc':
offsets.append(0)
return offsets
def gc_pointers_inside(v, adr, mutable_only=False):
t = lltype.typeOf(v)
if isinstance(t, lltype.Struct):
skip = ()
if mutable_only:
if t._hints.get('immutable'):
return
if 'immutable_fields' in t._hints:
skip = t._hints['immutable_fields'].all_immutable_fields()
for n, t2 in t._flds.iteritems():
if isinstance(t2, lltype.Ptr) and t2.TO._gckind == 'gc':
if n not in skip:
yield adr + llmemory.offsetof(t, n)
elif isinstance(t2, (lltype.Array, lltype.Struct)):
for a in gc_pointers_inside(getattr(v, n),
adr + llmemory.offsetof(t, n),
mutable_only):
yield a
elif isinstance(t, lltype.Array):
if mutable_only and t._hints.get('immutable'):
return
if isinstance(t.OF, lltype.Ptr) and t.OF.TO._gckind == 'gc':
for i in range(len(v.items)):
yield adr + llmemory.itemoffsetof(t, i)
elif isinstance(t.OF, lltype.Struct):
for i in range(len(v.items)):
for a in gc_pointers_inside(v.items[i],
adr + llmemory.itemoffsetof(t, i),
mutable_only):
yield a
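# Illustrative example: for S = lltype.GcStruct('S', ('p', lltype.Ptr(T)),
# ('i', lltype.Signed)) with T a GC container, gc_pointers_inside(s, adr)
# yields exactly one address, adr + llmemory.offsetof(S, 'p').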
def zero_gc_pointers(p):
TYPE = lltype.typeOf(p).TO
zero_gc_pointers_inside(p, TYPE)
def zero_gc_pointers_inside(p, TYPE):
if isinstance(TYPE, lltype.Struct):
for name, FIELD in TYPE._flds.items():
if isinstance(FIELD, lltype.Ptr) and FIELD.TO._gckind == 'gc':
setattr(p, name, lltype.nullptr(FIELD.TO))
elif isinstance(FIELD, lltype.ContainerType):
zero_gc_pointers_inside(getattr(p, name), FIELD)
elif isinstance(TYPE, lltype.Array):
ITEM = TYPE.OF
if isinstance(ITEM, lltype.Ptr) and ITEM.TO._gckind == 'gc':
null = lltype.nullptr(ITEM.TO)
for i in range(p._obj.getlength()):
p[i] = null
elif isinstance(ITEM, lltype.ContainerType):
for i in range(p._obj.getlength()):
zero_gc_pointers_inside(p[i], ITEM)
def is_subclass_of_object(TYPE):
while isinstance(TYPE, lltype.GcStruct):
if TYPE is rclass.OBJECT:
return True
_, TYPE = TYPE._first_struct()
return False
########## weakrefs ##########
# framework: weakref objects are small structures containing only an address
WEAKREF = lltype.GcStruct("weakref", ("weakptr", llmemory.Address))
WEAKREFPTR = lltype.Ptr(WEAKREF)
sizeof_weakref= llmemory.sizeof(WEAKREF)
empty_weakref = lltype.malloc(WEAKREF, immortal=True)
empty_weakref.weakptr = llmemory.NULL
weakptr_offset = llmemory.offsetof(WEAKREF, "weakptr")
def ll_weakref_deref(wref):
wref = llmemory.cast_weakrefptr_to_ptr(WEAKREFPTR, wref)
return wref.weakptr
def convert_weakref_to(targetptr):
# Prebuilt weakrefs don't really need to be weak at all,
# but we need to emulate the structure expected by ll_weakref_deref().
if not targetptr:
return empty_weakref
else:
link = lltype.malloc(WEAKREF, immortal=True)
link.weakptr = llmemory.cast_ptr_to_adr(targetptr)
return link
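# Example of the emulation above (illustrative): calling convert_weakref_to()
# with a null target returns the shared, immortal empty_weakref, so every
# prebuilt dead weakref aliases a single structure whose weakptr is NULL.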
########## finalizers ##########
FIN_TRIGGER_FUNC = lltype.FuncType([], lltype.Void)
FIN_HANDLER_ARRAY = lltype.Array(('deque', llmemory.Address),
('trigger', lltype.Ptr(FIN_TRIGGER_FUNC)))
| mit | 4,813,011,452,356,338,000 | 38.919355 | 79 | 0.590797 | false |
inveniosoftware-contrib/invenio-workflows-ui | invenio_workflows_ui/serializers/__init__.py | 1 | 1527 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Record serialization."""
from __future__ import absolute_import, print_function
import json
from .response import (
workflow_responsify,
search_responsify,
action_responsify,
file_responsify
)
from .json import JSONSerializer
json_v1 = JSONSerializer()
json_serializer = workflow_responsify(json_v1, 'application/json')
json_search_serializer = search_responsify(json_v1, 'application/json')
json_action_serializer = action_responsify(json_v1, 'application/json')
json_file_serializer = file_responsify(json_v1, 'application/json')
| gpl-2.0 | -2,496,835,084,541,541,400 | 34.511628 | 76 | 0.75704 | false |
DesertBus/txircd | txircd/modules/cmd_list.py | 1 | 1917 | from twisted.words.protocols import irc
from txircd.modbase import Command
from txircd.utils import irc_lower
from fnmatch import fnmatch
class ListCommand(Command):
def onUse(self, user, data):
chancache = []
for channame, channel in self.ircd.channels.iteritems():
if data["chanfilter"] is not None:
filterMatch = False
for filterEntry in data["chanfilter"]:
if fnmatch(channame, filterEntry):
filterMatch = True
break
if not filterMatch:
continue
chancache.append({
"channel": channel,
"name": channel.name,
"users": len(channel.users),
"topic": channel.topic if channel.topic else ""
})
if "listdata" in self.ircd.actions:
for action in self.ircd.actions["listdata"]:
chancache = action(user, chancache)
if not chancache:
break
for cdata in chancache:
user.sendMessage(irc.RPL_LIST, cdata["name"], str(cdata["users"]), ":[{}] {}".format(cdata["channel"].modeString(user), cdata["topic"]))
user.sendMessage(irc.RPL_LISTEND, ":End of channel list")
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "LIST", ":You have not registered")
return {}
if params:
chanFilter = irc_lower(params[0]).split(",")
else:
chanFilter = None
return {
"user": user,
"chanfilter": chanFilter
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"LIST": ListCommand()
}
} | bsd-3-clause | -929,418,014,203,899,100 | 33.872727 | 148 | 0.523213 | false |
heiths/allura | ForgeSVN/forgesvn/tests/test_tasks.py | 1 | 2840 | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import shutil
import unittest
import os
import tg
import mock
from pylons import tmpl_context as c
from paste.deploy.converters import asbool
from nose.tools import assert_equal
from alluratest.controller import setup_basic_test
from allura import model as M
from allura.lib import helpers as h
from allura.tasks import repo_tasks
from forgesvn.tests import with_svn
class TestRepoTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
if asbool(tg.config.get('smtp.mock')):
self.smtp_mock = mock.patch('allura.lib.mail_util.smtplib.SMTP')
self.smtp_mock.start()
def tearDown(self):
if asbool(tg.config.get('smtp.mock')):
self.smtp_mock.stop()
@with_svn
def setup_with_tools(self):
h.set_context('test', 'src', neighborhood='Projects')
def test_init(self):
ns = M.Notification.query.find().count()
with mock.patch.object(c.app.repo, 'init') as f:
repo_tasks.init()
M.main_orm_session.flush()
assert f.called_with()
assert_equal(ns, M.Notification.query.find().count())
def test_clone(self):
ns = M.Notification.query.find().count()
with mock.patch.object(c.app.repo, 'init_as_clone') as f:
repo_tasks.clone('foo', 'bar', 'baz')
M.main_orm_session.flush()
f.assert_called_with('foo', 'bar', 'baz')
assert ns + 1 == M.Notification.query.find().count()
def test_refresh(self):
with mock.patch.object(c.app.repo, 'refresh') as f:
repo_tasks.refresh()
f.assert_called_with()
def test_uninstall(self):
with mock.patch.object(shutil, 'rmtree') as f:
repo_tasks.uninstall()
f.assert_called_with(
os.path.join(tg.config['scm.repos.root'], 'svn/p/test/src'),
ignore_errors=True)
| apache-2.0 | -6,506,497,155,874,890,000 | 33.634146 | 76 | 0.637676 | false |
akiokio/centralfitestoque | src/.pycharm_helpers/test_generator.py | 1 | 15858 | # encoding: utf-8
"""
Tests basic things that generator3 consists of.
NOTE: does not work in Jython 2.2 or IronPython 1.x, because pyparsing does not.
"""
import unittest
from generator3 import *
M = ModuleRedeclarator
import sys
IS_CLI = sys.platform == 'cli'
VERSION = sys.version_info[:2] # only (major, minor)
class TestRestoreFuncByDocComment(unittest.TestCase):
"""
Tries to restore function signatures by doc strings.
"""
def setUp(self):
self.m = ModuleRedeclarator(None, None, '/dev/null')
def testTrivial(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(a, b, c) ololo", "f", "f", None)
self.assertEquals(result, "f(a, b, c)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testTrivialNested(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(a, (b, c), d) ololo", "f", "f", None)
self.assertEquals(result, "f(a, (b, c), d)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testWithDefault(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(a, b, c=1) ololo", "f", "f", None)
self.assertEquals(result, "f(a, b, c=1)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testNestedWithDefault(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(a, (b1, b2), c=1) ololo", "f", "f", None)
self.assertEquals(result, "f(a, (b1, b2), c=1)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testAbstractDefault(self):
# like new(S, ...)
result, ret_sig, note = self.m.parseFuncDoc('blah f(a, b=obscuredefault) ololo', "f", "f", None)
self.assertEquals(result, "f(a, b=None)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testWithReserved(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(class, object, def) ololo", "f", "f", None)
self.assertEquals(result, "f(p_class, p_object, p_def)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testWithReservedOpt(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(foo, bar[, def]) ololo", "f", "f", None)
self.assertEquals(result, "f(foo, bar, p_def=None)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testPseudoNested(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(a, (b1, b2, ...)) ololo", "f", "f", None)
self.assertEquals(result, "f(a, b_tuple)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testImportLike(self):
# __import__
result, ret_sig, note = self.m.parseFuncDoc("blah f(name, globals={}, locals={}, fromlist=[], level=-1) ololo",
"f", "f", None)
self.assertEquals(result, "f(name, globals={}, locals={}, fromlist=[], level=-1)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testOptionalBracket(self):
# reduce
result, ret_sig, note = self.m.parseFuncDoc("blah f(function, sequence[, initial]) ololo", "f", "f", None)
self.assertEquals(result, "f(function, sequence, initial=None)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testWithMore(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(foo [, bar1, bar2, ...]) ololo", "f", "f", None)
self.assertEquals(result, "f(foo, *bar)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testNestedOptionals(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(foo [, bar1 [, bar2]]) ololo", "f", "f", None)
self.assertEquals(result, "f(foo, bar1=None, bar2=None)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testInnerTuple(self):
result, ret_sig, note = self.m.parseFuncDoc("blah load_module(name, file, filename, (suffix, mode, type)) ololo"
, "load_module", "load_module", None)
self.assertEquals(result, "load_module(name, file, filename, (suffix, mode, type))")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testIncorrectInnerTuple(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(a, (b=1, c=2)) ololo", "f", "f", None)
self.assertEquals(result, "f(a, p_b)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testNestedOnly(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f((foo, bar, baz)) ololo", "f", "f", None)
self.assertEquals(result, "f((foo, bar, baz))")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testTwoPseudoNested(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f((a1, a2, ...), (b1, b2,..)) ololo", "f", "f", None)
self.assertEquals(result, "f(a_tuple, b_tuple)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testTwoPseudoNestedWithLead(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(x, (a1, a2, ...), (b1, b2,..)) ololo", "f", "f", None)
self.assertEquals(result, "f(x, a_tuple, b_tuple)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testPseudoNestedRange(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f((a1, ..., an), b) ololo", "f", "f", None)
self.assertEquals(result, "f(a_tuple, b)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testIncorrectList(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(x, y, 3, $) ololo", "f", "f", None)
self.assertEquals(result, "f(x, y, *args, **kwargs)")
self.assertEquals(note, M.SIG_DOC_UNRELIABLY)
def testIncorrectStarredList(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(x, *y, 3, $) ololo", "f", "f", None)
self.assertEquals(result, "f(x, *y, **kwargs)")
self.assertEquals(note, M.SIG_DOC_UNRELIABLY)
def testClashingNames(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(x, y, (x, y), z) ololo", "f", "f", None)
self.assertEquals(result, "f(x, y, (x_1, y_1), z)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testQuotedParam(self):
# like __delattr__
result, ret_sig, note = self.m.parseFuncDoc("blah getattr('name') ololo", "getattr", "getattr", None)
self.assertEquals(result, "getattr(name)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testQuotedParam2(self):
# like __delattr__, too
result, ret_sig, note = self.m.parseFuncDoc('blah getattr("name") ololo', "getattr", "getattr", None)
self.assertEquals(result, "getattr(name)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testOptionalTripleDot(self):
# like new(S, ...)
result, ret_sig, note = self.m.parseFuncDoc('blah f(foo, ...) ololo', "f", "f", None)
self.assertEquals(result, "f(foo, *more)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testUnderscoredName(self):
        # underscores in parameter names must be preserved
result, ret_sig, note = self.m.parseFuncDoc('blah f(foo_one, _bar_two) ololo', "f", "f", None)
self.assertEquals(result, "f(foo_one, _bar_two)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testDashedName(self):
        # dashes in parameter names must be converted to underscores
result, ret_sig, note = self.m.parseFuncDoc('blah f(something-else, for-a-change) ololo', "f", "f", None)
self.assertEquals(result, "f(something_else, for_a_change)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testSpacedDefault(self):
        # spaces around '=' in a default must be tolerated
result, ret_sig, note = self.m.parseFuncDoc('blah f(a, b = 1) ololo', "f", "f", None)
self.assertEquals(result, "f(a, b=1)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testSpacedName(self):
        # words separated by spaces merge into a single underscored name
result, ret_sig, note = self.m.parseFuncDoc('blah femme(skirt or pants) ololo', "femme", "femme", None)
self.assertEquals(result, "femme(skirt_or_pants)")
self.assertEquals(note, M.SIG_DOC_NOTE)
class TestRestoreMethodByDocComment(unittest.TestCase):
"""
Restoring with a class name set
"""
def setUp(self):
self.m = ModuleRedeclarator(None, None, '/dev/null')
def testPlainMethod(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(self, foo, bar) ololo", "f", "f", "SomeClass")
self.assertEquals(result, "f(self, foo, bar)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testInsertSelf(self):
result, ret_sig, note = self.m.parseFuncDoc("blah f(foo, bar) ololo", "f", "f", "SomeClass")
self.assertEquals(result, "f(self, foo, bar)")
self.assertEquals(note, M.SIG_DOC_NOTE)
class TestAnnotatedParameters(unittest.TestCase):
"""
    f(foo: int) and friends; in doc comments, these happen in the 2.x world, too.
"""
def setUp(self):
self.m = ModuleRedeclarator(None, None, '/dev/null')
def testMixed(self):
result, ret_sig, note = self.m.parseFuncDoc('blah f(i: int, foo) ololo', "f", "f", None)
self.assertEquals(result, "f(i, foo)")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testNested(self):
result, ret_sig, note = self.m.parseFuncDoc('blah f(i: int, (foo: bar, boo: Decimal)) ololo', "f", "f", None)
self.assertEquals(result, "f(i, (foo, boo))")
self.assertEquals(note, M.SIG_DOC_NOTE)
def testSpaced(self):
result, ret_sig, note = self.m.parseFuncDoc('blah f(i: int, j :int, k : int) ololo', "f", "f", None)
self.assertEquals(result, "f(i, j, k)")
self.assertEquals(note, M.SIG_DOC_NOTE)
if not IS_CLI and VERSION < (3, 0):
class TestInspect(unittest.TestCase):
"""
See that inspect actually works if needed
"""
def setUp(self):
self.m = ModuleRedeclarator(None, None, '/dev/null')
def testSimple(self):
def target(a, b, c=1, *d, **e):
return a, b, c, d, e
result = self.m.restoreByInspect(target)
self.assertEquals(result, "(a, b, c=1, *d, **e)")
def testNested(self):
# NOTE: Py3k can't handle nested tuple args, thus we compile it conditionally
code = (
"def target(a, (b, c), d, e=1):\n"
" return a, b, c, d, e"
)
namespace = {}
eval(compile(code, "__main__", "single"), namespace)
target = namespace['target']
result = self.m.restoreByInspect(target)
self.assertEquals(result, "(a, (b, c), d, e=1)")
class _DiffPrintingTestCase(unittest.TestCase):
def assertEquals(self, etalon, specimen, msg=None):
if type(etalon) == str and type(specimen) == str and etalon != specimen:
print("%s" % "\n")
# print side by side
ei = iter(etalon.split("\n"))
si = iter(specimen.split("\n"))
if VERSION < (3, 0):
si_next = si.next
else:
si_next = si.__next__
for el in ei:
try: sl = si_next()
except StopIteration: break # I wish the exception would just work as break
if el != sl:
print("!%s" % el)
print("?%s" % sl)
else:
print(">%s" % sl)
# one of the iters might not end yet
for el in ei:
print("!%s" % el)
for sl in si:
print("?%s" % sl)
raise self.failureException(msg)
else:
self.failUnlessEqual(etalon, specimen, msg)
class TestSpecialCases(unittest.TestCase):
"""
Tests cases where predefined overrides kick in
"""
def setUp(self):
import sys
if VERSION >= (3, 0):
import builtins as the_builtins
self.builtins_name = the_builtins.__name__
else:
import __builtin__ as the_builtins
self.builtins_name = the_builtins.__name__
self.m = ModuleRedeclarator(the_builtins, None, '/dev/null', doing_builtins=True)
def _testBuiltinFuncName(self, func_name, expected):
class_name = None
self.assertTrue(self.m.isPredefinedBuiltin(self.builtins_name, class_name, func_name))
result, note = self.m.restorePredefinedBuiltin(class_name, func_name)
self.assertEquals(result, func_name + expected)
self.assertEquals(note, "known special case of " + func_name)
def testZip(self):
self._testBuiltinFuncName("zip", "(seq1, seq2, *more_seqs)")
def testRange(self):
self._testBuiltinFuncName("range", "(start=None, stop=None, step=None)")
def testFilter(self):
self._testBuiltinFuncName("filter", "(function_or_none, sequence)")
# we could want to test a class without __dict__, but it takes a C extension to really create one.
class TestDataOutput(_DiffPrintingTestCase):
"""
Tests for sanity of output of data members
"""
def setUp(self):
self.m = ModuleRedeclarator(self, None, 4) # Pass anything with __dict__ as module
def checkFmtValue(self, data, expected):
buf = Buf(self.m)
self.m.fmtValue(buf.out, data, 0)
result = "".join(buf.data).strip()
self.assertEquals(expected, result)
def testRecursiveDict(self):
data = {'a': 1}
data['b'] = data
expected = "\n".join((
"{",
" 'a': 1,",
" 'b': '<value is a self-reference, replaced by this string>',",
"}"
))
self.checkFmtValue(data, expected)
def testRecursiveList(self):
data = [1]
data.append(data)
data.append(2)
data.append([10, data, 20])
expected = "\n".join((
"[",
" 1,",
" '<value is a self-reference, replaced by this string>',",
" 2,",
" [",
" 10,",
" '<value is a self-reference, replaced by this string>',",
" 20,",
" ],",
"]"
))
self.checkFmtValue(data, expected)
if not IS_CLI:
class TestReturnTypes(unittest.TestCase):
"""
    Tests that return value literals are restored from doc comments
"""
def setUp(self):
self.m = ModuleRedeclarator(None, None, 4)
def checkRestoreFunction(self, doc, expected):
spec, ret_literal, note = self.m.parseFuncDoc(doc, "foo", "foo", None)
self.assertEqual(expected, ret_literal, "%r != %r; spec=%r, note=%r" % (expected, ret_literal, spec, note))
pass
def testSimpleArrowInt(self):
doc = "This is foo(bar) -> int"
self.checkRestoreFunction(doc, "0")
def testSimpleArrowList(self):
doc = "This is foo(bar) -> list"
self.checkRestoreFunction(doc, "[]")
def testArrowListOf(self):
doc = "This is foo(bar) -> list of int"
self.checkRestoreFunction(doc, "[]")
# def testArrowTupleOf(self):
# doc = "This is foo(bar) -> (a, b,..)"
# self.checkRestoreFunction(doc, "()")
def testSimplePrefixInt(self):
doc = "This is int foo(bar)"
self.checkRestoreFunction(doc, "0")
def testSimplePrefixObject(self):
doc = "Makes an instance: object foo(bar)"
self.checkRestoreFunction(doc, "object()")
if VERSION < (3, 0):
# TODO: we only support it in 2.x; must update when we do it in 3.x, too
def testSimpleArrowFile(self):
doc = "Opens a file: foo(bar) -> file"
self.checkRestoreFunction(doc, "file('/dev/null')")
def testUnrelatedPrefix(self):
doc = """
Consumes a list of int
foo(bar)
"""
self.checkRestoreFunction(doc, None)
###
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | 9,880,757,391,117,172 | 37.397094 | 120 | 0.570942 | false |
razco/music_similarity | midi/midi_editting.py | 1 | 5807 | '''
Created on Aug 26, 2017
@author: Raz
'''
import mido
import numpy as np
def midifile_to_dict(mid):
tracks = []
for track in mid.tracks:
tracks.append([vars(msg).copy() for msg in track])
return {
'ticks_per_beat': mid.ticks_per_beat,
'tracks': tracks,
}
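# A minimal usage sketch (assumes 'AUD_DW0146.mid' is present, as in test()
# below):
#
#   mid = mido.MidiFile('AUD_DW0146.mid')
#   info = midifile_to_dict(mid)
#   print info['ticks_per_beat'], len(info['tracks'])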
def test():
aaa = mido.MidiFile('AUD_DW0146.mid')
aaa.tracks
print 'buya'
mid_dict = midifile_to_dict(aaa)
track_data = np.array(mid_dict['tracks'][0])
notes_inds = np.flatnonzero(np.array(['note' in mid_dict['tracks'][0][idx] for idx in xrange(len(track_data))]))
notes_data = track_data[notes_inds]
outfile = mido.MidiFile()
track = mido.MidiTrack()
outfile.tracks.append(track)
notes_inds_to_keep = np.array(range(10, 50, 1)) # inds in the levenshtein mat that are similar
orig_notes_inds_to_keep = set(notes_inds[notes_inds_to_keep])
for idx in xrange(len(track_data) - 1, -1, -1):
msg = aaa.tracks[0][idx]
if 'note' in msg.type and idx not in orig_notes_inds_to_keep:
aaa.tracks[0].pop(idx)
aaa.save('part_melody.mid')
def run(midi_file, all_midi_notes_inds, midi_series_len, midi_start_note, track, channel):
    # Exports the selected run of notes from the specified track and channel of the MIDI file.
# find start idx and end idx
min_idx = np.inf
max_idx = -np.inf
midi_notes_inds = all_midi_notes_inds[track][channel]
print 'num inds:', len(midi_notes_inds)
for note_idx in xrange(midi_start_note, midi_start_note + midi_series_len):
idxs = midi_notes_inds[note_idx]
min_idx = min(min_idx, min(idxs))
max_idx = max(max_idx, min(idxs)) # taking the min because it's the "note_on"
orig_note_ind_to_keep_start = min_idx
orig_note_ind_to_keep_end = max_idx
aaa = mido.MidiFile(midi_file)
notes_off_missed = []
for note_inds in midi_notes_inds[midi_start_note: midi_start_note + midi_series_len]:
curr_note_off = max(note_inds)
if curr_note_off > orig_note_ind_to_keep_end:
# max(note_inds) is the note off message of the note
notes_off_missed.append(curr_note_off)
if len(notes_off_missed) > 0:
# if there are notes off that outside orig_note_ind_to_keep_end,
# increase their time, so that when all the other messages that
# are not in the valid range are removed, the time remains ok.
time_to_add_to_missed_note_off = 0
max_note_off_missed = max(notes_off_missed)
notes_off_missed = set(notes_off_missed)
for idx in xrange(orig_note_ind_to_keep_end + 1, max_note_off_missed + 1):
msg = aaa.tracks[track][idx]
if idx in notes_off_missed:
msg.time += time_to_add_to_missed_note_off
time_to_add_to_missed_note_off = 0
else:
time_to_add_to_missed_note_off += msg.time
for idx in xrange(len(aaa.tracks[track]) - 1, -1, -1):
msg = aaa.tracks[track][idx]
if idx in notes_off_missed:
continue
if 'note' in msg.type and (
idx < orig_note_ind_to_keep_start or
idx > orig_note_ind_to_keep_end
):
# if 'note' in msg.type and idx not in rel_notes_inds:
aaa.tracks[track].pop(idx)
elif 'note' in msg.type and msg.channel != channel:
for extra_time_idx in xrange(idx + 1, len(aaa.tracks[track])):
if 'note' in msg.type and (
orig_note_ind_to_keep_start <= extra_time_idx
<= orig_note_ind_to_keep_end
):
aaa.tracks[track][extra_time_idx].time += msg.time
break
aaa.tracks[track].pop(idx)
for track_idx in xrange(len(aaa.tracks) - 1, -1, -1):
if track_idx != track:
aaa.tracks.pop(track_idx)
aaa.save('part_melody_%s' % midi_file.split('/')[-1])
# running shift0:
# score: 561.000000
# running shift1:
# score: 719.000000
# running shift2:
# score: 707.000000
# running shift3:
# score: 691.000000
# running shift4:
# score: 749.000000
# running shift5:
# score: 671.000000
# running shift6:
# score: 805.000000
# running shift7:
# score: 731.000000
# running shift8:
# score: 763.000000
# running shift9:
# score: 789.000000
# running shift10:
# score: 789.000000
# running shift11:
# score: 849.000000
# running window...
# best match with window: 38.000000 at music1 index 98, and music2 index 393
def get_instrument_length(notes_inds, track, channel):
return len(notes_inds[track][channel])
def main():
import midifile_to_notes
# -1 "midi_files/Frere Jacques.mid" -2 "midi_files/Mahler Symphony No.1 Mov.3.mid"
midi_file1 = 'midi_files/Frere Jacques.mid'
# midi_file1 = 'midi_files/the carpenters - please mr postman.mid'
# midi_file1 = 'midi_files/chaka_khan_aint_nobody.mid'
# midi_file1 = 'midi_files/sting - shape of my heart.mid'
# midi_file1 = 'midi_files/Feels - pharrel williams.mid'
_, midi_notes_inds1 = midifile_to_notes.extract_notes(midi_file1)
track = 1
channel = 0
midi_start_note1 = 1
midi_series_len1 = 22
run(midi_file1, midi_notes_inds1, midi_series_len1, midi_start_note1, track, channel)
midi_file2 = 'midi_files/Mahler Symphony No.1 Mov.3.mid'
# midi_file2 = 'midi_files/portugal the man - feel it still.mid'
# midi_file2 = 'midi_files/felix_jaehn_aint_nobody.mid'
# midi_file2 = 'midi_files/Sugababes - Shape.mid'
_, midi_notes_inds2 = midifile_to_notes.extract_notes(midi_file2)
track = 5
channel = 4
midi_start_note2 = 0
midi_series_len2 = 27
run(midi_file2, midi_notes_inds2, midi_series_len2, midi_start_note2, track, channel)
if __name__ == '__main__':
main()
| mit | -5,851,520,322,637,116,000 | 32.959064 | 116 | 0.61667 | false |
futuresimple/triggear | app/main.py | 1 | 2415 | import os
import motor.motor_asyncio
from aiohttp import web
from app.clients.github_client import GithubClient
from app.clients.jenkinses_clients import JenkinsesClients
from app.clients.mongo_client import MongoClient
from app.config.triggear_config import TriggearConfig
from app.controllers.github_controller import GithubController
from app.controllers.health_controller import HealthController
from app.controllers.pipeline_controller import PipelineController
from app.middlewares.authentication_middleware import AuthenticationMiddleware
from app.middlewares.exceptions_middleware import exceptions
from app.routes import Routes
from app.triggear_heart import TriggearHeart
def main() -> None:
app_config = TriggearConfig()
motor_mongo = motor.motor_asyncio.AsyncIOMotorClient(os.environ.get('MONGO_URL'))
gh_client = GithubClient(app_config.github_token)
mongo_client = MongoClient(mongo=motor_mongo)
jenkinses_clients = JenkinsesClients(app_config)
triggear_heart = TriggearHeart(mongo_client, gh_client, jenkinses_clients)
github_controller = GithubController(triggear_heart=triggear_heart, github_client=gh_client, config=app_config)
pipeline_controller = PipelineController(github_client=gh_client, mongo_client=mongo_client)
health_controller = HealthController()
authentication_middleware = AuthenticationMiddleware(config=app_config)
app = web.Application(middlewares=(authentication_middleware.authentication, exceptions))
app.router.add_post(Routes.GITHUB.route, github_controller.handle_hook)
app.router.add_post(Routes.REGISTER.route, pipeline_controller.handle_register)
app.router.add_post(Routes.STATUS.route, pipeline_controller.handle_status)
app.router.add_post(Routes.COMMENT.route, pipeline_controller.handle_comment)
app.router.add_get(Routes.HEALTH.route, health_controller.handle_health_check)
app.router.add_get(Routes.MISSING.route, pipeline_controller.handle_missing)
app.router.add_post(Routes.DEREGISTER.route, pipeline_controller.handle_deregister)
app.router.add_post(Routes.CLEAR.route, pipeline_controller.handle_clear)
app.router.add_post(Routes.DEPLOYMENT.route, pipeline_controller.handle_deployment)
app.router.add_post(Routes.DEPLOYMENT_STATUS.route, pipeline_controller.handle_deployment_status)
web.run_app(app)
if __name__ == "__main__": # pragma: no cover
main()
| mit | 1,916,583,991,106,913,000 | 48.285714 | 115 | 0.797516 | false |
EpicScriptTime/update-wrapper | updatewrapper/utils/display.py | 1 | 2279 | import termcolor
def ask_yes_no(question, default=True, spacing=True):
"""
    Ask a yes/no question via input() and return the answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be True (the default), False or None (meaning
    an answer is required of the user).

    The "answer" return value is True or False.
"""
valid = {'yes': True, 'y': True, 'ye': True, 'no': False, 'n': False}
if default is None:
prompt = ' [y/n] '
elif default is True:
prompt = ' [Y/n] '
elif default is False:
prompt = ' [y/N] '
else:
raise ValueError('invalid default answer: `%s`' % default)
while True:
choice = input(question + prompt).strip().lower()
if spacing:
print() # Required for spacing
if default is not None and choice == '':
return default
elif choice in valid.keys():
return valid[choice]
else:
print('Please respond with `yes` or `no`.')
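# Example usage (hypothetical question text); pressing <Enter> returns the
# default, so this treats a bare Enter as a yes:
#
#   if ask_yes_no('Run all updates?'):
#       print_info('Updating...')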
def print_banner():
print(" _ _ _ _____ _ ")
print(" _ _ _ __ __| | __ _| |_ ___ __ ___ __ __ _ _ __ _ __ ___ _ __ __ _/ | |___ / / |")
print("| | | | '_ \ / _` |/ _` | __/ _ \____\ \ /\ / / '__/ _` | '_ \| '_ \ / _ \ '__| \ \ / / | |_ \ | |")
print("| |_| | |_) | (_| | (_| | || __/_____\ V V /| | | (_| | |_) | |_) | __/ | \ V /| |_ ___) || |")
print(" \__,_| .__/ \__,_|\__,_|\__\___| \_/\_/ |_| \__,_| .__/| .__/ \___|_| \_/ |_(_)____(_)_|")
print(" |_| |_| |_| ")
print()
def print_info(text):
termcolor.cprint(text, 'cyan', attrs=['bold'])
print()
def print_notice(text):
termcolor.cprint(text, 'magenta', attrs=['bold'])
print()
def print_success(text):
termcolor.cprint(text, 'green', attrs=['bold'])
print()
def print_warning(text):
termcolor.cprint(text, 'yellow', attrs=['bold'])
print()
def print_error(text):
termcolor.cprint(text, 'red', attrs=['bold'])
print()
| mit | 3,996,592,966,298,821,000 | 30.652778 | 113 | 0.425186 | false |
frosty308/webapps | forms.py | 1 | 8021 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2017-2018 Alan Frost, All rights reserved.
Implementation of user forms
"""
from flask_wtf import FlaskForm
from wtforms import (BooleanField, HiddenField, PasswordField, StringField, SubmitField,
FileField, ValidationError)
from wtforms.validators import Length, InputRequired, Email, EqualTo, Regexp
from utils import check_name, check_password, check_username, check_phone
#import phonenumbers
#https://github.com/daviddrysdale/python-phonenumbers
class UserNameValidator(object):
""" User name validator, unicode except for control, punctuation, separator or symbols
"""
def __init__(self, message=None):
if not message:
message = u'Invalid user name'
self.message = message
def __call__(self, form, field):
length = field.data and len(field.data) or 0
if length == 0:
pass
elif check_username(field.data):
pass
else:
raise ValidationError(self.message)
class NameValidator(object):
""" Display name validator, unicode except for control, symbols and non-space separator
"""
def __init__(self, message=None):
if not message:
            message = u'Invalid name'
self.message = message
def __call__(self, form, field):
length = field.data and len(field.data) or 0
if length == 0:
pass
elif check_name(field.data):
pass
else:
raise ValidationError(self.message)
class PasswordValidator(object):
""" Simple password validator for at least 8 characters with a lower, upper and digit
"""
def __init__(self, message=None):
if not message:
message = u'Password must be at least 8 characters, with UPPER/lowercase and numbers'
self.message = message
def __call__(self, form, field):
length = field.data and len(field.data) or 0
if length == 0:
pass
elif check_password(field.data):
pass
else:
raise ValidationError(self.message)
class PhoneNumberValidator(object):
""" Phone number validator
"""
def __init__(self, message=None):
if not message:
message = u'* Invalid phone number'
self.message = message
def __call__(self, form, field):
length = field.data and len(field.data) or 0
if length == 0:
pass
elif check_phone(field.data):
pass
else:
raise ValidationError(self.message)
#else:
# try:
# input_number = phonenumbers.parse(field.data)
# if not (phonenumbers.is_valid_number(input_number)):
# raise ValidationError(self.message)
# except:
# input_number = phonenumbers.parse("+1"+field.data)
# if not (phonenumbers.is_valid_number(input_number)):
# raise ValidationError(self.message)
class LoginForm(FlaskForm):
""" Login
"""
email = StringField('Email', validators=[
InputRequired(),
Email()])
password = PasswordField('Password', validators=[
InputRequired(),
Length(8, 64)])
remember = BooleanField('Keep me logged in')
submit = SubmitField('Login')
class InviteForm(FlaskForm):
""" Invite a new user
"""
email = StringField('Email', validators=[
InputRequired(),
Email()])
phone = StringField('Phone', validators=[
PhoneNumberValidator()])
user = StringField('Name', validators=[InputRequired(), NameValidator()])
submit = SubmitField('Invite')
class AcceptForm(FlaskForm):
""" Accept invitation with link token, temporary password and code
"""
action = HiddenField('Action')
email = HiddenField('Email')
token = HiddenField('Token')
user = StringField('Name', validators=[InputRequired(), NameValidator()])
phone = StringField('Phone', validators=[PhoneNumberValidator()])
oldpassword = PasswordField('Password', validators=[
InputRequired(),
PasswordValidator()])
password = PasswordField('New Password', validators=[
InputRequired(),
EqualTo('confirm', message='Passwords must match')
])
code = StringField('Code', validators=[InputRequired(), Regexp(r'^(\d{6,8})$')])
confirm = PasswordField('Confirm password', validators=[InputRequired()])
submit = SubmitField('Accept Invitation')
class ConfirmForm(FlaskForm):
""" Confirm account with token
"""
action = HiddenField('Action')
email = HiddenField('Email')
token = HiddenField('Token')
code = StringField('Code', validators=[InputRequired(), Regexp(r'^(\d{6,8})$')])
submit = SubmitField('Confirm Account')
class VerifyForm(FlaskForm):
""" Verify 2FA code
"""
action = HiddenField('Action')
email = HiddenField('Email')
phone = HiddenField('Phone')
code = StringField('Code', validators=[InputRequired(), Regexp(r'^(\d{6,8})$')])
submit = SubmitField('Verify Code')
class UploadForm(FlaskForm):
""" Upload an artistic work
"""
file = FileField('Filename')
title = StringField('Title', validators=[Length(2, 128)])
artform = StringField('Artform', validators=[Length(0, 128)])
created = StringField('Date', validators=[Length(6, 32)])
dimensions = StringField('Dimensions', validators=[Length(0, 64)])
tags = StringField('Tags', validators=[Length(0, 128)])
submit = SubmitField('Upload Image')
class ResendForm(FlaskForm):
""" Resend a confirmtion or verification token
"""
action = HiddenField('Action')
email = StringField('Email Address', validators=[
InputRequired(),
Email()])
phone = StringField('phone', validators=[PhoneNumberValidator()])
submit = SubmitField('Get New Code')
class RegistrationForm(FlaskForm):
""" Register a new account
"""
user = StringField('Name', validators=[InputRequired(), NameValidator()])
email = StringField('Email Address', validators=[
InputRequired(),
Email()])
phone = StringField('Phone', validators=[PhoneNumberValidator()])
password = PasswordField('New password', validators=[
InputRequired(),
PasswordValidator(),
EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Confirm password', validators=[InputRequired()])
token = StringField('Token', validators=[InputRequired()])
class ChangePasswordForm(FlaskForm):
""" Change password
"""
email = HiddenField('Email')
oldpassword = PasswordField('Password', validators=[
InputRequired(),
Length(8, 64)])
password = PasswordField('New Password', validators=[
InputRequired(),
PasswordValidator(),
EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Confirm password', validators=[InputRequired()])
submit = SubmitField('Change Password')
class ForgotPasswordForm(FlaskForm):
""" Request a password reset
"""
email = StringField('Email Address', validators=[
InputRequired(),
Email()])
submit = SubmitField('Request Password Reset')
class ResetPasswordForm(FlaskForm):
""" Reset a password with link token, temporary password and code
"""
email = HiddenField('Email')
action = HiddenField('Action')
token = HiddenField('Token')
oldpassword = PasswordField('Password', validators=[
InputRequired(),
Length(8, 64)])
password = PasswordField('New Password', validators=[
InputRequired(),
PasswordValidator(),
EqualTo('confirm', message='Passwords must match')
])
code = StringField('Code', validators=[InputRequired(), Regexp(r'^(\d{6,8})$')])
confirm = PasswordField('Confirm password', validators=[InputRequired()])
submit = SubmitField('Reset Password')
| gpl-3.0 | -5,773,261,079,788,126,000 | 33.424893 | 97 | 0.63234 | false |
apache/incubator-singa | examples/autograd/resnet.py | 1 | 8531 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# the code is modified from
# https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
from singa import autograd
from singa import tensor
from singa import device
from singa import opt
import numpy as np
from tqdm import trange
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return autograd.Conv2d(
in_planes,
out_planes,
kernel_size=3,
stride=stride,
padding=1,
bias=False,
)
class BasicBlock(autograd.Layer):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = autograd.BatchNorm2d(planes)
self.conv2 = conv3x3(planes, planes)
self.bn2 = autograd.BatchNorm2d(planes)
self.downsample = downsample
self.stride = stride
def __call__(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = autograd.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out = autograd.add(out, residual)
out = autograd.relu(out)
return out
class Bottleneck(autograd.Layer):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = autograd.Conv2d(
inplanes, planes, kernel_size=1, bias=False
)
self.bn1 = autograd.BatchNorm2d(planes)
self.conv2 = autograd.Conv2d(
planes, planes, kernel_size=3, stride=stride, padding=1, bias=False
)
self.bn2 = autograd.BatchNorm2d(planes)
self.conv3 = autograd.Conv2d(
planes, planes * self.expansion, kernel_size=1, bias=False
)
self.bn3 = autograd.BatchNorm2d(planes * self.expansion)
self.downsample = downsample
self.stride = stride
def __call__(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = autograd.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = autograd.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = autograd.add(out, residual)
out = autograd.relu(out)
return out
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152']
class ResNet(autograd.Layer):
def __init__(self, block, layers, num_classes=1000):
self.inplanes = 64
super(ResNet, self).__init__()
self.conv1 = autograd.Conv2d(
3, 64, kernel_size=7, stride=2, padding=3, bias=False
)
self.bn1 = autograd.BatchNorm2d(64)
self.maxpool = autograd.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.avgpool = autograd.AvgPool2d(7, stride=1)
self.fc = autograd.Linear(512 * block.expansion, num_classes)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
conv = autograd.Conv2d(
self.inplanes,
planes * block.expansion,
kernel_size=1,
stride=stride,
bias=False,
)
bn = autograd.BatchNorm2d(planes * block.expansion)
def downsample(x):
return bn(conv(x))
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
def forward(x):
for layer in layers:
x = layer(x)
return x
return forward
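    # Note: _make_layer returns a plain closure rather than a container
    # layer; the blocks captured in `layers` are applied to x in order
    # each time the closure is called from __call__ below.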
def __call__(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = autograd.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = autograd.flatten(x)
x = self.fc(x)
return x
def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
return model
def resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
return model
def resnet50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
return model
def resnet101(pretrained=False, **kwargs):
"""Constructs a ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
return model
def resnet152(pretrained=False, **kwargs):
"""Constructs a ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
return model
if __name__ == "__main__":
model = resnet50()
print("Start intialization............")
dev = device.create_cuda_gpu_on(0)
niters = 100
batch_size = 32
IMG_SIZE = 224
sgd = opt.SGD(lr=0.1, momentum=0.9, weight_decay=1e-5)
tx = tensor.Tensor((batch_size, 3, IMG_SIZE, IMG_SIZE), dev)
ty = tensor.Tensor((batch_size,), dev, tensor.int32)
autograd.training = True
x = np.random.randn(batch_size, 3, IMG_SIZE, IMG_SIZE).astype(np.float32)
y = np.random.randint(0, 1000, batch_size, dtype=np.int32)
tx.copy_from_numpy(x)
ty.copy_from_numpy(y)
import time
dev.Sync()
start = time.time()
fd = 0
softmax = 0
update = 0
with trange(niters) as t:
for _ in t:
dev.Sync()
tick = time.time()
x = model(tx)
dev.Sync()
fd += time.time() - tick
tick = time.time()
loss = autograd.softmax_cross_entropy(x, ty)
dev.Sync()
softmax += time.time() - tick
for p, g in autograd.backward(loss):
                dev.Sync()  # this "for" loop runs many times, so syncing here can slow things down
tick = time.time()
sgd.update(p, g)
                dev.Sync()  # this "for" loop runs many times, so syncing here can slow things down
update += time.time() - tick
dev.Sync()
end = time.time()
throughput = float(niters * batch_size) / (end - start)
print("Throughput = {} per second".format(throughput))
titer = (end - start) / float(niters)
tforward = float(fd) / float(niters)
tsoftmax = float(softmax) / float(niters)
tbackward = titer - tforward - tsoftmax
tsgd = float(update) / float(niters)
print("Total={}, forward={}, softmax={}, backward={}, sgd={}".format(titer, tforward, tsoftmax, tbackward, tsgd))
| apache-2.0 | 8,992,445,720,500,284,000 | 28.519031 | 117 | 0.593717 | false |
bxlab/HiFive_Paper | Scripts/Figures/fivec_hicpipe_algorithm_comparison.py | 1 | 14324 | #!/usr/bin/env python
import sys
import os
import numpy
from pyx import canvas, text, path, graph, color, trafo, unit, attr, deco, style, bitmap
import h5py
import hifive
unit.set(defaultunit="cm")
text.set(mode="latex")
text.preamble(r"\usepackage{times}")
text.preamble(r"\usepackage{sansmath}")
text.preamble(r"\sansmath")
text.preamble(r"\renewcommand*\familydefault{\sfdefault}")
painter = graph.axis.painter.regular( labeldist=0.1, labelattrs=[text.size(-3)], titleattrs=[text.size(-3)] )
methods = ['Raw', 'Prob', 'Exp', 'Bin', 'Exp-KR']
method_colors = {
'Prob':color.cmyk.Black,
'Exp':color.cmyk.CadetBlue,
'Bin':color.cmyk.MidnightBlue,
'Raw':color.cmyk.Dandelion,
'Exp-KR':color.cmyk.Mahogany,
}
def main():
out_fname = sys.argv[1]
basedir = '/'.join(os.path.dirname(os.path.realpath(__file__)).split('/')[:-2])
hic_phillips_fname1 = "%s/Data/HiC/HiCPipe/HM/mm9_ESC_NcoI_Phillips.hch" % basedir
hic_phillips_fname2 = "%s/Data/HiC/HiCPipe/HM/mm9_ESC_HindIII_Phillips.hch" % basedir
hic_nora_fname1 = "%s/Data/HiC/HiCPipe/HM/mm9_ESC_NcoI_Nora.hch" % basedir
hic_nora_fname2 = "%s/Data/HiC/HiCPipe/HM/mm9_ESC_HindIII_Nora.hch" % basedir
hic_phillips1 = h5py.File(hic_phillips_fname1, 'r')
hic_phillips2 = h5py.File(hic_phillips_fname2, 'r')
hic_nora1 = h5py.File(hic_nora_fname1, 'r')
hic_nora2 = h5py.File(hic_nora_fname2, 'r')
hm_phillips = {}
hm_nora = {}
for key in hic_phillips1.keys():
if key.count('unbinned_counts') == 0:
continue
region = int(key.split('.')[0])
hm_phillips[region] = dynamically_bin(hic_phillips1, hic_phillips2, region)
for key in hic_nora1.keys():
if key.count('unbinned_counts') == 0:
continue
region = int(key.split('.')[0])
hm_nora[region] = dynamically_bin(hic_nora1, hic_nora2, region)
fivec_fnames = {
"Prob_Phillips":"%s/Data/FiveC/HiFive/Phillips_ESC_probnodist.fcp" % basedir,
"Prob_Nora":"%s/Data/FiveC/HiFive/Nora_ESC_male_E14_probnodist.fcp" % basedir,
"Bin_Phillips":"%s/Data/FiveC/HiFive/Phillips_ESC_binnodist.fcp" % basedir,
"Bin_Nora":"%s/Data/FiveC/HiFive/Nora_ESC_male_E14_binnodist.fcp" % basedir,
"Exp_Phillips":"%s/Data/FiveC/HiFive/Phillips_ESC_expnodist.fcp" % basedir,
"Exp_Nora":"%s/Data/FiveC/HiFive/Nora_ESC_male_E14_expnodist.fcp" % basedir,
"Exp-KR_Phillips":"%s/Data/FiveC/HiFive/Phillips_ESC_expKRnodist.fcp" % basedir,
"Exp-KR_Nora":"%s/Data/FiveC/HiFive/Nora_ESC_male_E14_expKRnodist.fcp" % basedir,
}
data = {}
imgs = {}
ratio1 = 0
ratio2 = 0
for meth in ['Prob', 'Bin', 'Exp', 'Exp-KR']:
fc = hifive.FiveC(fivec_fnames["%s_Phillips" % meth])
fragments = fc.frags['fragments'][...]
regions = fc.frags['regions'][...]
counts = numpy.zeros(0, dtype=numpy.float64)
expected = numpy.zeros(0, dtype=numpy.float64)
hic_counts = numpy.zeros(0, dtype=numpy.float64)
hic_expected = numpy.zeros(0, dtype=numpy.float64)
skipped = []
for i in range(fc.frags['regions'].shape[0]):
temp = fc.cis_heatmap(i, datatype='fragment', arraytype='compact', binsize=0, skipfiltered=True)
if temp is None:
skipped.append(i)
continue
counts = numpy.hstack((counts, temp[:, :, 0].ravel()))
expected = numpy.hstack((expected, temp[:, :, 1].ravel()))
if i == 6:
ratio1 = temp.shape[1] / float(temp.shape[0])
imgs["%s_Phillips" % meth] = hifive.plotting.plot_full_array(temp, symmetricscaling=False)
if meth == 'Prob':
temp1 = numpy.zeros((temp.shape[0], temp.shape[1]), dtype=numpy.float32)
temp1[numpy.where(temp[:, :, 0] > 0.0)] = 1
if i == 6:
imgs["Raw_Phillips"] = hifive.plotting.plot_full_array(
numpy.dstack((temp[:, :, 0], temp1)), symmetricscaling=False)
binbounds = numpy.hstack((
fragments['start'][regions['start_frag'][i]:regions['stop_frag'][i]].reshape(-1, 1),
fragments['stop'][regions['start_frag'][i]:regions['stop_frag'][i]].reshape(-1, 1)))
valid = numpy.where(fc.filter[regions['start_frag'][i]:regions['stop_frag'][i]])[0]
binbounds = binbounds[valid, :]
temp = hm_phillips[i]
strands = fragments['strand'][regions['start_frag'][i]:regions['stop_frag'][i]][valid]
temp = temp[numpy.where(strands == 0)[0], :, :][:, numpy.where(strands == 1)[0], :]
hic_counts = numpy.hstack((hic_counts, temp[:, :, 0].ravel()))
hic_expected = numpy.hstack((hic_expected, temp[:, :, 1].ravel()))
if i == 6:
imgs["HiC_Phillips"] = hifive.plotting.plot_full_array(temp, symmetricscaling=False)
if meth == 'Prob':
data["Raw_Phillips"] = numpy.copy(counts)
where = numpy.where(hic_expected > 0.0)[0]
hic_counts[where] /= hic_expected[where]
data["HiC_Phillips"] = numpy.copy(hic_counts)
where = numpy.where(expected > 0.0)[0]
counts[where] /= expected[where]
data["%s_Phillips" % meth] = numpy.copy(counts)
fc = hifive.FiveC(fivec_fnames["%s_Nora" % meth])
temp = fc.cis_heatmap(0, datatype='fragment', arraytype='compact', binsize=0, skipfiltered=True)
ratio2 = temp.shape[1] / float(temp.shape[0])
imgs["%s_Nora" % meth] = hifive.plotting.plot_full_array(temp, symmetricscaling=False)
counts = temp[:, :, 0].ravel()
expected = temp[:, :, 1].ravel()
if meth == 'Prob':
temp1 = numpy.zeros((temp.shape[0], temp.shape[1]), dtype=numpy.float32)
temp1[numpy.where(temp[:, :, 0] > 0.0)] = 1
imgs["Raw_Nora"] = hifive.plotting.plot_full_array(
numpy.dstack((temp[:, :, 0], temp1)), symmetricscaling=False)
data["Raw_Nora"] = numpy.copy(counts)
fragments = fc.frags['fragments'][...]
regions = fc.frags['regions'][...]
binbounds = numpy.hstack((
fragments['start'][regions['start_frag'][0]:regions['stop_frag'][0]].reshape(-1, 1),
fragments['stop'][regions['start_frag'][0]:regions['stop_frag'][0]].reshape(-1, 1)))
binbounds = binbounds[numpy.where(fc.filter[regions['start_frag'][0]:regions['stop_frag'][0]])[0], :]
temp = hm_nora[0]
strands = fragments['strand'][regions['start_frag'][0]:regions['stop_frag'][0]]
temp = temp[numpy.where(strands==0)[0], :, :][:, numpy.where(strands == 1)[0], :]
imgs["HiC_Nora"] = hifive.plotting.plot_full_array(temp, symmetricscaling=False)
hic_counts = temp[:, :, 0].ravel()
hic_expected = temp[:, :, 1].ravel()
where = numpy.where(hic_expected > 0.0)[0]
hic_counts[where] /= hic_expected[where]
data["HiC_Nora"] = numpy.copy(hic_counts)
where = numpy.where(expected > 0.0)[0]
counts[where] /= expected[where]
data["%s_Nora" % meth] = numpy.copy(counts)
correlations = {}
output = open(out_fname.replace('pdf', 'txt'), 'w')
print >> output, "Method\tPhillips\tNora"
for meth in methods:
temp = [meth]
for name in ["Phillips", "Nora"]:
valid = numpy.where((data["%s_%s" % (meth, name)] > 0.0) * (data["HiC_%s" % name] > 0.0))
correlations["%s_%s" % (meth, name)] = numpy.corrcoef(numpy.log(data["%s_%s" % (meth, name)][valid]),
numpy.log(data["HiC_%s" % name][valid]))[0, 1]
temp.append(str(correlations["%s_%s" % (meth, name)]))
print >> output, '\t'.join(temp)
output.close()
width = 16.8
spacer = 0.3
c = canvas.canvas()
plot_width = (width - spacer * 3.0 - 0.4) / 4.0
for i, meth in enumerate(["Raw", "Prob", "HiC"]):
meth_names = {"Raw":"Raw", "Prob":"HiFive", "HiC":"HiC"}
c.text(plot_width * (i + 1.5) + spacer * (i + 1), (ratio1 + ratio2) * plot_width + spacer + 0.1,
"%s" % meth_names[meth], [text.halign.center, text.valign.bottom, text.size(-2)])
c.insert(bitmap.bitmap(0, 0, imgs["%s_Phillips" % meth], width=plot_width),
[trafo.translate((i + 1) * (plot_width + spacer), plot_width * ratio2 + spacer)])
c.insert(bitmap.bitmap(0, 0, imgs["%s_Nora" % meth], width=plot_width),
[trafo.translate((i + 1) * (plot_width + spacer), 0)])
g = graph.graphxy(width=plot_width - 0.8, height=plot_width * ratio1,
x=graph.axis.nestedbar(painter=graph.axis.painter.bar(nameattrs=None)),
y=graph.axis.lin(painter=painter),
x2=graph.axis.lin(parter=None, min=0, max=1),
y2=graph.axis.lin(parter=None, min=0, max=1))
for i, meth in enumerate(methods):
Y = numpy.zeros(2, dtype=numpy.float32)
col = method_colors[meth]
for j, name in enumerate(["Phillips", "Nora"]):
Y[j] = correlations["%s_%s" % (meth, name)]
g.plot(graph.data.points(zip(zip(range(Y.shape[0]), [i] * Y.shape[0]), Y), xname=1, y=2),
[graph.style.changebar([col])])
g.text(-0.8, plot_width * ratio1 * 0.5, "Correlation",
[text.halign.center, text.valign.top, text.size(-3), trafo.rotate(90)])
g.text((plot_width - 0.8) * 0.25, -0.1, "Phillips",
[text.halign.center, text.valign.top, text.size(-3)])
g.text((plot_width - 0.8) * 0.75, -0.1, "Nora",
[text.halign.center, text.valign.top, text.size(-3)])
c.insert(g, [trafo.translate(0.8, plot_width * ratio2 + spacer)])
c.text(width, (ratio1 + ratio2 * 0.5) * plot_width + spacer, "Phillips",
[text.halign.center, text.valign.top, trafo.rotate(-90), text.size(-2)])
c.text(width, ratio1 * 0.5 * plot_width, "Nora",
[text.halign.center, text.valign.top, trafo.rotate(-90), text.size(-2)])
meth_names = {"Raw":"Raw", "Prob":"HiFive-Probability", "Exp":"HiFive-Express", "Bin":"HiFive-Binning",
"Exp-KR":"HiFive-ExpressKR", "Exp-KR-dist":"HiFive-ExpressKR-dist"}
for i, meth in enumerate(methods):
c.fill(path.rect(1.0, plot_width * ratio1 - 1.0 - i * 0.5, 0.2, 0.2), [method_colors[meth]])
c.text(1.3, plot_width * ratio1 - 0.9 - i * 0.5, "%s" % meth_names[meth],
[text.halign.left, text.valign.middle, text.size(-3)])
c.writePDFfile(out_fname)
def dynamically_bin(hic1, hic2, region):
counts = hic1['%i.counts' % region][...] + hic2['%i.counts' % region][...]
expected = hic1['%i.expected' % region][...] + hic2['%i.expected' % region][...]
upper = numpy.zeros((counts.shape[0], 2), dtype=numpy.float32)
upper[:, 0] = counts
upper[:, 1] = expected
mids1 = hic1['%i.mids' % region][...]
mids2 = hic2['%i.mids' % region][...]
indices = numpy.triu_indices(mids1.shape[0], 1)
unbinned_counts1 = numpy.zeros((mids1.shape[0], mids1.shape[0]), dtype=numpy.int32)
unbinned_counts1[indices] = hic1['%i.unbinned_counts' % region][...]
unbinned_counts1[indices[1], indices[0]] = unbinned_counts1[indices]
unbinned_expected1 = numpy.zeros((mids1.shape[0], mids1.shape[0]), dtype=numpy.int32)
unbinned_expected1[indices] = hic1['%i.unbinned_expected' % region][...]
unbinned_expected1[indices[1], indices[0]] = unbinned_expected1[indices]
indices = numpy.triu_indices(mids2.shape[0], 1)
unbinned_counts2 = numpy.zeros((mids2.shape[0], mids2.shape[0]), dtype=numpy.int32)
unbinned_counts2[indices] = hic2['%i.unbinned_counts' % region][...]
unbinned_counts2[indices[1], indices[0]] = unbinned_counts2[indices]
unbinned_expected2 = numpy.zeros((mids2.shape[0], mids2.shape[0]), dtype=numpy.int32)
unbinned_expected2[indices] = hic2['%i.unbinned_expected' % region][...]
unbinned_expected2[indices[1], indices[0]] = unbinned_expected2[indices]
bounds = hic1['%i.bounds' % region][...]
allmids = numpy.zeros((mids1.shape[0] + mids2.shape[0], 2), dtype=numpy.int32)
allmids[:mids1.shape[0], 0] = mids1 - 1
allmids[:mids1.shape[0], 1] = mids1 + 1
allmids[mids1.shape[0]:, 0] = mids2 - 1
allmids[mids1.shape[0]:, 1] = mids2 + 1
allmids = allmids[numpy.argsort(allmids[:, 0]), :]
indices1 = numpy.searchsorted(allmids[:, 1], mids1)
indices1_1 = (indices1.reshape(-1, 1) * allmids.shape[0] + indices1.reshape(1, -1)).ravel()
indices2 = numpy.searchsorted(allmids[:, 1], mids2)
indices2_1 = (indices2.reshape(-1, 1) * allmids.shape[0] + indices2.reshape(1, -1)).ravel()
unbinned = numpy.zeros((allmids.shape[0], allmids.shape[0], 2), dtype=numpy.float32)
unbinned[:, :, 0] += numpy.bincount(indices1_1, minlength=allmids.shape[0] ** 2,
weights=unbinned_counts1.ravel()).reshape(allmids.shape[0], -1)
unbinned[:, :, 1] += numpy.bincount(indices1_1, minlength=allmids.shape[0] ** 2,
weights=unbinned_expected1.ravel()).reshape(allmids.shape[0], -1)
unbinned[:, :, 0] += numpy.bincount(indices2_1, minlength=allmids.shape[0] ** 2,
weights=unbinned_counts2.ravel()).reshape(allmids.shape[0], -1)
unbinned[:, :, 1] += numpy.bincount(indices2_1, minlength=allmids.shape[0] ** 2,
weights=unbinned_expected2.ravel()).reshape(allmids.shape[0], -1)
indices = numpy.triu_indices(unbinned.shape[0], 1)
unbinned = unbinned[indices[0], indices[1], :]
indices = numpy.triu_indices(bounds.shape[0], 1)
hifive.hic_binning.dynamically_bin_cis_array(unbinned, allmids, upper, bounds,
expansion_binsize=0, minobservations=25)
binned = numpy.zeros((bounds.shape[0], bounds.shape[0], 2), dtype=numpy.float32)
binned[indices[0], indices[1], :] = upper
binned[indices[1], indices[0], :] = upper
return binned
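# A usage sketch (hypothetical file handles; region numbering follows the
# .hch heatmap files opened in main()):
#
#   hic1 = h5py.File('mm9_ESC_NcoI_Phillips.hch', 'r')
#   hic2 = h5py.File('mm9_ESC_HindIII_Phillips.hch', 'r')
#   binned = dynamically_bin(hic1, hic2, 0)  # counts/expected for region 0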
if __name__ == "__main__":
main()
| bsd-3-clause | 1,219,841,942,067,948,000 | 55.616601 | 113 | 0.578749 | false |
delete/estofadora | estofadora/statement/views.py | 1 | 4766 | from datetime import datetime
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.contrib import messages
from estofadora.core.utils import MONTHS, last_day_of, month_before_of
from .forms import CashForm
from .models import Cash, Balance
@login_required
def home(request):
return render(request, 'statement/statement.html')
@login_required
def cash(request):
context = {}
date = datetime.now().date()
content = Cash.objects.filter(date=date)
form = CashForm(initial={'date': date})
if request.method == 'POST':
if 'search_form' in request.POST:
date = request.POST.get('search_date')
            # Parse the date, accepting either 21/12/2015 or 2015-12-21
try:
date = datetime.strptime(date, '%d/%m/%Y').date()
except ValueError:
date = datetime.strptime(date, '%Y-%m-%d').date()
content = Cash.objects.filter(date=date)
else:
form = CashForm(request.POST)
if form.is_valid():
form.save()
messages.success(request, 'Registrado com sucesso!')
return redirect(reverse('statement:cash'))
total_before = Balance.total_balance_before(date)
content, balance = Cash.create_balance(content, total_before)
context['form'] = form
context['content'] = content
context['total_value'] = balance
context['total_before'] = total_before
context['choose_date'] = date
context['section'] = 'cash'
return render(request, 'statement/cash.html', context)
@login_required
def delete(request, pk):
cash = get_object_or_404(Cash, pk=pk)
cash.delete()
messages.success(request, 'Registro removido com sucesso!')
return redirect(reverse('statement:cash'))
@login_required
def edit(request, pk):
context = {}
cash = get_object_or_404(Cash, pk=pk)
if request.method == 'POST':
form = CashForm(request.POST, instance=cash)
if form.is_valid():
form.save()
return render(
request, 'statement/item_edit_form_success.html',
{'item': cash}
)
else:
context['form_error'] = True
else:
form = CashForm(instance=cash)
context['form'] = form
context['item'] = cash
return render(request, 'statement/item_edit_form.html', context)
@login_required
def cash_month(request):
context = {}
date = datetime.now().date()
year = date.year
month = date.month
    # If a date was not given, filter by the current date.
content = Cash.filter_by_date(month=month, year=year)
total_value = Cash.total_value_by_date(month=month, year=year)
if request.method == 'POST':
month = int(request.POST.get('selectmonth'))
year = int(request.POST.get('selectyear'))
content = Cash.filter_by_date(month=month, year=year)
total_value = Cash.total_value_by_date(month=month, year=year)
y, m = month_before_of(year, month)
last_day_of_month_before = last_day_of(y, m)
total_before = Balance.total_balance_before(last_day_of_month_before)
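    # e.g. for January 2015 the two helpers above presumably yield 2014-12-31,
    # so total_before is the balance accumulated before the chosen month.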
content, total_value = Cash.create_balance(content, total_before)
context['content'] = content
context['total_value'] = total_value
context['total_before'] = total_before
context['choose_month'] = month
context['choose_year'] = year
context['months'] = MONTHS
context['years'] = Cash.list_years()
context['section'] = 'cash_month'
return render(request, 'statement/cash_month.html', context)
@login_required
def cash_annual(request):
context = {}
    # If a year was not given, use the current year.
year = datetime.now().date().year
if request.method == 'POST':
year = int(request.POST.get('selectyear'))
balances = []
month = 1
while month < 13:
# Get the total balance from January to December.
balance = Balance.balance_from_month(year=year, month=month)
balances.append(float(balance))
month += 1
total_value = Cash.total_value_by_date(year=year)
    # Find the last day of the previous year to get its accumulated balance.
january = 1
y, m = month_before_of(year, january)
last_day_year_before = last_day_of(y, m)
total_before = Balance.total_balance_before(last_day_year_before)
context['total_value'] = total_value
context['total_before'] = total_before
context['choose_year'] = year
context['balances'] = balances
context['years'] = Cash.list_years()
context['section'] = 'cash_annual'
return render(request, 'statement/cash_annual.html', context)
| mit | 1,433,432,891,108,522,200 | 28.239264 | 73 | 0.634914 | false |
stormi/tsunami | src/primaires/salle/commandes/ouvrir/__init__.py | 1 | 3297 | # -*- coding: utf-8 -*-
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'ouvrir'."""
from primaires.interpreteur.commande.commande import Commande
from primaires.interpreteur.masque.exceptions.erreur_interpretation import \
ErreurInterpretation
class CmdOuvrir(Commande):
"""Commande 'ouvrir'"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "ouvrir", "open")
self.nom_categorie = "bouger"
self.schema = "<nom_sortie>"
self.aide_courte = "ouvre une porte"
self.aide_longue = \
"Cette commande permet d'ouvrir une sortie de la salle où " \
"vous vous trouvez."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
sortie = dic_masques["nom_sortie"].sortie
salle = personnage.salle
nom_complet = sortie.nom_complet.capitalize()
personnage.agir("ouvrir")
if not sortie.porte:
raise ErreurInterpretation(
"|err|Cette sortie n'est pas une porte.|ff|")
if not sortie.porte.fermee:
raise ErreurInterpretation(
"Cette porte est déjà ouverte.|ff|".format(nom_complet))
if sortie.porte.verrouillee:
raise ErreurInterpretation(
"Cette porte semble fermée à clef.".format(nom_complet))
if not personnage.est_immortel() and not sortie.salle_dest.peut_entrer(
personnage):
raise ErreurInterpretation(
"Vous ne pouvez ouvrir cette porte.")
sortie.porte.ouvrir()
personnage << "Vous ouvrez {}.".format(sortie.nom_complet)
salle.envoyer("{{}} ouvre {}.".format(sortie.nom_complet), personnage)
| bsd-3-clause | -1,509,933,335,406,380,800 | 42.866667 | 79 | 0.696657 | false |
thomasmf/nomenine | src/core/runtime/clause/plus.py | 1 | 1148 |
ROOT_SCOPE_METHOD( MD( 'Plus', 'PLUS_FACTORY_single()' ) )
TEST( """ Or @ ( . [ ( Plus @ y ) ( Plus @ x ) ] flatten () ) consume [ x x y y y x y y ] value value == x """ )
OBJECT( 'PLUS_FACTORY',
methods = [
MS( ARG( CW( '@' ), CG( 'CLAUSE', 'clause' ) ), """
JUMP__return_ANY( CONTEXT, CONTEXT, $CA(PLUS_new( PARAM_clause )) ) ;
""" ),
]
)
OBJECT( 'PLUS',
inherit = [ 'CLAUSE' ],
attributes = [
A( 'ANY', 'type' ),
],
methods = [
MS( ARG( CW( 'consume' ), CG( 'LIST', 'phrase' ) ), """
JUMP__consume_LIST( $CA(FRAME__PLUS_0_new( CONTEXT )), $CA(STAR_new( ACTION->type )), PARAM_phrase ) ;
""" ),
],
dump = D( '%s', '$DUMP( object->type )' )
)
FRAME( 'PLUS_0',
methods = [
MS( ARG( CW( 'return' ), CG( 'ANY', 'value' ) ), """
JUMP__value( $CA(FRAME__PLUS_1_new( ACTION->parent, PARAM_value )), PARAM_value ) ;
""" ),
]
)
FRAME( 'PLUS_1',
attributes = [
A( 'ANY', 'element' ),
],
methods = [
MS( ARG( CW( 'return' ), CG( 'ANY', 'value' ) ), """
JUMP__value( $CA(FRAME__REPLACER_new( ACTION->parent, ACTION->element )), PARAM_value ) ;
""" ),
]
)
| mit | -4,228,764,090,806,747,000 | 22.428571 | 112 | 0.486934 | false |
Jimdo/ansible-fastly | tests/test_fastly_response_object.py | 1 | 2949 | #!/usr/bin/env python
import os
import unittest
import sys
from test_common import TestCommon
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'library'))
from fastly_service import FastlyConfiguration
class TestFastlyResponseObject(TestCommon):
@TestCommon.vcr.use_cassette()
def test_fastly_response_object_defaults(self):
response_object_configuration = self.minimal_configuration.copy()
response_object_configuration.update({
'response_objects': [{
'name': 'Set 200 status code',
}]
})
configuration = FastlyConfiguration(response_object_configuration)
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
self.assertEqual(service.active_version.configuration.response_objects[0].name, 'Set 200 status code')
self.assertEqual(service.active_version.configuration.response_objects[0].status, '200')
self.assertEqual(service.active_version.configuration.response_objects[0].response, 'Ok')
self.assertEqual(service.active_version.configuration, configuration)
active_version_number = service.active_version.number
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
self.assertEqual(service.active_version.number, active_version_number)
@TestCommon.vcr.use_cassette()
def test_fastly_response_object_content_content_type(self):
response_object_configuration = self.minimal_configuration.copy()
response_object_configuration.update({
'response_objects': [{
'name': 'Set 200 status code',
'status': 200,
'response': 'Ok',
'content': 'Hello from Fastly',
'content_type': 'text/plain',
}]
})
configuration = FastlyConfiguration(response_object_configuration)
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
self.assertEqual(service.active_version.configuration.response_objects[0].name, 'Set 200 status code')
self.assertEqual(service.active_version.configuration.response_objects[0].status, '200')
self.assertEqual(service.active_version.configuration.response_objects[0].response, 'Ok')
self.assertEqual(service.active_version.configuration.response_objects[0].content, 'Hello from Fastly')
self.assertEqual(service.active_version.configuration.response_objects[0].content_type, 'text/plain')
self.assertEqual(service.active_version.configuration, configuration)
active_version_number = service.active_version.number
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
self.assertEqual(service.active_version.number, active_version_number)
if __name__ == '__main__':
unittest.main()
| mit | 3,654,560,946,872,806,000 | 45.078125 | 111 | 0.699559 | false |
ericblau/ipf-xsede | ipf/glue2/application.py | 1 | 10306 |
###############################################################################
# Copyright 2011-2014 The University of Texas at Austin #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
###############################################################################
import json
from ipf.data import Data, Representation
from ipf.dt import *
from ipf.error import StepError
from ipf.step import Step
from ipf.sysinfo import ResourceName
from ipf.ipfinfo import IPFInformation, IPFInformationJson, IPFInformationTxt
from .entity import *
#######################################################################################################################
class ApplicationEnvironment(Entity):
def __init__(self):
Entity.__init__(self)
self.AppName = "unknown" # string
self.SpecifiedName = None # string
self.AppVersion = None # string
self.Repository = None # string (url)
self.State = None # string (AppEnvState_t)
self.RemovalDate = None # datetime
self.License = None # string (License_t)
self.Description = None # string
self.BestBenchmark = [] # string (Benchmark_t)
self.ParallelSupport = None # string (ParallelSupport_t)
self.MaxSlots = None # integer
self.MaxJobs = None # integer
self.MaxUserSeats = None # integer
self.FreeSlots = None # integer
self.FreeJobs = None # integer
self.FreeUserSeats = None # integer
self.ExecutionEnvironmentID = [] # string (ID)
self.ComputingManagerID = None # string (ID)
self.ApplicationHandleID = [] # string (ID)
self.Keywords = [] # string (ID)
self.Extension = {}
self.SupportStatus = None
def __str__(self):
return json.dumps(ApplicationEnvironmentOgfJson(self).toJson(), sort_keys=True, indent=4)
#######################################################################################################################
class ApplicationEnvironmentOgfJson(EntityOgfJson):
data_cls = ApplicationEnvironment
def __init__(self, data):
EntityOgfJson.__init__(self, data)
def get(self):
return json.dumps(self.toJson(), sort_keys=True, indent=4)
def toJson(self):
doc = EntityOgfJson.toJson(self)
        # SpecifiedName holds the descriptive "Name:" field from inside the module file
if self.data.SpecifiedName is not None:
doc["Name"] = self.data.SpecifiedName
if self.data.AppName is not None:
doc["AppName"] = self.data.AppName
if self.data.AppVersion is not None:
doc["AppVersion"] = self.data.AppVersion
if self.data.Repository is not None:
doc["Repository"] = self.data.Repository
if self.data.State is not None:
doc["State"] = self.data.State
if self.data.RemovalDate is not None:
doc["RemovalDate"] = dateTimeToText(self.data.RemovalDate)
if self.data.License is not None:
doc["License"] = self.data.License
if self.data.Description is not None:
doc["Description"] = self.data.Description
if len(self.data.BestBenchmark) > 0:
doc["BestBenchmark"] = self.data.BestBenchmark
if self.data.ParallelSupport is not None:
doc["ParallelSupport"] = self.data.ParallelSupport
if self.data.MaxSlots is not None:
doc["MaxSlots"] = self.data.MaxSlots
if self.data.MaxJobs is not None:
doc["MaxJobs"] = self.data.MaxJobs
if self.data.MaxUserSeats is not None:
doc["MaxUserSeats"] = self.data.MaxUserSeats
if self.data.FreeSlots is not None:
doc["FreeSlots"] = self.data.FreeSlots
if self.data.FreeJobs is not None:
doc["FreeJobs"] = self.data.FreeJobs
if self.data.FreeUserSeats is not None:
doc["FreeUserSeats"] = self.data.FreeUserSeats
if len(self.data.Keywords) > 0:
doc["Keywords"] = self.data.Keywords
if len(self.data.Extension) > 0:
extensions = []
for ext in self.data.Extension:
extensions.append(ext)
#doc["Extensions"] = list(extensions)
if self.data.SupportStatus is not None:
doc["SupportStatus"] = self.data.SupportStatus
associations = {}
associations["ExecutionEnvironmentID"] = self.data.ExecutionEnvironmentID
associations["ComputingManagerID"] = self.data.ComputingManagerID
if len(self.data.ApplicationHandleID) > 0:
associations["ApplicationHandleID"] = self.data.ApplicationHandleID
doc["Associations"] = associations
return doc
#######################################################################################################################
class ApplicationHandle(Entity):
def __init__(self):
Entity.__init__(self)
# string (ApplicationHandle_t)
self.Type = "unknown"
# string
self.Value = "unknown"
# string (ID)
self.ApplicationEnvironmentID = "urn:glue2:ApplicationEnvironment:unknown"
#######################################################################################################################
class ApplicationHandleOgfJson(EntityOgfJson):
data_cls = ApplicationHandle
def __init__(self, data):
EntityOgfJson.__init__(self, data)
def get(self):
return json.dumps(self.toJson(), sort_keys=True, indent=4)
def toJson(self):
doc = EntityOgfJson.toJson(self)
doc["Type"] = self.data.Type
doc["Value"] = self.data.Value
associations = {}
associations["ApplicationEnvironmentID"] = self.data.ApplicationEnvironmentID
doc["Associations"] = associations
return doc
#######################################################################################################################
class Applications(Data):
def __init__(self, resource_name, ipfinfo):
Data.__init__(self)
self.id = resource_name
self.environments = []
self.handles = []
self.resource_name = resource_name
self.ipfinfo = ipfinfo
def add(self, env, handles):
if env.AppVersion is None:
app_version = "unknown"
else:
app_version = env.AppVersion
env.Name = "%s-%s" % (env.AppName, app_version)
env.id = "%s.%s.%s" % (app_version, env.AppName, self.resource_name)
env.ID = "urn:glue2:ApplicationEnvironment:%s.%s.%s.%s" % (
app_version, env.AppName, self.resource_name, env.path_hash)
env.ComputingManagerID = "urn:glue2:ComputingManager:%s" % (
self.resource_name)
env.ApplicationHandleID = []
for handle in handles:
handle.ApplicationEnvironmentID = env.ID
handle.Name = "%s-%s" % (env.AppName, app_version)
handle.id = "%s.%s.%s.%s" % (
handle.Type, app_version, env.AppName, self.resource_name)
handle.ID = "urn:glue2:ApplicationHandle:%s:%s.%s.%s.%s" % \
(handle.Type, app_version, env.AppName,
self.resource_name, env.path_hash)
env.ApplicationHandleID.append(handle.ID)
self.environments.append(env)
self.handles.extend(handles)
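# Illustrative sketch (not part of the original module): how add() is expected
# to be driven. The attribute env.path_hash is assumed to be set by the caller,
# since add() reads it when building the URN.
#
#   apps = Applications("cluster.example.org", ipfinfo)
#   env = ApplicationEnvironment(); env.AppName = "gcc"; env.AppVersion = "4.8"
#   env.path_hash = "abc123"
#   handle = ApplicationHandle(); handle.Type = "module"; handle.Value = "gcc/4.8"
#   apps.add(env, [handle])
#   # env.ID == "urn:glue2:ApplicationEnvironment:4.8.gcc.cluster.example.org.abc123"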
#######################################################################################################################
class ApplicationsOgfJson(Representation):
data_cls = Applications
def __init__(self, data):
Representation.__init__(
self, Representation.MIME_APPLICATION_JSON, data)
def get(self):
return json.dumps(self.toJson(), sort_keys=True, indent=4)
def toJson(self):
doc = {}
doc["ApplicationEnvironment"] = []
for env in self.data.environments:
doc["ApplicationEnvironment"].append(
ApplicationEnvironmentOgfJson(env).toJson())
doc["ApplicationHandle"] = []
for handle in self.data.handles:
doc["ApplicationHandle"].append(
ApplicationHandleOgfJson(handle).toJson())
doc["PublisherInfo"] = [IPFInformationJson(
ipfinfo).toJson() for ipfinfo in self.data.ipfinfo]
return doc
#######################################################################################################################
class ApplicationsStep(Step):
def __init__(self):
Step.__init__(self)
self.description = "produces a document containing GLUE 2 ApplicationEnvironment and ApplicationHandle"
self.time_out = 30
self.requires = [IPFInformation, ResourceName]
self.produces = [Applications]
self.resource_name = None
def run(self):
self.resource_name = self._getInput(ResourceName).resource_name
self.ipfinfo = [self._getInput(IPFInformation)]
self._output(self._run())
def _run(self):
raise StepError("ApplicationsStep._run not overriden")
#######################################################################################################################
| apache-2.0 | -30,491,066,512,194,984 | 39.574803 | 119 | 0.522511 | false |
edouardpoitras/Eva | clients/cli.py | 1 | 4617 | """
This file holds the abstract CLI class used to create command line utilities
that interact with Eva.
Please use the local_cli.py or remote_cli.py to interact with Eva via the
command line.
"""
import time
from multiprocessing import Process
class CLI(object):
"""
Interface object used to create CLI-based Eva clients.
Will take care of some of the heavy lifting, such as setting up the pubsub
consumer for Eva messages and responses, and start the interaction loop.
See the LocalCLI and RemoteCLI objects for examples.
"""
def __init__(self):
self.process = None
def start_consumer(self, queue, response_prefix='Eva Message: '):
"""
Start a pubsub consumer to receive messages from Eva.
:param queue: The channel to receive messages from.
:type queue: string
:param response_prefix: A string that will prefix all messages from the queue.
:type response_prefix: string
"""
self.process = Process(target=self.consume_messages, args=(queue, response_prefix))
self.process.start()
def consume_messages(self, queue, response_prefix):
"""
A method that consumes the messages from the queue specified.
Will automatically print the messages to the CLI.
This is the method used to fire off a separate process in the
``start_consumer`` method.
It will continuously tail the MongoDB collection holding the messages.
:param queue: The channel to receive messages from.
:type queue: string
:param response_prefix: A string that will prefix all messages from the queue.
:type response_prefix: string
"""
# Need to listen for messages and print them to the CLI.
pubsub = self.get_pubsub()
subscriber = pubsub.subscribe(queue)
# Subscriber will continuously tail the mongodb collection queue.
for message in subscriber:
if message is not None:
if isinstance(message, dict):
print('%s%s' %(response_prefix, message['output_text']))
else:
print('%s%s' %(response_prefix, message))
time.sleep(0.1)
def get_pubsub(self):
"""
        A method meant to be overridden in order to get a pubsub object depending
on the requirements of the CLI client.
:return: An anypubsub object used to send and receive messages.
:rtype: `anypubsub.interfaces.PubSub <https://github.com/smarzola/anypubsub>`_
"""
# Logic here to get the proper anypubsub object.
pass
def interact(self, command=None):
"""
The main method that interacts with the Eva server.
:param command: An optional command to send Eva. If None, this method
will continuously poll the user for a new command/request after
every response from Eva.
:type command: string
"""
if command is not None:
results = self.get_results(command)
self.handle_results(results)
else:
print('=== Eva CLI ===')
while True:
command = input('You: ')
results = self.get_results(command)
if results is not None:
self.handle_results(results)
def get_results(self, command):
"""
This method is meant to be overridden in order to properly process a
command from the user and return Eva's response.
:param command: The query/command to send Eva.
:type command: string
:return: Eva's response to that query/command.
:rtype: string
"""
pass
def handle_results(self, results): #pylint: disable=R0201
"""
This method performs the necessary actions with the data returned from
Eva after a query/command.
:param results: The response dict from Eva after a query/command.
        Typically a dict with the following structure::
{
'output_text': <text_here>,
'output_audio': {
'audio': <audio_data>,
'content_type': <audio_content_type>
}
}
:type results: dict
"""
if results['output_text'] is None:
print('Eva Response: ')
else:
print('Eva Response: %s' %results['output_text'])
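# Illustrative sketch (not part of this file): a minimal concrete client. The
# Redis backend shown is an assumption -- see local_cli.py / remote_cli.py for
# the real implementations.
#
#   import anypubsub, redis
#
#   class RedisCLI(CLI):
#       def get_pubsub(self):
#           return anypubsub.create_pubsub('redis', connection=redis.StrictRedis())
#       def get_results(self, command):
#           return {'output_text': 'echo: %s' % command}
#
#   RedisCLI().interact()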
if __name__ == '__main__':
print('Please use local_cli.py or remote_cli.py instead.')
| epl-1.0 | -474,309,522,082,213,950 | 35.354331 | 91 | 0.601473 | false |
darth-vader-lg/glcncrpi | tools/arm-bcm2708/gcc-linaro-arm-none-eabi-4.8-2014.04/arm-none-eabi/lib/v7ve/fpv4/softfp/libstdc++.a-gdb.py | 1 | 2451 | # -*- python -*-
# Copyright (C) 2009-2013 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/cbuild/slaves/oorts/crosstool-ng/builds/arm-none-eabi-win32/install/share/gcc-4.8.3/python'
libdir = '/cbuild/slaves/oorts/crosstool-ng/builds/arm-none-eabi-win32/install/arm-none-eabi/lib/v7ve/fpv4/softfp'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir_ in sys.path:
sys.path.insert(0, dir_)
# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
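# Illustrative walk-through (not part of the generated file): after the common
# prefix is stripped, libdir == 'arm-none-eabi/lib/v7ve/fpv4/softfp' (5 path
# components), so dotdots == '..' + os.sep repeated 5 times, and the directory
# appended to sys.path is <objfile dir>/../../../../../share/gcc-4.8.3/python.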
| gpl-3.0 | -413,694,461,915,479,100 | 39.85 | 114 | 0.720522 | false |
MKLab-ITI/reveal-user-annotation | reveal_user_annotation/rabbitmq/rabbitmq_util.py | 1 | 3042 | __author__ = 'Georgios Rizos ([email protected])'
import sys
import subprocess
from amqp import Connection, Message
from amqp.exceptions import PreconditionFailed
if sys.version_info > (3,):
    import urllib.parse as urlparse
    from urllib.parse import unquote
else:
    import urlparse
    from urllib import unquote
def translate_rabbitmq_url(url):
if url[0:4] == "amqp":
url = "http" + url[4:]
parts = urlparse.urlparse(url)
if parts.scheme == "https":
ssl = True
else:
ssl = False
if parts.hostname is not None:
host_name = parts.hostname
else:
host_name = "localhost"
if parts.port is not None:
port = int(parts.port)
else:
port = 5672
    if parts.username is not None:
        user_name = parts.username
        password = parts.password
    else:
        # No credentials in the URL: fall back to the AMQP defaults.
        user_name = "guest"
        password = "guest"
path_parts = parts.path.split('/')
    virtual_host = unquote(path_parts[1])
    # May be empty when the URL carries no vhost, e.g. "amqp://guest:guest@localhost:5672/"
if virtual_host == "":
virtual_host = "/" # Default vhost
return user_name, password, host_name, port, virtual_host, ssl
def establish_rabbitmq_connection(rabbitmq_uri):
"""
What it says on the tin.
Input: - rabbitmq_uri: A RabbitMQ URI.
Output: - connection: A RabbitMQ connection.
"""
userid, password, host, port, virtual_host, ssl = translate_rabbitmq_url(rabbitmq_uri)
connection = Connection(userid=userid,
password=password,
host=host,
port=port,
virtual_host=virtual_host,
                            ssl=ssl)  # honor amqps:// URLs instead of forcing plain TCP
return connection
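# Illustrative sketch (not part of the original module): typical use of the
# helpers above. The URI and names are hypothetical.
#
#   conn = establish_rabbitmq_connection("amqp://guest:guest@localhost:5672/")
#   simple_notification(conn, "reveal_queue", "reveal_exchange", "reveal.key", "hello")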
def simple_notification(connection, queue_name, exchange_name, routing_key, text_body):
"""
Publishes a simple notification.
Inputs: - connection: A rabbitmq connection object.
- queue_name: The name of the queue to be checked or created.
- exchange_name: The name of the notification exchange.
- routing_key: The routing key for the exchange-queue binding.
- text_body: The text to be published.
"""
channel = connection.channel()
try:
channel.queue_declare(queue_name, durable=True, exclusive=False, auto_delete=False)
except PreconditionFailed:
pass
try:
channel.exchange_declare(exchange_name, type="fanout", durable=True, auto_delete=False)
except PreconditionFailed:
pass
channel.queue_bind(queue_name, exchange_name, routing_key=routing_key)
message = Message(text_body)
channel.basic_publish(message, exchange_name, routing_key)
def simpler_notification(channel, queue_name, exchange_name, routing_key, text_body):
message = Message(text_body)
channel.basic_publish(message, exchange_name, routing_key)
def rabbitmq_server_service(command):
subprocess.call(["service", "rabbitmq-server", command])
| apache-2.0 | -7,088,256,928,776,268,000 | 27.971429 | 112 | 0.635766 | false |
bentzinir/Buffe | layers/conv_pool.py | 1 | 3468 | import theano as t
import numpy as np
import theano.tensor as tt
from theano.tensor.nnet import conv
from theano.tensor.signal import downsample
import common
rng = np.random.RandomState(23455)
class CONV_POOL(object):
"""Conv Pool Layer of a convolutional network """
def __init__(self, filter_shape, image_shape, border_mode='valid', poolsize=(2, 2)):
"""
Allocate a LeNetConvPoolLayer with shared variable internal parameters.
        Weights are initialized from the module-level ``rng``
        (np.random.RandomState); the symbolic input tensor of shape
        image_shape is passed to ``step()``, not to the constructor.
:type filter_shape: tuple or list of length 4
:param filter_shape: (number of filters, num input feature maps,
filter height, filter width)
:type image_shape: tuple or list of length 4
:param image_shape: (batch size, num input feature maps,
image height, image width)
:type poolsize: tuple or list of length 2
:param poolsize: the downsampling (pooling) factor (#rows, #cols)
"""
self.filter_shape = filter_shape
self.image_shape = image_shape
self.border_mode = border_mode
self.poolsize = poolsize
assert image_shape[1] == filter_shape[1]
# there are "num input feature maps * filter height * filter width"
# inputs to each hidden unit
fan_in = np.prod(filter_shape[1:])
# each unit in the lower layer receives a gradient from:
# "num output feature maps * filter height * filter width" /
# pooling size
fan_out = (filter_shape[0] * np.prod(filter_shape[2:]) /
np.prod(poolsize))
# initialize weights with random weights
W_bound = np.sqrt(6. / (fan_in + fan_out))
self.W = t.shared(
np.asarray(
rng.uniform(low=-W_bound, high=W_bound, size=filter_shape),
dtype=t.config.floatX
),
borrow=True
)
# the bias is a 1D tensor -- one bias per output feature map
b_values = np.zeros((filter_shape[0],), dtype=t.config.floatX)
self.b = t.shared(value=b_values, borrow=True)
# store parameters of this layer
self.params = [self.W, self.b]
def step(self, input):
# self.input = input
# convolve input feature maps with filters
# conv_out = t.conv.conv2d(
# input=input,
# filters=self.W,
# filter_shape=filter_shape,
# image_shape=image_shape
# )
conv_out = conv.conv2d(
input=input,
filters=self.W,
filter_shape=self.filter_shape,
image_shape=self.image_shape,
border_mode=self.border_mode
)
# downsample each feature map individually, using maxpooling
pooled_out = downsample.max_pool_2d(
input=conv_out,
ds=self.poolsize,
ignore_border=True,
)
# add the bias term. Since the bias is a vector (1D array), we first
# reshape it to a tensor of shape (1, n_filters, 1, 1). Each bias will
# thus be broadcasted across mini-batches and feature map
# width & height
output = tt.tanh(pooled_out + self.b.dimshuffle('x', 0, 'x', 'x'))
return output
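# Illustrative sketch (not part of the original module): wiring the layer into
# a Theano graph. All shapes are hypothetical: batch 128, 1 input channel,
# 20 filters of 5x5 on 28x28 images, pooled 2x2.
#
#   x = tt.tensor4('x')
#   layer = CONV_POOL(filter_shape=(20, 1, 5, 5),
#                     image_shape=(128, 1, 28, 28),
#                     poolsize=(2, 2))
#   out = layer.step(x)  # symbolic output of shape (128, 20, 12, 12)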
| mit | 1,760,589,534,825,072,000 | 33.68 | 88 | 0.5891 | false |
y4ns0l0/collectd-ceph | plugins/ceph_latency_plugin.py | 1 | 3887 | #!/usr/bin/env python
#
# vim: tabstop=4 shiftwidth=4
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; only version 2 of the License is applicable.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors:
# Ricardo Rocha <[email protected]>
#
# About this plugin:
# This plugin evaluates current latency to write to the test pool.
#
# collectd:
# http://collectd.org
# collectd-python:
# http://collectd.org/documentation/manpages/collectd-python.5.shtml
# ceph pools:
# https://ceph.com/docs/master/man/8/rados/#pool-specific-commands
#
import collectd
import re
import traceback
import subprocess
import base
class CephLatencyPlugin(base.Base):
def __init__(self):
base.Base.__init__(self)
self.prefix = 'ceph'
def get_stats(self):
"""Retrieves stats regarding latency to write to a test pool"""
ceph_cluster = "%s-%s" % (self.prefix, self.cluster)
data = {
ceph_cluster: {},
}
output = None
try:
command = "timeout 30s rados --cluster %s -p %s bench 10 write -t 1 -b 65536" % (self.cluster, format(self.testpool))
output = subprocess.check_output(command, shell=True)
except Exception as exc:
collectd.error("ceph-latency: failed to run rados bench :: %s :: %s"
% (exc, traceback.format_exc()))
return
        if output is None:
            collectd.error('ceph-latency: failed to run rados bench :: output was None')
            return
regex_match = re.compile('^([a-zA-Z]+) [lL]atency\S*: \s* (\w+.?\w+)\s*', re.MULTILINE)
results = regex_match.findall(output)
if len(results) == 0:
# this is a fast hack, should put regexps into an array and try 'em all
# my format:
## Average Latency: 0.00517643
## Stddev Latency: 0.00179458
regex_match = re.compile('^([a-zA-Z]+) [lL]atency: +(\w+.?\w+)', re.MULTILINE)
results = regex_match.findall(output)
if len(results) == 0:
# hopeless
collectd.error('ceph-latency: failed to run rados bench :: output unrecognized %s' % output)
return
data[ceph_cluster]['cluster'] = {}
for key, value in results:
if key == 'Average':
data[ceph_cluster]['cluster']['avg_latency'] = float(value) * 1000
elif key == 'Stddev':
data[ceph_cluster]['cluster']['stddev_latency'] = float(value) * 1000
elif key == 'Max':
data[ceph_cluster]['cluster']['max_latency'] = float(value) * 1000
elif key == 'Min':
data[ceph_cluster]['cluster']['min_latency'] = float(value) * 1000
return data
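# Illustrative note (not part of the original plugin): for cluster "ceph",
# get_stats() is expected to return a structure like
#   {"ceph-ceph": {"cluster": {"avg_latency": 5.17, "stddev_latency": 1.79, ...}}}
# with latencies converted to milliseconds.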
try:
plugin = CephLatencyPlugin()
except Exception as exc:
collectd.error("ceph-latency: failed to initialize ceph latency plugin :: %s :: %s"
% (exc, traceback.format_exc()))
def configure_callback(conf):
"""Received configuration information"""
plugin.config_callback(conf)
collectd.register_read(read_callback, plugin.interval)
def read_callback():
"""Callback triggerred by collectd on read"""
plugin.read_callback()
collectd.register_init(CephLatencyPlugin.reset_sigchld)
collectd.register_config(configure_callback)
| gpl-2.0 | 4,239,711,054,730,847,700 | 34.336364 | 129 | 0.624132 | false |
eepalms/gem5-newcache | src/sim/Process.py | 1 | 3132 | # Copyright (c) 2005-2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from m5.SimObject import SimObject
from m5.params import *
from m5.proxy import *
class Process(SimObject):
type = 'Process'
abstract = True
cxx_header = "sim/process.hh"
input = Param.String('cin', "filename for stdin")
output = Param.String('cout', 'filename for stdout')
errout = Param.String('cerr', 'filename for stderr')
system = Param.System(Parent.any, "system process will run on")
max_stack_size = Param.MemorySize('64MB', 'maximum size of the stack')
@classmethod
def export_methods(cls, code):
code('bool map(Addr vaddr, Addr paddr, int size);')
class LiveProcess(Process):
type = 'LiveProcess'
cxx_header = "sim/process.hh"
executable = Param.String('', "executable (overrides cmd[0] if set)")
cmd = VectorParam.String("command line (executable plus arguments)")
env = VectorParam.String([], "environment settings")
cwd = Param.String('', "current working directory")
uid = Param.Int(100, 'user id')
euid = Param.Int(100, 'effective user id')
gid = Param.Int(100, 'group id')
egid = Param.Int(100, 'effective group id')
pid = Param.Int(100, 'process id')
ppid = Param.Int(99, 'parent process id')
simpoint = Param.UInt64(0, 'simulation point at which to start simulation')
# set P bit in SE mode
need_protect = Param.Bool(0, 'whether the program needs to be protected')
protected_start = Param.UInt64(0, 'start addres of protected data section')
protected_end = Param.UInt64(0, 'end address of protected data section')
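# Illustrative sketch (not part of the original file): configuring a LiveProcess
# in an SE-mode config script. Paths and the `cpu` variable are hypothetical.
#
#   process = LiveProcess()
#   process.executable = '/bin/hello'
#   process.cmd = ['/bin/hello']
#   cpu.workload = process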
| bsd-3-clause | -4,790,954,474,984,413,000 | 47.9375 | 79 | 0.735951 | false |
simbits/Lumiere | cabinet_orig.py | 1 | 2565 | #!/usr/bin/env python
import random
import socket
import struct
import sys
import time
from Adafruit_MCP230xx import Adafruit_MCP230XX
CABINET_VERSION='1.0b'
START_MSG='## Cabinet version %s ##' % (CABINET_VERSION)
MCAST_GRP = ('224.19.79.1', 9999)
DRAWERS = 9
USE_PULLUPS = 1
RETRIGGER_DELAY = 10 #seconds
WAIT_DELAY = 3 #seconds
if __name__ == '__main__':
mcp = Adafruit_MCP230XX(address=0x20, num_gpios=16) # MCP23017
c_state = [True] * DRAWERS
p_state = [True] * DRAWERS
trigger_delay = [0] * DRAWERS
for i in range(0, DRAWERS):
mcp.config(i, mcp.INPUT)
mcp.pullup(i, USE_PULLUPS)
p_state[i] = bool((mcp.input(i) >> i) & 0x01)
print 'initial state: %s' % (str(p_state))
print 'setting up mcast group @%s' % (str(MCAST_GRP))
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(0.2)
ttl = struct.pack('b', 1)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl)
try:
sock.sendto(START_MSG, MCAST_GRP)
except Exception as e:
print 'exception during send: %s' % (str(e))
sys.exit(1)
while True:
for i in range(0, DRAWERS):
if trigger_delay[i] > 0:
trigger_delay[i] = trigger_delay[i] - 1
#c_state[i] = bool((mcp.input(i) >> i) & 0x01)
            c_state[i] = bool(random.randint(0,1)) # simulated input for testing; the real read is the commented line above
triggered = {i for i in range(0, DRAWERS)
if c_state[i] != p_state[i] and
not c_state[i] and
trigger_delay[i] == 0}
closed = {i for i in range(0, DRAWERS)
if c_state[i] != p_state[i] and
c_state[i]}
for i in triggered:
trigger_delay[i] = RETRIGGER_DELAY / WAIT_DELAY
print 'prev: %s' % (p_state)
print 'cur : %s' % (c_state)
print 'd: %s' % (trigger_delay)
print 't: %s' % (triggered)
print 'c: %s' % (closed)
try:
for i in closed:
print 'sending closed drawer: %d' % (i)
sock.sendto('c:%d' % (i), MCAST_GRP)
drawer = random.choice(list(triggered))
print 'sending opened drawer %d' % (drawer)
sock.sendto('o:%d' % (drawer), MCAST_GRP)
except IndexError:
pass
except Exception as e:
print 'exception during send: %s' % (str(e))
p_state = list(c_state)
time.sleep(WAIT_DELAY) # relax a little
| mit | -227,441,266,886,782,400 | 29.176471 | 68 | 0.527875 | false |
theetcher/fxpt | fxpt/fx_refsystem/options_dialog_ui.py | 1 | 4062 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'options_dialog_ui.ui'
#
# Created: Fri Nov 18 22:58:31 2016
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(576, 351)
self.verticalLayout_2 = QtGui.QVBoxLayout(Dialog)
self.verticalLayout_2.setContentsMargins(6, 6, 6, 6)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.groupBox = QtGui.QGroupBox(Dialog)
self.groupBox.setObjectName("groupBox")
self.verticalLayout = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout.setContentsMargins(6, 6, 6, 6)
self.verticalLayout.setObjectName("verticalLayout")
self.uiLST_roots = QtGui.QListWidget(self.groupBox)
self.uiLST_roots.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.uiLST_roots.setAlternatingRowColors(True)
self.uiLST_roots.setObjectName("uiLST_roots")
self.verticalLayout.addWidget(self.uiLST_roots)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.uiBTN_add = QtGui.QPushButton(self.groupBox)
self.uiBTN_add.setObjectName("uiBTN_add")
self.horizontalLayout.addWidget(self.uiBTN_add)
self.uiBTN_remove = QtGui.QPushButton(self.groupBox)
self.uiBTN_remove.setObjectName("uiBTN_remove")
self.horizontalLayout.addWidget(self.uiBTN_remove)
self.uiBTN_setActive = QtGui.QPushButton(self.groupBox)
self.uiBTN_setActive.setObjectName("uiBTN_setActive")
self.horizontalLayout.addWidget(self.uiBTN_setActive)
self.verticalLayout.addLayout(self.horizontalLayout)
self.verticalLayout_2.addWidget(self.groupBox)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setMinimumSize(QtCore.QSize(0, 0))
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout_2.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
self.uiLST_roots.setCurrentRow(-1)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), Dialog.reject)
QtCore.QObject.connect(Dialog, QtCore.SIGNAL("finished(int)"), Dialog.onDialogFinished)
QtCore.QObject.connect(Dialog, QtCore.SIGNAL("accepted()"), Dialog.onDialogAccepted)
QtCore.QObject.connect(self.uiBTN_add, QtCore.SIGNAL("clicked()"), Dialog.onAddClicked)
QtCore.QObject.connect(self.uiBTN_setActive, QtCore.SIGNAL("clicked()"), Dialog.onSetActiveClicked)
QtCore.QObject.connect(self.uiLST_roots, QtCore.SIGNAL("itemSelectionChanged()"), Dialog.onSelectionChanged)
QtCore.QObject.connect(self.uiBTN_remove, QtCore.SIGNAL("clicked()"), Dialog.onRemoveClicked)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "FX RefSystem Options", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("Dialog", "References Location Roots", None, QtGui.QApplication.UnicodeUTF8))
self.uiLST_roots.setSortingEnabled(True)
self.uiBTN_add.setText(QtGui.QApplication.translate("Dialog", "Add", None, QtGui.QApplication.UnicodeUTF8))
self.uiBTN_remove.setText(QtGui.QApplication.translate("Dialog", "Remove", None, QtGui.QApplication.UnicodeUTF8))
self.uiBTN_setActive.setText(QtGui.QApplication.translate("Dialog", "Set Active", None, QtGui.QApplication.UnicodeUTF8))
| mit | 1,141,909,398,221,489,200 | 57.869565 | 137 | 0.727228 | false |
KrisHammerberg/DEMtools | demtools.py | 1 | 4546 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
DemTools
A QGIS plugin
A suite of tools for doing neat things with DEMs
-------------------
begin : 2014-05-15
copyright : (C) 2014 by Kris Hammerberg
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
# Initialize Qt resources from file resources.py
import resources_rc
# Import the code for the dialog
import os.path
import sys
from shaDEM import shaDEM
from svf import svf
from solaraccess import SolarAccess
class DemTools:
def __init__(self, iface):
# Save reference to the QGIS interface
self.iface = iface
# save reference to tool interfaces
self.shaDEM = shaDEM(iface)
self.svf = svf(iface)
self.SolarAccess = SolarAccess(iface)
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value("locale/userLocale")[0:2]
localePath = os.path.join(self.plugin_dir, 'i18n', 'demtools_{}.qm'.format(locale))
if os.path.exists(localePath):
self.translator = QTranslator()
self.translator.load(localePath)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
#check necessary libraries
try:
import numpy
import numexpr
except ImportError:
QMessageBox.critical( self.iface.mainWindow(),"ImportError", "Plugin requires Numpy & Numexpr libraries.\n\See http://www.numpy.org & https://code.google.com/p/numexpr/" )
try:
import Pysolar as solar
except ImportError:
try:
import solar
except ImportError:
QMessageBox.critical( self.iface.mainWindow(),"ImportError", "Plugin requires Pysolar libraries.\n\See http://pysolar.org/" )
def initGui(self):
# Create action that will start plugin configuration
self.shaDEMact = QAction(
QIcon(":/plugins/demtools/shaDEM.png"),
u"ShaDEM", self.iface.mainWindow())
self.SVFact = QAction(
QIcon(":/plugins/demtools/SVF.png"),
u"SVF", self.iface.mainWindow())
self.solaract = QAction(
QIcon(":/plugins/demtools/solaraccess.png"),
u"SolarAccess", self.iface.mainWindow())
# connect the actions to the run methods
self.shaDEMact.triggered.connect(self.shaDEM.start)
self.SVFact.triggered.connect(self.svf.start)
self.solaract.triggered.connect(self.SolarAccess.start)
# Add toolbar buttons and menu items
self.iface.addToolBarIcon(self.shaDEMact)
self.iface.addPluginToRasterMenu(u"&DEM Tools", self.shaDEMact)
self.iface.addToolBarIcon(self.SVFact)
self.iface.addPluginToRasterMenu(u"&DEM Tools", self.SVFact)
self.iface.addToolBarIcon(self.solaract)
self.iface.addPluginToRasterMenu(u"&DEM Tools", self.solaract)
def unload(self):
# Remove the plugin menu items and icons
self.iface.removePluginRasterMenu(u"&DEM Tools", self.shaDEMact)
self.iface.removeToolBarIcon(self.shaDEMact)
self.iface.removePluginRasterMenu(u"&DEM Tools", self.SVFact)
self.iface.removeToolBarIcon(self.SVFact)
self.iface.removePluginRasterMenu(u"&DEM Tools", self.solaract)
self.iface.removeToolBarIcon(self.solaract)
| gpl-2.0 | -3,094,973,200,494,068,700 | 37.525424 | 183 | 0.543995 | false |
thinkopensolutions/tkobr-addons | tko_partner_relatives/__manifest__.py | 1 | 1805 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# Thinkopen Brasil
# Copyright (C) Thinkopen Solutions Brasil (<http://www.tkobr.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Partner Relatives',
'version': '0.001',
'category': 'Customizations',
'sequence': 38,
'complexity': 'normal',
    'description': '''This module adds a Relatives tab to the partner form.
    ''',
'author': 'ThinkOpen Solutions Brasil',
'website': 'http://www.tkobr.com',
'images': ['images/oerp61.jpeg',
],
'depends': [
'base',
],
'data': [
'partner_view.xml',
],
'init': [],
'demo': [],
'update': [],
'test': [], # YAML files with tests
'installable': True,
'application': False,
# If it's True, the modules will be auto-installed when all dependencies are installed
'auto_install': False,
'certificate': '',
}
| agpl-3.0 | 7,609,688,550,439,498,000 | 33.711538 | 90 | 0.579501 | false |
WeAreWizards/proppy | proppy/proposal.py | 1 | 6603 | from collections import defaultdict
from proppy.validators import (
is_currency,
is_date,
is_percentage,
is_present,
are_valid_deliverables,
are_valid_rates
)
from proppy.utils import (
get_work_days_interval,
to_date
)
class Proposal(object):
validation_rules = {
'customer.company': [is_present],
'customer.person': [is_present],
'customer.email': [is_present],
'project.name': [is_present],
'project.description': [is_present],
'project.worker': [is_present],
'project.currency': [is_present, is_currency],
'project.discount': [is_percentage],
'project.start': [is_present, is_date()],
'project.end': [is_present, is_date()],
'project.uat_start': [is_date(optional=True)],
'project.uat_end': [is_date(optional=True)],
'project.rates': [is_present, are_valid_rates],
'project.deliverables': [is_present, are_valid_deliverables],
}
def __init__(self, config):
self._errors = []
# Not using .get below as we have already checked for them
# when loading the toml
self.customer = config['customer']
self.project = config['project']
def _fetch_value(self, field):
"""
Allow dotted path to class objects dict, ie
customer.company is equivalent to self.customer['company']
"""
paths = field.split(".")
base = getattr(self, paths[0])
for key in paths[1:]:
base = base.get(key)
return base
def basic_validation(self):
"""
Only validates using the class validation dict: presence, type etc
Does not check business logic
"""
for field, rules in self.validation_rules.items():
value = self._fetch_value(field)
for rule in rules:
valid = rule(value)
# Only show one error at a time per field
if not valid:
self._errors.append(rule.message % field)
break
def logic_validation(self):
"""
        Ensure there's no 'stupid' data, like a UAT period
        lasting 1 month while the dev is 5 days, or an excessive discount.
Also saves some of the computed data back on the project object
for use in templates:
- start_date
- end_date
- dev_length
- sum_paid_deliverables
- sum_free_deliverables
"""
# can't have all the deliverable set to free
if all(d['free'] for d in self.project['deliverables']):
self._errors.append("Can't have all deliverables set to free")
return
deliverables = self.project['deliverables']
# not using a rate we haven't specified in a deliverable
rate_names = [rate['name'] for rate in self.project['rates']]
if any(d['rate'] not in rate_names for d in deliverables):
self._errors.append(
"An unknown rate was used in a deliverable"
)
return
# start and end dates are accurates given the estimates
self.project['start_date'] = to_date(self.project['start'])
self.project['end_date'] = to_date(self.project['end'])
length_project = get_work_days_interval(
self.project['start_date'], self.project['end_date']
)
dev_length = sum([d['estimate'] for d in deliverables])
self.project['dev_length'] = dev_length
dev_length /= self.project['worker']
# not too short
if dev_length > length_project:
self._errors.append(
"Project take more time than the timeline allows"
)
return
# but not too long either
if length_project > dev_length * 3:
self._errors.append(
"Project take way less time than the timeline shows"
)
return
# UAT validation: needs to be after the end date of project
# and should be shorter than the project
# UAT is not mandatory though
if 'uat_start' in self.project:
self.project['uat_start_date'] = to_date(self.project['uat_start'])
self.project['uat_end_date'] = to_date(self.project['uat_end'])
if self.project['uat_start_date'] < self.project['end_date']:
self._errors.append(
"UAT can't start before the end of the project"
)
return
length_uat = get_work_days_interval(
self.project['uat_start_date'], self.project['uat_end_date']
)
if length_uat > 14:
self._errors.append(
"UAT can't take longer than two weeks"
)
return
# And finally onto the costs validation
day_rates = {r['name']: r['amount'] for r in self.project['rates']}
# the sum we will invoice based on the deliverables itself
sum_deliverables = 0
# the sum we give away as a discount, ie free deliverables
sum_free_deliverables = 0
self.project['costs_by_rate'] = defaultdict(int)
for d in deliverables:
cost = d['estimate'] * day_rates[d['rate']]
sum_deliverables += cost
if d['free']:
sum_free_deliverables += cost
self.project['costs_by_rate'][d['rate']] += cost
self.project['sum_deliverables'] = sum_deliverables
self.project['sum_free_deliverables'] = sum_free_deliverables
# now we need to check that the discount validation is not too high
if self.project['discount'] > 50:
self._errors.append("Discount is set too high")
return
self.project['discount_amount'] = (
(sum_deliverables - sum_free_deliverables) / 100
* self.project['discount']
)
self.project['invoice_amount'] = (
sum_deliverables
- sum_free_deliverables
- self.project['discount_amount']
)
def is_valid(self):
self.basic_validation()
# If we get errors during basic validation, no need
# to bother doing the business logic one
if len(self._errors) > 0:
return False
# Call business logic before the return
self.logic_validation()
return len(self._errors) == 0
def print_errors(self):
print("ERRORS:")
print('\n'.join(self._errors))
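# Illustrative sketch (not part of the original module): typical driver code.
# Loading the config via toml is an assumption; __init__ only needs a dict
# with 'customer' and 'project' keys.
#
#   import toml
#   proposal = Proposal(toml.load(open('proposal.toml')))
#   if not proposal.is_valid():
#       proposal.print_errors()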
| mit | 5,684,873,120,759,950,000 | 34.12234 | 79 | 0.562169 | false |
dann4520/vigenere_encoder_decoder | vigenere_encoder_decoder.py | 1 | 9778 | #written using Python 2.7.8
cipher_library = [{"A": "A", "B": "B", "C": "C", "D": "D", "E": "E", "F": "F", "G": "G", "H": "H", "I": "I", "J": "J", "K": "K", "L": "L", "M": "M", "N": "N", "O": "O", "P": "P", "Q": "Q", "R": "R", "S": "S", "T": "T", "U": "U", "V": "V", "W": "W", "X": "X", "Y": "Y", "Z": "Z"},
{"A": "B", "B": "C", "C": "D", "D": "E", "E": "F", "F": "G", "G": "H", "H": "I", "I": "J", "J": "K", "K": "L", "L": "M", "M": "N", "N": "O", "O": "P", "P": "Q", "Q": "R", "R": "S", "S": "T", "T": "U", "U": "V", "V": "W", "W": "X", "X": "Y", "Y": "Z", "Z": "A"},
{"A": "C", "B": "D", "C": "E", "D": "F", "E": "G", "F": "H", "G": "I", "H": "J", "I": "K", "J": "L", "K": "M", "L": "N", "M": "O", "N": "P", "O": "Q", "P": "R", "Q": "S", "R": "T", "S": "U", "T": "V", "U": "W", "V": "X", "W": "Y", "X": "Z", "Y": "A", "Z": "B"},
{"A": "D", "B": "E", "C": "F", "D": "G", "E": "H", "F": "I", "G": "J", "H": "K", "I": "L", "J": "M", "K": "N", "L": "O", "M": "P", "N": "Q", "O": "R", "P": "S", "Q": "T", "R": "U", "S": "V", "T": "W", "U": "X", "V": "Y", "W": "Z", "X": "A", "Y": "B", "Z": "C"},
{"A": "E", "B": "F", "C": "G", "D": "H", "E": "I", "F": "J", "G": "K", "H": "L", "I": "M", "J": "N", "K": "O", "L": "P", "M": "Q", "N": "R", "O": "S", "P": "T", "Q": "U", "R": "V", "S": "W", "T": "X", "U": "Y", "V": "Z", "W": "A", "X": "B", "Y": "C", "Z": "D"},
{"A": "F", "B": "G", "C": "H", "D": "I", "E": "J", "F": "K", "G": "L", "H": "M", "I": "N", "J": "O", "K": "P", "L": "Q", "M": "R", "N": "S", "O": "T", "P": "U", "Q": "V", "R": "W", "S": "X", "T": "Y", "U": "Z", "V": "A", "W": "B", "X": "C", "Y": "D", "Z": "E"},
{"A": "G", "B": "H", "C": "I", "D": "J", "E": "K", "F": "L", "G": "M", "H": "N", "I": "O", "J": "P", "K": "Q", "L": "R", "M": "S", "N": "T", "O": "U", "P": "V", "Q": "W", "R": "X", "S": "Y", "T": "Z", "U": "A", "V": "B", "W": "C", "X": "D", "Y": "E", "Z": "F"},
{"A": "H", "B": "I", "C": "J", "D": "K", "E": "L", "F": "M", "G": "N", "H": "O", "I": "P", "J": "Q", "K": "R", "L": "S", "M": "T", "N": "U", "O": "V", "P": "W", "Q": "X", "R": "Y", "S": "Z", "T": "A", "U": "B", "V": "C", "W": "D", "X": "E", "Y": "F", "Z": "G"},
{"A": "I", "B": "J", "C": "K", "D": "L", "E": "M", "F": "N", "G": "O", "H": "P", "I": "Q", "J": "R", "K": "S", "L": "T", "M": "U", "N": "V", "O": "W", "P": "X", "Q": "Y", "R": "Z", "S": "A", "T": "B", "U": "C", "V": "D", "W": "E", "X": "F", "Y": "G", "Z": "H"},
{"A": "J", "B": "K", "C": "L", "D": "M", "E": "N", "F": "O", "G": "P", "H": "Q", "I": "R", "J": "S", "K": "T", "L": "U", "M": "V", "N": "W", "O": "X", "P": "Y", "Q": "Z", "R": "A", "S": "B", "T": "C", "U": "D", "V": "E", "W": "F", "X": "G", "Y": "H", "Z": "I"},
{"A": "K", "B": "L", "C": "M", "D": "N", "E": "O", "F": "P", "G": "Q", "H": "R", "I": "S", "J": "T", "K": "U", "L": "V", "M": "W", "N": "X", "O": "Y", "P": "Z", "Q": "A", "R": "B", "S": "C", "T": "D", "U": "E", "V": "F", "W": "G", "X": "H", "Y": "I", "Z": "J"},
{"A": "L", "B": "M", "C": "N", "D": "O", "E": "P", "F": "Q", "G": "R", "H": "S", "I": "T", "J": "U", "K": "V", "L": "W", "M": "X", "N": "Y", "O": "Z", "P": "A", "Q": "B", "R": "C", "S": "D", "T": "E", "U": "F", "V": "G", "W": "H", "X": "I", "Y": "J", "Z": "K"},
{"A": "M", "B": "N", "C": "O", "D": "P", "E": "Q", "F": "R", "G": "S", "H": "T", "I": "U", "J": "V", "K": "W", "L": "X", "M": "Y", "N": "Z", "O": "A", "P": "B", "Q": "C", "R": "D", "S": "E", "T": "F", "U": "G", "V": "H", "W": "I", "X": "J", "Y": "K", "Z": "L"},
{"A": "N", "B": "O", "C": "P", "D": "Q", "E": "R", "F": "S", "G": "T", "H": "U", "I": "V", "J": "W", "K": "X", "L": "Y", "M": "Z", "N": "A", "O": "B", "P": "C", "Q": "D", "R": "E", "S": "F", "T": "G", "U": "H", "V": "I", "W": "J", "X": "K", "Y": "L", "Z": "M"},
{"A": "O", "B": "P", "C": "Q", "D": "R", "E": "S", "F": "T", "G": "U", "H": "V", "I": "W", "J": "X", "K": "Y", "L": "Z", "M": "A", "N": "B", "O": "C", "P": "D", "Q": "E", "R": "F", "S": "G", "T": "H", "U": "I", "V": "J", "W": "K", "X": "L", "Y": "M", "Z": "N"},
{"A": "P", "B": "Q", "C": "R", "D": "S", "E": "T", "F": "U", "G": "V", "H": "W", "I": "X", "J": "Y", "K": "Z", "L": "A", "M": "B", "N": "C", "O": "D", "P": "E", "Q": "F", "R": "G", "S": "H", "T": "I", "U": "J", "V": "K", "W": "L", "X": "M", "Y": "N", "Z": "O"},
{"A": "Q", "B": "R", "C": "S", "D": "T", "E": "U", "F": "V", "G": "W", "H": "X", "I": "Y", "J": "Z", "K": "A", "L": "B", "M": "C", "N": "D", "O": "E", "P": "F", "Q": "G", "R": "H", "S": "I", "T": "J", "U": "K", "V": "L", "W": "M", "X": "N", "Y": "O", "Z": "P"},
{"A": "R", "B": "S", "C": "T", "D": "U", "E": "V", "F": "W", "G": "X", "H": "Y", "I": "Z", "J": "A", "K": "B", "L": "C", "M": "D", "N": "E", "O": "F", "P": "G", "Q": "H", "R": "I", "S": "J", "T": "K", "U": "L", "V": "M", "W": "N", "X": "O", "Y": "P", "Z": "Q"},
{"A": "S", "B": "T", "C": "U", "D": "V", "E": "W", "F": "X", "G": "Y", "H": "Z", "I": "A", "J": "B", "K": "C", "L": "D", "M": "E", "N": "F", "O": "G", "P": "H", "Q": "I", "R": "J", "S": "K", "T": "L", "U": "M", "V": "N", "W": "O", "X": "P", "Y": "Q", "Z": "R"},
{"A": "T", "B": "U", "C": "V", "D": "W", "E": "X", "F": "Y", "G": "Z", "H": "A", "I": "B", "J": "C", "K": "D", "L": "E", "M": "F", "N": "G", "O": "H", "P": "I", "Q": "J", "R": "K", "S": "L", "T": "M", "U": "N", "V": "O", "W": "P", "X": "Q", "Y": "R", "Z": "S"},
{"A": "U", "B": "V", "C": "W", "D": "X", "E": "Y", "F": "Z", "G": "A", "H": "B", "I": "C", "J": "D", "K": "E", "L": "F", "M": "G", "N": "H", "O": "I", "P": "J", "Q": "K", "R": "L", "S": "M", "T": "N", "U": "O", "V": "P", "W": "Q", "X": "R", "Y": "S", "Z": "T"},
{"A": "V", "B": "W", "C": "X", "D": "Y", "E": "Z", "F": "A", "G": "B", "H": "C", "I": "D", "J": "E", "K": "F", "L": "G", "M": "H", "N": "I", "O": "J", "P": "K", "Q": "L", "R": "M", "S": "N", "T": "O", "U": "P", "V": "Q", "W": "R", "X": "S", "Y": "T", "Z": "U"},
{"A": "W", "B": "X", "C": "Y", "D": "Z", "E": "A", "F": "B", "G": "C", "H": "D", "I": "E", "J": "F", "K": "G", "L": "H", "M": "I", "N": "J", "O": "K", "P": "L", "Q": "M", "R": "N", "S": "O", "T": "P", "U": "Q", "V": "R", "W": "S", "X": "T", "Y": "U", "Z": "V"},
{"A": "X", "B": "Y", "C": "Z", "D": "A", "E": "B", "F": "C", "G": "D", "H": "E", "I": "F", "J": "G", "K": "H", "L": "I", "M": "J", "N": "K", "O": "L", "P": "M", "Q": "N", "R": "O", "S": "P", "T": "Q", "U": "R", "V": "S", "W": "T", "X": "U", "Y": "V", "Z": "W"},
{"A": "Y", "B": "Z", "C": "A", "D": "B", "E": "C", "F": "D", "G": "E", "H": "F", "I": "G", "J": "H", "K": "I", "L": "J", "M": "K", "N": "L", "O": "M", "P": "N", "Q": "O", "R": "P", "S": "Q", "T": "R", "U": "S", "V": "T", "W": "U", "X": "V", "Y": "W", "Z": "X"},
{"A": "Z", "B": "A", "C": "B", "D": "C", "E": "D", "F": "E", "G": "F", "H": "G", "I": "H", "J": "I", "K": "J", "L": "K", "M": "L", "N": "M", "O": "N", "P": "O", "Q": "P", "R": "Q", "S": "R", "T": "S", "U": "T", "V": "U", "W": "V", "X": "W", "Y": "X", "Z": "Y"}
]
key_library = {"A": 0, "B": 1, "C": 2, "D": 3, "E": 4, "F": 5, "G": 6, "H": 7, "I": 8, "J": 9, "K": 10, "L": 11, "M": 12, "N": 13, "O": 14, "P": 15, "Q": 16, "R": 17, "S": 18, "T": 19, "U": 20, "V": 21, "W": 22, "X": 23, "Y": 24, "Z": 25}
def encode_message(message, passkey):
passencoder = passkey
encoded_message = ""
count = 0
while len(passencoder) < len(message): #ensures we have a long enough passencoder
passencoder += passkey
for char in message:
if char == " ":
encoded_message += " " #passes blank spaces through
        elif char not in cipher_library[0]: #if not in cipher library
            continue #skip characters that cannot be encoded ('next' alone was a no-op)
else:
encoded_letter = cipher_library[key_library[passencoder[count]]][char] #looks up encoded letter using dictionary
encoded_message += encoded_letter #passencoder[count] indicates
count += 1
print encoded_message
raw_input("Press Enter to continue...")
def decode_message(encoded_message, passkey):
#provided with an encoded message and the proper passkey will return decoded message
passencoder = passkey
while len(passencoder) < len(encoded_message):
passencoder += passkey
count = 0
decoded_message = ""
for c in encoded_message:
if c == " ":
decoded_message += " "
for key, char in cipher_library[key_library[passencoder[count]]].items():
if char == c:
decoded_message += key
count += 1
print decoded_message
raw_input("Press Enter to continue...")
user_option = ""
while user_option != "0":
user_option = raw_input("Enter '1' to encode a message." '\n'
"Enter '2' to decode a message." '\n'
"Enter '0' to quit: ")
if user_option == "0":
print "Quitting is not cool"
elif user_option == "1":
encode_message(raw_input("Input message to encode: ").upper(), passkey = raw_input("Input keyword to encode: ").upper())
elif user_option == "2":
decode_message(raw_input("Input message to decode: ").upper(), passkey = raw_input("Input keyword to decode: ").upper())
else:
print "Invalid selection. Please try again."
| mit | -7,248,983,136,332,615,000 | 99.804124 | 279 | 0.275312 | false |
jeroanan/GameCollection | UI/Handlers/HandlerFactory.py | 1 | 2201 | # copyright (c) David Wilson 2015
# This file is part of Icarus.
# Icarus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Icarus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Icarus. If not, see <http://www.gnu.org/licenses/>
import json
from UI.Cookies.Cookies import Cookies
from UI.Handlers.Exceptions.UnrecognisedHandlerException import UnrecognisedHandlerException
from UI.Handlers.IndexHandler import IndexHandler
from UI.Handlers.Session.Session import Session
class HandlerFactory(object):
def __init__(self, interactor_factory, renderer, config):
self.__interactor_factory = interactor_factory
self.__renderer = renderer
self.__config = config
self.__handlers = self.__load_handlers()
def __load_handlers(self):
with open("UI/Handlers/handlers.json") as f:
return json.load(f)["handlers"][0]
def create(self, handler_type):
handler = None
def renew_cookies():
if handler is None:
raise ValueError("handler not set")
handler.renew_cookies()
def string_to_handler():
ht = self.__handlers[handler_type]
module = __import__("UI.Handlers." + ht, fromlist=ht)
class_ = getattr(module, ht)
return class_(self.__interactor_factory, self.__renderer)
if handler_type == "index":
handler = IndexHandler(self.__interactor_factory, self.__renderer, self.__config)
elif handler_type in self.__handlers:
handler = string_to_handler()
else:
raise UnrecognisedHandlerException
handler.session = Session()
handler.cookies = Cookies()
renew_cookies()
return handler
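# Illustrative sketch (not part of the original module): the factory resolves
# handler names via UI/Handlers/handlers.json, so given hypothetical
# collaborators:
#
#   factory = HandlerFactory(interactor_factory, renderer, config)
#   handler = factory.create("index")   # IndexHandler with session/cookies attached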
| gpl-3.0 | 6,035,370,178,296,243,000 | 34.5 | 93 | 0.665607 | false |
alex-pardo/ANLP-PROJECT | findDemonyms.py | 1 | 1884 |
import re
from string import *
import sys
from nltk import *
import locale
from wikitools import wiki
from wikitools import api
from wikitools import page
from wikitools import category
wikiAPI = {
'en': "http://en.wikipedia.org/w/api.php"}
site = wiki.Wiki(wikiAPI['en'])
def generateDemonym(place, add, replace):
candidates = []
for rule in replace:
if len(rule[0]) > 0 and place.endswith(rule[0]):
candidates.append(place[:-len(rule[0])]+rule[1])
for rule in add:
if len(rule[0]) == 0 or place.endswith(rule[0]):
candidates.append(place+rule[1])
return candidates
def matchCandidates(link, candidates):
text = page.Page(site, link).getWikiText()
#if 'demonym' in text.lower():
score = 0
rules = [0]*len(candidates)
pos = 0
for candidate in candidates:
if findWholeWord(candidate.lower())(text.lower()):
score += 1
rules[pos] += 1
pos += 1
return score, rules
# else:
# raise NameError('No demonym')
def findWholeWord(w):
return re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE).search
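# Illustrative examples (not part of the original script):
#   findWholeWord('parisian')('A Parisian cafe')  # -> match object (case-insensitive)
#   findWholeWord('paris')('comparison')          # -> None (whole words only)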
add = []
replace = []
with open('add_rules.csv', 'r') as f:
for line in f.readlines():
line = line.replace('\n','')
tmp = line.split(',')
add.append((tmp[0],tmp[1]))
with open('replace_rules.csv', 'r') as f:
for line in f.readlines():
line = line.replace('\n','')
tmp = line.split(',')
replace.append((tmp[0],tmp[1]))
matchings = 0
test_len = 0
f = open('countries.csv', 'r')
for line in f.readlines():
line = line.replace('\n','')
try:
candidates = generateDemonym(line, add, replace)
score, rules = matchCandidates(line, candidates)
if score > 0:
matching_rules = []
for r in range(0, len(candidates)):
if rules[r]:
matching_rules.append(candidates[r])
print line, ',' ,matching_rules
if score > 0:
matchings += 1
test_len += 1
except:
pass
f.close()
print matchings, test_len
| apache-2.0 | 5,177,841,268,757,151,000 | 20.409091 | 73 | 0.654989 | false |
wzong/TinyCloud | virtualbox/apis.py | 1 | 3618 | import json
import os
from django.http import JsonResponse, StreamingHttpResponse
from django.views.decorators.http import require_http_methods
from authentication import authenticator
from machine import machine_controller
from virtualbox import virtualbox_controller
GUEST_ADDITIONS_DIR = '/var/tinymakecloud/additions/'
GUEST_TEMPLATES_DIR = '/var/tinymakecloud/templates/'
GUEST_OS_DIR = '/var/tinymakecloud/images/'
def _ListFiles(dir_path):
return [
name for name in os.listdir(dir_path)
if os.path.isfile(os.path.join(dir_path, name))
]
def StreamJson(iterator):
yield '['
seq = 0
for line in iterator:
line['seq'] = seq
line['done'] = False
seq += 1
yield json.dumps(line) + ','
yield json.dumps({'seq': seq, 'msg': '', 'done': True, 's': 'OK'})
yield ']'
def StreamError(error):
json_str = json.dumps({'s': 'FAIL', 'msg': error, 'done': True, 'seq': 0})
yield '[' + json_str + ']'
@require_http_methods(['POST'])
@authenticator.RequireAuth
def StartVms(request, vm_names):
vm_names = vm_names.split(',')
return StreamingHttpResponse(StreamJson(
virtualbox_controller.StartVms(vm_names)))
@require_http_methods(['POST'])
@authenticator.RequireAuth
def PowerOffVms(request, vm_names):
vm_names = vm_names.split(',')
return StreamingHttpResponse(StreamJson(
virtualbox_controller.PowerOffVms(vm_names)))
@require_http_methods(['POST'])
@authenticator.RequireAuth
def DeleteVms(request, vm_names):
vm_names = vm_names.split(',')
return StreamingHttpResponse(StreamJson(
virtualbox_controller.DeleteVms(vm_names)))
@require_http_methods(['POST'])
@authenticator.RequireAuth
def CreateVm(request):
reqJson = json.loads(request.body)
vm_name = reqJson['name']
machine_name = reqJson['machine']
memory = reqJson['memory']
if memory not in virtualbox_controller.OPTIONS_MEMORY:
return StreamingHttpResponse(StreamError('Invalid memory option.'))
if reqJson['guest_creation'] == 'CLONE':
guest_template = reqJson['guest_template']
guest_template_from_machine = reqJson['guest_template_from_machine']
return StreamingHttpResponse(StreamJson(
virtualbox_controller.CreateVmFromSnapshot(
vm_name,
machine_name=machine_name,
memory=str(memory),
snapshot_path=guest_template,
snapshot_from_machine=guest_template_from_machine)))
elif reqJson['guest_creation'] == 'INSTALL':
guest_image = reqJson['guest_image']
guest_image_from_machine = reqJson['guest_image_from_machine']
guest_addition = reqJson['guest_addition']
guest_addition_from_machine = reqJson['guest_addition_from_machine']
return StreamingHttpResponse(StreamJson(
virtualbox_controller.CreateVm(
vm_name,
machine_name=machine_name,
password=virtualbox_controller.GetDefaultPassword(),
memory=str(memory),
image=guest_image,
image_from_machine=guest_image_from_machine,
additions_iso=guest_addition,
additions_iso_from_machine=guest_addition_from_machine)))
else:
return StreamingHttpResponse(
StreamError('Invalid option: ' + reqJson['guest_creation']))
@require_http_methods(['POST'])
@authenticator.RequireAuth
def GetAllVmInfo(request):
return JsonResponse({'vms': virtualbox_controller.GetAllVmInfo()})
@require_http_methods(['POST'])
@authenticator.RequireAuth
def GetOptions(request):
try:
return JsonResponse(virtualbox_controller.GetDefaultVm())
except Exception as e:
return JsonResponse({'error': str(e)})
| apache-2.0 | 4,134,940,897,749,442,000 | 30.189655 | 76 | 0.698729 | false |
qilicun/python | python2/PyMOTW-1.132/PyMOTW/sqlite3/sqlite3_transaction_commit.py | 1 | 1131 | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 Doug Hellmann. All rights reserved.
#
"""Creating the schema in an sqlite3 database.
"""
#end_pymotw_header
import sqlite3
db_filename = 'todo.db'
def show_projects(conn):
cursor = conn.cursor()
cursor.execute('select name, description from project')
for name, desc in cursor.fetchall():
print ' ', name
return
with sqlite3.connect(db_filename) as conn1:
print 'Before changes:'
show_projects(conn1)
# Insert in one cursor
cursor1 = conn1.cursor()
cursor1.execute("""
insert into project (name, description, deadline)
values ('virtualenvwrapper', 'Virtualenv Extensions', '2011-01-01')
""")
print '\nAfter changes in conn1:'
show_projects(conn1)
# Select from another connection, without committing first
print '\nBefore commit:'
with sqlite3.connect(db_filename) as conn2:
show_projects(conn2)
# Commit then select from another connection
conn1.commit()
print '\nAfter commit:'
with sqlite3.connect(db_filename) as conn3:
show_projects(conn3)
| gpl-3.0 | 4,388,495,015,724,249,600 | 23.586957 | 71 | 0.671972 | false |
HewlettPackard/oneview-ansible | library/oneview_san_manager_facts.py | 1 | 3801 | #!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2017) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: oneview_san_manager_facts
short_description: Retrieve facts about one or more of the OneView SAN Managers
description:
- Retrieve facts about one or more of the SAN Managers from OneView
version_added: "2.5"
requirements:
- hpeOneView >= 2.0.1
author:
- Felipe Bulsoni (@fgbulsoni)
- Thiago Miotto (@tmiotto)
- Adriane Cardozo (@adriane-cardozo)
options:
provider_display_name:
description:
- Provider Display Name.
params:
description:
- List of params to delimit, filter and sort the list of resources.
- "params allowed:
- C(start): The first item to return, using 0-based indexing.
- C(count): The number of resources to return.
- C(query): A general query string to narrow the list of resources returned.
- C(sort): The sort order of the returned data set."
extends_documentation_fragment:
- oneview
'''
EXAMPLES = '''
- name: Gather facts about all SAN Managers
oneview_san_manager_facts:
config: /etc/oneview/oneview_config.json
delegate_to: localhost
- debug: var=san_managers
- name: Gather paginated, filtered and sorted facts about SAN Managers
oneview_san_manager_facts:
config: /etc/oneview/oneview_config.json
params:
start: 0
count: 3
sort: name:ascending
query: isInternal eq false
delegate_to: localhost
- debug: var=san_managers
- name: Gather facts about a SAN Manager by provider display name
oneview_san_manager_facts:
config: /etc/oneview/oneview_config.json
provider_display_name: Brocade Network Advisor
delegate_to: localhost
- debug: var=san_managers
'''
RETURN = '''
san_managers:
description: Has all the OneView facts about the SAN Managers.
returned: Always, but can be null.
type: dict
'''
from ansible.module_utils.oneview import OneViewModuleBase
class SanManagerFactsModule(OneViewModuleBase):
argument_spec = dict(
provider_display_name=dict(type='str'),
params=dict(type='dict')
)
def __init__(self):
super(SanManagerFactsModule, self).__init__(additional_arg_spec=self.argument_spec)
self.resource_client = self.oneview_client.san_managers
def execute_module(self):
if self.module.params.get('provider_display_name'):
provider_display_name = self.module.params['provider_display_name']
san_manager = self.oneview_client.san_managers.get_by_provider_display_name(provider_display_name)
if san_manager:
resources = [san_manager]
else:
resources = []
else:
resources = self.oneview_client.san_managers.get_all(**self.facts_params)
return dict(changed=False, ansible_facts=dict(san_managers=resources))
def main():
SanManagerFactsModule().run()
if __name__ == '__main__':
main()
| apache-2.0 | 427,369,213,773,898,300 | 29.902439 | 110 | 0.676401 | false |
madmath/sous-chef | src/order/management/commands/generateorders.py | 1 | 1511 | from django.core.management.base import BaseCommand
from order.models import Order
from member.models import Client
from datetime import datetime
from django.contrib.admin.models import LogEntry, ADDITION, CHANGE
class Command(BaseCommand):
help = 'Generate orders for all clients using his preferences'
def add_arguments(self, parser):
parser.add_argument(
'--creation_date',
help='The date must be in the format YYYY-MM-DD',
)
parser.add_argument(
'delivery_date',
help='The date must be in the format YYYY-MM-DD',
)
def handle(self, *args, **options):
if options['creation_date']:
creation_date = datetime.strptime(
options['creation_date'], '%Y-%m-%d'
).date()
else:
creation_date = datetime.now().date()
delivery_date = datetime.strptime(
options['delivery_date'], '%Y-%m-%d'
).date()
clients = Client.active.all()
numorders = Order.create_orders_on_defaults(
creation_date, delivery_date, clients)
LogEntry.objects.log_action(
user_id=1, content_type_id=1,
object_id="", object_repr="Generation of order for "+str(
datetime.now().strftime('%Y-%m-%d %H:%M')),
action_flag=ADDITION,
)
print("On", creation_date,
"created", numorders,
"orders to be delivered on", delivery_date, ".")
| agpl-3.0 | 185,264,954,829,694,800 | 34.97619 | 69 | 0.581072 | false |
saihttam/kaggle-axa | RobustRegressionDriver.py | 1 | 5500 | import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from random import sample, seed
from sklearn.decomposition import TruncatedSVD
from math import floor
from sklearn import cross_validation
from numpy.linalg import norm, svd
def inexact_augmented_lagrange_multiplier(X, lmbda=.01, tol=1e-3,
maxiter=100, verbose=True):
"""
Inexact Augmented Lagrange Multiplier
"""
Y = X
norm_two = norm(Y.ravel(), 2)
norm_inf = norm(Y.ravel(), np.inf) / lmbda
dual_norm = np.max([norm_two, norm_inf])
Y = Y / dual_norm
A = np.zeros(Y.shape)
E = np.zeros(Y.shape)
dnorm = norm(X, 'fro')
mu = 1.25 / norm_two
rho = 1.5
sv = 10.
n = Y.shape[0]
itr = 0
while True:
Eraw = X - A + (1/mu) * Y
Eupdate = np.maximum(Eraw - lmbda / mu, 0) + np.minimum(Eraw + lmbda / mu, 0)
U, S, V = svd(X - Eupdate + (1 / mu) * Y, full_matrices=False)
svp = (S > 1 / mu).shape[0]
if svp < sv:
sv = np.min([svp + 1, n])
else:
sv = np.min([svp + round(.05 * n), n])
Aupdate = np.dot(np.dot(U[:, :svp], np.diag(S[:svp] - 1 / mu)), V[:svp, :])
A = Aupdate
E = Eupdate
Z = X - A - E
Y = Y + mu * Z
mu = np.min([mu * rho, mu * 1e7])
itr += 1
if ((norm(Z, 'fro') / dnorm) < tol) or (itr >= maxiter):
break
if verbose:
print "Finished at iteration %d" % (itr)
return A, E
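# Minimal usage sketch for the RPCA decomposition above (illustrative data,
# not part of the original pipeline): X is split into a low-rank part A and
# a sparse outlier part E.
def _rpca_demo():
    rng = np.random.RandomState(0)
    low_rank = np.dot(rng.randn(50, 3), rng.randn(3, 40))  # rank-3 matrix
    outliers = np.zeros((50, 40))
    outliers[rng.rand(50, 40) < 0.05] = 10.0  # a few large sparse spikes
    A, E = inexact_augmented_lagrange_multiplier(
        low_rank + outliers,
        lmbda=1.0 / np.sqrt(50),  # common RPCA choice: 1/sqrt(max(m, n))
        verbose=False)
    print("low-rank residual: %.3f" % norm(A - low_rank, 'fro'))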
class RegressionDriver(object):
"""Class for Regression-based analysis of Driver traces"""
def __init__(self, driver, datadict, numberofrows=40): #, numfeatures = 200):
"""Initialize by providing a (positive) driver example and a dictionary of (negative) driver references."""
seed(42)
self.driver = driver
self.numfeatures = self.driver.num_features
featurelist = []
self.__clf = GradientBoostingRegressor(n_estimators=300, max_depth=4, min_samples_leaf=2)
# gbr = GradientBoostingRegressor(n_estimators=500, max_depth=10, max_features=numfeatures, random_state=42)
# pca = PCA(whiten=True, n_components=numfeatures)
# estimators = [('polyf', PolynomialFeatures()), ('scale', MinMaxScaler()), ('pca', PCA()), ('gbr', gbr)]
# self.__clf = Pipeline(estimators)
self.__indexlist = []
for trace in self.driver.traces:
self.__indexlist.append(trace.identifier)
featurelist.append(trace.features)
# Initialize train and test np arrays
self.__traindata = np.asarray(featurelist)
self.__testdata = np.asarray(featurelist)
self.__trainlabels = np.ones((self.__traindata.shape[0],))
data = np.empty((0, self.numfeatures), float)
setkeys = datadict.keys()
if driver.identifier in setkeys:
setkeys.remove(driver.identifier)
else:
setkeys = sample(setkeys, len(setkeys) - 1)
for key in setkeys:
if key != driver.identifier:
rand_smpl = [datadict[key][i] for i in sorted(sample(xrange(len(datadict[key])), numberofrows)) ]
data = np.append(data, np.asarray(rand_smpl), axis=0)
self.__traindata = np.append(self.__traindata, data, axis=0)
self.__trainlabels = np.append(self.__trainlabels, np.zeros((data.shape[0],)), axis=0)
self.__y = np.zeros((self.__testdata.shape[0],))
def classify(self, nfolds=4):
"""Perform classification"""
components = self.__traindata.shape[1]
_, train_rpca_X_np = inexact_augmented_lagrange_multiplier(np.nan_to_num(self.__traindata))
_, test_rpca_X_np = inexact_augmented_lagrange_multiplier(np.nan_to_num(self.__testdata))
skf = cross_validation.StratifiedKFold(self.__trainlabels, n_folds=nfolds)
for train_index, _ in skf:
X_train = train_rpca_X_np[train_index]
y_train = self.__trainlabels[train_index]
self.__clf.fit(X_train, y_train)
self.__y += self.__clf.predict(test_rpca_X_np)
self.__y /= float(nfolds)
# feature_importance = self.__clf.feature_importances_
# feature_importance = 100.0 * (feature_importance / feature_importance.max())
# print feature_importance
def toKaggle(self):
"""Return string in Kaggle submission format"""
returnstring = ""
for i in xrange(len(self.__indexlist) - 1):
returnstring += "%d_%d,%.6f\n" % (self.driver.identifier, self.__indexlist[i], self.__y[i])
returnstring += "%d_%d,%.6f" % (self.driver.identifier, self.__indexlist[len(self.__indexlist)-1], self.__y[len(self.__indexlist)-1])
return returnstring
def validate(self, datadict):
from sklearn.metrics import roc_auc_score
testdata = np.empty((0, self.numfeatures), float)
y_true = np.empty((0,), float)
for key in datadict.keys():
currenttestdata = np.asarray(datadict[key])
testdata = np.append(testdata, currenttestdata, axis=0)
if key != self.driver.identifier:
y_true = np.append(y_true, np.zeros((currenttestdata.shape[0],)), axis=0)
else:
y_true = np.append(y_true, np.ones((currenttestdata.shape[0],)), axis=0)
y_score = self.__clf.predict(testdata)
result = roc_auc_score(y_true, y_score)
return result | bsd-2-clause | 4,683,779,648,376,033,000 | 42.65873 | 141 | 0.586909 | false |
raymondanthony/youtube-dl | youtube_dl/extractor/xnxx.py | 1 | 1491 | # encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
compat_urllib_parse,
)
class XNXXIE(InfoExtractor):
_VALID_URL = r'^https?://(?:video|www)\.xnxx\.com/video(?P<id>[0-9]+)/(.*)'
_TEST = {
'url': 'http://video.xnxx.com/video1135332/lida_naked_funny_actress_5_',
'md5': '0831677e2b4761795f68d417e0b7b445',
'info_dict': {
'id': '1135332',
'ext': 'flv',
'title': 'lida » Naked Funny Actress (5)',
'age_limit': 18,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
# Get webpage content
webpage = self._download_webpage(url, video_id)
video_url = self._search_regex(r'flv_url=(.*?)&',
webpage, 'video URL')
video_url = compat_urllib_parse.unquote(video_url)
video_title = self._html_search_regex(r'<title>(.*?)\s+-\s+XNXX.COM',
webpage, 'title')
video_thumbnail = self._search_regex(r'url_bigthumb=(.*?)&',
webpage, 'thumbnail', fatal=False)
return {
'id': video_id,
'url': video_url,
'title': video_title,
'ext': 'flv',
'thumbnail': video_thumbnail,
'age_limit': 18,
}
| unlicense | 5,536,822,106,596,122,000 | 29.408163 | 80 | 0.495973 | false |
Fuchida/Archive | albme-py/albme.py | 1 | 3547 | """Perform automated searches against http://www.albme.org/.
The script will get license information such as the licensee name,
license number and expiration date information of a medical professional.
This information is then saved to a json file
"""
import json
import requests
import grequests
from BeautifulSoup import BeautifulSoup
LICENSE_TYPE = "TA"
LAST_NAME = "c"
LICENSE_NUMBER = ""
FIRST_NAME = ""
CITY = ""
SEARCH_URL = "http://www.albme.org/AlbmeSearchWeb/search"
OUTPUT_FILE = 'data.json'
def save_to_file(userDetails):
"""Save dictionary to local json file
Args:
userDetails: A dictionary of user information
"""
with open(OUTPUT_FILE, 'w') as writeFile:
writeFile.write(json.dumps(userDetails))
def perform_async_requests(urls):
"""Perform asynchronous get requests given multiple links
Args:
urls: An array of URLs
Returns:
An array of requests response objects
"""
    unsentRequests = (grequests.get(resource) for resource in urls)
#Size param specifies the number of requests to be made at a time
return grequests.map(unsentRequests, size=10)
def parse_detail_page(detailPage):
"""Fetch licensee name, number and expiration date from detail page
Args:
detailPage: A html text of the results page
Returns:
A dictionary of licensee name, number and expiration date
"""
soup = BeautifulSoup(detailPage)
details = []
for tableRow in soup.findAll('table')[0].findAll('tr'):
#Information we care about comes back in arrays with two elements
#(key:value for table data).
tableData = tableRow.findAll('td', text=True)
#if there is no value for the table data, it wont be included
if len(tableData) == 2:
details.append(tableData)
else:
continue
    # From the list of items, construct the dictionary to return
    parsedDetails = {}
    for detail in details:
        if detail[0] == "Licensee name:":
            parsedDetails["Licensee name"] = detail[1]
        elif detail[0] == "License number:":
            parsedDetails["License number"] = detail[1]
        elif detail[0] == "Expiration date:":
            parsedDetails["Expiration date"] = detail[1]
        else:
            continue
return parsedDetails
def parse_results_page(resultsPage):
"""Fetch the detail links from the results page
Args:
resultsPage: A html text of the results page
Returns:
An array of links to the details page
"""
soup = BeautifulSoup(resultsPage)
links = []
for link in soup.findAll('a'):
if link.get('href') != 'search':
#Detail links are relative, appending to make them absolute
#and dropping first period from link and "/search" from SEARCH_URL.
links.append(SEARCH_URL[:-7]+link.get('href')[1:])
else:
continue
return links
def get_user_details():
"""Make a request to the search page then crawl each detail page of result
user information will be saved to a local file
"""
#Empty strings need to be submitted for empty data,
#otherwise the server assumes none are filled
postData = {
'licenseType': LICENSE_TYPE,
'licenseNumber': LICENSE_NUMBER,
'lastName': LAST_NAME,
'firstName': FIRST_NAME,
'city':CITY}
searchResponse = requests.post(
SEARCH_URL,
data=postData
)
detailLinks = parse_results_page(searchResponse.text)
detailResponses = perform_async_requests(detailLinks)
#for each reponse object of the detail page, parse the detail page
userDetails = {}
for detail in detailResponses:
userInformation = parse_detail_page(detail.text)
        userDetails[userInformation["License number"]] = userInformation
save_to_file(userDetails)
if __name__ == "__main__":
get_user_details()
| mit | -9,057,134,204,100,410,000 | 26.076336 | 75 | 0.728503 | false |
smartinov/fennec | fennec/libs/importer/importer.py | 1 | 3594 | from rest_framework.renderers import JSONRenderer
from fennec.apps.metamodel.serializers import ColumnSerializer, BasicSchemaSerializer, BasicTableSerializer, BasicIndexSerializer, \
ForeignKeyBasicSerializer
from fennec.apps.repository.models import BranchRevisionChange
from fennec.apps.metamodel.models import Change
__author__ = 'Darko'
class FennecImporter():
def __init__(self, model=None, user=None, branch_rev=None):
self.model = model if model else []
self.user = user
self.branch_rev = branch_rev
def import_model(self):
for schema in self.model:
self.__save_schema_change(schema)
for table in schema.tables:
self.__save_table_change(table)
for column in table.columns:
self.__save_column_change(column)
for index in table.indexes:
self.__save_index_change(index)
for fk in table.foreign_keys:
self.__save_foreign_key_change(fk)
def __save_schema_change(self, schema):
serializer = BasicSchemaSerializer(schema)
json = JSONRenderer().render(serializer.data)
change = Change()
change.change_type = 0
change.is_ui_change = False
change.made_by = self.user
change.object_type = 'Schema'
change.object_code = schema.id
change.content = json
change.save()
self.__save_branch_revision_change(change)
def __save_table_change(self, table):
serializer = BasicTableSerializer(table)
json = JSONRenderer().render(serializer.data)
change = Change()
change.change_type = 0
change.is_ui_change = False
change.made_by = self.user
change.object_type = 'Table'
change.object_code = table.id
change.content = json
change.save()
self.__save_branch_revision_change(change)
def __save_column_change(self, column):
serializer = ColumnSerializer(column)
json = JSONRenderer().render(serializer.data)
change = Change()
change.change_type = 0
change.is_ui_change = False
change.made_by = self.user
change.object_type = 'Column'
change.object_code = column.id
change.content = json
change.save()
self.__save_branch_revision_change(change)
def __save_index_change(self, index):
serializer = BasicIndexSerializer(index)
json = JSONRenderer().render(serializer.data)
change = Change()
change.change_type = 0
change.is_ui_change = False
change.made_by = self.user
change.object_type = 'Index'
change.object_code = index.id
change.content = json
change.save()
self.__save_branch_revision_change(change)
def __save_foreign_key_change(self, foreign_key):
serializer = ForeignKeyBasicSerializer(foreign_key)
json = JSONRenderer().render(serializer.data)
change = Change()
change.change_type = 0
change.is_ui_change = False
change.made_by = self.user
change.object_type = 'ForeignKey'
change.object_code = foreign_key.id
change.content = json
change.save()
self.__save_branch_revision_change(change)
def __save_branch_revision_change(self, change):
br_change = BranchRevisionChange()
br_change.branch_revision_ref = self.branch_rev
br_change.change_ref = change
# br_change.id = ordinal
br_change.save() | gpl-3.0 | 3,583,280,857,935,306,000 | 32.287037 | 132 | 0.618809 | false |
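# Typical use (illustrative sketch; `schemas`, `user` and `branch_rev` are
# assumed to come from the surrounding fennec views/repository code):
#   importer = FennecImporter(model=schemas, user=user, branch_rev=branch_rev)
#   importer.import_model()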
co-ment/comt | src/cm/utils/comment_positioning.py | 1 | 7762 | # -*- coding: utf-8 -*-
from difflib import SequenceMatcher
#from cm.utils.spannifier import Spannifier
import sys, operator
from cm.utils.spannifier import spannify
from cm.converters.pandoc_converters import pandoc_convert
import logging
from cm.utils.spannifier import get_the_soup
import re
import html5lib
from html5lib import treebuilders
def compute_new_comment_positions(old_content, old_format, new_content, new_format, commentList):
# cf. TextVersion.get_content
previousVersionContent = pandoc_convert(old_content, old_format, 'html')
newVersionContent = pandoc_convert(new_content, new_format, 'html')
_, previous_char_list, span_starts_previous = spannify(previousVersionContent, False)
_, new_char_list, span_starts_new = spannify(newVersionContent, False)
sm = SequenceMatcher(None, previous_char_list, new_char_list)
opcodes = sm.get_opcodes()
to_remove_comments_ids = set()
# limit to real comments (not replies) and those that have scope
commentList = [c for c in commentList if not c.is_reply() and not c.is_scope_removed()]
for comment in commentList:
try:
comment.initial_start_offset = span_starts_previous[comment.start_wrapper] + comment.start_offset
comment.initial_end_offset = span_starts_previous[comment.end_wrapper] + comment.end_offset
except KeyError:
logging.error('Key error (wrapper out of bounds of span_starts_previous)')
continue
comment.computed_start_offset = comment.initial_start_offset
comment.computed_end_offset = comment.initial_end_offset
# comment.computed_start_wrapper = None
# comment.computed_end_wrapper = None
comment.valid = True
for tag, i1, i2, j1, j2 in opcodes:
#print tag, i1, i2, j1, j2
for i in xrange(len(commentList)) :
if tag != 'equal' :
comment = commentList[i]
if not comment.valid:
continue
if comment.initial_start_offset >= i2 :
# if offset
delta = ((j2 - j1) - (i2 - i1))
comment.computed_start_offset += delta
comment.computed_end_offset += delta
elif comment.initial_end_offset > i1:
comment.valid = False
# id, initial_start, initial_end, computed_start, computed_end, valid = self.computationResults[i]
for cc in commentList:
if cc.valid:
for id in xrange(len(span_starts_new.keys())):
start = span_starts_new.get(id, 0)
end = span_starts_new.get(id+1, sys.maxint)
# adjust start
if cc.computed_start_offset >= start and cc.computed_start_offset < end:
cc.start_wrapper = id
cc.start_offset = cc.computed_start_offset - start
# adjust end
if cc.computed_end_offset >= start and cc.computed_end_offset < end:
cc.end_wrapper = id
cc.end_offset = cc.computed_end_offset - start
# returns to_modify, to_remove
return [c for c in commentList if c.valid], \
[c for c in commentList if not c.valid]
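# Illustrative sketch of the opcode arithmetic above (hypothetical strings,
# not project data):
#   sm = SequenceMatcher(None, 'abcdef', 'abXYcdef')
#   sm.get_opcodes()
#   -> [('equal', 0, 2, 0, 2), ('insert', 2, 2, 2, 4), ('equal', 2, 6, 4, 8)]
# A comment whose initial_start_offset is 3 lies at or past i2 == 2 of the
# 'insert' opcode, so it shifts by (j2 - j1) - (i2 - i1) = 2 and lands at
# offset 5; a comment overlapping a changed region is marked invalid instead.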
def add_marker(text, color, start_ids, end_ids, with_markers, with_colors):
    # TODO: these colors are copied from c-text.css for testing and should
    # be sourced from the stylesheet rather than hardcoded here.
BCKCOLORS = ['#ffffff', '#ffffa8', '#fff6a1', '#ffeb99', '#ffde91', '#ffd08a', '#ffc182', '#ffaf7a', '#ff9d73', '#ff896b', '#ff7363', '#ff5c5c']
for i in range(14) :
BCKCOLORS.append('#ff5c5c')
ret = text
if with_markers:
end_ids.reverse()
ret = "%s%s%s"%(''.join(["[%s>"%start_id for start_id in start_ids]), ret, ''.join(["<%s]"%end_id for end_id in end_ids]))
if with_colors and color != 0 :
# For some reasons, abiwords can read background style attribute but not background-color
from cm.cm_settings import USE_ABI
if USE_ABI:
ret = "<span style='background:%s;'>%s</span>"%(BCKCOLORS[color], ret)
else:
ret = "<span style='background-color:%s;'>%s</span>"%(BCKCOLORS[color], ret)
return ret
# `comments` contains both top-level comments and replies:
def insert_comment_markers(htmlcontent, comments, with_markers, with_colors) :
    html = get_the_soup(htmlcontent)
if comments :
max_wrapper = max([comment.end_wrapper for comment in comments])
min_wrapper = min([comment.start_wrapper for comment in comments])
datas = {} # { wrapper_id : {'start_color':nb_of_comments_unterminated_at_wrapper_start, 'offsets':{offset: [[ids of wrappers starting at offset], [ids of wrappers ending at offset]]}}
# datas['offsets'][someoffset][0] and idem[1] will be ordered the way comments are (should be ('start_wrapper', 'start_offset', 'end_wrapper', 'end_offset') important)
cpt = 1 # starting numbered comment
for comment in comments :
if comment.is_reply() :
                continue
# start
wrapper_data = datas.get(comment.start_wrapper, {'start_color':0, 'offsets':{}})
offset = wrapper_data.get('offsets').get(comment.start_offset, [[],[]])
offset[0].append(cpt)
wrapper_data['offsets'][comment.start_offset] = offset
datas[comment.start_wrapper] = wrapper_data
# end
wrapper_data = datas.get(comment.end_wrapper, {'start_color':0, 'offsets':{}})
offset = wrapper_data.get('offsets').get(comment.end_offset, [[],[]])
offset[1].append(cpt)
wrapper_data['offsets'][comment.end_offset] = offset
datas[comment.end_wrapper] = wrapper_data
for cc in range(comment.start_wrapper + 1, comment.end_wrapper + 1) :
wrapper_data = datas.get(cc, {'start_color':0, 'offsets':{}})
wrapper_data['start_color'] += 1
datas[cc] = wrapper_data
cpt = cpt + 1
# order ee values
for (wrapper_id, wrapper_data) in datas.items() :
start_color = wrapper_data['start_color']
offsets = sorted(wrapper_data['offsets'].items(), key=operator.itemgetter(0))
d = html.find(id = "sv-%d"%wrapper_id)
if not d: # comment detached
continue
content = d.contents[0]
spans = ""
if offsets :
color = start_color
start = 0
start_ids = []
end_ids = []
for offset, ids in offsets :
end_ids = ids[1]
end = offset
spans += add_marker(content[start:end], color, start_ids, end_ids, with_markers, with_colors)
start_ids = ids[0]
start = end
color += (len(ids[0]) - len(ids[1]))
end_ids = []
spans += add_marker(content[end:], color,start_ids, end_ids, with_markers, with_colors)
else : # the whole content is to be colored with start_color
spans += add_marker(content, start_color, [], [], with_markers, with_colors)
content.replaceWith(spans)
output = unicode(html)
# Soup has introduced HTML entities, which should be expanded
    output = re.sub(r"&quot;", '"', output)
    output = re.sub(r"&amp;", '&', output)
    output = re.sub(r"&gt;", '>', output)
    output = re.sub(r"&lt;", '<', output)
return unicode(output)
| agpl-3.0 | 467,451,743,071,097,000 | 39.831579 | 188 | 0.577597 | false |
esquire-/weather | weather.py | 1 | 1797 | #!/usr/bin/python
'''
Weather.py
John Heenan
14 February 2014
A simple utility to send notifications to your phone when it starts raining outside of your windowless CS lab.
Run as a login item/launchd process/drop it in .bashrc and it will only call you when you're in the lab.
'''
import urllib2
import json
import time
import pynma
'''
Some configuration variables
'''
WU_KEY = '' # Weather Underground API Key - The free developer tier should suffice
NMA_KEY = '' # Notify My Android API Key
LOC = 'UK/London' # Weather Underground Area ID/Location Name
DELAY = 300 # Refresh interval
'''
You shouldn't need to modify anything after this point.
'''
notifier = pynma.PyNMA(NMA_KEY)
def sendMessage(message):
notifier.push("Weather Checker", message, "The weather outside the CS lab has changed. It is currently " + message + " .\nData by Weather Underground\nImplementation by J. Heenan.")
def main():
print("Weather monitor started.")
last_observation = ''
while True:
notify = False
data = urllib2.urlopen('http://api.wunderground.com/api/' + WU_KEY + '/geolookup/conditions/q/' + LOC + '.json')
json_string = data.read()
parsed_json = json.loads(json_string)
observation = parsed_json['current_observation']['weather']
if "Rain" in observation or "Snow" in observation:
if observation != last_observation:
notify = True # Send message if it has started raining/rain conditions change
if "Rain" in last_observation or "Snow" in last_observation:
if observation != last_observation:
notify = True # Send message if it was raining and it isn't. If rain conditions change this will have no effect, notify is already True
if notify:
sendMessage(observation)
last_observation = observation
time.sleep(DELAY)
if __name__ == '__main__':
main() | mpl-2.0 | 5,054,473,239,764,113,000 | 28 | 182 | 0.722315 | false |
sharoonthomas/trytond-report-html-stock | tests/test_views_depends.py | 1 | 1195 | # -*- coding: utf-8 -*-
import sys
import os
DIR = os.path.abspath(os.path.normpath(os.path.join(
__file__, '..', '..', '..', '..', '..', 'trytond'
)))
if os.path.isdir(DIR):
sys.path.insert(0, os.path.dirname(DIR))
import unittest
import trytond.tests.test_tryton
from trytond.tests.test_tryton import test_view, test_depends
class TestViewsDepends(unittest.TestCase):
'''
Test views and depends
'''
def setUp(self):
"""
Set up data used in the tests.
this method is called before each test function execution.
"""
trytond.tests.test_tryton.install_module('report_html_stock')
@unittest.skip("No views")
def test0005views(self):
'''
Test views.
'''
test_view('report_html_stock')
def test0006depends(self):
'''
Test depends.
'''
test_depends()
def suite():
"""
Define suite
"""
test_suite = trytond.tests.test_tryton.suite()
test_suite.addTests(
unittest.TestLoader().loadTestsFromTestCase(TestViewsDepends)
)
return test_suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
| bsd-3-clause | 2,409,269,749,330,876,400 | 21.980769 | 69 | 0.59749 | false |
Ayrx/cryptography | src/_cffi_src/openssl/x509v3.py | 1 | 9359 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
INCLUDES = """
#include <openssl/x509v3.h>
/*
* This is part of a work-around for the difficulty cffi has in dealing with
* `LHASH_OF(foo)` as the name of a type. We invent a new, simpler name that
* will be an alias for this type and use the alias throughout. This works
* together with another opaque typedef for the same name in the TYPES section.
* Note that the result is an opaque type.
*/
typedef LHASH_OF(CONF_VALUE) Cryptography_LHASH_OF_CONF_VALUE;
typedef STACK_OF(ACCESS_DESCRIPTION) Cryptography_STACK_OF_ACCESS_DESCRIPTION;
typedef STACK_OF(DIST_POINT) Cryptography_STACK_OF_DIST_POINT;
typedef STACK_OF(POLICYQUALINFO) Cryptography_STACK_OF_POLICYQUALINFO;
typedef STACK_OF(POLICYINFO) Cryptography_STACK_OF_POLICYINFO;
typedef STACK_OF(ASN1_INTEGER) Cryptography_STACK_OF_ASN1_INTEGER;
typedef STACK_OF(GENERAL_SUBTREE) Cryptography_STACK_OF_GENERAL_SUBTREE;
"""
TYPES = """
typedef ... Cryptography_STACK_OF_ACCESS_DESCRIPTION;
typedef ... Cryptography_STACK_OF_POLICYQUALINFO;
typedef ... Cryptography_STACK_OF_POLICYINFO;
typedef ... Cryptography_STACK_OF_ASN1_INTEGER;
typedef ... Cryptography_STACK_OF_GENERAL_SUBTREE;
typedef ... EXTENDED_KEY_USAGE;
typedef ... CONF;
typedef struct {
X509 *issuer_cert;
X509 *subject_cert;
...;
} X509V3_CTX;
typedef void * (*X509V3_EXT_D2I)(void *, const unsigned char **, long);
typedef struct {
ASN1_ITEM_EXP *it;
X509V3_EXT_D2I d2i;
...;
} X509V3_EXT_METHOD;
static const int GEN_OTHERNAME;
static const int GEN_EMAIL;
static const int GEN_X400;
static const int GEN_DNS;
static const int GEN_URI;
static const int GEN_DIRNAME;
static const int GEN_EDIPARTY;
static const int GEN_IPADD;
static const int GEN_RID;
typedef struct {
ASN1_OBJECT *type_id;
ASN1_TYPE *value;
} OTHERNAME;
typedef struct {
...;
} EDIPARTYNAME;
typedef struct {
int ca;
ASN1_INTEGER *pathlen;
} BASIC_CONSTRAINTS;
typedef struct {
Cryptography_STACK_OF_GENERAL_SUBTREE *permittedSubtrees;
Cryptography_STACK_OF_GENERAL_SUBTREE *excludedSubtrees;
} NAME_CONSTRAINTS;
typedef struct {
ASN1_INTEGER *requireExplicitPolicy;
ASN1_INTEGER *inhibitPolicyMapping;
} POLICY_CONSTRAINTS;
typedef struct {
int type;
union {
char *ptr;
OTHERNAME *otherName; /* otherName */
ASN1_IA5STRING *rfc822Name;
ASN1_IA5STRING *dNSName;
ASN1_TYPE *x400Address;
X509_NAME *directoryName;
EDIPARTYNAME *ediPartyName;
ASN1_IA5STRING *uniformResourceIdentifier;
ASN1_OCTET_STRING *iPAddress;
ASN1_OBJECT *registeredID;
/* Old names */
ASN1_OCTET_STRING *ip; /* iPAddress */
X509_NAME *dirn; /* dirn */
ASN1_IA5STRING *ia5; /* rfc822Name, dNSName, */
/* uniformResourceIdentifier */
ASN1_OBJECT *rid; /* registeredID */
ASN1_TYPE *other; /* x400Address */
} d;
...;
} GENERAL_NAME;
typedef struct {
GENERAL_NAME *base;
ASN1_INTEGER *minimum;
ASN1_INTEGER *maximum;
} GENERAL_SUBTREE;
typedef struct stack_st_GENERAL_NAME GENERAL_NAMES;
typedef struct {
ASN1_OCTET_STRING *keyid;
GENERAL_NAMES *issuer;
ASN1_INTEGER *serial;
} AUTHORITY_KEYID;
typedef struct {
ASN1_OBJECT *method;
GENERAL_NAME *location;
} ACCESS_DESCRIPTION;
typedef ... Cryptography_LHASH_OF_CONF_VALUE;
typedef ... Cryptography_STACK_OF_DIST_POINT;
typedef struct {
int type;
union {
GENERAL_NAMES *fullname;
Cryptography_STACK_OF_X509_NAME_ENTRY *relativename;
} name;
...;
} DIST_POINT_NAME;
typedef struct {
DIST_POINT_NAME *distpoint;
ASN1_BIT_STRING *reasons;
GENERAL_NAMES *CRLissuer;
...;
} DIST_POINT;
typedef struct {
ASN1_STRING *organization;
Cryptography_STACK_OF_ASN1_INTEGER *noticenos;
} NOTICEREF;
typedef struct {
NOTICEREF *noticeref;
ASN1_STRING *exptext;
} USERNOTICE;
typedef struct {
ASN1_OBJECT *pqualid;
union {
ASN1_IA5STRING *cpsuri;
USERNOTICE *usernotice;
ASN1_TYPE *other;
} d;
} POLICYQUALINFO;
typedef struct {
ASN1_OBJECT *policyid;
Cryptography_STACK_OF_POLICYQUALINFO *qualifiers;
} POLICYINFO;
"""
FUNCTIONS = """
int X509V3_EXT_add_alias(int, int);
void X509V3_set_ctx(X509V3_CTX *, X509 *, X509 *, X509_REQ *, X509_CRL *, int);
X509_EXTENSION *X509V3_EXT_nconf(CONF *, X509V3_CTX *, char *, char *);
GENERAL_NAME *GENERAL_NAME_new(void);
int GENERAL_NAME_print(BIO *, GENERAL_NAME *);
GENERAL_NAMES *GENERAL_NAMES_new(void);
void GENERAL_NAMES_free(GENERAL_NAMES *);
void *X509V3_EXT_d2i(X509_EXTENSION *);
"""
MACROS = """
/* This is a macro defined by a call to DECLARE_ASN1_FUNCTIONS in the
x509v3.h header. */
BASIC_CONSTRAINTS *BASIC_CONSTRAINTS_new(void);
void BASIC_CONSTRAINTS_free(BASIC_CONSTRAINTS *);
/* This is a macro defined by a call to DECLARE_ASN1_FUNCTIONS in the
x509v3.h header. */
AUTHORITY_KEYID *AUTHORITY_KEYID_new(void);
void AUTHORITY_KEYID_free(AUTHORITY_KEYID *);
NAME_CONSTRAINTS *NAME_CONSTRAINTS_new(void);
void NAME_CONSTRAINTS_free(NAME_CONSTRAINTS *);
OTHERNAME *OTHERNAME_new(void);
void OTHERNAME_free(OTHERNAME *);
POLICY_CONSTRAINTS *POLICY_CONSTRAINTS_new(void);
void POLICY_CONSTRAINTS_free(POLICY_CONSTRAINTS *);
void *X509V3_set_ctx_nodb(X509V3_CTX *);
int i2d_GENERAL_NAMES(GENERAL_NAMES *, unsigned char **);
GENERAL_NAMES *d2i_GENERAL_NAMES(GENERAL_NAMES **, const unsigned char **,
long);
int sk_GENERAL_NAME_num(struct stack_st_GENERAL_NAME *);
int sk_GENERAL_NAME_push(struct stack_st_GENERAL_NAME *, GENERAL_NAME *);
GENERAL_NAME *sk_GENERAL_NAME_value(struct stack_st_GENERAL_NAME *, int);
Cryptography_STACK_OF_ACCESS_DESCRIPTION *sk_ACCESS_DESCRIPTION_new_null(void);
int sk_ACCESS_DESCRIPTION_num(Cryptography_STACK_OF_ACCESS_DESCRIPTION *);
ACCESS_DESCRIPTION *sk_ACCESS_DESCRIPTION_value(
Cryptography_STACK_OF_ACCESS_DESCRIPTION *, int
);
void sk_ACCESS_DESCRIPTION_free(Cryptography_STACK_OF_ACCESS_DESCRIPTION *);
int sk_ACCESS_DESCRIPTION_push(Cryptography_STACK_OF_ACCESS_DESCRIPTION *,
ACCESS_DESCRIPTION *);
ACCESS_DESCRIPTION *ACCESS_DESCRIPTION_new(void);
void ACCESS_DESCRIPTION_free(ACCESS_DESCRIPTION *);
X509_EXTENSION *X509V3_EXT_conf_nid(Cryptography_LHASH_OF_CONF_VALUE *,
X509V3_CTX *, int, char *);
/* These aren't macros; these functions are all const X on OpenSSL > 1.0.x */
const X509V3_EXT_METHOD *X509V3_EXT_get(X509_EXTENSION *);
const X509V3_EXT_METHOD *X509V3_EXT_get_nid(int);
Cryptography_STACK_OF_DIST_POINT *sk_DIST_POINT_new_null(void);
void sk_DIST_POINT_free(Cryptography_STACK_OF_DIST_POINT *);
int sk_DIST_POINT_num(Cryptography_STACK_OF_DIST_POINT *);
DIST_POINT *sk_DIST_POINT_value(Cryptography_STACK_OF_DIST_POINT *, int);
int sk_DIST_POINT_push(Cryptography_STACK_OF_DIST_POINT *, DIST_POINT *);
void sk_POLICYINFO_free(Cryptography_STACK_OF_POLICYINFO *);
int sk_POLICYINFO_num(Cryptography_STACK_OF_POLICYINFO *);
POLICYINFO *sk_POLICYINFO_value(Cryptography_STACK_OF_POLICYINFO *, int);
int sk_POLICYINFO_push(Cryptography_STACK_OF_POLICYINFO *, POLICYINFO *);
Cryptography_STACK_OF_POLICYINFO *sk_POLICYINFO_new_null(void);
POLICYINFO *POLICYINFO_new(void);
void POLICYINFO_free(POLICYINFO *);
POLICYQUALINFO *POLICYQUALINFO_new(void);
void POLICYQUALINFO_free(POLICYQUALINFO *);
NOTICEREF *NOTICEREF_new(void);
void NOTICEREF_free(NOTICEREF *);
USERNOTICE *USERNOTICE_new(void);
void USERNOTICE_free(USERNOTICE *);
void sk_POLICYQUALINFO_free(Cryptography_STACK_OF_POLICYQUALINFO *);
int sk_POLICYQUALINFO_num(Cryptography_STACK_OF_POLICYQUALINFO *);
POLICYQUALINFO *sk_POLICYQUALINFO_value(Cryptography_STACK_OF_POLICYQUALINFO *,
int);
int sk_POLICYQUALINFO_push(Cryptography_STACK_OF_POLICYQUALINFO *,
POLICYQUALINFO *);
Cryptography_STACK_OF_POLICYQUALINFO *sk_POLICYQUALINFO_new_null(void);
Cryptography_STACK_OF_GENERAL_SUBTREE *sk_GENERAL_SUBTREE_new_null(void);
void sk_GENERAL_SUBTREE_free(Cryptography_STACK_OF_GENERAL_SUBTREE *);
int sk_GENERAL_SUBTREE_num(Cryptography_STACK_OF_GENERAL_SUBTREE *);
GENERAL_SUBTREE *sk_GENERAL_SUBTREE_value(
Cryptography_STACK_OF_GENERAL_SUBTREE *, int
);
int sk_GENERAL_SUBTREE_push(Cryptography_STACK_OF_GENERAL_SUBTREE *,
GENERAL_SUBTREE *);
GENERAL_SUBTREE *GENERAL_SUBTREE_new(void);
void sk_ASN1_INTEGER_free(Cryptography_STACK_OF_ASN1_INTEGER *);
int sk_ASN1_INTEGER_num(Cryptography_STACK_OF_ASN1_INTEGER *);
ASN1_INTEGER *sk_ASN1_INTEGER_value(Cryptography_STACK_OF_ASN1_INTEGER *, int);
int sk_ASN1_INTEGER_push(Cryptography_STACK_OF_ASN1_INTEGER *, ASN1_INTEGER *);
Cryptography_STACK_OF_ASN1_INTEGER *sk_ASN1_INTEGER_new_null(void);
X509_EXTENSION *X509V3_EXT_i2d(int, int, void *);
DIST_POINT *DIST_POINT_new(void);
void DIST_POINT_free(DIST_POINT *);
DIST_POINT_NAME *DIST_POINT_NAME_new(void);
void DIST_POINT_NAME_free(DIST_POINT_NAME *);
"""
CUSTOMIZATIONS = """
"""
| bsd-3-clause | -559,673,534,380,414,200 | 30.725424 | 79 | 0.711508 | false |
NLeSC/PattyAnalytics | scripts/registration.py | 1 | 3140 | #!/usr/bin/env python2.7
"""Registration script.
Usage:
    registration.py [-h] [-d <sample>] [-v <voxel>] [-s <scale>] [-U] [-u <upfile>] [-c <camfile>] <source> <drivemap> <footprint> <output>
Positional arguments:
source Source LAS file
drivemap Target LAS file to map source to
footprint Footprint for the source LAS file
output file to write output LAS to
Options:
-d <sample> Downsample source pointcloud to a percentage of number of points
[default: 0.1].
-v <voxel> Downsample source pointcloud using voxel filter to speedup ICP
[default: 0.05]
-s <scale> User override for initial scale factor
    -U           Don't trust the up vector completely; estimate it in
                 this script, too.
-u <upfile> Json file containing the up vector relative to the pointcloud.
    -c <camfile> CSV file containing all the camera positions. [UNIMPLEMENTED]
"""
from __future__ import print_function
from docopt import docopt
import numpy as np
import os
import json
from patty.utils import (load, save, log)
from patty.srs import (set_srs, force_srs)
from patty.registration import (
coarse_registration,
fine_registration,
initial_registration,
)
if __name__ == '__main__':
####
# Parse comamnd line arguments
args = docopt(__doc__)
sourcefile = args['<source>']
drivemapfile = args['<drivemap>']
footprintcsv = args['<footprint>']
foutLas = args['<output>']
up_file = args['-u']
if args['-U']:
Trust_up = False
else:
Trust_up = True
    # docopt supplies documented defaults as strings and None for missing
    # options, so the failures to guard against are TypeError/ValueError,
    # not KeyError (and never a bare except).
    try:
        Downsample = float(args['-d'])
    except (TypeError, ValueError):
        Downsample = 0.1
    try:
        Voxel = float(args['-v'])
    except (TypeError, ValueError):
        Voxel = 0.05
    try:
        Initial_scale = float(args['-s'])
    except (TypeError, ValueError):
        Initial_scale = None
assert os.path.exists(sourcefile), sourcefile + ' does not exist'
assert os.path.exists(drivemapfile), drivemapfile + ' does not exist'
assert os.path.exists(footprintcsv), footprintcsv + ' does not exist'
#####
# Setup * the low-res drivemap
# * footprint
# * pointcloud
# * up-vector
log("Reading drivemap", drivemapfile)
drivemap = load(drivemapfile)
force_srs(drivemap, srs="EPSG:32633")
log("Reading footprint", footprintcsv)
footprint = load(footprintcsv)
force_srs(footprint, srs="EPSG:32633")
set_srs(footprint, same_as=drivemap)
log("Reading object", sourcefile)
pointcloud = load(sourcefile)
Up = None
try:
with open(up_file) as f:
dic = json.load(f)
Up = np.array(dic['estimatedUpDirection'])
log("Reading up_file", up_file)
except:
log("Cannot parse upfile, skipping")
initial_registration(pointcloud, Up, drivemap,
trust_up=Trust_up, initial_scale=Initial_scale)
save(pointcloud, "initial.las")
center = coarse_registration(pointcloud, drivemap, footprint, Downsample)
save(pointcloud, "coarse.las")
fine_registration(pointcloud, drivemap, center, voxelsize=Voxel)
save(pointcloud, foutLas)
| apache-2.0 | -5,553,458,156,020,115,000 | 27.288288 | 111 | 0.634076 | false |
ghetzel/webfriend | webfriend/utils/__init__.py | 1 | 1517 | import os.path
import random
import inspect
import importlib
import string
PACKAGE_ROOT = os.path.abspath(
os.path.dirname(
os.path.dirname(__file__)
)
)
PACKAGE_NAME = os.path.basename(PACKAGE_ROOT)
def random_string(count, charset=string.lowercase + string.digits):
return ''.join(random.sample(charset, count))
def autotype(value):
if isinstance(value, basestring):
if value.lower() == 'true':
return True
elif value.lower() == 'false':
return False
else:
try:
return int(value)
except ValueError:
pass
try:
return float(value)
except ValueError:
pass
return value
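# Behaviour sketch for autotype() (expected values follow directly from the
# branches above):
#   autotype('true')  -> True
#   autotype('42')    -> 42
#   autotype('3.14')  -> 3.14
#   autotype('3.14x') -> '3.14x'  (left unchanged)
#   autotype(7)       -> 7        (non-strings pass straight through)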
def get_module_from_string(string, package=None):
parts = string.split('.')
remainder = []
while len(parts):
try:
return importlib.import_module('.'.join(parts), package=package), remainder
except ImportError:
remainder = [parts.pop()] + remainder
return None, string.split('.')
def resolve_object(parts, parent=None):
if not parent:
parent = globals()
while len(parts):
proceed = False
for member in inspect.getmembers(parent):
if member[0] == parts[0]:
parent = member[1]
parts = parts[1:]
proceed = True
break
if not proceed:
return None
return parent
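# Minimal usage sketch chaining the two helpers above (uses only the
# standard library, so the expected results can be checked directly):
#   mod, rest = get_module_from_string('os.path.join')
#   # -> (<module 'posixpath'>, ['join'])
#   resolve_object(rest, parent=mod) is os.path.join
#   # -> True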
| bsd-2-clause | 6,987,214,187,512,982,000 | 20.671429 | 87 | 0.552406 | false |
asvetlov/bloggertool | lib/bloggertool/commands/info.py | 1 | 3641 | # commands/info.py
# Copyright (C) 2011-2014 Andrew Svetlov
# [email protected]
#
# This module is part of BloggerTool and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import os
from textwrap import dedent
from bloggertool.exceptions import ConfigError
from bloggertool.str_util import T, a
from .basecommand import BaseCommand
class InfoCommand(BaseCommand):
NAME = 'info'
HELP = "Display or change project info."
DESCR = T("""
Show project info without parameters.
Change project settings if params specified.
For more information about template setup see:
$ blog help template
""")
FLAGS = ('blogid',
'template', 'drop_template',
'source_encoding')
@classmethod
def fill_parser(cls, parser):
parser.add_argument('--blogid', help="set blog id and blog url")
# templates
exclusive = parser.add_mutually_exclusive_group()
exclusive.add_argument('--template',
help="""
template path, folder part is
'template dir'
and filename part is
'template file'.
Please note: you CANNOT set dir and file
separetely
""")
exclusive.add_argument('--drop-template', help=dedent("""\
drop template settings.
Be CAREFUL with that option"""),
action='store_true')
parser.add_argument('--source-encoding',
help=T("""
set default encoding for source files"""))
def __init__(self, args):
self.blogid = args.blogid
self.template = args.template
self.drop_template = args.drop_template
self.source_encoding = args.source_encoding
self.has_updates = any(
(getattr(args, name) for name in self.FLAGS))
def run(self):
config = self.config
info = config.info
if not self.has_updates:
out = T("""
User info:
blogid: {info.blogid!N}
url: {info.blogurl!N}
template:
dir: {info.template_dir!N}
file: {info.template_file!N}
source-encoding: {info.effective_source_encoding}
""")(info=info)
self.log.info(out)
else:
if self.blogid is not None:
info.blogid = self.blogid
if self.drop_template:
info.template_dir = None
info.template_file = None
self.log.warning("Drop template settings")
if self.template:
template_path = config.fs.expand_path(self.template)
config.fs.check_existance(template_path, role="Template")
abs_folder, fname = os.path.split(template_path)
folder = config.fs.rel_path(abs_folder)
if not folder or not fname:
raise ConfigError(a("""
Put template in subfolder of project dir {config.root}
"""))
info.template_dir = folder
info.template_file = fname
if self.source_encoding:
info.source_encoding = self.source_encoding
self.log.info("User updated")
| mit | 163,598,516,552,405,660 | 35.41 | 78 | 0.515243 | false |
nylas/sync-engine | inbox/mailsync/backends/imap/generic.py | 1 | 38201 | # deal with unicode literals: http://www.python.org/dev/peps/pep-0263/
# vim: set fileencoding=utf-8 :
"""
----------------
IMAP SYNC ENGINE
----------------
Okay, here's the deal.
The IMAP sync engine runs per-folder on each account.
Only one initial sync can be running per-account at a time, to avoid
hammering the IMAP backend too hard (Gmail shards per-user, so parallelizing
folder download won't actually increase our throughput anyway).
Any time we reconnect, we have to make sure the folder's uidvalidity hasn't
changed, and if it has, we need to update the UIDs for any messages we've
already downloaded. A folder's uidvalidity cannot change during a session
(SELECT during an IMAP session starts a session on a folder) (see
http://tools.ietf.org/html/rfc3501#section-2.3.1.1).
Note that despite a session giving you a HIGHESTMODSEQ at the start of a
SELECT, that session will still always give you the latest message list
including adds, deletes, and flag changes that have happened since that
highestmodseq. (In Gmail, there is a small delay between changes happening on
the web client and those changes registering on a connected IMAP session,
though bizarrely the HIGHESTMODSEQ is updated immediately.) So we have to keep
in mind that the data may be changing behind our backs as we're syncing.
Fetching info about UIDs that no longer exist is not an error but gives us
empty data.
Folder sync state is stored in the ImapFolderSyncStatus table to allow for
restarts.
Here's the state machine:
-----
| ---------------- ----------------------
∨ | initial sync | <-----> | initial uidinvalid |
---------- ---------------- ----------------------
| finish | | ^
---------- | |_________________________
^ ∨ |
| ---------------- ----------------------
|---| poll | <-----> | poll uidinvalid |
---------------- ----------------------
| ∧
----
We encapsulate sync engine instances in greenlets for cooperative coroutine
scheduling around network I/O.
--------------
SESSION SCOPES
--------------
Database sessions are held for as short a duration as possible---just to
query for needed information or update the local state. Long-held database
sessions reduce scalability.
"""
from __future__ import division
from datetime import datetime, timedelta
from gevent import Greenlet
import gevent
import imaplib
from sqlalchemy import func
from sqlalchemy.orm import load_only
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
from inbox.basicauth import ValidationError
from inbox.util.concurrency import retry_with_logging
from inbox.util.debug import bind_context
from inbox.util.itert import chunk
from inbox.util.misc import or_none
from inbox.util.threading import fetch_corresponding_thread, MAX_THREAD_LENGTH
from inbox.util.stats import statsd_client
from nylas.logging import get_logger
log = get_logger()
from inbox.crispin import connection_pool, retry_crispin, FolderMissingError
from inbox.models import Folder, Account, Message
from inbox.models.backends.imap import (ImapFolderSyncStatus, ImapThread,
ImapUid, ImapFolderInfo)
from inbox.models.session import session_scope
from inbox.mailsync.backends.imap import common
from inbox.mailsync.backends.base import (MailsyncDone, MailsyncError,
THROTTLE_COUNT, THROTTLE_WAIT)
from inbox.heartbeat.store import HeartbeatStatusProxy
from inbox.events.ical import import_attached_events
# Idle doesn't necessarily pick up flag changes, so we don't want to
# idle for very long, or we won't detect things like messages being
# marked as read.
IDLE_WAIT = 30
DEFAULT_POLL_FREQUENCY = 30
# Poll on the Inbox folder more often.
INBOX_POLL_FREQUENCY = 10
FAST_FLAGS_REFRESH_LIMIT = 100
SLOW_FLAGS_REFRESH_LIMIT = 2000
SLOW_REFRESH_INTERVAL = timedelta(seconds=3600)
FAST_REFRESH_INTERVAL = timedelta(seconds=30)
# Maximum number of uidinvalidity errors in a row.
MAX_UIDINVALID_RESYNCS = 5
CONDSTORE_FLAGS_REFRESH_BATCH_SIZE = 200
class FolderSyncEngine(Greenlet):
"""Base class for a per-folder IMAP sync engine."""
def __init__(self, account_id, namespace_id, folder_name,
email_address, provider_name, syncmanager_lock):
with session_scope(namespace_id) as db_session:
try:
folder = db_session.query(Folder). \
filter(Folder.name == folder_name,
Folder.account_id == account_id).one()
except NoResultFound:
raise MailsyncError(u"Missing Folder '{}' on account {}"
.format(folder_name, account_id))
self.folder_id = folder.id
self.folder_role = folder.canonical_name
# Metric flags for sync performance
self.is_initial_sync = folder.initial_sync_end is None
self.is_first_sync = folder.initial_sync_start is None
self.is_first_message = self.is_first_sync
bind_context(self, 'foldersyncengine', account_id, self.folder_id)
self.account_id = account_id
self.namespace_id = namespace_id
self.folder_name = folder_name
self.email_address = email_address
if self.folder_name.lower() == 'inbox':
self.poll_frequency = INBOX_POLL_FREQUENCY
else:
self.poll_frequency = DEFAULT_POLL_FREQUENCY
self.syncmanager_lock = syncmanager_lock
self.state = None
self.provider_name = provider_name
self.last_fast_refresh = None
self.flags_fetch_results = {}
self.conn_pool = connection_pool(self.account_id)
self.state_handlers = {
'initial': self.initial_sync,
'initial uidinvalid': self.resync_uids,
'poll': self.poll,
'poll uidinvalid': self.resync_uids,
}
self.setup_heartbeats()
Greenlet.__init__(self)
# Some generic IMAP servers are throwing UIDVALIDITY
# errors forever. Instead of resyncing those servers
# ad vitam, we keep track of the number of consecutive
# times we got such an error and bail out if it's higher than
# MAX_UIDINVALID_RESYNCS.
self.uidinvalid_count = 0
def setup_heartbeats(self):
self.heartbeat_status = HeartbeatStatusProxy(self.account_id,
self.folder_id,
self.folder_name,
self.email_address,
self.provider_name)
def _run(self):
# Bind greenlet-local logging context.
self.log = log.new(account_id=self.account_id, folder=self.folder_name,
provider=self.provider_name)
# eagerly signal the sync status
self.heartbeat_status.publish()
try:
self.update_folder_sync_status(lambda s: s.start_sync())
except IntegrityError:
# The state insert failed because the folder ID ForeignKey
# was no longer valid, ie. the folder for this engine was deleted
# while we were starting up.
# Exit the sync and let the monitor sort things out.
log.info("Folder state loading failed due to IntegrityError",
folder_id=self.folder_id, account_id=self.account_id)
raise MailsyncDone()
# NOTE: The parent ImapSyncMonitor handler could kill us at any
# time if it receives a shutdown command. The shutdown command is
# equivalent to ctrl-c.
while True:
retry_with_logging(self._run_impl, account_id=self.account_id,
provider=self.provider_name, logger=log)
def _run_impl(self):
old_state = self.state
try:
self.state = self.state_handlers[old_state]()
self.heartbeat_status.publish(state=self.state)
except UidInvalid:
self.state = self.state + ' uidinvalid'
self.uidinvalid_count += 1
self.heartbeat_status.publish(state=self.state)
# Check that we're not stuck in an endless uidinvalidity resync loop.
if self.uidinvalid_count > MAX_UIDINVALID_RESYNCS:
log.error('Resynced more than MAX_UIDINVALID_RESYNCS in a'
' row. Stopping sync.')
with session_scope(self.namespace_id) as db_session:
account = db_session.query(Account).get(self.account_id)
account.disable_sync('Detected endless uidvalidity '
'resync loop')
account.sync_state = 'stopped'
db_session.commit()
raise MailsyncDone()
except FolderMissingError:
# Folder was deleted by monitor while its sync was running.
# TODO: Monitor should handle shutting down the folder engine.
log.info('Folder disappeared. Stopping sync.',
account_id=self.account_id, folder_id=self.folder_id)
raise MailsyncDone()
except ValidationError as exc:
log.error('Error authenticating; stopping sync', exc_info=True,
account_id=self.account_id, folder_id=self.folder_id,
logstash_tag='mark_invalid')
with session_scope(self.namespace_id) as db_session:
account = db_session.query(Account).get(self.account_id)
account.mark_invalid()
account.update_sync_error(exc)
raise MailsyncDone()
# State handlers are idempotent, so it's okay if we're
# killed between the end of the handler and the commit.
if self.state != old_state:
def update(status):
status.state = self.state
self.update_folder_sync_status(update)
if self.state == old_state and self.state in ['initial', 'poll']:
# We've been through a normal state transition without raising any
# error. It's safe to reset the uidvalidity counter.
self.uidinvalid_count = 0
def update_folder_sync_status(self, cb):
# Loads the folder sync status and invokes the provided callback to
# modify it. Commits any changes and updates `self.state` to ensure
# they are never out of sync.
with session_scope(self.namespace_id) as db_session:
try:
state = ImapFolderSyncStatus.state
saved_folder_status = db_session.query(ImapFolderSyncStatus)\
.filter_by(account_id=self.account_id, folder_id=self.folder_id)\
.options(load_only(state)).one()
except NoResultFound:
saved_folder_status = ImapFolderSyncStatus(
account_id=self.account_id, folder_id=self.folder_id)
db_session.add(saved_folder_status)
cb(saved_folder_status)
db_session.commit()
self.state = saved_folder_status.state
def set_stopped(self, db_session):
self.update_folder_sync_status(lambda s: s.stop_sync())
def _report_initial_sync_start(self):
with session_scope(self.namespace_id) as db_session:
q = db_session.query(Folder).get(self.folder_id)
q.initial_sync_start = datetime.utcnow()
def _report_initial_sync_end(self):
with session_scope(self.namespace_id) as db_session:
q = db_session.query(Folder).get(self.folder_id)
q.initial_sync_end = datetime.utcnow()
@retry_crispin
def initial_sync(self):
log.bind(state='initial')
log.info('starting initial sync')
if self.is_first_sync:
self._report_initial_sync_start()
self.is_first_sync = False
with self.conn_pool.get() as crispin_client:
crispin_client.select_folder(self.folder_name, uidvalidity_cb)
# Ensure we have an ImapFolderInfo row created prior to sync start.
with session_scope(self.namespace_id) as db_session:
try:
db_session.query(ImapFolderInfo). \
filter(ImapFolderInfo.account_id == self.account_id,
ImapFolderInfo.folder_id == self.folder_id). \
one()
except NoResultFound:
imapfolderinfo = ImapFolderInfo(
account_id=self.account_id, folder_id=self.folder_id,
uidvalidity=crispin_client.selected_uidvalidity,
uidnext=crispin_client.selected_uidnext)
db_session.add(imapfolderinfo)
db_session.commit()
self.initial_sync_impl(crispin_client)
if self.is_initial_sync:
self._report_initial_sync_end()
self.is_initial_sync = False
return 'poll'
@retry_crispin
def poll(self):
log.bind(state='poll')
log.debug('polling')
self.poll_impl()
return 'poll'
@retry_crispin
def resync_uids(self):
log.bind(state=self.state)
log.warning('UIDVALIDITY changed; initiating resync')
self.resync_uids_impl()
return 'initial'
def initial_sync_impl(self, crispin_client):
# We wrap the block in a try/finally because the change_poller greenlet
# needs to be killed when this greenlet is interrupted
change_poller = None
try:
assert crispin_client.selected_folder_name == self.folder_name
remote_uids = crispin_client.all_uids()
with self.syncmanager_lock:
with session_scope(self.namespace_id) as db_session:
local_uids = common.local_uids(self.account_id, db_session,
self.folder_id)
common.remove_deleted_uids(
self.account_id, self.folder_id,
set(local_uids).difference(remote_uids))
new_uids = set(remote_uids).difference(local_uids)
with session_scope(self.namespace_id) as db_session:
account = db_session.query(Account).get(self.account_id)
throttled = account.throttled
self.update_uid_counts(
db_session,
remote_uid_count=len(remote_uids),
# This is the initial size of our download_queue
download_uid_count=len(new_uids))
change_poller = gevent.spawn(self.poll_for_changes)
bind_context(change_poller, 'changepoller', self.account_id,
self.folder_id)
uids = sorted(new_uids, reverse=True)
count = 0
for uid in uids:
# The speedup from batching appears to be less clear for
# non-Gmail accounts, so for now just download one-at-a-time.
self.download_and_commit_uids(crispin_client, [uid])
self.heartbeat_status.publish()
count += 1
if throttled and count >= THROTTLE_COUNT:
# Throttled accounts' folders sync at a rate of
# 1 message/ minute, after the first approx. THROTTLE_COUNT
# messages per folder are synced.
# Note this is an approx. limit since we use the #(uids),
# not the #(messages).
gevent.sleep(THROTTLE_WAIT)
finally:
if change_poller is not None:
# schedule change_poller to die
gevent.kill(change_poller)
def should_idle(self, crispin_client):
if not hasattr(self, '_should_idle'):
self._should_idle = (
crispin_client.idle_supported() and self.folder_name in
crispin_client.folder_names()['inbox']
)
return self._should_idle
def poll_impl(self):
with self.conn_pool.get() as crispin_client:
self.check_uid_changes(crispin_client)
if self.should_idle(crispin_client):
crispin_client.select_folder(self.folder_name,
self.uidvalidity_cb)
idling = True
try:
crispin_client.idle(IDLE_WAIT)
except Exception as exc:
# With some servers we get e.g.
# 'Unexpected IDLE response: * FLAGS (...)'
if isinstance(exc, imaplib.IMAP4.error) and \
exc.message.startswith('Unexpected IDLE response'):
log.info('Error initiating IDLE, not idling',
error=exc)
try:
# Still have to take the connection out of IDLE
# mode to reuse it though.
crispin_client.conn.idle_done()
except AttributeError:
pass
idling = False
else:
raise
else:
idling = False
# Close IMAP connection before sleeping
if not idling:
gevent.sleep(self.poll_frequency)
def resync_uids_impl(self):
# First, let's check if the UIVDALIDITY change was spurious, if
# it is, just discard it and go on.
with self.conn_pool.get() as crispin_client:
crispin_client.select_folder(self.folder_name, lambda *args: True)
remote_uidvalidity = crispin_client.selected_uidvalidity
remote_uidnext = crispin_client.selected_uidnext
if remote_uidvalidity <= self.uidvalidity:
log.debug('UIDVALIDITY unchanged')
return
# Otherwise, if the UIDVALIDITY really has changed, discard all saved
# UIDs for the folder, mark associated messages for garbage-collection,
# and return to the 'initial' state to resync.
# This will cause message and threads to be deleted and recreated, but
# uidinvalidity is sufficiently rare that this tradeoff is acceptable.
with session_scope(self.namespace_id) as db_session:
invalid_uids = {
uid for uid, in db_session.query(ImapUid.msg_uid).
filter_by(account_id=self.account_id,
folder_id=self.folder_id)
}
common.remove_deleted_uids(self.account_id, self.folder_id,
invalid_uids)
self.uidvalidity = remote_uidvalidity
self.highestmodseq = None
self.uidnext = remote_uidnext
@retry_crispin
def poll_for_changes(self):
log.new(account_id=self.account_id, folder=self.folder_name)
while True:
log.debug('polling for changes')
self.poll_impl()
def create_message(self, db_session, acct, folder, msg):
assert acct is not None and acct.namespace is not None
# Check if we somehow already saved the imapuid (shouldn't happen, but
# possible due to race condition). If so, don't commit changes.
existing_imapuid = db_session.query(ImapUid).filter(
ImapUid.account_id == acct.id, ImapUid.folder_id == folder.id,
ImapUid.msg_uid == msg.uid).first()
if existing_imapuid is not None:
log.error('Expected to create imapuid, but existing row found',
remote_msg_uid=msg.uid,
existing_imapuid=existing_imapuid.id)
return None
# Check if the message is valid.
# https://sentry.nylas.com/sentry/sync-prod/group/3387/
if msg.body is None:
log.warning('Server returned a message with an empty body.')
return None
new_uid = common.create_imap_message(db_session, acct, folder, msg)
self.add_message_to_thread(db_session, new_uid.message, msg)
db_session.flush()
# We're calling import_attached_events here instead of some more
# obvious place (like Message.create_from_synced) because the function
# requires new_uid.message to have been flushed.
        # This is necessary because import_attached_events does DB lookups.
if new_uid.message.has_attached_events:
with db_session.no_autoflush:
import_attached_events(db_session, acct, new_uid.message)
        # If we're in the polling state, report the latency between when the
        # message was received and when we created it locally.
if self.state == 'poll':
latency_millis = (
datetime.utcnow() - new_uid.message.received_date) \
.total_seconds() * 1000
metrics = [
'.'.join(['mailsync', 'providers', 'overall', 'message_latency']),
'.'.join(['mailsync', 'providers', self.provider_name, 'message_latency']),
]
for metric in metrics:
statsd_client.timing(metric, latency_millis)
return new_uid
def _count_thread_messages(self, thread_id, db_session):
count, = db_session.query(func.count(Message.id)). \
filter(Message.thread_id == thread_id).one()
return count
def add_message_to_thread(self, db_session, message_obj, raw_message):
"""Associate message_obj to the right Thread object, creating a new
thread if necessary."""
with db_session.no_autoflush:
# Disable autoflush so we don't try to flush a message with null
# thread_id.
parent_thread = fetch_corresponding_thread(
db_session, self.namespace_id, message_obj)
construct_new_thread = True
if parent_thread:
# If there's a parent thread that isn't too long already,
# add to it. Otherwise create a new thread.
parent_message_count = self._count_thread_messages(
parent_thread.id, db_session)
if parent_message_count < MAX_THREAD_LENGTH:
construct_new_thread = False
if construct_new_thread:
message_obj.thread = ImapThread.from_imap_message(
db_session, self.namespace_id, message_obj)
else:
parent_thread.messages.append(message_obj)
def download_and_commit_uids(self, crispin_client, uids):
start = datetime.utcnow()
raw_messages = crispin_client.uids(uids)
if not raw_messages:
return 0
new_uids = set()
with self.syncmanager_lock:
with session_scope(self.namespace_id) as db_session:
account = Account.get(self.account_id, db_session)
folder = Folder.get(self.folder_id, db_session)
for msg in raw_messages:
uid = self.create_message(db_session, account,
folder, msg)
if uid is not None:
db_session.add(uid)
db_session.flush()
new_uids.add(uid)
db_session.commit()
log.debug('Committed new UIDs', new_committed_message_count=len(new_uids))
# If we downloaded uids, record message velocity (#uid / latency)
if self.state == 'initial' and len(new_uids):
self._report_message_velocity(datetime.utcnow() - start,
len(new_uids))
if self.is_first_message:
self._report_first_message()
self.is_first_message = False
return len(new_uids)
def _report_first_message(self):
# Only record the "time to first message" in the inbox. Because users
# can add more folders at any time, "initial sync"-style metrics for
# other folders don't mean much.
if self.folder_role not in ['inbox', 'all']:
return
now = datetime.utcnow()
with session_scope(self.namespace_id) as db_session:
account = db_session.query(Account).get(self.account_id)
account_created = account.created_at
latency = (now - account_created).total_seconds() * 1000
metrics = [
'.'.join(['mailsync', 'providers', self.provider_name, 'first_message']),
'.'.join(['mailsync', 'providers', 'overall', 'first_message'])
]
for metric in metrics:
statsd_client.timing(metric, latency)
def _report_message_velocity(self, timedelta, num_uids):
latency = (timedelta).total_seconds() * 1000
latency_per_uid = float(latency) / num_uids
metrics = [
'.'.join(['mailsync', 'providers', self.provider_name,
'message_velocity']),
'.'.join(['mailsync', 'providers', 'overall', 'message_velocity'])
]
for metric in metrics:
statsd_client.timing(metric, latency_per_uid)
def update_uid_counts(self, db_session, **kwargs):
saved_status = db_session.query(ImapFolderSyncStatus).join(Folder). \
filter(ImapFolderSyncStatus.account_id == self.account_id,
Folder.name == self.folder_name).one()
# We're not updating the current_remote_count metric
# so don't update uid_checked_timestamp.
if kwargs.get('remote_uid_count') is None:
saved_status.update_metrics(kwargs)
else:
metrics = dict(uid_checked_timestamp=datetime.utcnow())
metrics.update(kwargs)
saved_status.update_metrics(metrics)
def get_new_uids(self, crispin_client):
try:
remote_uidnext = crispin_client.conn.folder_status(
self.folder_name, ['UIDNEXT']).get('UIDNEXT')
except ValueError:
# Work around issue where ValueError is raised on parsing STATUS
# response.
log.warning('Error getting UIDNEXT', exc_info=True)
remote_uidnext = None
except imaplib.IMAP4.error as e:
if '[NONEXISTENT]' in e.message:
raise FolderMissingError()
else:
raise e
if remote_uidnext is not None and remote_uidnext == self.uidnext:
return
log.debug('UIDNEXT changed, checking for new UIDs',
remote_uidnext=remote_uidnext, saved_uidnext=self.uidnext)
crispin_client.select_folder(self.folder_name, self.uidvalidity_cb)
with session_scope(self.namespace_id) as db_session:
lastseenuid = common.lastseenuid(self.account_id, db_session,
self.folder_id)
latest_uids = crispin_client.conn.fetch('{}:*'.format(lastseenuid + 1),
['UID']).keys()
new_uids = set(latest_uids) - {lastseenuid}
if new_uids:
for uid in sorted(new_uids):
self.download_and_commit_uids(crispin_client, [uid])
self.uidnext = remote_uidnext
def condstore_refresh_flags(self, crispin_client):
new_highestmodseq = crispin_client.conn.folder_status(
self.folder_name, ['HIGHESTMODSEQ'])['HIGHESTMODSEQ']
# Ensure that we have an initial highestmodseq value stored before we
# begin polling for changes.
if self.highestmodseq is None:
self.highestmodseq = new_highestmodseq
if new_highestmodseq == self.highestmodseq:
# Don't need to do anything if the highestmodseq hasn't
# changed.
return
elif new_highestmodseq < self.highestmodseq:
# This should really never happen, but if it does, handle it.
log.warning('got server highestmodseq less than saved '
'highestmodseq',
new_highestmodseq=new_highestmodseq,
saved_highestmodseq=self.highestmodseq)
return
log.debug('HIGHESTMODSEQ has changed, getting changed UIDs',
new_highestmodseq=new_highestmodseq,
saved_highestmodseq=self.highestmodseq)
crispin_client.select_folder(self.folder_name, self.uidvalidity_cb)
changed_flags = crispin_client.condstore_changed_flags(
self.highestmodseq)
remote_uids = crispin_client.all_uids()
# In order to be able to sync changes to tens of thousands of flags at
# once, we commit updates in batches. We do this in ascending order by
# modseq and periodically "checkpoint" our saved highestmodseq. (It's
# safe to checkpoint *because* we go in ascending order by modseq.)
# That way if the process gets restarted halfway through this refresh,
# we don't have to completely start over. It's also slow to load many
# objects into the SQLAlchemy session and then issue lots of commits;
# we avoid that by batching.
flag_batches = chunk(
sorted(changed_flags.items(), key=lambda (k, v): v.modseq),
CONDSTORE_FLAGS_REFRESH_BATCH_SIZE)
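        # A minimal sketch of the `chunk` helper assumed here: it yields
        # successive slices of at most the given size, e.g.
        #   chunk([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]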
for flag_batch in flag_batches:
with session_scope(self.namespace_id) as db_session:
common.update_metadata(self.account_id, self.folder_id,
self.folder_role, dict(flag_batch),
db_session)
if len(flag_batch) == CONDSTORE_FLAGS_REFRESH_BATCH_SIZE:
interim_highestmodseq = max(v.modseq for k, v in flag_batch)
self.highestmodseq = interim_highestmodseq
with session_scope(self.namespace_id) as db_session:
local_uids = common.local_uids(self.account_id, db_session,
self.folder_id)
expunged_uids = set(local_uids).difference(remote_uids)
if expunged_uids:
# If new UIDs have appeared since we last checked in
# get_new_uids, save them first. We want to always have the
# latest UIDs before expunging anything, in order to properly
# capture draft revisions.
with session_scope(self.namespace_id) as db_session:
lastseenuid = common.lastseenuid(self.account_id, db_session,
self.folder_id)
if remote_uids and lastseenuid < max(remote_uids):
log.info('Downloading new UIDs before expunging')
self.get_new_uids(crispin_client)
common.remove_deleted_uids(self.account_id, self.folder_id,
expunged_uids)
self.highestmodseq = new_highestmodseq
def generic_refresh_flags(self, crispin_client):
now = datetime.utcnow()
slow_refresh_due = (
self.last_slow_refresh is None or
now > self.last_slow_refresh + SLOW_REFRESH_INTERVAL
)
fast_refresh_due = (
self.last_fast_refresh is None or
now > self.last_fast_refresh + FAST_REFRESH_INTERVAL
)
if slow_refresh_due:
self.refresh_flags_impl(crispin_client, SLOW_FLAGS_REFRESH_LIMIT)
self.last_slow_refresh = datetime.utcnow()
elif fast_refresh_due:
self.refresh_flags_impl(crispin_client, FAST_FLAGS_REFRESH_LIMIT)
self.last_fast_refresh = datetime.utcnow()
def refresh_flags_impl(self, crispin_client, max_uids):
crispin_client.select_folder(self.folder_name, self.uidvalidity_cb)
with session_scope(self.namespace_id) as db_session:
local_uids = common.local_uids(account_id=self.account_id,
session=db_session,
folder_id=self.folder_id,
limit=max_uids)
flags = crispin_client.flags(local_uids)
if (max_uids in self.flags_fetch_results and
self.flags_fetch_results[max_uids] == (local_uids, flags)):
# If the flags fetch response is exactly the same as the last one
# we got, then we don't need to persist any changes.
log.debug('Unchanged flags refresh response, '
'not persisting changes', max_uids=max_uids)
return
log.debug('Changed flags refresh response, persisting changes',
max_uids=max_uids)
expunged_uids = set(local_uids).difference(flags.keys())
common.remove_deleted_uids(self.account_id, self.folder_id,
expunged_uids)
with session_scope(self.namespace_id) as db_session:
common.update_metadata(self.account_id, self.folder_id,
self.folder_role, flags, db_session)
self.flags_fetch_results[max_uids] = (local_uids, flags)
def check_uid_changes(self, crispin_client):
self.get_new_uids(crispin_client)
if crispin_client.condstore_supported():
self.condstore_refresh_flags(crispin_client)
else:
self.generic_refresh_flags(crispin_client)
@property
def uidvalidity(self):
if not hasattr(self, '_uidvalidity'):
self._uidvalidity = self._load_imap_folder_info().uidvalidity
return self._uidvalidity
@uidvalidity.setter
def uidvalidity(self, value):
self._update_imap_folder_info('uidvalidity', value)
self._uidvalidity = value
@property
def uidnext(self):
if not hasattr(self, '_uidnext'):
self._uidnext = self._load_imap_folder_info().uidnext
return self._uidnext
@uidnext.setter
def uidnext(self, value):
self._update_imap_folder_info('uidnext', value)
self._uidnext = value
@property
def last_slow_refresh(self):
# We persist the last_slow_refresh timestamp so that we don't end up
# doing a (potentially expensive) full flags refresh for every account
# on every process restart.
if not hasattr(self, '_last_slow_refresh'):
self._last_slow_refresh = self._load_imap_folder_info(). \
last_slow_refresh
return self._last_slow_refresh
@last_slow_refresh.setter
def last_slow_refresh(self, value):
self._update_imap_folder_info('last_slow_refresh', value)
self._last_slow_refresh = value
@property
def highestmodseq(self):
if not hasattr(self, '_highestmodseq'):
self._highestmodseq = self._load_imap_folder_info().highestmodseq
return self._highestmodseq
@highestmodseq.setter
def highestmodseq(self, value):
self._highestmodseq = value
self._update_imap_folder_info('highestmodseq', value)
def _load_imap_folder_info(self):
with session_scope(self.namespace_id) as db_session:
imapfolderinfo = db_session.query(ImapFolderInfo). \
filter(ImapFolderInfo.account_id == self.account_id,
ImapFolderInfo.folder_id == self.folder_id). \
one()
db_session.expunge(imapfolderinfo)
return imapfolderinfo
def _update_imap_folder_info(self, attrname, value):
with session_scope(self.namespace_id) as db_session:
imapfolderinfo = db_session.query(ImapFolderInfo). \
filter(ImapFolderInfo.account_id == self.account_id,
ImapFolderInfo.folder_id == self.folder_id). \
one()
setattr(imapfolderinfo, attrname, value)
db_session.commit()
def uidvalidity_cb(self, account_id, folder_name, select_info):
assert folder_name == self.folder_name
assert account_id == self.account_id
selected_uidvalidity = select_info['UIDVALIDITY']
is_valid = (self.uidvalidity is None or
selected_uidvalidity <= self.uidvalidity)
if not is_valid:
raise UidInvalid(
'folder: {}, remote uidvalidity: {}, '
'cached uidvalidity: {}'.format(folder_name.encode('utf-8'),
selected_uidvalidity,
self.uidvalidity))
return select_info
class UidInvalid(Exception):
"""Raised when a folder's UIDVALIDITY changes, requiring a resync."""
pass
# This version is elsewhere in the codebase, so keep it for now
# TODO(emfree): clean this up.
def uidvalidity_cb(account_id, folder_name, select_info):
assert folder_name is not None and select_info is not None, \
"must start IMAP session before verifying UIDVALIDITY"
with session_scope(account_id) as db_session:
saved_folder_info = common.get_folder_info(account_id, db_session,
folder_name)
saved_uidvalidity = or_none(saved_folder_info, lambda i:
i.uidvalidity)
selected_uidvalidity = select_info['UIDVALIDITY']
if saved_folder_info:
is_valid = (saved_uidvalidity is None or
selected_uidvalidity <= saved_uidvalidity)
if not is_valid:
raise UidInvalid(
'folder: {}, remote uidvalidity: {}, '
'cached uidvalidity: {}'.format(folder_name.encode('utf-8'),
selected_uidvalidity,
saved_uidvalidity))
return select_info
| agpl-3.0 | -1,116,494,582,475,538,600 | 43.207176 | 91 | 0.586333 | false |
ros2/rosidl | rosidl_cli/rosidl_cli/command/generate/api.py | 1 | 3504 | # Copyright 2021 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pathlib
from .extensions import load_type_extensions
from .extensions import load_typesupport_extensions
def generate(
*,
package_name,
interface_files,
include_paths=None,
output_path=None,
types=None,
typesupports=None
):
"""
Generate source code from interface definition files.
To do so, this function leverages type representation and type
support generation support as provided by third-party package
extensions.
Each path to an interface definition file is a relative path optionally
prefixed by another path followed by a colon ':', against which the first
relative path is to be resolved.
The directory structure that these relative paths exhibit will be replicated
on output (as opposed to the prefix path, which will be ignored).
If no type representation nor type support is specified, all available ones
will be generated.
If more than one type representation or type support is generated, the
name of each will be appended to the given `output_path` to preclude
name clashes upon writing source code files.
:param package_name: name of the package to generate source code for
:param interface_files: list of paths to interface definition files
:param include_paths: optional list of paths to include dependency
interface definition files from
:param output_path: optional path to directory to hold generated
source code files, defaults to the current working directory
:param types: optional list of type representations to generate
:param typesupports: optional list of type supports to generate
:returns: list of lists of paths to generated source code files,
one group per type or type support extension invoked
"""
extensions = []
unspecific_generation = not types and not typesupports
if types or unspecific_generation:
extensions.extend(load_type_extensions(
specs=types,
strict=not unspecific_generation))
if typesupports or unspecific_generation:
extensions.extend(load_typesupport_extensions(
specs=typesupports,
strict=not unspecific_generation))
if unspecific_generation and not extensions:
raise RuntimeError('No type nor typesupport extensions were found')
if include_paths is None:
include_paths = []
if output_path is None:
output_path = pathlib.Path.cwd()
else:
os.makedirs(output_path, exist_ok=True)
if len(extensions) > 1:
return [
extension.generate(
package_name, interface_files, include_paths,
output_path=output_path / extension.name)
for extension in extensions
]
return [extensions[0].generate(
package_name, interface_files,
include_paths, output_path
)]
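# A minimal usage sketch (the package name and all paths below are
# hypothetical):
#
#   generate(
#       package_name='example_pkg',
#       interface_files=['/opt/example/share/example_pkg:msg/Foo.idl'],
#       include_paths=['/opt/other/share'],
#       typesupports=['c'],
#   )
#
# This resolves 'msg/Foo.idl' against the prefix before the colon and writes
# the generated sources under the current working directory.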
| apache-2.0 | 1,224,383,898,062,600,000 | 34.393939 | 80 | 0.710616 | false |
jtaghiyar/kronos | kronos/pipelineui.py | 1 | 3217 | '''
Created on May 9, 2014
@author: jtaghiyar
'''
import argparse
import os
parser = argparse.ArgumentParser(description='Pipeline user interface')
parser.add_argument('-b', '--job_scheduler',
default='drmaa',
choices=['sge','drmaa'],
help="job scheduler used to manage jobs on the cluster")
parser.add_argument('-c', '--components_dir',
default=os.getcwd(),
required=True,
help="path to components_dir")
parser.add_argument('-d', '--drmaa_library_path',
default='lib/lx24-amd64/libdrmaa.so',
type=str,
help="path of drmaa library")
parser.add_argument('-e', '--pipeline_name',
default=None,
type=str,
help="pipeline name")
parser.add_argument('-j', '--num_jobs',
default=1,
type=int,
help='maximum number of simultaneous jobs per pipeline')
parser.add_argument('-l', '--log_file',
default=None,
type=str,
help="name of the log file")
parser.add_argument('-n', '--num_pipelines',
default=1,
type=int,
help='maximum number of simultaneous running pipelines')
parser.add_argument('--no_prefix',
default=False,
action='store_true',
help="""Switch off the prefix that is added to all the
output files.""")
parser.add_argument('-p','--python_installation',
default='python',
type=str,
help="python executable")
parser.add_argument('-q', '--qsub_options',
default=None,
type=str,
help="""native qsub specifications for the cluster
in a single string""")
parser.add_argument('-r', '--run_id',
default=None,
type=str,
help="pipeline run id used for re-running")
parser.add_argument('-w', '--working_dir',
default=os.getcwd(),
help="path to the working_dir")
## should be moved to a subcommand print
parser.add_argument('--draw_vertically',
default=False,
action='store_true',
help="specify whether to draw the plot vertically")
parser.add_argument('--extension',
default="png",
type=str,
help="specify the desired extension of the resultant file")
parser.add_argument('--no_key_legend',
default=False,
action='store_true',
help="if True, hide the legend.")
parser.add_argument('--print_only',
default=False,
action='store_true',
help="""if True, print the workflow graph only without
running the pipeline.""")
args, unknown = parser.parse_known_args()
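# A hypothetical invocation handled by this parser (flags as defined above):
#   <pipeline entry point> -c ./components -w ./work_dir -j 4 -n 2 -b sge
# `args` carries the recognized options; unrecognized flags land in `unknown`.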
| mit | -5,165,773,639,326,006,000 | 32.863158 | 79 | 0.484613 | false |
APSL/django-kaio | kaio/mixins/email.py | 1 | 2693 | # -*- coding: utf-8 -*-
import logging
import os
from kaio import Options
from functools import partial
logger = logging.getLogger(__name__)
opts = Options()
get = partial(opts.get, section='Email')
class EmailMixin(object):
"""Settings para enviar emails"""
# Django settings: https://docs.djangoproject.com/en/1.11/ref/settings/#email-backend
@property
def DEFAULT_FROM_EMAIL(self):
return get('DEFAULT_FROM_EMAIL', 'Example <[email protected]>')
@property
def EMAIL_BACKEND(self):
backend = get('EMAIL_BACKEND')
if backend:
return backend
backend = 'django.core.mail.backends.smtp.EmailBackend'
if 'django_yubin' in self.INSTALLED_APPS:
try:
import django_yubin # noqa: F401
backend = 'django_yubin.smtp_queue.EmailBackend'
except ImportError:
logger.warn('WARNING: django_yubin in INSTALLED_APPS but not pip installed.')
return backend
@property
def EMAIL_FILE_PATH(self):
return get('EMAIL_FILE_PATH', None)
@property
def EMAIL_HOST(self):
return get('EMAIL_HOST', 'localhost')
@property
def EMAIL_HOST_PASSWORD(self):
return get('EMAIL_HOST_PASSWORD', '')
@property
def EMAIL_HOST_USER(self):
return get('EMAIL_HOST_USER', '')
@property
def EMAIL_PORT(self):
return get('EMAIL_PORT', 25)
@property
def EMAIL_SUBJECT_PREFIX(self):
return get('EMAIL_SUBJECT_PREFIX', '[Django] ')
@property
def EMAIL_USE_TLS(self):
return get('EMAIL_USE_TLS', False)
# django-yubin settings: http://django-yubin.readthedocs.org/en/latest/settings.html
@property
def MAILER_PAUSE_SEND(self):
return get('MAILER_PAUSE_SEND', False)
@property
def MAILER_USE_BACKEND(self):
return get('MAILER_USE_BACKEND', 'django.core.mail.backends.smtp.EmailBackend')
@property
def MAILER_MAIL_ADMINS_PRIORITY(self):
try:
from django_yubin import constants
priority = constants.PRIORITY_HIGH
except Exception:
priority = 1
return get('MAILER_MAIL_ADMINS_PRIORITY', priority)
@property
def MAILER_MAIL_MANAGERS_PRIORITY(self):
return get('MAILER_MAIL_MANAGERS_PRIORITY', None)
@property
def MAILER_EMPTY_QUEUE_SLEEP(self):
return get('MAILER_EMPTY_QUEUE_SLEEP', 30)
@property
def MAILER_LOCK_WAIT_TIMEOUT(self):
return get('MAILER_LOCK_WAIT_TIMEOUT', 0)
@property
def MAILER_LOCK_PATH(self):
return get("MAILER_LOCK_PATH", os.path.join(self.APP_ROOT, "send_mail"))
| bsd-3-clause | -6,720,387,585,946,271,000 | 26.20202 | 93 | 0.629781 | false |
ozgurgunes/django-cmskit | cmskit/slideshow/migrations/0001_initial.py | 1 | 5691 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Slideshow'
db.create_table('cmsplugin_slideshow', (
('cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
))
db.send_create_signal('slideshow', ['Slideshow'])
# Adding model 'Slide'
db.create_table('slideshow_slide', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('ordering', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('slideshow', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['slideshow.Slideshow'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=216, null=True, blank=True)),
('summary', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('picture', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True)),
('picture_width', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True)),
('picture_height', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True)),
('alt', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('url', self.gf('django.db.models.fields.CharField')(max_length=216)),
('publish', self.gf('django.db.models.fields.BooleanField')(default=False)),
('date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('date_updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, blank=True)),
))
db.send_create_signal('slideshow', ['Slide'])
def backwards(self, orm):
# Deleting model 'Slideshow'
db.delete_table('cmsplugin_slideshow')
# Deleting model 'Slide'
db.delete_table('slideshow_slide')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'slideshow.slide': {
'Meta': {'ordering': "('ordering',)", 'object_name': 'Slide'},
'alt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'picture_height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'picture_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'publish': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slideshow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['slideshow.Slideshow']"}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '216', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '216'})
},
'slideshow.slideshow': {
'Meta': {'object_name': 'Slideshow', 'db_table': "'cmsplugin_slideshow'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['slideshow'] | mit | 5,875,987,426,555,961,000 | 64.425287 | 155 | 0.583377 | false |
les69/calvin-base | calvin/runtime/south/plugins/storage/twistedimpl/securedht/tests/test_dht_server_nice4.py | 1 | 7957 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import sys
import os
import traceback
import hashlib
import twisted
import shutil
import json
from calvin.utilities.calvin_callback import CalvinCB
from calvin.utilities import calvinlogger
from calvin.utilities.utils import get_home
from calvin.runtime.south.plugins.storage.twistedimpl.securedht.append_server import *
from calvin.runtime.south.plugins.storage.twistedimpl.securedht.dht_server import *
from calvin.runtime.south.plugins.storage.twistedimpl.securedht.service_discovery_ssdp import *
from calvin.runtime.south.plugins.storage.twistedimpl.securedht.dht_server_commons import drawNetworkState
from kademlia.node import Node
from kademlia.utils import deferredDict, digest
from calvin.runtime.south.plugins.async import threads
from calvin.utilities import calvinconfig
_conf = calvinconfig.get()
_conf.add_section("security")
_conf_file = os.path.join(get_home(), ".calvin/security/test/openssl.conf")
_conf.set("security", "certificate_conf", _conf_file)
_conf.set("security", "certificate_domain", "test")
_cert_conf = None
_log = calvinlogger.get_logger(__name__)
name = "node4:"
@pytest.fixture(scope="session", autouse=True)
def cleanup(request):
def fin():
reactor.callFromThread(reactor.stop)
request.addfinalizer(fin)
@pytest.mark.interactive
@pytest.mark.slow
class TestDHT(object):
test_nodes = 2
    _success_start = (True,)
@pytest.fixture(autouse=True, scope="class")
def setup(self, request):
global _cert_conf
_cert_conf = certificate.Config(_conf_file, "test").configuration
@pytest.inlineCallbacks
def test_dht_multi(self, monkeypatch):
iface = "0.0.0.0"
a = None
b = None
q = Queue.Queue()
def server_started(aa, *args):
for b in args:
if isinstance(b, twisted.python.failure.Failure):
b.printTraceback()
else:
_log.debug("** %s" % b)
q.put([aa,args])
try:
amount_of_servers = 5
# Twisted is using 20 threads so having > 20 server
# causes threadlocks really easily.
servers = []
callbacks = []
for servno in range(0, amount_of_servers):
a = AutoDHTServer()
servers.append(a)
callback = CalvinCB(server_started, str(servno))
servers[servno].start(iface, network="Niklas", cb=callback, name=name + "{}".format(servno))
callbacks.append(callback)
# Wait for start
started = []
while len(started) < amount_of_servers:
try:
server = yield threads.defer_to_thread(q.get)
except Queue.Empty:
_log.debug("Queue empty!")
#raise
if server not in started:
started.append(server)
#print("DHT Servers added: {}".format(started))
callbacks[int(server[0][0])].func = lambda *args, **kvargs:None
else:
print("Server: {} already started." \
" {} out of {}".format(started,
len(started),
amount_of_servers))
print("All {} out of {} started".format(started,
len(started),
amount_of_servers))
for servno in range(0, amount_of_servers):
                assert [str(servno), self._success_start] in started
yield threads.defer_to_thread(q.queue.clear)
yield threads.defer_to_thread(time.sleep, 8)
key = "HARE"
value = json.dumps(["morot"])
set_def = servers[0].append(key=key, value=value)
set_value = yield threads.defer_to_thread(set_def.wait, 10)
assert set_value
print("Node with port {} posted append key={}, value={}".format(servers[0].dht_server.port.getHost().port, key, value))
value = json.dumps(["selleri"])
set_def = servers[0].append(key=key, value=value)
set_value = yield threads.defer_to_thread(set_def.wait, 10)
assert set_value
print("Node with port {} posted append key={}, value={}".format(servers[0].dht_server.port.getHost().port, key, value))
get_def = servers[0].get_concat(key=key)
get_value = yield threads.defer_to_thread(get_def.wait, 10)
assert set(json.loads(get_value)) == set(["morot", "selleri"])
print("Node with port {} confirmed key={}, value={} was reachable".format(servers[0].dht_server.port.getHost().port, key, get_value))
drawNetworkState("1nice_graph.png", servers, amount_of_servers)
yield threads.defer_to_thread(time.sleep, 7)
drawNetworkState("1middle_graph.png", servers, amount_of_servers)
yield threads.defer_to_thread(time.sleep, 7)
drawNetworkState("1end_graph.png", servers, amount_of_servers)
get_def = servers[0].get_concat(key=key)
get_value = yield threads.defer_to_thread(get_def.wait, 10)
assert set(json.loads(get_value)) == set(["morot", "selleri"])
print("Node with port {} got right value: {}".format(servers[0].dht_server.port.getHost().port, get_value))
value = json.dumps(["morot"])
set_def = servers[0].remove(key=key, value=value)
set_value = yield threads.defer_to_thread(set_def.wait, 10)
assert set_value
print("Node with port {} posted remove key={}, value={}".format(servers[0].dht_server.port.getHost().port, key, value))
get_def = servers[1].get_concat(key=key)
get_value = yield threads.defer_to_thread(get_def.wait, 10)
assert set(json.loads(get_value)) == set(["selleri"])
print("Node with port {} got right value: {}".format(servers[0].dht_server.port.getHost().port, get_value))
for i in range(0, amount_of_servers):
name_dir = os.path.join(_cert_conf["CA_default"]["runtimes_dir"], "{}{}".format(name, i))
filenames = os.listdir(os.path.join(name_dir, "others"))
print("Node with port {} has {} certificates in store".format(servers[i].dht_server.port.getHost().port, len(filenames)))
except AssertionError as e:
print("Node with port {} got wrong value: {}, should have been {}".format(servers[0].dht_server.port.getHost().port, get_value, value))
pytest.fail(traceback.format_exc())
except Exception as e:
traceback.print_exc()
pytest.fail(traceback.format_exc())
finally:
yield threads.defer_to_thread(time.sleep, 10)
i = 0
for server in servers:
name_dir = os.path.join(_cert_conf["CA_default"]["runtimes_dir"], name + "{}".format(i))
shutil.rmtree(os.path.join(name_dir, "others"), ignore_errors=True)
os.mkdir(os.path.join(name_dir, "others"))
i += 1
server.stop()
| apache-2.0 | -2,925,898,608,843,406,300 | 43.452514 | 147 | 0.590926 | false |
mazaclub/electrum-nmc | gui/qt/lite_window.py | 1 | 31019 | import sys
# Let's do some dep checking and handle missing ones gracefully
try:
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.Qt import Qt
import PyQt4.QtCore as QtCore
except ImportError:
print "You need to have PyQT installed to run Electrum-NMC in graphical mode."
print "If you have pip installed try 'sudo pip install pyqt' if you are on Debian/Ubuntu try 'sudo apt-get install python-qt4'."
sys.exit(0)
from decimal import Decimal as D
from electrum_nmc.util import get_resource_path as rsrc
from electrum_nmc.bitcoin import is_valid
from electrum_nmc.i18n import _
import decimal
import json
import os.path
import random
import re
import time
from electrum_nmc.wallet import Wallet, WalletStorage
import webbrowser
import history_widget
import receiving_widget
from electrum_nmc import util
import datetime
from electrum_nmc.version import ELECTRUM_VERSION as electrum_version
from electrum_nmc.util import format_satoshis, age
from main_window import ElectrumWindow
import shutil
from util import *
bitcoin = lambda v: v * 100000000
def IconButton(filename, parent=None):
pixmap = QPixmap(filename)
icon = QIcon(pixmap)
return QPushButton(icon, "", parent)
def resize_line_edit_width(line_edit, text_input):
metrics = QFontMetrics(qApp.font())
# Create an extra character to add some space on the end
text_input += "A"
line_edit.setMinimumWidth(metrics.width(text_input))
def load_theme_name(theme_path):
try:
with open(os.path.join(theme_path, "name.cfg")) as name_cfg_file:
return name_cfg_file.read().rstrip("\n").strip()
except IOError:
return None
def theme_dirs_from_prefix(prefix):
if not os.path.exists(prefix):
return []
theme_paths = {}
for potential_theme in os.listdir(prefix):
theme_full_path = os.path.join(prefix, potential_theme)
theme_css = os.path.join(theme_full_path, "style.css")
if not os.path.exists(theme_css):
continue
theme_name = load_theme_name(theme_full_path)
if theme_name is None:
continue
theme_paths[theme_name] = prefix, potential_theme
return theme_paths
def load_theme_paths():
theme_paths = {}
theme_paths.update(theme_dirs_from_prefix(util.data_dir()))
return theme_paths
class TransactionWindow(QDialog):
def set_label(self):
label = unicode(self.label_edit.text())
self.parent.wallet.labels[self.tx_id] = label
super(TransactionWindow, self).accept()
def __init__(self, transaction_id, parent):
super(TransactionWindow, self).__init__()
self.tx_id = str(transaction_id)
self.parent = parent
self.setModal(True)
self.resize(200,100)
self.setWindowTitle(_("Transaction successfully sent"))
self.layout = QGridLayout(self)
history_label = "%s\n%s" % (_("Your transaction has been sent."), _("Please enter a label for this transaction for future reference."))
self.layout.addWidget(QLabel(history_label))
self.label_edit = QLineEdit()
self.label_edit.setPlaceholderText(_("Transaction label"))
self.label_edit.setObjectName("label_input")
self.label_edit.setAttribute(Qt.WA_MacShowFocusRect, 0)
self.label_edit.setFocusPolicy(Qt.ClickFocus)
self.layout.addWidget(self.label_edit)
self.save_button = QPushButton(_("Save"))
self.layout.addWidget(self.save_button)
self.save_button.clicked.connect(self.set_label)
self.exec_()
class MiniWindow(QDialog):
def __init__(self, actuator, expand_callback, config):
super(MiniWindow, self).__init__()
self.actuator = actuator
self.config = config
self.btc_balance = None
self.use_exchanges = ["Blockchain", "CoinDesk"]
self.quote_currencies = ["BRL", "CNY", "EUR", "GBP", "RUB", "USD"]
self.actuator.set_configured_currency(self.set_quote_currency)
self.actuator.set_configured_exchange(self.set_exchange)
# Needed because price discovery is done in a different thread
# which needs to be sent back to this main one to update the GUI
self.connect(self, SIGNAL("refresh_balance()"), self.refresh_balance)
self.balance_label = BalanceLabel(self.change_quote_currency, self)
self.balance_label.setObjectName("balance_label")
# Bitcoin address code
self.address_input = QLineEdit()
self.address_input.setPlaceholderText(_("Enter a Namecoin address or contact"))
self.address_input.setObjectName("address_input")
self.address_input.setFocusPolicy(Qt.ClickFocus)
self.address_input.textChanged.connect(self.address_field_changed)
resize_line_edit_width(self.address_input,
"NC6UXyecv2pkB3yBRGRMC2T7UhogJgq3nu")
self.address_completions = QStringListModel()
address_completer = QCompleter(self.address_input)
address_completer.setCaseSensitivity(False)
address_completer.setModel(self.address_completions)
self.address_input.setCompleter(address_completer)
address_layout = QHBoxLayout()
address_layout.addWidget(self.address_input)
self.amount_input = QLineEdit()
self.amount_input.setPlaceholderText(_("... and amount") + " (%s)"%self.actuator.g.base_unit())
self.amount_input.setObjectName("amount_input")
self.amount_input.setFocusPolicy(Qt.ClickFocus)
# This is changed according to the user's displayed balance
self.amount_validator = QDoubleValidator(self.amount_input)
self.amount_validator.setNotation(QDoubleValidator.StandardNotation)
self.amount_validator.setDecimals(8)
self.amount_input.setValidator(self.amount_validator)
# This removes the very ugly OSX highlighting, please leave this in :D
self.address_input.setAttribute(Qt.WA_MacShowFocusRect, 0)
self.amount_input.setAttribute(Qt.WA_MacShowFocusRect, 0)
self.amount_input.textChanged.connect(self.amount_input_changed)
#if self.actuator.g.wallet.seed:
self.send_button = QPushButton(_("&Send"))
#else:
# self.send_button = QPushButton(_("&Create"))
self.send_button.setObjectName("send_button")
self.send_button.setDisabled(True);
self.send_button.clicked.connect(self.send)
# Creating the receive button
self.switch_button = QPushButton( QIcon(":icons/switchgui.png"),'' )
self.switch_button.setMaximumWidth(25)
self.switch_button.setFlat(True)
self.switch_button.clicked.connect(expand_callback)
main_layout = QGridLayout(self)
main_layout.addWidget(self.balance_label, 0, 0, 1, 3)
main_layout.addWidget(self.switch_button, 0, 3)
main_layout.addWidget(self.address_input, 1, 0, 1, 4)
main_layout.addWidget(self.amount_input, 2, 0, 1, 2)
main_layout.addWidget(self.send_button, 2, 2, 1, 2)
self.send_button.setMaximumWidth(125)
self.history_list = history_widget.HistoryWidget()
self.history_list.setObjectName("history")
self.history_list.hide()
self.history_list.setAlternatingRowColors(True)
main_layout.addWidget(self.history_list, 3, 0, 1, 4)
self.receiving = receiving_widget.ReceivingWidget(self)
self.receiving.setObjectName("receiving")
# Add to the right side
self.receiving_box = QGroupBox(_("Select a receiving address"))
extra_layout = QGridLayout()
# Checkbox to filter used addresses
hide_used = QCheckBox(_('Hide used addresses'))
hide_used.setChecked(True)
hide_used.stateChanged.connect(self.receiving.toggle_used)
# Events for receiving addresses
self.receiving.clicked.connect(self.receiving.copy_address)
self.receiving.itemDoubleClicked.connect(self.receiving.edit_label)
self.receiving.itemChanged.connect(self.receiving.update_label)
# Label
extra_layout.addWidget( QLabel(_('Selecting an address will copy it to the clipboard.') + '\n' + _('Double clicking the label will allow you to edit it.') ),0,0)
extra_layout.addWidget(self.receiving, 1,0)
extra_layout.addWidget(hide_used, 2,0)
extra_layout.setColumnMinimumWidth(0,200)
self.receiving_box.setLayout(extra_layout)
main_layout.addWidget(self.receiving_box,0,4,-1,3)
self.receiving_box.hide()
self.main_layout = main_layout
quit_shortcut = QShortcut(QKeySequence("Ctrl+Q"), self)
quit_shortcut.activated.connect(self.close)
close_shortcut = QShortcut(QKeySequence("Ctrl+W"), self)
close_shortcut.activated.connect(self.close)
g = self.config.get("winpos-lite",[4, 25, 351, 149])
self.setGeometry(g[0], g[1], g[2], g[3])
show_hist = self.config.get("gui_show_history",False)
self.show_history(show_hist)
show_hist = self.config.get("gui_show_receiving",False)
self.toggle_receiving_layout(show_hist)
self.setWindowIcon(QIcon(":icons/electrum-nmc.png"))
self.setWindowTitle("Electrum-NMC")
self.setWindowFlags(Qt.Window|Qt.MSWindowsFixedSizeDialogHint)
self.layout().setSizeConstraint(QLayout.SetFixedSize)
self.setObjectName("main_window")
def context_menu(self):
view_menu = QMenu()
themes_menu = view_menu.addMenu(_("&Themes"))
selected_theme = self.actuator.selected_theme()
theme_group = QActionGroup(self)
for theme_name in self.actuator.theme_names():
theme_action = themes_menu.addAction(theme_name)
theme_action.setCheckable(True)
if selected_theme == theme_name:
theme_action.setChecked(True)
class SelectThemeFunctor:
def __init__(self, theme_name, toggle_theme):
self.theme_name = theme_name
self.toggle_theme = toggle_theme
def __call__(self, checked):
if checked:
self.toggle_theme(self.theme_name)
delegate = SelectThemeFunctor(theme_name, self.toggle_theme)
theme_action.toggled.connect(delegate)
theme_group.addAction(theme_action)
view_menu.addSeparator()
show_receiving = view_menu.addAction(_("Show Receiving addresses"))
show_receiving.setCheckable(True)
show_receiving.toggled.connect(self.toggle_receiving_layout)
show_receiving.setChecked(self.config.get("gui_show_receiving",False))
show_history = view_menu.addAction(_("Show History"))
show_history.setCheckable(True)
show_history.toggled.connect(self.show_history)
show_history.setChecked(self.config.get("gui_show_history",False))
return view_menu
def toggle_theme(self, theme_name):
self.actuator.change_theme(theme_name)
# Recompute style globally
qApp.style().unpolish(self)
qApp.style().polish(self)
def closeEvent(self, event):
g = self.geometry()
self.config.set_key("winpos-lite", [g.left(),g.top(),g.width(),g.height()],True)
self.actuator.g.closeEvent(event)
qApp.quit()
def pay_from_URI(self, URI):
try:
dest_address, amount, label, message, request_url = util.parse_URI(URI)
except:
return
self.address_input.setText(dest_address)
self.address_field_changed(dest_address)
self.amount_input.setText(str(amount))
def activate(self):
pass
def deactivate(self):
pass
def set_exchange(self, use_exchange):
if use_exchange not in self.use_exchanges:
return
self.use_exchanges.remove(use_exchange)
self.use_exchanges.insert(0, use_exchange)
self.refresh_balance()
def set_quote_currency(self, currency):
"""Set and display the fiat currency country."""
if currency not in self.quote_currencies:
return
self.quote_currencies.remove(currency)
self.quote_currencies.insert(0, currency)
self.refresh_balance()
def change_quote_currency(self, forward=True):
if forward:
self.quote_currencies = \
self.quote_currencies[1:] + self.quote_currencies[0:1]
else:
self.quote_currencies = \
self.quote_currencies[-1:] + self.quote_currencies[0:-1]
self.actuator.set_config_currency(self.quote_currencies[0])
self.refresh_balance()
def refresh_balance(self):
if self.btc_balance is None:
# Price has been discovered before wallet has been loaded
# and server connect... so bail.
return
self.set_balances(self.btc_balance)
self.amount_input_changed(self.amount_input.text())
def set_balances(self, btc_balance):
"""Set the namecoin balance and update the amount label accordingly."""
self.btc_balance = btc_balance
quote_text = self.create_quote_text(btc_balance)
if quote_text:
quote_text = "(%s)" % quote_text
amount = self.actuator.g.format_amount(btc_balance)
unit = self.actuator.g.base_unit()
self.balance_label.set_balance_text(amount, unit, quote_text)
self.setWindowTitle("Electrum-NMC %s - %s %s" % (electrum_version, amount, unit))
def amount_input_changed(self, amount_text):
"""Update the number of namecoins displayed."""
self.check_button_status()
try:
amount = D(str(amount_text)) * (10**self.actuator.g.decimal_point)
except decimal.InvalidOperation:
self.balance_label.show_balance()
else:
quote_text = self.create_quote_text(amount)
if quote_text:
self.balance_label.set_amount_text(quote_text)
self.balance_label.show_amount()
else:
self.balance_label.show_balance()
def create_quote_text(self, btc_balance):
"""Return a string copy of the amount fiat currency the
user has in namecoins."""
from electrum_nmc.plugins import run_hook
r = {}
run_hook('get_fiat_balance_text', btc_balance, r)
return r.get(0,'')
def send(self):
if self.actuator.send(self.address_input.text(),
self.amount_input.text(), self):
self.address_input.setText("")
self.amount_input.setText("")
def check_button_status(self):
"""Check that the namecoin address is valid and that something
is entered in the amount before making the send button clickable."""
try:
value = D(str(self.amount_input.text())) * (10**self.actuator.g.decimal_point)
except decimal.InvalidOperation:
value = None
# self.address_input.property(...) returns a qVariant, not a bool.
# The == is needed to properly invoke a comparison.
if (self.address_input.property("isValid") == True and
value is not None and 0 < value <= self.btc_balance):
self.send_button.setDisabled(False)
else:
self.send_button.setDisabled(True)
def address_field_changed(self, address):
# label or alias, with address in brackets
match2 = re.match("(.*?)\s*\<([1-9A-HJ-NP-Za-km-z]{26,})\>",
address)
if match2:
address = match2.group(2)
self.address_input.setText(address)
if is_valid(address):
self.check_button_status()
self.address_input.setProperty("isValid", True)
self.recompute_style(self.address_input)
else:
self.send_button.setDisabled(True)
self.address_input.setProperty("isValid", False)
self.recompute_style(self.address_input)
if len(address) == 0:
self.address_input.setProperty("isValid", None)
self.recompute_style(self.address_input)
def recompute_style(self, element):
self.style().unpolish(element)
self.style().polish(element)
def copy_address(self):
receive_popup = ReceivePopup(self.receive_button)
self.actuator.copy_address(receive_popup)
def update_completions(self, completions):
self.address_completions.setStringList(completions)
def update_history(self, tx_history):
self.history_list.empty()
for item in tx_history[-10:]:
tx_hash, conf, is_mine, value, fee, balance, timestamp = item
label = self.actuator.g.wallet.get_label(tx_hash)[0]
v_str = self.actuator.g.format_amount(value, True)
self.history_list.append(label, v_str, age(timestamp))
def the_website(self):
webbrowser.open("http://electrum.org")
def toggle_receiving_layout(self, toggle_state):
if toggle_state:
self.receiving_box.show()
else:
self.receiving_box.hide()
self.config.set_key("gui_show_receiving", toggle_state)
def show_history(self, toggle_state):
if toggle_state:
self.main_layout.setRowMinimumHeight(3,200)
self.history_list.show()
else:
self.main_layout.setRowMinimumHeight(3,0)
self.history_list.hide()
self.config.set_key("gui_show_history", toggle_state)
class BalanceLabel(QLabel):
SHOW_CONNECTING = 1
SHOW_BALANCE = 2
SHOW_AMOUNT = 3
def __init__(self, change_quote_currency, parent=None):
super(QLabel, self).__init__(_("Connecting..."), parent)
self.change_quote_currency = change_quote_currency
self.state = self.SHOW_CONNECTING
self.balance_text = ""
self.amount_text = ""
self.parent = parent
def mousePressEvent(self, event):
"""Change the fiat currency selection if window background is clicked."""
if self.state != self.SHOW_CONNECTING:
if event.button() == Qt.LeftButton:
self.change_quote_currency()
else:
position = event.globalPos()
menu = self.parent.context_menu()
menu.exec_(position)
def set_balance_text(self, amount, unit, quote_text):
"""Set the amount of namecoins in the gui."""
if self.state == self.SHOW_CONNECTING:
self.state = self.SHOW_BALANCE
self.balance_text = "<span style='font-size: 18pt'>%s</span>"%amount\
+ " <span style='font-size: 10pt'>%s</span>" % unit \
+ " <span style='font-size: 10pt'>%s</span>" % quote_text
if self.state == self.SHOW_BALANCE:
self.setText(self.balance_text)
def set_amount_text(self, quote_text):
self.amount_text = "<span style='font-size: 10pt'>%s</span>" % quote_text
if self.state == self.SHOW_AMOUNT:
self.setText(self.amount_text)
def show_balance(self):
if self.state == self.SHOW_AMOUNT:
self.state = self.SHOW_BALANCE
self.setText(self.balance_text)
def show_amount(self):
if self.state == self.SHOW_BALANCE:
self.state = self.SHOW_AMOUNT
self.setText(self.amount_text)
def ok_cancel_buttons(dialog):
row_layout = QHBoxLayout()
row_layout.addStretch(1)
ok_button = QPushButton(_("OK"))
row_layout.addWidget(ok_button)
ok_button.clicked.connect(dialog.accept)
cancel_button = QPushButton(_("Cancel"))
row_layout.addWidget(cancel_button)
cancel_button.clicked.connect(dialog.reject)
return row_layout
class PasswordDialog(QDialog):
def __init__(self, parent):
super(QDialog, self).__init__(parent)
self.setModal(True)
self.password_input = QLineEdit()
self.password_input.setEchoMode(QLineEdit.Password)
main_layout = QVBoxLayout(self)
message = _('Please enter your password')
main_layout.addWidget(QLabel(message))
grid = QGridLayout()
grid.setSpacing(8)
grid.addWidget(QLabel(_('Password')), 1, 0)
grid.addWidget(self.password_input, 1, 1)
main_layout.addLayout(grid)
main_layout.addLayout(ok_cancel_buttons(self))
self.setLayout(main_layout)
def run(self):
if not self.exec_():
return
return unicode(self.password_input.text())
class ReceivePopup(QDialog):
def leaveEvent(self, event):
self.close()
def setup(self, address):
label = QLabel(_("Copied your Namecoin address to the clipboard!"))
address_display = QLineEdit(address)
address_display.setReadOnly(True)
resize_line_edit_width(address_display, address)
main_layout = QVBoxLayout(self)
main_layout.addWidget(label)
main_layout.addWidget(address_display)
self.setMouseTracking(True)
self.setWindowTitle("Electrum-NMC - " + _("Receive Namecoin payment"))
self.setWindowFlags(Qt.Window|Qt.FramelessWindowHint|
Qt.MSWindowsFixedSizeDialogHint)
self.layout().setSizeConstraint(QLayout.SetFixedSize)
#self.setFrameStyle(QFrame.WinPanel|QFrame.Raised)
#self.setAlignment(Qt.AlignCenter)
def popup(self):
parent = self.parent()
top_left_pos = parent.mapToGlobal(parent.rect().bottomLeft())
self.move(top_left_pos)
center_mouse_pos = self.mapToGlobal(self.rect().center())
QCursor.setPos(center_mouse_pos)
self.show()
class MiniActuator:
"""Initialize the definitions relating to themes and
sending/receiving namecoins."""
def __init__(self, main_window):
"""Retrieve the gui theme used in previous session."""
self.g = main_window
self.theme_name = self.g.config.get('litegui_theme','Cleanlook')
self.themes = load_theme_paths()
self.load_theme()
def load_theme(self):
"""Load theme retrieved from wallet file."""
try:
theme_prefix, theme_path = self.themes[self.theme_name]
except KeyError:
util.print_error("Theme not found!", self.theme_name)
return
full_theme_path = "%s/%s/style.css" % (theme_prefix, theme_path)
with open(full_theme_path) as style_file:
qApp.setStyleSheet(style_file.read())
def theme_names(self):
"""Sort themes."""
return sorted(self.themes.keys())
def selected_theme(self):
"""Select theme."""
return self.theme_name
def change_theme(self, theme_name):
"""Change theme."""
self.theme_name = theme_name
self.g.config.set_key('litegui_theme',theme_name)
self.load_theme()
def set_configured_exchange(self, set_exchange):
use_exchange = self.g.config.get('use_exchange')
if use_exchange is not None:
set_exchange(use_exchange)
def set_configured_currency(self, set_quote_currency):
"""Set the inital fiat currency conversion country (USD/EUR/GBP) in
the GUI to what it was set to in the wallet."""
currency = self.g.config.get('currency')
# currency can be none when Electrum is used for the first
# time and no setting has been created yet.
if currency is not None:
set_quote_currency(currency)
def set_config_exchange(self, conversion_exchange):
self.g.config.set_key('exchange',conversion_exchange,True)
self.g.update_status()
def set_config_currency(self, conversion_currency):
"""Change the wallet fiat currency country."""
self.g.config.set_key('currency',conversion_currency,True)
self.g.update_status()
def copy_address(self, receive_popup):
"""Copy the wallet addresses into the client."""
addrs = [addr for addr in self.g.wallet.addresses(True)
if not self.g.wallet.is_change(addr)]
# Select most recent addresses from gap limit
addrs = addrs[-self.g.wallet.gap_limit:]
copied_address = random.choice(addrs)
qApp.clipboard().setText(copied_address)
receive_popup.setup(copied_address)
receive_popup.popup()
def waiting_dialog(self, f):
s = Timer()
s.start()
w = QDialog()
w.resize(200, 70)
w.setWindowTitle('Electrum-NMC')
l = QLabel(_('Sending transaction, please wait.'))
vbox = QVBoxLayout()
vbox.addWidget(l)
w.setLayout(vbox)
w.show()
def ff():
s = f()
if s: l.setText(s)
else: w.close()
w.connect(s, QtCore.SIGNAL('timersignal'), ff)
w.exec_()
w.destroy()
def send(self, address, amount, parent_window):
"""Send namecoins to the target address."""
dest_address = self.fetch_destination(address)
if dest_address is None or not is_valid(dest_address):
QMessageBox.warning(parent_window, _('Error'),
_('Invalid Namecoin Address') + ':\n' + address, _('OK'))
return False
        amount = D(unicode(amount)) * (10**self.g.decimal_point)
if self.g.wallet.use_encryption:
password_dialog = PasswordDialog(parent_window)
password = password_dialog.run()
if not password:
return
else:
password = None
fee = 0
# 0.1 BTC = 10000000
if amount < bitcoin(1) / 10:
# 0.001 BTC
fee = bitcoin(1) / 1000
try:
tx = self.g.wallet.mktx([(dest_address, amount)], password, fee)
except Exception as error:
QMessageBox.warning(parent_window, _('Error'), str(error), _('OK'))
return False
if tx.is_complete():
h = self.g.wallet.send_tx(tx)
self.waiting_dialog(lambda: False if self.g.wallet.tx_event.isSet() else _("Sending transaction, please wait..."))
status, message = self.g.wallet.receive_tx(h, tx)
if not status:
import tempfile
dumpf = tempfile.NamedTemporaryFile(delete=False)
dumpf.write(tx)
dumpf.close()
print "Dumped error tx to", dumpf.name
QMessageBox.warning(parent_window, _('Error'), message, _('OK'))
return False
TransactionWindow(message, self)
else:
filename = 'unsigned_tx_%s' % (time.mktime(time.gmtime()))
try:
fileName = QFileDialog.getSaveFileName(QWidget(), _("Select a transaction filename"), os.path.expanduser('~/%s' % (filename)))
with open(fileName,'w') as f:
f.write(json.dumps(tx.as_dict(),indent=4) + '\n')
QMessageBox.information(QWidget(), _('Unsigned transaction created'), _("Unsigned transaction was saved to file:") + " " +fileName, _('OK'))
except Exception as e:
QMessageBox.warning(QWidget(), _('Error'), _('Could not write transaction to file: %s' % e), _('OK'))
return True
def fetch_destination(self, address):
recipient = unicode(address).strip()
# alias
match1 = re.match("^(|([\w\-\.]+)@)((\w[\w\-]+\.)+[\w\-]+)$",
recipient)
# label or alias, with address in brackets
match2 = re.match("(.*?)\s*\<([1-9A-HJ-NP-Za-km-z]{26,})\>",
recipient)
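        # A hypothetical example of the second form: "Savings <NC6UXyecv2pk...>"
        # matches match2, and group(2) yields the bare address.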
if match1:
dest_address = \
self.g.wallet.get_alias(recipient, True,
self.show_message, self.question)
return dest_address
elif match2:
return match2.group(2)
else:
return recipient
class MiniDriver(QObject):
INITIALIZING = 0
CONNECTING = 1
SYNCHRONIZING = 2
READY = 3
def __init__(self, main_window, mini_window):
super(QObject, self).__init__()
self.g = main_window
self.network = main_window.network
self.window = mini_window
if self.network:
self.network.register_callback('updated',self.update_callback)
self.network.register_callback('status', self.update_callback)
self.state = None
self.initializing()
self.connect(self, SIGNAL("updatesignal()"), self.update)
self.update_callback()
# This is a hack to workaround that Qt does not like changing the
# window properties from this other thread before the runloop has
# been called from.
def update_callback(self):
self.emit(SIGNAL("updatesignal()"))
def update(self):
if not self.network:
self.initializing()
#elif not self.network.interface:
# self.initializing()
elif not self.network.is_connected():
self.connecting()
if self.g.wallet is None:
self.ready()
elif not self.g.wallet.up_to_date:
self.synchronizing()
else:
self.ready()
self.update_balance()
self.update_completions()
self.update_history()
self.window.receiving.update_list()
def initializing(self):
if self.state == self.INITIALIZING:
return
self.state = self.INITIALIZING
self.window.deactivate()
def connecting(self):
if self.state == self.CONNECTING:
return
self.state = self.CONNECTING
self.window.deactivate()
def synchronizing(self):
if self.state == self.SYNCHRONIZING:
return
self.state = self.SYNCHRONIZING
self.window.deactivate()
def ready(self):
if self.state == self.READY:
return
self.state = self.READY
self.window.activate()
def update_balance(self):
conf_balance, unconf_balance = self.g.wallet.get_balance()
balance = D(conf_balance + unconf_balance)
self.window.set_balances(balance)
def update_completions(self):
completions = []
for addr, label in self.g.wallet.labels.items():
if addr in self.g.wallet.addressbook:
completions.append("%s <%s>" % (label, addr))
self.window.update_completions(completions)
def update_history(self):
tx_history = self.g.wallet.get_tx_history()
self.window.update_history(tx_history)
if __name__ == "__main__":
app = QApplication(sys.argv)
with open(rsrc("style.css")) as style_file:
app.setStyleSheet(style_file.read())
mini = MiniWindow()
sys.exit(app.exec_())
| gpl-3.0 | -6,350,053,012,183,332,000 | 34.490847 | 169 | 0.617299 | false |
OCM-Lab-PUC/switch-chile | python_utility_scripts/create_transmission_csv.py | 1 | 5810 | # -*- coding: utf-8 -*-
# Copyright 2016 The Switch-Chile Authors. All rights reserved.
# Licensed under the Apache License, Version 2, which is in the LICENSE file.
# Operations, Control and Markets laboratory at Pontificia Universidad
# Católica de Chile.
import pandas, os, re, datetime, sys
from unidecode import unidecode
if sys.getdefaultencoding() != 'utf-8':
# Character encoding may raise errors if set in ascii or other simple
# encodings which do not support spanish characters.
reload(sys)
sys.setdefaultencoding('utf-8')
def limpiar(a):
# Return a string cleaned of anomalous characters, spaces and commas
limpio = unidecode(a.replace(' ','_').replace('ó','o')).lower().replace(',','_')
while limpio[0] == '_':
limpio = limpio[1:]
while limpio[-1] == '_':
limpio = limpio[:-1]
return limpio
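# Example outputs of limpiar(), traced from the code above (sample inputs only):
#   limpiar('Cardones 220')  -> 'cardones_220'   (spaces -> underscores, lowercased)
#   limpiar(' Maitencillo')  -> 'maitencillo'    (leading '_' stripped)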
def SepararLineaSIC(a):
#Some names write '220 KV' instead of '220KV'; we make this change so they match
a = a.replace('k','K').replace('v','V').replace(' KV','KV')
try:
#Split on the hyphen and take the first element
se1 = limpiar(a.split('-')[0])
#Get the '220KV' token by dropping the last space-separated term from the string, then split on the hyphen
se2 = limpiar(a.replace(a.split(' ')[-1],'').split('-')[1])
return [se1,se2]
except:
print('Unable to split',a)
return [limpiar(a),limpiar(a)]
def SepararLineaSIC2(a):
a = a.replace('k','K').replace('v','V').replace(' KV','KV')
try:
#Split on the hyphen and take the first element
se1 = limpiar(a.split('-')[0])
#Get the '220KV' token by dropping the last space-separated term from the string, then split on the hyphen
se2 = limpiar(' '.join(a.split('-')[1].split('KV')[0].split(' ')[:-1]))
return [se1,se2]
except:
print('Unable to split',a)
return [limpiar(a),limpiar(a)]
def SepararLineaSING(a):
try:
a = a.split('kV ')[1]
#Split on the hyphen and take the first element
se1 = limpiar(a.split('-')[0])
#Split on the hyphen and take the second element
se2 = limpiar(a.split('-')[1])
return [se1,se2]
except:
print('Unable to split',a)
return [limpiar(a),limpiar(a)]
#########################
# Obtain the SIC data   #
#########################
#Unit conversion file to open
transmision = pandas.read_excel('capacidad_instalada_de_transmision.xlsx', sheetname= 'SIC', parse_cols = 'E:K', skiprows=6)
transmision.columns = ['SE','Tramo','dsa','Tension (KV)', 'N','Longitud (km)','Capacidad (MVA)']
#Get the columns
#for i,j in enumerate(transmision.columns.values):
# print(limpiar(j),'=',i)
linea = 0
tramo = 1
tension = 3
numerocircuitos = 4
longitud = 5
capacidad = 6
#Build a two-column data frame of substations per line
SE = pandas.DataFrame({'SE1' : [],'SE2' : [], 'SEalt1' : [],'SEalt2' : []})
for i in transmision.index:
#While there is data to read
if pandas.isnull(transmision.ix[i,linea]):
break
subs = SepararLineaSIC2(transmision.ix[i,tramo])
subs2 = SepararLineaSIC(transmision.ix[i,linea])
#print(subs,subs2)
fila = pandas.DataFrame([[subs[0],subs[1], subs2[0], subs2[1]]], columns=['SE1','SE2','SEalt1','SEalt2'])
SE = SE.append(fila, ignore_index = True)
#Build the new matrix with the substations, voltage and
neotransmision = pandas.concat([pandas.Series(['sic' for i in range(i)], name = 'Sistema'), SE.ix[:i,0], SE.ix[:i,1], SE.ix[:i,2], SE.ix[:i,3], transmision.ix[:i-1,3], transmision.iloc[:i,4], transmision.iloc[:i,5], transmision.iloc[:i,6]], names = None, axis = 1)
#########################
# Obtain the SING data  #
#########################
#Read the sheet, dropping the first two header lines (merged cells do not parse well...)
transmision = pandas.read_excel('capacidad_instalada_de_transmision.xlsx', sheetname= 'SING', parse_cols = 'E:J', skiprows=6,header = None)
transmision = transmision[2:].reset_index(drop = True)
linea = 0
tension = 1
numerocircuitos = 2
longitud = 3
capacidad = 5
#Build a two-column data frame of substations per line
SE = pandas.DataFrame({'SE1' : [],'SE2' : [], 'SEalt1' : [],'SEalt2' : []})
for i in transmision.index:
#While there is data to read
if pandas.isnull(transmision.ix[i,linea]):
break
subs = SepararLineaSING(transmision.ix[i,linea])
fila = pandas.DataFrame([[subs[0],subs[1],subs[0],subs[1]]], columns=['SE1','SE2','SEalt1','SEalt2'])
SE = SE.append(fila, ignore_index = True)
#If it has no limit, assign it the capacity
if transmision.ix[i,capacidad] == 'N/I' or pandas.isnull(transmision.ix[i,capacidad]):
transmision.ix[i,capacidad] = transmision.ix[i,4]
#Build the new matrix with the substations, voltage and
neotransmision2 = pandas.concat([pandas.Series(['sing' for i in range(i)], name = 'Sistema'), SE.ix[:i,0], SE.ix[:i,1], SE.ix[:i,0], SE.ix[:i,1], transmision.ix[:i,tension], transmision.ix[:i,numerocircuitos], transmision.iloc[:i,longitud], transmision.iloc[:i,capacidad]], names = None, axis = 1)
neotransmision2 = neotransmision2[:-1]
#Rename the columns
neotransmision2.columns = ['Sistema','SE1','SE2','SEalt1','SEalt2','Tension (KV)', 'N','Longitud (km)','Capacidad (MVA)']
#Join both systems
transmisionfinal = pandas.concat([neotransmision, neotransmision2])
#Convert columns to int
transmisionfinal[['Tension (KV)', 'N']] = transmisionfinal[['Tension (KV)', 'N']].astype(int)
#Write out the data
transmisionfinal.to_csv('transmision.csv', index = None , float_format = '%.2f')
| apache-2.0 | 7,827,939,213,781,622,000 | 38.496599 | 297 | 0.643472 | false |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/node_attribute/sub_tlvs/__init__.py | 1 | 14848 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import sub_tlv
class sub_tlvs(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/traffic-engineering/tlvs/tlv/node-attribute/sub-tlvs. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Sub-TLVs of the Node Attribute TLV of the Traffic
Engineering LSA
"""
__slots__ = ("_path_helper", "_extmethods", "__sub_tlv")
_yang_name = "sub-tlvs"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__sub_tlv = YANGDynClass(
base=YANGListType(
False,
sub_tlv.sub_tlv,
yang_name="sub-tlv",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sub-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
"opaque-lsa",
"traffic-engineering",
"tlvs",
"tlv",
"node-attribute",
"sub-tlvs",
]
def _get_sub_tlv(self):
"""
Getter method for sub_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/node_attribute/sub_tlvs/sub_tlv (list)
YANG Description: List of the Sub-TLVs contained within the Node Attribute
TLV
"""
return self.__sub_tlv
def _set_sub_tlv(self, v, load=False):
"""
Setter method for sub_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/node_attribute/sub_tlvs/sub_tlv (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_sub_tlv is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sub_tlv() directly.
YANG Description: List of the Sub-TLVs contained within the Node Attribute
TLV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
sub_tlv.sub_tlv,
yang_name="sub-tlv",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sub-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """sub_tlv must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,sub_tlv.sub_tlv, yang_name="sub-tlv", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="sub-tlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__sub_tlv = t
if hasattr(self, "_set"):
self._set()
def _unset_sub_tlv(self):
self.__sub_tlv = YANGDynClass(
base=YANGListType(
False,
sub_tlv.sub_tlv,
yang_name="sub-tlv",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sub-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
sub_tlv = __builtin__.property(_get_sub_tlv)
_pyangbind_elements = OrderedDict([("sub_tlv", sub_tlv)])
from . import sub_tlv
class sub_tlvs(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/traffic-engineering/tlvs/tlv/node-attribute/sub-tlvs. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Sub-TLVs of the Node Attribute TLV of the Traffic
Engineering LSA
"""
__slots__ = ("_path_helper", "_extmethods", "__sub_tlv")
_yang_name = "sub-tlvs"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__sub_tlv = YANGDynClass(
base=YANGListType(
False,
sub_tlv.sub_tlv,
yang_name="sub-tlv",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sub-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
"opaque-lsa",
"traffic-engineering",
"tlvs",
"tlv",
"node-attribute",
"sub-tlvs",
]
def _get_sub_tlv(self):
"""
Getter method for sub_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/node_attribute/sub_tlvs/sub_tlv (list)
YANG Description: List of the Sub-TLVs contained within the Node Attribute
TLV
"""
return self.__sub_tlv
def _set_sub_tlv(self, v, load=False):
"""
Setter method for sub_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/node_attribute/sub_tlvs/sub_tlv (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_sub_tlv is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sub_tlv() directly.
YANG Description: List of the Sub-TLVs contained within the Node Attribute
TLV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
sub_tlv.sub_tlv,
yang_name="sub-tlv",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sub-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """sub_tlv must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,sub_tlv.sub_tlv, yang_name="sub-tlv", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="sub-tlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__sub_tlv = t
if hasattr(self, "_set"):
self._set()
def _unset_sub_tlv(self):
self.__sub_tlv = YANGDynClass(
base=YANGListType(
False,
sub_tlv.sub_tlv,
yang_name="sub-tlv",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sub-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
sub_tlv = __builtin__.property(_get_sub_tlv)
_pyangbind_elements = OrderedDict([("sub_tlv", sub_tlv)])
| apache-2.0 | 8,897,465,015,285,164,000 | 36.589873 | 533 | 0.543373 | false |
Jackojc/Asciify | example/matrix.py | 1 | 1374 | from asciify import asciify
import random
import time
import pygame
width, height = (60, 30)
add_per_frame = 5
render = asciify(width, height, "font", fontsize=16)
random.seed(time.time())
Particles = [
[
random.randint(0, width),
random.randint(-1, height),
random.uniform(0.1, 1.5)
] for x in range(add_per_frame)
]
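# Each particle is [column, row, fall_speed]; the row grows by fall_speed every
# frame, so particles with a larger speed streak down the screen faster.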
chars = list("abcdefghijklmnopqrstuvwxyz0123456789!\"$%^&*()_+-=[]{}:;@'~#|\\<>?,./")
while True:
render.checkexit()
keys = render.listkeys()
for num, part in enumerate(Particles):
part[1] += part[2]
if part[1] > height+1:
del Particles[num]
colorrand = (random.randint(60, 255), random.randint(150, 255), random.randint(1, 100))
render.setString(part[0], part[1], random.choice(chars), color=colorrand)
Particles.extend([
[
random.randint(0, width),
random.randint(-1, height),
random.uniform(0.1, 1.5)
] for x in range(add_per_frame)
])
render.text(0, 0, "THE MATRIX EXAMPLE", center=(1, 1))
render.update(30) # Peasant FPS, I could add frame delays or something if I wanted 60fps but its beyond the scope of this example.
| mit | -4,344,498,825,365,630,500 | 31.714286 | 134 | 0.534207 | false |
SvichkarevAnatoly/Course-Python-Bioinformatics | semester2/task8/exercise2.py | 1 | 3387 | import numpy
import random
import matplotlib.pyplot as plot
from sklearn.tree import DecisionTreeRegressor
from sklearn import tree
a = 1
b = 2
# Build a simple data set with y = a + b*x^2 + random noise
nPoints = 1000
# x values for plotting
xPlot = [(float(i) / float(nPoints) - 0.5) for i in range(nPoints + 1)]
# x needs to be list of lists.
x = [[s] for s in xPlot]
# y (labels) has random noise added to x-value
# set seed
random.seed(1)
numpy.random.seed(1)
y = [a + b * s * s + numpy.random.normal(scale=0.1) for s in xPlot]
# take fixed test set 30% of sample
nSample = int(nPoints * 0.30)
idxTest = random.sample(range(nPoints), nSample)
idxTest.sort()
idxTrain = [idx for idx in range(nPoints) if not (idx in idxTest)]
# Define test and training attribute and label sets
xTrain = [x[r] for r in idxTrain]
xTest = [x[r] for r in idxTest]
yTrain = [y[r] for r in idxTrain]
yTest = [y[r] for r in idxTest]
# train a series of models on random subsets of the training data
# collect the models in a list and check error of composite as list grows
# maximum number of models to generate
numTreesMax = 30
# tree depth - typically at the high end
treeDepth = 5
# initialize a list to hold models
modelList = []
predList = []
eps = 0.3  # learning rate (shrinkage) applied to every tree's contribution
# initialize residuals to be the labels y
residuals = list(yTrain)
for iTrees in range(numTreesMax):
modelList.append(DecisionTreeRegressor(max_depth=treeDepth))
modelList[-1].fit(xTrain, residuals)
# make prediction with latest model and add to list of predictions
latestInSamplePrediction = modelList[-1].predict(xTrain)
# use new predictions to update residuals
residuals = [residuals[i] - eps * latestInSamplePrediction[i] \
for i in range(len(residuals))]
latestOutSamplePrediction = modelList[-1].predict(xTest)
predList.append(list(latestOutSamplePrediction))
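# Each pass implements one gradient-boosting step with shrinkage:
#   F_m(x) = F_{m-1}(x) + eps * h_m(x)
# where h_m is the tree fit to the current residuals. With squared error the
# residuals r_i = y_i - F_{m-1}(x_i) are exactly what is left for the next
# tree, which is why they shrink by eps * h_m after every iteration.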
# build cumulative prediction from first "n" models
mse = []
allPredictions = []
for iModels in range(len(modelList)):
# add the first "iModels" of the predictions and multiply by eps
prediction = []
for iPred in range(len(xTest)):
prediction.append(
sum([predList[i][iPred] for i in range(iModels + 1)]) * eps)
allPredictions.append(prediction)
errors = [(yTest[i] - prediction[i]) for i in range(len(yTest))]
mse.append(sum([e * e for e in errors]) / len(yTest))
nModels = [i + 1 for i in range(len(modelList))]
# mse plot
plot.plot(nModels, mse)
plot.axis('tight')
plot.xlabel('Number of Models in Ensemble')
plot.ylabel('Mean Squared Error')
plot.ylim((0.0, max(mse)))
# plot.show()
plot.savefig("mseEx2.png")
plot.close()
print min(mse)
# predictions plot
plotList = [0, 14, 29]
lineType = [':', '-.', '--']
plot.figure()
for i in range(len(plotList)):
iPlot = plotList[i]
textLegend = 'Prediction with ' + str(iPlot) + ' Trees'
plot.plot(xTest, allPredictions[iPlot],
label=textLegend, linestyle=lineType[i])
plot.plot(xTest, yTest, label='True y Value', alpha=0.25)
plot.legend(bbox_to_anchor=(1, 0.3))
plot.axis('tight')
plot.xlabel('x value')
plot.ylabel('Predictions')
# plot.show()
plot.savefig("predictionsEx2.png")
plot.close()
# save first 2 tree
with open("tree1Ex2.dot", 'w') as f1:
f1 = tree.export_graphviz(modelList[0], out_file=f1)
with open("tree2Ex2.dot", 'w') as f2:
f2 = tree.export_graphviz(modelList[1], out_file=f2)
| gpl-2.0 | -6,363,771,044,119,632,000 | 27.225 | 73 | 0.691172 | false |
chrisjdavie/shares | machine_learning/sklearn_dataset_format.py | 1 | 1160 | '''
Created on 2 Sep 2014
@author: chris
'''
'''File format - data, length of data, containing unicode
- target, length of data, contains int reference to target
- target_names, type names relative to target
- filenames, names of files storing data (probably target too)
'''
def main():
''' taken from the tutorials, I'm having a look at how they store datasets'''
from sklearn.datasets import fetch_20newsgroups
# import numpy as np
categories = ['alt.atheism', 'soc.religion.christian', 'comp.graphics', 'sci.med']
twenty_train = fetch_20newsgroups(subset='train',
categories=categories,
shuffle=True,
random_state=42)
print dir(twenty_train)
print twenty_train.keys()
# print twenty_train.data[0]
print twenty_train.target[0]
print len(twenty_train.filenames)
print twenty_train.filenames[0]
print twenty_train.target_names
if __name__ == '__main__':
main() | mit | 4,733,438,516,823,196,000 | 30.378378 | 86 | 0.552586 | false |
SUSE/kiwi | kiwi/filesystem/fat16.py | 1 | 1400 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
# project
from kiwi.filesystem.base import FileSystemBase
from kiwi.command import Command
class FileSystemFat16(FileSystemBase):
"""
**Implements creation of fat16 filesystem**
"""
def create_on_device(self, label: str = None):
"""
Create fat16 filesystem on block device
:param string label: label name
"""
device = self.device_provider.get_device()
if label:
self.custom_args['create_options'].append('-n')
self.custom_args['create_options'].append(label)
Command.run(
[
'mkdosfs', '-F16', '-I'
] + self.custom_args['create_options'] + [device]
)
| gpl-3.0 | -5,064,288,651,772,245,000 | 33.146341 | 70 | 0.666429 | false |
jiarong/SSUsearch | scripts/count-taxon.py | 1 | 3226 | #! /usr/bin/env python
# count the occurrences of each taxon in a mothur taxonomy file
# by gjr; 080614
"""
Count the occurrences of each taxon in a mothur taxonomy file
% python <thisFile> <sample.gg.taxonomy> <outfile.table>
"""
import sys
import os
import collections
#EXCLUDE = ['Archaea', 'Eukaryota', 'unknown']
EXCLUDE = []
LEVELS = 7
NA='Unclassified'
def read_mothur_taxonomy(f):
"""
Parse mothur classify.seqs output
Parameters:
-----------
f : str
file name of .taxonomy file from classify.seqs
Returns:
--------
dictionary
an dictionary of read name and tuples (each level of taxonomy)
"""
na_lis = ['', 'unknown', 'Unclassified',
'unclassified', 'other', 'unassigned']
d = {}
for n, line in enumerate(open(f)):
if line.startswith('#'):
continue
line = line.rstrip()
name, taxa = line.split('\t')
skip = False
for word in EXCLUDE:
if word in taxa:
skip = True
break
if skip:
continue
# the parsing of taxa works for both mothur output and this
taxa = taxa.rstrip(';') # for mothur classfy.seqs output
lis = taxa.split(';')
lis2 = []
for item in lis:
item = item.strip() # for copyrigher copy table ' ;' separater
if item.endswith(')'):
item = item.rsplit('(', 1)[0].strip()
# remove taxon level prefix, e.g. 'p__Firmicutes'
if '__' in item:
item = item.split('__', 1)[1]
#item = item.strip('"')
# green gene taxonomy has sapce
item = item.replace(' ', '_')
item = item.lower()
if item in na_lis:
item = NA
item = item.capitalize()
lis2.append(item)
t = tuple(lis2)
if name.endswith('/1'):
other = '{}/2'.format(name[:-2])
if other in d:
other_taxon = d[other]
if other_taxon.count(NA) > lis2.count(NA):
_ = d.pop(other)
d[name] = t
else:
d[name] = t
elif name.endswith('/2'):
other = '{}/1'.format(name[:-2])
if other in d:
other_taxon = d[other]
if other_taxon.count(NA) > lis2.count(NA):
_ = d.pop(other)
d[name] = t
else:
d[name] = t
else:
d[name] = t
return d
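# Worked example (hypothetical input line) traced through the parsing above:
#   'read_1\tBacteria(100);Firmicutes(97);unclassified(80);'
# confidence values and the trailing ';' are stripped, names are lowercased
# and re-capitalized, and na_lis entries map to 'Unclassified', giving:
#   d['read_1'] == ('Bacteria', 'Firmicutes', 'Unclassified')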
def main():
if len(sys.argv) != 3:
mes = ('Usage: python {} <sample.gg.taxonomy> <outfile.table>')
print >> sys.stderr, mes.format(os.path.basename(sys.argv[0]))
sys.exit(1)
taxonfile = sys.argv[1]
outfile = sys.argv[2]
d = read_mothur_taxonomy(taxonfile)
g_taxonomy = d.values()
d_count = collections.Counter(g_taxonomy)
with open(outfile, 'wb') as fw:
for key, cnt in sorted(d_count.items()):
taxon_string = ';'.join(key)
print >> fw, '{}\t{}'.format(taxon_string, cnt)
if __name__ == '__main__':
main()
| bsd-3-clause | -8,817,099,791,117,188,000 | 23.815385 | 77 | 0.490081 | false |
marekjm/pyversion | tests.py | 1 | 12014 | #!/usr/bin/env python3
import unittest
from pyversion.version import Version, Comparison, valid
# if set to True tests will be verbose
DEBUG = True
# tuple structure: (version, version, desired_result, strict)
versions_to_compare_lt = [ ('2.0.0', '3.0.0', True, True),
('3.0.1', '3.1.0', True, True),
('3.0.0', '3.0.1', True, True),
('3.2.1-alpha', '3.2.1-alpha.1', True, True),
('3.2.1-alpha.1', '3.2.1-beta', True, True),
('3.2.1-beta', '3.2.1-beta.4', True, True),
('3.2.1-beta.4', '3.2.1-beta.17', True, True),
('3.2.1-beta.17', '3.2.1-rc.8', True, True),
('3.2.1-rc.8', '3.2.1-rc.12', True, True),
('3.2.1', '3.2.2-alpha.1', True, True),
('3.2.3-rc.8', '3.2.3-rel.1', True, True),
('3.2.3-rc.8', '3.2.3-release.1', True, True),
('3.2.1+7', '3.2.2+6', True, True),
('6.4.8-a.3+17', '6.4.8-a.4+2', True, True),
('6.4.8', '6.40.8', True, True),
('3.2.1-rc.8', '3.2.1', True, True),
('3.2.1-alpha.1', '3.2.1-alpha.1.rel.3', True, True),
('0.0.2', '0.0.1', False, True),
('0.0.1-alpha.1', '0.0.1', True, True),
('0.0.1', '0.0.1-alpha.1', False, True),
# here starts list of non-strict version strigs
('0.0.0.1', '0.0.0.2', True, False),
('0.0.0.1', '0.0.1.0', True, False),
('1.0', '1.0.1', True, False),
('1', '1.0', True, False),
('1', '2', True, False),
('1-rc.7', '2-rc.2', True, False),
('28.0.1500.95-1', '29.0.1547.57-1', True, False),
('29.0.1547.57-1', '28.0.1500.95-1', False, False),
('1.8.3.4', '1.8.4', True, False),
('0.8.8.4-1', '0.8.8.4-2', True, False),
('0.8.8.4', '0.8.8.4-2', False, False),
('0.8.8.4-1', '0.8.8.4', True, False),
('0:0.9.4-1', '1:0.8', True, False),
]
versions_to_compare_gt = [ ('3.0.0', '2.0.0', True, True),
('3.0.1', '3.0.0', True, True),
('3.1.0', '3.0.0', True, True),
('3.1.0', '3.0.1', True, True),
('3.1.1', '3.1.0', True, True),
('3.0.1', '3.0.0-alpha', True, True),
('3.0.0-alpha.1', '3.0.0-alpha', True, True),
('3.0.0-beta', '3.0.0-alpha.1', True, True),
('3.0.0-beta.1', '3.0.0-beta', True, True),
('3.0.0-rc', '3.0.0-beta.1', True, True),
('3.0.0-rc.1', '3.0.0-rc', True, True),
('3.0.0', '3.0.0-rc.1', True, True),
('3.0.0', '3.0.0', False, True),
('0.2.0', '0.0.1', True, True),
('0.0.3', '0.1.0', False, True),
('0.0.3', '0.1.0-rc.1', False, True),
('0.0.1', '0.0.2', False, True),
('0.0.1', '0.0.1-alpha.1', True, True),
('0.0.1', '0.0.1-beta.1', True, True),
('0.0.1-beta.1', '0.0.1', False, True),
# here starts list of non-strict version strigs
('0.0.0.2', '0.0.0.1', True, False),
('0.0.1.0', '0.0.0.1', True, False),
('1.0.1', '1.0', True, False),
('1.0', '1', True, False),
('2', '1', True, False),
('1', '2', False, False),
('2-rc.2', '1-rc.7', True, False),
('28.0.1500.95-1', '29.0.1547.57-1', False, False),
('29.0.1547.57-1', '28.0.1500.95-1', True, False),
('1.8.4', '1.8.3.4', True, False),
('0.8.8.4-2', '0.8.8.4-1', True, False),
('5.4.3.2.1', '5.4.3.2.0', True, False),
('5.4.3.2.0', '5.4.3.2.1', False, False),
('1:0.9.4-1', '0:1.0.2', True, False),
]
versions_to_compare_ge = [ ('3.2.1', '3.2.1', True, True),
('3.2.2', '3.2.1', True, True),
('3.3.6-rc.7', '3.3.6-rc.5', True, True),
('3.3.5-rc.7', '3.3.6-rc.5', False, True),
('1:0.9.5-1', '0:0.9.5-1', True, False),
('1:0.9.5-1', '1:0.9.5-1', True, False),
('1:0.9.6-1', '1:0.9.5-1', True, False),
]
versions_to_compare_le = [ ('2.0.0', '3.0.0', True, True),
('2.0.0', '2.0.0', True, True),
('3.0.0', '3.0.1', True, True),
('3.0.0-alpha', '3.0.0-alpha.1', True, True),
('3.0.0-alpha.2', '3.0.0-alpha.2', True, True),
('3.0.0-rc.7', '3.1.0-alpha.2', True, True),
('3.0.0-alpha.6', '3.0.0-alpha.2', False, True),
('0:0.9.5-1', '1:0.9.5-1', True, False),
('1:0.9.5-1', '1:0.9.5-1', True, False),
('1:0.9.5-1', '1:0.9.6-1', True, False),
]
class ComparisonTests(unittest.TestCase):
def testLesserThan(self):
for first, second, result, strict in versions_to_compare_lt:
if DEBUG: print(first, '<', second)
first = Version(first, strict=strict)
second = Version(second, strict=strict)
self.assertEqual(result, Comparison(first, second).lt())
self.assertEqual(result, first < second)
def testGreaterThan(self):
if DEBUG: print()
for first, second, result, strict in versions_to_compare_gt:
if DEBUG: print('(', result, ')\t', first, '>', second)
first = Version(first, strict=strict)
second = Version(second, strict=strict)
self.assertEqual(result, Comparison(first, second).gt())
self.assertEqual(result, first > second)
def testGreaterOrEqual(self):
for first, second, result, strict in versions_to_compare_ge:
if DEBUG: print(first, '>=', second)
first = Version(first, strict=strict)
second = Version(second, strict=strict)
self.assertEqual(result, Comparison(first, second).ge())
self.assertEqual(result, first >= second)
def testLesserOrEqual(self):
for first, second, result, strict in versions_to_compare_le:
if DEBUG: print(first, '<=', second)
first = Version(first, strict=strict)
second = Version(second, strict=strict)
self.assertEqual(result, Comparison(first, second).le())
self.assertEqual(result, first <= second)
class InitializationTests(unittest.TestCase):
def testOnlyVersion(self):
v = Version('3.9.3')
self.assertEqual(3, v.major)
self.assertEqual(9, v.minor)
self.assertEqual(3, v.patch)
def testVersionAndPrerelease(self):
v = Version('3.9.3-alpha.1.release.3')
self.assertEqual(3, v.major)
self.assertEqual(9, v.minor)
self.assertEqual(3, v.patch)
self.assertEqual(['alpha', 1, 'release', 3], v.prerelease)
def testBuildMetadata(self):
v1 = Version('3.9.3-alpha.1+42')
v2 = Version('3.9.3+42')
self.assertEqual('42', v1.build)
self.assertEqual('42', v2.build)
class NonstandardInitializationTests(unittest.TestCase):
def testVersionAndPrerelease(self):
v = Version('3.9.3.0-alpha.1.release.3', strict=False)
self.assertEqual(3, v[0])
self.assertEqual(9, v[1])
self.assertEqual(3, v[2])
self.assertEqual(0, v[3])
self.assertEqual(['alpha', 1, 'release', 3], v.prerelease)
def testBuildMetadata(self):
v1 = Version('3.9.3.0-alpha.1+42', strict=False)
v2 = Version('3.9.3.0+42', strict=False)
self.assertEqual('42', v1.build)
self.assertEqual('42', v2.build)
class SatisfactionTests(unittest.TestCase):
def testMinimal(self):
v = Version('3.2.1')
self.assertEqual(True, v.satisfies(min='3.0.0'))
def testMaximal(self):
v = Version('1.8.12')
self.assertEqual(True, v.satisfies(max='1.8.12'))
def testBetween(self):
v = Version('3.2.1-rc.8')
self.assertEqual(True, v.satisfies(min='3.2.1-alpha.1', max='3.2.1-rc.12'))
def testExcept(self):
v = Version('3.2.1-rc.8')
self.assertEqual(False, v.satisfies(but=['3.2.1-rc.8']))
def testAll(self):
v = Version('3.2.1-rc.8')
self.assertEqual(True, v.satisfies())
class NonstandardSatisfactionTests(unittest.TestCase):
def testMinimal(self):
v = Version('3.2.1.0', strict=False)
self.assertEqual(True, v.satisfies(min='3.0.0'))
def testMaximal(self):
v = Version('1.8.12.13', strict=False)
self.assertEqual(True, v.satisfies(max='1.8.12.14'))
def testBetween(self):
v = Version('3.2.1.0-rc.8', strict=False)
self.assertEqual(True, v.satisfies(min='3.2.1.0-alpha.1', max='3.2.1.0-rc.12'))
def testExcept(self):
v = Version('3.2.1.0-rc.8', strict=False)
self.assertEqual(False, v.satisfies(min='3.2.0.19', max='3.2.2.0', but=['3.2.1.0-rc.8']))
def testAll(self):
v = Version('3.2.1.0-rc.8', strict=False)
self.assertEqual(True, v.satisfies())
class StringTests(unittest.TestCase):
def testStr(self):
v = Version('3.9.3-release.4+build.42')
if DEBUG: print(str(v))
self.assertEqual('3.9.3', str(v))
def testStrNonstandard(self):
v = Version('3.9.3.0-release.4+build.42', strict=False)
if DEBUG: print(str(v))
self.assertEqual('3.9.3.0', str(v))
def testRepr(self):
v = Version('3.9.3-release.4+build.42')
if DEBUG: print(repr(v))
self.assertEqual('3.9.3-release.4+build.42', repr(v))
def testReprNonstandard(self):
v = Version('3.9.3.0-release.4+build.42', strict=False)
if DEBUG: print(repr(v))
self.assertEqual('3.9.3.0-release.4+build.42', repr(v))
class ValidationTests(unittest.TestCase):
def testValid(self):
vs = [ '3.2.1',
'13.29.42',
'3.9.2-alpha.1.release.5',
'3.9.2+42',
'3.9.2-rc.6+2',
]
for i in vs:
self.assertEqual(True, valid(i))
def testValidNonstrictStrings(self):
vs = [ '123',
'1.2',
'3.2.1.2',
'3.2.1.2-rc.7',
'3.2.1.2-rc.7+build.13',
'0:3.2.1.2-rc.7+build.13',
]
for i in vs:
self.assertEqual(True, valid(i, strict=False))
def testInvalid(self):
vs = [ '3.2.1.2',
'3.9.2-alpha.1.?.release.5',
'3.9.c',
'a:3.9.0',
]
for i in vs:
self.assertEqual(False, valid(i))
if __name__ == '__main__': unittest.main()
| gpl-3.0 | 7,190,466,796,804,549,000 | 42.528986 | 97 | 0.43724 | false |
giubil/trackit | api/files/api/app/s3_billing_transfer.py | 1 | 8777 | from object_storage import S3BucketObjectStore
from config import BILLING_FILE_REGEX, CLIENT_BILLING_BUCKET, IMPORT_BILLING_AWS_KEY, IMPORT_BILLING_AWS_SECRET, LOCAL_BILLS_DIR
from es import client
from es.awsdetailedlineitem import AWSDetailedLineitem
from contextlib import contextmanager
from zipfile import ZipFile
from tempfile import mkdtemp, TemporaryFile
from shutil import rmtree
from datetime import date, datetime
import os
import io
import csv
import calendar
import elasticsearch.helpers
import itertools
import traceback
import boto3
def _is_line_item(line):
return line['RecordType'] == 'LineItem'
def _str_to_date(s):
return datetime.strptime(s, '%Y-%m-%d %H:%M:%S')
@contextmanager
def extract_zipped_csvs(zip_stream, conversion=csv.reader):
with TemporaryFile() as zip_file_stream:
while True:
chunk = zip_stream.read(2 ** 15)
if not chunk:
break
zip_file_stream.write(chunk)
zip_file = ZipFile(zip_file_stream)
def files():
for name in zip_file.namelist():
if not name.lower().endswith('.csv'):
continue
file = zip_file.open(name)
yield name, conversion(file)
try:
yield files()
except:
pass
_index_es = 'awsdetailedlineitem'
_type_es = 'a_ws_detailed_lineitem'
session = boto3.Session(aws_access_key_id=IMPORT_BILLING_AWS_KEY,
aws_secret_access_key=IMPORT_BILLING_AWS_SECRET)
_converted_fields = {
'PayerAccountId': str,
'LinkedAccountId': str,
# 'RecordId': int,
'RateId': int,
'SubscriptionId': int,
'PricingPlanId': int,
'UsageQuantity': float,
'Rate': float,
'BlendedRate': float,
'UnBlendedRate': float,
'Cost': float,
'BlendedCost': float,
'UnBlendedCost': float,
'ReservedInstance': (lambda s: s == 'Y'),
'UsageStartDate': _str_to_date,
'UsageEndDate': _str_to_date,
'UsageType': str,
}
_converted_name = {
'PayerAccountId': 'payer_account_id',
'LinkedAccountId': 'linked_account_id',
'RecordId': 'record_id',
'ProductName': 'product_name',
'RateId': 'rate_id',
'SubscriptionId': 'subscription_id',
'PricingPlanId': 'pricing_plan_id',
'UsageType': 'usage_type',
'Operation': 'operation',
'AvailabilityZone': 'availability_zone',
'ReservedInstance': 'reserved_instance',
'ItemDescription': 'item_description',
'UsageStartDate': 'usage_start_date',
'UsageEndDate': 'usage_end_date',
'UsageQuantity': 'usage_quantity',
'UsageType': 'usage_type',
'Rate': 'rate',
'BlendedRate': 'rate',
'UnBlendedRate': 'un_blended_rate',
'Cost': 'cost',
'BlendedCost': 'cost',
'UnBlendedCost': 'un_blended_cost',
'ResourceId': 'resource_id',
'Tags': 'tag',
}
_csv_path = lambda x: '{}{}'.format(LOCAL_BILLS_DIR, x)
def _line_to_document(line):
try:
line['Tags'] = []
deleted_fields = set(('InvoiceID', 'RecordType'))
for k, v in line.iteritems():
if k.startswith('aws:') or k.startswith('user:'):
if v:
line['Tags'].append({
'key': k,
'value': v,
})
deleted_fields.add(k)
elif not v and k != 'Tags':
deleted_fields.add(k)
if not line['Tags']:
deleted_fields.add('Tags')
for k in deleted_fields:
del line[k]
for k, v in line.iteritems():
if k in _converted_fields:
line[k] = _converted_fields[k](v)
res = {}
for k, v in line.iteritems():
if k in _converted_name:
res[_converted_name[k]] = v
return res
except:
print("------")
print(line)
traceback.print_exc()
return None
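# Sketch of the transformation (hypothetical CSV row, already filtered to a
# LineItem): tag columns are folded into a 'tag' list, empty fields dropped,
# values converted, and only keys present in _converted_name survive:
#   {'PayerAccountId': '1234', 'Cost': '0.42', 'user:env': 'prod', 'InvoiceID': 'x'}
#   -> {'payer_account_id': '1234', 'cost': 0.42,
#       'tag': [{'key': 'user:env', 'value': 'prod'}]}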
def _document_to_index(index):
def do_document_to_index(document):
try:
return {
'_index': index,
'_id': document['record_id'],
'_type': _type_es,
'_source': document,
} if 'record_id' in document else None
except:
print("------")
print(document)
traceback.print_exc()
return None
return do_document_to_index
def _clean_aws_discounts_in_es(month, es, account_id):
month = datetime.combine(month, datetime.min.time())
date_from = month.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
date_to = month.replace(day=calendar.monthrange(month.year, month.month)[1], hour=0, minute=59, second=59, microsecond=999999)
response = es.search(
index=_index_es,
filter_path=["hits.hits._id"],
body={"size": 10000, "query": {"bool": {"filter": [
{"term": {"item_description": "PAR_APN_ProgramFee_2500"}},
{"term": {"linked_account_id": account_id}},
{"range": {"usage_start_date": {"from": date_from, "to": date_to}}}
]}}})
if 'hits' not in response or 'hits' not in response['hits']:
return
ids = [
line['_id']
for line in response['hits']['hits']
]
if len(ids) == 0:
return
bulk_body = [
'{{"delete": {{"_index": "{}", "_type": "{}", "_id": "{}"}}}}'.format(_index_es, _type_es, id)
for id in ids
]
es.bulk('\n'.join(bulk_body), timeout='120s', request_timeout=120)
def _import_bill_to_es(bill, es, name):
tmp_file = bill.get_file() if CLIENT_BILLING_BUCKET else bill
account_id = BILLING_FILE_REGEX.search(name).group('account_id')
print name
print ' Cleaning'
_clean_aws_discounts_in_es(date(int(name[:-11][-4:]), int(name[:-8][-2:]), 1), es, account_id)
print ' Extracting'
with extract_zipped_csvs(tmp_file, lambda x: x) as files:
for fi, csvfile in files:
reader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
line_items = itertools.ifilter(_is_line_item, reader)
documents = itertools.ifilter(bool, itertools.imap(_line_to_document, line_items))
actions = itertools.ifilter(bool, itertools.imap(_document_to_index(_index_es), documents))
print ' Importing'
elasticsearch.helpers.bulk(es, actions, timeout='120s', request_timeout=120, chunk_size=200)
tmp_file.close()
print ' Ok'
def _upload_bill_to_s3(bill, session, force_yield=False):
if not CLIENT_BILLING_BUCKET:
if not os.path.exists(_csv_path(bill.key())):
print bill.key()
print ' Downloading'
if not os.path.exists(_csv_path('')):
os.mkdir(_csv_path(''))
bill.get_file(f=io.open(_csv_path(bill.key()), 'w+b'))
print ' Ok'
return bill.key()
return
s3 = session.resource('s3')
up_bill = S3BucketObjectStore(s3.Bucket(CLIENT_BILLING_BUCKET)).object(bill.key())
if not up_bill.exists() or up_bill.size() != bill.size():
print bill.key()
print ' Downloading'
f = bill.get_file()
print ' Uploading'
up_bill.put_file(f)
print ' Ok'
return bill.key()
elif force_yield:
return bill.key()
def prepare_bill_for_s3(key, force_yield=False):
'''
- Download bills (cf. BILLING_FILE_REGEX) from the client's S3 bucket if they differ from our S3.
- Upload downloaded bills on our S3.
- Yield name of uploaded bills.
:param key: models.AWSKey
:param force_yield: yield name of all bills instead of uploaded bills only
:return: generator (list of string)
'''
if key.billing_bucket_name is None:
return
client_session = key.get_boto_session()
client_s3 = client_session.resource('s3')
bucket = sorted(S3BucketObjectStore(client_s3.Bucket(key.billing_bucket_name)), key=lambda x: x.key(), reverse=True)
for bill in bucket:
m = BILLING_FILE_REGEX.match(bill.key())
if m is not None:
yield _upload_bill_to_s3(bill, session, force_yield)
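# Minimal usage sketch (assumes `key` is a models.AWSKey with a configured
# billing bucket; _upload_bill_to_s3 returns None for unchanged bills, hence
# the filter):
#
#   new_bills = [name for name in prepare_bill_for_s3(key) if name]
#   prepare_bill_for_es(new_bills)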
def prepare_bill_for_es(tr_bills):
'''
- Download bills in tr_bills from our S3
- Process zip and csv
- Import data in ES
:param tr_bills: list of string
:return:
'''
if not tr_bills:
return
s3 = session.resource('s3')
AWSDetailedLineitem.init()
for bill in tr_bills:
if bill:
s3_bill = S3BucketObjectStore(s3.Bucket(CLIENT_BILLING_BUCKET)).object(bill) if CLIENT_BILLING_BUCKET else io.open(_csv_path(bill), 'r+b')
if not CLIENT_BILLING_BUCKET or s3_bill.exists():
_import_bill_to_es(s3_bill, client, bill)
| apache-2.0 | 2,701,550,056,858,239,000 | 31.872659 | 150 | 0.583115 | false |
ninly/pphrase | pphrase.py | 1 | 5060 | """ pphrase.py : Generate a random passphrase
Generate a random passphrase from a subset of the most common
words (max 10000) in Google's trillion-word corpus. See
http://xkcd.com/936 for the motivation and inspiration.
Licensed under terms of MIT license (see LICENSE-MIT)
Copyright (c) 2014 Jason Conklin, <[email protected]>
Usage:
pphrase.py [ -L | -R | -C | -T ] [ options ]
Options:
-h, --help Show usage help (this screen).
-v, --version Show version number and exit.
-L, --normal Normal output (like this) [default].
-R, --running Run output together (likethis).
-C, --camelcase Output in CamelCase (LikeThis).
-T, --titlecase Output in Title Case (Like This).
-w N, --words=N Number of words in passphrase
[default: 4].
-m MAXWORD, --maxword=MAXWORD Maximum word length, in characters
[default: 10].
-n MINWORD, --minword=MINWORD Maximum word length, in characters
[default: 2].
-p POOL, --poolsize=POOL Select from most common POOL words
[default: 2048].
"""
import os
import random
from docopt import docopt
basedir = os.path.dirname(os.path.abspath(__file__))+'/'
if __name__ == "__main__":
arguments = docopt(__doc__, version='0.0.4')
class ArgError(Exception):
"""Error class with no ability to can."""
def __init__(self, value="could not even."):
self.value = value
def __str__(self):
return str(self.value)
def sanitize_args():
"""Check input args for sanity."""
try:
numwords = int(arguments['--words'])
poolsize = int(arguments['--poolsize'])
minword = int(arguments['--minword'])
maxword = int(arguments['--maxword'])
except ValueError:
print("Error: Option arguments must be integers.")
return 1
try:
if (minword < 1) or (maxword < 1) or (numwords < 1):
raise ArgError("word count and length must be positive integers.")
if (poolsize > 10000) or (poolsize < 1):
raise ArgError("pool size must be between 1 and 10000.")
except ArgError as e:
print('Could not even: {}'.format(e))
return 1
return 0
def get_pool(filename = basedir+'wordlist/google-10000-english.txt'):
"""Generate word pool to user specifications."""
poolsize = int(arguments['--poolsize'])
minword = int(arguments['--minword'])
maxword = int(arguments['--maxword'])
with open(filename,'r') as f:
lines = list(f)
f.close()
words = list()
# cull outsized words
for line in lines:
if len(line.strip()) >= minword and len(line.strip()) <= maxword:
words.append(line.strip())
# only keep poolsize words
try:
if len(words) < poolsize:
# words_avail = len(words)
raise ArgError("only "+str(len(words))+" words in specified length range.")
except ArgError as e:
print('Could not even: {}'.format(e))
return
except Exception as e:
print('Could not even: {}'.format(e))
return
else:
words = list(words)[:poolsize]
return words
def get_mode():
mode = str()
if arguments['--running']:
mode = 'running'
elif arguments['--camelcase']:
mode = 'camelcase'
elif arguments['--titlecase']:
mode = 'titlecase'
else:
mode = 'normal'
return mode
def build_pph(numwords, mode='normal'):
"""Build the passphrase."""
try:
wordpool = get_pool()
if not wordpool:
raise ValueError('Could not generate specified word pool.')
if len(wordpool) < numwords:
raise ValueError('Word pool not large enough to generate '\
+'passphrase of specified length.')
except ValueError as e:
print('Could not even: {}'.format(e))
return
pph_words = list()
pph_str = str()
while len(pph_words) < numwords:
next_word = random.choice(wordpool)
if next_word not in pph_words:
pph_words.append(next_word)
if (mode == 'normal'):
pph_str = ' '.join(pph_words)
if (mode == 'running'):
pph_str = ''.join(pph_words)
if (mode == 'titlecase'):
for i in xrange(numwords):
pph_words[i] = pph_words[i].capitalize()
pph_str = ' '.join(pph_words)
if (mode == 'camelcase'):
for i in xrange(numwords):
pph_words[i] = pph_words[i].capitalize()
pph_str = ''.join(pph_words)
return pph_str
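# Example invocation (illustrative only -- actual words depend on the random
# pool selection):
#   build_pph(4, mode='camelcase')  ->  e.g. 'TimeGroupWaterLight'
#   build_pph(4, mode='normal')     ->  e.g. 'time group water light'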
def main():
"""Output passphrase."""
try:
if sanitize_args(): raise ArgError
except ArgError:
return
numwords = int(arguments['--words'])
mode = get_mode()
pph = build_pph(numwords, mode)
if pph: print(pph)
if __name__ == "__main__":
main()
| mit | 5,217,362,287,712,699,000 | 29.481928 | 87 | 0.556917 | false |
dreamhost/akanda-appliance | test/unit/drivers/test_hostname.py | 1 | 2016 | # Copyright 2014 DreamHost, LLC
#
# Author: DreamHost, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest2 import TestCase
import mock
from akanda.router.drivers import hostname, ip
CONFIG = mock.Mock()
CONFIG.hostname = 'akanda'
class HostnameTestCase(TestCase):
"""
"""
def setUp(self):
self.mock_execute = mock.patch('akanda.router.utils.execute').start()
self.mock_replace_file = mock.patch(
'akanda.router.utils.replace_file'
).start()
self.addCleanup(mock.patch.stopall)
self.mgr = hostname.HostnameManager()
def test_update_hostname(self):
self.mgr.update_hostname(CONFIG)
self.mock_execute.assert_has_calls([
mock.call(['/bin/hostname', 'akanda'], 'sudo'),
mock.call(['mv', '/tmp/hostname', '/etc/hostname'], 'sudo')
])
@mock.patch.object(ip.IPManager, 'get_management_address')
def test_update_hosts(self, addr):
expected = [
'127.0.0.1 localhost',
'::1 localhost ip6-localhost ip6-loopback',
'fdca:3ba5:a17a:acda:f816:3eff:fe66:33b6 akanda'
]
addr.return_value = 'fdca:3ba5:a17a:acda:f816:3eff:fe66:33b6'
self.mgr.update_hosts(CONFIG)
self.mock_execute.assert_has_calls([
mock.call(['mv', '/tmp/hosts', '/etc/hosts'], 'sudo')
])
self.mock_replace_file.assert_has_calls([mock.call(
'/tmp/hosts',
'\n'.join(expected))
])
| apache-2.0 | 8,984,808,868,207,687,000 | 31.516129 | 77 | 0.638889 | false |
kratorius/ads | python/dictionary/BST.py | 1 | 14443 | import unittest
import random
random.seed()
class BSTDict(object):
""""
Implementation of a dictionary based on binary search trees.
"""
class BSTNode(object):
def __init__(self, key, value):
self.key = key
self.value = value
self.parent = None
self.left, self.right = None, None
def __str__(self):
return "<%s:%s>" % (self.key, self.value)
def __lt__(self, other):
return self.key < other.key
def __eq__(self, other):
return isinstance(other, BSTDict.BSTNode) and self.key == other.key
def __contains__(self, key):
if self.key == key:
return True
if key < self.key:
return self.left is not None and key in self.left
else:
return self.right is not None and key in self.right
# for some reason PyCharm warns about a possible type exception about
# self.left/right being None, but we are actually checking that...
# noinspection PyTypeChecker
def __len__(self):
left = len(self.left) if self.left is not None else 0
right = len(self.right) if self.right is not None else 0
return 1 + left + right
def insert(self, node):
if node == self:
# the key must be the same, there's no point in updating that too
self.value = node.value
elif node < self:
if self.left is None:
self.left = node
self.left.parent = self
else:
self.left.insert(node)
else: # node > self
if self.right is None:
self.right = node
self.right.parent = self
else:
self.right.insert(node)
def keys(self):
left = self.left.keys() if self.left is not None else []
right = self.right.keys() if self.right is not None else []
return left + [self.key] + right
def values(self):
left = self.left.values() if self.left is not None else []
right = self.right.values() if self.right is not None else []
return left + [self.value] + right
def items(self):
left = self.left.items() if self.left is not None else []
right = self.right.items() if self.right is not None else []
return left + [(self.key, self.value)] + right
def __successor(self):
succ = self.right
while succ.left is not None:
succ = succ.left
return succ
@staticmethod
def delete(root, key):
if root is None:
# nothing to delete
return None
if key < root.key:
BSTDict.BSTNode.delete(root.left, key)
return root
elif key > root.key:
BSTDict.BSTNode.delete(root.right, key)
return root
# at this point root == key
# we can have three cases:
# 1) the node is a leaf
# 2) the node has only one child
# 3) the node has two children
#
# the first two cases are straightforward, while in the last one we
# must relabel the node with the key of its successor, which
# happens to be the leftmost descendant in the right subtree
parent = root.parent
if root.left is not None and root.right is not None:
# node has two children
# rather than complicating things by changing pointers, just
# replace keys and values
succ = root.__successor()
root.key, succ.key = succ.key, root.key
root.value, succ.value = succ.value, root.value
BSTDict.BSTNode.delete(root.right, succ.key)
elif root.left is not None:
# 1 child (the left one): absorb the child's payload and both subtrees
child = root.left
root.key, root.value = child.key, child.value
root.left, root.right = child.left, child.right
if child.left is not None:
child.left.parent = root
if child.right is not None:
child.right.parent = root
elif root.right is not None:
# 1 child (the right one): absorb the child's payload and both subtrees
child = root.right
root.key, root.value = child.key, child.value
root.left, root.right = child.left, child.right
if child.left is not None:
child.left.parent = root
if child.right is not None:
child.right.parent = root
else:
# leaf
if parent is not None and parent.left == root:
parent.left = None
elif parent is not None and parent.right == root:
parent.right = None
return None
return root
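# Deletion sketch for the two-children case above: in the tree
#   2 -> (1, 7), 7 -> (3, 8), 3 -> (-, 4)
# deleting 2 finds the successor 3 (leftmost node of the right subtree),
# swaps its key/value into the node holding 2, then recursively deletes the
# relabelled node, whose single child 4 is absorbed: 3 -> (1, 7), 7 -> (4, 8).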
def p(self, indent_str=""):
string = "%s%s:%s (parent:%s)" % (indent_str, self.key, self.value, self.parent)
print(string)
if self.left is None:
print("%sLEFT NULL" % (indent_str + " "))
else:
self.left.p(indent_str + " ")
if self.right is None:
print("%sRIGHT NULL" % (indent_str + " "))
else:
self.right.p(indent_str + " ")
def walk(self):
if self.left is not None:
for item in self.left.walk():
yield item
yield self
if self.right is not None:
for item in self.right.walk():
yield item
def __init__(self):
self.root = None
def __getitem__(self, item):
node = self.root
while node is not None:
if node.key == item:
return node.value
elif item < node.key:
node = node.left
else: # item > node.key
node = node.right
raise KeyError("key not found")
def __setitem__(self, key, value):
if self.root is None:
self.root = BSTDict.BSTNode(key, value)
else:
self.root.insert(BSTDict.BSTNode(key, value))
def __delitem__(self, key):
if self.root is not None:
self.root = BSTDict.BSTNode.delete(self.root, key)
def __contains__(self, key):
return self.root is not None and key in self.root
def __len__(self):
if self.root is None:
return 0
return len(self.root)
def keys(self):
if self.root is None:
return []
else:
return self.root.keys()
def values(self):
if self.root is None:
return []
else:
return self.root.values()
def items(self):
if self.root is None:
return []
else:
return self.root.items()
def iterkeys(self):
if self.root is None:
return
for node in self.root.walk():
yield node.key
def itervalues(self):
if self.root is None:
return
for node in self.root.walk():
yield node.value
def iteritems(self):
if self.root is None:
return
for node in self.root.walk():
yield node.key, node.value
def clear(self):
self.root = None
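# Minimal usage sketch (the tests below exercise the same API):
#
#   d = BSTDict()
#   d["b"], d["a"] = 2, 1
#   d.keys()        # -> ["a", "b"]  (always sorted by key)
#   "a" in d        # -> True
#   del d["a"]
#   len(d)          # -> 1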
class BSTTest(unittest.TestCase):
def test_empty_dict(self):
b = BSTDict()
self.assertEqual(len(b), 0)
self.assertEqual(len(b.keys()), 0)
def test_add_items(self):
b = BSTDict()
b["item1"] = "value1"
b["item2"] = "value2"
b["item3"] = "value3"
self.assertEqual(len(b), 3)
self.assertEqual(b["item1"], "value1")
self.assertEqual(b["item2"], "value2")
self.assertEqual(b["item3"], "value3")
b["new item"] = "new value"
self.assertEqual(b["new item"], "new value")
def test_large_add(self):
vals = range(1, 10000)
random.shuffle(vals)
b = BSTDict()
for val in vals:
b[val] = val
for val in vals:
self.assertEqual(b[val], val)
def test_override(self):
b = BSTDict()
b["key"] = "original value"
self.assertEqual(b["key"], "original value")
b["key"] = "new value"
self.assertEqual(b["key"], "new value")
def test_unexisting_key(self):
b = BSTDict()
with self.assertRaises(KeyError):
# noinspection PyStatementEffect
b["invalid key"]
self.fail("should have thrown KeyError")
def test_delete(self):
b = BSTDict()
b["2"] = "value 2"
b["1"] = "value 1"
b["7"] = "value 7"
b["4"] = "value 4"
b["8"] = "value 8"
b["3"] = "value 3"
b["6"] = "value 6"
b["5"] = "value 5"
# remove a leaf
del b["5"]
self.assertEqual(len(b), 7)
self.assertFalse("5" in b)
# another deletion on the same key should be ignored
del b["5"]
self.assertEqual(len(b), 7)
self.assertFalse("5" in b)
# remove a node with two children
del b["4"]
self.assertEqual(len(b), 6)
self.assertFalse("4" in b)
# remove a node with one child
del b["6"]
self.assertEqual(len(b), 5)
self.assertFalse("6" in b)
# remove the root
del b["2"]
self.assertEqual(len(b), 4)
self.assertFalse("2" in b)
# check that every other key is present
self.assertEqual(b.keys(), ["1", "3", "7", "8"])
def test_delete_single_node(self):
b = BSTDict()
b["key"] = "value"
self.assertTrue("key" in b)
self.assertEqual(len(b), 1)
del b["key"]
self.assertFalse("key" in b)
self.assertEqual(len(b), 0)
def test_delete_random_nodes(self):
# this will probably cover any case I didn't think of
# (or at least most of them)
        vals = list(range(1, 10000))
random.shuffle(vals)
b = BSTDict()
for val in vals:
b[val] = val
for val in vals:
del b[val]
for val in vals:
self.assertFalse(val in b)
self.assertEqual(len(b), 0)
def test_delete_twice(self):
b = BSTDict()
b["key"] = "value"
del b["key"]
try:
del b["key"]
except Exception as ex:
self.fail("exception raised: %s" % ex)
def test_delete_leaf_from_ordered_tree(self):
b = BSTDict()
b["1"] = "1"
b["2"] = "2"
b["3"] = "3"
b["4"] = "4"
b["5"] = "5"
        self.assertEqual(5, len(b))
        self.assertIn("5", b)
        del b["5"]
        self.assertEqual(4, len(b))
        self.assertNotIn("5", b)
        del b["4"]
        self.assertEqual(3, len(b))
        self.assertNotIn("4", b)
        del b["3"]
        self.assertEqual(2, len(b))
        self.assertNotIn("3", b)
        del b["2"]
        self.assertEqual(1, len(b))
        self.assertNotIn("2", b)
        del b["1"]
        self.assertEqual(0, len(b))
        self.assertNotIn("1", b)
def test_delete_leaf(self):
b = BSTDict()
b[5] = 5
b[4] = 4
b[6] = 6
# each of these operations will delete a leaf
del b[4]
self.assertEqual(len(b), 2)
del b[6]
self.assertEqual(len(b), 1)
del b[5]
self.assertEqual(len(b), 0)
def test_keys(self):
b = BSTDict()
self.assertEqual(b.keys(), [])
b["a"], b["b"], b["c"], b["d"] = 1, 2, 3, 4
self.assertEqual(b.keys(), ["a", "b", "c", "d"])
def test_keys_ordered(self):
        # this being a BST, keys are always returned in sorted (alphabetical) order
b = BSTDict()
self.assertEqual(b.keys(), [])
for item in "hello world":
b[item] = item
self.assertEqual(b.keys(), [" ", "d", "e", "h", "l", "o", "r", "w"])
def test_values(self):
b = BSTDict()
self.assertEqual(b.values(), [])
b["a"], b["b"], b["c"], b["d"] = 1, 2, 3, 4
self.assertEqual(b.values(), [1, 2, 3, 4])
def test_items(self):
b = BSTDict()
self.assertEqual(b.items(), [])
b["a"], b["b"], b["c"], b["d"] = 1, 2, 3, 4
self.assertEqual(b.items(), [
("a", 1), ("b", 2), ("c", 3), ("d", 4)
])
def test_iterkeys(self):
b = BSTDict()
b["a"], b["b"], b["c"], b["d"] = 1, 2, 3, 4
expected_list = ["a", "b", "c", "d"]
for expected, item in zip(expected_list, b.iterkeys()):
self.assertEqual(expected, item)
# the generator must reset itself
for expected, item in zip(expected_list, b.iterkeys()):
self.assertEqual(expected, item)
def test_itervalues(self):
b = BSTDict()
b["a"], b["b"], b["c"], b["d"] = 1, 2, 3, 4
expected_list = [1, 2, 3, 4]
for expected, item in zip(expected_list, b.itervalues()):
self.assertEqual(expected, item)
# the generator must reset itself
for expected, item in zip(expected_list, b.itervalues()):
self.assertEqual(expected, item)
def test_iteritems(self):
b = BSTDict()
b["a"], b["b"], b["c"], b["d"] = 1, 2, 3, 4
expected_list = [("a", 1), ("b", 2), ("c", 3), ("d", 4)]
for expected, item in zip(expected_list, b.iteritems()):
self.assertEqual(expected, item)
# the generator must reset itself
for expected, item in zip(expected_list, b.iteritems()):
self.assertEqual(expected, item)
def test_clear(self):
b = BSTDict()
for val in range(1, 100):
b[val] = val
for val in range(1, 100):
self.assertIsNotNone(b[val])
b.clear()
for val in range(1, 100):
with self.assertRaises(KeyError):
# noinspection PyStatementEffect
b[val]
def test_contains(self):
b = BSTDict()
b["key"] = "value"
b["another key"] = "another value"
b["hello"] = "world"
self.assertTrue("key" in b)
self.assertTrue("another key" in b)
self.assertTrue("hello" in b)
self.assertFalse("not a key" in b)
self.assertFalse("invalid" in b)
self.assertFalse("whatever" in b)
if __name__ == "__main__":
unittest.main()
| mit | -827,030,094,329,402,400 | 29.089583 | 92 | 0.495396 | false |
Jorge-C/bipy | skbio/maths/stats/distribution.py | 1 | 6102 | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
"""Translations of functions from Release 2.3 of the Cephes Math Library,
which is (c) Stephen L. Moshier 1984, 1995.
"""
from __future__ import division
from math import atan, exp, log, sqrt
# Gamma, lgam, MAXGAM, MAXLOG and MINLOG are referenced by pseries() below;
# they are assumed to be provided by the companion `special` module alongside
# the other Cephes helpers.
from skbio.maths.stats.special import (fix_rounding_error, expm1, log1p, betai,
                                       igamc, erf, erfc, Gamma, lgam, GB,
                                       SQRTH, LP, LQ, EQ, MACHEP, MAXGAM,
                                       MAXLOG, MINLOG, PI)
def chi_high(x, df):
"""Returns right-hand tail of chi-square distribution (x to infinity).
df, the degrees of freedom, ranges from 1 to infinity (assume integers).
Typically, df is (r-1)*(c-1) for a r by c table.
Result ranges from 0 to 1.
See Cephes docs for details.
"""
x = fix_rounding_error(x)
if x < 0:
raise ValueError("chi_high: x must be >= 0 (got %s)." % x)
if df < 1:
raise ValueError("chi_high: df must be >= 1 (got %s)." % df)
return igamc(df / 2, x / 2)
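# Example (illustrative): chi_high(3.84, 1) is ~0.05 -- the classic 5%
# cutoff for a chi-square test with one degree of freedom.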
def z_high(x):
"""Returns right-hand tail of z distribution (0 to x).
x ranges from -infinity to +infinity; result ranges from 0 to 1
See Cephes docs for details."""
y = x * SQRTH
z = abs(y)
if z < SQRTH:
return 0.5 - 0.5 * erf(y)
else:
if x < 0:
return 1 - 0.5 * erfc(z)
else:
return 0.5 * erfc(z)
def zprob(x):
"""Returns both tails of z distribution (-inf to -x, inf to x)."""
return 2 * z_high(abs(x))
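# Example (illustrative): zprob(1.96) is ~0.05, the familiar two-tailed 5%
# significance level of the standard normal distribution.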
def t_low(t, df):
"""Returns left-hand tail of Student's t distribution (-infinity to x).
df, the degrees of freedom, ranges from 1 to infinity.
Typically, df is (n-1) for a sample size of n.
Result ranges from 0 to 1.
See Cephes docs for details.
"""
if df < 1:
raise ValueError("t_low: df must be >= 1 (got %s)." % df)
return stdtr(df, t)
def t_high(t, df):
"""Returns right-hand tail of Student's t distribution (x to infinity).
df, the degrees of freedom, ranges from 1 to infinity.
Typically, df is (n-1) for a sample size of n.
Result ranges from 0 to 1.
See Cephes docs for details.
"""
if df < 1:
raise ValueError("t_high: df must be >= 1 (got %s)." % df)
return stdtr(df, -t) # distribution is symmetric
def tprob(t, df):
"""Returns both tails of t distribution (-infinity to -x, infinity to x)"""
return 2 * t_high(abs(t), df)
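# Example (illustrative): tprob(2.0, 60) is ~0.05; 2.00 is the classic
# two-tailed 5% critical value of t with 60 degrees of freedom.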
def f_high(df1, df2, x):
"""Returns right-hand tail of f distribution (x to infinity).
Result ranges from 0 to 1.
See Cephes docs for details.
"""
return fdtrc(df1, df2, x)
def fdtrc(a, b, x):
"""Returns right tail of F distribution, x to infinity.
See Cephes docs for details.
"""
if min(a, b) < 1:
raise ValueError("F a and b (degrees of freedom) must both be >= 1.")
if x < 0:
raise ValueError("F distribution value of f must be >= 0.")
w = float(b) / (b + a * x)
return betai(0.5 * b, 0.5 * a, w)
def binomial_high(successes, trials, prob):
"""Returns right-hand binomial tail (X > successes) given prob(success)."""
if -1 <= successes < 0:
return 1
return bdtrc(successes, trials, prob)
def bdtrc(k, n, p):
"""Complement of binomial distribution, k+1 through n.
Uses formula bdtrc(k, n, p) = betai(k+1, n-k, p)
See Cephes docs for details.
"""
p = fix_rounding_error(p)
if (p < 0) or (p > 1):
raise ValueError("Binomial p must be between 0 and 1.")
if (k < 0) or (n < k):
raise ValueError("Binomial k must be between 0 and n.")
if k == n:
return 0
dn = n - k
if k == 0:
if p < .01:
dk = -expm1(dn * log1p(-p))
else:
dk = 1 - pow(1.0 - p, dn)
else:
dk = k + 1
dk = betai(dk, dn, p)
return dk
def stdtr(k, t):
"""Student's t distribution, -infinity to t.
See Cephes docs for details.
"""
if k <= 0:
raise ValueError('stdtr: df must be > 0.')
if t == 0:
return 0.5
if t < -2:
rk = k
z = rk / (rk + t * t)
return 0.5 * betai(0.5 * rk, 0.5, z)
# compute integral from -t to + t
if t < 0:
x = -t
else:
x = t
rk = k # degrees of freedom
z = 1 + (x * x) / rk
# test if k is odd or even
if (k & 1) != 0:
# odd k
xsqk = x / sqrt(rk)
p = atan(xsqk)
if k > 1:
f = 1
tz = 1
j = 3
while (j <= (k - 2)) and ((tz / f) > MACHEP):
tz *= (j - 1) / (z * j)
f += tz
j += 2
p += f * xsqk / z
p *= 2 / PI
else:
# even k
f = 1
tz = 1
j = 2
while (j <= (k - 2)) and ((tz / f) > MACHEP):
tz *= (j - 1) / (z * j)
f += tz
j += 2
p = f * x / sqrt(z * rk)
# common exit
if t < 0:
p = -p # note destruction of relative accuracy
p = 0.5 + 0.5 * p
return p
def pseries(a, b, x):
"""Power series for incomplete beta integral.
Use when b * x is small and x not too close to 1.
See Cephes docs for details.
"""
ai = 1 / a
u = (1 - b) * x
v = u / (a + 1)
t1 = v
t = u
n = 2
s = 0
z = MACHEP * ai
while abs(v) > z:
u = (n - b) * x / n
t *= u
v = t / (a + n)
s += v
n += 1
s += t1
s += ai
u = a * log(x)
if ((a + b) < MAXGAM) and (abs(u) < MAXLOG):
t = Gamma(a + b) / (Gamma(a) * Gamma(b))
s = s * t * pow(x, a)
else:
t = lgam(a + b) - lgam(a) - lgam(b) + u + log(s)
if t < MINLOG:
s = 0
else:
s = exp(t)
    return s
| bsd-3-clause | -6,199,607,592,344,027,000 | 24.214876 | 79 | 0.493445 | false |
gustavoatt/consultas | consultas_proyecto/consultas_proyecto/settings/base.py | 1 | 7398 | """Common settings and globals."""
from os.path import abspath, basename, dirname, join, normpath
from sys import path
########## PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
########## END PATH CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Your Name', '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
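# Example (illustrative) PostgreSQL setup for a concrete environment module;
# engine, name and credentials below are placeholders, not project values:
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.postgresql_psycopg2',
#         'NAME': 'consultas',
#         'USER': 'consultas',
#         'PASSWORD': '',
#         'HOST': 'localhost',
#         'PORT': '5432',
#     }
# }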
########## END DATABASE CONFIGURATION
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'es-VE'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = normpath(join(SITE_ROOT, 'assets'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
normpath(join(SITE_ROOT, 'static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key should only be used for development and testing.
SECRET_KEY = r"fh2#ni-%+2-lo@24x5=#9e%i1w^dh%6s1jv0$p$e207iswh3hg"
########## END SECRET CONFIGURATION
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
########## END SITE CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
normpath(join(SITE_ROOT, 'fixtures')),
)
########## END FIXTURE CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
normpath(join(SITE_ROOT, 'templates')),
)
########## END TEMPLATE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE_CLASSES = (
# Default Django middleware.
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin panel and documentation:
'django.contrib.admin',
# 'django.contrib.admindocs',
)
THIRD_PARTY_APPS = (
# Database migration helpers:
'south',
# Form helpers
'floppyforms',
'crispy_forms',
# REST API
'rest_framework',
# Server
'gunicorn',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'pacientes_app',
'historias_app',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
########## END APP CONFIGURATION
########## CRISPY FORMS CONFIGURATION
CRISPY_TEMPLATE_PACK = 'bootstrap3'
########## END CRISPY FORMS CONFIGURATION
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = '%s.wsgi.application' % SITE_NAME
########## END WSGI CONFIGURATION
| mit | -4,859,612,239,694,173,000 | 27.785992 | 98 | 0.694107 | false |
crccheck/gallery-cms | gallery/routes.py | 1 | 2047 | import os
from io import BytesIO
from itertools import islice
from pathlib import Path
from PIL import Image
from starlette.responses import Response, PlainTextResponse
# TODO centralize this
BASE_DIR = os.getenv("BASE_DIR", os.path.dirname(os.path.abspath(__file__)))
THUMBNAIL_SIZE = (300, 300)
DEFAULT_SIZE = 200
def thumbs(request):
# NOTE: PIL won't create a thumbnail larger than the original
size = int(request.query_params.get("size", DEFAULT_SIZE))
image_file = Path(BASE_DIR, request.path_params["path"])
if not image_file.exists():
return PlainTextResponse("Not found", status_code=404)
im = Image.open(image_file)
# TODO cache thumbnails
im.thumbnail((size, size))
fp = BytesIO()
im.save(fp, format="webp")
fp.seek(0)
# WISHLIST support 304 not modified, etag, content-disposition
# last_modified = image_file.stat().st_mtime
# Last-Modified: Wed, 21 Oct 2015 07:28:00 GMT
# last_modified_str = dt.datetime.fromtimestamp(last_modified).strftime(
# "%a, %e %b %Y %H:%M:%S"
# )
return Response(
fp.read(),
headers={
"Cache-Control": f"public, max-age={86400 * 7}",
# "Last-Modified": f"{last_modified_str} GMT",
},
media_type="image/webp",
)
def album_thumb(request):
size = int(request.query_params.get("size", DEFAULT_SIZE / 2))
album_path = Path(BASE_DIR, request.path_params["path"])
thumb = Image.new("RGB", (size, size))
first_four_images = islice(album_path.glob("*.jpg"), 4)
for idx, img_path in enumerate(first_four_images):
im = Image.open(str(img_path))
# TODO crop thumbnails to square before thumbnailing
        im.thumbnail((size // 2, size // 2))
        thumb.paste(im, ((idx // 2) * (size // 2), (idx % 2) * (size // 2)))
fp = BytesIO()
thumb.save(fp, format="webp")
fp.seek(0)
return Response(
fp.read(),
headers={"Cache-Control": f"public, max-age=900"},
media_type="image/webp",
)
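# Illustrative wiring into a Starlette app (paths and app setup assumed,
# not defined in this module):
#   from starlette.applications import Starlette
#   from starlette.routing import Route
#   app = Starlette(routes=[
#       Route("/thumbs/{path:path}", thumbs),
#       Route("/album-thumb/{path:path}", album_thumb),
#   ])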
| gpl-3.0 | -4,949,062,279,888,032,000 | 30.492308 | 82 | 0.625305 | false |
FloatingGhost/skype4py | Skype4Py/call.py | 1 | 18207 | """Calls, conferences.
"""
__docformat__ = 'restructuredtext en'
from .utils import *
from .enums import *
class DeviceMixin(object):
def _Device(self, Name, DeviceType=None, Set=type(None)):
args = args2dict(self._Property(Name, Cache=False))
if Set is type(None):
for dev, value in list(args.items()):
try:
args[dev] = int(value)
except ValueError:
pass
if DeviceType is None:
return args
return args.get(DeviceType, None)
elif DeviceType is None:
raise TypeError('DeviceType must be specified if Set is used')
if Set:
args[DeviceType] = tounicode(Set)
else:
args.pop(DeviceType, None)
for dev, value in list(args.items()):
args[dev] = quote(value, True)
self._Alter('SET_%s' % Name,
', '.join('%s=%s' % item for item in list(args.items())))
def CaptureMicDevice(self, DeviceType=None, Set=type(None)):
"""Queries or sets the mic capture device.
:Parameters:
DeviceType : `enums`.callIoDeviceType* or None
Mic capture device type.
Set
Value the device should be set to or None if it should be deactivated.
Querying all active devices:
Devices = CaptureMicDevice()
Returns a mapping of device types to their values. Only active devices are
returned.
Querying a specific device:
Value = CaptureMicDevice(DeviceType)
Returns a device value for the given DeviceType.
Setting a device value:
CaptureMicDevice(DeviceType, Value)
If Value is None, the device will be deactivated.
:note: This command functions for active calls only.
"""
return self._Device('CAPTURE_MIC', DeviceType, Set)
def InputDevice(self, DeviceType=None, Set=type(None)):
"""Queries or sets the sound input device.
:Parameters:
DeviceType : `enums`.callIoDeviceType* or None
Sound input device type.
Set
Value the device should be set to or None if it should be deactivated.
Querying all active devices:
Devices = InputDevice()
Returns a mapping of device types to their values. Only active devices are
returned.
Querying a specific device:
Value = InputDevice(DeviceType)
Returns a device value for the given DeviceType.
Setting a device value:
InputDevice(DeviceType, Value)
If Value is None, the device will be deactivated.
:note: This command functions for active calls only.
"""
return self._Device('INPUT', DeviceType, Set)
def OutputDevice(self, DeviceType=None, Set=type(None)):
"""Queries or sets the sound output device.
:Parameters:
DeviceType : `enums`.callIoDeviceType* or None
Sound output device type.
Set
Value the device should be set to or None if it should be deactivated.
Querying all active devices:
Devices = OutputDevice()
Returns a mapping of device types to their values. Only active devices are
returned.
Querying a specific device:
Value = OutputDevice(DeviceType)
Returns a device value for the given DeviceType.
Setting a device value:
OutputDevice(DeviceType, Value)
If Value is None, the device will be deactivated.
:note: This command functions for active calls only.
"""
return self._Device('OUTPUT', DeviceType, Set)
class Call(Cached, DeviceMixin):
"""Represents a voice/video call.
"""
_ValidateHandle = int
def __repr__(self):
return Cached.__repr__(self, 'Id')
def _Alter(self, AlterName, Args=None):
return self._Owner._Alter('CALL', self.Id, AlterName, Args)
def _Init(self):
self._MakeOwner()
def _Property(self, PropName, Set=None, Cache=True):
return self._Owner._Property('CALL', self.Id, PropName, Set, Cache)
def Answer(self):
"""Answers the call.
"""
#self._Property('STATUS', 'INPROGRESS')
self._Alter('ANSWER')
def CanTransfer(self, Target):
"""Queries if a call can be transferred to a contact or phone number.
:Parameters:
Target : str
Skypename or phone number the call is to be transferred to.
:return: True if call can be transferred, False otherwise.
:rtype: bool
"""
return self._Property('CAN_TRANSFER %s' % Target) == 'TRUE'
def Finish(self):
"""Ends the call.
"""
#self._Property('STATUS', 'FINISHED')
self._Alter('END', 'HANGUP')
def Forward(self):
"""Forwards a call.
"""
self._Alter('END', 'FORWARD_CALL')
def Hold(self):
"""Puts the call on hold.
"""
#self._Property('STATUS', 'ONHOLD')
self._Alter('HOLD')
def Join(self, Id):
"""Joins with another call to form a conference.
:Parameters:
Id : int
Call Id of the other call to join to the conference.
:return: Conference object.
:rtype: `Conference`
"""
#self._Alter('JOIN_CONFERENCE', Id)
reply = self._Owner._DoCommand('SET CALL %s JOIN_CONFERENCE %s' % (self.Id, Id),
'CALL %s CONF_ID' % self.Id)
return Conference(self._Owner, reply.split()[-1])
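    # Illustrative usage (the call objects are assumed to come from the
    # Skype API, e.g. Skype.PlaceCall):
    #   conference = call_a.Join(call_b.Id)
    #   print(conference.Id)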
def MarkAsSeen(self):
"""Marks the call as seen.
"""
self.Seen = True
def RedirectToVoicemail(self):
"""Redirects a call to voicemail.
"""
self._Alter('END', 'REDIRECT_TO_VOICEMAIL')
def Resume(self):
"""Resumes the held call.
"""
#self.Answer()
self._Alter('RESUME')
def StartVideoReceive(self):
"""Starts video receive.
"""
self._Alter('START_VIDEO_RECEIVE')
def StartVideoSend(self):
"""Starts video send.
"""
self._Alter('START_VIDEO_SEND')
def StopVideoReceive(self):
"""Stops video receive.
"""
self._Alter('STOP_VIDEO_RECEIVE')
def StopVideoSend(self):
"""Stops video send.
"""
self._Alter('STOP_VIDEO_SEND')
def Transfer(self, *Targets):
"""Transfers a call to one or more contacts or phone numbers.
:Parameters:
Targets : str
one or more phone numbers or Skypenames the call is being transferred to.
:note: You can transfer an incoming call to a group by specifying more than one target,
first one of the group to answer will get the call.
:see: `CanTransfer`
"""
self._Alter('TRANSFER', ', '.join(Targets))
def _GetConferenceId(self):
return int(self._Property('CONF_ID'))
ConferenceId = property(_GetConferenceId,
doc="""Conference Id.
:type: int
""")
def _GetDatetime(self):
from datetime import datetime
return datetime.fromtimestamp(self.Timestamp)
Datetime = property(_GetDatetime,
doc="""Date and time of the call.
:type: datetime.datetime
:see: `Timestamp`
""")
def _SetDTMF(self, Value):
self._Alter('DTMF', Value)
DTMF = property(fset=_SetDTMF,
doc="""Set this property to send DTMF codes. Permitted symbols are: [0..9, #, \*].
:type: str
:note: This command functions for active calls only.
""")
def _GetDuration(self):
return int(self._Property('DURATION', Cache=False))
Duration = property(_GetDuration,
doc="""Duration of the call in seconds.
:type: int
""")
def _GetFailureReason(self):
return int(self._Property('FAILUREREASON'))
FailureReason = property(_GetFailureReason,
doc="""Call failure reason. Read if `Status` == `enums.clsFailed`.
:type: `enums`.cfr*
""")
def _GetForwardedBy(self):
return str(self._Property('FORWARDED_BY'))
ForwardedBy = property(_GetForwardedBy,
doc="""Skypename of the user who forwarded a call.
:type: str
""")
def _GetId(self):
return self._Handle
Id = property(_GetId,
doc="""Call Id.
:type: int
""")
def _GetInputStatus(self):
return (self._Property('VAA_INPUT_STATUS') == 'TRUE')
InputStatus = property(_GetInputStatus,
doc="""True if call voice input is enabled.
:type: bool
""")
def _GetParticipants(self):
count = int(self._Property('CONF_PARTICIPANTS_COUNT'))
return ParticipantCollection(self, range(count))
Participants = property(_GetParticipants,
doc="""Participants of a conference call not hosted by the user.
:type: `ParticipantCollection`
""")
def _GetPartnerDisplayName(self):
return self._Property('PARTNER_DISPNAME')
PartnerDisplayName = property(_GetPartnerDisplayName,
doc="""The DisplayName of the remote caller.
:type: unicode
""")
def _GetPartnerHandle(self):
return str(self._Property('PARTNER_HANDLE'))
PartnerHandle = property(_GetPartnerHandle,
doc="""The Skypename of the remote caller.
:type: str
""")
def _GetPstnNumber(self):
return str(self._Property('PSTN_NUMBER'))
PstnNumber = property(_GetPstnNumber,
doc="""PSTN number of the call.
:type: str
""")
def _GetPstnStatus(self):
return self._Property('PSTN_STATUS')
PstnStatus = property(_GetPstnStatus,
doc="""PSTN number status.
:type: unicode
""")
def _GetRate(self):
return int(self._Property('RATE'))
Rate = property(_GetRate,
doc="""Call rate. Expressed using `RatePrecision`. If you're just interested in the call rate
expressed in current currency, use `RateValue` instead.
:type: int
:see: `RateCurrency`, `RatePrecision`, `RateToText`, `RateValue`
""")
def _GetRateCurrency(self):
return self._Property('RATE_CURRENCY')
RateCurrency = property(_GetRateCurrency,
doc="""Call rate currency.
:type: unicode
:see: `Rate`, `RatePrecision`, `RateToText`, `RateValue`
""")
def _GetRatePrecision(self):
return int(self._Property('RATE_PRECISION'))
RatePrecision = property(_GetRatePrecision,
doc="""Call rate precision. Expressed as a number of times the call rate has to be divided by 10.
:type: int
:see: `Rate`, `RateCurrency`, `RateToText`, `RateValue`
""")
def _GetRateToText(self):
return ('%s %.3f' % (self.RateCurrency, self.RateValue)).strip()
RateToText = property(_GetRateToText,
doc="""Returns the call rate as a text with currency and properly formatted value.
:type: unicode
:see: `Rate`, `RateCurrency`, `RatePrecision`, `RateValue`
""")
def _GetRateValue(self):
if self.Rate < 0:
return 0.0
return float(self.Rate) / (10 ** self.RatePrecision)
RateValue = property(_GetRateValue,
doc="""Call rate value. Expressed in current currency.
:type: float
:see: `Rate`, `RateCurrency`, `RatePrecision`, `RateToText`
""")
def _GetSeen(self):
return (self._Property('SEEN') == 'TRUE')
def _SetSeen(self, Value):
self._Property('SEEN', cndexp(Value, 'TRUE', 'FALSE'))
Seen = property(_GetSeen, _SetSeen,
doc="""Queries/sets the seen status of the call. True if the call was seen, False otherwise.
:type: bool
:note: You cannot alter the call seen status from seen to unseen.
""")
def _GetStatus(self):
return str(self._Property('STATUS'))
def _SetStatus(self, Value):
self._Property('STATUS', str(Value))
Status = property(_GetStatus, _SetStatus,
doc="""The call status.
:type: `enums`.cls*
""")
def _GetSubject(self):
return self._Property('SUBJECT')
Subject = property(_GetSubject,
doc="""Call subject.
:type: unicode
""")
def _GetTargetIdentity(self):
return str(self._Property('TARGET_IDENTITY'))
TargetIdentity = property(_GetTargetIdentity,
doc="""Target number for incoming SkypeIn calls.
:type: str
""")
def _GetTimestamp(self):
return float(self._Property('TIMESTAMP'))
Timestamp = property(_GetTimestamp,
doc="""Call date and time expressed as a timestamp.
:type: float
:see: `Datetime`
""")
def _GetTransferActive(self):
return self._Property('TRANSFER_ACTIVE') == 'TRUE'
TransferActive = property(_GetTransferActive,
doc="""Returns True if the call has been transferred.
:type: bool
""")
def _GetTransferredBy(self):
return str(self._Property('TRANSFERRED_BY'))
TransferredBy = property(_GetTransferredBy,
doc="""Returns the Skypename of the user who transferred the call.
:type: str
""")
def _GetTransferredTo(self):
return str(self._Property('TRANSFERRED_TO'))
TransferredTo = property(_GetTransferredTo,
doc="""Returns the Skypename of the user or phone number the call has been transferred to.
:type: str
""")
def _GetTransferStatus(self):
return str(self._Property('TRANSFER_STATUS'))
TransferStatus = property(_GetTransferStatus,
doc="""Returns the call transfer status.
:type: `enums`.cls*
""")
def _GetType(self):
return str(self._Property('TYPE'))
Type = property(_GetType,
doc="""Call type.
:type: `enums`.clt*
""")
def _GetVideoReceiveStatus(self):
return str(self._Property('VIDEO_RECEIVE_STATUS'))
VideoReceiveStatus = property(_GetVideoReceiveStatus,
doc="""Call video receive status.
:type: `enums`.vss*
""")
def _GetVideoSendStatus(self):
return str(self._Property('VIDEO_SEND_STATUS'))
VideoSendStatus = property(_GetVideoSendStatus,
doc="""Call video send status.
:type: `enums`.vss*
""")
def _GetVideoStatus(self):
return str(self._Property('VIDEO_STATUS'))
VideoStatus = property(_GetVideoStatus,
doc="""Call video status.
:type: `enums`.cvs*
""")
def _GetVmAllowedDuration(self):
return int(self._Property('VM_ALLOWED_DURATION'))
VmAllowedDuration = property(_GetVmAllowedDuration,
doc="""Returns the permitted duration of a voicemail in seconds.
:type: int
""")
def _GetVmDuration(self):
return int(self._Property('VM_DURATION'))
VmDuration = property(_GetVmDuration,
doc="""Returns the duration of a voicemail.
:type: int
""")
class CallCollection(CachedCollection):
_CachedType = Call
class Participant(Cached):
"""Represents a conference call participant.
"""
_ValidateHandle = int
def __repr__(self):
return Cached.__repr__(self, 'Id', 'Idx', 'Handle')
def _Property(self, Prop):
# Prop: 0 = user name, 1 = call type, 2 = call status, 3 = display name
reply = self._Owner._Property('CONF_PARTICIPANT %d' % self.Idx)
return chop(reply, 3)[Prop]
def _GetCall(self):
return self._Owner
Call = property(_GetCall,
doc="""Call object.
:type: `Call`
""")
def _GetCallStatus(self):
return str(self._Property(2))
CallStatus = property(_GetCallStatus,
doc="""Call status of a participant in a conference call.
:type: `enums`.cls*
""")
def _GetCallType(self):
return str(self._Property(1))
CallType = property(_GetCallType,
doc="""Call type in a conference call.
:type: `enums`.clt*
""")
def _GetDisplayName(self):
return self._Property(3)
DisplayName = property(_GetDisplayName,
doc="""DisplayName of a participant in a conference call.
:type: unicode
""")
def _GetHandle(self):
return str(self._Property(0))
Handle = property(_GetHandle,
doc="""Skypename of a participant in a conference call.
:type: str
""")
def _GetId(self):
return self._Owner.Id
Id = property(_GetId,
doc="""Call Id.
:type: int
""")
def _GetIdx(self):
return self._Handle
Idx = property(_GetIdx,
doc="""Call participant index.
:type: int
""")
class ParticipantCollection(CachedCollection):
_CachedType = Participant
class Conference(Cached):
"""Represents a conference call.
"""
_ValidateHandle = int
def __repr__(self):
return Cached.__repr__(self, 'Id')
def Finish(self):
"""Finishes a conference so all active calls have the status
`enums.clsFinished`.
"""
for c in self._GetCalls():
c.Finish()
def Hold(self):
"""Places all calls in a conference on hold so all active calls
have the status `enums.clsLocalHold`.
"""
for c in self._GetCalls():
c.Hold()
def Resume(self):
"""Resumes a conference that was placed on hold so all active calls
have the status `enums.clsInProgress`.
"""
for c in self._GetCalls():
c.Resume()
def _GetActiveCalls(self):
return CallCollection(self._Owner, (x.Id for x in self._Owner.ActiveCalls if x.ConferenceId == self.Id))
ActiveCalls = property(_GetActiveCalls,
doc="""Active calls with the same conference ID.
:type: `CallCollection`
""")
def _GetCalls(self):
return CallCollection(self._Owner, (x.Id for x in self._Owner.Calls() if x.ConferenceId == self.Id))
Calls = property(_GetCalls,
doc="""Calls with the same conference ID.
:type: `CallCollection`
""")
def _GetId(self):
return self._Handle
Id = property(_GetId,
doc="""Id of a conference.
:type: int
""")
class ConferenceCollection(CachedCollection):
_CachedType = Conference
| bsd-3-clause | -244,468,576,796,218,620 | 24.752475 | 112 | 0.598396 | false |
google/loaner | loaner/web_app/backend/models/bootstrap_status_model.py | 1 | 1339 | # Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bootstrap status model for the loaner project."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from google.appengine.ext import ndb
class BootstrapStatus(ndb.Model):
"""Datastore model representing a bootstrap task status for the loaner app.
Attributes:
description: str, Friendly description of the bootstrap task function.
success: bool, indicates if the bootstrap task was successful.
timestamp: datetime, The datetime the bootstrap task was completed.
details: str, A record of any error information.
"""
description = ndb.StringProperty()
success = ndb.BooleanProperty()
timestamp = ndb.DateTimeProperty()
details = ndb.TextProperty()
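  # Illustrative usage (entity id and values assumed):
  #   status = BootstrapStatus(id='bootstrap_datastore_yaml',
  #                            description='Bootstrap datastore YAML',
  #                            success=True)
  #   status.put()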
| apache-2.0 | -378,537,550,209,617,500 | 36.194444 | 77 | 0.757282 | false |
Bob131/obraz | obraz.py | 1 | 20221 | # -*- coding: utf-8 -*-
# Copyright (c) 2012-2014 Andrey Vlasovskikh
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Static site generator in a single Python file mostly compatible with Jekyll.
Usage:
obraz (build | serve | new PATH) [options]
obraz -h|--help
Commands:
build Build your site.
serve Serve your site locally.
new Create a new Obraz site scaffold in PATH.
Options:
-s --source=DIR Source directory.
-d --destination=DIR Destination directory.
--force Force overwriting the destination directory.
--safe Disable custom plugins.
-w --watch Watch for changes and rebuild.
-D --drafts Render posts in the _drafts folder.
-H --host=HOSTNAME Listen at the given hostname.
-P --port=PORT Listen at the given port.
-b --baseurl=URL Serve the website from the given base URL.
-q --quiet Be quiet.
-v --version Show version.
-h --help Show help message.
For documentation see <http://obraz.pirx.ru/>.
"""
from __future__ import unicode_literals
import sys
import os
import re
import shutil
import errno
from glob import glob
from io import BytesIO
from datetime import datetime
from threading import Thread
from time import sleep
import traceback
try:
from urllib.request import pathname2url, url2pathname
from http.server import SimpleHTTPRequestHandler, HTTPServer
except ImportError:
from urllib import pathname2url, url2pathname
from SimpleHTTPServer import SimpleHTTPRequestHandler
from BaseHTTPServer import HTTPServer
import yaml
from markdown import markdown
from jinja2 import Environment, FileSystemLoader
from docopt import docopt
__all__ = [
'file_filter', 'template_filter', 'template_renderer', 'loader',
'processor', 'generator',
]
PAGE_ENCODING = URL_ENCODING = 'UTF-8'
PY2 = sys.version_info < (3,)
DEFAULT_CONFIG = {
'source': './',
'destination': './_site',
'include': ['.htaccess'],
'exclude': [],
'exclude_patterns': [
r'^[\.#].*',
r'.*~$',
r'.*\.s[uvw][a-z]$', # *.swp files, etc.
],
'full_build_patterns': [
r'_layouts',
r'_includes',
],
'host': '0.0.0.0',
'port': '8000',
'baseurl': '',
}
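# Illustrative _config.yml overriding some of these defaults (values are
# placeholders):
#   destination: ./_build
#   exclude: ['drafts-notes.txt']
#   port: '9000'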
_quiet = False
_loaders = []
_processors = []
_file_filters = {}
_template_filters = {}
_render_string = lambda string, context, site: string
def file_filter(extensions):
"""Register a page content filter for file extensions."""
def wrapper(f):
for ext in extensions:
_file_filters[ext] = f
return f
return wrapper
def template_filter(name):
"""Register a template filter."""
def wrapper(f):
_template_filters[name] = f
return f
return wrapper
def template_renderer(f):
"""Set a custom template renderer."""
global _render_string
_render_string = f
return f
def loader(f):
"""Register a site source content loader."""
_loaders.insert(0, f)
return f
def processor(f):
"""Register a site content processor."""
_processors.insert(0, f)
return f
def generator(f):
"""Register a destination files generator for the site."""
_processors.append(f)
return f
def fallback_loader(f):
_loaders.append(f)
return f
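# Illustrative custom plugin, e.g. saved as _plugins/shout.py (file name and
# filter name assumed):
#   import obraz
#
#   @obraz.template_filter('shout')
#   def shout(s, config):
#       return s.upper()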
def load_yaml_mapping(path):
try:
with open(path, 'rb') as fd:
            mapping = yaml.safe_load(fd)
        return mapping if mapping else {}
    except IOError as e:
        if e.errno == errno.ENOENT:
            return {}
        raise
def merge(x1, x2):
if isinstance(x1, dict) and isinstance(x2, dict):
res = x1.copy()
for k, v in x2.items():
if k in res:
res[k] = merge(res[k], v)
else:
res[k] = v
return res
elif isinstance(x1, list) and isinstance(x2, list):
res = list(x1)
res.extend(x2)
return res
elif x1 == x2:
return x1
else:
raise ValueError("Cannot merge '{0!r}' and '{1!r}'".format(x1, x2))
def all_source_files(source, destination):
source = source.encode()
destination = destination.encode()
dst_base, dst_name = os.path.split(os.path.realpath(destination))
for source, dirs, files in os.walk(source):
if os.path.realpath(source) == dst_base and dst_name in dirs:
dirs.remove(dst_name)
for filename in files:
yield os.path.join(source, filename).decode("utf8")
def changed_files(source, destination, config, poll_interval=1):
times = {}
while True:
changed = []
for path in all_source_files(source, destination):
rel_path = os.path.relpath(path, source)
if not is_file_visible(rel_path, config):
continue
new = os.stat(path).st_mtime
old = times.get(path)
if not old or new > old:
times[path] = new
changed.append(path)
if changed:
yield changed
sleep(poll_interval)
def is_file_visible(path, config):
"""Check file name visibility according to site settings."""
parts = path.split(os.path.sep)
exclude = config.get('exclude', [])
exclude_patterns = config.get('exclude_patterns', [])
if path in config.get('include', []):
return True
elif any(re.match(pattern, part)
for pattern in exclude_patterns
for part in parts):
return False
elif any(path.startswith(s) for s in exclude):
return False
else:
return True
def is_underscored(path):
parts = path.split(os.path.sep)
return any(part.startswith('_') for part in parts)
def path2url(path):
m = re.match(r'(.*)[/\\]index.html?$', path)
if m:
path = m.group(1) + os.path.sep
path = os.path.sep + path
url = pathname2url(path.encode(URL_ENCODING))
return url.decode('ASCII') if PY2 else url
def url2path(url):
if url.endswith('/'):
url += 'index.html'
path = url2pathname(url.encode('ASCII') if PY2 else url)
return (path.decode(URL_ENCODING) if PY2 else path).lstrip(os.path.sep)
def make_dirs(path):
try:
os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
def remove(path):
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
def info(message):
if not _quiet:
log(message)
def log(message):
sys.stderr.write('{0}\n'.format(message))
sys.stderr.flush()
def progress(msg, xs):
if _quiet:
for x in xs:
yield x
else:
size = len(xs)
for i, x in enumerate(xs, 1):
yield x
s = '{0}: {1}% ({2}/{3})'.format(msg, int(i * 100 / size), i, size)
sys.stderr.write('\r' + s)
sys.stderr.write('\n')
def file_suffix(path):
_, ext = os.path.splitext(path)
return ext
def object_name(f):
if f.__doc__:
lines = f.__doc__.splitlines()
for line in lines:
line = line.strip()
if line:
return line.rstrip('.')
return f.__name__
@template_filter('markdownify')
@file_filter(['.md', '.markdown'])
def markdown_filter(s, config):
return markdown(s)
@fallback_loader
def load_file(path, config):
if not is_file_visible(path, config) or is_underscored(path):
return None
return {
'files': [{'url': path2url(path), 'path': path}],
}
@template_renderer
def jinja2_render_string(string, context, config):
includes = os.path.join(config['source'], '_includes')
env = Environment(loader=FileSystemLoader(includes))
for name, f in _template_filters.items():
env.filters[name] = lambda s, f=f: f(s, config)
t = env.from_string(string)
return t.render(**context)
def read_template(path):
with open(path.encode(), 'rb') as fd:
if fd.read(3) != b'---':
return None
lines = []
while True:
line = fd.readline()
if re.match(b'^---\r?\n', line):
break
elif line == b'':
return None
lines.append(line)
front_matter = BytesIO(b''.join(lines))
front_matter.name = path
    page = yaml.safe_load(front_matter)
if not page:
page = {}
content = fd.read().decode(PAGE_ENCODING)
page['content'] = content
return page
@loader
def load_page(path, config):
if not is_file_visible(path, config) or is_underscored(path):
return None
name, suffix = os.path.splitext(path)
if suffix in _file_filters:
dst = '{0}.html'.format(name)
else:
dst = path
page = read_template(os.path.join(config['source'], path))
if not page:
return None
page.update({'url': path2url(dst), 'path': path})
return {
'pages': [page]
}
def read_post(path, date, title, config):
page = read_template(os.path.join(config['source'], path))
if not page:
return None
if 'date' in page:
date = page['date']
permalink = config.get('permalink', '/{year}/{month}/{day}/{title}.html')
url_vars = {
'year': '{0:04}'.format(date.year),
'month': '{0:02}'.format(date.month),
'day': '{0:02}'.format(date.day),
'title': title,
}
url = pathname2url(permalink.format(**url_vars))
page.update({'url': url, 'path': path})
if 'date' not in page:
date_str = '{year}-{month}-{day}'.format(**url_vars)
page['date'] = datetime.strptime(date_str, '%Y-%m-%d')
page['id'] = '/{year}/{month}/{day}/{title}'.format(**url_vars)
return {
'posts': [page],
'tags': dict((tag, [page]) for tag in page.get('tags', [])),
}
@loader
def load_post(path, config):
    post_re = re.compile(r'(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})-'
                         r'(?P<title>.+)')
parts = path.split(os.path.sep)
if '_posts' not in parts:
return None
if not is_file_visible(path, config):
return None
name, _ = os.path.splitext(os.path.basename(path))
m = post_re.match(name)
if not m:
return None
date = datetime.strptime('{year}-{month}-{day}'.format(**m.groupdict()),
'%Y-%m-%d')
return read_post(path, date, m.group('title'), config)
@loader
def load_draft(path, config):
if not config.get('drafts'):
return None
if '_drafts' not in path.split(os.path.sep):
return None
if not is_file_visible(path, config):
return None
title, _ = os.path.splitext(os.path.basename(path))
return read_post(path, config['time'], title, config)
def render_layout(content, page, site):
name = page.get('layout', 'nil')
if name == 'nil':
return content
filename = '{0}.html'.format(name)
layout_file = os.path.join(site['source'], '_layouts', filename)
layout = read_template(layout_file)
if not layout:
raise Exception("Cannot load template: '{0}'".format(layout_file))
page_copy = page.copy()
page_copy.pop('layout', None)
page_copy.pop('content', None)
layout.update(page_copy)
context = {
'site': site,
'page': layout,
'content': content,
}
content = _render_string(layout['content'], context, site)
return render_layout(content, layout, site)
def render_page(page, site):
context = {
'site': site,
'page': page,
}
content = page['content']
if not page.get('raw_content', False):
content = _render_string(content, context, site)
f = _file_filters.get(file_suffix(page.get('path', '')))
if f:
content = f(content, site)
page['content'] = content
return render_layout(content, page, site)
@processor
def process_posts(site):
"""Sort and interlink posts."""
posts = site.setdefault('posts', [])
posts.sort(key=lambda p: p['date'], reverse=True)
n = len(posts)
for i, post in enumerate(posts):
if i < n - 1:
post['next'] = posts[i + 1]
if i > 0:
post['previous'] = posts[i - 1]
def generate_page(page, site):
if not page.get('published', True):
return
url = page['url']
dst = os.path.join(site['destination'], url2path(url))
make_dirs(os.path.dirname(dst))
with open(dst, 'wb') as fd:
fd.truncate()
try:
rendered = render_page(page, site)
except Exception as e:
traceback.print_exc()
print("Cannot render '{0}', exiting".format(page.get('path')))
sys.exit(1)
fd.write(rendered.encode(PAGE_ENCODING))
@generator
def generate_pages(site):
"""Generate pages with YAML front matter."""
posts = site.get('posts', [])
pages = site.get('pages', [])
for page in progress('Generating pages', posts + pages):
generate_page(page, site)
@generator
def generate_files(site):
"""Copy static files."""
for file_dict in site.get('files', []):
src = os.path.join(site['source'], file_dict['path'])
dst = os.path.join(site['destination'], url2path(file_dict['url']))
make_dirs(os.path.dirname(dst))
shutil.copy(src.encode(), dst.encode())
def load_plugins(source):
plugins = sorted(glob(os.path.join(source, '_plugins', '*.py')))
n = 0
for plugin in plugins:
with open(plugin, 'rb') as fd:
code = fd.read()
exec(compile(code, plugin, 'exec'), {})
n += 1
if n > 0:
info('Loaded {0} plugins'.format(n))
def build(config):
site = load_site(config)
generate_site(site)
def build_delta(paths, config):
site = load_site_files(paths, config)
generate_site(site, clean=False)
def load_site_files(paths, config):
source = config['source']
info('Loading source files...')
site = config.copy()
n = 0
for path in paths:
rel_path = os.path.relpath(path, source)
for f in _loaders:
data = f(rel_path, site)
if data:
n += 1
site = merge(site, data)
break
info('Loaded {0} files'.format(n))
return site
def load_site(config):
paths = all_source_files(config['source'], config['destination'])
return load_site_files(paths, config)
def generate_site(site, clean=True):
destination = site['destination']
marker = os.path.join(destination, '.obraz_destination')
write_denied = os.path.exists(destination) and not os.path.exists(marker)
if write_denied and not site.get('force'):
raise Exception("Use --force to overwrite the contents "
"of '{0}' not marked as destination "
"directory yet".format(destination))
make_dirs(destination)
if clean:
for name in os.listdir(destination):
remove(os.path.join(destination, name))
with open(marker, 'wb'):
pass
for f in _processors:
msg = object_name(f)
info('{0}...'.format(msg))
f(site)
info('Site generated successfully')
def make_server(config):
host = config['host']
port = int(config['port'])
baseurl = config['baseurl']
class Handler(SimpleHTTPRequestHandler):
def send_head(self):
if not self.path.startswith(baseurl):
self.send_error(404, 'File not found')
return None
self.path = self.path[len(baseurl):]
if not self.path.startswith('/'):
self.path = '/' + self.path
return SimpleHTTPRequestHandler.send_head(self)
return HTTPServer((host, port), Handler)
def serve(config):
build(config)
server = make_server(config)
os.chdir(config['destination'])
log_serving(config)
server.serve_forever()
def watch(config):
source = os.path.abspath(config['source'])
destination = os.path.abspath(config['destination'])
initial_dir = os.getcwd()
serving = False
server = make_server(config)
for changed in changed_files(source, destination, config):
if serving:
info('Changed {0} files, regenerating...'.format(len(changed)))
server.shutdown()
os.chdir(initial_dir)
try:
if full_build_required(changed, config) or not serving:
build(config)
else:
build_delta(changed, config)
except KeyboardInterrupt:
raise
os.chdir(destination)
log_serving(config)
thread = Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
if not serving:
serving = True
def log_serving(config):
url = 'http://{host}:{port}{baseurl}'.format(**config)
if not url.endswith('/'):
url += '/'
info('Serving at {0}'.format(url))
def full_build_required(changed_paths, config):
patterns = config.get('full_build_patterns', [])
source = os.path.abspath(config['source'])
for path in changed_paths:
parts = os.path.relpath(path, source).split(os.path.sep)
if any(re.match(pattern, part)
for pattern in patterns
for part in parts):
return True
return False
def new_site(path):
if os.path.exists(path) and os.listdir(path):
raise Exception("Path '{0}' exists and is not empty".format(path))
source = os.path.join(sys.prefix, 'share', 'obraz', 'scaffold')
shutil.copytree(source, path)
info("New Obraz site installed in '{0}'".format(path))
def obraz(argv):
opts = docopt(__doc__, argv=argv, version='0.9')
global _quiet
_quiet = opts['--quiet']
try:
if opts['new']:
new_site(opts['PATH'])
return
config = DEFAULT_CONFIG.copy()
source = opts['--source'] if opts['--source'] else './'
config_file = os.path.join(source, '_config.yml')
config.update(load_yaml_mapping(config_file))
config['time'] = datetime.utcnow()
for k, v in opts.items():
if k.startswith('--') and v:
config[k[2:]] = v
info('Source: {0}'.format(os.path.abspath(config['source'])))
info('Destination: {0}'.format(os.path.abspath(config['destination'])))
if not config.get('safe'):
load_plugins(source)
if opts['build']:
build(config)
elif opts['serve']:
if opts['--watch']:
watch(config)
else:
serve(config)
except KeyboardInterrupt:
info('Interrupted')
def main():
sys.modules['obraz'] = sys.modules[__name__]
try:
obraz(sys.argv[1:])
except Exception:
traceback.print_exc()
sys.exit(1)
if __name__ == '__main__':
main()
| mit | 4,325,641,461,589,983,700 | 27.084722 | 79 | 0.583997 | false |
zenn1989/scoria-interlude | L2Jscoria-Game/data/scripts/quests/380_BringOutTheFlavorOfIngredients/__init__.py | 1 | 3959 | # Made by disKret & DrLecter
import sys
from com.l2scoria import Config
from com.l2scoria.gameserver.model.quest import State
from com.l2scoria.gameserver.model.quest import QuestState
from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest
qn = "380_BringOutTheFlavorOfIngredients"
#NPC
ROLLANT = 30069
#MOBS
DIRE_WOLF = 20205
KADIF_WEREWOLF = 20206
GIANT_MIST_LEECH = 20225
#ITEMS
RITRONS_FRUIT,MOON_FACE_FLOWER,LEECH_FLUIDS = range(5895,5898)
ANTIDOTE = 1831
RITRON_JELLY = 5960
JELLY_RECIPE = 5959
#mob:[chance,item,max]
DROPLIST = {
DIRE_WOLF:[10,RITRONS_FRUIT,4],
KADIF_WEREWOLF:[50,MOON_FACE_FLOWER,20],
GIANT_MIST_LEECH:[50,LEECH_FLUIDS,10]
}
#CHANCE
RECIPE_CHANCE = 55
class Quest (JQuest) :
def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)
def onEvent (self,event,st) :
htmltext = event
if event == "30069-4.htm" :
st.set("cond","1")
st.setState(STARTED)
st.playSound("ItemSound.quest_accept")
elif event == "30069-12.htm" :
if st.getInt("cond") == 6 :
st.giveItems(JELLY_RECIPE,1)
st.playSound("ItemSound.quest_finish")
else :
htmltext = "I'll squeeze the jelly from your eyes"
st.exitQuest(1)
return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>"
st = player.getQuestState(qn)
if not st : return htmltext
npcId = npc.getNpcId()
id = st.getState()
cond=st.getInt("cond")
if cond == 0 :
if player.getLevel() >= 24 :
htmltext = "30069-1.htm"
else:
htmltext = "30069-0.htm"
st.exitQuest(1)
elif cond == 1 :
htmltext = "30069-6.htm"
elif cond == 2 :
if st.getQuestItemsCount(ANTIDOTE) >= 2 and st.getQuestItemsCount(RITRONS_FRUIT) == 4 and st.getQuestItemsCount(MOON_FACE_FLOWER) == 20 and st.getQuestItemsCount(LEECH_FLUIDS) == 10 :
st.takeItems(RITRONS_FRUIT,-1)
st.takeItems(MOON_FACE_FLOWER,-1)
st.takeItems(LEECH_FLUIDS,-1)
st.takeItems(ANTIDOTE,2)
st.set("cond","3")
htmltext = "30069-7.htm"
else :
htmltext = "30069-6.htm"
elif cond == 3 :
st.set("cond","4")
htmltext = "30069-8.htm"
elif cond == 4 :
st.set("cond","5")
htmltext = "30069-9.htm"
elif cond == 5 :
st.set("cond","6")
htmltext = "30069-10.htm"
elif cond == 6 :
st.giveItems(RITRON_JELLY,1)
if st.getRandom(100) < RECIPE_CHANCE :
htmltext = "30069-11.htm"
else :
htmltext = "30069-13.htm"
st.playSound("ItemSound.quest_finish")
st.exitQuest(1)
return htmltext
def onKill(self,npc,player,isPet):
st = player.getQuestState(qn)
if not st : return
if st.getState() != STARTED : return
if st.getInt("cond") == 1 :
chance,item,max = DROPLIST[npc.getNpcId()]
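      # Rate-scaled drop: the quest rate multiplies the base chance; divmod
      # yields guaranteed whole items plus a remainder chance for one extra.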
numItems,chance = divmod(chance*Config.RATE_DROP_QUEST,100)
count = st.getQuestItemsCount(item)
if count < max :
if st.getRandom(100) < chance :
numItems = numItems + 1
numItems = int(numItems)
if count + numItems > max :
numItems = max - count
if numItems != 0 :
st.giveItems(item,numItems)
if st.getQuestItemsCount(RITRONS_FRUIT) == 4 and st.getQuestItemsCount(MOON_FACE_FLOWER) == 20 and st.getQuestItemsCount(LEECH_FLUIDS) == 10 :
st.set("cond","2")
st.playSound("ItemSound.quest_middle")
else :
st.playSound("ItemSound.quest_itemget")
return
QUEST = Quest(380,qn,"Bring Out The Flavor Of Ingredients")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(ROLLANT)
QUEST.addTalkId(ROLLANT)
for mob in DROPLIST.keys():
QUEST.addKillId(mob)
for item in range(5895,5898):
STARTED.addQuestDrop(ROLLANT,item,1) | gpl-3.0 | -8,783,527,725,578,094,000 | 28.333333 | 188 | 0.635767 | false |
pepetreshere/odoo | addons/account_fleet/models/account_move.py | 2 | 1946 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields, api, _
class AccountMove(models.Model):
_inherit = 'account.move'
def _post(self, soft=True):
vendor_bill_service = self.env.ref('account_fleet.data_fleet_service_type_vendor_bill', raise_if_not_found=False)
if not vendor_bill_service:
return super()._post(soft)
val_list = []
log_list = []
not_posted_before = self.filtered(lambda r: not r.posted_before)
        posted = super()._post(soft)  # We need the move name to be set, but we also need to know which moves are posted for the first time.
for line in (not_posted_before & posted).line_ids.filtered(lambda ml: ml.vehicle_id):
val = {
'service_type_id': vendor_bill_service.id,
'vehicle_id': line.vehicle_id.id,
'amount': line.price_subtotal,
'vendor_id': line.partner_id.id,
'description': line.name,
}
log = _('Service Vendor Bill: <a href=# data-oe-model=account.move data-oe-id={move_id}>{move_name}</a>').format(
move_id=line.move_id.id,
move_name=line.move_id.name,
)
val_list.append(val)
log_list.append(log)
log_service_ids = self.env['fleet.vehicle.log.services'].create(val_list)
for log_service_id, log in zip(log_service_ids, log_list):
log_service_id.message_post(body=log)
return posted
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
vehicle_id = fields.Many2one('fleet.vehicle', string='Vehicle')
need_vehicle = fields.Boolean(compute='_compute_need_vehicle',
help="Technical field to decide whether the vehicle_id field is editable")
def _compute_need_vehicle(self):
self.need_vehicle = False
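        # Hidden by default; extension modules can override this compute to
        # show the vehicle_id field where relevant (assumed to be the
        # intended hook, given the help text above).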
| agpl-3.0 | 7,216,238,615,381,273,000 | 40.404255 | 139 | 0.607914 | false |
3dfxsoftware/cbss-addons | smile_cross_selling/__openerp__.py | 1 | 2151 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Cross Selling",
"version": "0.1",
"depends": ["sale"],
"author": "Smile",
"description": """This module aims to increase your business.
The product get a new tab to link product between themselves.
You can input properties on each product you link and manage the pricelist or a special price to bypass the pricelist.
When you encode a new sale order and you select a main product with links,
OpenERP shows you a pop-up to choose the product you can add in new lines.
Thanks to Camptocamp. We had this idea from their module "product_link".
We prefered make a new module because we didn't want to depend on stock.
This module built and test on OpenERP v7.0.
Suggestions & Feedback to: [email protected] & [email protected]
""",
"summary": "",
"website": "http://www.smile.fr",
"category": 'Sales Management',
"sequence": 20,
"init_xml": [
"security/ir.model.access.csv",
],
"update_xml": [
"view/product_view.xml",
],
'demo_xml': [],
'test': [],
"auto_install": False,
"installable": True,
"application": False,
}
| gpl-2.0 | -5,505,021,721,958,807,000 | 36.736842 | 122 | 0.621106 | false |
LaboratoireMecaniqueLille/crappy | crappy/blocks/grapher.py | 1 | 5641 | # coding: utf-8
import numpy as np
from .block import Block
from .._global import OptionalModule
try:
import matplotlib.pyplot as plt
from matplotlib.widgets import Button
except (ModuleNotFoundError, ImportError):
plt = OptionalModule("matplotlib")
Button = OptionalModule("matplotlib")
class Grapher(Block):
"""The grapher receive data from a block (via a :ref:`Link`) and plots it."""
def __init__(self,
*labels,
length=0,
freq=2,
maxpt=20000,
window_size=(8, 8),
window_pos=None,
interp=True,
backend="TkAgg"):
"""Sets the args and initializes the parent class.
Args:
*labels (:obj:`tuple`): Tuples of the columns labels of input data for
plotting. You can add as much as you want, depending on your
performances. The first value is the `x` label, the second is the `y`
label.
      length (:obj:`int`, optional): If `0` the graph is static and displays
        all data from the start of the assay. Else only displays the last
        ``length`` received points, and drops the previous ones.
freq (:obj:`float`, optional): The refresh rate of the graph. May cause
high memory consumption if set too high.
      maxpt (:obj:`int`, optional): The maximum number of points displayed on
        the graph. When reaching this limit, the block drops every other
        point, which is almost invisible to the user.
window_size (:obj:`tuple`, optional): The size of the graph, in inches.
window_pos (:obj:`tuple`, optional): The position of the graph in pixels.
The first value is for the `x` direction, the second for the `y`
direction. The origin is the top left corner. Works with multiple
screens.
      interp (:obj:`bool`, optional): If :obj:`True`, each data point will be
        joined to the next by a straight line. Else, each value will be
        displayed as a constant until the next update.
      backend (:obj:`str`, optional): The :mod:`matplotlib` backend to use.
Example:
::
graph = Grapher(('t(s)', 'F(N)'), ('t(s)', 'def(%)'))
    will plot a dynamic graph with two line plots (`F=f(t)` and `def=f(t)`).
::
graph = Grapher(('def(%)', 'F(N)'), length=0)
will plot a static graph.
::
graph = Grapher(('t(s)', 'F(N)'), length=30)
    will plot a dynamic graph displaying the last 30 points of data.
"""
Block.__init__(self)
self.niceness = 10
self.length = length
self.freq = freq
self.maxpt = maxpt
self.window_size = window_size
self.window_pos = window_pos
self.interp = interp
self.backend = backend
self.labels = labels
def prepare(self):
if self.backend:
plt.switch_backend(self.backend)
self.f = plt.figure(figsize=self.window_size)
self.ax = self.f.add_subplot(111)
self.lines = []
for _ in self.labels:
if self.interp:
self.lines.append(self.ax.plot([], [])[0])
else:
self.lines.append(self.ax.step([], [])[0])
# Keep only 1/factor points on each line
self.factor = [1 for _ in self.labels]
# Count to drop exactly 1/factor points, no more and no less
self.counter = [0 for _ in self.labels]
legend = [y for x, y in self.labels]
plt.legend(legend, bbox_to_anchor=(-0.03, 1.02, 1.06, .102), loc=3,
ncol=len(legend), mode="expand", borderaxespad=1)
plt.xlabel(self.labels[0][0])
plt.ylabel(self.labels[0][1])
plt.grid()
self.axclear = plt.axes([.8, .02, .15, .05])
self.bclear = Button(self.axclear, 'Clear')
self.bclear.on_clicked(self.clear)
if self.window_pos:
mng = plt.get_current_fig_manager()
mng.window.wm_geometry("+%s+%s" % self.window_pos)
plt.draw()
plt.pause(.001)
def clear(self, event=None):
for line in self.lines:
line.set_xdata([])
line.set_ydata([])
self.factor = [1 for _ in self.labels]
self.counter = [0 for _ in self.labels]
def loop(self):
# We need to recv data from all the links, but keep
# ALL of the data, even with the same label (so not get_all_last)
data = self.recv_all_delay()
for i, (lx, ly) in enumerate(self.labels):
x, y = 0, 0 # So that if we don't find it, we do nothing
for d in data:
if lx in d and ly in d: # Find the first input with both labels
dx = d[lx][self.factor[i]-self.counter[i]-1::self.factor[i]]
dy = d[ly][self.factor[i]-self.counter[i]-1::self.factor[i]]
self.counter[i] = (self.counter[i]+len(d[lx])) % self.factor[i]
x = np.hstack((self.lines[i].get_xdata(), dx))
y = np.hstack((self.lines[i].get_ydata(), dy))
break
if isinstance(x, int):
break
if self.length and len(x) >= self.length:
# Remove the beginning if the graph is dynamic
x = x[-self.length:]
y = y[-self.length:]
elif len(x) > self.maxpt:
        # Reduce the number of points if we have too many to display
print("[Grapher] Too many points on the graph {} ({}>{})".format(
i, len(x), self.maxpt))
x, y = x[::2], y[::2]
self.factor[i] *= 2
print("[Grapher] Resampling factor is now {}".format(self.factor[i]))
self.lines[i].set_xdata(x)
self.lines[i].set_ydata(y)
self.ax.relim() # Update the window
self.ax.autoscale_view(True, True, True)
self.f.canvas.draw() # Update the graph
self.f.canvas.flush_events()
def finish(self):
plt.close("all")
| gpl-2.0 | -8,069,995,091,647,567,000 | 35.160256 | 79 | 0.601489 | false |
sam-m888/gprime | gprime/filters/rules/media/_hasattribute.py | 1 | 1778 | #
# gPrime - A web-based genealogy program
#
# Copyright (C) 2008 Gary Burton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gprime modules
#
#-------------------------------------------------------------------------
from .._hasattributebase import HasAttributeBase
#-------------------------------------------------------------------------
#
# HasAttribute
#
#-------------------------------------------------------------------------
class HasAttribute(HasAttributeBase):
"""Rule that checks for a media object with a particular attribute"""
labels = [ _('Media attribute:'), _('Value:') ]
name = _('Media objects with the attribute <attribute>')
description = _("Matches media objects with the attribute "
"of a particular value")
| gpl-2.0 | -8,233,013,204,067,550,000 | 36.829787 | 79 | 0.538245 | false |
xi-studio/anime | newnet/show.py | 1 | 1080 | import numpy as np
import matplotlib.pyplot as plt
from scipy.sparse import csc_matrix
head = np.random.randint(low=0,high=10,size=20)
tail = np.random.randint(low=0,high=10,size=20)
row = np.arange(20)
data = np.ones(20)
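# build 20x10 one-hot incidence matrices: row i marks the head/tail node of edge i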
a = csc_matrix((data, (row,head)),shape=(20,10)).toarray()
b = csc_matrix((data, (row,tail)),shape=(20,10)).toarray()
def plotCM(cm,title,colorbarOn,givenAX):
ax = givenAX
idx = np.arange(10)
idy = np.arange(20)
plt.matshow(cm, fignum=False, cmap='Blues', vmin=0, vmax=5.0)
ax.set_xticks(range(10))
ax.set_xticklabels(idx)
plt.title(title,size=12)
for i in range(cm.shape[0]):
for j in range(cm.shape[1]):
ax.text(j,i,int(cm[i,j]),va='center', ha='center')
#fig1=plt.subplot(1, 3, 1)
#plotCM(a,"Head Index","off",fig1.axes)
fig2=plt.subplot(1, 1, 1)
w = np.random.randn(20,1)
plt.matshow(w, fignum=False, cmap='Blues', vmin=0, vmax=1.0)
for x in range(20):
fig2.axes.text(0,x,w[x,0],va='center', ha='center')
#fig3=plt.subplot(1, 3, 3)
#plotCM(b,"Tail Index","off",fig3.axes)
plt.show()
| mit | 6,583,698,324,841,650,000 | 23.545455 | 65 | 0.642593 | false |
wahur666/kodok | python/Base64/ip-ban.py | 1 | 1179 | #!/usr/bin/python3
__author__ = 'Imre'
import sys
import os
import re
def main():
if len(sys.argv) == 2:
inputfile = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), sys.argv[1])
if os.path.isfile(inputfile):
output = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])),'done.txt')
infile = open(inputfile, 'r')
outfile = open(output, 'w')
temp = infile.read().splitlines()
out = []
pattern = re.compile(r"\d*\.\d*\.\d*\.\d*")
for line in temp:
if line == '':
continue
else:
if re.search(pattern,line):
tags = line.split(".")
tags[2],tags[3] = "*","*"
a = ".".join(tags)
out.append(a)
else:
out.append(line)
out = list(set(out))
outfile.write("\n".join(out))
print('Done')
else:
print("nem letezo file")
else:
        print('not enough parameters')
if __name__ == '__main__': main() | gpl-3.0 | -950,800,508,332,692,500 | 30.891892 | 92 | 0.433418 | false |
storecast/holon | holon/services/httplib.py | 1 | 1461 | from __future__ import absolute_import
from . import HttpService
from httplib import HTTPConnection, HTTPException, HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def get_transport(self):
"""Helper method to improve testability."""
return self.connection_class(self.host, self.port,
timeout=self.connect_timeout)
def _call(self, body, headers):
        start_time = time.time()
        connection = None
        try:
            connection = self.get_transport()
            # The original code called getresponse() without first sending a
            # request, which cannot work; the request line below is a hedged
            # reconstruction -- "POST" and self.path are assumptions inferred
            # from the surrounding class, not a confirmed API.
            connection.request("POST", self.path, body, headers)
            response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
        finally:
            if connection is not None:  # get_transport() itself may have raised
                connection.close()
end_time = time.time()
return response.status, data, (end_time - start_time)*1000
@property
def protocol(self):
return self.connection_class._http_vsn_str
| bsd-2-clause | 40,881,219,972,397,120 | 34.634146 | 191 | 0.614648 | false |
rockneurotiko/wirecloud | src/wirecloud/platform/workspace/mashupTemplateGenerator.py | 1 | 12686 | # -*- coding: utf-8 -*-
# Copyright (c) 2012-2015 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import json
import six
from wirecloud.catalogue.models import CatalogueResource
from wirecloud.commons.utils.template.base import parse_contacts_info
from wirecloud.commons.utils.template.writers import rdf
from wirecloud.commons.utils.template.writers import xml
from wirecloud.platform.models import IWidget
from wirecloud.platform.wiring.utils import get_wiring_skeleton, parse_wiring_old_version
from wirecloud.platform.workspace.utils import VariableValueCacheManager
def get_iwidgets_description(included_iwidgets):
description = "Wirecloud Mashup composed of: "
description = ', '.join([iwidget.widget.resource.get_processed_info()['title'] for iwidget in included_iwidgets])
return description
def get_workspace_description(workspace):
included_iwidgets = IWidget.objects.filter(tab__workspace=workspace)
return get_iwidgets_description(included_iwidgets)
def get_current_operator_pref_value(operator, preference):
if preference['name'] in operator['preferences']:
return "%s" % operator['preferences'][preference['name']]['value']
else:
return preference['default']
def process_iwidget(workspace, iwidget, wiring, parametrization, readOnlyWidgets):
widget = iwidget.widget
widget_description = widget.resource.get_template().get_resource_info()
iwidget_id = str(iwidget.id)
iwidget_params = {}
if iwidget_id in parametrization:
iwidget_params = parametrization[iwidget_id]
cache_manager = VariableValueCacheManager(workspace, workspace.creator)
# input and output endpoints
for output_endpoint in widget_description['wiring']['outputs']:
wiring['outputs'].append({
'name': output_endpoint['name'],
'type': output_endpoint['type'],
'label': output_endpoint['label'],
'description': output_endpoint['description'],
'friendcode': output_endpoint['friendcode'],
})
for input_endpoint in widget_description['wiring']['inputs']:
wiring['inputs'].append({
'name': input_endpoint['name'],
'type': input_endpoint['type'],
'label': input_endpoint['label'],
'description': input_endpoint['description'],
'friendcode': input_endpoint['friendcode'],
'actionlabel': input_endpoint['actionlabel'],
})
# preferences
widget_preferences = widget_description['preferences']
preferences = {}
for pref in widget_preferences:
status = 'normal'
if pref['name'] in iwidget_params:
iwidget_param_desc = iwidget_params[pref['name']]
status = iwidget_param_desc.get('status', 'normal')
source = iwidget_param_desc.get('source', 'current')
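            # 'default' keeps the widget's declared default value, 'current'
            # snapshots the live workspace value, and 'custom' takes the
            # explicit value given in the parametrization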
if source == 'default':
if status == 'normal':
# Do not issue a Preference element for this preference
continue
value = None
elif source == 'current':
value = cache_manager.get_variable_value_from_varname(iwidget, pref['name'])
elif source == 'custom':
value = iwidget_param_desc['value']
else:
raise Exception('Invalid preference value source: %s' % source)
else:
value = cache_manager.get_variable_value_from_varname(iwidget, pref['name'])
preferences[pref['name']] = {
'readonly': status != 'normal',
'hidden': status == 'hidden',
}
if value is not None:
if pref['type'] == 'boolean':
value = str(value).lower()
elif pref['type'] == 'number':
value = str(value)
preferences[pref['name']]['value'] = value
# iWidget properties
widget_properties = widget_description['properties']
properties = {}
for prop in widget_properties:
status = 'normal'
if prop['name'] in iwidget_params:
iwidget_param_desc = iwidget_params[prop['name']]
status = iwidget_param_desc.get('status', 'normal')
source = iwidget_param_desc.get('source', 'current')
if source == 'default':
if status == 'normal':
# Do not issue a Property element for this property
continue
else:
value = None
elif source == 'current':
value = cache_manager.get_variable_value_from_varname(iwidget, prop['name'])
elif source == 'custom':
value = iwidget_param_desc['value']
else:
raise Exception('Invalid property value source: %s' % source)
else:
value = cache_manager.get_variable_value_from_varname(iwidget, prop['name'])
properties[prop['name']] = {
'readonly': status != 'normal',
'value': value,
}
return {
'id': iwidget_id,
'vendor': iwidget.widget.resource.vendor,
'name': iwidget.widget.resource.short_name,
'version': iwidget.widget.resource.version,
'title': iwidget.name,
'readonly': readOnlyWidgets,
'properties': properties,
'preferences': preferences,
'position': {
'x': str(iwidget.positions['widget']['left']),
'y': str(iwidget.positions['widget']['top']),
'z': str(iwidget.positions['widget']['zIndex']),
},
'rendering': {
'width': str(iwidget.positions['widget']['width']),
'height': str(iwidget.positions['widget']['height']),
'layout': str(iwidget.layout),
'fulldragboard': bool(iwidget.positions['widget']['fulldragboard']),
'minimized': bool(iwidget.positions['widget']['minimized']),
},
}
def build_json_template_from_workspace(options, workspace, user):
options['type'] = 'mashup'
options['params'] = []
options['embedmacs'] = options.get('embedmacs', False) is True
options['embedded'] = set()
options['translations'] = {}
options['translation_index_usage'] = {}
description = options.get('description', '').strip()
if description == '':
options['description'] = get_workspace_description(workspace)
if 'authors' not in options:
options['authors'] = ({'name': six.text_type(user)},)
elif isinstance(options['authors'], six.text_type):
options['authors'] = parse_contacts_info(options['authors'])
if 'contributors' not in options:
options['contributors'] = ()
elif isinstance(options['contributors'], six.text_type):
options['contributors'] = parse_contacts_info(options['contributors'])
options['requirements'] = []
readOnlyWidgets = options.get('readOnlyWidgets', False)
parametrization = options.get('parametrization')
if not parametrization:
parametrization = {}
if 'iwidgets' not in parametrization:
parametrization['iwidgets'] = {}
if 'ioperators' not in parametrization:
parametrization['ioperators'] = {}
# Workspace preferences
options['preferences'] = {}
for preference in workspace.workspacepreference_set.all():
if not preference.inherit:
options['preferences'][preference.name] = preference.value
# Tabs and their preferences
options['tabs'] = []
options['wiring'] = {
'inputs': [],
'outputs': [],
}
for tab in workspace.tab_set.order_by('position'):
preferences = {}
for preference in tab.tabpreference_set.all():
if not preference.inherit:
preferences[preference.name] = preference.value
resources = []
for iwidget in tab.iwidget_set.select_related('widget__resource').all():
resource_info = process_iwidget(workspace, iwidget, options['wiring'], parametrization['iwidgets'], readOnlyWidgets)
resources.append(resource_info)
if options['embedmacs']:
options['embedded'].add('/'.join((resource_info['vendor'], resource_info['name'], resource_info['version'])))
options['tabs'].append({
'name': tab.name,
'resources': resources,
'preferences': preferences,
})
# wiring conections and operators
readOnlyConnectables = options.get('readOnlyConnectables', False)
wiring_status = workspace.wiringStatus
if len(wiring_status) == 0:
wiring_status = get_wiring_skeleton()
# Set the wiring status' version
if wiring_status.get('version', '1.0') == '1.0':
wiring_status = parse_wiring_old_version(wiring_status)
options['wiring']['version'] = '2.0'
options['wiring']['operators'] = {}
for id_, operator in six.iteritems(wiring_status['operators']):
operator_data = {
'name': operator['name'],
'preferences': {},
}
vendor, name, version = operator['name'].split('/')
resource = CatalogueResource.objects.get(vendor=vendor, short_name=name, version=version)
operator_info = json.loads(resource.json_description)
operator_params = parametrization['ioperators'].get(id_, {})
        for preference in operator_info['preferences']:
status = 'normal'
if preference['name'] in operator_params:
ioperator_param_desc = operator_params[preference['name']]
status = ioperator_param_desc.get('status', 'normal')
source = ioperator_param_desc.get('source', 'current')
if source == 'default':
if status == 'normal':
# Do not issue a Preference element for this preference
continue
value = None
elif source == 'current':
value = get_current_operator_pref_value(operator, preference)
elif source == 'custom':
value = ioperator_param_desc['value']
else:
raise Exception('Invalid preference value source: %s' % source)
else:
value = get_current_operator_pref_value(operator, preference)
operator_data['preferences'][preference['name']] = {
'readonly': status != 'normal',
'hidden': status == 'hidden',
}
if value is not None:
operator_data['preferences'][preference['name']]['value'] = value
options['wiring']['operators'][id_] = operator_data
if options['embedmacs']:
options['embedded'].add(operator['name'])
options['wiring']['connections'] = []
for connection in wiring_status['connections']:
options['wiring']['connections'].append({
'source': connection['source'],
'target': connection['target'],
'readonly': readOnlyConnectables,
})
options['wiring']['visualdescription'] = wiring_status['visualdescription']
embedded = options['embedded']
options['embedded'] = []
for resource in embedded:
(vendor, name, version) = resource.split('/')
options['embedded'].append({
'vendor': vendor,
'name': name,
'version': version,
'src': 'macs/%s_%s_%s.wgt' % (vendor, name, version)
})
del options['embedmacs']
return options
def build_xml_template_from_workspace(options, workspace, user, raw=False):
build_json_template_from_workspace(options, workspace, user)
return xml.write_xml_description(options, raw=raw)
def build_rdf_template_from_workspace(options, workspace, user):
build_json_template_from_workspace(options, workspace, user)
return rdf.build_rdf_graph(options)
| agpl-3.0 | 8,939,400,542,287,359,000 | 37.093093 | 128 | 0.607647 | false |
johnbywater/eventsourcing | dev/prepare-distribution.py | 1 | 3352 | import os
import subprocess
import sys
from subprocess import CalledProcessError
from time import sleep
def main():
# Validate current working dir (should be project root).
proj_path = os.path.abspath(".")
readme_path = os.path.join(proj_path, "README.md")
if os.path.exists(readme_path):
assert "A library for event sourcing in Python" in open(readme_path).read()
else:
raise Exception("Couldn't find project README.md")
try:
del os.environ["PYTHONPATH"]
except KeyError:
pass
# # Build and upload to Test PyPI.
# NB: Don't upload to Test PyPI because there is a dodgy
# Django distribution, and there may be others:
# https://test.pypi.org/project/Django/3.1.10.17/
# Build distribution.
subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=proj_path)
try:
subprocess.check_call(
# [sys.executable, "setup.py", "sdist", "upload", "-r", "pypitest"],
[sys.executable, "setup.py", "sdist"],
cwd=proj_path,
)
except CalledProcessError:
sys.exit(1)
# Construct the path to the built distribution.
version_path = os.path.join(proj_path, "eventsourcing", "__init__.py")
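    # the first line of eventsourcing/__init__.py is expected to look like: __version__ = "x.y.z"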
version = open(version_path).readlines()[0].split("=")[-1].strip().strip('"')
distribution_path = os.path.join(
proj_path, 'dist', f'eventsourcing-{version}.tar.gz'
)
# Define the test targets.
targets = [
(os.path.join(proj_path, "tmpve3.7"), "python3")
]
os.environ["CASS_DRIVER_NO_CYTHON"] = "1"
# Test distribution for various targets.
for (venv_path, base_python_path) in targets:
# Remove existing virtualenv.
if os.path.exists(venv_path):
remove_virtualenv(proj_path, venv_path)
# Create virtualenv.
subprocess.check_call(
["virtualenv", "--python", base_python_path, venv_path], cwd=proj_path
)
pip_path = os.path.join(venv_path, 'bin', 'pip')
python_path = os.path.join(venv_path, 'bin', 'python')
subprocess.check_call(
[pip_path, "install", "-U", "pip", "wheel"], cwd=venv_path
)
# Install from built distribution.
pip_install_cmd = [
pip_path,
"install",
"--no-cache-dir",
distribution_path+"[postgres_dev,crypto]",
]
patience = 10
is_test_pass = False
sleep(1)
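        # retry the install a few times to ride out transient pip/network failures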
while True:
try:
subprocess.check_call(pip_install_cmd, cwd=venv_path)
is_test_pass = True
break
except CalledProcessError:
patience -= 1
if patience < 0:
break
print("Patience:", patience)
sleep(1)
if not is_test_pass:
print("Failed to install.")
sys.exit(1)
# Check installed tests all pass.
subprocess.check_call(
[python_path, "-m" "unittest", "discover",
"eventsourcing.tests"],
cwd=venv_path,
)
remove_virtualenv(proj_path, venv_path)
def remove_virtualenv(proj_path, venv_path):
subprocess.check_call(["rm", "-r", venv_path], cwd=proj_path)
if __name__ == "__main__":
main()
| bsd-3-clause | 3,576,165,509,256,950,000 | 28.928571 | 88 | 0.563842 | false |
reedessick/pointy-Poisson | multiPopVectors2OmegaScan.py | 1 | 6042 | #!/usr/bin/python
usage = "multiPopVectors2OmegaScan.py [--options] vectors.txt"
description = "writes OmegaScan config files based on the multi-population vectors supplied. Assumes KW channel naming conventions. Also writes corresponding comand lines to run OmegaScans for each vector supplied."
author = "[email protected]"
import os
import subprocess as sp
from optparse import OptionParser
#-------------------------------------------------
parser=OptionParser(usage=usage, description=description)
parser.add_option("-v", "--verbose", default=False, action="store_true")
parser.add_option("", "--frame-type", default="H1_R", type="string")
parser.add_option("", "--timeRange", default=64, type="int")
parser.add_option("", "--freq-map", default=None, type="string", help="the output of FrChannels, used to map channel names to sample frequencies")
parser.add_option("", "--gwchan", default="H1:CAL-DELTAL_EXTERNAL_DQ", type="string")
parser.add_option("", "--output-dir", default=".", type="string")
parser.add_option("", "--condor", default=False, action="store_true", help="write a condor_sub file instead of a shell script")
parser.add_option("", "--accounting-group", default="ligo.dev.o2.detchar.explore.test", type="string")
parser.add_option("", "--accounting-group-user", default="reed.essick", type="string")
parser.add_option("", "--request-memory", default=2000000, type="int", help="measured in kB")
opts, args = parser.parse_args()
if len(args)!=1:
raise ValueError("Please supply exactly one input argument\n%s"%(usage))
vectors = args[0]
if opts.freq_map is None:
opts.freq_map = raw_input("--freq-map=")
if not os.path.exists(opts.output_dir):
os.makedirs( opts.output_dir )
ifo = opts.frame_type[0]
#-------------------------------------------------
if opts.verbose:
print "reading in channels from :"+vectors
file_obj = open(vectors, "r")
chans = file_obj.readline().strip().split()[1:] ### skip first column because that is the filename
file_obj.close()
Nchans = len(chans)
if opts.verbose:
print " found %d channels"%(Nchans)
### assume KW channel naming convention
channels = set()
chanmap = {}
for i, chan in enumerate(chans):
chan = chan.split("_")
chan = "%s:%s"%(chan[0], "_".join(chan[1:-2]))
channels.add( chan )
chanmap[i] = chan
if opts.verbose:
print "reading in sample frequencies from :"+opts.freq_map
file_obj = open(opts.freq_map, "r")
freq_map = dict( [l.strip().split() for l in file_obj] )
file_obj.close()
channels = dict( (chan, freq_map[chan]) for chan in channels )
#-------------------------------------------------
if opts.verbose:
print "writing Qscan config files for:"
gwdf_cmd = "gw_data_find -o %s --type %s"%(ifo, opts.frame_type) + " -s %d -e %d -u file"
os_cmd = "/home/omega/opt/omega/bin/wpipeline scan %.9f -r -c %s -o %s -f %s"
header = """# Q Scan configuration file
# Automatically generated with wconfigure.sh
# by user bhughey on 2009-07-09 10:33:18 PDT
# from sample frame files:
# /archive/frames/S6/L1/LHO/H-H1_RDS_R_L1-9311/H-H1_RDS_R_L1-931194752-64.gwf
[Context,Context]
[Parameters,Parameter Estimation]
[Notes,Notes]
[Aux Channels,Identified interesting Aux channels]
"""
template = """{
channelName: '%s'
frameType: '%s'
sampleFrequency: %s
searchTimeRange: %d
searchFrequencyRange: [0 Inf]
searchQRange: [4 64]
searchMaximumEnergyLoss: 0.2
whiteNoiseFalseRate: 1e-3
searchWindowDuration: 0.5
plotTimeRanges: [0.1 1 4 16]
plotFrequencyRange: []
plotNormalizedEnergyRange: [0 25.5]
alwaysPlotFlag: 1
}"""%('%s', opts.frame_type, '%s', opts.timeRange)
if opts.condor:
cmd_file = "%s/run_Qscan.sub"%(opts.output_dir)
cmd_obj = open(cmd_file, "w")
print >> cmd_obj, """universe = vanilla
executable = /home/omega/opt/omega/bin/wpipeline
getenv = True
accounting_group = %s
accounting_group_user = %s
log = %s/Qscan.log
error = %s/Qscan-$(cluster)-$(process).err
output = %s/Qscan-$(cluster)-$(process).out
request_memory = %d KB
notification = never"""%(opts.accounting_group, opts.accounting_group_user, opts.output_dir, opts.output_dir, opts.output_dir, opts.request_memory)
else:
cmd_file = "%s/run_Qscan.sh"%(opts.output_dir)
cmd_obj = open(cmd_file, "w")
file_obj = open(vectors, "r")
file_obj.readline()
for line in file_obj:
line = line.strip().split()
if opts.verbose:
print " "+line[0]
try:
gps = float(line[0])
    except ValueError:
        # the first column was not a bare GPS time; assume it is a path like
        # .../<obs>-<tag>-<gps>/<file> and pull the GPS time out of it
        gps = float(line[0].split("/")[-2].split('-')[-1])
### write config file
participating = set()
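    # channels with a non-zero entry in this vector participate in the scan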
for i, v in enumerate( [float(l) for l in line[1:]] ):
if v > 0:
participating.add( chanmap[i] )
outdir = "%s/%.6f"%(opts.output_dir, gps)
if not os.path.exists(outdir):
os.makedirs(outdir)
conf_file = "%s/Qscan.cnf"%(outdir)
if opts.verbose:
print " "+conf_file
conf_obj = open(conf_file, "w")
print >> conf_obj, header
print >> conf_obj, template%(opts.gwchan, freq_map[opts.gwchan])
for chan in sorted(participating): ### assumes KW naming conventions
print >> conf_obj, template%(chan, channels[chan])
conf_obj.close()
### set up command
this_cmd = gwdf_cmd%(int(gps), int(gps)+1)
if opts.verbose:
print " "+this_cmd
frame = sp.Popen( this_cmd.split(), stdout=sp.PIPE).communicate()[0].split()[0]
directory = os.path.dirname( frame.replace("file://localhost","") )
that_cmd = os_cmd%(gps, conf_file, outdir, directory)
if opts.verbose:
print " "+that_cmd
if opts.condor:
print >> cmd_obj, "arguments = \" %s \"\nqueue 1"%(" ".join(that_cmd.split()[1:]))
else:
print >> cmd_obj, that_cmd
cmd_obj.close()
if opts.verbose:
if opts.condor:
print "now run :\ncondor_submit %s"%(cmd_file)
else:
print "now run :\n%s"%(cmd_file)
| mit | -8,005,188,117,140,197,000 | 31.483871 | 215 | 0.624297 | false |