repo_name (string) | path (string) | copies (string) | size (string) | text (string) | license (string, 15 classes) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool) | ratio (float64) | config_test (bool) | has_no_keywords (bool) | few_assignments (bool)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
nspi/vbcg | src/defines.py | 1 | 1253 | #!/usr/bin/env python
# -*- coding: ascii -*-
"""defines.py - this file contains meta data and program parameters"""
# Meta data
__author__ = "Nicolai Spicher"
__credits__ = ["Nicolai Spicher", "Stefan Maderwald", "Markus Kukuk", "Mark E. Ladd"]
__license__ = "GPL v3"
__version__ = "v0.2-beta"
__maintainer__ = "Nicolai Spicher"
__email__ = "nicolai[dot]spicher[at]fh-dortmund[dot]de"
__status__ = "Beta"
__url__ = "https://github.com/nspi/vbcg"
__description__ = "real-time application for video-based estimation of the heart's activity"
# Indices of program settings
IDX_WEBCAM = 0
IDX_CAMERA = 1
IDX_ALGORITHM = 2
IDX_CURVES = 3
IDX_FRAMES = 4
IDX_FACE = 5
IDX_FPS = 6
IDX_COLORCHANNEL = 7
# Indices of algorithm parameters
IDX_ZERO_PADDING = 0
IDX_WIN_SIZE = 1
IDX_RUN_MAX = 2
IDX_MIN_TIME = 3
# Standard values of program settings
VAL_WEBCAM = 1
VAL_CAMERA = 1
VAL_ALGORITHM = 0
VAL_CURVES = 1
VAL_FRAMES = 0
VAL_FACE = 0
VAL_FPS = 25
VAL_COLORCHANNEL = 1
# Standard values of algorithm parameters
VAL_ZERO_PADDING = 1
VAL_WIN_SIZE = 9
VAL_RUN_MAX = 3
VAL_MIN_TIME = 0.5
# Labels of algorithms in GUI
LABEL_ALGORITHM_1 = "Estimate HR (BMT 2015)"
LABEL_ALGORITHM_2 = "Filter signal (ISMRM 2016)"
LABEL_ALGORITHM_3 = "Trigger MRI (ISMRM 2015)"
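# Illustrative note (added for clarity; the settings container below is an
# assumption, not part of the original module): each IDX_* constant indexes a
# settings sequence whose default entries are the matching VAL_* values, e.g.
#   program_settings = [VAL_WEBCAM, VAL_CAMERA, VAL_ALGORITHM, VAL_CURVES,
#                       VAL_FRAMES, VAL_FACE, VAL_FPS, VAL_COLORCHANNEL]
#   program_settings[IDX_FPS]  # -> 25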
| gpl-3.0 | -914,130,250,429,905,500 | 23.568627 | 91 | 0.691141 | false | 2.562372 | false | false | false |
SUSE/azure-sdk-for-python | azure-mgmt-sql/azure/mgmt/sql/models/elastic_pool_per_database_min_dtu_capability.py | 2 | 1345 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ElasticPoolPerDatabaseMinDtuCapability(Model):
"""The minimum per-database DTU capability.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar limit: The maximum DTUs per database.
:vartype limit: long
:ivar status: The status of the capability. Possible values include:
'Visible', 'Available', 'Default', 'Disabled'
:vartype status: str or :class:`CapabilityStatus
<azure.mgmt.sql.models.CapabilityStatus>`
"""
_validation = {
'limit': {'readonly': True},
'status': {'readonly': True},
}
_attribute_map = {
'limit': {'key': 'limit', 'type': 'long'},
'status': {'key': 'status', 'type': 'CapabilityStatus'},
}
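    # The _attribute_map above ties each Python attribute to its JSON key and
    # serialization type for msrest, while _validation marks 'limit' and
    # 'status' as read-only, so they are filled in from service responses
    # rather than set by callers.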
def __init__(self):
self.limit = None
self.status = None
| mit | -770,284,012,031,825,800 | 31.804878 | 76 | 0.589591 | false | 4.559322 | false | false | false |
googleads/google-ads-python | examples/basic_operations/add_campaigns.py | 1 | 4715 | #!/usr/bin/env python
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example illustrates how to add a campaign.
To get campaigns, run get_campaigns.py.
"""
import argparse
import datetime
import sys
import uuid
from google.ads.googleads.client import GoogleAdsClient
from google.ads.googleads.errors import GoogleAdsException
_DATE_FORMAT = "%Y%m%d"
def main(client, customer_id):
campaign_budget_service = client.get_service("CampaignBudgetService")
campaign_service = client.get_service("CampaignService")
# [START add_campaigns]
# Create a budget, which can be shared by multiple campaigns.
campaign_budget_operation = client.get_type("CampaignBudgetOperation")
campaign_budget = campaign_budget_operation.create
campaign_budget.name = f"Interplanetary Budget {uuid.uuid4()}"
campaign_budget.delivery_method = (
client.enums.BudgetDeliveryMethodEnum.STANDARD
)
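    # Budget amounts are given in micros (millionths of the account currency),
    # so 500,000 micros below corresponds to 0.50 in the account's currency.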
campaign_budget.amount_micros = 500000
# Add budget.
try:
campaign_budget_response = (
campaign_budget_service.mutate_campaign_budgets(
customer_id=customer_id, operations=[campaign_budget_operation]
)
)
except GoogleAdsException as ex:
_handle_googleads_exception(ex)
# [END add_campaigns]
# [START add_campaigns_1]
# Create campaign.
campaign_operation = client.get_type("CampaignOperation")
campaign = campaign_operation.create
campaign.name = f"Interplanetary Cruise {uuid.uuid4()}"
campaign.advertising_channel_type = (
client.enums.AdvertisingChannelTypeEnum.SEARCH
)
# Recommendation: Set the campaign to PAUSED when creating it to prevent
# the ads from immediately serving. Set to ENABLED once you've added
# targeting and the ads are ready to serve.
campaign.status = client.enums.CampaignStatusEnum.PAUSED
# Set the bidding strategy and budget.
campaign.manual_cpc.enhanced_cpc_enabled = True
campaign.campaign_budget = campaign_budget_response.results[0].resource_name
# Set the campaign network options.
campaign.network_settings.target_google_search = True
campaign.network_settings.target_search_network = True
campaign.network_settings.target_content_network = False
campaign.network_settings.target_partner_search_network = False
# [END add_campaigns_1]
# Optional: Set the start date.
start_time = datetime.date.today() + datetime.timedelta(days=1)
campaign.start_date = datetime.date.strftime(start_time, _DATE_FORMAT)
# Optional: Set the end date.
end_time = start_time + datetime.timedelta(weeks=4)
campaign.end_date = datetime.date.strftime(end_time, _DATE_FORMAT)
# Add the campaign.
try:
campaign_response = campaign_service.mutate_campaigns(
customer_id=customer_id, operations=[campaign_operation]
)
print(f"Created campaign {campaign_response.results[0].resource_name}.")
except GoogleAdsException as ex:
_handle_googleads_exception(ex)
def _handle_googleads_exception(exception):
print(
f'Request with ID "{exception.request_id}" failed with status '
f'"{exception.error.code().name}" and includes the following errors:'
)
for error in exception.failure.errors:
print(f'\tError with message "{error.message}".')
if error.location:
for field_path_element in error.location.field_path_elements:
print(f"\t\tOn field: {field_path_element.field_name}")
sys.exit(1)
if __name__ == "__main__":
# GoogleAdsClient will read the google-ads.yaml configuration file in the
# home directory if none is specified.
googleads_client = GoogleAdsClient.load_from_storage(version="v8")
parser = argparse.ArgumentParser(
description="Adds a campaign for specified customer."
)
# The following argument(s) should be provided to run the example.
parser.add_argument(
"-c",
"--customer_id",
type=str,
required=True,
help="The Google Ads customer ID.",
)
args = parser.parse_args()
main(googleads_client, args.customer_id)
| apache-2.0 | -6,714,371,405,759,470,000 | 34.719697 | 80 | 0.700742 | false | 3.861589 | false | false | false |
chiara-paci/santaclara-third | setup.py | 1 | 1462 | import os
from distutils.core import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='santaclara_third',
version='0.11.1',
packages=['santaclara_third'],
package_data={'santaclara_third': [
'static/css/*.css',
'static/css/images/*',
'static/fonts/*',
'static/js/ace/snippets/*.js',
'static/js/ace/*.js',
'static/js/*.js',
]},
include_package_data=True,
license='GNU General Public License v3 or later (GPLv3+)', # example license
description='A Django app for third part software',
long_description=README,
url='http://www.gianoziaorientale.org/software/',
author='Gianozia Orientale',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| bsd-3-clause | 3,694,934,636,837,836,000 | 35.55 | 103 | 0.604651 | false | 3.777778 | false | false | false |
Lektorium-LLC/edx-ora2 | openassessment/xblock/validation.py | 1 | 17104 | """
Validate changes to an XBlock before it is updated.
"""
from collections import Counter
from submissions.api import MAX_TOP_SUBMISSIONS
from openassessment.assessment.serializers import rubric_from_dict, InvalidRubric
from openassessment.assessment.api.student_training import validate_training_examples
from openassessment.xblock.resolve_dates import resolve_dates, DateValidationError, InvalidDateFormat
from openassessment.xblock.data_conversion import convert_training_examples_list_to_dict
def _match_by_order(items, others):
"""
Given two lists of dictionaries, each containing "order_num" keys,
    return a list of tuples, where the items in each tuple are dictionaries
with the same "order_num" keys.
Args:
items (list of dict): Items to match, each of which must contain a "order_num" key.
others (list of dict): Items to match, each of which must contain a "order_num" key.
Returns:
list of tuples, each containing two dictionaries
Raises:
        KeyError: A dictionary does not contain an 'order_num' key.
"""
    # Sort each list by its 'order_num' key, then zip the results and return
key_func = lambda x: x['order_num']
return zip(sorted(items, key=key_func), sorted(others, key=key_func))
def _duplicates(items):
"""
Given an iterable of items, return a set of duplicate items in the list.
Args:
items (list): The list of items, which may contain duplicates.
Returns:
set: The set of duplicate items in the list.
"""
counts = Counter(items)
return set(x for x in items if counts[x] > 1)
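# For example (illustrative, not from the original module):
#   _duplicates(['a', 'b', 'a', 'c'])  # -> set(['a'])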
def _is_valid_assessment_sequence(assessments):
"""
Check whether the sequence of assessments is valid. The rules enforced are:
-must have one of staff-, peer-, self-, or example-based-assessment listed
-in addition to those, only student-training is a valid entry
-no duplicate entries
-if staff-assessment is present, it must come last
-if example-based-assessment is present, it must come first
-if student-training is present, it must be followed at some point by peer-assessment
Args:
assessments (list of dict): List of assessment dictionaries.
Returns:
bool
"""
sequence = [asmnt.get('name') for asmnt in assessments]
required = ['example-based-assessment', 'staff-assessment', 'peer-assessment', 'self-assessment']
optional = ['student-training']
# at least one of required?
if not any(name in required for name in sequence):
return False
# nothing except what appears in required or optional
if any(name not in required + optional for name in sequence):
return False
# no duplicates
if any(sequence.count(name) > 1 for name in sequence):
return False
# if using staff-assessment, it must come last
if 'staff-assessment' in sequence and 'staff-assessment' != sequence[-1]:
return False
# if using example-based, it must be first
if 'example-based-assessment' in sequence and 'example-based-assessment' != sequence[0]:
return False
# if using training, must be followed by peer at some point
if 'student-training' in sequence:
train_index = sequence.index('student-training')
if 'peer-assessment' not in sequence[train_index:]:
return False
return True
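# Illustrative sequences for the rules above (added for clarity, not taken from
# the test suite):
#   ['student-training', 'peer-assessment', 'self-assessment']  -> valid
#   ['peer-assessment', 'staff-assessment']                     -> valid (staff last)
#   ['student-training', 'self-assessment']                     -> invalid (training not followed by peer)
#   ['self-assessment', 'example-based-assessment']             -> invalid (example-based must come first)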
def validate_assessments(assessments, current_assessments, is_released, _):
"""
Check that the assessment dict is semantically valid. See _is_valid_assessment_sequence()
above for a description of valid assessment sequences. In addition, enforces validation
of several assessment-specific settings.
If a question has been released, the type and number of assessment steps
cannot be changed.
Args:
assessments (list of dict): list of serialized assessment models.
current_assessments (list of dict): list of the current serialized
assessment models. Used to determine if the assessment configuration
has changed since the question had been released.
is_released (boolean) : True if the question has been released.
_ (function): The service function used to get the appropriate i18n text
Returns:
tuple (is_valid, msg) where
is_valid is a boolean indicating whether the assessment is semantically valid
and msg describes any validation errors found.
"""
if len(assessments) == 0:
return False, _("This problem must include at least one assessment.")
# Ensure that we support this sequence of assessments.
if not _is_valid_assessment_sequence(assessments):
msg = _("The assessment order you selected is invalid.")
return False, msg
for assessment_dict in assessments:
# Number you need to grade is >= the number of people that need to grade you
if assessment_dict.get('name') == 'peer-assessment':
must_grade = assessment_dict.get('must_grade')
must_be_graded_by = assessment_dict.get('must_be_graded_by')
if must_grade is None or must_grade < 1:
return False, _('In peer assessment, the "Must Grade" value must be a positive integer.')
if must_be_graded_by is None or must_be_graded_by < 1:
return False, _('In peer assessment, the "Graded By" value must be a positive integer.')
if must_grade < must_be_graded_by:
return False, _(
'In peer assessment, the "Must Grade" value must be greater than or equal to the "Graded By" value.'
)
# Student Training must have at least one example, and all
# examples must have unique answers.
if assessment_dict.get('name') == 'student-training':
answers = []
examples = assessment_dict.get('examples')
if not examples:
return False, _('You must provide at least one example response for learner training.')
for example in examples:
if example.get('answer') in answers:
return False, _('Each example response for learner training must be unique.')
answers.append(example.get('answer'))
# Example-based assessment MUST specify 'ease' or 'fake' as the algorithm ID,
# at least for now. Later, we may make this more flexible.
if assessment_dict.get('name') == 'example-based-assessment':
if assessment_dict.get('algorithm_id') not in ['ease', 'fake']:
return False, _('The "algorithm_id" value must be set to "ease" or "fake"')
# Staff grading must be required if it is the only step
if assessment_dict.get('name') == 'staff-assessment' and len(assessments) == 1:
required = assessment_dict.get('required')
if not required: # Captures both None and explicit False cases, both are invalid
return False, _('The "required" value must be true if staff assessment is the only step.')
if is_released:
if len(assessments) != len(current_assessments):
return False, _("The number of assessments cannot be changed after the problem has been released.")
names = [assessment.get('name') for assessment in assessments]
current_names = [assessment.get('name') for assessment in current_assessments]
if names != current_names:
return False, _("The assessment type cannot be changed after the problem has been released.")
return True, u''
def validate_rubric(rubric_dict, current_rubric, is_released, is_example_based, _):
"""
Check that the rubric is semantically valid.
Args:
rubric_dict (dict): Serialized Rubric model representing the updated state of the rubric.
current_rubric (dict): Serialized Rubric model representing the current state of the rubric.
is_released (bool): True if and only if the problem has been released.
is_example_based (bool): True if and only if this is an example-based assessment.
_ (function): The service function used to get the appropriate i18n text
Returns:
tuple (is_valid, msg) where
is_valid is a boolean indicating whether the assessment is semantically valid
and msg describes any validation errors found.
"""
try:
rubric_from_dict(rubric_dict)
except InvalidRubric:
return False, _(u'This rubric definition is not valid.')
for criterion in rubric_dict['criteria']:
# No duplicate option names within a criterion
duplicates = _duplicates([option['name'] for option in criterion['options']])
if len(duplicates) > 0:
msg = _(u"Options in '{criterion}' have duplicate name(s): {duplicates}").format(
criterion=criterion['name'], duplicates=", ".join(duplicates)
)
return False, msg
# Some criteria may have no options, just written feedback.
# In this case, written feedback must be required (not optional or disabled).
if len(criterion['options']) == 0 and criterion.get('feedback', 'disabled') != 'required':
msg = _(u'Criteria with no options must require written feedback.')
return False, msg
# Example-based assessments impose the additional restriction
# that the point values for options must be unique within
# a particular rubric criterion.
if is_example_based:
duplicates = _duplicates([option['points'] for option in criterion['options']])
if len(duplicates) > 0:
msg = _(u"Example-based assessments cannot have duplicate point values.")
return False, msg
# After a problem is released, authors are allowed to change text,
# but nothing that would change the point value of a rubric.
if is_released:
# Number of prompts must be the same
if len(rubric_dict['prompts']) != len(current_rubric['prompts']):
return False, _(u'Prompts cannot be created or deleted after a problem is released.')
# Number of criteria must be the same
if len(rubric_dict['criteria']) != len(current_rubric['criteria']):
return False, _(u'The number of criteria cannot be changed after a problem is released.')
# Criteria names must be the same
# We use criteria names as unique identifiers (unfortunately)
# throughout the system. Changing them mid-flight can cause
# the grade page, for example, to raise 500 errors.
# When we implement non-XML authoring, we might be able to fix this
# the right way by assigning unique identifiers for criteria;
# but for now, this is the safest way to avoid breaking problems
# post-release.
current_criterion_names = set(criterion.get('name') for criterion in current_rubric['criteria'])
new_criterion_names = set(criterion.get('name') for criterion in rubric_dict['criteria'])
if current_criterion_names != new_criterion_names:
return False, _(u'Criteria names cannot be changed after a problem is released')
# Number of options for each criterion must be the same
for new_criterion, old_criterion in _match_by_order(rubric_dict['criteria'], current_rubric['criteria']):
if len(new_criterion['options']) != len(old_criterion['options']):
return False, _(u'The number of options cannot be changed after a problem is released.')
else:
for new_option, old_option in _match_by_order(new_criterion['options'], old_criterion['options']):
if new_option['points'] != old_option['points']:
return False, _(u'Point values cannot be changed after a problem is released.')
return True, u''
def validate_dates(start, end, date_ranges, _):
"""
Check that start and due dates are valid.
Args:
start (str): ISO-formatted date string indicating when the problem opens.
end (str): ISO-formatted date string indicating when the problem closes.
date_ranges (list of tuples): List of (start, end) pair for each submission / assessment.
_ (function): The service function used to get the appropriate i18n text
Returns:
tuple (is_valid, msg) where
is_valid is a boolean indicating whether the assessment is semantically valid
and msg describes any validation errors found.
"""
try:
resolve_dates(start, end, date_ranges, _)
except (DateValidationError, InvalidDateFormat) as ex:
return False, unicode(ex)
else:
return True, u''
def validate_assessment_examples(rubric_dict, assessments, _):
"""
Validate assessment training examples.
Args:
rubric_dict (dict): The serialized rubric model.
assessments (list of dict): List of assessment dictionaries.
_ (function): The service function used to get the appropriate i18n text
Returns:
tuple (is_valid, msg) where
is_valid is a boolean indicating whether the assessment is semantically valid
and msg describes any validation errors found.
"""
for asmnt in assessments:
if asmnt['name'] == 'student-training' or asmnt['name'] == 'example-based-assessment':
examples = convert_training_examples_list_to_dict(asmnt['examples'])
# Must have at least one training example
if len(examples) == 0:
return False, _(
u"Learner training and example-based assessments must have at least one training example."
)
# Delegate to the student training API to validate the
# examples against the rubric.
errors = validate_training_examples(rubric_dict, examples)
if errors:
return False, "; ".join(errors)
return True, u''
def validator(oa_block, _, strict_post_release=True):
"""
Return a validator function configured for the XBlock.
This will validate assessments, rubrics, and dates.
Args:
oa_block (OpenAssessmentBlock): The XBlock being updated.
_ (function): The service function used to get the appropriate i18n text
Keyword Arguments:
strict_post_release (bool): If true, restrict what authors can update once
a problem has been released.
Returns:
callable, of a form that can be passed to `update_from_xml`.
"""
def _inner(rubric_dict, assessments, leaderboard_show=0, submission_start=None, submission_due=None):
is_released = strict_post_release and oa_block.is_released()
# Assessments
current_assessments = oa_block.rubric_assessments
success, msg = validate_assessments(assessments, current_assessments, is_released, _)
if not success:
return False, msg
# Rubric
is_example_based = 'example-based-assessment' in [asmnt.get('name') for asmnt in assessments]
current_rubric = {
'prompts': oa_block.prompts,
'criteria': oa_block.rubric_criteria
}
success, msg = validate_rubric(rubric_dict, current_rubric, is_released, is_example_based, _)
if not success:
return False, msg
# Training examples
success, msg = validate_assessment_examples(rubric_dict, assessments, _)
if not success:
return False, msg
# Dates
submission_dates = [(submission_start, submission_due)]
assessment_dates = [(asmnt.get('start'), asmnt.get('due')) for asmnt in assessments]
success, msg = validate_dates(oa_block.start, oa_block.due, submission_dates + assessment_dates, _)
if not success:
return False, msg
# Leaderboard
if leaderboard_show < 0 or leaderboard_show > MAX_TOP_SUBMISSIONS:
return False, _("Leaderboard number is invalid.")
# Success!
return True, u''
return _inner
def validate_submission(submission, prompts, _, text_response='required'):
"""
Validate submission dict.
Args:
submission (list of unicode): Responses for the prompts.
prompts (list of dict): The prompts from the problem definition.
_ (function): The service function used to get the appropriate i18n text.
Returns:
tuple (is_valid, msg) where
is_valid is a boolean indicating whether the submission is semantically valid
and msg describes any validation errors found.
"""
message = _(u"The submission format is invalid.")
if type(submission) != list:
return False, message
if text_response == 'required' and len(submission) != len(prompts):
return False, message
for submission_part in submission:
if type(submission_part) != unicode:
return False, message
return True, u''
| agpl-3.0 | 1,735,284,554,581,837,600 | 40.921569 | 120 | 0.651485 | false | 4.441444 | false | false | false |
luisera/hmtk | hmtk/sources/area_source.py | 1 | 10138 | #!/usr/bin/env/python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# LICENSE
#
# Copyright (c) 2010-2013, GEM Foundation, G. Weatherill, M. Pagani,
# D. Monelli.
#
# The Hazard Modeller's Toolkit is free software: you can redistribute
# it and/or modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>
#
# DISCLAIMER
#
# The software Hazard Modeller's Toolkit (hmtk) provided herein
# is released as a prototype implementation on behalf of
# scientists and engineers working within the GEM Foundation (Global
# Earthquake Model).
#
# It is distributed for the purpose of open collaboration and in the
# hope that it will be useful to the scientific, engineering, disaster
# risk and software design communities.
#
# The software is NOT distributed as part of GEM's OpenQuake suite
# (http://www.globalquakemodel.org/openquake) and must be considered as a
# separate entity. The software provided herein is designed and implemented
# by scientific staff. It is not developed to the design standards, nor
# subject to same level of critical review by professional software
# developers, as GEM's OpenQuake software suite.
#
# Feedback and contribution to the software is welcome, and can be
# directed to the hazard scientific staff of the GEM Model Facility
# ([email protected]).
#
# The Hazard Modeller's Toolkit (hmtk) is therefore distributed WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# The GEM Foundation, and the authors of the software, assume no
# liability for use of the software.
# -*- coding: utf-8 -*-
'''
Defines the :class hmtk.sources.mtk_area_source.mtkAreaSource which represents
the hmtk defition of an area source. This extends the :class:
nrml.models.AreaSource
'''
import warnings
import numpy as np
from openquake.nrmllib import models
from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.polygon import Polygon
from openquake.hazardlib.source.area import AreaSource
import hmtk.sources.source_conversion_utils as conv
class mtkAreaSource(object):
'''
Describes the Area Source
:param str identifier:
ID code for the source
:param str name:
Source name
:param str trt:
Tectonic region type
:param geometry:
Instance of :class: nhlib.geo.polygon.Polygon class
:param float upper_depth:
Upper seismogenic depth (km)
:param float lower_depth:
Lower seismogenic depth (km)
:param str mag_scale_rel:
Magnitude scaling relationsip
:param float rupt_aspect_ratio:
Rupture aspect ratio
:param mfd:
Magnitude frequency distribution as instance of
:class: nrml.models.IncrementalMFD or
:class: nrml.models.TGRMFD
:param list nodal_plane_dist:
List of :class: nrml.models.NodalPlane objects representing
nodal plane distribution
:param list hypo_depth_dist:
List of :class: nrml.models.HypocentralDepth instances describing
the hypocentral depth distribution
:param catalogue:
Earthquake catalogue associated to source as instance of
hmtk.seismicity.catalogue.Catalogue object
'''
def __init__(self, identifier, name, trt=None, geometry=None,
upper_depth=None, lower_depth=None, mag_scale_rel=None,
rupt_aspect_ratio=None, mfd=None, nodal_plane_dist=None,
hypo_depth_dist=None):
'''
Instantiates class with two essential attributes: identifier and name
'''
self.typology = 'Area'
self.id = identifier
self.name = name
self.trt = trt
self.geometry = geometry
self.upper_depth = upper_depth
self.lower_depth = lower_depth
self.mag_scale_rel = mag_scale_rel
self.rupt_aspect_ratio = rupt_aspect_ratio
self.mfd = mfd
self.nodal_plane_dist = nodal_plane_dist
self.hypo_depth_dist = hypo_depth_dist
# Check consistency of hypocentral depth inputs
self._check_seismogenic_depths(upper_depth, lower_depth)
self.catalogue = None
def create_geometry(self, input_geometry, upper_depth, lower_depth):
'''
        If the geometry is defined as a numpy array, create an instance of the
        nhlib.geo.polygon.Polygon class; if it is already an instance of that
        class, accept it as-is.
:param input_geometry:
Input geometry (polygon) as either
i) instance of nhlib.geo.polygon.Polygon class
ii) numpy.ndarray [Longitude, Latitude]
:param float upper_depth:
Upper seismogenic depth (km)
:param float lower_depth:
Lower seismogenic depth (km)
'''
self._check_seismogenic_depths(upper_depth, lower_depth)
# Check/create the geometry class
if not isinstance(input_geometry, Polygon):
if not isinstance(input_geometry, np.ndarray):
raise ValueError('Unrecognised or unsupported geometry '
'definition')
if np.shape(input_geometry)[0] < 3:
raise ValueError('Incorrectly formatted polygon geometry -'
' needs three or more vertices')
geometry = []
for row in input_geometry:
geometry.append(Point(row[0], row[1], self.upper_depth))
self.geometry = Polygon(geometry)
else:
self.geometry = input_geometry
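    # Illustrative usage (added for clarity; the variable names are assumptions,
    # not from the original module):
    #   source = mtkAreaSource('001', 'Example zone')
    #   source.create_geometry(np.array([[10.0, 45.0], [10.5, 45.0], [10.5, 45.5]]),
    #                          upper_depth=0., lower_depth=20.)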
def _check_seismogenic_depths(self, upper_depth, lower_depth):
'''
Checks the seismic depths for physical consistency
:param float upper_depth:
Upper seismogenic depth (km)
:param float lower_depth:
            Lower seismogenic depth (km)
'''
# Simple check on depths
if upper_depth:
if upper_depth < 0.:
raise ValueError('Upper seismogenic depth must be greater than'
' or equal to 0.0!')
else:
self.upper_depth = upper_depth
else:
self.upper_depth = 0.0
if lower_depth:
if lower_depth < self.upper_depth:
raise ValueError('Lower seismogenic depth must take a greater'
' value than upper seismogenic depth')
else:
self.lower_depth = lower_depth
else:
self.lower_depth = np.inf
def select_catalogue(self, selector, distance=None):
'''
Selects the catalogue of earthquakes attributable to the source
:param selector:
Populated instance of hmtk.seismicity.selector.CatalogueSelector
class
:param float distance:
Distance (in km) to extend or contract (if negative) the zone for
selecting events
'''
if selector.catalogue.get_number_events() < 1:
raise ValueError('No events found in catalogue!')
self.catalogue = selector.within_polygon(self.geometry,
distance,
upper_depth=self.upper_depth,
lower_depth=self.lower_depth)
if self.catalogue.get_number_events() < 5:
# Throw a warning regarding the small number of earthquakes in
# the source!
warnings.warn('Source %s (%s) has fewer than 5 events'
% (self.id, self.name))
def create_oqnrml_source(self, use_defaults=False):
'''
Converts the source model into an instance of the :class:
openquake.nrmllib.models.AreaSource
:param bool use_defaults:
If set to true, will use put in default values for magitude
scaling relation, rupture aspect ratio, nodal plane distribution
or hypocentral depth distribution where missing. If set to False
then value errors will be raised when information is missing.
'''
area_geometry = models.AreaGeometry(self.geometry.wkt,
self.upper_depth,
self.lower_depth)
return models.AreaSource(
self.id,
self.name,
self.trt,
area_geometry,
conv.render_mag_scale_rel(self.mag_scale_rel, use_defaults),
conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults),
conv.render_mfd(self.mfd),
conv.render_npd(self.nodal_plane_dist, use_defaults),
conv.render_hdd(self.hypo_depth_dist, use_defaults))
def create_oqhazardlib_source(self, tom, mesh_spacing, area_discretisation,
use_defaults=False):
"""
Converts the source model into an instance of the :class:
openquake.hazardlib.source.area.AreaSource
:param tom:
Temporal Occurrence model as instance of :class:
openquake.hazardlib.tom.TOM
:param float mesh_spacing:
Mesh spacing
"""
return AreaSource(
self.id,
self.name,
self.trt,
conv.mfd_to_hazardlib(self.mfd),
mesh_spacing,
conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults),
conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults),
tom,
self.upper_depth,
self.lower_depth,
conv.npd_to_pmf(self.nodal_plane_dist, use_defaults),
conv.hdd_to_pmf(self.hypo_depth_dist, use_defaults),
self.geometry,
area_discretisation)
| agpl-3.0 | -2,997,554,901,713,249,300 | 37.992308 | 79 | 0.626455 | false | 4.0552 | false | false | false |
chuckinator0/Projects | scripts/houseRobber2.py | 1 | 3769 | '''
You are a professional robber planning to rob houses along a street. Each house has a certain amount of money stashed. All houses at this place are arranged in a circle. That means the first house is the neighbor of the last one. Meanwhile, adjacent houses have security system connected and it will automatically contact the police if two adjacent houses were broken into on the same night.
Given a list of non-negative integers representing the amount of money of each house, determine the maximum amount of money you can rob tonight without alerting the police.
Example 1:
Input: [2,3,2]
Output: 3
Explanation: You cannot rob house 1 (money = 2) and then rob house 3 (money = 2),
because they are adjacent houses.
Example 2:
Input: [1,2,3,1]
Output: 4
Explanation: Rob house 1 (money = 1) and then rob house 3 (money = 3).
Total amount you can rob = 1 + 3 = 4.
'''
# We build on the solution to the original house robber case:
def rob(houses):
'''
This function outputs the maximum amount of money we can get from robbing a list
of houses whose values are the money. To do this, we will find the max amount of money
to rob sublists houses[0:1], houses[0:2], ..., houses[0:k+1],..., houses[0:len(houses)]
Let f(k) be the maximum money for robbing houses[0:k+1]. In other words,
    f(k) := rob(houses[0:k+1]). Notice that we have this relationship:
f(k) == max( houses[k] + f(k-2), f(k-1) )
This relationship holds because the maximum money for robbing
houses 0 through k is either the maximum money for robbing houses
0 through k-2 plus robbing house k (remember, the houses can't be adjacent),
or, if house k isn't that much money and house k-1 is, we might get the maximum money from robbing
houses 0 through k-1 (which puts house k off limits due to the no-adjacency rule).
Notice that to compute f(k), we only need the values f(k-2) and f(k-1), much like fibonacci.
So our memo will consist of two variables that keep track of these values and update as k goes from
0 to len(houses)-1.
'''
# Let's handle the case where the house list is empty
if not houses:
return 0
# Let's handle the cases where the houses list is only one house
if len(houses) == 1:
return houses[0]
# Let's handle the cases where the houses list is only two houses
if len(houses) == 2:
return max(houses[0],houses[1])
# initialize f(k-2) to f(0), where our sublist of houses is just the value of the first house.
fk_minus2 = houses[0]
    # initialize f(k-1) to f(1), which is the max money for houses[0:2], the first two houses.
# We just take the max of these two house values.
fk_minus1 = max(houses[0], houses[1])
# now we march through the list:houses from position 2 onward, updating f(k-2), f(k-1)
# along the way
for house in houses[2:]:
# The max value we can get robbing houses up to and including this current house is
# either this house plus the max value up to 2 houses ago, or the max value up to the last house
fk = max( house + fk_minus2, fk_minus1)
# increment k
fk_minus2, fk_minus1 = fk_minus1, fk
# At this point, k has reached the end of the list, and so fk represents the maximum money from robbing
# the entire list of houses, which is the return value of our rob function.
return fk
# Now we make a new function
def rob_two(houses):
# since the first and last houses are adjacent,
# we look at how much we can get from robbing all the houses but the first
# and then compare that to what we can get from robbing all the houses but the last.
# This works because we can't rob the first and last house, since they are adjacent.
return max(rob(houses[1:]), rob(houses[:-1]))
l1 = [2,3,2]
l2 = [1,2,3,1]
l3 = [5,2,1,4]
l4 = [5,1,1,3,2,6]
print(rob(l4))
print(rob_two(l4))
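# With l4 = [5, 1, 1, 3, 2, 6], the calls above should print 14 (houses worth
# 5, 3 and 6) and 10 (houses worth 1, 3 and 6, once the circular rule forbids
# robbing both the first and last house), assuming the implementation above.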
| gpl-3.0 | -690,878,699,913,978,600 | 40.417582 | 391 | 0.714248 | false | 2.974743 | false | false | false |
SolusOS-discontinued/pisi | pisi/package.py | 1 | 9936 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2010, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
"""package abstraction methods to add/remove files, extract control files"""
import os.path
import gettext
__trans = gettext.translation('pisi', fallback=True)
_ = __trans.ugettext
import pisi
import pisi.context as ctx
import pisi.archive as archive
import pisi.uri
import pisi.metadata
import pisi.file
import pisi.files
import pisi.util as util
import fetcher
class Error(pisi.Error):
pass
class Package:
"""PiSi Package Class provides access to a pisi package (.pisi
file)."""
formats = ("1.0", "1.1", "1.2")
default_format = "1.2"
@staticmethod
def archive_name_and_format(package_format):
if package_format == "1.2":
archive_format = "tarxz"
archive_suffix = ctx.const.xz_suffix
elif package_format == "1.1":
archive_format = "tarlzma"
archive_suffix = ctx.const.lzma_suffix
else:
# "1.0" format does not have an archive
return (None, None)
archive_name = ctx.const.install_tar + archive_suffix
return archive_name, archive_format
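    # For illustration (assuming the usual PiSi constants, e.g. install_tar ==
    # "install.tar", xz_suffix == ".xz", lzma_suffix == ".lzma"):
    #   "1.2" -> ("install.tar.xz", "tarxz")
    #   "1.1" -> ("install.tar.lzma", "tarlzma")
    #   "1.0" -> (None, None), i.e. no inner archive; files live directly in the zip.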
def __init__(self, packagefn, mode='r', format=None, tmp_dir=None):
self.filepath = packagefn
url = pisi.uri.URI(packagefn)
if url.is_remote_file():
self.fetch_remote_file(url)
try:
self.impl = archive.ArchiveZip(self.filepath, 'zip', mode)
except IOError, e:
raise Error(_("Cannot open package file: %s") % e)
self.install_archive = None
if mode == "r":
self.metadata = self.get_metadata()
format = self.metadata.package.packageFormat
# Many of the old packages do not contain format information
# because of a bug in old Pisi versions. This is a workaround
# to guess their package format.
if format is None:
archive_name = ctx.const.install_tar + ctx.const.lzma_suffix
if self.impl.has_file(archive_name):
format = "1.1"
else:
format = "1.0"
self.format = format or Package.default_format
if self.format not in Package.formats:
raise Error(_("Unsupported package format: %s") % format)
self.tmp_dir = tmp_dir or ctx.config.tmp_dir()
def fetch_remote_file(self, url):
dest = ctx.config.cached_packages_dir()
self.filepath = os.path.join(dest, url.filename())
if not os.path.exists(self.filepath):
try:
pisi.file.File.download(url, dest)
except pisi.fetcher.FetchError:
# Bug 3465
if ctx.get_option('reinstall'):
raise Error(_("There was a problem while fetching '%s'.\nThe package "
"may have been upgraded. Please try to upgrade the package.") % url);
raise
else:
ctx.ui.info(_('%s [cached]') % url.filename())
def add_to_package(self, fn, an=None):
"""Add a file or directory to package"""
self.impl.add_to_archive(fn, an)
def add_to_install(self, name, arcname=None):
"""Add the file 'name' to the install archive"""
if arcname is None:
arcname = name
if self.format == "1.0":
arcname = util.join_path("install", arcname)
self.add_to_package(name, arcname)
return
if self.install_archive is None:
archive_name, archive_format = \
self.archive_name_and_format(self.format)
self.install_archive_path = util.join_path(self.tmp_dir,
archive_name)
ctx.build_leftover = self.install_archive_path
self.install_archive = archive.ArchiveTar(
self.install_archive_path,
archive_format)
self.install_archive.add_to_archive(name, arcname)
def add_metadata_xml(self, path):
self.metadata = pisi.metadata.MetaData()
self.metadata.read(path)
self.add_to_package(path, ctx.const.metadata_xml)
def add_files_xml(self, path):
self.files = pisi.files.Files()
self.files.read(path)
self.add_to_package(path, ctx.const.files_xml)
def close(self):
"""Close the package archive"""
if self.install_archive:
self.install_archive.close()
arcpath = self.install_archive_path
arcname = os.path.basename(arcpath)
self.add_to_package(arcpath, arcname)
self.impl.close()
if self.install_archive:
os.unlink(self.install_archive_path)
ctx.build_leftover = None
def get_install_archive(self):
archive_name, archive_format = \
self.archive_name_and_format(self.format)
if archive_name is None or not self.impl.has_file(archive_name):
return
archive_file = self.impl.open(archive_name)
tar = archive.ArchiveTar(fileobj=archive_file,
arch_type=archive_format,
no_same_permissions=False,
no_same_owner=False)
return tar
def extract(self, outdir):
"""Extract entire package contents to directory"""
self.extract_dir('', outdir) # means package root
def extract_files(self, paths, outdir):
"""Extract paths to outdir"""
self.impl.unpack_files(paths, outdir)
def extract_file(self, path, outdir):
"""Extract file with path to outdir"""
self.extract_files([path], outdir)
def extract_file_synced(self, path, outdir):
"""Extract file with path to outdir"""
data = self.impl.read_file(path)
fpath = util.join_path(outdir, path)
util.ensure_dirs(os.path.dirname(fpath))
with open(fpath, "wb") as f:
f.write(data)
f.flush()
os.fsync(f.fileno())
def extract_dir(self, dir, outdir):
"""Extract directory recursively, this function
copies the directory archiveroot/dir to outdir"""
self.impl.unpack_dir(dir, outdir)
def extract_install(self, outdir):
def callback(tarinfo, extracted):
if not extracted:
# Installing packages (especially shared libraries) is a
# bit tricky. You should also change the inode if you
                # change the file, because the file is already opened and
# accessed. Removing and creating the file will also
# change the inode and will do the trick (in fact, old
                # file will be deleted only when it's closed).
#
# Also, tar.extract() doesn't write on symlinks... Not any
# more :).
if os.path.isfile(tarinfo.name) or os.path.islink(tarinfo.name):
try:
os.unlink(tarinfo.name)
except OSError, e:
ctx.ui.warning(e)
else:
# Added for package-manager
if tarinfo.name.endswith(".desktop"):
ctx.ui.notify(pisi.ui.desktopfile, desktopfile=tarinfo.name)
tar = self.get_install_archive()
if tar:
tar.unpack_dir(outdir, callback=callback)
else:
self.extract_dir_flat('install', outdir)
def extract_dir_flat(self, dir, outdir):
"""Extract directory recursively, this function
unpacks the *contents* of directory archiveroot/dir inside outdir
this is the function used by the installer"""
self.impl.unpack_dir_flat(dir, outdir)
def extract_to(self, outdir, clean_dir = False):
"""Extracts contents of the archive to outdir. Before extracting if clean_dir
is set, outdir is deleted with its contents"""
self.impl.unpack(outdir, clean_dir)
def extract_pisi_files(self, outdir):
"""Extract PiSi control files: metadata.xml, files.xml,
action scripts, etc."""
self.extract_files([ctx.const.metadata_xml, ctx.const.files_xml], outdir)
self.extract_dir('config', outdir)
def get_metadata(self):
"""reads metadata.xml from the PiSi package and returns MetaData object"""
md = pisi.metadata.MetaData()
md.parse(self.impl.read_file(ctx.const.metadata_xml))
return md
def get_files(self):
"""reads files.xml from the PiSi package and returns Files object"""
files = pisi.files.Files()
files.parse(self.impl.read_file(ctx.const.files_xml))
return files
def read(self):
self.files = self.get_files()
self.metadata = self.get_metadata()
def pkg_dir(self):
packageDir = self.metadata.package.name + '-' \
+ self.metadata.package.version + '-' \
+ self.metadata.package.release
return os.path.join(ctx.config.packages_dir(), packageDir)
def comar_dir(self):
return os.path.join(self.pkg_dir(), ctx.const.comar_dir)
@staticmethod
def is_cached(packagefn):
url = pisi.uri.URI(packagefn)
filepath = packagefn
if url.is_remote_file():
filepath = os.path.join(ctx.config.cached_packages_dir(), url.filename())
return os.path.exists(filepath) and filepath
else:
return filepath
| gpl-2.0 | 1,664,222,144,843,548,200 | 33.620209 | 90 | 0.58122 | false | 4.09732 | false | false | false |
mesocentrefc/Janua-SMS | janua/actions/set_auth_backend.py | 1 | 2374 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
#
# Copyright (c) 2016 Cédric Clerget - HPC Center of Franche-Comté University
#
# This file is part of Janua-SMS
#
# http://github.com/mesocentrefc/Janua-SMS
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import json
from janua.actions.action import Action
from janua.actions.action import argument
from janua.ws import json_error, json_success
from janua.ws.services import urlconfig
from janua.ws.auth import auth_manager, AuthConfigError
class SetAuthBackend(Action):
"""
Set authentication backend configuration
Sample request to set configuration for LDAP backend:
.. code-block:: javascript
POST /set_auth_backend/ldap HTTP/1.1
Host: janua.mydomain.com
Content-Type: application/json
JanuaAuthToken: abcdef123456789
{
"parameters":
{
"ldap_uri": "ldap://ldap.example.fr:389",
"ldap_bind_dn": "uid=${login},ou=people,dc=example,dc=fr",
"ldap_tls_support": false
},
"success": true
}
Sample response:
.. code-block:: javascript
HTTP/1.1 200
{
"message": "Configuration has been saved",
"success": true
}
"""
category = '__INTERNAL__'
@urlconfig('/set_auth_backend/<backend>', role=['admin'])
def web(self, backend):
auth_backend = auth_manager.get(backend)
if auth_backend:
try:
auth_backend.update_config(self.parameters())
except AuthConfigError, err:
return json_error('Failed to update configuration: %s' % err)
return json_success('Configuration has been saved')
@argument(required=True)
def parameters(self):
"""Backend config parameters"""
return str()
| gpl-2.0 | -6,885,421,660,702,887,000 | 28.283951 | 77 | 0.6543 | false | 3.888525 | true | false | false |
annashcherbina/sherlockstoolkit | python_analysis_modules/quality_check/qcSamples.py | 1 | 7970 | #This module will draw an S-curve for a set of selected profiles.
import MySQLdb as mdb
import os
import sys
import Params
from Outputs import *
def recordAttachmentParameterMappings(attachment,cur):
attachments=[attachment]
parameter_ids=[Params.min_calls_id]
print str(parameter_ids)
for attachment_id in attachments:
for param_id in parameter_ids:
query="insert into attachments_parameters (attachment_id,parameter_id) VALUES("+str(attachment_id)+","+str(param_id)+");"
cur.execute(query)
def getParameters(cur,user_id,parameter_group):
#get minimum number of calls to accept an allele
query="Select id,value from parameters where user_id="+str(user_id)+" and group_name='"+str(parameter_group)+"' and name like '%Minimum Reads per Locus%' order by id DESC limit 1;"
cur.execute(query)
values=cur.fetchone()
if values==None:
#use the default
query="select id,value from parameters where user_id=1 and name like '%Minimum Reads per Locus%';"
cur.execute(query)
values=cur.fetchone()
Params.min_calls_id=values[0]
Params.min_calls=values[1]
def usage():
print "Usage: python qcSamples.py -h <host ip> -db <database> -u <database user> -p <database password> -id_user <id of user running the analysis> -id_folder <id of folder where analysis results are to be stored> -samples [samples to analyze]"
def check_if_exists(cur,samples,locus_group):
samples.sort()
existing=[]
param_hash=str(Params.min_calls_id) +str(locus_group)
qc_name=param_hash+"qc"
for sample in samples:
qc_name=qc_name+"_"+sample
qc_name=hash(qc_name)
query="select id from images where internal_hash ="+str(qc_name)+";"
print str(query)
cur.execute(query)
id_qc=cur.fetchone()
if id_qc==None:
return []
else:
existing.append(id_qc[0])
query="select id from attachments where internal_hash ="+str(qc_name)+";"
print str(query)
cur.execute(query)
id_qc=cur.fetchone()
if id_qc==None:
return []
else:
existing.append(id_qc[0])
return existing
def parseInputs():
host=None
db=None
user=None
password=None
samples=[]
user_id=None
folder_id=None
parameter_group=None
quality=None
locus_group=None
sys.argv=[i.replace('\xe2\x80\x93','-') for i in sys.argv]
for i in range(1,len(sys.argv)):
entry=sys.argv[i]
print "entry:"+str(entry)
if entry=='-h':
host=sys.argv[i+1]
elif entry=='-db':
db=sys.argv[i+1]
elif entry=='-u':
user=sys.argv[i+1]
elif entry=='-p':
password=sys.argv[i+1]
elif entry.__contains__('-samples'):
for j in range(i+1,len(sys.argv)):
if not sys.argv[j].startswith('-'):
samples.append(sys.argv[j])
else:
break;
elif entry.__contains__('-id_user'):
user_id=sys.argv[i+1]
#print "set user id: "+str(user_id)
elif entry.__contains__('-id_folder'):
folder_id=sys.argv[i+1]
elif entry.__contains__('-parameter_group'):
parameter_group=sys.argv[i+1]
elif entry.__contains__('-quality'):
quality=int(sys.argv[i+1])
elif entry.__contains__('-locus_group'):
locus_group=int(sys.argv[i+1])
#make sure all the necessary arguments have been supplied
if host==None or db==None or user==None or password==None or len(samples)==0 or user_id==None or folder_id==None or locus_group==None or quality==None:
print "-1 incorrect parameters passed to function"
if host==None:
print " missing -host"
if db==None:
print " missing -db"
if user==None:
print " missing -u (database user)"
if password==None:
print " missing -p (database password)"
        if len(samples)==0:
print " missing -samples (samples)"
if user_id==None:
print " missing -id_user"
if folder_id==None:
print " missing -id_folder"
if locus_group==None:
print " missing -locus_group"
if quality==None:
print "missing -quality"
#usage()
sys.exit(0)
else:
return host,db,user,password,samples,user_id,folder_id,parameter_group,locus_group,quality
def main():
host,db,user,password, samples, user_id,folder_id,parameter_group,locus_group,quality=parseInputs()
con,cur=connect(host,user,password,db);
getParameters(cur,user_id,parameter_group)
existing_id=check_if_exists(cur,samples,locus_group)
if existing_id!=[]:
print "Image already exists!"
print str(existing_id[0])+ " " + str(existing_id[1])
sys.exit(0)
#if a locus group is specified, limit the analysis only to snps within the locus group
locus_group_name="Unspecified"
locus_group_snps=[]
if locus_group!=0:
query="select locus_id from loci_loci_groups where loci_group_id="+str(locus_group)+";"
cur.execute(query)
locus_group_snps=[i[0] for i in cur.fetchall()]
query="select name from loci_groups where id="+str(locus_group)+";"
cur.execute(query)
locus_group_name=cur.fetchone()[0]
sample_to_ma=dict()
snp_to_freq=dict()
for i in range(len(samples)):
sample=samples[i]
sample_to_ma[sample]=[]
query="select minor_allele_frequency,total_count,forward_count,locus_id from calls where sample_id="+str(sample)+";"
cur.execute(query)
ma_data=cur.fetchall()
for entry in ma_data:
maf=entry[0]
counts=entry[1]
forward_counts=entry[2]
locus_id=entry[3]
if locus_id not in snp_to_freq:
snp_to_freq[locus_id]=dict()
snp_to_freq[locus_id][sample]=[maf,counts,forward_counts]
if (locus_group !=0) and locus_id not in locus_group_snps:
continue
if forward_counts <Params.min_calls:
maf=-.1*(i+1)
if (counts - forward_counts)< Params.min_calls:
maf=-.1*(i+1)
sample_to_ma[sample].append(maf)
for sample in sample_to_ma:
sample_to_ma[sample].sort()
image_id=generate_image(sample_to_ma,cur,user_id,folder_id,quality,locus_group_name,locus_group)
attachment_id=generate_attachment(sample_to_ma,cur,user_id,folder_id,parameter_group,quality,locus_group_name,locus_group_snps,locus_group,snp_to_freq)
query="update images set associated_attachment_id="+str(attachment_id)+" where id="+str(image_id)+";"
cur.execute(query)
recordAttachmentParameterMappings(attachment_id,cur)
disconnect(con,cur)
print str(image_id) + " " + str(attachment_id)
#Connect to the database
def connect(host,connecting_user,password,dbName):
try:
con = mdb.connect(host,connecting_user,password,dbName)
cur = con.cursor()
con.begin()
#Execute a test query to make sure the database connection has been successful.
return con,cur
except mdb.Error,e:
error_message = e.__str__();
print error_message
sys.exit(0)
#close connection to the database
def disconnect(con,cur):
try:
con.commit();
cur.close();
con.close();
except mdb.Error,e:
error_message=e.__str__();
print error_message
sys.exit(0)
if __name__=='__main__':
main()
| gpl-3.0 | 6,908,922,549,713,577,000 | 35.559633 | 247 | 0.580678 | false | 3.624375 | false | false | false |
LeMinaw/minaw.net | dynimg/models.py | 1 | 1599 | #-*- coding: utf-8 -*-
from django.urls import reverse
from django.db import models
from datetime import timedelta
class BaseModel(models.Model):
created = models.DateField (auto_now_add=True, verbose_name="création" )
last_used = models.DateField (auto_now=True, blank=True, verbose_name="dernière utilisation" )
times_used = models.IntegerField ( default=0, verbose_name="nombre d'utilisations")
class Meta:
abstract = True
class ImageUrl(BaseModel):
dwnlTime = models.DurationField(default=timedelta(0), verbose_name="temps de téléchargement")
url = models.URLField (max_length=128, unique=True, verbose_name="URL" )
def __str__(self):
return self.url
class Meta:
verbose_name = "URL d'image"
verbose_name_plural = "URLs d'images"
class DynamicImg(BaseModel):
name = models.CharField (max_length=32, blank=True, verbose_name="nom")
urls = models.ManyToManyField(ImageUrl, verbose_name="URLs")
shadowMode = models.BooleanField (default=False, verbose_name="mode discret")
def __str__(self):
return str(self.id)
def get_absolute_url(self):
return reverse('dynimg:getimg', kwargs={'id_img': self.id})
def get_urls_nb(self):
return self.urls.count()
get_urls_nb.short_description = "Nombre d'URLs"
class Meta:
verbose_name = "image dynamique"
verbose_name_plural = "images dynamiques"
| gpl-3.0 | -952,888,346,687,520,100 | 33.673913 | 106 | 0.608777 | false | 3.649886 | false | false | false |
matthewalbani/scipy | scipy/io/matlab/tests/test_pathological.py | 24 | 1064 | """ Test reading of files not conforming to matlab specification
We try and read any file that matlab reads, these files included
"""
from __future__ import division, print_function, absolute_import
from os.path import dirname, join as pjoin
from numpy.testing import assert_, assert_raises
from scipy.io.matlab.mio import loadmat
TEST_DATA_PATH = pjoin(dirname(__file__), 'data')
def test_multiple_fieldnames():
# Example provided by Dharhas Pothina
# Extracted using mio5.varmats_from_mat
multi_fname = pjoin(TEST_DATA_PATH, 'nasty_duplicate_fieldnames.mat')
vars = loadmat(multi_fname)
funny_names = vars['Summary'].dtype.names
assert_(set(['_1_Station_Q', '_2_Station_Q',
'_3_Station_Q']).issubset(funny_names))
def test_malformed1():
# Example from gh-6072
# Contains malformed header data, which previously resulted into a
# buffer overflow.
#
# Should raise an exception, not segfault
fname = pjoin(TEST_DATA_PATH, 'malformed1.mat')
assert_raises(ValueError, loadmat, fname)
| bsd-3-clause | 5,226,861,288,698,345,000 | 31.242424 | 73 | 0.702068 | false | 3.546667 | true | false | false |
swordsmaster/waf | rtems_waf/debug.py | 2 | 2222 | from hashlib import sha256
from os.path import exists
from json import JSONEncoder
from time import time
import logging
from logging.handlers import MemoryHandler
from waflib.Task import Task
from waflib.Utils import subprocess, check_dir
from waflib.Logs import debug
from os.path import dirname
#from cStringIO import StringIO
#from waflib import Utils
def logger_json_create(ctx):
logger = logging.getLogger('build.json')
logger.setLevel(logging.INFO)
if ctx.variant == "host":
file = "%s/logs/host.json" % ctx.out_dir
else:
file = "%s/logs/%s.json" % (ctx.out_dir, ctx.variant)
check_dir(dirname(file))
filetarget = logging.FileHandler(file, mode="w")
memoryhandler = MemoryHandler(1048576, target=filetarget)
logger.addHandler(memoryhandler)
return logger
def hash_files(files):
h = []
for file in files:
if exists(file):
fp = open(file, "r")
h.append((file, sha256(fp.read()).hexdigest()))
fp.close()
return h
def exec_command_json(self, cmd, **kw):
# subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
debug('runner_env: kw=%s' % kw)
try:
record = {}
record["time"] = time()
record["command"] = cmd
        record["variant"] = self.variant
task_self = kw["json_task_self"]
record["type"] = task_self.__class__.__name__
del kw["json_task_self"]
record["inputs"] = [x.srcpath() for x in task_self.inputs]
record["outputs"] = [x.srcpath() for x in task_self.outputs]
record["cflags"] = self.env.CFLAGS
record["cc"] = self.env.CC
kw['stdout'] = kw['stderr'] = subprocess.PIPE
time_start = time()
p = subprocess.Popen(cmd, **kw)
(stdout, stderr) = p.communicate()
record["time_duration"] = time() - time_start
if stdout:
record["stdout"] = stdout
if stderr:
record["stderr"] = stderr
record["hash"] = {}
record["hash"]["inputs"] = hash_files(record["inputs"])
record["hash"]["outputs"] = hash_files(record["outputs"])
record["retval"] = p.returncode
data = JSONEncoder(sort_keys=False, indent=False).encode(record)
self.logger_json.info(data)
return p.returncode
except OSError:
return -1
def exec_command_json_extra(self, cmd, **kw):
kw["json_task_self"] = self
self.exec_command_real(cmd, **kw)
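# Illustrative wiring sketch (an assumption, not part of the original module):
# these helpers are intended to be attached to a waf build context so that
# every command gets mirrored into the JSON log.  Attribute names such as
# exec_command_real and logger_json follow the code above.
#
#   def patch_context(ctx):
#       ctx.logger_json = logger_json_create(ctx)
#       ctx.exec_command_real = ctx.exec_command
#       ctx.exec_command = lambda cmd, **kw: exec_command_json(ctx, cmd, **kw)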
| gpl-2.0 | 5,777,441,172,275,156,000 | 22.389474 | 66 | 0.680918 | false | 3.060606 | false | false | false |
jtpaasch/armyguys | armyguys/jobs/instanceprofiles.py | 1 | 8479 | # -*- coding: utf-8 -*-
"""Jobs for IAM roles."""
import os
from time import sleep
from ..aws.iam import instanceprofile
from .exceptions import FileDoesNotExist
from .exceptions import ImproperlyConfigured
from .exceptions import MissingKey
from .exceptions import ResourceAlreadyExists
from .exceptions import ResourceDoesNotExist
from .exceptions import ResourceNotCreated
from .exceptions import ResourceNotDeleted
from .exceptions import WaitTimedOut
from . import roles as role_jobs
from . import utils
def get_display_name(record):
"""Get the display name for a record.
Args:
record
A record returned by AWS.
Returns:
A display name for the instance profile.
"""
return record["InstanceProfileName"]
def fetch_all(profile):
"""Fetch all instance profiles.
Args:
profile
A profile to connect to AWS with.
Returns:
A list of instance profiles.
"""
params = {}
params["profile"] = profile
response = utils.do_request(instanceprofile, "get", params)
data = utils.get_data("InstanceProfiles", response)
return data
def fetch_by_name(profile, name):
"""Fetch an instance profile by name.
Args:
profile
A profile to connect to AWS with.
name
The name of the instance profile you want to fetch.
Returns:
A list of instance profiles with the provided name.
"""
params = {}
params["profile"] = profile
response = utils.do_request(instanceprofile, "get", params)
data = utils.get_data("InstanceProfiles", response)
result = [x for x in data if x["InstanceProfileName"] == name]
return result
def exists(profile, name):
"""Check if an instance profile exists.
Args:
profile
A profile to connect to AWS with.
name
The name of an instance profile.
Returns:
True if it exists, False if it doesn't.
"""
result = fetch_by_name(profile, name)
return len(result) > 0
def polling_fetch(profile, name, max_attempts=10, wait_interval=1):
"""Try to fetch an instance profile repeatedly until it exists.
Args:
profile
A profile to connect to AWS with.
name
The name of an instance profile.
max_attempts
The max number of times to poll AWS.
wait_interval
How many seconds to wait between each poll.
Returns:
        The instance profile's data. Raises WaitTimedOut if the profile does not appear in time.
"""
data = None
count = 0
while count < max_attempts:
data = fetch_by_name(profile, name)
if data:
break
else:
count += 1
sleep(wait_interval)
if not data:
msg = "Timed out waiting for instance profile to be created."
raise WaitTimedOut(msg)
return data
def create(profile, name):
"""Create an instance profile.
Args:
profile
A profile to connect to AWS with.
name
The name you want to give to the instance profile.
Returns:
Info about the newly created instance profile.
"""
# Make sure it doesn't exist already.
if exists(profile, name):
msg = "Instance profile '" + str(name) + "' already exists."
raise ResourceAlreadyExists(msg)
# Now we can create it.
params = {}
params["profile"] = profile
params["name"] = name
response = utils.do_request(instanceprofile, "create", params)
# Check that it exists.
instance_profile_data = polling_fetch(profile, name)
if not instance_profile_data:
msg = "Instance profile '" + str(name) + "' not created."
raise ResourceNotCreated(msg)
# Send back the instance profile's info.
return instance_profile_data
def delete(profile, name):
"""Delete an IAM instance profile.
Args:
profile
A profile to connect to AWS with.
name
The name of the instance profile you want to delete.
"""
# Make sure the instance profile exists.
if not exists(profile, name):
msg = "No instance profile '" + str(name) + "'."
raise ResourceDoesNotExist(msg)
# Now try to delete it.
params = {}
params["profile"] = profile
params["name"] = name
response = utils.do_request(instanceprofile, "delete", params)
# Check that it was, in fact, deleted.
if exists(profile, name):
msg = "The instance profile '" + str(name) + "' was not deleted."
raise ResourceNotDeleted(msg)
def attach(profile, instance_profile, role):
"""Attach an IAM role to an instance profile.
Args:
profile
A profile to connect to AWS with.
instance_profile
The name of an instance profile.
role
The name of a role.
Returns:
The data returned by boto3.
"""
# Make sure the instance profile exists.
if not exists(profile, instance_profile):
msg = "No instance profile '" + str(instance_profile) + "'."
raise ResourceDoesNotExist(msg)
# Make sure the role exists.
if not role_jobs.exists(profile, role):
msg = "No role '" + str(role) + "'."
raise ResourceDoesNotExist(msg)
# Attach the role to the instance profile.
params = {}
params["profile"] = profile
params["instance_profile"] = instance_profile
params["role"] = role
return utils.do_request(instanceprofile, "add_role", params)
def detach(profile, instance_profile, role):
"""Detach an IAM role from an instance profile.
Args:
profile
A profile to connect to AWS with.
instance profile
The name of an instance profile.
role
The name of a role.
Returns:
The data returned by boto3.
"""
# Make sure the instance profile exists.
if not exists(profile, instance_profile):
msg = "No instance profile '" + str(instance_profile) + "'."
raise ResourceDoesNotExist(msg)
# Make sure the role exists.
if not role_jobs.exists(profile, role):
msg = "No role '" + str(role) + "'."
raise ResourceDoesNotExist(msg)
# Detach the role
params = {}
params["profile"] = profile
params["instance_profile"] = instance_profile
params["role"] = role
return utils.do_request(instanceprofile, "remove_role", params)
def is_attached(profile, instance_profile, role):
"""Check if an IAM role is attached to an instance profile.
Args:
profile
A profile to connect to AWS with.
instance_profile
The name of an instance profile.
role
The name of a role.
Returns:
True if it's attached, False if it's not.
"""
# Make sure the instance profile exists.
instance_profile_data = fetch_by_name(profile, instance_profile)
if not instance_profile_data:
msg = "No instance profile '" + str(instance_profile) + "'."
raise ResourceDoesNotExist(msg)
# Make sure the role exists.
if not role_jobs.exists(profile, role):
msg = "No role '" + str(role) + "'."
raise ResourceDoesNotExist(msg)
# Check if the role is attached.
roles = utils.get_data("Roles", instance_profile_data[0])
matching_roles = [x for x in roles if x["RoleName"] == role]
return len(matching_roles) > 0
def is_detached(profile, instance_profile, role):
"""Check if an IAM role is detached from an instance profile.
Args:
profile
A profile to connect to AWS with.
instance_profile
The name of an instance profile.
role
The name of a role.
Returns:
True if it's detached, False if it's not.
"""
# Make sure the instance profile exists.
instance_profile_data = fetch_by_name(profile, instance_profile)
if not instance_profile_data:
msg = "No instance profile '" + str(instance_profile) + "'."
raise ResourceDoesNotExist(msg)
# Make sure the role exists.
if not role_jobs.exists(profile, role):
msg = "No role '" + str(role) + "'."
raise ResourceDoesNotExist(msg)
# Check if the role is detached.
roles = utils.get_data("Roles", instance_profile_data[0])
matching_roles = [x for x in roles if x["RoleName"] == role]
return len(matching_roles) == 0
| mit | -5,456,793,122,395,470,000 | 24.011799 | 73 | 0.617172 | false | 4.31721 | false | false | false |
Azure/azure-sdk-for-python | sdk/search/azure-search-documents/samples/async_samples/sample_semantic_search_async.py | 1 | 2503 | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_semantic_search_async.py
DESCRIPTION:
This sample demonstrates how to use semantic search.
USAGE:
python sample_semantic_search_async.py
Set the environment variables with your own values before running the sample:
1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service
2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index")
3) AZURE_SEARCH_API_KEY - your search API key
"""
import os
import asyncio
async def speller():
# [START speller_async]
from azure.core.credentials import AzureKeyCredential
from azure.search.documents.aio import SearchClient
endpoint = os.getenv("AZURE_SEARCH_SERVICE_ENDPOINT")
index_name = os.getenv("AZURE_SEARCH_INDEX_NAME")
api_key = os.getenv("AZURE_SEARCH_API_KEY")
credential = AzureKeyCredential(api_key)
client = SearchClient(endpoint=endpoint,
index_name=index_name,
credential=credential)
results = await client.search(search_text="luxucy", query_language="en-us", speller="lexicon")
async for result in results:
print("{}\n{}\n)".format(result["HotelId"], result["HotelName"]))
# [END speller_async]
async def semantic_ranking():
# [START semantic_ranking_async]
from azure.core.credentials import AzureKeyCredential
from azure.search.documents import SearchClient
endpoint = os.getenv("AZURE_SEARCH_SERVICE_ENDPOINT")
index_name = os.getenv("AZURE_SEARCH_INDEX_NAME")
api_key = os.getenv("AZURE_SEARCH_API_KEY")
credential = AzureKeyCredential(api_key)
client = SearchClient(endpoint=endpoint,
index_name=index_name,
credential=credential)
results = list(client.search(search_text="luxury", query_type="semantic", query_language="en-us"))
for result in results:
print("{}\n{}\n)".format(result["HotelId"], result["HotelName"]))
# [END semantic_ranking_async]
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(speller())
loop.run_until_complete(semantic_ranking())
| mit | -5,546,289,094,553,076,000 | 36.358209 | 102 | 0.64203 | false | 3.917058 | false | false | false |
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/network/linkset.py | 1 | 6262 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class linkset(base_resource) :
""" Configuration for link set resource. """
def __init__(self) :
self._id = ""
self._ifnum = ""
self.___count = 0
@property
def id(self) :
"""Unique identifier for the linkset. Must be of the form LS/x, where x can be an integer from 1 to 32.
"""
try :
return self._id
except Exception as e:
raise e
@id.setter
def id(self, id) :
"""Unique identifier for the linkset. Must be of the form LS/x, where x can be an integer from 1 to 32.
"""
try :
self._id = id
except Exception as e:
raise e
@property
def ifnum(self) :
"""The interfaces to be bound to the linkset.
"""
try :
return self._ifnum
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(linkset_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.linkset
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.id) :
return str(self.id)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
""" Use this API to add linkset.
"""
try :
if type(resource) is not list :
addresource = linkset()
addresource.id = resource.id
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ linkset() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].id = resource[i].id
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
""" Use this API to delete linkset.
"""
try :
if type(resource) is not list :
deleteresource = linkset()
if type(resource) != type(deleteresource):
deleteresource.id = resource
else :
deleteresource.id = resource.id
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ linkset() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].id = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ linkset() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].id = resource[i].id
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
""" Use this API to fetch all the linkset resources that are configured on netscaler.
"""
try :
if not name :
obj = linkset()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = linkset()
obj.id = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [linkset() for _ in range(len(name))]
obj = [linkset() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = linkset()
obj[i].id = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
""" Use this API to fetch filtered set of linkset resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = linkset()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
""" Use this API to count the linkset resources configured on NetScaler.
"""
try :
obj = linkset()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
""" Use this API to count filtered the set of linkset resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = linkset()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class linkset_response(base_response) :
def __init__(self, length=1) :
self.linkset = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.linkset = [linkset() for _ in range(length)]
| apache-2.0 | 237,834,643,917,131,900 | 27.857143 | 109 | 0.66145 | false | 3.314981 | false | false | false |
vuolter/pyload | src/pyload/plugins/decrypters/YoutubeComFolder.py | 1 | 6834 | # -*- coding: utf-8 -*-
import json
from ..base.decrypter import BaseDecrypter
class YoutubeComFolder(BaseDecrypter):
__name__ = "YoutubeComFolder"
__type__ = "decrypter"
__version__ = "1.11"
__status__ = "testing"
__pattern__ = r"https?://(?:www\.|m\.)?youtube\.com/(?P<TYPE>user|playlist|view_play_list)(/|.*?[?&](?:list|p)=)(?P<ID>[\w\-]+)"
__config__ = [
("enabled", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
(
"folder_per_package",
"Default;Yes;No",
"Create folder for each package",
"Default",
),
("likes", "bool", "Grab user (channel) liked videos", False),
("favorites", "bool", "Grab user (channel) favorite videos", False),
("uploads", "bool", "Grab channel unplaylisted videos", True),
]
__description__ = """Youtube.com channel & playlist decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
API_KEY = "AIzaSyAcA9c4evtwSY1ifuvzo6HKBkeot5Bk_U4"
def api_response(self, method, **kwargs):
kwargs['key'] = self.API_KEY
json_data = self.load("https://www.googleapis.com/youtube/v3/" + method, get=kwargs)
return json.loads(json_data)
def get_channel(self, user):
channels = self.api_response("channels",
part="id,snippet,contentDetails",
forUsername=user,
maxResults=50)
if channels['items']:
channel = channels['items'][0]
return {'id': channel['id'],
'title': channel['snippet']['title'],
'relatedPlaylists': channel['contentDetails']['relatedPlaylists'],
'user': user} #: One lone channel for user?
def get_playlist(self, playlist_id):
playlists = self.api_response("playlists",
part="snippet",
id=playlist_id)
if playlists['items']:
playlist = playlists['items'][0]
return {'id': playlist_id,
'title': playlist['snippet']['title'],
'channelId': playlist['snippet']['channelId'],
'channelTitle': playlist['snippet']['channelTitle']}
def _get_playlists(self, playlist_id, token=None):
if token:
playlists = self.api_response("playlists",
part="id",
maxResults=50,
channelId=playlist_id,
pageToken=token)
else:
playlists = self.api_response("playlists",
part="id",
maxResults=50,
channelId=playlist_id)
for playlist in playlists['items']:
yield playlist['id']
if "nextPageToken" in playlists:
for item in self._get_playlists(playlist_id, playlists['nextPageToken']):
yield item
def get_playlists(self, ch_id):
return [self.get_playlist(p_id) for p_id in self._get_playlists(ch_id)]
def _get_videos_id(self, playlist_id, token=None):
if token:
playlist = self.api_response("playlistItems",
part="contentDetails",
maxResults=50,
playlistId=playlist_id,
pageToken=token)
else:
playlist = self.api_response("playlistItems",
part="contentDetails",
maxResults=50,
playlistId=playlist_id)
for item in playlist["items"]:
yield item["contentDetails"]["videoId"]
if "nextPageToken" in playlist:
for item in self._get_videos_id(playlist_id, playlist["nextPageToken"]):
yield item
def get_videos_id(self, p_id):
return list(self._get_videos_id(p_id))
def decrypt(self, pyfile):
if self.info["pattern"]["TYPE"] == "user":
self.log_debug("Url recognized as Channel")
channel = self.get_channel(self.info["pattern"]["ID"])
if channel:
playlists = self.get_playlists(channel["id"])
self.log_debug(
r'{} playlists found on channel "{}"'.format(
len(playlists), channel["title"]
)
)
related_playlist = {
p_name: self.get_playlist(p_id)
for p_name, p_id in channel["relatedPlaylists"].items()
}
self.log_debug(
"Channel's related playlists found = {}".format(
list(related_playlist.keys())
)
)
related_playlist["uploads"]["title"] = "Unplaylisted videos"
related_playlist["uploads"]["checkDups"] = True #: checkDups flag
for p_name, p_data in related_playlist.items():
if self.config.get(p_name):
p_data["title"] += " of " + channel["user"]
playlists.append(p_data)
else:
playlists = []
else:
self.log_debug("Url recognized as Playlist")
playlists = [self.get_playlist(self.info["pattern"]["ID"])]
if not playlists:
self.fail(self._("No playlist available"))
added_videos = []
urlize = lambda x: "https://www.youtube.com/watch?v=" + x
for p in playlists:
p_name = p["title"]
p_videos = self.get_videos_id(p["id"])
self.log_debug(
r'{} videos found on playlist "{}"'.format(len(p_videos), p_name)
)
if not p_videos:
continue
elif "checkDups" in p:
p_urls = [urlize(v_id) for v_id in p_videos if v_id not in added_videos]
self.log_debug(
r'{} videos available on playlist "{}" after duplicates cleanup'.format(
len(p_urls), p_name
)
)
else:
p_urls = [urlize(url) for url in p_videos]
#: Folder is NOT recognized by pyload 0.5.0!
self.packages.append((p_name, p_urls, p_name))
added_videos.extend(p_videos)
| agpl-3.0 | -5,458,169,004,478,449,000 | 37.393258 | 132 | 0.473954 | false | 4.38921 | false | false | false |
4degrees/harmony | source/harmony/schema/collection.py | 1 | 1859 | # :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from ..error import SchemaConflictError
class Collection(object):
'''Store registered schemas.'''
def __init__(self, schemas=None):
'''Initialise collection with *schemas*.'''
self._schemas = {}
if schemas is not None:
for schema in schemas:
self.add(schema)
def add(self, schema):
'''Add *schema*.
Raise SchemaConflictError if a schema with the same id already exists.
'''
schema_id = schema['id']
try:
self.get(schema_id)
except KeyError:
self._schemas[schema_id] = schema
else:
raise SchemaConflictError('A schema is already registered with '
'id {0}'.format(schema_id))
def remove(self, schema_id):
'''Remove a schema with *schema_id*.'''
try:
self._schemas.pop(schema_id)
except KeyError:
raise KeyError('No schema found with id {0}'.format(schema_id))
def clear(self):
'''Remove all registered schemas.'''
self._schemas.clear()
def get(self, schema_id):
'''Return schema registered with *schema_id*.
Raise KeyError if no schema with *schema_id* registered.
'''
try:
schema = self._schemas[schema_id]
except KeyError:
raise KeyError('No schema found with id {0}'.format(schema_id))
else:
return schema
def items(self):
'''Yield (id, schema) pairs.'''
for schema in self:
yield (schema['id'], schema)
def __iter__(self):
'''Iterate over registered schemas.'''
for schema_id in self._schemas:
yield self.get(schema_id)
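# Illustrative usage sketch (an assumption; the schema ids are placeholders):
#
#   collection = Collection([{'id': 'user'}, {'id': 'asset'}])
#   user_schema = collection.get('user')
#   for schema_id, schema in collection.items():
#       print(schema_id)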
| apache-2.0 | -6,000,509,363,584,870,000 | 27.166667 | 78 | 0.555137 | false | 4.374118 | false | false | false |
praekelt/portia | portia/utils.py | 1 | 1745 | from glob import glob
import json
import os
from urlparse import urlparse
from twisted.internet.endpoints import serverFromString
from twisted.internet import reactor as default_reactor
from twisted.web.server import Site
from twisted.python import log
from txredisapi import Connection
from .web import PortiaWebServer
from .protocol import JsonProtocolFactory
from .exceptions import PortiaException
def start_redis(redis_uri='redis://localhost:6379/1'):
try:
url = urlparse(redis_uri)
except (AttributeError, TypeError):
raise PortiaException('Invalid url: %s.' % (redis_uri,))
if not url.hostname:
raise PortiaException('Missing Redis hostname.')
try:
int(url.path[1:])
except (IndexError, ValueError):
raise PortiaException('Invalid Redis db index.')
return Connection(url.hostname, int(url.port or 6379),
dbid=int(url.path[1:]))
def start_webserver(portia, endpoint_str, cors=None, reactor=default_reactor):
endpoint = serverFromString(reactor, str(endpoint_str))
return endpoint.listen(
Site(PortiaWebServer(portia, cors=cors).app.resource()))
def start_tcpserver(portia, endpoint_str, reactor=default_reactor):
endpoint = serverFromString(reactor, str(endpoint_str))
return endpoint.listen(JsonProtocolFactory(portia))
def compile_network_prefix_mappings(glob_paths):
mapping = {}
for glob_path in glob_paths:
for mapping_file in glob(glob_path):
if not os.path.isfile(mapping_file):
continue
log.msg('Loading mapping file: %s.' % (mapping_file,))
with open(mapping_file) as fp:
mapping.update(json.load(fp))
return mapping
| bsd-3-clause | 3,205,609,877,235,778,600 | 29.614035 | 78 | 0.695702 | false | 3.974943 | false | false | false |
zircote/gh_mirror | gh_mirror/cli.py | 1 | 3527 | #!/usr/bin/python
# -*- coding: utf-8; -*-
# Copyright [2013] [Robert Allen]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
import os
import logging
import subprocess
def fetch_repos(org, token, cwd="./"):
"""
Collects all repos and iterates to update of clone as required
:param org str:
:param token str:
:param cwd str:
"""
uri = "https://api.github.com/orgs/{0}/repos".format(org)
headers = {}
if token is not None:
headers["Authorization"] = "token {0}".format(token)
i = 0
try:
r = requests.get(uri, headers=headers)
if r.status_code != 200:
raise requests.HTTPError("unsuccessful request made to %s" % uri)
result = r.json()
for repo in result:
i += 1
if os.path.exists('%s/%s' % (cwd, repo['name'])):
git_update_mirror(repo=repo, cwd=cwd)
else:
git_clone_project(repo=repo, cwd=cwd)
except OSError as error:
logging.exception(error)
return i
def git_update_mirror(repo, cwd):
"""
Updates the project based on the information from the repo dict
:param repo dict:
:param cwd str:
"""
args = ["git", "remote", "update", "-q"]
path = "%s/%s" % (cwd, repo['name'])
logging.info("updating %s" % (repo['full_name']))
subprocess.Popen(args, cwd=path)
def git_clone_project(repo, cwd):
"""
Clones a new project based on the repo dic
:param repo dict:
:param cwd str:
"""
args = ["git", "clone", "-q", "--mirror", repo['ssh_url'], repo['name']]
path = "%s" % cwd
logging.info("cloning %s to %s" % (repo['ssh_url'], repo['name']))
subprocess.Popen(args, cwd=path)
def main():
from argparse import ArgumentParser
    parser = ArgumentParser(description='GitHub Organization Repository Mirroring Tool')
parser.add_argument('--loglevel', type=str, choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
help='Available levels are CRITICAL, ERROR, WARNING, INFO, DEBUG',
default="INFO")
parser.add_argument('-d', '--directory', type=str, default=os.environ['HOME'],
help='The directory/path to mirror the repositories into, defaults to the user home.')
parser.add_argument('-t', '--token', type=str, required=True,
help='The github oauth token authorized to pull from the repositories.')
parser.add_argument('-o', '--organisation', type=str, required=True,
help='The Organisation name that owns the projects to mirror')
options = parser.parse_args()
log_level = getattr(logging, options.loglevel)
logging.basicConfig(level=log_level, format='%(message)s')
logging.info('Starting up...')
count = fetch_repos(org=options.organisation, token=options.token, cwd=options.directory)
logging.info("Run Complete [%s] repositories found..." % count)
if __name__ == "__main__":
main()
| apache-2.0 | 8,240,381,497,432,421,000 | 35.360825 | 110 | 0.620357 | false | 3.875824 | false | false | false |
remi1411/RootTheBox | setup/xmlsetup.py | 6 | 7231 | # -*- coding: utf-8 -*-
'''
Created on Aug 26, 2013
Copyright 2012 Root the Box
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------------------------------------------
This file wraps the Python scripted game setup API.
It reads an XML file(s) and calls the API based on the it's contents.
'''
import os
import logging
import defusedxml.cElementTree as ET
# We have to import all of the classes to avoid mapper errors
from setup.create_database import *
from models import dbsession
def get_child_by_tag(elem, tag_name):
''' Return child elements with a given tag '''
tags = filter(
lambda child: child.tag == tag_name, elem.getchildren()
)
return tags[0] if 0 < len(tags) else None
def get_child_text(elem, tag_name):
''' Shorthand access to .text data '''
return get_child_by_tag(elem, tag_name).text
def create_levels(levels):
''' Create GameLevel objects based on XML data '''
logging.info("Found %s game level(s)" % levels.get('count'))
for index, level_elem in enumerate(levels.getchildren()):
# GameLevel 0 is created automatically by the bootstrap
if get_child_text(level_elem, 'number') != '0':
try:
number = get_child_text(level_elem, 'number')
if GameLevel.by_number(number) is None:
game_level = GameLevel()
game_level.number = number
game_level.buyout = get_child_text(level_elem, 'buyout')
dbsession.add(game_level)
else:
logging.info("GameLevel %d already exists, skipping" % number)
except:
logging.exception("Failed to import game level #%d" % (index + 1))
dbsession.flush()
game_levels = GameLevel.all()
for index, game_level in enumerate(game_levels):
if index + 1 < len(game_levels):
game_level.next_level_id = game_levels[index + 1].id
logging.info("%r -> %r" % (game_level, game_levels[index + 1]))
dbsession.add(game_level)
dbsession.commit()
def create_hints(parent, box):
''' Create flag objects for a box '''
logging.info("Found %s hint(s)" % parent.get('count'))
for index, hint_elem in enumerate(parent.getchildren()):
try:
hint = Hint(box_id=box.id)
hint.price = get_child_text(hint_elem, 'price')
hint.description = get_child_text(hint_elem, 'description')
dbsession.add(hint)
except:
logging.exception("Failed to import hint #%d" % (index + 1))
def create_flags(parent, box):
''' Create flag objects for a box '''
logging.info("Found %s flag(s)" % parent.get('count'))
for index, flag_elem in enumerate(parent.getchildren()):
try:
name = get_child_text(flag_elem, 'name')
flag = Flag(box_id=box.id)
flag.name = name
flag.token = get_child_text(flag_elem, 'token')
flag.value = get_child_text(flag_elem, 'value')
flag.description = get_child_text(flag_elem, 'description')
flag.capture_message = get_child_text(flag_elem, 'capture_message')
flag.type = flag_elem.get('type')
dbsession.add(flag)
except:
logging.exception("Failed to import flag #%d" % (index + 1))
def create_boxes(parent, corporation):
''' Create boxes for a corporation '''
logging.info("Found %s boxes" % parent.get('count'))
for index, box_elem in enumerate(parent.getchildren()):
try:
name = get_child_text(box_elem, 'name')
game_level = GameLevel.by_number(box_elem.get('gamelevel'))
if game_level is None:
logging.warning("GameLevel does not exist for box %s, skipping" % name)
elif Box.by_name(name) is None:
box = Box(corporation_id=corporation.id)
box.name = name
box.game_level_id = game_level.id
box.difficulty = get_child_text(box_elem, 'difficulty')
box.description = get_child_text(box_elem, 'description')
box.operating_system = get_child_text(box_elem, 'operatingsystem')
box.avatar = get_child_text(box_elem, 'avatar').decode('base64')
box.garbage = get_child_text(box_elem, 'garbage')
dbsession.add(box)
dbsession.flush()
create_flags(get_child_by_tag(box_elem, 'flags'), box)
create_hints(get_child_by_tag(box_elem, 'hints'), box)
else:
logging.info("Box with name %s already exists, skipping" % name)
except:
logging.exception("Failed to import box %d" % (index + 1))
def create_corps(corps):
''' Create Corporation objects based on XML data '''
logging.info("Found %s corporation(s)" % corps.get('count'))
for index, corp_elem in enumerate(corps):
try:
corporation = Corporation()
corporation.name = get_child_text(corp_elem, 'name')
dbsession.add(corporation)
dbsession.flush()
create_boxes(get_child_by_tag(corp_elem, 'boxes'), corporation)
except:
logging.exception("Faild to create corporation #%d" % (index + 1))
def _xml_file_import(filename):
''' Parse and import a single XML file '''
logging.debug("Processing: %s" % filename)
try:
tree = ET.parse(filename)
xml_root = tree.getroot()
levels = get_child_by_tag(xml_root, "gamelevels")
create_levels(levels)
corporations = get_child_by_tag(xml_root, "corporations")
create_corps(corporations)
logging.debug("Done processing: %s" % filename)
dbsession.commit()
return True
except:
dbsession.rollback()
logging.exception("Exception raised while parsing %s, rolling back changes" % filename)
return False
def import_xml(target):
''' Import XML file or directory of files '''
target = os.path.abspath(target)
if not os.path.exists(target):
logging.error("Error: Target does not exist (%s) " % target)
elif os.path.isdir(target):
# Import any .xml files in the target directory
logging.debug("%s is a directory ..." % target)
ls = filter(lambda fname: fname.lower().endswith('.xml'), os.listdir(target))
logging.debug("Found %d XML file(s) ..." % len(ls))
results = [_xml_file_import(target + '/' + fxml) for fxml in ls]
return False not in results
else:
# Import a single file
return _xml_file_import(target)
| apache-2.0 | 7,185,562,956,989,984,000 | 39.172222 | 95 | 0.599226 | false | 3.893915 | false | false | false |
coderbone/SickRage-alt | sickbeard/show_name_helpers.py | 1 | 9036 | # coding=utf-8
# Author: Nic Wolfe <[email protected]>
# URL: https://sickchill.github.io
#
# This file is part of SickChill.
#
# SickChill is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickChill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickChill. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, print_function, unicode_literals
# Stdlib Imports
import fnmatch
import os
import re
# Third Party Imports
import six
# First Party Imports
import sickbeard
from sickchill.helper.encoding import ek
# Local Folder Imports
from . import common, logger
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from .scene_exceptions import get_scene_exceptions
resultFilters = {
"sub(bed|ed|pack|s)",
"(dir|sub|nfo)fix",
"(?<!shomin.)sample",
"(dvd)?extras",
"dub(bed)?"
}
if hasattr('General', 'ignored_subs_list') and sickbeard.IGNORED_SUBS_LIST:
resultFilters.add("(" + sickbeard.IGNORED_SUBS_LIST.replace(",", "|") + ")sub(bed|ed|s)?")
def containsAtLeastOneWord(name, words):
"""
Filters out results based on filter_words
name: name to check
words : string of words separated by a ',' or list of words
    Returns: the first matching word when one is found, True when the words list is empty, False otherwise.
"""
if isinstance(words, six.string_types):
words = words.split(',')
words = {word.strip() for word in words if word.strip()}
if not any(words):
return True
for word, regexp in six.iteritems(
{word: re.compile(r'(^|[\W_]){0}($|[\W_])'.format(re.escape(word)), re.I) for word in words}
):
if regexp.search(name):
return word
return False
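# Behaviour examples for containsAtLeastOneWord (illustrative):
#   containsAtLeastOneWord("Show.720p.HDTV.x264", "720p,1080p") -> "720p"
#   containsAtLeastOneWord("Show.720p.HDTV.x264", "")           -> True
#   containsAtLeastOneWord("Show.720p.HDTV.x264", "1080p")      -> False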
def filter_bad_releases(name, parse=True, show=None):
"""
Filters out non-english and just all-around stupid releases by comparing them
to the resultFilters contents.
name: the release name to check
Returns: True if the release name is OK, False if it's bad.
"""
try:
if parse:
NameParser().parse(name)
except InvalidNameException as error:
logger.log("{0}".format(error), logger.DEBUG)
return False
except InvalidShowException:
pass
# except InvalidShowException as error:
# logger.log(u"{0}".format(error), logger.DEBUG)
# return False
def clean_set(words):
return {x.strip() for x in set((words or '').lower().split(',')) if x.strip()}
# if any of the bad strings are in the name then say no
ignore_words = resultFilters
ignore_words = ignore_words.union(clean_set(show and show.rls_ignore_words or '')) # Show specific ignored words
ignore_words = ignore_words.union(clean_set(sickbeard.IGNORE_WORDS)) # Plus Global ignored words
ignore_words = ignore_words.difference(clean_set(show and show.rls_require_words or '')) # Minus show specific required words
if sickbeard.REQUIRE_WORDS and not (show and show.rls_ignore_words): # Only remove global require words from the list if we arent using show ignore words
ignore_words = ignore_words.difference(clean_set(sickbeard.REQUIRE_WORDS))
word = containsAtLeastOneWord(name, ignore_words)
if word:
logger.log("Release: " + name + " contains " + word + ", ignoring it", logger.INFO)
return False
# if any of the good strings aren't in the name then say no
require_words = set()
require_words = require_words.union(clean_set(show and show.rls_require_words or '')) # Show specific required words
require_words = require_words.union(clean_set(sickbeard.REQUIRE_WORDS)) # Plus Global required words
require_words = require_words.difference(clean_set(show and show.rls_ignore_words or '')) # Minus show specific ignored words
if sickbeard.IGNORE_WORDS and not (show and show.rls_require_words): # Only remove global ignore words from the list if we arent using show require words
require_words = require_words.difference(clean_set(sickbeard.IGNORE_WORDS))
if require_words and not containsAtLeastOneWord(name, require_words):
logger.log("Release: " + name + " doesn't contain any of " + ', '.join(set(require_words)) +
", ignoring it", logger.INFO)
return False
return True
def allPossibleShowNames(show, season=-1):
"""
Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name,
country codes on the end, eg. "Show Name (AU)", and any scene exception names.
show: a TVShow object that we should get the names of
Returns: a list of all the possible show names
"""
showNames = get_scene_exceptions(show.indexerid, season=season)
if not showNames: # if we dont have any season specific exceptions fallback to generic exceptions
season = -1
showNames = get_scene_exceptions(show.indexerid, season=season)
showNames.append(show.name)
if not show.is_anime:
newShowNames = []
country_list = common.countryList
country_list.update(dict(zip(common.countryList.values(), common.countryList.keys())))
for curName in set(showNames):
if not curName:
continue
# if we have "Show Name Australia" or "Show Name (Australia)" this will add "Show Name (AU)" for
# any countries defined in common.countryList
# (and vice versa)
for curCountry in country_list:
if curName.endswith(' ' + curCountry):
newShowNames.append(curName.replace(' ' + curCountry, ' (' + country_list[curCountry] + ')'))
elif curName.endswith(' (' + curCountry + ')'):
newShowNames.append(curName.replace(' (' + curCountry + ')', ' (' + country_list[curCountry] + ')'))
# # if we have "Show Name (2013)" this will strip the (2013) show year from the show name
# newShowNames.append(re.sub('\(\d{4}\)', '', curName))
showNames += newShowNames
return set(showNames)
def determineReleaseName(dir_name=None, nzb_name=None):
"""Determine a release name from an nzb and/or folder name"""
if nzb_name is not None:
logger.log("Using nzb_name for release name.")
return nzb_name.rpartition('.')[0]
if dir_name is None:
return None
# try to get the release name from nzb/nfo
file_types = ["*.nzb", "*.nfo"]
for search in file_types:
reg_expr = re.compile(fnmatch.translate(search), re.I)
files = [file_name for file_name in ek(os.listdir, dir_name) if
ek(os.path.isfile, ek(os.path.join, dir_name, file_name))]
results = [f for f in files if reg_expr.search(f)]
if len(results) == 1:
found_file = ek(os.path.basename, results[0])
found_file = found_file.rpartition('.')[0]
if filter_bad_releases(found_file):
logger.log("Release name (" + found_file + ") found from file (" + results[0] + ")")
return found_file.rpartition('.')[0]
# If that fails, we try the folder
folder = ek(os.path.basename, dir_name)
if filter_bad_releases(folder):
# NOTE: Multiple failed downloads will change the folder name.
# (e.g., appending #s)
# Should we handle that?
logger.log("Folder name (" + folder + ") appears to be a valid release name. Using it.", logger.DEBUG)
return folder
return None
def hasPreferredWords(name, show=None):
"""Determine based on the full episode (file)name combined with the preferred words what the weight its preference should be"""
name = name.lower()
def clean_set(words):
weighted_words = []
words = words.lower().strip().split(',')
val = len(words)
for word in words:
weighted_words.append({"word": word, "weight": val})
val = val - 1
return weighted_words
prefer_words = []
## Because we weigh values, we can not union global and show based values, so we don't do that
if sickbeard.PREFER_WORDS:
prefer_words = clean_set(sickbeard.PREFER_WORDS)
if show and show.rls_prefer_words:
prefer_words = clean_set(show.rls_prefer_words or '')
## if nothing set, return position 0
if len(prefer_words) <= 0:
return 0
value = 0
for word_pair in prefer_words:
if word_pair['weight'] > value and word_pair['word'] in name:
value = word_pair['weight']
return value
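# Example of the preferred-words weighting above (illustrative): with
# PREFER_WORDS = "proper,repack", earlier words in the list get a higher
# weight, so "Show.S01E01.PROPER.720p" scores 2, "Show.S01E01.REPACK.720p"
# scores 1, and a name containing neither word scores 0.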
| gpl-3.0 | 774,299,349,112,703,500 | 35.881633 | 158 | 0.651726 | false | 3.809444 | false | false | false |
renanvicente/urleater-server | urleater/settings.py | 1 | 2513 | """
Django settings for urleater project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ka$u7fm!iu&r2%gn8)01p@&)3xs=s4$t4zck%=$r&!!gcb$!-e'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django_admin_bootstrapped.bootstrap3',
'django_admin_bootstrapped',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'south',
'app',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'urleater.urls'
WSGI_APPLICATION = 'urleater.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'testapp',
'USER': 'testapp',
'PASSWORD': 'test',
'HOST': '',
'PORT': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
#TEMPLATE_DIRS = (
# # Put strings here, like "/home/html/django_templates"
# # or "C:/www/django/templates".
# # Always use forward slashes, even on Windows.
# # Don't forget to use absolute paths, not relative paths.
# os.path.join(BASE_DIR, '../templates'),
#)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "vendors"),
os.path.join(BASE_DIR, "static"),)
| apache-2.0 | -8,719,305,114,898,483,000 | 23.881188 | 71 | 0.6924 | false | 3.205357 | false | false | false |
meowtec/fanserve.py | examples/test-tornado.py | 1 | 1361 | # coding:utf-8
import sys
sys.path.append("..")
import tornado.ioloop
import tornado.web
import fanserve as fans
class MyTornadoFans(fans.Tornado):
app_secret = 'appsecretEnF5leY4V'
def receive_text(self, text):
if text == u'文章':
print('receive wz')
self.reply_articles([
{
"display_name": "两个故事",
"summary": "今天讲两个故事,分享给你。谁是公司?谁又是中国人?",
"image": "http://storage.mcp.weibo.cn/0JlIv.jpg",
"url": "http://e.weibo.com/mediaprofile/article/detail?uid=1722052204&aid=983319"
}
])
else:
print('receive text')
self.reply_text(text)
def receive_event(self, event):
self.reply_text('event: ' + event)
def receive_default(self, data):
self.reply_text('发送『文章』,将返回文章;发送其他文字将原文返回。')
class MainHandler(tornado.web.RequestHandler):
def get(self):
MyTornadoFans(context=self).get()
def post(self):
MyTornadoFans(context=self).post()
application = tornado.web.Application([
(r"/weibo/", MainHandler),
])
if __name__ == "__main__":
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
| mit | -7,682,928,567,104,062,000 | 23.98 | 101 | 0.572458 | false | 2.911422 | false | false | false |
Jajcus/pyxmpp | pyxmpp/stanzaprocessor.py | 1 | 19717 | #
# (C) Copyright 2003-2010 Jacek Konieczny <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""Handling of XMPP stanzas.
Normative reference:
- `RFC 3920 <http://www.ietf.org/rfc/rfc3920.txt>`__
"""
__docformat__="restructuredtext en"
import libxml2
import logging
import threading
from pyxmpp.expdict import ExpiringDictionary
from pyxmpp.exceptions import ProtocolError, BadRequestProtocolError, FeatureNotImplementedProtocolError
from pyxmpp.stanza import Stanza
class StanzaProcessor:
"""Universal stanza handler/router class.
Provides facilities to set up custom handlers for various types of stanzas.
:Ivariables:
- `lock`: lock object used to synchronize access to the
`StanzaProcessor` object.
- `me`: local JID.
- `peer`: remote stream endpoint JID.
- `process_all_stanzas`: when `True` then all stanzas received are
considered local.
- `initiator`: `True` if local stream endpoint is the initiating entity.
"""
def __init__(self):
"""Initialize a `StanzaProcessor` object."""
self.me=None
self.peer=None
self.initiator=None
self.peer_authenticated=False
self.process_all_stanzas=True
self._iq_response_handlers=ExpiringDictionary()
self._iq_get_handlers={}
self._iq_set_handlers={}
self._message_handlers=[]
self._presence_handlers=[]
self.__logger=logging.getLogger("pyxmpp.Stream")
self.lock=threading.RLock()
def process_response(self, response):
"""Examines out the response returned by a stanza handler and sends all
stanzas provided.
:Returns:
- `True`: if `response` is `Stanza`, iterable or `True` (meaning the stanza was processed).
- `False`: when `response` is `False` or `None`
:returntype: `bool`
"""
if response is None or response is False:
return False
if isinstance(response, Stanza):
self.send(response)
return True
try:
response = iter(response)
except TypeError:
return bool(response)
for stanza in response:
if isinstance(stanza, Stanza):
self.send(stanza)
return True
def process_iq(self, stanza):
"""Process IQ stanza received.
:Parameters:
- `stanza`: the stanza received
If a matching handler is available pass the stanza to it.
Otherwise ignore it if it is "error" or "result" stanza
or return "feature-not-implemented" error."""
sid=stanza.get_id()
fr=stanza.get_from()
typ=stanza.get_type()
if typ in ("result","error"):
if fr:
ufr=fr.as_unicode()
else:
ufr=None
res_handler = err_handler = None
try:
res_handler, err_handler = self._iq_response_handlers.pop((sid,ufr))
except KeyError:
if ( (fr==self.peer or fr==self.me or fr==self.me.bare()) ):
try:
res_handler, err_handler = self._iq_response_handlers.pop((sid,None))
except KeyError:
pass
if None is res_handler is err_handler:
return False
if typ=="result":
response = res_handler(stanza)
else:
response = err_handler(stanza)
self.process_response(response)
return True
q=stanza.get_query()
if not q:
raise BadRequestProtocolError, "Stanza with no child element"
el=q.name
ns=q.ns().getContent()
if typ=="get":
if self._iq_get_handlers.has_key((el,ns)):
response = self._iq_get_handlers[(el,ns)](stanza)
self.process_response(response)
return True
else:
raise FeatureNotImplementedProtocolError, "Not implemented"
elif typ=="set":
if self._iq_set_handlers.has_key((el,ns)):
response = self._iq_set_handlers[(el,ns)](stanza)
self.process_response(response)
return True
else:
raise FeatureNotImplementedProtocolError, "Not implemented"
else:
raise BadRequestProtocolError, "Unknown IQ stanza type"
def __try_handlers(self,handler_list,typ,stanza):
""" Search the handler list for handlers matching
        given stanza type and payload namespace. Run the matching
        handlers in priority order until the first one returns `True`.
:Parameters:
- `handler_list`: list of available handlers
- `typ`: stanza type (value of its "type" attribute)
- `stanza`: the stanza to handle
:return: result of the last handler or `False` if no
handler was found."""
namespaces=[]
if stanza.xmlnode.children:
c=stanza.xmlnode.children
while c:
try:
ns=c.ns()
except libxml2.treeError:
ns=None
if ns is None:
c=c.next
continue
ns_uri=ns.getContent()
if ns_uri not in namespaces:
namespaces.append(ns_uri)
c=c.next
for handler_entry in handler_list:
t=handler_entry[1]
ns=handler_entry[2]
handler=handler_entry[3]
if t!=typ:
continue
if ns is not None and ns not in namespaces:
continue
response = handler(stanza)
if self.process_response(response):
return True
return False
def process_message(self,stanza):
"""Process message stanza.
Pass it to a handler of the stanza's type and payload namespace.
        If no handler for the actual stanza type succeeds then handlers
for type "normal" are used.
:Parameters:
- `stanza`: message stanza to be handled
"""
if not self.initiator and not self.peer_authenticated:
self.__logger.debug("Ignoring message - peer not authenticated yet")
return True
typ=stanza.get_type()
if self.__try_handlers(self._message_handlers,typ,stanza):
return True
if typ!="error":
return self.__try_handlers(self._message_handlers,"normal",stanza)
return False
def process_presence(self,stanza):
"""Process presence stanza.
Pass it to a handler of the stanza's type and payload namespace.
:Parameters:
- `stanza`: presence stanza to be handled
"""
if not self.initiator and not self.peer_authenticated:
self.__logger.debug("Ignoring presence - peer not authenticated yet")
return True
typ=stanza.get_type()
if not typ:
typ="available"
return self.__try_handlers(self._presence_handlers,typ,stanza)
def route_stanza(self,stanza):
"""Process stanza not addressed to us.
Return "recipient-unavailable" return if it is not
"error" nor "result" stanza.
        This method should be overridden in derived classes if they
are supposed to handle stanzas not addressed directly to local
stream endpoint.
:Parameters:
- `stanza`: presence stanza to be processed
"""
if stanza.get_type() not in ("error","result"):
r = stanza.make_error_response("recipient-unavailable")
self.send(r)
return True
def process_stanza(self,stanza):
"""Process stanza received from the stream.
First "fix" the stanza with `self.fix_in_stanza()`,
then pass it to `self.route_stanza()` if it is not directed
to `self.me` and `self.process_all_stanzas` is not True. Otherwise
        the stanza is passed to `self.process_iq()`, `self.process_message()`
or `self.process_presence()` appropriately.
:Parameters:
- `stanza`: the stanza received.
:returns: `True` when stanza was handled
"""
self.fix_in_stanza(stanza)
to=stanza.get_to()
if to and to.node == None and (not self.me
or to.domain == self.me.domain):
# workaround for OpenFire bug
# http://community.igniterealtime.org/thread/35966
to = None
if not self.process_all_stanzas and to and to!=self.me and to.bare()!=self.me.bare():
return self.route_stanza(stanza)
try:
if stanza.stanza_type=="iq":
if self.process_iq(stanza):
return True
elif stanza.stanza_type=="message":
if self.process_message(stanza):
return True
elif stanza.stanza_type=="presence":
if self.process_presence(stanza):
return True
except ProtocolError, e:
typ = stanza.get_type()
if typ != 'error' and (typ != 'result' or stanza.stanza_type != 'iq'):
r = stanza.make_error_response(e.xmpp_name)
self.send(r)
e.log_reported()
else:
e.log_ignored()
self.__logger.debug("Unhandled %r stanza: %r" % (stanza.stanza_type,stanza.serialize()))
return False
def check_to(self,to):
"""Check "to" attribute of received stream header.
:return: `to` if it is equal to `self.me`, None otherwise.
        Should be overridden in derived classes which require other logic
for handling that attribute."""
if to!=self.me:
return None
return to
def set_response_handlers(self,iq,res_handler,err_handler,timeout_handler=None,timeout=300):
"""Set response handler for an IQ "get" or "set" stanza.
This should be called before the stanza is sent.
:Parameters:
- `iq`: an IQ stanza
- `res_handler`: result handler for the stanza. Will be called
when matching <iq type="result"/> is received. Its only
argument will be the stanza received. The handler may return
a stanza or list of stanzas which should be sent in response.
- `err_handler`: error handler for the stanza. Will be called
when matching <iq type="error"/> is received. Its only
argument will be the stanza received. The handler may return
a stanza or list of stanzas which should be sent in response
but this feature should rather not be used (it is better not to
respond to 'error' stanzas).
- `timeout_handler`: timeout handler for the stanza. Will be called
when no matching <iq type="result"/> or <iq type="error"/> is
received in next `timeout` seconds. The handler should accept
two arguments and ignore them.
- `timeout`: timeout value for the stanza. After that time if no
matching <iq type="result"/> nor <iq type="error"/> stanza is
received, then timeout_handler (if given) will be called.
"""
self.lock.acquire()
try:
self._set_response_handlers(iq,res_handler,err_handler,timeout_handler,timeout)
finally:
self.lock.release()
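    # Illustrative usage sketch (an assumption, not part of the original API
    # docs): build an <iq type="get"/> stanza, register its handlers, then
    # send it.  The handler names below are placeholders.
    #
    #   iq = Iq(to_jid=peer_jid, stanza_type="get")
    #   iq.new_query("jabber:iq:version")
    #   processor.set_response_handlers(iq, version_result, version_error)
    #   processor.send(iq)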
def _set_response_handlers(self,iq,res_handler,err_handler,timeout_handler=None,timeout=300):
"""Same as `Stream.set_response_handlers` but assume `self.lock` is acquired."""
self.fix_out_stanza(iq)
to=iq.get_to()
if to:
to=to.as_unicode()
if timeout_handler:
self._iq_response_handlers.set_item((iq.get_id(),to),
(res_handler,err_handler),
timeout,timeout_handler)
else:
self._iq_response_handlers.set_item((iq.get_id(),to),
(res_handler,err_handler),timeout)
def set_iq_get_handler(self,element,namespace,handler):
"""Set <iq type="get"/> handler.
:Parameters:
- `element`: payload element name
- `namespace`: payload element namespace URI
- `handler`: function to be called when a stanza
with defined element is received. Its only argument
will be the stanza received. The handler may return a stanza or
list of stanzas which should be sent in response.
Only one handler may be defined per one namespaced element.
If a handler for the element was already set it will be lost
after calling this method.
"""
self.lock.acquire()
try:
self._iq_get_handlers[(element,namespace)]=handler
finally:
self.lock.release()
def unset_iq_get_handler(self,element,namespace):
"""Remove <iq type="get"/> handler.
:Parameters:
- `element`: payload element name
- `namespace`: payload element namespace URI
"""
self.lock.acquire()
try:
if self._iq_get_handlers.has_key((element,namespace)):
del self._iq_get_handlers[(element,namespace)]
finally:
self.lock.release()
def set_iq_set_handler(self,element,namespace,handler):
"""Set <iq type="set"/> handler.
:Parameters:
- `element`: payload element name
- `namespace`: payload element namespace URI
- `handler`: function to be called when a stanza
with defined element is received. Its only argument
will be the stanza received. The handler may return a stanza or
list of stanzas which should be sent in response.
Only one handler may be defined per one namespaced element.
If a handler for the element was already set it will be lost
after calling this method."""
self.lock.acquire()
try:
self._iq_set_handlers[(element,namespace)]=handler
finally:
self.lock.release()
def unset_iq_set_handler(self,element,namespace):
"""Remove <iq type="set"/> handler.
:Parameters:
- `element`: payload element name.
- `namespace`: payload element namespace URI."""
self.lock.acquire()
try:
if self._iq_set_handlers.has_key((element,namespace)):
del self._iq_set_handlers[(element,namespace)]
finally:
self.lock.release()
def __add_handler(self,handler_list,typ,namespace,priority,handler):
"""Add a handler function to a prioritized handler list.
:Parameters:
- `handler_list`: a handler list.
- `typ`: stanza type.
- `namespace`: stanza payload namespace.
- `priority`: handler priority. Must be >=0 and <=100. Handlers
with lower priority list will be tried first."""
if priority<0 or priority>100:
raise ValueError,"Bad handler priority (must be in 0:100)"
handler_list.append((priority,typ,namespace,handler))
handler_list.sort()
def set_message_handler(self, typ, handler, namespace=None, priority=100):
"""Set a handler for <message/> stanzas.
:Parameters:
- `typ`: message type. `None` will be treated the same as "normal",
and will be the default for unknown types (those that have no
handler associated).
- `namespace`: payload namespace. If `None` that message with any
payload (or even with no payload) will match.
- `priority`: priority value for the handler. Handlers with lower
priority value are tried first.
- `handler`: function to be called when a message stanza
with defined type and payload namespace is received. Its only
argument will be the stanza received. The handler may return a
stanza or list of stanzas which should be sent in response.
Multiple <message /> handlers with the same type/namespace/priority may
be set. Order of calling handlers with the same priority is not defined.
Handlers will be called in priority order until one of them returns True or
any stanza(s) to send (even empty list will do).
"""
self.lock.acquire()
try:
if not typ:
typ = "normal"
self.__add_handler(self._message_handlers,typ,namespace,priority,handler)
finally:
self.lock.release()
def set_presence_handler(self,typ,handler,namespace=None,priority=100):
"""Set a handler for <presence/> stanzas.
:Parameters:
- `typ`: presence type. "available" will be treated the same as `None`.
- `namespace`: payload namespace. If `None` that presence with any
payload (or even with no payload) will match.
- `priority`: priority value for the handler. Handlers with lower
priority value are tried first.
- `handler`: function to be called when a presence stanza
with defined type and payload namespace is received. Its only
argument will be the stanza received. The handler may return a
stanza or list of stanzas which should be sent in response.
Multiple <presence /> handlers with the same type/namespace/priority may
be set. Order of calling handlers with the same priority is not defined.
Handlers will be called in priority order until one of them returns
True or any stanza(s) to send (even empty list will do).
"""
self.lock.acquire()
try:
if not typ:
typ="available"
self.__add_handler(self._presence_handlers,typ,namespace,priority,handler)
finally:
self.lock.release()
def fix_in_stanza(self,stanza):
"""Modify incoming stanza before processing it.
This implementation does nothig. It should be overriden in derived
classes if needed."""
pass
def fix_out_stanza(self,stanza):
"""Modify outgoing stanza before sending into the stream.
This implementation does nothig. It should be overriden in derived
classes if needed."""
pass
def send(self,stanza):
"""Send a stanza somwhere. This one does nothing. Should be overriden
in derived classes.
:Parameters:
- `stanza`: the stanza to send.
:Types:
- `stanza`: `pyxmpp.stanza.Stanza`"""
raise NotImplementedError,"This method must be overriden in derived classes."""
# vi: sts=4 et sw=4
| lgpl-2.1 | 5,931,750,637,663,219,000 | 36.990366 | 104 | 0.589745 | false | 4.516033 | false | false | false |
ocornut/bgfx | 3rdparty/scintilla/scripts/LexGen.py | 63 | 2214 | #!/usr/bin/env python
# LexGen.py - implemented 2002 by Neil Hodgson [email protected]
# Released to the public domain.
# Regenerate the Scintilla source files that list all the lexers.
# Should be run whenever a new lexer is added or removed.
# Requires Python 2.5 or later
# Files are regenerated in place with templates stored in comments.
# The format of generation comments is documented in FileGenerator.py.
from FileGenerator import Regenerate, UpdateLineInFile, ReplaceREInFile
import ScintillaData
import HFacer
def UpdateVersionNumbers(sci, root):
UpdateLineInFile(root + "win32/ScintRes.rc", "#define VERSION_SCINTILLA",
"#define VERSION_SCINTILLA \"" + sci.versionDotted + "\"")
UpdateLineInFile(root + "win32/ScintRes.rc", "#define VERSION_WORDS",
"#define VERSION_WORDS " + sci.versionCommad)
UpdateLineInFile(root + "qt/ScintillaEditBase/ScintillaEditBase.pro",
"VERSION =",
"VERSION = " + sci.versionDotted)
UpdateLineInFile(root + "qt/ScintillaEdit/ScintillaEdit.pro",
"VERSION =",
"VERSION = " + sci.versionDotted)
UpdateLineInFile(root + "doc/ScintillaDownload.html", " Release",
" Release " + sci.versionDotted)
ReplaceREInFile(root + "doc/ScintillaDownload.html",
r"/scintilla/([a-zA-Z]+)\d\d\d",
r"/scintilla/\g<1>" + sci.version)
UpdateLineInFile(root + "doc/index.html",
' <font color="#FFCC99" size="3"> Release version',
' <font color="#FFCC99" size="3"> Release version ' +\
sci.versionDotted + '<br />')
UpdateLineInFile(root + "doc/index.html",
' Site last modified',
' Site last modified ' + sci.mdyModified + '</font>')
UpdateLineInFile(root + "doc/ScintillaHistory.html",
' Released ',
' Released ' + sci.dmyModified + '.')
def RegenerateAll(root):
sci = ScintillaData.ScintillaData(root)
Regenerate(root + "src/Catalogue.cxx", "//", sci.lexerModules)
Regenerate(root + "win32/scintilla.mak", "#", sci.lexFiles)
UpdateVersionNumbers(sci, root)
HFacer.RegenerateAll(root, False)
if __name__=="__main__":
RegenerateAll("../")
| bsd-2-clause | -3,412,395,111,659,150,300 | 40 | 77 | 0.654472 | false | 3.481132 | false | false | false |
zerok/celery-prometheus-exporter | setup.py | 1 | 1075 | import io
from setuptools import setup
long_description = "See https://github.com/zerok/celery-prometheus-exporter"
with io.open('README.rst', encoding='utf-8') as fp:
long_description = fp.read()
setup(
name='celery-prometheus-exporter',
description="Simple Prometheus metrics exporter for Celery",
long_description=long_description,
version='1.7.0',
author='Horst Gutmann',
license='MIT',
author_email='[email protected]',
url='https://github.com/zerok/celery-prometheus-exporter',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
],
py_modules=[
'celery_prometheus_exporter',
],
install_requires=[
'celery>=3',
'prometheus_client>=0.0.20',
],
entry_points={
'console_scripts': [
'celery-prometheus-exporter = celery_prometheus_exporter:main',
],
}
)
| mit | 1,541,334,639,674,667,500 | 27.289474 | 76 | 0.618605 | false | 3.681507 | false | false | false |
thaim/ansible | lib/ansible/modules/network/fortios/fortios_firewall_multicast_policy6.py | 13 | 14200 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_multicast_policy6
short_description: Configure IPv6 multicast NAT policies in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify firewall feature and multicast_policy6 category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
firewall_multicast_policy6:
description:
- Configure IPv6 multicast NAT policies.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
action:
description:
- Accept or deny traffic matching the policy.
type: str
choices:
- accept
- deny
dstaddr:
description:
- IPv6 destination address name.
type: list
suboptions:
name:
description:
- Address name. Source firewall.multicast-address6.name.
required: true
type: str
dstintf:
description:
- IPv6 destination interface name. Source system.interface.name system.zone.name.
type: str
end_port:
description:
- Integer value for ending TCP/UDP/SCTP destination port in range (1 - 65535).
type: int
id:
description:
- Policy ID.
required: true
type: int
logtraffic:
description:
- Enable/disable logging traffic accepted by this policy.
type: str
choices:
- enable
- disable
protocol:
description:
- Integer value for the protocol type as defined by IANA (0 - 255).
type: int
srcaddr:
description:
- IPv6 source address name.
type: list
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
type: str
srcintf:
description:
- IPv6 source interface name. Source system.interface.name system.zone.name.
type: str
start_port:
description:
- Integer value for starting TCP/UDP/SCTP destination port in range (1 - 65535).
type: int
status:
description:
- Enable/disable this policy.
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure IPv6 multicast NAT policies.
fortios_firewall_multicast_policy6:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
firewall_multicast_policy6:
action: "accept"
dstaddr:
-
name: "default_name_5 (source firewall.multicast-address6.name)"
dstintf: "<your_own_value> (source system.interface.name system.zone.name)"
end_port: "7"
id: "8"
logtraffic: "enable"
protocol: "10"
srcaddr:
-
name: "default_name_12 (source firewall.address6.name firewall.addrgrp6.name)"
srcintf: "<your_own_value> (source system.interface.name system.zone.name)"
start_port: "14"
status: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_firewall_multicast_policy6_data(json):
option_list = ['action', 'dstaddr', 'dstintf',
'end_port', 'id', 'logtraffic',
'protocol', 'srcaddr', 'srcintf',
'start_port', 'status']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def firewall_multicast_policy6(data, fos):
vdom = data['vdom']
if 'state' in data and data['state']:
state = data['state']
elif 'state' in data['firewall_multicast_policy6'] and data['firewall_multicast_policy6']:
state = data['firewall_multicast_policy6']['state']
else:
state = True
firewall_multicast_policy6_data = data['firewall_multicast_policy6']
filtered_data = underscore_to_hyphen(filter_firewall_multicast_policy6_data(firewall_multicast_policy6_data))
if state == "present":
return fos.set('firewall',
'multicast-policy6',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('firewall',
'multicast-policy6',
mkey=filtered_data['id'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_firewall(data, fos):
if data['firewall_multicast_policy6']:
resp = firewall_multicast_policy6(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"firewall_multicast_policy6": {
"required": False, "type": "dict", "default": None,
"options": {
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"action": {"required": False, "type": "str",
"choices": ["accept", "deny"]},
"dstaddr": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"dstintf": {"required": False, "type": "str"},
"end_port": {"required": False, "type": "int"},
"id": {"required": True, "type": "int"},
"logtraffic": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"protocol": {"required": False, "type": "int"},
"srcaddr": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"srcintf": {"required": False, "type": "str"},
"start_port": {"required": False, "type": "int"},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_firewall(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_firewall(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| mit | 6,936,528,424,947,619,000 | 32.17757 | 113 | 0.552535 | false | 4.493671 | false | false | false |
ekager/focus-android | tools/taskcluster/lib/tasks.py | 1 | 5419 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
import json
import os
import taskcluster
class TaskBuilder(object):
def __init__(self, task_id, repo_url, branch, commit, owner, source, scheduler_id):
self.task_id = task_id
self.repo_url = repo_url
self.branch = branch
self.commit = commit
self.owner = owner
self.source = source
self.scheduler_id = scheduler_id
def build_task(self, name, description, command, dependencies = [], artifacts = {}, scopes = [], features = {}, worker_type = 'github-worker'):
created = datetime.datetime.now()
expires = taskcluster.fromNow('1 year')
deadline = taskcluster.fromNow('1 day')
features = features.copy()
features.update({
"taskclusterProxy": True
})
return {
"workerType": worker_type,
"taskGroupId": self.task_id,
"schedulerId": self.scheduler_id,
"expires": taskcluster.stringDate(expires),
"retries": 5,
"created": taskcluster.stringDate(created),
"tags": {},
"priority": "lowest",
"deadline": taskcluster.stringDate(deadline),
"dependencies": [ self.task_id ] + dependencies,
"routes": [],
"scopes": scopes,
"requires": "all-completed",
"payload": {
"features": features,
"maxRunTime": 7200,
"image": "mozillamobile/focus-android:1.4",
"command": [
"/bin/bash",
"--login",
"-c",
command
],
"artifacts": artifacts,
"deadline": taskcluster.stringDate(deadline)
},
"provisionerId": "aws-provisioner-v1",
"metadata": {
"name": name,
"description": description,
"owner": self.owner,
"source": self.source
}
}
def craft_signing_task(self, build_task_id, name, description, signing_format, is_staging, apks=[], scopes=[], routes=[]):
created = datetime.datetime.now()
expires = taskcluster.fromNow('1 year')
deadline = taskcluster.fromNow('1 day')
return {
"workerType": 'mobile-signing-dep-v1' if is_staging else 'mobile-signing-v1',
"taskGroupId": self.task_id,
"schedulerId": self.scheduler_id,
"expires": taskcluster.stringDate(expires),
"retries": 5,
"created": taskcluster.stringDate(created),
"tags": {},
"priority": "lowest",
"deadline": taskcluster.stringDate(deadline),
"dependencies": [ self.task_id, build_task_id],
"routes": routes,
"scopes": scopes,
"requires": "all-completed",
"payload": {
"maxRunTime": 3600,
"upstreamArtifacts": [
{
"paths": apks,
"formats": [signing_format],
"taskId": build_task_id,
"taskType": "build"
}
]
},
"provisionerId": "scriptworker-prov-v1",
"metadata": {
"name": name,
"description": description,
"owner": self.owner,
"source": self.source
}
}
def craft_push_task(self, signing_task_id, name, description, is_staging, apks=[], scopes=[], channel='internal', commit=False):
created = datetime.datetime.now()
expires = taskcluster.fromNow('1 year')
deadline = taskcluster.fromNow('1 day')
return {
"workerType": 'mobile-pushapk-dep-v1' if is_staging else 'mobile-pushapk-v1',
"taskGroupId": self.task_id,
"schedulerId": self.scheduler_id,
"expires": taskcluster.stringDate(expires),
"retries": 5,
"created": taskcluster.stringDate(created),
"tags": {},
"priority": "lowest",
"deadline": taskcluster.stringDate(deadline),
"dependencies": [ self.task_id, signing_task_id],
"routes": [],
"scopes": scopes,
"requires": "all-completed",
"payload": {
"commit": commit,
"channel": channel,
"upstreamArtifacts": [
{
"paths": apks,
"taskId": signing_task_id,
"taskType": "signing"
}
]
},
"provisionerId": "scriptworker-prov-v1",
"metadata": {
"name": name,
"description": description,
"owner": self.owner,
"source": self.source
}
}
def schedule_task(queue, taskId, task):
print "TASK", taskId
print json.dumps(task, indent=4, separators=(',', ': '))
result = queue.createTask(taskId, task)
print "RESULT", taskId
print json.dumps(result)
| mpl-2.0 | -7,108,205,373,988,370,000 | 34.651316 | 147 | 0.49511 | false | 4.416463 | false | false | false |
PLyczkowski/Sticky-Keymap | 2.74/scripts/addons_contrib/add_mesh_building_objects/tread.py | 2 | 12171 | # Stairbuilder - Tread generation
#
# Generates treads for stair generation.
# Stair Type (typ):
# - id1 = Freestanding staircase
# - id2 = Housed-open staircase
# - id3 = Box staircase
# - id4 = Circular staircase
# Tread Type (typ_t):
# - tId1 = Classic
# - tId2 = Basic Steel
# - tId3 = Bar 1
# - tId4 = Bar 2
# - tId5 = Bar 3
#
# Paul "BrikBot" Marshall
# Created: September 19, 2011
# Last Modified: January 26, 2012
# Homepage (blog): http://post.darkarsenic.com/
# //blog.darkarsenic.com/
#
# Coded in IDLE, tested in Blender 2.61.
# Search for "@todo" to quickly find sections that need work.
#
# ##### BEGIN GPL LICENSE BLOCK #####
#
# Stairbuilder is for quick stair generation.
# Copyright (C) 2011 Paul Marshall
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import mathutils
from copy import copy
from math import radians, sqrt
from mathutils import Matrix, Vector
class Treads:
def __init__(self,G,typ,typ_t,run,w,h,d,r,toe,o,n,tk,sec,sp,sn,deg=4):
self.G = G #General
self.typ = typ #Stair type
self.typ_t = typ_t #Tread type
self.run = run #Stair run. Degrees if self.typ == "id4"
self.w=w #tread width. Is outer radius if self.typ == "id4"
self.h=h #tread height
self.d=d #tread run. Ignore for now if self.typ == "id4"
self.r=r #tread rise
self.t=toe #tread nosing
self.o=o #tread side overhang. Is inner radius if self.typ == "id4"
self.n=n #number of treads
self.tk=tk #thickness of tread metal
self.sec=sec #metal sections for tread
if sec != 1 and typ_t not in ["tId4", "tId5"]:
self.sp=((d+toe)*(sp/100))/(sec-1) #spacing between sections (% of depth)
elif typ_t in ["tId4", "tId5"]:
self.sp=sp/100 #keep % value
else:
self.sp=0
self.sn=sn #number of cross sections
self.deg = deg #number of section per "slice". Only applys if self.typ == "id4"
self.tId2_faces = [[0,1,2,3],[0,3,4,5],[4,5,6,7],[6,7,8,9],[8,9,10,11],
[12,13,14,15],[12,15,16,17],[16,17,18,19],
[18,19,20,21],[20,21,22,23],[0,1,13,12],[1,2,14,13],
[2,3,15,14],[3,4,16,15],[4,7,19,16],[7,8,20,19],
[8,11,23,20],[11,10,22,23],[10,9,21,22],[9,6,18,21],
[6,5,17,18],[5,0,12,17]]
self.out_faces = [[0,2,3,1],[0,2,10,8],[9,11,3,1],[9,11,10,8],
[2,6,7,3],[2,6,14,10],[11,15,7,3],[11,15,14,10],
[0,4,5,1],[0,4,12,8],[9,13,5,1],[9,13,12,8],
[4,6,7,5],[4,6,14,12],[13,15,14,12],[13,15,7,5]]
self.Create()
def Create(self):
# Setup the coordinates:
coords = []
coords2 = []
coords3 = []
cross = 0
cW = 0
depth = 0
offset = 0
height = 0
if self.typ in ["id1", "id2", "id3"]:
if self.typ_t == "tId1":
coords.append(Vector([-self.t,-self.o,0]))
coords.append(Vector([self.d,-self.o,0]))
coords.append(Vector([-self.t,self.w + self.o,0]))
coords.append(Vector([self.d,self.w + self.o,0]))
for i in range(4):
coords.append(coords[i]+Vector([0,0,-self.h]))
elif self.typ_t == "tId2":
depth = (self.d + self.t - (self.sec - 1) * self.sp) / self.sec
inset = depth / 4
tDepth = depth - self.t
coords.append(Vector([-self.t, -self.o, -self.h])) #0
coords.append(Vector([inset - self.t, -self.o, -self.h])) #1
coords.append(Vector([inset - self.t, -self.o, -self.h + self.tk])) #2
coords.append(Vector([self.tk - self.t, -self.o, -self.h + self.tk])) #3
coords.append(Vector([self.tk - self.t, -self.o, -self.tk])) #4
coords.append(Vector([-self.t, -self.o, 0])) #5
coords.append(Vector([tDepth, -self.o, 0])) #6
coords.append(Vector([tDepth - self.tk, -self.o, -self.tk])) #7
coords.append(Vector([tDepth - self.tk, -self.o, self.tk - self.h])) #8
coords.append(Vector([tDepth, -self.o, -self.h])) #9
coords.append(Vector([tDepth - inset, -self.o, -self.h])) #10
coords.append(Vector([tDepth - inset, -self.o, -self.h + self.tk])) #11
for i in range(12):
coords.append(coords[i] + Vector([0, self.w + (2 * self.o), 0]))
elif self.typ_t in ["tId3", "tId4", "tId5"]:
# Frame:
coords.append(Vector([-self.t,-self.o,-self.h]))
coords.append(Vector([self.d,-self.o,-self.h]))
coords.append(Vector([-self.t,-self.o,0]))
coords.append(Vector([self.d,-self.o,0]))
for i in range(4):
if (i % 2) == 0:
coords.append(coords[i] + Vector([self.tk,self.tk,0]))
else:
coords.append(coords[i] + Vector([-self.tk,self.tk,0]))
for i in range(4):
coords.append(coords[i] + Vector([0,self.w + self.o,0]))
for i in range(4):
coords.append(coords[i + 4] + Vector([0,self.w + self.o - (2 * self.tk),0]))
# Tread sections:
if self.typ_t == "tId3":
offset = (self.tk * sqrt(2)) / 2
topset = self.h - offset
self.sp = ((self.d + self.t - (2 * self.tk)) - (offset * (self.sec) + topset)) / (self.sec + 1)
baseX = -self.t + self.sp + self.tk
coords2.append(Vector([baseX, self.tk - self.o, offset - self.h]))
coords2.append(Vector([baseX + offset, self.tk - self.o, -self.h]))
for i in range(2):
coords2.append(coords2[i] + Vector([topset, 0, topset]))
for i in range(4):
coords2.append(coords2[i] + Vector([0, (self.w + self.o) - (2 * self.tk), 0]))
elif self.typ_t in ["tId4", "tId5"]:
offset = ((self.run + self.t) * self.sp) / (self.sec + 1)
topset = (((self.run + self.t) * (1 - self.sp)) - (2 * self.tk)) / self.sec
baseX = -self.t + self.tk + offset
baseY = self.w + self.o - 2 * self.tk
coords2.append(Vector([baseX, -self.o + self.tk, -self.h / 2]))
coords2.append(Vector([baseX + topset, -self.o + self.tk, -self.h / 2]))
coords2.append(Vector([baseX, -self.o + self.tk, 0]))
coords2.append(Vector([baseX + topset, -self.o + self.tk, 0]))
for i in range(4):
coords2.append(coords2[i] + Vector([0, baseY, 0]))
# Tread cross-sections:
if self.typ_t in ["tId3", "tId4"]:
cW = self.tk
cross = (self.w + (2 * self.o) - (self.sn + 2) * self.tk) / (self.sn + 1)
else: # tId5
spacing = self.sp ** (1 / 4)
cross = ((2*self.o + self.w) * spacing) / (self.sn + 1)
cW = (-2*self.tk + (2*self.o + self.w) * (1 - spacing)) / self.sn
self.sp = topset
height = -self.h / 2
baseY = -self.o + self.tk + cross
coords3.append(Vector([-self.t + self.tk, baseY, -self.h]))
coords3.append(Vector([self.d - self.tk, baseY, -self.h]))
coords3.append(Vector([-self.t + self.tk, baseY, height]))
coords3.append(Vector([self.d - self.tk, baseY, height]))
for i in range(4):
coords3.append(coords3[i] + Vector([0, cW, 0]))
# Make the treads:
for i in range(self.n):
if self.typ_t == "tId1":
self.G.Make_mesh(coords,self.G.faces,'treads')
elif self.typ_t == "tId2":
temp = []
for j in coords:
temp.append(copy(j))
for j in range(self.sec):
self.G.Make_mesh(temp, self.tId2_faces, 'treads')
for k in temp:
k += Vector([depth + self.sp, 0, 0])
elif self.typ_t in ["tId3", "tId4", "tId5"]:
self.G.Make_mesh(coords,self.out_faces,'treads')
temp = []
for j in coords2:
temp.append(copy(j))
for j in range(self.sec):
self.G.Make_mesh(temp,self.G.faces,'bars')
for k in temp:
k += Vector([offset + self.sp, 0, 0])
for j in coords2:
j += Vector([self.d, 0, self.r])
temp = []
for j in coords3:
temp.append(copy(j))
for j in range(self.sn):
self.G.Make_mesh(temp,self.G.faces,'crosses')
for k in temp:
k += Vector([0, cW + cross, 0])
for j in coords3:
j += Vector([self.d, 0, self.r])
for j in coords:
j += Vector([self.d,0,self.r])
# Circular staircase:
elif self.typ in ["id4"]:
start = [Vector([0, -self.o, 0]), Vector([0, -self.o, -self.h]),
Vector([0, -self.w, 0]), Vector([0, -self.w, -self.h])]
self.d = radians(self.run) / self.n
for i in range(self.n):
coords = []
# Base faces. Should be able to append more sections:
tId4_faces = [[0, 1, 3, 2]]
t_inner = Matrix.Rotation((-self.t / self.o) + (self.d * i), 3, 'Z')
coords.append((t_inner * start[0]) + Vector([0, 0, self.r * i]))
coords.append((t_inner * start[1]) + Vector([0, 0, self.r * i]))
t_outer = Matrix.Rotation((-self.t / self.w) + (self.d * i), 3, 'Z')
coords.append((t_outer * start[2]) + Vector([0, 0, self.r * i]))
coords.append((t_outer * start[3]) + Vector([0, 0, self.r * i]))
k = 0
for j in range(self.deg + 1):
k = (j * 4) + 4
tId4_faces.append([k, k - 4, k - 3, k + 1])
tId4_faces.append([k - 2, k - 1, k + 3, k + 2])
tId4_faces.append([k + 1, k - 3, k - 1, k + 3])
tId4_faces.append([k, k - 4, k - 2, k + 2])
rot = Matrix.Rotation(((self.d * j) / self.deg) + (self.d * i), 3, 'Z')
for v in start:
coords.append((rot * v) + Vector([0, 0, self.r * i]))
tId4_faces.append([k, k + 1, k + 3, k + 2])
self.G.Make_mesh(coords, tId4_faces, 'treads')
return
| gpl-2.0 | -6,137,030,975,140,094,000 | 49.293388 | 115 | 0.45822 | false | 3.302849 | false | false | false |
sr/beadmin | sites-edit.py | 1 | 1922 | #!/usr/bin/env python
#-*- coding: utf8 -*-
from dialog import *
import database as db
import dbconf
import re
import sys
import os
if os.environ.has_key('SUDO_USER'):
user = os.environ['SUDO_USER']
else:
user = 'root'
userfromdb = db.select('users', where="login = '%s'" % user)
if len(userfromdb) == 0:
print 'Votre utilisateur n\'a pas été autorisé à avoir un site.'
print 'Merci de contacter l\'administrateur.'
sys.exit()
id_user = list(userfromdb)[0].id
if len(sys.argv) > 1:
default = sys.argv[1]
else:
default = ""
while True:
domain = text('Nom de domaine du site à éditer :', default)
if re.match(r'^([-a-zA-Z0-9_]+\.)+(fr|eu|cc|com|org|net|info|name|be)$', domain):
break
default = ""
sites = db.query("""SELECT websites.*, domains.name
FROM websites, domains
WHERE websites.id_domains = domains.id
AND domains.name = '%s'
AND websites.id_users = '%s'""" % (domain, id_user))
if len(sites) == 0:
print 'Aucun site portant ce domaine n\'existe sous votre nom'
sys.exit()
site = list(sites)[0]
site_id = site.id
try:
if site.enabled == "yes":
enabled = choices('Voulez-vous Éditer ou Désactiver le site ?', dict(e='edit', d='no'), default='e')
else:
enabled = choices('Voulez-vous Éditer ou Activer le site ?', dict(e='edit', a='yes'), default='e')
except KeyboardInterrupt:
print
sys.exit()
if enabled == "edit":
config = editor(filling=site.config.encode('utf8')).decode('utf8')
db.update('websites', where='id = $site_id', config=config, vars=locals())
print 'La configuration de %s a été mise à jour.' % domain
else:
db.update('websites', where='id = $site_id', enabled=enabled, vars=locals())
print 'Le site %s a été %s' % (domain, {'yes':'activé', 'no':'désactivé'}[enabled])
print 'N\'oubliez pas de relancer Lighttpd pour l\'appliquer'
print 'avec restart-lighttpd.'
| gpl-3.0 | -4,300,826,163,190,533,600 | 27.863636 | 102 | 0.64252 | false | 2.75289 | false | false | false |
hydroshare/hydroshare | hs_geo_raster_resource/migrations/0001_initial.py | 1 | 7484 | # -*- coding: utf-8 -*-
from django.db import models, migrations
from django.conf import settings
import hs_core.models
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('pages', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0001_initial'),
('hs_core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='BandInformation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('name', models.CharField(max_length=500, null=True)),
('variableName', models.TextField(max_length=100, null=True)),
('variableUnit', models.CharField(max_length=50, null=True)),
('method', models.TextField(null=True, blank=True)),
('comment', models.TextField(null=True, blank=True)),
('content_type', models.ForeignKey(related_name='hs_geo_raster_resource_bandinformation_related', to='contenttypes.ContentType')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CellInformation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('name', models.CharField(max_length=500, null=True)),
('rows', models.IntegerField(null=True)),
('columns', models.IntegerField(null=True)),
('cellSizeXValue', models.FloatField(null=True)),
('cellSizeYValue', models.FloatField(null=True)),
('cellSizeUnit', models.CharField(max_length=50, null=True)),
('cellDataType', models.CharField(max_length=50, null=True)),
('noDataValue', models.FloatField(null=True)),
('content_type', models.ForeignKey(related_name='hs_geo_raster_resource_cellinformation_related', to='contenttypes.ContentType')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OriginalCoverage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('_value', models.CharField(max_length=1024, null=True)),
('content_type', models.ForeignKey(related_name='hs_geo_raster_resource_originalcoverage_related', to='contenttypes.ContentType')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RasterMetaData',
fields=[
('coremetadata_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='hs_core.CoreMetaData')),
],
options={
},
bases=('hs_core.coremetadata',),
),
migrations.CreateModel(
name='RasterResource',
fields=[
('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')),
('comments_count', models.IntegerField(default=0, editable=False)),
('rating_count', models.IntegerField(default=0, editable=False)),
('rating_sum', models.IntegerField(default=0, editable=False)),
('rating_average', models.FloatField(default=0, editable=False)),
('public', models.BooleanField(default=True, help_text='If this is true, the resource is viewable and downloadable by anyone')),
('frozen', models.BooleanField(default=False, help_text='If this is true, the resource should not be modified')),
('do_not_distribute', models.BooleanField(default=False, help_text='If this is true, the resource owner has to designate viewers')),
('discoverable', models.BooleanField(default=True, help_text='If this is true, it will turn up in searches.')),
('published_and_frozen', models.BooleanField(default=False, help_text='Once this is true, no changes can be made to the resource')),
('content', models.TextField()),
('short_id', models.CharField(default=hs_core.models.short_id, max_length=32, db_index=True)),
('doi', models.CharField(help_text=b"Permanent identifier. Never changes once it's been set.", max_length=1024, null=True, db_index=True, blank=True)),
('object_id', models.PositiveIntegerField(null=True, blank=True)),
('content_type', models.ForeignKey(blank=True, to='contenttypes.ContentType', null=True)),
('creator', models.ForeignKey(related_name='creator_of_hs_geo_raster_resource_rasterresource', to=settings.AUTH_USER_MODEL, help_text='This is the person who first uploaded the resource')),
('edit_groups', models.ManyToManyField(help_text='This is the set of Hydroshare Groups who can edit the resource', related_name='group_editable_hs_geo_raster_resource_rasterresource', null=True, to='auth.Group', blank=True)),
('edit_users', models.ManyToManyField(help_text='This is the set of Hydroshare Users who can edit the resource', related_name='user_editable_hs_geo_raster_resource_rasterresource', null=True, to=settings.AUTH_USER_MODEL, blank=True)),
('last_changed_by', models.ForeignKey(related_name='last_changed_hs_geo_raster_resource_rasterresource', to=settings.AUTH_USER_MODEL, help_text='The person who last changed the resource', null=True)),
('owners', models.ManyToManyField(help_text='The person who has total ownership of the resource', related_name='owns_hs_geo_raster_resource_rasterresource', to=settings.AUTH_USER_MODEL)),
('user', models.ForeignKey(related_name='rasterresources', verbose_name='Author', to=settings.AUTH_USER_MODEL)),
('view_groups', models.ManyToManyField(help_text='This is the set of Hydroshare Groups who can view the resource', related_name='group_viewable_hs_geo_raster_resource_rasterresource', null=True, to='auth.Group', blank=True)),
('view_users', models.ManyToManyField(help_text='This is the set of Hydroshare Users who can view the resource', related_name='user_viewable_hs_geo_raster_resource_rasterresource', null=True, to=settings.AUTH_USER_MODEL, blank=True)),
],
options={
'ordering': ('_order',),
'verbose_name': 'Geographic Raster',
},
bases=('pages.page', models.Model),
),
migrations.AlterUniqueTogether(
name='originalcoverage',
unique_together=set([('content_type', 'object_id')]),
),
migrations.AlterUniqueTogether(
name='cellinformation',
unique_together=set([('content_type', 'object_id')]),
),
migrations.RemoveField(
model_name='cellinformation',
name='cellSizeUnit',
),
]
| bsd-3-clause | 2,092,259,263,180,738,800 | 60.344262 | 250 | 0.608097 | false | 4.363848 | false | false | false |
CanonicalLtd/landscape-client | landscape/client/monitor/tests/test_activeprocessinfo.py | 1 | 33631 | import operator
import os
import shutil
import tempfile
import subprocess
from twisted.internet.defer import fail
from landscape.lib.testing import ProcessDataBuilder
from landscape.client.monitor.activeprocessinfo import ActiveProcessInfo
from landscape.client.tests.helpers import LandscapeTest, MonitorHelper
from mock import ANY, Mock, patch
class ActiveProcessInfoTest(LandscapeTest):
"""Active process info plugin tests."""
helpers = [MonitorHelper]
def setUp(self):
"""Initialize helpers and sample data builder."""
LandscapeTest.setUp(self)
self.sample_dir = tempfile.mkdtemp()
self.builder = ProcessDataBuilder(self.sample_dir)
self.mstore.set_accepted_types(["active-process-info"])
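        # Only message types that have been accepted are queued by the
        # message store, so register "active-process-info" up front.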
def tearDown(self):
"""Clean up sample data artifacts."""
shutil.rmtree(self.sample_dir)
LandscapeTest.tearDown(self)
def test_first_run_includes_kill_message(self):
"""Test ensures that the first run queues a kill-processes message."""
plugin = ActiveProcessInfo(uptime=10)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertEqual(message["kill-all-processes"], True)
self.assertTrue("add-processes" in message)
def test_only_first_run_includes_kill_message(self):
"""Test ensures that only the first run queues a kill message."""
self.builder.create_data(672, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=10,
process_name="blarpy")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=10)
self.monitor.add(plugin)
self.monitor.exchange()
self.builder.create_data(671, self.builder.STOPPED, uid=1000,
gid=1000, started_after_boot=15,
process_name="blargh")
self.monitor.exchange()
messages = self.mstore.get_pending_messages()
self.assertEqual(len(messages), 2)
message = messages[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertTrue("add-processes" in message)
message = messages[1]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("add-processes" in message)
def test_terminating_process_race(self):
"""Test that the plugin handles process termination races.
There is a potential race in the time between getting a list
of process directories in C{/proc} and reading
C{/proc/<process-id>/status} or C{/proc/<process-id>/stat}.
        The process with C{<process-id>} may terminate in this window,
        causing status (or stat) to be removed and resulting in a
        file-not-found IOError.
This test simulates race behaviour by creating a directory for
a process without a C{status} or C{stat} file.
"""
directory = tempfile.mkdtemp()
try:
os.mkdir(os.path.join(directory, "42"))
plugin = ActiveProcessInfo(proc_dir=directory, uptime=10)
self.monitor.add(plugin)
plugin.exchange()
finally:
shutil.rmtree(directory)
def test_read_proc(self):
"""Test reading from /proc."""
plugin = ActiveProcessInfo(uptime=10)
self.monitor.add(plugin)
plugin.exchange()
messages = self.mstore.get_pending_messages()
self.assertTrue(len(messages) > 0)
self.assertTrue("add-processes" in messages[0])
def test_read_sample_data(self):
"""Test reading a sample set of process data."""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1030, process_name="init")
self.builder.create_data(671, self.builder.STOPPED, uid=1000,
gid=1000, started_after_boot=1110,
process_name="blargh")
self.builder.create_data(672, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=1120,
process_name="blarpy")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertTrue("add-processes" in message)
expected_process_0 = {"state": b"R", "gid": 0, "pid": 1,
"vm-size": 11676, "name": "init", "uid": 0,
"start-time": 103, "percent-cpu": 0.0}
expected_process_1 = {"state": b"T", "gid": 1000, "pid": 671,
"vm-size": 11676, "name": "blargh", "uid": 1000,
"start-time": 111, "percent-cpu": 0.0}
expected_process_2 = {"state": b"t", "gid": 1000, "pid": 672,
"vm-size": 11676, "name": "blarpy", "uid": 1000,
"start-time": 112, "percent-cpu": 0.0}
processes = message["add-processes"]
processes.sort(key=operator.itemgetter("pid"))
self.assertEqual(processes, [expected_process_0, expected_process_1,
expected_process_2])
def test_skip_non_numeric_subdirs(self):
"""Test ensures the plugin doesn't touch non-process dirs in /proc."""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1120, process_name="init")
directory = os.path.join(self.sample_dir, "acpi")
os.mkdir(directory)
self.assertTrue(os.path.isdir(directory))
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertTrue("add-processes" in message)
expected_process = {"pid": 1, "state": b"R", "name": "init",
"vm-size": 11676, "uid": 0, "gid": 0,
"start-time": 112, "percent-cpu": 0.0}
self.assertEqual(message["add-processes"], [expected_process])
def test_plugin_manager(self):
"""Test plugin manager integration."""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1100, process_name="init")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
self.monitor.exchange()
self.assertMessages(
self.mstore.get_pending_messages(),
[{"type": "active-process-info",
"kill-all-processes": True,
"add-processes": [{"pid": 1, "state": b"R", "name": "init",
"vm-size": 11676, "uid": 0, "gid": 0,
"start-time": 110, "percent-cpu": 0.0}]}])
def test_process_terminated(self):
"""Test that the plugin handles process changes in a diff-like way."""
# This test is *too big*
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1010, process_name="init")
self.builder.create_data(671, self.builder.STOPPED, uid=1000,
gid=1000, started_after_boot=1020,
process_name="blargh")
self.builder.create_data(672, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=1040,
process_name="blarpy")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
# Terminate a process and start another.
self.builder.remove_data(671)
self.builder.create_data(12753, self.builder.RUNNING,
uid=0, gid=0, started_after_boot=1070,
process_name="wubble")
plugin.exchange()
messages = self.mstore.get_pending_messages()
self.assertEqual(len(messages), 2)
# The first time the plugin runs we expect all known processes
# to be killed.
message = messages[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertEqual(message["kill-all-processes"], True)
self.assertTrue("add-processes" in message)
expected_process_0 = {"state": b"R", "gid": 0, "pid": 1,
"vm-size": 11676, "name": "init",
"uid": 0, "start-time": 101,
"percent-cpu": 0.0}
expected_process_1 = {"state": b"T", "gid": 1000, "pid": 671,
"vm-size": 11676, "name": "blargh",
"uid": 1000, "start-time": 102,
"percent-cpu": 0.0}
expected_process_2 = {"state": b"t", "gid": 1000, "pid": 672,
"vm-size": 11676, "name": "blarpy",
"uid": 1000, "start-time": 104,
"percent-cpu": 0.0}
processes = message["add-processes"]
processes.sort(key=operator.itemgetter("pid"))
self.assertEqual(processes, [expected_process_0, expected_process_1,
expected_process_2])
# Report diff-like changes to processes, such as terminated
# processes and new processes.
message = messages[1]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("add-processes" in message)
self.assertEqual(len(message["add-processes"]), 1)
expected_process = {"state": b"R", "gid": 0, "pid": 12753,
"vm-size": 11676, "name": "wubble",
"uid": 0, "start-time": 107,
"percent-cpu": 0.0}
self.assertEqual(message["add-processes"], [expected_process])
self.assertTrue("kill-processes" in message)
self.assertEqual(len(message["kill-processes"]), 1)
self.assertEqual(message["kill-processes"], [671])
def test_only_queue_message_when_process_data_is_available(self):
"""Test ensures that messages are only queued when data changes."""
self.builder.create_data(672, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=10,
process_name="blarpy")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=10)
self.monitor.add(plugin)
plugin.exchange()
self.assertEqual(len(self.mstore.get_pending_messages()), 1)
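        # A second exchange with unchanged process data must not queue
        # another message.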
plugin.exchange()
self.assertEqual(len(self.mstore.get_pending_messages()), 1)
def test_only_report_active_processes(self):
"""Test ensures the plugin only reports active processes."""
self.builder.create_data(672, self.builder.DEAD,
uid=1000, gid=1000, started_after_boot=10,
process_name="blarpy")
self.builder.create_data(673, self.builder.ZOMBIE,
uid=1000, gid=1000, started_after_boot=12,
process_name="blarpitty")
self.builder.create_data(674, self.builder.RUNNING,
uid=1000, gid=1000, started_after_boot=13,
process_name="blarpie")
self.builder.create_data(675, self.builder.STOPPED,
uid=1000, gid=1000, started_after_boot=14,
process_name="blarping")
self.builder.create_data(676, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=15,
process_name="floerp")
self.builder.create_data(677, self.builder.DISK_SLEEP,
uid=1000, gid=1000, started_after_boot=18,
process_name="floerpidity")
self.builder.create_data(678, self.builder.SLEEPING,
uid=1000, gid=1000, started_after_boot=21,
process_name="floerpiditting")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=10)
self.monitor.add(plugin)
plugin.exchange()
messages = self.mstore.get_pending_messages()
self.assertEqual(len(messages), 1)
message = messages[0]
self.assertTrue("kill-all-processes" in message)
self.assertTrue("kill-processes" not in message)
self.assertTrue("add-processes" in message)
pids = [process["pid"] for process in message["add-processes"]]
pids.sort()
self.assertEqual(pids, [673, 674, 675, 676, 677, 678])
def test_report_interesting_state_changes(self):
"""Test ensures that interesting state changes are reported."""
self.builder.create_data(672, self.builder.RUNNING,
uid=1000, gid=1000, started_after_boot=10,
process_name="blarpy")
# Report a running process.
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=10)
self.monitor.add(plugin)
plugin.exchange()
messages = self.mstore.get_pending_messages()
self.assertEqual(len(messages), 1)
message = messages[0]
self.assertTrue("kill-all-processes" in message)
self.assertTrue("kill-processes" not in message)
self.assertTrue("add-processes" in message)
self.assertEqual(message["add-processes"][0]["pid"], 672)
self.assertEqual(message["add-processes"][0]["state"], b"R")
# Convert the process to a zombie and ensure it gets reported.
self.builder.remove_data(672)
self.builder.create_data(672, self.builder.ZOMBIE,
uid=1000, gid=1000, started_after_boot=10,
process_name="blarpy")
plugin.exchange()
messages = self.mstore.get_pending_messages()
self.assertEqual(len(messages), 2)
message = messages[1]
self.assertTrue("kill-all-processes" not in message)
self.assertTrue("update-processes" in message)
self.assertEqual(message["update-processes"][0]["state"], b"Z")
def test_call_on_accepted(self):
"""
L{MonitorPlugin}-based plugins can provide a callable to call
when a message type becomes accepted.
"""
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10)
self.monitor.add(plugin)
self.assertEqual(len(self.mstore.get_pending_messages()), 0)
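        # Firing the acceptance event should invoke the plugin's registered
        # callback and queue a message.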
result = self.monitor.fire_event(
"message-type-acceptance-changed", "active-process-info", True)
def assert_messages(ignored):
self.assertEqual(len(self.mstore.get_pending_messages()), 1)
result.addCallback(assert_messages)
return result
def test_resynchronize_event(self):
"""
When a C{resynchronize} event occurs, with 'process' scope, we should
clear the information held in memory by the activeprocess monitor.
"""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1030, process_name="init")
self.builder.create_data(671, self.builder.STOPPED, uid=1000,
gid=1000, started_after_boot=1110,
process_name="blargh")
self.builder.create_data(672, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=1120,
process_name="blarpy")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
messages = self.mstore.get_pending_messages()
expected_messages = [{"add-processes": [
{"gid": 1000,
"name": u"blarpy",
"pid": 672,
"start-time": 112,
"state": b"t",
"uid": 1000,
"vm-size": 11676,
"percent-cpu": 0.0},
{"gid": 0,
"name": u"init",
"pid": 1,
"start-time": 103,
"state": b"R",
"uid": 0,
"vm-size": 11676,
"percent-cpu": 0.0},
{"gid": 1000,
"name": u"blargh",
"pid": 671,
"start-time": 111,
"state": b"T",
"uid": 1000,
"vm-size": 11676,
"percent-cpu": 0.0}],
"kill-all-processes": True,
"type": "active-process-info"}]
self.assertMessages(messages, expected_messages)
plugin.exchange()
messages = self.mstore.get_pending_messages()
# No new messages should be pending
self.assertMessages(messages, expected_messages)
process_scope = ["process"]
self.reactor.fire("resynchronize", process_scope)
plugin.exchange()
messages = self.mstore.get_pending_messages()
# The resynchronisation should cause the same messages to be generated
# again.
expected_messages.extend(expected_messages)
self.assertMessages(messages, expected_messages)
def test_resynchronize_event_resets_session_id(self):
"""
When a C{resynchronize} event occurs a new session id is acquired so
that future messages can be sent.
"""
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
session_id = plugin._session_id
plugin.client.broker.message_store.drop_session_ids()
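        # Dropping the stored session ids simulates the server forgetting
        # this client; resynchronizing must make the plugin fetch a new one.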
self.reactor.fire("resynchronize")
plugin.exchange()
self.assertNotEqual(session_id, plugin._session_id)
def test_resynchronize_event_with_global_scope(self):
"""
        When a C{resynchronize} event occurs without a scope (i.e. global
        scope), the L{_reset} method should be called on L{ActiveProcessInfo}.
"""
self.builder.create_data(672, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=1120,
process_name="blarpy")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
messages = self.mstore.get_pending_messages()
expected_messages = [{"add-processes": [
{"gid": 1000,
"name": u"blarpy",
"pid": 672,
"start-time": 112,
"state": b"t",
"uid": 1000,
"vm-size": 11676,
"percent-cpu": 0.0}],
"kill-all-processes": True,
"type": "active-process-info"}]
self.assertMessages(messages, expected_messages)
plugin.exchange()
messages = self.mstore.get_pending_messages()
# No new messages should be pending
self.assertMessages(messages, expected_messages)
self.reactor.fire("resynchronize")
plugin.exchange()
messages = self.mstore.get_pending_messages()
# The resynchronisation should cause the same messages to be generated
# again.
expected_messages.extend(expected_messages)
self.assertMessages(messages, expected_messages)
def test_do_not_resynchronize_with_other_scope(self):
"""
When a C{resynchronize} event occurs, with an irrelevant scope, we
should do nothing.
"""
self.builder.create_data(672, self.builder.TRACING_STOP,
uid=1000, gid=1000, started_after_boot=1120,
process_name="blarpy")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
messages = self.mstore.get_pending_messages()
expected_messages = [{"add-processes": [
{"gid": 1000,
"name": u"blarpy",
"pid": 672,
"start-time": 112,
"state": b"t",
"uid": 1000,
"vm-size": 11676,
"percent-cpu": 0.0}],
"kill-all-processes": True,
"type": "active-process-info"}]
self.assertMessages(messages, expected_messages)
plugin.exchange()
messages = self.mstore.get_pending_messages()
# No new messages should be pending
self.assertMessages(messages, expected_messages)
disk_scope = ["disk"]
self.reactor.fire("resynchronize", disk_scope)
plugin.exchange()
messages = self.mstore.get_pending_messages()
# The resynchronisation should not have fired, so we won't see any
# additional messages here.
self.assertMessages(messages, expected_messages)
def test_do_not_persist_changes_when_send_message_fails(self):
"""
        When the plugin is run it persists data that it uses on
        subsequent checks to calculate the delta to send. It should
        only persist that data once the broker confirms that the
        message has been sent successfully.
"""
class MyException(Exception):
pass
self.log_helper.ignore_errors(MyException)
self.builder.create_data(672, self.builder.RUNNING,
uid=1000, gid=1000, started_after_boot=10,
process_name="python")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=10)
self.monitor.add(plugin)
self.monitor.broker.send_message = Mock(
return_value=fail(MyException()))
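        # The failed send means the delta must not be persisted, so the next
        # get_message() call should rebuild exactly the same payload.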
message = plugin.get_message()
def assert_message(message_id):
self.assertEqual(message, plugin.get_message())
result = plugin.exchange()
result.addCallback(assert_message)
self.monitor.broker.send_message.assert_called_once_with(
ANY, ANY, urgent=ANY)
return result
def test_process_updates(self):
"""Test updates to processes are successfully reported."""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1100, process_name="init",)
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
with patch.object(plugin.registry, 'flush') as flush_mock:
plugin.exchange()
flush_mock.assert_called_once_with()
flush_mock.reset_mock()
messages = self.mstore.get_pending_messages()
self.assertEqual(len(messages), 1)
self.builder.remove_data(1)
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1100,
process_name="init", vmsize=20000)
plugin.exchange()
flush_mock.assert_called_once_with()
messages = self.mstore.get_pending_messages()
self.assertEqual(len(messages), 2)
self.assertMessages(messages, [{"timestamp": 0,
"api": b"3.2",
"type": "active-process-info",
"kill-all-processes": True,
"add-processes": [{"start-time": 110,
"name": u"init",
"pid": 1,
"percent-cpu": 0.0,
"state": b"R",
"gid": 0,
"vm-size": 11676,
"uid": 0}]},
{"timestamp": 0,
"api": b"3.2",
"type": "active-process-info",
"update-processes": [
{"start-time": 110,
"name": u"init",
"pid": 1,
"percent-cpu": 0.0,
"state": b"R",
"gid": 0,
"vm-size": 20000,
"uid": 0}]}])
class PluginManagerIntegrationTest(LandscapeTest):
helpers = [MonitorHelper]
def setUp(self):
LandscapeTest.setUp(self)
self.sample_dir = self.makeDir()
self.builder = ProcessDataBuilder(self.sample_dir)
self.mstore.set_accepted_types(["active-process-info",
"operation-result"])
def get_missing_pid(self):
popen = subprocess.Popen(["hostname"], stdout=subprocess.PIPE)
popen.wait()
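        # By the time wait() returns the hostname child has exited and been
        # reaped, so the pid returned below no longer exists under /proc.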
return popen.pid
def get_active_process(self):
return subprocess.Popen(["python", "-c", "raw_input()"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
def test_read_long_process_name(self):
"""Test reading a process with a long name."""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1030,
process_name="NetworkManagerDaemon")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=2000,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertTrue("add-processes" in message)
expected_process_0 = {"state": b"R", "gid": 0, "pid": 1,
"vm-size": 11676, "name": "NetworkManagerDaemon",
"uid": 0, "start-time": 103, "percent-cpu": 0.0}
processes = message["add-processes"]
self.assertEqual(processes, [expected_process_0])
def test_strip_command_line_name_whitespace(self):
"""Whitespace should be stripped from command-line names."""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=30,
process_name=" postgres: writer process ")
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["add-processes"][0]["name"],
u"postgres: writer process")
def test_read_process_with_no_cmdline(self):
"""Test reading a process without a cmdline file."""
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=1030,
process_name="ProcessWithLongName",
generate_cmd_line=False)
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=100,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertTrue("add-processes" in message)
expected_process_0 = {"state": b"R", "gid": 0, "pid": 1,
"vm-size": 11676, "name": "ProcessWithLong",
"uid": 0, "start-time": 103, "percent-cpu": 0.0}
processes = message["add-processes"]
self.assertEqual(processes, [expected_process_0])
def test_generate_cpu_usage(self):
"""
Test that we can calculate the CPU usage from system information and
the /proc/<pid>/stat file.
"""
stat_data = "1 Process S 1 0 0 0 0 0 0 0 " \
"0 0 20 20 0 0 0 0 0 0 3000 0 " \
"0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0"
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=None,
process_name="Process",
generate_cmd_line=False,
stat_data=stat_data)
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=400,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertTrue("add-processes" in message)
processes = message["add-processes"]
expected_process_0 = {"state": b"R", "gid": 0, "pid": 1,
"vm-size": 11676, "name": u"Process",
"uid": 0, "start-time": 300,
"percent-cpu": 4.00}
processes = message["add-processes"]
self.assertEqual(processes, [expected_process_0])
def test_generate_cpu_usage_capped(self):
"""
Test that we can calculate the CPU usage from system information and
the /proc/<pid>/stat file, the CPU usage should be capped at 99%.
"""
stat_data = "1 Process S 1 0 0 0 0 0 0 0 " \
"0 0 500 500 0 0 0 0 0 0 3000 0 " \
"0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0"
self.builder.create_data(1, self.builder.RUNNING, uid=0, gid=0,
started_after_boot=None,
process_name="Process",
generate_cmd_line=False,
stat_data=stat_data)
plugin = ActiveProcessInfo(proc_dir=self.sample_dir, uptime=400,
jiffies=10, boot_time=0)
self.monitor.add(plugin)
plugin.exchange()
message = self.mstore.get_pending_messages()[0]
self.assertEqual(message["type"], "active-process-info")
self.assertTrue("kill-all-processes" in message)
self.assertTrue("add-processes" in message)
processes = message["add-processes"]
expected_process_0 = {"state": b"R", "gid": 0, "pid": 1,
"vm-size": 11676, "name": u"Process",
"uid": 0, "start-time": 300,
"percent-cpu": 99.00}
processes = message["add-processes"]
self.assertEqual(processes, [expected_process_0])
| gpl-2.0 | -229,604,412,375,126,000 | 44.570461 | 79 | 0.522821 | false | 4.305595 | true | false | false |
tallforasmurf/PPQT | pqFnote.py | 1 | 58681 | # These imports move Python 2.x almost to Python 3.
# They must precede anything except #comments, including even the docstring
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
__version__ = "1.3.0"
__author__ = "David Cortesi"
__copyright__ = "Copyright 2011, 2012, 2013 David Cortesi"
__maintainer__ = "David Cortesi"
__email__ = "[email protected]"
__license__ = '''
License (GPL-3.0) :
This file is part of PPQT.
PPQT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You can find a copy of the GNU General Public License in the file
extras/COPYING.TXT included in the distribution of this program, or see:
<http://www.gnu.org/licenses/>.
'''
'''
Implement the Footnote managament panel, whose chief feature is a table
of footnotes that is re/built with a Refresh button. Important nomenclature:
A footnote KEY is a symbol that links an ANCHOR to a NOTE.
An Anchor
* appears in text but never in column 0 (never at start of line),
* never appears inside a word, so [oe] is not an anchor,
* has a Key in square brackets with no superfluous spaces, e.g. [A] or [2].
A Note
* begins on a line that follows its matching anchor
* always begins in column 0 with [Footnote k: where k is a Key.
* always ends with a right square bracket at end of line.
It is not required that Keys be unique. (It is normal for most Keys in a PG
text to be proofed as "A" and a few as "B".) However it is expected and required
that (a) the Anchor with Key k precedes the Note with the matching Key k,
and (b) Notes with the same Key appear in the same sequence as their anchors.
A Note may contain an Anchor, but Notes may NOT be nested. A Note anchored in
another Note must be outside the other note. A note may contain square brackets
so long as the contained brackets do not end a line. This is valid:
Text[A] and more text[A]
...
[Footnote A: this note has[i] an anchor.]
[Footnote A: this is the second note A and runs
to multiple -- [text in brackets but not at end of line] --
lines]
[Footnote i: inner note anchored in first note A.]
The footnote table has these columns:
Key: The key text from a footnote, e.g. A or iv or 92.
Class: The class of the key, one of:
ABC uppercase alpha
IVX uppercase roman numeral
123 decimal
abc lowercase alpha
ivx lowercase roman numeral
        *\u00a4\u00a7  symbols
Ref Line: The text block (line) number containing the anchor
Note Line: The text block number of the matching Note
Length: The length in lines of the matched Note
Text: The opening text of the Note e.g. [Footnote A: This note has...
The example above might produce the following table:
Key Class Ref Line Note Line Length Text
A ABC 1535 1570 1 Footnote A: this note has[i..
A ABC 1535 1571 3 Footnote A: this is the sec..
i ivx 1570 1574 1 Footnote i: inner note refe..
The table interacts as follows.
* Clicking Key jumps the edit text to the Ref line, unless it is on the ref
line in which case it jumps to the Note line, in other words, click/click
the Key to ping-pong between the Ref and the Note.
* Clicking Ref Line jumps the edit text to that line with the Key
(not the whole Anchor) selected.
* Clicking Note line jumps the edit text to that line with the Note selected.
* When a Key or a Note is not matched, its row is pink.
* When the Lines value is >10, or Note Line minus Ref Line is >50, the row
is pale green
The actual data behind the table is a Python list of dicts where each dict
describes one Key and/or Note (both, when they match), with these elements:
'K' : Key symbol as a QString
'C' : Key class number
'R' : QTextCursor with position/anchor selecting the Key in the Ref, or None
'N' : QTextCursor selecting the Note, or None
If an Anchor is found, K has the Key and R selects the Key.
If a Note is found, K has the key and N selects the Note.
When a Ref and a Note are matched, all fields are set.
Note we don't pull out the line numbers but rather get them as needed from the
QTextCursors. This is because Qt keeps the cursors updated as the document
is edited, so edits that don't modify Refs or Notes don't need Refresh to keep
the table current.
When Refresh is clicked, this list of dicts is rebuilt by scanning the whole
document with regexs to find Anchors and Notes, and matching them.
During Refresh, found Keys are assigned to a number class based on their
values, with classes expressed as regular expressions:
Regex Assumed class
[IVXLCDM]{1,19} IVX
    [A-Z]{1,3}          ABC
    \d{1,4}             123
    [ivxlcdm]{1,19}     ivx
    [a-z]{1,3}          abc
    [*\u00a4\u00a7\u00b6\u2020\u2021]  symbols: star, currency, section, para, dagger, dbl-dagger
(Apart from the symbols these tests are not unicode-aware, e.g. the ABC class
does not encompass uppercase Cyrillic, only the Latin-1 letters. In Qt5 it may
be possible to code a regex to detect Unicode upper- or lowercase, and we can
revisit allowing e.g. Keys with Greek letters.)
Other controls supplied at the bottom of the panel are:
Renumber Streams: a box with the six Key classes and for each, a popup
giving the choice of renumber stream:
1,2,..9999
A,B,..ZZZ
I,II,..MMMM
a,b,..zzz
i,ii,..mmmm
no renumber
There are five unique number streams, set to 0 at the start of a renumber
operation and incremented before use, and formatted in one of five ways.
The initial assignment of classes to streams is:
123 : 1,2,..9999
ABC : A,B,..ZZZ
IVX : A,B,..ZZZ
abc : a,b,..zzz
ivx : a,b,..zzz
sym : no renumber
A typical book has only ABC keys, or possibly ABC and also ixv or 123 Keys.
There is unavoidable ambiguity between alpha and roman classes. Although an
alpha key with only roman letters is classed as roman, the renumber stream
for roman is initialized to the alpha number stream.
In other words, the ambiguity is resolved in favor of treating all alphas
as alphas. If the user actually wants a roman stream, she can e.g. set
class ivx to use stream i,ii..m. Setting either roman Class to use a
roman Stream causes the alpha class of that case to change to no-renumber.
Setting an alpha class to use any stream causes the roman stream of that
case to also use the same stream. Thus we will not permit a user to try
to have both an alpha stream AND a roman stream of the same letter case
at the same time.
The Renumber button checks for any nonmatched keys and only shows an error
dialog if any exist. Else it causes all Keys in the table to be renumbered
using the stream assigned to their class. This is a single-undo operation.
A Footnote Section is marked off using /F .. F/ markers (which are ignored by
the reflow code). The Move Notes button asks permission with a warning message.
On OK, it scans the document and makes a list of QTextCursors of the body of
all Footnote Sections. If none are found it shows an error and stops. If the
last one found is above the last Note in the table, it shows an error and stops.
Else it scans the Notes in the table from bottom up. For each note, if the note
is not already inside a Footnote section, its contents are inserted at the
head of the Footnote section next below it and deleted at the
original location. The QTextCursor in the table is repositioned.
The database of footnotes built by Refresh and shown in the table is cleared
on the DocHasChanged signal from pqMain, so it has to be rebuilt after any
book is loaded, and isn't saved. We should think about adding the footnote
info to the document metadata, but only if the Refresh operation proves to be
too lengthy to bear.
'''
from PyQt4.QtCore import (
Qt,
QAbstractTableModel,QModelIndex,
QChar, QString, QStringList,
QRegExp,
QVariant,
SIGNAL)
from PyQt4.QtGui import (
QBrush, QColor,
QComboBox,
QItemDelegate,
QSpacerItem,
QTableView,
QGroupBox,
QHBoxLayout, QVBoxLayout,
QHeaderView,
QLabel,
QLineEdit,
QPushButton,
QSpinBox,
QTextCursor,
QWidget)
import pqMsgs
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# This code is global and relates to creating the "database" of footnotes.
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Right, let's get some constants defined globally
# KeyClass_* gives sequential integer values to the classes.
KeyClass_IVX = 0
KeyClass_ABC = 1
KeyClass_ivx = 2
KeyClass_abc = 3
KeyClass_123 = 4
KeyClass_sym = 5
# name strings in KeyClass_* numeric order
KeyClassNames = (
QString(u'IVX'),
QString(u'ABC'),
QString(u'ivx'),
QString(u'abc'),
QString(u'123'),
QString(u'*\u00a4\u00a7') )
# stream names as a QStringList in KeyClass_* numeric order
# (used in comboboxes)
StreamNames = QStringList(QString(u'I,II..M')) << \
QString(u'A,B,..ZZZ') << \
QString(u'i,ii..mm') << \
QString(u'a,b,..zzz') << \
QString(u'1,2,..9999') << \
QString(u'no renumber')
# class-detecting REs in KeyClass_* numeric order
ClassREs = (
u'[IVXLCDM]{1,19}', # ROMAN to MMMMDCCCCLXXXXVIII (4998)
u'[A-Z]{1,3}', # ALPHA to ZZZ
u'[ivxlcdm]{1,19}', # roman to whatever
u'[a-z]{1,3}', # alpha to zzz
u'\d{1,4}', # decimal to 9999
u'[\*\u00a4\u00a7\u00b6\u2020\u2021]' # star currency section para dagger dbl-dagger
)
# In order to not find [oe] as an anchor assiduously skip such keys
TheDreadedOE = QString(u'OE')
# The regex for finding a Ref to any possible Key class.
RefClassMatch = u'\[(' + u'|'.join(ClassREs) + u')\]'
RefFinderRE = QRegExp(RefClassMatch)
# The similar regex for finding the head of a Note of any Key class.
NoteFinderRE = QRegExp( u'\[Footnote\s+(' + u'|'.join(ClassREs) + u')\s*\:' )
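# Illustrative (not executed) examples of what the two finder patterns match:
#   RefFinderRE  matches "...text[A] more text"     with cap(1) == "A"
#   RefFinderRE  matches "...text[xiv] more text"   with cap(1) == "xiv"
#   NoteFinderRE matches "[Footnote 12: some note"  with cap(1) == "12"
# RefFinderRE alone would also hit "[oe]"; that case is filtered out later
# during the document scan by comparing against TheDreadedOE.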
# Some notes about QTextCursors. A cursor is connected to a document (our main
# document) and has an anchor and a position. If .anchor() != .position() there
# is a selection. Qt doesn't care which is lower (closer to the top of the doc)
# but we take pains herein that .anchor() < .position(), i.e. the cursor is
# "positioned" at the end of the selection, the anchor at the start.
# Given a QTextCursor that selects an Anchor, return its line number.
# (Also used for text cursors that index /F and F/ lines.)
def refLineNumber(tc):
if tc is not None:
return tc.block().blockNumber() # block number for tc.position()
return None
# Given a QTextCursor that selects a Note, return its line number, which is
# the block number for the anchor, not necessarily that of the position.
def noteLineNumber(tc):
if tc is not None:
return tc.document().findBlock(tc.anchor()).blockNumber()
return None
# Given a QTextCursor that selects a Note, return the number of lines in it.
def noteLineLength(tc):
if tc is not None:
return 1 + tc.blockNumber() - \
tc.document().findBlock(tc.anchor()).blockNumber()
return 0
# Given a QString that is a Key, return the class of the Key.
# single-class Regexes based on ClassREs above, tupled with the code.
ClassQRegExps = (
(KeyClass_IVX, QRegExp(ClassREs[KeyClass_IVX])),
(KeyClass_ABC, QRegExp(ClassREs[KeyClass_ABC])),
(KeyClass_123, QRegExp(ClassREs[KeyClass_123])),
(KeyClass_ivx, QRegExp(ClassREs[KeyClass_ivx])),
(KeyClass_abc, QRegExp(ClassREs[KeyClass_abc])),
(KeyClass_sym, QRegExp(ClassREs[KeyClass_sym]))
)
def classOfKey(qs):
for (keyclass,regex) in ClassQRegExps:
if 0 == regex.indexIn(qs):
return keyclass
return None
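# Illustrative results (assuming QString arguments):
#   classOfKey(QString(u'IV'))  -> KeyClass_IVX   (roman-only letters win)
#   classOfKey(QString(u'B'))   -> KeyClass_ABC
#   classOfKey(QString(u'42'))  -> KeyClass_123
#   classOfKey(QString(u'*'))   -> KeyClass_sym
#   classOfKey(QString(u'?'))   -> None (no class matches at offset 0)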
# Given a QTextCursor that selects a Key (typically an Anchor)
# return the class of the Key.
def classOfRefKey(tc):
return classOfKey(tc.selectedText())
# Given a QTextCursor that selects a Note, return the note's key.
# We assume that tc really selects a Note so that noteFinderRE will
# definitely hit so we don't check its return. All we want is its cap(1).
def keyFromNote(tc):
NoteFinderRE.indexIn(tc.selectedText())
return NoteFinderRE.cap(1)
# Given a QTextCursor that selects a Note, return the class of its key.
def classOfNoteKey(tc):
return classOfKey(keyFromNote(tc))
# Given a QTextCursor that selects a Note, return the leading characters,
# truncated at 40 chars, from the Note.
MaxNoteText = 40
def textFromNote(tc):
qs = QString()
if tc is not None:
qs = tc.selectedText()
if MaxNoteText < qs.size() :
qs.truncate(MaxNoteText-3)
qs.append(u'...')
return qs
# The following is the database for the table of footnotes.
# This is empty on startup and after the DocHasChanged signal, then built
# by the Refresh button.
TheFootnoteList = [ ]
TheCountOfUnpairedKeys = 0
TheEditCountAtRefresh = -1
# Make a database item given ref and note cursors as available.
# Note we copy the text cursors so the caller doesn't have to worry about
# overwriting, reusing, or letting them go out of scope afterward.
def makeDBItem(reftc,notetc):
keyqs = reftc.selectedText() if reftc is not None else keyFromNote(notetc)
item = {'K': keyqs,
'C': classOfKey(keyqs),
'R': QTextCursor(reftc) if reftc is not None else None,
'N': QTextCursor(notetc) if notetc is not None else None
}
return item
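# Shape of one database item, for illustration:
#   { 'K': QString(u'A'),   # the key text
#     'C': KeyClass_ABC,    # its class code
#     'R': QTextCursor selecting the "A" inside "[A]", or None if unmatched,
#     'N': QTextCursor selecting "[Footnote A: ... ]", or None if unmatched }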
# Append a new matched footnote to the end of the database, given the
# cursors for the Anchor and the Note. It is assumed this is called on
# a top-to-bottom sequential scan so entries will be added in line# sequence.
def addMatchedPair(reftc,notetc):
global TheFootnoteList
TheFootnoteList.append(makeDBItem(reftc,notetc))
# insert an unmatched reference into the db in ref line number sequence.
# unmatched refs and notes are expected to be few, so a sequential scan is ok.
def insertUnmatchedRef(reftc):
global TheFootnoteList
item = makeDBItem(reftc,None)
j = refLineNumber(reftc)
for i in range(len(TheFootnoteList)):
if j <= refLineNumber(TheFootnoteList[i]['R']) :
TheFootnoteList.insert(i,item)
return
TheFootnoteList.append(item) # unmatched ref after all other refs
# insert an unmatched note in note line number sequence.
def insertUnmatchedNote(notetc):
global TheFootnoteList
item = makeDBItem(None,notetc)
j = noteLineNumber(notetc)
    for i in range(len(TheFootnoteList)):
        other = noteLineNumber(TheFootnoteList[i]['N'])
        if (other is not None) and (j <= other) :
            TheFootnoteList.insert(i,item)
            return
    TheFootnoteList.append(item) # unmatched note after all other notes
# Based on the above spadework, do the Refresh operation
def theRealRefresh():
global TheFootnoteList, TheCountOfUnpairedKeys, TheEditCountAtRefresh
TheFootnoteList = [] # wipe the slate
TheCountOfUnpairedKeys = 0
TheEditCountAtRefresh = IMC.editCounter
doc = IMC.editWidget.document() # get handle of document
# initialize status message and progress bar
barCount = doc.characterCount()
pqMsgs.startBar(barCount * 2,"Scanning for notes and anchors")
barBias = 0
# scan the document from top to bottom finding Anchors and make a
# list of them as textcursors. doc.find(re,pos) returns a textcursor
# that .isNull on no hit.
listOrefs = []
findtc = QTextCursor(doc) # cursor that points to top of document
findtc = doc.find(RefFinderRE,findtc)
while not findtc.isNull() : # while we keep finding things
# findtc now selects the whole anchor [xx] but we want to only
# select the key. This means incrementing the anchor and decrementing
# the position; the means to do this are a bit awkward.
a = findtc.anchor()+1
p = findtc.position()-1
findtc.setPosition(a,QTextCursor.MoveAnchor) #click..
findtc.setPosition(p,QTextCursor.KeepAnchor) #..and drag
# The anchor could have been an [oe] character, don't save if so.
if findtc.selectedText().compare(TheDreadedOE, Qt.CaseInsensitive):
listOrefs.append(QTextCursor(findtc))
pqMsgs.rollBar(findtc.position())
findtc = doc.find(RefFinderRE,findtc) # look for the next
barBias = barCount
pqMsgs.rollBar(barBias)
# scan the document again top to bottom now looking for Notes, and make
# a list of them as textcursors.
listOnotes = []
findtc = QTextCursor(doc) # cursor that points to top of document
findtc = doc.find(NoteFinderRE,findtc)
while not findtc.isNull():
# findtc selects "[Footnote key:" now we need to find the closing
# right bracket, which must be at the end of its line. We will go
# by text blocks looking for a line that ends like this]
pqMsgs.rollBar(findtc.anchor()+barBias)
while True:
# "drag" to end of line, selecting whole line
findtc.movePosition(QTextCursor.EndOfBlock,QTextCursor.KeepAnchor)
if findtc.selectedText().endsWith(u']') :
break # now selecting whole note
if findtc.block() == doc.lastBlock() :
# ran off end of document looking for ...]
findtc.clearSelection() # just forget this note, it isn't a note
break # we could tell user, unterminated note. eh.
else: # there is another line, step to its head and look again
findtc.movePosition(QTextCursor.NextBlock,QTextCursor.KeepAnchor)
if findtc.hasSelection() : # we did find the line ending in ]
listOnotes.append(QTextCursor(findtc))
findtc = doc.find(NoteFinderRE,findtc) # find next, fail at end of doc
# Now, listOrefs is all the Anchors and listOnotes is all the Notes,
# both in sequence by document position. Basically, merge these lists.
# For each Ref in sequence, find the first Note with a matching key at
# a higher line number. If there is one, add the matched pair to the db,
# and delete the note from its list. If there is no match, copy the
# ref to a list of unmatched refs (because we can't del from the listOrefs
# inside the loop over it).
# This is not an MxN process despite appearances, as (a) most refs
# will find a match, (b) most matches appear quickly and (c) we keep
# shortening the list of notes.
listOfOrphanRefs = []
for reftc in listOrefs:
hit = False
refln = refLineNumber(reftc) # note line number for comparison
for notetc in listOnotes:
if 0 == reftc.selectedText().compare(keyFromNote(notetc)) and \
refln < noteLineNumber(notetc) :
hit = True
break
if hit : # a match was found
addMatchedPair(reftc,notetc)
listOnotes.remove(notetc)
else:
listOfOrphanRefs.append(reftc)
# All the matches have been made (in heaven?). If there remain any
# unmatched refs or notes, insert them in the db as well.
for reftc in listOfOrphanRefs:
insertUnmatchedRef(reftc)
for notetc in listOnotes:
insertUnmatchedNote(notetc)
TheCountOfUnpairedKeys = len(listOfOrphanRefs)+len(listOnotes)
# clear the status and progress bar
pqMsgs.endBar()
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# This code implements the Fnote table and its interactions.
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Implement a concrete table model by subclassing Abstract Table Model.
# The data served is derived from the TheFootnoteList, above.
class myTableModel(QAbstractTableModel):
def __init__(self, parent=None):
super(myTableModel, self).__init__(parent)
# The header texts for the columns
self.headerDict = {
0:"Key", 1:"Class", 2:"Ref line", 3:"Note Line", 4:"Length", 5:"Text"
}
# the text alignments for the columns
self.alignDict = { 0:Qt.AlignCenter, 1: Qt.AlignCenter,
2: Qt.AlignRight, 3: Qt.AlignRight,
4: Qt.AlignRight, 5: Qt.AlignLeft }
# The values for tool/status tips for data and headers
self.tipDict = { 0: "Actual key text",
1: "Assumed class of key for renumbering",
2: "Line number of the Anchor",
3: "First line number of the Note",
4: "Number of lines in the Note",
5: "Initial text of the Note"
}
# The brushes to painting the background of good and questionable rows
self.whiteBrush = QBrush(QColor(QString('transparent')))
self.pinkBrush = QBrush(QColor(QString('lightpink')))
self.greenBrush = QBrush(QColor(QString('palegreen')))
# Here save the expansion of one database item for convenient fetching
self.lastRow = -1
self.lastTuple = ()
self.brushForRow = QBrush()
def columnCount(self,index):
if index.isValid() : return 0 # we don't have a tree here
return 6
def flags(self,index):
f = Qt.ItemIsEnabled
#if index.column() ==1 :
#f |= Qt.ItemIsEditable # column 1 only editable
return f
def rowCount(self,index):
if index.isValid() : return 0 # we don't have a tree here
return len(TheFootnoteList) # initially 0
def headerData(self, col, axis, role):
if (axis == Qt.Horizontal) and (col >= 0):
if role == Qt.DisplayRole : # wants actual text
return QString(self.headerDict[col])
elif (role == Qt.ToolTipRole) or (role == Qt.StatusTipRole) :
return QString(self.tipDict[col])
return QVariant() # we don't do that, whatever it is
# This method is called whenever the table view wants to know practically
# anything about the visible aspect of a table cell. The row & column are
# in the index, and what it wants to know is expressed by the role.
def data(self, index, role ):
# whatever it wants, we need the row data. Get it into self.lastTuple
if index.row() != self.lastRow :
# We assume Qt won't ask for any row outside 0..rowCount-1.
# We TRUST it will go horizontally, hitting a row multiple times,
# before going on to the next row.
r = index.row()
rtc = TheFootnoteList[r]['R']
ntc = TheFootnoteList[r]['N']
rln = refLineNumber(rtc)
nln = noteLineNumber(ntc)
nll = noteLineLength(ntc) # None if ntc is None
self.lastTuple = (
TheFootnoteList[r]['K'], # key as a qstring
KeyClassNames[TheFootnoteList[r]['C']], # class as qstring
QString(unicode(rln)) if rtc is not None else QString("?"),
QString(unicode(nln)) if ntc is not None else QString("?"),
QString(unicode(nll)),
textFromNote(ntc)
)
self.brushForRow = self.whiteBrush
if (rtc is None) or (ntc is None):
self.brushForRow = self.pinkBrush
elif 10 < nll or 50 < (nln-rln) :
self.brushForRow = self.greenBrush
# Now, what was it you wanted?
if role == Qt.DisplayRole : # wants actual data
return self.lastTuple[index.column()] # so give it.
elif (role == Qt.TextAlignmentRole) :
return self.alignDict[index.column()]
elif (role == Qt.ToolTipRole) or (role == Qt.StatusTipRole) :
return QString(self.tipDict[index.column()])
elif (role == Qt.BackgroundRole) or (role == Qt.BackgroundColorRole):
return self.brushForRow
# don't support other roles
return QVariant()
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# This code creates the Fnote panel and implements the other UI widgets.
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Used during renumbering: given an integer, return an upper- or
# lowercase roman numeral. Cribbed from Mark Pilgrim's "Dive Into Python".
RomanNumeralMap = (('M', 1000),
('CM', 900),
('D', 500),
('CD', 400),
('C', 100),
('XC', 90),
('L', 50),
('XL', 40),
('X', 10),
('IX', 9),
('V', 5),
('IV', 4),
('I', 1))
def toRoman(n,lc):
"""convert integer to Roman numeral"""
if not (0 < n < 5000):
raise ValueError, "number out of range (must be 1..4999)"
    if int(n) != n:
raise ValueError, "decimals can not be converted"
result = ""
for numeral, integer in RomanNumeralMap:
while n >= integer:
result += numeral
n -= integer
qs = QString(result)
if lc : return qs.toLower()
return qs
AlphaMap = u'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def toAlpha(n,lc=False):
    '''convert integer to alpha A,B,..ZZZ (n.b. the upper limit 17576 == 26**3)'''
    if not (0 < n < 17577):
        raise ValueError, "number out of range (must be 1..17576)"
    if int(n) != n:
        raise ValueError, "decimals can not be converted"
result = ''
while True :
(n,m) = divmod(n-1,26)
result = AlphaMap[m]+result
if n == 0 : break
qs = QString(result)
if lc : return qs.toLower()
return qs
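# Illustrative values from the two formatters above:
#   toRoman(4, False)    -> QString(u'IV')
#   toRoman(1998, True)  -> QString(u'mcmxcviii')
#   toAlpha(1)           -> QString(u'A')
#   toAlpha(27)          -> QString(u'AA')
#   toAlpha(28, lc=True) -> QString(u'ab')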
class fnotePanel(QWidget):
def __init__(self, parent=None):
super(fnotePanel, self).__init__(parent)
# Here we go making a layout. The outer shape is a vbox.
mainLayout = QVBoxLayout()
self.setLayout(mainLayout)
# The following things are stacked inside the vbox.
# 1, the Refresh button, left-justifed in an hbox.
refreshLayout = QHBoxLayout()
self.refreshButton = QPushButton("Refresh")
refreshLayout.addWidget(self.refreshButton,0)
refreshLayout.addStretch(1) # stretch on right left-aligns the button
mainLayout.addLayout(refreshLayout)
self.connect(self.refreshButton, SIGNAL("clicked()"), self.doRefresh)
# 2, The table of footnotes, represented as a QTableView that displays
# our myTableModel.
self.view = QTableView()
self.view.setCornerButtonEnabled(False)
self.view.setWordWrap(False)
self.view.setAlternatingRowColors(False)
self.view.setSortingEnabled(False)
mainLayout.addWidget(self.view,1) # It gets all stretch for the panel
# create the table (empty just now) and display it
self.table = myTableModel() #
self.view.setModel(self.table)
# Connect the table view's clicked to our clicked slot
self.connect(self.view, SIGNAL("clicked(QModelIndex)"), self.tableClick)
# 3, an hbox containing 3 vboxes each containing 2 hboxes... ok, let's
# start with 6 comboboxes, one for each class.
self.pickIVX = self.makeStreamMenu(KeyClass_ABC) # initialize both IVX
self.pickABC = self.makeStreamMenu(KeyClass_ABC) # ..and ABC to A,B
self.pickivx = self.makeStreamMenu(KeyClass_abc) # similarly
self.pickabc = self.makeStreamMenu(KeyClass_abc)
self.pick123 = self.makeStreamMenu(KeyClass_123)
self.picksym = self.makeStreamMenu()
# while we are at it let us connect their signals to the methods
# that enforce their behavior.
self.connect(self.pickIVX, SIGNAL("activated(int)"),self.IVXpick)
self.connect(self.pickABC, SIGNAL("activated(int)"),self.ABCpick)
self.connect(self.pickivx, SIGNAL("activated(int)"),self.ivxpick)
self.connect(self.pickabc, SIGNAL("activated(int)"),self.abcpick)
# Now make 6 hboxes each containing a label and the corresponding
# combobox.
hbIVX = self.makePair(KeyClassNames[0],self.pickIVX)
hbABC = self.makePair(KeyClassNames[1],self.pickABC)
hbivx = self.makePair(KeyClassNames[2],self.pickivx)
hbabc = self.makePair(KeyClassNames[3],self.pickabc)
hb123 = self.makePair(KeyClassNames[4],self.pick123)
hbsym = self.makePair(KeyClassNames[5],self.picksym)
# Stack up the pairs in three attractive vboxes
vbIA = self.makeStack(hbABC,hbIVX)
vbia = self.makeStack(hbabc,hbivx)
vbns = self.makeStack(hb123,hbsym)
# Array them across a charming hbox and stick it in our panel
hbxxx = QHBoxLayout()
hbxxx.addLayout(vbIA)
hbxxx.addLayout(vbia)
hbxxx.addLayout(vbns)
hbxxx.addStretch(1)
mainLayout.addLayout(hbxxx)
# Finally, the action buttons on the bottom in a frame.
doitgb = QGroupBox("Actions")
doithb = QHBoxLayout()
self.renumberButton = QPushButton("Renumber")
self.moveButton = QPushButton("Move Notes")
self.asciiButton = QPushButton("ASCII Cvt")
self.htmlButton = QPushButton("HTML Cvt")
doithb.addWidget(self.renumberButton,0)
doithb.addStretch(1)
doithb.addWidget(self.moveButton)
doithb.addStretch(1)
doithb.addWidget(self.asciiButton)
doithb.addStretch(1)
doithb.addWidget(self.htmlButton)
doitgb.setLayout(doithb)
mainLayout.addWidget(doitgb)
# and connect the buttons to actions
self.connect(self.renumberButton, SIGNAL("clicked()"), self.doRenumber)
self.connect(self.moveButton, SIGNAL("clicked()"), self.doMove)
self.connect(self.asciiButton, SIGNAL("clicked()"), self.doASCII)
self.connect(self.htmlButton, SIGNAL("clicked()"), self.doHTML)
# The renumber streams and a set of lambdas for getting the
# next number in sequence from them. The lambdas are selected by the
# value in a stream menu combo box, 0-4 or 5 meaning no-renumber.
self.streams = [0,0,0,0,0,0]
self.streamLambdas = [
lambda s : toRoman(s,False),
lambda s : toAlpha(s,False),
lambda s : toRoman(s,True),
lambda s : toAlpha(s,True),
lambda s : QString(unicode(s)),
lambda s : None]
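        # For example self.streamLambdas[1](28) yields QString(u'AB'), while
        # self.streamLambdas[5](n) yields None (the no-renumber choice).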
self.streamMenuList = [
self.pickIVX,self.pickABC,self.pickivx,
self.pickabc,self.pick123,self.picksym]
# Note a count of items over which it is worthwhile to run a
# progress bar during renumber, move, etc. Reconsider later: 100? 200?
self.enoughForABar = 100
# Convenience function to shorten code when instantiating
def makeStreamMenu(self,choice=5):
cb = QComboBox()
cb.addItems(StreamNames)
cb.setCurrentIndex(choice)
return cb
# Convenience function to shorten code when instantiating
def makePair(self,qs,cb):
hb = QHBoxLayout()
hb.addWidget(QLabel(qs))
hb.addWidget(cb)
hb.addStretch(1)
return hb
# Convenience function to shorten code when instantiating
def makeStack(self,pair1,pair2):
vb = QVBoxLayout()
vb.addLayout(pair1)
vb.addLayout(pair2)
vb.addStretch(1)
return vb
# The slot for a click of the Refresh button. Tell the table model we are
# changing stuff; then call theRealRefresh; then tell table we're good.
def doRefresh(self):
self.table.beginResetModel()
theRealRefresh()
self.table.endResetModel()
self.view.resizeColumnsToContents()
# These slots are invoked when a choice is made in the stream popup menu
# for an ambiguous class, to ensure that contradictory choices aren't made.
# If the user sets the IVX stream to the same as the ABC stream, or
# to no-renumber, fine. Otherwise, she is asserting that she has valid
# IVX footnote keys, in which case ABC needs to be no-renumber.
def IVXpick(self,pick):
        if (pick != self.pickABC.currentIndex()) and (pick != 5) :
self.pickABC.setCurrentIndex(5)
# If the user sets the ABC stream to anything but no-renumber, she is
# asserting that there are valid ABC keys in which case, keys we have
# classed as IVX need to use the same stream.
def ABCpick(self,pick):
if pick != 5 :
self.pickIVX.setCurrentIndex(pick)
# And similarly for lowercase.
def ivxpick(self,pick):
        if (pick != self.pickabc.currentIndex()) and (pick != 5) :
self.pickabc.setCurrentIndex(5)
def abcpick(self,pick):
if pick != 5 :
self.pickivx.setCurrentIndex(pick)
# The slot for a click anywhere in the tableview. If the click is on:
# * column 0 or 1 (key or class) we jump to the ref line, unless we are on
# the ref line in which case we jump to the note line (ping-pong).
# * column 2 (ref line) we jump to the ref line.
# * column 3, 4, 5 (note line or note) we jump to the note line.
# In each case, to "jump" means, set the document cursor to the reftc
# or the notetc, making the ref or note the current selection.
def tableClick(self,index):
r = index.row()
c = index.column()
dtc = IMC.editWidget.textCursor()
rtc = TheFootnoteList[r]['R']
ntc = TheFootnoteList[r]['N']
targtc = None
if c > 2 : # column 3 4 or 5
targtc = ntc
elif c == 2 :
targtc = rtc
else: # c == 0 or 1
dln = dtc.blockNumber()
rln = refLineNumber(rtc) # None, if rtc is None
if dln == rln : # True if there is a ref line and we are on it
targtc = ntc # go to the note
else: # there isn't a ref line (rtc is None) or there is and we want it
targtc = rtc
if targtc is not None:
IMC.editWidget.setTextCursor(targtc)
IMC.editWidget.centerCursor()
# The slots for the main window's docWill/HasChanged signals.
# Right now, just clear the footnote database, the user can hit
# refresh when he wants the info. If the refresh proves to be
# very small performance hit even in a very large book, we could
# look at calling doRefresh automatically after docHasChanged.
def docWillChange(self):
self.table.beginResetModel()
def docHasChanged(self):
global TheFootnoteList
TheFootnoteList = []
self.table.endResetModel()
# Subroutine to make sure it is ok to do a major revision such as renumber or move.
# First, if the document has changed since the last time we did a refresh, do one
# Second, if there are then any unpaired keys, display a message and return false.
def canWeRevise(self,action):
global TheCountOfUnpairedKeys, TheEditCountAtRefresh
if TheEditCountAtRefresh != IMC.editCounter :
self.doRefresh()
        if TheCountOfUnpairedKeys != 0 :
pqMsgs.warningMsg(
"Cannot {0} with orphan notes and anchors".format(action),
"The count of unpaired anchors and notes is: {0}".format(TheCountOfUnpairedKeys)
)
return False # dinna do it, laddie!
return True # ok to go ahead
# The slot for the Renumber button. Check to see if any unpaired keys and
# don't do it if there are any. But if all are matched, go through the
# database top to bottom (because that is the direction the user expects
# the number streams to increment). For each key, develop a new key string
# based on its present class and the stream selection for that class.
def doRenumber(self):
global TheFootnoteList
if not self.canWeRevise(u"Renumber") :
return
# If the database is actually empty, just do nothing.
dbcount = len(TheFootnoteList)
if dbcount < 1 : return
# OTOH, if there is significant work to do, start the progress bar.
if dbcount >= self.enoughForABar :
pqMsgs.startBar(dbcount,"Renumbering footnotes...")
# clear the number streams
self.streams = [0,0,0,0,0,0]
# Tell the table model that things are gonna change
self.table.beginResetModel()
# create a working cursor and start an undo macro on it.
worktc = QTextCursor(IMC.editWidget.textCursor())
worktc.beginEditBlock()
for i in range(dbcount) : # there's a reason this isn't "for item in..."
item = TheFootnoteList[i]
# Note this key's present string value and class number.
oldkeyqs = item['K']
oldclass = item['C']
# Get the renumber stream index for the present class
renchoice = self.streamMenuList[oldclass].currentIndex()
# Increment that stream (if no-renumber, increment is harmless)
self.streams[renchoice] += 1
# Format the incremented value as a string based on stream choice
# This produces None if renchoice is 5, no-renumber. It could produce
# a value error on a too-big roman numeral or other unlikely things.
try :
newkeyqs = self.streamLambdas[renchoice](self.streams[renchoice])
# If that produced an alpha oe or OE, skip that value
if 0 == newkeyqs.compare(TheDreadedOE, Qt.CaseInsensitive) :
self.streams[renchoice] += 1
newkeyqs = self.streamLambdas[renchoice](self.streams[renchoice])
except ValueError, errmsg :
pqMsgs.warningMsg(
"Error encoding {0} key stream".format(KeyClassNames[renchoice]),
"Numbers will be wrong, recommend Undo when operation ends"
)
self.streams[renchoice] = 0 # restart that stream
newkeyqs = self.streamLambdas[renchoice](self.streams[renchoice])
if newkeyqs is not None: # not no-renumber, so we do it
# infer the key class of the new key string
newclass = classOfKey(newkeyqs)
# ## Replace the key in the note text:
# First, make a pattern to match the old key. Do it by making
# a COPY of the old key and appending : to the COPY. We need
# the colon because the key text might be e.g. "o" or "F".
targqs = QString(oldkeyqs).append(u':')
# Cause worktc to select the opening text of the note through
# the colon, from notetc. Don't select the whole note as we will
# use QString::replace which replaces every match it finds.
notetc = item['N']
worktc.setPosition(notetc.anchor())
worktc.setPosition(notetc.anchor()+10+targqs.size(),QTextCursor.KeepAnchor)
# Get that selected text as a QString
workqs = worktc.selectedText()
# Find the offset of the old key (s.b. 10 but not anal about spaces)
targix = workqs.indexOf(targqs,0,Qt.CaseSensitive)
# Replace the old key text with the new key text
workqs.replace(targix,oldkeyqs.size(),newkeyqs)
# put the modified text back in the document, replacing just
# [Footnote key:. Even this will make Qt mess with the anchor
# and position of notetc, so set up to recreate that.
selstart = notetc.anchor()
selend = notetc.position()-oldkeyqs.size()+newkeyqs.size()
worktc.insertText(workqs)
notetc.setPosition(selstart)
notetc.setPosition(selend,QTextCursor.KeepAnchor)
# ## Replace the key in the anchor, a simpler job, although
# we again have to recover the selection
reftc = item['R']
selstart = reftc.anchor()
sellen = newkeyqs.size()
worktc.setPosition(selstart)
worktc.setPosition(reftc.position(),QTextCursor.KeepAnchor)
worktc.insertText(newkeyqs)
reftc.setPosition(selstart)
reftc.setPosition(selstart+sellen,QTextCursor.KeepAnchor)
# Update the database item. The two cursors are already updated.
# Note that this is Python; "item" is a reference to
# TheFootnoteList[i], ergo we are updating the db in place.
item['K'] = newkeyqs
item['C'] = newclass
# end of "newkeyqs is not None"
if dbcount >= self.enoughForABar and 0 == (i & 7):
pqMsgs.rollBar(i)
# end of "for i in range(dbcount)"
# Clean up:
worktc.endEditBlock() # End the undo macro
self.table.endResetModel() # tell the table the data have stabilized
        if dbcount >= self.enoughForABar :
pqMsgs.endBar() # clear the progress bar
# The slot for the Move button. Check to see if any unpaired keys and
# don't do it if there are any. But if all are matched, first find all
# footnote sections in the document and make a list of them in the form
# of textcursors. Get user permission, showing section count as a means
# of validating markup, then move each note that is not in a section,
# into the section next below it. Update the note cursors in the db.
def doMove(self):
global TheFootnoteList
if not self.canWeRevise(u"Move Notes to /F..F/") :
return
# If the database is actually empty, just do nothing.
dbcount = len(TheFootnoteList)
if dbcount < 1 : return
# Create a working text cursor.
worktc = QTextCursor(IMC.editWidget.textCursor())
# Search the whole document and find the /F..F/ sections. We could look
# for lines starting /F and then after finding one, for the F/ line, but
# the logic gets messy when the user might have forgotten or miscoded
# the F/. So we use a regex that will cap(0) the entire section. We are
# not being Mr. Nice Guy and allowing \s* spaces either, it has to be
# zackly \n/F\n.*\nF/\n.
sectRegex = QRegExp(u'\\n/F.*\\nF/(\\n|$)')
sectRegex.setMinimal(True) # minimal match for the .* above
sectRegex.setCaseSensitivity(Qt.CaseSensitive)
wholeDocQs = IMC.editWidget.toPlainText() # whole doc as qstring
sectList = []
j = sectRegex.indexIn(wholeDocQs,0)
while j > -1:
# initialize text cursors to record the start and end positions
# of each section. Note, cursors point between characters:
# sectcA----v
# sectcI----v sectcB---v
# ... \2029 / F \2029 ..whatever.. \2029 F / \2029
# Notes are inserted at sectcI which is moved ahead each time. Qt
# takes care of updating sectcB and other cursors on inserts.
# The line number of sectcA is that of the first line after /F,
# and that of sectcB is that of the F/ for comparison.
sectcA = QTextCursor(worktc)
sectcA.setPosition(j+4)
sectcB = QTextCursor(worktc)
sectcB.setPosition(j+sectRegex.matchedLength()-3)
sectcI = QTextCursor(sectcA)
sectList.append( (sectcA,sectcI,sectcB) )
j = sectRegex.indexIn(wholeDocQs,j+1)
# Let wholeDocQs go out of scope just in case it is an actual copy
# of the document. (Should just be a const reference but who knows?)
wholeDocQs = QString()
# Did we in fact find any footnote sections?
if len(sectList) == 0:
pqMsgs.warningMsg(u"Found no /F..F/ footnote sections.")
return
# Since this is a big deal, and /F is easy to mis-code, let's show
# the count found and get approval.
if not pqMsgs.okCancelMsg(
u"Found {0} footnote sections".format(len(sectList)),
"OK to proceed with the move?") :
return
# Right, we're gonna do stuff. If there is significant work to do,
# start the progress bar.
if dbcount >= self.enoughForABar :
pqMsgs.startBar(dbcount,"Moving Notes to /F..F/ sections")
# Tell the table model that things are gonna change
self.docWillChange()
# Start an undo macro on the working cursor.
worktc = QTextCursor(IMC.editWidget.textCursor())
worktc.beginEditBlock()
# loop over all notes.
for i in range(dbcount):
notetc = TheFootnoteList[i]['N']
nln = noteLineNumber(notetc)
# Look for the first section whose last line is below nln
for s in range(len(sectList)):
(sectcA,sectcI,sectcB) = sectList[s]
if nln >= refLineNumber(sectcB):
# this note starts below this section s
continue # to next s
# this note starts above the end of this section,
if nln >= refLineNumber(sectcA):
# however this note is inside this section already
break # and move on to next i
# this note is above, and not within, the section sectList[s],
# so do the move. Start saving the length of the note as
# currently known.
notelen = notetc.position() - notetc.anchor()
# Modify the note selection to include both the \2029 that
# precedes the note and the \2029 that follows the right bracket.
# This assumes that a note is not at the exact beginning of a document
# (seems safe enough) and not at the end either (the /F follows it).
new_anchor = notetc.anchor() - 1
new_position = notetc.position() + 1
notetc.setPosition(new_anchor)
notetc.setPosition(new_position,QTextCursor.KeepAnchor)
# point our worktc at the insertion point in this section
worktc.setPosition(sectcI.position())
# copy the note text inserting it in the section
worktc.insertText(notetc.selectedText())
# save the ending position as the new position of sectcI -- the
# next inserted note goes there
sectcI.setPosition(worktc.position())
# clear the old note text. Have to do this using worktc for
# the undo-redo macro to record it. When the text is removed,
# Qt adjusts all cursors that point below it, including sectcI.
worktc.setPosition(notetc.anchor())
worktc.setPosition(notetc.position(),QTextCursor.KeepAnchor)
worktc.removeSelectedText()
# reset notetc to point to the new note location
notepos = sectcI.position()-notelen-1
notetc.setPosition(notepos)
notetc.setPosition(notepos+notelen,QTextCursor.KeepAnchor)
break # all done scanning sectList for this note
# end of "for s in range(len(sectList))"
if dbcount >= self.enoughForABar and 0 == (i & 7) :
pqMsgs.rollBar(i)
# end of "for i in range(dbcount)"
# Clean up:
worktc.endEditBlock() # End the undo macro
theRealRefresh() # fix up the line numbers in the table
self.docHasChanged() # tell the table the data has stabilized
        if dbcount >= self.enoughForABar :
pqMsgs.endBar() # clear the progress bar
# The slot for the HTML button. Make sure the db is clean and there is work
# to do. Then go through each item and update as follows:
# Around the anchor put:
# <a id='FA_key' href='#FN_key' class='fnanchor'>[key]</a>
# Replace "[Footnote key:" with
# <div class='footnote' id='FN_key'>\n
# <span class="fnlabel"><a href='FA_key'>[key]</a></span> text..
# Replace the final ] with \n\n</div>
# The idea is that the HTML conversion in pqFlow will see the \n\n
# and insert <p> and </p> as usual.
# We work the list from the bottom up because of nested references.
# Going top-down, we would rewrite a Note containing an Anchor, and
# that unavoidably messes up the reftc pointing to that Anchor.
# Going bottom-up, we rewrite the nested Anchor before we rewrite the
# Note that contains it.
def doHTML(self):
global TheFootnoteList
if not self.canWeRevise(u"Convert Footnotes to HTML") :
return
# If the database is actually empty, just do nothing.
dbcount = len(TheFootnoteList)
if dbcount < 1 : return
# Just in case the user had a spastic twitch and clicked in error,
if not pqMsgs.okCancelMsg(
"Going to convert {0} footnotes to HTML".format(dbcount),
"Note Symbol class keys are skipped."):
return
# Set up a boilerplate string for the Anchor replacements.
# Each %n placeholder is replaced by a copy of the key value.
anchor_pattern = QString(u"<a name='FA_%1' id='FA_%2' href='#FN_%3' class='fnanchor'>[%4]</a>")
# Set up a regex pattern to recognize [Footnote key:, being forgiving
# about extra spaces and absorbing spaces after the colon.
# %1 is replaced by the key value.
fnt_pattern = QString(u"\[Footnote\s+%1\s*:\s*")
fnt_RE = QRegExp()
# Set up a replacement boilerplate for [Footnote key.
# Each %n placeholder is replaced by a copy of the key value.
fnt_rep = QString(u"<div class='footnote' id='FN_%1'>\u2029<span class='fnlabel'><a href='#FA_%2'>[%3]</a></span> ")
# Make a working textcursor, start the undo macro, advise the table
worktc = QTextCursor(IMC.editWidget.textCursor())
worktc.beginEditBlock()
self.docWillChange()
if dbcount >= self.enoughForABar :
pqMsgs.startBar(dbcount,"Converting notes to HTML...")
for i in reversed(range(dbcount)):
item = TheFootnoteList[i]
# Don't even try to convert symbol-class keys
if item['C'] == KeyClass_sym :
continue
key_qs = item['K'] # Key value as qstring
key_tc = item['R'] # cursor that selects the key
# note the start position of the anchor, less 1 to include the [
anchor_start = key_tc.anchor() - 1
# note the anchor end position, plus 1 for the ]
anchor_end = key_tc.position() + 1
# Copy the anchor boilerplate and install the key in it
anchor_qs = anchor_pattern.arg(key_qs,key_qs,key_qs,key_qs)
# Replace the anchor text, using the work cursor.
worktc.setPosition(anchor_start)
worktc.setPosition(anchor_end,QTextCursor.KeepAnchor)
worktc.insertText(anchor_qs)
# Note the start position of the note
note_tc = item['N']
note_start = note_tc.anchor()
# Note its end position, which includes the closing ]
note_end = note_tc.position()
# Copy the note boilerplates and install the key in them.
note_pattern = fnt_pattern.arg(key_qs)
note_qs = fnt_rep.arg(key_qs,key_qs,key_qs)
# Point the work cursor at the note.
worktc.setPosition(note_start)
worktc.setPosition(note_end,QTextCursor.KeepAnchor)
# get the note as a string, truncate the closing ],
# append </div> on a separate line, and put it back.
oldnote = worktc.selectedText()
oldnote.chop(1)
oldnote.append(QString(u"\u2029</div>"))
worktc.insertText(oldnote) # worktc now positioned after note
# use the note string to recognize the length of [Footnote key:sp
fnt_RE.setPattern(note_pattern)
j = fnt_RE.indexIn(oldnote) # assert j==0
j = fnt_RE.cap(0).size() # size of the target portion
# set the work cursor to select just that, and replace it.
worktc.setPosition(note_start)
worktc.setPosition(note_start + j,QTextCursor.KeepAnchor)
worktc.insertText(note_qs)
if dbcount >= self.enoughForABar and 0 == (i & 7):
pqMsgs.rollBar(dbcount - i)
# end of "for i in range(dbcount)"
# Clean up:
worktc.endEditBlock() # End the undo macro
self.docHasChanged() # tell the table the data has stabilized
        if dbcount >= self.enoughForABar :
pqMsgs.endBar() # clear the progress bar
# The slot for the ASCII button. Make sure the db is clean and there is work
# to do. Then go through all Notes (the Anchors are left alone)
# and update all Notes as follows:
# Replace "[Footnote key:" with /Q Fnote XXX\n [key]
# where XXX is the KeyClassName, e.g. ABC or ivx.
# Replace the final ] with \nQ/\n
# The idea is to change a footnote into a block quote tagged with the class
# which is ignored by reflow, but can be used to do find/replace.
def doASCII(self):
global TheFootnoteList, KeyClassNames
if not self.canWeRevise(u"Convert Footnotes to /Q..Q/") :
return
# If the database is actually empty, just do nothing.
dbcount = len(TheFootnoteList)
if dbcount < 1 : return
# Just in case the user had a spastic twitch and clicked in error,
if not pqMsgs.okCancelMsg(
"Going to convert {0} footnotes to /Q..Q/".format(dbcount),
""):
return
# Set up a regex pattern to recognize [Footnote key: being forgiving
# about extra spaces and absorbing spaces after the colon. The %1
# marker is replaced in a QString.arg() operation with the key value.
fnt_pattern = QString(u"\[Footnote\s+%1\s*:\s*")
fnt_RE = QRegExp()
# Set up a replacement boilerplate for [Footnote key. Here %1 is
# replaced with the key classname and %2 with the key value.
fnt_rep = QString(u"/Q FNote %1\u2029 [%2] ")
# Make a working textcursor, start the undo macro, advise the table
worktc = QTextCursor(IMC.editWidget.textCursor())
worktc.beginEditBlock()
self.docWillChange()
if dbcount >= self.enoughForABar :
pqMsgs.startBar(dbcount,"Converting notes to ASCII...")
for i in range(dbcount):
item = TheFootnoteList[i]
key_qs = item['K']
# Get the cursor that selects the Note.
note_tc = item['N']
# Record the start position of the note
note_start = note_tc.anchor()
# Record its end position, which includes the closing ]
note_end = note_tc.position()
# Copy the regex pattern with the actual key in it.
note_pat = fnt_pattern.arg(key_qs)
# Copy the replacement string with the keyclass and key in it
note_qs = fnt_rep.arg(KeyClassNames[item['C']]).arg(key_qs)
# Point the work cursor at the note.
worktc.setPosition(note_start)
worktc.setPosition(note_end,QTextCursor.KeepAnchor)
# get the note as a string, truncate the closing ], add the
# newline and Q/, and put it back.
oldnote = worktc.selectedText()
oldnote.chop(1)
oldnote.append(QString(u'\u2029Q/'))
worktc.insertText(oldnote) # worktc now positioned after note
# use the note string to recognize the length of [Footnote key:sp
fnt_RE.setPattern(note_pat)
j = fnt_RE.indexIn(oldnote) # assert j==0
j = fnt_RE.cap(0).size() # size of the target portion
# set the work cursor to select just that, and replace it.
worktc.setPosition(note_start)
worktc.setPosition(note_start + j,QTextCursor.KeepAnchor)
worktc.insertText(note_qs)
if dbcount >= self.enoughForABar and 0 == (i & 7):
pqMsgs.rollBar(i)
# end of "for i in range(dbcount)"
# Clean up:
worktc.endEditBlock() # End the undo macro
self.docHasChanged() # tell the table the data has stabilized
        if dbcount >= self.enoughForABar :
pqMsgs.endBar() # clear the progress bar
if __name__ == "__main__":
def docEdit():
IMC.editCounter += 1
import sys
from PyQt4.QtCore import (Qt,QFile,QIODevice,QTextStream,QSettings)
from PyQt4.QtGui import (QApplication,QPlainTextEdit,QFileDialog,QMainWindow)
import pqIMC
app = QApplication(sys.argv) # create an app
IMC = pqIMC.tricorder() # set up a fake IMC for unit test
IMC.fontFamily = QString("Courier")
import pqMsgs
pqMsgs.IMC = IMC
IMC.editWidget = QPlainTextEdit()
IMC.editWidget.setFont(pqMsgs.getMonoFont())
IMC.settings = QSettings()
IMC.editCounter = 0
widj = fnotePanel()
MW = QMainWindow()
MW.setCentralWidget(widj)
pqMsgs.makeBarIn(MW.statusBar())
MW.connect(IMC.editWidget, SIGNAL("textChanged()"), docEdit)
MW.show()
utqs = QString('''
This is text[A] with two anchors one at end of line.[2]
[Footnote A: footnote A which
extends onto
three lines]
[Footnote 2: footnote 2 which has[A] a nested note]
[Footnote A: nested ref in note 2]
This is another[DCCCCLXXXXVIII] reference.
This is another[q] reference.
[Footnote DCCCCLXXXXVIII: footnote DCCCCLXXXXVIII]
[Footnote q: footnote q]
/F
F/
A lame symbol[\u00a7] reference.
Ref to unmatched key[]
/F
this gets no notes
F/
[Footnot zz: orphan note]
[Footnote \u00a7: footnote symbol]
/F
F/
''')
IMC.editWidget.setPlainText(utqs)
IMC.mainWindow = MW
IMC.editWidget.show()
app.exec_() | gpl-3.0 | -8,001,404,848,635,416,000 | 44.738114 | 124 | 0.630869 | false | 3.72933 | false | false | false |
maweigert/spimagine | spimagine/utils/imgutils.py | 1 | 4869 | from __future__ import absolute_import, print_function
import logging
logger = logging.getLogger(__name__)
import os
import numpy as np
import re
import json
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
from tifffile import TiffFile, imsave, imread
#from spimagine.lib.tifffile import TiffFile, imsave, imread
from spimagine.lib.czifile import CziFile
def read3dTiff(fName):
return imread(fName)
def write3dTiff(data,fName):
imsave(fName,data)
# def getTiffSize(fName):
# from PIL import Image
# img = Image.open(fName, 'r')
# depth = 0
# while True:
# try:
# img.seek(depth)
# except Exception as e:
# break
# depth += 1
#
# return (depth,)+img.size[::-1]
def readCziFile(fName):
with CziFile(fName) as f:
return np.squeeze(f.asarray())
def parseIndexFile(fname):
"""
returns (t,z,y,z) dimensions of a spim stack
"""
try:
lines = open(fname).readlines()
except IOError:
print("could not open and read ",fname)
return None
items = lines[0].replace("\t",",").split(",")
try:
stackSize = [int(i) for i in items[-4:-1]] +[len(lines)]
except Exception as e:
print(e)
print("couldnt parse ", fname)
return None
stackSize.reverse()
return stackSize
def parseMetaFile(fName):
"""
returns pixelSizes (dx,dy,dz)
"""
with open(fName) as f:
s = f.read()
try:
z1 = np.float(re.findall("StartZ.*",s)[0].split("\t")[2])
z2 = np.float(re.findall("StopZ.*",s)[0].split("\t")[2])
zN = np.float(re.findall("NumberOfPlanes.*",s)[0].split("\t")[2])
return (.162,.162, (1.*z2-z1)/(zN-1.))
except Exception as e:
print(e)
print("coulndt parse ", fName)
return (1.,1.,1.)
def parse_index_xwing(fname):
"""
returns (z,y,z) dimensions of a xwing stack
"""
try:
lines = open(fname).readlines()
except IOError:
print("could not open and read ",fname)
return None
items = lines[0].replace("\t",",").replace("\n","").split(",")
try:
stackSize = [int(i) for i in items[-3:]]
except Exception as e:
print(e)
print("couldnt parse ", fname)
return None
stackSize.reverse()
return stackSize
def parse_meta_xwing(fName):
"""
returns pixelSizes (dx,dy,dz)
"""
with open(fName) as f:
try:
s = json.loads((f.readlines()[0]))
x = float(s["VoxelDimX"])
y = float(s["VoxelDimY"])
z = float(s["VoxelDimZ"])
return (x,y,z)
except Exception as e:
print(e)
print("coulndt parse ", fName)
return (1.,1.,1.)
def fromSpimFolder(fName,dataFileName="data/data.bin",indexFileName="data/index.txt",pos=0,count=1):
stackSize = parseIndexFile(os.path.join(fName,indexFileName))
if stackSize:
# clamp to pos to stackSize
pos = min(pos,stackSize[0]-1)
pos = max(pos,0)
if count>0:
stackSize[0] = min(count,stackSize[0]-pos)
else:
stackSize[0] = max(0,stackSize[0]-pos)
with open(os.path.join(fName,dataFileName),"rb") as f:
f.seek(2*pos*np.prod(stackSize[1:]))
return np.fromfile(f,dtype="<u2",
count=np.prod(stackSize)).reshape(stackSize)
t = time()
ds.append(func(fName))
print("%s\ntime: %.2f ms"%(func.__name__, 1000.*(time()-t)))
assert np.allclose(*ds)
def createSpimFolder(fName,data = None,
stackSize= [10,10,32,32],
stackUnits = (.162,.162,.162)):
if not os.path.exists(fName):
os.makedirs(fName)
if not os.path.exists(os.path.join(fName,"data")):
os.makedirs(os.path.join(fName,"data"))
if not data is None:
stackSize = data.shape
datafName = os.path.join(fName,"data/data.bin")
with open(datafName,"wa") as f:
data.astype(np.uint16).tofile(f)
Nt,Nz,Ny,Nx = stackSize
indexfName = os.path.join(fName,"data/index.txt")
with open(indexfName,"w") as f:
for i in range(Nt):
f.write("%i\t0.0000\t1,%i,%i,%i\t0\n"%(i,Nx,Ny,Nz))
metafName = os.path.join(fName,"metadata.txt")
with open(metafName,"w") as f:
f.write("timelapse.NumberOfPlanes\t=\t%i\t0\n"%Nz)
f.write("timelapse.StartZ\t=\t0\t0\n")
f.write("timelapse.StopZ\t=\t%.2f\t0\n"%(stackUnits[2]*(Nz-1.)))
def test_czi():
d = readCziFile("test_data/retina.czi")
return d
if __name__ == '__main__':
# test_tiff()
d = test_czi()
| bsd-3-clause | -8,010,270,323,642,686,000 | 23.715736 | 100 | 0.551037 | false | 3.171987 | false | false | false |
wunderlins/learning | python/zodb/lib/osx/ZEO/scripts/zeoqueue.py | 2 | 11094 | #!/usr/bin/env python2.3
"""Report on the number of currently waiting clients in the ZEO queue.
Usage: %(PROGRAM)s [options] logfile
Options:
-h / --help
Print this help text and exit.
-v / --verbose
Verbose output
-f file
--file file
Use the specified file to store the incremental state as a pickle. If
not given, %(STATEFILE)s is used.
-r / --reset
Reset the state of the tool. This blows away any existing state
pickle file and then exits -- it does not parse the file. Use this
when you rotate log files so that the next run will parse from the
beginning of the file.
"""
from __future__ import print_function
import os
import re
import sys
import time
import errno
import getopt
from ZEO._compat import load, dump
COMMASPACE = ', '
STATEFILE = 'zeoqueue.pck'
PROGRAM = sys.argv[0]
tcre = re.compile(r"""
(?P<ymd>
\d{4}- # year
\d{2}- # month
\d{2}) # day
T # separator
(?P<hms>
\d{2}: # hour
\d{2}: # minute
\d{2}) # second
""", re.VERBOSE)
ccre = re.compile(r"""
zrpc-conn:(?P<addr>\d+.\d+.\d+.\d+:\d+)\s+
calling\s+
(?P<method>
\w+) # the method
\( # args open paren
\' # string quote start
(?P<tid>
\S+) # first argument -- usually the tid
\' # end of string
(?P<rest>
.*) # rest of line
""", re.VERBOSE)
wcre = re.compile(r'Clients waiting: (?P<num>\d+)')
def parse_time(line):
"""Return the time portion of a zLOG line in seconds or None."""
mo = tcre.match(line)
if mo is None:
return None
date, time_ = mo.group('ymd', 'hms')
date_l = [int(elt) for elt in date.split('-')]
time_l = [int(elt) for elt in time_.split(':')]
return int(time.mktime(date_l + time_l + [0, 0, 0]))
class Txn:
"""Track status of single transaction."""
def __init__(self, tid):
self.tid = tid
self.hint = None
self.begin = None
self.vote = None
self.abort = None
self.finish = None
self.voters = []
def isactive(self):
if self.begin and not (self.abort or self.finish):
return True
else:
return False
class Status:
"""Track status of ZEO server by replaying log records.
We want to keep track of several events:
- The last committed transaction.
- The last committed or aborted transaction.
- The last transaction that got the lock but didn't finish.
- The client address doing the first vote of a transaction.
- The number of currently active transactions.
- The number of reported queued transactions.
- Client restarts.
- Number of current connections (but this might not be useful).
We can observe these events by reading the following sorts of log
entries:
2002-12-16T06:16:05 BLATHER(-100) zrpc:12649 calling
tpc_begin('\x03I\x90((\xdbp\xd5', '', 'QueueCatal...
2002-12-16T06:16:06 BLATHER(-100) zrpc:12649 calling
vote('\x03I\x90((\xdbp\xd5')
2002-12-16T06:16:06 BLATHER(-100) zrpc:12649 calling
tpc_finish('\x03I\x90((\xdbp\xd5')
2002-12-16T10:46:10 INFO(0) ZSS:12649:1 Transaction blocked waiting
for storage. Clients waiting: 1.
2002-12-16T06:15:57 BLATHER(-100) zrpc:12649 connect from
('10.0.26.54', 48983): <ManagedServerConnection ('10.0.26.54', 48983)>
2002-12-16T10:30:09 INFO(0) ZSS:12649:1 disconnected
"""
def __init__(self):
self.lineno = 0
self.pos = 0
self.reset()
def reset(self):
self.commit = None
self.commit_or_abort = None
self.last_unfinished = None
self.n_active = 0
self.n_blocked = 0
self.n_conns = 0
self.t_restart = None
self.txns = {}
def iscomplete(self):
# The status report will always be complete if we encounter an
# explicit restart.
if self.t_restart is not None:
return True
# If we haven't seen a restart, assume that seeing a finished
# transaction is good enough.
return self.commit is not None
def process_file(self, fp):
if self.pos:
if VERBOSE:
print('seeking to file position', self.pos)
fp.seek(self.pos)
while True:
line = fp.readline()
if not line:
break
self.lineno += 1
self.process(line)
self.pos = fp.tell()
def process(self, line):
if line.find("calling") != -1:
self.process_call(line)
elif line.find("connect") != -1:
self.process_connect(line)
# test for "locked" because word may start with "B" or "b"
elif line.find("locked") != -1:
self.process_block(line)
elif line.find("Starting") != -1:
self.process_start(line)
def process_call(self, line):
mo = ccre.search(line)
if mo is None:
return
called_method = mo.group('method')
# Exit early if we've got zeoLoad, because it's the most
# frequently called method and we don't use it.
if called_method == "zeoLoad":
return
t = parse_time(line)
meth = getattr(self, "call_%s" % called_method, None)
if meth is None:
return
client = mo.group('addr')
tid = mo.group('tid')
rest = mo.group('rest')
meth(t, client, tid, rest)
def process_connect(self, line):
pass
def process_block(self, line):
mo = wcre.search(line)
if mo is None:
# assume that this was a restart message for the last blocked
# transaction.
self.n_blocked = 0
else:
self.n_blocked = int(mo.group('num'))
def process_start(self, line):
if line.find("Starting ZEO server") != -1:
self.reset()
self.t_restart = parse_time(line)
def call_tpc_begin(self, t, client, tid, rest):
txn = Txn(tid)
txn.begin = t
if rest[0] == ',':
i = 1
while rest[i].isspace():
i += 1
rest = rest[i:]
txn.hint = rest
self.txns[tid] = txn
self.n_active += 1
self.last_unfinished = txn
def call_vote(self, t, client, tid, rest):
txn = self.txns.get(tid)
if txn is None:
print("Oops!")
txn = self.txns[tid] = Txn(tid)
txn.vote = t
txn.voters.append(client)
def call_tpc_abort(self, t, client, tid, rest):
txn = self.txns.get(tid)
if txn is None:
print("Oops!")
txn = self.txns[tid] = Txn(tid)
txn.abort = t
txn.voters = []
self.n_active -= 1
if self.commit_or_abort:
# delete the old transaction
try:
del self.txns[self.commit_or_abort.tid]
except KeyError:
pass
self.commit_or_abort = txn
def call_tpc_finish(self, t, client, tid, rest):
txn = self.txns.get(tid)
if txn is None:
print("Oops!")
txn = self.txns[tid] = Txn(tid)
txn.finish = t
txn.voters = []
self.n_active -= 1
if self.commit:
# delete the old transaction
try:
del self.txns[self.commit.tid]
except KeyError:
pass
if self.commit_or_abort:
# delete the old transaction
try:
del self.txns[self.commit_or_abort.tid]
except KeyError:
pass
self.commit = self.commit_or_abort = txn
def report(self):
print("Blocked transactions:", self.n_blocked)
if not VERBOSE:
return
if self.t_restart:
print("Server started:", time.ctime(self.t_restart))
if self.commit is not None:
t = self.commit_or_abort.finish
if t is None:
t = self.commit_or_abort.abort
print("Last finished transaction:", time.ctime(t))
# the blocked transaction should be the first one that calls vote
L = [(txn.begin, txn) for txn in self.txns.values()]
L.sort()
for x, txn in L:
if txn.isactive():
began = txn.begin
if txn.voters:
print("Blocked client (first vote):", txn.voters[0])
print("Blocked transaction began at:", time.ctime(began))
print("Hint:", txn.hint)
print("Idle time: %d sec" % int(time.time() - began))
break
def usage(code, msg=''):
print(__doc__ % globals(), file=sys.stderr)
if msg:
print(msg, file=sys.stderr)
sys.exit(code)
def main():
global VERBOSE
VERBOSE = 0
file = STATEFILE
reset = False
# -0 is a secret option used for testing purposes only
seek = True
try:
opts, args = getopt.getopt(sys.argv[1:], 'vhf:r0',
['help', 'verbose', 'file=', 'reset'])
except getopt.error as msg:
usage(1, msg)
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-v', '--verbose'):
VERBOSE += 1
elif opt in ('-f', '--file'):
file = arg
elif opt in ('-r', '--reset'):
reset = True
elif opt == '-0':
seek = False
if reset:
# Blow away the existing state file and exit
try:
os.unlink(file)
if VERBOSE:
print('removing pickle state file', file)
except OSError as e:
if e.errno != errno.ENOENT:
raise
return
if not args:
usage(1, 'logfile is required')
if len(args) > 1:
usage(1, 'too many arguments: %s' % COMMASPACE.join(args))
path = args[0]
# Get the previous status object from the pickle file, if it is available
# and if the --reset flag wasn't given.
status = None
try:
statefp = open(file, 'rb')
try:
status = load(statefp)
if VERBOSE:
print('reading status from file', file)
finally:
statefp.close()
except IOError as e:
if e.errno != errno.ENOENT:
raise
if status is None:
status = Status()
if VERBOSE:
print('using new status')
if not seek:
status.pos = 0
fp = open(path, 'rb')
try:
status.process_file(fp)
finally:
fp.close()
# Save state
statefp = open(file, 'wb')
dump(status, statefp, 1)
statefp.close()
# Print the report and return the number of blocked clients in the exit
# status code.
status.report()
sys.exit(status.n_blocked)
if __name__ == "__main__":
main()
| gpl-2.0 | 2,865,013,277,010,498,600 | 26.735 | 78 | 0.534974 | false | 3.715338 | false | false | false |
GoogleCloudPlatform/cloud-foundation-toolkit | dm/templates/network/network.py | 1 | 3125 | # Copyright 2018 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This template creates a network, optionally with subnetworks. """
def append_optional_property(res, properties, prop_name):
""" If the property is set, it is added to the resource. """
val = properties.get(prop_name)
if val:
res['properties'][prop_name] = val
return
def generate_config(context):
""" Entry point for the deployment resources. """
properties = context.properties
name = properties.get('name', context.env['name'])
network_self_link = '$(ref.{}.selfLink)'.format(context.env['name'])
network_resource = {
# https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert
'type': 'gcp-types/compute-v1:networks',
'name': context.env['name'],
'properties':
{
'name': name,
'autoCreateSubnetworks': properties.get('autoCreateSubnetworks', False)
}
}
optional_properties = [
'description',
'routingConfig',
'project',
]
for prop in optional_properties:
append_optional_property(network_resource, properties, prop)
resources = [network_resource]
# Subnetworks:
out = {}
for i, subnetwork in enumerate(
properties.get('subnetworks', []), 1
):
subnetwork['network'] = network_self_link
if properties.get('project'):
subnetwork['project'] = properties.get('project')
subnetwork_name = 'subnetwork-{}'.format(i)
resources.append(
{
'name': subnetwork_name,
'type': 'subnetwork.py',
'properties': subnetwork
}
)
out[subnetwork_name] = {
'selfLink': '$(ref.{}.selfLink)'.format(subnetwork_name),
'ipCidrRange': '$(ref.{}.ipCidrRange)'.format(subnetwork_name),
'region': '$(ref.{}.region)'.format(subnetwork_name),
'network': '$(ref.{}.network)'.format(subnetwork_name),
'gatewayAddress': '$(ref.{}.gatewayAddress)'.format(subnetwork_name)
}
return {
'resources':
resources,
'outputs':
[
{
'name': 'name',
'value': name
},
{
'name': 'selfLink',
'value': network_self_link
},
{
'name': 'subnetworks',
'value': out
}
]
}
| apache-2.0 | -7,620,196,664,950,365,000 | 31.552083 | 87 | 0.56032 | false | 4.328255 | false | false | false |
kkarrancsu/fecapi | python/qa_tpc_decoder_withnoise.py | 1 | 7308 | #!/usr/bin/env python
##################################################
# Gnuradio Python Flow Graph
# Title: TPC Encoder Test
# Generated:
##################################################
from gnuradio import gr, gr_unittest
from gnuradio import blocks
from fec.extended_decoder_interface import extended_decoder_interface
import fec
import struct
import os
class qa_tpc_decoder_withnoise(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
self.testFilesDir = os.path.join(os.environ['srcdir'], '..', 'testdata')
def tearDown(self):
self.tb = None
def readBinaryFile(self, filename):
fileData = ()
f = open(filename, 'rb')
try:
# read the file, this function is expecting a binary file and will
# read the file as unsigned chars
byte = f.read(1)
while byte != "":
# put the byte into the return vector
fileData = fileData + (byte,)
byte = f.read(1)
finally:
f.close()
return map(ord, fileData)
def readFloatFile(self, filename):
f = open(filename, 'rb')
n = 288
fileData = struct.unpack('f'*n, f.read(4*n))
f.close()
return fileData
def runIt(self, decoderInputFilename, decoderOutputFilename, rowPoly, colPoly, kRow, kCol, B, Q,
numIter, decType):
decoderInputLLR = self.readFloatFile(os.path.join(self.testFilesDir, decoderInputFilename))
decoderExpectedOutput = self.readBinaryFile(os.path.join(self.testFilesDir, decoderOutputFilename))
# define the required components
self.variable_cc_def_fecapi_tpc_decoder_def_0 = \
variable_cc_def_fecapi_tpc_decoder_def_0 = \
map( (lambda a: fec.tpc_make_decoder((rowPoly), (colPoly), kRow, kCol, B, Q, numIter, decType)), range(0,1) );
self.variable_decoder_interface_0 = \
variable_decoder_interface_0 = \
extended_decoder_interface(decoder_obj_list=variable_cc_def_fecapi_tpc_decoder_def_0,
threading='capillary',
ann=None,
puncpat='11',
integration_period=10000)
# setup connections of flowgraph
self.blocks_vector_source_x_0 = blocks.vector_source_f(decoderInputLLR, False, 1, [])
self.blocks_vector_sink_x_0 = blocks.vector_sink_b(1)
self.tb.connect((self.blocks_vector_source_x_0, 0), (self.variable_decoder_interface_0, 0))
self.tb.connect((self.variable_decoder_interface_0, 0), (self.blocks_vector_sink_x_0, 0))
# run the block
self.tb.run()
# check output versus expectedOutputData
actualOutputData = self.blocks_vector_sink_x_0.data()
actualOutputDataList = list(actualOutputData)
actualOutputDataList_int = map(int, actualOutputDataList)
print type(decoderExpectedOutput)
print type(actualOutputDataList_int)
print '******** DECODER EXPECTED OUTPUT *********'
print decoderExpectedOutput
print '******** DECODER ACTUAL OUTPUT ***********'
print actualOutputDataList_int
outputLen = len(decoderExpectedOutput)
self.assertFloatTuplesAlmostEqual(decoderExpectedOutput, actualOutputDataList_int, outputLen)
def test_004_tpc_decoder(self):
print 'RUNNING NOISE TEST 4'
inputFilename = 'snrtest_4_input.bin'
outputFilename = 'snrtest_4_output.bin'
# the definitions below MUST match the octave test script
rowPoly = [3]
colPoly = [43]
kRow = 26
kCol = 6
B = 9
Q = 3
numIters = 6
decoderType = 2
self.runIt(inputFilename, outputFilename, rowPoly, colPoly, kRow, kCol, B, Q, numIters, decoderType)
def test_005_tpc_decoder(self):
print 'RUNNING NOISE TEST 5'
inputFilename = 'snrtest_5_input.bin'
outputFilename = 'snrtest_5_output.bin'
# the definitions below MUST match the octave test script
rowPoly = [3]
colPoly = [43]
kRow = 26
kCol = 6
B = 9
Q = 3
numIters = 6
decoderType = 2
self.runIt(inputFilename, outputFilename, rowPoly, colPoly, kRow, kCol, B, Q, numIters, decoderType)
def test_006_tpc_decoder(self):
print 'RUNNING NOISE TEST 6'
inputFilename = 'snrtest_6_input.bin'
outputFilename = 'snrtest_6_output.bin'
# the definitions below MUST match the octave test script
rowPoly = [3]
colPoly = [43]
kRow = 26
kCol = 6
B = 9
Q = 3
numIters = 6
decoderType = 2
self.runIt(inputFilename, outputFilename, rowPoly, colPoly, kRow, kCol, B, Q, numIters, decoderType)
def test_007_tpc_decoder(self):
print 'RUNNING NOISE TEST 7'
inputFilename = 'snrtest_7_input.bin'
outputFilename = 'snrtest_7_output.bin'
# the definitions below MUST match the octave test script
rowPoly = [3]
colPoly = [43]
kRow = 26
kCol = 6
B = 9
Q = 3
numIters = 6
decoderType = 2
self.runIt(inputFilename, outputFilename, rowPoly, colPoly, kRow, kCol, B, Q, numIters, decoderType)
def test_008_tpc_decoder(self):
print 'RUNNING NOISE TEST 8'
inputFilename = 'snrtest_8_input.bin'
outputFilename = 'snrtest_8_output.bin'
# the definitions below MUST match the octave test script
rowPoly = [3]
colPoly = [43]
kRow = 26
kCol = 6
B = 9
Q = 3
numIters = 6
decoderType = 2
self.runIt(inputFilename, outputFilename, rowPoly, colPoly, kRow, kCol, B, Q, numIters, decoderType)
def test_009_tpc_decoder(self):
print 'RUNNING NOISE TEST 9'
inputFilename = 'snrtest_9_input.bin'
outputFilename = 'snrtest_9_output.bin'
# the definitions below MUST match the octave test script
rowPoly = [3]
colPoly = [43]
kRow = 26
kCol = 6
B = 9
Q = 3
numIters = 6
decoderType = 2
self.runIt(inputFilename, outputFilename, rowPoly, colPoly, kRow, kCol, B, Q, numIters, decoderType)
def test_010_tpc_decoder(self):
print 'RUNNING NOISE TEST 10'
inputFilename = 'snrtest_10_input.bin'
outputFilename = 'snrtest_10_output.bin'
# the definitions below MUST match the octave test script
rowPoly = [3]
colPoly = [43]
kRow = 26
kCol = 6
B = 9
Q = 3
numIters = 6
decoderType = 2
self.runIt(inputFilename, outputFilename, rowPoly, colPoly, kRow, kCol, B, Q, numIters, decoderType)
if __name__=='__main__':
gr_unittest.run(qa_tpc_decoder_withnoise) | gpl-3.0 | -1,348,339,920,012,235,000 | 33.476415 | 127 | 0.558019 | false | 3.818182 | true | false | false |
bdestombe/flopy-1 | flopy/modflow/mfbas.py | 2 | 11659 | """
mfbas module. Contains the ModflowBas class. Note that the user can access
the ModflowBas class as `flopy.modflow.ModflowBas`.
Additional information for this MODFLOW package can be found at the `Online
MODFLOW Guide
<http://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/index.html?bas6.htm>`_.
"""
import sys
import numpy as np
from ..pakbase import Package
from ..utils import Util3d, check, get_neighbors
class ModflowBas(Package):
"""
MODFLOW Basic Package Class.
Parameters
----------
model : model object
The model object (of type :class:`flopy.modflow.mf.Modflow`) to which
this package will be added.
ibound : array of ints, optional
The ibound array (the default is 1).
strt : array of floats, optional
An array of starting heads (the default is 1.0).
ifrefm : bool, optional
Indication if data should be read using free format (the default is
True).
ixsec : bool, optional
Indication of whether model is cross sectional or not (the default is
False).
ichflg : bool, optional
Flag indicating that flows between constant head cells should be
calculated (the default is False).
stoper : float
percent discrepancy that is compared to the budget percent discrepancy
continue when the solver convergence criteria are not met. Execution
will unless the budget percent discrepancy is greater than stoper
(default is None). MODFLOW-2005 only
hnoflo : float
Head value assigned to inactive cells (default is -999.99).
extension : str, optional
File extension (default is 'bas').
unitnumber : int, optional
FORTRAN unit number for this package (default is None).
filenames : str or list of str
Filenames to use for the package. If filenames=None the package name
will be created using the model name and package extension. If a single
string is passed the package name will be set to the string.
Default is None.
Attributes
----------
heading : str
Text string written to top of package input file.
options : list of str
Can be either or a combination of XSECTION, CHTOCH or FREE.
ifrefm : bool
Indicates whether or not packages will be written as free format.
Methods
-------
See Also
--------
Notes
-----
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow()
>>> bas = flopy.modflow.ModflowBas(m)
"""
@staticmethod
def ftype():
return 'BAS6'
@staticmethod
def defaultunit():
return 13
def __init__(self, model, ibound=1, strt=1.0, ifrefm=True, ixsec=False,
ichflg=False, stoper=None, hnoflo=-999.99, extension='bas',
unitnumber=None, filenames=None):
"""
Package constructor.
"""
if unitnumber is None:
unitnumber = ModflowBas.defaultunit()
# set filenames
if filenames is None:
filenames = [None]
elif isinstance(filenames, str):
filenames = [filenames]
# Fill namefile items
name = [ModflowBas.ftype()]
units = [unitnumber]
extra = ['']
# set package name
fname = [filenames[0]]
# Call ancestor's init to set self.parent, extension, name and unit number
Package.__init__(self, model, extension=extension, name=name,
unit_number=units, extra=extra, filenames=fname)
self.url = 'bas6.htm'
nrow, ncol, nlay, nper = self.parent.nrow_ncol_nlay_nper
self.ibound = Util3d(model, (nlay, nrow, ncol), np.int, ibound,
name='ibound', locat=self.unit_number[0])
self.strt = Util3d(model, (nlay, nrow, ncol), np.float32, strt,
name='strt', locat=self.unit_number[0])
self.heading = '# {} package for '.format(self.name[0]) + \
' {}, '.format(model.version_types[model.version]) + \
'generated by Flopy.'
self.options = ''
self.ixsec = ixsec
self.ichflg = ichflg
self.stoper = stoper
#self.ifrefm = ifrefm
#model.array_free_format = ifrefm
model.free_format_input = ifrefm
self.hnoflo = hnoflo
self.parent.add_package(self)
return
@property
def ifrefm(self):
return self.parent.free_format_input
def __setattr__(self, key, value):
if key == "ifrefm":
self.parent.free_format_input = value
else:
super(ModflowBas,self).__setattr__(key,value)
def check(self, f=None, verbose=True, level=1):
"""
Check package data for common errors.
Parameters
----------
f : str or file handle
String defining file name or file handle for summary file
of check method output. If a sting is passed a file handle
is created. If f is None, check method does not write
results to a summary file. (default is None)
verbose : bool
Boolean flag used to determine if check method results are
written to the screen
level : int
Check method analysis level. If level=0, summary checks are
performed. If level=1, full checks are performed.
Returns
-------
None
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow.load('model.nam')
>>> m.bas6.check()
"""
chk = check(self, f=f, verbose=verbose, level=level)
neighbors = get_neighbors(self.ibound.array)
neighbors[np.isnan(neighbors)] = 0 # set neighbors at edges to 0 (inactive)
chk.values(self.ibound.array,
(self.ibound.array > 0) & np.all(neighbors < 1, axis=0),
'isolated cells in ibound array', 'Warning')
chk.values(self.ibound.array, np.isnan(self.ibound.array),
error_name='Not a number', error_type='Error')
chk.summarize()
return chk
def write_file(self, check=True):
"""
Write the package file.
Parameters
----------
check : boolean
Check package data for common errors. (default True)
Returns
-------
None
"""
if check: # allows turning off package checks when writing files at model level
self.check(f='{}.chk'.format(self.name[0]), verbose=self.parent.verbose, level=1)
# Open file for writing
f_bas = open(self.fn_path, 'w')
# First line: heading
#f_bas.write('%s\n' % self.heading)
f_bas.write('{0:s}\n'.format(self.heading))
# Second line: format specifier
self.options = ''
if self.ixsec:
self.options += 'XSECTION'
if self.ichflg:
self.options += ' CHTOCH'
if self.ifrefm:
self.options += ' FREE'
if self.stoper is not None:
self.options += ' STOPERROR {0}'.format(self.stoper)
f_bas.write('{0:s}\n'.format(self.options))
# IBOUND array
f_bas.write(self.ibound.get_file_entry())
# Head in inactive cells
f_bas.write('{0:15.6G}\n'.format(self.hnoflo))
# Starting heads array
f_bas.write(self.strt.get_file_entry())
# Close file
f_bas.close()
@staticmethod
def load(f, model, nlay=None, nrow=None, ncol=None, ext_unit_dict=None, check=True):
"""
Load an existing package.
Parameters
----------
f : filename or file handle
File to load.
model : model object
The model object (of type :class:`flopy.modflow.mf.Modflow`) to
which this package will be added.
nlay, nrow, ncol : int, optional
If not provided, then the model must contain a discretization
package with correct values for these parameters.
ext_unit_dict : dictionary, optional
If the arrays in the file are specified using EXTERNAL,
or older style array control records, then `f` should be a file
handle. In this case ext_unit_dict is required, which can be
constructed using the function
:class:`flopy.utils.mfreadnam.parsenamefile`.
check : boolean
Check package data for common errors. (default True)
Returns
-------
bas : ModflowBas object
ModflowBas object (of type :class:`flopy.modflow.ModflowBas`)
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow()
>>> bas = flopy.modflow.ModflowBas.load('test.bas', m, nlay=1, nrow=10,
>>> ncol=10)
"""
if model.verbose:
sys.stdout.write('loading bas6 package file...\n')
if not hasattr(f, 'read'):
filename = f
f = open(filename, 'r')
#dataset 0 -- header
while True:
line = f.readline()
if line[0] != '#':
break
#dataset 1 -- options
line = line.upper()
opts = line.strip().split()
ixsec = False
ichflg = False
ifrefm = False
iprinttime = False
ishowp = False
istoperror = False
stoper = None
if 'XSECTION' in opts:
ixsec = True
if 'CHTOCH' in opts:
ichflg = True
if 'FREE' in opts:
ifrefm = True
if 'PRINTTIME' in opts:
iprinttime = True
if 'SHOWPROGRESS' in opts:
ishowp = True
if 'STOPERROR' in opts:
istoperror = True
i = opts.index('STOPERROR')
stoper = np.float32(opts[i+1])
#get nlay,nrow,ncol if not passed
if nlay is None and nrow is None and ncol is None:
nrow, ncol, nlay, nper = model.get_nrow_ncol_nlay_nper()
#dataset 2 -- ibound
ibound = Util3d.load(f, model, (nlay, nrow, ncol), np.int, 'ibound',
ext_unit_dict)
#print ibound.array
#dataset 3 -- hnoflo
line = f.readline()
hnoflo = np.float32(line.strip().split()[0])
#dataset 4 -- strt
strt = Util3d.load(f, model, (nlay, nrow, ncol), np.float32, 'strt',
ext_unit_dict)
f.close()
# set package unit number
unitnumber = None
filenames = [None]
if ext_unit_dict is not None:
unitnumber, filenames[0] = \
model.get_ext_dict_attr(ext_unit_dict,
filetype=ModflowBas.ftype())
#create bas object and return
bas = ModflowBas(model, ibound=ibound, strt=strt,
ixsec=ixsec, ifrefm=ifrefm, ichflg=ichflg,
stoper=stoper, hnoflo=hnoflo,
unitnumber=unitnumber, filenames=filenames)
if check:
bas.check(f='{}.chk'.format(bas.name[0]), verbose=bas.parent.verbose, level=0)
return bas
| bsd-3-clause | 5,940,413,536,282,508,000 | 32.494083 | 93 | 0.546016 | false | 4.04967 | false | false | false |
karimbahgat/PythonGis | pythongis/vector/geometry.py | 1 | 3638 |
from shapely.geometry import shape
from shapely.prepared import prep
from .geography import Geography
class Geometry(object):
def __init__(self, obj, **kwargs):
# stored internally as shapely
if isinstance(obj, dict):
self._shapely_data = obj # keep geojson as is, dont convert to shapely until needed
elif kwargs:
self._shapely_data = kwargs # keep geojson as is, dont convert to shapely until needed
elif "shapely" in type(obj):
self._shapely_data = obj
elif isinstance(obj, Geometry):
self._shapely_data = obj._shapely_data
else:
raise Exception()
self._prepped_data = None
@property
def _shapely(self):
'shapely object is needed, converted from geojson if needed'
if isinstance(self._shapely_data, dict):
self._shapely_data = shape(self._shapely_data)
return self._shapely_data
@property
def _prepped(self):
'prepared geometry for faster ops, created if needed'
if not self._prepped_data:
self._prepped_data = prep(self._shapely)
return self._prepped_data
@property
def __geo_interface__(self):
if isinstance(self._shapely_data, dict):
# if shapely not created yet, return directly from geojson
return self._shapely_data
else:
return self._shapely_data.__geo_interface__
@property
def type(self):
return self.__geo_interface__["type"]
@property
def coordinates(self):
return self.__geo_interface__["coordinates"]
@property
def geoms(self):
for geoj in self.__geo_interface__["geometries"]:
yield Geometry(geoj)
@property
def is_empty(self):
return True if not self._shapely_data else self._shapely.is_empty
# calculations
def area(self, geodetic=False):
if geodetic:
geog = Geography(self.__geo_interface__)
return geog.area
else:
return self._shapely.area
def length(self, geodetic=False):
if geodetic:
geog = Geography(self.__geo_interface__)
return geog.length
else:
return self._shapely.length
def distance(self, other, geodetic=False):
if geodetic:
geog = Geography(self.__geo_interface__)
other = Geography(other.__geo_interface__)
return geog.distance(other)
else:
other = Geometry(other)
return self._shapely.distance(other)
# tests
# TODO: Maybe implement batch ops via prepped, or should that be handled higher up...?
def intersects(self, other):
return self._shapely.intersects(other._shapely)
def disjoint(self, other):
return self._shapely.disjoint(other._shapely)
def touches(self, other):
return self._shapely.touches(other._shapely)
# modify
def walk(self):
pass
def line_to(self):
pass
def buffer(self, distance, resolution=100, geodetic=False):
if geodetic:
geog = Geography(self.__geo_interface__)
buff = geog.buffer(distance, resolution)
return Geometry(buff.__geo_interface__)
else:
return self._shapely.buffer(distance, resolution)
def intersection(self, other):
return self._shapely.intersection(other._shapely)
def union(self, other):
return self._shapely.union(other._shapely)
def difference(self, other):
return self._shapely.difference(other._shapely)
| mit | 2,483,002,800,462,296,000 | 26.770992 | 98 | 0.604178 | false | 4.148233 | false | false | false |
MDAnalysis/pyQuteMol | python/Molecule.py | 1 | 19581 |
import numpy
import random
from OpenGL.GL import *
import glew_wrap as glew
from Canvas import moltextureCanvas, haloCanvas
from OctaMap import octamap
from trackball import glTrackball
from quaternion import quaternion
from CgUtil import cgSettings
import hardSettings
import ShadowMap
from ShadowMap import AOgpu2
import struct
from MDAnalysis import *
import molGL
TOO_BIG = 0
TOO_SMALL = 1
SIZE_OK = 2
def getAtomRadius(atom, coarse_grain = False):
E2R = {"F": 1.47, "CL": 1.89, "H": 1.10, "C":1.548, "N": 1.4, "O":1.348, "P":1.88, "S":1.808, "CA":1.948, "FE":1.948, "ZN": 1.148, "I": 1.748}
rad = E2R.get(atom[:1], 0)
if rad == 0: rad = E2R.get(atom[:2], 0)
if rad == 0: 1.5
if coarse_grain: rad = 2.35
return rad
def getAtomColor(atom):
E2C = {"H": 0xFFFFFF,
"HE": 0xFFC0CB,
"LI": 0xB22222,
"BE": 0xFF1493,
"B": 0x00FF00,
"C": 0x808080,
"N": 0x8F8FFF,
"O": 0xF00000,
"F": 0xDAA520,
"NE": 0xFF1493,
"NA": 0x0000FF,
"MG": 0x228B22,
"AL": 0x808090,
"SI": 0xDAA520,
"P": 0xFFA500,
"S": 0xFFC832,
"CL": 0x00FF00,
"AR": 0xFF1493,
"K": 0xFF1493,
"CA": 0x808090,
"SC": 0xFF1493,
"TI": 0x808090,
"V": 0xFF1493,
"CR": 0x808090,
"MN": 0x808090,
"FE": 0xFFA500,
"CO": 0xFF1493,
"NI": 0xA52A2A,
"CU": 0xA52A2A,
"ZN": 0xA52A2A}
E2C_coarse = {"NC3": 0x00CC00 ,"PO4": 0x6600CC, "GL": 0xFFFF33, "W": 0x0000CC}
E2C.update(E2C_coarse)
color = E2C.get(atom, 0)
if color == 0: color = E2C.get(atom[:2], 0)
if color == 0: color = E2C.get(atom[:1], 0)
color_int = [ord(val) for val in struct.unpack("cccc", struct.pack("i", color))]
return numpy.array(color_int[1:])/255.
def convert_color(color):
color_int = [ord(val) for val in struct.unpack("cccc", struct.pack("i", color))]
return numpy.array(color_int[1:])/255.
# XXX This class isn't actually used, since everything is in numpy arrays and the drawing is done in C code
class Atom:
def __init__(self, atomid, name):
self.id = atomid
self.r = getAtomRadius(name)
self.col = numpy.array(getAtomColor(name))/255.
def Draw(self):
r = self.r
p = self.pos[self.id]
col = self.col
glColor3f(col[0],col[1],col[2])
glTexCoord2f(self.tx/moltextureCanvas.GetHardRes(),self.ty/moltextureCanvas.GetHardRes())
glNormal3f(1,1,r)
glVertex3f(p[0],p[1],p[2])
glNormal3f(-1,+1, r)
glVertex3f(p[0],p[1],p[2])
glNormal3f(-1,-1, r)
glVertex3f(p[0],p[1],p[2])
glNormal3f(+1,-1, r)
glVertex3f(p[0],p[1],p[2])
def FillTexture(self,texture, texsize):
octamap.FillTexture(texture, texsize, self.tx, self.ty, self.col[0], self.col[1], self.col[2])
def AssignNextTextPos(self, texsize):
self.tx = lx
self.ty = ly
if (lx+octamap.TotTexSizeX()>texsize) or (ly+octamap.TotTexSizeY()>texsize): return False
lx += octamap.TotTexSizeX()
if (lx+octamap.TotTexSizeX()>texsize):
ly+=octamap.TotTexSizeY()
lx=0
return True
def DrawOnTexture(self, CSIZE, px, py, pz, r):
glColor3f(ShadowMap.myrand(), ShadowMap.myrand(), ShadowMap.myrand())
h = 0.0
Xm = -1.0-1.0/CSIZE
Xp = 1.0+1.0/CSIZE
Ym=Xm
Yp=Xp
glew.glMultiTexCoord4fARB(glew.GL_TEXTURE1_ARB, px,py,pz,r)
glTexCoord2f(Xm,Ym); glVertex2f(-h+self.tx, -h+self.ty)
glTexCoord2f(Xp,Ym); glVertex2f(-h+self.tx+CSIZE,-h+self.ty)
glTexCoord2f(Xp,Yp); glVertex2f(-h+self.tx+CSIZE,-h+self.ty+CSIZE)
glTexCoord2f(Xm,Yp); glVertex2f(-h+self.tx, -h+self.ty+CSIZE)
def DrawShadowmap(self):
r = self.r
px, py, pz = self.pos[self.id]
#if ((!geoSettings.showHetatm)&&(hetatomFlag)): return
glNormal3f(+1,+1, r)
glVertex3f(px,py,pz)
glNormal3f(-1,+1, r)
glVertex3f(px,py,pz)
glNormal3f(-1,-1, r)
glVertex3f(px,py,pz)
glNormal3f(+1,-1, r)
glVertex3f(px,py,pz)
def DrawHalo(self, r, px, py, pz):
#r = self.r
#px, py, pz = self.pos[self.id]
#if ((!geoSettings.showHetatm)&&(hetatomFlag)) return
s=cgSettings.P_halo_size * 2.5
glew.glMultiTexCoord2fARB(glew.GL_TEXTURE1_ARB, r+s, (r+s)*(r+s) / (s*s+2*r*s))
glTexCoord2f(+1,+1)
glVertex3f(px,py,pz)
glTexCoord2f(-1,+1)
glVertex3f(px,py,pz)
glTexCoord2f(-1,-1)
glVertex3f(px,py,pz)
glTexCoord2f(+1,-1)
glVertex3f(px,py,pz)
class Molecule:
def __init__(self,filename,istrj = True,coarse_grain=False):
self.r = 0 # default scaling factor for system
self.pos = numpy.zeros(3) # center of bounding box
self.orien = quaternion([0,0,-1,0]) # orientation in space
self.scaleFactor = 1
self.idx = None
self.DirV = []
self.istrj = istrj
self.coarse_grain = coarse_grain
self.clipplane = numpy.array([0.,0.,0.,0,], numpy.float32)
self.excl = numpy.array([], numpy.int32)
if not istrj: self.load_pdb(filename)
else: self.load_trj(filename)
def load_pdb(self,filename):
infile = file(filename)
coords = []
radii = []
colors = []
radii = []
for i,line in enumerate(infile):
if not (line[:4] == "ATOM" or line[:6] == "HETATM"): continue
name = line[13:16]
x, y, z = float(line[30:38]),float(line[38:46]),float(line[46:54])
coords.append((x,y,z))
radii.append(getAtomRadius(name, self.coarse_grain))
colors.append(getAtomColor(name))
self.numatoms = len(coords)
self.atompos = numpy.array(coords, numpy.float32)
self.colors = numpy.array(colors, numpy.float32)
self.radii = numpy.array(radii, numpy.float32)
# Calculate bounding box
min = numpy.minimum.reduce(self.atompos)
max = numpy.maximum.reduce(self.atompos)
pos = (min+max)/2
self.r = 0.5*numpy.sqrt(numpy.sum(numpy.power(max-min-4,2)))
self.pos = pos
self.min, self.max = min-pos, max-pos
self.textureAssigned = False
self.textures = numpy.ones((self.numatoms, 2), numpy.float32)
self.ReassignTextureAutosize()
self.ResetAO()
def load_trj(self,prefix):
universe = AtomGroup.Universe(prefix+".psf", prefix+".dcd")
print "Finished loading psf"
self.universe = universe
#self.atompos = numpy.asarray(universe.dcd.ts._pos).T
self.atompos = universe.dcd.ts._pos
self.sel = universe
self.idx = self.sel.atoms.indices()
self.numatoms = universe.atoms.numberOfAtoms()
print "Finished selection"
radii = [getAtomRadius(a.name, self.coarse_grain) for a in universe.atoms]
colors = [getAtomColor(a.name) for a in universe.atoms]
self.colors = numpy.array(colors, numpy.float32)
self.radii = numpy.array(radii, numpy.float32)
# This is the old way for using Vertex arrays - it might still be faster if I can use indexes arrays
# or vertex buffer objects
# see glDrawElements so I don't have to duplicate everything by 4
#verts = numpy.transpose(universe.dcd.ts._pos)
#self.atompos = numpy.repeat(verts, 4, axis=0)
# Set up vertex arrays
#glVertexPointer(3, GL_FLOAT, 0, self.atompos)
#glEnableClientState(GL_VERTEX_ARRAY)
#glNormalPointer(GL_FLOAT, 0, self.normals)
#glEnableClientState(GL_NORMAL_ARRAY)
#glColorPointer(3,GL_FLOAT, 0, self.colors)
#glEnableClientState(GL_COLOR_ARRAY)
# Calculate bounding box
min = numpy.minimum.reduce(self.atompos)
max = numpy.maximum.reduce(self.atompos)
pos = (min+max)/2
self.r = 0.5*numpy.sqrt(numpy.sum(numpy.power(max-min-4,2)))
self.pos = pos
self.min, self.max = min-pos, max-pos
# for drawing lines
if hasattr(self.universe, "_bonds"):
self.bonds = numpy.array(self.universe._bonds)
self.textureAssigned = False
self.textures = numpy.ones((self.numatoms, 2), numpy.float32)
self.ReassignTextureAutosize()
self.ResetAO()
# this is for trajectory averaging
self.new_ts = self.universe.dcd.ts._pos
self.averaging = 1
def read_next_frame(self):
if self.istrj:
currframe = self.universe.dcd.ts.frame
if currframe == len(self.universe.dcd): currframe = 0
ts = self.universe.dcd[currframe] # this looks weird, but currframe is 1-indexed
if self.averaging > 1 and not ts.frame > len(self.universe.dcd)-self.averaging:
self.new_ts *= 0
self.new_ts += self.atompos
for ts in self.universe.dcd[currframe+1:currframe+self.averaging]:
self.new_ts += self.atompos
ts.frame = currframe+1
self.atompos[:] = self.new_ts/self.averaging
def read_previous_frame(self):
if self.istrj:
currframe = self.universe.dcd.ts.frame-1
self.universe.dcd[currframe-1]
def ReassignTextureAutosize(self):
if (self.textureAssigned): return
guess = hardSettings.TSIZE
lastThatWorked = guess
enlarge = False; shrink = False; forced = False
while True:
if (enlarge and shrink): forced = True
moltextureCanvas.SetRes(guess)
lastThatWorked = guess
res = SetCsize(guess, self.numatoms)
if not forced:
if ((res==TOO_BIG) and (guess/2 >= 16)):
shrink = True
guess /= 2
continue
if ((res == TOO_SMALL) and (guess*2 <= hardSettings.MAX_TSIZE)):
enlarge = True
guess *= 2
continue
octamap.SetSize(hardSettings.CSIZE)
self.ReassignTexture(guess)
break
# Rebuild texture arrays
#glTexCoordPointer(2, GL_FLOAT, 0, self.textures)
#glEnableClientState(GL_TEXTURE_COORD_ARRAY)
def ReassignTexture(self, texsize):
lx = ly = 0
# assign texture positions
textures = []
for i in range(self.numatoms):
textures.append((lx, ly))
if (lx+octamap.TotTexSizeX()>texsize) or (ly+octamap.TotTexSizeY()>texsize): raise Exception
lx += octamap.TotTexSizeX()
if (lx+octamap.TotTexSizeX()>texsize):
ly+=octamap.TotTexSizeY()
lx=0
self.textures = numpy.array(textures, numpy.float32)
def DrawLines(self):
r = self.r * self.scaleFactor
px, py, pz = self.pos
glPushMatrix()
glScalef(1./r,1./r,1./r)
glMultMatrixd((glTrackball.quat * self.orien).asRotation())
glTranslatef(-px, -py, -pz)
glDisable(glew.GL_VERTEX_PROGRAM_ARB)
glDisable(glew.GL_FRAGMENT_PROGRAM_ARB)
glBegin(GL_LINES)
molGL.molDrawSticks(self.atompos, self.bonds, self.colors, self.clipplane)
glEnd()
glPopMatrix()
def Draw(self):
r = self.r * self.scaleFactor
px, py, pz = self.pos
glPushMatrix()
glScalef(1./r,1./r,1./r)
glMultMatrixd((glTrackball.quat * self.orien).asRotation())
glTranslatef(-px, -py, -pz)
#glClipPlane(GL_CLIP_PLANE0, self.clipplane)
x = glGetFloatv(GL_MODELVIEW_MATRIX)
scalef = extractCurrentScaleFactor_x(x)
glew.glProgramEnvParameter4fARB(glew.GL_VERTEX_PROGRAM_ARB,0,scalef,0,0,0)
glEnable(glew.GL_VERTEX_PROGRAM_ARB)
glEnable(glew.GL_TEXTURE_2D)
glew.glActiveTextureARB(glew.GL_TEXTURE0_ARB)
moltextureCanvas.SetAsTexture()
if cgSettings.P_shadowstrenght>0:
ShadowMap.GetCurrentPVMatrix()
ShadowMap.FeedParameters()
for i in range(3):
glew.glProgramEnvParameter4fARB(glew.GL_FRAGMENT_PROGRAM_ARB, i,
x[i][0],x[i][1],x[i][2],0)
glew.glProgramEnvParameter4fARB(glew.GL_FRAGMENT_PROGRAM_ARB, 6,
self.PredictAO(),0,0,0)
glEnable(glew.GL_VERTEX_PROGRAM_ARB)
glEnable(glew.GL_FRAGMENT_PROGRAM_ARB)
glBegin(GL_QUADS)
molGL.MolDraw(self.atompos, self.radii, self.textures/moltextureCanvas.GetHardRes(), self.colors, self.clipplane, self.excl, self.idx)
glEnd()
#glDrawArrays(GL_QUADS, 0, self.numatoms)
glDisable(glew.GL_VERTEX_PROGRAM_ARB)
glDisable(glew.GL_FRAGMENT_PROGRAM_ARB)
# Draw wireframe for clipplane
if not numpy.allclose(self.clipplane, 0):
clipplane = self.clipplane
glColor(0.5, 0.5, 0.5)
glBegin(GL_LINE_STRIP)
glVertex3f(px-r, clipplane[3], pz-r)
glVertex3f(px-r, clipplane[3], pz+r)
glVertex3f(px+r, clipplane[3], pz+r)
glVertex3f(px+r, clipplane[3], pz-r)
glVertex3f(px-r, clipplane[3], pz-r)
glEnd()
glPopMatrix()
def DrawShadowmap(self,invert,shadowSettings):
r = self.r * self.scaleFactor
px, py, pz = self.pos
glPushMatrix()
glScalef(1./r,1./r, 1./r)
glMultMatrixd((glTrackball.quat * self.orien).asRotation())
glTranslate(-px, -py, -pz)
#glClipPlane(GL_CLIP_PLANE0, self.clipplane)
scalef=extractCurrentScaleFactor()
glew.glProgramEnvParameter4fARB(glew.GL_VERTEX_PROGRAM_ARB, 0, scalef,0,0,0)
glEnable(glew.GL_VERTEX_PROGRAM_ARB)
glEnable(glew.GL_FRAGMENT_PROGRAM_ARB)
glew.glActiveTextureARB(glew.GL_TEXTURE0_ARB)
glDisable(GL_TEXTURE_2D)
glew.glActiveTextureARB(glew.GL_TEXTURE1_ARB)
glDisable(GL_TEXTURE_2D)
shadowSettings.BindShaders()
glBegin(GL_QUADS)
molGL.MolDrawShadow(self.atompos, self.radii, self.clipplane, self.excl, self.idx)
glEnd()
#glDisableClientState(GL_COLOR_ARRAY)
#glDisableClientState(GL_TEXTURE_COORD_ARRAY)
#glDrawArrays(GL_QUADS, 0, self.numatoms)
#glEnableClientState(GL_COLOR_ARRAY)
#glEnableClientState(GL_TEXTURE_COORD_ARRAY)
#if (sticks):
# pass
glPopMatrix()
def DrawHalos(self):
# let's try to aviod THIS!
# Moved to drawFrame()
#shadowmap.prepareDepthTextureForCurrentViewpoint() # hum, unavoidable.
r = self.r * self.scaleFactor
px, py, pz = self.pos
glPushMatrix()
glScalef(1/r,1/r,1/r)
glMultMatrixd((glTrackball.quat * self.orien).asRotation())
glTranslatef(-px,-py,-pz)
#glClipPlane(GL_CLIP_PLANE0, self.clipplane)
x = glGetFloatv(GL_MODELVIEW_MATRIX)
scalef = extractCurrentScaleFactor_x(x)
glew.glProgramEnvParameter4fARB(glew.GL_VERTEX_PROGRAM_ARB, 0,scalef, 0,0,0)
glEnable(glew.GL_VERTEX_PROGRAM_ARB)
glEnable(glew.GL_FRAGMENT_PROGRAM_ARB)
glDepthMask(False)
glEnable(GL_BLEND)
if (cgSettings.doingAlphaSnapshot): glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA)
else: glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
cgSettings.BindHaloShader( haloCanvas.getResPow2() )
glew.glProgramEnvParameter4fARB(glew.GL_FRAGMENT_PROGRAM_ARB, 0,
(100.0+cgSettings.P_halo_aware*1300.0)/scalef/r, 0,0,0)
glBegin(GL_QUADS)
molGL.MolDrawHalo(self.atompos, self.radii, cgSettings.P_halo_size, self.clipplane, self.excl, self.idx)
glEnd()
glDisable(GL_BLEND)
cgSettings.BindShaders()
glDepthMask(True)
glPopMatrix()
glDisable(glew.GL_VERTEX_PROGRAM_ARB)
glDisable(glew.GL_FRAGMENT_PROGRAM_ARB)
def DrawOnTexture(self):
glEnable(GL_BLEND)
glBlendFunc(GL_ONE,GL_ONE)
glMatrixMode(GL_PROJECTION)
glPushMatrix()
glLoadIdentity()
glOrtho(0,moltextureCanvas.GetSoftRes(),0,moltextureCanvas.GetSoftRes(), 0,1)
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glLoadIdentity()
lastviewport = glGetIntegerv(GL_VIEWPORT)
glViewport(0,0,moltextureCanvas.GetSoftRes(),moltextureCanvas.GetSoftRes())
glew.glActiveTextureARB(glew.GL_TEXTURE1_ARB)
glDisable(GL_TEXTURE_2D)
glew.glActiveTextureARB(glew.GL_TEXTURE0_ARB)
glDisable(GL_TEXTURE_2D)
glBegin(GL_QUADS)
molGL.MolDrawOnTexture(self.atompos, self.radii, self.textures, hardSettings.CSIZE, self.idx)
glEnd()
#if (self.sticks):
# pass
glMatrixMode(GL_PROJECTION)
glPopMatrix()
glMatrixMode(GL_MODELVIEW)
glPopMatrix()
glViewport(lastviewport[0],lastviewport[1],lastviewport[2],lastviewport[3])
return lastviewport
def PrepareAOstep(self, nsteps, shadowmap):
if not self.DoingAO(): return True
if not self.AOstarted: self.PrepareAOstart()
AOgpu2.Bind()
if ShadowMap.validView(self.DirV[self.AOdoneLvl]): ao = AOgpu2(self.DirV[self.AOdoneLvl], self, len(self.DirV), shadowmap)
AOgpu2.UnBind()
self.AOdoneLvl += 1
return (self.AOdoneLvl >= len(self.DirV))
# for testing
def PrepareAOSingleView(self, shadowmap, static_i=[0]):
self.PrepareAOstart()
AOgpu2.Bind()
ao = AOgpu2(self.DirV[static_i[0]], self, 4, shadowmap)
static_i[0] += 1
if (static_i[0] > len(self.DirV)): static_i[0] = 0
AOgpu2.UnBind()
self.AOdoneLvl = len(self.DirV)
def PrepareAOstart(self):
self.AOdoneLvl = 0
AOgpu2.Reset(self)
self.AOstarted = True
if (len(self.DirV) == 0):
# generate probe views
self.DirV = ShadowMap.GenUniform(hardSettings.N_VIEW_DIR)
# mix them up
numpy.random.shuffle(self.DirV)
def ResetAO(self):
self.AOready = False
self.AOstarted = False
self.AOdoneLvl = 0
#self.DirV = []
def DoingAO(self):
if (cgSettings.P_texture == 0): return False
if (len(self.DirV) == 0): return True
return self.AOdoneLvl < len(self.DirV)
def DecentAO(self):
k = 1.
if (self.AOdoneLvl>=len(self.DirV)): return True
else: return False # XXX
if (self.numatoms<10): return (self.AOdoneLvl>6*k)
if (self.numatoms<100): return (self.AOdoneLvl>4*k)
if (self.numatoms<1000): return (self.AOdoneLvl>2*k)
if (self.numatoms<10000): return (self.AOdoneLvl>1*k)
return True
def PredictAO(self):
# multiplicative prediction
if self.AOstarted == False: return 1.0
else:
coeff = 0.25+(self.AOdoneLvl-1)/20.
if (coeff > 1.0): coeff = 1.0
return coeff*len(self.DirV)*1.0/self.AOdoneLvl
def extractCurrentScaleFactor():
x = glGetFloatv(GL_MODELVIEW_MATRIX)
scalef=numpy.power(numpy.abs(numpy.linalg.det(x)),1./3.)
return scalef
def extractCurrentScaleFactor_x(x):
return numpy.power(numpy.abs(numpy.linalg.det(x)),1./3.)
def SetCsize(textsize, natoms):
# initial guess
i = numpy.ceil(numpy.sqrt(natoms))
hardSettings.CSIZE = textsize / int(i)
if (hardSettings.CSIZE > 250):
hardSettings.CSIZE = 250
return TOO_BIG
if (hardSettings.CSIZE < 6):
hardSettings.CSIZE = 6
return TOO_SMALL
return SIZE_OK
| gpl-2.0 | 5,204,924,955,819,472,000 | 33.595406 | 146 | 0.6038 | false | 3.016174 | false | false | false |
mbakker7/ttim | ttim/equation.py | 1 | 18909 | import numpy as np
class HeadEquation:
def equation(self):
        '''Mix-in class that returns matrix rows for head-specified conditions
        (really written as a constant-potential element).
        Works for nunknowns = 1.
        Returns matrix part of shape (nunknowns, neq, npval), complex
        Returns rhs part of shape (nunknowns, ngvbc, npval), complex
        Phi_out - c * T * q_s = Phi_in
        Well: q_s = Q / (2 * pi * r_w * H)
        LineSink: q_s = sigma / H = Q / (L * H)
'''
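        # Assembly per control point: columns run over the unknown parameters
        # of all elements; only for this element itself is the resistance term
        # (resfacp * dischargeinflayers) subtracted on its own diagonal block.
        # The rhs collects the potentials of the given boundary conditions
        # and, for a 'v' element, the specified value pc divided by the
        # Laplace parameter p.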
mat = np.empty((self.nunknowns, self.model.neq,
self.model.npval), 'D')
        # rhs needs to be initialized to zero
rhs = np.zeros((self.nunknowns, self.model.ngvbc,
self.model.npval), 'D')
for icp in range(self.ncp):
istart = icp * self.nlayers
ieq = 0
for e in self.model.elementlist:
if e.nunknowns > 0:
mat[istart: istart + self.nlayers,
ieq: ieq + e.nunknowns, :] = e.potinflayers(
self.xc[icp], self.yc[icp], self.layers)
if e == self:
for i in range(self.nlayers):
mat[istart + i, ieq + istart + i, :] -= \
self.resfacp[istart + i] * \
e.dischargeinflayers[istart + i]
ieq += e.nunknowns
for i in range(self.model.ngbc):
rhs[istart: istart + self.nlayers, i, :] -= \
self.model.gbclist[i].unitpotentiallayers(
self.xc[icp], self.yc[icp], self.layers)
if self.type == 'v':
iself = self.model.vbclist.index(self)
for i in range(self.nlayers):
rhs[istart + i, self.model.ngbc + iself, :] = \
self.pc[istart + i] / self.model.p
return mat, rhs
class WellBoreStorageEquation:
def equation(self):
        '''Mix-in class that returns matrix rows for a multi-aquifer element
        with a given total discharge, a uniform but unknown head, and
        wellbore (internal) storage.
'''
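        # Row layout: the first nunknowns - 1 rows equate the head inside the
        # well between adjacent screened layers; the last row is the wellbore
        # mass balance, combining the total discharge of all screens with the
        # storage term pi * rc**2 * p * h_well (rc is the radius of the well
        # casing in which the water level fluctuates).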
mat = np.zeros((self.nunknowns, self.model.neq,
self.model.npval), 'D')
rhs = np.zeros((self.nunknowns, self.model.ngvbc,
self.model.npval), 'D')
ieq = 0
for e in self.model.elementlist:
if e.nunknowns > 0:
head = e.potinflayers(self.xc[0], self.yc[0], self.layers) / \
self.aq.T[self.layers][:, np.newaxis, np.newaxis]
mat[:-1, ieq: ieq + e.nunknowns, :] = head[:-1, :] - head[1:, :]
mat[-1, ieq: ieq + e.nunknowns, :] -= np.pi * self.rc**2 * \
self.model.p * head[0, :]
if e == self:
disterm = self.dischargeinflayers * self.res / (2 * np.pi *
self.rw * self.aq.Haq[self.layers][:, np.newaxis])
if self.nunknowns > 1: # Multiple layers
for i in range(self.nunknowns - 1):
mat[i, ieq + i, :] -= disterm[i]
mat[i, ieq + i + 1, :] += disterm[i + 1]
mat[-1, ieq: ieq + self.nunknowns, :] += \
self.dischargeinflayers
mat[-1, ieq, :] += \
np.pi * self.rc ** 2 * self.model.p * disterm[0]
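                    # together with the head[0, :] term above, this makes the
                    # storage term act on the head inside the well (outside
                    # head minus the resistance drop across the top screen)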
ieq += e.nunknowns
for i in range(self.model.ngbc):
head = self.model.gbclist[i].unitpotentiallayers(
self.xc[0], self.yc[0], self.layers) / \
self.aq.T[self.layers][:, np.newaxis]
rhs[:-1, i, :] -= head[:-1, :] - head[1:, :]
rhs[-1, i, :] += np.pi * self.rc ** 2 * self.model.p * head[0, :]
if self.type == 'v':
iself = self.model.vbclist.index(self)
rhs[-1, self.model.ngbc + iself, :] += self.flowcoef
if self.hdiff is not None:
# head[0] - head[1] = hdiff
rhs[:-1, self.model.ngbc + iself, :] += \
self.hdiff[:, np.newaxis] / self.model.p
return mat, rhs
class HeadEquationNores:
def equation(self):
        '''Mix-in class that returns matrix rows for head-specified conditions
        (really written as a constant-potential element), without entry resistance.
        Returns matrix part of shape (nunknowns, neq, npval), complex
        Returns rhs part of shape (nunknowns, ngvbc, npval), complex
'''
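        # Same assembly as HeadEquation, but without the entry-resistance
        # correction on the element's own parameters.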
mat = np.empty((self.nunknowns, self.model.neq,
self.model.npval), 'D')
rhs = np.zeros((self.nunknowns, self.model.ngvbc,
self.model.npval), 'D')
for icp in range(self.ncp):
istart = icp * self.nlayers
ieq = 0
for e in self.model.elementlist:
if e.nunknowns > 0:
mat[istart: istart + self.nlayers,
ieq: ieq + e.nunknowns, :] = e.potinflayers(
self.xc[icp], self.yc[icp], self.layers)
ieq += e.nunknowns
for i in range(self.model.ngbc):
rhs[istart: istart + self.nlayers, i, :] -= \
self.model.gbclist[i].unitpotentiallayers(
self.xc[icp], self.yc[icp], self.layers)
if self.type == 'v':
iself = self.model.vbclist.index(self)
for i in range(self.nlayers):
rhs[istart + i, self.model.ngbc + iself, :] = \
self.pc[istart + i] / self.model.p
return mat, rhs
class LeakyWallEquation:
def equation(self):
        '''Mix-in class that returns matrix rows for a leaky-wall condition.
        Returns matrix part of shape (nunknowns, neq, npval), complex
        Returns rhs part of shape (nunknowns, ngvbc, npval), complex
'''
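        # Each row states that the flow normal to the wall equals resfac times
        # the head difference across the wall (heads evaluated at control
        # points on either side); resfac is provided by the element and
        # encodes the wall resistance.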
mat = np.empty((self.nunknowns, self.model.neq,
self.model.npval), 'D')
rhs = np.zeros((self.nunknowns, self.model.ngvbc,
self.model.npval), 'D')
for icp in range(self.ncp):
istart = icp * self.nlayers
ieq = 0
for e in self.model.elementlist:
if e.nunknowns > 0:
qx, qy = e.disvecinflayers(self.xc[icp], self.yc[icp],
self.layers)
mat[istart: istart + self.nlayers,
ieq: ieq + e.nunknowns, :] = \
qx * self.cosout[icp] + qy * self.sinout[icp]
if e == self:
hmin = e.potinflayers(
self.xcneg[icp], self.ycneg[icp], self.layers) / \
                            self.aq.T[self.layers][:, np.newaxis, np.newaxis]
hplus = e.potinflayers(
self.xc[icp], self.yc[icp], self.layers) / \
self.aq.T[self.layers][:, np.newaxis, np.newaxis]
mat[istart:istart + self.nlayers,
ieq: ieq + e.nunknowns, :] -= \
self.resfac[:, np.newaxis, np.newaxis] * \
(hplus - hmin)
ieq += e.nunknowns
for i in range(self.model.ngbc):
qx, qy = self.model.gbclist[i].unitdisveclayers(
self.xc[icp], self.yc[icp], self.layers)
rhs[istart: istart + self.nlayers, i, :] -= \
qx * self.cosout[icp] + qy * self.sinout[icp]
#if self.type == 'v':
# iself = self.model.vbclist.index(self)
# for i in range(self.nlayers):
# rhs[istart+i,self.model.ngbc+iself,:] = \
# self.pc[istart+i] / self.model.p
return mat, rhs
class MscreenEquation:
def equation(self):
'''Mix-in class that returns matrix rows for multi-screen conditions
where total discharge is specified.
        Works for nunknowns = 1.
        Returns matrix part of shape (nunknowns, neq, npval), complex
        Returns rhs part of shape (nunknowns, ngvbc, npval), complex
head_out - c * q_s = h_in
Set h_i - h_(i + 1) = 0 and Sum Q_i = Q'''
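        # Row layout per control point: the first nlayers - 1 rows equate the
        # head inside the screen between adjacent layers (outside head minus
        # the resistance drop resfach * discharge, with an additional vertical
        # resistance term vresfac); the last row sums the layer discharges to
        # the total discharge (rhs = 1 for a 'v' element, 0 for a 'z' element).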
mat = np.zeros((self.nunknowns, self.model.neq,
self.model.npval), 'D')
rhs = np.zeros((self.nunknowns, self.model.ngvbc,
self.model.npval), 'D')
ieq = 0
for icp in range(self.ncp):
istart = icp * self.nlayers
ieq = 0
for e in self.model.elementlist:
if e.nunknowns > 0:
head = e.potinflayers(
self.xc[icp], self.yc[icp], self.layers) / \
self.aq.T[self.layers][:,np.newaxis,np.newaxis]
mat[istart: istart + self.nlayers - 1,
ieq: ieq + e.nunknowns, :] = \
head[:-1,:] - head[1:,:]
if e == self:
for i in range(self.nlayers-1):
mat[istart + i, ieq + istart + i, :] -= \
self.resfach[istart + i] * \
e.dischargeinflayers[istart + i]
mat[istart + i, ieq + istart + i + 1, :] += \
self.resfach[istart + i + 1] * \
e.dischargeinflayers[istart + i + 1]
mat[istart + i,
ieq + istart: ieq + istart + i + 1, :] -= \
self.vresfac[istart + i] * \
e.dischargeinflayers[istart + i]
mat[istart + self.nlayers - 1,
ieq + istart: ieq + istart + self.nlayers, :] = 1.0
ieq += e.nunknowns
for i in range(self.model.ngbc):
head = self.model.gbclist[i].unitpotentiallayers(
self.xc[icp], self.yc[icp], self.layers) / \
self.aq.T[self.layers][:, np.newaxis]
rhs[istart: istart + self.nlayers - 1, i, :] -= \
head[:-1,:] - head[1:,:]
if self.type == 'v':
iself = self.model.vbclist.index(self)
rhs[istart + self.nlayers - 1, self.model.ngbc + iself, :] = 1.0
# If self.type == 'z', it should sum to zero,
# which is the default value of rhs
return mat, rhs
class MscreenDitchEquation:
def equation(self):
'''Mix-in class that returns matrix rows for multi-screen conditions
where total discharge is specified.
        Returns matrix part of shape (nunknowns, neq, npval), complex
        Returns rhs part of shape (nunknowns, ngvbc, npval), complex
        head_out - c * q_s = h_in
        Set h_i - h_(i+1) = 0 and Sum Q_i = Q, which is written as:
headin_i - headin_(i+1) = 0
headout_i - c*qs_i - headout_(i+1) + c*qs_(i+1) = 0
In case of storage:
Sum Q_i - A * p^2 * headin = Q
'''
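        # Row layout: per control point, nlayers - 1 rows equate the inside
        # head between adjacent layers and one row stores the inside head of
        # the top layer; after the loop those stored rows are differenced
        # between neighbouring control points so that the head is uniform
        # along the ditch. The very last row sets the total discharge of all
        # control points, optionally including the ditch storage term
        # Astorage * p**2 * h.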
mat = np.zeros((self.nunknowns, self.model.neq,
self.model.npval), 'D')
rhs = np.zeros((self.nunknowns, self.model.ngvbc,
self.model.npval), 'D')
ieq = 0
for icp in range(self.ncp):
istart = icp * self.nlayers
ieq = 0
for e in self.model.elementlist:
if e.nunknowns > 0:
head = e.potinflayers(
self.xc[icp], self.yc[icp], self.layers) / \
self.aq.T[self.layers][:, np.newaxis, np.newaxis]
if self.nlayers > 1:
mat[istart: istart + self.nlayers - 1,
ieq: ieq + e.nunknowns, :] = \
head[:-1, :] - head[1:, :]
# Store head in top layer in 2nd to last equation
# of this control point
mat[istart + self.nlayers - 1,
ieq: ieq + e.nunknowns, :] = head[0,:]
if e == self:
# Correct head in top layer in second to last equation
# to make it head inside
mat[istart + self.nlayers - 1,
ieq + istart, :] -= self.resfach[istart] * \
e.dischargeinflayers[istart]
if icp == 0:
istartself = ieq # Needed to build last equation
for i in range(self.nlayers-1):
mat[istart + i, ieq + istart + i, :] -= \
self.resfach[istart + i] * \
e.dischargeinflayers[istart + i]
mat[istart + i, ieq + istart + i + 1, :] += \
self.resfach[istart + i + 1] * \
e.dischargeinflayers[istart + i + 1]
#vresfac not yet used here; it is set to zero as
                    #I don't quite know what it means yet
#mat[istart + i, ieq + istart:ieq+istart+i+1,:] -= \
# self.vresfac[istart + i] * \
# e.dischargeinflayers[istart + i]
ieq += e.nunknowns
for i in range(self.model.ngbc):
head = self.model.gbclist[i].unitpotentiallayers(
self.xc[icp], self.yc[icp], self.layers) / \
self.aq.T[self.layers][:, np.newaxis]
if self.nlayers > 1:
rhs[istart: istart + self.nlayers - 1, i, :] -= \
head[:-1, :] - head[1:, :]
# Store minus the head in top layer in second to last equation
# for this control point
rhs[istart + self.nlayers - 1, i, :] -= head[0, :]
# Modify last equations
for icp in range(self.ncp - 1):
ieq = (icp + 1) * self.nlayers - 1
# Head first layer control point icp - Head first layer control
# point icp + 1
mat[ieq, :, :] -= mat[ieq + self.nlayers, :, :]
rhs[ieq, :, :] -= rhs[ieq + self.nlayers, :, :]
# Last equation setting the total discharge of the ditch
mat[-1, :, :] = 0.0
mat[-1, istartself: istartself + self.nparam, :] = 1.0
if self.Astorage is not None:
# Used to store last equation in case of ditch storage
matlast = np.zeros((self.model.neq, self.model.npval), 'D')
rhslast = np.zeros((self.model.npval), 'D')
ieq = 0
for e in self.model.elementlist:
head = e.potinflayers(self.xc[0], self.yc[0], self.layers) / \
self.aq.T[self.layers][:, np.newaxis, np.newaxis]
matlast[ieq: ieq + e.nunknowns] -= \
self.Astorage * self.model.p ** 2 * head[0, :]
if e == self:
# only need to correct first unknown
matlast[ieq] += self.Astorage * self.model.p ** 2 * \
self.resfach[0] * e.dischargeinflayers[0]
ieq += e.nunknowns
for i in range(self.model.ngbc):
head = self.model.gbclist[i].unitpotentiallayers(
self.xc[0], self.yc[0], self.layers) / \
self.aq.T[self.layers][:, np.newaxis]
rhslast += self.Astorage * self.model.p ** 2 * head[0]
mat[-1] += matlast
rhs[-1, :, :] = 0.0
if self.type == 'v':
iself = self.model.vbclist.index(self)
rhs[-1, self.model.ngbc + iself, :] = 1.0
# If self.type == 'z', it should sum to zero, which is the default
# value of rhs
if self.Astorage is not None:
rhs[-1, self.model.ngbc + iself, :] += rhslast
return mat, rhs
class InhomEquation:
def equation(self):
'''Mix-in class that returns matrix rows for inhomogeneity conditions'''
mat = np.zeros((self.nunknowns, self.model.neq,
self.model.npval), 'D')
rhs = np.zeros((self.nunknowns, self.model.ngvbc,
self.model.npval), 'D')
for icp in range(self.ncp):
istart = icp * 2 * self.nlayers
ieq = 0
            for e in self.model.elementlist:
if e.nunknowns > 0:
mat[istart: istart + self.nlayers,
ieq: ieq + e.nunknowns, :] = \
e.potinflayers(self.xc[icp], self.yc[icp],
self.layers, self.aqin) / \
self.aqin.T[self.layers][:, np.newaxis, np.newaxis] - \
e.potinflayers(self.xc[icp], self.yc[icp],
self.layers, self.aqout) / \
self.aqout.T[self.layers][:, np.newaxis, np.newaxis]
qxin, qyin = e.disinflayers(
self.xc[icp], self.yc[icp], self.layers, self.aqin)
qxout, qyout = e.disinflayers(
self.xc[icp], self.yc[icp], self.layers, self.aqout)
mat[istart + self.nlayers: istart + 2 * self.nlayers,
ieq: ieq + e.nunknowns, :] = \
(qxin - qxout) * np.cos(self.thetacp[icp]) + \
(qyin - qyout) * np.sin(self.thetacp[icp])
ieq += e.nunknowns
for i in range(self.model.ngbc):
rhs[istart: istart + self.nlayers, i, :] -= (
self.model.gbclist[i].unitpotentiallayers(
self.xc[icp], self.yc[icp], self.layers, self.aqin) /
self.aqin.T[self.layers][:, np.newaxis] -
self.model.gbclist[i].unitpotentiallayers(
self.xc[icp], self.yc[icp], self.layers, self.aqout) /
self.aqout.T[self.layers][:, np.newaxis])
qxin, qyin = self.model.gbclist[i].unitdischargelayers(
self.xc[icp], self.yc[icp], self.layers, self.aqin)
qxout,qyout = self.model.gbclist[i].unitdischargelayers(
self.xc[icp], self.yc[icp], self.layers, self.aqout)
rhs[istart + self.nlayers: istart + 2 * self.nlayers, i, :] -= \
(qxin - qxout) * np.cos(self.thetacp[icp]) + \
(qyin - qyout) * np.sin(self.thetacp[icp])
return mat, rhs | mit | 836,768,421,473,794,000 | 50.38587 | 82 | 0.459041 | false | 3.588727 | false | false | false |
BrainTech/openbci | obci/analysis/classification/tests/pyml_test.py | 1 | 1979 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# OpenBCI - framework for Brain-Computer Interfaces based on EEG signal
# Project was initiated by Magdalena Michalska and Krzysztof Kulewski
# as part of their MSc theses at the University of Warsaw.
# Copyright (C) 2008-2009 Krzysztof Kulewski and Magdalena Michalska
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author:
# Mateusz Kruszyński <[email protected]>
"""
This script shows how to use the PyML module.
"""
from PyML import *
def run():
data = VectorDataSet('iris.data', labelsColumn = -1)
#labels = data.labels.L
#some_pattern = data.getPattern(2)
#all_features_as_array = data.getMatrix()
#data.normalize()
#number_of_features = data.numFeatures
#number_of_trials = len(data)
#number_of_every_class = labels.classSize
data2 = data.__class__(data, classes = ['Iris-versicolor', 'Iris-virginica'])
s = SVM()
#r = s.cv(data2)
print(data2)
#r.plotROC()
#param = modelSelection.Param(svm.SVM(), 'C', [0.1, 1, 10, 100, 1000])
#m = modelSelection.ModelSelector(param, measure='balancedSuccessRate')
#m.train(data2)
#best_svm = m.classifier
#for i in range(len(data2)):
# print(best_svm.decisionFunc(data2, i), best_svm.classify(data2, i))
#best_svm_result = best_svm.cv(data2)
if __name__ == '__main__':
run()
| gpl-3.0 | 6,883,719,996,742,280,000 | 31.966667 | 81 | 0.68908 | false | 3.200647 | false | false | false |
bagelbits/dnd-3.5-tools | character-specific/Krag/weapons/boulder.py | 1 | 2255 | #!/usr/bin/env python
# -*- coding: utf8 -*-
"""
Damage Calculator for my character, Krag
Written by Christopher Durien Ward
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
from dice_rolling import damage_roll, attack_roll
#For making text all colorful and easier to read.
class colorz:
PURPLE = '\033[95m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
GREY = '\033[90m'
CYAN = '\033[96m'
WHITE = '\033[97m'
ENDC = '\033[0m'
#################
# THROW BOULDER #
#################
def throw_boulder(char_stats):
total_damage = 0
boulder_attack_bonus = char_stats['BAB'] + char_stats['StrMod'] + char_stats['AttackSizeMod']
boulder_attack_bonus += char_stats['MoraleAttack']
#Range mod
distance = int(raw_input('\n\nHow far away is the target? (in feet) '))
if distance >= char_stats['BoulderRange'] * 5:
print "Target too far away"
return total_damage
range_penalty = 0
while distance >= char_stats['BoulderRange']:
distance -= char_stats['BoulderRange']
range_penalty += 1
#Attack roll
total_attack_roll, multiplier = attack_roll(char_stats, boulder_attack_bonus, range_penalty)
hit = raw_input('Did it hit? (y|n) ')
if hit.lower().startswith('n'):
return total_damage
#Damage roll
damage_mod = char_stats['StrMod'] + char_stats['MoraleDmg']
damage_dice = {
'num_of_dice': 2,
'num_of_sides': 8,
'total_mod': damage_mod,
'multiplier': multiplier
}
total_damage = damage_roll(char_stats, damage_dice)
return total_damage
| gpl-2.0 | 1,582,518,392,385,350,000 | 29.066667 | 97 | 0.642572 | false | 3.432268 | false | false | false |
jemandez/creaturas-magicas | Configuraciones básicas/scripts/addons/blendertools-1.0.0/makewalk/fkik.py | 1 | 23920 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Project Name: MakeHuman
# Product Home Page: http://www.makehuman.org/
# Code Home Page: http://code.google.com/p/makehuman/
# Authors: Thomas Larsson
# Script copyright (C) MakeHuman Team 2001-2014
# Coding Standards: See http://www.makehuman.org/node/165
import bpy
from mathutils import Vector, Matrix
from bpy.props import *
from .utils import *
def updateScene():
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.mode_set(mode='POSE')
def getPoseMatrix(gmat, pb):
restInv = pb.bone.matrix_local.inverted()
if pb.parent:
parInv = pb.parent.matrix.inverted()
parRest = pb.parent.bone.matrix_local
return restInv * (parRest * (parInv * gmat))
else:
return restInv * gmat
def getGlobalMatrix(mat, pb):
gmat = pb.bone.matrix_local * mat
if pb.parent:
parMat = pb.parent.matrix
parRest = pb.parent.bone.matrix_local
return parMat * (parRest.inverted() * gmat)
else:
return gmat
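# Note (added): getPoseMatrix and getGlobalMatrix are inverse conversions, so
# for any pose bone pb the round trip
#     getGlobalMatrix(getPoseMatrix(gmat, pb), pb)
# recovers the original global matrix gmat (up to floating-point error).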
def matchPoseTranslation(pb, src):
pmat = getPoseMatrix(src.matrix, pb)
insertLocation(pb, pmat)
def matchPoseRotation(pb, src):
pmat = getPoseMatrix(src.matrix, pb)
insertRotation(pb, pmat)
def matchPoseTwist(pb, src):
pmat0 = src.matrix_basis
euler = pmat0.to_3x3().to_euler('YZX')
euler.z = 0
pmat = euler.to_matrix().to_4x4()
pmat.col[3] = pmat0.col[3]
insertRotation(pb, pmat)
def printMatrix(string,mat):
print(string)
for i in range(4):
print(" %.4g %.4g %.4g %.4g" % tuple(mat[i]))
def matchIkLeg(legIk, toeFk, mBall, mToe, mHeel):
rmat = toeFk.matrix.to_3x3()
tHead = Vector(toeFk.matrix.col[3][:3])
ty = rmat.col[1]
tail = tHead + ty * toeFk.bone.length
zBall = mBall.matrix.col[3][2]
zToe = mToe.matrix.col[3][2]
zHeel = mHeel.matrix.col[3][2]
x = Vector(rmat.col[0])
y = Vector(rmat.col[1])
z = Vector(rmat.col[2])
if zHeel > zBall and zHeel > zToe:
# 1. foot.ik is flat
if abs(y[2]) > abs(z[2]):
y = -z
y[2] = 0
else:
# 2. foot.ik starts at heel
hHead = Vector(mHeel.matrix.col[3][:3])
y = tail - hHead
y.normalize()
x -= x.dot(y)*y
x.normalize()
if abs(x[2]) < 0.7:
x[2] = 0
x.normalize()
z = x.cross(y)
head = tail - y * legIk.bone.length
# Create matrix
gmat = Matrix()
gmat.col[0][:3] = x
gmat.col[1][:3] = y
gmat.col[2][:3] = z
gmat.col[3][:3] = head
pmat = getPoseMatrix(gmat, legIk)
insertLocation(legIk, pmat)
insertRotation(legIk, pmat)
def matchPoleTarget(pb, above, below):
x = Vector(above.matrix.col[1][:3])
y = Vector(below.matrix.col[1][:3])
p0 = Vector(below.matrix.col[3][:3])
n = x.cross(y)
if abs(n.length) > 1e-4:
z = x - y
n.normalize()
z -= z.dot(n)*n
z.normalize()
p = p0 + 6*pb.length*z
else:
p = p0
gmat = Matrix.Translation(p)
pmat = getPoseMatrix(gmat, pb)
insertLocation(pb, pmat)
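# Note (added): the pole target is placed in the bend plane spanned by the two
# bone axes, offset 6 bone lengths from the joint (head of the lower bone), so
# the IK solver preserves the original bend direction of the limb.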
def matchPoseReverse(pb, src):
gmat = src.matrix
tail = gmat.col[3] + src.length * gmat.col[1]
rmat = Matrix((gmat.col[0], -gmat.col[1], -gmat.col[2], tail))
rmat.transpose()
pmat = getPoseMatrix(rmat, pb)
pb.matrix_basis = pmat
insertRotation(pb, pmat)
def matchPoseScale(pb, src):
pmat = getPoseMatrix(src.matrix, pb)
pb.scale = pmat.to_scale()
pb.keyframe_insert("scale", group=pb.name)
def snapFkArm(rig, snapIk, snapFk, frame):
(uparmFk, loarmFk, handFk) = snapFk
(uparmIk, loarmIk, elbow, elbowPt, handIk) = snapIk
matchPoseRotation(uparmFk, uparmIk)
matchPoseRotation(loarmFk, loarmIk)
matchPoseRotation(handFk, handIk)
def snapIkArm(rig, snapIk, snapFk, frame):
(uparmIk, loarmIk, elbow, elbowPt, handIk) = snapIk
(uparmFk, loarmFk, handFk) = snapFk
matchPoseTranslation(handIk, handFk)
matchPoseRotation(handIk, handFk)
updateScene()
matchPoleTarget(elbowPt, uparmFk, loarmFk)
#matchPoseRotation(uparmIk, uparmFk)
#matchPoseRotation(loarmIk, loarmFk)
def snapFkLeg(rig, snapIk, snapFk, frame, legIkToAnkle):
(uplegIk, lolegIk, kneePt, ankleIk, legIk, footRev, toeRev, mBall, mToe, mHeel) = snapIk
(uplegFk, lolegFk, footFk, toeFk) = snapFk
matchPoseRotation(uplegFk, uplegIk)
matchPoseRotation(lolegFk, lolegIk)
if not legIkToAnkle:
matchPoseReverse(footFk, footRev)
matchPoseReverse(toeFk, toeRev)
def snapIkLeg(rig, snapIk, snapFk, frame, legIkToAnkle):
(uplegIk, lolegIk, kneePt, ankleIk, legIk, footRev, toeRev, mBall, mToe, mHeel) = snapIk
(uplegFk, lolegFk, footFk, toeFk) = snapFk
if legIkToAnkle:
matchPoseTranslation(ankleIk, footFk)
else:
matchIkLeg(legIk, toeFk, mBall, mToe, mHeel)
matchPoseTwist(lolegIk, lolegFk)
updateScene()
matchPoseReverse(toeRev, toeFk)
updateScene()
matchPoseReverse(footRev, footFk)
updateScene()
matchPoleTarget(kneePt, uplegFk, lolegFk)
if not legIkToAnkle:
matchPoseTranslation(ankleIk, footFk)
SnapBonesAlpha8 = {
"Arm" : ["upper_arm", "forearm", "hand"],
"ArmFK" : ["upper_arm.fk", "forearm.fk", "hand.fk"],
"ArmIK" : ["upper_arm.ik", "forearm.ik", None, "elbow.pt.ik", "hand.ik"],
"Leg" : ["thigh", "shin", "foot", "toe"],
"LegFK" : ["thigh.fk", "shin.fk", "foot.fk", "toe.fk"],
"LegIK" : ["thigh.ik", "shin.ik", "knee.pt.ik", "ankle.ik", "foot.ik", "foot.rev", "toe.rev", "ball.marker", "toe.marker", "heel.marker"],
}
def getSnapBones(rig, key, suffix):
try:
rig.pose.bones["thigh.fk.L"]
names = SnapBonesAlpha8[key]
suffix = '.' + suffix[1:]
except KeyError:
names = None
if not names:
raise McpError("Not an mhx armature")
pbones = []
constraints = []
for name in names:
if name:
pb = rig.pose.bones[name+suffix]
pbones.append(pb)
for cns in pb.constraints:
if cns.type == 'LIMIT_ROTATION' and not cns.mute:
constraints.append(cns)
else:
pbones.append(None)
return tuple(pbones),constraints
def muteConstraints(constraints, value):
for cns in constraints:
cns.mute = value
def clearAnimation(rig, scn, act, type, snapBones):
from . import target
target.getTargetArmature(rig, scn)
ikBones = []
if scn.McpFkIkArms:
for bname in snapBones["Arm" + type]:
if bname is not None:
ikBones += [bname+".L", bname+".R"]
if scn.McpFkIkLegs:
for bname in snapBones["Leg" + type]:
if bname is not None:
ikBones += [bname+".L", bname+".R"]
ikFCurves = []
for fcu in act.fcurves:
words = fcu.data_path.split('"')
if (words[0] == "pose.bones[" and
words[1] in ikBones):
ikFCurves.append(fcu)
if ikFCurves == []:
raise MocapError("%s bones have no animation" % type)
for fcu in ikFCurves:
act.fcurves.remove(fcu)
def setMhxIk(rig, useArms, useLegs, turnOn):
if isMhxRig(rig):
ikLayers = []
fkLayers = []
if useArms:
rig["MhaArmIk_L"] = turnOn
rig["MhaArmIk_R"] = turnOn
ikLayers += [2,18]
fkLayers += [3,19]
if useLegs:
rig["MhaLegIk_L"] = turnOn
rig["MhaLegIk_R"] = turnOn
ikLayers += [4,20]
fkLayers += [5,21]
if turnOn:
first = ikLayers
second = fkLayers
else:
first = fkLayers
second = ikLayers
for n in first:
rig.data.layers[n] = True
for n in second:
rig.data.layers[n] = False
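# Usage note (added): callers below invoke this as, for example,
#     setMhxIk(rig, scn.McpFkIkArms, scn.McpFkIkLegs, True)
# which drives the MhaArmIk_*/MhaLegIk_* properties of an MHX rig and toggles
# the corresponding FK/IK bone layers; passing False switches back to FK.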
def transferMhxToFk(rig, scn):
from . import target
target.getTargetArmature(rig, scn)
lArmSnapIk,lArmCnsIk = getSnapBones(rig, "ArmIK", "_L")
lArmSnapFk,lArmCnsFk = getSnapBones(rig, "ArmFK", "_L")
rArmSnapIk,rArmCnsIk = getSnapBones(rig, "ArmIK", "_R")
rArmSnapFk,rArmCnsFk = getSnapBones(rig, "ArmFK", "_R")
lLegSnapIk,lLegCnsIk = getSnapBones(rig, "LegIK", "_L")
lLegSnapFk,lLegCnsFk = getSnapBones(rig, "LegFK", "_L")
rLegSnapIk,rLegCnsIk = getSnapBones(rig, "LegIK", "_R")
rLegSnapFk,rLegCnsFk = getSnapBones(rig, "LegFK", "_R")
#muteAllConstraints(rig, True)
oldLayers = list(rig.data.layers)
setMhxIk(rig, scn.McpFkIkArms, scn.McpFkIkLegs, True)
rig.data.layers = MhxLayers
lLegIkToAnkle = rig["MhaLegIkToAnkle_L"]
rLegIkToAnkle = rig["MhaLegIkToAnkle_R"]
frames = getActiveFramesBetweenMarkers(rig, scn)
nFrames = len(frames)
limbsBendPositive(rig, scn.McpFkIkArms, scn.McpFkIkLegs, frames)
for n,frame in enumerate(frames):
showProgress(n, frame, nFrames)
scn.frame_set(frame)
updateScene()
if scn.McpFkIkArms:
snapFkArm(rig, lArmSnapIk, lArmSnapFk, frame)
snapFkArm(rig, rArmSnapIk, rArmSnapFk, frame)
if scn.McpFkIkLegs:
snapFkLeg(rig, lLegSnapIk, lLegSnapFk, frame, lLegIkToAnkle)
snapFkLeg(rig, rLegSnapIk, rLegSnapFk, frame, rLegIkToAnkle)
rig.data.layers = oldLayers
setMhxIk(rig, scn.McpFkIkArms, scn.McpFkIkLegs, False)
setInterpolation(rig)
#muteAllConstraints(rig, False)
def transferMhxToIk(rig, scn):
from . import target
target.getTargetArmature(rig, scn)
lArmSnapIk,lArmCnsIk = getSnapBones(rig, "ArmIK", "_L")
lArmSnapFk,lArmCnsFk = getSnapBones(rig, "ArmFK", "_L")
rArmSnapIk,rArmCnsIk = getSnapBones(rig, "ArmIK", "_R")
rArmSnapFk,rArmCnsFk = getSnapBones(rig, "ArmFK", "_R")
lLegSnapIk,lLegCnsIk = getSnapBones(rig, "LegIK", "_L")
lLegSnapFk,lLegCnsFk = getSnapBones(rig, "LegFK", "_L")
rLegSnapIk,rLegCnsIk = getSnapBones(rig, "LegIK", "_R")
rLegSnapFk,rLegCnsFk = getSnapBones(rig, "LegFK", "_R")
#muteAllConstraints(rig, True)
oldLayers = list(rig.data.layers)
setMhxIk(rig, scn.McpFkIkArms, scn.McpFkIkLegs, False)
rig.data.layers = MhxLayers
lLegIkToAnkle = rig["MhaLegIkToAnkle_L"]
rLegIkToAnkle = rig["MhaLegIkToAnkle_R"]
frames = getActiveFramesBetweenMarkers(rig, scn)
#frames = range(scn.frame_start, scn.frame_end+1)
nFrames = len(frames)
for n,frame in enumerate(frames):
showProgress(n, frame, nFrames)
scn.frame_set(frame)
updateScene()
if scn.McpFkIkArms:
snapIkArm(rig, lArmSnapIk, lArmSnapFk, frame)
snapIkArm(rig, rArmSnapIk, rArmSnapFk, frame)
if scn.McpFkIkLegs:
snapIkLeg(rig, lLegSnapIk, lLegSnapFk, frame, lLegIkToAnkle)
snapIkLeg(rig, rLegSnapIk, rLegSnapFk, frame, rLegIkToAnkle)
rig.data.layers = oldLayers
setMhxIk(rig, scn.McpFkIkArms, scn.McpFkIkLegs, True)
setInterpolation(rig)
#muteAllConstraints(rig, False)
def muteAllConstraints(rig, value):
lArmSnapIk,lArmCnsIk = getSnapBones(rig, "ArmIK", "_L")
lArmSnapFk,lArmCnsFk = getSnapBones(rig, "ArmFK", "_L")
rArmSnapIk,rArmCnsIk = getSnapBones(rig, "ArmIK", "_R")
rArmSnapFk,rArmCnsFk = getSnapBones(rig, "ArmFK", "_R")
lLegSnapIk,lLegCnsIk = getSnapBones(rig, "LegIK", "_L")
lLegSnapFk,lLegCnsFk = getSnapBones(rig, "LegFK", "_L")
rLegSnapIk,rLegCnsIk = getSnapBones(rig, "LegIK", "_R")
rLegSnapFk,rLegCnsFk = getSnapBones(rig, "LegFK", "_R")
muteConstraints(lArmCnsIk, value)
muteConstraints(lArmCnsFk, value)
muteConstraints(rArmCnsIk, value)
muteConstraints(rArmCnsFk, value)
muteConstraints(lLegCnsIk, value)
muteConstraints(lLegCnsFk, value)
muteConstraints(rLegCnsIk, value)
muteConstraints(rLegCnsFk, value)
#------------------------------------------------------------------------
# Rigify
#------------------------------------------------------------------------
SnapBonesRigify = {
"Arm" : ["upper_arm", "forearm", "hand"],
"ArmFK" : ["upper_arm.fk", "forearm.fk", "hand.fk"],
"ArmIK" : ["hand_ik", "elbow_target.ik"],
"Leg" : ["thigh", "shin", "foot"],
"LegFK" : ["thigh.fk", "shin.fk", "foot.fk"],
"LegIK" : ["foot.ik", "foot_roll.ik", "knee_target.ik"],
}
def setLocation(bname, rig):
pb = rig.pose.bones[bname]
pb.keyframe_insert("location", group=pb.name)
def setRotation(bname, rig):
pb = rig.pose.bones[bname]
if pb.rotation_mode == 'QUATERNION':
pb.keyframe_insert("rotation_quaternion", group=pb.name)
else:
pb.keyframe_insert("rotation_euler", group=pb.name)
def setLocRot(bname, rig):
pb = rig.pose.bones[bname]
pb.keyframe_insert("location", group=pb.name)
pb = rig.pose.bones[bname]
if pb.rotation_mode == 'QUATERNION':
pb.keyframe_insert("rotation_quaternion", group=pb.name)
else:
pb.keyframe_insert("rotation_euler", group=pb.name)
def setRigifyFKIK(rig, value):
rig.pose.bones["hand.ik.L"]["ikfk_switch"] = value
rig.pose.bones["hand.ik.R"]["ikfk_switch"] = value
rig.pose.bones["foot.ik.L"]["ikfk_switch"] = value
rig.pose.bones["foot.ik.R"]["ikfk_switch"] = value
on = (value < 0.5)
for n in [6, 9, 12, 15]:
rig.data.layers[n] = on
for n in [7, 10, 13, 16]:
rig.data.layers[n] = not on
def transferRigifyToFk(rig, scn):
from rig_ui import fk2ik_arm, fk2ik_leg
frames = getActiveFramesBetweenMarkers(rig, scn)
nFrames = len(frames)
for n,frame in enumerate(frames):
showProgress(n, frame, nFrames)
scn.frame_set(frame)
updateScene()
if scn.McpFkIkArms:
for suffix in [".L", ".R"]:
uarm = "upper_arm.fk"+suffix
farm = "forearm.fk"+suffix
hand = "hand.fk"+suffix
uarmi = "MCH-upper_arm.ik"+suffix
farmi = "MCH-forearm.ik"+suffix
handi = "hand.ik"+suffix
fk = [uarm,farm,hand]
ik = [uarmi,farmi,handi]
fk2ik_arm(rig, fk, ik)
setRotation(uarm, rig)
setRotation(farm, rig)
setRotation(hand, rig)
if scn.McpFkIkLegs:
for suffix in [".L", ".R"]:
thigh = "thigh.fk"+suffix
shin = "shin.fk"+suffix
foot = "foot.fk"+suffix
mfoot = "MCH-foot"+suffix
thighi = "MCH-thigh.ik"+suffix
shini = "MCH-shin.ik"+suffix
footi = "foot.ik"+suffix
mfooti = "MCH-foot"+suffix+".001"
fk = [thigh,shin,foot,mfoot]
ik = [thighi,shini,footi,mfooti]
fk2ik_leg(rig, fk, ik)
setRotation(thigh, rig)
setRotation(shin, rig)
setRotation(foot, rig)
setInterpolation(rig)
for suffix in [".L", ".R"]:
if scn.McpFkIkArms:
rig.pose.bones["hand.ik"+suffix]["ikfk_switch"] = 0.0
if scn.McpFkIkLegs:
rig.pose.bones["foot.ik"+suffix]["ikfk_switch"] = 0.0
def transferRigifyToIk(rig, scn):
from rig_ui import ik2fk_arm, ik2fk_leg
frames = getActiveFramesBetweenMarkers(rig, scn)
nFrames = len(frames)
for n,frame in enumerate(frames):
showProgress(n, frame, nFrames)
scn.frame_set(frame)
updateScene()
if scn.McpFkIkArms:
for suffix in [".L", ".R"]:
uarm = "upper_arm.fk"+suffix
farm = "forearm.fk"+suffix
hand = "hand.fk"+suffix
uarmi = "MCH-upper_arm.ik"+suffix
farmi = "MCH-forearm.ik"+suffix
handi = "hand.ik"+suffix
pole = "elbow_target.ik"+suffix
fk = [uarm,farm,hand]
ik = [uarmi,farmi,handi,pole]
ik2fk_arm(rig, fk, ik)
setLocation(pole, rig)
setLocRot(handi, rig)
if scn.McpFkIkLegs:
for suffix in [".L", ".R"]:
thigh = "thigh.fk"+suffix
shin = "shin.fk"+suffix
foot = "foot.fk"+suffix
mfoot = "MCH-foot"+suffix
thighi = "MCH-thigh.ik"+suffix
shini = "MCH-shin.ik"+suffix
footi = "foot.ik"+suffix
footroll = "foot_roll.ik"+suffix
pole = "knee_target.ik"+suffix
mfooti = "MCH-foot"+suffix+".001"
fk = [thigh,shin,foot,mfoot]
ik = [thighi,shini,footi,footroll,pole,mfooti]
ik2fk_leg(rig, fk, ik)
setLocation(pole, rig)
setLocRot(footi, rig)
setRotation(footroll, rig)
setInterpolation(rig)
for suffix in [".L", ".R"]:
if scn.McpFkIkArms:
rig.pose.bones["hand.ik"+suffix]["ikfk_switch"] = 1.0
if scn.McpFkIkLegs:
rig.pose.bones["foot.ik"+suffix]["ikfk_switch"] = 1.0
#-------------------------------------------------------------
# Limbs bend positive
#-------------------------------------------------------------
def limbsBendPositive(rig, doElbows, doKnees, frames):
limbs = {}
if doElbows:
pb = getTrgBone("forearm.L", rig)
minimizeFCurve(pb, rig, 0, frames)
pb = getTrgBone("forearm.R", rig)
minimizeFCurve(pb, rig, 0, frames)
if doKnees:
pb = getTrgBone("shin.L", rig)
minimizeFCurve(pb, rig, 0, frames)
pb = getTrgBone("shin.R", rig)
minimizeFCurve(pb, rig, 0, frames)
def minimizeFCurve(pb, rig, index, frames):
fcu = findBoneFCurve(pb, rig, index)
if fcu is None:
return
y0 = fcu.evaluate(0)
t0 = frames[0]
t1 = frames[-1]
for kp in fcu.keyframe_points:
t = kp.co[0]
if t >= t0 and t <= t1:
y = kp.co[1]
if y < y0:
kp.co[1] = y0
class VIEW3D_OT_McpLimbsBendPositiveButton(bpy.types.Operator):
bl_idname = "mcp.limbs_bend_positive"
bl_label = "Bend Limbs Positive"
bl_description = "Ensure that limbs' X rotation is positive."
bl_options = {'UNDO'}
def execute(self, context):
from .target import getTargetArmature
scn = context.scene
rig = context.object
try:
layers = list(rig.data.layers)
getTargetArmature(rig, scn)
frames = getActiveFramesBetweenMarkers(rig, scn)
limbsBendPositive(rig, scn.McpBendElbows, scn.McpBendKnees, frames)
rig.data.layers = layers
print("Limbs bent positive")
except MocapError:
bpy.ops.mcp.error('INVOKE_DEFAULT')
return{'FINISHED'}
#------------------------------------------------------------------------
# Buttons
#------------------------------------------------------------------------
class VIEW3D_OT_TransferToFkButton(bpy.types.Operator):
bl_idname = "mcp.transfer_to_fk"
bl_label = "Transfer IK => FK"
bl_description = "Transfer IK animation to FK bones"
bl_options = {'UNDO'}
def execute(self, context):
use_global_undo = context.user_preferences.edit.use_global_undo
context.user_preferences.edit.use_global_undo = False
try:
startProgress("Transfer to FK")
rig = context.object
scn = context.scene
if isMhxRig(rig):
transferMhxToFk(rig, scn)
elif isRigify(rig):
transferRigifyToFk(rig, scn)
else:
raise MocapError("Can not transfer to FK with this rig")
endProgress("Transfer to FK completed")
except MocapError:
bpy.ops.mcp.error('INVOKE_DEFAULT')
finally:
context.user_preferences.edit.use_global_undo = use_global_undo
return{'FINISHED'}
class VIEW3D_OT_TransferToIkButton(bpy.types.Operator):
bl_idname = "mcp.transfer_to_ik"
bl_label = "Transfer FK => IK"
bl_description = "Transfer FK animation to IK bones"
bl_options = {'UNDO'}
def execute(self, context):
use_global_undo = context.user_preferences.edit.use_global_undo
context.user_preferences.edit.use_global_undo = False
try:
startProgress("Transfer to IK")
rig = context.object
scn = context.scene
if isMhxRig(rig):
transferMhxToIk(rig, scn)
elif isRigify(rig):
transferRigifyToIk(rig, scn)
else:
raise MocapError("Can not transfer to IK with this rig")
endProgress("Transfer to IK completed")
except MocapError:
bpy.ops.mcp.error('INVOKE_DEFAULT')
finally:
context.user_preferences.edit.use_global_undo = use_global_undo
return{'FINISHED'}
class VIEW3D_OT_ClearAnimationButton(bpy.types.Operator):
bl_idname = "mcp.clear_animation"
bl_label = "Clear Animation"
bl_description = "Clear Animation For FK or IK Bones"
bl_options = {'UNDO'}
type = StringProperty()
def execute(self, context):
use_global_undo = context.user_preferences.edit.use_global_undo
context.user_preferences.edit.use_global_undo = False
try:
startProgress("Clear animation")
rig = context.object
scn = context.scene
if not rig.animation_data:
raise MocapError("Rig has no animation data")
act = rig.animation_data.action
if not act:
raise MocapError("Rig has no action")
if isMhxRig(rig):
clearAnimation(rig, scn, act, self.type, SnapBonesAlpha8)
setMhxIk(rig, scn.McpFkIkArms, scn.McpFkIkLegs, (self.type=="FK"))
elif isRigify(rig):
clearAnimation(rig, scn, act, self.type, SnapBonesRigify)
else:
raise MocapError("Can not clear %s animation with this rig" % self.type)
endProgress("Animation cleared")
except MocapError:
bpy.ops.mcp.error('INVOKE_DEFAULT')
finally:
context.user_preferences.edit.use_global_undo = use_global_undo
return{'FINISHED'}
#------------------------------------------------------------------------
# Debug
#------------------------------------------------------------------------
def printHand(context):
rig = context.object
'''
handFk = rig.pose.bones["hand.fk.L"]
handIk = rig.pose.bones["hand.ik.L"]
print(handFk)
print(handFk.matrix)
print(handIk)
print(handIk.matrix)
'''
footIk = rig.pose.bones["foot.ik.L"]
print(footIk)
print(footIk.matrix)
class VIEW3D_OT_PrintHandsButton(bpy.types.Operator):
bl_idname = "mcp.print_hands"
bl_label = "Print Hands"
bl_options = {'UNDO'}
def execute(self, context):
printHand(context)
return{'FINISHED'}
| gpl-3.0 | -1,981,514,103,589,804,800 | 30.682119 | 142 | 0.580644 | false | 2.885752 | false | false | false |
FND/impact | impactlib/install.py | 1 | 5369 | import zipfile
import StringIO
import tempfile
import shutil
import os
from impactlib.load import load_repo_data
from impactlib.refresh import strip_extra
from impactlib.github import GitHub
from impactlib.semver import SemanticVersion
from impactlib import config
try:
import colorama
from colorama import Fore, Back, Style
colorama.init()
use_color = True
except:
use_color = False
def get_package(pkg):
repo_data = load_repo_data()
if not pkg in repo_data:
msg = "No package named '"+pkg+"' found"
if use_color:
print Fore.RED+msg
else:
print msg
return None
return repo_data[pkg]
def latest_version(versions):
if len(versions)==0:
return None
keys = versions.keys()
svs = map(lambda x: (SemanticVersion(x, tolerant=True), x), keys)
sorted_versions = sorted(svs, cmp=lambda x, y: x[0]>y[0])
print "sorted_versions = "+str(sorted_versions)
return sorted_versions[0][1]
def install_version(pkg, version, github, dryrun, verbose):
repo_data = load_repo_data()
pdata = get_package(pkg)
if pdata==None:
return
versions = pdata["versions"]
vdata = None
for ver in versions:
if ver==version:
vdata = versions[ver]
if vdata==None:
msg = "No version '"+str(version)+"' found for package '"+str(pkg)+"'"
if use_color:
print Fore.RED+msg
else:
print msg
return
zipurl = vdata["zipball_url"]
vpath = vdata["path"]
if verbose:
print " URL: "+zipurl
if not dryrun:
zfp = StringIO.StringIO(github.getDownload(zipurl).read())
zf = zipfile.ZipFile(zfp)
root = zf.infolist()[0].filename
dst = os.path.join(".", str(pkg)+" "+str(strip_extra(version)))
if os.path.exists(dst):
print " Directory "+dst+" already exists, skipping"
else:
td = tempfile.mkdtemp()
zf.extractall(td)
src = os.path.join(td, root, vpath)
if verbose:
print " Root zip directory: "+root
print " Temp directory: "+str(td)
print " Version path: "+str(vpath)
print " Source: "+str(src)
print " Destination: "+str(dst)
shutil.copytree(src,dst)
shutil.rmtree(td)
def elaborate_dependencies(pkgname, version, current):
repo_data = load_repo_data()
if not pkgname in repo_data:
print " No information for package "+pkgname+", skipping"
return current
if not version in repo_data[pkgname]["versions"]:
print " No version "+version+" of package "+pkgname+" found, skipping"
return current
ret = current.copy()
ret[pkgname] = version
vdata = repo_data[pkgname]["versions"][version]
deps = vdata["dependencies"]
for dep in deps:
dname = dep["name"]
dver = dep["version"]
if dname in ret:
if dver==ret[dname]:
# This could avoid circular dependencies?
continue
else:
raise NameError("Dependency on version %s and %s of %s" % \
(ret[dname], dver, dname))
subs = elaborate_dependencies(dname, dver, ret)
for sub in subs:
if sub in ret:
if subs[sub]==ret[sub]:
continue
else:
raise NameError("Dependency on version %s and %s of %s" % \
(sub[sub], ret[sub], sub))
ret[sub] = subs[sub]
return ret
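# Illustrative usage (added; the package names below are hypothetical):
#
#     versions = elaborate_dependencies("SomeLibrary", "1.0.0", current={})
#     # -> {"SomeLibrary": "1.0.0", "SomeDependency": "2.1.0", ...}
#
# i.e. the transitive dependency tree is flattened into one
# {package_name: version} dict, and a NameError is raised if two different
# versions of the same package are required.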
def install(pkgname, verbose, dry_run):
username = config.get("Impact", "username", None)
password = config.get("Impact", "password", None)
token = config.get("Impact", "token", None)
if "#" in pkgname:
pkg_data = pkgname.split("#")
else:
pkg_data = pkgname.split(" ")
if len(pkg_data)==1:
pkg = pkg_data[0]
version = None
elif len(pkg_data)==2:
pkg = pkg_data[0]
version = pkg_data[1]
else:
raise ValueError("Package name must be of the form name[#version]")
pdata = get_package(pkg)
if pdata==None:
return
version = version
if version==None:
version = latest_version(pdata["versions"])
if verbose:
print " Choosing latest version: "+version
if version==None:
msg = "No (semantic) versions found for package '"+pkg+"'"
if use_color:
print Fore.RED+msg
else:
print msg
return
msg = "Installing version '"+version+"' of package '"+pkg+"'"
if use_color:
print Fore.GREEN+msg
else:
print msg
# Setup connection to github
github = GitHub(username=username, password=password,
token=token)
pkgversions = elaborate_dependencies(pkg, version, current={})
if verbose:
print "Libraries to install:"
for pkgname in pkgversions:
print " "+pkgname+" version "+pkgversions[pkgname]
print "Installation..."
for pkgname in pkgversions:
install_version(pkgname, pkgversions[pkgname], github,
dryrun=dry_run, verbose=verbose)
| mit | 2,942,958,143,946,274,000 | 29.505682 | 79 | 0.562302 | false | 3.947794 | false | false | false |
lightscaletech/pulseaudio-streamer | pulseaudio_streamer/device.py | 1 | 2077 | import sys
import socket
import errno
import re
import xmltodict
import logging
if sys.version_info >= (3, 0):
from urllib import request
elif sys.version_info < (3, 0):
import urllib as request
class Service(object):
def __init__(self, data):
self.service_type = data['serviceType']
self.service_id = data['serviceId']
self.scpd_url = data['SCPDURL']
self.control_url = data['controlURL']
self.event_url = data['eventSubURL']
class Device(object):
def __init__(self, url, data):
doc = xmltodict.parse(data)
device = doc['root']['device']
service = device['serviceList']['service']
try:
self.friendly_name = device['friendlyName']
self.manufacturer = device['manufacturer']
self.model_name = device['modelName']
self.model_description = device['modelDescription']
except: pass
self.url_base = url
self.services = []
if(type(service) is list):
for s in service: self.services.append(Service(s))
else: self.services.append(Service(service))
def get_service(self, type):
for s in self.services:
if(s.service_type == type): return s
return None
def has_service(self, type):
if(self.get_service(type) == None): return False
else: return True
def get_base_url(path):
try:
        m = re.match(r'https?://([a-zA-Z0-9.:]+)/', path)
return m.group(1)
except:
return None
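# Examples (added):
#     get_base_url('http://192.168.1.20:49152/rootDesc.xml') -> '192.168.1.20:49152'
#     get_base_url('not a url') -> None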
def get_device(res):
url = res.location
baseurl = get_base_url(url)
if not baseurl: return None
try:
con = request.urlopen(url)
return Device(baseurl, con.read())
except OSError as err: pass
def get_devices(resources):
result = []
for r in resources:
dev = get_device(r)
if dev: result.append(dev)
return result
def filter_devices_by_service_type(devices, type):
result = []
for d in devices:
if(d.has_service(type)): result.append(d)
return result
| gpl-3.0 | 7,192,833,278,839,230,000 | 24.9625 | 63 | 0.597015 | false | 3.702317 | false | false | false |
kodi-czsk/plugin.video.joj.sk | default.py | 2 | 1991 | # -*- coding: UTF-8 -*-
#/*
# * Copyright (C) 2013 Maros Ondrasek
# *
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# *
# */
import os
sys.path.append( os.path.join ( os.path.dirname(__file__),'resources','lib') )
import joj
import xbmcprovider,xbmcaddon,xbmcutil,xbmc
import util
import traceback,urllib2
__scriptid__ = 'plugin.video.joj.sk'
__scriptname__ = 'joj.sk'
__addon__ = xbmcaddon.Addon(id=__scriptid__)
__language__ = __addon__.getLocalizedString
settings = {'downloads':__addon__.getSetting('downloads'),'quality':__addon__.getSetting('quality')}
params = util.params()
if params=={}:
xbmcutil.init_usage_reporting(__scriptid__)
provider = joj.JojContentProvider()
class XBMCJojContentProvider(xbmcprovider.XBMCMultiResolverContentProvider):
def render_default(self, item):
if item['type'] == 'showoff':
item['title'] = item['title'] + ' [B](Nevys)[/B]'
elif item['type'] == "showon7d":
item['title'] = item['title'] + ' [B][COLOR red](7d)[/COLOR][/B]'
if item['type'] == 'topvideo' or item['type'] == 'newvideo':
self.render_video(item)
else:
self.render_dir(item)
XBMCJojContentProvider(provider,settings,__addon__).run(params)
| gpl-2.0 | -8,803,822,367,505,026,000 | 37.288462 | 100 | 0.657459 | false | 3.391823 | false | false | false |
mrunge/openstack_horizon | openstack_horizon/dashboards/admin/dashboard.py | 1 | 1166 | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon_lib
class SystemPanels(horizon_lib.PanelGroup):
slug = "admin"
name = _("System")
panels = ('overview', 'metering', 'hypervisors', 'aggregates',
'instances', 'volumes', 'flavors', 'images',
'networks', 'routers', 'defaults', 'info')
class Admin(horizon_lib.Dashboard):
name = _("Admin")
slug = "admin"
panels = (SystemPanels,)
default_panel = 'overview'
permissions = ('openstack.roles.admin',)
horizon_lib.register(Admin)
| apache-2.0 | -5,459,148,279,635,116,000 | 31.388889 | 78 | 0.680103 | false | 3.912752 | false | false | false |
QuantCrimAtLeeds/PredictCode | tests/scripted/processors_test.py | 1 | 2017 | import pytest
from unittest.mock import patch
import open_cp.scripted.processors as processors
import open_cp.scripted.evaluators as evaluators
from .. import helpers
import io
@pytest.fixture
def outfile():
with io.StringIO() as f:
yield f
@pytest.fixture
def hit_rate_save(outfile):
hrs = processors.HitRateSave(outfile, [10,15,20,100])
hrs.init()
return hrs
def test_HitRateSave_header(hit_rate_save, outfile):
hit_rate_save.done()
assert outfile.getvalue().strip() == "Predictor,Start time,End time,10%,15%,20%,100%"
def test_HitRateSave_header_filename():
capture = helpers.StrIOWrapper()
with patch("builtins.open", helpers.MockOpen(capture)):
hrs = processors.HitRateSave("out.csv", [10, 20])
hrs.init()
hrs.done()
assert capture.data.strip() == "Predictor,Start time,End time,10%,20%"
def test_HitRateSave(hit_rate_save, outfile):
hit_rate_save.process("predname", evaluators.HitRateEvaluator(), [{10:12, 15:20, 20:100, 100:100}], [("absa", "ahjsdjh")])
hit_rate_save.process("dave", 6, None, None)
hit_rate_save.done()
rows = [x.strip() for x in outfile.getvalue().split("\n")]
assert rows[0] == "Predictor,Start time,End time,10%,15%,20%,100%"
assert rows[1] == "predname,absa,ahjsdjh,12,20,100,100"
@pytest.fixture
def hit_count_save(outfile):
hcs = processors.HitCountSave(outfile, [10,15,20,100])
hcs.init()
return hcs
def test_HitCountSave_header(hit_count_save, outfile):
hit_count_save.done()
assert outfile.getvalue().strip() == "Predictor,Start time,End time,Number events,10%,15%,20%,100%"
def test_HitCountSave(hit_count_save, outfile):
hit_count_save.process("pn", evaluators.HitCountEvaluator(), [{10:(5,12), 15:(6,12), 20:(8,12), 100:(12,12)}], [("absa", "ahjsdjh")])
hit_count_save.process("dave", 6, None, None)
hit_count_save.done()
rows = [x.strip() for x in outfile.getvalue().split("\n")]
assert rows[1] == "pn,absa,ahjsdjh,12,5,6,8,12"
| artistic-2.0 | -8,576,715,014,576,377,000 | 34.385965 | 137 | 0.667328 | false | 2.931686 | true | false | false |
marcore/edx-platform | lms/djangoapps/courseware/features/events.py | 177 | 2247 | # pylint: disable=missing-docstring
from lettuce import step
from lettuce import world
from lettuce import before
from pymongo import MongoClient
from nose.tools import assert_equals
from nose.tools import assert_in
REQUIRED_EVENT_FIELDS = [
'agent',
'event',
'event_source',
'event_type',
'host',
'ip',
'page',
'time',
'username'
]
@before.all
def connect_to_mongodb():
world.mongo_client = MongoClient()
world.event_collection = world.mongo_client['track']['events']
@before.each_scenario
def reset_captured_events(_scenario):
world.event_collection.drop()
@before.outline
def reset_between_outline_scenarios(_scenario, order, outline, reasons_to_fail):
world.event_collection.drop()
@step(r'[aA]n? course url "(.*)" event is emitted$')
def course_url_event_is_emitted(_step, url_regex):
event_type = url_regex.format(world.scenario_dict['COURSE'].id)
n_events_are_emitted(_step, 1, event_type, "server")
@step(r'([aA]n?|\d+) "(.*)" (server|browser) events? is emitted$')
def n_events_are_emitted(_step, count, event_type, event_source):
# Ensure all events are written out to mongo before querying.
world.mongo_client.fsync()
# Note that splinter makes 2 requests when you call browser.visit('/foo')
# the first just checks to see if the server responds with a status
# code of 200, the next actually uses the browser to submit the request.
# We filter out events associated with the status code checks by ignoring
# events that come directly from splinter.
criteria = {
'event_type': event_type,
'event_source': event_source,
'agent': {
'$ne': 'python/splinter'
}
}
cursor = world.event_collection.find(criteria)
try:
number_events = int(count)
except ValueError:
number_events = 1
assert_equals(cursor.count(), number_events)
event = cursor.next()
expected_field_values = {
"username": world.scenario_dict['USER'].username,
"event_type": event_type,
}
for key, value in expected_field_values.iteritems():
assert_equals(event[key], value)
for field in REQUIRED_EVENT_FIELDS:
assert_in(field, event)
| agpl-3.0 | -2,871,520,452,668,130,300 | 26.072289 | 80 | 0.663996 | false | 3.630048 | false | false | false |
jamielennox/keystone | keystone/tests/unit/contrib/federation/test_utils.py | 3 | 25865 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.auth.plugins import mapped
from keystone.contrib.federation import utils as mapping_utils
from keystone import exception
from keystone.tests import unit
from keystone.tests.unit import mapping_fixtures
class MappingRuleEngineTests(unit.BaseTestCase):
"""A class for testing the mapping rule engine."""
def assertValidMappedUserObject(self, mapped_properties,
user_type='ephemeral',
domain_id=None):
"""Check whether mapped properties object has 'user' within.
        According to today's rules, RuleProcessor does not have to issue the
        user's id or name. What is actually required is the user's type and,
        for ephemeral users, the service domain named 'Federated'.
"""
self.assertIn('user', mapped_properties,
message='Missing user object in mapped properties')
user = mapped_properties['user']
self.assertIn('type', user)
self.assertEqual(user_type, user['type'])
self.assertIn('domain', user)
domain = user['domain']
domain_name_or_id = domain.get('id') or domain.get('name')
domain_ref = domain_id or 'Federated'
self.assertEqual(domain_ref, domain_name_or_id)
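    # For reference (added): a minimal mapped_properties object accepted by the
    # assertions above looks like
    #     {'user': {'type': 'ephemeral', 'domain': {'id': 'Federated'}}}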
def test_rule_engine_any_one_of_and_direct_mapping(self):
"""Should return user's name and group id EMPLOYEE_GROUP_ID.
The ADMIN_ASSERTION should successfully have a match in MAPPING_LARGE.
        It tests the case where `any_one_of` is valid, and there is
        a direct mapping for the user's name.
"""
mapping = mapping_fixtures.MAPPING_LARGE
assertion = mapping_fixtures.ADMIN_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
values = rp.process(assertion)
fn = assertion.get('FirstName')
ln = assertion.get('LastName')
full_name = '%s %s' % (fn, ln)
group_ids = values.get('group_ids')
user_name = values.get('user', {}).get('name')
self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
self.assertEqual(full_name, user_name)
def test_rule_engine_no_regex_match(self):
"""Should deny authorization, the email of the tester won't match.
This will not match since the email in the assertion will fail
the regex test. It is set to match any @example.com address.
But the incoming value is set to [email protected].
RuleProcessor should return list of empty group_ids.
"""
mapping = mapping_fixtures.MAPPING_LARGE
assertion = mapping_fixtures.BAD_TESTER_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertValidMappedUserObject(mapped_properties)
self.assertIsNone(mapped_properties['user'].get('name'))
self.assertListEqual(list(), mapped_properties['group_ids'])
def test_rule_engine_regex_many_groups(self):
"""Should return group CONTRACTOR_GROUP_ID.
The TESTER_ASSERTION should successfully have a match in
MAPPING_TESTER_REGEX. This will test the case where many groups
are in the assertion, and a regex value is used to try and find
a match.
"""
mapping = mapping_fixtures.MAPPING_TESTER_REGEX
assertion = mapping_fixtures.TESTER_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
user_name = assertion.get('UserName')
group_ids = values.get('group_ids')
name = values.get('user', {}).get('name')
self.assertEqual(user_name, name)
self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
def test_rule_engine_any_one_of_many_rules(self):
"""Should return group CONTRACTOR_GROUP_ID.
The CONTRACTOR_ASSERTION should successfully have a match in
MAPPING_SMALL. This will test the case where many rules
must be matched, including an `any_one_of`, and a direct
mapping.
"""
mapping = mapping_fixtures.MAPPING_SMALL
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
user_name = assertion.get('UserName')
group_ids = values.get('group_ids')
name = values.get('user', {}).get('name')
self.assertEqual(user_name, name)
self.assertIn(mapping_fixtures.CONTRACTOR_GROUP_ID, group_ids)
def test_rule_engine_not_any_of_and_direct_mapping(self):
"""Should return user's name and email.
The CUSTOMER_ASSERTION should successfully have a match in
MAPPING_LARGE. This will test the case where a requirement
has `not_any_of`, and direct mapping to a username, no group.
"""
mapping = mapping_fixtures.MAPPING_LARGE
assertion = mapping_fixtures.CUSTOMER_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
user_name = assertion.get('UserName')
group_ids = values.get('group_ids')
name = values.get('user', {}).get('name')
self.assertEqual(user_name, name)
self.assertEqual([], group_ids,)
def test_rule_engine_not_any_of_many_rules(self):
"""Should return group EMPLOYEE_GROUP_ID.
The EMPLOYEE_ASSERTION should successfully have a match in
MAPPING_SMALL. This will test the case where many remote
rules must be matched, including a `not_any_of`.
"""
mapping = mapping_fixtures.MAPPING_SMALL
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
user_name = assertion.get('UserName')
group_ids = values.get('group_ids')
name = values.get('user', {}).get('name')
self.assertEqual(user_name, name)
self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
def test_rule_engine_not_any_of_regex_verify_pass(self):
"""Should return group DEVELOPER_GROUP_ID.
The DEVELOPER_ASSERTION should successfully have a match in
MAPPING_DEVELOPER_REGEX. This will test the case where many
remote rules must be matched, including a `not_any_of`, with
regex set to True.
"""
mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
assertion = mapping_fixtures.DEVELOPER_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
user_name = assertion.get('UserName')
group_ids = values.get('group_ids')
name = values.get('user', {}).get('name')
self.assertEqual(user_name, name)
self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
def test_rule_engine_not_any_of_regex_verify_fail(self):
"""Should deny authorization.
The email in the assertion will fail the regex test.
It is set to reject any @example.org address, but the
incoming value is set to [email protected].
RuleProcessor should return list of empty group_ids.
"""
mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertValidMappedUserObject(mapped_properties)
self.assertIsNone(mapped_properties['user'].get('name'))
self.assertListEqual(list(), mapped_properties['group_ids'])
def _rule_engine_regex_match_and_many_groups(self, assertion):
"""Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
        A helper function that runs the mapping on the assertion passed as an
        argument.
Expect DEVELOPER_GROUP_ID and TESTER_GROUP_ID in the results.
"""
mapping = mapping_fixtures.MAPPING_LARGE
rp = mapping_utils.RuleProcessor(mapping['rules'])
values = rp.process(assertion)
user_name = assertion.get('UserName')
group_ids = values.get('group_ids')
name = values.get('user', {}).get('name')
self.assertValidMappedUserObject(values)
self.assertEqual(user_name, name)
self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
def test_rule_engine_regex_match_and_many_groups(self):
"""Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
The TESTER_ASSERTION should successfully have a match in
MAPPING_LARGE. This will test a successful regex match
for an `any_one_of` evaluation type, and will have many
groups returned.
"""
self._rule_engine_regex_match_and_many_groups(
mapping_fixtures.TESTER_ASSERTION)
def test_rule_engine_discards_nonstring_objects(self):
"""Check whether RuleProcessor discards non string objects.
Despite the fact that assertion is malformed and contains
non string objects, RuleProcessor should correctly discard them and
successfully have a match in MAPPING_LARGE.
"""
self._rule_engine_regex_match_and_many_groups(
mapping_fixtures.MALFORMED_TESTER_ASSERTION)
def test_rule_engine_fails_after_discarding_nonstring(self):
"""Check whether RuleProcessor discards non string objects.
Expect RuleProcessor to discard non string object, which
is required for a correct rule match. RuleProcessor will result with
empty list of groups.
"""
mapping = mapping_fixtures.MAPPING_SMALL
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION
mapped_properties = rp.process(assertion)
self.assertValidMappedUserObject(mapped_properties)
self.assertIsNone(mapped_properties['user'].get('name'))
self.assertListEqual(list(), mapped_properties['group_ids'])
def test_rule_engine_returns_group_names(self):
"""Check whether RuleProcessor returns group names with their domains.
RuleProcessor should return 'group_names' entry with a list of
dictionaries with two entries 'name' and 'domain' identifying group by
its name and domain.
"""
mapping = mapping_fixtures.MAPPING_GROUP_NAMES
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
reference = {
mapping_fixtures.DEVELOPER_GROUP_NAME:
{
"name": mapping_fixtures.DEVELOPER_GROUP_NAME,
"domain": {
"name": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_NAME
}
},
mapping_fixtures.TESTER_GROUP_NAME:
{
"name": mapping_fixtures.TESTER_GROUP_NAME,
"domain": {
"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
}
}
}
for rule in mapped_properties['group_names']:
self.assertDictEqual(reference.get(rule.get('name')), rule)
def test_rule_engine_whitelist_and_direct_groups_mapping(self):
"""Should return user's groups Developer and Contractor.
The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
in MAPPING_GROUPS_WHITELIST. It will test the case where 'whitelist'
correctly filters out Manager and only allows Developer and Contractor.
"""
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
rp = mapping_utils.RuleProcessor(mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
reference = {
mapping_fixtures.DEVELOPER_GROUP_NAME:
{
"name": mapping_fixtures.DEVELOPER_GROUP_NAME,
"domain": {
"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
}
},
mapping_fixtures.CONTRACTOR_GROUP_NAME:
{
"name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
"domain": {
"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
}
}
}
for rule in mapped_properties['group_names']:
self.assertDictEqual(reference.get(rule.get('name')), rule)
self.assertEqual('tbo', mapped_properties['user']['name'])
self.assertEqual([], mapped_properties['group_ids'])
def test_rule_engine_blacklist_and_direct_groups_mapping(self):
"""Should return user's group Developer.
The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
in MAPPING_GROUPS_BLACKLIST. It will test the case where 'blacklist'
correctly filters out Manager and Developer and only allows Contractor.
"""
mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
rp = mapping_utils.RuleProcessor(mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
reference = {
mapping_fixtures.CONTRACTOR_GROUP_NAME:
{
"name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
"domain": {
"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
}
}
}
for rule in mapped_properties['group_names']:
self.assertDictEqual(reference.get(rule.get('name')), rule)
self.assertEqual('tbo', mapped_properties['user']['name'])
self.assertEqual([], mapped_properties['group_ids'])
def test_rule_engine_blacklist_and_direct_groups_mapping_multiples(self):
"""Tests matching multiple values before the blacklist.
Verifies that the local indexes are correct when matching multiple
remote values for a field when the field occurs before the blacklist
entry in the remote rules.
"""
mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MULTIPLES
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
rp = mapping_utils.RuleProcessor(mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
reference = {
mapping_fixtures.CONTRACTOR_GROUP_NAME:
{
"name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
"domain": {
"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
}
}
}
for rule in mapped_properties['group_names']:
self.assertDictEqual(reference.get(rule.get('name')), rule)
self.assertEqual('tbo', mapped_properties['user']['name'])
self.assertEqual([], mapped_properties['group_ids'])
def test_rule_engine_whitelist_direct_group_mapping_missing_domain(self):
"""Test if the local rule is rejected upon missing domain value
This is a variation with a ``whitelist`` filter.
"""
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
rp = mapping_utils.RuleProcessor(mapping['rules'])
self.assertRaises(exception.ValidationError, rp.process, assertion)
def test_rule_engine_blacklist_direct_group_mapping_missing_domain(self):
"""Test if the local rule is rejected upon missing domain value
This is a variation with a ``blacklist`` filter.
"""
mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
rp = mapping_utils.RuleProcessor(mapping['rules'])
self.assertRaises(exception.ValidationError, rp.process, assertion)
def test_rule_engine_no_groups_allowed(self):
"""Should return user mapped to no groups.
The EMPLOYEE_ASSERTION should successfully have a match
in MAPPING_GROUPS_WHITELIST, but 'whitelist' should filter out
the group values from the assertion and thus map to no groups.
"""
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
rp = mapping_utils.RuleProcessor(mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertListEqual(mapped_properties['group_names'], [])
self.assertListEqual(mapped_properties['group_ids'], [])
self.assertEqual('tbo', mapped_properties['user']['name'])
def test_mapping_federated_domain_specified(self):
"""Test mapping engine when domain 'ephemeral' is explicitely set.
For that, we use mapping rule MAPPING_EPHEMERAL_USER and assertion
EMPLOYEE_ASSERTION
"""
mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
def test_create_user_object_with_bad_mapping(self):
"""Test if user object is created even with bad mapping.
        User objects will always be created by the mapping engine as long as
        there is a corresponding local rule. This test shows that even with an
        assertion where no group names or ids are matched, but a 'blind' rule
        for mapping the user exists, such an object will be created.
        In this test MAPPING_EPHEMERAL_USER expects UserName set to jsmith,
        whereas the value from the assertion is 'tbo'.
"""
mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
self.assertNotIn('id', mapped_properties['user'])
self.assertNotIn('name', mapped_properties['user'])
def test_set_ephemeral_domain_to_ephemeral_users(self):
"""Test auto assigning service domain to ephemeral users.
        Test that ephemeral users will always become members of the federated
        service domain. The check depends on the ``type`` value, which must be
        set to ``ephemeral`` for an ephemeral user.
"""
mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
def test_local_user_local_domain(self):
"""Test that local users can have non-service domains assigned."""
mapping = mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(
mapped_properties, user_type='local',
domain_id=mapping_fixtures.LOCAL_DOMAIN)
def test_user_identifications_name(self):
"""Test varius mapping options and how users are identified.
This test calls mapped.setup_username() for propagating user object.
Test plan:
- Check if the user has proper domain ('federated') set
- Check if the user has property type set ('ephemeral')
- Check if user's name is properly mapped from the assertion
- Check if user's id is properly set and equal to name, as it was not
explicitely specified in the mapping.
"""
mapping = mapping_fixtures.MAPPING_USER_IDS
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
mapped.setup_username({}, mapped_properties)
self.assertEqual('jsmith', mapped_properties['user']['id'])
self.assertEqual('jsmith', mapped_properties['user']['name'])
def test_user_identifications_name_and_federated_domain(self):
"""Test varius mapping options and how users are identified.
This test calls mapped.setup_username() for propagating user object.
Test plan:
- Check if the user has proper domain ('federated') set
- Check if the user has propert type set ('ephemeral')
- Check if user's name is properly mapped from the assertion
- Check if user's id is properly set and equal to name, as it was not
explicitely specified in the mapping.
"""
mapping = mapping_fixtures.MAPPING_USER_IDS
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
mapped.setup_username({}, mapped_properties)
self.assertEqual('tbo', mapped_properties['user']['name'])
self.assertEqual('abc123%40example.com',
mapped_properties['user']['id'])
def test_user_identification_id(self):
"""Test varius mapping options and how users are identified.
This test calls mapped.setup_username() for propagating user object.
Test plan:
- Check if the user has proper domain ('federated') set
- Check if the user has propert type set ('ephemeral')
- Check if user's id is properly mapped from the assertion
- Check if user's name is properly set and equal to id, as it was not
explicitely specified in the mapping.
"""
mapping = mapping_fixtures.MAPPING_USER_IDS
rp = mapping_utils.RuleProcessor(mapping['rules'])
assertion = mapping_fixtures.ADMIN_ASSERTION
mapped_properties = rp.process(assertion)
context = {'environment': {}}
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
mapped.setup_username(context, mapped_properties)
self.assertEqual('bob', mapped_properties['user']['name'])
self.assertEqual('bob', mapped_properties['user']['id'])
def test_user_identification_id_and_name(self):
"""Test varius mapping options and how users are identified.
This test calls mapped.setup_username() for propagating user object.
Test plan:
- Check if the user has proper domain ('federated') set
- Check if the user has proper type set ('ephemeral')
- Check if user's name is properly mapped from the assertion
- Check if user's id is properly set and and equal to value hardcoded
in the mapping
This test does two iterations with different assertions used as input
for the Mapping Engine. Different assertions will be matched with
different rules in the ruleset, effectively issuing different user_id
(hardcoded values). In the first iteration, the hardcoded user_id is
not url-safe and we expect Keystone to make it url safe. In the latter
iteration, provided user_id is already url-safe and we expect server
not to change it.
"""
testcases = [(mapping_fixtures.CUSTOMER_ASSERTION, 'bwilliams'),
(mapping_fixtures.EMPLOYEE_ASSERTION, 'tbo')]
for assertion, exp_user_name in testcases:
mapping = mapping_fixtures.MAPPING_USER_IDS
rp = mapping_utils.RuleProcessor(mapping['rules'])
mapped_properties = rp.process(assertion)
context = {'environment': {}}
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
mapped.setup_username(context, mapped_properties)
self.assertEqual(exp_user_name, mapped_properties['user']['name'])
self.assertEqual('abc123%40example.com',
mapped_properties['user']['id'])
| apache-2.0 | 1,597,877,283,526,985,000 | 41.332242 | 79 | 0.658264 | false | 4.331045 | true | false | false |
Muff1nz/PygameDeepRLAgent | PygameDeepRLAgent/A3CBootcampGame/ShootingGrounds/ShootingGrounds.py | 1 | 2896 | import asyncio
import numpy as np
import pygame
from A3CBootcampGame.BaseGame import BaseGame
from A3CBootcampGame.ShootingGrounds.Targets import TargetHandler
from A3CBootcampGame.ShootingGrounds.GameHandler import GameHandler
from A3CBootcampGame.ShootingGrounds.Player import Player
from A3CBootcampGame.ShootingGrounds.world import World
from A3CBootcampGame.physics import physicsHandler
WHITE = 255, 255, 255
# Class for the shooting grounds level in A3CBootCamp
class ShootingGrounds(BaseGame):
def __init__(self, settings, gameDataQueue, playerActionQueue):
BaseGame.__init__(self, settings, gameDataQueue, playerActionQueue)
def initGame(self): # This function was created to do init in the run function when this class was a process
self.baseInit()
self.world = World(self.settings)
self.player = Player(self.settings, "./Assets/Player.png")
self.targetHandler = TargetHandler(self.settings, self.player)
self.gameHandler = GameHandler(self.player, self.targetHandler)
collisionGroups = 2
boxes = []
self.player.collisionGroup = 0
boxes.append(self.player)
for bullet in self.player.ws.bullets:
bullet.collisionGroup = 0
boxes.append(bullet)
self.targetHandler.target.collisionGroup = 1
boxes.append(self.targetHandler.target)
self.physics = physicsHandler(self.world.walls, boxes, collisionGroups, self.settings)
async def run(self):
while True:
if not self.episodeInProgress:
self.endEpisode()
# Render stuff
self.gameScreen.fill(WHITE)
self.world.draw(self.gameScreen)
self.targetHandler.draw(self.gameScreen)
self.player.draw(self.gameScreen)
if self.window:
self.drawWindow()
if not self.gameCounter % self.settings.deepRLRate:
self.sendFrameToWorker()
while self.playerActionQueue.empty(): # Yield to let other games run, to prevent blocking on the queue
await asyncio.sleep(0.005)
self.getActionFromWorker()
# Update stuff
self.player.update(self.playerAction, self.timeStep)
self.physics.update(self.timeStep)
self.targetHandler.update(self.gameCounter)
self.episodeInProgress = self.gameHandler.update(self.physics.events,
self.gameCounter,
self.episodeData,
self.bootStrapCounter,
self.settings.bootStrapCutOff)
if self.window:
self.handleWindow()
self.gameCounter += 1 | mit | 5,469,149,061,274,142,000 | 41.602941 | 119 | 0.61913 | false | 4.368024 | false | false | false |
mattgwwalker/msg-extractor | extract_msg/constants.py | 1 | 22395 | """
The constants used in extract_msg. If you modify any of these
without explicit instruction to do so from one of the
contributors, please do not complain about bugs.
"""
import datetime
import struct
import sys
if sys.version_info[0] >= 3:
BYTES = bytes
STRING = str
else:
BYTES = str
STRING = unicode
# DEFINE CONSTANTS
# WARNING DO NOT CHANGE ANY OF THESE VALUES UNLESS YOU KNOW
# WHAT YOU ARE DOING! FAILURE TO FOLLOW THIS INSTRUCTION
# CAN AND WILL BREAK THIS SCRIPT!
# Constants used by named.py
NUMERICAL_NAMED = 0
STRING_NAMED = 1
GUID_PS_MAPI = '{00020328-0000-0000-C000-000000000046}'
GUID_PS_PUBLIC_STRINGS = '{00020329-0000-0000-C000-000000000046}'
GUID_PSETID_COMMON = '{00062008-0000-0000-C000-000000000046}'
GUID_PSETID_ADDRESS = '{00062004-0000-0000-C000-000000000046}'
GUID_PS_INTERNET_HEADERS = '{00020386-0000-0000-C000-000000000046}'
GUID_PSETID_APPOINTMENT = '{00062002-0000-0000-C000-000000000046}'
GUID_PSETID_MEETING = '{6ED8DA90-450B-101B-98DA-00AA003F1305}'
GUID_PSETID_LOG = '{0006200A-0000-0000-C000-000000000046}'
GUID_PSETID_MESSAGING = '{41F28F13-83F4-4114-A584-EEDB5A6B0BFF}'
GUID_PSETID_NOTE = '{0006200E-0000-0000-C000-000000000046}'
GUID_PSETID_POSTRSS = '{00062041-0000-0000-C000-000000000046}'
GUID_PSETID_TASK = '{00062003-0000-0000-C000-000000000046}'
GUID_PSETID_UNIFIEDMESSAGING = '{4442858E-A9E3-4E80-B900-317A210CC15B}'
GUID_PSETID_AIRSYNC = '{71035549-0739-4DCB-9163-00F0580DBBDF}'
GUID_PSETID_SHARING = '{00062040-0000-0000-C000-000000000046}'
GUID_PSETID_XMLEXTRACTEDENTITIES = '{23239608-685D-4732-9C55-4C95CB4E8E33}'
GUID_PSETID_ATTACHMENT = '{96357F7F-59E1-47D0-99A7-46515C183B54}'
FIXED_LENGTH_PROPS = (
0x0000,
0x0001,
0x0002,
0x0003,
0x0004,
0x0005,
0x0006,
0x0007,
0x000A,
0x000B,
0x0014,
0x0040,
0x0048,
)
FIXED_LENGTH_PROPS_STRING = (
'0000',
'0001',
'0002',
'0003',
'0004',
'0005',
'0006',
'0007',
'000A',
'000B',
'0014',
'0040',
'0048',
)
VARIABLE_LENGTH_PROPS = (
0x000D,
0x001E,
0x001F,
0x00FB,
0x00FD,
0x00FE,
0X0102,
0x1002,
0x1003,
0x1004,
0x1005,
0x1006,
0x1007,
0x1014,
0x101E,
0x101F,
0x1040,
0x1048,
0x1102,
)
VARIABLE_LENGTH_PROPS_STRING = (
'000D',
'001E',
'001F',
'00FB',
'00FD',
'00FE',
'0102',
'1002',
'1003',
'1004',
'1005',
'1006',
'1007',
'1014',
'101E',
'101F',
'1040',
'1048',
'1102',
)
CODE_PAGES = {
37: 'IBM037', # IBM EBCDIC US-Canada
437: 'IBM437', # OEM United States
500: 'IBM500', # IBM EBCDIC International
708: 'ASMO-708', # Arabic (ASMO 708)
709: '', # Arabic (ASMO-449+, BCON V4)
710: '', # Arabic - Transparent Arabic
720: 'DOS-720', # Arabic (Transparent ASMO); Arabic (DOS)
737: 'ibm737', # OEM Greek (formerly 437G); Greek (DOS)
775: 'ibm775', # OEM Baltic; Baltic (DOS)
850: 'ibm850', # OEM Multilingual Latin 1; Western European (DOS)
852: 'ibm852', # OEM Latin 2; Central European (DOS)
855: 'IBM855', # OEM Cyrillic (primarily Russian)
857: 'ibm857', # OEM Turkish; Turkish (DOS)
858: 'IBM00858', # OEM Multilingual Latin 1 + Euro symbol
860: 'IBM860', # OEM Portuguese; Portuguese (DOS)
861: 'ibm861', # OEM Icelandic; Icelandic (DOS)
862: 'DOS-862', # OEM Hebrew; Hebrew (DOS)
863: 'IBM863', # OEM French Canadian; French Canadian (DOS)
864: 'IBM864', # OEM Arabic; Arabic (864)
865: 'IBM865', # OEM Nordic; Nordic (DOS)
866: 'cp866', # OEM Russian; Cyrillic (DOS)
869: 'ibm869', # OEM Modern Greek; Greek, Modern (DOS)
870: 'IBM870', # IBM EBCDIC Multilingual/ROECE (Latin 2); IBM EBCDIC Multilingual Latin 2
874: 'windows-874', # ANSI/OEM Thai (ISO 8859-11); Thai (Windows)
875: 'cp875', # IBM EBCDIC Greek Modern
932: 'shift_jis', # ANSI/OEM Japanese; Japanese (Shift-JIS)
936: 'gb2312', # ANSI/OEM Simplified Chinese (PRC, Singapore); Chinese Simplified (GB2312)
949: 'ks_c_5601-1987', # ANSI/OEM Korean (Unified Hangul Code)
950: 'big5', # ANSI/OEM Traditional Chinese (Taiwan; Hong Kong SAR, PRC); Chinese Traditional (Big5)
1026: 'IBM1026', # IBM EBCDIC Turkish (Latin 5)
1047: 'IBM01047', # IBM EBCDIC Latin 1/Open System
1140: 'IBM01140', # IBM EBCDIC US-Canada (037 + Euro symbol); IBM EBCDIC (US-Canada-Euro)
1141: 'IBM01141', # IBM EBCDIC Germany (20273 + Euro symbol); IBM EBCDIC (Germany-Euro)
1142: 'IBM01142', # IBM EBCDIC Denmark-Norway (20277 + Euro symbol); IBM EBCDIC (Denmark-Norway-Euro)
1143: 'IBM01143', # IBM EBCDIC Finland-Sweden (20278 + Euro symbol); IBM EBCDIC (Finland-Sweden-Euro)
1144: 'IBM01144', # IBM EBCDIC Italy (20280 + Euro symbol); IBM EBCDIC (Italy-Euro)
1145: 'IBM01145', # IBM EBCDIC Latin America-Spain (20284 + Euro symbol); IBM EBCDIC (Spain-Euro)
1146: 'IBM01146', # IBM EBCDIC United Kingdom (20285 + Euro symbol); IBM EBCDIC (UK-Euro)
1147: 'IBM01147', # IBM EBCDIC France (20297 + Euro symbol); IBM EBCDIC (France-Euro)
1148: 'IBM01148', # IBM EBCDIC International (500 + Euro symbol); IBM EBCDIC (International-Euro)
1149: 'IBM01149', # IBM EBCDIC Icelandic (20871 + Euro symbol); IBM EBCDIC (Icelandic-Euro)
1200: 'utf-16', # Unicode UTF-16, little endian byte order (BMP of ISO 10646); available only to managed applications
1201: 'unicodeFFFE', # Unicode UTF-16, big endian byte order; available only to managed applications
1250: 'windows-1250', # ANSI Central European; Central European (Windows)
1251: 'windows-1251', # ANSI Cyrillic; Cyrillic (Windows)
1252: 'windows-1252', # ANSI Latin 1; Western European (Windows)
1253: 'windows-1253', # ANSI Greek; Greek (Windows)
1254: 'windows-1254', # ANSI Turkish; Turkish (Windows)
1255: 'windows-1255', # ANSI Hebrew; Hebrew (Windows)
1256: 'windows-1256', # ANSI Arabic; Arabic (Windows)
1257: 'windows-1257', # ANSI Baltic; Baltic (Windows)
1258: 'windows-1258', # ANSI/OEM Vietnamese; Vietnamese (Windows)
1361: 'Johab', # Korean (Johab)
10000: 'macintosh', # MAC Roman; Western European (Mac)
10001: 'x-mac-japanese', # Japanese (Mac)
10002: 'x-mac-chinesetrad', # MAC Traditional Chinese (Big5); Chinese Traditional (Mac)
10003: 'x-mac-korean', # Korean (Mac)
10004: 'x-mac-arabic', # Arabic (Mac)
10005: 'x-mac-hebrew', # Hebrew (Mac)
10006: 'x-mac-greek', # Greek (Mac)
10007: 'x-mac-cyrillic', # Cyrillic (Mac)
10008: 'x-mac-chinesesimp', # MAC Simplified Chinese (GB 2312); Chinese Simplified (Mac)
10010: 'x-mac-romanian', # Romanian (Mac)
10017: 'x-mac-ukrainian', # Ukrainian (Mac)
10021: 'x-mac-thai', # Thai (Mac)
10029: 'x-mac-ce', # MAC Latin 2; Central European (Mac)
10079: 'x-mac-icelandic', # Icelandic (Mac)
10081: 'x-mac-turkish', # Turkish (Mac)
10082: 'x-mac-croatian', # Croatian (Mac)
12000: 'utf-32', # Unicode UTF-32, little endian byte order; available only to managed applications
12001: 'utf-32BE', # Unicode UTF-32, big endian byte order; available only to managed applications
20000: 'x-Chinese_CNS', # CNS Taiwan; Chinese Traditional (CNS)
20001: 'x-cp20001', # TCA Taiwan
20002: 'x_Chinese-Eten', # Eten Taiwan; Chinese Traditional (Eten)
20003: 'x-cp20003', # IBM5550 Taiwan
20004: 'x-cp20004', # TeleText Taiwan
20005: 'x-cp20005', # Wang Taiwan
20105: 'x-IA5', # IA5 (IRV International Alphabet No. 5, 7-bit); Western European (IA5)
20106: 'x-IA5-German', # IA5 German (7-bit)
20107: 'x-IA5-Swedish', # IA5 Swedish (7-bit)
20108: 'x-IA5-Norwegian', # IA5 Norwegian (7-bit)
20127: 'us-ascii', # US-ASCII (7-bit)
20261: 'x-cp20261', # T.61
20269: 'x-cp20269', # ISO 6937 Non-Spacing Accent
20273: 'IBM273', # IBM EBCDIC Germany
20277: 'IBM277', # IBM EBCDIC Denmark-Norway
20278: 'IBM278', # IBM EBCDIC Finland-Sweden
20280: 'IBM280', # IBM EBCDIC Italy
20284: 'IBM284', # IBM EBCDIC Latin America-Spain
20285: 'IBM285', # IBM EBCDIC United Kingdom
20290: 'IBM290', # IBM EBCDIC Japanese Katakana Extended
20297: 'IBM297', # IBM EBCDIC France
20420: 'IBM420', # IBM EBCDIC Arabic
20423: 'IBM423', # IBM EBCDIC Greek
20424: 'IBM424', # IBM EBCDIC Hebrew
20833: 'x-EBCDIC-KoreanExtended', # IBM EBCDIC Korean Extended
20838: 'IBM-Thai', # IBM EBCDIC Thai
20866: 'koi8-r', # Russian (KOI8-R); Cyrillic (KOI8-R)
20871: 'IBM871', # IBM EBCDIC Icelandic
20880: 'IBM880', # IBM EBCDIC Cyrillic Russian
20905: 'IBM905', # IBM EBCDIC Turkish
20924: 'IBM00924', # IBM EBCDIC Latin 1/Open System (1047 + Euro symbol)
20932: 'EUC-JP', # Japanese (JIS 0208-1990 and 0212-1990)
20936: 'x-cp20936', # Simplified Chinese (GB2312); Chinese Simplified (GB2312-80)
20949: 'x-cp20949', # Korean Wansung
21025: 'cp1025', # IBM EBCDIC Cyrillic Serbian-Bulgarian
21027: '', # (deprecated)
21866: 'koi8-u', # Ukrainian (KOI8-U); Cyrillic (KOI8-U)
28591: 'iso-8859-1', # ISO 8859-1 Latin 1; Western European (ISO)
28592: 'iso-8859-2', # ISO 8859-2 Central European; Central European (ISO)
28593: 'iso-8859-3', # ISO 8859-3 Latin 3
28594: 'iso-8859-4', # ISO 8859-4 Baltic
28595: 'iso-8859-5', # ISO 8859-5 Cyrillic
28596: 'iso-8859-6', # ISO 8859-6 Arabic
28597: 'iso-8859-7', # ISO 8859-7 Greek
28598: 'iso-8859-8', # ISO 8859-8 Hebrew; Hebrew (ISO-Visual)
28599: 'iso-8859-9', # ISO 8859-9 Turkish
28603: 'iso-8859-13', # ISO 8859-13 Estonian
28605: 'iso-8859-15', # ISO 8859-15 Latin 9
29001: 'x-Europa', # Europa 3
38598: 'iso-8859-8-i', # ISO 8859-8 Hebrew; Hebrew (ISO-Logical)
50220: 'iso-2022-jp', # ISO 2022 Japanese with no halfwidth Katakana; Japanese (JIS)
50221: 'csISO2022JP', # ISO 2022 Japanese with halfwidth Katakana; Japanese (JIS-Allow 1 byte Kana)
50222: 'iso-2022-jp', # ISO 2022 Japanese JIS X 0201-1989; Japanese (JIS-Allow 1 byte Kana - SO/SI)
50225: 'iso-2022-kr', # ISO 2022 Korean
50227: 'x-cp50227', # ISO 2022 Simplified Chinese; Chinese Simplified (ISO 2022)
50229: '', # ISO 2022 Traditional Chinese
50930: '', # EBCDIC Japanese (Katakana) Extended
50931: '', # EBCDIC US-Canada and Japanese
50933: '', # EBCDIC Korean Extended and Korean
50935: '', # EBCDIC Simplified Chinese Extended and Simplified Chinese
50936: '', # EBCDIC Simplified Chinese
50937: '', # EBCDIC US-Canada and Traditional Chinese
50939: '', # EBCDIC Japanese (Latin) Extended and Japanese
51932: 'euc-jp', # EUC Japanese
51936: 'EUC-CN', # EUC Simplified Chinese; Chinese Simplified (EUC)
51949: 'euc-kr', # EUC Korean
51950: '', # EUC Traditional Chinese
52936: 'hz-gb-2312', # HZ-GB2312 Simplified Chinese; Chinese Simplified (HZ)
54936: 'GB18030', # Windows XP and later: GB18030 Simplified Chinese (4 byte); Chinese Simplified (GB18030)
57002: 'x-iscii-de', # ISCII Devanagari
57003: 'x-iscii-be', # ISCII Bangla
57004: 'x-iscii-ta', # ISCII Tamil
57005: 'x-iscii-te', # ISCII Telugu
57006: 'x-iscii-as', # ISCII Assamese
57007: 'x-iscii-or', # ISCII Odia
57008: 'x-iscii-ka', # ISCII Kannada
57009: 'x-iscii-ma', # ISCII Malayalam
57010: 'x-iscii-gu', # ISCII Gujarati
57011: 'x-iscii-pa', # ISCII Punjabi
65000: 'utf-7', # Unicode (UTF-7)
65001: 'utf-8', # Unicode (UTF-8)
}
INTELLIGENCE_DUMB = 0
INTELLIGENCE_SMART = 1
INTELLIGENCE_TUPLE = (
'INTELLIGENCE_DUMB',
'INTELLIGENCE_SMART',
)
TYPE_MESSAGE = 0
TYPE_MESSAGE_EMBED = 1
TYPE_ATTACHMENT = 2
TYPE_RECIPIENT = 3
TYPE_TUPLE = (
'TYPE_MESSAGE',
'TYPE_MESSAGE_EMBED',
'TYPE_ATTACHMENT',
'TYPE_RECIPIENT',
)
RECIPIENT_SENDER = 0
RECIPIENT_TO = 1
RECIPIENT_CC = 2
RECIPIENT_BCC = 3
RECIPIENT_TUPLE = (
'RECIPIENT_SENDER',
'RECIPIENT_TO',
'RECIPIENT_CC',
'RECIPIENT_BCC',
)
# PidTagImportance
IMPORTANCE_LOW = 0
IMPORTANCE_MEDIUM = 1
IMPORTANCE_HIGH = 2
IMPORTANCE_TUPLE = (
'IMPORTANCE_LOW',
'IMPORTANCE_MEDIUM',
'IMPORTANCE_HIGH',
)
# PidTagSensitivity
SENSITIVITY_NORMAL = 0
SENSITIVITY_PERSONAL = 1
SENSITIVITY_PRIVATE = 2
SENSITIVITY_CONFIDENTIAL = 3
SENSITIVITY_TUPLE = (
'SENSITIVITY_NORMAL',
'SENSITIVITY_PERSONAL',
'SENSITIVITY_PRIVATE',
'SENSITIVITY_CONFIDENTIAL',
)
# PidTagPriority
PRIORITY_URGENT = 0x00000001
PRIORITY_NORMAL = 0x00000000
PRIORITY_NOT_URGENT = 0xFFFFFFFF
PYTPFLOATINGTIME_START = datetime.datetime(1899, 12, 30)
# Constants used for argparse stuff
KNOWN_FILE_FLAGS = [
'--out-name',
]
NEEDS_ARG = [
'--out-name',
]
MAINDOC = "extract_msg:\n\tExtracts emails and attachments saved in Microsoft Outlook's .msg files.\n\n" \
"https://github.com/mattgwwalker/msg-extractor"
# Define pre-compiled structs to make unpacking slightly faster
# General structs
ST1 = struct.Struct('<8x4I')
ST2 = struct.Struct('<H2xI8x')
ST3 = struct.Struct('<Q')
# Structs used by named.py
STNP_NAM = struct.Struct('<i')
STNP_ENT = struct.Struct('<IHH') # Struct used for unpacking the entries in the entry stream
# Structs used by prop.py
STFIX = struct.Struct('<8x8s')
STVAR = struct.Struct('<8xi4s')
# Structs to help with email type to python type conversions
STI16 = struct.Struct('<h6x')
STI32 = struct.Struct('<I4x')
STI64 = struct.Struct('<q')
STF32 = struct.Struct('<f4x')
STF64 = struct.Struct('<d')
STUI32 = struct.Struct('<I4x')
STMI16 = struct.Struct('<h')
STMI32 = struct.Struct('<i')
STMI64 = struct.Struct('<q')
STMF32 = struct.Struct('<f')
STMF64 = struct.Struct('<d')
# PermanentEntryID parsing struct
STPEID = struct.Struct('<B3x16s4xI')
PTYPES = {
0x0000: 'PtypUnspecified',
0x0001: 'PtypNull',
0x0002: 'PtypInteger16', # Signed short
0x0003: 'PtypInteger32', # Signed int
0x0004: 'PtypFloating32', # Float
0x0005: 'PtypFloating64', # Double
0x0006: 'PtypCurrency',
0x0007: 'PtypFloatingTime',
0x000A: 'PtypErrorCode',
0x000B: 'PtypBoolean',
0x000D: 'PtypObject/PtypEmbeddedTable/Storage',
0x0014: 'PtypInteger64', # Signed longlong
0x001E: 'PtypString8',
0x001F: 'PtypString',
0x0040: 'PtypTime', # Use msgEpoch to convert to unix time stamp
0x0048: 'PtypGuid',
0x00FB: 'PtypServerId',
0x00FD: 'PtypRestriction',
0x00FE: 'PtypRuleAction',
0x0102: 'PtypBinary',
0x1002: 'PtypMultipleInteger16',
0x1003: 'PtypMultipleInteger32',
0x1004: 'PtypMultipleFloating32',
0x1005: 'PtypMultipleFloating64',
0x1006: 'PtypMultipleCurrency',
0x1007: 'PtypMultipleFloatingTime',
0x1014: 'PtypMultipleInteger64',
0x101E: 'PtypMultipleString8',
0x101F: 'PtypMultipleString',
0x1040: 'PtypMultipleTime',
0x1048: 'PtypMultipleGuid',
0x1102: 'PtypMultipleBinary',
}
# Display types
DT_MAILUSER = 0x0000
DT_DISTLIST = 0x0001
DT_FORUM = 0x0002
DT_AGENT = 0x0003
DT_ORGANIZATION = 0x0004
DT_PRIVATE_DISTLIST = 0x0005
DT_REMOTE_MAILUSER = 0x0006
DT_CONTAINER = 0x0100
DT_TEMPLATE = 0x0101
DT_ADDRESS_TEMPLATE = 0x0102
DT_SEARCH = 0x0200
# Rule action types
RA_OP_MOVE = 0x01
RA_OP_COPY = 0x02
RA_OP_REPLY = 0x03
RA_OP_OOF_REPLY = 0x04
RA_OP_DEFER_ACTION = 0x05
RA_OP_BOUNCE = 0x06
RA_OP_FORWARD = 0x07
RA_OP_DELEGATE = 0x08
RA_OP_TAG = 0x09
RA_OP_DELETE = 0x0A
RA_OP_MARK_AS_READ = 0x0B
# Recipient Row Types
RR_NOTYPE = 0x0
RR_X500DN = 0x1
RR_MSMAIL = 0x2
RR_SMTP = 0x3
RR_FAX = 0x4
RR_PROFESSIONALOFFICESYSTEM = 0x5
RR_PERSONALDESTRIBUTIONLIST1 = 0x6
RR_PERSONALDESTRIBUTIONLIST2 = 0x7
# This property information was sourced from
# http://www.fileformat.info/format/outlookmsg/index.htm
# on 2013-07-22.
# It was extended by The Elemental of Destruction on 2018-10-12
PROPERTIES = {
'00010102': 'Template data',
'0002000B': 'Alternate recipient allowed',
'0004001F': 'Auto forward comment',
'00040102': 'Script data',
'0005000B': 'Auto forwarded',
'000F000F': 'Deferred delivery time',
'00100040': 'Deliver time',
'00150040': 'Expiry time',
'00170003': 'Importance',
'001A001F': 'Message class',
'0023001F': 'Originator delivery report requested',
'00250102': 'Parent key',
'00260003': 'Priority',
'0029000B': 'Read receipt requested',
'002A0040': 'Receipt time',
'002B000B': 'Recipient reassignment prohibited',
'002E0003': 'Original sensitivity',
'00300040': 'Reply time',
'00310102': 'Report tag',
'00320040': 'Report time',
'00360003': 'Sensitivity',
'0037001F': 'Subject',
'00390040': 'Client Submit Time',
'003A001F': '',
'003B0102': '',
'003D001F': 'Subject prefix',
'003F0102': '',
'0040001F': 'Received by name',
'00410102': '',
'0042001F': 'Sent repr name',
'00430102': '',
'0044001F': 'Rcvd repr name',
'00450102': '',
'0046001F': '',
'00470102': '',
'0049001F': '',
'004B001F': '',
'004C0102': '',
'004D001F': 'Org author name',
'004E0040': '',
'004F0102': '',
'0050001F': 'Reply rcipnt names',
'00510102': '',
'00520102': '',
'00530102': '',
'00540102': '',
'00550040': '',
'0057000B': '',
'0058000B': '',
'0059000B': '',
'005A001F': 'Org sender name',
'005B0102': '',
'005C0102': '',
'005D001F': '',
'005E0102': '',
'005F0102': '',
'00600040': '',
'00610040': '',
'00620003': '',
'0063000B': '',
'0064001F': 'Sent repr adrtype',
'0065001F': 'Sent repr email',
'0066001F': '',
'00670102': '',
'0068001F': '',
'0069001F': '',
'0070001F': 'Topic',
'00710102': '',
'0072001F': '',
'0073001F': '',
'0074001F': '',
'0075001F': 'Rcvd by adrtype',
'0076001F': 'Rcvd by email',
'0077001F': 'Repr adrtype',
'0078001F': 'Repr email',
'007D001F': 'Message header',
'007F0102': '',
'0080001F': '',
'0081001F': '',
'08070003': '',
'0809001F': '',
'0C040003': '',
'0C050003': '',
'0C06000B': '',
'0C08000B': '',
'0C150003': '',
'0C17000B': '',
'0C190102': '',
'0C1A001F': 'Sender name',
'0C1B001F': '',
'0C1D0102': '',
'0C1E001F': 'Sender adr type',
'0C1F001F': 'Sender email',
'0C200003': '',
'0C21001F': '',
'0E01000B': '',
'0E02001F': 'Display BCC',
'0E03001F': 'Display CC',
'0E04001F': 'Display To',
'0E060040': '',
'0E070003': '',
'0E080003': '',
'0E080014': '',
'0E090102': '',
'0E0F000B': '',
'0E12000D': '',
'0E13000D': '',
'0E170003': '',
'0E1B000B': '',
'0E1D001F': 'Subject (normalized)',
'0E1F000B': '',
'0E200003': '',
'0E210003': '',
'0E28001F': 'Recvd account1 (uncertain)',
'0E29001F': 'Recvd account2 (uncertain)',
'1000001F': 'Message body',
'1008': 'RTF sync body tag', # Where did this come from ??? It's not listed in the docs
'10090102': 'Compressed RTF body',
'1013001F': 'HTML body',
'1035001F': 'Message ID (uncertain)',
'1046001F': 'Sender email (uncertain)',
'3001001F': 'Display name',
'3002001F': 'Address type',
'3003001F': 'Email address',
'30070040': 'Creation date',
'39FE001F': '7-bit email (uncertain)',
'39FF001F': '7-bit display name',
# Attachments (37xx)
'37010102': 'Attachment data',
'37020102': '',
'3703001F': 'Attachment extension',
'3704001F': 'Attachment short filename',
'37050003': 'Attachment attach method',
'3707001F': 'Attachment long filename',
'370E001F': 'Attachment mime tag',
'3712001F': 'Attachment ID (uncertain)',
# Address book (3Axx):
'3A00001F': 'Account',
'3A02001F': 'Callback phone no',
'3A05001F': 'Generation',
'3A06001F': 'Given name',
'3A08001F': 'Business phone',
'3A09001F': 'Home phone',
'3A0A001F': 'Initials',
'3A0B001F': 'Keyword',
'3A0C001F': 'Language',
'3A0D001F': 'Location',
'3A11001F': 'Surname',
'3A15001F': 'Postal address',
'3A16001F': 'Company name',
'3A17001F': 'Title',
'3A18001F': 'Department',
'3A19001F': 'Office location',
'3A1A001F': 'Primary phone',
'3A1B101F': 'Business phone 2',
'3A1C001F': 'Mobile phone',
'3A1D001F': 'Radio phone no',
'3A1E001F': 'Car phone no',
'3A1F001F': 'Other phone',
'3A20001F': 'Transmit dispname',
'3A21001F': 'Pager',
'3A220102': 'User certificate',
'3A23001F': 'Primary Fax',
'3A24001F': 'Business Fax',
'3A25001F': 'Home Fax',
'3A26001F': 'Country',
'3A27001F': 'Locality',
'3A28001F': 'State/Province',
'3A29001F': 'Street address',
'3A2A001F': 'Postal Code',
'3A2B001F': 'Post Office Box',
'3A2C001F': 'Telex',
'3A2D001F': 'ISDN',
'3A2E001F': 'Assistant phone',
'3A2F001F': 'Home phone 2',
'3A30001F': 'Assistant',
'3A44001F': 'Middle name',
'3A45001F': 'Dispname prefix',
'3A46001F': 'Profession',
'3A47001F': '',
'3A48001F': 'Spouse name',
'3A4B001F': 'TTYTTD radio phone',
'3A4C001F': 'FTP site',
'3A4E001F': 'Manager name',
'3A4F001F': 'Nickname',
'3A51001F': 'Business homepage',
'3A57001F': 'Company main phone',
'3A58101F': 'Childrens names',
'3A59001F': 'Home City',
'3A5A001F': 'Home Country',
'3A5B001F': 'Home Postal Code',
'3A5C001F': 'Home State/Provnce',
'3A5D001F': 'Home Street',
'3A5F001F': 'Other adr City',
'3A60': 'Other adr Country',
'3A61': 'Other adr PostCode',
'3A62': 'Other adr Province',
'3A63': 'Other adr Street',
'3A64': 'Other adr PO box',
'3FF7': 'Server (uncertain)',
'3FF8': 'Creator1 (uncertain)',
'3FFA': 'Creator2 (uncertain)',
'3FFC': 'To email (uncertain)',
'403D': 'To adrtype (uncertain)',
'403E': 'To email (uncertain)',
'5FF6': 'To (uncertain)',
}
# END CONSTANTS
def int_to_data_type(integer):
"""
Returns the name of the data type constant that has the value of :param integer:
"""
return TYPE_TUPLE[integer]
def int_to_intelligence(integer):
"""
Returns the name of the intelligence level constant that has the value of :param integer:
"""
return INTELLIGENCE_TUPLE[integer]
def int_to_recipient_type(integer):
"""
Returns the name of the recipient type constant that has the value of :param integer:
"""
return RECIPIENT_TUPLE[integer]
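# Editor's illustrative note (not part of the original module): the three
# helpers above simply index the corresponding *_TUPLE constants, e.g.
# int_to_data_type(TYPE_ATTACHMENT) returns 'TYPE_ATTACHMENT' and
# int_to_recipient_type(RECIPIENT_CC) returns 'RECIPIENT_CC'.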
| gpl-3.0 | -1,751,319,441,181,529,000 | 32.030973 | 121 | 0.639071 | false | 2.626979 | false | false | false |
rwstauner/dear_astrid | dear_astrid/test/test_formatter.py | 1 | 1646 | # pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring
# pylint: disable=undefined-variable,line-too-long,invalid-name
from __future__ import unicode_literals
from nose.tools import *
from dear_astrid.formatter import *
from dear_astrid.parser import *
from dear_astrid.test.helpers import *
# shortcut
def one_task(fragment):
return parse_xml(
'<astrid format="2">{0}</astrid>'.format(fragment)
)[0]
class TestFormatXML(TestCase):
# pylint: disable=too-many-public-methods,no-member
def assert_task_parses(self, xml, exp):
with timezone('UTC'):
self.assert_equal(one_task(xml), exp)
def assert_round_trip(self, task):
xml = format_task(task)
tags = ['astrid']
tags.extend(task['tags'])
task['tags'] = tags
self.assert_task_parses(xml, task)
def test_round_trip(self):
self.assert_round_trip({
'title': 'squid',
'priority': 2,
'due_date': dtu(2014, 5, 10, 19, 0, 0, 402000),
'recurrence': None,
'repeat_until': None,
'completed': None,
'deleted': None,
'estimated': 0,
'elapsed': 0,
'tags': [],
'notes': None,
})
self.assert_round_trip({
'title': 'squidly',
'priority': 3,
'due_date': dtu(2014, 5, 10, 19, 0, 0, 402000),
'recurrence': {"FREQ": "DAILY", "INTERVAL": 12},
'repeat_until': None,
'completed': dtu(2014, 6, 10, 19, 0, 0, 402000),
'deleted': None,
'estimated': 0,
'elapsed': 0,
'tags': ["taggy"],
'notes': "foo",
})
| mit | -2,871,590,752,276,053,500 | 26.433333 | 74 | 0.565006 | false | 3.141221 | false | false | false |
eschloss/FluFuture | openpds/visualization/stats.py | 1 | 6302 | from django.shortcuts import render_to_response
from django.template import RequestContext
import pdb
from openpds.visualization.internal import getInternalDataStore
from openpds.core.models import Profile, FluQuestions, ProfileStartEnd, FB_Connection, Emoji, Emoji2, emoji_choices, QuestionInstance, QuestionType, FirebaseToken, IPReferral
import facebook
import json, datetime, time, re, math, pytz
from django.http import HttpResponse, Http404, HttpResponseRedirect, HttpResponseForbidden
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from calendar import monthrange
from openpds.questions.tasks import checkForProfileReferral
from django.views.decorators.cache import cache_page
from pymongo import Connection
import random
from django.conf import settings
from django.utils import timezone
def dupEmojis(request):
for e in Emoji.objects.all():
Emoji2.objects.create(profile=e.profile, emoji=e.emoji, created=e.created, lat=e.lat, lng=e.lng)
return HttpResponse("success")
def getLength(request):
connection = Connection(
host=random.choice(getattr(settings, "MONGODB_HOST", None)),
port=getattr(settings, "MONGODB_PORT", None),
readPreference='nearest'
)
for p in Profile.objects.all():
pse = ProfileStartEnd.objects.filter(profile=p)
if len(pse) == 0:
dbName = p.getDBName().strip()
try:
db = connection[dbName]
collection = db["funf"]
try:
count = db.command("collstats", "funf")["count"]
if count == 0:
ProfileStartEnd.objects.create(profile=p)
else:
start = collection.find({"key": {"$ne": "edu.mit.media.funf.probe.builtin.WifiProbe"}}).sort("time", 1)[0]["time"]
end = collection.find({"key": {"$ne": "edu.mit.media.funf.probe.builtin.WifiProbe"}}).sort("time", -1)[0]["time"]
                        # TODO: next time, don't use WifiProbe timestamps (or SMS); they are unreliable. ActivityProbe or ScreenProbe is probably a better choice.
                        # Also must check that the start time is after profile.created.
days = end - start
days = int(days / 60.0 / 60.0/ 24.0)
ProfileStartEnd.objects.create(profile=p, start=datetime.datetime.fromtimestamp(start), end=datetime.datetime.fromtimestamp(end), days=days)
except:
pass
except:
pass
connection.close()
return HttpResponse("success")
def removeEmptyPSE(request):
    pses = ProfileStartEnd.objects.filter(start__isnull=True) | ProfileStartEnd.objects.filter(end__isnull=True)
    pses.delete()
    return HttpResponse("success")
def getEmoji(n):
for x in EMOJI_PERCENTAGE_CUMULATIVE:
if n < x[1]:
return x[0]
return 'h'
EMOJI_PERCENTAGE = {
'h': .16, # 'healthy':
's': .08, # 'sick':
'y': .13, # 'sleepy':
'c': .05, # 'cough':
'f': .05, # 'fever':
'u': .015, # 'flu':
'n': .04, # 'nauseous':
'l': .04, # 'sore throat':
'r': .08, # 'runnynose':
'b': .01, # 'body ache':
'a': .08, #'calm':
'd': .065, #'down':
'e': .1, #'energized':
'm': .03, #'motivated':
't': .07, #'trouble concentrating':
}
EMOJI_PERCENTAGE_CUMULATIVE = [
( 'h', .16, ), # 'healthy',
( 's', .24, ), # 'sick',
( 'y', .37, ), # 'sleepy',
( 'c', .42, ), # 'cough',
( 'f', .47, ), # 'fever',
( 'u', .485, ), # 'flu',
( 'n', .525, ), # 'nauseous',
( 'l', .565, ), # 'sore throat',
( 'r', .645, ), # 'runnynose',
( 'b', .655, ), # 'body ache',
( 'a', .735, ), #'calm',
( 'd', .8, ), #'down',
( 'e', .9, ), #'energized',
( 'm', .93, ), #'motivated',
( 't', 1.0, ), #'trouble concentrating',
]
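# Editor's note (not part of the original module): getEmoji() walks this
# cumulative table and returns the first code whose threshold exceeds the
# draw, so getEmoji(0.05) == 'h' (healthy) and getEmoji(0.40) == 'c' (cough);
# random.random() is always < 1.0, so the trailing 'h' return in getEmoji()
# is only a safety fallback.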
def randEmojis(request):
pses = ProfileStartEnd.objects.filter(days__gt=2)
totalcreated = 0
for pse in pses:
start = pse.start
new_start = timezone.now()
new_start = new_start.replace(year=2017, month=3, day=1)
if new_start > start:
start = new_start
end = pse.end
if start > end:
continue
randint = random.randint(3,5)
count = 1
start = start + datetime.timedelta(days=randint)
while start < end:
try:
emojinum = random.random()
emoji = getEmoji(emojinum) #todo Hard - should be some correlation
Emoji2.objects.create(profile=pse.profile, created=start, emoji=emoji)
totalcreated += 1
randint = random.randint(3,5 + count)
count += 1
start = start + datetime.timedelta(days=randint)
rmin = random.randint(0, 59)
rsec = random.randint(0, 59)
msec = random.randint(0, 999999)
rhour = random.randint(9, 18)
start = start.replace(hour=rhour, minute=rmin, second=rsec, microsecond=msec)
except:
pass
return HttpResponse(str(totalcreated))
def fluQuestionSet(request):
ps = Profile.objects.all()
for p in ps:
if FluQuestions.objects.filter(profile=p).count() == 0:
r1 = random.random() < .05 # get flu this season yet
r2 = random.random() < .17 # get flu last season
r3 = random.random() < .35 # vaccine this season
if Emoji2.objects.filter(emoji='u', profile=p).count() > 0:
r3 = False
FluQuestions.objects.create(profile=p, fluThisSeason=r1, fluLastSeason=r2, vaccineThisSeason=r3)
return HttpResponse("Success")
def csv(request):
response = ""
for e in Emoji2.objects.all():
response += "%s,%s,%s,%s,%s,%s<br/>" % (str(e.pk), str(e.profile.pk), e.emoji, str(e.created), "", "")
return HttpResponse(response)
| mit | 6,003,946,626,061,600,000 | 39.146497 | 174 | 0.541098 | false | 3.453151 | false | false | false |
chiggs/theopencorps | theopencorps/travis/yml.py | 1 | 1414 | """
Helper for generating the .travis.yml file for
"""
__copyright__ = """
Copyright (C) 2016 Potential Ventures Ltd
This file is part of theopencorps
<https://github.com/theopencorps/theopencorps/>
"""
__license__ = """
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
from jinja2 import Environment, FileSystemLoader
# Annoyingly GAE is jinja 2.6 which doesn't support lstrip_blocks=True
_env = Environment(loader=FileSystemLoader(os.path.dirname(__file__)), trim_blocks=True)
class TravisYML(object):
"""
Convenience wrapper for our Travis YML generation
"""
def __init__(self, *args, **kwargs):
for name, value in kwargs.iteritems():
setattr(self, name, value)
def render(self):
_template = _env.get_template('travis.yml.tpl')
return _template.render(**self.__dict__)
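# Editor's illustrative example (not part of the original module): render()
# simply exposes whatever attributes were set on the instance to the
# travis.yml.tpl template, so the keyword arguments below are placeholders,
# not a documented schema.
#
#   yml = TravisYML(language='python', python=['2.7'])
#   text = yml.render()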
| agpl-3.0 | -1,915,430,394,060,182,300 | 32.666667 | 88 | 0.727016 | false | 3.906077 | false | false | false |
arthurdarcet/aiohttp | aiohttp/http_websocket.py | 3 | 24594 | """WebSocket protocol versions 13 and 8."""
import asyncio
import collections
import json
import random
import re
import sys
import zlib
from enum import IntEnum
from struct import Struct
from typing import Any, Callable, List, Optional, Tuple, Union
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
from .log import ws_logger
from .streams import DataQueue
__all__ = ('WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY',
'WebSocketReader', 'WebSocketWriter', 'WSMessage',
'WebSocketError', 'WSMsgType', 'WSCloseCode')
class WSCloseCode(IntEnum):
OK = 1000
GOING_AWAY = 1001
PROTOCOL_ERROR = 1002
UNSUPPORTED_DATA = 1003
INVALID_TEXT = 1007
POLICY_VIOLATION = 1008
MESSAGE_TOO_BIG = 1009
MANDATORY_EXTENSION = 1010
INTERNAL_ERROR = 1011
SERVICE_RESTART = 1012
TRY_AGAIN_LATER = 1013
ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode}
class WSMsgType(IntEnum):
# websocket spec types
CONTINUATION = 0x0
TEXT = 0x1
BINARY = 0x2
PING = 0x9
PONG = 0xa
CLOSE = 0x8
# aiohttp specific types
CLOSING = 0x100
CLOSED = 0x101
ERROR = 0x102
text = TEXT
binary = BINARY
ping = PING
pong = PONG
close = CLOSE
closing = CLOSING
closed = CLOSED
error = ERROR
WS_KEY = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
UNPACK_LEN2 = Struct('!H').unpack_from
UNPACK_LEN3 = Struct('!Q').unpack_from
UNPACK_CLOSE_CODE = Struct('!H').unpack
PACK_LEN1 = Struct('!BB').pack
PACK_LEN2 = Struct('!BBH').pack
PACK_LEN3 = Struct('!BBQ').pack
PACK_CLOSE_CODE = Struct('!H').pack
MSG_SIZE = 2 ** 14
DEFAULT_LIMIT = 2 ** 16
_WSMessageBase = collections.namedtuple('_WSMessageBase',
['type', 'data', 'extra'])
class WSMessage(_WSMessageBase):
def json(self, *, # type: ignore
             loads: Callable[[Any], Any]=json.loads) -> Any:
"""Return parsed JSON data.
.. versionadded:: 0.22
"""
return loads(self.data)
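# Editor's illustrative example (not part of the original module): the payload
# of a text frame can be decoded directly from the message tuple, e.g.
#   WSMessage(WSMsgType.TEXT, '{"answer": 42}', '').json() == {'answer': 42}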
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
class WebSocketError(Exception):
"""WebSocket protocol parser error."""
def __init__(self, code: int, message: str) -> None:
self.code = code
super().__init__(message)
class WSHandshakeError(Exception):
"""WebSocket protocol handshake error."""
native_byteorder = sys.byteorder
# Used by _websocket_mask_python
_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)]
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
"""Websocket masking function.
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
object of any length. The contents of `data` are masked with `mask`,
as specified in section 5.3 of RFC 6455.
Note that this function mutates the `data` argument.
This pure-python implementation may be replaced by an optimized
version when available.
"""
assert isinstance(data, bytearray), data
assert len(mask) == 4, mask
if data:
a, b, c, d = (_XOR_TABLE[n] for n in mask)
data[::4] = data[::4].translate(a)
data[1::4] = data[1::4].translate(b)
data[2::4] = data[2::4].translate(c)
data[3::4] = data[3::4].translate(d)
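# Editor's illustrative example (not part of the original module): masking is
# an involution, so applying the same 4-byte mask twice restores the payload.
# The mask and payload values below are arbitrary.
def _example_websocket_mask_roundtrip() -> None:
    payload = bytearray(b'hello world')
    mask = b'\x37\xfa\x21\x3d'
    _websocket_mask_python(mask, payload)  # payload is now masked in place
    _websocket_mask_python(mask, payload)  # same mask again restores the bytes
    assert bytes(payload) == b'hello world'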
if NO_EXTENSIONS: # pragma: no cover
_websocket_mask = _websocket_mask_python
else:
try:
from ._websocket import _websocket_mask_cython # type: ignore
_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
_websocket_mask = _websocket_mask_python
_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xff, 0xff])
_WS_EXT_RE = re.compile(r'^(?:;\s*(?:'
r'(server_no_context_takeover)|'
r'(client_no_context_takeover)|'
r'(server_max_window_bits(?:=(\d+))?)|'
r'(client_max_window_bits(?:=(\d+))?)))*$')
_WS_EXT_RE_SPLIT = re.compile(r'permessage-deflate([^,]+)?')
def ws_ext_parse(extstr: str, isserver: bool=False) -> Tuple[int, bool]:
if not extstr:
return 0, False
compress = 0
notakeover = False
for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
defext = ext.group(1)
        # Return compress = 15 when we get a bare `permessage-deflate`
if not defext:
compress = 15
break
match = _WS_EXT_RE.match(defext)
if match:
compress = 15
if isserver:
                # The server never fails to detect the compress handshake.
                # The server does not need to send max window bits to the client.
if match.group(4):
compress = int(match.group(4))
                    # Group3 must match if group4 matches.
                    # Compress wbits 8 is not supported by zlib.
                    # If the compress level is not supported,
                    # CONTINUE to the next extension.
if compress > 15 or compress < 9:
compress = 0
continue
if match.group(1):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
else:
if match.group(6):
compress = int(match.group(6))
                    # Group5 must match if group6 matches.
                    # Compress wbits 8 is not supported by zlib.
                    # If the compress level is not supported,
                    # FAIL the parsing process.
if compress > 15 or compress < 9:
raise WSHandshakeError('Invalid window size')
if match.group(2):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
# Return Fail if client side and not match
elif not isserver:
raise WSHandshakeError('Extension for deflate not supported' +
ext.group(1))
return compress, notakeover
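# Editor's illustrative example (not part of the original module): ws_ext_parse
# reads a Sec-WebSocket-Extensions header value and returns the negotiated
# deflate window size plus the no-context-takeover flag. The header strings
# below are sample values.
def _example_ws_ext_parse() -> None:
    # A bare permessage-deflate negotiates the maximum window size of 15.
    assert ws_ext_parse(
        'permessage-deflate; client_max_window_bits') == (15, False)
    # On the server side an explicit server_max_window_bits narrows the window.
    assert ws_ext_parse('permessage-deflate; server_max_window_bits=10',
                        isserver=True) == (10, False)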
def ws_ext_gen(compress: int=15, isserver: bool=False,
server_notakeover: bool=False) -> str:
# client_notakeover=False not used for server
# compress wbit 8 does not support in zlib
if compress < 9 or compress > 15:
        raise ValueError('Compress wbits must be between 9 and 15, '
'zlib does not support wbits=8')
enabledext = ['permessage-deflate']
if not isserver:
enabledext.append('client_max_window_bits')
if compress < 15:
enabledext.append('server_max_window_bits=' + str(compress))
if server_notakeover:
enabledext.append('server_no_context_takeover')
# if client_notakeover:
# enabledext.append('client_no_context_takeover')
return '; '.join(enabledext)
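# Editor's illustrative example (not part of the original module): ws_ext_gen
# builds the matching Sec-WebSocket-Extensions string for an offer or response.
def _example_ws_ext_gen() -> None:
    # Client offer with the default window size.
    assert ws_ext_gen() == 'permessage-deflate; client_max_window_bits'
    # Server response advertising a reduced window and no context takeover.
    assert ws_ext_gen(compress=10, isserver=True, server_notakeover=True) == (
        'permessage-deflate; server_max_window_bits=10; '
        'server_no_context_takeover')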
class WSParserState(IntEnum):
READ_HEADER = 1
READ_PAYLOAD_LENGTH = 2
READ_PAYLOAD_MASK = 3
READ_PAYLOAD = 4
class WebSocketReader:
def __init__(self, queue: DataQueue[WSMessage],
max_msg_size: int, compress: bool=True) -> None:
self.queue = queue
self._max_msg_size = max_msg_size
self._exc = None # type: Optional[BaseException]
self._partial = bytearray()
self._state = WSParserState.READ_HEADER
self._opcode = None # type: Optional[int]
self._frame_fin = False
self._frame_opcode = None # type: Optional[int]
self._frame_payload = bytearray()
self._tail = b''
self._has_mask = False
self._frame_mask = None # type: Optional[bytes]
self._payload_length = 0
self._payload_length_flag = 0
self._compressed = None # type: Optional[bool]
self._decompressobj = None # type: Any # zlib.decompressobj actually
self._compress = compress
def feed_eof(self) -> None:
self.queue.feed_eof()
def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
if self._exc:
return True, data
try:
return self._feed_data(data)
except Exception as exc:
self._exc = exc
self.queue.set_exception(exc)
return True, b''
def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
for fin, opcode, payload, compressed in self.parse_frame(data):
if compressed and not self._decompressobj:
self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
if opcode == WSMsgType.CLOSE:
if len(payload) >= 2:
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
if (close_code < 3000 and
close_code not in ALLOWED_CLOSE_CODES):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close code: {}'.format(close_code))
try:
close_message = payload[2:].decode('utf-8')
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
elif payload:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close frame: {} {} {!r}'.format(
fin, opcode, payload))
else:
msg = WSMessage(WSMsgType.CLOSE, 0, '')
self.queue.feed_data(msg, 0)
elif opcode == WSMsgType.PING:
self.queue.feed_data(
WSMessage(WSMsgType.PING, payload, ''), len(payload))
elif opcode == WSMsgType.PONG:
self.queue.feed_data(
WSMessage(WSMsgType.PONG, payload, ''), len(payload))
elif opcode not in (
WSMsgType.TEXT, WSMsgType.BINARY) and self._opcode is None:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Unexpected opcode={!r}".format(opcode))
else:
# load text/binary
if not fin:
# got partial frame payload
if opcode != WSMsgType.CONTINUATION:
self._opcode = opcode
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
else:
# previous frame was non finished
# we should get continuation opcode
if self._partial:
if opcode != WSMsgType.CONTINUATION:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'The opcode in non-fin frame is expected '
'to be zero, got {!r}'.format(opcode))
if opcode == WSMsgType.CONTINUATION:
assert self._opcode is not None
opcode = self._opcode
self._opcode = None
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
                    # Decompression must be done after all packets have been
                    # received.
if compressed:
self._partial.extend(_WS_DEFLATE_TRAILING)
payload_merged = self._decompressobj.decompress(
self._partial, self._max_msg_size)
if self._decompressobj.unconsumed_tail:
left = len(self._decompressobj.unconsumed_tail)
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Decompressed message size exceeds limit {}".
format(self._max_msg_size + left,
self._max_msg_size))
else:
payload_merged = bytes(self._partial)
self._partial.clear()
if opcode == WSMsgType.TEXT:
try:
text = payload_merged.decode('utf-8')
self.queue.feed_data(
WSMessage(WSMsgType.TEXT, text, ''), len(text))
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
else:
self.queue.feed_data(
WSMessage(WSMsgType.BINARY, payload_merged, ''),
len(payload_merged))
return False, b''
def parse_frame(self, buf: bytes) -> List[Tuple[bool, Optional[int],
bytearray,
Optional[bool]]]:
"""Return the next frame from the socket."""
frames = []
if self._tail:
buf, self._tail = self._tail + buf, b''
start_pos = 0
buf_length = len(buf)
while True:
# read header
if self._state == WSParserState.READ_HEADER:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
start_pos += 2
first_byte, second_byte = data
fin = (first_byte >> 7) & 1
rsv1 = (first_byte >> 6) & 1
rsv2 = (first_byte >> 5) & 1
rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xf
# frame-fin = %x0 ; more frames of this message follow
# / %x1 ; final frame of this message
# frame-rsv1 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
# frame-rsv2 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
# frame-rsv3 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
#
# Remove rsv1 from this test for deflate development
if rsv2 or rsv3 or (rsv1 and not self._compress):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
if opcode > 0x7 and fin == 0:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received fragmented control frame')
has_mask = (second_byte >> 7) & 1
length = second_byte & 0x7f
# Control frames MUST have a payload
# length of 125 bytes or less
if opcode > 0x7 and length > 125:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Control frame payload cannot be '
'larger than 125 bytes')
# Set compress status if last package is FIN
# OR set compress status if this is first fragment
# Raise error if not first fragment with rsv1 = 0x1
if self._frame_fin or self._compressed is None:
self._compressed = True if rsv1 else False
elif rsv1:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
self._frame_fin = bool(fin)
self._frame_opcode = opcode
self._has_mask = bool(has_mask)
self._payload_length_flag = length
self._state = WSParserState.READ_PAYLOAD_LENGTH
else:
break
# read payload length
if self._state == WSParserState.READ_PAYLOAD_LENGTH:
length = self._payload_length_flag
if length == 126:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
start_pos += 2
length = UNPACK_LEN2(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else:
break
elif length > 126:
if buf_length - start_pos >= 8:
data = buf[start_pos:start_pos+8]
start_pos += 8
length = UNPACK_LEN3(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else:
break
else:
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
# read payload mask
if self._state == WSParserState.READ_PAYLOAD_MASK:
if buf_length - start_pos >= 4:
self._frame_mask = buf[start_pos:start_pos+4]
start_pos += 4
self._state = WSParserState.READ_PAYLOAD
else:
break
if self._state == WSParserState.READ_PAYLOAD:
length = self._payload_length
payload = self._frame_payload
chunk_len = buf_length - start_pos
if length >= chunk_len:
self._payload_length = length - chunk_len
payload.extend(buf[start_pos:])
start_pos = buf_length
else:
self._payload_length = 0
payload.extend(buf[start_pos:start_pos+length])
start_pos = start_pos + length
if self._payload_length == 0:
if self._has_mask:
assert self._frame_mask is not None
_websocket_mask(self._frame_mask, payload)
frames.append((
self._frame_fin,
self._frame_opcode,
payload,
self._compressed))
self._frame_payload = bytearray()
self._state = WSParserState.READ_HEADER
else:
break
self._tail = buf[start_pos:]
return frames
class WebSocketWriter:
def __init__(self, protocol: BaseProtocol, transport: asyncio.Transport, *,
use_mask: bool=False, limit: int=DEFAULT_LIMIT,
random: Any=random.Random(),
compress: int=0, notakeover: bool=False) -> None:
self.protocol = protocol
self.transport = transport
self.use_mask = use_mask
self.randrange = random.randrange
self.compress = compress
self.notakeover = notakeover
self._closing = False
self._limit = limit
self._output_size = 0
self._compressobj = None # type: Any # actually compressobj
async def _send_frame(self, message: bytes, opcode: int,
compress: Optional[int]=None) -> None:
"""Send a frame over the websocket with message as its payload."""
if self._closing:
ws_logger.warning('websocket connection is closing.')
rsv = 0
# Only compress larger packets (disabled)
        # Do small packets need to be compressed?
# if self.compress and opcode < 8 and len(message) > 124:
if (compress or self.compress) and opcode < 8:
if compress:
# Do not set self._compress if compressing is for this frame
compressobj = zlib.compressobj(wbits=-compress)
else: # self.compress
if not self._compressobj:
self._compressobj = zlib.compressobj(wbits=-self.compress)
compressobj = self._compressobj
message = compressobj.compress(message)
message = message + compressobj.flush(
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH)
if message.endswith(_WS_DEFLATE_TRAILING):
message = message[:-4]
rsv = rsv | 0x40
msg_length = len(message)
use_mask = self.use_mask
if use_mask:
mask_bit = 0x80
else:
mask_bit = 0
if msg_length < 126:
header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
elif msg_length < (1 << 16):
header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
else:
header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
if use_mask:
mask = self.randrange(0, 0xffffffff)
mask = mask.to_bytes(4, 'big')
message = bytearray(message)
_websocket_mask(mask, message)
self.transport.write(header + mask + message)
self._output_size += len(header) + len(mask) + len(message)
else:
if len(message) > MSG_SIZE:
self.transport.write(header)
self.transport.write(message)
else:
self.transport.write(header + message)
self._output_size += len(header) + len(message)
if self._output_size > self._limit:
self._output_size = 0
await self.protocol._drain_helper()
async def pong(self, message: bytes=b'') -> None:
"""Send pong message."""
if isinstance(message, str):
message = message.encode('utf-8')
await self._send_frame(message, WSMsgType.PONG)
async def ping(self, message: bytes=b'') -> None:
"""Send ping message."""
if isinstance(message, str):
message = message.encode('utf-8')
await self._send_frame(message, WSMsgType.PING)
async def send(self, message: Union[str, bytes],
binary: bool=False,
compress: Optional[int]=None) -> None:
"""Send a frame over the websocket with message as its payload."""
if isinstance(message, str):
message = message.encode('utf-8')
if binary:
await self._send_frame(message, WSMsgType.BINARY, compress)
else:
await self._send_frame(message, WSMsgType.TEXT, compress)
async def close(self, code: int=1000, message: bytes=b'') -> None:
"""Close the websocket, sending the specified code and message."""
if isinstance(message, str):
message = message.encode('utf-8')
try:
await self._send_frame(
PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE)
finally:
self._closing = True
| apache-2.0 | -3,456,276,932,102,304,300 | 36.663093 | 79 | 0.505083 | false | 4.442558 | false | false | false |
iogf/vy | vyapp/plugins/range_sel.py | 1 | 2369 | """
Overview
========
This plugin implements range selection.
Key-Commands
============
Namespace: range-sel
Mode: NORMAL
Event: <Control-k>
Description: Add/remove selection one line up from the initial selection mark.
Mode: NORMAL
Event: <Control-j>
Description: Add/remove selection one line down from the initial selection mark.
Mode: NORMAL
Event: <Control-l>
Description: Add/remove selection one character right from the initial selection mark.
Mode: NORMAL
Event: <Control-h>
Description: Add/remove selection one character left from the initial selection mark.
Mode: NORMAL
Event: <Control-v>
Description: Drop a selection mark.
"""
from vyapp.app import root
class RangeSel:
def __init__(self, area):
area.install('range-sel',
('NORMAL', '<Control-k>', self.sel_up),
('NORMAL', '<Control-j>', self.sel_down),
('NORMAL', '<Control-h>', self.sel_left),
('NORMAL', '<Control-l>', self.sel_right),
('NORMAL', '<Control-v>', self.start_selection))
area.mark_set('(RANGE_SEL_MARK)', '1.0')
self.area = area
def start_selection(self, event):
"""
Start range selection.
"""
self.area.mark_set('(RANGE_SEL_MARK)', 'insert')
root.status.set_msg('Dropped selection mark.')
def sel_up(self, event):
"""
        It adds 'sel' one line up from the 'insert' position
and sets the cursor one line up.
"""
self.area.rmsel('(RANGE_SEL_MARK)', 'insert')
self.area.up()
self.area.addsel('(RANGE_SEL_MARK)', 'insert')
def sel_down(self, event):
"""
It adds or removes selection one line down.
"""
self.area.rmsel('(RANGE_SEL_MARK)', 'insert')
self.area.down()
self.area.addsel('(RANGE_SEL_MARK)', 'insert')
def sel_right(self, event):
"""
It adds or removes selection one character right.
"""
self.area.rmsel('(RANGE_SEL_MARK)', 'insert')
self.area.right()
self.area.addsel('(RANGE_SEL_MARK)', 'insert')
def sel_left(self, event):
"""
It adds or removes selection one character left.
"""
self.area.rmsel('(RANGE_SEL_MARK)', 'insert')
self.area.left()
self.area.addsel('(RANGE_SEL_MARK)', 'insert')
install = RangeSel
| mit | 2,314,433,789,206,561,000 | 23.42268 | 86 | 0.593921 | false | 3.644615 | false | false | false |
vanatteveldt/luctor | recipes/migrations/0008_auto_20160405_0044.py | 1 | 1062 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-05 00:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('recipes', '0007_auto_20160330_1637'),
]
operations = [
migrations.AlterModelOptions(
name='lesson',
options={'ordering': ['date']},
),
migrations.AddField(
model_name='lesson',
name='parsed',
field=models.TextField(null=True),
),
migrations.AlterField(
model_name='lesson',
name='date',
field=models.DateField(blank=True, null=True),
),
migrations.AlterField(
model_name='lesson',
name='problems',
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name='lesson',
name='title',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
| mit | 2,756,338,900,395,628,000 | 26.230769 | 74 | 0.541431 | false | 4.26506 | false | false | false |
jkokorian/ODMAnalysis | odmanalysis/scripts/TabulateODMAnalysisData.py | 1 | 1770 | import pandas as pd
import odmanalysis as odm
from odmanalysis import gui
import os
import argparse
def main():
    parser = argparse.ArgumentParser(description='Pivots the odmanalysis output file to produce an Excel file with all cycles in different columns')
parser.add_argument('filename',
nargs='?',
default="",
help="The odmanalysis.csv file to tabulate",
type=str)
    parser.add_argument('--split-direction','-d',
                        action='store_true',
                        help='split the actuation directions into different columns')
args = parser.parse_args()
if not os.path.isfile(args.filename):
args.filename = gui.get_path("*.csv",defaultFile="odmanalysis.csv")
commonPath = os.path.abspath(os.path.split(args.filename)[0])
df = odm.readAnalysisData(args.filename)
cycleFrames = []
keys = ['cycleNumber']
if args.split_direction == True:
keys.append('direction')
grouped = df.groupby(keys)
for keys, group in grouped:
if not hasattr(keys, '__iter__'):
keys = tuple([keys])
dfTemp = group[['actuatorVoltage','displacement']]
dfTemp = dfTemp.reset_index().drop('timestamp',axis=1)
name = 'cycle_%i' % keys[0]
for k in keys[1:]:
name += "_%s" % k
cycleFrames.append(pd.concat({name: dfTemp}, axis=1))
dfCombined = pd.concat(cycleFrames,axis=1)
dfCombined.to_excel(os.path.join(commonPath,'odmanalysis_tabulated.xlsx'),index=False)
print os.path.join(commonPath,'odmanalysis_tabulated.xlsx')
if __name__=="__main__":
main()
| gpl-3.0 | 2,002,328,115,349,419,500 | 32.415094 | 149 | 0.580791 | false | 3.986486 | false | false | false |
ASMlover/study | python/coroutines/trampoline.py | 1 | 1796 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2020 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from typing import Generator, Union
Numeric = Union[int, float]
def add(x: Numeric, y: Numeric) -> Generator[Numeric, None, None]:
yield x + y
def main() -> Generator[Numeric, None, None]:
r = yield add(2, 2)
print(f"result is: {r}")
yield
def run() -> None:
    # Drive the trampoline by hand: prime main(), receive the sub-generator it
    # yields, run the sub-generator to get its value, then feed that value back
    # into main() so it can resume at its `yield`.
    m = main()
    sub = m.send(None)       # advance to `yield add(2, 2)`, capturing the sub-generator
    result = sub.send(None)  # run add(); it yields 2 + 2
    m.send(result)           # resume main() with the computed result
if __name__ == '__main__':
run()
| bsd-2-clause | 4,227,577,050,536,624,000 | 34.92 | 70 | 0.727728 | false | 4.054176 | false | false | false |
solvire/ordo_electro | ordo_electro/config/common.py | 1 | 11527 | # -*- coding: utf-8 -*-
"""
Django settings for ordo_electro project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os.path import join, dirname, expanduser
from configurations import Configuration, values
from ConfigParser import RawConfigParser
BASE_DIR = dirname(dirname(__file__))
## for connecting to mongo
from mongoengine import connect
class Common(Configuration):
# APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
'avatar', # for user avatars
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
'twython', #twython twitter API app
'rest_framework', # for the django rest framework
'mongoengine',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'users', # custom users app
# Your stuff: custom apps go here
'social', #tools for managing social content
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# END APP CONFIGURATION
# MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'djangosecure.middleware.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# END MIDDLEWARE CONFIGURATION
# MIGRATIONS CONFIGURATION
MIGRATION_MODULES = {
'sites': 'contrib.sites.migrations'
}
# END MIGRATIONS CONFIGURATION
# DEBUG
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = values.BooleanValue(True)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
# END DEBUG
# SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
# In production, this is changed to a values.SecretValue() setting
SECRET_KEY = 'CHANGEME!!!'
# END SECRET CONFIGURATION
# FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
join(BASE_DIR, 'fixtures'),
)
# END FIXTURE CONFIGURATION
# EMAIL CONFIGURATION
#EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
EMAIL_BACKEND = values.Value('django.core.mail.backends.console.EmailBackend')
# END EMAIL CONFIGURATION
# MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""Ordo""", '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# END MANAGER CONFIGURATION
# DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = values.DatabaseURLValue('mysql://root:@localhost/ordo_electro')
# END DATABASE CONFIGURATION
# CACHING
# Do this here because thanks to django-pylibmc-sasl and pylibmc
# memcacheify (used on heroku) is painful to install on windows.
CACHES = {
'default': {
# 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
# 'LOCATION': ''
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/0",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
}
}
# END CACHING
# GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# END GENERAL CONFIGURATION
# TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'allauth.account.context_processors.account',
'allauth.socialaccount.context_processors.socialaccount',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
# Your stuff: custom template context processers go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# END TEMPLATE CONFIGURATION
# STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'static')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# END STATIC FILE CONFIGURATION
# MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# END MEDIA CONFIGURATION
# URL Configuration
ROOT_URLCONF = 'urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'wsgi.application'
# End URL Configuration
# AUTHENTICATION CONFIGURATION
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# END AUTHENTICATION CONFIGURATION
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'users:redirect'
LOGIN_URL = 'account_login'
# END Custom user app defaults
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# END SLUGLIFIER
# LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'DEBUG',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': '/tmp/python.log',
'formatter': 'verbose',
},
},
'formatters': {
'verbose': {
'format' : "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
'datefmt' : "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'loggers': {
'django': {
'handlers': ['mail_admins','file'],
'level': 'DEBUG',
'propagate': True,
},
}
}
# LOGGING = {
# 'version': 1,
# 'disable_existing_loggers': False,
# 'filters': {
# 'require_debug_false': {
# '()': 'django.utils.log.RequireDebugFalse'
# }
# },
# 'handlers': {
# 'file': {
# 'level': 'DEBUG',
# 'class': 'logging.FileHandler',
# 'filename': '/tmp/python.log',
# },
# },
# 'loggers': {
# 'django.request': {
# 'handlers': ['file'],
# 'level': 'DEBUG',
# 'propagate': True,
# },
# },
# }
# END LOGGING CONFIGURATION
@classmethod
def post_setup(cls):
cls.DATABASES['default']['ATOMIC_REQUESTS'] = True
# Your common stuff: Below this line define 3rd party library settings
#### get the users personal settings
config = RawConfigParser()
config.read(expanduser('~') + '/.ordo_electro/settings.ini')
TWITTER_KEY = config.get('secrets', 'TWITTER_KEY')
TWITTER_SECRET = config.get('secrets', 'TWITTER_SECRET')
OAUTH_TOKEN = config.get('secrets', 'OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = config.get('secrets', 'OAUTH_TOKEN_SECRET')
LOGOUT_URL='account_logout'
LOGOUT_REDIRECT_URL='/'
# DRF
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
'PAGINATE_BY': 10
}
connect('ordo_electro') | bsd-3-clause | 962,781,622,429,351,200 | 32.221902 | 102 | 0.620543 | false | 3.847463 | true | false | false |
starcraftman/python-client | neovim/api/tabpage.py | 5 | 1031 | """API for working with Nvim tabpages."""
from .common import Remote, RemoteMap, RemoteSequence
__all__ = ('Tabpage',)
class Tabpage(Remote):
"""A remote Nvim tabpage."""
def __init__(self, session, code_data):
"""Initialize from session and code_data immutable object.
The `code_data` contains serialization information required for
msgpack-rpc calls. It must be immutable for Tabpage equality to work.
"""
self._session = session
self.code_data = code_data
self.windows = RemoteSequence(session, 'tabpage_get_windows', self)
self.vars = RemoteMap(session, 'tabpage_get_var', 'tabpage_set_var',
self)
@property
def window(self):
"""Get the `Window` currently focused on the tabpage."""
return self._session.request('tabpage_get_window', self)
@property
def valid(self):
"""Return True if the tabpage still exists."""
return self._session.request('tabpage_is_valid', self)
| apache-2.0 | 6,166,943,724,153,261,000 | 31.21875 | 77 | 0.628516 | false | 4.124 | false | false | false |
trailofbits/manticore | docs/conf.py | 1 | 5587 | # -*- coding: utf-8 -*-
#
# Manticore documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 10 18:04:51 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = "1.0"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinx.ext.autodoc"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "Manticore"
copyright = "2019, Trail of Bits"
author = "Trail of Bits"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "0.3.6"
# The full version, including alpha/beta/rc tags.
release = "0.3.6"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "Manticoredoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "Manticore.tex", "Manticore Documentation", "Trail of Bits", "manual")
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "manticore", "Manticore Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"Manticore",
"Manticore Documentation",
author,
"Manticore",
"One line description of project.",
"Miscellaneous",
)
]
# -- Custom
# our setup.py does not install z3-solver when on rtd because
# for some reason z3 is built during the dep install process,
# and rtd ooms (something related to `python setup.py --force`)
# so because z3-solver is not installed as a dep, but
# rtd still does `import manticore`, we need to mock the environment
# enough to make Manticore importable. specifically, we need to mock
# things so a Z3Solver can be constructed.
import subprocess
class MockZ3Fd:
def readline(self, *args, **kwargs):
return '(:version "4.5.1")\n'
def flush(self, *args, **kwargs):
return
def write(self, *args, **kwargs):
return
class MockPopen:
def __init__(self, *args, **kwargs):
self.stdout = MockZ3Fd()
self.stdin = MockZ3Fd()
subprocess.Popen = MockPopen
| agpl-3.0 | 7,054,512,338,096,493,000 | 28.877005 | 87 | 0.669948 | false | 3.855763 | true | false | false |
galad-loth/LearnDescriptor | symbols/metric_net.py | 1 | 1866 | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 14 09:35:33 2017
@author: galad-loth
"""
import mxnet as mx
import feat_net
def metric_net_2ch():
    """2-channel network: both patches stacked as channels of one input and scored by FC layers."""
    data = mx.sym.Variable("data")
conv_weight = []
conv_bias = []
for i in range(3):
conv_weight.append(mx.sym.Variable('conv' + str(i) + '_weight'))
conv_bias.append(mx.sym.Variable('conv' + str(i) + '_bias'))
conv_res= feat_net.featnet1(data,conv_weight, conv_bias,"")
conv_res = mx.sym.Flatten(data=conv_res)
net = mx.sym.FullyConnected(data=conv_res,num_hidden=256, name="fc1")
net = mx.sym.Activation(data=net, act_type="relu", name="relu3")
net = mx.sym.FullyConnected(data=net,num_hidden=1, name="fc2")
return net
def metric_net_2ch_cs():
    """2-channel, two-stream variant: "datas"/"datac" inputs go through separate conv stacks, then are concatenated and scored."""
    datas= mx.sym.Variable("datas")
    datac= mx.sym.Variable("datac")
conv_weight_s = []
conv_bias_s = []
conv_weight_c = []
conv_bias_c = []
for i in range(4):
conv_weight_s.append(mx.sym.Variable('conv' + str(i) + '_weight_s'))
conv_bias_s.append(mx.sym.Variable('conv' + str(i) + '_bias_s'))
conv_weight_c.append(mx.sym.Variable('conv' + str(i) + '_weight_c'))
conv_bias_c.append(mx.sym.Variable('conv' + str(i) + '_bias_c'))
conv_res_s=feat_net.featnet2(datas,conv_weight_s, conv_bias_s,"bs_")
conv_res_c=feat_net.featnet2(datac,conv_weight_c, conv_bias_c,"bc_")
conv_res=mx.sym.Concat(conv_res_s,conv_res_c,dim=1, name='conv_res')
net = mx.sym.FullyConnected(data=conv_res,num_hidden=768, name="fc1")
net = mx.sym.Activation(data=net, act_type="relu", name="relu1")
net = mx.sym.FullyConnected(data=net,num_hidden=1, name="fc2")
return net
if __name__=="__main__":
net=metric_net_2ch_cs()
ex=net.simple_bind(ctx=mx.cpu(), datas=(50,2,64,64),datac=(50,2,64,64))
| apache-2.0 | -1,251,829,865,832,214,800 | 36.916667 | 79 | 0.598071 | false | 2.708273 | false | false | false |
anyonedev/anyonedev-monitor-agent | monitor/metrics/linux/dir_stat.py | 1 | 3290 | '''
Monitor Linux directory changes (an observable monitor source)
Create on : 2014-12-23
Author:liangzonghua
'''
from core.MonitorSource import ObserableMonitorSource
from core.MetricValue import MultiMetricValue
from core import regist_monitor_source
from utils.Logger import info
import os
import pyinotify
class DiectoryChangleHanlder(pyinotify.ProcessEvent):
_monitor_source = None
def __init__(self,ms):
self._monitor_source = ms
def process_default(self,event):
f = event.pathname
info(event.maskname+":"+f)
metricValue = MultiMetricValue(self._monitor_source.getMonitorSourceName())
metricValue.addMetricValue("type", event.maskname)
metricValue.addMetricValue("monitor_path",event.path)
metricValue.addMetricValue("monitor_file",event.file)
metricValue.addMetricValue("is_directory",event.dir)
if hasattr(event, "src_pathname"):
metricValue.addMetricValue("src_pathname",event.src_pathname)
if os.path.exists(f):
metricValue.addMetricValue("atime", os.path.getatime(f))
metricValue.addMetricValue("ctime", os.path.getctime(f))
metricValue.addMetricValue("mtime", os.path.getmtime(f))
metricValue.addMetricValue("size", os.path.getsize(f))
self._monitor_source.notify(metricValue)
DEFAULT_MONITOR_MASK = pyinotify.IN_DELETE | pyinotify.IN_CREATE | pyinotify.IN_MODIFY | pyinotify.IN_MOVED_TO | pyinotify.IN_MOVED_FROM
class LiuxDirectoryMonitorSource(ObserableMonitorSource):
_monitor_dir = "."
_monitor_mask= DEFAULT_MONITOR_MASK
_is_rec = True
def __init__(self,monitorSourceName, directory=None, mask=DEFAULT_MONITOR_MASK, rec = True):
self.monitorSourceName(monitorSourceName)
        if directory is not None:
self._monitor_dir = directory
if mask != None:
self._monitor_mask = mask
self._is_rec = rec
def start(self):
ObserableMonitorSource.start(self)
if not os.path.exists(self._monitor_dir):
os.makedirs(self._monitor_dir,exist_ok=True)
info("create monitor dir:%s"%(self._monitor_dir))
wm= pyinotify.WatchManager()
eventHandler= DiectoryChangleHanlder(self)
self._notifier= pyinotify.Notifier(wm, eventHandler)
wm.add_watch(self._monitor_dir, self._monitor_mask,self._is_rec)
info('now starting monitor: %s'%(self._monitor_dir))
while True:
try:
if self._notifier.is_alive():
self._notifier.process_events()
if self._notifier.check_events():
self._notifier.read_events()
else:
break
except KeyboardInterrupt:
self.stop()
break
def stop(self):
if hasattr(self, "_notifier") and self._notifier != None:
self._notifier.stop()
ObserableMonitorSource.stop(self)
def linux_dir_stat(name="linux_dir_stat",monitor_dir=".",rec=True,mask=DEFAULT_MONITOR_MASK):
monitorSource = LiuxDirectoryMonitorSource(monitorSourceName=name,directory=monitor_dir,mask=mask,rec=rec)
regist_monitor_source(monitorSource)
return monitorSource | gpl-2.0 | -28,914,935,067,256,016 | 40.303797 | 140 | 0.64531 | false | 3.745121 | false | false | false |
kpanic/lymph | iris/core/events.py | 1 | 1907 | import functools
import re
class Event(object):
def __init__(self, evt_type, body, source=None):
self.evt_type = evt_type
self.body = body
self.source = source
def __getitem__(self, key):
return self.body[key]
def __iter__(self):
return iter(self.body)
@classmethod
def deserialize(cls, data):
return cls(data.get('type'), data.get('body', {}), source=data.get('source'))
def serialize(self):
return {
'type': self.evt_type,
'body': self.body,
'source': self.source,
}
class EventDispatcher(object):
    # AMQP-style routing-key wildcards: '#' matches zero or more dot-separated
    # words, '*' matches exactly one word.
    wildcards = {
'#': r'[\w.]*(?=\.|$)',
'*': r'\w+',
}
def __init__(self, patterns=()):
self.patterns = list(patterns)
def compile(self, key):
words = (self.wildcards.get(word, re.escape(word)) for word in key.split('.'))
return re.compile('^%s$' % r'\.'.join(words))
def register(self, pattern, handler):
self.patterns.append((
self.compile(pattern),
pattern,
handler,
))
def __iter__(self):
for regex, pattern, handler in self.patterns:
yield pattern, handler
def update(self, other):
for pattern, handler in other:
self.register(pattern, handler)
def dispatch(self, evt_type):
for regex, pattern, handler in self.patterns:
if regex.match(evt_type):
yield pattern, handler
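    # Usage sketch (comments only): patterns follow the routing-key style
    # compiled above, and matching handlers are invoked as handler(obj, event).
    # `on_user_event` and `service` are placeholders for whatever callable and
    # owner object the caller wires in:
    #
    #   dispatcher = EventDispatcher()
    #   dispatcher.register('user.#', on_user_event)
    #   dispatcher(service, Event('user.created', {'id': 1}))  # -> on_user_event runs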
def __call__(self, obj, event):
handlers = set()
for pattern, handler in self.dispatch(event.evt_type):
if handler not in handlers:
handlers.add(handler)
handler(obj, event)
return bool(handlers)
def __get__(self, obj, cls):
if obj is None:
return self
return functools.partial(self, obj)
| apache-2.0 | -3,250,280,866,492,313,000 | 24.77027 | 86 | 0.538542 | false | 4.109914 | false | false | false |
szatanszmatan/myrdp | app/database/schema.py | 1 | 1109 | # -*- coding: utf-8 -*-
from sqlalchemy import Column, Integer, String
from sqlalchemy.schema import ForeignKey
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class HostTable(Base):
__tablename__ = 'hosts'
id = Column(Integer, primary_key=True)
name = Column(String, unique=True, nullable=False)
address = Column(String, nullable=False)
user = Column(String)
password = Column(String)
group = Column(Integer, ForeignKey('groups.id'), nullable=True)
def __repr__(self):
return "<name='%s', address='%s', user='%s', password='%s'>" \
% (self.name, self.address, self.user, self.password)
class GroupsTable(Base):
__tablename__ = 'groups'
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False, unique=True)
default_user_name = Column(String)
default_password = Column(String)
def __repr__(self):
return "<id='%s', name='%s', default_user_name='%s', default_password='%s'>" \
% (self.id, self.name, self.default_user_name, self.default_password) | gpl-2.0 | 6,143,549,514,950,739,000 | 31.647059 | 86 | 0.64743 | false | 3.696667 | false | false | false |
lukauskas/mMass-fork | mspy/mod_signal.py | 2 | 20335 | # -------------------------------------------------------------------------
# Copyright (C) 2005-2013 Martin Strohalm <www.mmass.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# Complete text of GNU GPL can be found in the file LICENSE.TXT in the
# main directory of the program.
# -------------------------------------------------------------------------
# load libs
import numpy
# load stopper
from mod_stopper import CHECK_FORCE_QUIT
# load modules
import calculations
# SIGNAL PROCESSING FUNCTIONS
# ---------------------------
def locate(signal, x):
"""Find nearest higher index of searched x-value.
signal (numpy array) - signal data points
x (float) - x value
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return 0
# locate x
return calculations.signal_locate_x(signal, float(x))
# ----
def basepeak(signal):
"""Locate highest y-value in signal. Point index is returned.
signal (numpy array) - signal data points
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
raise ValueError, "Signal contains no data!"
    # locate highest y-value
return calculations.signal_locate_max_y(signal)
# ----
def interpolate(p1, p2, x=None, y=None):
"""Calculates inner point between two points by linear interpolation.
p1 (tuple of floats) - point 1
p2 (tuple of floats) - point 2
x (float) - x value (to interpolate y)
y (float) - y value (to interpolate x)
"""
# interpolate y point
if x != None:
return calculations.signal_interpolate_y(float(p1[0]), float(p1[1]), float(p2[0]), float(p2[1]), float(x))
# interpolate x point
elif y != None:
return calculations.signal_interpolate_x(float(p1[0]), float(p1[1]), float(p2[0]), float(p2[1]), float(y))
# no value
else:
raise ValueError, "No x/y value provided for interpolation!"
# ----
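# Worked example (comments only): between p1=(10.0, 0.0) and p2=(20.0, 100.0)
# the fitted line has slope 10, so
#
#   interpolate((10.0, 0.0), (20.0, 100.0), x=12.5)   # -> 25.0
#   interpolate((10.0, 0.0), (20.0, 100.0), y=50.0)   # -> 15.0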
def boundaries(signal):
"""Calculates signal minima and maxima as (minX, minY, maxX, maxY).
signal (numpy array) - signal data points
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
raise ValueError, "Signal contains no data!"
# calculate boundaries
return calculations.signal_box(signal)
# ----
def maxima(signal):
"""Find local maxima in signal.
signal (numpy array) - signal data points
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return numpy.array([])
    # determine local maxima
return calculations.signal_local_maxima(signal)
# ----
def intensity(signal, x):
"""Find corresponding y-value for searched x-value.
signal (numpy array) - signal data points
x (float) - x-value
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
raise ValueError, "Signal contains no data!"
# determine intensity
return calculations.signal_intensity(signal, float(x))
# ----
def centroid(signal, x, height):
"""Find peak centroid for searched x-value measured at y-value.
signal (numpy array) - signal data points
x (float) - x-value
height (float) - y-value for width determination
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
raise ValueError, "Signal contains no data!"
# determine centroid
return calculations.signal_centroid(signal, float(x), float(height))
# ----
def width(signal, x, height):
"""Find peak width for searched x-value measured at y-value.
signal (numpy array) - signal data points
x (float) - x-value
height (float) - y-value for width determination
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
raise ValueError, "Signal contains no data!"
# determine width
return calculations.signal_width(signal, float(x), float(height))
# ----
def area(signal, minX=None, maxX=None, baseline=None):
"""Return area under signal curve.
signal (numpy array) - signal data points
minX (float) - starting m/z value
maxX (float) - ending m/z value
baseline (numpy array) - signal baseline
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check baseline type
    if baseline is not None:
if not isinstance(baseline, numpy.ndarray):
raise TypeError, "Baseline must be NumPy array!"
if baseline.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return 0.0
# check range
if minX != None and maxX != None and minX == maxX:
return 0.0
# crop data
if minX != None and maxX != None:
signal = crop(signal, minX, maxX)
# subtract baseline
    if baseline is not None:
signal = subbase(signal, baseline)
# calculate area
return calculations.signal_area(signal)
# ----
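# Usage sketch (comments only; `spectrum` stands for any Nx2 float64 numpy
# array of (m/z, intensity) points, which is what all functions here expect):
#
#   bl = baseline(spectrum, window=0.1)                     # defined below
#   a = area(spectrum, minX=500., maxX=520., baseline=bl)   # baseline-corrected area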
def noise(signal, minX=None, maxX=None, x=None, window=0.1):
"""Calculates signal noise level and width.
signal (numpy array) - signal data points
minX, maxX (float) - x-axis range to use for calculation
x (float) - x-value for which to calculate the noise +- window
window (float) - x-axis range used for calculation, relative to given x (in %/100)
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return (0.0, 0.0)
# use specified signal range
if minX != None and maxX != None:
i1 = locate(signal, minX)
i2 = locate(signal, maxX)
# use specified x +- window
elif x != None and window != None:
window = x*window
i1 = locate(signal, x-window)
i2 = locate(signal, x+window)
# use whole signal range
else:
i1 = 0
i2 = len(signal)
# get data from signal
signal = signal[i1:i2]
# check signal data
if len(signal) == 0:
return (0.0, 0.0)
# calculate noise
return calculations.signal_noise(signal)
# ----
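# Usage sketch (comments only): the (level, width) noise estimate can be taken
# over the whole signal, over an explicit m/z range, or around a single point
# with a window given relative to x:
#
#   level, width = noise(spectrum)                         # whole signal
#   level, width = noise(spectrum, minX=400., maxX=450.)   # explicit range
#   level, width = noise(spectrum, x=422.1, window=0.05)   # 422.1 +- 5%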
def baseline(signal, window=0.1, offset=0.):
"""Return baseline data.
signal (numpy array) - signal data points
window (float or None) - noise calculation window (%/100)
offset (float) - baseline offset, relative to noise width (in %/100)
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
raise ValueError, "Signal contains no data!"
# single segment baseline
if window == None:
noiseLevel, noiseWidth = noise(signal)
noiseLevel -= noiseWidth*offset
return numpy.array([ [signal[0][0], noiseLevel, noiseWidth], [signal[-1][0], noiseLevel, noiseWidth] ])
# make raster
raster = []
minimum = max(0, signal[0][0])
x = signal[-1][0]
while x > minimum:
raster.append(x)
x -= max(50, x*window)
raster.append(minimum)
raster.sort()
# calc baseline data
levels = []
widths = []
for i, x in enumerate(raster):
i1 = locate(signal, x-x*window)
i2 = locate(signal, x+x*window)
if i1 == i2:
noiseLevel = signal[i1][1]
noiseWidth = 0.0
else:
noiseLevel, noiseWidth = noise(signal[i1:i2])
levels.append([x, noiseLevel])
widths.append([x, noiseWidth])
# smooth baseline data
swindow = 5 * window * (signal[-1][0] - signal[0][0])
levels = smooth(numpy.array(levels), 'GA', swindow, 2)
widths = smooth(numpy.array(widths), 'GA', swindow, 2)
# make baseline and apply offset
buff = []
for i, x in enumerate(raster):
width = abs(widths[i][1])
level = max(0, levels[i][1] - width*offset)
buff.append([x, level, width])
return numpy.array(buff)
# ----
def crop(signal, minX, maxX):
"""Crop signal to given x-range. New array is returned.
signal (numpy array) - signal data points
minX (float) - minimum x-value
maxX (float) - maximum x-value
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
# check limits
if minX > maxX:
minX, maxX = maxX, minX
# check signal data
if len(signal) == 0 or signal[-1][0] < minX or signal[0][0] > maxX:
return numpy.array([])
# crop data
return calculations.signal_crop(signal, float(minX), float(maxX))
# ----
def offset(signal, x=0.0, y=0.0):
"""Shift signal by offset. New array is returned.
signal (numpy array) - signal data points
x (float) - x-axis offset
y (float) - y-axis offset
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return numpy.array([])
# offset signal
return calculations.signal_offset(signal, float(x), float(y))
# ----
def multiply(signal, x=1.0, y=1.0):
"""Multiply signal values by factor. New array is returned.
signal (numpy array) - signal data points
x (float) - x-axis multiplicator
y (float) - y-axis multiplicator
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return numpy.array([])
# multiply signal
return calculations.signal_multiply(signal, float(x), float(y))
# ----
def normalize(signal):
"""Normalize y-values of the signal to max 1. New array is returned.
signal (numpy array) - signal data points
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return numpy.array([])
    # normalize signal
return calculations.signal_normalize(signal)
# ----
def smooth(signal, method, window, cycles=1):
"""Smooth signal by moving average filter. New array is returned.
signal (numpy array) - signal data points
method (MA GA SG) - smoothing method: MA - moving average, GA - Gaussian, SG - Savitzky-Golay
window (float) - m/z window size for smoothing
cycles (int) - number of repeating cycles
"""
# check signal type
if not isinstance(signal, numpy.ndarray):
raise TypeError, "Signal must be NumPy array!"
if signal.dtype.name != 'float64':
raise TypeError, "Signal data must be float64!"
# check signal data
if len(signal) == 0:
return numpy.array([])
# apply moving average filter
if method == 'MA':
return movaver(signal, window, cycles, style='flat')
# apply gaussian filter
elif method == 'GA':
return movaver(signal, window, cycles, style='gaussian')
# apply savitzky-golay filter
elif method == 'SG':
return savgol(signal, window, cycles)
# unknown smoothing method
else:
raise KeyError, "Unknown smoothing method! -->", method
# ----
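# Usage sketch (comments only): `window` is given in m/z units (not points) and
# the method code selects one of the filters implemented below:
#
#   smoothed = smooth(spectrum, 'MA', window=0.25)             # moving average
#   smoothed = smooth(spectrum, 'GA', window=0.25)             # Gaussian
#   smoothed = smooth(spectrum, 'SG', window=0.25, cycles=2)   # Savitzky-Golay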
def movaver(signal, window, cycles=1, style='flat'):
"""Smooth signal by moving average filter. New array is returned.
signal (numpy array) - signal data points
window (float) - m/z window size for smoothing
cycles (int) - number of repeating cycles
"""
# approximate number of points within window
window = int(window*len(signal)/(signal[-1][0]-signal[0][0]))
window = min(window, len(signal))
if window < 3:
return signal.copy()
if not window % 2:
window -= 1
# unpack mz and intensity
xAxis, yAxis = numpy.hsplit(signal,2)
xAxis = xAxis.flatten()
yAxis = yAxis.flatten()
# smooth the points
while cycles:
CHECK_FORCE_QUIT()
if style == 'flat':
w = numpy.ones(window,'f')
elif style == 'gaussian':
r = numpy.array([(i-(window-1)/2.) for i in range(window)])
w = numpy.exp(-(r**2/(window/4.)**2))
else:
w = eval('numpy.'+style+'(window)')
s = numpy.r_[yAxis[window-1:0:-1], yAxis, yAxis[-2:-window-1:-1]]
y = numpy.convolve(w/w.sum(), s, mode='same')
yAxis = y[window-1:-window+1]
cycles -=1
# return smoothed data
xAxis.shape = (-1,1)
yAxis.shape = (-1,1)
data = numpy.concatenate((xAxis,yAxis), axis=1)
return data.copy()
# ----
def savgol(signal, window, cycles=1, order=3):
"""Smooth signal by Savitzky-Golay filter. New array is returned.
signal (numpy array) - signal data points
window (float) - m/z window size for smoothing
cycles (int) - number of repeating cycles
order (int) - order of polynom used
"""
# approximate number of points within window
window = int(window*len(signal)/(signal[-1][0]-signal[0][0]))
if window <= order:
return signal.copy()
# unpack axes
xAxis, yAxis = numpy.hsplit(signal,2)
yAxis = yAxis.flatten()
# coeficients
orderRange = range(order+1)
halfWindow = (window-1) // 2
b = numpy.mat([[k**i for i in orderRange] for k in range(-halfWindow, halfWindow+1)])
m = numpy.linalg.pinv(b).A[0]
window = len(m)
halfWindow = (window-1) // 2
# precompute the offset values for better performance
offsets = range(-halfWindow, halfWindow+1)
offsetData = zip(offsets, m)
# smooth the data
while cycles:
smoothData = list()
yAxis = numpy.concatenate((numpy.zeros(halfWindow)+yAxis[0], yAxis, numpy.zeros(halfWindow)+yAxis[-1]))
for i in range(halfWindow, len(yAxis) - halfWindow):
CHECK_FORCE_QUIT()
value = 0.0
for offset, weight in offsetData:
value += weight * yAxis[i + offset]
smoothData.append(value)
yAxis = smoothData
cycles -=1
# return smoothed data
yAxis = numpy.array(yAxis)
yAxis.shape = (-1,1)
data = numpy.concatenate((xAxis,yAxis), axis=1)
return data.copy()
# ----
def combine(signalA, signalB):
"""Unify x-raster and combine two arrays (y=yA+yB). New array is returned.
signalA (numpy array) - signal A data points
signalB (numpy array) - signal B data points
"""
# check signal type
if not isinstance(signalA, numpy.ndarray) or not isinstance(signalB, numpy.ndarray):
raise TypeError, "Signals must be NumPy arrays!"
if signalA.dtype.name != 'float64' or signalB.dtype.name != 'float64':
raise TypeError, "Signals data must be float64!"
# check signal data
if len(signalA) == 0 and len(signalB) == 0:
return numpy.array([])
# subtract signals
return calculations.signal_combine(signalA, signalB)
# ----
def overlay(signalA, signalB):
"""Unify x-raster and overlay two arrays (y=max(yA,yB)). New array is returned.
signalA (numpy array) - signal A data points
signalB (numpy array) - signal B data points
"""
# check signal type
if not isinstance(signalA, numpy.ndarray) or not isinstance(signalB, numpy.ndarray):
raise TypeError, "Signals must be NumPy arrays!"
if signalA.dtype.name != 'float64' or signalB.dtype.name != 'float64':
raise TypeError, "Signals data must be float64!"
# check signal data
if len(signalA) == 0 and len(signalB) == 0:
return numpy.array([])
# subtract signals
return calculations.signal_overlay(signalA, signalB)
# ----
def subtract(signalA, signalB):
"""Unify x-raster and subtract two arrays (y=yA-yB). New array is returned.
signalA (numpy array) - signal A data points
signalB (numpy array) - signal B data points
"""
# check signal type
if not isinstance(signalA, numpy.ndarray) or not isinstance(signalB, numpy.ndarray):
raise TypeError, "Signals must be NumPy arrays!"
if signalA.dtype.name != 'float64' or signalB.dtype.name != 'float64':
raise TypeError, "Signals data must be float64!"
# check signal data
if len(signalA) == 0 and len(signalB) == 0:
return numpy.array([])
# subtract signals
return calculations.signal_subtract(signalA, signalB)
# ----
def subbase(signal, baseline):
"""Subtract baseline from signal withou chaning x-raster. New array is returned.
signal (numpy array) - signal data points
baseline (numpy array) - baseline data points
"""
# check signal type
if not isinstance(signal, numpy.ndarray) or not isinstance(baseline, numpy.ndarray):
raise TypeError, "Signals must be NumPy arrays!"
if signal.dtype.name != 'float64' or baseline.dtype.name != 'float64':
raise TypeError, "Signals data must be float64!"
# check signal data
if len(signal) == 0:
return numpy.array([])
# check baseline data
if len(baseline) == 0:
return signal.copy()
# check baseline shape
if baseline.shape[1] > 2:
baseline = numpy.hsplit(baseline, (2,6))[0].copy()
# subtract signals
return calculations.signal_subbase(signal, baseline)
# ----
| gpl-3.0 | 36,186,215,712,887,930 | 29.487256 | 114 | 0.603295 | false | 3.948544 | false | false | false |
reydelamirienda/nuevo | lib/nuevo/drivers/neo4j/rest.py | 1 | 3350 |
import logging
log = logging.getLogger(__name__)
import requests, json
from nuevo.drivers.neo4j.commands import JSONCommandEncoder, Neo4jBatchedCommand
from nuevo.drivers.neo4j.content import Neo4jContent
from nuevo.core.exceptions import NuevoException
class RESTException(NuevoException):
def __init__(self, message, status):
self.status = status
super(RESTException, self).__init__(message)
class Neo4jAtomicREST(object):
def __init__(self, base_url):
self.session = requests.session(
headers = {
'Accept':'application/json',
'Content-Type':'application/json'
}
)
self.base_url = base_url.rstrip('/')
def execute(self, cmd, resp_cls=Neo4jContent):
log.debug("EXEC: %s", cmd)
response = self.send(cmd)
if response is not None:
return resp_cls(cid=None, response=response)
else:
return None
def send(self, cmd):
url = self.base_url + cmd.resource
data = json.dumps(cmd, cls=JSONCommandEncoder)
log.debug("SEND: %s %s %s", cmd.method, url, data)
try:
resp = self.session.request(cmd.method, url, data=data)
code = resp.status_code
cont = resp.content
resp.raise_for_status()
if cont:
cont = json.loads(cont)
else:
cont = None
log.debug("RECV: %s %s", code, cont)
return cont
except requests.exceptions.HTTPError as ex:
raise RESTException(cont, code)
class Neo4jBatchedREST(object):
def __init__(self, base_url):
self.session = requests.session(
headers = {
'Accept':'application/json',
'Content-Type':'application/json'
}
)
self.base_url = base_url.rstrip('/')
self._cid = 0
self.batch = []
self.futures = []
@property
def next_cid(self):
_cid = self._cid
self._cid = self._cid + 1
return _cid
def execute(self, cmd, resp_cls=Neo4jContent):
cid = self.next_cid
cmd = Neo4jBatchedCommand(cmd, cid)
fut = resp_cls(cid=cid, response=None)
self.batch.append(cmd)
self.futures.append(fut)
return fut
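    # Usage sketch (comments only; command objects come from
    # nuevo.drivers.neo4j.commands, and `create_node_cmd` is a hypothetical
    # instance exposing the resource/method interface used above):
    #
    #   rest = Neo4jBatchedREST("http://localhost:7474/db/data")
    #   fut = rest.execute(create_node_cmd)   # unmaterialized Neo4jContent
    #   rest.flush()                          # POSTs the batch, fills in the futures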
def flush(self):
url = "%s/batch" % self.base_url
data = json.dumps(self.batch, cls=JSONCommandEncoder)
log.debug("SEND: %s", data)
resp = self.session.request("POST", url, data=data)
try:
resp.raise_for_status()
responses = json.loads(resp.content)
log.debug("RECV: %s", responses)
self.materialize(responses)
self._cid = 0
self.batch = []
self.futures = []
        except requests.HTTPError:
            try:
                c = json.loads(resp.content)
            except ValueError:
                c = {}
            msg = c.get('message') or c.get('exception') or resp.content
            raise RESTException(msg, resp.status_code)
def materialize(self, responses):
for fut, response in zip(self.futures, responses):
fut.__materialize__(response['body']) | bsd-3-clause | 8,176,684,758,473,357,000 | 27.641026 | 80 | 0.53194 | false | 4.031288 | false | false | false |
aaronkaplan/intelmq-beta | intelmq/bots/parsers/arbor/parser.py | 1 | 1345 | from intelmq.lib.bot import Bot, sys
from intelmq.lib.message import Event
from intelmq.lib.harmonization import DateTime
from intelmq.lib import utils
class ArborParserBot(Bot):
def process(self):
report = self.receive_message()
if not report.contains("raw"):
self.acknowledge_message()
raw_report = utils.base64_decode(report.value("raw"))
for row in raw_report.split('\n'):
row = row.strip()
if len(row) == 0 or row.startswith('other'):
continue
event = Event()
time_observation = DateTime().generate_datetime_now()
event.add('time.observation', time_observation, sanitize=True)
event.add('feed.name', u'arbor')
event.add('feed.url', u'http://atlas-public.ec2.arbor.net/public/ssh_attackers')
event.add('classification.type', u'brute-force')
event.add("raw", row, sanitize=True)
columns = ["source.ip"]
row = row.split()
for key, value in zip(columns, row):
event.add(key, value, sanitize=True)
self.send_message(event)
self.acknowledge_message()
if __name__ == "__main__":
bot = ArborParserBot(sys.argv[1])
bot.start()
| agpl-3.0 | 77,732,868,448,493,170 | 30.02381 | 92 | 0.560595 | false | 3.97929 | false | false | false |
Hotpirsch/tbot | Metar.py | 1 | 45637 | #!/usr/bin/env python
#
# A python package for interpreting METAR and SPECI weather reports.
#
# US conventions for METAR/SPECI reports are described in chapter 12 of
# the Federal Meteorological Handbook No.1. (FMH-1 1995), issued by NOAA.
# See <http://metar.noaa.gov/>
#
# International conventions for the METAR and SPECI codes are specified in
# the WMO Manual on Codes, vol I.1, Part A (WMO-306 I.i.A).
#
# This module handles a reports that follow the US conventions, as well
# the more general encodings in the WMO spec. Other regional conventions
# are not supported at present.
#
# The current METAR report for a given station is available at the URL
# http://weather.noaa.gov/pub/data/observations/metar/stations/<station>.TXT
# where <station> is the four-letter ICAO station code.
#
# The METAR reports for all reporting stations for any "cycle" (i.e., hour)
# in the last 24 hours is available in a single file at the URL
# http://weather.noaa.gov/pub/data/observations/metar/cycles/<cycle>Z.TXT
# where <cycle> is a 2-digit cycle number (e.g., "00", "05" or "23").
#
# Copyright 2004 Tom Pollard
#
"""
This module defines the Metar class. A Metar object represents the weather report encoded by a single METAR code.
"""
__author__ = "Tom Pollard"
__email__ = "[email protected]"
__version__ = "1.2"
__LICENSE__ = """
Copyright (c) 2004-2016, %s
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" % __author__
import re
import datetime
from Datatypes import *
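# Illustrative usage sketch (comments only, so importing this module stays
# side-effect free). It relies on the per-station NOAA URL quoted in the header
# above; the station code KJFK and the two-line file layout (timestamp line,
# then the report line) are assumptions about that feed, not something this
# module enforces:
#
#   import urllib2
#   url = "http://weather.noaa.gov/pub/data/observations/metar/stations/KJFK.TXT"
#   raw = urllib2.urlopen(url).read().splitlines()[1]
#   obs = Metar(raw)
#   # obs.station_id, obs.temp, obs.wind_speed, obs.press now hold the decoded
#   # values (temperature/speed/pressure objects from Datatypes).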
## Exceptions
class ParserError(Exception):
"""Exception raised when an unparseable group is found in body of the report."""
pass
## regular expressions to decode various groups of the METAR code
MISSING_RE = re.compile(r"^[M/]+$")
TYPE_RE = re.compile(r"^(?P<type>METAR|SPECI)\s+")
STATION_RE = re.compile(r"^(?P<station>[A-Z][A-Z0-9]{3})\s+")
TIME_RE = re.compile(r"""^(?P<day>\d\d)
(?P<hour>\d\d)
(?P<min>\d\d)Z?\s+""",
re.VERBOSE)
MODIFIER_RE = re.compile(r"^(?P<mod>AUTO|FINO|NIL|TEST|CORR?|RTD|CC[A-G])\s+")
WIND_RE = re.compile(r"""^(?P<dir>[\dO]{3}|[0O]|///|MMM|VRB)
(?P<speed>P?[\dO]{2,3}|[/M]{2,3})
(G(?P<gust>P?(\d{1,3}|[/M]{1,3})))?
(?P<units>KTS?|LT|K|T|KMH|MPS)?
(\s+(?P<varfrom>\d\d\d)V
(?P<varto>\d\d\d))?\s+""",
re.VERBOSE)
VISIBILITY_RE = re.compile(r"""^(?P<vis>(?P<dist>(M|P)?\d\d\d\d|////)
(?P<dir>[NSEW][EW]? | NDV)? |
(?P<distu>(M|P)?(\d+|\d\d?/\d\d?|\d+\s+\d/\d))
(?P<units>SM|KM|M|U) |
CAVOK )\s+""",
re.VERBOSE)
RUNWAY_RE = re.compile(r"""^(RVRNO |
R(?P<name>\d\d(RR?|LL?|C)?)/
(?P<low>(M|P)?\d\d\d\d)
(V(?P<high>(M|P)?\d\d\d\d))?
(?P<unit>FT)?[/NDU]*)\s+""",
re.VERBOSE)
WEATHER_RE = re.compile(r"""^(?P<int>(-|\+|VC)*)
(?P<desc>(MI|PR|BC|DR|BL|SH|TS|FZ)+)?
(?P<prec>(DZ|RA|SN|SG|IC|PL|GR|GS|UP|/)*)
(?P<obsc>BR|FG|FU|VA|DU|SA|HZ|PY)?
(?P<other>PO|SQ|FC|SS|DS|NSW|/+)?
(?P<int2>[-+])?\s+""",
re.VERBOSE)
SKY_RE= re.compile(r"""^(?P<cover>VV|CLR|SKC|SCK|NSC|NCD|BKN|SCT|FEW|[O0]VC|///)
(?P<height>[\dO]{2,4}|///)?
(?P<cloud>([A-Z][A-Z]+|///))?\s+""",
re.VERBOSE)
TEMP_RE = re.compile(r"""^(?P<temp>(M|-)?\d+|//|XX|MM)/
(?P<dewpt>(M|-)?\d+|//|XX|MM)?\s+""",
re.VERBOSE)
PRESS_RE = re.compile(r"""^(?P<unit>A|Q|QNH|SLP)?
(?P<press>[\dO]{3,4}|////)
(?P<unit2>INS)?\s+""",
re.VERBOSE)
RECENT_RE = re.compile(r"""^RE(?P<desc>MI|PR|BC|DR|BL|SH|TS|FZ)?
(?P<prec>(DZ|RA|SN|SG|IC|PL|GR|GS|UP)*)?
(?P<obsc>BR|FG|FU|VA|DU|SA|HZ|PY)?
(?P<other>PO|SQ|FC|SS|DS)?\s+""",
re.VERBOSE)
WINDSHEAR_RE = re.compile(r"^(WS\s+)?(ALL\s+RWY|RWY(?P<name>\d\d(RR?|L?|C)?))\s+")
COLOR_RE = re.compile(r"""^(BLACK)?(BLU|GRN|WHT|RED)\+?
(/?(BLACK)?(BLU|GRN|WHT|RED)\+?)*\s*""",
re.VERBOSE)
RUNWAYSTATE_RE = re.compile(r"""((?P<name>\d\d) | R(?P<namenew>\d\d)(RR?|LL?|C)?/?)
((?P<special> SNOCLO|CLRD(\d\d|//)) |
(?P<deposit>(\d|/))
(?P<extent>(\d|/))
(?P<depth>(\d\d|//))
(?P<friction>(\d\d|//)))\s+""",
re.VERBOSE)
TREND_RE = re.compile(r"^(?P<trend>TEMPO|BECMG|FCST|NOSIG)\s+")
TRENDTIME_RE = re.compile(r"(?P<when>(FM|TL|AT))(?P<hour>\d\d)(?P<min>\d\d)\s+")
REMARK_RE = re.compile(r"^(RMKS?|NOSPECI|NOSIG)\s+")
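# Example of how these group patterns are meant to be applied (comments only;
# the actual dispatch happens in the Metar class below). Each pattern expects a
# trailing space, so a single group is matched like this:
#
#   m = WIND_RE.match("24015G25KT ")
#   m.group('dir'), m.group('speed'), m.group('gust'), m.group('units')
#   # -> ('240', '15', '25', 'KT')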
## regular expressions for remark groups
AUTO_RE = re.compile(r"^AO(?P<type>\d)\s+")
SEALVL_PRESS_RE = re.compile(r"^SLP(?P<press>\d\d\d)\s+")
PEAK_WIND_RE = re.compile(r"""^P[A-Z]\s+WND\s+
(?P<dir>\d\d\d)
(?P<speed>P?\d\d\d?)/
(?P<hour>\d\d)?
(?P<min>\d\d)\s+""",
re.VERBOSE)
WIND_SHIFT_RE = re.compile(r"""^WSHFT\s+
(?P<hour>\d\d)?
(?P<min>\d\d)
(\s+(?P<front>FROPA))?\s+""",
re.VERBOSE)
PRECIP_1HR_RE = re.compile(r"^P(?P<precip>\d\d\d\d)\s+")
PRECIP_24HR_RE = re.compile(r"""^(?P<type>6|7)
(?P<precip>\d\d\d\d)\s+""",
re.VERBOSE)
PRESS_3HR_RE = re.compile(r"""^5(?P<tend>[0-8])
(?P<press>\d\d\d)\s+""",
re.VERBOSE)
TEMP_1HR_RE = re.compile(r"""^T(?P<tsign>0|1)
(?P<temp>\d\d\d)
((?P<dsign>0|1)
(?P<dewpt>\d\d\d))?\s+""",
re.VERBOSE)
TEMP_6HR_RE = re.compile(r"""^(?P<type>1|2)
(?P<sign>0|1)
(?P<temp>\d\d\d)\s+""",
re.VERBOSE)
TEMP_24HR_RE = re.compile(r"""^4(?P<smaxt>0|1)
(?P<maxt>\d\d\d)
(?P<smint>0|1)
(?P<mint>\d\d\d)\s+""",
re.VERBOSE)
UNPARSED_RE = re.compile(r"(?P<group>\S+)\s+")
LIGHTNING_RE = re.compile(r"""^((?P<freq>OCNL|FRQ|CONS)\s+)?
LTG(?P<type>(IC|CC|CG|CA)*)
( \s+(?P<loc>( OHD | VC | DSNT\s+ | \s+AND\s+ |
[NSEW][EW]? (-[NSEW][EW]?)* )+) )?\s+""",
re.VERBOSE)
TS_LOC_RE = re.compile(r"""TS(\s+(?P<loc>( OHD | VC | DSNT\s+ | \s+AND\s+ |
[NSEW][EW]? (-[NSEW][EW]?)* )+))?
( \s+MOV\s+(?P<dir>[NSEW][EW]?) )?\s+""",
re.VERBOSE)
## translation of weather location codes
loc_terms = [ ("OHD", "overhead"),
("DSNT", "distant"),
("AND", "and"),
("VC", "nearby") ]
def xlate_loc( loc ):
"""Substitute English terms for the location codes in the given string."""
for code, english in loc_terms:
loc = loc.replace(code,english)
return loc
## translation of the sky-condition codes into english
SKY_COVER = { "SKC":"clear",
"CLR":"clear",
"NSC":"clear",
"NCD":"clear",
"FEW":"a few ",
"SCT":"scattered ",
"BKN":"broken ",
"OVC":"overcast",
"///":"",
"VV":"indefinite ceiling" }
CLOUD_TYPE = { "TCU":"towering cumulus",
"CU":"cumulus",
"CB":"cumulonimbus",
"SC":"stratocumulus",
"CBMAM":"cumulonimbus mammatus",
"ACC":"altocumulus castellanus",
"SCSL":"standing lenticular stratocumulus",
"CCSL":"standing lenticular cirrocumulus",
"ACSL":"standing lenticular altocumulus" }
## translation of the present-weather codes into english
WEATHER_INT = { "-":"light",
"+":"heavy",
"-VC":"nearby light",
"+VC":"nearby heavy",
"VC":"nearby" }
WEATHER_DESC = { "MI":"shallow",
"PR":"partial",
"BC":"patches of",
"DR":"low drifting",
"BL":"blowing",
"SH":"showers",
"TS":"thunderstorm",
"FZ":"freezing" }
WEATHER_PREC = { "DZ":"drizzle",
"RA":"rain",
"SN":"snow",
"SG":"snow grains",
"IC":"ice crystals",
"PL":"ice pellets",
"GR":"hail",
"GS":"snow pellets",
"UP":"unknown precipitation",
"//":"" }
WEATHER_OBSC = { "BR":"mist",
"FG":"fog",
"FU":"smoke",
"VA":"volcanic ash",
"DU":"dust",
"SA":"sand",
"HZ":"haze",
"PY":"spray" }
WEATHER_OTHER = { "PO":"sand whirls",
"SQ":"squalls",
"FC":"funnel cloud",
"SS":"sandstorm",
"DS":"dust storm" }
WEATHER_SPECIAL = { "+FC":"tornado" }
COLOR = { "BLU":"blue",
"GRN":"green",
"WHT":"white" }
## translation of various remark codes into English
PRESSURE_TENDENCY = { "0":"increasing, then decreasing",
"1":"increasing more slowly",
"2":"increasing",
"3":"increasing more quickly",
"4":"steady",
"5":"decreasing, then increasing",
"6":"decreasing more slowly",
"7":"decreasing",
"8":"decreasing more quickly" }
LIGHTNING_FREQUENCY = { "OCNL":"occasional",
"FRQ":"frequent",
"CONS":"constant" }
LIGHTNING_TYPE = { "IC":"intracloud",
"CC":"cloud-to-cloud",
"CG":"cloud-to-ground",
"CA":"cloud-to-air" }
REPORT_TYPE = { "METAR":"routine report",
"SPECI":"special report",
"AUTO":"automatic report",
"COR":"manually corrected report" }
## Helper functions
def _report_match(handler,match):
"""Report success or failure of the given handler function. (DEBUG)"""
if match:
print(handler.__name__," matched '"+match+"'")
else:
print(handler.__name__," didn't match...")
def _unparsedGroup( self, d ):
"""
Handle otherwise unparseable main-body groups.
"""
self._unparsed_groups.append(d['group'])
## METAR report objects
debug = False
class Metar(object):
"""METAR (aviation meteorology report)"""
def __init__( self, metarcode, month=None, year=None, utcdelta=None):
"""Parse raw METAR code."""
self.code = metarcode # original METAR code
self.type = 'METAR' # METAR (routine) or SPECI (special)
self.mod = "AUTO" # AUTO (automatic) or COR (corrected)
self.station_id = None # 4-character ICAO station code
self.time = None # observation time [datetime]
self.cycle = None # observation cycle (0-23) [int]
self.wind_dir = None # wind direction [direction]
self.wind_speed = None # wind speed [speed]
self.wind_gust = None # wind gust speed [speed]
self.wind_dir_from = None # beginning of range for win dir [direction]
self.wind_dir_to = None # end of range for wind dir [direction]
self.vis = None # visibility [distance]
self.vis_dir = None # visibility direction [direction]
self.max_vis = None # visibility [distance]
self.max_vis_dir = None # visibility direction [direction]
self.temp = None # temperature (C) [temperature]
self.dewpt = None # dew point (C) [temperature]
self.press = None # barometric pressure [pressure]
self.runway = [] # runway visibility (list of tuples)
self.weather = [] # present weather (list of tuples)
self.recent = [] # recent weather (list of tuples)
self.sky = [] # sky conditions (list of tuples)
self.windshear = [] # runways w/ wind shear (list of strings)
self.wind_speed_peak = None # peak wind speed in last hour
self.wind_dir_peak = None # direction of peak wind speed in last hour
self.peak_wind_time = None # time of peak wind observation [datetime]
self.wind_shift_time = None # time of wind shift [datetime]
self.max_temp_6hr = None # max temp in last 6 hours
self.min_temp_6hr = None # min temp in last 6 hours
self.max_temp_24hr = None # max temp in last 24 hours
self.min_temp_24hr = None # min temp in last 24 hours
self.press_sea_level = None # sea-level pressure
self.precip_1hr = None # precipitation over the last hour
self.precip_3hr = None # precipitation over the last 3 hours
self.precip_6hr = None # precipitation over the last 6 hours
self.precip_24hr = None # precipitation over the last 24 hours
self._trend = False # trend groups present (bool)
self._trend_groups = [] # trend forecast groups
self._remarks = [] # remarks (list of strings)
self._unparsed_groups = []
self._unparsed_remarks = []
self._now = datetime.datetime.utcnow()
if utcdelta:
self._utcdelta = utcdelta
else:
self._utcdelta = datetime.datetime.now() - self._now
self._month = month
self._year = year
code = self.code+" " # (the regexps all expect trailing spaces...)
try:
ngroup = len(Metar.handlers)
igroup = 0
ifailed = -1
while igroup < ngroup and code:
pattern, handler, repeatable = Metar.handlers[igroup]
if debug: print(handler.__name__,":",code)
m = pattern.match(code)
while m:
ifailed = -1
if debug: _report_match(handler,m.group())
handler(self,m.groupdict())
code = code[m.end():]
if self._trend:
code = self._do_trend_handlers(code)
if not repeatable: break
if debug: print(handler.__name__,":",code)
m = pattern.match(code)
if not m and ifailed < 0:
ifailed = igroup
igroup += 1
if igroup == ngroup and not m:
# print("** it's not a main-body group **")
pattern, handler = (UNPARSED_RE, _unparsedGroup)
if debug: print(handler.__name__,":",code)
m = pattern.match(code)
if debug: _report_match(handler,m.group())
handler(self,m.groupdict())
code = code[m.end():]
igroup = ifailed
ifailed = -2 # if it's still -2 when we run out of main-body
# groups, we'll try parsing this group as a remark
if pattern == REMARK_RE or self.press:
while code:
for pattern, handler in Metar.remark_handlers:
if debug: print(handler.__name__,":",code)
m = pattern.match(code)
if m:
if debug: _report_match(handler,m.group())
handler(self,m.groupdict())
code = pattern.sub("",code,1)
break
except Exception as err:
raise ParserError(handler.__name__+" failed while processing '"+code+"'\n"+" ".join(err.args))
raise err
if self._unparsed_groups:
code = ' '.join(self._unparsed_groups)
raise ParserError("Unparsed groups in body '"+code+"' while processing '"+metarcode+"'")
def _do_trend_handlers(self, code):
for pattern, handler, repeatable in Metar.trend_handlers:
if debug: print(handler.__name__,":",code)
m = pattern.match(code)
while m:
if debug: _report_match(handler, m.group())
self._trend_groups.append(m.group().strip())
handler(self,m.groupdict())
code = code[m.end():]
if not repeatable: break
m = pattern.match(code)
return code
def __str__(self):
return self.string()
def _handleType( self, d ):
"""
Parse the code-type group.
The following attributes are set:
type [string]
"""
self.type = d['type']
def _handleStation( self, d ):
"""
Parse the station id group.
The following attributes are set:
station_id [string]
"""
self.station_id = d['station']
def _handleModifier( self, d ):
"""
Parse the report-modifier group.
The following attributes are set:
mod [string]
"""
mod = d['mod']
if mod == 'CORR': mod = 'COR'
if mod == 'NIL' or mod == 'FINO': mod = 'NO DATA'
self.mod = mod
def _handleTime( self, d ):
"""
Parse the observation-time group.
The following attributes are set:
time [datetime]
cycle [int]
_day [int]
_hour [int]
_min [int]
"""
self._day = int(d['day'])
if not self._month:
self._month = self._now.month
if self._day > self._now.day:
if self._month == 1:
self._month = 12
else:
self._month = self._month - 1
if not self._year:
self._year = self._now.year
if self._month > self._now.month:
self._year = self._year - 1
elif self._month == self._now.month and self._day > self._now.day:
self._year = self._year - 1
self._hour = int(d['hour'])
self._min = int(d['min'])
self.time = datetime.datetime(self._year, self._month, self._day,
self._hour, self._min)
if self._min < 45:
self.cycle = self._hour
else:
self.cycle = self._hour+1
def _handleWind( self, d ):
"""
Parse the wind and variable-wind groups.
The following attributes are set:
wind_dir [direction]
wind_speed [speed]
wind_gust [speed]
wind_dir_from [int]
wind_dir_to [int]
"""
wind_dir = d['dir'].replace('O','0')
if wind_dir != "VRB" and wind_dir != "///" and wind_dir != "MMM":
self.wind_dir = direction(wind_dir)
wind_speed = d['speed'].replace('O','0')
units = d['units']
if units == 'KTS' or units == 'K' or units == 'T' or units == 'LT':
units = 'KT'
if wind_speed.startswith("P"):
self.wind_speed = speed(wind_speed[1:], units, ">")
elif not MISSING_RE.match(wind_speed):
self.wind_speed = speed(wind_speed, units)
if d['gust']:
wind_gust = d['gust']
if wind_gust.startswith("P"):
self.wind_gust = speed(wind_gust[1:], units, ">")
elif not MISSING_RE.match(wind_gust):
self.wind_gust = speed(wind_gust, units)
if d['varfrom']:
self.wind_dir_from = direction(d['varfrom'])
self.wind_dir_to = direction(d['varto'])
def _handleVisibility( self, d ):
"""
Parse the minimum and maximum visibility groups.
The following attributes are set:
vis [distance]
vis_dir [direction]
max_vis [distance]
max_vis_dir [direction]
"""
vis = d['vis']
vis_less = None
vis_dir = None
vis_units = "M"
vis_dist = "10000"
if d['dist'] and d['dist'] != '////':
vis_dist = d['dist']
if d['dir'] and d['dir'] != 'NDV':
vis_dir = d['dir']
elif d['distu']:
vis_dist = d['distu']
if d['units'] and d['units'] != "U":
vis_units = d['units']
if vis_dist == "9999":
vis_dist = "10000"
vis_less = ">"
if self.vis:
if vis_dir:
self.max_vis_dir = direction(vis_dir)
self.max_vis = distance(vis_dist, vis_units, vis_less)
else:
if vis_dir:
self.vis_dir = direction(vis_dir)
self.vis = distance(vis_dist, vis_units, vis_less)
def _handleRunway( self, d ):
"""
Parse a runway visual range group.
The following attributes are set:
range [list of tuples]
. name [string]
. low [distance]
. high [distance]
"""
if d['name']:
name = d['name']
low = distance(d['low'])
if d['high']:
high = distance(d['high'])
else:
high = low
self.runway.append((name,low,high))
def _handleWeather( self, d ):
"""
Parse a present-weather group.
The following attributes are set:
weather [list of tuples]
. intensity [string]
. description [string]
. precipitation [string]
. obscuration [string]
. other [string]
"""
inteni = d['int']
if not inteni and d['int2']:
inteni = d['int2']
desci = d['desc']
preci = d['prec']
obsci = d['obsc']
otheri = d['other']
self.weather.append((inteni,desci,preci,obsci,otheri))
def _handleSky( self, d ):
"""
Parse a sky-conditions group.
The following attributes are set:
sky [list of tuples]
. cover [string]
. height [distance]
. cloud [string]
"""
height = d['height']
if not height or height == "///":
height = None
else:
height = height.replace('O','0')
height = distance(int(height)*100,"FT")
cover = d['cover']
if cover == 'SCK' or cover == 'SKC' or cover == 'CL': cover = 'CLR'
if cover == '0VC': cover = 'OVC'
cloud = d['cloud']
if cloud == '///': cloud = ""
self.sky.append((cover,height,cloud))
def _handleTemp( self, d ):
"""
Parse a temperature-dewpoint group.
The following attributes are set:
temp temperature (Celsius) [float]
dewpt dew point (Celsius) [float]
"""
temp = d['temp']
dewpt = d['dewpt']
if temp and temp != "//" and temp != "XX" and temp != "MM" :
self.temp = temperature(temp)
if dewpt and dewpt != "//" and dewpt != "XX" and dewpt != "MM" :
self.dewpt = temperature(dewpt)
def _handlePressure( self, d ):
"""
Parse an altimeter-pressure group.
The following attributes are set:
press [int]
"""
press = d['press']
if press != '////':
press = float(press.replace('O','0'))
if d['unit']:
if d['unit'] == 'A' or (d['unit2'] and d['unit2'] == 'INS'):
self.press = pressure(press/100,'IN')
elif d['unit'] == 'SLP':
if press < 500:
press = press/10 + 1000
else:
press = press/10 + 900
self.press = pressure(press,'MB')
self._remarks.append("sea-level pressure %.1fhPa" % press)
else:
self.press = pressure(press,'MB')
elif press > 2500:
self.press = pressure(press/100,'IN')
else:
self.press = pressure(press,'MB')
def _handleRecent( self, d ):
"""
Parse a recent-weather group.
The following attributes are set:
weather [list of tuples]
. intensity [string]
. description [string]
. precipitation [string]
. obscuration [string]
. other [string]
"""
desci = d['desc']
preci = d['prec']
obsci = d['obsc']
otheri = d['other']
self.recent.append(("",desci,preci,obsci,otheri))
def _handleWindShear( self, d ):
"""
Parse wind-shear groups.
The following attributes are set:
windshear [list of strings]
"""
if d['name']:
self.windshear.append(d['name'])
else:
self.windshear.append("ALL")
def _handleColor( self, d ):
"""
Parse (and ignore) the color groups.
The following attributes are set:
trend [list of strings]
"""
pass
def _handleRunwayState( self, d ):
"""
Parse (and ignore) the runway state.
The following attributes are set:
"""
pass
def _handleTrend( self, d ):
"""
Parse (and ignore) the trend groups.
"""
if 'trend' in d:
self._trend_groups.append(d['trend'])
self._trend = True
def _startRemarks( self, d ):
"""
Found the start of the remarks section.
"""
self._remarks = []
def _handleSealvlPressRemark( self, d ):
"""
Parse the sea-level pressure remark group.
"""
value = float(d['press'])/10.0
if value < 50:
value += 1000
else:
value += 900
if not self.press:
self.press = pressure(value,"MB")
self.press_sea_level = pressure(value,"MB")
def _handlePrecip24hrRemark( self, d ):
"""
Parse a 3-, 6- or 24-hour cumulative preciptation remark group.
"""
value = float(d['precip'])/100.0
if d['type'] == "6":
if self.cycle == 3 or self.cycle == 9 or self.cycle == 15 or self.cycle == 21:
self.precip_3hr = precipitation(value,"IN")
else:
self.precip_6hr = precipitation(value,"IN")
else:
self.precip_24hr = precipitation(value,"IN")
def _handlePrecip1hrRemark( self, d ):
"""Parse an hourly precipitation remark group."""
value = float(d['precip'])/100.0
self.precip_1hr = precipitation(value,"IN")
def _handleTemp1hrRemark( self, d ):
"""
Parse a temperature & dewpoint remark group.
These values replace the temp and dewpt from the body of the report.
"""
value = float(d['temp'])/10.0
if d['tsign'] == "1": value = -value
self.temp = temperature(value)
if d['dewpt']:
value2 = float(d['dewpt'])/10.0
if d['dsign'] == "1": value2 = -value2
self.dewpt = temperature(value2)
def _handleTemp6hrRemark( self, d ):
"""
Parse a 6-hour maximum or minimum temperature remark group.
"""
value = float(d['temp'])/10.0
if d['sign'] == "1": value = -value
if d['type'] == "1":
self.max_temp_6hr = temperature(value,"C")
else:
self.min_temp_6hr = temperature(value,"C")
def _handleTemp24hrRemark( self, d ):
"""
Parse a 24-hour maximum/minimum temperature remark group.
"""
value = float(d['maxt'])/10.0
if d['smaxt'] == "1": value = -value
value2 = float(d['mint'])/10.0
if d['smint'] == "1": value2 = -value2
self.max_temp_24hr = temperature(value,"C")
self.min_temp_24hr = temperature(value2,"C")
def _handlePress3hrRemark( self, d ):
"""
Parse a pressure-tendency remark group.
"""
value = float(d['press'])/10.0
descrip = PRESSURE_TENDENCY[d['tend']]
self._remarks.append("3-hr pressure change %.1fhPa, %s" % (value,descrip))
def _handlePeakWindRemark( self, d ):
"""
Parse a peak wind remark group.
"""
peak_dir = int(d['dir'])
peak_speed = int(d['speed'])
self.wind_speed_peak = speed(peak_speed, "KT")
self.wind_dir_peak = direction(peak_dir)
peak_min = int(d['min'])
if d['hour']:
peak_hour = int(d['hour'])
else:
peak_hour = self._hour
self.peak_wind_time = datetime.datetime(self._year, self._month, self._day,
peak_hour, peak_min)
if self.peak_wind_time > self.time:
if peak_hour > self._hour:
self.peak_wind_time -= datetime.timedelta(hours=24)
else:
self.peak_wind_time -= datetime.timedelta(hours=1)
self._remarks.append("peak wind %dkt from %d degrees at %d:%02d" % \
(peak_speed, peak_dir, peak_hour, peak_min))
def _handleWindShiftRemark( self, d ):
"""
Parse a wind shift remark group.
"""
if d['hour']:
wshft_hour = int(d['hour'])
else:
wshft_hour = self._hour
wshft_min = int(d['min'])
self.wind_shift_time = datetime.datetime(self._year, self._month, self._day,
wshft_hour, wshft_min)
if self.wind_shift_time > self.time:
if wshft_hour > self._hour:
self.wind_shift_time -= datetime.timedelta(hours=24)
else:
self.wind_shift_time -= datetime.timedelta(hours=1)
text = "wind shift at %d:%02d" % (wshft_hour, wshft_min)
if d['front']:
text += " (front)"
self._remarks.append(text)
def _handleLightningRemark( self, d ):
"""
Parse a lightning observation remark group.
"""
parts = []
if d['freq']:
parts.append(LIGHTNING_FREQUENCY[d['freq']])
parts.append("lightning")
if d['type']:
ltg_types = []
group = d['type']
while group:
ltg_types.append(LIGHTNING_TYPE[group[:2]])
group = group[2:]
parts.append("("+",".join(ltg_types)+")")
if d['loc']:
parts.append(xlate_loc(d['loc']))
self._remarks.append(" ".join(parts))
def _handleTSLocRemark( self, d ):
"""
Parse a thunderstorm location remark group.
"""
text = "thunderstorm"
if d['loc']:
text += " "+xlate_loc(d['loc'])
if d['dir']:
text += " moving %s" % d['dir']
self._remarks.append(text)
def _handleAutoRemark( self, d ):
"""
Parse an automatic station remark group.
"""
if d['type'] == "1":
self._remarks.append("Automated station")
elif d['type'] == "2":
self._remarks.append("Automated station (type 2)")
def _unparsedRemark( self, d ):
"""
Handle otherwise unparseable remark groups.
"""
self._unparsed_remarks.append(d['group'])
## the list of handler functions to use (in order) to process a METAR report
handlers = [ (TYPE_RE, _handleType, False),
(STATION_RE, _handleStation, False),
(TIME_RE, _handleTime, False),
(MODIFIER_RE, _handleModifier, False),
(WIND_RE, _handleWind, False),
(VISIBILITY_RE, _handleVisibility, True),
(RUNWAY_RE, _handleRunway, True),
(WEATHER_RE, _handleWeather, True),
(SKY_RE, _handleSky, True),
(TEMP_RE, _handleTemp, False),
(PRESS_RE, _handlePressure, True),
(RECENT_RE,_handleRecent, True),
(WINDSHEAR_RE, _handleWindShear, True),
(COLOR_RE, _handleColor, True),
(RUNWAYSTATE_RE, _handleRunwayState, True),
(TREND_RE, _handleTrend, False),
(REMARK_RE, _startRemarks, False) ]
trend_handlers = [ (TRENDTIME_RE, _handleTrend, True),
(WIND_RE, _handleTrend, True),
(VISIBILITY_RE, _handleTrend, True),
(WEATHER_RE, _handleTrend, True),
(SKY_RE, _handleTrend, True),
(COLOR_RE, _handleTrend, True)]
## the list of patterns for the various remark groups,
## paired with the handler functions to use to record the decoded remark.
remark_handlers = [ (AUTO_RE, _handleAutoRemark),
(SEALVL_PRESS_RE, _handleSealvlPressRemark),
(PEAK_WIND_RE, _handlePeakWindRemark),
(WIND_SHIFT_RE, _handleWindShiftRemark),
(LIGHTNING_RE, _handleLightningRemark),
(TS_LOC_RE, _handleTSLocRemark),
(TEMP_1HR_RE, _handleTemp1hrRemark),
(PRECIP_1HR_RE, _handlePrecip1hrRemark),
(PRECIP_24HR_RE, _handlePrecip24hrRemark),
(PRESS_3HR_RE, _handlePress3hrRemark),
(TEMP_6HR_RE, _handleTemp6hrRemark),
(TEMP_24HR_RE, _handleTemp24hrRemark),
(UNPARSED_RE, _unparsedRemark) ]
## functions that return text representations of conditions for output
def string( self ):
"""
Return a human-readable version of the decoded report.
"""
lines = []
lines.append("station: %s" % self.station_id)
# if self.type:
# lines.append("type: %s" % self.report_type())
if self.time:
lines.append("time: %s" % self.time.ctime())
if self.temp:
lines.append("temperature: %s" % self.temp.string("C"))
if self.dewpt:
lines.append("dew point: %s" % self.dewpt.string("C"))
if self.wind_speed:
lines.append("wind: %s" % self.wind())
if self.wind_speed_peak:
lines.append("peak wind: %s" % self.peak_wind())
if self.wind_shift_time:
lines.append("wind shift: %s" % self.wind_shift())
if self.vis:
lines.append("visibility: %s" % self.visibility())
if self.runway:
lines.append("visual range: %s" % self.runway_visual_range())
if self.press:
lines.append("pressure: %s" % self.press.string("mb"))
if self.weather:
lines.append("weather: %s" % self.present_weather())
if self.sky:
lines.append("sky: %s" % self.sky_conditions("\n "))
if self.press_sea_level:
lines.append("sea-level pressure: %s" % self.press_sea_level.string("mb"))
if self.max_temp_6hr:
lines.append("6-hour max temp: %s" % str(self.max_temp_6hr))
if self.max_temp_6hr:
lines.append("6-hour min temp: %s" % str(self.min_temp_6hr))
if self.max_temp_24hr:
lines.append("24-hour max temp: %s" % str(self.max_temp_24hr))
if self.max_temp_24hr:
lines.append("24-hour min temp: %s" % str(self.min_temp_24hr))
if self.precip_1hr:
lines.append("1-hour precipitation: %s" % str(self.precip_1hr))
if self.precip_3hr:
lines.append("3-hour precipitation: %s" % str(self.precip_3hr))
if self.precip_6hr:
lines.append("6-hour precipitation: %s" % str(self.precip_6hr))
if self.precip_24hr:
lines.append("24-hour precipitation: %s" % str(self.precip_24hr))
if self._remarks:
lines.append("remarks:")
lines.append("- "+self.remarks("\n- "))
if self._unparsed_remarks:
lines.append("- "+' '.join(self._unparsed_remarks))
## lines.append("METAR: "+self.code)
return "\n".join(lines)
def report_type( self ):
"""
Return a textual description of the report type.
"""
if self.type == None:
text = "unknown report type"
elif self.type in REPORT_TYPE:
text = REPORT_TYPE[self.type]
else:
text = self.type+" report"
if self.cycle:
text += ", cycle %d" % self.cycle
if self.mod:
if self.mod in REPORT_TYPE:
text += " (%s)" % REPORT_TYPE[self.mod]
else:
text += " (%s)" % self.mod
return text
def wind( self, units="KT" ):
"""
Return a textual description of the wind conditions.
Units may be specified as "MPS", "KT", "KMH", or "MPH".
"""
if self.wind_speed == None:
return "missing"
elif self.wind_speed.value() == 0.0:
text = "calm"
else:
wind_speed = self.wind_speed.string(units)
if not self.wind_dir:
text = "variable at %s" % wind_speed
elif self.wind_dir_from:
text = "%s to %s at %s" % \
(self.wind_dir_from.compass(), self.wind_dir_to.compass(), wind_speed)
else:
text = "%s at %s" % (self.wind_dir.compass(), wind_speed)
if self.wind_gust:
text += ", gusting to %s" % self.wind_gust.string(units)
return text
def peak_wind( self, units="KT" ):
"""
Return a textual description of the peak wind conditions.
Units may be specified as "MPS", "KT", "KMH", or "MPH".
"""
if self.wind_speed_peak == None:
return "missing"
elif self.wind_speed_peak.value() == 0.0:
text = "calm"
else:
wind_speed = self.wind_speed_peak.string(units)
if not self.wind_dir_peak:
text = wind_speed
else:
text = "%s at %s" % (self.wind_dir_peak.compass(), wind_speed)
if not self.peak_wind_time == None:
text += " at %s" % self.peak_wind_time.strftime('%H:%M')
return text
def wind_shift( self, units="KT" ):
"""
Return a textual description of the wind shift time
Units may be specified as "MPS", "KT", "KMH", or "MPH".
"""
if self.wind_shift_time == None:
return "missing"
else:
return self.wind_shift_time.strftime('%H:%M')
def visibility( self, units=None ):
"""
Return a textual description of the visibility.
Units may be statute miles ("SM") or meters ("M").
"""
if self.vis == None:
return "missing"
if self.vis_dir:
text = "%s to %s" % (self.vis.string(units), self.vis_dir.compass())
else:
text = self.vis.string(units)
if self.max_vis:
if self.max_vis_dir:
text += "; %s to %s" % (self.max_vis.string(units), self.max_vis_dir.compass())
else:
text += "; %s" % self.max_vis.string(units)
return text
def runway_visual_range( self, units=None ):
"""
Return a textual description of the runway visual range.
"""
lines = []
for name,low,high in self.runway:
if low != high:
lines.append("on runway %s, from %d to %s" % (name, low.value(units), high.string(units)))
else:
lines.append("on runway %s, %s" % (name, low.string(units)))
return "; ".join(lines)
def present_weather( self ):
"""
Return a textual description of the present weather.
"""
return self._weather( self.weather )
def recent_weather( self ):
"""
Return a textual description of the recent weather.
"""
return self._weather( self.recent )
def _weather( self, weather ):
"""
Return a textual description of weather.
"""
text_list = []
for weatheri in weather:
(inteni,desci,preci,obsci,otheri) = weatheri
text_parts = []
code_parts = []
if inteni:
code_parts.append(inteni)
text_parts.append(WEATHER_INT[inteni])
if desci:
code_parts.append(desci)
if desci != "SH" or not preci:
text_parts.append(WEATHER_DESC[desci[0:2]])
if len(desci) == 4:
text_parts.append(WEATHER_DESC[desci[2:]])
if preci:
code_parts.append(preci)
if len(preci) == 2:
precip_text = WEATHER_PREC[preci]
elif len(preci) == 4:
precip_text = WEATHER_PREC[preci[:2]]+" and "
precip_text += WEATHER_PREC[preci[2:]]
elif len(preci) == 6:
precip_text = WEATHER_PREC[preci[:2]]+", "
precip_text += WEATHER_PREC[preci[2:4]]+" and "
precip_text += WEATHER_PREC[preci[4:]]
if desci == "TS":
text_parts.append("with")
text_parts.append(precip_text)
if desci == "SH":
text_parts.append(WEATHER_DESC[desci])
if obsci:
code_parts.append(obsci)
text_parts.append(WEATHER_OBSC[obsci])
if otheri:
code_parts.append(otheri)
text_parts.append(WEATHER_OTHER[otheri])
code = " ".join(code_parts)
if code in WEATHER_SPECIAL:
text_list.append(WEATHER_SPECIAL[code])
else:
text_list.append(" ".join(text_parts))
return "; ".join(text_list)
def sky_conditions( self, sep="; " ):
"""
Return a textual description of the sky conditions.
"""
text_list = []
for skyi in self.sky:
(cover,height,cloud) = skyi
if cover in ["SKC", "CLR", "NSC"]:
text_list.append(SKY_COVER[cover])
else:
if cloud:
what = CLOUD_TYPE[cloud]
elif SKY_COVER[cover].endswith(" "):
what = "clouds"
else:
what = ""
if cover == "VV":
text_list.append("%s%s, vertical visibility to %s" %
(SKY_COVER[cover],what,str(height)))
else:
text_list.append("%s%s at %s" %
(SKY_COVER[cover],what,str(height)))
return sep.join(text_list)
def trend( self ):
"""
Return the trend forecast groups
"""
return " ".join(self._trend_groups)
def remarks( self, sep="; "):
"""
Return the decoded remarks.
"""
return sep.join(self._remarks)
| apache-2.0 | 2,790,689,688,889,914,000 | 37.31822 | 754 | 0.486491 | false | 3.650084 | false | false | false |
liuchengtian/CS523 | steerstats/tools/plotting/animating/anim_scatter2d.py | 8 | 2679 | """
Matplotlib Animation Example
author: Jake Vanderplas
email: [email protected]
website: http://jakevdp.github.com
license: BSD
Please feel free to use and modify this, but keep the above information. Thanks!
"""
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import animation
import matplotlib
import random
import sys
import csv
sys.path.append('../../../')
from util import getParetoFront
filename = sys.argv[1]
data = []
for i in range(1, int(sys.argv[2])):
tmp_filename = filename + str(i) + ".csv"
csvfile = open(tmp_filename, 'r')
spamreader = csv.reader(csvfile, delimiter=',')
tmp_data = []
for row in spamreader:
tmp_data.append(row)
# tmp_data.append([float(row[0]), float(row[1]), float(row[2])])
data.append(tmp_data)
data = np.array(data)
up = 2
low = 0
# First set up the figure, the axis, and the plot element we want to animate
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_ylim([0.4,0.8])
ax.set_xlim([0.65,1])
ax.set_xlabel('Efficiency')
ax.set_ylabel('Effort')
ax.set_title('Pareto Optimal Front Estimation with ORCA')
# ax = plt.axes(xlim=(low, up), ylim=(low, up))
# ax = plt.axes(xlim=(0.9, 1.0), ylim=(0, 1))
scat1 = ax.scatter([3], [4], c="b")
line, = ax.plot([], [], lw=2, c='g')
# initialization function: plot the background of each frame
def init():
print "paths"
# print scat.get_paths()
# sys.exit()
# scat.set_paths(matplotlib.path.Path([[2, 3]]))
return scat1, line
# animation function. This is called sequentially
def animate(i):
# scat1.set_paths(np.array(tmp_data))
# scat.set_paths([x, y])
# print scat.get_offsets()
tmp_data=np.array(data[i], dtype='d')
# print tmp_data[:, 1:3]
# scat1.set_paths(tmp_data)
scat1.set_offsets(tmp_data)
fitnesses = tmp_data[:,0:2]
parameters = tmp_data[:,2:]
# print tmp_data
# print fitnesses
# print parameters
front = getParetoFront(fitnesses, parameters)
line.set_data(front[:,0], front[:,1])
return scat1, line
# call the animator. blit=True means only re-draw the parts that have changed.
anim = animation.FuncAnimation(fig, animate, init_func=init,
frames=200, interval=100, blit=True)
# save the animation as an mp4. This requires ffmpeg or mencoder to be
# installed. The extra_args ensure that the x264 codec is used, so that
# the video can be embedded in html5. You may need to adjust this for
# your system: for more information, see
# http://matplotlib.sourceforge.net/api/animation_api.html
anim.save('basic_animation.mp4', fps=10, extra_args=['-vcodec', 'libx264'])
plt.show() | gpl-3.0 | 4,624,429,499,103,212,000 | 29.11236 | 80 | 0.668533 | false | 3.020293 | false | false | false |
Saevon/AnkiHelpers | new_kanji_locator.py | 1 | 1381 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from models.anki import AnkiModel
from models.kanji import Kanji
from models.kanji_word import KanjiWord
from utf8_helper import force_UTF8
import kana
import settings
import argparse
import sys
from collections import Counter
def parse(args=None):
parser = argparse.ArgumentParser(description='Returns a random word')
parser.add_argument(
'-n', '--num',
dest='count', action='store', type=int,
default=10,
help='The number of words to display'
)
out = parser.parse_args(args)
return out
if __name__ == '__main__':
force_UTF8()
args = parse()
# Find all the kanji that are in the deck
all_kanji = set()
for word in KanjiWord.all():
for kanji in word.kanji:
all_kanji.add(kanji)
for kanji in Kanji.all():
all_kanji.add(kanji)
# Count which kanji the input data has
data = Counter(unicode(sys.stdin.read()))
for char, count in data.most_common():
# we don't want kana
if kana.is_kana(char):
del data[char]
# Nor do we want kanji we know
if char in all_kanji:
del data[char]
# Nor any non-kanji chars
if not kana.is_kanji(char):
del data[char]
for char, count in data.most_common(args.count):
print char, count
| mit | -6,331,862,144,492,737,000 | 22.016667 | 73 | 0.613324 | false | 3.461153 | false | false | false |
dpereira/synergy | ext/toolchain/commands1.py | 1 | 58099 | #Embedded file name: ext/toolchain/commands1.py
import sys, os, ConfigParser, shutil, re, ftputil, zipfile, glob, commands
from generators import VisualStudioGenerator, EclipseGenerator, XcodeGenerator, MakefilesGenerator
from getopt import gnu_getopt
if sys.version_info >= (2, 4):
import subprocess
class Toolchain():
requiredMajor = 2
requiredMinor = 6
globalOptions = 'v'
globalOptionsLong = ['no-prompts',
'verbose',
'skip-gui',
'skip-core']
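    # Maps each command to its extra getopt short-option string and long-option list;
    # these are combined with the global options when the command is run.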
cmd_opt_dict = {'about': ['', []],
'setup': ['g:', ['generator=']],
'configure': ['g:dr', ['generator=',
'debug',
'release',
'mac-sdk=',
'mac-identity=']],
'build': ['dr', ['debug', 'release']],
'clean': ['dr', ['debug', 'release']],
'update': ['', []],
'install': ['', []],
'doxygen': ['', []],
'dist': ['', ['vcredist-dir=', 'qt-dir=']],
'distftp': ['', ['host=',
'user=',
'pass=',
'dir=']],
'kill': ['', []],
'usage': ['', []],
'revision': ['', []],
'reformat': ['', []],
'open': ['', []],
'genlist': ['', []],
'reset': ['', []],
'signwin': ['', ['pfx=', 'pwd=', 'dist']],
'signmac': ['', []]}
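    # Alternative command names accepted on the command line, mapped to the canonical commands above.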
cmd_alias_dict = {'info': 'about',
'help': 'usage',
'package': 'dist',
'docs': 'doxygen',
'make': 'build',
'cmake': 'configure'}
def complete_command(self, arg):
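        # Return every command and alias that starts with arg; an exact match returns only that name.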
completions = []
for cmd, optarg in self.cmd_opt_dict.iteritems():
if cmd == arg:
return [cmd]
if cmd.startswith(arg):
completions.append(cmd)
for alias, cmd in self.cmd_alias_dict.iteritems():
if alias == arg:
return [alias]
if alias.startswith(arg):
completions.append(alias)
return completions
def start_cmd(self, argv):
cmd_arg = ''
if len(argv) > 1:
cmd_arg = argv[1]
if cmd_arg in ('--help', '-h', '--usage', '-u', '/?'):
cmd_arg = 'usage'
completions = self.complete_command(cmd_arg)
if cmd_arg and len(completions) > 0:
if len(completions) == 1:
cmd = completions[0]
cmd_map = list()
if cmd_arg != cmd:
cmd_map.append(cmd_arg)
cmd_map.append(cmd)
if cmd in self.cmd_alias_dict.keys():
alias = cmd
if cmd_arg == cmd:
cmd_map.append(alias)
cmd = self.cmd_alias_dict[cmd]
cmd_map.append(cmd)
if len(cmd_map) != 0:
print 'Mapping command: %s' % ' -> '.join(cmd_map)
self.run_cmd(cmd, argv[2:])
return 0
print 'Command `%s` too ambiguous, could mean any of: %s' % (cmd_arg, ', '.join(completions))
else:
if len(argv) == 1:
print 'No command specified, showing usage.\n'
else:
print 'Command not recognised: %s\n' % cmd_arg
self.run_cmd('usage')
return 1
def run_cmd(self, cmd, argv = []):
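        # Parse the global plus command-specific options, then dispatch to the matching method on CommandHandler.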
verbose = False
try:
options_pair = self.cmd_opt_dict[cmd]
options = self.globalOptions + options_pair[0]
options_long = []
options_long.extend(self.globalOptionsLong)
options_long.extend(options_pair[1])
opts, args = gnu_getopt(argv, options, options_long)
for o, a in opts:
if o in ('-v', '--verbose'):
verbose = True
handler = CommandHandler(argv, opts, args, verbose)
cmd_func = getattr(handler, cmd)
cmd_func()
except:
if not verbose:
sys.stderr.write('Error: ' + sys.exc_info()[1].__str__() + '\n')
sys.exit(1)
else:
raise
def run(self, argv):
if sys.version_info < (self.requiredMajor, self.requiredMinor):
print 'Python version must be at least ' + str(self.requiredMajor) + '.' + str(self.requiredMinor) + ', but is ' + str(sys.version_info[0]) + '.' + str(sys.version_info[1])
sys.exit(1)
try:
self.start_cmd(argv)
except KeyboardInterrupt:
print '\n\nUser aborted, exiting.'
class InternalCommands():
project = 'synergy'
setup_version = 5
website_url = 'http://synergy-project.org/'
this_cmd = 'hm'
cmake_cmd = 'cmake'
qmake_cmd = 'qmake'
make_cmd = 'make'
xcodebuild_cmd = 'xcodebuild'
w32_make_cmd = 'mingw32-make'
w32_qt_version = '4.6.2'
defaultTarget = 'release'
cmake_dir = 'res'
gui_dir = 'src/gui'
doc_dir = 'doc'
extDir = 'ext'
sln_filename = '%s.sln' % project
xcodeproj_filename = '%s.xcodeproj' % project
configDir = 'build'
configFilename = '%s/%s.cfg' % (configDir, this_cmd)
qtpro_filename = 'gui.pro'
doxygen_filename = 'doxygen.cfg'
cmake_url = 'http://www.cmake.org/cmake/resources/software.html'
prevdir = ''
generator_id = None
no_prompts = False
enableMakeCore = True
enableMakeGui = True
macSdk = None
macIdentity = None
gtestDir = 'gtest-1.6.0'
gmockDir = 'gmock-1.6.0'
win32_generators = {1: VisualStudioGenerator('10'),
2: VisualStudioGenerator('10 Win64'),
3: VisualStudioGenerator('9 2008'),
4: VisualStudioGenerator('9 2008 Win64'),
5: VisualStudioGenerator('8 2005'),
6: VisualStudioGenerator('8 2005 Win64')}
unix_generators = {1: MakefilesGenerator(),
2: EclipseGenerator()}
darwin_generators = {1: MakefilesGenerator(),
2: XcodeGenerator(),
3: EclipseGenerator()}
def getBuildDir(self, target = ''):
return self.getGenerator().getBuildDir(target)
def getBinDir(self, target = ''):
return self.getGenerator().getBinDir(target)
def sln_filepath(self):
return '%s\\%s' % (self.getBuildDir(), self.sln_filename)
def xcodeproj_filepath(self, target = ''):
return '%s/%s' % (self.getBuildDir(target), self.xcodeproj_filename)
def usage(self):
app = sys.argv[0]
print 'Usage: %s <command> [-g <index>|-v|--no-prompts|<command-options>]\n\nReplace [command] with one of:\n about Show information about this script\n setup Runs the initial setup for this script\n conf Runs cmake (generates project files)\n open Attempts to open the generated project file\n build Builds using the platform build chain\n clean Cleans using the platform build chain\n kill Kills all synergy processes (run as admin)\n update Updates the source code from repository\n revision Display the current source code revision\n package Create a distribution package (e.g. tar.gz)\n install Installs the program\n doxygen Builds doxygen documentation\n reformat Reformat .cpp and .h files using AStyle\n genlist Shows the list of available platform generators\n usage Shows the help screen\n\nExample: %s build -g 3' % (app, app)
def configureAll(self, targets, extraArgs = ''):
if len(targets) == 0:
targets += [self.defaultTarget]
for target in targets:
self.configure(target)
def checkGTest(self):
dir = self.extDir + '/' + self.gtestDir
if os.path.isdir(dir):
return
zipFilename = dir + '.zip'
if not os.path.exists(zipFilename):
raise Exception('GTest zip not found at: ' + zipFilename)
if not os.path.exists(dir):
os.mkdir(dir)
zip = zipfile.ZipFile(zipFilename)
self.zipExtractAll(zip, dir)
def checkGMock(self):
dir = self.extDir + '/' + self.gmockDir
if os.path.isdir(dir):
return
zipFilename = dir + '.zip'
if not os.path.exists(zipFilename):
raise Exception('GMock zip not found at: ' + zipFilename)
if not os.path.exists(dir):
os.mkdir(dir)
zip = zipfile.ZipFile(zipFilename)
self.zipExtractAll(zip, dir)
def zipExtractAll(self, z, dir):
if not dir.endswith('/'):
dir += '/'
for f in z.namelist():
if f.endswith('/'):
os.makedirs(dir + f)
else:
z.extract(f, dir)
def configure(self, target = '', extraArgs = ''):
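        # Run the core (cmake) and GUI (qmake) configuration steps; on OS X the --mac-sdk and
        # --mac-identity values are persisted to the config file so later commands can reuse them.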
self.ensure_setup_latest()
if sys.platform == 'darwin':
config = self.getConfig()
if self.macSdk:
config.set('hm', 'macSdk', self.macSdk)
elif config.has_option('hm', 'macSdk'):
self.macSdk = config.get('hm', 'macSdk')
if self.macIdentity:
config.set('hm', 'macIdentity', self.macIdentity)
elif config.has_option('hm', 'macIdentity'):
self.macIdentity = config.get('hm', 'macIdentity')
self.write_config(config)
if not self.macSdk:
raise Exception('Arg missing: --mac-sdk <version>')
if not self.macIdentity:
raise Exception('Arg missing: --mac-identity <name>')
sdkDir = self.getMacSdkDir()
if not os.path.exists(sdkDir):
raise Exception('Mac SDK not found at: ' + sdkDir)
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.macSdk
if target == '':
print 'Defaulting target to: ' + self.defaultTarget
target = self.defaultTarget
if self.enableMakeCore:
self.configureCore(target, extraArgs)
if self.enableMakeGui:
self.configureGui(target, extraArgs)
self.setConfRun(target)
def configureCore(self, target = '', extraArgs = ''):
_cmake_cmd = self.persist_cmake()
generator = self.getGenerator()
if generator != self.findGeneratorFromConfig():
print 'Generator changed, running setup.'
self.setup(target)
cmake_args = ''
#if generator.cmakeName != '':
# cmake_args += ' -DCMAKE_TOOLCHAIN_FILE=/Users/diego/hghome/ios-cmake/toolchain/iOS.cmake'
print generator.cmakeName
if generator.cmakeName.find('Unix Makefiles') != -1:
print "UNIX MAKEFILES!"
cmake_args += ' -DCMAKE_BUILD_TYPE=' + target.capitalize()
if sys.platform == 'darwin':
macSdkMatch = re.match('(\\d+)\\.(\\d+)', self.macSdk)
if not macSdkMatch:
raise Exception('unknown osx version: ' + self.macSdk)
sdkDir = self.getMacSdkDir()
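            # NOTE: the flags below hard-code an iOS cross-compile (iPhoneOS 9.2 SDK,
            # armv7/armv7s/arm64) and override the Mac SDK resolved above.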
sysroot = "/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk"
cmake_args += ' -DCMAKE_OSX_ARCHITECTURE="armv7s"'
cmake_args += ' -DCMAKE_OSX_ARCHITECTURES="armv7s"'
cmake_args += ' -DCMAKE_TRY_COMPILE_OSX_ARCHITECTURES=""'
cmake_args += ' -DCMAKE_OSX_DEPLOYMENT_TARGET=""'
cmake_args += ' -DCMAKE_BUILD_TYPE=debug'
cmake_args += ' -DCMAKE_OSX_SYSROOT=%s' % ("/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk",)
cmake_args += ' -DCMAKE_CXX_FLAGS="-g -arch armv7 -arch armv7s -arch arm64 -isysroot %s -DHAVE_INET_ATON -DHAVE_POSIX_SIGWAIT -DHAVE_SYS_SOCKET_H -DTYPE_OF_SIZE_1=char -DHAVE_SYS_TYPES_H -DHAVE_PTHREAD -I/Users/diego/tarballs/iOScURL/iOScURL/"' % (sysroot, )
cmake_args += ' -DCMAKE_C_FLAGS="-g -arch armv7 -arch armv7s -arch arm64 -isysroot %s -DHAVE_INET_ATON -DHAVE_POSIX_SIGWAIT -DTYPE_OF_SIZE_1=char -DHAVE_SYS_SOCKET_H -DHAVE_SYS_TYPES_H -DHAVE_PTHREAD -I/Users/diego/tarballs/iOScURL/iOScURL/"' % (sysroot, )
sourceDir = generator.getSourceDir()
self.checkGTest()
self.checkGMock()
if extraArgs != '':
cmake_args += ' ' + extraArgs
cmake_cmd_string = _cmake_cmd + cmake_args + ' ' + sourceDir
self.try_chdir(self.getBuildDir(target))
print 'CMake command: ' + cmake_cmd_string
err = os.system(cmake_cmd_string)
self.restore_chdir()
if generator.cmakeName.find('Eclipse') != -1:
self.fixCmakeEclipseBug()
if err != 0:
raise Exception('CMake encountered error: ' + str(err))
def configureGui(self, target = '', extraArgs = ''):
self.persist_qmake()
qmake_cmd_string = self.qmake_cmd + ' ' + self.qtpro_filename + ' -r'
if sys.platform == 'darwin':
qmake_cmd_string += ' -spec macx-g++'
major, minor = self.getMacVersion()
if major == 10 and minor <= 4:
qmake_cmd_string += ' CONFIG+="ppc i386"'
libs = '-framework ApplicationServices -framework Security -framework cocoa'
if major == 10 and minor >= 6:
libs += ' -framework ServiceManagement'
qmake_cmd_string += ' "MACX_LIBS=%s" ' % libs
sdkDir = self.getMacSdkDir()
shortForm = 'macosx' + self.macSdk
version = str(major) + '.' + str(minor)
qmake_cmd_string += ' QMAKE_MACOSX_DEPLOYMENT_TARGET=' + version
qMajor, qMinor, qRev = self.getQmakeVersion()
if qMajor <= 4:
qmake_cmd_string += ' QMAKE_MAC_SDK=' + sdkDir
else:
qmake_cmd_string += ' QMAKE_MAC_SDK=' + shortForm
qmake_cmd_string += ' QMAKE_MAC_SDK.' + shortForm + '.path=' + sdkDir
print 'QMake command: ' + qmake_cmd_string
self.try_chdir(self.gui_dir)
err = os.system(qmake_cmd_string)
self.restore_chdir()
if err != 0:
raise Exception('QMake encountered error: ' + str(err))
def getQmakeVersion(self):
version = commands.getoutput('qmake --version')
result = re.search('(\\d+)\\.(\\d+)\\.(\\d)', version)
if not result:
raise Exception('Could not get qmake version.')
major = int(result.group(1))
minor = int(result.group(2))
rev = int(result.group(3))
return (major, minor, rev)
def getMacSdkDir(self):
sdkName = 'macosx' + self.macSdk
status, sdkPath = commands.getstatusoutput('xcrun --show-sdk-path --sdk ' + sdkName)
if status == 0 and sdkPath:
return sdkPath
developerDir = os.getenv('DEVELOPER_DIR')
if not developerDir:
developerDir = '/Applications/Xcode.app/Contents/Developer'
sdkDirName = sdkName.replace('macosx', 'MacOSX')
sdkPath = developerDir + '/Platforms/MacOSX.platform/Developer/SDKs/' + sdkDirName + '.sdk'
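        # NOTE: hard-coded override, the iPhoneOS 8.4 SDK path below replaces the Mac SDK path computed above.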
sdkPath = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'
if os.path.exists(sdkPath):
return sdkPath
return '/Developer/SDKs/' + sdkDirName + '.sdk'
def fixCmakeEclipseBug(self):
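        # Work around a CMake Eclipse generator problem by stripping the <linkedResources>
        # section from the generated .project file.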
print 'Fixing CMake Eclipse bugs...'
file = open('.project', 'r+')
content = file.read()
pattern = re.compile('\\s+<linkedResources>.+</linkedResources>', re.S)
content = pattern.sub('', content)
file.seek(0)
file.write(content)
file.truncate()
file.close()
def persist_cmake(self):
err = os.system('%s --version' % self.cmake_cmd)
if err != 0:
print 'Could not find `%s` in system path.\nDownload the latest version from:\n %s' % (self.cmake_cmd, self.cmake_url)
raise Exception('Cannot continue without CMake.')
else:
return self.cmake_cmd
def persist_qt(self):
self.persist_qmake()
def persist_qmake(self):
if sys.version_info < (2, 4):
return
try:
p = subprocess.Popen([self.qmake_cmd, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except:
print >> sys.stderr, 'Error: Could not find qmake.'
if sys.platform == 'win32':
print 'Suggestions:\n1. Ensure that qmake.exe exists in your system path.\n2. Try to download Qt (check our dev FAQ for links):\n qt-sdk-win-opensource-2010.02.exe'
raise Exception('Cannot continue without qmake.')
stdout, stderr = p.communicate()
if p.returncode != 0:
raise Exception('Could not test for cmake: %s' % stderr)
else:
m = re.search('.*Using Qt version (\\d+\\.\\d+\\.\\d+).*', stdout)
if m:
if sys.platform == 'win32':
ver = m.group(1)
if ver != self.w32_qt_version:
print >> sys.stderr, 'Warning: Not using supported Qt version %s (your version is %s).' % (self.w32_qt_version, ver)
else:
raise Exception('Could not find qmake version.')
def ensureConfHasRun(self, target, skipConfig):
if self.hasConfRun(target):
print 'Skipping config for target: ' + target
skipConfig = True
if not skipConfig:
self.configure(target)
def build(self, targets = [], skipConfig = False):
if len(targets) == 0:
targets += [self.defaultTarget]
self.ensure_setup_latest()
self.loadConfig()
if self.enableMakeCore:
self.makeCore(targets)
if self.enableMakeGui:
self.makeGui(targets)
def loadConfig(self):
config = self.getConfig()
if config.has_option('hm', 'macSdk'):
self.macSdk = config.get('hm', 'macSdk')
if config.has_option('hm', 'macIdentity'):
self.macIdentity = config.get('hm', 'macIdentity')
def makeCore(self, targets):
generator = self.getGeneratorFromConfig().cmakeName
if self.macSdk:
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.macSdk
if generator.find('Unix Makefiles') != -1:
for target in targets:
self.runBuildCommand(self.make_cmd, target)
else:
for target in targets:
if generator.startswith('Visual Studio'):
self.run_vcbuild(generator, target, self.sln_filepath())
elif generator == 'Xcode':
cmd = self.xcodebuild_cmd + ' -configuration ' + target.capitalize()
self.runBuildCommand(cmd, target)
else:
raise Exception('Build command not supported with generator: ' + generator)
def makeGui(self, targets, args = ''):
for target in targets:
if sys.platform == 'win32':
gui_make_cmd = self.w32_make_cmd + ' ' + target + args
print 'Make GUI command: ' + gui_make_cmd
self.try_chdir(self.gui_dir)
err = os.system(gui_make_cmd)
self.restore_chdir()
if err != 0:
raise Exception(gui_make_cmd + ' failed with error: ' + str(err))
elif sys.platform in ('linux2', 'sunos5', 'freebsd7', 'darwin'):
gui_make_cmd = self.make_cmd + ' -w' + args
print 'Make GUI command: ' + gui_make_cmd
targetDir = self.getGenerator().getBinDir(target)
bundleTargetDir = targetDir + '/Synergy.app'
if os.path.exists(bundleTargetDir):
shutil.rmtree(bundleTargetDir)
binDir = self.getGenerator().binDir
bundleTempDir = binDir + '/Synergy.app'
if os.path.exists(bundleTempDir):
shutil.rmtree(bundleTempDir)
self.try_chdir(self.gui_dir)
err = os.system(gui_make_cmd)
self.restore_chdir()
if err != 0:
raise Exception(gui_make_cmd + ' failed with error: ' + str(err))
if sys.platform == 'darwin' and 'clean' not in args:
self.macPostGuiMake(target)
self.fixQtFrameworksLayout(target)
else:
raise Exception('Unsupported platform: ' + sys.platform)
def macPostGuiMake(self, target):
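        # Move the GUI app bundle into the per-target bin dir, copy in the core binaries and
        # plugins, then run macdeployqt and copy each bundled Qt framework's Info.plist into place.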
bundle = 'Synergy.app'
binDir = self.getGenerator().binDir
targetDir = self.getGenerator().getBinDir(target)
bundleTempDir = binDir + '/' + bundle
bundleTargetDir = targetDir + '/' + bundle
if os.path.exists(bundleTempDir):
shutil.move(bundleTempDir, bundleTargetDir)
if self.enableMakeCore:
bundleBinDir = bundleTargetDir + '/Contents/MacOS/'
shutil.copy(targetDir + '/synergyc', bundleBinDir)
shutil.copy(targetDir + '/synergys', bundleBinDir)
shutil.copy(targetDir + '/syntool', bundleBinDir)
bundlePluginDir = bundleBinDir + 'plugins'
pluginDir = targetDir + '/plugins'
print 'Copying plugins dirtree: ' + pluginDir
if os.path.isdir(pluginDir):
print 'Copying to: ' + bundlePluginDir
shutil.copytree(pluginDir, bundlePluginDir)
else:
print "pluginDir doesn't exist, skipping"
self.loadConfig()
if not self.macIdentity:
raise Exception('run config with --mac-identity')
if self.enableMakeGui:
bin = 'macdeployqt Synergy.app -verbose=2'
self.try_chdir(targetDir)
err = os.system(bin)
self.restore_chdir()
print bundleTargetDir
if err != 0:
raise Exception(bin + ' failed with error: ' + str(err))
qMajor, qMinor, qRev = self.getQmakeVersion()
if qMajor <= 4:
frameworkRootDir = '/Library/Frameworks'
else:
frameworkRootDir = '/Developer/Qt5.2.1/5.2.1/clang_64/lib'
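            # NOTE: hard-coded override, the Homebrew Qt 4.8.6 frameworks path below is always used.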
frameworkRootDir = '/usr/local/Cellar/qt/4.8.6/Frameworks'
target = bundleTargetDir + '/Contents/Frameworks'
for root, dirs, files in os.walk(target):
for dir in dirs:
if dir.startswith('Qt'):
shutil.copy(frameworkRootDir + '/' + dir + '/Contents/Info.plist', target + '/' + dir + '/Resources/')
def symlink(self, source, target):
if not os.path.exists(target):
os.symlink(source, target)
def move(self, source, target):
if os.path.exists(source):
shutil.move(source, target)
def fixQtFrameworksLayout(self, target):
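        # On OS X 10.9 and later, rebuild the Versions/Current symlink layout inside each
        # bundled Qt framework so the bundle has the standard framework structure.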
targetDir = self.getGenerator().getBinDir(target)
target = targetDir + '/Synergy.app/Contents/Frameworks'
major, minor = self.getMacVersion()
if major == 10:
if minor >= 9:
for root, dirs, files in os.walk(target):
for dir in dirs:
if dir.startswith('Qt'):
self.try_chdir(target + '/' + dir + '/Versions')
self.symlink('5', 'Current')
self.move('../Resources', '5')
self.restore_chdir()
self.try_chdir(target + '/' + dir)
dot = dir.find('.')
frameworkName = dir[:dot]
self.symlink('Versions/Current/' + frameworkName, frameworkName)
self.symlink('Versions/Current/Resources', 'Resources')
self.restore_chdir()
def signmac(self):
self.loadConfig()
if not self.macIdentity:
raise Exception('run config with --mac-identity')
self.try_chdir('bin/Release/')
err = os.system('codesign --deep -fs "' + self.macIdentity + '" Synergy.app')
self.restore_chdir()
if err != 0:
raise Exception('codesign failed with error: ' + str(err))
def signwin(self, pfx, pwdFile, dist):
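        # Read the signing password from pwdFile, then sign either the distribution MSI or
        # each individual binary with signtool.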
generator = self.getGeneratorFromConfig().cmakeName
if not generator.startswith('Visual Studio'):
raise Exception('only windows is supported')
f = open(pwdFile)
lines = f.readlines()
f.close()
pwd = lines[0]
if dist:
self.signFile(pfx, pwd, 'bin/Release', self.getDistFilename('win'))
else:
self.signFile(pfx, pwd, 'bin/Release', 'synergy.exe')
self.signFile(pfx, pwd, 'bin/Release', 'synergyc.exe')
self.signFile(pfx, pwd, 'bin/Release', 'synergys.exe')
self.signFile(pfx, pwd, 'bin/Release', 'synergyd.exe')
self.signFile(pfx, pwd, 'bin/Release', 'syntool.exe')
self.signFile(pfx, pwd, 'bin/Release', 'synwinhk.dll')
def signFile(self, pfx, pwd, dir, file):
self.try_chdir(dir)
err = os.system('signtool sign /f ' + pfx + ' /p ' + pwd + ' /t http://timestamp.verisign.com/scripts/timstamp.dll ' + file)
self.restore_chdir()
if err != 0:
raise Exception('signtool failed with error: ' + str(err))
def runBuildCommand(self, cmd, target):
print 'Running: %s %s' % (cmd, target)
self.try_chdir(self.getBuildDir(target))
err = os.system(cmd)
self.restore_chdir()
if err != 0:
raise Exception(cmd + ' failed: ' + str(err))
def clean(self, targets = []):
if len(targets) == 0:
targets += [self.defaultTarget]
if self.enableMakeCore:
self.cleanCore(targets)
if self.enableMakeGui:
self.cleanGui(targets)
def cleanCore(self, targets):
generator = self.getGeneratorFromConfig().cmakeName
if generator.startswith('Visual Studio'):
if generator.startswith('Visual Studio 10'):
for target in targets:
self.run_vcbuild(generator, target, self.sln_filepath(), '/target:clean')
elif generator.startswith('Visual Studio'):
for target in targets:
self.run_vcbuild(generator, target, self.sln_filepath(), '/clean')
else:
cmd = ''
if generator == 'Unix Makefiles':
print 'Cleaning with GNU Make...'
cmd = self.make_cmd
elif generator == 'Xcode':
print 'Cleaning with Xcode...'
cmd = self.xcodebuild_cmd
else:
raise Exception('Not supported with generator: ' + generator)
for target in targets:
self.try_chdir(self.getBuildDir(target))
err = os.system(cmd + ' clean')
self.restore_chdir()
if err != 0:
raise Exception('Clean failed: ' + str(err))
def cleanGui(self, targets):
self.makeGui(targets, ' clean')
def open(self):
generator = self.getGeneratorFromConfig().cmakeName
if generator.startswith('Visual Studio'):
print 'Opening with %s...' % generator
self.open_internal(self.sln_filepath())
elif generator.startswith('Xcode'):
print 'Opening with %s...' % generator
self.open_internal(self.xcodeproj_filepath(), 'open')
else:
raise Exception('Not supported with generator: ' + generator)
def update(self):
print 'Running Subversion update...'
err = os.system('svn update')
if err != 0:
raise Exception('Could not update from repository with error code code: ' + str(err))
def revision(self):
print self.find_revision()
def find_revision(self):
return self.getGitRevision()
def getGitRevision(self):
if sys.version_info < (2, 4):
raise Exception('Python 2.4 or greater required.')
p = subprocess.Popen(['git',
'log',
'--pretty=format:%h',
'-n',
'1'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
raise Exception('Could not get revision, git error: ' + str(p.returncode))
return stdout.strip()
def getGitBranchName(self):
if sys.version_info < (2, 4):
raise Exception('Python 2.4 or greater required.')
p = subprocess.Popen(['git',
'rev-parse',
'--abbrev-ref',
'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
raise Exception('Could not get branch name, git error: ' + str(p.returncode))
result = stdout.strip()
result = re.sub('heads/', '', result)
return result
def find_revision_svn(self):
if sys.version_info < (2, 4):
stdout = commands.getoutput('svn info')
else:
p = subprocess.Popen(['svn', 'info'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
raise Exception('Could not get revision - svn info failed with code: ' + str(p.returncode))
m = re.search('.*Revision: (\\d+).*', stdout)
if not m:
raise Exception('Could not find revision number in svn info output.')
return m.group(1)
def kill(self):
if sys.platform == 'win32':
return os.system('taskkill /F /FI "IMAGENAME eq synergy*"')
raise Exception('Not implemented for platform: ' + sys.platform)
def doxygen(self):
self.enableMakeGui = False
self.configure(self.defaultTarget, '-DCONF_DOXYGEN:BOOL=TRUE')
err = os.system('doxygen %s/%s' % (self.doc_dir, self.doxygen_filename))
if err != 0:
raise Exception('doxygen failed with error code: ' + str(err))
def dist(self, type, vcRedistDir, qtDir):
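        # Build a distribution package; type selects src, rpm, deb, win (MSI) or mac (DMG),
        # and unsupported type/platform combinations raise an exception.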
package_unsupported = False
unixTarget = self.defaultTarget
if type == '' or type == None:
self.dist_usage()
return
moveExt = ''
if type == 'src':
self.distSrc()
elif type == 'rpm':
if sys.platform == 'linux2':
self.distRpm()
else:
package_unsupported = True
elif type == 'deb':
if sys.platform == 'linux2':
self.distDeb()
else:
package_unsupported = True
elif type == 'win':
if sys.platform == 'win32':
self.distWix()
else:
package_unsupported = True
elif type == 'mac':
if sys.platform == 'darwin':
self.distMac()
else:
package_unsupported = True
else:
raise Exception('Package type not supported: ' + type)
if moveExt != '':
self.unixMove(self.getGenerator().buildDir + '/release/*.' + moveExt, self.getGenerator().binDir)
if package_unsupported:
raise Exception("Package type, '%s' is not supported for platform, '%s'" % (type, sys.platform))
def distRpm(self):
rpmDir = self.getGenerator().buildDir + '/rpm'
if os.path.exists(rpmDir):
shutil.rmtree(rpmDir)
os.makedirs(rpmDir)
templateFile = open(self.cmake_dir + '/synergy.spec.in')
template = templateFile.read()
template = template.replace('${in:version}', self.getVersionNumber())
specPath = rpmDir + '/synergy.spec'
specFile = open(specPath, 'w')
specFile.write(template)
specFile.close()
target = '../../bin/synergy-%s-%s.rpm' % (self.getVersionForFilename(), self.getLinuxPlatform())
try:
self.try_chdir(rpmDir)
cmd = 'rpmbuild -bb --define "_topdir `pwd`" synergy.spec'
print 'Command: ' + cmd
err = os.system(cmd)
if err != 0:
raise Exception('rpmbuild failed: ' + str(err))
self.unixMove('RPMS/*/*.rpm', target)
cmd = 'rpmlint ' + target
print 'Command: ' + cmd
err = os.system(cmd)
if err != 0:
raise Exception('rpmlint failed: ' + str(err))
finally:
self.restore_chdir()
def distDeb(self):
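        # Assemble the Debian package tree (control file, binaries, plugin, desktop entry,
        # icon and docs), fix permissions, then build and lint it with dpkg-deb and lintian.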
buildDir = self.getGenerator().buildDir
binDir = self.getGenerator().binDir
resDir = self.cmake_dir
package = '%s-%s-%s' % (self.project, self.getVersionForFilename(), self.getLinuxPlatform())
debDir = '%s/deb' % buildDir
if os.path.exists(debDir):
shutil.rmtree(debDir)
metaDir = '%s/%s/DEBIAN' % (debDir, package)
os.makedirs(metaDir)
templateFile = open(resDir + '/deb/control.in')
template = templateFile.read()
template = template.replace('${in:version}', self.getVersionNumber())
template = template.replace('${in:arch}', self.getDebianArch())
controlPath = '%s/control' % metaDir
controlFile = open(controlPath, 'w')
controlFile.write(template)
controlFile.close()
targetBin = '%s/%s/usr/bin' % (debDir, package)
targetPlugin = '%s/%s/usr/lib/synergy/plugins' % (debDir, package)
targetShare = '%s/%s/usr/share' % (debDir, package)
targetApplications = '%s/applications' % targetShare
targetIcons = '%s/icons' % targetShare
targetDocs = '%s/doc/%s' % (targetShare, self.project)
os.makedirs(targetBin)
os.makedirs(targetPlugin)
os.makedirs(targetApplications)
os.makedirs(targetIcons)
os.makedirs(targetDocs)
for root, dirs, files in os.walk(debDir):
for d in dirs:
                os.chmod(os.path.join(root, d), 0755)
binFiles = ['synergy',
'synergyc',
'synergys',
'synergyd',
'syntool']
for f in binFiles:
shutil.copy('%s/%s' % (binDir, f), targetBin)
target = '%s/%s' % (targetBin, f)
            os.chmod(target, 0755)
err = os.system('strip ' + target)
if err != 0:
raise Exception('strip failed: ' + str(err))
pluginDir = '%s/plugins' % binDir
pluginFiles = ['libns.so']
for f in pluginFiles:
shutil.copy('%s/%s' % (pluginDir, f), targetPlugin)
target = '%s/%s' % (targetPlugin, f)
            os.chmod(target, 0644)
err = os.system('strip ' + target)
if err != 0:
raise Exception('strip failed: ' + str(err))
shutil.copy('%s/synergy.desktop' % resDir, targetApplications)
shutil.copy('%s/synergy.ico' % resDir, targetIcons)
docTarget = '%s/doc/%s' % (targetShare, self.project)
copyrightPath = '%s/deb/copyright' % resDir
shutil.copy(copyrightPath, docTarget)
shutil.copy('%s/deb/changelog' % resDir, docTarget)
err = os.system('gzip -9 %s/changelog' % docTarget)
if err != 0:
raise Exception('gzip failed: ' + str(err))
for root, dirs, files in os.walk(targetShare):
for f in files:
os.chmod(os.path.join(root, f), 420)
target = '../../bin/%s.deb' % package
try:
self.try_chdir(debDir)
cmd = 'fakeroot dpkg-deb --build %s' % package
print 'Command: ' + cmd
err = os.system(cmd)
if err != 0:
raise Exception('dpkg-deb failed: ' + str(err))
cmd = 'lintian %s.deb' % package
print 'Command: ' + cmd
err = os.system(cmd)
if err != 0:
raise Exception('lintian failed: ' + str(err))
self.unixMove('*.deb', target)
finally:
self.restore_chdir()
def distSrc(self):
name = '%s-%s-%s' % (self.project, self.getVersionForFilename(), 'Source')
exportPath = self.getGenerator().buildDir + '/' + name
if os.path.exists(exportPath):
print 'Removing existing export...'
shutil.rmtree(exportPath)
os.mkdir(exportPath)
cmd = 'git archive %s | tar -x -C %s' % (self.getGitBranchName(), exportPath)
print 'Exporting repository to: ' + exportPath
err = os.system(cmd)
if err != 0:
raise Exception('Repository export failed: ' + str(err))
packagePath = '../' + self.getGenerator().binDir + '/' + name + '.tar.gz'
try:
self.try_chdir(self.getGenerator().buildDir)
print 'Packaging to: ' + packagePath
err = os.system('tar cfvz ' + packagePath + ' ' + name)
if err != 0:
raise Exception('Package failed: ' + str(err))
finally:
self.restore_chdir()
def unixMove(self, source, dest):
print 'Moving ' + source + ' to ' + dest
err = os.system('mv ' + source + ' ' + dest)
if err != 0:
raise Exception('Package failed: ' + str(err))
def distMac(self):
self.loadConfig()
binDir = self.getGenerator().getBinDir('Release')
name = 'Synergy'
dist = binDir + '/' + name
if os.path.exists(dist):
shutil.rmtree(dist)
os.makedirs(dist)
shutil.move(binDir + '/' + name + '.app', dist + '/' + name + '.app')
self.try_chdir(dist)
err = os.system('ln -s /Applications')
self.restore_chdir()
fileName = '%s-%s-%s.dmg' % (self.project, self.getVersionForFilename(), self.getMacPackageName())
cmd = 'hdiutil create ' + fileName + ' -srcfolder ./' + name + '/ -ov'
self.try_chdir(binDir)
err = os.system(cmd)
self.restore_chdir()
def distWix(self):
generator = self.getGeneratorFromConfig().cmakeName
arch = 'x86'
if generator.endswith('Win64'):
arch = 'x64'
version = self.getVersionNumber()
args = '/p:DefineConstants="Version=%s"' % version
self.run_vcbuild(generator, 'release', 'synergy.sln', args, 'src/setup/win32/', 'x86')
filename = '%s-%s-Windows-%s.msi' % (self.project, self.getVersionForFilename(), arch)
old = 'bin/Release/synergy.msi'
new = 'bin/Release/%s' % filename
try:
os.remove(new)
except OSError:
pass
os.rename(old, new)
def distNsis(self, vcRedistDir, qtDir):
if vcRedistDir == '':
raise Exception('VC++ redist dir path not specified (--vcredist-dir).')
if qtDir == '':
raise Exception('QT SDK dir path not specified (--qt-dir).')
generator = self.getGeneratorFromConfig().cmakeName
arch = 'x86'
installDirVar = '$PROGRAMFILES32'
if generator.endswith('Win64'):
arch = 'x64'
installDirVar = '$PROGRAMFILES64'
templateFile = open(self.cmake_dir + '\\Installer.nsi.in')
template = templateFile.read()
template = template.replace('${in:version}', self.getVersionNumber())
template = template.replace('${in:arch}', arch)
template = template.replace('${in:vcRedistDir}', vcRedistDir)
template = template.replace('${in:qtDir}', qtDir)
template = template.replace('${in:installDirVar}', installDirVar)
nsiPath = self.getGenerator().buildDir + '\\Installer.nsi'
nsiFile = open(nsiPath, 'w')
nsiFile.write(template)
nsiFile.close()
command = 'makensis ' + nsiPath
print 'NSIS command: ' + command
err = os.system(command)
if err != 0:
raise Exception('Package failed: ' + str(err))
def getVersionNumber(self):
cmakeFile = open('CMakeLists.txt')
cmake = cmakeFile.read()
majorRe = re.search('VERSION_MAJOR (\\d+)', cmake)
major = majorRe.group(1)
minorRe = re.search('VERSION_MINOR (\\d+)', cmake)
minor = minorRe.group(1)
revRe = re.search('VERSION_REV (\\d+)', cmake)
rev = revRe.group(1)
return '%s.%s.%s' % (major, minor, rev)
def getVersionStage(self):
cmakeFile = open('CMakeLists.txt')
cmake = cmakeFile.read()
stageRe = re.search('VERSION_STAGE (\\w+)', cmake)
return stageRe.group(1)
def getVersionForFilename(self):
versionStage = self.getVersionStage()
gitBranch = self.getGitBranchName()
gitRevision = self.getGitRevision()
return '%s-%s-%s' % (gitBranch, versionStage, gitRevision)
def distftp(self, type, ftp):
if not type:
raise Exception('Platform type not specified.')
self.loadConfig()
binDir = self.getGenerator().getBinDir('Release')
filename = self.getDistFilename(type)
packageSource = binDir + '/' + filename
packageTarget = filename
ftp.upload(packageSource, packageTarget)
if type != 'src':
pluginsDir = binDir + '/plugins'
nsPluginSource = self.findLibraryFile(type, pluginsDir, 'ns')
if nsPluginSource:
nsPluginTarget = self.getLibraryDistFilename(type, pluginsDir, 'ns')
ftp.upload(nsPluginSource, nsPluginTarget, 'plugins')
def getLibraryDistFilename(self, type, dir, name):
platform, packageExt, libraryExt = self.getDistributePlatformInfo(type)
firstPart = '%s-%s-%s' % (name, self.getVersionForFilename(), platform)
filename = '%s.%s' % (firstPart, libraryExt)
if type == 'rpm' or type == 'deb':
filename = '%s-%s.%s' % (firstPart, packageExt, libraryExt)
return filename
def findLibraryFile(self, type, dir, name):
if not os.path.exists(dir):
return None
platform, packageExt, libraryExt = self.getDistributePlatformInfo(type)
ext = libraryExt
pattern = name + '\\.' + ext
for filename in os.listdir(dir):
if re.search(pattern, filename):
return dir + '/' + filename
def getDistributePlatformInfo(self, type):
ext = None
libraryExt = None
platform = None
if type == 'src':
ext = 'tar.gz'
platform = 'Source'
elif type == 'rpm' or type == 'deb':
ext = type
libraryExt = 'so'
platform = self.getLinuxPlatform()
elif type == 'win':
ext = 'msi'
libraryExt = 'dll'
generator = self.getGeneratorFromConfig().cmakeName
if generator.find('Win64') != -1:
platform = 'Windows-x64'
else:
platform = 'Windows-x86'
elif type == 'mac':
ext = 'dmg'
libraryExt = 'dylib'
platform = self.getMacPackageName()
if not platform:
raise Exception('Unable to detect distributable platform.')
return (platform, ext, libraryExt)
def getDistFilename(self, type):
pattern = self.getVersionForFilename()
for filename in os.listdir(self.getBinDir('Release')):
if re.search(pattern, filename):
return filename
raise Exception('Could not find package name with pattern: ' + pattern)
def getDebianArch(self):
if os.uname()[4][:3] == 'arm':
return 'armhf'
import platform
os_bits, other = platform.architecture()
if os_bits == '32bit':
return 'i386'
if os_bits == '64bit':
return 'amd64'
raise Exception('unknown os bits: ' + os_bits)
def getLinuxPlatform(self):
if os.uname()[4][:3] == 'arm':
return 'Linux-armv6l'
import platform
os_bits, other = platform.architecture()
if os_bits == '32bit':
return 'Linux-i686'
if os_bits == '64bit':
return 'Linux-x86_64'
raise Exception('unknown os bits: ' + os_bits)
def dist_usage(self):
print 'Usage: %s package [package-type]\n\nReplace [package-type] with one of:\n src .tar.gz source (Posix only)\n rpm .rpm package (Red Hat)\n deb .deb package (Debian)\n win .exe installer (Windows)\n mac .dmg package (Mac OS X)\n\nExample: %s package src-tgz' % (self.this_cmd, self.this_cmd)
def about(self):
print 'Help Me script, from the Synergy project.\n%s\n\nFor help, run: %s help' % (self.website_url, self.this_cmd)
def try_chdir(self, dir):
global prevdir
if dir == '':
prevdir = ''
return
if not os.path.exists(dir):
print 'Creating dir: ' + dir
os.makedirs(dir)
prevdir = os.path.abspath(os.curdir)
print 'Entering dir: ' + dir
os.chdir(dir)
def restore_chdir(self):
if prevdir == '':
return
print 'Going back to: ' + prevdir
os.chdir(prevdir)
def open_internal(self, project_filename, application = ''):
if not os.path.exists(project_filename):
raise Exception('Project file (%s) not found, run hm conf first.' % project_filename)
else:
path = project_filename
if application != '':
path = application + ' ' + path
err = os.system(path)
if err != 0:
raise Exception('Could not open project with error code: ' + str(err))
def setup(self, target = ''):
print 'Running setup...'
oldGenerator = self.findGeneratorFromConfig()
if not oldGenerator == None:
for target in ['debug', 'release']:
buildDir = oldGenerator.getBuildDir(target)
cmakeCacheFilename = 'CMakeCache.txt'
if buildDir != '':
cmakeCacheFilename = buildDir + '/' + cmakeCacheFilename
if os.path.exists(cmakeCacheFilename):
print 'Removing %s, since generator changed.' % cmakeCacheFilename
os.remove(cmakeCacheFilename)
generator = self.get_generator_from_prompt()
config = self.getConfig()
config.set('hm', 'setup_version', self.setup_version)
config.set('cmake', 'generator', generator)
self.write_config(config)
self.setConfRun('all', False)
self.setConfRun('debug', False)
self.setConfRun('release', False)
print 'Setup complete.'
def getConfig(self):
if os.path.exists(self.configFilename):
config = ConfigParser.ConfigParser()
config.read(self.configFilename)
else:
config = ConfigParser.ConfigParser()
if not config.has_section('hm'):
config.add_section('hm')
if not config.has_section('cmake'):
config.add_section('cmake')
return config
def write_config(self, config, target = ''):
if not os.path.isdir(self.configDir):
os.mkdir(self.configDir)
configfile = open(self.configFilename, 'wb')
config.write(configfile)
def getGeneratorFromConfig(self):
generator = self.findGeneratorFromConfig()
if generator:
return generator
raise Exception('Could not find generator in config file: ' + self.configFilename)
def findGeneratorFromConfig(self):
config = ConfigParser.RawConfigParser()
config.read(self.configFilename)
if not config.has_section('cmake'):
return None
name = config.get('cmake', 'generator')
generators = self.get_generators()
keys = generators.keys()
keys.sort()
for k in keys:
if generators[k].cmakeName == name:
return generators[k]
def min_setup_version(self, version):
if os.path.exists(self.configFilename):
config = ConfigParser.RawConfigParser()
config.read(self.configFilename)
try:
return config.getint('hm', 'setup_version') >= version
except:
return False
else:
return False
def hasConfRun(self, target):
if self.min_setup_version(2):
config = ConfigParser.RawConfigParser()
config.read(self.configFilename)
try:
return config.getboolean('hm', 'conf_done_' + target)
except:
return False
else:
return False
def setConfRun(self, target, hasRun = True):
if self.min_setup_version(3):
config = ConfigParser.RawConfigParser()
config.read(self.configFilename)
config.set('hm', 'conf_done_' + target, hasRun)
self.write_config(config)
else:
raise Exception('User does not have correct setup version.')
def get_generators(self):
if sys.platform == 'win32':
return self.win32_generators
if sys.platform in ('linux2', 'sunos5', 'freebsd7', 'aix5'):
return self.unix_generators
if sys.platform == 'darwin':
return self.darwin_generators
raise Exception('Unsupported platform: ' + sys.platform)
def get_generator_from_prompt(self):
return self.getGenerator().cmakeName
def getGenerator(self):
generators = self.get_generators()
if len(generators.keys()) == 1:
return generators[generators.keys()[0]]
if self.generator_id:
return generators[int(self.generator_id)]
conf = self.findGeneratorFromConfig()
if conf:
return conf
raise Exception('Generator not specified, use -g arg ' + '(use `hm genlist` for a list of generators).')
def setup_generator_prompt(self, generators):
if self.no_prompts:
raise Exception('User prompting is disabled.')
prompt = 'Enter a number:'
print prompt,
generator_id = raw_input()
if generator_id in generators:
print 'Selected generator:', generators[generator_id]
else:
print 'Invalid number, try again.'
return self.setup_generator_prompt(generators)
return generators[generator_id]
def get_vcvarsall(self, generator):
import platform, _winreg
os_bits, other = platform.architecture()
if os_bits == '64bit':
key_name = 'SOFTWARE\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VS7'
else:
key_name = 'SOFTWARE\\Microsoft\\VisualStudio\\SxS\\VC7'
try:
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key_name)
except:
raise Exception('Unable to open Visual Studio registry key. Application may not be installed.')
if generator.startswith('Visual Studio 8'):
value, type = _winreg.QueryValueEx(key, '8.0')
elif generator.startswith('Visual Studio 9'):
value, type = _winreg.QueryValueEx(key, '9.0')
elif generator.startswith('Visual Studio 10'):
value, type = _winreg.QueryValueEx(key, '10.0')
else:
raise Exception('Cannot determine vcvarsall.bat location for: ' + generator)
if os_bits == '64bit':
path = value + 'vc\\vcvarsall.bat'
else:
path = value + 'vcvarsall.bat'
if not os.path.exists(path):
raise Exception("'%s' not found." % path)
return path
def run_vcbuild(self, generator, mode, solution, args = '', dir = '', config32 = 'Win32'):
import platform
os_bits, other = platform.architecture()
if generator.find('Win64') != -1:
if os_bits == '32bit':
vcvars_platform = 'x86_amd64'
else:
vcvars_platform = 'amd64'
config_platform = 'x64'
else:
vcvars_platform = 'x86'
config_platform = config32
if mode == 'release':
config = 'Release'
else:
config = 'Debug'
if generator.startswith('Visual Studio 10'):
cmd = '@echo off\ncall "%s" %s \ncd "%s"\nmsbuild /nologo %s /p:Configuration="%s" /p:Platform="%s" "%s"' % (self.get_vcvarsall(generator),
vcvars_platform,
dir,
args,
config,
config_platform,
solution)
else:
config = config + '|' + config_platform
cmd = '@echo off\ncall "%s" %s \ncd "%s"\nvcbuild /nologo %s "%s" "%s"' % (self.get_vcvarsall(generator),
vcvars_platform,
dir,
args,
solution,
config)
temp_bat = self.getBuildDir() + '\\vcbuild.bat'
file = open(temp_bat, 'w')
file.write(cmd)
file.close()
err = os.system(temp_bat)
if err != 0:
raise Exception('Microsoft compiler failed with error code: ' + str(err))
def ensure_setup_latest(self):
if not self.min_setup_version(self.setup_version):
self.setup()
def reformat(self):
err = os.system('tool\\astyle\\AStyle.exe --quiet --suffix=none --style=java --indent=force-tab=4 --recursive lib/*.cpp lib/*.h cmd/*.cpp cmd/*.h')
if err != 0:
raise Exception('Reformat failed with error code: ' + str(err))
def printGeneratorList(self):
generators = self.get_generators()
keys = generators.keys()
keys.sort()
for k in keys:
print str(k) + ': ' + generators[k].cmakeName
def getMacVersion(self):
if not self.macSdk:
raise Exception('Mac OS X SDK not set.')
result = re.search('(\\d+)\\.(\\d+)', self.macSdk)
if not result:
print 'Invalid Mac OS X SDK: ' + self.macSdk
raise Exception('Could not find Mac OS X version.')
major = int(result.group(1))
minor = int(result.group(2))
return (major, minor)
def getMacPackageName(self):
major, minor = self.getMacVersion()
if major == 10:
if minor <= 4:
arch = 'Universal'
elif minor <= 6:
arch = 'i386'
else:
arch = 'x86_64'
else:
raise Exception('Mac OS major version unknown: ' + str(major))
version = str(major) + str(minor)
return 'MacOSX%s-%s' % (version, arch)
def reset(self):
if os.path.exists('build'):
shutil.rmtree('build')
if os.path.exists('bin'):
shutil.rmtree('bin')
if os.path.exists('lib'):
shutil.rmtree('lib')
if os.path.exists('src/gui/tmp'):
shutil.rmtree('src/gui/tmp')
for filename in glob.glob('src/gui/ui_*'):
os.remove(filename)
class CommandHandler():
ic = InternalCommands()
build_targets = []
vcRedistDir = ''
qtDir = ''
def __init__(self, argv, opts, args, verbose):
self.ic.verbose = verbose
self.opts = opts
self.args = args
for o, a in self.opts:
if o == '--no-prompts':
self.ic.no_prompts = True
elif o in ('-g', '--generator'):
self.ic.generator_id = a
elif o == '--skip-gui':
self.ic.enableMakeGui = False
elif o == '--skip-core':
self.ic.enableMakeCore = False
elif o in ('-d', '--debug'):
self.build_targets += ['debug']
elif o in ('-r', '--release'):
self.build_targets += ['release']
elif o == '--vcredist-dir':
self.vcRedistDir = a
elif o == '--qt-dir':
self.qtDir = a
elif o == '--mac-sdk':
self.ic.macSdk = a
elif o == '--mac-identity':
self.ic.macIdentity = a
def about(self):
self.ic.about()
def setup(self):
self.ic.setup()
def configure(self):
self.ic.configureAll(self.build_targets)
def build(self):
self.ic.build(self.build_targets)
def clean(self):
self.ic.clean(self.build_targets)
def update(self):
self.ic.update()
def install(self):
print 'Not yet implemented: install'
def doxygen(self):
self.ic.doxygen()
def dist(self):
type = None
if len(self.args) > 0:
type = self.args[0]
self.ic.dist(type, self.vcRedistDir, self.qtDir)
def distftp(self):
type = None
host = None
user = None
password = None
dir = None
if len(self.args) > 0:
type = self.args[0]
for o, a in self.opts:
if o == '--host':
host = a
elif o == '--user':
user = a
elif o == '--pass':
password = a
elif o == '--dir':
dir = a
if not host:
raise Exception('FTP host was not specified.')
ftp = ftputil.FtpUploader(host, user, password, dir)
self.ic.distftp(type, ftp)
def destroy(self):
self.ic.destroy()
def kill(self):
self.ic.kill()
def usage(self):
self.ic.usage()
def revision(self):
self.ic.revision()
def reformat(self):
self.ic.reformat()
def open(self):
self.ic.open()
def genlist(self):
self.ic.printGeneratorList()
def reset(self):
self.ic.reset()
def signwin(self):
pfx = None
pwd = None
dist = False
for o, a in self.opts:
if o == '--pfx':
pfx = a
elif o == '--pwd':
pwd = a
elif o == '--dist':
dist = True
self.ic.signwin(pfx, pwd, dist)
def signmac(self):
self.ic.signmac()
| gpl-2.0 | -9,186,781,263,585,066,000 | 38.442634 | 939 | 0.547858 | false | 3.94239 | true | false | false |
davisd/django-blogyall | blog/managers.py | 1 | 4842 | from datetime import datetime
from django.db import models
from django.contrib.contenttypes.models import ContentType
from tagging.models import Tag, TaggedItem
class PostImageManager(models.Manager):
"""
Post Image Manager
"""
# use for related fields
use_for_related_fields = True
def get_gallery_images(self):
"""
Get gallery images
Gallery images are PostImages that have a non-null gallery position
"""
return self.get_query_set().filter(gallery_position__isnull=False)
class PostManager(models.Manager):
"""
Post Manager
"""
# use for related fields
use_for_related_fields = True
def build_query(self, require_published=True, year=None, month=None,
category_slug=None, series_slug=None, tag=None, require_featured=False):
# Initial posts by require published indicator
if require_published:
posts = self.get_query_set().filter(is_published=True,
publish_date__lt=datetime.now)
else:
posts = self.get_query_set()
# featured
if require_featured == True:
posts = posts.filter(is_featured=True)
# date
if year:
posts = posts.filter(publish_date__year=year)
if month:
posts = posts.filter(publish_date__month=month)
#category and series
if category_slug:
posts = posts.filter(categories__slug=category_slug)
if series_slug:
posts = posts.filter(series__slug=series_slug)
# tag
if tag:
# return posts filtered by the tag
return TaggedItem.objects.get_by_model(posts, [tag,])
else:
return posts
def get_published_posts(self):
"""
Get published posts
"""
return self.build_query(require_published=True)
def get_featured_posts(self):
"""
Get featured posts
"""
return self.build_query(require_published=True, require_featured=True)
def get_post_archive(self, require_published=True, year=None, month=None,
category_slug=None, tag=None):
"""
Return a Post Archive
A blog post archive is a tuple of (year, months[]),
each month containing a tuple of (month, days[]),
each day containing a tuple of (day, posts[])
"""
# This was originally done as a dictionary
# but python dictionaries can't guarantee sort order.
posts = self.build_query(require_published=require_published, year=year,
month=month, category_slug=category_slug, tag=tag)
post_archive = {}
for post in posts.order_by('-publish_date'):
if not post_archive.has_key(post.publish_date.year):
post_archive[post.publish_date.year] = {}
if not post_archive[post.publish_date.year].has_key(post.publish_date.month):
post_archive[post.publish_date.year][post.publish_date.month] = {}
if not post_archive[post.publish_date.year][post.publish_date.month].has_key(post.publish_date.day):
post_archive[post.publish_date.year][post.publish_date.month][post.publish_date.day] = []
post_archive[post.publish_date.year][post.publish_date.month][post.publish_date.day].append(post)
# Now that all of that lifting is done, convert the dictionaries into tuples with lists
sorted_years = [(k,[]) for k in sorted(post_archive.keys(),
reverse=True)]
for sorted_year in sorted_years:
sorted_months = [(k,[]) for k in sorted(post_archive[sorted_year[0]],
reverse=True)]
sorted_year[1].extend(sorted_months)
for sorted_month in sorted_months:
sorted_days = [(k,[]) for k in sorted(
post_archive[sorted_year[0]][sorted_month[0]], reverse=True)]
sorted_month[1].extend(sorted_days)
for sorted_day in sorted_days:
sorted_day[1].extend(
post_archive[sorted_year[0]][sorted_month[0]][sorted_day[0]])
return sorted_years
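# Illustrative shape of the value returned above (hedged example, not real data):
#   [(2013, [(5, [(17, [<Post>, <Post>]), (3, [<Post>])])]),
#    (2012, [(11, [(30, [<Post>])])])]
# i.e. a list of (year, months) tuples, each month a (month, days) tuple and
# each day a (day, [posts]) tuple, all sorted newest-first.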
@classmethod
def get_tags_in_use(cls):
"""
Return the tags in use
"""
return Tag.objects.filter(
id__in=TaggedItem.objects.filter(
content_type=ContentType.objects.get(
app_label='blogyall',
model=cls
)
).values('tag_id')
)
class PublishedPostManager(PostManager):
"""
Published Post Manager
"""
def get_query_set(self):
return super(PublishedPostManager, self).get_query_set().filter(is_published=True)
| bsd-3-clause | 2,348,055,286,321,983,500 | 35.413534 | 112 | 0.577654 | false | 4.177739 | false | false | false |
kcpawan/django | tests/many_to_one/models.py | 215 | 2785 | """
Many-to-one relationships
To define a many-to-one relationship, use ``ForeignKey()``.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter, models.CASCADE)
def __str__(self):
return self.headline
class Meta:
ordering = ('headline',)
# If ticket #1578 ever slips back in, these models will not be able to be
# created (the field names being lower-cased versions of their opposite
# classes is important here).
class First(models.Model):
second = models.IntegerField()
class Second(models.Model):
first = models.ForeignKey(First, models.CASCADE, related_name='the_first')
# Protect against repetition of #1839, #2415 and #2536.
class Third(models.Model):
name = models.CharField(max_length=20)
third = models.ForeignKey('self', models.SET_NULL, null=True, related_name='child_set')
class Parent(models.Model):
name = models.CharField(max_length=20, unique=True)
bestchild = models.ForeignKey('Child', models.SET_NULL, null=True, related_name='favored_by')
class Child(models.Model):
name = models.CharField(max_length=20)
parent = models.ForeignKey(Parent, models.CASCADE)
class ToFieldChild(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE, to_field='name')
# Multiple paths to the same model (#7110, #7125)
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class Record(models.Model):
category = models.ForeignKey(Category, models.CASCADE)
@python_2_unicode_compatible
class Relation(models.Model):
left = models.ForeignKey(Record, models.CASCADE, related_name='left_set')
right = models.ForeignKey(Record, models.CASCADE, related_name='right_set')
def __str__(self):
return "%s - %s" % (self.left.category.name, self.right.category.name)
# Test related objects visibility.
class SchoolManager(models.Manager):
def get_queryset(self):
return super(SchoolManager, self).get_queryset().filter(is_public=True)
class School(models.Model):
is_public = models.BooleanField(default=False)
objects = SchoolManager()
class Student(models.Model):
school = models.ForeignKey(School, models.CASCADE)
| bsd-3-clause | -1,773,659,522,397,237,800 | 26.85 | 97 | 0.712029 | false | 3.607513 | false | false | false |
desihub/desisim | py/desisim/pixelsplines.py | 1 | 15653 | """
desisim.pixelsplines
====================
Pixel-integrated spline utilities.
Written by A. Bolton, U. of Utah, 2010-2013.
"""
from __future__ import absolute_import, division, print_function
import numpy as n
from scipy import linalg as la
from scipy import sparse as sp
from scipy import special as sf
def compute_duck_slopes(pixbound, flux):
"""
Compute the slope of the illuminating quadratic spline at
the locations of the 'ducks', i.e., the pixel boundaries,
given the integrated flux per unit baseline within the pixels.
ARGUMENTS:
pixbound: (npix + 1) ndarray of pixel boundaries, in units of
wavelength or log-wavelength or frequency or whatever you like.
flux: (npix) ndarray of spectral flux (energy or counts) per
abscissa unit, averaged over the extent of the pixel
RETURNS:
an (npix+1) ndarray of the slope of the underlying/illuminating
flux per unit abscissa spectrum at the position of the pixel
boundaries, a.k.a. 'ducks'. The end conditions are taken to
be zero slope, so the exterior points of the output are zeros.
"""
npix = len(flux)
# Test for correct argument dimensions:
if (len(pixbound) - npix) != 1:
print('Need one more element in pixbound than in flux!')
return 0
# The array of "delta-x" values:
dxpix = pixbound[1:] - pixbound[:-1]
# Test for monotonic increase:
if dxpix.min() <= 0.:
print('Pixel boundaries not monotonically increasing!')
return 0
# Encode the tridiagonal matrix that needs to be solved:
maindiag = (dxpix[:-1] + dxpix[1:]) / 3.
offdiag = dxpix[1:-1] / 6.
upperdiag = n.append(0., offdiag)
lowerdiag = n.append(offdiag, 0.)
band_matrix = n.vstack((upperdiag, maindiag, lowerdiag))
# The right-hand side:
rhs = flux[1:] - flux[:-1]
# Solve the banded matrix and return:
acoeff = la.solve_banded((1,1), band_matrix, rhs)
acoeff = n.append(n.append(0., acoeff), 0.)
return acoeff
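# Illustrative usage (added for clarity; the values below are assumptions,
# not part of the original module):
#   pixbound = n.linspace(4000., 4010., 11)   # 11 boundaries -> 10 pixels
#   flux = n.ones(10)                          # flat spectrum, counts per unit x
#   slopes = compute_duck_slopes(pixbound, flux)
#   # slopes has 11 elements (one per boundary); for a flat spectrum the
#   # interior slopes solve to zero and the end conditions are zero by design.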
def cen2bound(pixelcen):
"""
Convenience function to do the obvious thing to transform
pixel centers to pixel boundaries.
"""
pixbound = 0.5 * (pixelcen[1:] + pixelcen[:-1])
lo_val = 2. * pixbound[0] - pixbound[1]
hi_val = 2. * pixbound[-1] - pixbound[-2]
pixbound = n.append(n.append(lo_val, pixbound), hi_val)
return pixbound
def gauss_blur_matrix(pixbound, sig_conv):
"""
Function to generate a Gaussian blurring matrix for a pixelized
spectrum, from specified pixel boundaries and 'sigma' vector.
The matrix will be flux-conserving if the spectrum to which it is
applied has units of 'counts per unit x', and pixbound and sig_conv
both have units of x.
pixbound should have one more element than sig_conv.
Output is a scipy sparse matrix that can implement the blurring as:
blurflux = gauss_blur_matrix * flux
where 'flux' has the same dimensions as 'sig_conv'.
"""
# Derived values and error checks:
npix = len(pixbound) - 1
if (len(sig_conv) != npix):
raise PixSplineError('Need one more element in pixbound than in \
sig_conv!')
if (sig_conv.min() <= 0.):
raise PixSplineError('sig_conv must be > 0 everywhere!')
xcen = 0.5 * (pixbound[1:] + pixbound[:-1])
dxpix = pixbound[1:] - pixbound[:-1]
if (dxpix.min() <= 0.):
raise PixSplineError('Pixel boundaries not monotonically increasing!')
# Which "new" pixels does each "old" pixel touch?
# Let's go +/- 6 sigma for all:
sig_width = 6.0
# A minor correction factor to preserve flux conservation:
cfact = 1./sf.erf(sig_width / n.sqrt(2.))
xblur_lo = xcen - sig_width * sig_conv
xblur_hi = xcen + sig_width * sig_conv
bin_lo = n.digitize(xblur_lo, pixbound) - 1
bin_hi = n.digitize(xblur_hi, pixbound) - 1
# Restrict the ranges:
#xblur_lo = n.where((xblur_lo > pixbound[0]), xblur_lo, pixbound[0])
#xblur_lo = n.where((xblur_lo < pixbound[-1]), xblur_lo, pixbound[-1])
#xblur_hi = n.where((xblur_hi > pixbound[0]), xblur_hi, pixbound[0])
#xblur_hi = n.where((xblur_hi < pixbound[-1]), xblur_hi, pixbound[-1])
bin_lo = n.where((bin_lo >= 0), bin_lo, 0)
#bin_lo = n.where((bin_lo < npix), bin_lo, npix-1)
#bin_hi = n.where((bin_hi >= 0), bin_hi, 0)
bin_hi = n.where((bin_hi < npix), bin_hi, npix-1)
# Compute total number of non-zero elements in the broadening matrix:
n_each = bin_hi - bin_lo + 1
n_entries = n_each.sum()
ij = n.zeros((2, n_entries), dtype=int)
v_vec = n.zeros(n_entries, dtype=float)
# Loop over pixels in the "old" spectrum:
pcount = 0
roottwo = n.sqrt(2.)
bin_vec = n.arange(npix, dtype=int)
for k in range(npix):
xbound = pixbound[bin_lo[k]:bin_hi[k]+2]
# Gaussian integral in terms of error function:
erf_terms = cfact * 0.5 * sf.erf((xbound - xcen[k]) / (roottwo *
sig_conv[k]))
erf_int = (erf_terms[1:] - erf_terms[:-1]) * \
dxpix[k] / dxpix[bin_lo[k]:bin_hi[k]+1]
ij[0,pcount:pcount+n_each[k]] = bin_vec[bin_lo[k]:bin_hi[k]+1]
ij[1,pcount:pcount+n_each[k]] = k
v_vec[pcount:pcount+n_each[k]] = erf_int
pcount += n_each[k]
conv_matrix = sp.coo_matrix((v_vec, ij), shape=(npix,npix))
return conv_matrix.tocsr()
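# Illustrative usage sketch (hypothetical values, not from the original source):
#   pixbound = n.arange(101, dtype=float)     # 100 unit-width pixels
#   sig_conv = n.zeros(100) + 1.5             # constant Gaussian sigma of 1.5 x-units
#   blur = gauss_blur_matrix(pixbound, sig_conv)
#   blurflux = blur * flux                    # flux: length-100 counts-per-unit-x array
# The matrix rows integrate a +/-6 sigma Gaussian (renormalised by cfact), so
# total counts are conserved to that truncation accuracy.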
class PixSplineError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class PixelSpline:
"""
Pixel Spline object class.
Initialize as follows:
PS = PixelSpline(pixbound, flux)
where
pixbound = array of pixel boundaries in baseline units
and
flux = array of specific flux values in baseline units.
Assumptions:
'pixbound' should have one more element than 'flux', and
units of 'flux' are -per-unit-baseline, for the baseline
units in which pixbound is expressed, averaged over the
extent of each pixel.
"""
def __init__(self, pixbound, flux):
npix = len(flux)
# Test for correct argument dimensions:
if (len(pixbound) - npix) != 1:
raise PixSplineError('Need one more element in pixbound \
than in flux!')
# The array of "delta-x" values:
dxpix = pixbound[1:] - pixbound[:-1]
# Test for monotonic increase:
if dxpix.min() <= 0.:
raise PixSplineError('Pixel boundaries not monotonically \
increasing!')
self.npix = npix
self.pixbound = pixbound.copy()
self.dxpix = dxpix.copy()
self.xcen = 0.5 * (pixbound[1:] + pixbound[:-1]).copy()
self.flux = flux.copy()
maindiag = (dxpix[:-1] + dxpix[1:]) / 3.
offdiag = dxpix[1:-1] / 6.
upperdiag = n.append(0., offdiag)
lowerdiag = n.append(offdiag, 0.)
band_matrix = n.vstack((upperdiag, maindiag, lowerdiag))
# The right-hand side:
rhs = flux[1:] - flux[:-1]
# Solve the banded matrix for the slopes at the ducks:
acoeff = la.solve_banded((1,1), band_matrix, rhs)
self.duckslopes = n.append(n.append(0., acoeff), 0.)
def point_evaluate(self, xnew, missing=0.):
"""
Evaluate underlying pixel spline at array of points
BUG: input currently needs to be at least 1D array.
"""
# Initialize output array:
outflux = 0. * self.flux[0] * xnew + missing
# Digitize into bins:
bin_idx = n.digitize(xnew, self.pixbound)
# Find the indices of those that are actually in-bounds:
wh_in = n.where((bin_idx > 0) * (bin_idx < len(self.pixbound)))
if len(wh_in[0]) == 0:
return outflux
xnew_in = xnew[wh_in]
idx_in = bin_idx[wh_in] - 1
# The pixel centers as per the algorithm in use:
adiff = self.duckslopes[idx_in+1] - self.duckslopes[idx_in]
asum = self.duckslopes[idx_in+1] + self.duckslopes[idx_in]
xdiff = xnew_in - self.xcen[idx_in]
fluxvals = adiff * xdiff**2 / (2. * self.dxpix[idx_in]) + asum * xdiff \
/ 2. + self.flux[idx_in] - adiff * self.dxpix[idx_in] / 24.
outflux[wh_in] = fluxvals
return outflux
def find_extrema(self, minima=False):
# Find the formal extrema positions:
x_ext = self.xcen - 0.5 * self.dxpix * \
(self.duckslopes[1:] + self.duckslopes[:-1]) / \
(self.duckslopes[1:] - self.duckslopes[:-1])
# Digitize these into bins:
bin_ext = n.digitize(x_ext, self.pixbound) - 1
# The second derivatives, flipped in sign if minima is set:
curvat = (-1)**(minima == True) * (self.duckslopes[1:] -
self.duckslopes[:-1]) / self.dxpix
# Find in-bin maxima:
wh_ext = n.where((bin_ext == n.arange(self.npix)) * (curvat < 0))
if len(wh_ext[0]) < 1:
return n.array([])
x_ext = x_ext[wh_ext]
return x_ext
def subpixel_average(self, ipix, xlo, xhi):
adiff = self.duckslopes[ipix+1] - self.duckslopes[ipix]
asum = self.duckslopes[ipix+1] + self.duckslopes[ipix]
xlo_c = xlo - self.xcen[ipix]
xhi_c = xhi - self.xcen[ipix]
outval = adiff * ((xhi-xlo)**2 / 6. + xhi_c * xlo_c / 2.) / \
self.dxpix[ipix] + asum * (xhi_c + xlo_c) / 4. - adiff * \
self.dxpix[ipix] / 24. + self.flux[ipix]
return outval
def resample(self, pb_new):
"""
Method to resample a pixelspline analytically onto a new
set of pixel boundaries.
"""
npix_new = len(pb_new) - 1
xnew_lo = pb_new[:-1].copy()
xnew_hi = pb_new[1:].copy()
# Test for monotonic:
new_fulldx = xnew_hi - xnew_lo
if new_fulldx.min() <= 0.:
raise PixSplineError('New pixel boundaries not monotonically \
increasing!')
# Digitize the new boundaries into the original bins:
bin_idx = n.digitize(pb_new, self.pixbound) - 1
bin_lo = bin_idx[:-1].copy()
bin_hi = bin_idx[1:].copy()
# Array for accumulating new counts:
new_counts = n.zeros(npix_new, dtype=self.flux.dtype)
# Array for accumulating new pixel widths by pieces.
# Only used for debugging so far, but may be useful in future.
#new_dxpix = n.zeros(npix_new, dtype=self.flux.dtype)
# For convenience, we define the following.
# Careful not to modify them... they are views, not copies!
xold_lo = self.pixbound[:-1]
xold_hi = self.pixbound[1:]
# 4 cases to cover:
# Case 1: both bin_hi and bin_lo in the same bin:
wh_this = n.where((bin_hi == bin_lo) * (bin_lo >= 0) * \
(bin_hi < self.npix))
if (len(wh_this[0]) > 0):
dx_this = xnew_hi[wh_this] - xnew_lo[wh_this]
avgval_this = self.subpixel_average(bin_lo[wh_this],
xnew_lo[wh_this],
xnew_hi[wh_this])
#new_dxpix[wh_this] += dx_this
new_counts[wh_this] += avgval_this * dx_this
# Case 2: more than one bin, lower segment:
wh_this = n.where((bin_hi > bin_lo) * (bin_lo >= 0))
if (len(wh_this[0]) > 0):
dx_this = xold_hi[bin_lo[wh_this]] - xnew_lo[wh_this]
avgval_this = self.subpixel_average(bin_lo[wh_this],
xnew_lo[wh_this],
xold_hi[bin_lo[wh_this]])
#new_dxpix[wh_this] += dx_this
new_counts[wh_this] += avgval_this * dx_this
# Case 3: more than one bin, upper segment:
wh_this = n.where((bin_hi > bin_lo) * (bin_hi < self.npix))
if (len(wh_this[0]) > 0):
dx_this = xnew_hi[wh_this] - xold_lo[bin_hi[wh_this]]
avgval_this = self.subpixel_average(bin_hi[wh_this],
xold_lo[bin_hi[wh_this]],
xnew_hi[wh_this])
#new_dxpix[wh_this] += dx_this
new_counts[wh_this] += avgval_this * dx_this
# Case 4: enire bins covered, whole pixels:
wh_this = n.where(bin_hi > (bin_lo+1))
nwhole = len(wh_this[0])
if (nwhole > 0):
pcounts = self.flux * self.dxpix
icounts_this = n.array([pcounts[bin_lo[wh_this[0][ii]]+1:\
bin_hi[wh_this[0][ii]]].sum()
for ii in range(nwhole)])
#new_dxpix[wh_this] += dx_this
new_counts[wh_this] += icounts_this
# Divide out for average and return:
return new_counts / new_fulldx
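# Illustrative PixelSpline usage (hedged example; inputs are assumed, not
# taken from the original code):
#   PXS = PixelSpline(pixbound, flux)             # flux averaged per pixel
#   vals = PXS.point_evaluate(n.array([4003.2]))  # needs a 1D array (see note above)
#   newflux = PXS.resample(new_pixbound)          # flux-conserving rebinning
# resample() integrates the underlying quadratic spline over each new pixel,
# so counts are preserved wherever the new grid lies inside the old one.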
class WeightedRebinCoadder:
"""
Object class for weighted rebinning and coaddition of spectra
Initialize as follows:
WRC = WeightedRebinCoadder(fluxes, invvars, pixbounds)
where
fluxes = list of arrays of specific flux values
invvars = list of arrays of associated inverse variances
pixbounds = list of arrays of pixel boundaries in baseline units
"""
def __init__(self, fluxes, invvars, pixbounds):
# Determine minimum and maximum values of independent variable:
self.min_indep = [this_bound.min() for this_bound in pixbounds]
self.max_indep = [this_bound.max() for this_bound in pixbounds]
self._n_input = len(fluxes)
# Compute pixel widths:
dpixes = [this_bound[1:] - this_bound[:-1] for this_bound in pixbounds]
# Compute "specific inverse variances":
sp_invvars = [invvars[i] / dpixes[i] for i in range(self._n_input)]
# Compute pixelspline objects for fluxes:
self._PXS_fluxes = [PixelSpline(pixbounds[i], fluxes[i]) for i in \
range(self._n_input)]
# Compute pixelspline objects for specific inverse variances:
self._PXS_sp_invvars = [PixelSpline(pixbounds[i], sp_invvars[i]) for \
i in range(self._n_input)]
def coadd(self, pixbound_out):
# Compute coverage masks:
masks = [(pixbound_out[:-1] > self.min_indep[i]) *
(pixbound_out[1:] < self.max_indep[i]) for i in \
range(self._n_input)]
# Compute output pixel widths:
dpix_out = pixbound_out[1:] - pixbound_out[:-1]
# Compute interpolated fluxes:
new_fluxes = [this_PXS.resample(pixbound_out) for this_PXS in \
self._PXS_fluxes]
# Compute interpolated specific inverse variances (converted
# to inverse variances):
new_invvars = [dpix_out * this_PXS.resample(pixbound_out) for \
this_PXS in self._PXS_sp_invvars]
# Compute coadded flux and inverse variance and return:
flux_coadd = 0.
invvar_coadd = 0.
for i in range(self._n_input):
flux_coadd += new_fluxes[i] * new_invvars[i] * masks[i]
invvar_coadd += new_invvars[i] * masks[i]
is_good = n.where(invvar_coadd > 0.)
flux_coadd[is_good] /= invvar_coadd[is_good]
return flux_coadd, invvar_coadd
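# Illustrative coaddition sketch (the two input spectra are assumptions):
#   WRC = WeightedRebinCoadder([flux1, flux2], [ivar1, ivar2], [bound1, bound2])
#   flux_co, ivar_co = WRC.coadd(pixbound_out)
# Each spectrum is rebinned onto pixbound_out and combined with
# inverse-variance weights; output pixels outside a spectrum's coverage
# receive no contribution from it.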
| bsd-3-clause | -4,022,755,084,042,178,600 | 43.851003 | 80 | 0.565259 | false | 3.324766 | false | false | false |
JohnBrookes/MayaAWD | Autodesk/Maya2013/Python/Lib/site-packages/pymel/tools/envparse.py | 4 | 15560 | "Parser for Maya.env"
import sys, os, os.path, logging, warnings
#import external.ply.lex as lex
try:
from pymel.util.external.ply import lex
except ImportError:
from ply import lex
from pymel.mayautils import getMayaAppDir
_logger = logging.getLogger(__name__)
# lexer and parser for the Maya.env file
# first level lexer : form LVAR ASSIGN VALUE, then second level parsing of VALUE
# variables substitution are done as in Maya, taking only into account already defined vars
# when line is encountered
class EnvLex :
""" ply.lex lexer class to parse Maya.env file """
def __init__(self):
self.states = ( ('left','exclusive'), ('right','exclusive'), ('end','exclusive'), ('cancel','exclusive') )
self.line = ''
def build(self, **kwargs):
self.lexer = lex.lex(object=self,**kwargs)
tokens = (
'COMMENT',
'ASSIGN',
'VAR',
'VALUE',
'OK',
'CANCEL',
'newline'
)
# First level parsing : form LVAR ASSIGN VALUE
t_ANY_ignore_COMMENT = r'\#[^\n]*'
# Ignore starting spaces only
t_INITIAL_ignore = '^[ \t]+'
t_left_ignore = '[ \t]+'
t_right_ignore = '[ \t]+'
# careful, there seems to be a nasty bug where ply.lex takes $ as its literal value instead of in the 'end of line' meaning ?
t_end_ignore = '[ \t]+$'
t_cancel_ignore = '[^\n]+'
# Valid l-values are env var names, must come first in line (INITIAL sate)
def t_VAR(self, t) :
r'[^\\^\/^\:^\*^\"^\<^\>^\|^=^ ^\t^\n^#]+'
# VAR can only be on left side of ASSIGN (INITIAL parser state)
self.lexer.begin('left')
self.line += t.value
return t
# Assignation sign, ignore spaces around it
def t_left_ASSIGN(self, t):
r'[ \t]*=[ \t]*'
self.lexer.begin('right')
t.value = t.value.strip()
self.line += t.value
return t
# r-values will be parsed again depending on os name
def t_right_VALUE(self, t):
r'[^=^\n^#]+'
# one and only one VALUE on right side of ASSIGN
self.lexer.begin('end')
self.line += t.value
return t
# More than one equal sign per line would be an error
def t_right_ASSIGN(self, t):
r'[ \t]*=[ \t]*'
warnings.warn ( "Double '=' at line %i, format for a Maya.env line is <VAR> = <value>, line ignored" % (self.lexer.lineno), ExecutionWarning)
# skip whole line
self.lexer.begin('cancel')
while self.lexer.lexpos<self.lexer.lexlen and self.lexer.lexdata[self.lexer.lexpos] != '\n' :
self.lexer.skip(1)
def t_end_ASSIGN(self, t):
r'[ \t]*=[ \t]*'
warnings.warn ( "More than one '=' at line %i, format for a Maya.env line is <VAR> = <value>, line ignored" % (self.lexer.lineno), ExecutionWarning)
# skip whole line
self.lexer.begin('cancel')
while self.lexer.lexpos<self.lexer.lexlen and self.lexer.lexdata[self.lexer.lexpos] != '\n' :
self.lexer.skip(1)
# r-values will be parsed again depending on os name
def t_end_VALUE(self, t):
r'[^=^\n^#]+'
# one and only one VALUE on right side of ASSIGN
warnings.warn ( "More than one value at line %i, format for a Maya.env line is <VAR> = <value>, line ignored" % (self.lexer.lineno), ExecutionWarning)
# skip whole line
self.lexer.begin('cancel')
while self.lexer.lexpos<self.lexer.lexlen and self.lexer.lexdata[self.lexer.lexpos] != '\n' :
self.lexer.skip(1)
# Ignore ending spaces and count line no
def t_ANY_newline(self, t):
r'[ \t]*\n+'
st = self.lexer.current_state()
if st == 'end' :
t.type = 'OK'
t.value = self.line
elif st == 'INITIAL' :
pass
else :
t.type = 'CANCEL'
v = ''
i = self.lexer.lexpos-2
while i>0 and self.lexer.lexdata[i] != '\n' :
v = self.lexer.lexdata[i] + v
i -= 1
t.value = v
self.lexer.begin('INITIAL')
self.line = ''
# Cound nb of new lines, removing white space
self.lexer.lineno += len(t.value.lstrip(' \t'))
return t
# Error handling rules
def t_ANY_error(self, t):
warnings.warn ( "Illegal character '%s' at line %i, ignored" % (t.value[0], self.lexer.lineno), ExecutionWarning)
self.lexer.skip(1)
def t_INITIAL_error(self, t):
warnings.warn ( "Invalid VAR name '%s' at line %i, line ignored" % (t.value[0], self.lexer.lineno), ExecutionWarning)
# skip whole line
while self.lexer.lexpos<self.lexer.lexlen and self.lexer.lexdata[self.lexer.lexpos] != '\n' :
self.lexer.skip(1)
def t_left_error(self, t):
warnings.warn ( "Illegal value '%s' at line %i, format for a Maya.env line is <VAR> = <value>, line ignored" % (t.value[0], self.lexer.lineno), ExecutionWarning)
# skip whole line
while self.lexer.lexpos<self.lexer.lexlen and self.lexer.lexdata[self.lexer.lexpos] != '\n' :
self.lexer.skip(1)
def t_right_error(self, t):
warnings.warn ( "Illegal value '%s' at line %i, line ignored" % (t.value[0], self.lexer.lineno), ExecutionWarning)
# skip whole line
while self.lexer.lexpos<self.lexer.lexlen and self.lexer.lexdata[self.lexer.lexpos] != '\n' :
self.lexer.skip(1)
# Test it
def test(self,data):
self.lexer.input(data)
while 1:
tok = self.lexer.token()
if not tok: break
print tok
# second level lexer : os dependant parsing of values and variable substitution
class ValueLex :
""" second level lexer to parse right-values depending on os name """
class Warn :
""" a ValueLex subclass to reset warning count """
def __init__(self):
self.SEP = False
self.VAR = False
self.PATH = False
def __init__(self, symbols, osname = os.name):
self.os = osname
self.symbols = symbols
self.line = 0
self.warn = ValueLex.Warn()
def build(self, **kwargs):
self.lexer = lex.lex(object=self,**kwargs)
tokens = (
'SEP',
'RVAR1',
'RVAR2',
'PATHSEP',
'VALUE'
)
# ignore ending space
t_ignore = '^[ \t]+'
def t_SEP(self, t):
r':;'
if t.value==';' and self.os != 'nt' :
# t.value = ':'
if not self.warn.SEP :
warnings.warn ( "Line %i: the ';' separator should only be used on nt os, on linux or osx use ':' rather" % self.lexer.lineno, ExecutionWarning)
self.warn.SEP = True
return t
# Valid l-values are env var names, must come first in line (INITIAL sate)
def t_RVAR1(self, t) :
r'\$[^\\^/^:^*^"^<^>^|^=^ ^\t^\n^#^$]+'
if self.os == 'nt' :
if not self.warn.VAR :
warnings.warn ( "Line %i: $VAR should be used on linux or osx, \%VAR\% on nt" % self.lexer.lineno, ExecutionWarning)
self.warn.VAR = True
v = t.value.lstrip('$')
if self.symbols.has_key(v) :
t.value = self.symbols[v]
return t
def t_RVAR2(self, t) :
r'\%[^\\^/^:^*^"^<^>^|^=^ ^\t^\n^#]+\%'
if self.os != 'nt' :
if not self.warn.VAR :
warnings.warn ( "Line %i: $VAR should be used on linux or osx, \%VAR\% on nt" % self.lexer.lineno, ExecutionWarning)
self.warn.VAR = True
v = t.value.strip('%')
if self.symbols.has_key(v) :
t.value = self.symbols[v]
return t
# Assignation sign, ignore spaces around it
def t_PATHSEP(self, t) :
r'\/|\\'
if self.os != 'nt' and t.value == '\\':
if not self.warn.PATH :
warnings.warn ( "Line %i: the '\\' path separator should only be used on nt, on linux or osx use '/' rather" % self.lexer.lineno, ExecutionWarning)
self.warn.PATH = True
return t
# we just return the rest as-is
# TODO: warnings if it's a path and path doesn't exist ?
# Would need to differentiate % or $ wether we are on nt or not but py.lex
# handles definitions strangely, like they are static / source time evaluated
# removed % from the list of excluded characters as some definitions seem to use it :
# $RMSTREE/icons/%B
# TODO : Never seen it elsewhere, must check it doesn't collide with %VARNAME% on NT
def t_VALUE(self, t):
r'[^=^\n^#^$]+'
return t
def t_error(self, t):
warnings.warn ( "Illegal character '%s' at line %i, ignored" % (t.value[0], self.lexer.lineno), ExecutionWarning)
self.lexer.skip(1)
# Test it
def test(self,data):
self.lexer.input(data)
while 1:
tok = self.lexer.token()
if not tok: break
print tok
# Do the 2 level parse of a Maya.env format text and return a symbol table of the declared env vars
def parse(text, environ=os.environ, osname=os.name):
symbols = environ.copy()
newsymbols = {}
# first level lexer
envLex = EnvLex()
envLex.build()
sep = os.path.pathsep
# easier if we have a closing newline before eof
if not text.endswith('\n') :
text += '\n'
envLex.lexer.input(text)
# second level lexer for values
valueLex = ValueLex(symbols, osname)
valueLex.build()
tok = 'dummy'
while tok:
tok = envLex.lexer.token()
if tok is not None :
if tok.type=='VAR' :
var = tok.value
elif tok.type=='VALUE' :
value = tok.value
elif tok.type=='OK' :
# secondary parsing on value depending on os
# update defined env vars up to now
if var is not None :
# It's quite hard to guess what Maya does with pre-existant env vars when they are also declared
# in Maya.env. It seems to ignore Maya,env in most of these cases, except for MAYA_SCRIPT_PATH
# where it will add the content o Maya.env to the predefined var
# for PATH, MAYA_PLUGIN_PATH and LD_LIBRARY_PATH on linux it seems to add his own stuff, disreguarding
# Maya.env if the the variable was pre-existant. If you notice (or want) different behaviors you can
# change it here
newvalue = None
action = 'Ignore'
if symbols.has_key(var) :
# For these variables ONLY, maya will append the value in maya.env to an exisiting environment variable
# (Default is for already defined value to override value in maya.env)
# (note the LACK of PYTHONPATH here... boo!)
if var in ('MAYA_SCRIPT_PATH',
'MAYA_PLUG_IN_PATH',
'MAYA_MODULE_PATH',
'XBMLANGPATH'):
newvalue = symbols[var]+sep
action = 'Add'
else :
newvalue = ''
action = 'Set'
if newvalue is not None :
# only display warning for a better feedback there,
# as even if it makes no sense we can in all cases affect the value to the env var
valueLex.symbols = symbols
valueLex.lexer.input(value)
valueLex.lexer.lineno = tok.lineno
valueLex.warn = ValueLex.Warn()
vtok = 'dummy'
while vtok:
vtok = valueLex.lexer.token()
if vtok is not None :
newvalue += vtok.value
symbols[var] = newvalue
newsymbols[var] = newvalue
if action == 'Set' :
print u"%s set to value %s" % (var, unicode(newvalue))
elif action == 'Add' :
print u"%s was already set, appending value: %s" % (var, unicode(newvalue))
elif action == 'Ignore' :
print u"%s was already set, ignoring line: %s" % (var, unicode(tok.value))
var = value = None
elif tok.type=='CANCEL' :
print "Line was ignored due to parsing errors: %s" % unicode(tok.value)
var = value = None
else :
pass
return newsymbols
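# Illustrative call (hypothetical Maya.env content; assumes these variables are
# not already present in os.environ, otherwise the pre-existing values win):
#   text = "MAYA_SCRIPT_PATH = /home/me/maya/scripts\nPYTHONPATH = $MAYA_SCRIPT_PATH\n"
#   newvars = parse(text)
#   # newvars == {'MAYA_SCRIPT_PATH': '/home/me/maya/scripts',
#   #             'PYTHONPATH': '/home/me/maya/scripts'}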
# parse the Maya.env file and set the environment variables and python path accordingly
def parseMayaenv(envLocation=None, version=None) :
""" parse the Maya.env file and set the environement variablas and python path accordingly.
You can specify a location for the Maya.env file or the Maya version"""
name = 'Maya.env'
envPath = None
if envLocation :
envPath = envLocation
if not os.path.isfile(envPath) :
envPath = os.path.join(envPath, name)
# no Maya.env specified, we look for it in MAYA_APP_DIR
if not envPath or not os.path.isfile(envPath) :
maya_app_dir = getMayaAppDir()
if not maya_app_dir:
_logger.warn("Neither HOME nor MAYA_APP_DIR is set, unable to find location of Maya.env")
return False
# try to find which version of Maya should be initialized
if not version :
# try to query version, will only work if reparsing env from a working Maya
version = Version.installName()
if version is None:
# if run from Maya provided mayapy / python interpreter, can guess version
_logger.debug("Unable to determine which version of Maya should be initialized, trying for Maya.env in %s" % maya_app_dir)
# look first for Maya.env in 'version' subdir of MAYA_APP_DIR, then directly in MAYA_APP_DIR
if version and os.path.isfile(os.path.join(maya_app_dir, version, name)) :
envPath = os.path.join(maya_app_dir, version, name)
else :
envPath = os.path.join(maya_app_dir, name)
# finally if we have a possible Maya.env, parse it
if os.path.isfile(envPath) :
try :
envFile = open(envPath)
except :
_logger.warn ("Unable to open Maya.env file %s" % envPath )
return False
success = False
try :
envTxt = envFile.read()
envVars = parse(envTxt)
# update env vars
for v in envVars :
#_logger.debug("%s was set or modified" % v)
os.environ[v] = envVars[v]
# add to syspath
if envVars.has_key('PYTHONPATH') :
#_logger.debug("sys.path will be updated")
plist = os.environ['PYTHONPATH'].split(os.pathsep)
for p in plist :
if not p in sys.path :
sys.path.append(p)
success = True
finally :
envFile.close()
return success
else :
if version :
print "Found no suitable Maya.env file for Maya version %s" % version
else :
print "Found no suitable Maya.env file"
return False | apache-2.0 | -6,613,185,605,031,252,000 | 40.718499 | 169 | 0.546722 | false | 3.810923 | false | false | false |
torshid/foodnow | entities/user.py | 1 | 2323 | from flask import render_template
from common import *
import datetime
from tables import users, restolikes, dishlikes
from werkzeug import redirect
page = Blueprint(__name__)
@page.route('/user/<int:user_id>/')
def main(user_id):
if isValidUserId(user_id):
return render_template('user.html', user_id = user_id)
return abort(404)
@page.route('/user/<int:user_id>/settings/')
def settings(user_id):
if isLogged():
return render_template('settings.html', user_id = user_id)
return render_template('home.html')
def updateProfile(userId, name = None, email = None, password = None):
if (isLogged()):
args = [name, email, password]
settings = []
for i in range(3):
if args[i]:
if i is 2:
settings.append(md5Password(args[i]))
else:
settings.append(args[i])
with db() as connection:
with connection.cursor() as cursor:
try:
update(userId, settings);
except dbapi2.Error:
connection.rollback()
else:
connection.commit()
return
@page.route('/user/<int:user_id>/likeresto/<int:resto_id>/')
def likeResto(user_id, resto_id):
from tables import restolikes
liked = restolikes.likeResto(user_id, resto_id)
#if liked is 0:
#return 'Returned 0'
#elif liked is 1:
#return 'Returned 1'
#else:
#return 2
return redirect(request.args.get('next') or request.referrer or url_for(default))
@page.route('/user/<int:user_id>/likedish/<int:dish_id>/')
def likeDish(user_id, dish_id):
from tables import dishlikes
liked = dishlikes.likeDish(user_id, dish_id)
#if liked is 0:
#return 'Returned 0'
#if liked is 1:
#return 'Returned 1'
#else:
#return 'Returned 2'
return redirect(request.args.get('next') or request.referrer or url_for(default))
def reset():
dishlikes.reset()
restolikes.reset()
users.reset()
users.addUser('Yusuf Aksoy', '[email protected]', md5Password('12345')) # id=1
users.addUser('Moctar Sawadogo', '[email protected]', md5Password('12345')) # id=2
users.addUser('Test User', '[email protected]', md5Password('12345')) # id=3
return | gpl-3.0 | 7,894,251,774,190,047,000 | 29.986667 | 86 | 0.599656 | false | 3.401171 | false | false | false |
cmusv-sc/DIWD-Team4-Wei-Lin-Tsai | src/python/data_2015_fall/api/generateNetwork.py | 1 | 2181 | from django.http import JsonResponse
from data_2015_fall.models import *
from neomodel import db
class NetworkResponse():
def __init__(self, name):
self.name = name
self.children = []
def to_dict(self):
return {
"name": self.name,
"children": [c.to_dict() for c in self.children],
}
def get_citations(name):
query = "match (n1)-[:CITED]->(n2) where n1.title={name} return n2"
results, meta = db.cypher_query(query, {"name": name})
return [a.title for a in [Article.inflate(row[0]) for row in results]]
def get_citations_network(request, name):
root = NetworkResponse(name)
root.children = [NetworkResponse(title) for title in get_citations(name)]
for c in root.children:
c.children = [NetworkResponse(title) for title in get_citations(c.name)]
for c2 in c.children:
c2.children = [NetworkResponse(title) for title in get_citations(c2.name)]
return JsonResponse(root.to_dict())
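# Illustrative JSON shape returned above (made-up titles, for clarity only):
# {"name": "Paper A",
#  "children": [{"name": "Paper B",
#                "children": [{"name": "Paper C", "children": [...]}]}]}
# i.e. the citation tree expanded three levels below the requested article.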
def get_authors(name):
query = "match (n1)<-[:AUTHORED]-(n2) where n1.title={name} return n2"
results, meta = db.cypher_query(query, {"name": name})
return [a.name for a in [Author.inflate(row[0]) for row in results]]
def get_papers(name):
query = "match (n1)-[:AUTHORED]->(n2) where n1.name={name} return n2"
results, meta = db.cypher_query(query, {"name": name})
return [a.title for a in [Article.inflate(row[0]) for row in results]]
def get_coauthors(request, name):
query = "match (n1)-[:COAUTHORED]->(n2) where n1.name={name} return n2"
results, meta = db.cypher_query(query, {"name": name})
return JsonResponse({"authors": [a.toDict() for a in [Author.inflate(row[0]) for row in results]]})
def get_paper_author_network(request, name):
root = NetworkResponse(name)
root.children = [NetworkResponse(author) for author in get_authors(name)]
for author in root.children:
author.children = [NetworkResponse(title) for title in get_papers(author.name)]
for paper in author.children:
paper.children = [NetworkResponse(author) for author in get_authors(paper.name)]
return JsonResponse(root.to_dict()) | unlicense | -2,011,199,960,961,736,700 | 35.983051 | 103 | 0.65658 | false | 3.299546 | false | false | false |
sahikaru/DP | chapter1/strategymode.py | 1 | 1364 | #!/usr/env python
class Flyable:
def fly(self):
pass
class Quackable(object):
def quack(self):
pass
class ReadHeadDuckFly(Flyable):
def fly(self):
print "I am a readheadduck, I can fly"
class ReadHeadDuckQack(Quackable):
def quack(self):
print "I am a readheadduck,Dcuk duck duck..."
class Duck():
def swim(self):
print "I am a duck,I can swim..."
class ReadHeadDuck(Duck):
def __init__(self,flyable,quackable):
self.f = flyable
self.q = quackable
def fly(self):
return self.f.fly()
def quack(self):
return self.q.quack()
class Mallardduckflyable(Flyable):
def fly(self):
print "I am a Mallardduck....,I can fly"
class MallardduckQuackble(Quackable):
def quack(self):
print "I am a Mallardduck,Duck.duck..duck.."
class Mallardduck(Duck):
def __init__(self,flyable,quackable):
self.f = flyable
self.q = quackable
def fly(self):
return self.f.fly()
def quack(self):
return self.q.quack()
if __name__ == "__main__":
duck = Duck()
duck.swim()
rhduck = ReadHeadDuck(ReadHeadDuckFly(),ReadHeadDuckQack())
rhduck.fly()
rhduck.swim()
rhduck.quack()
md = Mallardduck(Mallardduckflyable(),MallardduckQuackble())
md.fly()
md.quack()
md.swim()
| gpl-2.0 | -60,068,599,119,975,496 | 19.984615 | 64 | 0.603372 | false | 2.847599 | false | false | false |
patricmutwiri/pombola | pombola/core/migrations/0045_migrate_identifiers.py | 4 | 25162 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.contrib.contenttypes import generic
from django.db import models
class Migration(DataMigration):
def get_fields_mapping(self, orm):
return {'old-mz-person-original': (orm.Person, 'original_id'),
'old-mz-organisation-original': (orm.Organisation, 'original_id'),
'old-mz-organisation-external': (orm.Organisation, 'external_id'),
'old-mz-place-original': (orm.Place, 'original_id'),
'old-mz-place-external': (orm.Place, 'external_id'),
'old-mz-position-external': (orm.Position, 'external_id'),
'old-mz-position-title-original': (orm.PositionTitle, 'original_id')}
def forwards(self, orm):
for scheme, model_and_field in self.get_fields_mapping(orm).items():
model, field = model_and_field
for o in model.objects.all():
old_value = getattr(o, field)
if old_value is None or old_value == '':
continue
content_type = orm['contenttypes.ContentType'].objects.get(app_label="core",
model=model.__name__.lower())
orm.Identifier.objects.create(scheme=scheme,
identifier=unicode(old_value),
object_id=o.id,
content_type=content_type)
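# Illustrative effect of forwards() (hypothetical values): a Person whose
# original_id is 42 gains an Identifier row roughly like
#   Identifier(scheme='old-mz-person-original', identifier=u'42',
#              content_type=<ContentType: core.person>, object_id=<that person's pk>)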
def backwards(self, orm):
# We need the next two lines to make the content_object field
# of Identifier work, as suggested here:
# http://south.readthedocs.org/en/latest/generics.html
gfk = generic.GenericForeignKey()
gfk.contribute_to_class(orm.Identifier, 'content_object')
fields_mapping = self.get_fields_mapping(orm)
for identifier in orm.Identifier.objects.all():
id_to_put_back = identifier.identifier
scheme = identifier.scheme
if scheme not in fields_mapping:
raise RuntimeError, "It's not possible to migrate identifiers of scheme '%s' backwards" % (scheme,)
if id_to_put_back is None or id_to_put_back == '':
continue
model, field = fields_mapping[scheme]
            setattr(identifier.content_object, field, int(id_to_put_back))
            identifier.content_object.save()
models = {
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.alternativepersonname': {
'Meta': {'unique_together': "(('person', 'alternative_name'),)", 'object_name': 'AlternativePersonName'},
'alternative_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_to_use': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'alternative_names'", 'to': "orm['core.Person']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.contact': {
'Meta': {'object_name': 'Contact'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.ContactKind']"}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'source': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'core.contactkind': {
'Meta': {'object_name': 'ContactKind'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.identifier': {
'Meta': {'unique_together': "(('scheme', 'identifier'),)", 'object_name': 'Identifier'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'scheme': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.informationsource': {
'Meta': {'object_name': 'InformationSource'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'entered': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.organisation': {
'Meta': {'object_name': 'Organisation'},
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'ended': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.OrganisationKind']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'original_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'}),
'started': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'summary': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.organisationkind': {
'Meta': {'object_name': 'OrganisationKind'},
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'}),
'summary': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.parliamentarysession': {
'Meta': {'object_name': 'ParliamentarySession'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'house': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Organisation']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mapit_generation': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.person': {
'Meta': {'object_name': 'Person'},
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'can_be_featured': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_of_birth': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'date_of_death': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legal_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'original_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'}),
'summary': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.place': {
'Meta': {'object_name': 'Place'},
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.PlaceKind']"}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'mapit_area': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mapit.Area']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Organisation']", 'null': 'True', 'blank': 'True'}),
'original_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'parent_place': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_places'", 'null': 'True', 'to': "orm['core.Place']"}),
'parliamentary_session': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.ParliamentarySession']", 'null': 'True'}),
'shape_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'summary': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.placekind': {
'Meta': {'object_name': 'PlaceKind'},
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'plural_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'}),
'summary': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.position': {
'Meta': {'object_name': 'Position'},
'category': ('django.db.models.fields.CharField', [], {'default': "'other'", 'max_length': '20'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'end_date': ('django_date_extensions.fields.ApproximateDateField', [], {'default': "'future'", 'max_length': '10', 'blank': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Organisation']", 'null': 'True', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Person']"}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Place']", 'null': 'True', 'blank': 'True'}),
'sorting_end_date': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10'}),
'sorting_end_date_high': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10'}),
'sorting_start_date': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10'}),
'sorting_start_date_high': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10'}),
'start_date': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'subtitle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'title': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.PositionTitle']", 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.positiontitle': {
'Meta': {'object_name': 'PositionTitle'},
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'original_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'requires_place': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'}),
'summary': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.slugredirect': {
'Meta': {'unique_together': "(('content_type', 'old_object_slug'),)", 'object_name': 'SlugRedirect'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'old_object_slug': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'images.image': {
'Meta': {'object_name': 'Image'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
'is_primary': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '400'})
},
'mapit.area': {
'Meta': {'object_name': 'Area'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'areas'", 'null': 'True', 'to': "orm['mapit.Country']"}),
'generation_high': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'final_areas'", 'null': 'True', 'to': "orm['mapit.Generation']"}),
'generation_low': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'new_areas'", 'null': 'True', 'to': "orm['mapit.Generation']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'parent_area': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['mapit.Area']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'areas'", 'to': "orm['mapit.Type']"})
},
'mapit.country': {
'Meta': {'object_name': 'Country'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'mapit.generation': {
'Meta': {'object_name': 'Generation'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'mapit.type': {
'Meta': {'object_name': 'Type'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'scorecards.category': {
'Meta': {'object_name': 'Category'},
'_description_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 18, 10, 59, 1, 621786)', 'auto_now_add': 'True', 'blank': 'True'}),
'description': ('markitup.fields.MarkupField', [], {'no_rendered_field': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'synopsis': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 18, 10, 59, 1, 621832)', 'auto_now': 'True', 'blank': 'True'})
},
'scorecards.entry': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'category', 'date'),)", 'object_name': 'Entry'},
'_equivalent_remark_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'_extended_remark_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scorecards.Category']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 18, 10, 59, 1, 622393)', 'auto_now_add': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'disabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'disabled_comment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'}),
'equivalent_remark': ('markitup.fields.MarkupField', [], {'max_length': '400', 'no_rendered_field': 'True', 'blank': 'True'}),
'extended_remark': ('markitup.fields.MarkupField', [], {'max_length': '1000', 'no_rendered_field': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'remark': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'score': ('django.db.models.fields.IntegerField', [], {}),
'source_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 18, 10, 59, 1, 622419)', 'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['contenttypes', 'core']
| agpl-3.0 | -2,694,846,960,529,568,300 | 82.042904 | 176 | 0.547015 | false | 3.688361 | false | false | false |
aristanetworks/arista-ovs-quantum | quantum/plugins/cisco/ucs/cisco_ucs_plugin_v2.py | 4 | 14992 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
#
import logging
from quantum.db import api as db
from quantum.openstack.common import importutils
from quantum.plugins.cisco.common import cisco_constants as const
from quantum.plugins.cisco.common import cisco_credentials_v2 as cred
from quantum.plugins.cisco.common import cisco_exceptions as cexc
from quantum.plugins.cisco.common import cisco_utils as cutil
from quantum.plugins.cisco.db import network_db_v2 as cdb
from quantum.plugins.cisco.db import ucs_db_v2 as udb
from quantum.plugins.cisco.l2device_plugin_base import L2DevicePluginBase
from quantum.plugins.cisco.ucs import cisco_ucs_configuration as conf
LOG = logging.getLogger(__name__)
class UCSVICPlugin(L2DevicePluginBase):
"""UCS Device Plugin"""
def __init__(self):
self._driver = importutils.import_object(conf.UCSM_DRIVER)
LOG.debug("Loaded driver %s\n" % conf.UCSM_DRIVER)
# TODO (Sumit) Make the counter per UCSM
self._port_profile_counter = 0
def get_all_networks(self, tenant_id, **kwargs):
"""
Returns a dictionary containing all
<network_uuid, network_name> for
the specified tenant.
"""
LOG.debug("UCSVICPlugin:get_all_networks() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
networks_list = db.network_list(tenant_id)
new_networks_list = []
for network in networks_list:
new_network_dict = cutil.make_net_dict(network[const.UUID],
network[const.NETWORKNAME],
[])
new_networks_list.append(new_network_dict)
return new_networks_list
def create_network(self, tenant_id, net_name, net_id, vlan_name, vlan_id,
**kwargs):
"""
Creates a new Virtual Network, and assigns it
a symbolic name.
"""
LOG.debug("UCSVICPlugin:create_network() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
self._driver.create_vlan(vlan_name, str(vlan_id), self._ucsm_ip,
self._ucsm_username, self._ucsm_password)
ports_on_net = []
new_network_dict = cutil.make_net_dict(net_id,
net_name,
ports_on_net)
return new_network_dict
def delete_network(self, tenant_id, net_id, **kwargs):
"""
Deletes the network with the specified network identifier
belonging to the specified tenant.
"""
LOG.debug("UCSVICPlugin:delete_network() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
vlan_binding = cdb.get_vlan_binding(net_id)
vlan_name = vlan_binding[const.VLANNAME]
self._driver.delete_vlan(vlan_name, self._ucsm_ip,
self._ucsm_username, self._ucsm_password)
#Rohit:passing empty network name, might not need fixing
net_dict = cutil.make_net_dict(net_id,
"",
[])
return net_dict
def get_network_details(self, tenant_id, net_id, **kwargs):
"""
Deletes the Virtual Network belonging to a the
spec
"""
LOG.debug("UCSVICPlugin:get_network_details() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
network = db.network_get(net_id)
ports_list = network[const.NETWORKPORTS]
ports_on_net = []
for port in ports_list:
new_port = cutil.make_port_dict(port[const.UUID],
port[const.PORTSTATE],
port[const.NETWORKID],
port[const.INTERFACEID])
ports_on_net.append(new_port)
new_network = cutil.make_net_dict(network[const.UUID],
network[const.NETWORKNAME],
ports_on_net)
return new_network
def update_network(self, tenant_id, net_id, **kwargs):
"""
Updates the symbolic name belonging to a particular
Virtual Network.
"""
LOG.debug("UCSVICPlugin:update_network() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
network = db.network_get(net_id)
net_dict = cutil.make_net_dict(network[const.UUID],
network[const.NETWORKNAME],
[])
return net_dict
def get_all_ports(self, tenant_id, net_id, **kwargs):
"""
Retrieves all port identifiers belonging to the
specified Virtual Network.
"""
LOG.debug("UCSVICPlugin:get_all_ports() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
network = db.network_get(net_id)
ports_list = network[const.NETWORKPORTS]
ports_on_net = []
for port in ports_list:
port_binding = udb.get_portbinding(port[const.UUID])
ports_on_net.append(port_binding)
return ports_on_net
def create_port(self, tenant_id, net_id, port_state, port_id, **kwargs):
"""
Creates a port on the specified Virtual Network.
"""
LOG.debug("UCSVICPlugin:create_port() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
qos = None
ucs_inventory = kwargs[const.UCS_INVENTORY]
least_rsvd_blade_dict = kwargs[const.LEAST_RSVD_BLADE_DICT]
chassis_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_CHASSIS]
blade_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_ID]
blade_data_dict = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_DATA]
new_port_profile = self._create_port_profile(tenant_id, net_id,
port_id,
conf.DEFAULT_VLAN_NAME,
conf.DEFAULT_VLAN_ID)
profile_name = new_port_profile[const.PROFILE_NAME]
rsvd_nic_dict = ucs_inventory.reserve_blade_interface(
self._ucsm_ip, chassis_id,
blade_id, blade_data_dict,
tenant_id, port_id,
profile_name)
port_binding = udb.update_portbinding(port_id,
portprofile_name=profile_name,
vlan_name=conf.DEFAULT_VLAN_NAME,
vlan_id=conf.DEFAULT_VLAN_ID,
qos=qos)
return port_binding
def delete_port(self, tenant_id, net_id, port_id, **kwargs):
"""
Deletes a port on a specified Virtual Network,
if the port contains a remote interface attachment,
the remote interface should first be un-plugged and
then the port can be deleted.
"""
LOG.debug("UCSVICPlugin:delete_port() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
ucs_inventory = kwargs[const.UCS_INVENTORY]
chassis_id = kwargs[const.CHASSIS_ID]
blade_id = kwargs[const.BLADE_ID]
interface_dn = kwargs[const.BLADE_INTF_DN]
port_binding = udb.get_portbinding(port_id)
profile_name = port_binding[const.PORTPROFILENAME]
self._delete_port_profile(port_id, profile_name)
ucs_inventory.unreserve_blade_interface(self._ucsm_ip, chassis_id,
blade_id, interface_dn)
return udb.remove_portbinding(port_id)
def update_port(self, tenant_id, net_id, port_id, **kwargs):
"""
Updates the state of a port on the specified Virtual Network.
"""
LOG.debug("UCSVICPlugin:update_port() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
pass
def get_port_details(self, tenant_id, net_id, port_id, **kwargs):
"""
This method allows the user to retrieve a remote interface
that is attached to this particular port.
"""
LOG.debug("UCSVICPlugin:get_port_details() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
port_binding = udb.get_portbinding(port_id)
return port_binding
def plug_interface(self, tenant_id, net_id, port_id, remote_interface_id,
**kwargs):
"""
Attaches a remote interface to the specified port on the
specified Virtual Network.
"""
LOG.debug("UCSVICPlugin:plug_interface() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
port_binding = udb.get_portbinding(port_id)
profile_name = port_binding[const.PORTPROFILENAME]
old_vlan_name = port_binding[const.VLANNAME]
new_vlan_name = self._get_vlan_name_for_network(tenant_id, net_id)
new_vlan_id = self._get_vlan_id_for_network(tenant_id, net_id)
self._driver.change_vlan_in_profile(profile_name, old_vlan_name,
new_vlan_name, self._ucsm_ip,
self._ucsm_username,
self._ucsm_password)
return udb.update_portbinding(port_id, vlan_name=new_vlan_name,
vlan_id=new_vlan_id)
def unplug_interface(self, tenant_id, net_id, port_id, **kwargs):
"""
Detaches a remote interface from the specified port on the
specified Virtual Network.
"""
LOG.debug("UCSVICPlugin:unplug_interface() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
port_binding = udb.get_portbinding(port_id)
profile_name = port_binding[const.PORTPROFILENAME]
old_vlan_name = port_binding[const.VLANNAME]
new_vlan_name = conf.DEFAULT_VLAN_NAME
self._driver.change_vlan_in_profile(profile_name, old_vlan_name,
new_vlan_name, self._ucsm_ip,
self._ucsm_username,
self._ucsm_password)
return udb.update_portbinding(port_id, vlan_name=new_vlan_name,
vlan_id=conf.DEFAULT_VLAN_ID)
def create_multiport(self, tenant_id, net_id_list, ports_num,
port_id_list, **kwargs):
"""
Creates a port on the specified Virtual Network.
"""
LOG.debug("UCSVICPlugin:create_multiport() called\n")
self._set_ucsm(kwargs[const.DEVICE_IP])
qos = None
ucs_inventory = kwargs[const.UCS_INVENTORY]
least_rsvd_blade_dict = kwargs[const.LEAST_RSVD_BLADE_DICT]
chassis_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_CHASSIS]
blade_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_ID]
blade_data_dict = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_DATA]
port_binding_list = []
for port_id, net_id in zip(port_id_list, net_id_list):
new_port_profile = self._create_port_profile(
tenant_id, net_id, port_id,
conf.DEFAULT_VLAN_NAME,
conf.DEFAULT_VLAN_ID)
profile_name = new_port_profile[const.PROFILE_NAME]
rsvd_nic_dict = ucs_inventory.reserve_blade_interface(
self._ucsm_ip, chassis_id,
blade_id, blade_data_dict,
tenant_id, port_id,
profile_name)
port_binding = udb.update_portbinding(
port_id,
portprofile_name=profile_name,
vlan_name=conf.DEFAULT_VLAN_NAME,
vlan_id=conf.DEFAULT_VLAN_ID,
qos=qos)
port_binding_list.append(port_binding)
return port_binding_list
def detach_port(self, tenant_id, instance_id, instance_desc, **kwargs):
"""
Remove the association of the VIF with the dynamic vnic
"""
LOG.debug("detach_port() called\n")
port_id = kwargs[const.PORTID]
kwargs.pop(const.PORTID)
return self.unplug_interface(tenant_id, None, port_id, **kwargs)
def _get_profile_name(self, port_id):
"""Returns the port profile name based on the port UUID"""
profile_name = conf.PROFILE_NAME_PREFIX + cutil.get16ByteUUID(port_id)
return profile_name
def _get_vlan_name_for_network(self, tenant_id, network_id):
"""Return the VLAN name as set by the L2 network plugin"""
vlan_binding = cdb.get_vlan_binding(network_id)
return vlan_binding[const.VLANNAME]
def _get_vlan_id_for_network(self, tenant_id, network_id):
"""Return the VLAN id as set by the L2 network plugin"""
vlan_binding = cdb.get_vlan_binding(network_id)
return vlan_binding[const.VLANID]
def _create_port_profile(self, tenant_id, net_id, port_id, vlan_name,
vlan_id):
"""Create port profile in UCSM"""
if self._port_profile_counter >= int(conf.MAX_UCSM_PORT_PROFILES):
raise cexc.UCSMPortProfileLimit(net_id=net_id, port_id=port_id)
profile_name = self._get_profile_name(port_id)
self._driver.create_profile(profile_name, vlan_name, self._ucsm_ip,
self._ucsm_username, self._ucsm_password)
self._port_profile_counter += 1
new_port_profile = {const.PROFILE_NAME: profile_name,
const.PROFILE_VLAN_NAME: vlan_name,
const.PROFILE_VLAN_ID: vlan_id}
return new_port_profile
def _delete_port_profile(self, port_id, profile_name):
"""Delete port profile in UCSM"""
self._driver.delete_profile(profile_name, self._ucsm_ip,
self._ucsm_username, self._ucsm_password)
self._port_profile_counter -= 1
def _set_ucsm(self, ucsm_ip):
"""Set the UCSM IP, username, and password"""
self._ucsm_ip = ucsm_ip
self._ucsm_username = cred.Store.get_username(conf.UCSM_IP_ADDRESS)
self._ucsm_password = cred.Store.get_password(conf.UCSM_IP_ADDRESS)
| apache-2.0 | -5,851,265,784,641,359,000 | 43.486647 | 79 | 0.576241 | false | 3.791603 | false | false | false |
MTgeophysics/mtpy | mtpy/gui/SmartMT/legacy/frequency_selection.py | 1 | 6304 | import numpy as np
from PyQt4 import QtGui
from mtpy.gui.SmartMT.gui.matplotlib_imabedding import MPLCanvas, Cursor
from mtpy.gui.SmartMT.ui_asset.groupbox_frequency_period_single import Ui_groupBoxFrequency_pereiod_single
class FrequencySingle(QtGui.QGroupBox):
"""
Frequency selection (single frequency)
"""
_unit_period = 'second'
_unit_frequency = 'Hz'
_title_period = 'Period'
_title_frequency = 'Frequency'
def __init__(self, parent, use_period=False):
QtGui.QGroupBox.__init__(self, parent)
self._mt_objs = None
self.use_period = use_period
self.ui = Ui_groupBoxFrequency_pereiod_single()
self.ui.setupUi(self)
self._histogram = FrequencySingle.FrequencyHistogram()
self.set_use_period(self.use_period)
# add matplotlib canvas
self.ui.verticalLayoutFrequencyPeriod.addWidget(self._histogram)
# connect components
self.ui.comboBoxPeriod.currentIndexChanged.connect(self.update_histogram)
self.ui.comboBoxPeriod.editTextChanged.connect(self.update_histogram)
self._histogram.mpl_connect('button_release_event', self._mouse_pick)
def toggle_time_scale(self, *args):
self.use_period = not self.use_period
self.set_use_period(self.use_period)
def set_use_period(self, use_period=False):
if use_period:
self._histogram.set_unit(self._unit_period)
self._histogram.set_title(self._title_period)
title = '%s (%s)' % (self._title_period, self._unit_period)
else:
self._histogram.set_unit(self._unit_frequency)
self._histogram.set_title(self._title_frequency)
title = '%s (%s)' % (self._title_frequency, self._unit_frequency)
self.setTitle(title)
self._update_frequency()
def _mouse_pick(self, event):
if not event.inaxes:
return
x = event.xdata
self.ui.comboBoxPeriod.setEditText("%.5f" % x)
def get_frequency(self):
return float(self.ui.comboBoxPeriod.currentText())
def update_histogram(self):
value = float(self.ui.comboBoxPeriod.currentText())
self._histogram.set_current_frequency(value)
def set_data(self, mt_objs):
self._mt_objs = mt_objs
self._update_frequency()
def _update_frequency(self):
if self._mt_objs is not None:
all_freqs = []
for mt_obj in self._mt_objs:
all_freqs.extend(list(mt_obj.Z.freq))
if self.use_period:
all_periods = 1.0 / np.array(all_freqs)
# self._histogram.set_data(all_periods)
all_unique = sorted(list(set(all_periods)))
else:
# self._histogram.set_data(all_freqs)
all_unique = sorted(list(set(all_freqs)))
self._histogram.set_data(all_unique)
self._histogram.update_figure()
# sort all frequencies in ascending order
for period in all_unique:
self.ui.comboBoxPeriod.addItem("%.5f" % period)
self.ui.comboBoxPeriod.setCurrentIndex(0)
self.update_histogram()
class FrequencyHistogram(MPLCanvas):
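        # Matplotlib canvas drawing a histogram of the available frequencies or
        # periods, with a red vertical line marking the currently selected value.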
def __init__(self, parent=None, width=5, height=2, dpi=100):
self.artists = dict()
self._frequency = None
self._current_frequency = None
self._title = None
self._unit = None
MPLCanvas.__init__(self, parent, width, height, dpi)
self._lx = None
self.cursor = None
# self.mpl_connect('motion_notify_event', self.cursor)
self.mpl_connect('button_release_event', self.mouse_pick)
self.setMinimumSize(200, 150)
self.resize(self.sizeHint())
# def mouse_move(self, event):
# if not event.inaxes:
# return
# x = event.xdata
# y = event.ydata
# if self._cursor_x is None:
# self._cursor_x = self._axes.axvline(linewidth=1, color="green")
# if self._cursor_text is None:
# self._cursor_text = self._axes.text(0.0, 0.0, '', fontsize=8)
# self._cursor_x.set_xdata(x)
# self._cursor_text.set_text('period=%.2f' % x)
# self._cursor_text.set_position((x, y))
# self.draw()
def set_title(self, title):
self._title = title
def set_unit(self, unit):
if unit != self._unit:
self._unit = unit
self.cursor = Cursor(self._axes, track_y=False, text_format="%f " + self._unit, useblit=True)
def mouse_pick(self, event):
if not event.inaxes:
return
x = event.xdata
self.set_current_frequency(x)
def compute_initial_figure(self):
if self._frequency is not None:
self._axes.tick_params(axis='both', which='major', labelsize=6)
self._axes.tick_params(axis='both', which='minor', labelsize=4)
self._axes.hist(self._frequency) # , 50, normed=1)
if self._title and self._unit:
self._axes.set_xlabel("%s (%s)" % (self._title, self._unit), fontsize=8)
self.figure.suptitle('%s Distribution in Selected Stations' % self._title, fontsize=8)
self._fig.set_tight_layout(True)
def set_data(self, frequency):
self._frequency = frequency
self._lx = None
self._current_frequency = None
def set_current_frequency(self, freq):
self._current_frequency = freq
if self._lx is None:
self._lx = self._axes.axvline(linewidth=2, color="red")
self._lx.set_xdata(self._current_frequency)
# if self._fig.canvas.supports_blit:
# self._axes.draw_artist(self._lx)
# self._fig.canvas.blit(self._axes.bbox)
# else:
# self._fig.canvas.draw_idle()
self._fig.canvas.draw_idle()
def update_figure(self):
# clear figure
self._axes.cla()
self.compute_initial_figure()
self.draw()
| gpl-3.0 | -2,814,063,199,744,263,700 | 37.439024 | 109 | 0.568211 | false | 3.818292 | false | false | false |
NuenoB/TheTeleop | src/the_teleop/sender.py | 1 | 1250 | #! /usr/bin/env python
import genpy
import rospy
from std_msgs.msg import String
import roslib
import roslib.message
import yaml
def sender(msg,reciver, msg_type):
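    # Resolve the message class from its type string and publish the prepared
    # message object on the topic named by `reciver`.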
#reciver -> String
#msg -> obj
#rospy.loginfo("sender " + reciver)
msg_class = roslib.message.get_message_class(msg_type)
pub = rospy.Publisher(reciver, msg_class , queue_size=0)
#rospy.loginfo(msg)
rate = rospy.Rate(10)
x=1
while x<2 :
pub.publish(msg)
rate.sleep()
x=x+1
#rospy.loginfo("sended ")
#rospy.spin()
def sender1(msg, reciver, msg_type):
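    # Like sender(), but fills the message fields from YAML-formatted argument
    # strings (rostopic-pub style) before publishing.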
#reciver -> String
#msg -> obj
#rospy.init_node('sender', anonymous=True)
#rospy.loginfo("sender " + reciver)
msg_class = roslib.message.get_message_class(msg_type)
pub = rospy.Publisher(reciver, msg_class , queue_size=0)
#rospy.loginfo(msg)
# type-case using YAML
try:
pub_args = []
for m in msg:
pub_args.append(yaml.load(m))
except Exception as e:
        raise ValueError("Argument error: " + str(e))
rate = rospy.Rate(10)
msg = msg_class()
now = rospy.get_rostime()
import std_msgs.msg
keys = { 'now': now, 'auto': std_msgs.msg.Header(stamp=now) }
genpy.message.fill_message_args(msg, pub_args, keys=keys)
x=0
while x<2 :
pub.publish(msg)
rate.sleep()
x=x+1
#rospy.loginfo("sended ")
rospy.spin() | bsd-3-clause | 274,586,115,358,421,950 | 22.166667 | 62 | 0.6808 | false | 2.422481 | false | false | false |
bmccary/csvgb | csvgb/reductions.py | 1 | 3473 |
from functools import partial
from itertools import izip
from csvgb import (
isna,
isnum,
isexempt,
ismissed,
isransomed,
sum0,
drop0,
inner0,
mean0,
K_RANSOMED,
K_MISSED,
K_EXAM_N,
K_HOMEWORK_N,
K_QUIZ_N,
K_THQ_N,
)
def sum_XX_YY(XX, YY, N, s_m_n_f='{}_{:02d}'):
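    # Build a (column_name, reducer) pair for assignment XX number YY, summing
    # its N part columns and honouring the _ransom/_override/_penalty columns.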
s_m = '{}_{:02d}'.format(XX, YY)
s_m_o = '{}_override'.format(s_m)
s_m_p = '{}_penalty'.format(s_m)
s_m_r = '{}_ransom'.format(s_m)
s_m_n = [s_m_n_f.format(s_m, n + 1) for n in xrange(N)]
def f(row, ALL=[]):
r = row.get(s_m_r)
if not isna(r):
return K_RANSOMED
o = row.get(s_m_o)
if not isna(o):
return o
v = sum0([row.get(k) for k in s_m_n])
if isna(v):
if s_m in ALL:
v = K_MISSED
return v
p = row.get(s_m_p)
if isnum(p):
v = max(v - p, 0)
return v
return s_m, f
for m in xrange(K_EXAM_N):
s_m, f = sum_XX_YY('exam', m + 1, 12)
globals()[s_m] = f
for m in xrange(K_QUIZ_N):
s_m, f = sum_XX_YY('quiz', m + 1, 3)
globals()[s_m] = f
for m in xrange(K_THQ_N):
s_m, f = sum_XX_YY('thq', m + 1, 3, s_m_n_f='{}_q{}')
globals()[s_m] = f
def X_all(row, ALL): return [row[k] for k in ALL if not isexempt(row[k])]
def X_grade(row, ALL, DROPS, DENOM):
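    # Grade as a 0-100 percentage: mean of the non-exempt scores after
    # applying drop0(..., DROPS), with DENOM taken as the full-credit score.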
m = mean0(drop0(X_all(row, ALL), DROPS))
if not isnum(m):
return 'None'
return m/DENOM*100.0
homework_grade = partial(X_grade, DENOM=100.0)
quiz_grade = partial(X_grade, DENOM=30.0)
thq_grade = partial(X_grade, DENOM=30.0)
def X_misses_g(row, K): return (k for k in K if ismissed(row[k]))
def X_misses_count(row, K): return sum(1 for x in X_misses_g(row, K))
def X_misses_percent(row, K, D):
    N = len(X_all(row, K))
if N <= D:
return None
M = max(X_misses_count(row, K) - D, 0)
return float(M)/N*100.0
def X_ransom_g(row, K):
for k in K:
s = '{}_ransom'.format(k)
if isransomed(row.get(s)):
yield k
def X_ransom_count(row, K): return sum(1 for x in X_ransom_g(row, K))
LETTER = [ 'F', 'D', 'C', 'B', 'A', ]
LETTER_CUTS = [ 60.00, 70.00, 80.00, 90.00, ]
LETTER_PM = [ 'F', 'D-', 'D', 'D+', 'C-', 'C', 'C+', 'B-', 'B', 'B+', 'A-', 'A', 'A+', ]
LETTER_CUTS_PM = [ 60.00, 63.33, 66.66, 70.00, 73.33, 76.66, 80.00, 83.33, 86.66, 90.00, 93.33, 96.66, ]
def letterize(grade, cuts=LETTER_CUTS_PM):
if type(cuts) != list:
raise Exception("Bad cuts: " + str(cuts))
L = None
if len(cuts) == len(LETTER) - 1:
L = LETTER
elif len(cuts) == len(LETTER_PM) - 1:
L = LETTER_PM
else:
raise Exception("Bad cuts: " + str(cuts))
for c, l in izip(cuts, L):
if grade < c:
return l
return L[-1]
_gpa_d = {
"A+": 4.000,
"A" : 4.000,
"A-": 3.670,
"B+": 3.330,
"B" : 3.000,
"B-": 2.670,
"C+": 2.330,
"C" : 2.000,
"C-": 1.670,
"D+": 1.330,
"D" : 1.000,
"D-": 0.670,
"F" : 0.000,
"NF": 0.000,
}
def gpaize(grade):
v = _gpa_d.get(grade)
if v is None:
raise Exception("Unknown grade: {}".format(grade))
return v
def donothing(row):
return None
| mit | 7,387,753,133,588,528,000 | 22.62585 | 109 | 0.463288 | false | 2.473647 | false | false | false |
darcamo/pyphysim | pyphysim/util/serialize.py | 1 | 5985 | #!/usr/bin/env python
"""
Module containing function related to serialization.
"""
import json
from typing import Any, Dict, Union, cast
import numpy as np
Serializable = Union[np.ndarray, np.int32, np.int64, np.float32, np.float64,
np.float128, set]
# A type corresponding to the JSON representation of the object. For a lack of
# a better option we use Any
JsonRepresentation = Any
class NumpyOrSetEncoder(json.JSONEncoder):
"""
JSON encoder for numpy arrays.
Pass this class to json.dumps when converting a dictionary to json so
that any field which with a numpy array as value will be properly
converted.
This encoder will also handle numpy scalars and the native python set
types.
When you need to convert the json representation back, use the
`json_numpy_or_set_obj_hook` function.
See Also
--------
json_numpy_or_set_obj_hook
"""
def default(self, obj: Serializable) -> JsonRepresentation:
"""
If input object is an ndarray it will be converted into a dict holding
data, dtype, _is_numpy_array and shape.
Parameters
----------
obj : Serializable
Returns
-------
Serialized Data
"""
# Case for numpy arrays
if isinstance(obj, np.ndarray):
return {
'data': obj.tolist(),
'dtype': str(obj.dtype),
'_is_numpy_array': True,
'shape': obj.shape
}
# Case for numpy scalars
if isinstance(obj, (np.int32, np.int64)):
return int(obj)
if isinstance(obj, (np.float32, np.float64, np.float128)):
            return float(obj)
# Case for built-in Python sets
if isinstance(obj, set):
return {'data': list(obj), '_is_set': True}
# If it is not a numpy array we fall back to base class encoder
        return json.JSONEncoder.default(self, obj)
def json_numpy_or_set_obj_hook(
dct: Dict[str, JsonRepresentation]) -> Serializable:
"""
Decodes a previously encoded numpy array.
Parameters
----------
dct : dict
The JSON encoded numpy array.
Returns
-------
np.ndarray | set | dict, optional
The decoded numpy array or None if the encoded json data was not an
encoded numpy array.
See Also
--------
NumpyOrSetEncoder
"""
if isinstance(dct, dict) and '_is_numpy_array' in dct:
if dct['_is_numpy_array'] is True:
data = dct['data']
            return np.array(data, dtype=dct['dtype'])
raise ValueError( # pragma: no cover
'Json representation contains the "_is_numpy_array" key '
'indicating that the object should be a numpy array, but it '
'was set to False, which is not valid.')
if isinstance(dct, dict) and '_is_set' in dct:
if dct['_is_set'] is True:
data = dct['data']
return set(data)
raise ValueError( # pragma: no cover
'Json representation contains the "_is_set" key '
'indicating that the object should be python set, but it '
'was set to False, which is not valid.')
return dct
class JsonSerializable:
"""
Base class for classes you want to be JSON serializable (convert
to/from JSON).
You can call the methods `to_json` and `from_json` methods (the later
is a staticmethod).
Note that a subclass must implement the `_to_dict` and `_from_dict` methods.
"""
def _to_dict(self) -> Any:
"""
Convert the object to a dictionary representation.
Returns
-------
dict
The dictionary representation of the object.
"""
raise NotImplementedError(
"Implement in a subclass") # pragma: no cover
def to_dict(self) -> Dict[str, Any]:
"""
Convert the object to a dictionary representation.
Returns
-------
dict
The dictionary representation of the object.
"""
return cast(Dict[str, Any], self._to_dict())
@staticmethod
def _from_dict(d: Any) -> Any:
"""
Convert from a dictionary to an object.
Parameters
----------
d : dict
The dictionary representing the object.
Returns
-------
Result
The converted object.
"""
raise NotImplementedError(
"Implement in a subclass") # pragma: no cover
@classmethod
def from_dict(cls, d: Dict[str, Any]) -> Any:
"""
Convert from a dictionary to an object.
Parameters
----------
d : dict
The dictionary representing the Result.
Returns
-------
Result
The converted object.
"""
return cls._from_dict(d)
def to_json(self) -> JsonRepresentation:
"""
Convert the object to JSON.
Returns
-------
str
JSON representation of the object.
"""
return json.dumps(self._to_dict(), cls=NumpyOrSetEncoder)
@classmethod
def from_json(cls, data: JsonRepresentation) -> Any:
"""
Convert a JSON representation of the object to an actual object.
Parameters
----------
data : str
The JSON representation of the object.
Returns
-------
any
The actual object
"""
d = json.loads(data, object_hook=json_numpy_or_set_obj_hook)
return cls._from_dict(d)
# # xxxxxxxxxx Test and Example Usage xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# if __name__ == '__main__':
# expected = np.arange(100, dtype=np.float)
# dumped = json.dumps(expected, cls=NumpyOrSetEncoder)
# result = json.loads(dumped, object_hook=json_numpy_or_set_obj_hook)
# print(type(result))
# print(result)
| gpl-2.0 | -4,537,723,960,394,275,300 | 26.580645 | 80 | 0.570426 | false | 4.413717 | false | false | false |
ctuning/ck-env | soft/dataset.librispeech.preprocessed/customize.py | 1 | 2228 | #
# Collective Knowledge (individual environment - setup)
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
# Developer: Leo Gordon, [email protected]
#
##############################################################################
# setup environment setup
def setup(i):
"""
Input: {
cfg - meta of this soft entry
self_cfg - meta of module soft
ck_kernel - import CK kernel module (to reuse functions)
host_os_uoa - host OS UOA
host_os_uid - host OS UID
host_os_dict - host OS meta
target_os_uoa - target OS UOA
target_os_uid - target OS UID
target_os_dict - target OS meta
target_device_id - target device ID (if via ADB)
tags - list of tags used to search this entry
env - updated environment vars from meta
customize - updated customize vars from meta
deps - resolved dependencies for this soft
interactive - if 'yes', can ask questions, otherwise quiet
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
bat - prepared string for bat file
}
"""
import os
from math import sqrt
ck_kernel = i.get('ck_kernel')
cus = i.get('customize',{})
full_path = cus.get('full_path','')
env_prefix = cus.get('env_prefix','')
install_env = cus.get('install_env', {})
detection_mode = len(install_env) == 0
path_install = full_path if os.path.isdir(full_path) else os.path.dirname(full_path)
env = i.get('env', {})
env[env_prefix + '_DIR'] = path_install
## Prepend the hidden variables with env_prefix
#
for varname in install_env.keys():
if varname.startswith('_'):
env[env_prefix + varname] = install_env[varname]
return {'return':0, 'bat':''}
| bsd-3-clause | -705,101,672,003,192,600 | 30.380282 | 91 | 0.496409 | false | 4.284615 | false | false | false |
feresum/acml | miniml/typeunion.py | 1 | 1303 | class TypeUnion(dict):
def _get(self, k):
return super().__getitem__(k)
def _set(self, k, v):
super().__setitem__(k, v)
def representative(self, k):
while type(self._get(k)) is not int:
k = self._get(k)
return k
def size(self, x):
return self._get(self[x])
def compress(self, start, rep):
while start is not rep:
tmp = self._get(start)
self._set(start, rep)
start = tmp
def __getitem__(self, key):
if key not in self:
return key
rep = self.representative(key)
self.compress(key, rep)
return rep
def join(self, a, b):
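        # Union by size, except that a concrete (non-variable) type always
        # wins the representative role over a type variable.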
for x in a, b:
if x not in self:
self._set(x, 1)
ar, br = self.representative(a), self.representative(b)
if ar is br: return
av, bv = a.isTypeVariable(), b.isTypeVariable()
az, bz = self._get(ar), self._get(br)
if bz > az if av == bv else av:
self._set(ar, br)
self._set(br, az + bz)
else:
self._set(br, ar)
self._set(ar, az + bz)
def import_dict(self, d):
for k, v in d.items():
self.join(k, v)
def __setitem__(*a):
raise Exception("Don't do that")
| mit | 5,240,343,500,133,450,000 | 30.780488 | 63 | 0.488872 | false | 3.512129 | false | false | false |
dirjud/pickup | event/models.py | 1 | 3032 | from django.db import models
from django.utils import timezone
import pytz
import datetime
def hash(n):
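    # Obfuscate an integer id by swapping its low and high 16-bit halves.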
n = int(n)
return ((0x0000FFFF & n)<<16) + ((0xFFFF0000 & n)>>16)
class EventInstance(object):
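    # Pairs an Event with one concrete occurrence date and the attendance
    # signups recorded for that date.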
def __init__(self, event, event_time, date):
self.date = date.date()
self.time = date.time()
self.event= event
self.event_time = event_time
self.attending = Signup.objects.filter(event=event, date=self.date, status=Signup.ATTENDING)
self.not_attending = Signup.objects.filter(event=event, date=self.date, status=Signup.NOT_ATTENDING)
def get_date_id(self):
return "%4d_%02d_%02d" % (self.date.year, self.date.month, self.date.day)
class Event(models.Model):
name = models.CharField(max_length=100)
timezone = models.CharField(max_length=50, choices=[(x,x) for x in pytz.all_timezones ], default="US/Mountain")
description = models.TextField()
location_lat = models.FloatField()
location_lon = models.FloatField()
addr = models.CharField(max_length=200)
city = models.CharField(max_length=100)
state = models.CharField(max_length=5)
zip = models.CharField(max_length=20)
contact_emails = models.CharField(max_length=500, help_text='Comma separated list of email addresses')
def __unicode__(self):
return self.name
def get_next(self):
timezone.activate(pytz.timezone(self.timezone))
now = timezone.now().date()
events = [ EventInstance(self, d, d.get_next(now)) for d in self.times.all() ]
events.sort(key=lambda x:x.date)
return events
class EventTime(models.Model):
DAY_CHOICES = (
(0, "Monday", ),
(1, "Tuesday", ),
(2, "Wednesday",),
(3, "Thursday", ),
(4, "Friday", ),
(5, "Saturday", ),
(6, "Sunday", ),
)
event= models.ForeignKey(Event, related_name="times")
day = models.IntegerField(choices=DAY_CHOICES)
time = models.TimeField()
def get_next(self, now):
dow = now.weekday()
td = datetime.timedelta(days=(self.day - dow) % 7)
next_date = now + td
return datetime.datetime.combine(next_date, self.time)
class Signup(models.Model):
ATTENDING = 0
NOT_ATTENDING = 1
status_choices = (
( ATTENDING , "I'm In", ),
( NOT_ATTENDING, "I'm Out", ),
)
event = models.ForeignKey(Event, related_name="signups")
date = models.DateField()
name = models.CharField(max_length=100)
status= models.IntegerField(choices=status_choices, blank=False, default=ATTENDING)
def hash(self):
return hash(self.pk)
class Comment(models.Model):
class Meta:
ordering = ["-timestamp"]
event = models.ForeignKey(Event, related_name="comments")
name = models.CharField(max_length=100)
comment = models.TextField()
timestamp = models.DateTimeField(auto_now_add=True)
| gpl-2.0 | -7,187,010,939,716,523,000 | 33.850575 | 121 | 0.611148 | false | 3.481056 | false | false | false |
team-xue/xue | xue/cms/utils/helpers.py | 4 | 2000 | # -*- coding: utf-8 -*-
from django.conf import settings
# modify reversions to match our needs if required...
def reversion_register(model_class, fields=None, follow=(), format="json", exclude_fields=None):
"""CMS interface to reversion api - helper function. Registers model for
reversion only if reversion is available.
Auto excludes publisher fields.
"""
if not 'reversion' in settings.INSTALLED_APPS:
return
if fields and exclude_fields:
raise ValueError("Just one of fields, exclude_fields arguments can be passed.")
opts = model_class._meta
local_fields = opts.local_fields + opts.local_many_to_many
if fields is None:
fields = [field.name for field in local_fields]
exclude_fields = exclude_fields or []
fields = filter(lambda name: not name in exclude_fields, fields)
from cms.utils import reversion_hacks
reversion_hacks.register_draft_only(model_class, fields, follow, format)
def make_revision_with_plugins(obj, user=None, message=None):
    """
    Only add to revision if it is a draft.
    """
    from cms.models.pluginmodel import CMSPlugin
    import reversion
    revision_manager = reversion.revision
cls = obj.__class__
if cls in revision_manager._registry:
placeholder_relation = find_placeholder_relation(obj)
if revision_manager.is_active():
# add toplevel object to the revision
revision_manager.add(obj)
# add plugins and subclasses to the revision
filters = {'placeholder__%s' % placeholder_relation: obj}
for plugin in CMSPlugin.objects.filter(**filters):
plugin_instance, admin = plugin.get_plugin_instance()
if plugin_instance:
revision_manager.add(plugin_instance)
revision_manager.add(plugin)
def find_placeholder_relation(obj):
return 'page' | bsd-3-clause | -278,905,006,542,900,480 | 32.35 | 96 | 0.639 | false | 4.395604 | false | false | false |
lizenn/erlang-dbus | test/dbus_client_SUITE_data/example-service.py | 1 | 1776 | #!/usr/bin/env python3
from gi.repository import GObject
import dbus
import dbus.service
import dbus.glib
class SampleObject(dbus.service.Object):
def __init__(self, bus_name, object_path="/"):
dbus.service.Object.__init__(self, bus_name, object_path)
@dbus.service.method("net.lizenn.dbus.SampleInterface",
in_signature='s', out_signature='as')
def HelloWorld(self, hello_message):
print (str(hello_message))
self.SampleSignal(42, 24)
self.SampleSignal2()
return ["Hello World", " from example-service.py"]
@dbus.service.method("net.lizenn.dbus.SampleInterface",
out_signature='as')
def GetTuple(self):
return ("Hello Tuple", " from example-service.py")
@dbus.service.method("net.lizenn.dbus.SampleInterface")
def GetDict(self):
return {"first": "Hello Dict", "second": " from example-service.py"}
@dbus.service.method("net.lizenn.dbus.SampleInterface", in_signature='u')
def GetString(self, size):
s = ""
i = size
while (i > 0):
s += "x"
i -= 1
return s
@dbus.service.signal("net.lizenn.dbus.SampleInterface")
def SampleSignal(self, x, y):
print("SampleSignal")
@dbus.service.signal("net.lizenn.dbus.SampleInterface")
def SampleSignal2(self):
print("SampleSignal2")
session_bus = dbus.SessionBus()
service = dbus.service.BusName("net.lizenn.dbus.SampleService", bus=session_bus)
SampleObject(service, object_path="/root")
SampleObject(service, object_path="/root/child1")
SampleObject(service, object_path="/root/child2/little1")
SampleObject(service, object_path="/root/child2/little2")
mainloop = GObject.MainLoop()
mainloop.run()
| apache-2.0 | 2,494,797,983,573,573,600 | 31.290909 | 80 | 0.648086 | false | 3.552 | false | false | false |
z/xonotic-server-management-suite | tests/test_command.py | 1 | 1055 | import os
import xsms.util as util
from xsms.command import Command
from xsms.config import conf
root_dir = os.path.dirname(os.path.abspath(__file__))
def test_command_object():
command = Command(conf=conf)
assert isinstance(command, Command)
assert command.conf['xsms_config_root'] == os.path.expanduser('~/.xsms')
def test_command_generate_engine_configs():
command = Command(conf=conf)
command.generate_engine_configs()
assert os.path.exists(conf['supervisor_conf'])
def test_command_generate_server_configs():
command = Command(conf=conf)
command.generate_server_configs()
assert os.path.exists('{}/insta.cfg'.format(conf['xsms_generated_servers_root']))
def test_command_generate_custom_server_configs():
util.check_if_not_create('{}/insta.cfg.tpl'.format(conf['xsms_templates_servers_root']), '{}/data/servers/custom.cfg.tpl'.format(root_dir))
command = Command(conf=conf)
command.generate_server_configs()
assert os.path.exists('{}/insta.cfg'.format(conf['xsms_generated_servers_root'])) | mit | 3,742,688,121,942,573,000 | 33.064516 | 143 | 0.718483 | false | 3.403226 | true | false | false |
Naeka/wagtailmodelchooser | tests/conftest.py | 1 | 3177 | def pytest_configure():
from django.conf import settings
settings.configure(
DEBUG_PROPAGATE_EXCEPTIONS=True,
DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:'}},
SITE_ID=1,
SECRET_KEY='not very secret in tests',
USE_I18N=True,
USE_L10N=True,
STATIC_URL='/static/',
ROOT_URLCONF='tests.urls',
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
# insert your TEMPLATE_DIRS here
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
# Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
# list if you haven't customized them:
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
}
],
MIDDLEWARE=(
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
),
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'wagtail.contrib.forms',
'wagtail.contrib.redirects',
'wagtail.embeds',
'wagtail.sites',
'wagtail.users',
'wagtail.snippets',
'wagtail.documents',
'wagtail.images',
'wagtail.search',
'wagtail.admin',
'wagtail.core',
'wagtailmodelchooser',
'modelcluster',
'taggit',
'tests',
),
PASSWORD_HASHERS=(
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
'django.contrib.auth.hashers.CryptPasswordHasher',
),
)
try:
import django
django.setup()
except AttributeError:
pass
| bsd-2-clause | -5,454,537,738,664,717,000 | 36.821429 | 82 | 0.533207 | false | 4.917957 | false | false | false |
tkliuxing/django-wiki | wiki/plugins/template/south_migrations/0001_initial.py | 1 | 12750 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Template'
db.create_table('template_template', (
('reusableplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(
to=orm['wiki.ReusablePlugin'], unique=True, primary_key=True)),
('current_revision', self.gf('django.db.models.fields.related.OneToOneField')(
blank=True, related_name='current_set', unique=True, null=True, to=orm['template.TemplateRevision'])),
('template_title', self.gf('django.db.models.fields.SlugField')
(unique=True, max_length=50)),
('extend_to_children', self.gf(
'django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('template', ['Template'])
# Adding model 'TemplateRevision'
db.create_table('template_templaterevision', (
('id', self.gf('django.db.models.fields.AutoField')
(primary_key=True)),
('revision_number', self.gf(
'django.db.models.fields.IntegerField')()),
('user_message', self.gf(
'django.db.models.fields.TextField')(blank=True)),
('automatic_log', self.gf(
'django.db.models.fields.TextField')(blank=True)),
('ip_address', self.gf('django.db.models.fields.IPAddressField')
(max_length=15, null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(
to=orm['auth.User'], null=True, on_delete=models.SET_NULL, blank=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')
(auto_now=True, blank=True)),
('created', self.gf('django.db.models.fields.DateTimeField')
(auto_now_add=True, blank=True)),
('previous_revision', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[
'template.TemplateRevision'], null=True, on_delete=models.SET_NULL, blank=True)),
('deleted', self.gf('django.db.models.fields.BooleanField')
(default=False)),
('locked', self.gf('django.db.models.fields.BooleanField')
(default=False)),
('template', self.gf('django.db.models.fields.related.ForeignKey')
(to=orm['template.Template'])),
('template_content', self.gf(
'django.db.models.fields.TextField')(blank=True)),
('description', self.gf(
'django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('template', ['TemplateRevision'])
def backwards(self, orm):
# Deleting model 'Template'
db.delete_table('template_template')
# Deleting model 'TemplateRevision'
db.delete_table('template_templaterevision')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'template.template': {
'Meta': {'object_name': 'Template', '_ormbases': ['wiki.ReusablePlugin']},
'current_revision': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'current_set'", 'unique': 'True', 'null': 'True', 'to': "orm['template.TemplateRevision']"}),
'reusableplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['wiki.ReusablePlugin']", 'unique': 'True', 'primary_key': 'True'}),
'template_title': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'extend_to_children': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
},
'template.templaterevision': {
'Meta': {'ordering': "('created',)", 'object_name': 'TemplateRevision'},
'automatic_log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'previous_revision': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['template.TemplateRevision']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'revision_number': ('django.db.models.fields.IntegerField', [], {}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['template.Template']"}),
'template_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'user_message': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'wiki.article': {
'Meta': {'object_name': 'Article'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_revision': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'current_set'", 'unique': 'True', 'null': 'True', 'to': "orm['wiki.ArticleRevision']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'group_read': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'group_write': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'other_read': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'other_write': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_articles'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"})
},
'wiki.articleplugin': {
'Meta': {'object_name': 'ArticlePlugin'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Article']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'wiki.articlerevision': {
'Meta': {'ordering': "('created',)", 'unique_together': "(('article', 'revision_number'),)", 'object_name': 'ArticleRevision'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Article']"}),
'automatic_log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'previous_revision': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.ArticleRevision']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'revision_number': ('django.db.models.fields.IntegerField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'user_message': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'wiki.reusableplugin': {
'Meta': {'object_name': 'ReusablePlugin', '_ormbases': ['wiki.ArticlePlugin']},
'articleplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['wiki.ArticlePlugin']", 'unique': 'True', 'primary_key': 'True'}),
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shared_plugins_set'", 'symmetrical': 'False', 'to': "orm['wiki.Article']"})
}
}
complete_apps = ['template']
| gpl-3.0 | -1,443,735,191,738,663,700 | 73.561404 | 214 | 0.567373 | false | 3.787879 | false | false | false |
obs145628/2048-ai | tests/commander.py | 1 | 2290 | import os
import subprocess
import ioutils
def ebool(x):
if x:
return 'T'
else:
return 'F'
class Commander:
def __init__(self):
self.use_valgrind = False
self.timeout = -1
    def run_cmd(self, args, read_stdout = True, read_stderr = True):
        run_dict = { 'args': args }
        if read_stdout:
            run_dict['stdout'] = subprocess.PIPE
        if read_stderr:
            run_dict['stderr'] = subprocess.PIPE
        # Execute the command and capture the requested streams.
        res = subprocess.run(**run_dict)
        return (res.returncode, res.stdout, res.stderr)
'''
Returns tuple (valid, errs, res)
valid: bool, true if no errors
errs: string errors
res: subprocess object
'''
def run_test(self, cmd, cwd = None, code = None, env = None,
has_stdout = None, stdout = None, stdout_file = None,
has_stderr = None, stderr = None, stderr_file = None):
if env != None:
new_env = os.environ.copy()
for key in env.keys():
new_env[key] = env[key]
env = new_env
res = subprocess.run(cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE,
cwd = cwd, env = env)
cmd_code = res.returncode
cmd_stdout = res.stdout
cmd_stderr = res.stderr
errs = []
if code != None and code != cmd_code:
errs.append(('CODE', cmd_code, code))
if has_stdout != None and (len(cmd_stdout) == 0) == has_stdout:
errs.append(('HAS_STDOUT', ebool(len(cmd_stdout)), ebool(has_stdout)))
if stdout != None and cmd_stdout.decode('ascii') != stdout:
errs.append(('STDOUT', '.', '.'))
if stdout_file != None and not ioutils.file_content_is(cmd_stdout, stdout_file):
errs.append(('STDOUT_FILE', '.', '.'))
if has_stderr != None and (len(cmd_stderr) == 0) == has_stderr:
errs.append(('HAS_STDERR', ebool(len(cmd_stderr)), ebool(has_stderr)))
if stderr != None and cmd_stderr.decode('ascii') != stderr:
errs.append(('STDERR', '.', '.'))
if stderr_file != None and not ioutils.file_content_is(cmd_stderr, stderr_file):
errs.append(('STDERR_FILE', '.', '.'))
return (len(errs) == 0, errs, res)
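    # Illustrative usage (a sketch with a hypothetical binary name and expectations,
    # not part of the original test suite):
    #   cmd = Commander()
    #   ok, errs, res = cmd.run_test(['./2048-ai', '--help'], code=0, has_stdout=True)
    #   assert ok, errs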
| mit | -7,483,657,585,588,319,000 | 29.533333 | 88 | 0.531878 | false | 3.687601 | false | false | false |
jfterpstra/bluebottle | bluebottle/slides/tests/test_api.py | 1 | 2299 | import json
from django.core.urlresolvers import reverse
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.slides import SlideFactory, \
DraftSlideFactory
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
class SlideTestCase(BluebottleTestCase):
"""
Base class for test cases for ``slide`` module.
The testing classes for ``slide`` module related to the API must
subclass this.
"""
def setUp(self):
super(SlideTestCase, self).setUp()
self.user = BlueBottleUserFactory.create()
self.slide1 = SlideFactory.create(author=self.user, language='nl')
self.slide2 = SlideFactory.create(author=self.user, language='en')
self.slide3 = DraftSlideFactory.create(author=self.user,
language='en', )
class SlideListTestCase(SlideTestCase):
"""
Test case for ``SlideList`` API view.
    Endpoint: the ``slide_list`` route.
"""
def test_slides_list(self):
"""
        Ensure we return the list of published slides.
"""
response = self.client.get(reverse('slide_list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 2)
def test_api_slides_list_filtered(self):
"""
Ensure slides can be filtered by language
"""
response = self.client.get(reverse('slide_list'), {'language': 'en'})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1)
def test_slides_list_data(self):
"""
Ensure get request returns record with correct data.
"""
response = self.client.get(reverse('slide_list'), {'language': 'nl'})
slide = response.data['results'][0]
self.assertEqual(slide['title'], self.slide1.title)
self.assertEqual(slide['body'], self.slide1.body)
self.assertEqual(slide['author'], self.slide1.author.id)
self.assertEqual(slide['status'], self.slide1.status)
self.assertEqual(slide['sequence'], self.slide1.sequence)
| bsd-3-clause | 7,004,832,615,452,423,000 | 31.842857 | 77 | 0.661157 | false | 4.019231 | true | false | false |
grundic/yagocd | yagocd/resources/agent.py | 1 | 6973 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import json
from yagocd.resources import Base, BaseManager
from yagocd.resources.job import JobInstance
from yagocd.util import since
@since('15.2.0')
class AgentManager(BaseManager):
"""
The agents API allows users with administrator role to manage agents.
`Official documentation. <https://api.go.cd/current/#agents>`_
:versionadded: 15.2.0.
:warning: Please note that this API requires using v4 of the API using `Accept: application/vnd.go.cd.v4+json`
"""
RESOURCE_PATH = '{base_api}/agents'
ACCEPT_HEADER = 'application/vnd.go.cd.v4+json'
VERSION_TO_ACCEPT_HEADER = {
'16.1.0': 'application/vnd.go.cd.v1+json',
'16.7.0': 'application/vnd.go.cd.v2+json',
'16.9.0': 'application/vnd.go.cd.v3+json',
}
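    # Illustrative usage (a sketch; assumes an already-constructed session object as
    # used elsewhere in yagocd -- the `session` name here is hypothetical):
    #   manager = AgentManager(session=session)
    #   for agent in manager:            # __iter__ delegates to list()
    #       print(agent.data.uuid)
    #   agent = manager['agent-uuid']    # __getitem__ delegates to get(uuid)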
def __iter__(self):
"""
Method add iterator protocol for the manager.
:return: an array of agents.
:rtype: list of yagocd.resources.agent.AgentEntity
"""
return iter(self.list())
def __getitem__(self, uuid):
"""
Method add possibility to get agent by the uuid using dictionary like syntax.
:param uuid: uuid of the agent
:return: Agent entity.
:rtype: yagocd.resources.agent.AgentEntity
"""
return self.get(uuid=uuid)
def list(self): # noqa
"""
Lists all available agents, these are agents that are present in the
<agents/> tag inside cruise-config.xml and also agents that are in
Pending state awaiting registration.
:versionadded: 15.2.0.
:return: an array of agents.
:rtype: list of yagocd.resources.agent.AgentEntity
"""
response = self._session.get(
path=self.RESOURCE_PATH.format(base_api=self.base_api),
headers={'Accept': self._accept_header()},
)
agents = list()
# Depending on Go version, return value would be either list of dict.
# Support both cases here.
json_response = response.json()
if isinstance(json_response, list):
agents_json = json_response
elif isinstance(json_response, dict):
agents_json = json_response.get('_embedded', {}).get('agents', {})
else:
raise ValueError("Expected response to be in [list, dict], but '{}' found!".format(json_response))
for data in agents_json:
agents.append(AgentEntity(session=self._session, data=data))
return agents
def dict(self): # noqa
"""
Wrapper for `list()` method, that transforms founded agents to
dictionary by `uuid` key.
:return: dictionary of agents with `uuid` as a key and agent as a value.
:rtype: dict[str, yagocd.resources.agent.AgentEntity]
"""
agents = self.list()
result = dict()
for agent in agents:
result[agent.data.uuid] = agent
return result
def get(self, uuid):
"""
Gets an agent by its unique identifier (uuid).
:versionadded: 15.2.0.
:param uuid: uuid of the agent
:return: Agent entity.
:rtype: yagocd.resources.agent.AgentEntity
"""
response = self._session.get(
path=self._session.urljoin(self.RESOURCE_PATH, uuid).format(
base_api=self.base_api
),
headers={'Accept': self._accept_header()},
)
return AgentEntity(session=self._session, data=response.json())
def update(self, uuid, config):
"""
Update some attributes of an agent.
:versionadded: 15.2.0.
:param uuid: uuid of the agent
:param config: dictionary of parameters for update
:return: Agent entity.
:rtype: yagocd.resources.agent.AgentEntity
"""
response = self._session.patch(
path=self._session.urljoin(self.RESOURCE_PATH, uuid).format(
base_api=self.base_api
),
data=json.dumps(config),
headers={
'Accept': self._accept_header(),
'Content-Type': 'application/json'
},
)
return AgentEntity(session=self._session, data=response.json())
def delete(self, uuid):
"""
Deletes an agent.
:versionadded: 15.2.0.
:param uuid: uuid of the agent.
:return: a message confirmation if the agent was deleted.
"""
response = self._session.delete(
path=self._session.urljoin(self.RESOURCE_PATH, uuid).format(
base_api=self.base_api
),
headers={'Accept': self._accept_header()},
)
return response.json().get('message')
@since('14.3.0')
def job_history(self, uuid, offset=0):
"""
Lists the jobs that have executed on an agent.
:versionadded: 14.3.0.
:param uuid: uuid of the agent.
:param offset: number of jobs to be skipped.
:return: an array of :class:`yagocd.resources.job.JobInstance` along with the job transitions.
:rtype: list of yagocd.resources.job.JobInstance
"""
response = self._session.get(
path=self._session.urljoin(self.RESOURCE_PATH, uuid, 'job_run_history', offset).format(
base_api=self.base_api
),
headers={'Accept': 'application/json'},
)
jobs = list()
for data in response.json()['jobs']:
jobs.append(JobInstance(session=self._session, data=data, stage=None))
return jobs
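    # Illustrative pagination sketch for job_history (assumes `manager` is an
    # AgentManager instance; the uuid is hypothetical and the page size per call
    # is decided by the GoCD server):
    #   offset, history = 0, []
    #   while True:
    #       page = manager.job_history('agent-uuid', offset=offset)
    #       if not page:
    #           break
    #       history.extend(page)
    #       offset += len(page)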
class AgentEntity(Base):
pass
| isc | 8,598,892,318,151,013,000 | 32.047393 | 114 | 0.601176 | false | 4.160501 | false | false | false |