# ---- akej74/grid-control :: grid-control/helper.py (GPL-3.0) ----
"""
helper.py
---------
Implements various helper functions, e.g. to display messages using a Qt message box.
"""
import io
import sys
import traceback
from PyQt5 import QtCore, QtWidgets, QtGui
def excepthook(excType, excValue, tracebackobj):
"""Rewritten "excepthook" function, to display a message box with details about the exception.
@param excType exception type
@param excValue exception value
@param tracebackobj traceback object
"""
separator = '-' * 40
notice = "An unhandled exception has occurred\n"
tbinfofile = io.StringIO()
traceback.print_tb(tracebackobj, None, tbinfofile)
tbinfofile.seek(0)
tbinfo = tbinfofile.read()
errmsg = '%s: \n%s' % (str(excType), str(excValue))
sections = [separator, errmsg, separator, tbinfo]
msg = '\n'.join(sections)
# Create a QMessagebox
error_box = QtWidgets.QMessageBox()
error_box.setText(str(notice)+str(msg))
error_box.setWindowTitle("Grid Control - unhandled exception")
error_box.setIcon(QtWidgets.QMessageBox.Critical)
error_box.setStandardButtons(QtWidgets.QMessageBox.Ok)
error_box.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)
# Show the window
error_box.exec_()
sys.exit(1)
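# Illustrative usage (a sketch; the import path below is an assumption, not
# part of this module): the handler is typically installed once, early in
# application start-up, so any uncaught exception is reported in a dialog
# instead of crashing the Qt event loop silently:
#
#   import sys
#   import helper
#   sys.excepthook = helper.excepthook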
def exception_message_qthread(excType, excValue, tracebackobj):
"""Display an error message box with the exception details."""
separator = '-' * 40
notice = "An exception occurred in the polling thread!\n"
tbinfofile = io.StringIO()
traceback.print_tb(tracebackobj, None, tbinfofile)
tbinfofile.seek(0)
tbinfo = tbinfofile.read()
errmsg = '%s: \n%s' % (str(excType), str(excValue))
sections = [notice, separator, errmsg, separator, tbinfo]
msg = '\n'.join(sections)
return msg
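# Illustrative pattern (a sketch; the signal name "exception_signal" is an
# assumption): widgets must only be touched from the GUI thread, so the
# polling thread formats the message here and hands it to the main thread,
# which then displays it, e.g. with show_error():
#
#   msg = exception_message_qthread(*sys.exc_info())
#   self.exception_signal.emit(msg)  # main-thread slot calls show_error(msg)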
def show_error(message):
"""Display "message" in a "Critical error" message box with 'OK' button."""
# Create a QMessagebox
message_box = QtWidgets.QMessageBox()
message_box.setText(message)
message_box.setWindowTitle("Error")
message_box.setWindowIcon(QtGui.QIcon(QtGui.QPixmap(":/icons/grid.png")))
message_box.setIcon(QtWidgets.QMessageBox.Critical)
message_box.setStandardButtons(QtWidgets.QMessageBox.Ok)
message_box.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)
#Show the window
message_box.exec_()
def show_notification(message):
"""Display "message" in a "Information" message box with 'OK' button."""
# Create a QMessagebox
message_box = QtWidgets.QMessageBox()
message_box.setText(message)
message_box.setWindowTitle("Note")
message_box.setWindowIcon(QtGui.QIcon(QtGui.QPixmap(":/icons/grid.png")))
message_box.setIcon(QtWidgets.QMessageBox.Information)
message_box.setStandardButtons(QtWidgets.QMessageBox.Ok)
message_box.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)
#Show the window
message_box.exec_()
class CustomDialog(QtWidgets.QDialog):
def __init__(self, parent=None):
super().__init__(parent)
def closeEvent(self, event):
event.accept()
sys.exit(1)
# ---- tehtechguy/mldata :: src/mldata/status_bar.py (MIT) ----
# status_bar.py
#
# Author : James Mnatzaganian
# Contact : http://techtorials.me
# Date Created : 09/17/14
#
# Description : Fancy console-based status bar.
# Usage : See "run_example()".
# Python Version : 2.7.8
#
# Adapted From : https://github.com/tehtechguy/py_StatusBar
#
# License : MIT License http://opensource.org/licenses/mit-license.php
# Copyright : (c) 2015 James Mnatzaganian
"""
Fancy console-based status bar.
G{packagetree mldata}
"""
__docformat__ = 'epytext'
# Native imports
import sys
# Program imports
from mldata.exception_handler import BaseException, wrap_error
###############################################################################
########## Exception Handling
###############################################################################
class StatusBarLengthTooSmallError(BaseException):
"""
Exception if the bar length drops below a length of one.
"""
def __init__(self, sb):
"""
Initialize this class.
@param sb: The status bar object to finish.
"""
sb.finish()
self.msg = wrap_error('The bar length became too small to represent. '
'This occurs when the percent complete reaches a very large '
'number. Make sure that your total_length value is accurate.')
###############################################################################
########## Primary Classes
###############################################################################
class StatusBar(object):
"""
Class for a status bar.
"""
def __init__(self, total_length, bar_length=72, max_bar_length=72,
min_bar_length=1, style=('[','=',']')):
"""
Initializes this StatusBar instance.
@param total_length: The total number of steps.
@param bar_length: How many characters the bar should be on the screen.
@param max_bar_length: The maximum length of the bar. Set to be 79 - 7.
(screen width for Windows compatibility, *NIX can use 80) -
(2 closing braces + 1 space + 3 digits + 1 percent sign). Make sure the
max_bar_length is always 7 less than the total window size.
@param min_bar_length: The smallest bar length allowed; if the bar would
need to shrink below this, StatusBarLengthTooSmallError is raised.
@param style: The status bar style format to use. This needs to be a
tuple of three elements: start of the bar, the bar progress notation,
and end of the bar.
"""
# Initializations
self.total_length = total_length
self.bar_length = bar_length
self.max_bar_length = max_bar_length
self.min_bar_length = min_bar_length
self.style = style
self.position = 0
self.percent_length = 3
# Ensure that the minimum bar length isn't too small
if self.min_bar_length < 1:
self.min_bar_length = 1
# Ensure that everything can fit in a normal window
if self.bar_length > self.max_bar_length:
self.bar_length = max_bar_length
# Ensure that the bar_length isn't too small
if self.bar_length < self.min_bar_length:
self.bar_length = self.min_bar_length
# Ensure that the provided style is valid
if (len(style) != 3) or (sum([len(x) for x in style]) != 3):
self.style = ('[','=',']')
def increment(self, step_size=1):
"""
Increments the bars position by the specified amount.
@param step_size: The number to increment the bar's position by.
@raise StatusBarLengthTooSmallError: Raised if the length of the status
bar becomes too small.
"""
# Update position
self.position += step_size
# Calculate the progress
progress = self.position / float(self.total_length)
percent_progress = int(progress * 100)
percent_length = len(str(percent_progress))
# Calculate the current bar length, limiting it to the max size
current_bar_length = min(int(progress * self.bar_length),
self.bar_length)
# Shrink bar to account for overflow scenarios
if (current_bar_length == self.bar_length) and \
(percent_length > self.percent_length):
# If the bar length has room to grow, give it to it
if (self.bar_length + percent_length - 3) < self.max_bar_length:
self.bar_length += 1
current_bar_length = self.bar_length
self.percent_length = percent_length
else:
self.bar_length -= (percent_length - self.percent_length)
current_bar_length = self.bar_length
self.percent_length = percent_length
# Check for bar being too small
if self.bar_length < self.min_bar_length:
raise StatusBarLengthTooSmallError(self)
# Update the status bar
bars = self.style[1] * current_bar_length
bar_spaces = ' ' * (self.bar_length - current_bar_length)
sys.stdout.write('\r{0}{1}{2}{3} {4}%'.format(self.style[0], bars,
bar_spaces, self.style[2], percent_progress))
sys.stdout.flush()
def reset(self):
"""
Resets the bar's current position. This method should be used as
a way to reuse the bar. Do not use it unless you have first called the
"finish" method.
"""
self.position = 0
def finish(self):
"""
Ends the status bar, resetting the terminal to normal usage.
"""
sys.stdout.write('\n')
###############################################################################
########## Primary Functions
###############################################################################
def run_example():
"""
Example of various usage cases for this status bar.
Note - This example will not work with Python 3, due to the print
statements.
"""
# Native imports
import time
# Initializations
total_length = 100 # Total number of steps
sb = StatusBar(total_length) # Create the class instance
print '\nThis example shows how the status bar will work under regular ' \
'conditions'
# Increment the status bar by 1 for every item
for i in xrange(total_length):
time.sleep(0.05)
sb.increment()
# Disable the status bar
sb.finish()
print '\nThis example shows how the status bar handles overflow ' \
'(completeness > 100%)'
# Create the class instance
sb = StatusBar(total_length, bar_length=70)
# Increment the status bar by 100 for every item
for i in xrange(total_length):
time.sleep(0.05)
sb.increment(100)
# Disable the status bar
sb.finish()
print '\nThis example shows what happens if the status bar gets to be ' \
'small'
# Create the class instance
sb = StatusBar(total_length, bar_length=3)
# Increment the status bar
for i in xrange(total_length):
time.sleep(0.05)
sb.increment(100*i)
# Disable the status bar
sb.finish()
print "\nThis example shows what happens if the status bar can't grow " \
"anymore."
# Create the class instance
sb = StatusBar(total_length, bar_length=3, max_bar_length=5,
min_bar_length=2)
# Increment the status bar to cause fatal termination
try:
for i in xrange(total_length):
time.sleep(0.05)
sb.increment(500*i)
except StatusBarLengthTooSmallError, e:
print 'StatusBarLengthTooSmallError Exception Caught', e
print '\nThis example shows how styling the status bar works.' \
'\nNote that invalid styles will be ignored.'
# Create the class instance
sb = StatusBar(total_length, style=('{','-','}'))
# Increment the status bar
for i in xrange(total_length):
time.sleep(0.05)
sb.increment(i)
# Disable the status bar
sb.finish()
if __name__ == "__main__":
run_example()
# ---- NexusIS/tempest :: tempest/config.py (Apache-2.0) ----
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import logging as std_logging
import os
from oslo_config import cfg
from oslo_log import log as logging
from tempest.test_discover import plugins
# TODO(marun) Replace use of oslo_config's global ConfigOpts
# (cfg.CONF) instance with a local instance (cfg.ConfigOpts()) once
# the cli tests move to the clients. The cli tests rely on oslo
# incubator modules that use the global cfg.CONF.
_CONF = cfg.CONF
def register_opt_group(conf, opt_group, options):
if opt_group:
conf.register_group(opt_group)
for opt in options:
conf.register_opt(opt, group=getattr(opt_group, 'name', None))
auth_group = cfg.OptGroup(name='auth',
title="Options for authentication and credentials")
AuthGroup = [
cfg.StrOpt('test_accounts_file',
help="Path to the yaml file that contains the list of "
"credentials to use for running tests. If used when "
"running in parallel you have to make sure sufficient "
"credentials are provided in the accounts file. For "
"example if no tests with roles are being run it requires "
"at least `2 * CONC` distinct accounts configured in "
" the `test_accounts_file`, with CONC == the "
"number of concurrent test processes."),
cfg.BoolOpt('allow_tenant_isolation',
default=True,
help="Allows test cases to create/destroy tenants and "
"users. This option requires that OpenStack Identity "
"API admin credentials are known. If false, isolated "
"test cases and parallel execution, can still be "
"achieved configuring a list of test accounts",
deprecated_opts=[cfg.DeprecatedOpt('allow_tenant_isolation',
group='compute'),
cfg.DeprecatedOpt('allow_tenant_isolation',
group='orchestration')]),
cfg.ListOpt('tempest_roles',
help="Roles to assign to all users created by tempest",
default=[]),
cfg.StrOpt('default_credentials_domain_name',
default='Default',
help="Default domain used when getting v3 credentials. "
"This is the name keystone uses for v2 compatibility.",
deprecated_opts=[cfg.DeprecatedOpt(
'tenant_isolation_domain_name',
group='auth')]),
cfg.BoolOpt('create_isolated_networks',
default=True,
help="If allow_tenant_isolation is set to True and Neutron is "
"enabled Tempest will try to create a useable network, "
"subnet, and router when needed for each tenant it "
"creates. However in some neutron configurations, like "
"with VLAN provider networks, this doesn't work. So if "
"set to False the isolated networks will not be created"),
]
identity_group = cfg.OptGroup(name='identity',
title="Keystone Configuration Options")
IdentityGroup = [
cfg.StrOpt('catalog_type',
default='identity',
help="Catalog type of the Identity service."),
cfg.BoolOpt('disable_ssl_certificate_validation',
default=False,
help="Set to True if using self-signed SSL certificates."),
cfg.StrOpt('ca_certificates_file',
default=None,
help='Specify a CA bundle file to use in verifying a '
'TLS (https) server certificate.'),
cfg.StrOpt('uri',
help="Full URI of the OpenStack Identity API (Keystone), v2"),
cfg.StrOpt('uri_v3',
help='Full URI of the OpenStack Identity API (Keystone), v3'),
cfg.StrOpt('auth_version',
default='v2',
help="Identity API version to be used for authentication "
"for API tests."),
cfg.StrOpt('region',
default='RegionOne',
help="The identity region name to use. Also used as the other "
"services' region name unless they are set explicitly. "
"If no such region is found in the service catalog, the "
"first found one is used."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the identity service."),
cfg.StrOpt('username',
help="Username to use for Nova API requests."),
cfg.StrOpt('tenant_name',
help="Tenant name to use for Nova API requests."),
cfg.StrOpt('admin_role',
default='admin',
help="Role required to administrate keystone."),
cfg.StrOpt('password',
help="API key to use when authenticating.",
secret=True),
cfg.StrOpt('domain_name',
help="Domain name for authentication (Keystone V3)."
"The same domain applies to user and project"),
cfg.StrOpt('alt_username',
help="Username of alternate user to use for Nova API "
"requests."),
cfg.StrOpt('alt_tenant_name',
help="Alternate user's Tenant name to use for Nova API "
"requests."),
cfg.StrOpt('alt_password',
help="API key to use when authenticating as alternate user.",
secret=True),
cfg.StrOpt('alt_domain_name',
help="Alternate domain name for authentication (Keystone V3)."
"The same domain applies to user and project"),
cfg.StrOpt('admin_username',
help="Administrative Username to use for "
"Keystone API requests."),
cfg.StrOpt('admin_tenant_name',
help="Administrative Tenant name to use for Keystone API "
"requests."),
cfg.StrOpt('admin_password',
help="API key to use when authenticating as admin.",
secret=True),
cfg.StrOpt('admin_domain_name',
help="Admin domain name for authentication (Keystone V3)."
"The same domain applies to user and project"),
cfg.StrOpt('default_domain_id',
default='default',
help="ID of the default domain"),
]
identity_feature_group = cfg.OptGroup(name='identity-feature-enabled',
title='Enabled Identity Features')
IdentityFeatureGroup = [
cfg.BoolOpt('trust',
default=True,
help='Does the identity service have delegation and '
'impersonation enabled'),
cfg.BoolOpt('api_v2',
default=True,
help='Is the v2 identity API enabled'),
cfg.BoolOpt('api_v3',
default=True,
help='Is the v3 identity API enabled'),
]
compute_group = cfg.OptGroup(name='compute',
title='Compute Service Options')
ComputeGroup = [
cfg.StrOpt('image_ref',
help="Valid primary image reference to be used in tests. "
"This is a required option"),
cfg.StrOpt('image_ref_alt',
help="Valid secondary image reference to be used in tests. "
"This is a required option, but if only one image is "
"available duplicate the value of image_ref above"),
cfg.StrOpt('flavor_ref',
default="1",
help="Valid primary flavor to use in tests."),
cfg.StrOpt('flavor_ref_alt',
default="2",
help='Valid secondary flavor to be used in tests.'),
cfg.StrOpt('image_ssh_user',
default="root",
help="User name used to authenticate to an instance."),
cfg.StrOpt('image_ssh_password',
default="password",
help="Password used to authenticate to an instance."),
cfg.StrOpt('image_alt_ssh_user',
default="root",
help="User name used to authenticate to an instance using "
"the alternate image."),
cfg.IntOpt('build_interval',
default=1,
help="Time in seconds between build status checks."),
cfg.IntOpt('build_timeout',
default=300,
help="Timeout in seconds to wait for an instance to build. "
"Other services that do not define build_timeout will "
"inherit this value."),
cfg.StrOpt('ssh_shell_prologue',
default="set -eu -o pipefail; PATH=$$PATH:/sbin;",
help="Shell fragments to use before executing a command "
"when sshing to a guest."),
cfg.StrOpt('ssh_auth_method',
default='keypair',
help="Auth method used for authenticate to the instance. "
"Valid choices are: keypair, configured, adminpass "
"and disabled. "
"Keypair: start the servers with a ssh keypair. "
"Configured: use the configured user and password. "
"Adminpass: use the injected adminPass. "
"Disabled: avoid using ssh when it is an option."),
cfg.StrOpt('ssh_connect_method',
default='floating',
help="How to connect to the instance? "
"fixed: using the first ip belongs the fixed network "
"floating: creating and using a floating ip."),
cfg.StrOpt('ssh_user',
default='root',
help="User name used to authenticate to an instance."),
cfg.IntOpt('ping_timeout',
default=120,
help="Timeout in seconds to wait for ping to "
"succeed."),
cfg.IntOpt('ping_size',
default=56,
help="The packet size for ping packets originating "
"from remote linux hosts"),
cfg.IntOpt('ping_count',
default=1,
help="The number of ping packets originating from remote "
"linux hosts"),
cfg.IntOpt('ready_wait',
default=0,
help="Additional wait time for clean state, when there is "
"no OS-EXT-STS extension available"),
cfg.StrOpt('fixed_network_name',
help="Name of the fixed network that is visible to all test "
"tenants. If multiple networks are available for a tenant"
" this is the network which will be used for creating "
"servers if tempest does not create a network or a "
"network is not specified elsewhere. It may be used for "
"ssh validation only if floating IPs are disabled."),
cfg.StrOpt('network_for_ssh',
default='public',
help="Network used for SSH connections. Ignored if "
"use_floatingip_for_ssh=true or run_validation=false."),
cfg.BoolOpt('use_floatingip_for_ssh',
default=True,
help="Does SSH use Floating IPs?"),
cfg.StrOpt('catalog_type',
default='compute',
help="Catalog type of the Compute service."),
cfg.StrOpt('region',
default='',
help="The compute region name to use. If empty, the value "
"of identity.region is used instead. If no such region "
"is found in the service catalog, the first found one is "
"used."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the compute service."),
cfg.StrOpt('volume_device_name',
default='vdb',
help="Expected device name when a volume is attached to "
"an instance"),
cfg.IntOpt('shelved_offload_time',
default=0,
help='Time in seconds before a shelved instance is eligible '
'for removal from a host. -1 never offload, 0 offload '
'when shelved. This time should be the same as the time '
'of nova.conf, and some tests will run for as long as the '
'time.'),
cfg.StrOpt('floating_ip_range',
default='10.0.0.0/29',
help='Unallocated floating IP range, which will be used to '
'test the floating IP bulk feature for CRUD operation. '
'This block must not overlap an existing floating IP '
'pool.')
]
compute_features_group = cfg.OptGroup(name='compute-feature-enabled',
title="Enabled Compute Service Features")
ComputeFeaturesGroup = [
cfg.BoolOpt('disk_config',
default=True,
help="If false, skip disk config tests"),
cfg.ListOpt('api_extensions',
default=['all'],
help='A list of enabled compute extensions with a special '
'entry all which indicates every extension is enabled. '
'Each extension should be specified with alias name. '
'Empty list indicates all extensions are disabled'),
cfg.BoolOpt('change_password',
default=False,
help="Does the test environment support changing the admin "
"password?"),
cfg.BoolOpt('console_output',
default=True,
help="Does the test environment support obtaining instance "
"serial console output?"),
cfg.BoolOpt('resize',
default=False,
help="Does the test environment support resizing?"),
cfg.BoolOpt('pause',
default=True,
help="Does the test environment support pausing?"),
cfg.BoolOpt('shelve',
default=True,
help="Does the test environment support shelving/unshelving?"),
cfg.BoolOpt('suspend',
default=True,
help="Does the test environment support suspend/resume?"),
cfg.BoolOpt('live_migration',
default=True,
help="Does the test environment support live migration "
"available?"),
cfg.BoolOpt('metadata_service',
default=True,
help="Does the test environment support metadata service? "
"Ignored unless validation.run_validation=true."),
cfg.BoolOpt('block_migration_for_live_migration',
default=False,
help="Does the test environment use block devices for live "
"migration"),
cfg.BoolOpt('block_migrate_cinder_iscsi',
default=False,
help="Does the test environment block migration support "
"cinder iSCSI volumes. Note, libvirt doesn't support this, "
"see https://bugs.launchpad.net/nova/+bug/1398999"),
# TODO(gilliard): Remove live_migrate_paused_instances at juno-eol.
cfg.BoolOpt('live_migrate_paused_instances',
default=False,
help="Does the test system allow live-migration of paused "
"instances? Note, this is more than just the ANDing of "
"paused and live_migrate, but all 3 should be set to True "
"to run those tests"),
cfg.BoolOpt('vnc_console',
default=False,
help='Enable VNC console. This configuration value should '
'be same as [nova.vnc]->vnc_enabled in nova.conf'),
cfg.BoolOpt('spice_console',
default=False,
help='Enable Spice console. This configuration value should '
'be same as [nova.spice]->enabled in nova.conf'),
cfg.BoolOpt('rdp_console',
default=False,
help='Enable RDP console. This configuration value should '
'be same as [nova.rdp]->enabled in nova.conf'),
cfg.BoolOpt('rescue',
default=True,
help='Does the test environment support instance rescue '
'mode?'),
cfg.BoolOpt('enable_instance_password',
default=True,
help='Enables returning of the instance password by the '
'relevant server API calls such as create, rebuild '
'or rescue.'),
cfg.BoolOpt('interface_attach',
default=True,
help='Does the test environment support dynamic network '
'interface attachment?'),
cfg.BoolOpt('snapshot',
default=True,
help='Does the test environment support creating snapshot '
'images of running instances?'),
cfg.BoolOpt('ec2_api',
default=True,
help='Does the test environment have the ec2 api running?'),
# TODO(mriedem): Remove preserve_ports once juno-eol happens.
cfg.BoolOpt('preserve_ports',
default=False,
help='Does Nova preserve preexisting ports from Neutron '
'when deleting an instance? This should be set to True '
'if testing Kilo+ Nova.'),
cfg.BoolOpt('attach_encrypted_volume',
default=True,
help='Does the test environment support attaching an '
'encrypted volume to a running server instance? This may '
'depend on the combination of compute_driver in nova and '
'the volume_driver(s) in cinder.'),
# TODO(mriedem): Remove allow_duplicate_networks once kilo-eol happens
# since the option was removed from nova in Liberty and is the default
# behavior starting in Liberty.
cfg.BoolOpt('allow_duplicate_networks',
default=False,
help='Does the test environment support creating instances '
'with multiple ports on the same network? This is only '
'valid when using Neutron.'),
]
image_group = cfg.OptGroup(name='image',
title="Image Service Options")
ImageGroup = [
cfg.StrOpt('catalog_type',
default='image',
help='Catalog type of the Image service.'),
cfg.StrOpt('region',
default='',
help="The image region name to use. If empty, the value "
"of identity.region is used instead. If no such region "
"is found in the service catalog, the first found one is "
"used."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the image service."),
cfg.StrOpt('http_image',
default='http://download.cirros-cloud.net/0.3.1/'
'cirros-0.3.1-x86_64-uec.tar.gz',
help='http accessible image'),
cfg.IntOpt('build_timeout',
default=300,
help="Timeout in seconds to wait for an image to "
"become available."),
cfg.IntOpt('build_interval',
default=1,
help="Time in seconds between image operation status "
"checks.")
]
image_feature_group = cfg.OptGroup(name='image-feature-enabled',
title='Enabled image service features')
ImageFeaturesGroup = [
cfg.BoolOpt('api_v2',
default=True,
help="Is the v2 image API enabled"),
cfg.BoolOpt('api_v1',
default=True,
help="Is the v1 image API enabled"),
cfg.BoolOpt('deactivate_image',
default=False,
help="Is the deactivate-image feature enabled."
" The feature has been integrated since Kilo."),
]
network_group = cfg.OptGroup(name='network',
title='Network Service Options')
NetworkGroup = [
cfg.StrOpt('catalog_type',
default='network',
help='Catalog type of the Neutron service.'),
cfg.StrOpt('region',
default='',
help="The network region name to use. If empty, the value "
"of identity.region is used instead. If no such region "
"is found in the service catalog, the first found one is "
"used."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the network service."),
cfg.StrOpt('tenant_network_cidr',
default="10.100.0.0/16",
help="The cidr block to allocate tenant ipv4 subnets from"),
cfg.IntOpt('tenant_network_mask_bits',
default=28,
help="The mask bits for tenant ipv4 subnets"),
cfg.StrOpt('tenant_network_v6_cidr',
default="2003::/48",
help="The cidr block to allocate tenant ipv6 subnets from"),
cfg.IntOpt('tenant_network_v6_mask_bits',
default=64,
help="The mask bits for tenant ipv6 subnets"),
cfg.BoolOpt('tenant_networks_reachable',
default=False,
help="Whether tenant networks can be reached directly from "
"the test client. This must be set to True when the "
"'fixed' ssh_connect_method is selected."),
cfg.StrOpt('public_network_id',
default="",
help="Id of the public network that provides external "
"connectivity"),
cfg.StrOpt('floating_network_name',
help="Default floating network name. Used to allocate floating "
"IPs when neutron is enabled."),
cfg.StrOpt('public_router_id',
default="",
help="Id of the public router that provides external "
"connectivity. This should only be used when Neutron's "
"'allow_overlapping_ips' is set to 'False' in "
"neutron.conf. usually not needed past 'Grizzly' release"),
cfg.IntOpt('build_timeout',
default=300,
help="Timeout in seconds to wait for network operation to "
"complete."),
cfg.IntOpt('build_interval',
default=1,
help="Time in seconds between network operation status "
"checks."),
cfg.ListOpt('dns_servers',
default=["8.8.8.8", "8.8.4.4"],
help="List of dns servers which should be used"
" for subnet creation"),
cfg.StrOpt('port_vnic_type',
choices=[None, 'normal', 'direct', 'macvtap'],
help="vnic_type to use when Launching instances"
" with pre-configured ports."
" Supported ports are:"
" ['normal','direct','macvtap']"),
]
network_feature_group = cfg.OptGroup(name='network-feature-enabled',
title='Enabled network service features')
NetworkFeaturesGroup = [
cfg.BoolOpt('ipv6',
default=True,
help="Allow the execution of IPv6 tests"),
cfg.ListOpt('api_extensions',
default=['all'],
help="A list of enabled network extensions with a special "
"entry all which indicates every extension is enabled. "
"Empty list indicates all extensions are disabled. "
"To get the list of extensions run: 'neutron ext-list'"),
cfg.BoolOpt('ipv6_subnet_attributes',
default=False,
help="Allow the execution of IPv6 subnet tests that use "
"the extended IPv6 attributes ipv6_ra_mode "
"and ipv6_address_mode"
),
cfg.BoolOpt('port_admin_state_change',
default=True,
help="Does the test environment support changing"
" port admin state"),
]
messaging_group = cfg.OptGroup(name='messaging',
title='Messaging Service')
MessagingGroup = [
cfg.StrOpt('catalog_type',
default='messaging',
help='Catalog type of the Messaging service.'),
cfg.IntOpt('max_queues_per_page',
default=20,
help='The maximum number of queue records per page when '
'listing queues'),
cfg.IntOpt('max_queue_metadata',
default=65536,
help='The maximum metadata size for a queue'),
cfg.IntOpt('max_messages_per_page',
default=20,
help='The maximum number of queue message per page when '
'listing (or) posting messages'),
cfg.IntOpt('max_message_size',
default=262144,
help='The maximum size of a message body'),
cfg.IntOpt('max_messages_per_claim',
default=20,
help='The maximum number of messages per claim'),
cfg.IntOpt('max_message_ttl',
default=1209600,
help='The maximum ttl for a message'),
cfg.IntOpt('max_claim_ttl',
default=43200,
help='The maximum ttl for a claim'),
cfg.IntOpt('max_claim_grace',
default=43200,
help='The maximum grace period for a claim'),
]
validation_group = cfg.OptGroup(name='validation',
title='SSH Validation options')
ValidationGroup = [
cfg.BoolOpt('run_validation',
default=False,
help='Enable ssh on created servers and creation of additional'
' validation resources to enable remote access',
deprecated_opts=[cfg.DeprecatedOpt('run_ssh',
group='compute')]),
cfg.StrOpt('connect_method',
default='floating',
choices=['fixed', 'floating'],
help='Default IP type used for validation: '
'-fixed: uses the first IP belonging to the fixed network '
'-floating: creates and uses a floating IP'),
cfg.StrOpt('auth_method',
default='keypair',
choices=['keypair'],
help='Default authentication method to the instance. '
'Only ssh via keypair is supported for now. '
'Additional methods will be handled in a separate spec.'),
cfg.IntOpt('ip_version_for_ssh',
default=4,
help='Default IP version for ssh connections.',
deprecated_opts=[cfg.DeprecatedOpt('ip_version_for_ssh',
group='compute')]),
cfg.IntOpt('ping_timeout',
default=120,
help='Timeout in seconds to wait for ping to succeed.'),
cfg.IntOpt('connect_timeout',
default=60,
help='Timeout in seconds to wait for the TCP connection to be '
'successful.',
deprecated_opts=[cfg.DeprecatedOpt('ssh_channel_timeout',
group='compute')]),
cfg.IntOpt('ssh_timeout',
default=300,
help='Timeout in seconds to wait for the ssh banner.',
deprecated_opts=[cfg.DeprecatedOpt('ssh_timeout',
group='compute')]),
]
volume_group = cfg.OptGroup(name='volume',
title='Block Storage Options')
VolumeGroup = [
cfg.IntOpt('build_interval',
default=1,
help='Time in seconds between volume availability checks.'),
cfg.IntOpt('build_timeout',
default=300,
help='Timeout in seconds to wait for a volume to become '
'available.'),
cfg.StrOpt('catalog_type',
default='volume',
help="Catalog type of the Volume Service"),
cfg.StrOpt('region',
default='',
help="The volume region name to use. If empty, the value "
"of identity.region is used instead. If no such region "
"is found in the service catalog, the first found one is "
"used."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the volume service."),
cfg.StrOpt('backend1_name',
default='BACKEND_1',
help="Name of the backend1 (must be declared in cinder.conf)"),
cfg.StrOpt('backend2_name',
default='BACKEND_2',
help="Name of the backend2 (must be declared in cinder.conf)"),
cfg.StrOpt('storage_protocol',
default='iSCSI',
help='Backend protocol to target when creating volume types'),
cfg.StrOpt('vendor_name',
default='Open Source',
help='Backend vendor to target when creating volume types'),
cfg.StrOpt('disk_format',
default='raw',
help='Disk format to use when copying a volume to image'),
cfg.IntOpt('volume_size',
default=1,
help='Default size in GB for volumes created by volumes tests'),
]
volume_feature_group = cfg.OptGroup(name='volume-feature-enabled',
title='Enabled Cinder Features')
VolumeFeaturesGroup = [
cfg.BoolOpt('multi_backend',
default=False,
help="Runs Cinder multi-backend test (requires 2 backends)"),
cfg.BoolOpt('backup',
default=True,
help='Runs Cinder volumes backup test'),
cfg.BoolOpt('snapshot',
default=True,
help='Runs Cinder volume snapshot test'),
cfg.ListOpt('api_extensions',
default=['all'],
help='A list of enabled volume extensions with a special '
'entry all which indicates every extension is enabled. '
'Empty list indicates all extensions are disabled'),
cfg.BoolOpt('api_v1',
default=True,
help="Is the v1 volume API enabled"),
cfg.BoolOpt('api_v2',
default=True,
help="Is the v2 volume API enabled"),
cfg.BoolOpt('bootable',
default=False,
help='Update bootable status of a volume '
'Not implemented on icehouse ')
]
object_storage_group = cfg.OptGroup(name='object-storage',
title='Object Storage Service Options')
ObjectStoreGroup = [
cfg.StrOpt('catalog_type',
default='object-store',
help="Catalog type of the Object-Storage service."),
cfg.StrOpt('region',
default='',
help="The object-storage region name to use. If empty, the "
"value of identity.region is used instead. If no such "
"region is found in the service catalog, the first found "
"one is used."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the object-store service."),
cfg.IntOpt('container_sync_timeout',
default=600,
help="Number of seconds to time on waiting for a container "
"to container synchronization complete."),
cfg.IntOpt('container_sync_interval',
default=5,
help="Number of seconds to wait while looping to check the "
"status of a container to container synchronization"),
cfg.StrOpt('operator_role',
default='Member',
help="Role to add to users created for swift tests to "
"enable creating containers"),
cfg.StrOpt('reseller_admin_role',
default='ResellerAdmin',
help="User role that has reseller admin"),
cfg.StrOpt('realm_name',
default='realm1',
help="Name of sync realm. A sync realm is a set of clusters "
"that have agreed to allow container syncing with each "
"other. Set the same realm name as Swift's "
"container-sync-realms.conf"),
cfg.StrOpt('cluster_name',
default='name1',
help="One name of cluster which is set in the realm whose name "
"is set in 'realm_name' item in this file. Set the "
"same cluster name as Swift's container-sync-realms.conf"),
]
object_storage_feature_group = cfg.OptGroup(
name='object-storage-feature-enabled',
title='Enabled object-storage features')
ObjectStoreFeaturesGroup = [
cfg.ListOpt('discoverable_apis',
default=['all'],
help="A list of the enabled optional discoverable apis. "
"A single entry, all, indicates that all of these "
"features are expected to be enabled"),
cfg.BoolOpt('container_sync',
default=True,
help="Execute (old style) container-sync tests"),
cfg.BoolOpt('object_versioning',
default=True,
help="Execute object-versioning tests"),
cfg.BoolOpt('discoverability',
default=True,
help="Execute discoverability tests"),
]
database_group = cfg.OptGroup(name='database',
title='Database Service Options')
DatabaseGroup = [
cfg.StrOpt('catalog_type',
default='database',
help="Catalog type of the Database service."),
cfg.StrOpt('db_flavor_ref',
default="1",
help="Valid primary flavor to use in database tests."),
cfg.StrOpt('db_current_version',
default="v1.0",
help="Current database version to use in database tests."),
]
orchestration_group = cfg.OptGroup(name='orchestration',
title='Orchestration Service Options')
OrchestrationGroup = [
cfg.StrOpt('catalog_type',
default='orchestration',
help="Catalog type of the Orchestration service."),
cfg.StrOpt('region',
default='',
help="The orchestration region name to use. If empty, the "
"value of identity.region is used instead. If no such "
"region is found in the service catalog, the first found "
"one is used."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the orchestration service."),
cfg.StrOpt('stack_owner_role', default='heat_stack_owner',
help='Role required for users to be able to manage stacks'),
cfg.IntOpt('build_interval',
default=1,
help="Time in seconds between build status checks."),
cfg.IntOpt('build_timeout',
default=1200,
help="Timeout in seconds to wait for a stack to build."),
cfg.StrOpt('instance_type',
default='m1.micro',
help="Instance type for tests. Needs to be big enough for a "
"full OS plus the test workload"),
cfg.StrOpt('keypair_name',
help="Name of existing keypair to launch servers with."),
cfg.IntOpt('max_template_size',
default=524288,
help="Value must match heat configuration of the same name."),
cfg.IntOpt('max_resources_per_stack',
default=1000,
help="Value must match heat configuration of the same name."),
]
telemetry_group = cfg.OptGroup(name='telemetry',
title='Telemetry Service Options')
TelemetryGroup = [
cfg.StrOpt('catalog_type',
default='metering',
help="Catalog type of the Telemetry service."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the telemetry service."),
cfg.BoolOpt('too_slow_to_test',
default=True,
help="This variable is used as flag to enable "
"notification tests")
]
telemetry_feature_group = cfg.OptGroup(name='telemetry-feature-enabled',
title='Enabled Ceilometer Features')
TelemetryFeaturesGroup = [
cfg.BoolOpt('events',
default=False,
help="Runs Ceilometer event-related tests"),
]
dashboard_group = cfg.OptGroup(name="dashboard",
title="Dashboard options")
DashboardGroup = [
cfg.StrOpt('dashboard_url',
default='http://localhost/',
help="Where the dashboard can be found"),
cfg.StrOpt('login_url',
default='http://localhost/auth/login/',
help="Login page for the dashboard"),
]
data_processing_group = cfg.OptGroup(name="data_processing",
title="Data Processing options")
DataProcessingGroup = [
cfg.StrOpt('catalog_type',
default='data_processing',
help="Catalog type of the data processing service."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the data processing "
"service."),
]
data_processing_feature_group = cfg.OptGroup(
name="data_processing-feature-enabled",
title="Enabled Data Processing features")
DataProcessingFeaturesGroup = [
cfg.ListOpt('plugins',
default=["vanilla", "hdp"],
help="List of enabled data processing plugins")
]
boto_group = cfg.OptGroup(name='boto',
title='EC2/S3 options')
BotoGroup = [
cfg.StrOpt('ec2_url',
default="http://localhost:8773/services/Cloud",
help="EC2 URL"),
cfg.StrOpt('s3_url',
default="http://localhost:8080",
help="S3 URL"),
cfg.StrOpt('aws_secret',
help="AWS Secret Key",
secret=True),
cfg.StrOpt('aws_access',
help="AWS Access Key"),
cfg.StrOpt('aws_zone',
default="nova",
help="AWS Zone for EC2 tests"),
cfg.StrOpt('s3_materials_path',
default="/opt/stack/devstack/files/images/"
"s3-materials/cirros-0.3.0",
help="S3 Materials Path"),
cfg.StrOpt('ari_manifest',
default="cirros-0.3.0-x86_64-initrd.manifest.xml",
help="ARI Ramdisk Image manifest"),
cfg.StrOpt('ami_manifest',
default="cirros-0.3.0-x86_64-blank.img.manifest.xml",
help="AMI Machine Image manifest"),
cfg.StrOpt('aki_manifest',
default="cirros-0.3.0-x86_64-vmlinuz.manifest.xml",
help="AKI Kernel Image manifest"),
cfg.StrOpt('instance_type',
default="m1.tiny",
help="Instance type"),
cfg.IntOpt('http_socket_timeout',
default=3,
help="boto Http socket timeout"),
cfg.IntOpt('num_retries',
default=1,
help="boto num_retries on error"),
cfg.IntOpt('build_timeout',
default=60,
help="Status Change Timeout"),
cfg.IntOpt('build_interval',
default=1,
help="Status Change Test Interval"),
]
stress_group = cfg.OptGroup(name='stress', title='Stress Test Options')
StressGroup = [
cfg.StrOpt('nova_logdir',
help='Directory containing log files on the compute nodes'),
cfg.IntOpt('max_instances',
default=16,
help='Maximum number of instances to create during test.'),
cfg.StrOpt('controller',
help='Controller host.'),
# new stress options
cfg.StrOpt('target_controller',
help='Controller host.'),
cfg.StrOpt('target_ssh_user',
help='ssh user.'),
cfg.StrOpt('target_private_key_path',
help='Path to private key.'),
cfg.StrOpt('target_logfiles',
help='regexp for list of log files.'),
cfg.IntOpt('log_check_interval',
default=60,
help='time (in seconds) between log file error checks.'),
cfg.IntOpt('default_thread_number_per_action',
default=4,
help='The number of threads created while stress test.'),
cfg.BoolOpt('leave_dirty_stack',
default=False,
help='Prevent the cleaning (tearDownClass()) between'
' each stress test run if an exception occurs'
' during this run.'),
cfg.BoolOpt('full_clean_stack',
default=False,
help='Allows a full cleaning process after a stress test.'
' Caution : this cleanup will remove every objects of'
' every tenant.')
]
scenario_group = cfg.OptGroup(name='scenario', title='Scenario Test Options')
ScenarioGroup = [
cfg.StrOpt('img_dir',
default='/opt/stack/new/devstack/files/images/'
'cirros-0.3.1-x86_64-uec',
help='Directory containing image files'),
cfg.StrOpt('img_file', deprecated_name='qcow2_img_file',
default='cirros-0.3.1-x86_64-disk.img',
help='Image file name'),
cfg.StrOpt('img_disk_format',
default='qcow2',
help='Image disk format'),
cfg.StrOpt('img_container_format',
default='bare',
help='Image container format'),
cfg.DictOpt('img_properties', help='Glance image properties. '
'Use for custom images which require them'),
cfg.StrOpt('ami_img_file',
default='cirros-0.3.1-x86_64-blank.img',
help='AMI image file name'),
cfg.StrOpt('ari_img_file',
default='cirros-0.3.1-x86_64-initrd',
help='ARI image file name'),
cfg.StrOpt('aki_img_file',
default='cirros-0.3.1-x86_64-vmlinuz',
help='AKI image file name'),
cfg.StrOpt('ssh_user',
default='cirros',
help='ssh username for the image file'),
cfg.IntOpt(
'large_ops_number',
default=0,
help="specifies how many resources to request at once. Used "
"for large operations testing."),
# TODO(yfried): add support for dhcpcd
cfg.StrOpt('dhcp_client',
default='udhcpc',
choices=["udhcpc", "dhclient"],
help='DHCP client used by images to renew DHCP lease. '
'If left empty, update operation will be skipped. '
'Supported clients: "udhcpc", "dhclient"')
]
service_available_group = cfg.OptGroup(name="service_available",
title="Available OpenStack Services")
ServiceAvailableGroup = [
cfg.BoolOpt('cinder',
default=True,
help="Whether or not cinder is expected to be available"),
cfg.BoolOpt('neutron',
default=False,
help="Whether or not neutron is expected to be available"),
cfg.BoolOpt('glance',
default=True,
help="Whether or not glance is expected to be available"),
cfg.BoolOpt('swift',
default=True,
help="Whether or not swift is expected to be available"),
cfg.BoolOpt('nova',
default=True,
help="Whether or not nova is expected to be available"),
cfg.BoolOpt('heat',
default=False,
help="Whether or not Heat is expected to be available"),
cfg.BoolOpt('ceilometer',
default=True,
help="Whether or not Ceilometer is expected to be available"),
cfg.BoolOpt('horizon',
default=True,
help="Whether or not Horizon is expected to be available"),
cfg.BoolOpt('sahara',
default=False,
help="Whether or not Sahara is expected to be available"),
cfg.BoolOpt('ironic',
default=False,
help="Whether or not Ironic is expected to be available"),
cfg.BoolOpt('trove',
default=False,
help="Whether or not Trove is expected to be available"),
cfg.BoolOpt('zaqar',
default=False,
help="Whether or not Zaqar is expected to be available"),
]
debug_group = cfg.OptGroup(name="debug",
title="Debug System")
DebugGroup = [
cfg.StrOpt('trace_requests',
default='',
help="""A regex to determine which requests should be traced.
This is a regex to match the caller for rest client requests to be able to
selectively trace calls out of specific classes and methods. It largely
exists for test development, and is not expected to be used in a real deploy
of tempest. This will be matched against the discovered ClassName:method
in the test environment.
Expected values for this field are:
* ClassName:test_method_name - traces one test_method
* ClassName:setUp(Class) - traces specific setup functions
* ClassName:tearDown(Class) - traces specific teardown functions
* ClassName:_run_cleanups - traces the cleanup functions
If nothing is specified, this feature is not enabled. To trace everything
specify .* as the regex.
""")
]
input_scenario_group = cfg.OptGroup(name="input-scenario",
title="Filters and values for"
" input scenarios")
InputScenarioGroup = [
cfg.StrOpt('image_regex',
default='^cirros-0.3.1-x86_64-uec$',
help="Matching images become parameters for scenario tests"),
cfg.StrOpt('flavor_regex',
default='^m1.nano$',
help="Matching flavors become parameters for scenario tests"),
cfg.StrOpt('non_ssh_image_regex',
default='^.*[Ww]in.*$',
help="SSH verification in tests is skipped"
"for matching images"),
cfg.StrOpt('ssh_user_regex',
default="[[\"^.*[Cc]irros.*$\", \"cirros\"]]",
help="List of user mapped to regex "
"to matching image names."),
]
baremetal_group = cfg.OptGroup(name='baremetal',
title='Baremetal provisioning service options',
help='When enabling baremetal tests, Nova '
'must be configured to use the Ironic '
'driver. The following parameters for the '
'[compute] section must be disabled: '
'console_output, interface_attach, '
'live_migration, pause, rescue, resize, '
'shelve, snapshot, and suspend')
BaremetalGroup = [
cfg.StrOpt('catalog_type',
default='baremetal',
help="Catalog type of the baremetal provisioning service"),
cfg.BoolOpt('driver_enabled',
default=False,
help="Whether the Ironic nova-compute driver is enabled"),
cfg.StrOpt('driver',
default='fake',
help="Driver name which Ironic uses"),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the baremetal provisioning "
"service"),
cfg.IntOpt('active_timeout',
default=300,
help="Timeout for Ironic node to completely provision"),
cfg.IntOpt('association_timeout',
default=30,
help="Timeout for association of Nova instance and Ironic "
"node"),
cfg.IntOpt('power_timeout',
default=60,
help="Timeout for Ironic power transitions."),
cfg.IntOpt('unprovision_timeout',
default=300,
help="Timeout for unprovisioning an Ironic node. "
"Takes longer since Kilo as Ironic performs an extra "
"step in Node cleaning.")
]
negative_group = cfg.OptGroup(name='negative', title="Negative Test Options")
NegativeGroup = [
cfg.StrOpt('test_generator',
default='tempest.common.' +
'generator.negative_generator.NegativeTestGenerator',
help="Test generator class for all negative tests"),
]
DefaultGroup = [
cfg.StrOpt('resources_prefix',
default='tempest',
help="Prefix to be added when generating the name for "
"test resources. It can be used to discover all "
"resources associated with a specific test run when "
"running tempest on a real-life cloud"),
]
_opts = [
(auth_group, AuthGroup),
(compute_group, ComputeGroup),
(compute_features_group, ComputeFeaturesGroup),
(identity_group, IdentityGroup),
(identity_feature_group, IdentityFeatureGroup),
(image_group, ImageGroup),
(image_feature_group, ImageFeaturesGroup),
(network_group, NetworkGroup),
(network_feature_group, NetworkFeaturesGroup),
(messaging_group, MessagingGroup),
(validation_group, ValidationGroup),
(volume_group, VolumeGroup),
(volume_feature_group, VolumeFeaturesGroup),
(object_storage_group, ObjectStoreGroup),
(object_storage_feature_group, ObjectStoreFeaturesGroup),
(database_group, DatabaseGroup),
(orchestration_group, OrchestrationGroup),
(telemetry_group, TelemetryGroup),
(telemetry_feature_group, TelemetryFeaturesGroup),
(dashboard_group, DashboardGroup),
(data_processing_group, DataProcessingGroup),
(data_processing_feature_group, DataProcessingFeaturesGroup),
(boto_group, BotoGroup),
(stress_group, StressGroup),
(scenario_group, ScenarioGroup),
(service_available_group, ServiceAvailableGroup),
(debug_group, DebugGroup),
(baremetal_group, BaremetalGroup),
(input_scenario_group, InputScenarioGroup),
(negative_group, NegativeGroup),
(None, DefaultGroup)
]
def register_opts():
ext_plugins = plugins.TempestTestPluginManager()
# Register in-tree tempest config options
for g, o in _opts:
register_opt_group(_CONF, g, o)
# Call external plugin config option registration
ext_plugins.register_plugin_opts(_CONF)
def list_opts():
"""Return a list of oslo.config options available.
The purpose of this is to allow tools like the Oslo sample config file
generator to discover the options exposed to users.
"""
return [(getattr(g, 'name', None), o) for g, o in _opts]
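# A sample-config generator would normally discover list_opts() through an
# oslo.config.opts entry point; a sketch of the corresponding setup.cfg stanza
# (assumed here, it is not defined in this file):
#
#   [entry_points]
#   oslo.config.opts =
#       tempest.config = tempest.config:list_opts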
# this should never be called outside of this class
class TempestConfigPrivate(object):
"""Provides OpenStack configuration information."""
DEFAULT_CONFIG_DIR = os.path.join(
os.path.abspath(os.path.dirname(os.path.dirname(__file__))),
"etc")
DEFAULT_CONFIG_FILE = "tempest.conf"
def __getattr__(self, attr):
# Handles config options from the default group
return getattr(_CONF, attr)
def _set_attrs(self):
self.auth = _CONF.auth
self.compute = _CONF.compute
self.compute_feature_enabled = _CONF['compute-feature-enabled']
self.identity = _CONF.identity
self.identity_feature_enabled = _CONF['identity-feature-enabled']
self.image = _CONF.image
self.image_feature_enabled = _CONF['image-feature-enabled']
self.network = _CONF.network
self.network_feature_enabled = _CONF['network-feature-enabled']
self.validation = _CONF.validation
self.volume = _CONF.volume
self.volume_feature_enabled = _CONF['volume-feature-enabled']
self.object_storage = _CONF['object-storage']
self.object_storage_feature_enabled = _CONF[
'object-storage-feature-enabled']
self.database = _CONF.database
self.orchestration = _CONF.orchestration
self.messaging = _CONF.messaging
self.telemetry = _CONF.telemetry
self.telemetry_feature_enabled = _CONF['telemetry-feature-enabled']
self.dashboard = _CONF.dashboard
self.data_processing = _CONF.data_processing
self.data_processing_feature_enabled = _CONF[
'data_processing-feature-enabled']
self.boto = _CONF.boto
self.stress = _CONF.stress
self.scenario = _CONF.scenario
self.service_available = _CONF.service_available
self.debug = _CONF.debug
self.baremetal = _CONF.baremetal
self.input_scenario = _CONF['input-scenario']
self.negative = _CONF.negative
_CONF.set_default('domain_name',
self.auth.default_credentials_domain_name,
group='identity')
_CONF.set_default('alt_domain_name',
self.auth.default_credentials_domain_name,
group='identity')
def __init__(self, parse_conf=True, config_path=None):
"""Initialize a configuration from a conf directory and conf file."""
super(TempestConfigPrivate, self).__init__()
config_files = []
failsafe_path = "/etc/tempest/" + self.DEFAULT_CONFIG_FILE
if config_path:
path = config_path
else:
# Environment variables override defaults...
conf_dir = os.environ.get('TEMPEST_CONFIG_DIR',
self.DEFAULT_CONFIG_DIR)
conf_file = os.environ.get('TEMPEST_CONFIG',
self.DEFAULT_CONFIG_FILE)
path = os.path.join(conf_dir, conf_file)
if not os.path.isfile(path):
path = failsafe_path
# only parse the config file if we expect one to exist. This is needed
# to remove an issue with the config file up to date checker.
if parse_conf:
config_files.append(path)
logging.register_options(_CONF)
if os.path.isfile(path):
_CONF([], project='tempest', default_config_files=config_files)
else:
_CONF([], project='tempest')
logging.setup(_CONF, 'tempest')
LOG = logging.getLogger('tempest')
LOG.info("Using tempest config file %s" % path)
register_opts()
self._set_attrs()
if parse_conf:
_CONF.log_opt_values(LOG, std_logging.DEBUG)
class TempestConfigProxy(object):
_config = None
_path = None
_extra_log_defaults = [
('paramiko.transport', std_logging.INFO),
('requests.packages.urllib3.connectionpool', std_logging.WARN),
]
def _fix_log_levels(self):
"""Tweak the oslo log defaults."""
for name, level in self._extra_log_defaults:
std_logging.getLogger(name).setLevel(level)
def __getattr__(self, attr):
if not self._config:
self._fix_log_levels()
self._config = TempestConfigPrivate(config_path=self._path)
return getattr(self._config, attr)
def set_config_path(self, path):
self._path = path
CONF = TempestConfigProxy()
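# Typical consumption elsewhere in the test code (an illustrative sketch, not
# taken from this file): the proxy lazily parses the config file on first
# attribute access, so tests simply import and read CONF.
#
#   from tempest import config
#
#   CONF = config.CONF
#   flavor = CONF.compute.flavor_ref
#   if not CONF.service_available.neutron:
#       ...  # skip the network-dependent tests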
# ---- cdiazbas/MPySIR :: setup.py (MIT) ----
###############################################################
# MPySIR: MPI python script for SIR
#
# CALL: mpirun -n 10 python setup.py
##############################################################
"""
# Author: [email protected]
# Date: 09.06.2015
# Version: 1.7
"""
"""
Required files:
====================
imagen.npy / imagen.fits
xLambda.npy
SIRMODE:
====================
1.- 'perPixel'
2.- 'beforePixel'
3.- 'gammaV'
4.- 'medianFilter'
5.- 'addFullProfile'
6.- 'gammVaddFullProfile'
"""
# ================================================= TIME
import time; start_time = time.time()
# ================================================= LIBRARIES
import numpy as np
from mpi4py import MPI
import os
from sirutils import *
import pySir.sirtools as sirtools
import pyLib.imtools as imtools
# ================================================= MPI INIT - CLEAN
comm = MPI.COMM_WORLD
widthT = 1
if comm.rank == 0:
(widthT, heightT) = getTerminalSize()
print('-'*widthT)
print('Running on %d cores' % comm.size)
print('-'*widthT)
try:
pncore()
except:
pass
from clean import clean; clean()
comm.Barrier()
# We prepare each node directory with all the necessary files
os.system('cp -R invDefault node'+str(comm.rank))
print('Node '+str(comm.rank)+' prepared.')
comm.Barrier()
# ================================================= INPUT
imagefits = 'mapa34B2.npy'
dictLines = {'SiI':300} # Line Number in LINEAS file
rango = range(0,120) # Where the spectral line is
modeloInit = 'hsraB.mod'
modeloFin = 'hsraB_3.mod'
sirmode = 'gammVaddFullProfile'
chi2map = True
lambdaRef = 10827.110
# ================================================= LOAD DATA
xlambda = np.load('xLambdaBin.npy')
x = (xlambda[rango] - lambdaRef)*1e3 # Wavelength in mA (milli-Angstrom)
FileName = imagefits
imagen = np.load(imagefits)
#imagen = np.copy(imagen1[:,:,48:50,:])
height, nStokes, width, nLambdas = imagen.shape
pprint((height, nStokes, width, nLambdas))
divImage = height % comm.size
if divImage == 0:
pprint('Height is exactly divisible')
else :
pprint('Not divisible')
import sys
sys.exit()
myHeight = int(height / comm.size)
pprint('==> Divided into '+str(comm.size)+' chunks of height '+str(myHeight)+'.')
myPart = np.copy(imagen[myHeight*comm.rank:myHeight*comm.rank+myHeight,:,:,:])
print(myPart.shape)
comm.Barrier()
pprint('==> Data loaded ..... {0:2.3f} s'.format(time.time() - start_time))
pprint('-'*widthT)
# ================================================= INVERSION
curr = os.getcwd() # Current path
os.chdir(curr+'/node'+str(comm.rank)) # enter each node folder
# # os.system('mkdir node_'+str(comm.rank))
comm.Barrier()
pprint('==> Inside! ..... {0:2.3f} s'.format(time.time() - start_time))
import platform; _platform = platform.system() # We execute SIR according to the OS:
if _platform == "Linux": # Linux OS
sirfile = 'sir.x'
pprint('Platform: Linux OS')
elif _platform == "Darwin": # MAC OS X
sirfile = './sir_mac.x'
pprint('Platform: Mac OS X')
#from cjd_pysir.sirtools import lmodel8
#tau, magnitudes = lmodel8(modeloInit,verbose=False)
#comm.Barrier()
resultadoSir = []
totalPixel = myHeight*width
if comm.rank == 0: print('... {0:4.2f} % ...'.format(0./totalPixel*100))
for fila in range(0,myHeight):
for columna in range(0,width):
mapa = myPart[fila,:,columna,:]
stokes = [mapa[0,rango],mapa[1,rango],mapa[2,rango],mapa[3,rango]]
sirtools.wperfil('data.per',dictLines['SiI'],x,stokes)
sirexe(fila,columna,myHeight,comm.rank,sirfile, modeloFin, resultadoSir, sirmode, chi2map)
        # plotper()  # Plots the Stokes profiles
        # plotmfit() # Plots the fitted models
actualPixel = (fila+1)*(width)
if comm.rank == 0: print('... {0:4.2f} % ...'.format(float(actualPixel)/float(totalPixel)*100.))
#print('Rank {0} at time {1:2.3f} s'.format(comm.rank,time.time() - start_time))
comm.Barrier()
pprint('==> Inversion finished ..... {0:2.3f} s'.format(time.time() - start_time))
pprint('-'*widthT)
# ================================================= SAVING RESULTS
resultadoSir = np.array(resultadoSir, dtype=object)
np.save('modelos.npy', resultadoSir)
if comm.rank != 0:
comm.send(resultadoSir, dest = 0 , tag = 0)
else:
finalSir = []
finalSir.append(resultadoSir)
for nodei in range(1, comm.size):
vari = comm.recv(source = nodei, tag = 0)
finalSir.append(vari)
os.chdir(curr)
np.save('finalSir.npy', finalSir)
comm.Barrier()
pprint('==> MPySIR <== ..... {0:2.3f} s'.format(time.time() - start_time))
pprint('-'*widthT)
| mit | -7,876,671,923,221,983,000 | 25.934524 | 97 | 0.603536 | false | 2.878499 | false | false | false |
AndiDog/git-cola | cola/settings.py | 1 | 6963 | """Save settings, bookmarks, etc.
"""
from __future__ import division, absolute_import, unicode_literals
import json
import os
import sys
from . import core
from . import git
from . import resources
def mkdict(obj):
"""Transform None and non-dicts into dicts"""
if isinstance(obj, dict):
value = obj
else:
value = {}
return value
def mklist(obj):
"""Transform None and non-lists into lists"""
if isinstance(obj, list):
value = obj
elif isinstance(obj, tuple):
value = list(obj)
else:
value = []
return value
def read_json(path):
try:
with core.xopen(path, 'rt') as fp:
return mkdict(json.load(fp))
except (ValueError, TypeError, OSError, IOError): # bad path or json
return {}
def write_json(values, path):
try:
parent = os.path.dirname(path)
if not core.isdir(parent):
core.makedirs(parent)
with core.xopen(path, 'wt') as fp:
json.dump(values, fp, indent=4)
except (ValueError, TypeError, OSError, IOError):
sys.stderr.write('git-cola: error writing "%s"\n' % path)
class Settings(object):
config_path = resources.config_home('settings')
bookmarks = property(lambda self: mklist(self.values['bookmarks']))
gui_state = property(lambda self: mkdict(self.values['gui_state']))
recent = property(lambda self: mklist(self.values['recent']))
copy_formats = property(lambda self: mklist(self.values['copy_formats']))
def __init__(self, verify=git.is_git_worktree):
"""Load existing settings if they exist"""
self.values = {
'bookmarks': [],
'gui_state': {},
'recent': [],
'copy_formats': [],
}
self.verify = verify
def remove_missing(self):
missing_bookmarks = []
missing_recent = []
for bookmark in self.bookmarks:
if not self.verify(bookmark['path']):
missing_bookmarks.append(bookmark)
for bookmark in missing_bookmarks:
try:
self.bookmarks.remove(bookmark)
except ValueError:
pass
for recent in self.recent:
if not self.verify(recent['path']):
missing_recent.append(recent)
for recent in missing_recent:
try:
self.recent.remove(recent)
except ValueError:
pass
def add_bookmark(self, path, name):
"""Adds a bookmark to the saved settings"""
bookmark = {'path': path, 'name': name}
if bookmark not in self.bookmarks:
self.bookmarks.append(bookmark)
def remove_bookmark(self, path, name):
"""Remove a bookmark"""
bookmark = {'path': path, 'name': name}
try:
self.bookmarks.remove(bookmark)
except ValueError:
pass
def rename_bookmark(self, path, name, new_name):
return rename_entry(self.bookmarks, path, name, new_name)
def add_recent(self, path, max_recent):
try:
index = [recent['path'] for recent in self.recent].index(path)
entry = self.recent.pop(index)
except (IndexError, ValueError):
entry = {
'name': os.path.basename(path),
'path': path,
}
self.recent.insert(0, entry)
if len(self.recent) > max_recent:
self.recent.pop()
def remove_recent(self, path):
"""Removes an item from the recent items list"""
try:
index = [
recent.get('path', '') for recent in self.recent
].index(path)
except ValueError:
return
try:
self.recent.pop(index)
except IndexError:
return
def rename_recent(self, path, name, new_name):
return rename_entry(self.recent, path, name, new_name)
def path(self):
return self.config_path
def save(self):
write_json(self.values, self.path())
def load(self):
self.values.update(self.asdict())
self.upgrade_settings()
self.remove_missing()
def upgrade_settings(self):
"""Upgrade git-cola settings"""
# Upgrade bookmarks to the new dict-based bookmarks format.
if self.bookmarks and not isinstance(self.bookmarks[0], dict):
bookmarks = [dict(name=os.path.basename(path), path=path)
for path in self.bookmarks]
self.values['bookmarks'] = bookmarks
if self.recent and not isinstance(self.recent[0], dict):
recent = [dict(name=os.path.basename(path), path=path)
for path in self.recent]
self.values['recent'] = recent
def asdict(self):
path = self.path()
if core.exists(path):
return read_json(path)
# We couldn't find ~/.config/git-cola, try ~/.cola
values = {}
path = os.path.join(core.expanduser('~'), '.cola')
if core.exists(path):
json_values = read_json(path)
# Keep only the entries we care about
for key in self.values:
try:
values[key] = json_values[key]
except KeyError:
pass
return values
def reload_recent(self):
values = self.asdict()
self.values['recent'] = mklist(values.get('recent', []))
def save_gui_state(self, gui):
"""Saves settings for a cola view"""
name = gui.name()
self.gui_state[name] = mkdict(gui.export_state())
self.save()
def get_gui_state(self, gui):
"""Returns the saved state for a gui"""
try:
state = mkdict(self.gui_state[gui.name()])
except KeyError:
state = self.gui_state[gui.name()] = {}
return state
def rename_entry(entries, path, name, new_name):
entry = {'name': name, 'path': path}
try:
index = entries.index(entry)
except ValueError:
return False
if all([item['name'] != new_name for item in entries]):
entries[index]['name'] = new_name
return True
return False
class Session(Settings):
"""Store per-session settings"""
_sessions_dir = resources.config_home('sessions')
repo = property(lambda self: self.values['repo'])
def __init__(self, session_id, repo=None):
Settings.__init__(self)
self.session_id = session_id
self.values.update({'repo': repo})
def path(self):
return os.path.join(self._sessions_dir, self.session_id)
def load(self):
path = self.path()
if core.exists(path):
self.values.update(read_json(path))
try:
os.unlink(path)
except (OSError, ValueError):
pass
return True
return False
| gpl-2.0 | 378,139,903,769,501,250 | 28.379747 | 77 | 0.559242 | false | 4.127445 | false | false | false |
davidblewett/gunny | twisted/plugins/cox.py | 1 | 1717 |
from zope.interface import implements
from twisted.application.internet import UNIXClient
from twisted.plugin import IPlugin
from twisted.python import log
from twisted.python import usage
from twisted.application.service import IServiceMaker
from autobahn.wamp import WampClientFactory
from gunny.reveille.command import ReveilleClientProtocol
from gunny.reveille.service import CoxswainService
class EnqueueOptions(usage.Options):
optParameters = [
['file', 'f', None, None],
]
def __init__(self):
usage.Options.__init__(self)
self['files'] = []
def opt_file(self, fname):
self['files'].append(fname)
opt_f = opt_file
class ToggleOptions(usage.Options):
optParameters = []
class Options(usage.Options):
subCommands = [
['enqueue', None, EnqueueOptions, "Queue File(s)"],
['toggle', None, ToggleOptions, "Toggle play/pause"],
]
optParameters = [
["host", "h", '127.0.0.1', "The host to connect to."],
["port", "p", 9876, "The port number to connect to.", int],
]
def parseArgs(self):
pass
class CoxswainMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "coxswaind"
description = "Play tracks from server."
options = Options
def makeService(self, options):
"""
Construct a UNIXClient from a factory defined in myproject.
"""
#log.startLogging(sys.stderr)
return UNIXClient('/tmp/rcp.sock')
# Now construct an object which *provides* the relevant interfaces
# The name of this variable is irrelevant, as long as there is *some*
# name bound to a provider of IPlugin and IServiceMaker.
coxswain = CoxswainMaker()
| bsd-2-clause | -7,284,582,898,553,781,000 | 24.626866 | 69 | 0.672685 | false | 3.732609 | false | false | false |
BlueBrain/deap | deap/tools/selIBEA.py | 1 | 5350 | """IBEA selector"""
# This file is part of DEAP.
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.
# The code in this file was original written in 2015 at the
# BlueBrain Project, EPFL, Lausanne
# The authors were Werner Van Geit, Michael Gevaert and Jean-Denis Courcol
# It is based on a C implementation of the IBEA algorithm in the PISA
# optimization framework developed at the ETH, Zurich
# http://www.tik.ee.ethz.ch/pisa/selectors/ibea/?page=ibea.php
from __future__ import division
import numpy as numpy
import itertools
import random
def selIBEA(population, mu, alpha=None, kappa=.05, tournament_n=4):
"""IBEA Selector"""
if alpha is None:
alpha = len(population)
# Put all the objectives of all individuals in a matrix
# DEAP selector are supposed to maximise the objective values
# We take the negative objectives because this algorithm will minimise
population_matrix = [
[-x for x in individual.fitness.wvalues] for individual in population]
# Calculate minimal square bounding box of the objectives
min_box_bounds, max_box_bounds = _calc_box_bounds(population_matrix)
# Calculate a matrix with the fitness components of every individual
components = _calc_fitness_components(
population_matrix,
min_box_bounds,
max_box_bounds,
kappa=kappa)
# Calculate the fitness values
_calc_fitnesses(population, components)
# Do the environmental selection
population[:] = _environmental_selection(population, alpha)
# Select the parents in a tournament
parents = _mating_selection(population, mu, tournament_n)
return parents
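# Illustrative usage sketch (not part of the original module): selIBEA can be
# registered as the selection operator of a DEAP toolbox. The toolbox and
# population names below are assumptions made for this example only.
#
#   from deap import base
#   toolbox = base.Toolbox()
#   toolbox.register("select", selIBEA, kappa=0.05, tournament_n=4)
#   parents = toolbox.select(population, mu=len(population))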
def _calc_box_bounds(population_matrix):
"""Calculate the minimal square bounding box of the objectives"""
# Calculate the min/max over the columns
min_bounds = list(numpy.min(population_matrix, axis=0))
max_bounds = list(numpy.max(population_matrix, axis=0))
# Return, parse to a list (indicators need lists, not numpy arrays)
return list(min_bounds), list(max_bounds)
def _calc_fitness_components(
population_matrix,
min_box_bounds,
max_box_bounds,
kappa=None):
"""returns an N * N numpy array of doubles, which is their IBEA fitness """
# Population size is the number of rows in the population_matrix
pop_size = len(population_matrix)
components_matrix = numpy.zeros((pop_size, pop_size))
# pylint: disable=F0401, E0611
import eps
# pylint: enable=F0401, E0611
# Calculator the indicator value for every element in the matrix
# The code inside this for loop is (has to be) heavily optimised for speed
for i in xrange(0, pop_size):
ind1 = population_matrix[i]
for j in itertools.chain(xrange(0, i), xrange(i + 1, pop_size)):
ind2 = population_matrix[j]
components_matrix[i, j] = eps.indicator(ind1,
ind2,
min_box_bounds,
max_box_bounds)
# Calculate max of absolute value of all elements in matrix
max_absolute_indicator = numpy.max(abs(components_matrix))
# Normalisation
components_matrix = \
numpy.exp(numpy.multiply(components_matrix,
-1.0 / (kappa * max_absolute_indicator)))
return components_matrix
def _calc_fitnesses(population, components):
"""Calculate the IBEA fitness of every individual"""
# Calculate sum of every column in the matrix, ignore diagonal elements
column_sums = numpy.sum(components, axis=0) - numpy.diagonal(components)
# Fill the 'ibea_fitness' field on the individuals with the fitness value
for individual, ibea_fitness in zip(population, column_sums):
individual.ibea_fitness = ibea_fitness
def _mating_selection(population, mu, tournament_n):
"""Returns the n_of_parents individuals with the best fitness"""
parents = []
for _ in xrange(mu):
# Pick individuals for tournament
tournament = [random.choice(population) for _ in range(tournament_n)]
# Sort according to fitness
tournament.sort(key=lambda ind: ind.ibea_fitness)
# Winner is element with smallest fitness
parents.append(tournament[0])
return parents
def _environmental_selection(population, selection_size):
"""Returns the selection_size individuals with the best fitness"""
# Sort the individuals based on their fitness
population.sort(key=lambda ind: ind.ibea_fitness)
# Return the first 'selection_size' elements
return population[:selection_size]
__all__ = ['selIBEA']
| lgpl-3.0 | -2,592,904,637,388,376,000 | 34.666667 | 79 | 0.674766 | false | 3.933824 | false | false | false |
spolight/spolight_service | teaminformation/teaminfo/teaminfo/spiders/spider_teaminfodetail.py | 1 | 1190 | import scrapy
from scrapy_splash import SplashRequest
class DmozSpider(scrapy.Spider):
name = "teaminfodetail"
#allowed_domains = ["dmoz.org"]
start_urls = [
"http://www.nowgoal.com/detail/1245738.html"
]
def start_requests(self):
for url in self.start_urls:
yield SplashRequest(url, self.parse, args = {'wait': 0.5})
def parse(self, response):
"""
response.xpath('//tr/td[@style="text-align:right;"]/a/text()').extract()
print response.xpath('//tr/td[@style="text-align:right;"]/a/text()').extract()
"""
print response.xpath('//div[@id="home"]/a/span[@class="name"]/text()').extract() #hometeam
print response.xpath('//div[@id="guest"]/a/span[@class="name"]/text()').extract() #guestteam
        print response.xpath('//span[@class="b t15"]/text()').extract() #hometeam score (needs to be separated)
print response.xpath('//div[@id="matchData"]/div[7]/table[@class="bhTable"]/tbody/tr/td/text()').extract()
"""
filename = response.url.split("/")[-2] + '2.html'
with open(filename, 'wb') as f:
f.write(response.body)
"""
| gpl-3.0 | -5,053,716,850,511,357,000 | 38.666667 | 114 | 0.581513 | false | 3.4 | false | false | false |
bally12345/enigma2 | lib/python/Components/MediaPlayer.py | 1 | 3417 | from MenuList import MenuList
from Tools.Directories import SCOPE_CURRENT_SKIN, resolveFilename
from os import path
from enigma import eListboxPythonMultiContent, RT_VALIGN_CENTER, gFont, eServiceCenter
from Tools.LoadPixmap import LoadPixmap
STATE_PLAY = 0
STATE_PAUSE = 1
STATE_STOP = 2
STATE_REWIND = 3
STATE_FORWARD = 4
STATE_NONE = 5
class PlayList(MenuList):
def __init__(self, enableWrapAround = False):
MenuList.__init__(self, [], enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", 18))
self.l.setItemHeight(23)
self.currPlaying = -1
self.oldCurrPlaying = -1
self.serviceHandler = eServiceCenter.getInstance()
self.icons = [
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_mp_play.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_mp_pause.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_mp_stop.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_mp_rewind.png")),
LoadPixmap(path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_mp_forward.png")),
]
def PlaylistEntryComponent(self, serviceref, state):
res = [ serviceref ]
text = serviceref.getName()
		if text == "":
text = path.split(serviceref.getPath().split('/')[-1])[1]
res.append((eListboxPythonMultiContent.TYPE_TEXT,25, 1, 470, 22, 0, RT_VALIGN_CENTER, text))
try:
png = self.icons[state]
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 5, 3, 16, 16, png))
except:
pass
return res
def clear(self):
del self.list[:]
self.l.setList(self.list)
self.currPlaying = -1
self.oldCurrPlaying = -1
def getSelection(self):
return self.l.getCurrentSelection() and self.l.getCurrentSelection()[0]
def addFile(self, serviceref):
self.list.append(self.PlaylistEntryComponent(serviceref, STATE_NONE))
def updateFile(self, index, newserviceref):
if index < len(self.list):
self.list[index] = self.PlaylistEntryComponent(newserviceref, STATE_NONE)
def deleteFile(self, index):
if self.currPlaying >= index:
self.currPlaying -= 1
del self.list[index]
def setCurrentPlaying(self, index):
self.oldCurrPlaying = self.currPlaying
self.currPlaying = index
self.moveToIndex(index)
def updateState(self, state):
if len(self.list) > self.oldCurrPlaying and self.oldCurrPlaying != -1:
self.list[self.oldCurrPlaying] = self.PlaylistEntryComponent(self.list[self.oldCurrPlaying][0], STATE_NONE)
if self.currPlaying != -1 and self.currPlaying < len(self.list):
self.list[self.currPlaying] = self.PlaylistEntryComponent(self.list[self.currPlaying][0], state)
self.updateList()
def playFile(self):
self.updateState(STATE_PLAY)
def pauseFile(self):
self.updateState(STATE_PAUSE)
def stopFile(self):
self.updateState(STATE_STOP)
def rewindFile(self):
self.updateState(STATE_REWIND)
def forwardFile(self):
self.updateState(STATE_FORWARD)
def updateList(self):
self.l.setList(self.list)
def getCurrentIndex(self):
return self.currPlaying
def getCurrentEvent(self):
l = self.l.getCurrentSelection()
return l and self.serviceHandler.info(l[0]).getEvent(l[0])
def getCurrent(self):
l = self.l.getCurrentSelection()
return l and l[0]
def getServiceRefList(self):
return [ x[0] for x in self.list ]
def __len__(self):
return len(self.list)
| gpl-2.0 | -4,197,196,912,761,128,000 | 29.508929 | 110 | 0.733977 | false | 3.002636 | true | false | false |
crccheck/atx-bandc | bandc/apps/agenda/feeds.py | 1 | 1276 | from django.contrib.syndication.views import Feed
from django.urls import reverse
from .models import BandC, Document
RSS_SIZE = 100
class BandCDocumentFeed(Feed):
description_template = "agenda/document_description.html"
def get_object(self, request, slug):
self.slug = slug
if slug == "all":
return None
return BandC.objects.get(slug=slug)
def title(self, obj):
return str(obj) if obj else "City of Austin Boards and Commissions - All"
def link(self, obj):
return reverse("feed", kwargs={"slug": getattr(obj, "slug", "all")})
def description(self, obj):
if obj:
return obj.description
return "Meeting activity of Austin Boards and Commissions."
def items(self, obj):
queryset = (
Document.objects.filter(active=True)
.select_related("meeting__bandc")
.order_by("-scraped_at")
)
if obj:
queryset = queryset.filter(meeting__bandc=obj)
return queryset[:RSS_SIZE]
def item_pubdate(self, item):
return item.scraped_at
def item_title(self, item):
if self.slug == "all":
return "{} - {}".format(item.meeting.bandc, item)
return str(item)
| bsd-3-clause | -4,029,099,297,184,972,300 | 25.040816 | 81 | 0.604232 | false | 3.866667 | false | false | false |
inkton/nester | setup.py | 1 | 2206 | #!/usr/bin/env python
#
# Copyright (C) 2016 Inkton <[email protected]>
#
# This file is part of nester
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#from distutils.core import setup
from setuptools import setup
# import nester specifics
from nester.variables import __version__
if __name__ == '__main__':
setup(name='nester',
version='%s' % __version__,
description='Nester Shell',
long_description="""Nester <long-description>.""",
author='rajitha wijayaratne',
author_email='[email protected]',
maintainer='rajitha wijayaratne',
maintainer_email='[email protected]',
keywords=['nest', 'api', 'cli', 'python'],
url='https://nester.yt',
license='GPL',
platforms='UNIX',
scripts=['bin/nester'],
package_dir={'nester': 'nester'},
packages=['nester', 'nester.api'],
data_files=[('/etc', ['etc/nester.conf']),
('share/doc/nester', ['README.md']),
('share/man/man1/', ['man/nester.1'])],
classifiers=[
'Development Status :: 5 - Production/Stable',
            'Environment :: Console',
'Intended Audience :: Advanced End Users',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License v3',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: System Shells',
'Topic :: Terminals'
],
)
| mit | -8,053,686,960,828,547,000 | 37.034483 | 72 | 0.598368 | false | 4.062615 | false | false | false |
KT26/PythonCourse | 8. Class/7.py | 1 | 1724 | # Created by PyCharm Pro Edition
# User: Kaushik Talukdar
# Date: 23-04-17
# Time: 11:33 PM
# What if we want to increment the value of an attribute (instead of replacing it outright)
class Car():
def __init__(self, make, model, year):
self.make = make
self.model = model
self.year = year
self.mileage = 0
def get_descriptive_name(self):
full_name = self.make.title() + ' ' + self.model.title() + ' ' + str(self.year)
return full_name
def read_odometer(self):
if(self.mileage == 0):
return "brand new"
else:
return "used"
# Here we are using two methods with the same parameter name. Won't there be a collision? No: the parameters are local to each method.
# Different methods can have parameters with the same name, just as a person can have different names depending on location.
##################################################
def update_odometer(self, mileage):
self.mileage = mileage
def increase_odometer(self, mileage):
self.mileage += mileage
return self.mileage
def get_odometer_readings(self):
return self.mileage
##################################################
my_new_car = Car('Maruti', '800', '2002')
car_name = my_new_car.get_descriptive_name()
my_new_car.update_odometer(1000)
my_new_car.increase_odometer(200)
car_status = my_new_car.read_odometer()
print("I just bought a " + car_status + " car. " + car_name)
odometer_reading = my_new_car.get_odometer_readings()
if(my_new_car.get_odometer_readings()>0):
print("It has ran " + str(odometer_reading) + "kms") | mit | -7,323,777,537,104,650,000 | 29.381818 | 119 | 0.571926 | false | 3.393701 | false | false | false |
RubenRocha/ftpscout | ftpscout.py | 1 | 5653 | #!/usr/bin/env python3
# ftpscout v.0.1
#
# notice: this requires RabbitMQ.
# to add to the queue simply pass a text file to server.py containing the hosts in
# ip:port or hostname:port format, seperated by a newline
#
# usage: ./ftpscout.py [logfile.txt]
# usage (threaded): ./launch.sh [number of threads] [logfile.txt]
#
# This project is licensed under the GNU GPLv3
# GitHub: https://github.com/RubenRocha/ftpscout
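#
# Example input (illustrative, not from the original project): a hosts file
# passed to server.py contains one target per line, e.g.
#   192.0.2.10:21
#   ftp.example.org:2121
# Each message consumed from the queue is expected to be a JSON list such as
# ["192.0.2.10", 21], which is how callback() below decodes it.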
import ftplib
import os
import socket
import sys
import colorama
import random
import pika
import time
import json
from urllib.parse import urlparse
from random import shuffle
from colorama import init, Fore, Back, Style
from datetime import datetime
init()
log_file = ""
log_strs = []
users = ["anonymous", "root", "admin"]
passwords = ["guest","anonymous","admin", "toor", " ", ""]
mq_queue="task_queue2"
def log(color, m_str, log_this=True):
global log_file
pid = str(os.getpid())
print(Fore.MAGENTA + "[*] [thread {}] {} {} {}".format(Fore.GREEN + pid, color, m_str, Style.RESET_ALL))
if len(log_file) > 0 and log_this:
log_strs.append(m_str)
def save_log():
global log_strs
with open(log_file, "a+") as myfile:
myfile.write('\n'.join(log_strs))
log_strs = []
def color(clr, strp):
return "{}{}{}".format(clr,strp,Style.RESET_ALL)
def try_login(custom_users, custom_passwords, host, port):
for user in custom_users:
for password in custom_passwords:
try:
con = ftplib.FTP(timeout=3.5)
con.connect(host, port)
ans = con.login(user,password)
if "230" in ans:
anon_login = "Success ({} - {})".format(user, password)
dir_listing = get_directory_listing(con)
return (anon_login, dir_listing)
else:
con.quit()
con.close()
continue
except socket.timeout:
anon_login = "Timed out"
return (anon_login, None)
except Exception as e:
anon_login = "Disallowed"
return (anon_login, None)
def get_banner(host,port):
socket.setdefaulttimeout(3.5)
s = socket.socket()
try:
s.connect((host, port))
ans = s.recv(1024)
s.close()
ans = ans.decode().rstrip().replace("\r", "").replace("\n", "\n\t")
if len(ans) == 0:
return "Empty"
else:
return ans
except Exception as e:
return "Error obtaining banner"
def get_directory_listing(ftp_con):
try:
files = ftp_con.nlst()
if "." in files: files.remove(".")
if ".." in files: files.remove("..")
rnd_files = random.sample(set(files), 3)
return "{} files listed - sample of files: {}".format(len(files), rnd_files)
except ftplib.error_perm as resp:
if "550" in resp:
return "No files"
finally:
        ftp_con.quit()
        ftp_con.close()
def differenciate_list(custom_list, real_list, l_type):
for k,i in enumerate(custom_list[:]):
if k >= len(real_list):
log(Fore.MAGENTA, "Added {} '{}' to current domain".format(l_type, custom_list[k]), log_this=False)
def port_check(host, port):
try:
socket.setdefaulttimeout(1)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
res = s.connect_ex((host,port))
s.close()
return True if res == 0 else False
except Exception as e:
return False
def scan(host,port=21):
try:
results = []
custom_users = list(users)
custom_passwords = list(passwords)
if not port_check(host,port):
log(Fore.RED, "Port {} seems to be closed for {}. Skipping...".format(port, color(Fore.YELLOW, host)), log_this=False)
return
try:
socket.inet_aton(host)
except socket.error:
nhost = host.replace("www.", "")
urlp = urlparse("//" + nhost)
custom_users.append("anonymous"+"@"+nhost)
custom_users.append(urlp.netloc)
custom_users.append(nhost.split(".")[0])
custom_passwords.append(nhost.split(".")[0])
differenciate_list(custom_users, users, "username")
differenciate_list(custom_passwords, passwords, "password")
log(Fore.CYAN, "Scanning {}:{} - please wait.".format(host,port), log_this=False)
anon_login,dir_listing = try_login(custom_users, custom_passwords, host, port)
banner = get_banner(host, port)
if "Timed out" in anon_login:
log(Fore.RED, "Results for {}:{} \r\n\t-> Timed out\n".format(host,port))
return
if "Disallowed" in anon_login:
log(Fore.RED, ("Results for {}:{} \r\n" +
"\t-> Anonymous access: {}\r\n" +
"\t-> FTP banner: {}\n")
.format(host,port,anon_login,banner)
)
return
log(Fore.GREEN, ("Results for {}:{} \r\n" +
"\t-> Anonymous access: {}\r\n" +
"\t-> FTP banner: {}\r\n" +
"\t-> Dir listing: {}\n")
.format(host,port,anon_login,banner,dir_listing)
)
except(KeyboardInterrupt):
log(Fore.RED, "Interrupted. Later!", log_this=False)
sys.exit()
def callback(ch, method, properties, body):
global log_strs
try:
server = json.loads(body.decode('utf-8'))
        scan(server[0], server[1] if len(server) > 1 else 21)
save_log()
ch.basic_ack(delivery_tag = method.delivery_tag)
except:
save_log()
ch.basic_ack(delivery_tag = method.delivery_tag)
def mq_worker():
try:
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost'))
channel = connection.channel()
channel.queue_declare(queue=mq_queue, durable=True)
channel.basic_qos(prefetch_count=1)
channel.basic_consume(callback,
queue=mq_queue)
channel.start_consuming()
except Exception as e:
connection.close()
mq_worker()
def main():
global log_file
log(Fore.YELLOW, "Loaded {} usernames.".format(len(users)))
log(Fore.YELLOW, "Loaded {} passwords.\n".format(len(passwords)))
log(Fore.BLUE, "Waiting for data from queue...", log_this=False)
log_file = sys.argv[1] if len(sys.argv) >= 2 else "log.txt"
mq_worker()
if __name__ == "__main__":
main()
| gpl-3.0 | -799,190,912,222,037,000 | 24.931193 | 121 | 0.65735 | false | 2.868087 | false | false | false |
torchbox/longform | longform/models.py | 1 | 1584 | from django.db import models
from django.conf import settings
from modelcluster.fields import ParentalKey
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import StreamField
from wagtail.wagtailadmin.edit_handlers import (
FieldPanel, StreamFieldPanel, InlinePanel, PageChooserPanel
)
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailsearch import index
from .blocks import LongformBlock
class LongformPage(Page):
body = StreamField(LongformBlock())
introduction = models.CharField(max_length=255)
background_image = models.ForeignKey(
settings.WAGTAILIMAGES_IMAGE_MODEL,
null=True, blank=True,
on_delete=models.SET_NULL,
related_name='+',
)
search_fields = Page.search_fields + [
index.SearchField('body'),
]
content_panels = Page.content_panels + [
FieldPanel('introduction'),
ImageChooserPanel('background_image'),
StreamFieldPanel('body'),
]
subpage_types = []
def get_template(self, request, *args, **kwargs):
if request.is_ajax():
return self.ajax_template or self.template
else:
return 'longform/longform_page.html'
def get_context(self, request):
context = super().get_context(request)
if request.GET.get('accessible'):
context.update(render_accessible=True)
return context
class Meta:
verbose_name = "Longform Page"
verbose_name_plural = "Longform Pages"
abstract = True
| mit | -4,258,515,471,768,198,000 | 27.285714 | 65 | 0.682449 | false | 4.103627 | false | false | false |
suriya/save-your-vote | init-db-scripts/add-flatpages.py | 1 | 12188 |
from django.contrib.sites.models import Site
from django.contrib.flatpages.models import FlatPage
from django.conf import settings
import textwrap
FlatPage.objects.all().delete()
site = Site.objects.get(pk=settings.SITE_ID)
site.domain = 'saveyourvote.in'
site.name = 'Save Your Vote'
site.save()
PAGES = [
('/fp/terms-and-conditions/', 'Terms and Conditions, and Privacy Policy', textwrap.dedent("""
<h2>
Web Site Terms and Conditions of Use
</h2>
<h3>
1. Terms
</h3>
<p>
By accessing this web site, you are agreeing to be bound by these
web site Terms and Conditions of Use, all applicable laws and regulations,
and agree that you are responsible for compliance with any applicable local
laws. If you do not agree with any of these terms, you are prohibited from
using or accessing this site. The materials contained in this web site are
protected by applicable copyright and trade mark law.
</p>
<h3>
2. Use License
</h3>
<ol type="a">
<li>
Permission is granted to temporarily download one copy of the materials
(information or software) on Save Your Vote's web site for personal,
non-commercial transitory viewing only. This is the grant of a license,
not a transfer of title, and under this license you may not:
<ol type="i">
<li>modify or copy the materials;</li>
<li>use the materials for any commercial purpose, or for any public display (commercial or non-commercial);</li>
<li>attempt to decompile or reverse engineer any software contained on Save Your Vote's web site;</li>
<li>remove any copyright or other proprietary notations from the materials; or</li>
<li>transfer the materials to another person or "mirror" the materials on any other server.</li>
</ol>
</li>
<li>
This license shall automatically terminate if you violate any of these restrictions and may be terminated by Save Your Vote at any time. Upon terminating your viewing of these materials or upon the termination of this license, you must destroy any downloaded materials in your possession whether in electronic or printed format.
</li>
</ol>
<h3>
3. Disclaimer
</h3>
<ol type="a">
<li>
The materials on Save Your Vote's web site are provided "as is". Save Your Vote makes no warranties, expressed or implied, and hereby disclaims and negates all other warranties, including without limitation, implied warranties or conditions of merchantability, fitness for a particular purpose, or non-infringement of intellectual property or other violation of rights. Further, Save Your Vote does not warrant or make any representations concerning the accuracy, likely results, or reliability of the use of the materials on its Internet web site or otherwise relating to such materials or on any sites linked to this site.
</li>
</ol>
<h3>
4. Limitations
</h3>
<p>
In no event shall Save Your Vote or its suppliers be liable for any damages (including, without limitation, damages for loss of data or profit, or due to business interruption,) arising out of the use or inability to use the materials on Save Your Vote's Internet site, even if Save Your Vote or a Save Your Vote authorized representative has been notified orally or in writing of the possibility of such damage. Because some jurisdictions do not allow limitations on implied warranties, or limitations of liability for consequential or incidental damages, these limitations may not apply to you.
</p>
<h3>
5. Revisions and Errata
</h3>
<p>
The materials appearing on Save Your Vote's web site could include technical, typographical, or photographic errors. Save Your Vote does not warrant that any of the materials on its web site are accurate, complete, or current. Save Your Vote may make changes to the materials contained on its web site at any time without notice. Save Your Vote does not, however, make any commitment to update the materials.
</p>
<h3>
6. Links
</h3>
<p>
Save Your Vote has not reviewed all of the sites linked to its Internet web site and is not responsible for the contents of any such linked site. The inclusion of any link does not imply endorsement by Save Your Vote of the site. Use of any such linked web site is at the user's own risk.
</p>
<h3>
7. Site Terms of Use Modifications
</h3>
<p>
Save Your Vote may revise these terms of use for its web site at any time without notice. By using this web site you are agreeing to be bound by the then current version of these Terms and Conditions of Use.
</p>
<h3>
8. Governing Law
</h3>
<p>
Any claim relating to Save Your Vote's web site shall be governed by the laws of the State of Karnataka, India without regard to its conflict of law provisions.
</p>
<p>
General Terms and Conditions applicable to Use of a Web Site.
</p>
<h2>
Privacy Policy
</h2>
<p>
Your privacy is very important to us. Accordingly, we have developed this Policy in order for you to understand how we collect, use, communicate and disclose and make use of personal information. The following outlines our privacy policy.
</p>
<ul>
<li>
Before or at the time of collecting personal information, we will identify the purposes for which information is being collected.
</li>
<li>
We will collect and use of personal information solely with the objective of fulfilling those purposes specified by us and for other compatible purposes, unless we obtain the consent of the individual concerned or as required by law.
</li>
<li>
We will only retain personal information as long as necessary for the fulfillment of those purposes.
</li>
<li>
We will collect personal information by lawful and fair means and, where appropriate, with the knowledge or consent of the individual concerned.
</li>
<li>
Personal data should be relevant to the purposes for which it is to be used, and, to the extent necessary for those purposes, should be accurate, complete, and up-to-date.
</li>
<li>
We will protect personal information by reasonable security safeguards against loss or theft, as well as unauthorized access, disclosure, copying, use or modification.
</li>
<li>
We will make readily available to customers information about our policies and practices relating to the management of personal information.
</li>
</ul>
<p>
We are committed to conducting our business in accordance with these principles in order to ensure that the confidentiality of personal information is protected and maintained.
</p>
""")),
('/fp/contact/', 'Contact Us', textwrap.dedent("""
<h2 class="text-center"> Contact Us </h2>
<p>
The best way to contact us is by leaving a comment below or emailing
[email protected].
</p>
""")),
('/fp/the-missing-voter/', "The missing voter", textwrap.dedent("""
<div class="page-header"> <h1>The missing voter</h1> </div>
<p>
A major issue ailing India's election process is that genuine registered
Indian voters become disenfranchised and end up unable to vote on election
day. Most Indian voters know someone who goes to vote on election day only to
find their name missing from the electoral roll. News articles about this
issue come up every election, however nothing changes. For example, take a
look
<a href="http://news.oneindia.in/2013/05/05/bangalore-names-go-missing-from-electoral-rolls-1209745.html">here</a>,
<a href="http://m.indianexpress.com/news/no-response-84700-names-deleted-from-electoral-rolls/1119836/">here</a>,
<a href="http://pressclubofindia.co.in/names-go-missing-from-electoral-rolls-in-bangalore/">here</a>,
<a href="http://articles.timesofindia.indiatimes.com/2013-06-08/ranchi/39833960_1_missing-names-revised-electoral-roll-omission">here</a>,
<a href="http://www.hindu.com/2009/04/25/stories/2009042559980400.htm">here</a>,
... Here's a voter showing Voter IDs for his neighbours whose names were
missing in the rolls.
</p>
<p class="text-center">
<img
src="http://www.hindu.com/2009/04/25/images/2009042559980401.jpg"
alt="Unable to Vote" />
</p>
<h2 class="text-center">A simple solution</h2>
<p>
We want to make it easy for voters to ensure that their name does not get
deleted accidentally or otherwise. Once you enter your Voter ID number on
our website, we will monitor the Election Commissioner's database for
changes ensuring that your record does not get deleted.
</p>
""")),
('/fp/faq/', 'Frequently Anticipated Questions (FAQ)', textwrap.dedent("""
<div class="page-header"> <h2>Frequently Anticipated Questions</h2> </div>
<dl class="faq">
<dt> 1. Why should I trust you with my Voter ID number? </dt>
<dd>
Your Voter ID number along with other details such as your name, age, and
street address is public information. You can go look them up on the
Election Commissioner's website. The Election Commission makes PDF copies
of the voter roll available on their website.
</dd>
<dt>
<dt> 2. Will you sell my data? </dt>
<dd>
We care about your privacy and we will not sell your data to anyone. We may
send your notification emails such as the date of election, your polling
booth, etc. In any case, we will let you opt out of any e-mail from us.
</dd>
<!--
<dt> 3. Will you have advertising on the site? </dt>
<dd>
Maybe, in order to pay for our hosting costs.
</dd>
-->
<dt> 3. Why aren't all states supported at the moment? </dt>
<dd>
At the moment, we support Delhi, Karnataka, Tamil Nadu, and Uttar Pradesh.
It is a matter of time before we get to support other states as well.
<strong>Even if your state is not supported, you can add your Voter ID
number and we will notify you once we add support for your state. </strong>
If you are a developer you can contribute to our source code to quickly
support all the states in India.
</dd>
</dl>
If you have more questions, feel free to leave a message below.
""")),
('/', 'Home Page', textwrap.dedent("""
<h2 class="text-center"> Welcome to Save Your Vote </h2>
<dl class="dl-horizontal">
<dt><button style="cursor: default;" class="btn btn-block btn-danger" type="button">Problem</button></dt>
<dd>Several voters in India find that they are unable to vote on
election day because their name has been deleted from the voter roll.
<br /> <br /> </dd>
<dt><button style="cursor: default;" class="btn btn-block btn-success" type="button">Solution</button></dt>
<dd>Save your Voter ID number on our website and we will notify you
over Facebook and e-mail if your record is deleted by mistake. </dd>
</dl>
<p>
Don't wait until it is too late. It only takes a couple of minutes to add
your Voter ID number. You can also add details of other voters such as your
family members. <br /> <br />
</p>
<p class="text-center">
<a href="/voterreg/epic/new/"><button class="btn btn-large btn-primary"
type="button">Add Voter Information</button></a>
</p>
""")),
]
for (url, title, content) in PAGES:
fp = FlatPage(url=url, title=title, content=content)
fp.save()
fp.sites.add(site)
print fp
# Save Your Vote is an initiative to ensure that genuine registered Indian
# voters are not disenfranchised and end up unable to vote on election day.
# Most Indian voters know someone who goes to vote on election day only to
# find their name missing from the electoral roll. News articles about this
# issue come up every election, however nothing changes.
#
# On the Save Your Vote website, you will be able to add your Voter ID
# information (and that of your family members who are not internet-savvy).
# We will monitor the Chief Election Commissioner's database to ensure that
# your record does not get deleted. In case your record gets deleted, we will
# notify you through Facebook and e-mail so that you can restore it.
#
# In case you are hesitant to enter your Voter ID information, please note
# that your voter details are public information and can already be found on
# the Election Commissioner's website. Save Your Vote merely allows you to
# prevent government officials from deleting your name by mistake.
#
# Don't hesitate. Go ahead and save your vote.
| mit | 7,640,967,246,950,248,000 | 41.915493 | 632 | 0.735888 | false | 3.59528 | false | false | false |
shengqh/ngsperl | lib/QC/fastQCSummary.py | 1 | 5883 | import argparse
import sys
import logging
import os
def initialize_logger(logfile, logname, isdebug):
logger = logging.getLogger(logname)
loglevel = logging.DEBUG if isdebug else logging.INFO
logger.setLevel(loglevel)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)-8s - %(message)s')
# create console handler and set level to info
handler = logging.StreamHandler()
handler.setLevel(loglevel)
handler.setFormatter(formatter)
logger.addHandler(handler)
# create error file handler and set level to error
handler = logging.FileHandler(logfile, "w")
handler.setLevel(loglevel)
handler.setFormatter(formatter)
logger.addHandler(handler)
return(logger)
parser = argparse.ArgumentParser(description="Summarize the fastqc result.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-i', '--input', action='store', nargs='?', help='Input fastqc data file list', required=True)
parser.add_argument('-o', '--output', action='store', nargs='?', help="Output summary prefix", required=False)
args = parser.parse_args()
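# The input file list is expected to contain one tab-separated line per FastQC
# data file: "<path to fastqc_data.txt>\t<sample name>" (inferred from the
# parsing loop below, which reads parts[0] as the data file and parts[1] as the
# sample name). Example line (illustrative):
#   /results/sampleA_fastqc/fastqc_data.txt    sampleA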
logger = initialize_logger(args.output + ".log", 'fastQCSummary', False)
logger.info("Output overall summary file.")
class Section:
def __init__(self, fileName, sectionTag, headerTag):
self.fileName = fileName
self.sectionTag = sectionTag
self.headerTag = headerTag
self.inSection = False
self.header = ""
self.data = {}
sections = [Section("adapter", ">>Adapter Content", "#Position"),
Section("baseQuality", ">>Per base sequence quality", "#Base"),
Section("baseContent", ">>Per base sequence content", "#Base"),
Section("sequenceGC", ">>Per sequence GC content", "#GC Content")]
summary={}
reads={}
overrepresented={}
overrepresentedHeader = ""
has_error = False
with open(args.input, "r") as flistin:
for line in flistin:
parts = line.split('\t')
sampleName = parts[1].rstrip()
datafile = parts[0]
datafolder, datafilename = os.path.split(datafile)
datafilePrefix = os.path.splitext(os.path.basename(datafolder))[0]
if datafilePrefix.endswith("_fastqc"):
datafilePrefix = datafilePrefix[0:-7]
if not sampleName in summary:
summary[sampleName] = {}
prefixDic = {}
summary[sampleName][datafilePrefix] = prefixDic
summaryfile = datafolder + "/summary.txt"
if (not os.path.isfile(summaryfile)):
logger.error("File not exists: %s" % summaryfile)
has_error = True
continue
with open(summaryfile, "r") as fsummary:
for sline in fsummary:
sparts = sline.rstrip().split('\t')
prefixDic[sparts[1]] = sparts[0]
if not sampleName in reads:
reads[sampleName] = {}
if not sampleName in overrepresented:
overrepresented[sampleName] = {}
for section in sections:
section.inSection = False
section.done = False
if not sampleName in section.data:
section.data[sampleName] = {}
with open(datafile, "r") as fdata:
bInOver = False
bInAdapter = False
for sline in fdata:
if sline.startswith("Total Sequences"):
reads[sampleName][datafilePrefix] = sline[16:].rstrip()
continue
if sline.startswith(">>Overrepresented"):
bInOver = True
continue
if bInOver:
if sline.startswith("#Sequence"):
overrepresentedHeader = sline[1:].rstrip()
continue
if not sline.startswith(">>END_MODULE"):
overrepresented[sampleName][datafilePrefix] = sline.rstrip()
bInOver = False
continue
for section in sections:
if sline.startswith(section.sectionTag):
section.inSection = True
section.data[sampleName][datafilePrefix] = []
continue
if section.inSection:
if sline.startswith(section.headerTag):
section.header = sline[1:].rstrip()
continue
if sline.startswith(">>END_MODULE"):
section.inSection = False
continue
section.data[sampleName][datafilePrefix].append(sline.rstrip())
continue
output_prefix = "error." + args.output if has_error else args.output
with open(output_prefix + ".summary.tsv", "w") as fout:
fout.write("Sample\tFile\tCategory\tQCResult\n")
for skey, svalue in sorted(summary.items()):
slen = len(svalue)
for vkey, vvalue in sorted(svalue.items()):
for ckey, cvalue in sorted(vvalue.items()):
fout.write("%s\t%s\t%s\t%s\n" % (skey, skey if slen==1 else vkey, ckey, cvalue))
with open(output_prefix + ".reads.tsv", "w") as fout:
fout.write("Sample\tFile\tReads\n")
for skey, svalue in sorted(reads.items()):
slen = len(svalue)
for vkey, vvalue in sorted(svalue.items()):
fout.write("%s\t%s\t%s\n" % (skey, skey if slen==1 else vkey, vvalue))
with open(output_prefix + ".overrepresented.tsv", "w") as fout:
fout.write("Sample\tiFile\t%s\n" % overrepresentedHeader)
for skey, svalue in sorted(overrepresented.items()):
slen = len(svalue)
for vkey, vvalue in sorted(svalue.items()):
fout.write("%s\t%s\t%s\n" % (skey, skey if slen==1 else vkey, vvalue))
for section in sections:
with open("%s.%s.tsv" % (output_prefix, section.fileName), "w") as fout:
fout.write("Sample\tFile\t%s\n" % section.header)
for skey, svalue in sorted(section.data.items()):
slen = len(svalue)
for vkey, vvalue in sorted(svalue.items()):
for avalue in vvalue:
fout.write("%s\t%s\t%s\n" % (skey, skey if slen==1 else vkey, avalue))
| apache-2.0 | 7,128,921,792,951,964,000 | 33.227545 | 114 | 0.623491 | false | 3.716361 | false | false | false |
heyfaraday/CMB_test | points_1.py | 1 | 1859 | from numpy import genfromtxt, zeros
from math import pi
from lib import minkowski
N = 2048
file_map_Q = genfromtxt('planck_2_dir/file_map_Q_64_2048.dat')
file_map_Qx = genfromtxt('planck_2_dir/file_map_Qx_64_2048.dat')
file_map_Qy = genfromtxt('planck_2_dir/file_map_Qy_64_2048.dat')
file_map_U = genfromtxt('planck_2_dir/file_map_U_64_2048.dat')
file_map_Ux = genfromtxt('planck_2_dir/file_map_Ux_64_2048.dat')
file_map_Uy = genfromtxt('planck_2_dir/file_map_Uy_64_2048.dat')
file_map_points = open('planck_2_dir/file_map_points_64.dat', 'w')
file_normal_Q = zeros((N + 1, N / 2 + 1))
file_normal_Qx = zeros((N + 1, N / 2 + 1))
file_normal_Qy = zeros((N + 1, N / 2 + 1))
file_normal_U = zeros((N + 1, N / 2 + 1))
file_normal_Ux = zeros((N + 1, N / 2 + 1))
file_normal_Uy = zeros((N + 1, N / 2 + 1))
x = zeros((N + 1, N / 2 + 1))
y = zeros((N + 1, N / 2 + 1))
for i in xrange(0, N + 1):
for j in xrange(0, N / 2 + 1):
x[i][j] = (2.0 * i - N) / N * pi
y[i][j] = 2.0 * j / N * pi - pi / 2.0
for i in xrange(0, (N + 1) * (N/2 + 1)):
file_normal_Q[int(file_map_Q[i][1]), int(file_map_Q[i][2])] = file_map_Q[i][0]
file_normal_Qx[int(file_map_Qx[i][1]), int(file_map_Qx[i][2])] = file_map_Qx[i][0]
file_normal_Qy[int(file_map_Qy[i][1]), int(file_map_Qy[i][2])] = file_map_Qy[i][0]
file_normal_U[int(file_map_U[i][1]), int(file_map_U[i][2])] = file_map_U[i][0]
file_normal_Ux[int(file_map_Ux[i][1]), int(file_map_Ux[i][2])] = file_map_Ux[i][0]
file_normal_Uy[int(file_map_Uy[i][1]), int(file_map_Uy[i][2])] = file_map_Uy[i][0]
# cmb_map = cmbplot.moll(x, y, file_normal_Q*file_normal_Q + file_normal_U*file_normal_U)
minkowski.singular_points(x, y, file_normal_Q, file_normal_U, file_normal_Qx, file_normal_Qy, file_normal_Ux,
file_normal_Uy, my_file=file_map_points, print_num=True)
| mit | -2,522,698,348,195,412,000 | 40.311111 | 109 | 0.596019 | false | 2.15162 | false | false | false |
gentoo/grss | grs/Execute.py | 2 | 2922 | #!/usr/bin/env python
#
# Execute.py: this file is part of the GRS suite
# Copyright (C) 2015 Anthony G. Basile
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import signal
import shlex
import subprocess
import sys
from grs.Constants import CONST
class Execute():
""" Execute a shell command """
def __init__(
self, cmd, timeout=1, extra_env={}, failok=False, shell=False, logfile=CONST.LOGFILE
):
""" Execute a shell command.
            cmd       - Simple string of the command to be executed as a
fork()-ed child.
timeout - The time in seconds to wait() on the child before
sending a SIGTERM. timeout = None means wait indefinitely.
extra_env - Dictionary of extra environment variables for the fork()-ed
child. Note that the child inherits all the env variables
of the grandparent shell in which grsrun/grsup was spawned.
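            failok    - If True, a non-zero exit code or timeout does not
                        cause the parent process to be sent a SIGTERM.
            shell     - If True, pass cmd to a shell as-is instead of
                        splitting it into an argument list with shlex.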
logfile - A file to log output to. If logfile = None, then we log
to sys.stdout.
"""
if shell:
args = cmd
else:
args = shlex.split(cmd)
extra_env = dict(os.environ, **extra_env)
if logfile:
_file = open(logfile, 'a')
proc = subprocess.Popen(args, stdout=_file, stderr=_file, env=extra_env, shell=shell)
else:
_file = sys.stderr
proc = subprocess.Popen(args, env=extra_env, shell=shell)
try:
proc.wait(timeout)
timed_out = False
except subprocess.TimeoutExpired:
proc.kill()
timed_out = True
if not timed_out:
# _rc = None if we had a timeout
_rc = proc.returncode
if _rc != 0:
_file.write('EXIT CODE: %d\n' % _rc)
if timed_out:
_file.write('TIMEOUT ERROR: %s\n' % cmd)
if not failok and (_rc != 0 or timed_out):
pid = os.getpid()
_file.write('SENDING SIGTERM: %s\n' % pid)
_file.close()
os.kill(pid, signal.SIGTERM)
# Only close a logfile, don't close sys.stderr!
if logfile:
_file.close()
| gpl-2.0 | 7,208,477,461,994,891,000 | 35.074074 | 97 | 0.575633 | false | 4.13881 | false | false | false |
allenai/allennlp | allennlp/training/metrics/mean_absolute_error.py | 1 | 2326 | from typing import Optional
from overrides import overrides
import torch
import torch.distributed as dist
from allennlp.common.util import is_distributed
from allennlp.training.metrics.metric import Metric
@Metric.register("mean_absolute_error")
class MeanAbsoluteError(Metric):
"""
This `Metric` calculates the mean absolute error (MAE) between two tensors.
"""
def __init__(self) -> None:
self._absolute_error = 0.0
self._total_count = 0.0
def __call__(
self,
predictions: torch.Tensor,
gold_labels: torch.Tensor,
mask: Optional[torch.BoolTensor] = None,
):
"""
# Parameters
predictions : `torch.Tensor`, required.
A tensor of predictions of shape (batch_size, ...).
gold_labels : `torch.Tensor`, required.
A tensor of the same shape as `predictions`.
mask : `torch.BoolTensor`, optional (default = `None`).
A tensor of the same shape as `predictions`.
"""
predictions, gold_labels, mask = self.detach_tensors(predictions, gold_labels, mask)
device = gold_labels.device
absolute_errors = torch.abs(predictions - gold_labels)
if mask is not None:
absolute_errors *= mask
_total_count = torch.sum(mask)
else:
_total_count = gold_labels.numel()
_absolute_error = torch.sum(absolute_errors)
if is_distributed():
absolute_error = torch.tensor(_absolute_error, device=device)
total_count = torch.tensor(_total_count, device=device)
dist.all_reduce(absolute_error, op=dist.ReduceOp.SUM)
dist.all_reduce(total_count, op=dist.ReduceOp.SUM)
_absolute_error = absolute_error.item()
_total_count = total_count.item()
self._absolute_error += _absolute_error
self._total_count += _total_count
def get_metric(self, reset: bool = False):
"""
# Returns
The accumulated mean absolute error.
"""
mean_absolute_error = self._absolute_error / self._total_count
if reset:
self.reset()
return {"mae": mean_absolute_error}
@overrides
def reset(self):
self._absolute_error = 0.0
self._total_count = 0.0
| apache-2.0 | -1,180,683,229,054,832,600 | 30.432432 | 92 | 0.604041 | false | 4.052265 | false | false | false |
mh11/chip-qc | chipqc/filter_samples.py | 1 | 4044 | __author__ = 'mh719'
import sqlite3 as lite
import os
import time
import sys
from exec_util import execCmd
import chipqc_db
def getHelpInfo():
return "[OPTIONAL STEP] Filter wiggle files"
def addArguments(parser):
parser.add_argument('-s','--skip',dest='skip',help="Skip filtering - use original files",action='store_true')
parser.add_argument('-r','--regulatory-build',type=str,dest='reg_build',help="RegBuild.bb file")
parser.add_argument('-o','--out-dir',type=str,dest='out_dir',default='wiggle-filtered',help="Output directory of the filtered wiggle file")
parser.add_argument('-d','--data_id',type=int,dest='data_id',help="File id to process - default all files are processed")
parser.add_argument('-w','--wiggle-tool',type=str,dest='wig_tool',default="wiggletools",help="Set path to specific wiggle tool to use")
parser.add_argument('-f','--force-all',dest='force',help="Force refilter",action='store_true')
def skipFilter(db,args):
filterid = list()
## Store in DB
file_data = db.getFiles()
if 'data_id' in args and args.data_id != None:
filterid.append(int(args.data_id))
else:
filterid = [ int(row[0]) for row in file_data]
print ("Skip %s files ... " % (len(filterid)))
now=time.time()
filterupdate = [( "done",now,row[2],now,0,int(row[0]) ) for row in file_data if int(row[0]) in filterid]
db.updateFileFilter(filterupdate)
print ("Updated %s filter files. " % (len(filterupdate)))
def filter(db,args):
ret = 0
out_dir = args.out_dir
if not os.path.exists(out_dir):
os.makedirs(out_dir)
filterid = list()
## Store in DB
file_data = db.getFiles()
if 'data_id' in args and args.data_id != None:
filterid.append(int(args.data_id))
else:
filterid = [ int(row[0]) for row in file_data]
currFilterDetails = db.getFilesFilteredDetails()[1]
for fid in filterid:
# cur.execute("SELECT did,data_file from data_file WHERE did = ? ",(fid,))
url = [row[2] for row in file_data if int(row[0]) == fid][0]
# cur.execute("SELECT exit_code,status from filter WHERE did = ? ",(fid,))
(curr_code,curr_status) = [(row[5],row[2]) for row in currFilterDetails if int(row[0]) == fid][0]
ofile=out_dir+"/"+str(fid)+".filtered.wig"
print("For %s found url %s to store as %s ..." % (fid,url,ofile))
if curr_code is not None:
print("Already processed finished with %s" % curr_code)
if curr_code == "0" or curr_code == 0:
print ("Already downloaded - done")
if 'force' in args and args.force:
print ("Force rerun of %s " % ofile)
else:
continue
if os.path.isfile(ofile):
print ("remove file %s " % ofile)
os.unlink(ofile) # clean up
# oerr=out_dir+"/"+exp_id+".filtered.err"
# olog=out_dir+"/"+exp_id+".filtered.log"
nowStart=time.time()
# status=?, started=?,f_file_path=?,finished=?,exit_code=?
db.updateFileFilter((("started",nowStart,ofile,None,None,None,None,fid),))
cmd="%s write %s mult %s %s" % (args.wig_tool,ofile,args.reg_build,url)
print (cmd)
(res,sto,ste) = execCmd(cmd)
nowEnd=time.time()
msg = "done"
if res != 0:
msg = "error"
ret = int(res)
db.updateFileFilter(((msg,nowStart,ofile,nowEnd,res,sto,ste,fid),))
print ("%s producing filtered %s " % (msg,ofile))
return ret
def run(parser,args):
args.out_dir = os.path.abspath(args.out_dir)
db_file=args.db_file
db = chipqc_db.ChipQcDbSqlite(path=db_file)
if args.skip:
skipFilter(db,args)
elif 'reg_build' not in args or args.reg_build is None:
print ("Regulatory build parameter required ")
return 1
else:
ret = filter(db,args)
if ret != 0:
print ("Error: there were errors during execution !!!")
return 0 | gpl-2.0 | 697,717,661,326,373,500 | 34.79646 | 143 | 0.597428 | false | 3.242983 | false | false | false |
DonLakeFlyer/ardupilot | libraries/SITL/examples/Morse/rover_scanner.py | 7 | 2562 | '''
This is an example builder script that sets up a rover in Morse to
be driven by ArduPilot.
The rover has the basic set of sensors that ArduPilot needs
To start the simulation use this:
morse run rover.py
Then connect with ArduPilot like this:
sim_vehicle.py --model morse --console --map
This model assumes you will set up a skid-steering rover with left throttle on
channel 1 and right throttle on channel 3, which means you need to set:
SERVO1_FUNCTION 73
SERVO3_FUNCTION 74
'''
from morse.builder import *
# use the ATRV rover
vehicle = ATRV()
vehicle.properties(Object = True, Graspable = False, Label = "Vehicle")
vehicle.translate(x=0.0, z=0.0)
# add a camera
camera = SemanticCamera(name="Camera")
camera.translate(x=0.2, y=0.3, z=0.9)
vehicle.append(camera)
camera.properties(cam_far=800)
camera.properties(Vertical_Flip=True)
# we could optionally stream the video to a port
#camera.add_stream('socket')
# add sensors needed for ArduPilot operation to a vehicle
pose = Pose()
vehicle.append(pose)
imu = IMU()
vehicle.append(imu)
gps = GPS()
gps.alter('UTM')
vehicle.append(gps)
velocity = Velocity()
vehicle.append(velocity)
# add a 360 degree laser scanner, sitting 1m above the rover
# this is setup to be similar to the RPLidarA2
scan = Hokuyo()
scan.translate(x=0.0, z=1.0)
vehicle.append(scan)
scan.properties(Visible_arc = True)
scan.properties(laser_range = 18.0)
scan.properties(resolution = 5.0)
scan.properties(scan_window = 360.0)
scan.create_laser_arc()
# create a compound sensor of all of the individual sensors and stream it
all_sensors = CompoundSensor([imu, gps, velocity, pose, scan])
all_sensors.add_stream('socket')
vehicle.append(all_sensors)
# make the vehicle controllable with speed and angular velocity
# this will be available on port 60001 by default
# an example command is:
# {"v":2, "w":1}
# which is 2m/s fwd, and rotating left at 1 radian/second
motion = MotionVW()
vehicle.append(motion)
motion.add_stream('socket')
# this would allow us to control the vehicle with a keyboard
# we don't enable it as it causes issues with sensor consistency
#keyboard = Keyboard()
#keyboard.properties(Speed=3.0)
#vehicle.append(keyboard)
# Environment
env = Environment('land-1/trees', fastmode=False)
env.set_camera_location([10.0, -10.0, 10.0])
env.set_camera_rotation([1.0470, 0, 0.7854])
env.select_display_camera(camera)
env.set_camera_clip(clip_end=1000)
# startup at CMAC. A location is needed for the magnetometer
env.properties(longitude = 149.165230, latitude = -35.363261, altitude = 584.0)
| gpl-3.0 | -873,690,527,554,634,500 | 26.548387 | 79 | 0.747853 | false | 3.039146 | false | false | false |
FrankNagel/qlc | src/webapp/quanthistling/scripts/importfunctions.py | 1 | 16483 | # -*- coding: utf8 -*-
import re, os
import logging
import unicodedata
from quanthistling import model
from annotations import functions
log = logging.getLogger(__name__)
def normalize_stroke(string_src):
return functions.normalize_stroke(string_src)
def delete_book_from_db(Session, bibtex_key):
book_q = Session.query(model.Book)
book = book_q.filter_by(bibtex_key=bibtex_key).first()
if book:
data_array = ()
if book.type == 'dictionary':
data_array = book.dictdata
elif book.type == 'wordlist':
data_array = book.wordlistdata
for data in data_array:
for entry in data.entries:
for a in entry.annotations:
Session.delete(a)
Session.delete(entry)
Session.commit()
if book.type == 'dictionary':
for l in data.src_languages:
Session.delete(l)
for l in data.tgt_languages:
Session.delete(l)
Session.delete(data)
for data in book.nondictdata:
Session.delete(data)
for data in book.nonwordlistdata:
Session.delete(data)
Session.delete(book)
Session.commit()
def insert_book_to_db(Session, bookdata):
book = model.Book()
book.title = bookdata['title']
book.author = bookdata['author']
book.year = bookdata['year']
book.bibtex_key = bookdata['bibtex_key']
book.columns = bookdata['columns']
book.pages = bookdata['pages']
book.is_ready = bookdata['is_ready']
book.type = bookdata['type']
Session.add(book)
Session.commit()
return book
def insert_language_bookname_to_db(Session, language_bookname):
language = model.LanguageBookname()
language.name = language_bookname;
Session.add(language)
Session.commit()
return language
def insert_language_to_db(Session, languagedata):
language = model.Language()
language.name = languagedata['name']
language.langcode = languagedata['langcode']
language.description = languagedata['description']
language.url = languagedata['url']
Session.add(language)
Session.commit()
return language
def insert_wordlistdata_to_db(Session, data, book):
wordlistdata = model.Wordlistdata()
wordlistdata.startpage = data['startpage']
wordlistdata.endpage = data['endpage']
#wordlistdata.language_bookname = []
#wordlistdata.language_iso = []
wordlistdata.book = book
if data['component'] != '':
component = Session.query(model.Component).filter_by(name=data['component']).first()
if component == None:
log.warn("Component not found, inserting dictdata without component.")
wordlistdata.component = component
if data['language_name'] != "":
language_iso = Session.query(model.LanguageIso).filter_by(name=data['language_name']).first()
if language_iso == None:
#log.warn("Language " + b['src_language_name'] + " not found, inserting book " + b['title'].encode('ascii', errors='ingore') + " without source language." )
print("Language %s not found, inserting book without source language." % data['language_name'])
wordlistdata.language_iso = language_iso
if data['language_bookname'] != "":
lang_bookname = data['language_bookname']
lang_bookname = normalize_stroke(lang_bookname)
lang_bookname = unicodedata.normalize("NFD", lang_bookname)
language_bookname = Session.query(model.LanguageBookname).filter_by(name=lang_bookname).first()
if language_bookname == None:
language_bookname = insert_language_bookname_to_db(Session, lang_bookname)
wordlistdata.language_bookname = language_bookname
Session.add(wordlistdata)
Session.commit()
return wordlistdata
##
# Parses an entry from text to an entry model
def process_line(text, type="dictionary"):
if type == "dictionary":
entry = model.Entry()
elif type == "wordlist":
entry = model.WordlistEntry()
else:
print "unknown type in process_line"
return None
# head word is bold at the beginning of the entry
#entry.head = re.sub(re.compile(r'^\t?\t?<b>(.*?)</b>.*', re.DOTALL), r'\1', text)
#entry.head = u'dummy'
in_html_entity = False
html_entity = ''
html_entity_stack = []
html_entity_start = 0
html_entity_start_stack = []
fullentry = ''
annotations = []
prevchar = ''
prevchar_special = False
for char in text:
if char == '<':
in_html_entity = True
elif char == '>':
in_html_entity = False
if re.match(r'^\/', html_entity):
html_end_entity = re.sub(r'^/', '', html_entity)
len_html_entity_stack = len(html_entity_stack)
html_start_entity = ''
if len(html_entity_stack) > 0:
html_start_entity = html_entity_stack.pop()
if (len_html_entity_stack < 1) or (html_end_entity != html_start_entity):
log.warning("html start/end tag mismatch")
log.warning(" Start tag: " + html_start_entity)
log.warning(" End tag: " + html_end_entity)
log.warning(" Full entry: " + text.encode('utf-8'))
html_entity_start = html_entity_start_stack.pop()
html_entity_end = len(fullentry)
if html_end_entity == 'b':
annotations.append([html_entity_start, html_entity_end, u'bold', u'formatting'])
elif html_end_entity == 'i':
annotations.append([html_entity_start, html_entity_end, u'italic', u'formatting'])
elif html_end_entity == 'u':
annotations.append([html_entity_start, html_entity_end, u'underline', u'formatting'])
elif html_end_entity == 'sup':
annotations.append([html_entity_start, html_entity_end, u'superscript', u'formatting'])
elif html_end_entity == 'sc':
annotations.append([html_entity_start, html_entity_end, u'smallcaps', u'formatting'])
html_entity = ''
else:
html_entity_start = len(fullentry)
html_entity_start_stack.append(html_entity_start)
html_entity_stack.append(html_entity)
html_entity = ''
elif char == '\n':
pos = 0
if prevchar == '-':
fullentry = fullentry[:-1]
pos = len(fullentry)
for a in annotations:
if a[1] == pos + 1:
a[1] = pos
annotations.append([pos, pos, u'hyphen', u'pagelayout'])
else:
pos = len(fullentry)
if fullentry[-1] != " ":
fullentry = fullentry + " "
annotations.append([pos, pos, u'newline', u'pagelayout'])
elif char == '\t':
pos = len(fullentry)
if pos > 0 and not prevchar_special:
#print "inserted space for tab"
#print text.encode("utf-8")
#print fullentry.encode("utf-8")
fullentry = fullentry + " "
annotations.append([pos, pos, u'tab', u'pagelayout'])
elif char == '\f':
pos = len(fullentry)
annotations.append([pos, pos, u'pagebreak', u'pagelayout'])
elif in_html_entity:
html_entity = html_entity + char
else:
fullentry = fullentry + char
if not in_html_entity and char != '>' and char != '\f' and char != '\n' and char != '\t':
prevchar = char
if char == '\f' or char == '\n' or char == '\t':
prevchar_special = True
else:
prevchar_special = False
entry.fullentry = fullentry
#fullentry_search = re.sub(r'[\.\,\!\?\)\(;:¿║¡/\\\[\]]', ' ', entry.fullentry)
#entry.fullentry_search = re.sub(r' +', ' ', fullentry_search).lower()
#print entry.fullentry_search.encode('utf-8')
for a in annotations:
entry.append_annotation(a[0], a[1], a[2], a[3])
return entry
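# Usage sketch (hypothetical input): process_line(u'<b>head</b>\tgloss')
# returns an Entry whose fullentry is u'head gloss', with a 'bold' formatting
# annotation covering characters 0-4 ('head') and a 'tab' pagelayout
# annotation at position 4.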
def insert_nondictdata_to_db(Session, data, book, filename):
nondictdata = model.Nondictdata()
nondictdata.startpage = data['startpage']
nondictdata.endpage = data['endpage']
nondictdata.title = data['title']
file = open(filename, 'r')
text = file.read()
file.close()
if re.search(u"<meta http-equiv=Content-Type content=\"text/html; charset=windows-1252\">", text):
html = text.decode('windows-1252')
elif re.search(u"<meta http-equiv=Content-Type content=\"text/html; charset=utf-8\">", text):
html = text.decode('utf-8')
elif re.search(u"<meta http-equiv=Content-Type content=\"text/html; charset=macintosh\">", text):
html = text.decode('mac_roman')
if book.bibtex_key == 'burtch1983':
html = re.sub(u"#001", u"ɨ", html)
html = re.sub(u"#002", u"Ɨ", html)
elif book.bibtex_key == 'thiesen1998':
html = re.sub(u"#003", u"-̀", html)
html = re.sub(u"#004", u"-́", html)
html = normalize_stroke(html)
html = unicodedata.normalize("NFD", html)
nondictdata.data = html
nondictdata.book = book
component = Session.query(model.Component).filter_by(name=data['component']).first()
if component == None:
print "Warning: Component {0} not found, inserting nondictdata without component.".format(data['component'])
nondictdata.component = component
Session.add(nondictdata)
Session.commit()
return nondictdata
def insert_nonwordlistdata_to_db(Session, data, book, filename):
nondictdata = model.Nonwordlistdata()
nondictdata.startpage = data['startpage']
nondictdata.endpage = data['endpage']
if 'volume' in data:
nondictdata.volume = data['volume']
nondictdata.title = data['title']
file = open(filename, 'r')
text = file.read()
file.close()
if re.search(u"<meta http-equiv=Content-Type content=\"text/html; charset=windows-1252\">", text):
html = text.decode('windows-1252')
elif re.search(u"<meta http-equiv=Content-Type content=\"text/html; charset=utf-8\">", text):
html = text.decode('utf-8')
elif re.search(u"<meta http-equiv=Content-Type content=\"text/html; charset=macintosh\">", text):
html = text.decode('mac_roman')
html = unicodedata.normalize("NFD", html)
html = normalize_stroke(html)
nondictdata.data = html
nondictdata.book = book
component = Session.query(model.Component).filter_by(name=data['component']).first()
if component == None:
print "Warning: Component {0} not found, inserting nondictdata without component.".format(data['component'])
nondictdata.component = component
Session.add(nondictdata)
Session.commit()
return nondictdata
def insert_dictdata_to_db(Session, data, book):
if type(data['src_language_name']) is not list:
src_languages = [data['src_language_name']]
src_languages_booknames = [data['src_language_bookname']]
else:
src_languages = data['src_language_name']
src_languages_booknames = data['src_language_bookname']
if type(data['tgt_language_name']) is not list:
tgt_languages = [data['tgt_language_name']]
tgt_languages_booknames = [data['tgt_language_bookname']]
else:
tgt_languages = data['tgt_language_name']
tgt_languages_booknames = data['tgt_language_bookname']
# Init Dictdata object
dictdata = model.Dictdata()
dictdata.startpage = data['startpage']
dictdata.endpage = data['endpage']
dictdata.src_languages = []
dictdata.tgt_languages = []
dictdata.src_languages_booknames = []
dictdata.tgt_languages_booknames = []
# Add languages
for i, src_language_name in enumerate(src_languages):
#print "Inserting src language " + src_language_name
srclanguage_iso = Session.query(model.LanguageIso).filter_by(name=src_language_name).first()
if srclanguage_iso == None:
print("Language %s not found, inserting book without source language." % src_language_name)
#dictdata.src_languages.append(srclanguage)
srclanguage_bookname = Session.query(model.LanguageBookname).filter_by(name=src_languages_booknames[i]).first()
if srclanguage_bookname == None:
srclanguage_bookname = insert_language_bookname_to_db(Session, src_languages_booknames[i])
#dictdata.src_languages_booknames.append(srclanguage_bookname)
srclanguage = model.LanguageSrc()
srclanguage.language_iso = srclanguage_iso
srclanguage.language_bookname = srclanguage_bookname
dictdata.src_languages.append(srclanguage)
for j, tgt_language_name in enumerate(tgt_languages):
#print "Inserting tgt language " + tgt_language_name
tgtlanguage_iso = Session.query(model.LanguageIso).filter_by(name=tgt_language_name).first()
if tgtlanguage_iso == None:
print("Language %s not found, inserting book without target language." % tgt_language_name)
#dictdata.tgt_languages.append(tgtlanguage)
tgtlanguage_bookname = Session.query(model.LanguageBookname).filter_by(name=tgt_languages_booknames[j]).first()
if tgtlanguage_bookname == None:
tgtlanguage_bookname = insert_language_bookname_to_db(Session, tgt_languages_booknames[j])
#dictdata.tgt_languages_booknames.append(tgtlanguage_bookname)
tgtlanguage = model.LanguageTgt()
tgtlanguage.language_iso = tgtlanguage_iso
tgtlanguage.language_bookname = tgtlanguage_bookname
dictdata.tgt_languages.append(tgtlanguage)
#dictdata.src_language_bookname = src_languages_booknames[i]
#dictdata.tgt_language_bookname = tgt_languages_booknames[j]
dictdata.book = book
component = Session.query(model.Component).filter_by(name=data['component']).first()
if component == None:
print("Component not found, inserting dictdata without component.")
dictdata.component = component
Session.add(dictdata)
Session.commit()
return dictdata
def insert_wordlistentry_to_db(Session, entry, annotation, volume, page, column, concept_id, wordlistdata, languages, languages_iso):
for lang in iter(entry):
#entry_db = model.WordlistEntry()
entry_db = process_line(entry[lang]["fullentry"], "wordlist")
language_bookname = languages[lang]
language_iso = languages_iso[language_bookname]
entry_db.wordlistdata = wordlistdata[language_bookname]
#entry_db.language_bookname = wordlistdata[language_bookname].language
#entry_db.fullentry = entry[lang]['fullentry']
entry_db.pos_on_page = entry[lang]['pos_on_page']
entry_db.startpage = page
entry_db.endpage = page
entry_db.startcolumn = column
entry_db.endcolumn = column
if volume:
entry_db.volume = volume
concept_db = model.meta.Session.query(model.WordlistConcept).filter_by(concept=concept_id).first()
if concept_db == None:
concept_db = model.WordlistConcept()
concept_db.concept = concept_id
entry_db.concept = concept_db
#print entry_db.id
#print entry_db.fullentry.encode("utf-8")
if lang in annotation:
inserted = []
for a in annotation[lang]:
a['string'] = a['string'].strip()
if a['string'] not in inserted:
entry_db.append_annotation(a['start'], a['end'], a['value'], a['type'], a['string'])
if a['value'] == 'counterpart':
entry_db.append_annotation(a['start'], a['end'], u'doculect', u'dictinterpretation', language_bookname)
entry_db.append_annotation(a['start'], a['end'], u'iso639-3', u'dictinterpretation', language_iso)
inserted.append(a['string'])
Session.add(entry_db)
Session.commit() | gpl-3.0 | 2,759,722,053,947,207,000 | 40.396985 | 168 | 0.601032 | false | 3.776072 | false | false | false |
jdaigneau/geoq | geoq/core/models.py | 1 | 18539 | # -*- coding: utf-8 -*-
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
import json
import cgi
from django.contrib.auth.models import User, Group
from django.contrib.gis.db import models
from django.contrib.gis.geos import MultiPolygon, Polygon
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.utils.datastructures import SortedDict
from managers import AOIManager
from jsonfield import JSONField
from collections import defaultdict
from django.db.models import Q
from geoq.training.models import Training
from geoq.core.utils import clean_dumps
TRUE_FALSE = [(0, 'False'), (1, 'True')]
STATUS_VALUES_LIST = ['Unassigned', 'Assigned', 'In work', 'Awaiting review', 'In review', 'Completed']
class AssigneeType:
USER, GROUP = range(1, 3)
class Setting(models.Model):
"""
Model for site-wide settings.
"""
name = models.CharField(max_length=200, help_text="Name of site-wide variable")
value = JSONField(null=True, blank=True,
help_text="Value of site-wide variable that scripts can reference - must be valid JSON")
def __unicode__(self):
return self.name
class Assignment(models.Model):
"""
A generic relation to either a user or group
"""
assignee_type = models.ForeignKey(ContentType, null=True)
assignee_id = models.PositiveIntegerField(null=True)
content_object = generic.GenericForeignKey('assignee_type', 'assignee_id')
class Meta:
abstract = True
class GeoQBase(models.Model):
"""
A generic model for GeoQ objects.
"""
active = models.BooleanField(default=True, help_text="Check to make project 'Active' and visible to all users. Uncheck this to 'Archive' the project")
created_at = models.DateTimeField(auto_now_add=True)
name = models.CharField(max_length=200)
description = models.TextField()
updated_at = models.DateTimeField(auto_now=True)
properties = JSONField(null=True, blank=True,
help_text='JSON key/value pairs associated with this object, e.g. {"usng":"18 S TJ 87308 14549", "favorite":"true"}')
def __unicode__(self):
return self.name
class Meta:
abstract = True
ordering = ('active', '-created_at',)
class Project(GeoQBase):
"""
Top-level organizational object.
"""
PROJECT_TYPES = [
("Hurricane/Cyclone", "Hurricane/Cyclone"),
("Tornado", "Tornado"),
("Earthquake", "Earthquake"),
("Extreme Weather", "Extreme Weather"),
("Fire", "Fire"),
("Flood", "Flood"),
("Tsunami", "Tsunami"),
("Volcano", "Volcano"),
("Pandemic", "Pandemic"),
("Exercise", "Exercise"),
("Special Event", "Special Event"),
("Training", "Training"),
]
project_type = models.CharField(max_length=50, choices=PROJECT_TYPES)
private = models.BooleanField(default=False, help_text="Check this to make this project 'Private' and available only to users assigned to it.")
project_admins = models.ManyToManyField(
User, blank=True, null=True,
related_name="project_admins", help_text='User that has admin rights to project.')
contributors = models.ManyToManyField(
User, blank=True, null=True,
related_name="contributors", help_text='User that will be able to take on jobs.')
class Meta:
permissions = (
('open_project', 'Open Project'), ('close_project', 'Close Project'),
('archive_project', 'Archive Project'),
)
ordering = ('-created_at',)
@property
def jobs(self):
return Job.objects.filter(project=self)
@property
def job_count(self):
return self.jobs.count()
@property
def user_count(self):
return User.objects.filter(analysts__project__id=self.id).distinct().count()
@property
def aois(self):
return AOI.objects.filter(job__project__id=self.id)
@property
def aoi_count(self):
return self.aois.count()
@property
def aois_envelope(self):
return MultiPolygon([n.aois_envelope() for n in self.jobs if n.aois.count()])
@property
def aois_envelope_by_job(self):
jobs = []
for job in self.jobs:
if job.aois.count():
job_envelope = job.aois_envelope()
envelope_string = job_envelope.json
if envelope_string:
job_poly = json.loads(envelope_string)
job_poly['properties'] = {"job_id": str(job.id), "link": str(job.get_absolute_url()),
"name": str(job.name)}
jobs.append(job_poly)
return clean_dumps(jobs, ensure_ascii=True)
def get_absolute_url(self):
return reverse('project-detail', args=[self.id])
def get_update_url(self):
return reverse('project-update', args=[self.id])
class Job(GeoQBase, Assignment):
"""
Mid-level organizational object.
"""
GRID_SERVICE_VALUES = ['usng', 'mgrs']
GRID_SERVICE_CHOICES = [(choice, choice) for choice in GRID_SERVICE_VALUES]
EDITORS = ['geoq','osm']
EDITOR_CHOICES = [(choice, choice) for choice in EDITORS]
analysts = models.ManyToManyField(User, blank=True, null=True, related_name="analysts")
teams = models.ManyToManyField(Group, blank=True, null=True, related_name="teams")
reviewers = models.ManyToManyField(User, blank=True, null=True, related_name="reviewers")
progress = models.SmallIntegerField(max_length=2, blank=True, null=True)
project = models.ForeignKey(Project, related_name="project")
grid = models.CharField(max_length=5, choices=GRID_SERVICE_CHOICES, default=GRID_SERVICE_VALUES[0],
help_text='Select usng for Jobs inside the US, otherwise use mgrs')
tags = models.CharField(max_length=50, blank=True, null=True, help_text='Useful tags to search social media with')
editor = models.CharField(max_length=20, help_text='Editor to be used for creating features', choices=EDITOR_CHOICES, default=EDITOR_CHOICES[0])
editable_layer = models.ForeignKey( 'maps.EditableMapLayer', blank=True, null=True)
map = models.ForeignKey('maps.Map', blank=True, null=True)
feature_types = models.ManyToManyField('maps.FeatureType', blank=True, null=True)
required_courses = models.ManyToManyField(Training, blank=True, null=True, help_text="Courses that must be passed to open these cells")
class Meta:
permissions = (
)
ordering = ('-created_at',)
def get_absolute_url(self):
return reverse('job-detail', args=[self.id])
def get_update_url(self):
return reverse('job-update', args=[self.id])
def get_export_url(self):
return reverse('job-export', args=[self.id])
def aois_geometry(self):
return self.aois.all().collect()
def aois_envelope(self):
"""
Returns the envelope of related AOIs geometry.
"""
return getattr(self.aois.all().collect(), 'envelope', None)
def aoi_count(self):
return self.aois.count()
def aois(self):
return self.aois.all()
@property
def aoi_counts_html(self):
count = defaultdict(int)
for cell in AOI.objects.filter(job__id=self.id):
count[cell.status] += 1
return str(', '.join("%s: <b>%r</b>" % (key, val) for (key, val) in count.iteritems()))
@property
def user_count(self):
return self.analysts.count()
@property
def base_layer(self):
if self.map is not None and self.map.layers is not None:
layers = sorted([l for l in self.map.layers if l.is_base_layer], key = lambda x: x.stack_order)
if len(layers) > 0:
layer = layers[0].layer
return [layer.name, layer.url, layer.attribution]
else:
return []
else:
return []
def features_table_html(self):
counts = {}
for feature_item in self.feature_set.all():
status = str(feature_item.status)
featuretype = str(feature_item.template.name)
if not featuretype in counts:
counts[featuretype] = {}
if not status in counts[featuretype]:
counts[featuretype][status] = 0
counts[featuretype][status] += 1
#TODO: Also return this as JSON
if len(counts):
output = "<table class='job_feature_list'>"
header = "<th><i>Feature Counts</i></th>"
for (featuretype, status_obj) in counts.iteritems():
header = header + "<th><b>" + cgi.escape(featuretype) + "</b></th>"
output += "<tr>" + header + "</tr>"
for status in STATUS_VALUES_LIST:
status = str(status)
row = "<td><b>" + status + "</b></td>"
for (featuretype, status_obj) in counts.iteritems():
if status in status_obj:
val = status_obj[status]
else:
val = 0
row += "<td>" + cgi.escape(str(val)) + "</td>"
output += "<tr>" + row + "</tr>"
output += "</table>"
else:
output = ""
return output
def complete(self):
"""
Returns the completed AOIs.
"""
return self.aois.filter(status='Completed')
def in_work(self):
"""
Returns the AOIs currently being worked on or in review
"""
return self.aois.filter(Q(status='In work') | Q(status='Awaiting review') | Q(status='In review'))
def in_work_count(self):
return self.in_work().count()
def complete_count(self):
return self.complete().count()
def complete_percent(self):
if self.aois.count() > 0:
return round(float(self.complete().count() * 100) / self.aois.count(), 2)
return 0.0
def total_count(self):
return self.aois.count()
def geoJSON(self, as_json=True):
"""
Returns geoJSON of the feature.
"""
geojson = SortedDict()
geojson["type"] = "FeatureCollection"
geojson["features"] = [json.loads(aoi.geoJSON()) for aoi in self.aois.all()]
return clean_dumps(geojson) if as_json else geojson
def features_geoJSON(self, as_json=True, using_style_template=True):
geojson = SortedDict()
geojson["type"] = "FeatureCollection"
geojson["properties"] = dict(id=self.id)
geojson["features"] = [n.geoJSON(as_json=False, using_style_template=using_style_template) for n in self.feature_set.all()]
return clean_dumps(geojson, indent=2) if as_json else geojson
def grid_geoJSON(self, as_json=True):
"""
Return geoJSON of grid for export
"""
geojson = SortedDict()
geojson["type"] = "FeatureCollection"
geojson["features"] = [json.loads(aoi.grid_geoJSON()) for aoi in self.aois.all()]
return clean_dumps(geojson) if as_json else geojson
def base_layer_object(self):
"""
create base layer object that can override leaflet base OSM map
"""
obj = {}
if len(self.base_layer) > 0:
obj["layers"] = [self.base_layer]
return obj
class AOI(GeoQBase, Assignment):
"""
Low-level organizational object. Now (6/1/14) referred to as a 'Workcell'
"""
STATUS_VALUES = STATUS_VALUES_LIST
STATUS_CHOICES = [(choice, choice) for choice in STATUS_VALUES]
PRIORITIES = [(n, n) for n in range(1, 6)]
analyst = models.ForeignKey(User, blank=True, null=True, help_text="User assigned to work the workcell.")
job = models.ForeignKey(Job, related_name="aois")
reviewers = models.ManyToManyField(User, blank=True, null=True, related_name="aoi_reviewers",
help_text='Users that actually reviewed this work.')
objects = AOIManager()
polygon = models.MultiPolygonField()
priority = models.SmallIntegerField(choices=PRIORITIES, max_length=1, default=5)
status = models.CharField(max_length=15, choices=STATUS_CHOICES, default='Unassigned')
    # Set when the work cell is created
    cellCreated_at = models.DateTimeField(blank=True, null=True)
    # Set when the work cell enters the "Assigned" state
    cellAssigned_at = models.DateTimeField(blank=True, null=True)
    # Set when the work cell enters the "In work" state
    cellStarted_at = models.DateTimeField(blank=True, null=True)
    # Set when the work cell enters the "Awaiting review" state
    cellWaitingReview_at = models.DateTimeField(blank=True, null=True)
    # Set when the work cell enters the "In review" (QA) state
    cellInReview_at = models.DateTimeField(blank=True, null=True)
    # Set when the work cell enters the "Completed" state
    cellFinished_at = models.DateTimeField(blank=True, null=True)
class Meta:
verbose_name = 'Area of Interest'
verbose_name_plural = 'Areas of Interest'
permissions = (
('assign_workcells', 'Assign Workcells'), ('certify_workcells', 'Certify Workcells'),
)
def __unicode__(self):
aoi_obj = '%s - AOI %s' % (self.name, self.id)
return aoi_obj
@property
def log(self):
return Comment.objects.filter(aoi=self).order_by('created_at')
@property
def assignee_name(self):
if self.assignee_id is None:
return 'Unknown'
else:
if self.assignee_type_id == AssigneeType.USER:
return User.objects.get(id=self.assignee_id).username
else:
return Group.objects.get(id=self.assignee_id).name
#def save(self):
# if analyst or reviewer updated, then create policy to give them permission to edit this object.....
# -- Afterwards -- check how this will work with the views.
def get_absolute_url(self):
if self.job.editable_layer_id is None:
return reverse('aoi-work', args=[self.id])
else:
return reverse('aoi-mapedit', args=[self.id])
def geoJSON(self):
"""
Returns geoJSON of the feature.
"""
if self.id is None:
self.id = 1
geojson = SortedDict()
geojson["type"] = "Feature"
geojson["properties"] = dict(
id=self.id,
status=self.status,
analyst=(self.analyst.username if self.analyst is not None else 'None'),
assignee=self.assignee_name,
priority=self.priority,
delete_url=reverse('aoi-deleter', args=[self.id]),
time=dict(
assigned=str(self.cellAssigned_at),
in_work=str(self.cellStarted_at),
waiting_review=str(self.cellWaitingReview_at),
in_review=str(self.cellInReview_at),
finished=str(self.cellFinished_at)
))
geojson["geometry"] = json.loads(self.polygon.json)
geojson["properties"]["absolute_url"] = self.get_absolute_url()
return clean_dumps(geojson)
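    # The geoJSON() result is shaped roughly like (values are illustrative,
    # keys are taken from the properties built above):
    # {"type": "Feature",
    #  "properties": {"id": 1, "status": "Unassigned", "analyst": "None",
    #                 "assignee": "Unknown", "priority": 5,
    #                 "delete_url": "...", "time": {...}, "absolute_url": "..."},
    #  "geometry": {"type": "MultiPolygon", "coordinates": [...]}}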
def logJSON(self):
return [ob.to_dict() for ob in self.log]
def properties_json(self):
"""
Returns json of the feature properties.
"""
if self.id is None:
self.id = 1
properties_main = self.properties or {}
properties_built = dict(
status=self.status,
analyst=(self.analyst.username if self.analyst is not None else 'Unassigned'),
priority=self.priority)
prop_json = dict(properties_built.items() + properties_main.items())
return clean_dumps(prop_json)
def map_detail(self):
"""
Get map coordinates for MapEdit
"""
center = self.polygon.centroid
return "15/%f/%f" % (center.y, center.x)
def grid_geoJSON(self):
"""
Return geoJSON of workcells for export
"""
if self.id is None:
self.id = 1
geojson = SortedDict()
geojson["type"] = "Feature"
geojson["properties"] = dict(
id=self.id,
priority=self.priority,
status=self.status)
geojson["geometry"] = json.loads(self.polygon.json)
return clean_dumps(geojson)
def user_can_complete(self, user):
"""
Returns whether the user can update the AOI as complete.
"""
return user == self.analyst or user in self.job.reviewers.all()
class Comment(models.Model):
"""
Track comments regarding work on a Workcell
"""
user = models.ForeignKey(User, blank=True, null=True, help_text="User who made comment")
aoi = models.ForeignKey(AOI, blank=False, null=False, help_text="Associated AOI for comment")
text = models.CharField(max_length=200)
created_at = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
comment_obj = '%s Comment on %s' % (self.user, self.aoi.id)
return comment_obj
def to_dict(self):
format = "%D %H:%M:%S"
if self.user:
username = self.user.username
else:
username = "Anonymous or Removed User"
o = {'user': username, 'timestamp': self.created_at.strftime(format), 'text': self.text}
return o
class Organization(models.Model):
"""
Organizations and Agencies that we work with.
"""
name = models.CharField(max_length=200, unique=True, help_text="Short name of this organization")
url = models.CharField(max_length=600, blank=True, null=True, help_text="Link that users should be directed to if icon is clicked")
icon = models.ImageField(upload_to="static/organizations/", blank=True, null=True, help_text="Upload an icon of the organization here")
show_on_front = models.BooleanField(default=False, help_text="Show on the front of the GeoQ App")
order = models.IntegerField(default=0, null=True, blank=True, help_text='Optionally specify the order orgs should appear on the front page. Lower numbers appear sooner.')
def __unicode__(self):
return self.name
class Meta:
verbose_name_plural = 'Organizations'
ordering = ['order', 'name']
| mit | -4,003,007,089,227,908,000 | 33.979245 | 174 | 0.612007 | false | 3.83592 | false | false | false |
saarahy/NGP-LS | neat_gp.py | 1 | 2338 | class neat:
    # speciation properties
    # changes for NEAT
def get_id(self):
return self.id
def set_id_(self, id):
self.id=id
def get_parent(self):
return self.parent_
def set_parent(self, arg):
self.parent_ = arg
def specie(self,sp):
self.tspecie=sp
def get_specie(self):
return self.tspecie
def fitness_sharing(self, avg):
self.fitness_h=avg
def get_fsharing(self):
return self.fitness_h
def descendents(self, des):
self.descendent=des
def get_descendents(self):
return self.descendent
def penalty(self, p):
self.penalizado=p
def num_specie(self,ns):
self.nspecie=ns
def get_numspecie(self):
return self.nspecie
def LS_probability(self, ps):
self.LS_prob=ps
def get_LS_prob(self):
return self.LS_prob
def params_set(self, params):
self.params=params
def get_params(self):
return self.params
def bestspecie_set(self, value):
self.best_ind=value
def bestspecie_get(self):
return self.best_ind
def LS_applied_set(self, value):
self.ls_ind=value
def LS_applied_get(self):
return self.ls_ind
def LS_fitness_set(self,value):
self.ls_fitness=value
def LS_fitness_get(self):
return self.ls_fitness
def LS_story_set(self, value):
self.ls_story=value
def LS_story_get(self):
return self.ls_story
def off_cx_set(self, value):
self.off_cx=value
def off_cx_get(self):
return self.off_cx
def off_mut_set(self, value):
self.off_mut=value
def off_mut_get(self):
return self.off_mut
def binary_rep_get(self):
return self.repr_bin
def binary_rep_set(self, value):
self.repr_bin=value
def binary_level_get(self):
return self.level_bin
def binary_level_set(self, value):
self.level_bin=value
def nodefeat_get(self):
return self.node_feat
def nodefeat_set(self, value):
self.node_feat=value
def get_intracluster(self):
return self.intracluster
def set_intracluster(self, value):
self.intracluster = value
class pop_param:
def save_ind(self):
return True | gpl-3.0 | -4,350,969,255,644,673,500 | 18.822034 | 38 | 0.600513 | false | 3.292958 | false | false | false |
GenosResearchGroup/ContourMetrics | lib/utils.py | 1 | 4520 | import math
import numpy
import pandas
import scipy.stats
import sklearn.decomposition
from django.core import serializers
from matplotlib import pyplot as plt
class ExtendedDataFrame(pandas.DataFrame):
def heat_plot(self, filename=None):
plt.clf()
column_labels = self.index.tolist()
row_labels = column_labels
_, ax = plt.subplots()
_ = plt.gcf()
# heat map
_ = ax.pcolor(self, cmap=plt.cm.YlOrRd)
# put the major ticks at the middle of each cell
ax.set_xticks(numpy.arange(self.shape[0]) + 0.5, minor=False)
ax.set_yticks(numpy.arange(self.shape[1]) + 0.5, minor=False)
# want a more natural, table-like display
ax.invert_yaxis()
ax.xaxis.tick_top()
ax.set_xticklabels(row_labels, minor=False)
ax.set_yticklabels(column_labels, minor=False)
plt.xticks(rotation=90)
if filename:
plt.savefig(filename)
else:
return plt.show()
def scatter(self, with_labels=False):
if with_labels:
fig, ax = plt.subplots()
self.plot(kind='scatter', x=self.columns[0], y=self.columns[1], ax=ax)
for k, v in self.iterrows():
ax.annotate(k, v)
else:
self.plot(kind='scatter', x=self.columns[0], y=self.columns[1])
def get_diagonal(self, n=1):
mat = numpy.array(self)
return pandas.Series(numpy.diagonal(mat, n))
def variation_coefficient(self):
seq = []
for _, row in self.T.iteritems():
r = row[~row.isnull()]
seq.append(r.std() / r.mean())
return pandas.Series(seq, index=self.index)
# return self.T.std() / self.T.mean()
def ks_test(self, columns=None):
if not columns:
columns = self.columns[:2]
return scipy.stats.ks_2samp(self[columns[0]], self[columns[1]])
def print_cseg(c):
return '< {} >'.format(' '.join(map(str, c)))
def reduce_dimensions(matrix):
pca = sklearn.decomposition.PCA(n_components=2)
pca.fit(matrix)
return pca.transform(matrix)
def numpy_to_float(number):
if abs(number) == numpy.inf:
return None
else:
return float(number)
def get_histogram_data(series, bins=6):
_hist_amount, _hist_range = numpy.histogram(series.fillna(0), bins=bins)
return list(zip(list(map(float, _hist_range)), list(map(int, _hist_amount))))
def serialize_model(query_set, t='json'):
data = serializers.serialize(t, query_set.objects.all())
print(len(data))
with open(query_set.__name__ + '-serialized.' + t, 'w') as out:
out.write(data)
def deserialize(filename, save=False):
t = filename.split('.')[-1]
with open(filename, 'r') as out:
data = out.readline()
x = []
for obj in serializers.deserialize(t, data):
if not type(obj.object).objects.filter(id=obj.object.id) and save:
obj.save()
else:
x.append(obj)
if not save:
return x
def sample_size(population_size, confidence_level=95, confidence_interval=5):
# SUPPORTED CONFIDENCE LEVELS: 50%, 68%, 90%, 95%, and 99%
confidence_level_constant = [50, .67], [68, .99], [90, 1.64], [95, 1.96], [99, 2.57]
Z = 0.0
p = 0.5
e = confidence_interval / 100.0
N = population_size
n_0 = 0.0
n = 0.0
# LOOP THROUGH SUPPORTED CONFIDENCE LEVELS AND FIND THE NUM STD
# DEVIATIONS FOR THAT CONFIDENCE LEVEL
for i in confidence_level_constant:
if i[0] == confidence_level:
Z = i[1]
if Z == 0.0:
return -1
# CALC SAMPLE SIZE
n_0 = ((Z ** 2) * p * (1 - p)) / (e ** 2)
# ADJUST SAMPLE SIZE FOR FINITE POPULATION
n = n_0 / (1 + ((n_0 - 1) / float(N)))
return int(math.ceil(n)) # THE SAMPLE SIZE
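# Worked example (computed by hand from the defaults above): for a population of
# 1000 at the 95% confidence level and a +/-5% interval, Z = 1.96, so
# n_0 = (1.96**2 * 0.25) / 0.05**2 ~= 384.16 and the finite-population
# correction gives sample_size(1000) == 278.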
def get_first(sequence):
if not isinstance(sequence, list):
raise AttributeError('The argument must be of list type')
return sequence[0]
def get_last(sequence):
if not isinstance(sequence, list):
raise AttributeError('The argument must be of list type')
return sequence[-1]
def get_distinct(queryset, k):
return [el[k] for el in queryset.values(k).distinct()]
def apply_fn_to_matrix(matrix, fn):
size = len(matrix.T)
return numpy.matrix([fn(matrix[:, i]) for i in range(size)])
def round_tolist(matrix, n=2):
return matrix.round(n).tolist()[0]
def flat_list(seq):
return [item for sublist in seq for item in sublist]
| mit | -5,579,092,799,780,234,000 | 26.560976 | 88 | 0.600664 | false | 3.330877 | false | false | false |
seanjh/FDICTransactions | __main__.py | 1 | 2673 | import os
import sys
from sqlalchemy import create_engine
from storage.sqlsession import session_scope, Base
from scrape.scrape_filers import FDICFilerScraper
from scrape.scrape_listing import FDICOwnFilingScraper
from scrape.scrape_trades import FDICInsiderFileScraper
from storage.transactions import FDICTradeHandler
def get_mssql_engine():
try:
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "settings.cfg")
with open(config_file, 'r') as infile:
settings = dict([line.strip().split("=") for line in infile.readlines()])
except FileNotFoundError as e:
settings = None
filename = create_blank_settings_file()
print("Required settings.cfg file is missing. A blank settings.cfg has been created at %s." % filename)
print("Input server name and table name and retry.")
exit(0)
connection_string = "mssql+pyodbc://%s/%s" % (settings.get("database"), settings.get("table"))
return create_engine(connection_string, echo=True)
def create_blank_settings_file():
if hasattr(sys, "frozen"):
path = os.path.dirname(sys.executable)
else:
path = os.path.dirname(__file__)
filename = os.path.join(path, "settings.cfg")
with open(os.path.join(path, "settings.cfg"), "w") as outfile:
outfile.write("database=")
outfile.write("table=")
return filename
def main():
engine = get_mssql_engine()
Base.metadata.create_all(engine)
with session_scope(engine) as session:
# Scrape the list of filers
f0 = FDICFilerScraper()
filers = f0.update(session)
# Scrape the file listing for each filer
f1 = FDICOwnFilingScraper()
#f1.update(session, "35095")
for filer in filers:
f1.update(session, filer.get("Cert Number"))
# Commit before fetching files to ensure the disclosure IDs are in the DB.
# The underlying table/trade data have FK references that depend on these disclosure IDs
session.commit()
# From the full file listing, identify those that do not exist on the DB
existing_discl_ids = FDICTradeHandler.get_existing_discl_ids(session)
new_urls = f1.get_new_urls(session, [item for (item,) in existing_discl_ids])
for i, url in enumerate(new_urls):
print("%d new files identified. Beginning scrape.")
sys.stdout.write("\rRequesting file #%d/%d @ %s" % (i + 1, len(new_urls), url))
sys.stdout.flush()
# Scrape the table
f2 = FDICInsiderFileScraper(url)
f2.update(session)
if __name__ == '__main__':
main() | mit | -7,940,477,416,846,872,000 | 36.661972 | 112 | 0.651328 | false | 3.631793 | false | false | false |
uholzer/pkb4unix | PKB4Unix/construction.py | 1 | 11320 | import re
import itertools
from collections import defaultdict, namedtuple
import subprocess
from rdflib import util, Graph, Literal, BNode, URIRef
COUNT_ONEORNONE = -1
COUNT_ONEORMORE = -2
COUNT_ANY = -3
class TemplateError(Exception):
pass
class NotPName(Exception):
pass
class PrefixNotFound(Exception):
pass
TemplateVariable = namedtuple('TemplateVariable', [
'nodetype',
'count',
'name',
'classhint',
'datatypehint',
'langhint',
'prompt'
])
class Section:
def __init__(self):
self.name = ""
self.quads = ""
self.mainvariable = None
self.variables = list()
self.ns = dict()
self.expand_re = re.compile(r"^([^\s]*):([^\s]*)$")
def expand(self, shortname):
match = self.expand_re.match(shortname)
if match:
prefix = match.group(1)
localname = match.group(2)
try:
return self.ns[prefix] + localname
except KeyError:
raise PrefixNotFound()
else:
raise NotPName()
def construct(self, g, sections, mainvar_value=None):
raise NotImplementedError()
class TerminalSection(Section):
def __init__(self, out):
super().__init__()
self.out = out
def construct(self, g, sections, mainvar_value=None):
print("", file=self.out)
print("=== {}".format(self.name), file=self.out)
varvalues = defaultdict(list)
if self.quads:
print("I will insert the following triples:\n"+self.quads, file=self.out)
else:
print("No triples will be inserted for this section", file=self.out)
if not mainvar_value:
mainvar_value = self.input("CONSTRUCTING {}> ".format(self.mainvariable.prompt))
mainvar_value = URIRef(mainvar_value)
varvalues[self.mainvariable.name].append(mainvar_value)
else:
varvalues[self.mainvariable.name].append(mainvar_value)
for var in self.variables:
askfunc = getattr(self, "ask_" + var.nodetype)
if var.count > 0:
r = range(0, var.count)
elif var.count == COUNT_ONEORNONE:
r = range(0, 1)
elif var.count == COUNT_ANY or var.count == COUNT_ONEORMORE:
r = itertools.count(0)
else:
                raise TemplateError("Invalid count")
for i in r:
val = askfunc(g, sections, var, self.prompt(var.nodetype, i, var.count, var.prompt))
if val is None or not str(val): # val itself could be false: "false"^^xsd:bool
break
varvalues[var.name].append(val)
if self.quads:
where_part = ""
for (var, values) in varvalues.items():
values_list = " ".join("({})".format(v.n3()) for v in values)
where_part += "VALUES ({}) {{ {} }}\n".format(str(var), values_list)
q = "INSERT {{\n{}}}\nWHERE {{\n{}}}".format(self.quads, where_part)
print("Adding tribles with SPARQL:\n"+q, file=self.out)
g.update(q, initNs=self.ns)
print("=== {}".format("done"), file=self.out)
print("", file=self.out)
return mainvar_value
def ask_NODE(self, g, sections, var, prompt):
answer = self.input(prompt)
if answer.startswith("c") and var.classhint and var.classhint in sections:
s = sections[answer[1:].strip()]
node = s.construct(g, sections, None)
print("back to {}".format(self.name), file=self.out)
return node
elif answer:
return util.from_n3(answer)
else:
return None
def ask_RESOURCE(self, g, sections, var, prompt):
if var.classhint and var.classhint in sections:
answer = self.input(prompt)
if answer == "c":
s = sections[var.classhint]
node = s.construct(g, sections, None)
print("back to {}".format(self.name), file=self.out)
return node
else:
return URIRef(answer)
else:
answer = self.input(prompt)
return URIRef(answer)
def ask_LITERAL(self, g, sections, var, prompt):
answer = self.input(prompt)
if answer.startswith('"') or answer.startswith("'"):
return util.from_n3(answer)
else:
return Literal(answer, lang=var.langhint, datatype=var.datatypehint)
def ask_BNODE(self, g, sections, var, prompt):
        # In order to implement classhints and construction,
        # one would create a blank node and then call
        # section.construct(g, sections, theNewBNode)
        self.input(prompt)
        return BNode()
def prompt(self, nodetype, number, count, text):
if count == COUNT_ANY:
count = "*"
if count == COUNT_ONEORNONE:
count = "?"
elif count == COUNT_ONEORMORE:
count = "+"
return "{}{}/{} {}> ".format(nodetype, number+1, count, text)
def input(self, prompt):
value = input(prompt)
if value:
if value[0] == '@':
value = '| ./know-rdf-edit --null'
if value[0] == '!':
subprocess.call(value[1:], shell=True, stdout=self.out)
return self.input(prompt) # start over
elif value[0] == '|':
try:
value = subprocess.check_output(value[1:], shell=True, universal_newlines=True)
# universal_newlines=True causes value to be a string
except subprocess.CalledProcessError:
print("Your shell command failed, try again!", file=self.out)
return self.input(prompt)
if value and value[-1] == '\n':
# Remove last newline in order to simplify
# providing single-line literals and URIs.
value = value[0:-1]
return value
class Parser:
def __init__(self, sectionFactory=Section):
self.sectionFactory = sectionFactory
self.classhint_re = re.compile(r"^\[([^\]]*)\]")
self.globalns = dict()
self.sections = dict()
def parse(self, lineiter):
current_section = None
first_section = None
# Preamble
while True:
try:
line = next(lineiter)
except StopIteration:
raise TemplateError("Tempalte contains no section")
line = line.strip()
if line=="" or line[0] == '#':
pass
elif line[0] == '[':
current_section = self.startSection(line)
first_section = current_section
break
elif line.split(None, 1)[0] == 'NS':
self.really_parse_NS(self.globalns, line)
else:
raise TemplateError("Only NS declarations allowed in the preamble")
# Sections
while True:
try:
line = next(lineiter)
except StopIteration:
break
line = line.strip()
if line=="" or line[0] == '#':
pass
elif line[0] == '[':
current_section = self.startSection(line)
else:
instruction = line.split(None, 1)[0]
try:
pfunc = getattr(self, "parse_" + instruction)
except AttributeError:
raise TemplateError("Unknown instruction '{}'".format(instruction))
pfunc(current_section, line, lineiter)
return (self.sections, first_section)
def really_parse_NS(self, ns, argline):
args = argline.split(None, 2)
if not args[1][-1] == ':':
raise TemplateError("Prefix must end in :")
ns[args[1][0:-1]] = args[2]
def parse_NS(self, section, argline, lineiter):
self.really_parse_NS(section.ns, argline)
def parse_NODE(self, section, argline, lineiter):
args = argline.split(None, 3)
classhint = None
datatypehint = None
langhint = None
if args[0] == "LITERAL":
(datatypehint, langhint) = self.lithint(section, args[3])
else:
classhint = self.classhint(section, args[3])
var = TemplateVariable(
nodetype=args[0],
count=self.count(args[1]),
name=self.variable(args[2]),
classhint=classhint,
datatypehint=datatypehint,
langhint=langhint,
prompt=args[3]
)
section.variables.append(var)
parse_RESOURCE = parse_NODE
parse_LITERAL = parse_NODE
parse_BNODE = parse_NODE
def parse_INSERT(self, section, argline, lineiter):
if not argline.split(None, 1) == ["INSERT", "{"]:
            raise TemplateError("INSERT block must start with 'INSERT {'")
quads = ""
try:
line = next(lineiter)
while line.rstrip() != '}': # } must be at beginning of line
quads += line
line = next(lineiter)
except StopIteration:
raise TemplateError("INSERT block not closed at EOF")
section.quads = quads
def startSection(self, line):
section = self.sectionFactory()
section.ns = dict(self.globalns) # Include globally defined namespaces
args = line.split(None, 2)
c = self.classhint(section, args[0])
if not c:
raise TemplateError("Invalid Syntax in section start")
section.name = c
self.sections[c] = section
section.mainvariable = TemplateVariable(
nodetype="RESOURCE",
count=1,
name=self.variable(args[1]),
classhint=c,
datatypehint=None,
langhint=None,
prompt=args[2]
)
return section
def count(self, s):
if s == '*':
return COUNT_ANY
elif s == '+':
return COUNT_ONEORMORE
elif s == '?':
return COUNT_ONEORNONE
else:
try:
return int(s)
except:
raise TemplateError("Count expected")
def variable(self, s):
if s[0] == '?' or s[0] == '$':
return s
else:
raise TemplateError("Variable expected")
def classhint(self, section, s):
match = self.classhint_re.search(s)
if match:
hint = match.group(1)
try:
return section.expand(hint)
except NotPName:
return hint
else:
return None
def lithint(self, section, s):
match = self.classhint_re.search(s)
if match:
hint = match.group(1)
if hint.startswith("^^"):
return (URIRef(section.expand(hint[2:])), None)
elif hint.startswith("@"):
return (None, hint[1:])
else:
raise TemplateError("Malformed literal type hint")
else:
return (None, None)
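# A minimal template sketch the parser above would accept; the prefix, URIs and
# variable names below are assumptions for illustration only:
#
#   NS foaf: http://xmlns.com/foaf/0.1/
#   [foaf:Person] ?person Person to describe
#   LITERAL + ?name [@en] Full name
#   RESOURCE ? ?friend [foaf:Person] Someone they know
#   INSERT {
#       ?person a foaf:Person ; foaf:name ?name ; foaf:knows ?friend .
#   }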
| gpl-3.0 | 12,468,833,544,212,732 | 32.491124 | 100 | 0.526678 | false | 4.174041 | false | false | false |
DarkFenX/Pyfa | gui/utils/exportHtml.py | 2 | 11498 | import threading
import time
# noinspection PyPackageRequirements
import wx
from service.const import PortEftOptions
from service.settings import HTMLExportSettings
from service.fit import Fit
from service.port import Port
from service.market import Market
from logbook import Logger
from eos.db import getFit
pyfalog = Logger(__name__)
class exportHtml:
_instance = None
@classmethod
def getInstance(cls):
if cls._instance is None:
cls._instance = exportHtml()
return cls._instance
def __init__(self):
self.thread = exportHtmlThread()
def refreshFittingHtml(self, force=False, callback=False):
settings = HTMLExportSettings.getInstance()
if force or settings.getEnabled():
self.thread.stop()
self.thread = exportHtmlThread(callback)
self.thread.start()
class exportHtmlThread(threading.Thread):
def __init__(self, callback=False):
threading.Thread.__init__(self)
self.name = "HTMLExport"
self.callback = callback
self.stopRunning = False
def stop(self):
self.stopRunning = True
def run(self):
# wait 1 second just in case a lot of modifications get made
time.sleep(1)
if self.stopRunning:
return
sMkt = Market.getInstance()
sFit = Fit.getInstance()
settings = HTMLExportSettings.getInstance()
minimal = settings.getMinimalEnabled()
dnaUrl = "https://o.smium.org/loadout/dna/"
if minimal:
HTML = self.generateMinimalHTML(sMkt, sFit, dnaUrl)
else:
HTML = self.generateFullHTML(sMkt, sFit, dnaUrl)
try:
FILE = open(settings.getPath(), "w", encoding='utf-8')
FILE.write(HTML)
FILE.close()
except IOError as ex:
pyfalog.warning("Failed to write to " + settings.getPath())
pass
except (KeyboardInterrupt, SystemExit):
raise
except Exception as ex:
pass
if self.callback:
wx.CallAfter(self.callback, -1)
def generateFullHTML(self, sMkt, sFit, dnaUrl):
""" Generate the complete HTML with styling and javascript """
timestamp = time.localtime(time.time())
localDate = "%d/%02d/%02d %02d:%02d" % (timestamp[0], timestamp[1], timestamp[2], timestamp[3], timestamp[4])
HTML = """
<!DOCTYPE html>
<html>
<head>
<title>Pyfa Fittings</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta charset="utf-8" />
<link rel="stylesheet" href="https://code.jquery.com/mobile/1.4.2/jquery.mobile-1.4.2.min.css" />
<script src="https://code.jquery.com/jquery-1.11.0.min.js"></script>
<script>
//http://stackoverflow.com/questions/32453806/uncaught-securityerror-failed-to-execute-replacestate-on-history-cannot-be
$(document).bind('mobileinit',function(){
$.mobile.changePage.defaults.changeHash = false;
$.mobile.hashListeningEnabled = false;
$.mobile.pushStateEnabled = false;
});
</script>
<script src="https://code.jquery.com/mobile/1.4.2/jquery.mobile-1.4.2.min.js"></script>
<style>
/* Basic settings */
.ui-li-static.ui-collapsible {
padding: 0;
}
.ui-li-static.ui-collapsible > .ui-collapsible-content > .ui-listview,
.ui-li-static.ui-collapsible > .ui-collapsible-heading {
margin: 0;
}
.ui-li-static.ui-collapsible > .ui-collapsible-content {
padding-top: 0;
padding-bottom: 0;
padding-right: 0;
border-bottom-width: 0;
}
/* collapse vertical borders */
.ui-li-static.ui-collapsible > .ui-collapsible-content > .ui-listview > li.ui-last-child,
.ui-li-static.ui-collapsible.ui-collapsible-collapsed > .ui-collapsible-heading > a.ui-btn {
border-bottom-width: 0;
}
.ui-li-static.ui-collapsible > .ui-collapsible-content > .ui-listview > li.ui-first-child,
.ui-li-static.ui-collapsible > .ui-collapsible-content > .ui-listview > li.ui-first-child > a.ui-btn,
.ui-li-static.ui-collapsible > .ui-collapsible-heading > a.ui-btn {
border-top-width: 0;
}
/* Remove right borders */
.ui-li-static.ui-collapsible > .ui-collapsible-heading > a.ui-btn,
.ui-li-static.ui-collapsible > .ui-collapsible-content > .ui-listview > .ui-li-static,
.ui-li-static.ui-collapsible > .ui-collapsible-content > .ui-listview > li > a.ui-btn,
.ui-li-static.ui-collapsible > .ui-collapsible-content {
border-right-width: 0;
}
/* Remove left borders */
/* Here, we need class ui-listview-outer to identify the outermost listview */
.ui-listview-outer > .ui-li-static.ui-collapsible .ui-li-static.ui-collapsible.ui-collapsible,
.ui-listview-outer > .ui-li-static.ui-collapsible > .ui-collapsible-heading > a.ui-btn,
.ui-li-static.ui-collapsible > .ui-collapsible-content {
border-left-width: 0;
}
.ui-content { max-width: 800px !important; margin: 0 auto !important; }
.ui-listview > .ui-li-static.ui-li-has-count { padding-right: 0px }
</style>
<script>
$(document).ready(function() {
var start = new Date(%d * 1000);
setInterval(function() {
var diff = (new Date - start) / 1000;
var days = Math.floor((diff %% 31536000) / 86400);
var hours = Math.floor(((diff %% 31536000) %% 86400) / 3600);
var minutes = Math.floor((((diff %% 31536000) %% 86400) %% 3600) / 60);
var seconds = Math.floor(((diff %% 31536000) %% 86400) %% 3600) %% 60;
$('.timer').text(days+":"+hours+":"+minutes+":"+seconds+" ago");
}, 1000);
$('a[data-dna]').each(function( index ) {
var dna = $(this).data('dna');
if (typeof CCPEVE !== 'undefined') { // inside IGB
$(this).attr('href', 'javascript:CCPEVE.showFitting("'+dna+'");');}
else { // outside IGB
$(this).attr('href', '%s'+dna); }
});
});
</script>
</head>
<body>
<div id="canvas" data-role="page">
<div data-role="header">
<h1>Pyfa fits</h1>
</div>
<div data-role="content">
<div style="text-align: center;"><strong>Last updated:</strong> %s <small>(<span class="timer"></span>)</small></div>
""" % (time.time(), dnaUrl, localDate)
HTML += ' <ul data-role="listview" class="ui-listview-outer" data-inset="true" data-filter="true">\n'
categoryList = list(sMkt.getShipRoot())
categoryList.sort(key=lambda _ship: _ship.name)
count = 0
for group in categoryList:
# init market group string to give ships something to attach to
HTMLgroup = ''
ships = list(sMkt.getShipList(group.ID))
ships.sort(key=lambda _ship: _ship.name)
# Keep track of how many ships per group
groupFits = 0
for ship in ships:
fits = sFit.getFitsWithShip(ship.ID)
if len(fits) > 0:
groupFits += len(fits)
HTMLship = (
' <li data-role="collapsible" data-iconpos="right" data-shadow="false" '
'data-corners="false">\n'
' <h2>' + ship.name + ' <span class="ui-li-count">' + str(
len(fits)) + '</span></h2>\n'
' <ul data-role="listview" data-shadow="false" data-inset="true" '
'data-corners="false">\n'
)
for fit in fits:
if self.stopRunning:
return
try:
eftFit = Port.exportEft(getFit(fit[0]), options={
PortEftOptions.IMPLANTS: True,
PortEftOptions.MUTATIONS: True,
PortEftOptions.LOADED_CHARGES: True})
HTMLfit = (
' <li data-role="collapsible" data-iconpos="right" data-shadow="false" '
'data-corners="false">\n'
' <h2>' + fit[1] + '</h2>\n'
' <ul data-role="listview" data-shadow="false" data-inset="true" '
'data-corners="false">\n'
)
HTMLfit += ' <li><pre>' + eftFit + '\n </pre></li>\n'
HTMLfit += ' </ul>\n </li>\n'
HTMLship += HTMLfit
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning("Failed to export line")
continue
finally:
if self.callback:
wx.CallAfter(self.callback, count)
count += 1
HTMLgroup += HTMLship + (' </ul>\n'
' </li>\n')
if groupFits > 0:
# Market group header
HTML += (
' <li data-role="collapsible" data-iconpos="right" data-shadow="false" data-corners="false">\n'
' <h2>' + group.groupName + ' <span class="ui-li-count">' + str(groupFits) + '</span></h2>\n'
' <ul data-role="listview" data-shadow="false" data-inset="true" data-corners="false">\n' +
HTMLgroup +
' </ul>\n'
' </li>'
)
HTML += """
</ul>
</div>
</div>
</body>
</html>"""
return HTML
def generateMinimalHTML(self, sMkt, sFit, dnaUrl):
""" Generate a minimal HTML version of the fittings, without any javascript or styling"""
categoryList = list(sMkt.getShipRoot())
categoryList.sort(key=lambda _ship: _ship.name)
count = 0
HTML = ''
for group in categoryList:
# init market group string to give ships something to attach to
ships = list(sMkt.getShipList(group.ID))
            ships.sort(key=lambda _ship: _ship.name)
for ship in ships:
fits = sFit.getFitsWithShip(ship.ID)
for fit in fits:
if self.stopRunning:
return
try:
dnaFit = Port.exportDna(getFit(fit[0]))
HTML += '<a class="outOfGameBrowserLink" target="_blank" href="' + dnaUrl + dnaFit + '">' \
+ ship.name + ': ' + \
fit[1] + '</a><br> \n'
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.error("Failed to export line")
continue
finally:
if self.callback:
wx.CallAfter(self.callback, count)
count += 1
return HTML
| gpl-3.0 | 78,808,129,691,388,850 | 37.713805 | 120 | 0.518786 | false | 3.809808 | false | false | false |
fabioz/PyDev.Debugger | tests_python/resources/_debugger_case_gevent.py | 2 | 1440 | #!/usr/bin/env python
from gevent import monkey, sleep, threading as gevent_threading
import sys
if 'remote' in sys.argv:
import pydevd
port = int(sys.argv[1])
print('before pydevd.settrace')
pydevd.settrace(host=('' if 'as-server' in sys.argv else '127.0.0.1'), port=port, suspend=False)
print('after pydevd.settrace')
monkey.patch_all()
import threading
called = []
class MyGreenThread2(threading.Thread):
def run(self):
for _i in range(3):
sleep()
class MyGreenletThread(threading.Thread):
def run(self):
for _i in range(5):
called.append(self.name) # break here
t1 = MyGreenThread2()
t1.start()
sleep()
if __name__ == '__main__':
t1 = MyGreenletThread()
t1.name = 't1'
t2 = MyGreenletThread()
t2.name = 't2'
if hasattr(gevent_threading, 'Thread'):
# Only available in newer versions of gevent.
assert isinstance(t1, gevent_threading.Thread)
assert isinstance(t2, gevent_threading.Thread)
t1.start()
t2.start()
for t1 in (t1, t2):
t1.join()
# With gevent it's always the same (gevent coroutine support makes thread
# switching serial).
expected = ['t1', 't2', 't1', 't2', 't1', 't2', 't1', 't2', 't1', 't2']
if called != expected:
raise AssertionError("Expected:\n%s\nFound:\n%s" % (expected, called))
print('TEST SUCEEDED')
| epl-1.0 | 7,166,115,034,483,378,000 | 24.263158 | 100 | 0.604861 | false | 3.295195 | false | false | false |
pjsdev/simplerpc | simplerpc/payload.py | 1 | 1783 | import json
from .exceptions import SimpleRPCException
class Payload:
"""
Namespace for payload encoding/decoding
"""
class MalformedPayload(SimpleRPCException):
pass
class BufferDecoder:
def __init__(self):
self.dangling = ''
def packages(self, buf):
buf = buf.decode()
while True:
eof = buf.find('\n')
if eof is -1: # didnt find end of message
self.dangling += buf
break
pkg = self.dangling + buf[:eof]
buf = buf[eof+1:]
self.dangling = ''
yield pkg
@staticmethod
def from_string(data):
"""
Return tuple:
(int opcode, dict data) -> the rpc
Raise:
MalformedPayload -> simple rpc fail
"""
try:
json_start = data.index("{")
except ValueError:
raise Payload.MalformedPayload("JSON data not found")
opcode = data[:json_start]
if opcode == "":
raise Payload.MalformedPayload("Could not find opcode")
json_string = data[json_start:]
json_string = json_string.strip()
try:
args = json.loads(json_string)
except ValueError:
raise Payload.MalformedPayload("JSON malformed for opcode: %s" % opcode)
return (opcode, args)
@staticmethod
def to_string(opcode, args):
"""
Return string representing a simplerpc message
Raises:
ValueError if we cannot convert opcode to string or
parse JSON
"""
# Note: no net_id is ever input into message
return "{}{}\n".format(opcode, json.dumps(args))
| bsd-3-clause | -4,787,527,358,436,784,000 | 24.471429 | 84 | 0.527201 | false | 4.742021 | false | false | false |
Obooks/Obooks | ObooksService/App/migrations/0004_auto_20170928_0930.py | 1 | 2466 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-28 09:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('App', '0003_auto_20170928_0925'),
]
operations = [
migrations.CreateModel(
name='Book',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('isbn', models.CharField(max_length=13, unique=True)),
('title', models.CharField(max_length=150)),
('subtitle', models.CharField(max_length=150, null=True)),
('author', models.CharField(max_length=150)),
('originTitle', models.CharField(max_length=150, null=True)),
('translator', models.CharField(max_length=150, null=True)),
('publisher', models.CharField(max_length=150)),
('pubDate', models.CharField(max_length=30)),
('authorInfo', models.TextField(null=True)),
('summary', models.TextField(null=True)),
('tags', models.TextField(null=True)),
('rating', models.CharField(max_length=5, null=True)),
('image', models.CharField(max_length=1024, null=True)),
],
options={
'ordering': ('isbn',),
},
),
migrations.CreateModel(
name='Bookcase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=150)),
('isbn', models.CharField(max_length=13)),
],
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=150)),
('contactUsername', models.CharField(max_length=150)),
],
),
migrations.AlterUniqueTogether(
name='contact',
unique_together=set([('username', 'contactUsername')]),
),
migrations.AlterUniqueTogether(
name='bookcase',
unique_together=set([('username', 'isbn')]),
),
]
| gpl-3.0 | 3,339,412,799,141,698,600 | 38.142857 | 114 | 0.534874 | false | 4.3879 | false | false | false |
ninovolador/animal-guesser | adivinaconf.py | 1 | 2209 | # -*- coding: utf-8 -*-
YES = "si"
NO = "no"
POS_YES= ['si','s','sí','yes','y']
POS_NO= ['no','n']
ACCEPTABLE_YESNO = POS_YES + POS_NO
THING = "animal" #"persona", "cosa", "película"
RANDOM = False
NEW_AI = True
FAST = False
POINTS = True
MAXPOINTS = 5
MAXTRIES = 6
'''This is the animal-guesser configuration file
YES: word used on the database to represent YES
NO: word used on the database to represent NO
POS_YES and POS_NO: acceptable yes and no answers
THING: thing to guess. The original game is for animales, but can be used to play for
any thing or person. In order to be able to play the game, one must set up a new file called
"database_THING.json" with the following content:
# #
# {QUESTION:{YES:[ANSWER],NO:[]}} #
# #
where YES and NO are the variables specified in this file, and QUESTION and ANSWER
correspond to an initial question and answer, with quotes.
RANDOM: no "smart" question choosing, random instead.
# #
# possible values: True o False #
# #
FAST: Enables fast answer discard. If the database does not contain the answer for a thing, discards it.
Less learning, but faster games.
# #
# possible values: True o False #
# #
NEW_AI: Enables new question choosing system, using possible answers in current game, rather than all answers,
as it was before. Automatically disabled if used along RANDOM. Maybe too fast if used along FAST.
# #
# possible values: True o False #
# #
POINTS: Enables point system. This allows the program to try to guess when some thing has enough points, rather than
discarding all the other possible answers.
# #
# possible values: True o False #
# #
MAXPOINTS: Number of points needed to try and guess. (All things starts with 1 point)
MAXTRIES: Number of guessing tries before surrending.
'''
| unlicense | 2,208,694,325,087,729,400 | 38.410714 | 116 | 0.577707 | false | 3.927046 | false | false | false |
simonmonk/prog_pi_ed2 | 10_RGB_LED.py | 1 | 2182 | # 10_RGB_LED.py
from tkinter import *
import RPi.GPIO as GPIO
import time
# Configure the Pi to use the BCM (Broadcom) pin names, rather than the pin positions
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.OUT)
GPIO.setup(23, GPIO.OUT)
GPIO.setup(24, GPIO.OUT)
# Start Pulse Width Modulation (PWM) on the red, green and blue channels
pwmRed = GPIO.PWM(18, 500)
pwmRed.start(100)
pwmGreen = GPIO.PWM(23, 500)
pwmGreen.start(100)
pwmBlue = GPIO.PWM(24, 500)
pwmBlue.start(100)
# group together all of the GUI code into a class called App
class App:
# this function gets called when the app is created
def __init__(self, master):
# A frame holds the various GUI controls
frame = Frame(master)
frame.pack()
# Create the labels and position them in a grid layout
Label(frame, text='Red').grid(row=0, column=0)
Label(frame, text='Green').grid(row=1, column=0)
Label(frame, text='Blue').grid(row=2, column=0)
# Create the sliders and position them in a grid layout
# the 'command' attribute specifys a method to call when
# a slider is moved
scaleRed = Scale(frame, from_=0, to=100,
orient=HORIZONTAL, command=self.updateRed)
scaleRed.grid(row=0, column=1)
scaleGreen = Scale(frame, from_=0, to=100,
orient=HORIZONTAL, command=self.updateGreen)
scaleGreen.grid(row=1, column=1)
scaleBlue = Scale(frame, from_=0, to=100,
orient=HORIZONTAL, command=self.updateBlue)
scaleBlue.grid(row=2, column=1)
# These methods called whenever a slider moves
def updateRed(self, duty):
# change the led brightness to match the slider
pwmRed.ChangeDutyCycle(float(duty))
def updateGreen(self, duty):
pwmGreen.ChangeDutyCycle(float(duty))
def updateBlue(self, duty):
pwmBlue.ChangeDutyCycle(float(duty))
# Set the GUI running, give the window a title, size and position
root = Tk()
root.wm_title('RGB LED Control')
app = App(root)
root.geometry("200x150+0+0")
try:
root.mainloop()
finally:
print("Cleaning up")
GPIO.cleanup()
| mit | 8,000,308,267,194,469,000 | 30.171429 | 85 | 0.653987 | false | 3.398754 | false | false | false |
RomanOsadchuk/GoStory | stories/forms.py | 1 | 1779 | from django import forms
from django.forms import widgets
from django.core.validators import MinLengthValidator
from .models import Story, Chapter
class CreateStoryForm(forms.Form):
title = forms.CharField(min_length=Story.MIN_TITLE_LEN,
max_length=Story.MAX_TITLE_LEN)
# next is about first chapter:
headline = forms.CharField(min_length=Chapter.MIN_HEADLINE_LEN,
max_length=Chapter.MAX_HEADLINE_LEN,
label='Headline of first chapter')
body = forms.CharField(min_length=Chapter.MIN_BODY_LEN,
max_length=Chapter.MAX_BODY_LEN,
widget=widgets.Textarea(),
label='Body of first chapter')
class AddChapterForm(forms.ModelForm):
class Meta:
model = Chapter
fields = ['parent', 'headline', 'body']
widgets = {
'parent': widgets.HiddenInput(),
'body': widgets.Textarea(),
}
def __init__(self, user=None, *args, **kwargs):
super(AddChapterForm, self).__init__(*args, **kwargs)
self.user = user
headline_min_len_val = MinLengthValidator(Chapter.MIN_HEADLINE_LEN)
self.fields['headline'].validators.append(headline_min_len_val)
body_min_len_val = MinLengthValidator(Chapter.MIN_BODY_LEN)
self.fields['body'].validators.append(body_min_len_val)
def save(self, *args, **kwargs):
created_chapter = super(AddChapterForm, self).save(*args, **kwargs)
if self.user and self.user.is_authenticated():
created_chapter.author = self.user
created_chapter.readers.add(self.user)
created_chapter.save()
return created_chapter
| gpl-3.0 | 799,759,088,412,932,700 | 39.431818 | 75 | 0.606521 | false | 4.12761 | false | false | false |
jonesmartins/pycrusher | pycrusher/name_generator.py | 1 | 3792 | #!/usr/bin/env python
import os
def make_dir(dir_name):
"""Makes sure target directory doesn't exist, or that the
command wasn't called from inside said directory.
Args:
dir_name(str): Name of directory where all compressed
images will be stored. Usually called 'compressed'
Returns:
dir_name(str): Same as above.
"""
if not os.path.isdir(dir_name): # dir_name doesn't exist
if dir_name in os.getcwd(): # but already inside dir_name
return '' # doesn't add dir_name to path
os.mkdir(dir_name) # else, creates it
return dir_name # returns name, so adds to path
def check_filenames(input_name, output_name):
"""This function prepares the file to be saved in another
format, so it removes its format and inserts 'compressed_'
before the actual name to avoid collisions. If user inserted
a correct output_name, nothing happens to it.
Args:
input_name(str): Name of input file
output_name(str): Name of output file (optional)
Returns:
Returns output(str) and its format(str) in a list.
Raises:
ValueError: File format not in acceptable set
"""
if not output_name:
output_name = os.path.basename(input_name)
'''
if 'compressed_' in input_name:
output_name = os.path.basename(input_name)
else:
output_name = 'compressed_{}'.format(os.path.basename(input_name))
'''
acceptable = set(['.bmp', '.jpg', '.pbm', '.pgm',
'.png', '.ppm', '.rgb', '.webp'])
input_path = os.path.splitext(os.path.basename(input_name))
output_path = os.path.splitext(os.path.basename(output_name))
if input_path[1] not in acceptable:
raise ValueError('Input format {} not acceptable!'.format(input_path[1]))
if not output_path[0] or output_path[1] not in acceptable:
# User gave partial output(no name or no format) or gave wrong format
print(output_path)
raise ValueError('Output not acceptable!')
return os.path.splitext(os.path.basename(output_name))
def get_final_output_name(input_name, output_name='',
iterations=50, extra=1,
colors=1.0, reverse=False,
preprocess=False, dir_name=''):
"""Gets all files related to the output and runs them all together.
Args:
input_name(str): Input given by parse_args.
output_name(str)[Optional]: Output given by parse_args.
iterations(int)[Optional]: How many times to iterate compression
extra(int)[Optional]: How much to enforce compression
colors(List[float])[Optional]: Adds 'c' and list of colors to final name
reverse(bool)[Optional]: Adds 'r' to final name to indicate use
preprocess(bool)[Optional]: Adds 'p' to final name to indicate use
dir_name(str)[Optional]: Name of directory to store compressed images
Returns:
String representing the final file name, with some info about its
configuration.
"""
typeless_output, file_format = check_filenames(input_name,
output_name)
if not output_name:
typeless_output = '{}_i{}e{}'.format(typeless_output,
iterations,
extra)
if reverse:
typeless_output += 'r'
if preprocess:
typeless_output += 'p'
if colors != [1.0] or colors != 1.0:
typeless_output += 'c{}'.format(str(colors).replace(' ', ''))
return os.path.join(dir_name, '{}{}'.format(typeless_output, file_format))
| apache-2.0 | 24,299,729,753,013,950 | 41.606742 | 81 | 0.591245 | false | 4.199336 | false | false | false |
SqueezeStudioAnimation/dpAutoRigSystem | dpAutoRigSystem/Extras/dpHeadDeformer.py | 1 | 6699 | # importing libraries:
import maya.cmds as cmds
import maya.mel as mel
from ..Modules.Library import dpControls as ctrls
# global variables to this module:
CLASS_NAME = "HeadDeformer"
TITLE = "m051_headDef"
DESCRIPTION = "m052_headDefDesc"
ICON = "/Icons/dp_headDeformer.png"
class HeadDeformer():
def __init__(self, *args, **kwargs):
# call main function
self.dpHeadDeformer(self)
def dpHeadDeformer(self, *args):
""" Create the arrow curve and deformers (squash and bends).
"""
# get a list of selected items
selList = cmds.ls(selection=True)
if selList:
# twist deformer
twistDefList = cmds.nonLinear(selList, name="TwistHead", type="twist")
defSize = cmds.getAttr(twistDefList[0]+".highBound")
defScale = cmds.getAttr(twistDefList[1]+".scaleY")
defTy = -(defSize * defScale)
defTy = ctrls.dpCheckLinearUnit(defTy)
cmds.setAttr(twistDefList[0]+".lowBound", 0)
cmds.setAttr(twistDefList[0]+".highBound", (defSize * 2))
cmds.setAttr(twistDefList[1]+".ty", defTy)
# squash deformer
squashDefList = cmds.nonLinear(selList, name="SquashHead", type="squash")
cmds.setAttr(squashDefList[0]+".highBound", (defSize * 4))
cmds.setAttr(squashDefList[1]+".ty", defTy)
cmds.setAttr(squashDefList[0]+".startSmoothness", 1)
# side bend deformer
sideBendDefList = cmds.nonLinear(selList, name="BendSideHead", type="bend")
cmds.setAttr(sideBendDefList[0]+".lowBound", 0)
cmds.setAttr(sideBendDefList[0]+".highBound", (defSize * 4))
cmds.setAttr(sideBendDefList[1]+".ty", defTy)
# front bend deformer
frontBendDefList = cmds.nonLinear(selList, name="BendFrontHead", type="bend")
cmds.setAttr(frontBendDefList[0]+".lowBound", 0)
cmds.setAttr(frontBendDefList[0]+".highBound", (defSize * 4))
cmds.setAttr(frontBendDefList[1]+".ry", -90)
cmds.setAttr(frontBendDefList[1]+".ty", defTy)
# arrow control curve
arrowCtrl = self.dpCvArrow("Deformer_Ctrl")
# add control intensite attributes
cmds.addAttr(arrowCtrl, longName="intensityX", attributeType='float', keyable=True)
cmds.addAttr(arrowCtrl, longName="intensityY", attributeType='float', keyable=True)
cmds.addAttr(arrowCtrl, longName="intensityZ", attributeType='float', keyable=True)
cmds.setAttr(arrowCtrl+".intensityX", 0.1)
cmds.setAttr(arrowCtrl+".intensityY", 0.1)
cmds.setAttr(arrowCtrl+".intensityZ", 0.1)
# multiply divide in order to intensify influences
mdNode = cmds.createNode("multiplyDivide", name="Deformer_MD")
mdTwistNode = cmds.createNode("multiplyDivide", name="Deformer_Twist_MD")
cmds.setAttr(mdTwistNode+".input2Y", -1)
# connections
cmds.connectAttr(arrowCtrl+".tx", mdNode+".input1X", force=True)
cmds.connectAttr(arrowCtrl+".ty", mdNode+".input1Y", force=True)
cmds.connectAttr(arrowCtrl+".tz", mdNode+".input1Z", force=True)
cmds.connectAttr(arrowCtrl+".ry", mdTwistNode+".input1Y", force=True)
cmds.connectAttr(arrowCtrl+".intensityX", mdNode+".input2X", force=True)
cmds.connectAttr(arrowCtrl+".intensityY", mdNode+".input2Y", force=True)
cmds.connectAttr(arrowCtrl+".intensityZ", mdNode+".input2Z", force=True)
cmds.connectAttr(mdNode+".outputX", sideBendDefList[0]+".curvature", force=True)
cmds.connectAttr(mdNode+".outputY", squashDefList[0]+".factor", force=True)
cmds.connectAttr(mdNode+".outputZ", frontBendDefList[0]+".curvature", force=True)
cmds.connectAttr(mdTwistNode+".outputY", twistDefList[0]+".endAngle", force=True)
# change squash to be more cartoon
cmds.setDrivenKeyframe(squashDefList[0]+".lowBound", currentDriver=mdNode+".outputY", driverValue=-1, value=-4, inTangentType="auto", outTangentType="auto")
cmds.setDrivenKeyframe(squashDefList[0]+".lowBound", currentDriver=mdNode+".outputY", driverValue=0, value=-2, inTangentType="auto", outTangentType="auto")
cmds.setDrivenKeyframe(squashDefList[0]+".lowBound", currentDriver=mdNode+".outputY", driverValue=2, value=-1, inTangentType="auto", outTangentType="flat")
# fix side values
axisList = ["X", "Y", "Z"]
for axis in axisList:
unitConvNode = cmds.listConnections(mdNode+".output"+axis, destination=True)[0]
if unitConvNode:
if cmds.objectType(unitConvNode) == "unitConversion":
cmds.setAttr(unitConvNode+".conversionFactor", 1)
attrList = ['rx', 'rz', 'sx', 'sy', 'sz', 'v']
for attr in attrList:
cmds.setAttr(arrowCtrl+"."+attr, lock=True, keyable=False)
cmds.setAttr(arrowCtrl+".intensityX", edit=True, keyable=False, channelBox=True)
cmds.setAttr(arrowCtrl+".intensityY", edit=True, keyable=False, channelBox=True)
cmds.setAttr(arrowCtrl+".intensityZ", edit=True, keyable=False, channelBox=True)
# create groups
arrowCtrlGrp = cmds.group(arrowCtrl, name="Deformer_Ctrl_Grp")
cmds.setAttr(arrowCtrlGrp+".ty", -1.75*defTy)
cmds.group((squashDefList[1], sideBendDefList[1], frontBendDefList[1], twistDefList[1]), name="Deformer_Data_Grp")
# finish selection the arrow control
cmds.select(arrowCtrl)
else:
mel.eval("warning" + "\"" + "Select objects to create headDeformers, usually we create in the blendShape RECEPT target" + "\"" + ";")
def dpCvArrow(self, ctrlName="arrowCurve", radius=1, *args):
""" Create an arrow control curve and returns it.
"""
if cmds.objExists(ctrlName) == True:
originalCtrlName = ctrlName
i = 1
while cmds.objExists(ctrlName) == True:
ctrlName = originalCtrlName+str(i)
i += 1
r = radius*0.1
arrowCurve = cmds.curve(name=ctrlName, d=1, p=[(0, 0, 0), (-2*r, r, 0), (-r, r, 0), (-r, 4*r, 0), (r, 4*r, 0), (r, r, 0), (2*r, r, 0), (0, 0, 0)])
cmds.rename(cmds.listRelatives(arrowCurve, children=True, type="nurbsCurve", shapes=True), arrowCurve+"Shape")
return arrowCurve | gpl-2.0 | -8,011,064,889,020,540,000 | 53.032258 | 168 | 0.605613 | false | 3.518382 | false | false | false |
jkr/notmuch-client | nmclient/dates.py | 1 | 5062 | # -*- coding: utf-8 -*-
#########################################################################
# dates.py: for use with date-range substitution in notmuch-client #
# #
# Copyright © 2011 Jesse Rosenthal #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see http://www.gnu.org/licenses/ . #
# #
# Author: Jesse Rosenthal <[email protected]> #
#########################################################################
import time
from datetime import datetime, date, timedelta
relative_day_dict = {"monday": 0,
"tuesday": 1,
"wednesday": 2,
"thursday": 3,
"friday": 4,
"saturday": 5,
"sunday": 6,
"mon": 0,
"tue": 1,
"wed": 2,
"thu": 3,
"fri": 4,
"sat": 5,
"sun": 6}
def _dayname_to_datetime (dayname):
dayname_lower = dayname.lower()
today = date.today()
today_day_num = today.weekday()
if dayname_lower in ("today", "yesterday"):
return today - timedelta(("today", "yesterday").index(dayname_lower))
elif dayname_lower in relative_day_dict:
return today - timedelta((today_day_num -
day_dict[dayname_lower]) % 7)
else:
raise NotmuchDateRangeError, \
"Unknow date keyword: %s" % dayname
def _timestring_to_datetime (date_string, default_datetime = None):
"""
Takes a timestring of the form:
[[{YYYY}-]{M}-]{D}
and converts it a datetime. The.
"""
if not default_datetime:
default_datetime = date.today()
try:
out = _dayname_to_datetime(date_string)
except NotmuchDateRangeError:
try:
split_date = [int(elem) for elem in date_string.split('-')]
except ValueError:
raise NotmuchDateRangeError, \
"Illegal date format: %s" % date_string
# Now, we go through the date, and fill in missing parts with our
# default
if len(split_date) == 1:
modified_date = (default_datetime.year,
default_datetime.month,
split_date[0])
elif len(split_date) == 2:
modified_date = (default_datetime.year,
split_date[0],
split_date[1])
elif len(split_date) == 3:
modified_date = split_date
else:
raise NotmuchDateRangeError, \
"Illegal date format: %s" % split_date
out = datetime(*modified_date)
return out
class NotmuchDateRangeError (Exception):
pass
class DateRange (object):
def __init__ (self, startstamp, endstamp):
self.start = startstamp
self.end = endstamp
@classmethod
def from_string_range(cls, range_string):
split_range = range_string.split("--")
if len(split_range) == 1:
start = _timestring_to_datetime(split_range[0])
end = start
elif len(split_range) == 2:
if not split_range[0]:
start = datetime.fromtimestamp(0)
end = _timestring_to_datetime(split_range[0])
elif not split_range[1]:
start = _timestring_to_datetime(split_range[0])
end = date.today()
else:
start = _timestring_to_datetime(split_range[0])
end = _timestring_to_datetime(split_range[1])
else:
raise NotmuchDateRangeError, \
"Not a valid range string: %s" % range_string
end += timedelta(1)
startstamp = time.mktime(start.timetuple())
endstamp = time.mktime(end.timetuple())
return cls(startstamp, endstamp)
def as_timestamp_range(self):
return "%d..%d" % (self.start, self.end)
| gpl-3.0 | 4,121,855,125,936,414,700 | 37.930769 | 77 | 0.478166 | false | 4.427822 | false | false | false |
tlhallock/line-search-dfo | python/algorithms/filter_linesearch.py | 1 | 9089 | from math import inf as infinity
from numpy import int as integral
from numpy import bmat as blockmat
from numpy import concatenate
from numpy import dot
from numpy import empty
from numpy import zeros
from numpy.linalg import cond as condition_number
from numpy.linalg import lstsq
from numpy.linalg import norm as norm
from numpy.linalg import solve as linsolve
from scipy.optimize import minimize
import matplotlib.pyplot as plt
from utilities.nondom import NonDomSet
class Constants:
def __init__(self, theta_max):
self.theta_max = theta_max # (theta(x0), infty)
self.gamma_theta = .01 # (0,1)
self.gamma_f = .75 # (0,1)
self.delta = .01 # (0,infty)
self.gamma_alpha = .5 # (0,1]
self.s_theta = 2 # (1,infty)
self.s_f = 3 # [1,infty)
self.eta_f = .025 # (0, .5)
self.tau_one = .25 # (0, tau_two]
self.tau_two = .75 # [tau_two, 1)
self.plot = True # Should all the plots be generated
self.max_condition_number = 1000
self.tau = (self.tau_one + self.tau_two) / 2
class Result:
def __init__(self):
self.number_of_iterations = -1 # we increment by one in the first iteration
self.restorations = 0
self.ftype_iterations = 0
self.filter_modified_count = 0
self.pareto = NonDomSet()
self.success = False
self.f_min = infinity
self.x_min = 0
self.filterRejectedCount = 0
self.criteria_satifisfied_but_trust_region_not = 0
def newF(self, otherX, otherF):
if self.f_min < otherF:
return
self.f_min = otherF
self.x_min = otherX
def theta(statement, x):
return getThetaAndIneq(statement, x)[0]
def getThetaAndIneq(statement, x):
c, _, ineq = getConstraintInfo(statement, x)
return norm(c), ineq
# retVal = 0
# if statement.hasEqualityConstraints():
# retVal += norm(statement.equalityConstraints(x))
# if statement.hasInequalityConstraints():
# c = statement.inequalityConstraints(x)
# retVal += norm(c[c > -statement.tol])
# return retVal
def getConstraintInfo(statement, x):
if statement.hasEqualityConstraints():
cEq = statement.equalityConstraints(x)
aEq = statement.equalityConstraintsJacobian(x)
if not statement.hasInequalityConstraints():
return cEq, aEq, empty(0)
if statement.hasInequalityConstraints():
cIneqAll = statement.inequalityConstraints(x)
aIneqAll = statement.inequalityConstraintsJacobian(x)
active = cIneqAll > -statement.tol
cIneqActive = cIneqAll[active]
aIneqActive = aIneqAll[active]
if statement.hasEqualityConstraints():
c = concatenate([cEq, cIneqActive])
A = blockmat([[aEq], [aIneqActive]])
return c, A, cIneqAll
else:
return cIneqActive, aIneqActive, cIneqAll
return None, None, None
# don't check constraints that are currently active going to false...
def addedActiveConstraint(newIneq, cIneq, tol):
# Check that we are not adding any active constraints...
# Don't want to just check the "active" variable from computeConstraintInfo
# because of the tolerance issue while we are on it.
# addedInactive = all([not newIneq[i] for i, x in enumerate(state.cIneq) if not x])
# comparing with zero instead of tolerance (not entirely sure why...)
# I might should use -tol...
return any([newIneq[i] > 0 for i, x in enumerate(cIneq) if x < tol])
class AlgorithmState:
def __init__(self, statement):
self.x = statement.x0
self.grad = 0
self.pareto = NonDomSet()
self.f = infinity
self.grad = None
self.hess = None
self.A = None
self.c = None
self.cIneq = None
self.d = empty(len(self.x))
self.x_new = None
self.ftype = False
self.accept = False
self.theta = None
def setCurrentIterate(self, statement):
self.f = statement.objective(self.x)
self.grad = statement.gradient(self.x)
self.hess = statement.hessian(self.x)
self.theta = theta(statement, self.x)
self.c, self.A, self.cIneq = getConstraintInfo(statement, self.x)
def createKKT(self):
if self.A is None:
return self.hess
m = self.getM()
return blockmat([[self.hess, self.A.T], [self.A, zeros((m, m))]])
def createRhs(self):
if self.A is None:
return self.grad
return concatenate([self.grad, self.c])
def getN(self):
return len(self.x)
def getM(self):
if self.A is None:
return 0
return self.A.shape[0]
def show(self, statement):
fileName = statement.createBasePlotAt(self.x)
self.model.addPointsToPlot()
#amin(shifted, 0)
totalDist = norm(self.x_new - self.x)
hw = .1 * totalDist
hl = .1 * totalDist
plt.arrow(x=self.x[0], y=self.x[1],
dx = (self.x_new[0] - self.x[0]), dy = (self.x_new[1] - self.x[1]),
head_width = hw, head_length = hl, fc = 'g', ec = 'g')
plt.arrow(x=self.x[0], y=self.x[1],
dx = -totalDist * self.grad[0] / norm(self.grad),
dy = -totalDist * self.grad[1] / norm(self.grad),
head_width = hw, head_length = hl, fc = 'y', ec = 'y')
plt.savefig(fileName)
plt.close()
def checkStoppingCriteria(statement, state):
hasConstraints = statement.hasEqualityConstraints() or statement.hasInequalityConstraints()
if not hasConstraints:
return norm(state.grad) < statement.tol
if statement.hasConstraints() and (state.c > statement.tol).any():
return False
lmbda,_,_,_ = lstsq(state.A.T, -state.grad)
# What exactly is this supposed to be checking?
if norm(state.grad + dot(state.A.T, lmbda)) > statement.tol:
return False
if statement.hasInequalityConstraints():
numEqualityConstraints = statement.getNumEqualityConstraints()
if any(lmbda[numEqualityConstraints:len(lmbda)] < -statement.tol):
return False
return True
def compute_alpha_min(statement, constants, state):
gDotd = dot(state.grad.T, state.d)
if gDotd < -statement.tol:
return constants.gamma_alpha * min(
constants.gamma_theta,
-constants.gamma_f*state.theta/(gDotd),
(constants.delta*state.theta**constants.s_theta)/((-gDotd)**constants.s_f))
else:
return constants.gamma_alpha * constants.gamma_theta
def restore_feasibility(statement, x0):
res = minimize(lambda x: theta(statement, x), x0, method='Nelder-Mead', options={'xtol': 1e-8, 'disp': False, 'maxfev': 1000})
return res.x
def filter_line_search(program, constants):
results = Result()
state = AlgorithmState(program)
while True:
results.number_of_iterations += 1
print(results.number_of_iterations)
state.setCurrentIterate(program)
n = state.getN()
if checkStoppingCriteria(program, state):
if not program.converged():
results.criteria_satifisfied_but_trust_region_not += 1
continue
results.newF(state.x, state.f)
results.success = True
break
kktmat = state.createKKT()
state.cond = condition_number(kktmat)
if state.cond > constants.max_condition_number:
results.restorations += 1
state.x = restore_feasibility(program, state.x)
continue
rhs = state.createRhs()
vec = linsolve(kktmat, rhs.T)
state.d[:] = -vec[0:n]
state.alpha_min = compute_alpha_min(program, constants, state)
state.alpha = 1
state.accept = False
gDotd = dot(state.grad.T, state.d)
while not state.accept:
m = state.alpha * gDotd
# Hack, maybe: clip to trust region: this should be solved in the subproblem!!!
state.d = program.clipToTrustRegion(state.d)
if state.alpha < state.alpha_min:
state.x = restore_feasibility(program, state.x)
results.restorations += 1
break
state.x_new = state.x + state.alpha * state.d
state.theta_new, newIneq = getThetaAndIneq(program, state.x_new)
state.f_new = program.objective(state.x_new)
if norm(state.d) * state.alpha < program.model.modelRadius / 4:
program.model.multiplyRadius(program.radius_decrease)
program._improve()
# If we are about to add a constraint that was not active, then don't
if addedActiveConstraint(newIneq, state.cIneq, program.tol):
state.alpha = state.alpha * constants.tau
continue
if constants.plot:
state.show(program)
if results.pareto.is_dominated((state.theta_new, state.f_new)):
state.alpha = state.alpha * constants.tau
results.filterRejectedCount += 1
continue
state.ftype = m < 0 and ((-m)**constants.s_f * state.alpha**(1-constants.s_f) > constants.delta * state.theta ** constants.s_theta);
if state.ftype:
if state.f_new <= state.f + constants.eta_f * m:
state.accept = True
else:
eight_a = state.theta_new <= (1-constants.gamma_theta) * state.theta
eight_b = state.f_new <= state.f - constants.gamma_f * state.theta_new
if eight_a or eight_b:
state.accept = True
state.alpha = state.alpha * constants.tau
if state.accept:
if not program.acceptable(state.x_new):
continue
if state.ftype:
results.ftype_iterations += 1
if (1-constants.gamma_theta) * state.theta_new > program.tol:
results.pareto.add(((1 - constants.gamma_theta) * state.theta_new, state.f_new - constants.gamma_f * state.theta_new))
results.filter_modified_count += 1
state.x = state.x_new
return results
| gpl-3.0 | -2,567,806,604,022,658,600 | 28.22508 | 135 | 0.679503 | false | 2.931935 | false | false | false |
wufangjie/leetcode | 661. Image Smoother.py | 1 | 2006 | class Solution(object):
def imageSmoother(self, M):
"""
:type M: List[List[int]]
:rtype: List[List[int]]
"""
if not M:
return M
nrow, ncol = len(M), len(M[0])
if ncol == 1:
if nrow == 1:
return M
else:
temp = self.sum_row([row[0] for row in M], nrow)
return [[v // (3 if 0 < j < nrow - 1 else 2)]
for j, v in enumerate(temp)]
rows = [self.sum_row(row, ncol) for row in M]
if nrow == 1:
return [[rows[0][j] // (3 if 0 < j < ncol - 1 else 2)
for j in range(ncol)]]
ret = []
for i, row in enumerate(M):
if i == 0:
ret.append([(rows[i][j] + rows[i + 1][j])
// (6 if 0 < j < ncol - 1 else 4)
for j in range(ncol)])
elif i == nrow - 1:
ret.append([(rows[i - 1][j] + rows[i][j])
// (6 if 0 < j < ncol - 1 else 4)
for j in range(ncol)])
else:
ret.append([(rows[i - 1][j] + rows[i][j] + rows[i + 1][j])
// (9 if 0 < j < ncol - 1 else 6)
for j in range(ncol)])
return ret
@staticmethod
def sum_row(row, ncol):
if ncol < 2:
return row
ret = [row[0] + row[1]]
if ncol == 2:
return ret * 2
ret.append(ret[0] + row[2])
for i in range(3, ncol):
ret.append(ret[-1] + row[i] - row[i - 3])
ret.append(row[-1] + row[-2])
return ret
# print(Solution().imageSmoother([[255,1,1],[1,0,1],[1,1,39]]))
# print(Solution().imageSmoother([[255,1]]))
# print(Solution().imageSmoother([[1]]))
# print(Solution().imageSmoother([[2, 3]]))
# print(Solution().imageSmoother([[3],[2]]))
print(Solution().imageSmoother([[7],[9],[6]]))
| gpl-3.0 | 8,525,313,534,943,113,000 | 28.940299 | 74 | 0.410768 | false | 3.360134 | false | false | false |
erdc/proteus | proteus/MeshTools.py | 1 | 350670 | """
Tools for creating and manipulating 1,2, and 3D meshes.
.. inheritance-diagram:: proteus.MeshTools
:parts: 1
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from builtins import input
from builtins import zip
from builtins import next
from builtins import str
from builtins import range
from past.utils import old_div
from builtins import object
from .EGeometry import *
import numpy as np
import array
import h5py
import os
from xml.etree import ElementTree as ET
from .Archiver import *
from .LinearAlgebraTools import ParVec_petsc4py
from .Profiling import logEvent,memory
from . import Domain
from . import Comm
from subprocess import check_call, check_output
class Node(object):
"""A numbered point in 3D Euclidean space
:ivar N: node number
:ivar p: Euclidean coordinates
Comparison operators and a hash value are defined using the 3-tuple of
coordinates. This allows using Node objects and tuples of node objects as
dictionary keys, but in that use case one should be careful not to modify
the node coordinates.
>>> n0 = Node(nodeNumber=0,x=0.0,y=0.0,z=0.0)
>>> n1 = Node(nodeNumber=1,x=1.0,y=1.0,z=1.0)
>>> n1 >= n0
True
"""
xUnitVector = EVec(1.0,0.0,0.0)
yUnitVector = EVec(0.0,1.0,0.0)
zUnitVector = EVec(0.0,0.0,1.0)
def __init__(self,nodeNumber=0,x=0.0,y=0.0,z=0.0):
self.N=nodeNumber
self.p=EVec(x,y,z)
self.basis = [Node.xUnitVector,
Node.yUnitVector,
Node.zUnitVector]
self.elementBoundaries=[]
self.barycenter = self.p
self.length = 1.0
self.diameter=self.length
self.innerDiameter=self.length
self.hasGeometricInfo = True
self.unitNormal = Node.xUnitVector
self.nodes=(self,)
def computeGeometricInfo(self):
pass
def __str__(self):
return str(self.N)+":"+str(self.p)
def __hash__(self):
return hash((self.p[X],self.p[Y],self.p[Z]))
def __lt__(self,other):
return (self.p[X],self.p[Y],self.p[Z]) < \
(other.p[X],other.p[Y],other.p[Z])
def __le__(self,other):
return (self.p[X],self.p[Y],self.p[Z]) <= \
(other.p[X],other.p[Y],other.p[Z])
def __eq__(self,other):
return (self.p[X],self.p[Y],self.p[Z]) == \
(other.p[X],other.p[Y],other.p[Z])
def __ne__(self,other):
return (self.p[X],self.p[Y],self.p[Z]) != \
(other.p[X],other.p[Y],other.p[Z])
def __gt__(self,other):
return (self.p[X],self.p[Y],self.p[Z]) > \
(other.p[X],other.p[Y],other.p[Z])
def __ge__(self,other):
return (self.p[X],self.p[Y],self.p[Z]) >= \
(other.p[X],other.p[Y],other.p[Z])
class Element(object):
"""An numbered polytope in R^n
:ivar N: element number
:ivar nodes: sorted tuple of nodes defining the polytope
The nodes data member can be used as a dictionary key for the polytope as
long as the nodes aren't later modified.
"""
def __init__(self,elementNumber=0,nodes=[]):
self.N = elementNumber
nodeList = nodes
nodeList.sort()
self.nodes = tuple(nodeList)
self.elementBoundaries=[]
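# Example (illustrative sketch, not executed): because Element stores its
# nodes as a lexicographically sorted tuple, elements built from the same
# nodes, in any order, produce the same key, so a dictionary keyed on
# e.nodes can be used to detect shared or duplicate elements.
#
#   n0, n1 = Node(0, 0.0, 0.0, 0.0), Node(1, 1.0, 0.0, 0.0)
#   e = Element(elementNumber=0, nodes=[n1, n0])
#   d = {e.nodes: e}
#   d[Element(elementNumber=1, nodes=[n0, n1]).nodes] is e   # True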
class Edge(Element):
    """
    1D Element--a line connecting two Nodes
    The nodes are stored as a lexicographically sorted node list.
    """
    xUnitVector = EVec(1.0,1.0,0.0)
    yUnitVector = EVec(0.0,1.0,0.0)
    zUnitVector = EVec(0.0,0.0,1.0)
def __init__(self,edgeNumber=0,nodes=[]):
#Element.__init__(self,edgeNumber,nodes)
#inline Element.__init__
self.N = edgeNumber
nodeList = nodes
nodeList.sort()
self.nodes = tuple(nodeList)
#self.nodes=nodes
#self.nodes=nodes[:]
#self.nodes.sort()
self.elementBoundaries = [self.nodes[1],self.nodes[0]]
self.hasGeometricInfo = False
def computeGeometricInfo(self):
if not self.hasGeometricInfo:
self.basis = [self.nodes[1].p - self.nodes[0].p,
Edge.yUnitVector,
Edge.zUnitVector]
self.barycenter = old_div((self.nodes[0].p + self.nodes[1].p),2.0)
self.length = enorm(self.basis[0])
self.normal = EVec(-self.basis[0][Y], self.basis[0][X],0.0)
norm = enorm(self.normal)
if norm:
self.unitNormal = old_div(self.normal,norm)
else:
#in 3D edge normals don't make sense in general so above
#may divide by zero if edge has zero projection onto x-y plane
self.normal = EVec(0.0, -self.basis[0][Z], self.basis[0][Y])
self.unitNormal = old_div(self.normal,enorm(self.normal))
self.diameter=self.length
self.innerDiameter = self.length
self.hasGeometricInfo = True
self.nodeUnitNormalList=[]
self.nodeUnitNormalList.append(old_div(-self.basis[0],self.length))
self.nodeUnitNormalList.append(old_div(self.basis[0],self.length))
self.elementBoundaryUnitNormalList=self.nodeUnitNormalList
self.elementBoundaryJacobianList=[Edge.xUnitVector,Edge.xUnitVector]
def getNodesFromEdges(edges):
"""Extract the subset of nodes from a list of edges."""
nodes={}
for e in edges:
for n in e.nodes:
nodes[n]=n
return list(nodes.values())
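# Example (sketch): the three edges of a triangle reference three distinct
# nodes, so getNodesFromEdges collapses the six edge endpoints to those three.
#
#   nodes = [Node(i, float(i), 0.0, 0.0) for i in range(3)]
#   edges = [Edge(0, [nodes[0], nodes[1]]),
#            Edge(1, [nodes[1], nodes[2]]),
#            Edge(2, [nodes[0], nodes[2]])]
#   len(getNodesFromEdges(edges)) == 3   # True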
class Polygon(Element):
"""An abstract 2D element--a closed set of Edges connecting a set of Nodes.
The nodes and edges are stored as lexicographically sorted lists."""
def __init__(self,polygonNumber=0,nodes=[]):
Element.__init__(self,polygonNumber,nodes)
#the edges have to be set up by the specific polygon
self.edges=[]
def getEdgesFromPolygons(polygons):
"""Extract the subset of edges from a list of polygons"""
edges={}
for p in polygons:
for e in p.edges:
edges[e.nodes] = e
return list(edges.values())
class Triangle(Polygon):
"""A 2D triangular element"""
edgeMap = {(1,2):0,(0,2):1,(0,1):2}
zUnitVector = EVec(0.0,0.0,1.0)
def __init__(self,triangleNumber=0,nodes=[],edgeDict=None):
#Polygon.__init__(self,triangleNumber,nodes)
#inline
self.edges=[]
#Element.__init__
#inline
self.N = triangleNumber
nodeList = nodes
nodeList.sort()
self.nodes = tuple(nodeList)
#self.nodes=nodes[:]
#self.nodes.sort()
#
edgeNodeList = [(self.nodes[1],self.nodes[2]),
(self.nodes[0],self.nodes[2]),
(self.nodes[0],self.nodes[1])]
if edgeDict is None:
self.edges = [Edge(eN,list(edgeNodes)) for \
eN,edgeNodes in enumerate(edgeNodeList)]
else:
self.edges = [edgeDict[edgeNodes] for edgeNodes in edgeNodeList]
self.hasGeometricInfo=False
self.elementBoundaries=self.edges
def computeGeometricInfo(self):
if not self.hasGeometricInfo:
self.barycenter = old_div((self.nodes[0].p +
self.nodes[1].p +
self.nodes[2].p),3.0)
self.basis = [ n.p - self.nodes[0].p for n in self.nodes[1:]]
self.basis.append(Triangle.zUnitVector)
self.linearMap = ETen(self.basis[0],self.basis[1],self.basis[2])
self.normal = ecross(self.basis[0],self.basis[1])
normNormal = enorm(self.normal)
self.unitNormal = old_div(self.normal,normNormal)
self.area = 0.5*normNormal
for e in self.edges: e.computeGeometricInfo()
self.diameter = max([e.length for e in self.edges])
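            #inner diameter = 2*(inradius) = 4*area/perimeter for a triangle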
self.innerDiameter = 4.0*self.area/sum(
[e.length for e in self.edges])
self.edgeUnitNormalList=[]
for nNt,eN in Triangle.edgeMap.items():
unitNormal = self.edges[eN].unitNormal
if edot(unitNormal,self.nodes[nNt[0]].p - self.nodes[eN].p) < 0:
unitNormal*=-1.0
self.edgeUnitNormalList.append(unitNormal)
self.elementBoundaryUnitNormalList = self.edgeUnitNormalList
self.hasGeometricInfo=True
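# Example (sketch): geometric quantities are computed lazily, so call
# computeGeometricInfo() before reading area, barycenter, normal, or diameter.
#
#   t = Triangle(0, [Node(0, 0.0, 0.0, 0.0),
#                    Node(1, 1.0, 0.0, 0.0),
#                    Node(2, 0.0, 1.0, 0.0)])
#   t.computeGeometricInfo()
#   t.area        # 0.5
#   t.diameter    # sqrt(2.0), the longest edge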
class Quadrilateral(Polygon):
"""A 2D quadrilateral element"""
def __init__(self,quadrilateralNumber=0,edges=[],simple=True):
Polygon.__init__(self,quadrilateralNumber)
self.edges = edges
nodeList = getNodesFromEdges(self.edges)
nodeList = self.sortNodes(nodeList)
self.nodes = tuple(nodeList)
self.hasGeometricInfo = False
self.elementBoundaries = self.edges
        # This boolean flags whether the quadrilateral is simple
        # (e.g. a rectangle). Certain features are more difficult
        # to implement if this is not the case.
        self.simple = simple
def sortNodes(self,nodeList):
newList = [None] * 4
coordinate_list = [1,1,1]
# initialize coordinate mins and maxs
xMin = nodeList[0].p[X]
xMax = nodeList[0].p[X]
yMin = nodeList[0].p[Y]
yMax = nodeList[0].p[Y]
zMin = nodeList[0].p[Z]
zMax = nodeList[0].p[Z]
for node in nodeList:
if xMin > node.p[X]:
xMin = node.p[X]
if xMax < node.p[X]:
xMax = node.p[X]
if yMin > node.p[Y]:
yMin = node.p[Y]
if yMax < node.p[Y]:
yMax = node.p[Y]
if zMin > node.p[Z]:
zMin = node.p[Z]
if zMax < node.p[Z]:
zMax = node.p[Z]
        # identify the degenerate coordinate direction.
        # NOTE - this is not entirely accurate, but assumes
        # 2D quadrilateral objects are orthogonal to one of
        # the canonical coordinate axes
if xMin==xMax:
coordinate_list[0] = 0
if yMin==yMax:
coordinate_list[1] = 0
if zMin==zMax:
coordinate_list[2] = 0
if sum(coordinate_list) !=2:
assert 0, 'Invalid 2D quadrilateral object'
for i, t in enumerate(coordinate_list):
if t == 0:
case = i
# x is degenerate variable
if case == 0:
var1 = 1 # y marked as first node
var2 = 2 # z marked as second
var1_min = yMin
var1_max = yMax
var2_min = zMin
var2_max = zMax
# y is degenerate variable
elif case == 1:
var1 = 0 # x marked as first node
var2 = 2 # z marked as second
var1_min = xMin
var1_max = xMax
var2_min = zMin
var2_max = zMax
# z is degenerate variable
elif case == 2:
var1 = 0 # x marked as first node
var2 = 1 # y marked as second
var1_min = xMin
var1_max = xMax
var2_min = yMin
var2_max = yMax
else:
            assert 0, 'Invalid Quadrilateral Mesh Case'
for node in nodeList:
if node.p[var1]==var1_min and node.p[var2]==var2_min:
newList[0] = node
elif node.p[var1]==var1_min and node.p[var2]==var2_max:
newList[1] = node
elif node.p[var1]==var1_max and node.p[var2]==var2_max:
newList[2] = node
elif node.p[var1]==var1_max and node.p[var2]==var2_min:
newList[3] = node
for i,item in enumerate(newList):
if not newList[i]:
                assert 0,'Quadrilateral Mesh Generation Error '+str(newList)+" i = "+str(i)
return newList
def computeGeometricInfo(self):
if not self.hasGeometricInfo:
for e in self.edges: e.computeGeometricInfo()
#the nodes must lie in a plane
#use triangles to compute area
#grab one triangle
t0 = Triangle(0,list(self.nodes[0:3]))
t0.computeGeometricInfo()
#find the nodes that lie on the new edge,diagonal0
for et in t0.edges:
edgeIsNew=True
for e in self.edges:
if e.nodes == et.nodes:
edgeIsNew=False
if edgeIsNew:
break
diagonal0=et
t1 = Triangle(0,[self.nodes[3],
diagonal0.nodes[0],
diagonal0.nodes[1]])
t1.computeGeometricInfo()
#get normal from one of the triangles
self.unitNormal = t0.unitNormal
self.area = t0.area + t1.area
#find the long diagonal
diagonalNode=0
for n in self.nodes[0:3]:
if n != diagonal0.nodes[0] and n != diagonal0.nodes[1]:
diagonalNode=n
                    break
diagonal1 = Edge(0,[n,self.nodes[3]])
diagonal1.computeGeometricInfo()
self.diameter = max(diagonal1.length,diagonal0.length)
self.innerDiameter = 4.0*self.area/sum(
[e.length for e in self.edges])
# Calculate the coordinate of a simple quad
if self.simple==True:
self.xmin = self.nodes[0].p[X]
self.ymin = self.nodes[0].p[Y]
self.xmax = self.nodes[0].p[X]
self.ymax = self.nodes[0].p[Y]
for node in self.nodes:
if node.p[X] < self.xmin:
self.xmin = node.p[X]
elif node.p[X] > self.xmax:
self.xmax = node.p[X]
else:
pass
if node.p[Y] < self.ymin:
self.ymin = node.p[Y]
elif node.p[Y] > self.ymax:
self.ymax = node.p[Y]
else:
pass
self.xmid = old_div((self.xmin+self.xmax),2.)
self.ymid = old_div((self.ymin+self.ymax),2.)
self.zmid = 0.
class Polyhedron(Element):
"""
An abstract 3D Element--a closed set of Polygons connecting a set
of Edges.
The nodes and edges are stored as lexicographically sorted lists.
"""
def __init__(self,polyhedronNumber=0,nodes=[]):
Element.__init__(self,polyhedronNumber,nodes)
self.edges=[]
self.polygons=[]
def __cmp__(self,other):
return compareNodes(self.nodes,other.nodes)
class Tetrahedron(Polyhedron):
"""A 3D tetrahedral element"""
triangleMap = {(1,2,3):0,(0,2,3):1,(0,1,3):2,(0,1,2):3}
edgeMap = {(0,1): 0,
(0,2): 1,
(0,3): 2,
(1,2): 3,
(1,3): 4,
(2,3): 5}
def __init__(self,tetrahedronNumber,nodes,edgeDict=None,triangleDict=None):
#Polyhedron.__init__(self,tetrahedronNumber,nodes)
#inline
#Element.__init__
#inline
self.N = tetrahedronNumber
nodeList = nodes
nodeList.sort()
self.nodes = tuple(nodeList)
#self.nodes=nodes[:]
#self.nodes.sort()
#
triangleNodeList = [(self.nodes[1],
self.nodes[2],
self.nodes[3]),
(self.nodes[0],
self.nodes[2],
self.nodes[3]),
(self.nodes[0],
self.nodes[1],
self.nodes[3]),
(self.nodes[0],
self.nodes[1],
self.nodes[2])]
if triangleDict is None:
self.triangles = [Triangle(triangleNumber=tN,
nodes=list(triangleNodes))
for tN,triangleNodes in
enumerate(triangleNodeList)]
else:
self.triangles = [triangleDict[triangleNodes] for triangleNodes in
triangleNodeList]
self.polygons=self.triangles
edgeNodeList = [(self.nodes[0],self.nodes[1]),
(self.nodes[0],self.nodes[2]),
(self.nodes[0],self.nodes[3]),
(self.nodes[1],self.nodes[2]),
(self.nodes[1],self.nodes[3]),
(self.nodes[2],self.nodes[3])]
if edgeDict is None:
self.edges = [Edge(edgeNumber=eN,nodes=list(edgeNodes)) for
eN,edgeNodes in enumerate(edgeNodeList)]
else:
self.edges = [edgeDict[edgeNodes] for edgeNodes in edgeNodeList]
self.hasGeometricInfo=False
self.elementBoundaries = self.triangles
def computeGeometricInfo(self):
if not self.hasGeometricInfo:
for t in self.triangles: t.computeGeometricInfo()
self.barycenter =old_div((self.nodes[0].p +
self.nodes[1].p +
self.nodes[2].p +
self.nodes[3].p),4.0)
self.basis = [n.p - self.nodes[0].p for n in self.nodes[1:]]
self.linearMap = ETen(self.basis[0],self.basis[1],self.basis[2])
self.volume = old_div(abs(edet(self.linearMap)),6.0)
self.diameter = max([t.diameter for t in self.triangles])
            #Zhang's formula for rho=innerDiameter of a simplex:
            #rho = 2*(inradius) = 6*volume/(total area of the faces)
self.innerDiameter = 6.0*self.volume/sum([t.area for t in
self.triangles])
self.triangleUnitNormalList=[]
for nNt,tN in Tetrahedron.triangleMap.items():
unitNormal = self.triangles[tN].unitNormal
if edot(unitNormal,self.nodes[nNt[0]].p - self.nodes[tN].p) < 0:
unitNormal *= -1.0
self.triangleUnitNormalList.append(unitNormal)
self.elementBoundaryUnitNormalList = self.triangleUnitNormalList
self.hasGeometricInfo=True
class Hexahedron(Polyhedron):
"""A 3D hexahedral element"""
def __init__(self,HN,quadrilaterals):
Polyhedron.__init__(self,HN)
self.N = HN
self.quadrilaterals = quadrilaterals
self.polygons = self.quadrilaterals
self.edges = getEdgesFromPolygons(quadrilaterals)
#self.nodes = getNodesFromEdges(self.edges)
#self.nodes.sort()
nodeList = getNodesFromEdges(self.edges)
nodeList.sort()
self.nodes = tuple(nodeList)
self.hasGeometricInfo=False
self.elementBoundaries = self.quadrilaterals
#todo add enum34 and replace with real Python enum
class MeshParallelPartitioningTypes(object):
"""
fake an enum for parallel partitioning options
"""
element = 0 ; node = 1
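# Example (sketch): the partitioning type is passed to Mesh.partitionMesh, e.g.
#
#   mesh.partitionMesh(nLayersOfOverlap=1,
#                      parallelPartitioningType=MeshParallelPartitioningTypes.node)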
class Mesh(object):
"""A partition of a domain in R^n into elements.
    This is the base class for meshes. It contains routines for
    plotting the edges of the mesh in Matlab.
Attributes
----------
elementBoundariesArray : array type
This array lists the global edge number associated with every
edge or face of an element.
"""
#cek adding parallel support
def __init__(self):
#array interface
self.nSubdomains_global=1
self.sN = 0
#node coordinates indexed by node number
self.nNodes_global=0
self.nNodes_subdomain=0
self.nodeArray=None
self.nodeVelocityArray=None
self.nNodes_element=0
#element node numbers, indexed by element number
self.nElements_global=0
self.nElements_proc=0
self.elementNodesArray=None
self.max_nElements_node=0
self.nElements_node=None #mwf warning not calculated in buildPythonFromC
self.nodeElementsArray=None
self.nodeElementOffsets=None
#element boundary numbers, indexed by element number
self.nElementBoundaries_element=0
self.elementBoundariesArray=None
#element numbers, indexed by element boundary number and left(0) and right(1) element
self.nElementBoundaries_global=0
self.elementBoundaryElementsArray=None
#local element boundary numbers, indexed by element boundary number and left(0) and right(1) element
self.elementBoundaryLocalElementBoundariesArray=None
#neighboring element numbers, indexed by local element boundary number
self.elementNeighborsArray=None
#node numbers, indexed by element boundary number
self.elementBoundaryNodesArray=None
#element boundary numbers, indexed by interior/exterior
#element boundary number
self.interiorElementBoundariesArray=None
self.nInteriorElementBoundaries_global=0
self.exteriorElementBoundariesArray=None
self.nExteriorElementBoundaries_global=0
#edge node numbers, indexed by edge number
self.nEdges_global=0
self.edgeNodesArray=None
self.nodeStarArray=None
self.nodeStarOffsets=None
self.h=0.0
self.hMin=0.0
self.hasGeometricInfo=False
self.boundaryMesh=None
#physical coordinates of element barycenters and elementBoundary barycenters
self.elementBarycentersArray=None
self.elementBoundaryBarycentersArray=None
self.nodeDiametersArray=None
self.nodeSupportArray=None
#unique labels for classes of elements, elementBoundaries, nodes,
self.elementMaterialTypes=None
self.elementBoundaryMaterialTypes=None
self.nodeMaterialTypes=None
#parallel stuff
self.nElements_owned=self.nElements_global
self.nNodes_owned=self.nNodes_global
self.nElementBoundaries_owned=self.nElementBoundaries_global
self.nEdges_owned=self.nEdges_global
self.elementOffsets_subdomain_owned=[0,self.nElements_global]
self.elementNumbering_subdomain2global=np.arange(self.nElements_global,dtype='i')
self.nodeOffsets_subdomain_owned=[0,self.nNodes_global]
self.nodeNumbering_subdomain2global=np.arange(self.nNodes_global,dtype='i')
self.elementBoundaryOffsets_subdomain_owned=[0,self.nElementBoundaries_global]
self.elementBoundaryNumbering_subdomain2global=np.arange(self.nElementBoundaries_global,dtype='i')
self.edgeOffsets_subdomain_owned=[0,self.nEdges_global]
self.edgeNumbering_subdomain2global=np.arange(self.nEdges_global,dtype='i')
self.subdomainMesh=self
self.globalMesh = None
self.arGridCollection=None
self.arGrid=None
self.nLayersOfOverlap = None
self.parallelPartitioningType = MeshParallelPartitioningTypes.element
def partitionMesh(self,nLayersOfOverlap=1,parallelPartitioningType=MeshParallelPartitioningTypes.node):
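        """Partition the global mesh across MPI processes.
        Builds self.subdomainMesh, the local mesh including nLayersOfOverlap
        layers of ghost entities, and records the ownership offsets and
        subdomain-to-global numbering maps for elements, nodes, element
        boundaries, and edges.
        Parameters
        ----------
        nLayersOfOverlap : int
            Number of layers of overlapping (ghost) entities shared with
            neighboring subdomains.
        parallelPartitioningType : int
            MeshParallelPartitioningTypes.node or
            MeshParallelPartitioningTypes.element.
        """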
from . import cmeshTools
from . import Comm
from . import cpartitioning
comm = Comm.get()
self.comm=comm
logEvent(memory("partitionMesh 1","MeshTools"),level=4)
logEvent("Partitioning mesh among %d processors using partitioningType = %d" % (comm.size(),parallelPartitioningType))
self.subdomainMesh=self.__class__()
self.subdomainMesh.globalMesh = self
self.subdomainMesh.cmesh=cmeshTools.CMesh()
self.nLayersOfOverlap = nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
logEvent(memory("partitionMesh 2","MeshTools"),level=4)
if parallelPartitioningType == MeshParallelPartitioningTypes.node:
#mwf for now always gives 1 layer of overlap
(self.elementOffsets_subdomain_owned,
self.elementNumbering_subdomain2global,
self.nodeOffsets_subdomain_owned,
self.nodeNumbering_subdomain2global,
self.elementBoundaryOffsets_subdomain_owned,
self.elementBoundaryNumbering_subdomain2global,
self.edgeOffsets_subdomain_owned,
self.edgeNumbering_subdomain2global) = cpartitioning.partitionNodes(comm.comm.tompi4py(),
nLayersOfOverlap,
self.cmesh,
self.subdomainMesh.cmesh)
else:
(self.elementOffsets_subdomain_owned,
self.elementNumbering_subdomain2global,
self.nodeOffsets_subdomain_owned,
self.nodeNumbering_subdomain2global,
self.elementBoundaryOffsets_subdomain_owned,
self.elementBoundaryNumbering_subdomain2global,
self.edgeOffsets_subdomain_owned,
self.edgeNumbering_subdomain2global) = cpartitioning.partitionElements(comm.comm.tompi4py(),
nLayersOfOverlap,
self.cmesh,
self.subdomainMesh.cmesh)
#
logEvent(memory("partitionMesh 3","MeshTools"),level=4)
self.subdomainMesh.buildFromC(self.subdomainMesh.cmesh)
self.subdomainMesh.nElements_owned = self.elementOffsets_subdomain_owned[comm.rank()+1] - self.elementOffsets_subdomain_owned[comm.rank()]
self.subdomainMesh.nNodes_owned = self.nodeOffsets_subdomain_owned[comm.rank()+1] - self.nodeOffsets_subdomain_owned[comm.rank()]
self.subdomainMesh.nElementBoundaries_owned = self.elementBoundaryOffsets_subdomain_owned[comm.rank()+1] - self.elementBoundaryOffsets_subdomain_owned[comm.rank()]
self.subdomainMesh.nEdges_owned = self.edgeOffsets_subdomain_owned[comm.rank()+1] - self.edgeOffsets_subdomain_owned[comm.rank()]
comm.barrier()
logEvent(memory("partitionMesh 4","MeshTools"),level=4)
logEvent("Number of Subdomain Elements Owned= "+str(self.subdomainMesh.nElements_owned))
logEvent("Number of Subdomain Elements = "+str(self.subdomainMesh.nElements_global))
logEvent("Number of Subdomain Nodes Owned= "+str(self.subdomainMesh.nNodes_owned))
logEvent("Number of Subdomain Nodes = "+str(self.subdomainMesh.nNodes_global))
logEvent("Number of Subdomain elementBoundaries Owned= "+str(self.subdomainMesh.nElementBoundaries_owned))
logEvent("Number of Subdomain elementBoundaries = "+str(self.subdomainMesh.nElementBoundaries_global))
logEvent("Number of Subdomain Edges Owned= "+str(self.subdomainMesh.nEdges_owned))
logEvent("Number of Subdomain Edges = "+str(self.subdomainMesh.nEdges_global))
comm.barrier()
logEvent("Finished partitioning")
par_nodeDiametersArray = ParVec_petsc4py(self.subdomainMesh.nodeDiametersArray,
bs=1,
n=self.subdomainMesh.nNodes_owned,
N=self.nNodes_global,
nghosts=self.subdomainMesh.nNodes_global - self.subdomainMesh.nNodes_owned,
subdomain2global=self.nodeNumbering_subdomain2global)
par_nodeDiametersArray.scatter_forward_insert()
# comm.beginSequential()
# from Profiling import memory
# memory()
# logEvent(memory("Partitioning Mesh","Mesh"),level=1)
# del self.cmesh
# #cmeshTools.deleteMeshDataStructures(self.cmesh)
# logEvent(memory("Without global mesh","Mesh"),level=1)
# comm.endSequential()
def partitionMeshFromFiles(self,filebase,base,nLayersOfOverlap=1,parallelPartitioningType=MeshParallelPartitioningTypes.node):
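        """Partition a mesh that is read directly from Tetgen/Triangle files.
        Same result as partitionMesh, but the global mesh is read from the
        files with base name filebase (node numbering starting at base)
        rather than from a fully built global cmesh, so only the subdomain
        arrays are constructed in Python.
        """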
from . import cmeshTools
from . import Comm
from . import cpartitioning
comm = Comm.get()
self.comm=comm
logEvent(memory("partitionMesh 1","MeshTools"),level=4)
logEvent("Partitioning mesh among %d processors using partitioningType = %d" % (comm.size(),parallelPartitioningType))
self.subdomainMesh=self.__class__()
self.subdomainMesh.globalMesh = self
self.subdomainMesh.cmesh=cmeshTools.CMesh()
self.nLayersOfOverlap = nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
logEvent(memory("partitionMesh 2","MeshTools"),level=4)
if parallelPartitioningType == MeshParallelPartitioningTypes.node:
logEvent("Starting nodal partitioning")#mwf for now always gives 1 layer of overlap
logEvent("filebase {0:s}".format(filebase))
logEvent("base {0:d}".format(base))
logEvent("nLayersOfOverlap {0:d}".format(nLayersOfOverlap))
logEvent("parallelPartitioningType {0:d}".format(parallelPartitioningType))
if isinstance(self,TetrahedralMesh):
(self.elementOffsets_subdomain_owned,
self.elementNumbering_subdomain2global,
self.nodeOffsets_subdomain_owned,
self.nodeNumbering_subdomain2global,
self.elementBoundaryOffsets_subdomain_owned,
self.elementBoundaryNumbering_subdomain2global,
self.edgeOffsets_subdomain_owned,
self.edgeNumbering_subdomain2global) = cpartitioning.partitionNodesFromTetgenFiles(comm.comm.tompi4py(),
filebase,
base,
nLayersOfOverlap,
self.cmesh,
self.subdomainMesh.cmesh)
elif isinstance(self,TriangularMesh):
(self.elementOffsets_subdomain_owned,
self.elementNumbering_subdomain2global,
self.nodeOffsets_subdomain_owned,
self.nodeNumbering_subdomain2global,
self.elementBoundaryOffsets_subdomain_owned,
self.elementBoundaryNumbering_subdomain2global,
self.edgeOffsets_subdomain_owned,
self.edgeNumbering_subdomain2global) = cpartitioning.partitionNodesFromTriangleFiles(comm.comm.tompi4py(),
filebase,
base,
nLayersOfOverlap,
self.cmesh,
self.subdomainMesh.cmesh)
else:
assert 0,"can't partition non-simplex mesh"
else:
logEvent("Starting element partitioning")
(self.elementOffsets_subdomain_owned,
self.elementNumbering_subdomain2global,
self.nodeOffsets_subdomain_owned,
self.nodeNumbering_subdomain2global,
self.elementBoundaryOffsets_subdomain_owned,
self.elementBoundaryNumbering_subdomain2global,
self.edgeOffsets_subdomain_owned,
self.edgeNumbering_subdomain2global) = cpartitioning.partitionElementsFromTetgenFiles(comm.comm.tompi4py(),
filebase,
base,
nLayersOfOverlap,
self.cmesh,
self.subdomainMesh.cmesh)
#
logEvent(memory("partitionMesh 3","MeshTools"),level=4)
self.buildFromCNoArrays(self.cmesh)
self.subdomainMesh.buildFromC(self.subdomainMesh.cmesh)
self.subdomainMesh.nElements_owned = self.elementOffsets_subdomain_owned[comm.rank()+1] - self.elementOffsets_subdomain_owned[comm.rank()]
self.subdomainMesh.nNodes_owned = self.nodeOffsets_subdomain_owned[comm.rank()+1] - self.nodeOffsets_subdomain_owned[comm.rank()]
self.subdomainMesh.nElementBoundaries_owned = self.elementBoundaryOffsets_subdomain_owned[comm.rank()+1] - self.elementBoundaryOffsets_subdomain_owned[comm.rank()]
self.subdomainMesh.nEdges_owned = self.edgeOffsets_subdomain_owned[comm.rank()+1] - self.edgeOffsets_subdomain_owned[comm.rank()]
comm.barrier()
logEvent(memory("partitionMesh 4","MeshTools"),level=4)
logEvent("Number of Subdomain Elements Owned= "+str(self.subdomainMesh.nElements_owned))
logEvent("Number of Subdomain Elements = "+str(self.subdomainMesh.nElements_global))
logEvent("Number of Subdomain Nodes Owned= "+str(self.subdomainMesh.nNodes_owned))
logEvent("Number of Subdomain Nodes = "+str(self.subdomainMesh.nNodes_global))
logEvent("Number of Subdomain elementBoundaries Owned= "+str(self.subdomainMesh.nElementBoundaries_owned))
logEvent("Number of Subdomain elementBoundaries = "+str(self.subdomainMesh.nElementBoundaries_global))
logEvent("Number of Subdomain Edges Owned= "+str(self.subdomainMesh.nEdges_owned))
logEvent("Number of Subdomain Edges = "+str(self.subdomainMesh.nEdges_global))
comm.barrier()
logEvent("Finished partitioning")
par_nodeDiametersArray = ParVec_petsc4py(self.subdomainMesh.nodeDiametersArray,
bs=1,
n=self.subdomainMesh.nNodes_owned,
N=self.nNodes_global,
nghosts=self.subdomainMesh.nNodes_global - self.subdomainMesh.nNodes_owned,
subdomain2global=self.nodeNumbering_subdomain2global)
par_nodeDiametersArray.scatter_forward_insert()
# comm.beginSequential()
# from Profiling import memory
# memory()
# logEvent(memory("Partitioning Mesh","Mesh"),level=1)
# del self.cmesh
# #cmeshTools.deleteMeshDataStructures(self.cmesh)
# logEvent(memory("Without global mesh","Mesh"),level=1)
# comm.endSequential()
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,Xdmf_ElementTopology="Triangle",tCount=0, EB=False):
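        """Write this mesh's topology, geometry, local-to-global maps, and material types to the XDMF archive ar.

        Heavy arrays go to ar's HDF5 file when one is open, otherwise to text files;
        EB=True additionally writes the element-boundary mesh.
        """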
if self.arGridCollection is not None:
init = False
elif not init:
grids = ar.domain.findall("Grid")
self.arGridCollection = grids[0]
if EB:
assert(len(grids) > 1)
self.arEBGridCollection = grids[1]
if init:
self.arGridCollection = SubElement(ar.domain,"Grid",{"Name":"Mesh "+name,
"GridType":"Collection",
"CollectionType":"Temporal"})
if EB:
self.arEBGridCollection = SubElement(ar.domain,"Grid",{"Name":"EBMesh "+name,
"GridType":"Collection",
"CollectionType":"Temporal"})
if self.arGrid is None or self.arTime.get('Value') != "{0:e}".format(t):
#
#topology and geometry
#
if ar.global_sync:
self.arGrid = SubElement(self.arGridCollection,"Grid",{"GridType":"Uniform"})
self.arTime = SubElement(self.arGrid,"Time",{"Value":"%e" % (t,),"Name":"%i" % (tCount,)})
topology = SubElement(self.arGrid,"Topology",
{"Type":Xdmf_ElementTopology,
"NumberOfElements":"%i" % (self.globalMesh.nElements_global,)})
elements = SubElement(topology,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i %i" % (self.globalMesh.nElements_global,self.nNodes_element)})
geometry = SubElement(self.arGrid,"Geometry",{"Type":"XYZ"})
nodes = SubElement(geometry,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Float",
"Precision":"8",
"Dimensions":"%i %i" % (self.globalMesh.nNodes_global,3)})
if ar.hdfFile is not None:
elements.text = ar.hdfFilename+":/elements"+name+str(tCount)
nodes.text = ar.hdfFilename+":/nodes"+name+str(tCount)
if init or meshChanged:
ar.create_dataset_sync('elements'+name+str(tCount),
offsets=self.globalMesh.elementOffsets_subdomain_owned,
data=self.globalMesh.nodeNumbering_subdomain2global[self.elementNodesArray[:self.nElements_owned]])
ar.create_dataset_sync('nodes'+name+str(tCount),
offsets=self.globalMesh.nodeOffsets_subdomain_owned,
data=self.nodeArray[:self.nNodes_owned])
else:
assert False, "global_sync not supported with text heavy data"
else:
self.arGrid = SubElement(self.arGridCollection,"Grid",{"GridType":"Uniform"})
self.arTime = SubElement(self.arGrid,"Time",{"Value":"%e" % (t,),"Name":"%i" % (tCount,)})
topology = SubElement(self.arGrid,"Topology",
{"Type":Xdmf_ElementTopology,
"NumberOfElements":"%i" % (self.nElements_owned,)})
elements = SubElement(topology,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i %i" % (self.nElements_owned,self.nNodes_element)})
geometry = SubElement(self.arGrid,"Geometry",{"Type":"XYZ"})
nodes = SubElement(geometry,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Float",
"Precision":"8",
"Dimensions":"%i %i" % (self.nNodes_global,3)})
if ar.hdfFile is not None:
elements.text = ar.hdfFilename+":/elements"+str(ar.comm.rank())+name+str(tCount)
nodes.text = ar.hdfFilename+":/nodes"+str(ar.comm.rank())+name+str(tCount)
if init or meshChanged:
ar.create_dataset_async('elements'+str(ar.comm.rank())+name+str(tCount),data=self.elementNodesArray[:self.nElements_owned])
ar.create_dataset_async('nodes'+str(ar.comm.rank())+name+str(tCount),data=self.nodeArray)
else:
SubElement(elements,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/elements"+name+".txt"})
SubElement(nodes,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/nodes"+name+".txt"})
if init or meshChanged:
numpy.savetxt(ar.textDataDir+"/elements"+name+".txt",self.elementNodesArray[:self.nElements_owned],fmt='%d')
numpy.savetxt(ar.textDataDir+"/nodes"+name+".txt",self.nodeArray)
#
#element boundary topology and geometry
#
if EB:
self.arEBGrid = SubElement(self.arEBGridCollection,"Grid",{"GridType":"Uniform"})
self.arEBTime = SubElement(self.arEBGrid,"Time",{"Value":"%e" % (t,),"Name":"%i" % (tCount,)})
Xdmf_ElementEBTopology = "Triangle" #cek hack
ebtopology = SubElement(self.arEBGrid,"Topology",
{"Type":Xdmf_ElementEBTopology,
"NumberOfElements":"%i" % (self.nElementBoundaries_global,)})
ebelements = SubElement(ebtopology,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i %i" % (self.nElementBoundaries_global,self.nNodes_elementBoundary)})
ebgeometry = SubElement(self.arEBGrid,"Geometry",{"Type":"XYZ"})
ebnodes = SubElement(ebgeometry,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Float",
"Precision":"8",
"Dimensions":"%i %i" % (self.nNodes_global,3)})
if ar.hdfFile is not None:
ebelements.text = ar.hdfFilename+":/elementBoundaries"+str(ar.comm.rank())+name+str(tCount)
ebnodes.text = ar.hdfFilename+":/nodes"+str(ar.comm.rank())+name+str(tCount)
if init or meshChanged:
ar.create_dataset_async('elementBoundaries'+str(ar.comm.rank())+name+str(tCount), data = self.elementBoundaryNodesArray)
#ar.create_dataset_async('nodes'+`ar.comm.rank()`+name+`tCount`, data = self.nodeArray)
else:
SubElement(ebelements,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/elementBoundaries"+name+".txt"})
SubElement(ebnodes,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/nodes"+name+".txt"})
if init or meshChanged:
np.savetxt(ar.textDataDir+"/elementBoundaries"+name+".txt",self.elementBoundaryNodesArray,fmt='%d')
# Add the local->global index maps for collect.py and for
# reverse mapping in hotstarts from a global XDMF file.
if self.globalMesh is not None and not ar.global_sync:
nodeMapAtt = SubElement(self.arGrid,"Attribute",
{"Name":"NodeMapL2G",
"AttributeType":"Scalar",
"Center":"Node"})
nodeMap = SubElement(nodeMapAtt,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Precision":"4",
"Dimensions":"%i" % (self.nNodes_global,)})
elemMapAtt = SubElement(self.arGrid,"Attribute",
{"Name":"CellMapL2G",
"AttributeType":"Scalar",
"Center":"Cell"})
elemMap = SubElement(elemMapAtt,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Precision":"4",
"Dimensions":"%i" % (self.nElements_owned,)})
if ar.hdfFile is not None:
nodeMap.text = ar.hdfFilename+":/nodeMapL2G"+str(ar.comm.rank())+name+str(tCount)
elemMap.text = ar.hdfFilename+":/cellMapL2G"+str(ar.comm.rank())+name+str(tCount)
if init or meshChanged:
ar.create_dataset_async('nodeMapL2G'+str(ar.comm.rank())+name+str(tCount), data=self.globalMesh.nodeNumbering_subdomain2global)
ar.create_dataset_async('cellMapL2G'+str(ar.comm.rank())+name+str(tCount), data=self.globalMesh.elementNumbering_subdomain2global[:self.nElements_owned])
else:
SubElement(nodeMap,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/nodeMapL2G"+name+".txt"})
SubElement(nodeMap,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/cellMapL2G"+name+".txt"})
if init or meshChanged:
np.savetxt(ar.textDataDir+"/nodeMapL2G"+name+".txt",self.globalMesh.nodeNumbering_subdomain2global)
np.savetxt(ar.textDataDir+"/cellMapL2G"+name+".txt",self.globalMesh.elementNumbering_subdomain2global[:self.nElements_owned])
#
#material types
#
if ar.global_sync:
nodeMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"nodeMaterialTypes",
"AttributeType":"Scalar",
"Center":"Node"})
nodeMaterialTypesValues = SubElement(nodeMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.globalMesh.nNodes_global,)})
elementMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"elementMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementMaterialTypesValues = SubElement(elementMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.globalMesh.nElements_global,)})
if EB:
ebnodeMaterialTypes = SubElement(self.arEBGrid,"Attribute",{"Name":"ebnodeMaterialTypes",
"AttributeType":"Scalar",
"Center":"Node"})
ebnodeMaterialTypesValues = SubElement(ebnodeMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.globalMesh.nNodes_global,)})
elementBoundaryMaterialTypes = SubElement(self.arEBGrid,"Attribute",{"Name":"elementBoundaryMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementBoundaryMaterialTypesValues = SubElement(elementBoundaryMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.globalMesh.nElementBoundaries_global,)})
if ar.hdfFile is not None:
nodeMaterialTypesValues.text = ar.hdfFilename+":/"+"nodeMaterialTypes"+"_t"+str(tCount)
ar.create_dataset_sync("nodeMaterialTypes"+"_t"+str(tCount), offsets=self.globalMesh.nodeOffsets_subdomain_owned, data=self.nodeMaterialTypes[:self.nNodes_owned])
elementMaterialTypesValues.text = ar.hdfFilename+":/"+"elementMaterialTypes"+"_t"+str(tCount)
ar.create_dataset_sync("elementMaterialTypes"+"_t"+str(tCount), offsets=self.globalMesh.elementOffsets_subdomain_owned, data=self.elementMaterialTypes[:self.nElements_owned])
if EB:
ebnodeMaterialTypesValues.text = ar.hdfFilename+":/"+"nodeMaterialTypes"+"_t"+str(tCount)
elementBoundaryMaterialTypesValues.text = ar.hdfFilename+":/"+"elementBoundaryMaterialTypes"+"_t"+str(tCount)
ar.create_dataset_sync("elementBoundaryMaterialTypes"+"_t"+str(tCount), offsets = self.globalMesh.elementBoundaryOffsets_subdomain_owned, data=self.elementBoundaryMaterialTypes[:self.nElementBoundaries_owned])
else:
assert False, "global_sync not supported with text heavy data"
else:
nodeMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"nodeMaterialTypes",
"AttributeType":"Scalar",
"Center":"Node"})
nodeMaterialTypesValues = SubElement(nodeMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.nNodes_global,)})
elementMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"elementMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementMaterialTypesValues = SubElement(elementMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.nElements_owned,)})
if EB:
ebnodeMaterialTypes = SubElement(self.arEBGrid,"Attribute",{"Name":"ebnodeMaterialTypes",
"AttributeType":"Scalar",
"Center":"Node"})
ebnodeMaterialTypesValues = SubElement(ebnodeMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.nNodes_global,)})
elementBoundaryMaterialTypes = SubElement(self.arEBGrid,"Attribute",{"Name":"elementBoundaryMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementBoundaryMaterialTypesValues = SubElement(elementBoundaryMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.nElementBoundaries_global,)})
if ar.hdfFile is not None:
nodeMaterialTypesValues.text = ar.hdfFilename+":/"+"nodeMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount)
ar.create_dataset_async("nodeMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount), data=self.nodeMaterialTypes)
elementMaterialTypesValues.text = ar.hdfFilename+":/"+"elementMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount)
ar.create_dataset_async("elementMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount), data=self.elementMaterialTypes[:self.nElements_owned])
if EB:
ebnodeMaterialTypesValues.text = ar.hdfFilename+":/"+"nodeMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount)
elementBoundaryMaterialTypesValues.text = ar.hdfFilename+":/"+"elementBoundaryMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount)
ar.create_dataset_async("elementBoundaryMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount), data=self.elementBoundaryMaterialTypes)
else:
numpy.savetxt(ar.textDataDir+"/"+"nodeMaterialTypes"+str(tCount)+".txt",self.nodeMaterialTypes)
SubElement(nodeMaterialTypesValues,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/"+"nodeMaterialTypes"+str(tCount)+".txt"})
numpy.savetxt(ar.textDataDir+"/"+"elementMaterialTypes"+str(tCount)+".txt",self.elementMaterialTypes[:self.nElements_owned])
SubElement(elementMaterialTypesValues,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/"+"elementMaterialTypes"+str(tCount)+".txt"})
#done with material types
def buildFromC(self,cmesh):
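        """Populate this mesh's Python-side arrays and scalar sizes from the C mesh data structures."""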
from . import cmeshTools
#
logEvent(memory("buildFromC","MeshTools"),level=4)
self.cmesh = cmesh
(self.nElements_global,
self.nNodes_global,
self.nNodes_element,
self.nNodes_elementBoundary,
self.nElementBoundaries_element,
self.nElementBoundaries_global,
self.nInteriorElementBoundaries_global,
self.nExteriorElementBoundaries_global,
self.max_nElements_node,
self.nEdges_global,
self.max_nNodeNeighbors_node,
self.elementNodesArray,
self.nodeElementsArray,
self.nodeElementOffsets,
self.elementNeighborsArray,
self.elementBoundariesArray,
self.elementBoundaryNodesArray,
self.elementBoundaryElementsArray,
self.elementBoundaryLocalElementBoundariesArray,
self.interiorElementBoundariesArray,
self.exteriorElementBoundariesArray,
self.edgeNodesArray,
self.nodeStarArray,
self.nodeStarOffsets,
self.elementMaterialTypes,
self.elementBoundaryMaterialTypes,
self.nodeMaterialTypes,
self.nodeArray,
self.nx,self.ny, self.nz, #NURBS
self.px,self.py, self.pz, #NURBS
self.elementIJK, #NURBS
self.weights, #NURBS
self.U_KNOT, #NURBS
self.V_KNOT, #NURBS
self.W_KNOT, #NURBS
self.elementDiametersArray,
self.elementInnerDiametersArray,
self.elementBoundaryDiametersArray,
self.elementBarycentersArray,
self.elementBoundaryBarycentersArray,
self.nodeDiametersArray,
self.nodeSupportArray,
self.h,
self.hMin,
self.sigmaMax,
self.volume) = cmeshTools.buildPythonMeshInterface(self.cmesh)
# print("from C")
# print (self.nElements_global,
# self.nNodes_global,
# self.nNodes_element,
# self.nNodes_elementBoundary,
# self.nElementBoundaries_element,
# self.nElementBoundaries_global,
# self.nInteriorElementBoundaries_global,
# self.nExteriorElementBoundaries_global,
# self.max_nElements_node,
# self.nEdges_global,
# self.max_nNodeNeighbors_node,
# self.elementNodesArray,
# self.nodeElementsArray,
# self.nodeElementOffsets,
# self.elementNeighborsArray,
# self.elementBoundariesArray,
# self.elementBoundaryNodesArray,
# self.elementBoundaryElementsArray,
# self.elementBoundaryLocalElementBoundariesArray,
# self.interiorElementBoundariesArray,
# self.exteriorElementBoundariesArray,
# self.edgeNodesArray,
# self.nodeStarArray,
# self.nodeStarOffsets,
# self.elementMaterialTypes,
# self.elementBoundaryMaterialTypes,
# self.nodeMaterialTypes,
# self.nodeArray,
# self.elementDiametersArray,
# self.elementInnerDiametersArray,
# self.elementBoundaryDiametersArray,
# self.elementBarycentersArray,
# self.elementBoundaryBarycentersArray,
# self.nodeDiametersArray,
# self.nodeSupportArray,
# self.h,
# self.hMin,
# self.volume)
self.hasGeometricInfo = True
#default to single processor
self.nNodes_owned = self.nNodes_global
self.nElements_owned = self.nElements_global
self.nElementBoundaries_owned = self.nElementBoundaries_global
self.nEdges_owned = self.nEdges_global
logEvent(memory("buildFromC","MeshTools"),level=4)
def buildFromCNoArrays(self,cmesh):
from . import cmeshTools
#
logEvent(memory("buildFromC","MeshTools"),level=4)
self.cmesh = cmesh
(self.nElements_global,
self.nNodes_global,
self.nNodes_element,
self.nNodes_elementBoundary,
self.nElementBoundaries_element,
self.nElementBoundaries_global,
self.nInteriorElementBoundaries_global,
self.nExteriorElementBoundaries_global,
self.max_nElements_node,
self.nEdges_global,
self.max_nNodeNeighbors_node,
self.h,
self.hMin,
self.sigmaMax,
self.volume) = cmeshTools.buildPythonMeshInterfaceNoArrays(self.cmesh)
self.hasGeometricInfo = False
logEvent(memory("buildFromCNoArrays","MeshTools"),level=4)
def buildNodeStarArrays(self):
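        """Build node-to-node ("node star") connectivity in a compressed, CSR-like form.

        After this call nodeStarArray[nodeStarOffsets[n]:nodeStarOffsets[n+1]] lists the
        nodes sharing an element with node n (node n itself is not included).
        """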
import itertools
if self.nodeStarArray is None:
self.nodeStarList=[]
for n in range(self.nNodes_global):
self.nodeStarList.append(set())
for i_ele in range(self.nElements_global): #: is this OK for parallel mesh?
for n1,n2 in itertools.permutations(self.elementNodesArray[i_ele],2):#: works for combination of triangle and quadrilateral
#: if n1<self.nNodes_global: #: Saving only locally owned node is not enough; should include ghost node
self.nodeStarList[n1].add(n2) #: does not contain itself; use set() instead of list since each pair is visited 1 or 2 times for 2D mesh
self.nodeStarOffsets = np.zeros((self.nNodes_global+1,),'i')
lenNodeStarArray=0
for nN in range(1,self.nNodes_global+1):
self.nodeStarOffsets[nN] = self.nodeStarOffsets[nN-1] + len(self.nodeStarList[nN-1])
self.nodeStarArray =np.fromiter(itertools.chain.from_iterable(self.nodeStarList),'i')
del self.nodeStarList
def buildArraysFromLists(self):
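        """Convert the list/dict mesh representation into flat arrays.

        Fills nodeArray, elementNodesArray, nodeElementsArray/offsets, element boundary,
        neighbor, and edge arrays, diameters, barycenters, and material type arrays,
        then deletes the node/element/edge lists.
        """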
#nodes
self.nNodes_global = len(self.nodeList)
self.nodeArray = np.zeros((self.nNodes_global,3),'d')
nodeElementsList=[]
for nN,n in enumerate(self.nodeList):
self.nodeArray[nN][:] = n.p
nodeElementsList.append([])
#elements
self.nNodes_element = len(self.elementList[0].nodes)
self.nElements_global = len(self.elementList)
self.elementNodesArray = np.zeros((self.nElements_global,
self.nNodes_element),
'i')
for en,e in enumerate(self.elementList):
for nN_element,n in enumerate(e.nodes):
self.elementNodesArray[en,nN_element]=n.N
nodeElementsList[n.N].append(en)
#elements per node
nodeElementsDict={}
for eN in range(self.nElements_global):
for nN_element in range(self.nNodes_element):
nN = self.elementNodesArray[eN,nN_element]
if nN in nodeElementsDict:
nodeElementsDict[nN].append(eN)
else:
nodeElementsDict[nN] = [eN]
self.max_nElements_node = max(len(nodeElementsDict[nN]) for nN in range(self.nNodes_global))
self.nElements_node = np.zeros((self.nNodes_global),'i')
#mwf make a 1d array now
#self.nodeElementsArrayOld = np.zeros((self.nNodes_global,self.max_nElements_node),'i')
self.nodeElementOffsets = np.zeros((self.nNodes_global+1,),'i')
for nN in range(len(nodeElementsDict)):
elementList = nodeElementsDict[nN]
self.nElements_node[nN] = len(elementList)
self.nodeElementOffsets[nN+1] = self.nodeElementOffsets[nN]+self.nElements_node[nN]
#for eN_element,eN in enumerate(elementList):
# self.nodeElementsArrayOld[nN,eN_element]=eN
self.nodeElementsArray = np.zeros((self.nodeElementOffsets[self.nNodes_global],),'i')
for nN,elementList in nodeElementsDict.items():
for eN_element,eN in enumerate(elementList):
self.nodeElementsArray[self.nodeElementOffsets[nN]+eN_element]=eN
#
#
#elementBoundariesArray
self.nElementBoundaries_element = len(
self.elementList[0].elementBoundaries)
self.elementBoundariesArray = np.zeros(
(self.nElements_global,self.nElementBoundaries_element),
'i')
#collect set of element boundaries while we're looping
elementBoundaryNumbers=set()
for eN,e in enumerate(self.elementList):
for ebN_element,eb in enumerate(e.elementBoundaries):
self.elementBoundariesArray[eN,ebN_element]=eb.N
elementBoundaryNumbers.add(eb.N)
self.nElementBoundaries_global=len(elementBoundaryNumbers)
#elementBoundaryElementsArray
self.elementBoundaryElementsArray=np.ones(
(self.nElementBoundaries_global,2),'i')
self.elementBoundaryElementsArray*=-1
self.elementBoundaryLocalElementBoundariesArray=np.zeros(
(self.nElementBoundaries_global,2),'i')
elementBoundaryElementsCardArray =np.zeros(
(self.nElementBoundaries_global),'i')
for eN in range(self.nElements_global):
for ebN_element in range(self.nElementBoundaries_element):
ebN = self.elementBoundariesArray[eN,ebN_element]
elementBoundaryElementsCardArray[ebN]+=1
eN_boundaryElement=elementBoundaryElementsCardArray[ebN]-1
self.elementBoundaryElementsArray[ebN,eN_boundaryElement]=eN
self.elementBoundaryLocalElementBoundariesArray[ebN,eN_boundaryElement]=ebN_element
if elementBoundaryElementsCardArray[ebN] > 2:
logEvent("WARNING, element neighbors of boundary element > 2")
elementBoundaryElementsCardArray[ebN]=2
#interior and exterior
self.nExteriorElementBoundaries_global=2*self.nElementBoundaries_global\
- np.sum(
elementBoundaryElementsCardArray)
self.nInteriorElementBoundaries_global= self.nElementBoundaries_global-\
self.nExteriorElementBoundaries_global
self.exteriorElementBoundariesArray=np.zeros(
(self.nExteriorElementBoundaries_global,),'i')
self.interiorElementBoundariesArray=np.zeros(
(self.nInteriorElementBoundaries_global,),'i')
interior=0
exterior=0
for ebN in range(self.nElementBoundaries_global):
if elementBoundaryElementsCardArray[ebN]==1:
self.exteriorElementBoundariesArray[exterior]=ebN
exterior+=1
else:
self.interiorElementBoundariesArray[interior]=ebN
interior+=1
del elementBoundaryElementsCardArray
self.nNodes_elementBoundary = len(self.elementBoundaryList[0].nodes)
self.elementBoundaryNodesArray = np.zeros((self.nElementBoundaries_global,
self.nNodes_elementBoundary),
'i')
for ebN,eb in enumerate(self.elementBoundaryList):
for nN_element,n in enumerate(eb.nodes):
self.elementBoundaryNodesArray[ebN,nN_element]=n.N
#element neighbors
self.elementNeighborsArray = np.zeros((self.nElements_global,self.nElementBoundaries_element),'i')
for eN in range(self.nElements_global):
for ebN_element in range(self.nElementBoundaries_element):
ebN = self.elementBoundariesArray[eN,ebN_element]
eN_left = self.elementBoundaryElementsArray[ebN,0]
eN_right = self.elementBoundaryElementsArray[ebN,1]
if eN == eN_left:
self.elementNeighborsArray[eN,ebN_element] = eN_right
elif eN == eN_right:
self.elementNeighborsArray[eN,ebN_element] = eN_left
else:
self.elementNeighborsArray[eN,ebN_element] = -1
#edges
self.edgeNodesArray = np.zeros(
(len(self.edgeList),2),'i')
for en,e in enumerate(self.edgeList):
self.edgeNodesArray[en,0]=e.nodes[0].N
self.edgeNodesArray[en,1]=e.nodes[1].N
#geometric info
self.computeGeometricInfo()
self.elementDiametersArray = np.zeros((self.nElements_global,),'d')
self.elementInnerDiametersArray = np.zeros((self.nElements_global,),'d')
for en in range(self.nElements_global):
self.elementDiametersArray[en] = self.elementList[en].diameter
self.elementInnerDiametersArray[en]=self.elementList[en].innerDiameter
self.elementBoundaryDiametersArray = np.zeros((self.nElementBoundaries_global,),'d')
for eN,e in enumerate(self.elementList):
for ebN_element,eb in enumerate(e.elementBoundaries):
self.elementBoundaryDiametersArray[self.elementBoundariesArray[eN,ebN_element]] = eb.diameter
self.elementMaterialTypes = np.zeros((self.nElements_global,),'i')
self.elementBoundaryMaterialTypes = np.zeros((self.nElementBoundaries_global,),'i')
self.nodeMaterialTypes = np.zeros((self.nNodes_global,),'i')
#
self.elementBarycentersArray = np.zeros((self.nElements_global,3),'d')
self.elementBoundaryBarycentersArray = np.zeros((self.nElementBoundaries_global,3),'d')
for eN in range(self.nElements_global):
self.elementBarycentersArray[eN,:] = 0.0
for ebN in range(self.nNodes_element):
self.elementBarycentersArray[eN,:] += self.nodeArray[self.elementNodesArray[eN,ebN],:]
self.elementBarycentersArray[eN,:] /= float(self.nNodes_element)
for ebN in range(self.nElementBoundaries_global):
self.elementBoundaryBarycentersArray[ebN,:] = 0.0
for nN in range(self.nNodes_elementBoundary):
self.elementBoundaryBarycentersArray[ebN,:] += self.nodeArray[self.elementBoundaryNodesArray[ebN,nN],:]
self.elementBoundaryBarycentersArray[ebN,:] /= float(self.nNodes_elementBoundary)
#
#now get rid of lists
del self.nodeList
del self.elementList
del self.elementBoundaryList
del self.edgeList
#self.partitionMesh()
def computeGeometricInfo(self):
self.elementList[0].computeGeometricInfo()
self.h=self.elementList[0].diameter
self.hMin=self.h
for e in self.elementList[1:]:
e.computeGeometricInfo()
self.h = max(self.h,e.diameter)
self.hMin=min(self.hMin,e.diameter)
            for eb in e.elementBoundaries:
                eb.computeGeometricInfo()
self.hasGeometricInfo=True
def buildMatlabMeshDataStructures(self,meshFileBase='meshMatlab',writeToFile=True):
"""
build array data structures for matlab finite element mesh
representation and write to a file to view and play with in
matlatb. The current matlab support is mostly for 2d, but this
will return basic arrays for 1d and 3d too
in matlab can then print mesh with
pdemesh(p,e,t)
if one has pdetoolbox
where
p is the vertex or point matrix
e is the edge matrix, and
t is the element matrix
e will be the elementBoundary matrix in 1d and 3d, but perhaps
should remain the edge array?
points matrix is [nd x num vertices]
format :
row 1 = x coord,
row 2 = y coord for nodes in mesh
row 3 = z coord for nodes in mesh ...
edge matrix is [2*nd+3 x num faces]
format:
row 1 = start vertex number
...
row nd = end vertex number
row nd+1 = start value in edge parameterization, should be 0
row nd+2 = next value in edge parameterization, should be 1 or 2
             row 2*nd  = end value in edge parameterization, should be 2 or 1
row 2*nd+1 = global face id, base 1
row 2*nd+2 = subdomain on left? always 1 for now
row 2*nd+3 = subdomain on right? always 0 for now
element matrix is [nd+2 x num elements]
row 1 = vertex 1 global number
row 2 = vertex 2 global number
...
            row nd+1 = vertex nd+1 global number
            row nd+2 = element subdomain number
where 1,2,3 is a local counter clockwise numbering of vertices in
triangle
"""
matlabBase = 1
nd = self.nNodes_element-1
p = np.zeros((nd,self.nNodes_global),'d')
e = np.zeros((2*nd+3,self.nElementBoundaries_global),'d')
t = np.zeros((nd+2,self.nElements_global),'d')
#load p,e,t and write file
if writeToFile:
mfile = open(meshFileBase+'.m','w')
else:
mfile = open('/dev/null','w')
#
if writeToFile:
mfile.write('p = [ ... \n')
for nN in range(self.nNodes_global):
for I in range(nd):
p[I,nN]=self.nodeArray[nN,I]
if writeToFile:
mfile.write('%g ' % p[I,nN])
mfile.write('\n')
if writeToFile:
mfile.write(']; \n')
mfile.write("p = p\';\n") #need transpose for matlab
if writeToFile:
mfile.write('e = [ ... \n')
for ebN in range(self.nElementBoundaries_global):
eN_left = self.elementBoundaryElementsArray[ebN,0]
eN_right= self.elementBoundaryElementsArray[ebN,1]#-1 --> exterior
for nN in range(self.nNodes_elementBoundary):
e[nN,ebN]=self.elementBoundaryNodesArray[ebN,nN] + matlabBase #global node number of start node base 1
#assume for now existing parameterization ok
for nN in range(self.nNodes_elementBoundary):
e[self.nNodes_elementBoundary+nN,ebN]=nN #edge param. is 0 to 1
            e[2*self.nNodes_elementBoundary,ebN] = ebN+matlabBase #global face id, base 1
e[2*self.nNodes_elementBoundary+1,ebN] = self.elementMaterialTypes[eN_left] #subdomain to left
if eN_right >= 0:
e[2*self.nNodes_elementBoundary+2,ebN]= self.elementMaterialTypes[eN_right] #subdomain to right
else:
e[2*self.nNodes_elementBoundary+2,ebN]= -1
if writeToFile:
for i in range(e.shape[0]):
mfile.write(' %g ' % e[i,ebN])
mfile.write(' \n ')
if writeToFile:
mfile.write(']; \n')
mfile.write("e = e\';\n") #need transpose for matlab
#write triangles last
if writeToFile:
mfile.write('t = [ ... \n')
for eN in range(self.nElements_global):
for nN in range(self.nNodes_element):
t[nN,eN]=self.elementNodesArray[eN,nN]+matlabBase #global node number for vertex nN
t[self.nNodes_element,eN]=self.elementMaterialTypes[eN] #subdomain id
if writeToFile:
for i in range(t.shape[0]):
mfile.write('%g ' % t[i,eN])
mfile.write('\n')
if writeToFile:
            mfile.write(']; \n')
mfile.write("t = t\';\n") #need transpose for matlab
mfile.close()
return p,e,t
def writeEdgesMatlab(self,filename):
"""store coordinates in files formatted for Matlab"""
xfile=filename+'_x.grf'
yfile=filename+'_y.grf'
zfile=filename+'_z.grf'
print('Storing edge information in %s, %s, and %s' % \
(xfile,yfile,zfile))
xOut = open(xfile,'w')
yOut = open(yfile,'w')
zOut = open(zfile,'w')
for edge in self.edgeList:
xOut.write('%14.8e ' % edge.nodes[0].p[X] )
yOut.write('%14.8e ' % edge.nodes[0].p[Y] )
zOut.write('%14.8e ' % edge.nodes[0].p[Z] )
xOut.write('\n')
yOut.write('\n')
zOut.write('\n')
for edge in self.edgeList:
xOut.write('%14.8e ' % edge.nodes[1].p[X])
yOut.write('%14.8e ' % edge.nodes[1].p[Y])
zOut.write('%14.8e ' % edge.nodes[1].p[Z])
xOut.write('\n')
yOut.write('\n')
zOut.write('\n')
xOut.close()
yOut.close()
zOut.close()
def viewTetrahedraMatlab(self,filename):
"""plot the edges"""
cmdfile = filename +'.m'
xfile=filename+'_x.grf'
yfile=filename+'_y.grf'
zfile=filename+'_z.grf'
xedges=filename+'_x'
yedges=filename+'_y'
zedges=filename+'_z'
        #the following is for debugging: plot each tet separately
nT = old_div(len(self.edgeList),6)
plotcommand = "-r \"load " + xfile + \
", load " + yfile + \
", load " + zfile
plots=''
for i in range(nT):
plots = plots + \
", figure(" +str(i+1)+")" \
", axis([0 1 0 1 0 1]), plot3("+xedges+\
"(:,"+str(i)+"*6+1:("+str(i)+"+1)*6),"+yedges+\
"(:,"+str(i)+"*6+1:("+str(i)+"+1)*6),"+zedges+\
"(:,"+str(i)+"*6+1:("+str(i)+"+1)*6),\'b-\') "
plotcommand = plotcommand + plots +'\"'
cmdOut = open(cmdfile,'w')
cmdOut.write(plotcommand)
cmdOut.close()
print('Calling matlab to view mesh')
os.execlp('matlab',
'matlab',
'-nodesktop',
'-nosplash',
'-r',
filename)
def viewMeshMatlab(self,filename):
"""plot the edges"""
cmdfile = filename +'.m'
xfile=filename+'_x.grf'
yfile=filename+'_y.grf'
zfile=filename+'_z.grf'
xedges=filename+'_x'
yedges=filename+'_y'
zedges=filename+'_z'
plotcommand = "load " + xfile + \
", load " + yfile + \
", load " + zfile + \
", figure " + \
", axis([0 1 0 1 0 1]), plot3("+xedges+\
","+yedges+\
","+zedges+\
",\'b-\')"
print(plotcommand)
cmdOut = open(cmdfile,'w')
cmdOut.write(plotcommand)
cmdOut.close()
print('Calling matlab to view mesh')
os.execlp('matlab',
'matlab',
'-nodesktop',
'-nosplash',
'-r',
filename)
# from os import popen
# matlab = popen('matlab','w')
# matlab.write(plotcommand+'\n')
# matlab.flush()
# raw_input('Please press return to continue...\n')
def writeEdgesGnuplot(self,filename):
"""store coordinates in files formatted for Matlab"""
datfile=filename+'.dat'
print('Storing edge information in %s' % datfile)
edgesOut = open(datfile,'w')
for edge in self.edgeList:
dataline = '%14.8e %14.8e %14.8e \n' % \
(edge.nodes[0].p[X],
edge.nodes[0].p[Y],
edge.nodes[0].p[Z])
edgesOut.write(dataline)
dataline = '%14.8e %14.8e %14.8e \n \n \n' % \
(edge.nodes[1].p[X],
edge.nodes[1].p[Y],
edge.nodes[1].p[Z])
edgesOut.write(dataline)
edgesOut.close()
def writeEdgesGnuplot2(self,filename):
"""store coordinates in files formatted for Matlab"""
datfile=filename+'.dat'
print('Storing edge information in %s' % datfile)
edgesOut = open(datfile,'w')
for n0,n1 in self.edgeNodesArray:
dataline = '%14.8e %14.8e %14.8e \n' % \
(self.nodeArray[n0][0],
self.nodeArray[n0][1],
self.nodeArray[n0][2])
edgesOut.write(dataline)
dataline = '%14.8e %14.8e %14.8e \n \n \n' % \
(self.nodeArray[n1][0],
self.nodeArray[n1][1],
self.nodeArray[n1][2])
edgesOut.write(dataline)
edgesOut.close()
def viewMeshGnuplot(self,filename):
cmdfile = filename +'.cmd'
datfile = filename +'.dat'
cmd = "set pointsize 2.5 \n set term x11 \n splot \'"+datfile+"\' with linespoints pointsize 2.5 pt 2\n"+\
"set xlabel \'x\' \n set ylabel \'y\' \n set zlabel \'z\' \n "
cmdOut = open(cmdfile,'w')
cmdOut.write(cmd)
cmdOut.close()
from os import execlp
print('Calling gnuplot to view mesh')
execlp('gnuplot','gnuplot',cmdfile,'-')
def viewMeshGnuplotPipe(self,filename):
cmdfile = filename +'.cmd'
datfile = filename +'.dat'
cmd = "set pointsize 1.5 \n set term x11 \n splot \'"+datfile+"\' with linespoints pointsize 2.5 pt 2 \n"+\
"set xlabel \'x\' \n set ylabel \'y\' \n set zlabel \'z\' \n "
cmdOut = open(cmdfile,'w')
cmdOut.write(cmd)
cmdOut.close()
from os import execlp
print('Calling gnuplot to view mesh')
from os import popen
gnuplot = popen('gnuplot','w')
gnuplot.write(cmd+'\n')
gnuplot.flush()
input('Please press return to continue... \n')
def viewMeshGnuplotPipePar(self,filenames):
from os import popen
gnuplot = popen('gnuplot','w')
for i,filename in enumerate(filenames):
cmdfile = filename +'.cmd'
datfile = filename +'.dat'
cmd = ("set term x11 %i \n splot \'" % (i,))+datfile+"\' with linespoints \n"+\
"set xlabel \'x\' \n set ylabel \'y\' \n set zlabel \'z\'"
cmdOut = open(cmdfile,'w')
cmdOut.write(cmd)
cmdOut.close()
from os import execlp
print('Calling gnuplot to view mesh')
gnuplot.write(cmd+'\n')
gnuplot.flush()
input('Please press return to continue... \n')
def convertFromPUMI(self, domain, MeshAdapt, faceList,regList, parallel=False, dim=3):
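        """Construct this mesh (and, in parallel, self.subdomainMesh) from a PUMI mesh.

        MeshAdapt provides the PUMI mesh; faceList/regList, or the reconstructed-model
        flags on domain.AdaptManager, are used to set boundary and region material arrays.
        """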
from . import cmeshTools
from . import MeshAdaptPUMI
from . import cpartitioning
from . import Comm
comm = Comm.get()
self.cmesh = cmeshTools.CMesh()
if parallel:
self.subdomainMesh=self.__class__()
self.subdomainMesh.globalMesh = self
self.subdomainMesh.cmesh = cmeshTools.CMesh()
MeshAdapt.constructFromParallelPUMIMesh(self.cmesh,
self.subdomainMesh.cmesh)
if(domain.AdaptManager.reconstructedFlag==1):
logEvent("Material arrays updating based on reconstructed model.\n")
MeshAdapt.updateMaterialArrays(self.subdomainMesh.cmesh);
elif(domain.AdaptManager.reconstructedFlag==2):
logEvent("Material arrays updating based on better reconstructed model.\n")
MeshAdapt.updateMaterialArrays2(self.subdomainMesh.cmesh);
else:
logEvent("Material arrays updating based on geometric model.\n")
for i in range(len(faceList)):
for j in range(len(faceList[i])):
#MeshAdapt.updateMaterialArrays(self.subdomainMesh.cmesh,(dim-1), i+1,
# faceList[i][j])
MeshAdapt.updateMaterialArrays(self.subdomainMesh.cmesh,(dim-1), domain.boundaryLabels[i], faceList[i][j])
for i in range(len(regList)):
for j in range(len(regList[i])):
MeshAdapt.updateMaterialArrays(self.subdomainMesh.cmesh,dim, i+1, regList[i][j])
if dim == 3:
cmeshTools.allocateGeometricInfo_tetrahedron(self.subdomainMesh.cmesh)
cmeshTools.computeGeometricInfo_tetrahedron(self.subdomainMesh.cmesh)
if dim == 2:
cmeshTools.allocateGeometricInfo_triangle(self.subdomainMesh.cmesh)
cmeshTools.computeGeometricInfo_triangle(self.subdomainMesh.cmesh)
self.buildFromCNoArrays(self.cmesh)
(self.elementOffsets_subdomain_owned,
self.elementNumbering_subdomain2global,
self.nodeOffsets_subdomain_owned,
self.nodeNumbering_subdomain2global,
self.elementBoundaryOffsets_subdomain_owned,
self.elementBoundaryNumbering_subdomain2global,
self.edgeOffsets_subdomain_owned,
self.edgeNumbering_subdomain2global) = (
cpartitioning.convertPUMIPartitionToPython(comm.comm.tompi4py(),
self.cmesh,
self.subdomainMesh.cmesh))
self.subdomainMesh.buildFromC(self.subdomainMesh.cmesh)
self.subdomainMesh.nElements_owned = (
self.elementOffsets_subdomain_owned[comm.rank()+1] -
self.elementOffsets_subdomain_owned[comm.rank()])
self.subdomainMesh.nNodes_owned = (
self.nodeOffsets_subdomain_owned[comm.rank()+1] -
self.nodeOffsets_subdomain_owned[comm.rank()])
self.subdomainMesh.nElementBoundaries_owned = (
self.elementBoundaryOffsets_subdomain_owned[comm.rank()+1] -
self.elementBoundaryOffsets_subdomain_owned[comm.rank()])
self.subdomainMesh.nEdges_owned = (
self.edgeOffsets_subdomain_owned[comm.rank()+1] -
self.edgeOffsets_subdomain_owned[comm.rank()])
comm.barrier()
par_nodeDiametersArray = (
ParVec_petsc4py(self.subdomainMesh.nodeDiametersArray,
bs=1,
n=self.subdomainMesh.nNodes_owned,
N=self.nNodes_global,
nghosts = self.subdomainMesh.nNodes_global -
self.subdomainMesh.nNodes_owned,
subdomain2global =
self.nodeNumbering_subdomain2global))
par_nodeDiametersArray.scatter_forward_insert()
comm.barrier()
else:
MeshAdapt.constructFromSerialPUMIMesh(self.cmesh)
if(domain.AdaptManager.reconstructedFlag==1):
logEvent("Material arrays updating based on reconstructed model.\n")
MeshAdapt.updateMaterialArrays(self.cmesh);
elif(domain.AdaptManager.reconstructedFlag==2):
logEvent("Material arrays updating based on better reconstructed model.\n")
MeshAdapt.updateMaterialArrays2(self.cmesh);
else:
for i in range(len(faceList)):
for j in range(len(faceList[i])):
#MeshAdapt.updateMaterialArrays(self.cmesh,(dim-1), i+1, faceList[i][j])
MeshAdapt.updateMaterialArrays(self.cmesh,(dim-1), domain.boundaryLabels[i], faceList[i][j])
for i in range(len(regList)):
for j in range(len(regList[i])):
MeshAdapt.updateMaterialArrays(self.cmesh,dim, i+1, regList[i][j])
if dim == 3:
cmeshTools.allocateGeometricInfo_tetrahedron(self.cmesh)
cmeshTools.computeGeometricInfo_tetrahedron(self.cmesh)
if dim == 2:
cmeshTools.allocateGeometricInfo_triangle(self.cmesh)
cmeshTools.computeGeometricInfo_triangle(self.cmesh)
self.buildFromC(self.cmesh)
logEvent("meshInfo says : \n"+self.meshInfo())
class MultilevelMesh(Mesh):
"""A hierchical multilevel mesh"""
def __init__(self,levels=1):
self.meshList=[]
self.elementParents=None
def buildFromC(self,cmultilevelMesh):
from . import cmeshTools
self.cmultilevelMesh = cmultilevelMesh
(self.nLevels,
self.cmeshList,
self.elementParentsArrayList,
self.elementChildrenArrayList,
self.elementChildrenOffsetsList) = cmeshTools.buildPythonMultilevelMeshInterface(cmultilevelMesh)
def refine(self):
pass
def locallyRefine(self,elementTagArray):
pass
def buildArrayLists(self):
self.nLevels = len(self.meshList)
self.calculateElementParents()
self.elementParentsArrayList=[[]]
self.elementChildrenArrayList=[]
self.elementChildrenOffsetsList=[]
for l in range(1,self.nLevels):
self.elementParentsArrayList.append(self.elementParents[l])
len_children=0
for children in list(self.elementChildren[l-1].values()):
len_children += len(children)
self.elementChildrenArrayList.append(np.zeros((len_children,),'i'))
self.elementChildrenOffsetsList.append(np.zeros((self.meshList[l-1].nElements_global+1,),'i'))
index=0
for eN_p,children in enumerate(self.elementChildren[l-1].values()):
self.elementChildrenOffsetsList[l-1][eN_p] = index
for ec in children:
self.elementChildrenArrayList[l-1][index] = ec.N
index += 1
self.elementChildrenOffsetsList[l-1][-1] = index
def calculateElementParents(self,recalculate=False):
"""
get array elementParents[l,e] = e_c, where element e_c is the parent of element e
elementParents[0,:] = -1
"""
if (self.elementParents is None or recalculate):
self.elementParents = {}
nLevels = len(self.meshList)
for l in range(nLevels):
nE = self.meshList[l].nElements_global
self.elementParents[l] = np.ones((nE,),'i')
self.elementParents[l][:]=-1
for l in range(0,nLevels-1):
nEc = self.meshList[l].nElements_global
for ec in range(nEc):
for ef in self.elementChildren[l][ec]:
#print """l=%s ec= %s ef.N= %s """ % (l,ec,ef.N)
self.elementParents[l+1][ef.N] = ec
#ef
#ec
#l
class PointMesh(Mesh):
#Elements=Nodes
"""
0D mesh
"""
def __init__(self,points):
self.nodeArray=points
self.nNodes_global = points.shape[0]
self.elementNodesArray=np.arange(self.nNodes_global,dtype='i')
self.nElements_global = self.nNodes_global
MX=0
MY=1
MZ=2
I=0
J=1
K=1
class EdgeGrid(Mesh):
"""A 1D regular grid on an interval"""
def __init__(self,nx=2,Lx=1.0):
Mesh.__init__(self)
#dimensions and ranges
self.nx=nx
self.ex=nx-1
self.nRange_x = list(range(nx))
self.eRange_x = list(range(self.ex))
#lengths
self.Lx=Lx
self.dx = old_div(Lx,self.ex)
#node coordinates
self.nodeGridArray = np.zeros((self.nx,3),'d')
for i in self.nRange_x:
self.nodeGridArray[i,MX] = i*self.dx
#edge node numbers
self.edgeNodesArray=np.zeros((self.ex,2),'i')
#try to do this like we'll do 2d and 3d
#edge nodes
en=2
edgeNodeNumbers = np.zeros((en,),'i')
#reference edge
eI=1
refEdge_nodeIndeces = [-eI,eI]
refEdge_NodeDict={}
for rn,rnii in enumerate(refEdge_nodeIndeces):
refEdge_NodeDict[rnii] = rn
for i in self.eRange_x:
#edge number
eN=i
#fine grid index of edge
ii = 2*i + 1
#fine grid index of edge nodes
for rn,rnii in enumerate(refEdge_nodeIndeces):
nii = rnii + ii
edgeNodeNumbers[rn]=old_div(nii,2)
self.edgeNodesArray[eN,:]=edgeNodeNumbers
#Mesh interface
self.nNodes_global=self.nx
self.nEdges_global=self.ex
self.nElements_global=self.ex
self.nElementBoundaries_global=self.nx
self.nodeArray=self.nodeGridArray
self.elementNodesArray=self.edgeNodesArray
self.elementBoundariesArray=self.nodeArray
self.boundaryMesh=PointMesh(np.array([self.nodeArray[0],
self.nodeArray[-1]],dtype='d'))
class QuadrilateralGrid(Mesh):
"""A 2D regular grid of quadrilateral cells"""
def __init__(self,nx=2,ny=2,Lx=1.0,Ly=1.0):
Mesh.__init__(self)
#nodes
self.nx=nx
self.ny=ny
self.nxy=nx*ny
#edges
self.eXx=nx-1
self.eXy=ny
self.eXxy=self.eXx*self.eXy
self.eYx=nx
self.eYy=ny-1
self.eYxy = self.eYx*self.eYy
self.eXYx = self.eXx + self.eYx
self.eXYy = self.eXy + self.eYy
self.eXYxy = self.eXxy + self.eYxy
#quads
self.qx = nx-1
self.qy = ny-1
self.qxy = self.qx*self.qy
#ranges
self.nRange_x = list(range(self.nx))
self.nRange_y = list(range(self.ny))
self.qRange_x = list(range(self.qx))
        self.qRange_y = list(range(self.qy))
#lengths
self.Lx=Lx
self.Ly=Ly
self.dx = old_div(Lx,self.eXx)
self.dy = old_div(Ly,self.eYy)
#node coordinates
self.nodeGridArray=np.zeros((nx,ny,3),'d')
for i in self.nRange_x:
for j in self.nRange_y:
self.nodeGridArray[i,j,MX]=i*self.dx
self.nodeGridArray[i,j,MY]=j*self.dy
#edge node numbers
en=2
edgeNodeNumbers = np.zeros((en,),'i')
self.edgeNodesArray=np.zeros((self.eXYxy,en),'i')
#quad node numbers
qn=4
quadNodeNumbers = np.zeros((qn,),'i')
self.quadrilateralNodesArray=np.zeros((self.qxy,qn),'i')
#quad edge numbers
qe=4
quadEdgeNumbers = np.zeros((qe,),'i')
self.quadrilateralEdgesArray=np.zeros((self.qxy,qe),'i')
#reference quad
refQuad_NodeIndeces = [(-1,-1),
(-1, 1),
( 1,-1),
( 1, 1)]
refQuad_NodeIndeces.sort()
        #a map between reference node indices and numbers
refQuad_NodeDict={}
for rn,rniijj in enumerate(refQuad_NodeIndeces):
refQuad_NodeDict[rniijj]=rn
refQuad_EdgeIndeces = [(-1,0),
( 0,-1),
( 0, 1),
( 1, 0)]
refQuad_EdgeIndeces.sort()
refQuad_EdgeNodes=[]
        #use the map between indices and numbers to
        #map edge indices to the edge's node numbers
for reiijj in refQuad_EdgeIndeces:
if reiijj[I] == 0:
refQuad_EdgeNodes.append([
refQuad_NodeDict[(-1,reiijj[J])],
refQuad_NodeDict[( 1,reiijj[J])]])
else:
refQuad_EdgeNodes.append([
refQuad_NodeDict[(reiijj[I],-1)],
refQuad_NodeDict[(reiijj[I], 1)]])
for i in self.qRange_x:
for j in self.qRange_y:
#quad number
qN = i*self.qy + j
                #fine grid indices of quad
ii = 2*i + 1
jj = 2*j + 1
#nodes
for rn,rniijj in enumerate(refQuad_NodeIndeces):
nii = rniijj[I] + ii
njj = rniijj[J] + jj
nN = (old_div(nii,2))*self.ny + old_div(njj,2)
quadNodeNumbers[rn]=nN
self.quadrilateralNodesArray[qN][:]=quadNodeNumbers
#edges
for re,reiijj in enumerate(refQuad_EdgeIndeces):
eii = reiijj[I] + ii
ejj = reiijj[J] + jj
eN = (old_div(eii,2))*self.eXYy + (eii%2)*self.eYy + old_div(ejj,2)
quadEdgeNumbers[re]=eN
#nodes
for n,rn in enumerate(refQuad_EdgeNodes[re]):
self.edgeNodesArray[eN][n] = quadNodeNumbers[rn]
self.quadrilateralEdgesArray[qN][:]=quadEdgeNumbers
#Mesh interface (dimensions)
self.nNodes_global=self.nxy
self.nEdges_global=self.eXYxy
self.nElements_global=self.qxy
self.nElementBoundaries_global=self.eXYxy
self.nodeArray=np.reshape(self.nodeGridArray,(self.nxy,3))
self.elementNodesArray=self.quadrilateralNodesArray
self.elementBoundariesArray=self.edgeNodesArray
#todo extract boundary mesh
class RectangularGrid(Mesh):
"""A regular partition into rectangles.
Nodes, edges, and faces can be indexed by (i,j,k) as follows.
The edges and faces are divided according to orientation (i.e. x-edge...).
An (i,j,k) index is associated with the type of edge or face
having node (i,j,k) as the first node in a lexicographically sorted
list of nodes corresponding to the edge or face."""
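    # Numbering sketch (see getNodeNumber below): node (i,j,k) gets global number
    # i + j*nx + k*nx*ny, so on a 3x3x3 grid node (1,2,0) is node 1 + 2*3 + 0*9 = 7.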
def __init__(self,nx=1,ny=1,nz=1,Lx=1.0,Ly=1.0,Lz=1.0):
Mesh.__init__(self)
self.Lx = Lx
self.Ly = Ly
self.Lz = Lz
#nodes
self.nx=nx
self.ny=ny
self.nz=nz
self.nxy = nx*ny;
self.nxyz = nx*ny*nz;
#edges
self.nXex=nx-1 #number of x-edges in the x dimension
self.nXey=ny
self.nXez=nz
self.nYex=nx
self.nYey=ny-1
self.nYez=nz
self.nZex=nx
self.nZey=ny
self.nZez=nz-1
#number of edges of all types associated with a row of nodes
self.nXYZex = self.nXex + self.nYex + self.nZex
#number of edges associated with an xy plane of nodes
self.nXexy=self.nXex*self.nXey
self.nYexy=self.nYex*self.nYey
self.nZexy=self.nZex*self.nZey
self.nXYZexy = self.nXexy + self.nYexy + self.nZexy
#number of edges of each type in the grid
self.nXexyz = self.nXexy*self.nXez
self.nYexyz = self.nYexy*self.nYez
self.nZexyz = self.nZexy*self.nZez
#total number of edges
self.nXYZexyz = self.nXexyz + self.nYexyz + self.nZexyz
#quadrilaterals
self.nXYhx=nx-1 #number of XY quadrilaterals in x-dimension
self.nXYhy=ny-1
self.nXYhz=nz
self.nXZhx=nx-1
self.nXZhy=ny
self.nXZhz=nz-1
self.nYZhx=nx
self.nYZhy=ny-1
self.nYZhz=nz-1
        #number of quadrilaterals of all types associated with a row of nodes
self.nXY_XZ_YZhx =self.nXYhx + self.nXZhx + self.nYZhx
#number of quadrilaterals associated with an xy plane of nodes
self.nXYhxy=self.nXYhx*self.nXYhy
self.nXZhxy=self.nXZhx*self.nXZhy
self.nYZhxy=self.nYZhx*self.nYZhy
self.nXY_XZ_YZhxy =self.nXYhxy + self.nXZhxy + self.nYZhxy
#number of quadrilaterals of each type in the grid
self.nXYhxyz = self.nXYhxy*self.nXYhz
self.nXZhxyz = self.nXZhxy*self.nXZhz
self.nYZhxyz = self.nYZhxy*self.nYZhz
#total number of quadrilaterals
self.nXY_XZ_YZhxyz =self.nXYhxyz + self.nXZhxyz + self.nYZhxyz
#hexahedra
self.nHx=nx-1
self.nHy=ny-1
self.nHz=nz-1
self.nHxy = self.nHx*self.nHy
self.nHxyz = self.nHxy*self.nHz
#encode red and black
self.black=0
self.red=1
#dimensions of hexahedra
if self.nHx>0:
hx = old_div(float(Lx),(nx-1))
else:
hx = 1.0
if self.nHy>0:
hy = old_div(float(Ly),(ny-1))
else:
hy=1.0
if self.nHz>0:
hz = old_div(float(Lz),(nz-1))
else:
hz=1.0
self.nodeDict={}
self.xedgeDict={}
self.yedgeDict={}
self.zedgeDict={}
self.xedgeList=[]
self.yedgeList=[]
self.zedgeList=[]
self.XYQuadrilateralDict={}
self.XZQuadrilateralDict={}
self.YZQuadrilateralDict={}
self.XYQuadrilateralList=[]
self.XZQuadrilateralList=[]
self.YZQuadrilateralList=[]
self.hexahedronDict={}
self.hexahedronList=[]
self.nodeList=[]
for k in range(self.nz):
for j in range(self.ny):
for i in range(self.nx):
n = self.getNodeNumber(i,j,k)
x = i*hx
y = j*hy
z = k*hz
self.nodeDict[(i,j,k)]=Node(n,x,y,z)
self.nodeList.append(self.nodeDict[(i,j,k)])
for k in range(self.nXez):
for j in range(self.nXey):
for i in range(self.nXex):
en = self.getXEdgeNumber(i,j,k)
self.xedgeDict[(i,j,k)] = Edge(en,
[self.getNode(i,j,k),
self.getNode(i+1,j,k)])
self.xedgeList.append(self.xedgeDict[(i,j,k)])
for k in range(self.nYez):
for j in range(self.nYey):
for i in range(self.nYex):
en = self.getYEdgeNumber(i,j,k)
self.yedgeDict[(i,j,k)] = Edge(en,
[self.getNode(i,j,k),
self.getNode(i,j+1,k)])
self.yedgeList.append(self.yedgeDict[(i,j,k)])
for k in range(self.nZez):
for j in range(self.nZey):
for i in range(self.nZex):
en = self.getZEdgeNumber(i,j,k)
self.zedgeDict[(i,j,k)] = Edge(en,
[self.getNode(i,j,k),
self.getNode(i,j,k+1)])
self.zedgeList.append(self.zedgeDict[(i,j,k)])
for k in range(self.nXYhz):
for j in range(self.nXYhy):
for i in range(self.nXYhx):
qn = self.getXYQuadrilateralNumber(i,j,k)
edges = [self.getXEdge(i,j,k),
self.getXEdge(i,j+1,k),
self.getYEdge(i,j,k),
self.getYEdge(i+1,j,k)]
self.XYQuadrilateralDict[(i,j,k)] = Quadrilateral(qn,edges)
self.XYQuadrilateralList.append(
self.XYQuadrilateralDict[(i,j,k)])
for k in range(self.nXZhz):
for j in range(self.nXZhy):
for i in range(self.nXZhx):
qn = self.getXZQuadrilateralNumber(i,j,k)
edges = [self.getXEdge(i,j,k),
self.getXEdge(i,j,k+1),
self.getZEdge(i,j,k),
self.getZEdge(i+1,j,k)]
self.XZQuadrilateralDict[(i,j,k)] = Quadrilateral(qn,edges)
self.XZQuadrilateralList.append(
self.XZQuadrilateralDict[(i,j,k)])
for k in range(self.nYZhz):
for j in range(self.nYZhy):
for i in range(self.nYZhx):
qn = self.getYZQuadrilateralNumber(i,j,k)
edges = [self.getYEdge(i,j,k),
self.getYEdge(i,j,k+1),
self.getZEdge(i,j,k),
self.getZEdge(i,j+1,k)]
self.YZQuadrilateralDict[(i,j,k)] = Quadrilateral(qn,edges)
self.YZQuadrilateralList.append(
self.YZQuadrilateralDict[(i,j,k)])
for k in range(self.nHz):
for j in range(self.nHy):
for i in range(self.nHx):
Hn = self.getHexahedronNumber(i,j,k)
quadrilaterals = [self.getXYQuadrilateral(i,j,k),
self.getXYQuadrilateral(i,j,k+1),
self.getXZQuadrilateral(i,j,k),
self.getXZQuadrilateral(i,j+1,k),
self.getYZQuadrilateral(i,j,k),
self.getYZQuadrilateral(i+1,j,k)]
self.hexahedronDict[(i,j,k)] = Hexahedron(Hn,
quadrilaterals)
self.hexahedronList.append(self.hexahedronDict[(i,j,k)])
#build lists for mesh base class
self.edgeList = self.xedgeList + \
self.yedgeList + \
self.zedgeList
#figure out if this is a 1D,2D, or 3D grid
if self.nz > 1:
self.elementList = self.hexahedronList
self.elementDict = self.hexahedronDict
elif self.ny > 1:
self.elementList = self.XYQuadrilateralList
self.elementDict = self.XYQuadrilateralDict
else:
self.elementList = self.xedgeList
self.elementDict = self.xedgeDict
#self.buildArraysFromLists()
#todo: extract boundary mesh
def getNodeNumber(self,i,j,k):
return i + j*self.nx + k*self.nxy
def getNode(self,i,j,k):
return self.nodeDict[(i,j,k)]
def getXEdgeNumber(self,ie,je,ke):
return ie + je*self.nXex + ke*self.nXexy
def getYEdgeNumber(self,ie,je,ke):
return ie + je*self.nYex + ke*self.nYexy
def getZEdgeNumber(self,ie,je,ke):
return ie + je*self.nZex + ke*self.nZexy
def getXEdge(self,ie,je,ke):
return self.xedgeDict[(ie,je,ke)]
def getYEdge(self,ie,je,ke):
return self.yedgeDict[(ie,je,ke)]
def getZEdge(self,ie,je,ke):
return self.zedgeDict[(ie,je,ke)]
def getXYQuadrilateralNumber(self,ih,jh,kh):
return ih + jh*self.nXYhx + kh*self.nXYhxy
def getXZQuadrilateralNumber(self,ih,jh,kh):
return ih + jh*self.nXZhx + kh*self.nXZhxy
def getYZQuadrilateralNumber(self,ih,jh,kh):
return ih + jh*self.nYZhx + kh*self.nYZhxy
def getXYQuadrilateral(self,ih,jh,kh):
return self.XYQuadrilateralDict[(ih,jh,kh)]
def getXZQuadrilateral(self,ih,jh,kh):
return self.XZQuadrilateralDict[(ih,jh,kh)]
def getYZQuadrilateral(self,ih,jh,kh):
return self.YZQuadrilateralDict[(ih,jh,kh)]
def getHexahedronNumber(self,iH,jH,kH):
return iH + jH*self.nHx + kH*self.nHxy
def getHexahedron(self,iH,jH,kH):
return self.hexahedronDict[(iH,jH,kH)]
def getColor(self,i,j,k):
return (i%2 + j%2 + k%2)%2
def refine(self,oldMesh,refineFactorX=2,refineFactorY=2,refineFactorZ=2):
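        """Re-initialize this grid as a refinement of oldMesh.

        Each coarse cell is split refineFactorX*refineFactorY*refineFactorZ ways; returns
        a dict mapping each coarse element number to its list of child elements.
        """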
NX = oldMesh.nx
NY = oldMesh.ny
NZ = oldMesh.nz
if NX > 1:
NX = (NX-1)*refineFactorX + 1
else:
refineFactorX=1
if NY > 1:
NY = (NY-1)*refineFactorY + 1
else:
refineFactorY=1
if NZ > 1:
NZ = (NZ-1)*refineFactorZ + 1
else:
refineFactorZ=1
RectangularGrid.__init__(self,NX,NY,NZ,
oldMesh.Lx,oldMesh.Ly,oldMesh.Lz)
childrenDict={}
for IJK,e in oldMesh.elementDict.items():
I = IJK[0]
J = IJK[1]
K = IJK[2]
childrenDict[e.N]=[]
for xOffset in range(refineFactorX):
for yOffset in range(refineFactorY):
for zOffset in range(refineFactorZ):
i = I*refineFactorX + xOffset
j = J*refineFactorY + yOffset
k = K*refineFactorZ + zOffset
childrenDict[e.N].append(self.elementDict[(i,j,k)])
return childrenDict
class MultilevelRectangularGrid(MultilevelMesh):
"""A hierarchical multilevel grid"""
def __init__(self,levels,nx,ny=1,nz=1,
Lx=1.0,Ly=1.0,Lz=1.0,
refinementLevels=1):
MultilevelMesh.__init__(self)
self.refineFactorList=[EVec(0,0,0)]
self.meshList.append(RectangularGrid(nx,ny,nz,Lx,Ly,Lz))
self.elementChildren = []
logEvent(self.meshList[0].meshInfo())
for l in range(1,refinementLevels+1):
self.refine()
logEvent(self.meshList[-1].meshInfo())
    def refine(self):
        self.meshList.append(RectangularGrid())
childrenDict = self.meshList[-1].refine(self.meshList[-2])
self.elementChildren.append(childrenDict)
class TetrahedralMesh(Mesh):
"""A mesh of tetrahedra.
The nodes, edges, triangles, and tetrahedra are indexed by their
node tuples. The corresponding lists are derived from the dictionaries, and
sorted lexicographically. The global node numbers are redefined to
give a lexicographic ordering.
The mesh can be generated from a rectangular grid and refined using either
4T or Freudenthal-Bey global refinement.
Attributes
----------
elementNodesArray : array_like
A list of lists storing the node values associated with each element
in the triangulation. The first index refers to the element number,
while the second index refers to the global node value.
nodeArray : array_like
        A list of lists storing node coordinates. The first index refers
to the global node number, while the second index refers to the x, y
and z coordinates of the node respectively.
"""
def __init__(self):
Mesh.__init__(self)
self.nodeDict={}
self.edgeDict={}
self.triangleDict={}
self.triangleList=[]
self.tetrahedronDict={}
self.tetrahedronList=[]
self.oldToNewNode=[]
self.boundaryMesh=TriangularMesh()
def meshType(self):
return 'simplex'
def computeGeometricInfo(self):
from . import cmeshTools
cmeshTools.computeGeometricInfo_tetrahedron(self.cmesh)
def generateTetrahedralMeshFromRectangularGrid(self,nx,ny,nz,Lx,Ly,Lz):
from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
logEvent("Generating grid and mesh")
cmeshTools.generateTetrahedralMeshFromRectangularGrid(nx,ny,nz,Lx,Ly,Lz,self.cmesh)
logEvent("Allocating geometric info")
cmeshTools.allocateGeometricInfo_tetrahedron(self.cmesh)
logEvent("Computing geometric info")
cmeshTools.computeGeometricInfo_tetrahedron(self.cmesh)
self.buildFromC(self.cmesh)
def rectangularToTetrahedral6T(self,grid):
#copy the nodes from the rectangular mesh
#I want to be able to renumber later without
#changing the grid nodes, so I do deep copies here
self.nodeList = [Node(n.N,n.p[X],n.p[Y],n.p[Z]) for n in grid.nodeList]
self.nodeDict = dict([(n,n) for n in self.nodeList])
for i in range(grid.nHx):
for j in range(grid.nHy):
for k in range(grid.nHz):
#associate the element (i,j,k) with the
#left, front, bottom node
#do a top down numbering to match Ong's dissertation
n1 = self.nodeList[grid.getNodeNumber(i,j,k+1)]
n2 = self.nodeList[grid.getNodeNumber(i,j+1,k+1)]
n3 = self.nodeList[grid.getNodeNumber(i+1,j+1,k+1)]
n4 = self.nodeList[grid.getNodeNumber(i+1,j,k+1)]
n5 = self.nodeList[grid.getNodeNumber(i,j,k)]
n6 = self.nodeList[grid.getNodeNumber(i,j+1,k)]
n7 = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
n8 = self.nodeList[grid.getNodeNumber(i+1,j,k)]
self.newTetrahedron(nodes=[n1,n2,n3,n6])
self.newTetrahedron(nodes=[n1,n3,n5,n6])
self.newTetrahedron(nodes=[n3,n5,n6,n7])
self.newTetrahedron(nodes=[n1,n3,n4,n5])
self.newTetrahedron(nodes=[n3,n4,n5,n7])
self.newTetrahedron(nodes=[n4,n5,n7,n8])
self.finalize()
def rectangularToTetrahedral5T(self,grid):
#copy the nodes from the rectangular mesh
#I want to be able to renumber later without
#changing the grid nodes, so I do deep copies here
self.nodeList = [Node(n.N,n.p[X],n.p[Y],n.p[Z]) for n in grid.nodeList]
self.nodeDict = dict([(n,n) for n in self.nodeList])
for i in range(grid.nHx):
for j in range(grid.nHy):
for k in range(grid.nHz):
#associate the element (i,j,k) with the
#left, front, bottom node
#get the left,front,bottom,node and its color
if (grid.getColor(i,j,k) == grid.black):
b0 = self.nodeList[grid.getNodeNumber(i,j,k)]
rx = self.nodeList[grid.getNodeNumber(i+1,j,k)]
ry = self.nodeList[grid.getNodeNumber(i,j+1,k)]
rz = self.nodeList[grid.getNodeNumber(i,j,k+1)]
r0 = self.nodeList[grid.getNodeNumber(i+1,j+1,k+1)]
bx = self.nodeList[grid.getNodeNumber(i,j+1,k+1)]
by = self.nodeList[grid.getNodeNumber(i+1,j,k+1)]
bz = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
else:
r0 = self.nodeList[grid.getNodeNumber(i,j,k)]
bx = self.nodeList[grid.getNodeNumber(i+1,j,k)]
by = self.nodeList[grid.getNodeNumber(i,j+1,k)]
bz = self.nodeList[grid.getNodeNumber(i,j,k+1)]
b0 = self.nodeList[grid.getNodeNumber(i+1,j+1,k+1)]
rx = self.nodeList[grid.getNodeNumber(i,j+1,k+1)]
ry = self.nodeList[grid.getNodeNumber(i+1,j,k+1)]
rz = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
self.newTetrahedron(nodes=[rx,by,bz,b0])
self.newTetrahedron(nodes=[ry,bz,bx,b0])
self.newTetrahedron(nodes=[rz,b0,bx,by])
self.newTetrahedron(nodes=[r0,bx,by,bz])
self.newTetrahedron(nodes=[b0,bx,by,bz])
self.finalize()
rectangularToTetrahedral = rectangularToTetrahedral6T
def fixLocalNumbering(self):
for TN in range(len(self.tetrahedronList)):
self.tetrahedronList[TN].computeGeometricInfo()
if edet(self.tetrahedronList[TN].linearMap) < 0:
newNodes = list(self.tetrahedronList[TN].nodes)
newNodes[2] = self.tetrahedronList[TN].nodes[1]
newNodes[1] = self.tetrahedronList[TN].nodes[2]
self.tetrahedronList[TN].nodes = newNodes
def finalize(self):
self.buildLists()
#self.fixLocalNumbering()
self.buildBoundaryMaps()
self.buildArraysFromLists()
self.hMax = 0.0
self.hMin = 1.0e16
self.sigmaMax = 0.0
self.totalVolume = 0.0
for T in self.tetrahedronList:
T.computeGeometricInfo()
self.hMax = max(T.diameter,self.hMax)
self.hMin = min(T.diameter,self.hMin)
self.sigmaMax = max(old_div(T.diameter,T.innerDiameter),self.sigmaMax)
self.totalVolume += T.volume
def buildLists(self):
self.buildListsNodes()
self.buildListsEdges()
self.buildListsTriangles()
self.buildListsTetrahedra()
self.elementList = self.tetrahedronList
self.elementBoundaryList = self.triangleList
def buildListsNodes(self):
keyList = list(self.nodeDict.keys())
keyList.sort()
self.nodeList=[]
self.oldToNewNode=list(range(len(self.nodeDict)))
for nN,k in enumerate(keyList):
self.oldToNewNode[self.nodeDict[k].N]=nN
self.nodeDict[k].N = nN
self.nodeList.append(self.nodeDict[k])
def buildListsEdges(self):
keyList = list(self.edgeDict.keys())
keyList.sort()
self.edgeList=[]
for eN,k in enumerate(keyList):
self.edgeDict[k].N = eN
self.edgeList.append(self.edgeDict[k])
def buildListsTriangles(self):
keyList = list(self.triangleDict.keys())
keyList.sort()
self.triangleList=[]
for tN,k in enumerate(keyList):
self.triangleDict[k].N = tN
self.triangleList.append(self.triangleDict[k])
self.polygonList = self.triangleList
def buildListsTetrahedra(self):
keyList = list(self.tetrahedronDict.keys())
keyList.sort()
self.tetrahedronList=[]
for TN,k in enumerate(keyList):
self.tetrahedronDict[k].N = TN
self.tetrahedronList.append(self.tetrahedronDict[k])
self.polyhedronList = self.tetrahedronList
def buildBoundaryMaps(self):
"""
Extract a mapping tn -> list((TN,tnLocal)) that
provides all elements with the boundary face (triangle) tn
and the local triangle number for that triangle.
Likewise build mappings for edges and nodes
Also extract a list of the triangles with only one associate
element; these are the external boundary triangles. Then extract
the edges and nodes from the boundary triangles.
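For example (sketch of the resulting structures):
    self.triangleMap[tn]            # -> [(TN, localTriangleNumber), ...]
    len(self.triangleMap[tn]) == 1  # true exactly when triangle tn is on the boundary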
"""
self.triangleMap=[[] for t in self.triangleList]
self.edgeMap=[[] for e in self.edgeList]
self.nodeMap=[[] for n in self.nodeList]
self.boundaryTriangles=set()
self.interiorTriangles=set()
self.boundaryEdges=set()
self.boundaryNodes=set()
self.interiorEdges=set()
self.interiorNodes=set()
logEvent("Building triangle,edge, and node maps")
for T in self.tetrahedronList:
for localTriangleNumber,t in enumerate(T.triangles):
self.triangleMap[t.N].append((T.N,localTriangleNumber))
for localEdgeNumber,e in enumerate(T.edges):
self.edgeMap[e.N].append((T.N,localEdgeNumber))
for localNodeNumber,n in enumerate(T.nodes):
self.nodeMap[n.N].append((T.N,localNodeNumber))
logEvent("Extracting boundary and interior triangles")
for tN,etList in enumerate(self.triangleMap):
if len(etList) == 1:
self.boundaryTriangles.add(self.triangleList[tN])
else:
self.interiorTriangles.add(self.triangleList[tN])
logEvent("Extracting boundary edges and nodes")
for t in self.boundaryTriangles:
self.boundaryEdges.update(t.edges)
self.boundaryNodes.update(t.nodes)
logEvent("Extracting interior edges and nodes")
for t in self.interiorTriangles:
self.interiorEdges.update(t.edges)
self.interiorNodes.update(t.nodes)
self.boundaryMesh.buildFromSets(self.boundaryTriangles,
self.boundaryEdges,self.boundaryNodes)
def newTetrahedron(self,nodes):
T = Tetrahedron(tetrahedronNumber=len(self.tetrahedronDict),
nodes=nodes)
self.tetrahedronDict[T.nodes] = T
self.registerTriangles(T)
return T
def registerEdges(self,t):
for en,e in enumerate(t.edges):
if e.nodes in self.edgeDict:
t.edges[en]=self.edgeDict[e.nodes]
else:
eN=len(self.edgeDict)
e.N=eN
self.edgeDict[e.nodes]=e
def registerTriangles(self,T):
for tn,t in enumerate(T.triangles):
if t.nodes in self.triangleDict:
T.triangles[tn]=self.triangleDict[t.nodes]
else:
t.N=len(self.triangleDict)
self.triangleDict[t.nodes]=t
self.registerEdges(t)
def registerNode(self,node):
if node in self.nodeDict:
node = self.nodeDict[node]
else:
node.N = len(self.nodeDict)
self.nodeDict[node] = node
return node
def readMeshADH(self,filename,adhBase=1):
meshIn = open(filename+'.3dm','r')
firstLine = meshIn.readline()
firstWords = firstLine.split()
logEvent("Reading object=%s from file=%s" % (firstWords[0],filename))
line = meshIn.readline()
columns = line.split()
tets = []
tetEdges=set()
tetTriangles=set()
logEvent("Reading "+str(filename)+" and building node lists for tetrahedra,triangles, and edges")
#assume tets are ordered by tet number
while (columns[0] == 'E4T'):
nodeNumbers = [int(c) - adhBase for c in columns[2:6]]
nodeNumbers.sort()
tets.append(array.array('i',nodeNumbers))
tetTriangles.update([(nodeNumbers[1],nodeNumbers[2],nodeNumbers[3]),
(nodeNumbers[0],nodeNumbers[2],nodeNumbers[3]),
(nodeNumbers[0],nodeNumbers[1],nodeNumbers[3]),
(nodeNumbers[0],nodeNumbers[1],nodeNumbers[2])])
tetEdges.update([(nodeNumbers[0],nodeNumbers[1]),
(nodeNumbers[0],nodeNumbers[2]),
(nodeNumbers[0],nodeNumbers[3]),
(nodeNumbers[1],nodeNumbers[2]),
(nodeNumbers[1],nodeNumbers[3]),
(nodeNumbers[2],nodeNumbers[3])])
line = meshIn.readline()
columns = line.split()
print("Building node list and dict")
#assume nodes are ordered by node number
while (len(columns) == 5):
newNode = Node(int(columns[1]) - adhBase,
float(columns[2]),
float(columns[3]),
float(columns[4]))
self.nodeList.append(newNode)
self.nodeDict[newNode]=newNode
line = meshIn.readline()
columns = line.split()
print("Number of tetrahedra:"+str(len(tets)))
print("Number of triangles :"+str(len(tetTriangles)))
print("Number of edges :"+str(len(tetEdges)))
print("Number of nodes :"+str(len(self.nodeList)))
print("Number of objects :"+str(len(tetEdges)+len(tetTriangles)+len(tets)+len(self.nodeList)))
print("Building edge list")
self.edgeList =[Edge(edgeNumber=eN,nodes=[self.nodeList[nN[0]],self.nodeList[nN[1]]]) \
for eN,nN in enumerate(tetEdges)]
print("Building edge dict")
self.edgeDict = dict([(e.nodes,e) for e in self.edgeList])
print("Building triangle list")
self.triangleList =[Triangle(triangleNumber=tN,nodes=[self.nodeList[nN[0]],self.nodeList[nN[1]],self.nodeList[nN[2]]],edgeDict=self.edgeDict) \
for tN,nN in enumerate(tetTriangles)]
print("Building triangle dict")
self.triangleDict = dict([(t.nodes,t) for t in self.triangleList])
print("Building tetredron list")
self.tetrahedronList = [Tetrahedron(tetrahedronNumber=TN,
nodes=[self.nodeList[nN[0]],self.nodeList[nN[1]],self.nodeList[nN[2]],self.nodeList[nN[3]]],
edgeDict=self.edgeDict,
triangleDict=self.triangleDict) \
for TN,nN in enumerate(tets)]
self.elementList = self.tetrahedronList
self.elementBoundaryList = self.triangleList
print("Building tetrahedron dict")
self.tetrahedronDict = dict([(T.nodes,T) for T in self.tetrahedronList])
print("Building boundary maps")
self.buildBoundaryMaps()
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,tCount=0, EB=False):
#print "Warning mwf hack for EB printing for tet writeMeshXdmf for now"
#EB = True
Mesh.writeMeshXdmf(self,ar,name,t,init,meshChanged,"Tetrahedron",tCount,EB=EB)
def writeMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'.case','w')
caseOut.write('FORMAT\n'+'type: ensight gold\n')
caseOut.write('GEOMETRY\n'+'model: '+filename+'.geo\n')
caseOut.close()
meshOut=open(filename+'.geo','w')
meshOut.write('Ensight Gold\n')
meshOut.write('Unstructured Tetrahedral Mesh\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
#extents = 'extents\n %12.5E %12.5E\n %12.5E %12.5E\n %12.5E %12.5E\n' % (self.xmin,self.xmax,self.ymin,self.ymax,self.zmin,self.zmax)
#meshOut.write('extents\n'+`self.xmin`+' '+`self.xmax`+'\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write('A Mesh\n')
meshOut.write('coordinates\n'+'%10i\n' % self.nNodes_global)
for nN in range(self.nNodes_global):
meshOut.write('%10i\n' % (nN+base))
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,0])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,1])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,2])
meshOut.write('tetra4\n'+'%10i\n' % self.nElements_global)
for eN in range(self.nElements_global):
meshOut.write('%10i\n' % (eN+base))
for eN in range(self.nElements_global):
meshOut.write('%10i%10i%10i%10i\n' % tuple((nN+base) for nN in self.elementNodesArray[eN,:]))
meshOut.close()
def appendMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'.case','a')
caseOut.write('GEOMETRY\n'+'model: '+filename+'.geo\n')
caseOut.close()
meshOut=open(filename+'.geo','w')
meshOut.write('Unstructured Tetrahedral Mesh\n\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
#extents = 'extents\n %12.5E %12.5E\n %12.5E %12.5E\n %12.5E %12.5E\n' % (self.xmin,self.xmax,self.ymin,self.ymax,self.zmin,self.zmax)
#meshOut.write('extents\n'+`self.xmin`+' '+`self.xmax`+'\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write("A Mesh")
meshOut.write('coordinates\n'+'%10i\n' % len(self.nodeList))
for n in self.nodeList:
nN = n.N+base
meshOut.write('%10i\n' % nN)
for n in self.nodeList:
meshOut.write('%12.5E\n' % n.p[X])
for n in self.nodeList:
meshOut.write('%12.5E\n' % n.p[Y])
for n in self.nodeList:
meshOut.write('%12.5E\n' % n.p[Z])
meshOut.write('tetra4\n'+'%10i\n' % len(self.elementList))
for e in self.elementList:
eN = e.N + base
meshOut.write('%10i\n' % eN)
for e in self.elementList:
meshOut.write('%10i%10i%10i%10i\n' % tuple(n.N+base for n in e.nodes))
meshOut.close()
def writeMeshADH(self,filename,adhBase=1):
from . import cmeshTools
cmeshTools.write3dmFiles(self.cmesh,filename,adhBase)
def writeBoundaryFacesADH(self,filename,adhBase=1):
boundaryFacesOut=open(filename,'w')
for t in self.boundaryTriangles:
TN = self.triangleMap[t.N][0][0]
T = self.tetrahedronList[TN]
localFaceNumber = self.triangleMap[t.N][0][1]
T.computeGeometricInfo()
DJ = edet(T.linearMap)
if DJ < 0:
#print "Negative determinant ="+`DJ`+" Swapping two nodes"
newNodes = list(T.nodes)
newNodes[3] = T.nodes[2]
newNodes[2] = T.nodes[3]
newBasis = [n - newNodes[0] for n in newNodes[1:]]
newMap = ETen(newBasis[0],newBasis[1],newBasis[2])
#print "New Determinant "+`edet(newMap)`
#the local face numbers 2 and 3 swap along with the nodes
if localFaceNumber == 2:
localFaceNumber = 3
elif localFaceNumber == 3:
localFaceNumber = 2
line = 'FCS %5i %5i %5i' % \
(T.N + adhBase,
localFaceNumber + adhBase,
1)
#print line
boundaryFacesOut.write(line+'\n')
boundaryFacesOut.close()
def writeBoundaryNodesADH(self,filename,adhBase=1):
boundaryNodesOut=open(filename,'w')
for n in self.boundaryNodes:
line = 'NDS %5i %5i' % \
(n.N + adhBase,
1)
#print line
boundaryNodesOut.write(line+'\n')
boundaryNodesOut.close()
def refine4T(self,oldMesh):
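#(sketch of the scheme: each parent tetrahedron is split into four children
# by connecting its barycenter to each of its four faces)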
childrenDict={}
for T in oldMesh.tetrahedronList:
#deep copy old nodes because we'll renumber
TNodes = [Node(eN,n.p[X],n.p[Y],n.p[Z]) for eN,n in enumerate(T.nodes)]
for lnN,n in enumerate(TNodes): TNodes[lnN]=self.registerNode(n)
#add new node
T.computeGeometricInfo()
newNode = Node(len(self.nodeDict),
T.barycenter[X],
T.barycenter[Y],
T.barycenter[Z])
newNode = self.registerNode(newNode)
T1=self.newTetrahedron([TNodes[0],TNodes[1],TNodes[2],newNode])
T2=self.newTetrahedron([TNodes[1],TNodes[2],TNodes[3],newNode])
T3=self.newTetrahedron([TNodes[2],TNodes[3],TNodes[0],newNode])
T4=self.newTetrahedron([TNodes[3],TNodes[0],TNodes[1],newNode])
childrenDict[T.N]=[T1,T2,T3,T4]
self.finalize()
return childrenDict
def refineFreudenthalBey(self,oldMesh):
logEvent("Refining the mesh using Freudenthal-Bey refinement")
childrenDict={}
for T in list(oldMesh.tetrahedronDict.values()):
#deep copy old nodes because we'll renumber
TNodes = [Node(nN,n.p[X],n.p[Y],n.p[Z]) for nN,n in \
enumerate(T.nodes)]
for lnN,n in enumerate(TNodes): TNodes[lnN]=self.registerNode(n)
#add new nodes (midpoints of edges)
#use local edge tuples as keys
newNodes={}
for et,en in T.edgeMap.items():
T.edges[en].computeGeometricInfo()
p = T.edges[en].barycenter
newNodes[et] = Node(en,p[X],p[Y],p[Z])
#set the global node numbers
for k,n in newNodes.items(): newNodes[k]=self.registerNode(n)
#add corner tets
T1=self.newTetrahedron([TNodes[0],
newNodes[(0,1)],
newNodes[(0,2)],
newNodes[(0,3)]])
T2=self.newTetrahedron([TNodes[1],
newNodes[(0,1)],
newNodes[(1,2)],
newNodes[(1,3)]])
T3=self.newTetrahedron([TNodes[2],
newNodes[(0,2)],
newNodes[(1,2)],
newNodes[(2,3)]])
T4=self.newTetrahedron([TNodes[3],
newNodes[(0,3)],
newNodes[(1,3)],
newNodes[(2,3)]])
#add center tets
#choose the shortest diagonal of the octahedron
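#(the four corner tets leave an octahedron in the middle; splitting it along
# its shortest diagonal into four tets is a common heuristic for limiting the
# degradation of the children's shape regularity)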
dLengths = [enorm(newNodes[(0,1)].p-newNodes[(2,3)].p),
enorm(newNodes[(0,2)].p-newNodes[(1,3)].p),
enorm(newNodes[(0,3)].p-newNodes[(1,2)].p)]
shortestEdgeLength = min(dLengths)
if shortestEdgeLength == dLengths[0]:
#diagonal (0,1)(2,3)
T5=self.newTetrahedron([newNodes[(0,1)],
newNodes[(2,3)],
newNodes[(0,3)],
newNodes[(1,3)]])
T6=self.newTetrahedron([newNodes[(0,1)],
newNodes[(2,3)],
newNodes[(0,3)],
newNodes[(0,2)]])
T7=self.newTetrahedron([newNodes[(0,1)],
newNodes[(2,3)],
newNodes[(0,2)],
newNodes[(1,2)]])
T8=self.newTetrahedron([newNodes[(0,1)],
newNodes[(2,3)],
newNodes[(1,2)],
newNodes[(1,3)]])
elif shortestEdgeLength == dLengths[1]:
#diagonal (0,2)(1,3)
T5=self.newTetrahedron([newNodes[(0,2)],
newNodes[(1,3)],
newNodes[(0,3)],
newNodes[(2,3)]])
T6=self.newTetrahedron([newNodes[(0,2)],
newNodes[(1,3)],
newNodes[(2,3)],
newNodes[(1,2)]])
T7=self.newTetrahedron([newNodes[(0,2)],
newNodes[(1,3)],
newNodes[(1,2)],
newNodes[(0,1)]])
T8=self.newTetrahedron([newNodes[(0,2)],
newNodes[(1,3)],
newNodes[(0,1)],
newNodes[(0,3)]])
else:
#diagonal (0,3)(1,2)
T5=self.newTetrahedron([newNodes[(0,3)],
newNodes[(1,2)],
newNodes[(0,1)],
newNodes[(1,3)]])
T6=self.newTetrahedron([newNodes[(0,3)],
newNodes[(1,2)],
newNodes[(1,3)],
newNodes[(2,3)]])
T7=self.newTetrahedron([newNodes[(0,3)],
newNodes[(1,2)],
newNodes[(2,3)],
newNodes[(0,2)]])
T8=self.newTetrahedron([newNodes[(0,3)],
newNodes[(1,2)],
newNodes[(0,2)],
newNodes[(0,1)]])
childrenDict[T.N]=[T1,T2,T3,T4,T5,T6,T7,T8]
self.finalize()
return childrenDict
#for debugging: print each tet
#self.edgeList=[]
#Tlist = self.tetrahedronDict.values()
#for T in Tlist:
# self.edgeList = self.edgeList + T.edges
def refine(self,oldMesh):
return self.refineFreudenthalBey(oldMesh)
def generateFromTetgenFiles(self,filebase,base,skipGeometricInit=False,parallel=False):
from . import cmeshTools
logEvent(memory("declaring CMesh"),level=4)
self.cmesh = cmeshTools.CMesh()
logEvent(memory("Initializing CMesh"),level=4)
if parallel:
cmeshTools.generateFromTetgenFilesParallel(self.cmesh,filebase,base)
else:
cmeshTools.generateFromTetgenFiles(self.cmesh,filebase,base)
logEvent(memory("calling cmeshTools.generateFromTetgenFiles","cmeshTools"),level=4)
if skipGeometricInit == False:
cmeshTools.allocateGeometricInfo_tetrahedron(self.cmesh)
cmeshTools.computeGeometricInfo_tetrahedron(self.cmesh)
self.buildFromC(self.cmesh)
logEvent(memory("calling buildFromC"),level=4)
def generateFrom3DMFile(self,filebase,base=1):
from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateFrom3DMFile(self.cmesh,filebase,base)
cmeshTools.allocateGeometricInfo_tetrahedron(self.cmesh)
cmeshTools.computeGeometricInfo_tetrahedron(self.cmesh)
self.buildFromC(self.cmesh)
def writeTetgenFiles(self,filebase,base):
from . import cmeshTools
cmeshTools.writeTetgenFiles(self.cmesh,filebase,base)
def meshInfo(self):
minfo = """Number of tetrahedra : %d
Number of triangles : %d
Number of edges : %d
Number of nodes : %d
max(sigma_k) : %f
min(h_k) : %f\n""" % (self.nElements_global,
self.nElementBoundaries_global,
self.nEdges_global,
self.nNodes_global,
self.sigmaMax,
self.hMin)
if self.subdomainMesh != self:
sinfo = self.subdomainMesh.meshInfo()
info = "*** Global ***\n" + minfo + "\n*** Local ***\n" + sinfo
return info
return minfo
class HexahedralMesh(Mesh):
"""A mesh of hexahedra.
"""
def __init__(self):
Mesh.__init__(self)
self.nodeDict={}
self.edgeDict={}
self.faceDict={}
self.faceList=[]
self.elemDict={}
self.elemList=[]
self.oldToNewNode=[]
self.boundaryMesh=QuadrilateralMesh()
def meshType(self):
return 'cuboid'
def computeGeometricInfo(self):
from . import cmeshTools
print("no info yet for hexahedral mesh")
#cmeshTools.computeGeometricInfo_tetrahedron(self.cmesh)
def generateHexahedralMeshFromRectangularGrid(self,nx,ny,nz,Lx,Ly,Lz):
from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateHexahedralMeshFromRectangularGrid(nx,ny,nz,0,0,0,Lx,Ly,Lz,self.cmesh)
cmeshTools.allocateGeometricInfo_hexahedron(self.cmesh)
cmeshTools.computeGeometricInfo_hexahedron(self.cmesh)
self.buildFromC(self.cmesh)
def finalize(self):
self.buildLists()
#self.fixLocalNumbering()
self.buildBoundaryMaps()
self.buildArraysFromLists()
self.hMax = 0.0
self.hMin = 1.0e16
self.sigmaMax = 0.0
self.totalVolume = 0.0
for T in self.elemList:
T.computeGeometricInfo()
self.hMax = max(T.diameter,self.hMax)
self.hMin = min(T.diameter,self.hMin)
self.sigmaMax = max(old_div(T.diameter,T.innerDiameter),self.sigmaMax)
self.totalVolume += T.volume
def buildLists(self):
self.buildListsNodes()
self.buildListsEdges()
self.buildListsFaces()
self.buildListsElems()
self.elementList = self.elemList
self.elementBoundaryList = self.faceList
def buildListsNodes(self):
keyList = list(self.nodeDict.keys())
keyList.sort()
self.nodeList=[]
self.oldToNewNode=list(range(len(self.nodeDict)))
for nN,k in enumerate(keyList):
self.oldToNewNode[self.nodeDict[k].N]=nN
self.nodeDict[k].N = nN
self.nodeList.append(self.nodeDict[k])
def buildListsEdges(self):
keyList = list(self.edgeDict.keys())
keyList.sort()
self.edgeList=[]
for eN,k in enumerate(keyList):
self.edgeDict[k].N = eN
self.edgeList.append(self.edgeDict[k])
def buildListsFaces(self):
keyList = list(self.faceDict.keys())
keyList.sort()
self.faceList=[]
for tN,k in enumerate(keyList):
self.faceDict[k].N = tN
self.faceList.append(self.faceDict[k])
self.polygonList = self.faceList
def buildListsElems(self):
keyList = list(self.elemDict.keys())
keyList.sort()
self.elemList=[]
for TN,k in enumerate(keyList):
self.elemDict[k].N = TN
self.elemList.append(self.elemDict[k])
self.polyhedronList = self.elemList
def buildBoundaryMaps(self):
"""
Extract a mapping tn -> list((TN,tnLocal)) that
provides all elements with the boundary face tn
and the local triangle number for that face
Likewise build mappings for edges and nodes
Also extract a list of the triangles with only one associate
element; these are the external boundary triangles. Then extract
the edges and nodes from the boundary triangles.
"""
self.faceMap=[[] for t in self.faceList]
self.edgeMap=[[] for e in self.edgeList]
self.nodeMap=[[] for n in self.nodeList]
self.boundaryFaces=set()
self.interiorFaces=set()
self.boundaryEdges=set()
self.boundaryNodes=set()
self.interiorEdges=set()
self.interiorNodes=set()
logEvent("Building triangle,edge, and node maps")
for T in self.elemList:
for localFaceNumber,t in enumerate(T.faces):
self.faceMap[t.N].append((T.N,localFaceNumber))
for localEdgeNumber,e in enumerate(T.edges):
self.edgeMap[e.N].append((T.N,localEdgeNumber))
for localNodeNumber,n in enumerate(T.nodes):
self.nodeMap[n.N].append((T.N,localNodeNumber))
logEvent("Extracting boundary and interior triangles")
for tN,etList in enumerate(self.faceMap):
if len(etList) == 1:
self.boundaryFaces.add(self.faceList[tN])
else:
self.interiorFaces.add(self.faceList[tN])
logEvent("Extracting boundary edges and nodes")
for t in self.boundaryFaces:
self.boundaryEdges.update(t.edges)
self.boundaryNodes.update(t.nodes)
logEvent("Extracting interior edges and nodes")
for t in self.interiorFaces:
self.interiorEdges.update(t.edges)
self.interiorNodes.update(t.nodes)
self.boundaryMesh.buildFromSets(self.boundaryFaces,
self.boundaryEdges,self.boundaryNodes)
def registerEdges(self,t):
for en,e in enumerate(t.edges):
if e.nodes in self.edgeDict:
t.edges[en]=self.edgeDict[e.nodes]
else:
eN=len(self.edgeDict)
e.N=eN
self.edgeDict[e.nodes]=e
def registerFaces(self,T):
for tn,t in enumerate(T.faces):
if t.nodes in self.faceDict:
T.faces[tn]=self.faceDict[t.nodes]
else:
t.N=len(self.faceDict)
self.faceDict[t.nodes]=t
self.registerEdges(t)
def registerNode(self,node):
if node in self.nodeDict:
node = self.nodeDict[node]
else:
node.N = len(self.nodeDict)
self.nodeDict[node] = node
return node
# def refine(self,oldMesh):
# return self.refineFreudenthalBey(oldMesh)
def meshInfo(self):
minfo = """Number of hexahedra : %d
Number of faces : %d
Number of edges : %d
Number of nodes : %d
max(sigma_k) : %f
min(h_k) : %f\n""" % (self.nElements_global,
self.nElementBoundaries_global,
self.nEdges_global,
self.nNodes_global,
self.sigmaMax,
self.hMin)
if self.subdomainMesh != self:
sinfo = self.subdomainMesh.meshInfo()
info = "*** Global ***\n" + minfo + "\n*** Local ***\n" + sinfo
return info
return minfo
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,tCount=0,EB=False):
Mesh.writeMeshXdmf(self,ar,name,t,init,meshChanged,"Hexahedron",tCount,EB=EB)
def generateFromHexFile(self,filebase,base=0):
from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateFromHexFile(self.cmesh,filebase,base)
cmeshTools.allocateGeometricInfo_hexahedron(self.cmesh)
cmeshTools.computeGeometricInfo_hexahedron(self.cmesh)
self.buildFromC(self.cmesh)
class Mesh2DM(Mesh):
"""A triangular mesh based on an ADH 3dm file"""
def __init__(self,filename,adhBase=1):
meshIn = open(filename+'.3dm','r')
firstLine = meshIn.readline()
firstWords = firstLine.split()
logEvent("Reading object=%s from file=%s" % (firstWords[0],filename))
line = meshIn.readline()
columns = line.split()
#read in the triangles and nodes as memory-efficiently as possible
tn0 = array.array('i')
tn1 = array.array('i')
tn2 = array.array('i')
material = array.array('i')
nx = array.array('d')
ny = array.array('d')
nz = array.array('d')
print("Reading "+str(filename))
#assume triangles are ordered by triangle number
while (len(columns) > 0 and (columns[0] == 'E3T' or columns[0] == 'GE3')):
tn0.append(int(columns[2]))
tn1.append(int(columns[3]))
tn2.append(int(columns[4]))
material.append(int(columns[5]))
line = meshIn.readline()
columns = line.split()
#allow for missing lines
while (len(columns) == 0):
line = meshIn.readline()
columns = line.split()
#assume nodes are ordered by node number
while (len(columns) == 5):
nx.append(float(columns[2]))
ny.append(float(columns[3]))
nz.append(float(columns[4]))
line = meshIn.readline()
columns = line.split()
meshIn.close()
print("Allocating node and element arrays")
self.nTriangles_global = len(tn0)
self.triangleArray = np.zeros(
(self.nTriangles_global,3),'i')
tA = self.triangleArray
self.triangleMaterialArray = np.zeros(
(self.nTriangles_global,),'i')
tMA = self.triangleMaterialArray
self.nNodes_global = len(nx)
self.nodeArray = np.zeros((self.nNodes_global,3),'d')
for tN in range(self.nTriangles_global):
tA[tN,0] = tn0[tN] - adhBase
tA[tN,1] = tn1[tN] - adhBase
tA[tN,2] = tn2[tN] - adhBase
tMA[tN] = material[tN] - adhBase
for nN in range(self.nNodes_global):
self.nodeArray[nN,0]= nx[nN]
self.nodeArray[nN,1]= ny[nN]
self.nodeArray[nN,2]= nz[nN]
print("Deleting temporary storage")
del tn0,tn1,tn2,nx,ny,nz
self.nElements_global = self.nTriangles_global
self.elementNodesArray = self.triangleArray
self.elementMaterialTypes = self.triangleMaterialArray
print("Number of triangles:"+str(self.nElements_global))
print("Number of nodes :"+str(self.nNodes_global))
#archive with Xdmf
self.nNodes_element = 3
self.arGridCollection = None
self.arGrid = None; self.arTime = None
def buildEdgeArrays(self):
print("Extracting edges triangles dictionary")
edges_triangles={}
tA=self.triangleArray
self.nInteriorEdges_global=0
for N in range(self.nTriangles_global):
#sort node numbers so the nodes can
#uniquely identify the triangles/edges
n = list(tA[N,:])
n.sort()
edges = [(n[0],n[1]),
(n[0],n[2]),
(n[1],n[2])]
for e in edges:
if e in edges_triangles:
edges_triangles[e].append(N)
self.nInteriorEdges_global+=1
else:
edges_triangles[e]=[N]
print("Building edge and exterior arrays")
self.nEdges_global = len(edges_triangles)
self.edgeArray = np.zeros(
(self.nEdges_global,2),'i')
self.edgeMaterialArray = np.zeros(
(self.nEdges_global,2),'i')
self.interiorEdgeArray = np.zeros(
(self.nInteriorEdges_global,),'i')
self.nExteriorEdges_global = self.nEdges_global - \
self.nInteriorEdges_global
self.exteriorEdgeArray = np.zeros(
(self.nExteriorEdges_global,),'i')
eN=0
ieN=0
eeN=0
exteriorNodes=set()
eA = self.edgeArray
eMA = self.edgeMaterialArray
tMA = self.triangleMaterialArray
for eNodes,tlist in edges_triangles.items():
eA[eN,0]=eNodes[0]
eA[eN,1]=eNodes[1]
if len(tlist)==2:
self.interiorEdgeArray[ieN]=eN
eMA[eN][0]= tMA[tlist[0]]
eMA[eN][1]= tMA[tlist[1]]
ieN+=1
else:
exteriorNodes.update(eNodes)
self.exteriorEdgeArray[eeN]=eN
eMA[eN][0]=tMA[tlist[0]]
eeN+=1
eN+=1
self.nExteriorNodes_global = len(exteriorNodes)
self.exteriorNodeArray = np.zeros(
(self.nExteriorNodes_global,),'i')
self.globalToExteriorNodeArray = np.zeros(
(self.nNodes_global,),'i')
for nExtN,nN in enumerate(exteriorNodes):
self.exteriorNodeArray[nExtN]=nN
self.globalToExteriorNodeArray[nN]=nExtN
print("Number of edges :"+str(self.nEdges_global))
print("Number on interior :"+str(self.nInteriorEdges_global))
print("Number on exterior :"+str(self.nExteriorEdges_global))
print("Number of exterior nodes:"+str(self.nExteriorNodes_global))
#at this point we can easily build a boundary mesh by renumbering using
#exteriorNodeArray and exteriorEdgeArray to renumber
#and the info in nodeArray and edgeArray
def writeBoundaryMeshADH(self,filename,adhBase=1):
#I'll print it using node numbers from the 2D mesh
meshOut = open(filename+'Boundary.3dm','w')
meshOut.write('MESH1D\n')
for eeN in range(self.nExteriorEdges_global):
eN = self.exteriorEdgeArray[eeN]
n0 = self.edgeArray[eN][0] + adhBase
n1 = self.edgeArray[eN][1] + adhBase
m = self.edgeMaterialArray[eN][0] + adhBase
line = 'E3T %5i %5i %5i %5i' % \
(eN+adhBase,n0,n1,m)
meshOut.write(line+'\n')
meshOut.close()
def writeMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'.case','w')
caseOut.write('FORMAT\n'+'type: ensight gold\n')
caseOut.write('GEOMETRY\n'+'model: '+filename+'.geo\n')
caseOut.close()
meshOut=open(filename+'.geo','w')
meshOut.write('Ensight Gold\n')
meshOut.write('Unstructured Triangular Mesh\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
#extents = 'extents\n %12.5E %12.5E\n %12.5E %12.5E\n %12.5E %12.5E\n' % (self.xmin,self.xmax,self.ymin,self.ymax,self.zmin,self.zmax)
#meshOut.write('extents\n'+`self.xmin`+' '+`self.xmax`+'\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write('A Mesh\n')
meshOut.write('coordinates\n'+'%10i\n' % self.nNodes_global)
for nN in range(self.nNodes_global):
ensightNodeNumber = (nN+base)
meshOut.write('%10i\n' % ensightNodeNumber)
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,0])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,1])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,2])
meshOut.write('tria3\n'+'%10i\n' % self.nTriangles_global)
for tN in range(self.nTriangles_global):
ensightElementNumber = tN + base
meshOut.write('%10i\n' % ensightElementNumber)
tA = self.triangleArray
for tN in range(self.nTriangles_global):
meshOut.write('%10i%10i%10i\n' % (tA[tN,0]+base,tA[tN,1]+base,tA[tN,2]+base))
meshOut.close()
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,Xdmf_ElementTopology="Triangle",tCount=0):
if self.arGridCollection is not None:
init = False
elif not init:
self.arGridCollection = ar.domain.find("Grid")
if init:
self.arGridCollection = SubElement(ar.domain,"Grid",{"Name":"Mesh "+name,
"GridType":"Collection",
"CollectionType":"Temporal"})
if self.arGrid is None or self.arTime.get('Value') != "{0:e}".format(t):
#
#topology and geometry
#
if ar.global_sync:
self.arGrid = SubElement(self.arGridCollection,"Grid",{"GridType":"Uniform"})
self.arTime = SubElement(self.arGrid,"Time",{"Value":str(t),"Name":str(tCount)})
topology = SubElement(self.arGrid,"Topology",
{"Type":Xdmf_ElementTopology,
"NumberOfElements":"%i" % (self.globalMesh.nElements_global,)})
elements = SubElement(topology,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i %i" % (self.globalMesh.nElements_global,
self.nNodes_element)})
geometry = SubElement(self.arGrid,"Geometry",{"Type":"XYZ"})
nodes = SubElement(geometry,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Float",
"Precision":"8",
"Dimensions":"%i %i" % (self.globalMesh.nNodes_global,3)})
#material types
elementMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"elementMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementMaterialTypesValues = SubElement(elementMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.globalMesh.nElements_global,)})
if ar.hdfFile is not None:
elements.text = ar.hdfFilename+":/elements"+name+str(tCount)
nodes.text = ar.hdfFilename+":/nodes"+name+str(tCount)
elementMaterialTypesValues.text = ar.hdfFilename+":/"+"elementMaterialTypes"+"_t"+str(tCount)
if init or meshChanged:
ar.create_dataset_sync('elements'+name+str(tCount),
offsets = self.globalMesh.elementOffsets_subdomain_owned,
data = self.globalMesh.nodeNumbering_subdomain2global[self.elementNodesArray[:self.nElements_owned]])
ar.create_dataset_sync('nodes'+name+str(tCount),
offsets = self.globalMesh.nodeOffsets_subdomain_owned,
data = self.nodeArray[:self.nNodes_owned])
ar.create_dataset_sync("elementMaterialTypes"+"_t"+str(tCount),
offsets = self.globalMesh.elementOffsets_subdomain_owned,
data = self.elementMaterialTypes[:self.nElements_owned])
else:
assert False, "global_sync with text heavy data not supported"
else:
self.arGrid = SubElement(self.arGridCollection,"Grid",{"GridType":"Uniform"})
self.arTime = SubElement(self.arGrid,"Time",{"Value":str(t),"Name":str(tCount)})
topology = SubElement(self.arGrid,"Topology",
{"Type":Xdmf_ElementTopology,
"NumberOfElements":"%i" % (self.nElements_owned,)})
elements = SubElement(topology,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i %i" % (self.nElements_owned,self.nNodes_element)})
geometry = SubElement(self.arGrid,"Geometry",{"Type":"XYZ"})
nodes = SubElement(geometry,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Float",
"Precision":"8",
"Dimensions":"%i %i" % (self.nNodes_global,3)})
#material types
elementMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"elementMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementMaterialTypesValues = SubElement(elementMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.nElements_owned,)})
if ar.hdfFile is not None:
elements.text = ar.hdfFilename+":/elements"+str(ar.comm.rank())+name+str(tCount)
nodes.text = ar.hdfFilename+":/nodes"+str(ar.comm.rank())+name+str(tCount)
elementMaterialTypesValues.text = ar.hdfFilename+":/"+"elementMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount)
if init or meshChanged:
ar.create_dataset_async('elements'+str(ar.comm.rank())+name+str(tCount), data = self.elementNodesArray[:self.nElements_owned])
ar.create_dataset_async('nodes'+str(ar.comm.rank())+name+str(tCount), data = self.nodeArray)
ar.create_dataset_async("elementMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount), data = self.elementMaterialTypes[:self.nElements_owned])
else:
SubElement(elements,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/elements"+name+".txt"})
SubElement(nodes,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/nodes"+name+".txt"})
SubElement(elementMaterialTypesValues,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/"+"elementMaterialTypes"+str(tCount)+".txt"})
if init or meshChanged:
np.savetxt(ar.textDataDir+"/elements"+name+".txt",self.elementNodesArray[:self.nElements_owned],fmt='%d')
np.savetxt(ar.textDataDir+"/nodes"+name+".txt",self.nodeArray)
np.savetxt(ar.textDataDir+"/"+"elementMaterialTypes"+str(tCount)+".txt",self.elementMaterialTypes[:self.nElements_owned])
class Mesh3DM(Mesh):
"""
A Mesh for reading in tetrahedral meshes in the .3dm format
"""
def __init__(self,filename,adhBase=1):
meshIn = open(filename+'.3dm','r')
firstLine = meshIn.readline()
firstWords = firstLine.split()
print("Reading object=%s from file=%s" % (firstWords[0],filename))
line = meshIn.readline()
columns = line.split()
#read in the tetrahedra and nodes as memory-efficiently as possible
Tn0 = array.array('i')
Tn1 = array.array('i')
Tn2 = array.array('i')
Tn3 = array.array('i')
material = array.array('i')
nx = array.array('d')
ny = array.array('d')
nz = array.array('d')
print("Reading "+str(filename))
#assume tets are ordered by tet number
while (len(columns) > 0 and (columns[0] == 'E4T' or columns[0] == 'GE4')):
Tn0.append(int(columns[2]))
Tn1.append(int(columns[3]))
Tn2.append(int(columns[4]))
Tn3.append(int(columns[5]))
material.append(int(columns[6]))
line = meshIn.readline()
columns = line.split()
#assume nodes are ordered by node number
while (len(columns) == 5):
nx.append(float(columns[2]))
ny.append(float(columns[3]))
nz.append(float(columns[4]))
line = meshIn.readline()
columns = line.split()
meshIn.close()
print("Allocating node and element arrays")
self.nTetrahedra_global = len(Tn0)
self.tetrahedronArray = np.zeros(
(self.nTetrahedra_global,4),'i')
TA = self.tetrahedronArray
self.tetrahedronMaterialArray = np.zeros(
(self.nTetrahedra_global,),'i')
TMA = self.tetrahedronMaterialArray
self.nNodes_global = len(nx)
self.nodeArray = np.zeros((self.nNodes_global,3),'d')
for TN in range(self.nTetrahedra_global):
TA[TN,0] = Tn0[TN] - adhBase
TA[TN,1] = Tn1[TN] - adhBase
TA[TN,2] = Tn2[TN] - adhBase
TA[TN,3] = Tn3[TN] - adhBase
TMA[TN] = material[TN] - adhBase
for nN in range(self.nNodes_global):
self.nodeArray[nN,0]= nx[nN]
self.nodeArray[nN,1]= ny[nN]
self.nodeArray[nN,2]= nz[nN]
print("Deleting temporary storage")
del Tn0,Tn1,Tn2,Tn3,nx,ny,nz
self.nElements_global = self.nTetrahedra_global
self.elementNodesArray = self.tetrahedronArray
self.elementMaterialTypes = self.tetrahedronMaterialArray
self.arGridCollection=None
print("Number of tetrahedra:"+str(self.nElements_global))
print("Number of nodes :"+str(self.nNodes_global))
def buildTriangleArrays(self):
print("Extracting triangles tetrahedra dictionary")
triangles_tetrahedra={}
T=self.tetrahedronArray
self.nInteriorTriangles_global=0
for N in range(self.nTetrahedra_global):
#sort node numbers so the nodes can
#uniquely identify the triangles/edges
n = list(T[N,:])
n.sort()
triangles = [(n[0],n[1],n[2]),
(n[0],n[1],n[3]),
(n[0],n[2],n[3]),
(n[1],n[2],n[3])]
for t in triangles:
if t in triangles_tetrahedra:
triangles_tetrahedra[t].append(N)
self.nInteriorTriangles_global+=1
else:
triangles_tetrahedra[t]=[N]
print("Building triangle and exterior arrays")
self.nTriangles_global = len(triangles_tetrahedra)
self.triangleArray = np.zeros(
(self.nTriangles_global,3),'i')
self.triangleMaterialArray = np.zeros(
(self.nTriangles_global,2),'i')
self.interiorTriangleArray = np.zeros(
(self.nInteriorTriangles_global,),'i')
self.nExteriorTriangles_global = self.nTriangles_global - \
self.nInteriorTriangles_global
self.exteriorTriangleArray = np.zeros(
(self.nExteriorTriangles_global,),'i')
tN=0
itN=0
etN=0
exteriorNodes=set()
tA = self.triangleArray
tMA = self.triangleMaterialArray
TMA = self.tetrahedronMaterialArray
for tNodes,Tlist in triangles_tetrahedra.items():
tA[tN,0]=tNodes[0]
tA[tN,1]=tNodes[1]
tA[tN,2]=tNodes[2]
if len(Tlist)==2:
self.interiorTriangleArray[itN]=tN
tMA[tN][0]= TMA[Tlist[0]]
tMA[tN][1]= TMA[Tlist[1]]
itN+=1
else:
exteriorNodes.update(tNodes)
self.exteriorTriangleArray[etN]=tN
tMA[tN][0]=TMA[Tlist[0]]
etN+=1
tN+=1
self.nExteriorNodes_global = len(exteriorNodes)
self.exteriorNodeArray = np.zeros(
(self.nExteriorNodes_global,),'i')
self.globalToExteriorNodeArray = np.zeros(
(self.nNodes_global,),'i')
for nExtN,nN in enumerate(exteriorNodes):
self.exteriorNodeArray[nExtN]=nN
self.globalToExteriorNodeArray[nN]=nExtN
print("Number of triangles :"+str(self.nTriangles_global))
print("Number on interior :"+str(self.nInteriorTriangles_global))
print("Number on exterior :"+str(self.nExteriorTriangles_global))
print("Number of exterior nodes:"+str(self.nExteriorNodes_global))
#at this point we can easily build a boundary mesh by renumbering using
#exteriorNodeArray and exteriorTriangleArray to renumber
#and the info in nodeArray and triangleArray
def buildEdgeArray(self):
print("Extracting set of edges")
edges = set()
t=self.triangleArray
for N in range(self.nTriangles_global):
#triangle nodes are assumed sorted
edges.update([(t[N,0],t[N,1]),
(t[N,0],t[N,2]),
(t[N,1],t[N,2])])
print("Building edgeArray")
self.nEdges_global = len(edges)
self.edgeArray = np.zeros(
(self.nEdges_global,2),'i')
for eN,e in enumerate(edges):
self.edgeArray[eN][0] = e[0]
self.edgeArray[eN][1] = e[1]
del edges
print("Number of edges :"+str(self.nEdges_global))
def writeBoundaryMeshADH(self,filename,adhBase=1):
#I'll print it using node numbers from the 3D mesh
meshOut = open(filename+'Boundary.3dm','w')
meshOut.write('MESH2D\n')
for tN in self.exteriorTriangleArray:
n0 = self.triangleArray[tN][0] + adhBase
n1 = self.triangleArray[tN][1] + adhBase
n2 = self.triangleArray[tN][2] + adhBase
m = self.triangleMaterialArray[tN][0] + adhBase
line = 'E3T %5i %5i %5i %5i %5i' % \
(tN+adhBase,n0,n1,n2,m)
meshOut.write(line+'\n')
for nN in self.exteriorNodeArray:
n = self.nodeArray[nN]
line = 'ND %5i %14.8e %14.8e %14.8e' % \
(nN + adhBase,n[0],n[1],n[2])
#print line
meshOut.write(line+'\n')
meshOut.close()
def writeMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'.case','w')
caseOut.write('FORMAT\n'+'type: ensight gold\n')
caseOut.write('GEOMETRY\n'+'model: '+filename+'.geo\n')
caseOut.close()
meshOut=open(filename+'.geo','w')
meshOut.write('Ensight Gold\n')
meshOut.write('Unstructured Tetrahedral Mesh\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
#extents = 'extents\n %12.5E %12.5E\n %12.5E %12.5E\n %12.5E %12.5E\n' % (self.xmin,self.xmax,self.ymin,self.ymax,self.zmin,self.zmax)
#meshOut.write('extents\n'+`self.xmin`+' '+`self.xmax`+'\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write('A Mesh\n')
meshOut.write('coordinates\n'+'%10i\n' % self.nNodes_global)
for nN in range(self.nNodes_global):
ensightNodeNumber = (nN+base)
meshOut.write('%10i\n' % ensightNodeNumber)
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,0])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,1])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,2])
meshOut.write('tetra4\n'+'%10i\n' % self.nTetrahedra_global)
for TN in range(self.nTetrahedra_global):
ensightElementNumber = TN + base
meshOut.write('%10i\n' % ensightElementNumber)
TA = self.tetrahedronArray
for TN in range(self.nTetrahedra_global):
meshOut.write('%10i%10i%10i%10i\n' % (TA[TN,0]+base,
TA[TN,1]+base,
TA[TN,2]+base,
TA[TN,3]+base))
meshOut.close()
def writeBoundaryMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'Boundary.case','w')
caseOut.write('FORMAT\n'+'type: ensight gold\n')
caseOut.write('GEOMETRY\n'+'model: '+filename+'Boundary.geo\n')
caseOut.close()
meshOut=open(filename+'Boundary.geo','w')
meshOut.write('Unstructured Triangular Surface Mesh\n\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write('A Mesh\n')
meshOut.write('coordinates\n'+'%10i\n' % self.nExteriorNodes_global)
for nN in range(self.nExteriorNodes_global):
ensightNodeNumber = (nN+base)
meshOut.write('%10i\n' % ensightNodeNumber)
for nN in range(self.nExteriorNodes_global):
meshOut.write('%12.5E\n' %
self.nodeArray[self.exteriorNodeArray[nN],0])
for nN in range(self.nExteriorNodes_global):
meshOut.write('%12.5E\n' %
self.nodeArray[self.exteriorNodeArray[nN],1])
for nN in range(self.nExteriorNodes_global):
meshOut.write('%12.5E\n' %
self.nodeArray[self.exteriorNodeArray[nN],2])
meshOut.write('tria3\n'+'%10i\n' % self.nExteriorTriangles_global)
for tN in range(self.nExteriorTriangles_global):
ensightElementNumber = tN + base
meshOut.write('%10i\n' % ensightElementNumber)
tA = self.triangleArray
for tN in self.exteriorTriangleArray:
meshOut.write('%10i%10i%10i\n' %
(self.globalToExteriorNodeArray[tA[tN,0]]+base,
self.globalToExteriorNodeArray[tA[tN,1]]+base,
self.globalToExteriorNodeArray[tA[tN,2]]+base))
meshOut.close()
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,Xdmf_ElementTopology="Tetrahedron",tCount=0):
if self.arGridCollection is not None:
init = False
elif not init:
self.arGridCollection = ar.domain.find("Grid")
if init:
self.arGridCollection = SubElement(ar.domain,"Grid",{"Name":"Mesh "+name,
"GridType":"Collection",
"CollectionType":"Temporal"})
if self.arGrid is None or self.arTime.get('Value') != "{0:e}".format(t):
if ar.global_sync:
#
#topology and geometry
#
self.arGrid = SubElement(self.arGridCollection,"Grid",{"GridType":"Uniform"})
self.arTime = SubElement(self.arGrid,"Time",{"Value":str(t),"Name":str(tCount)})
topology = SubElement(self.arGrid,"Topology",
{"Type":Xdmf_ElementTopology,
"NumberOfElements":"%i" % (self.globalMesh.nElements_global,)})
elements = SubElement(topology,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i %i" % (self.globalMesh.nElements_owned,
self.nNodes_element)})
geometry = SubElement(self.arGrid,"Geometry",{"Type":"XYZ"})
nodes = SubElement(geometry,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Float",
"Precision":"8",
"Dimensions":"%i %i" % (self.globalMesh.nNodes_global,3)})
#material types
elementMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"elementMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementMaterialTypesValues = SubElement(elementMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.globalMesh.nElements_owned,)})
if ar.hdfFile is not None:
elements.text = ar.hdfFilename+":/elements"+name+str(tCount)
nodes.text = ar.hdfFilename+":/nodes"+name+str(tCount)
elementMaterialTypesValues.text = ar.hdfFilename+":/"+"elementMaterialTypes"+"_t"+str(tCount)
if init or meshChanged:
ar.create_dataset_sync('elements'+name+str(tCount),
offsets = self.globalMesh.elementOffsets_subdomain_owned,
data = self.globalMesh.nodeNumbering_subdomain2global[self.elementNodesArray[:self.nElements_owned]])
ar.create_dataset_sync('nodes'+name+str(tCount),
offsets = self.globalMesh.nodeOffsets_subdomain_owned,
data = self.nodeArray[:self.nNodes_owned])
ar.create_dataset_sync("elementMaterialTypes"+"_t"+str(tCount),
offsets = self.globalMesh.elementOffsets_subdomain_owned,
data = self.elementMaterialTypes[:self.nElements_owned])
else:
assert False, "global_sync not supported with text heavy data"
else:
#
#topology and geometry
#
self.arGrid = SubElement(self.arGridCollection,"Grid",{"GridType":"Uniform"})
self.arTime = SubElement(self.arGrid,"Time",{"Value":str(t),"Name":str(tCount)})
topology = SubElement(self.arGrid,"Topology",
{"Type":Xdmf_ElementTopology,
"NumberOfElements":"%i" % (self.nElements_owned,)})
elements = SubElement(topology,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i %i" % (self.nElements_owned,self.nNodes_element)})
geometry = SubElement(self.arGrid,"Geometry",{"Type":"XYZ"})
nodes = SubElement(geometry,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Float",
"Precision":"8",
"Dimensions":"%i %i" % (self.nNodes_global,3)})
#material types
elementMaterialTypes = SubElement(self.arGrid,"Attribute",{"Name":"elementMaterialTypes",
"AttributeType":"Scalar",
"Center":"Cell"})
elementMaterialTypesValues = SubElement(elementMaterialTypes,"DataItem",
{"Format":ar.dataItemFormat,
"DataType":"Int",
"Dimensions":"%i" % (self.nElements_owned,)})
if ar.hdfFile is not None:
elements.text = ar.hdfFilename+":/elements"+str(ar.comm.rank())+name+str(tCount)
nodes.text = ar.hdfFilename+":/nodes"+str(ar.comm.rank())+name+str(tCount)
elementMaterialTypesValues.text = ar.hdfFilename+":/"+"elementMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount)
if init or meshChanged:
ar.create_dataset_async('elements'+str(ar.comm.rank())+name+str(tCount), data = self.elementNodesArray[:self.nElements_owned])
ar.create_dataset_async('nodes'+str(ar.comm.rank())+name+str(tCount), data = self.nodeArray)
ar.create_dataset_async("elementMaterialTypes"+"_p"+str(ar.comm.rank())+"_t"+str(tCount), data = self.elementMaterialTypes[:self.nElements_owned])
else:
SubElement(elements,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/elements"+name+".txt"})
SubElement(nodes,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/nodes"+name+".txt"})
SubElement(elementMaterialTypesValues,"xi:include",{"parse":"text","href":"./"+ar.textDataDir+"/"+"elementMaterialTypes"+str(tCount)+".txt"})
if init or meshChanged:
np.savetxt(ar.textDataDir+"/elements"+name+".txt",self.elementNodesArray[:self.nElements_owned],fmt='%d')
np.savetxt(ar.textDataDir+"/nodes"+name+".txt",self.nodeArray)
np.savetxt(ar.textDataDir+"/"+"elementMaterialTypes"+str(tCount)+".txt",self.elementMaterialTypes[:self.nElements_owned])
class MultilevelTetrahedralMesh(MultilevelMesh):
"""A hierarchical multilevel mesh with tetrahedral cells"""
def __init__(self,
nx, ny, nz,
x=0.0, y=0.0, z=0.0,
Lx=1.0, Ly=1.0, Lz=1.0,
refinementLevels=1,
skipInit=False,
nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from . import cmeshTools
from . import Comm
MultilevelMesh.__init__(self)
self.useC = True
self.nLayersOfOverlap = nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
logEvent("Generating tetrahedral mesh")
if not skipInit:
if self.useC:
self.meshList.append(TetrahedralMesh())
self.meshList[0].generateTetrahedralMeshFromRectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(TetrahedralMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.cmeshList[l])
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
else:
grid=RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList.append(TetrahedralMesh())
self.meshList[0].rectangularToTetrahedral(grid)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.elementChildren=[]
logEvent(self.meshList[0].meshInfo())
for l in range(1,refinementLevels):
self.refine()
logEvent(self.meshList[-1].meshInfo())
self.buildArrayLists()
def generateFromExistingCoarseMesh(self,mesh0,refinementLevels,nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from . import cmeshTools
#blow away or just trust garbage collection
self.nLayersOfOverlap=nLayersOfOverlap;self.parallelPartitioningType=parallelPartitioningType
self.meshList = []
self.elementParents = None
self.cmultilevelMesh = None
if self.useC:
self.meshList.append(mesh0)
logEvent("cmeshTools.CMultilevelMesh")
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
logEvent("buildFromC")
self.buildFromC(self.cmultilevelMesh)
logEvent("partitionMesh")
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(TetrahedralMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.meshList[l].cmesh)
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
else:
grid=RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList.append(TetrahedralMesh())
self.meshList[0].rectangularToTetrahedral(grid)
self.meshList[0].subdomainMesh = self.meshList[0]
self.elementChildren=[]
logEvent(self.meshList[0].meshInfo())
for l in range(1,refinementLevels):
self.refine()
self.meshList[l].subdomainMesh = self.meshList[l]
logEvent(self.meshList[-1].meshInfo())
self.buildArrayLists()
def generatePartitionedMeshFromPUMI(self,mesh0,refinementLevels,nLayersOfOverlap=1):
from . import cmeshTools
self.meshList = []
self.meshList.append(mesh0)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.elementParents = None
self.elementChildren=[]
def generatePartitionedMeshFromTetgenFiles(self,filebase,base,mesh0,refinementLevels,nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from . import cmeshTools
if filebase is None:
filebase="mesh"
assert(refinementLevels==1)
assert(parallelPartitioningType==MeshParallelPartitioningTypes.node)
assert(nLayersOfOverlap<=1)
mesh0.cmesh = cmeshTools.CMesh()
#blow away or just trust garbage collection
self.nLayersOfOverlap=nLayersOfOverlap;self.parallelPartitioningType=parallelPartitioningType
self.meshList = []
self.elementParents = None
self.cmultilevelMesh = None
self.meshList.append(mesh0)
logEvent("cmeshTools.CMultilevelMesh")
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
logEvent("buildFromC")
self.buildFromC(self.cmultilevelMesh)
logEvent("partitionMesh")
self.meshList[0].partitionMeshFromFiles(filebase,base,nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
def refine(self):
self.meshList.append(TetrahedralMesh())
childrenDict = self.meshList[-1].refine(self.meshList[-2])
self.elementChildren.append(childrenDict)
def computeGeometricInfo(self):
for m in self.meshList:
m.computeGeometricInfo()
class MultilevelHexahedralMesh(MultilevelMesh):
"""A hierarchical multilevel mesh with hexahedral cells"""
def __init__(self,
nx, ny, nz,
px=0, py=0, pz=0,
x=0.0, y=0.0, z=0.0,
Lx=1.0, Ly=1.0, Lz=1.0,
refinementLevels=1,
skipInit=False,
nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from . import cmeshTools
from . import Comm
MultilevelMesh.__init__(self)
if refinementLevels == 1:
self.useC = True
else:
self.useC = False
self.nLayersOfOverlap = nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
logEvent("Generating hexahedral mesh")
if not skipInit:
if self.useC:
self.meshList.append(HexahedralMesh())
self.meshList[0].generateHexahedralMeshFromRectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(HexahedralMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.cmeshList[l])
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
else:
grid=RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList.append(HexahedralMesh())
self.elementChildren=[]
self.meshList[0].sigmaMax=0.0
logEvent(self.meshList[0].meshInfo())
for l in range(1,refinementLevels):
self.refine()
self.meshList[-1].sigmaMax=0.0
logEvent(self.meshList[-1].meshInfo())
self.buildArrayLists()
def generateFromExistingCoarseMesh(self,mesh0,refinementLevels,nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from . import cmeshTools
#blow away or just trust garbage collection
self.nLayersOfOverlap=nLayersOfOverlap;self.parallelPartitioningType=parallelPartitioningType
self.meshList = []
self.elementParents = None
self.cmultilevelMesh = None
self.meshList.append(mesh0)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(HexahedralMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.meshList[l].cmesh)
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
def refine(self):
raise NotImplementedError
self.meshList.append(HexahedralMesh())
childrenDict = self.meshList[-1].refine(self.meshList[-2])
self.elementChildren.append(childrenDict)
def computeGeometricInfo(self):
for m in self.meshList:
m.computeGeometricInfo()
def buildReferenceSimplex(nd=2):
"""
Create and return a Proteus mesh object for the reference
element.
Parameters
----------
nd : int
Dimension of reference element
Returns
-------
mesh : :class:`proteus.MeshTools.TriangularMesh` or :class:`proteus.MeshTools.TetrahedralMesh`
Simplex mesh of the reference element in the requested dimension
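
Example
-------
A sketch (assumes the Triangle/TetGen generators and their Proteus wrappers
are available):
    ref_tri = buildReferenceSimplex(nd=2)
    ref_tet = buildReferenceSimplex(nd=3)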
"""
from proteus import Domain
assert(nd in [1,2,3])
if nd==1:
pass # Not sure what needs to go here?!
unit_simplex_domain = Domain.unitSimplex(nd)
polyfile = "reference_element"
unit_simplex_domain.writePoly(polyfile)
if nd==2:
runTriangle(polyfile,
"Yp")
mesh = genMeshWithTriangle(polyfile,
nbase=1)
mesh.partitionMesh()
mesh.globalMesh = mesh
return mesh
if nd==3:
runTetgen(polyfile,
"Yp")
mesh = genMeshWithTetgen(polyfile,
nbase = 1)
return mesh
class TriangularMesh(Mesh):
"""A mesh of triangles
The nodes, edges, and triangles are indexed by their
node tuples. The corresponding lists are derived from the dictionaries, and
sorted lexicographically. The global node numbers are redefined to
give a lexicographic ordering.
The mesh can be generated from a rectangular grid and refined using either
3t or Freudenthal-Bey global refinement.
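A usage sketch (assumes the compiled cmeshTools extension is available):
    mesh = TriangularMesh()
    mesh.generateTriangularMeshFromRectangularGrid(11, 11, 1.0, 1.0)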
"""
def __init__(self):
Mesh.__init__(self)
self.nodeDict={}
self.edgeDict={}
self.triangleDict={}
self.triangleList=[]
self.oldToNewNode=[]
def meshType(self):
return 'simplex'
def computeGeometricInfo(self):
from . import cmeshTools
cmeshTools.computeGeometricInfo_triangle(self.cmesh)
def generateTriangularMeshFromRectangularGrid(self,nx,ny,Lx,Ly,triangleFlag=1):
from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateTriangularMeshFromRectangularGrid(nx,ny,Lx,Ly,self.cmesh,triangleFlag)
cmeshTools.allocateGeometricInfo_triangle(self.cmesh)
cmeshTools.computeGeometricInfo_triangle(self.cmesh)
self.buildFromC(self.cmesh)
def rectangularToTriangularOriented(self,grid):
#copy the nodes from the rectangular mesh
#I want to be able to renumber later without
#changing the grid nodes, so I do deep copies here
self.nodeList = [Node(n.N,n.p[X],n.p[Y],n.p[Z]) for n in grid.nodeList]
self.nodeDict = dict([(n,n) for n in self.nodeList])
for i in range(grid.nHx):
for j in range(grid.nHy):
k=0
n0 = self.nodeList[grid.getNodeNumber(i,j,k)]
n1 = self.nodeList[grid.getNodeNumber(i,j+1,k)]
n2 = self.nodeList[grid.getNodeNumber(i+1,j,k)]
n3 = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
self.newTriangle([n0,n1,n3])
self.newTriangle([n0,n2,n3])
self.finalize()
#self.buildListsEdges()
#self.buildListsTriangles()
def rectangularToTriangularOrientedOtherWay(self,grid):
#copy the nodes from the rectangular mesh
#I want to be able to renumber later without
#changing the grid nodes, so I do deep copies here
self.nodeList = [Node(n.N,n.p[X],n.p[Y],n.p[Z]) for n in grid.nodeList]
self.nodeDict = dict([(n,n) for n in self.nodeList])
for i in range(grid.nHx):
for j in range(grid.nHy):
k=0
n0 = self.nodeList[grid.getNodeNumber(i,j,k)]
n1 = self.nodeList[grid.getNodeNumber(i,j+1,k)]
n2 = self.nodeList[grid.getNodeNumber(i+1,j,k)]
n3 = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
self.newTriangle([n0,n2,n1])
self.newTriangle([n2,n3,n1])
self.finalize()
#self.buildListsEdges()
#self.buildListsTriangles()
def rectangularToTriangularRedBlack(self,grid):
#copy the nodes from the rectangular mesh
        #I want to be able to renumber later without
#changing the grid nodes, so I do deep copies here
self.nodeList = [Node(n.N,n.p[X],n.p[Y],n.p[Z]) for n in grid.nodeList]
self.nodeDict = dict([(n,n) for n in self.nodeList])
self.triangleDict={}
for i in range(grid.nHx):
for j in range(grid.nHy):
k=0
#associate the element (i,j,k) with the
#left, front, bottom node
#get the left,front,bottom,node and its color
if (grid.getColor(i,j,k) == grid.black):
b0 = self.nodeList[grid.getNodeNumber(i,j,k)]
r0 = self.nodeList[grid.getNodeNumber(i+1,j,k)]
r1 = self.nodeList[grid.getNodeNumber(i,j+1,k)]
b1 = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
else:
r0 = self.nodeList[grid.getNodeNumber(i,j,k)]
b0 = self.nodeList[grid.getNodeNumber(i+1,j,k)]
b1 = self.nodeList[grid.getNodeNumber(i,j+1,k)]
r1 = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
self.newTriangle([b0,r0,r1])
self.newTriangle([b1,r0,r1])
self.finalize()
#self.buildListsEdges()
#self.buildListsTriangles()
#mwf debug switch to redblac
rectangularToTriangular = rectangularToTriangularOrientedOtherWay#rectangularToTriangularOriented
def generateFromTriangleMesh(self,ctrirep,base):
        from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateFromTriangleMesh(self.cmesh,ctrirep,base)
cmeshTools.allocateGeometricInfo_triangle(self.cmesh)
cmeshTools.computeGeometricInfo_triangle(self.cmesh)
self.buildFromC(self.cmesh)
def generateFromTriangleFiles(self,filebase,base):
        from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateFromTriangleFiles(self.cmesh,filebase,base)
cmeshTools.allocateGeometricInfo_triangle(self.cmesh)
cmeshTools.computeGeometricInfo_triangle(self.cmesh)
self.buildFromC(self.cmesh)
def writeTriangleFiles(self,filebase,base):
        from . import cmeshTools
cmeshTools.writeTriangleFiles(self.cmesh,filebase,base)
def generateFrom2DMFile(self,filebase,base=1):
        from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateFrom2DMFile(self.cmesh,filebase,base)
cmeshTools.allocateGeometricInfo_triangle(self.cmesh)
cmeshTools.computeGeometricInfo_triangle(self.cmesh)
self.buildFromC(self.cmesh)
def constructTriangularMeshOnRectangle(self,Lx,Ly,nx,ny,writeMesh=0,
meshFileBase='mesh2d'):
"""
wrapper function for making a triangular mesh on the rectangle
[0,Lx] x [0,Ly].
"""
nz = 1
Lz = 1.0
grid2d = RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
#grid2d.writeEdgesGnuplot('grid2d')
#grid2d.viewMeshGnuplotPipe('grid2d')
self.rectangularToTriangular(grid2d)
if writeMesh == 1:
#print mesh in gnuplot format
self.writeEdgesGnuplot(meshFileBase)
#can view with
#self.viewMeshGnuplotPipe(meshFileBase)
elif writeMesh == 2:
self.writeEdgesMatlab(meshFileBase)
#view in matlab with meshFileBase.m
#end else
return self
def buildFromSets(self,triangleSet,edgeSet,nodeSet):
self.nodeList = list(nodeSet)
self.nodeDict = dict([(n,n) for n in self.nodeList])
self.edgeList = list(edgeSet)
self.edgeDict = dict([(e.nodes,e) for e in self.edgeList])
self.triangleList = list(triangleSet)
self.triangleDict = dict([(t.nodes,t) for t in self.triangleList])
self.elementList = self.triangleList
self.elementBoundaryList = self.edgeList
def fixLocalNumbering(self):
for tN in range(len(self.triangleList)):
self.triangleList[tN].computeGeometricInfo()
if edet(self.triangleList[tN].linearMap) < 0:
newNodes = list(self.triangleList[tN].nodes)
newNodes[2] = self.triangleList[tN].nodes[1]
newNodes[1] = self.triangleList[tN].nodes[2]
self.triangleList[tN].nodes = newNodes
def finalize(self):
self.buildLists()
#self.fixLocalNumbering()
self.buildArraysFromLists()
#todo: build boundary mesh
def buildLists(self):
self.buildListsNodes()
self.buildListsEdges()
self.buildListsTriangles()
self.elementList = self.triangleList
self.elementBoundaryList = self.edgeList
def buildListsNodes(self):
keyList = list(self.nodeDict.keys())
keyList.sort()
self.nodeList=[]
self.oldToNewNode=list(range(len(self.nodeDict)))
for nN,k in enumerate(keyList):
self.oldToNewNode[self.nodeDict[k].N]=nN
self.nodeDict[k].N = nN
self.nodeList.append(self.nodeDict[k])
def buildListsEdges(self):
keyList = list(self.edgeDict.keys())
keyList.sort()
self.edgeList=[]
for eN,k in enumerate(keyList):
self.edgeDict[k].N = eN
self.edgeList.append(self.edgeDict[k])
def buildListsTriangles(self):
keyList = list(self.triangleDict.keys())
keyList.sort()
self.triangleList=[]
for tN,k in enumerate(keyList):
self.triangleDict[k].N = tN
self.triangleList.append(self.triangleDict[k])
self.polygonList = self.triangleList
def newTriangle(self,nodes):
t = Triangle(len(self.triangleDict),nodes)
self.triangleDict[t.nodes] = t
self.registerEdges(t)
return t
def registerEdges(self,t):
for en,e in enumerate(t.edges):
if e.nodes in self.edgeDict:
t.edges[en]=self.edgeDict[e.nodes]
else:
eN=len(self.edgeDict)
e.N=eN
self.edgeDict[e.nodes]=e
def registerNode(self,node):
if node in self.nodeDict:
node = self.nodeDict[node]
else:
node.N = len(self.nodeDict)
self.nodeDict[node] = node
return node
def buildLevelSetMesh(self,value,nodalValues):
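        # Builds a 1D EdgeMesh approximating the contour where the nodal interpolant of
        # nodalValues equals `value`, processed edge-by-edge within each triangle; node numbers
        # of mesh nodes touched by the level set are collected in self.levelSetNodeNumbers.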
levelSetMesh = EdgeMesh()
self.levelSetNodeNumbers = set()
for t in self.triangleList:
nodes={}
for e in t.edges:
nl = e.nodes[0]
vl = nodalValues[nl.N]
nr = e.nodes[1]
vr = nodalValues[nr.N]
if ((vl >= value and value >= vr) or
(vl <= value and value <= vr)):
if vl == vr:
newNl = Node(len(levelSetMesh.nodeDict),
nl.p[X],
nl.p[Y],
nl.p[Z])
newNl = levelSetMesh.registerNode(newNl)
newNr = Node(len(levelSetMesh.nodeDict),
nr.p[X],
nr.p[Y],
nr.p[Z])
newNr = levelSetMesh.registerNode(newNr)
levelSetMesh.newEdge([newNl,newNr])
self.levelSetNodeNumbers.add(nl.N)
self.levelSetNodeNumbers.add(nr.N)
elif value == vl:
newNode = Node(len(levelSetMesh.nodeDict),
nl.p[X],
nl.p[Y],
nl.p[Z])
nodes[newNode] = newNode
self.levelSetNodeNumbers.add(nl.N)
elif value == vr and len(nodes) < 2:
newNode = Node(len(levelSetMesh.nodeDict),
nr.p[X],
nr.p[Y],
nr.p[Z])
nodes[newNode] = newNode
self.levelSetNodeNumbers.add(nr.N)
else:
wr = old_div((value - vl), (vr - vl))
wl = old_div((value - vr), (vl - vr))
newPoint = nl.p*wl + nr.p*wr
newNode = Node(len(levelSetMesh.nodeDict),
newPoint[X],
newPoint[Y],
newPoint[Z])
nodes[newNode] = newNode
self.levelSetNodeNumbers.add(nl.N)
self.levelSetNodeNumbers.add(nr.N)
elif vl < value:
self.levelSetNodeNumbers.add(nl.N)
elif vr < value:
self.levelSetNodeNumbers.add(nr.N)
if len(nodes) == 0:
pass
elif len(nodes) == 1:
print("singleton")
elif len(nodes) == 2:
newNodes=[]
for n in list(nodes.values()):
newNodes.append(levelSetMesh.registerNode(n))
levelSetMesh.newEdge(newNodes)
else:
print("unexpected case in buildLevelSetMesh")
print(t.N)
for e in t.edges:
print(e.N)
for n in e.nodes:
print(n.N)
print(n.p)
print("level set triangle")
for n in list(nodes.values()):
print(n.p)
if len(levelSetMesh.edgeDict) == 0:
print("level set does not cross any edges")
return None
else:
levelSetMesh.finalize()
return levelSetMesh
def refine3t(self,oldMesh):
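        # 3T refinement: each parent triangle is split into three children by connecting
        # its barycenter to the three (deep-copied, renumbered) parent vertices.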
childrenDict={}
for t in oldMesh.triangleList:
#deep copy old nodes because we'll renumber
tNodes = [Node(eN,n.p[X],n.p[Y],n.p[Z])
for eN,n in enumerate(t.nodes)]
for lnN,n in enumerate(tNodes): tNodes[lnN]=self.registerNode(n)
#add new node
t.computeGeometricInfo()
newNode = Node(len(self.nodeDict),
t.barycenter[X],
t.barycenter[Y],
t.barycenter[Z])
newNode = self.registerNode(newNode)
t1=self.newTriangle([tNodes[0],tNodes[1],newNode])
t2=self.newTriangle([tNodes[1],tNodes[2],newNode])
t3=self.newTriangle([tNodes[2],tNodes[0],newNode])
childrenDict[t.N]=[t1,t2,t3]
self.finalize()
return childrenDict
def refineFreudenthalBey(self,oldMesh):
logEvent("Refining the mesh using Freudenthal-Bey refinement")
childrenDict={}
for t in list(oldMesh.triangleDict.values()):
#deep copy old nodes because we'll renumber
tNodes = [Node(nN,n.p[X],n.p[Y],n.p[Z])
for nN,n in enumerate(t.nodes)]
for lnN,n in enumerate(tNodes): tNodes[lnN]=self.registerNode(n)
#add new nodes (midpoints of edges)
#use local edge tuples as keys
newNodes={}
for et,en in t.edgeMap.items():
t.edges[en].computeGeometricInfo()
p = t.edges[en].barycenter
newNodes[et] = Node(en,p[X],p[Y],p[Z])
#set the global node numbers
for k,n in newNodes.items(): newNodes[k]=self.registerNode(n)
#add corner triangles
t1=self.newTriangle([tNodes[0],
newNodes[(0,1)],
newNodes[(0,2)]])
t2=self.newTriangle([tNodes[1],
newNodes[(0,1)],
newNodes[(1,2)]])
t3=self.newTriangle([tNodes[2],
newNodes[(0,2)],
newNodes[(1,2)]])
#add center triangle
t4=self.newTriangle([newNodes[(0,1)],
newNodes[(1,2)],
newNodes[(0,2)]])
childrenDict[t.N]=[t1,t2,t3,t4]
self.finalize()
return childrenDict
#for debugging: print each tet
#self.edgeList=[]
#Tlist = self.tetrahedronDict.values()
#for T in Tlist:
# self.edgeList = self.edgeList + T.edges
def refine(self,oldMesh):
return self.refineFreudenthalBey(oldMesh)
def meshInfo(self):
minfo = """Number of triangles : %d
Number of edges : %d
Number of nodes : %d\n""" % (self.nElements_global,
self.nElementBoundaries_global,
self.nNodes_global)
if self.subdomainMesh != self:
sinfo = self.subdomainMesh.meshInfo()
info = "*** Global ***\n" + minfo + "\n*** Local ***\n" + sinfo
return info
return minfo
def readMeshADH(self,filename,adhBase=1,suffix='3dm'):
meshIn = open(filename+'.'+suffix,'r')
firstLine = meshIn.readline()
firstWords = firstLine.split()
print("Reading object=%s from file=%s" % (firstWords[0],filename))
line = meshIn.readline()
columns = line.split()
triangles = []
triangleEdges=set()
logEvent("Reading "+str(filename)+ \
" and building node lists for triangles, and edges")
#assume triangles are ordered by triangle number
while (columns[0] == 'E3T'):
nodeNumbers = [int(c) - adhBase for c in columns[2:5]]
nodeNumbers.sort()
triangles.append(array.array('i',nodeNumbers))
triangleEdges.update([(nodeNumbers[0],nodeNumbers[1]),
(nodeNumbers[0],nodeNumbers[2]),
(nodeNumbers[1],nodeNumbers[2])])
line = meshIn.readline()
columns = line.split()
print("Building node list and dict")
#assume nodes are ordered by node number
while (len(columns) == 5):
newNode = Node(int(columns[1]) - adhBase,
float(columns[2]),
float(columns[3]),
float(columns[4]))
self.nodeList.append(newNode)
self.nodeDict[newNode]=newNode
line = meshIn.readline()
columns = line.split()
print("Number of triangles :"+str(len(triangles)))
print("Number of edges :"+str(len(triangleEdges)))
print("Number of nodes :"+str(len(self.nodeList)))
print("Number of objects :"+\
str(len(triangleEdges)+len(triangles)+len(self.nodeList)))
print("Building edge list")
self.edgeList =[Edge(edgeNumber=eN,nodes=[self.nodeList[nN[0]],
self.nodeList[nN[1]]])
for eN,nN in enumerate(triangleEdges)]
print("Building edge dict")
self.edgeDict = dict([(e.nodes,e) for e in self.edgeList])
print("Building triangle list")
self.triangleList =[Triangle(triangleNumber=tN,
nodes=[self.nodeList[nN[0]],
self.nodeList[nN[1]],
self.nodeList[nN[2]]],
edgeDict=self.edgeDict)
for tN,nN in enumerate(triangles)]
print("Building triangle dict")
self.triangleDict = dict([(t.nodes,t) for t in self.triangleList])
self.elementList = self.triangleList
self.elementBoundaryList = self.edgeList
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,tCount=0,EB=False):
Mesh.writeMeshXdmf(self,ar,name,t,init,meshChanged,"Triangle",tCount,EB=EB)
def writeMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'.case','w')
caseOut.write('FORMAT\n'+'type: ensight gold\n')
caseOut.write('GEOMETRY\n'+'model: '+filename+'.geo\n')
caseOut.close()
meshOut=open(filename+'.geo','w')
meshOut.write('Ensight Gold\n')
meshOut.write('Unstructured Triangular Mesh\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write('A Mesh\n')
meshOut.write('coordinates\n'+'%10i\n' % self.nNodes_global)
for nN in range(self.nNodes_global):
meshOut.write('%10i\n' % (nN+base))
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,0])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,1])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,2])
meshOut.write('tria3\n'+'%10i\n' % self.nElements_global)
for eN in range(self.nElements_global):
meshOut.write('%10i\n' % (eN+base))
for eN in range(self.nElements_global):
meshOut.write('%10i%10i%10i\n' % tuple((nN+base) for nN in self.elementNodesArray[eN,:]))
meshOut.close()
def appendMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'.case','a')
caseOut.write('GEOMETRY\n'+'model: '+filename+'.geo\n')
caseOut.close()
meshOut=open(filename+'.geo','w')
meshOut.write('Unstructured Triangular Mesh\n\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write('The whole mesh\n')
meshOut.write('coordinates\n'+'%10i\n' % len(self.nodeList))
for n in self.nodeList:
nN = n.N+base
meshOut.write('%10i\n' % nN)
for n in self.nodeList:
meshOut.write('%12.5E\n' % n.p[X])
for n in self.nodeList:
meshOut.write('%12.5E\n' % n.p[Y])
for n in self.nodeList:
meshOut.write('%12.5E\n' % n.p[Z])
meshOut.write('tria3\n'+'%10i\n' % len(self.elementList))
for e in self.elementList:
eN = e.N + base
meshOut.write('%10i\n' % eN)
for e in self.elementList:
meshOut.write('%10i%10i%10i\n' % tuple(n.N+base for n in e.nodes))
meshOut.close()
def writeMeshADH(self,filename,adhBase=1):
        from . import cmeshTools
cmeshTools.write2dmFiles(self.cmesh,filename,adhBase)
def writeAsymptote(self,fileprefix,L,x,units="m"):
"""
Write a representation of the triangular mesh in the Asymptote vector graphics language
"""
unitsize=old_div(4.0,L[0])
f = open(fileprefix+".asy",'w')
fileString="""
unitsize(4.0 inches / %(Lx)f);
size(5 inches);
real Lx=%(Lx)f;
real Ly=%(Ly)f;
real offset=0.0125Lx;
real x=%(x)f;
real y=%(y)f;
string strx="$%(Lx)2.2f\mbox{%(units)s}$";
string stry="$%(Ly)2.2f\mbox{%(units)s}$";
draw(strx,(x,y-offset)--(x+Lx,y-offset),S,black,Bars,Arrows,PenMargins);
draw(stry,(x-offset,y)--(x-offset,y+Ly),W,black,Bars,Arrows,PenMargins);
import graph;
import palette;
pen[] regionPens = Rainbow(NColors=%(nRegionFlags)d);
pen[] boundaryPens = Rainbow(NColors=%(nBoundaryFlags)d);
""" % {'Lx':L[0],'Ly':L[1],'x':x[0],'y':x[1],'units':units,
'nRegionFlags':(max(self.elementMaterialTypes) - min(self.elementMaterialTypes)),
'nBoundaryFlags':(max(self.elementBoundaryMaterialTypes)-min(self.elementBoundaryMaterialTypes))}
#now draw triangles
for t,tFlag in zip(self.elementNodesArray,self.elementMaterialTypes):
fileString+="fill((%f,%f)--(%f,%f)--(%f,%f)--cycle,regionPens[%d]);\n" % (self.nodeArray[t[0]][0],self.nodeArray[t[0]][1],
self.nodeArray[t[1]][0],self.nodeArray[t[1]][1],
self.nodeArray[t[2]][0],self.nodeArray[t[2]][1],
tFlag-min(self.elementMaterialTypes))
for eb,ebFlag in zip(self.elementBoundaryNodesArray,self.elementBoundaryMaterialTypes):
if True:#ebFlag > 0:
fileString+="draw((%f,%f)--(%f,%f),boundaryPens[%d]+linewidth(0.01));\n" % (self.nodeArray[eb[0]][0],self.nodeArray[eb[0]][1],
self.nodeArray[eb[1]][0],self.nodeArray[eb[1]][1],
ebFlag-min(self.elementBoundaryMaterialTypes))
f.write(fileString)
f.close()
# def buildMatlabMeshDataStructures(self,meshFileBase='meshMatlab',writeToFile=True):
# """
# build array data structures for matlab finite element mesh representation
# and write to a file to view and play with in matlatb
# in matlab can then print mesh with
# pdemesh(p,e,t)
# where
# p is the vertex or point matrix
# e is the edge matrix, and
# t is the element matrix
# points matrix is [2 x num vertices]
# format :
# row 1 = x coord,
# row 2 = y coord for nodes in mesh
# edge matrix is [7 x num edges]
# format:
# row 1 = start vertex number
# row 2 = end vertex number
# row 3 = start value in edge parameterization, should be 0
# row 4 = end value in edge parameterization, should be 1
# row 5 = global edge id, base 1
# row 6 = subdomain on left? always 1 for now
# row 7 = subdomain on right? always 0 for now
# element matrix is [4 x num elements]
# row 1 = vertex 1 global number
# row 2 = vertex 2 global number
# row 3 = vertex 3 global number
# row 4 = triangle subdomain number
# where 1,2,3 is a local counter clockwise numbering of vertices in
# triangle
# """
# matlabBase = 1
# p = np.zeros((2,self.nNodes_global),'d')
# e = np.zeros((7,self.nElementBoundaries_global),'d')
# t = np.zeros((4,self.nElements_global),'d')
# #load p,e,t and write file
# if writeToFile:
# mfile = open(meshFileBase+'.m','w')
# else:
# mfile = open('/dev/null','w')
# #
# if writeToFile:
# mfile.write('p = [ ... \n')
# for nN in range(self.nNodes_global):
# p[0,nN]=self.nodeArray[nN,0]
# p[1,nN]=self.nodeArray[nN,1]
# if writeToFile:
# mfile.write('%g %g \n' % tuple(p[:,nN]))
# if writeToFile:
# mfile.write(']; \n')
# mfile.write("p = p\';\n") #need transpose for matlab
# if writeToFile:
# mfile.write('e = [ ... \n')
# for ebN in range(self.nElementBoundaries_global):
# e[0,ebN]=self.elementBoundaryNodesArray[ebN,0] + matlabBase #global node number of start node base 1
# e[1,ebN]=self.elementBoundaryNodesArray[ebN,1] + matlabBase #global node number of end node base 1
# e[2,ebN]=0.0 #edge param. is 0 to 1
# e[3,ebN]=1.0
# e[4,ebN]=ebN + matlabBase #global edge number base 1
# e[5,ebN]=0 #subdomain to left
# e[6,ebN]=1 #subdomain to right
# if writeToFile:
# mfile.write('%g %g %g %g %g %g %g \n' % tuple(e[:,ebN]))
# if writeToFile:
# mfile.write(']; \n')
# mfile.write("e = e\';\n") #need transpose for matlab
# #write triangles last
# if writeToFile:
# mfile.write('t = [ ... \n')
# for eN in range(self.nElements_global):
# t[0,eN]=self.elementNodesArray[eN,0]+matlabBase #global node number for vertex 0
# t[1,eN]=self.elementNodesArray[eN,1]+matlabBase #global node number for vertex 0
# t[2,eN]=self.elementNodesArray[eN,2]+matlabBase #global node number for vertex 0
# t[3,eN]=1 #subdomain id
# if writeToFile:
# mfile.write('%g %g %g %g \n' % tuple(t[:,eN]))
# if writeToFile:
# mfile.write(']; \n');
# mfile.write("t = t\';\n") #need transpose for matlab
class QuadrilateralMesh(Mesh):
"""A mesh of quads
The nodes, edges, and triangles are indexed by their
node tuples. The corresponding lists are derived from the dictionaries, and
sorted lexicographically. The global node numbers are redefined to
give a lexicographic ordering.
The mesh can be generated from a rectangular grid and refined using either
3t or Freudenthal-Bey global refinement.
"""
def __init__(self):
Mesh.__init__(self)
self.nodeDict={}
self.edgeDict={}
self.quadDict={}
self.quadList=[]
self.oldToNewNode=[]
        # temporary
self.max_nNodeNeighbors_node = 4
def buildFromSets(self,faceSet,edgeSet,nodeSet):
self.nodeList = list(nodeSet)
self.nodeDict = dict([(n,n) for n in self.nodeList])
self.edgeList = list(edgeSet)
self.edgeDict = dict([(e.nodes,e) for e in self.edgeList])
self.quadList = list(faceSet)
        self.quadDict = dict([(t.nodes,t) for t in self.quadList])
        self.elementList = self.quadList
self.elementBoundaryList = self.edgeList
def rectangularToQuadrilateral(self,grid,x=0.0,y=0.0,z=0.0):
''' WIP - I think this is the first function that needs to be
written so that MultilevelQuadrilateralMesh can work. This
function does not call C functions.
'''
self.nodeList = [Node(n.N,n.p[X]+x,n.p[Y]+y,n.p[Z]+z) for n in grid.nodeList]
# Is the following line necessary?
self.nodeDict = dict([(n,n) for n in self.nodeList])
for i in range(grid.nHx):
for j in range(grid.nHy):
k=0
n0 = self.nodeList[grid.getNodeNumber(i,j,k)]
n1 = self.nodeList[grid.getNodeNumber(i,j+1,k)]
n2 = self.nodeList[grid.getNodeNumber(i+1,j+1,k)]
n3 = self.nodeList[grid.getNodeNumber(i+1,j,k)]
e0 = Edge(nodes=[n0,n1])
e1 = Edge(nodes=[n1,n2])
e2 = Edge(nodes=[n2,n3])
e3 = Edge(nodes=[n3,n0])
self.newQuadrilateral([e0,e1,e2,e3])
self.finalize()
self.buildNodeDiameterArray()
def generateQuadrilateralMeshFromRectangularGrid(self,nx,ny,Lx,Ly):
        from . import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateQuadrilateralMeshFromRectangularGrid(nx,ny,0,0,Lx,Ly,self.cmesh)
cmeshTools.allocateGeometricInfo_quadrilateral(self.cmesh)
cmeshTools.computeGeometricInfo_quadrilateral(self.cmesh)
self.buildFromC(self.cmesh)
def generateFromQuadFileIFISS(self,meshfile):
''' WIP - read a matlab.mat file containing IFISS vertices
and elements
'''
import scipy.io
griddata = scipy.io.loadmat(meshfile+'.mat')
self.nodeList = [Node(nN,n[0],n[1],0.0) for nN,n in enumerate(griddata['vertices'])]
# Is the following line necessary?
self.nodeDict = dict([(n,n) for n in self.nodeList])
for q in griddata['quads']:
n0,n3,n2,n1 = q # clockwise ordering needed
e0 = Edge(nodes=[self.nodeList[n0],self.nodeList[n1]])
e1 = Edge(nodes=[self.nodeList[n1],self.nodeList[n2]])
e2 = Edge(nodes=[self.nodeList[n2],self.nodeList[n3]])
e3 = Edge(nodes=[self.nodeList[n3],self.nodeList[n0]])
self.newQuadrilateral([e0,e1,e2,e3])
self.finalize()
for F,nN in griddata['bdyflags']:
self.nodeMaterialTypes[nN] = F
for ebNE in range(self.nExteriorElementBoundaries_global):
ebN = self.exteriorElementBoundariesArray[ebNE]
n0,n1 = self.elementBoundaryNodesArray[ebN]
self.elementBoundaryMaterialTypes[ebN]=max(self.nodeMaterialTypes[n0],
self.nodeMaterialTypes[n1])
def meshType(self):
return 'cuboid'
def meshInfo(self):
minfo = """Number of quadrilaterals : %d
Number of edges : %d
Number of nodes : %d\n""" % (self.nElements_global,
self.nElementBoundaries_global,
self.nNodes_global)
if self.subdomainMesh != self:
sinfo = self.subdomainMesh.meshInfo()
info = "*** Global ***\n" + minfo + "\n*** Local ***\n" + sinfo
return info
return minfo
def newQuadrilateral(self,edges):
q = Quadrilateral(len(self.quadDict),edges)
self.quadDict[q.nodes] = q
self.registerEdges(q)
return q
def registerEdges(self,q):
'''check if an edge is in the mesh dictionary
if it is, point to existing entry
otherwise, create a new entry
'''
for en,e in enumerate(q.edges):
if e.nodes in self.edgeDict:
q.edges[en]=self.edgeDict[e.nodes]
else:
eN=len(self.edgeDict)
e.N=eN
self.edgeDict[e.nodes]=e
def registerNode(self,node):
''' check if a node is in the mesh dictionary
if it is, point to existing entry
otherwise, create a new entry
'''
if node in self.nodeDict:
node = self.nodeDict[node]
else:
node.N = len(self.nodeDict)
self.nodeDict[node] = node
return node
def refine(self,oldMesh):
logEvent("Refining Using Standard Quadrilateral Refinement")
childrenDict={}
for q in list(oldMesh.quadDict.values()):
qNodes = [Node(nN,n.p[X],n.p[Y],n.p[Z]) for nN,n in enumerate(q.nodes)]
for lnN,n in enumerate(qNodes): qNodes[lnN] = self.registerNode(n)
q.computeGeometricInfo()
newNodeLeft = Node(len(self.nodeDict),q.xmin,q.ymid,q.zmid)
newNodeLeft = self.registerNode(newNodeLeft)
newNodeTop = Node(len(self.nodeDict),q.xmid,q.ymax,q.zmid)
newNodeTop = self.registerNode(newNodeTop)
newNodeRight = Node(len(self.nodeDict),q.xmax,q.ymid,q.zmid)
newNodeRight = self.registerNode(newNodeRight)
newNodeBottom = Node(len(self.nodeDict),q.xmid,q.ymin,q.zmid)
newNodeBottom = self.registerNode(newNodeBottom)
newNodeMid = Node(len(self.nodeDict),q.xmid,q.ymid,q.zmid)
newNodeMid = self.registerNode(newNodeMid)
e1 = Edge(nodes=[qNodes[0],newNodeLeft])
e2 = Edge(nodes=[newNodeLeft,newNodeMid])
e3 = Edge(nodes=[newNodeMid,newNodeBottom])
e4 = Edge(nodes=[newNodeBottom,qNodes[0]])
e5 = Edge(nodes=[newNodeLeft,qNodes[1]])
e6 = Edge(nodes=[qNodes[1],newNodeTop])
e7 = Edge(nodes=[newNodeTop,newNodeMid])
e8 = Edge(nodes=[newNodeTop,qNodes[2]])
e9 = Edge(nodes=[qNodes[2], newNodeRight])
e10 = Edge(nodes=[newNodeRight, newNodeMid])
e11 = Edge(nodes=[qNodes[3],newNodeBottom])
e12 = Edge(nodes=[newNodeRight,qNodes[3]])
q1 = self.newQuadrilateral([e1,e2,e3,e4])
self.registerEdges(q1)
q2 = self.newQuadrilateral([e5,e6,e7,e2])
self.registerEdges(q2)
q3 = self.newQuadrilateral([e3,e10,e12,e11])
self.registerEdges(q3)
q4 = self.newQuadrilateral([e7,e8,e9,e10])
self.registerEdges(q4)
childrenDict[q.N]=[q1,q2,q3,q4]
self.finalize()
return childrenDict
def finalize(self):
''' WIP '''
self.buildLists()
self.buildArraysFromLists()
def buildLists(self):
''' WIP '''
self.buildListsNodes()
self.buildListsEdges()
self.buildListsQuadrilaterals()
self.elementList = self.quadList
self.elementBoundaryList = self.edgeList
def buildListsNodes(self):
keyList = list(self.nodeDict.keys())
keyList.sort()
self.nodeList=[]
self.oldToNewNode=list(range(len(self.nodeDict)))
for nN,k in enumerate(keyList):
self.oldToNewNode[self.nodeDict[k].N]=nN
self.nodeDict[k].N = nN
self.nodeList.append(self.nodeDict[k])
def buildListsEdges(self):
keyList = list(self.edgeDict.keys())
keyList.sort()
self.edgeList=[]
for eN,k in enumerate(keyList):
self.edgeDict[k].N = eN
self.edgeList.append(self.edgeDict[k])
def buildListsQuadrilaterals(self):
keyList = list(self.quadDict.keys())
keyList.sort()
self.quadList = []
for qN,q in enumerate(keyList):
self.quadDict[q].N = qN
self.quadList.append(self.quadDict[q])
self.polygonList = self.quadList
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,tCount=0,EB=False):
Mesh.writeMeshXdmf(self,ar,name,t,init,meshChanged,"Quadrilateral",tCount,EB=EB)
def buildNodeDiameterArray(self):
nNodes = len(self.nodeArray)
self.nodeDiametersArray = np.zeros(nNodes)
self.nodeSupportArray = np.zeros(nNodes)
self.volume = 0.
for eN in range(self.nElements_global):
area = self._calc_quad_area(eN)
self.volume += area
hMax = self.elementDiametersArray[eN]
for nN in range(self.nNodes_element):
nodeDiameter = hMax*area
idx = self.elementNodesArray[eN][nN]
self.nodeDiametersArray[idx]+=nodeDiameter
self.nodeSupportArray[idx]+=area
for nN in range(nNodes):
self.nodeDiametersArray[nN] /= self.nodeSupportArray[nN]
@staticmethod
def _calc_pt_distance(pt1,pt2):
""" Calculate the distance between two points.
Arguments
---------
        pt1: list
            Coordinates of the first point
        pt2: list
            Coordinates of the second point
Returns
-------
distance : float
"""
d = 0.
for i,j in zip(pt1,pt2):
d += (i-j)**2
return math.sqrt(d)
def _calc_hmax(self,i):
""" Find the largest edge length of an element.
Arguments
---------
i : int
Element number
Returns
-------
hmax : float
The largest edge length of element i
"""
hMax = 0.
element_nodes = self.nodeArray[self.elementNodesArray[i]]
for j, nN_L in enumerate(element_nodes):
print('nN_L = ' + str(nN_L))
for nN_R in element_nodes[j+1:]:
print('nN_R = ' + str(nN_R))
hMax = max(hMax,self._calc_pt_distance(nN_L,nN_R))
return hMax
def _calc_quad_area(self,i):
""" Calculates the area of a quadrilateral.
Arguments
---------
i : int
            The quadrilateral whose area is being calculated.
Returns
-------
A : float
            The quadrilateral's area, computed below as the product of two adjacent
            edge lengths (exact for rectangular elements).
"""
        n = [n0,n1,n2,n3] = self.nodeArray[self.elementNodesArray[i]]
d = [self._calc_pt_distance(n0,n[1]),
self._calc_pt_distance(n0,n[-1])]
A = d[0]*d[1]
return A
class MultilevelTriangularMesh(MultilevelMesh):
"""A hierarchical multilevel mesh of triangular cells"""
    from . import cmeshTools
def __init__(self,
nx, ny, nz,
x=0.0, y=0.0, z=0.0,
Lx=1.0, Ly=1.0, Lz=1.0,
refinementLevels=1,
skipInit=False,
nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node,triangleFlag=0):
from . import cmeshTools
MultilevelMesh.__init__(self)
self.useC = True
self.nLayersOfOverlap=nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
#self.useC = False
if not skipInit:
if self.useC:
self.meshList.append(TriangularMesh())
self.meshList[0].generateTriangularMeshFromRectangularGrid(nx,ny,Lx,Ly,triangleFlag=triangleFlag)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(TriangularMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.meshList[l].cmesh)
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
else:
grid=RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList.append(TriangularMesh())
self.meshList[0].rectangularToTriangular(grid)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.meshList[0].subdomainMesh = self.meshList[0]
self.elementChildren=[]
logEvent(self.meshList[0].meshInfo())
for l in range(1,refinementLevels):
self.refine()
self.meshList[l].subdomainMesh = self.meshList[l]
logEvent(self.meshList[-1].meshInfo())
self.buildArrayLists()
#
#mwf what's the best way to build from an existing mesh
def generateFromExistingCoarseMesh(self,mesh0,refinementLevels,nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
        from . import cmeshTools
#blow away or just trust garbage collection
self.nLayersOfOverlap = nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
self.meshList = []
self.elementParents = None
self.cmultilevelMesh = None
if self.useC:
self.meshList.append(mesh0)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(TriangularMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.meshList[l].cmesh)
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
else:
grid=RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList.append(TriangularMesh())
self.meshList[0].rectangularToTriangular(grid)
self.meshList[0].subdomainMesh = self.meshList[0]
self.elementChildren=[]
logEvent(self.meshList[0].meshInfo())
for l in range(1,refinementLevels):
self.refine()
self.meshList[l].subdomainMesh = self.meshList[l]
logEvent(self.meshList[-1].meshInfo())
self.buildArrayLists()
def generatePartitionedMeshFromPUMI(self,mesh0,refinementLevels,nLayersOfOverlap=1):
        from . import cmeshTools
self.meshList = []
self.meshList.append(mesh0)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.elementParents = None
self.elementChildren=[]
def generatePartitionedMeshFromTriangleFiles(self,filebase,base,mesh0,refinementLevels,nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from . import cmeshTools
        if filebase is None:
filebase="mesh"
assert(refinementLevels==1)
assert(parallelPartitioningType==MeshParallelPartitioningTypes.node)
assert(nLayersOfOverlap<=1)
mesh0.cmesh = cmeshTools.CMesh()
#blow away or just trust garbage collection
self.nLayersOfOverlap=nLayersOfOverlap;self.parallelPartitioningType=parallelPartitioningType
self.meshList = []
self.elementParents = None
self.cmultilevelMesh = None
self.meshList.append(mesh0)
logEvent("cmeshTools.CMultilevelMesh")
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
logEvent("buildFromC")
self.buildFromC(self.cmultilevelMesh)
logEvent("partitionMesh")
self.meshList[0].partitionMeshFromFiles(filebase,base,nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
def refine(self):
self.meshList.append(TriangularMesh())
childrenDict = self.meshList[-1].refine(self.meshList[-2])
self.elementChildren.append(childrenDict)
def computeGeometricInfo(self):
for m in self.meshList:
m.computeGeometricInfo()
def locallyRefine(self,elementTagArray,flagForRefineType=0):
"""
simple local refinement assuming elementTagArray[eN]=1 --> bisect
flagForRefineType = 0 -- newest node, 1 -- 4T, 2 -- U4T
"""
logEvent("MultilevelTriangularMesh:locallyRefine")
if flagForRefineType == 0:
logEvent("MultilevelTriangularMesh: calling cmeshTools.setNewestNodeBases")
self.cmeshTools.setNewestNodeBases(2,self.cmultilevelMesh)
if self.useC:
logEvent("MultilevelTriangularMesh: calling locallRefineMultilevelMesh")
self.cmeshTools.locallyRefineMultilevelMesh(2,self.cmultilevelMesh,elementTagArray,flagForRefineType)
logEvent("MultilevelTriangularMesh: calling buildFromC")
self.buildFromC(self.cmultilevelMesh)
self.meshList.append(TriangularMesh())
self.meshList[self.nLevels-1].cmesh = self.cmeshList[self.nLevels-1]
self.meshList[self.nLevels-1].buildFromC(self.meshList[self.nLevels-1].cmesh)
self.meshList[self.nLevels-1].partitionMesh(nLayersOfOverlap=self.nLayersOfOverlap,parallelPartitioningType=self.parallelPartitioningType)
else:
print("""locallyRefine not implemented for self.useC= %s """ % (self.useC))
#
class MultilevelQuadrilateralMesh(MultilevelMesh):
""" A heirarchical multilevel mesh of quadrilaterals
WIP """
def __init__(self,
nx,ny,nz,
x=0.0,y=0.0,z=0.0,
Lx=1.0,Ly=1.0,Lz=1.0,
refinementLevels=1,
skipInit=False,
nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node,triangleFlag=0,
useC=True):
        from . import cmeshTools
MultilevelMesh.__init__(self)
self.useC = useC # Implementing with C will take a bit more work. Disabling for now.
if refinementLevels > 1:
logEvent("Quad refinement is not supported in C routines, switching off c-mesh");
self.useC = False # Currently quad refinement is not supported in C routines.
self.nLayersOfOverlap=nLayersOfOverlap ; self.parallelPartitioningType = parallelPartitioningType
if not skipInit:
if self.useC:
self.meshList.append(QuadrilateralMesh())
self.meshList[0].generateQuadrilateralMeshFromRectangularGrid(nx,ny,Lx,Ly)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(QuadrilateralMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.cmeshList[l])
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
else:
grid=RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList.append(QuadrilateralMesh())
self.meshList[0].rectangularToQuadrilateral(grid,x,y,z)
self.meshList[0].subdomainMesh = self.meshList[0]
self.elementChildren=[]
logEvent(self.meshList[0].meshInfo())
self.meshList[0].globalMesh = self.meshList[0]
                # The following lines should be called elsewhere; this bookkeeping is normally done
                # in the c-function calls, which are not implemented yet for 2D quads
self.meshList[0].nElements_owned = self.meshList[0].nElements_global
                self.meshList[0].nodeNumbering_subdomain2global = np.zeros((self.meshList[0].nNodes_global,), 'i')
                self.meshList[0].elementNumbering_subdomain2global = np.zeros((self.meshList[0].nElements_global,), 'i')
self.meshList[0].nodeOffsets_subdomain_owned[-1] = self.meshList[0].nNodes_global
self.meshList[0].nNodes_owned = self.meshList[0].nNodes_global
self.meshList[0].elementOffsets_subdomain_owned[-1] = self.meshList[0].nElements_global
for node in range(self.meshList[0].nNodes_global):
self.meshList[0].nodeNumbering_subdomain2global.itemset(node,node)
for element in range(self.meshList[0].nElements_global):
self.meshList[0].elementNumbering_subdomain2global.itemset(element,element)
self.meshList[0].buildNodeStarArrays()
for l in range(1,refinementLevels):
self.refine()
self.meshList[l].subdomainMesh = self.meshList[l]
logEvent(self.meshList[-1].meshInfo())
self.meshList[l].buildNodeStarArrays()
self.buildArrayLists()
# print("from Python")
# print (self.meshList[0].nElements_global,
# self.meshList[0].nNodes_global,
# self.meshList[0].nNodes_element,
# self.meshList[0].nNodes_elementBoundary,
# self.meshList[0].nElementBoundaries_element,
# self.meshList[0].nElementBoundaries_global,
# self.meshList[0].nInteriorElementBoundaries_global,
# self.meshList[0].nExteriorElementBoundaries_global,
# self.meshList[0].max_nElements_node,
# self.meshList[0].nEdges_global,
# self.meshList[0].max_nNodeNeighbors_node,
# self.meshList[0].elementNodesArray,
# self.meshList[0].nodeElementsArray,
# self.meshList[0].nodeElementOffsets,
# self.meshList[0].elementNeighborsArray,
# self.meshList[0].elementBoundariesArray,
# self.meshList[0].elementBoundaryNodesArray,
# self.meshList[0].elementBoundaryElementsArray,
# self.meshList[0].elementBoundaryLocalElementBoundariesArray,
# self.meshList[0].interiorElementBoundariesArray,
# self.meshList[0].exteriorElementBoundariesArray,
# self.meshList[0].edgeNodesArray,
# self.meshList[0].nodeStarArray,
# self.meshList[0].nodeStarOffsets,
# self.meshList[0].elementMaterialTypes,
# self.meshList[0].elementBoundaryMaterialTypes,
# self.meshList[0].nodeMaterialTypes,
# self.meshList[0].nodeArray,
# self.meshList[0].elementDiametersArray,
# self.meshList[0].elementInnerDiametersArray,
# self.meshList[0].elementBoundaryDiametersArray,
# self.meshList[0].elementBarycentersArray,
# self.meshList[0].elementBoundaryBarycentersArray,
# self.meshList[0].nodeDiametersArray,
# self.meshList[0].nodeSupportArray,
# self.meshList[0].h,
# self.meshList[0].hMin,
# self.meshList[0].volume)
def refine(self):
self.meshList.append(QuadrilateralMesh())
self.meshList[-1].globalMesh = self.meshList[-1]
childrenDict = self.meshList[-1].refine(self.meshList[-2])
        # The following lines should be called elsewhere; this bookkeeping is normally done
        # in the c-function calls, which are not implemented yet for 2D quads
self.meshList[-1].nElements_owned = self.meshList[-1].nElements_global
self.meshList[-1].nodeNumbering_subdomain2global = np.zeros((self.meshList[-1].nNodes_global,), 'i')
self.meshList[-1].elementNumbering_subdomain2global = np.zeros((self.meshList[-1].nElements_global,), 'i')
self.meshList[-1].nodeOffsets_subdomain_owned[-1] = self.meshList[-1].nNodes_global
self.meshList[-1].nNodes_owned = self.meshList[-1].nNodes_global
self.meshList[-1].elementOffsets_subdomain_owned[-1] = self.meshList[-1].nElements_global
for node in range(self.meshList[-1].nNodes_global):
self.meshList[-1].nodeNumbering_subdomain2global.itemset(node,node)
for element in range(self.meshList[-1].nElements_global):
self.meshList[-1].elementNumbering_subdomain2global.itemset(element,element)
self.elementChildren.append(childrenDict)
class InterpolatedBathymetryMesh(MultilevelTriangularMesh):
"""A triangular mesh that interpolates bathymetry from a point cloud"""
def __init__(self,
domain,
triangleOptions,
atol=1.0e-4,
rtol=1.0e-4,
maxElementDiameter=None,
maxLevels=20,
maxNodes=100000,
bathyType="points",#"grid"
bathyAssignmentScheme="interpolation",#"localAveraging","L2-projection","H1-projection"
errorNormType="L2", #L1,Linfty
refineType=0,
):
from scipy import interpolate as scipy_interpolate
if maxElementDiameter:
self.maxElementDiameter = maxElementDiameter
else:
self.maxElementDiameter = np.inf
self.atol = atol
self.rtol = rtol
self.maxLevels=maxLevels
self.maxNodes=maxNodes
self.domain = domain
self.triangleOptions = triangleOptions
self.bathyType=bathyType
self.bathyAssignmentScheme=bathyAssignmentScheme
self.errorNormType = errorNormType
logEvent("InterpolatedBathymetryMesh: Calling Triangle to generate 2D coarse mesh for "+self.domain.name)
runTriangle(domain.polyfile,
self.triangleOptions)
logEvent("InterpolatedBathymetryMesh: Converting to Proteus Mesh")
self.coarseMesh = TriangularMesh()
self.coarseMesh.generateFromTriangleFiles(filebase=domain.polyfile,base=1)
MultilevelTriangularMesh.__init__(self,0,0,0,skipInit=True,nLayersOfOverlap=0,
parallelPartitioningType=MeshParallelPartitioningTypes.node)
self.generateFromExistingCoarseMesh(self.coarseMesh,1,
parallelPartitioningType=MeshParallelPartitioningTypes.node)
self.computeGeometricInfo()
#allocate some arrays based on the bathymetry data
logEvent("InterpolatedBathymetryMesh:Allocating data structures for bathymetry interpolation algorithm")
if bathyType == "points":
self.nPoints_global = self.domain.bathy.shape[0]
self.pointElementsArray_old = -np.ones((self.nPoints_global,),'i')
self.pointElementsArray = -np.ones((self.nPoints_global,),'i')
self.pointNodeWeightsArray = np.zeros((self.nPoints_global,3),'d')
self.bathyInterpolant = scipy_interpolate.LinearNDInterpolator(self.domain.bathy[:,:2],self.domain.bathy[:,2])
self.bathyNearestNeighbor = scipy_interpolate.NearestNDInterpolator(self.domain.bathy[:,:2], self.domain.bathy[:,2])
elif bathyType == "grid":
self.nPoints_global = self.domain.bathy.shape[0]
self.pointElementsArray_old = -np.ones((self.nPoints_global,),'i')
self.pointElementsArray = -np.ones((self.nPoints_global,),'i')
self.pointNodeWeightsArray = np.zeros((self.nPoints_global,3),'d')
x = self.domain.bathy[:self.domain.bathyGridDim[1],0]
y = self.domain.bathy[:self.domain.bathyGridDim[0]*self.domain.bathyGridDim[1]:self.domain.bathyGridDim[1],1]
z = self.domain.bathy[:,2].reshape(self.domain.bathyGridDim).transpose()
self.bathyInterpolant = scipy_interpolate.RectBivariateSpline(x,y,z,kx=1,ky=1)
#self.bathyInterpolant = scipy_interpolate.interp2d(x,y,z)
#
logEvent("InterpolatedBathymetryMesh: Locating points on initial mesh")
self.locatePoints_initial(self.meshList[-1])
logEvent("InterpolatedBathymetryMesh:setting mesh bathymetry from data")
self.setMeshBathymetry(self.meshList[-1])
logEvent("InterpolatedBathymetryMesh: tagging elements for refinement")
self.tagElements(self.meshList[-1])
levels = 0
error = 1.0;
while error >= 1.0 and self.meshList[-1].nNodes_global < self.maxNodes and levels < self.maxLevels:
levels += 1
logEvent("InterpolatedBathymetryMesh: Locally refining, level = %i" % (levels,))
self.locallyRefine(self.meshList[-1].elementTags,flagForRefineType=refineType)
logEvent("InterpolatedBathymetryMesh: interpolating bathymetry from parent mesh to refined mesh")
self.interpolateBathymetry()
logEvent("InterpolatedBathymetryMesh: Locating points on child mesh")
self.locatePoints_refined(self.meshList[-1])
logEvent("InterpolatedBathymetryMesh: setting mesh bathmetry from data")
self.setMeshBathymetry(self.meshList[-1])
logEvent("InterpolatedBathymetryMesh: tagging elements for refinement")
error = self.tagElements(self.meshList[-1])
logEvent("InterpolatedBathymetryMesh: error = %f atol = %f rtol = %f number of elements tagged = %i" % (error,self.atol,self.rtol,self.meshList[-1].elementTags.sum()))
def setMeshBathymetry(self,mesh):
if self.bathyAssignmentScheme == "interpolation":
self.setMeshBathymetry_interpolate(mesh)
elif self.bathyAssignmentScheme == "localAveraging":
self.setMeshBathymetry_localAveraging(mesh)
elif self.bathyAssignmentScheme == "L2-projection":
raise NotImplementedError
elif self.bathyAssignmentScheme == "H1-projection":
raise NotImplementedError
def setMeshBathymetry_interpolate(self,mesh):
if self.bathyType == 'grid':
mesh.nodeArray[:,2] = self.bathyInterpolant.ev(mesh.nodeArray[:,0],mesh.nodeArray[:,1])
else:
mesh.nodeArray[:,2] = self.bathyInterpolant(mesh.nodeArray[:,0],mesh.nodeArray[:,1])
nI = np.isnan(mesh.nodeArray[:,2])
mesh.nodeArray[nI,2] = self.bathyNearestNeighbor(mesh.nodeArray[nI,0],mesh.nodeArray[nI,1])
def setMeshBathymetry_localAveraging(self,mesh):
"""
calculate the arithmetic mean bathymetry of points inside each triangle and then assign the area-weighted average of the element means to each node
"""
from .FemTools import AffineMaps,ReferenceSimplex,LinearOnSimplexWithNodalBasis
interpolationSpace = LinearOnSimplexWithNodalBasis(nd=2)
#maps = AffineMaps(mesh,interpolationSpace.referenceElement,interpolationSpace)
#maps.useC = True
#calculate mean element height for each element
#uses arithmetic mean, so it assumes the "patch" associated with each point the same size (weight)
mesh.elementMeanZ = np.zeros((mesh.nElements_global,),'d')
for pN in range(self.nPoints_global):
eN = self.pointElementsArray[pN]
if eN >= 0:
if mesh.nPoints_element[eN] > 0:
mesh.elementMeanZ[eN] += old_div(self.domain.bathy[pN,2],float(mesh.nPoints_element[eN]))
mesh.nodeArray[mesh.elementNodesArray[eN,0],2] = 0.0
mesh.nodeArray[mesh.elementNodesArray[eN,1],2] = 0.0
mesh.nodeArray[mesh.elementNodesArray[eN,2],2] = 0.0
        #now assign the mesh node bathymetry as an area weighted average of the element means
sumArray = mesh.nodeArray[:,2].copy()
sumArray[:]=0.0
for eN in range(mesh.nElements_global):
if mesh.nPoints_element[eN] > 0:#only calculate a contribution if this element contains a point
#calculate triangle area and assign weighted average of element means to node
xiArray = np.zeros((2,),'d')
#
grad_psi = np.zeros((interpolationSpace.dim,
interpolationSpace.referenceElement.dim),
'd')
dx = np.zeros((interpolationSpace.referenceElement.dim),
'd')
jacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
inverseJacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
for j in interpolationSpace.range_dim:
grad_psi[j,:] = interpolationSpace.basisGradients[j](xiArray)#evaluate at zero because we can (psi is linear)
jacobian.flat[:]=0.0
inverseJacobian.flat[:]=0.0
for j in interpolationSpace.range_dim:
J = mesh.elementNodesArray[eN,j]
for m in interpolationSpace.referenceElement.range_dim:
for n in interpolationSpace.referenceElement.range_dim:
jacobian[m,n] += mesh.nodeArray[J,m]*grad_psi[j,n]
J = mesh.elementNodesArray[eN,0]
inverseJacobian = inv(jacobian)
area = 0.5*det(jacobian)
sumArray[mesh.elementNodesArray[eN,0]] += old_div(area,mesh.nodeSupportArray[mesh.elementNodesArray[eN,0]])
sumArray[mesh.elementNodesArray[eN,1]] += old_div(area,mesh.nodeSupportArray[mesh.elementNodesArray[eN,1]])
sumArray[mesh.elementNodesArray[eN,2]] += old_div(area,mesh.nodeSupportArray[mesh.elementNodesArray[eN,2]])
mesh.nodeArray[mesh.elementNodesArray[eN,0],2] += area*mesh.elementMeanZ[eN]/mesh.nodeSupportArray[mesh.elementNodesArray[eN,0]]
mesh.nodeArray[mesh.elementNodesArray[eN,1],2] += area*mesh.elementMeanZ[eN]/mesh.nodeSupportArray[mesh.elementNodesArray[eN,1]]
mesh.nodeArray[mesh.elementNodesArray[eN,2],2] += area*mesh.elementMeanZ[eN]/mesh.nodeSupportArray[mesh.elementNodesArray[eN,2]]
#cek debug
#print "sum of a nodes element areas divided by node support shoudl be 1 ",sumArray
def locatePoints(self,mesh):
"""
locate the element containing each point
this should only be used on very coarse meshes
"""
from .FemTools import AffineMaps,ReferenceSimplex,LinearOnSimplexWithNodalBasis
interpolationSpace = LinearOnSimplexWithNodalBasis(nd=2)
#maps = AffineMaps(mesh,interpolationSpace.referenceElement,interpolationSpace)
#maps.useC = False
#find the elements that contain bathymetry points and calculate:
        # - for each element, the number of bathymetry points in that element
        # - for each node, the total area of the node's elements that contain bathymetry points
        # - the area of each element
        # - the total area covered by elements containing bathymetry points
mesh.nPoints_element = np.zeros((mesh.nElements_global,),'i')
mesh.nodeSupportArray = np.zeros((mesh.nNodes_global,),'d')
mesh.area_element = np.zeros((mesh.nElements_global,),'d')
self.pointElementsArray[:] = -1
self.totalArea = 0.0
for eN in range(mesh.nElements_global):
            #map each point to reference space and test whether it lies in the reference triangle
xiArray = np.zeros((2,),'d')
xiArray[:] = 0.0
#
grad_psi = np.zeros((interpolationSpace.dim,
interpolationSpace.referenceElement.dim),
'd')
dx = np.zeros((interpolationSpace.referenceElement.dim),
'd')
jacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
inverseJacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
for j in interpolationSpace.range_dim:
                grad_psi[j,:] = interpolationSpace.basisGradients[j](xiArray[0])#evaluate at zero because we can (psi is linear)
jacobian.flat[:]=0.0
inverseJacobian.flat[:]=0.0
for j in interpolationSpace.range_dim:
J = mesh.elementNodesArray[eN,j]
for m in interpolationSpace.referenceElement.range_dim:
for n in interpolationSpace.referenceElement.range_dim:
jacobian[m,n] += mesh.nodeArray[J,m]*grad_psi[j,n]
J = mesh.elementNodesArray[eN,0]
inverseJacobian = inv(jacobian)
area = 0.5*det(jacobian)
mesh.area_element[eN] = area
self.totalArea += area
for pN in range(self.nPoints_global):#can optimize by skipping previously found points
xiArray[:] = 0.0
dx[:]=self.domain.bathy[pN,:2]
for m in interpolationSpace.referenceElement.range_dim:
dx[m]-=mesh.nodeArray[J,m]
for m in interpolationSpace.referenceElement.range_dim:
for n in interpolationSpace.referenceElement.range_dim:
xiArray[m] += inverseJacobian[m,n]*dx[n]
                #if the barycentric coordinates are non-negative we're in this element
if xiArray[0] >=0.0 and xiArray[1] >= 0.0 and 1.0 - xiArray[0] - xiArray[1] >= 0.0:
self.pointElementsArray[pN] = eN
self.pointNodeWeightsArray[pN,0] = interpolationSpace.basis[0](xiArray)
self.pointNodeWeightsArray[pN,1] = interpolationSpace.basis[1](xiArray)
self.pointNodeWeightsArray[pN,2] = interpolationSpace.basis[2](xiArray)
#count the number of points inside each element
for pN in range(self.nPoints_global):
if self.pointElementsArray[pN] >= 0:
mesh.nPoints_element[self.pointElementsArray[pN]] += 1
#add up the support area for each node
for eN in range(mesh.nElements_global):
if mesh.nPoints_element[eN] > 0:
mesh.nodeSupportArray[mesh.elementNodesArray[eN,0]] += mesh.area_element[eN]
mesh.nodeSupportArray[mesh.elementNodesArray[eN,1]] += mesh.area_element[eN]
mesh.nodeSupportArray[mesh.elementNodesArray[eN,2]] += mesh.area_element[eN]
def locatePoints_refined(self,mesh):
"""
locate the element containing each point
this should only be used on very coarse meshes
"""
from .FemTools import AffineMaps,ReferenceSimplex,LinearOnSimplexWithNodalBasis
interpolationSpace = LinearOnSimplexWithNodalBasis(nd=2)
#maps = AffineMaps(mesh,interpolationSpace.referenceElement,interpolationSpace)
#maps.useC = False
#find the elements that contain bathymetry points and calculate:
        # - for each element, the number of bathymetry points in that element
        # - for each node, the total area of the node's elements that contain bathymetry points
        # - the area of each element
        # - the total area covered by elements containing bathymetry points
mesh.nPoints_element = np.zeros((mesh.nElements_global,),'i')
mesh.nodeSupportArray = np.zeros((mesh.nNodes_global,),'d')
mesh.area_element = np.zeros((mesh.nElements_global,),'d')
self.totalArea = 0.0
self.pointElementsArray_old[:] = self.pointElementsArray
self.pointElementsArray[:] = -1
for pN in range(self.nPoints_global):
eN_parent = self.pointElementsArray_old[pN]
for eN in self.elementChildrenArrayList[-1][self.elementChildrenOffsetsList[-1][eN_parent]:self.elementChildrenOffsetsList[-1][eN_parent+1]]:
xiArray = np.zeros((2,),'d')
xiArray[:] = 0.0
grad_psi = np.zeros((interpolationSpace.dim,
interpolationSpace.referenceElement.dim),
'd')
dx = np.zeros((interpolationSpace.referenceElement.dim),
'd')
jacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
inverseJacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
for j in interpolationSpace.range_dim:
                    grad_psi[j,:] = interpolationSpace.basisGradients[j](xiArray[0])#evaluate at zero because we can (psi is linear)
jacobian.flat[:]=0.0
inverseJacobian.flat[:]=0.0
for j in interpolationSpace.range_dim:
J = mesh.elementNodesArray[eN,j]
for m in interpolationSpace.referenceElement.range_dim:
for n in interpolationSpace.referenceElement.range_dim:
jacobian[m,n] += mesh.nodeArray[J,m]*grad_psi[j,n]
J = mesh.elementNodesArray[eN,0]
inverseJacobian = inv(jacobian)
area = 0.5*det(jacobian)
mesh.area_element[eN] = area
self.totalArea += area
xiArray[:] = 0.0
dx[:]=self.domain.bathy[pN,:2]
for m in interpolationSpace.referenceElement.range_dim:
dx[m]-=mesh.nodeArray[J,m]
for m in interpolationSpace.referenceElement.range_dim:
for n in interpolationSpace.referenceElement.range_dim:
xiArray[m] += inverseJacobian[m,n]*dx[n]
                #if the barycentric coordinates are non-negative we're in this element
if xiArray[0] >=0.0 and xiArray[1] >= 0.0 and 1.0 - xiArray[0] - xiArray[1] >= 0.0:
self.pointElementsArray[pN] = eN
self.pointNodeWeightsArray[pN,0] = interpolationSpace.basis[0](xiArray)
self.pointNodeWeightsArray[pN,1] = interpolationSpace.basis[1](xiArray)
self.pointNodeWeightsArray[pN,2] = interpolationSpace.basis[2](xiArray)
#count the number of points inside each element
for pN in range(self.nPoints_global):
if self.pointElementsArray[pN] >= 0:
mesh.nPoints_element[self.pointElementsArray[pN]] += 1
#add up the support area for each node
for eN in range(mesh.nElements_global):
if mesh.nPoints_element[eN] > 0:
mesh.nodeSupportArray[mesh.elementNodesArray[eN,0]] += mesh.area_element[eN]
mesh.nodeSupportArray[mesh.elementNodesArray[eN,1]] += mesh.area_element[eN]
mesh.nodeSupportArray[mesh.elementNodesArray[eN,2]] += mesh.area_element[eN]
def locatePoints_initial(self,mesh):
"""
locate the element containing each point
first find the nearest node, then loop over that node's elements
"""
from scipy.spatial import cKDTree
from .FemTools import AffineMaps,ReferenceSimplex,LinearOnSimplexWithNodalBasis
interpolationSpace = LinearOnSimplexWithNodalBasis(nd=2)
#find the elements that contain bathymetry points and calculate:
        # - for each element, the number of bathymetry points in that element
        # - for each node, the total area of the node's elements that contain bathymetry points
        # - the area of each element
        # - the total area covered by elements containing bathymetry points
mesh.nPoints_element = np.zeros((mesh.nElements_global,),'i')
mesh.nodeSupportArray = np.zeros((mesh.nNodes_global,),'d')
mesh.area_element = np.zeros((mesh.nElements_global,),'d')
self.totalArea = 0.0
self.pointElementsArray[:] = -1
tree = cKDTree(mesh.nodeArray[:,:2])
xiArray = np.zeros((2,),'d')
grad_psi = np.zeros((interpolationSpace.dim,
interpolationSpace.referenceElement.dim),
'd')
dx = np.zeros((interpolationSpace.referenceElement.dim),
'd')
jacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
inverseJacobian = np.zeros((interpolationSpace.referenceElement.dim,
interpolationSpace.referenceElement.dim),
'd')
for pN in range(self.nPoints_global):
(distance,nN) = tree.query(self.domain.bathy[pN,:2])
for eN in mesh.nodeElementsArray[mesh.nodeElementOffsets[nN]:mesh.nodeElementOffsets[nN+1]]:
xiArray[:] = 0.0
for j in interpolationSpace.range_dim:
                    grad_psi[j,:] = interpolationSpace.basisGradients[j](xiArray[0])#evaluate at zero because we can (psi is linear)
jacobian.flat[:]=0.0
inverseJacobian.flat[:]=0.0
for j in interpolationSpace.range_dim:
J = mesh.elementNodesArray[eN,j]
for m in interpolationSpace.referenceElement.range_dim:
for n in interpolationSpace.referenceElement.range_dim:
jacobian[m,n] += mesh.nodeArray[J,m]*grad_psi[j,n]
J = mesh.elementNodesArray[eN,0]
inverseJacobian = inv(jacobian)
area = 0.5*det(jacobian)
mesh.area_element[eN] = area
xiArray[:] = 0.0
dx[:]=self.domain.bathy[pN,:2]
for m in interpolationSpace.referenceElement.range_dim:
dx[m]-=mesh.nodeArray[J,m]
for m in interpolationSpace.referenceElement.range_dim:
for n in interpolationSpace.referenceElement.range_dim:
xiArray[m] += inverseJacobian[m,n]*dx[n]
#if the barycentric coordinates are non-negative we're in this element
if xiArray[0] >=0.0 and xiArray[1] >= 0.0 and 1.0 - xiArray[0] - xiArray[1] >= 0.0:
self.pointElementsArray[pN] = eN
self.pointNodeWeightsArray[pN,0] = interpolationSpace.basis[0](xiArray)
self.pointNodeWeightsArray[pN,1] = interpolationSpace.basis[1](xiArray)
self.pointNodeWeightsArray[pN,2] = interpolationSpace.basis[2](xiArray)
self.totalArea += mesh.area_element.sum()
#count the number of points inside each element
for pN in range(self.nPoints_global):
if self.pointElementsArray[pN] >= 0:
mesh.nPoints_element[self.pointElementsArray[pN]] += 1
#add up the support area for each node
for eN in range(mesh.nElements_global):
if mesh.nPoints_element[eN] > 0:
mesh.nodeSupportArray[mesh.elementNodesArray[eN,0]] += mesh.area_element[eN]
mesh.nodeSupportArray[mesh.elementNodesArray[eN,1]] += mesh.area_element[eN]
mesh.nodeSupportArray[mesh.elementNodesArray[eN,2]] += mesh.area_element[eN]
def interpolateBathymetry(self):
"""
interpolate bathymetry for the refinement from the parent mesh
"""
from proteus.FemTools import C0_AffineLinearOnSimplexWithNodalBasis,DOFBoundaryConditions,MultilevelProjectionOperators
mlMeshTemp = MultilevelMesh(levels=2)
mlMeshTemp.meshList = self.meshList[-2:]
mlMeshTemp.nLevels=2
mlMeshTemp.cmeshList = self.cmeshList[-2:]
mlMeshTemp.elementParentsArrayList = self.elementParentsArrayList[-2:]
mlMeshTemp.elementChildrenArrayList = self.elementChildrenArrayList[-1:]
mlMeshTemp.elementChildrenOffsetsList = self.elementChildrenOffsetsList[-1:]
nd=2
TrialSpaceTypeDict = {0:C0_AffineLinearOnSimplexWithNodalBasis}
trialSpaceDictParent = dict([ (cj,TrialSpaceType(mlMeshTemp.meshList[0],nd)) for (cj,TrialSpaceType) in TrialSpaceTypeDict.items()])
trialSpaceDictChild = dict([ (cj,TrialSpaceType(mlMeshTemp.meshList[1],nd)) for (cj,TrialSpaceType) in TrialSpaceTypeDict.items()])
trialSpaceDictList = [trialSpaceDictParent,trialSpaceDictChild]
offsetListList=[[0],[0]]
strideListList=[[1],[1]]
def getDBC(x,flag):
return None
bcDictList=[dict([(0,DOFBoundaryConditions(trialSpaceDictParent[0],getPointwiseBoundaryConditions=getDBC,weakDirichletConditions=False))]),
dict([(0,DOFBoundaryConditions(trialSpaceDictChild[0],getPointwiseBoundaryConditions=getDBC,weakDirichletConditions=False))])]
self.meshTransfers = MultilevelProjectionOperators(
mlMeshTemp,
trialSpaceDictList,
offsetListList,
strideListList,
bcDictList)
zParent = self.meshList[-2].nodeArray[:,2].copy()
zChild = self.meshList[-1].nodeArray[:,2].copy()
self.meshTransfers.prolongList[-1].matvec(zParent,zChild)
self.meshList[-1].nodeArray[:,2] = zChild
def tagElements(self,mesh):
"""
loop over points and calculate whether the interpolation error is within the tolerance
this should only be used on very coarse meshes
"""
mesh.elementTags = np.zeros((mesh.nElements_global,),'i')
mesh.errorAverage_element = np.zeros((mesh.nElements_global,),'d')
errorInfty = 0.0
mesh.elementTags[mesh.elementDiametersArray > self.maxElementDiameter ] = 1
for pN in range(self.nPoints_global):
eN = self.pointElementsArray[pN]
if eN >= 0:
zInterp = self.pointNodeWeightsArray[pN,0]*mesh.nodeArray[mesh.elementNodesArray[eN,0],2] + \
self.pointNodeWeightsArray[pN,1]*mesh.nodeArray[mesh.elementNodesArray[eN,1],2] + \
self.pointNodeWeightsArray[pN,2]*mesh.nodeArray[mesh.elementNodesArray[eN,2],2]
errorPointwise = fabs(zInterp - self.domain.bathy[pN,2])/(fabs(self.domain.bathy[pN,2])*self.rtol + self.atol)
errorInfty = max(errorPointwise,errorInfty)
mesh.errorAverage_element[eN] += (errorPointwise/float(mesh.nPoints_element[eN]))
#print "error average",mesh.errorAverage_element[eN]
if errorPointwise >= 1.0:
mesh.elementTags[eN] = 1
if self.errorNormType == "L1":
mesh.elementTags[:] = 0
errorL1 = 0.0
for eN in range(mesh.nElements_global):
errorL1 += mesh.errorAverage_element[eN]*mesh.area_element[eN]
if mesh.errorAverage_element[eN] >= 1.0:
mesh.elementTags[eN] = 1
errorL1 /= self.totalArea#normalize by domain error to make error have units of length
return errorL1
if self.errorNormType == "L2":
mesh.elementTags[:] = 0
errorL2 = 0.0
for eN in range(mesh.nElements_global):
errorL2 += (mesh.errorAverage_element[eN])**2 * mesh.area_element[eN]
if mesh.errorAverage_element[eN] >= 1.0:
mesh.elementTags[eN] = 1
errorL2 = old_div(sqrt(errorL2),self.totalArea)#normalize by domain error to make error have units of length
return errorL2
else:
print("Interpolation Error, L_infty ",errorInfty)
return errorInfty
class EdgeMesh(Mesh):
"""A mesh of edges
The nodes, and edges are indexed by their node tuples. The
corresponding lists are derived from the dictionaries, and sorted
lexicographically. The global node numbers are redefined to give a
lexicographic ordering.
"""
def __init__(self):
Mesh.__init__(self)
self.nodeDict={}
self.edgeDict={}
self.oldToNewNode=[]
def computeGeometricInfo(self):
from .import cmeshTools
cmeshTools.computeGeometricInfo_edge(self.cmesh)
def generateEdgeMeshFromRectangularGrid(self,nx,Lx):
from .import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateEdgeMeshFromRectangularGrid(nx,Lx,self.cmesh)
cmeshTools.allocateGeometricInfo_edge(self.cmesh)
cmeshTools.computeGeometricInfo_edge(self.cmesh)
self.buildFromC(self.cmesh)
#mwf debug
#print "EdgeMesh rect->edge after build nodes=%s " % (self.nodeArray)
def rectangularToEdge(self,grid):
#copy the nodes from the rectangular mesh
#I want to be able to renumber latter without
#changing the grid nodes, so I do deep copies here
self.nodeList = [Node(n.N,n.p[X],n.p[Y],n.p[Z]) for n in grid.nodeList]
self.nodeDict = dict([(n,n) for n in self.nodeList])
for e in grid.edgeList:
self.newEdge([self.nodeDict[e.nodes[0]],self.nodeDict[e.nodes[1]]])
self.finalize()
#self.buildListsEdges()
def finalize(self):
self.buildLists()
self.buildArraysFromLists()
#todo: build boundary mesh
def buildLists(self):
self.buildListsNodes()
self.buildListsEdges()
self.elementList = self.edgeList
self.elementBoundaryList = self.nodeList
def buildListsNodes(self):
keyList = list(self.nodeDict.keys())
keyList.sort()
self.nodeList=[]
self.oldToNewNode=list(range(len(self.nodeDict)))
for nN,k in enumerate(keyList):
self.oldToNewNode[self.nodeDict[k].N]=nN
self.nodeDict[k].N = nN
self.nodeList.append(self.nodeDict[k])
def buildListsEdges(self):
keyList = list(self.edgeDict.keys())
keyList.sort()
self.edgeList=[]
for eN,k in enumerate(keyList):
self.edgeDict[k].N = eN
self.edgeList.append(self.edgeDict[k])
def newEdge(self,nodes):
e = Edge(len(self.edgeDict),nodes)
self.edgeDict[e.nodes] = e
return e
def registerNode(self,node):
if node in self.nodeDict:
node = self.nodeDict[node]
else:
node.N = len(self.nodeDict)
self.nodeDict[node] = node
return node
def refine2e(self,oldMesh):
childrenDict={}
for e in oldMesh.edgeList:
#deep copy old nodes because we'll renumber
eNodes = [Node(eN,n.p[X],n.p[Y],n.p[Z])
for eN,n in enumerate(e.nodes)]
for lnN,n in enumerate(eNodes): eNodes[lnN]=self.registerNode(n)
#add new node
e.computeGeometricInfo()
newNode = Node(len(self.nodeDict),
e.barycenter[X],
e.barycenter[Y],
e.barycenter[Z])
newNode = self.registerNode(newNode)
e1=self.newEdge([eNodes[0],newNode])
e2=self.newEdge([newNode,eNodes[1]])
childrenDict[e.N]=[e1,e2]
self.finalize()
return childrenDict
def refine(self,oldMesh):
return self.refine2e(oldMesh)
def meshInfo(self):
minfo = """Number of edges : %d
Number of nodes : %d\n""" % (self.nElements_global,self.nNodes_global)
if self.subdomainMesh != self:
sinfo = self.subdomainMesh.meshInfo()
info = "*** Global ***\n" + minfo + "\n*** Local ***\n" + sinfo
return info
return minfo
def writeMeshADH(self,filename):
pass
def writeMeshXdmf(self,ar,name='',t=0.0,init=False,meshChanged=False,tCount=0):
Mesh.writeMeshXdmf(self,ar,name,t,init,meshChanged,"Polyline",tCount)
def writeMeshEnsight(self,filename,description=None):
base=1
#write the casefile
caseOut=open(filename+'.case','w')
caseOut.write('FORMAT\n'+'type: ensight gold\n')
caseOut.write('GEOMETRY\n'+'model: '+filename+'.geo\n')
caseOut.close()
meshOut=open(filename+'.geo','w')
meshOut.write('Ensight Gold\n')
meshOut.write('Unstructured Edge Mesh\n')
meshOut.write('node id given\n')
meshOut.write('element id given\n')
meshOut.write('part \n'+'%10i\n' % 1)
if description:
meshOut.write(description+'\n')
else:
meshOut.write('A Mesh\n')
meshOut.write('coordinates\n'+'%10i\n' % self.nNodes_global)
for nN in range(self.nNodes_global):
meshOut.write('%10i\n' % (nN+base))
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,0])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,1])
for nN in range(self.nNodes_global):
meshOut.write('%12.5E\n' % self.nodeArray[nN,2])
meshOut.write('bar2\n'+'%10i\n' % self.nElements_global)
for eN in range(self.nElements_global):
meshOut.write('%10i\n' % (eN+base))
for eN in range(self.nElements_global):
meshOut.write('%10i%10i\n' % tuple((nN+base) for nN in self.elementNodesArray[eN,:]))
meshOut.close()
class MultilevelEdgeMesh(MultilevelMesh):
"""A hierarchical multilevel mesh of intervals (edges)"""
from .import cmeshTools
def __init__(self,
nx, ny, nz,
x=0.0, y=0.0, z=0.0,
Lx=1.0, Ly=1.0, Lz=1.0,
refinementLevels=1,
nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from . import cmeshTools
MultilevelMesh.__init__(self)
self.useC=True
self.nLayersOfOverlap=nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
if self.useC:
self.meshList.append(EdgeMesh())
self.meshList[0].generateEdgeMeshFromRectangularGrid(nx,Lx)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(EdgeMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.meshList[l].cmesh)
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
else:
grid=RectangularGrid(nx,ny,nz,Lx,Ly,Lz)
self.meshList.append(EdgeMesh())
self.meshList[0].rectangularToEdge(grid)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.elementChildren=[]
print(self.meshList[0].meshInfo())
for l in range(1,refinementLevels):
self.refine()
print(self.meshList[-1].meshInfo())
def refine(self):
self.meshList.append(EdgeMesh())
childrenDict = self.meshList[-1].refine(self.meshList[-2])
self.elementChildren.append(childrenDict)
def computeGeometricInfo(self):
for m in self.meshList:
m.computeGeometricInfo()
def locallyRefine(self,elementTagArray):
"""
simple local refinement assuming elementTagArray[eN]=1 --> bisect
"""
if self.useC:
self.cmeshTools.locallyRefineMultilevelMesh(1,self.cmultilevelMesh,elementTagArray)
self.buildFromC(self.cmultilevelMesh)
self.meshList.append(EdgeMesh())
self.meshList[self.nLevels-1].cmesh = self.cmeshList[self.nLevels-1]
self.meshList[self.nLevels-1].buildFromC(self.meshList[self.nLevels-1].cmesh)
self.meshList[self.nLevels-1].partitionMesh(nLayersOfOverlap=self.nLayersOfOverlap,parallelPartitioningType=self.parallelPartitioningType)
else:
print("""locallyRefine not implemented for self.useC= %s """ % (self.useC))
#
#
class MultilevelSimplicialMesh(MultilevelMesh):
"""A wrapper for all the simplicial hierarchical meshes in 1,2, and 3D"""
    def __init__(self,nd,nx,ny=1,nz=1,Lx=1.0,Ly=1.0,Lz=1.0,refinementLevels=1):
        self.nd = nd
        if nd==1:
            MultilevelEdgeMesh.__init__(self,nx,ny,nz,
                                        Lx,Ly,Lz,
                                        refinementLevels)
        elif nd==2:
            MultilevelTriangularMesh.__init__(self,nx,ny,nz,
                                              Lx,Ly,Lz,
                                              refinementLevels)
        elif nd==3:
            MultilevelTetrahedralMesh.__init__(self,nx,ny,nz,
                                               Lx,Ly,Lz,
                                               refinementLevels)
    def refine(self):
        if self.nd==1:
            MultilevelEdgeMesh.refine(self)
        elif self.nd==2:
            MultilevelTriangularMesh.refine(self)
        elif self.nd==3:
            MultilevelTetrahedralMesh.refine(self)
## @}
###utility functions for reading meshes from Xdmf
def findXMLgridElement(xmf,MeshTag='Spatial_Domain',id_in_collection=-1,verbose=0):
"""Try to find the element of the xml tree xmf that holds a uniform
grid with the name given in MeshTag by searching through Temporal
Grid Collections and Grid Collections.
If MeshTag isn't found, uses the first entry in the Domain
"""
Domain = xmf.getroot()[-1]
GridCollection = None
Grid = None
for collection in Domain:
if 'Name' in collection.attrib and MeshTag in collection.attrib['Name']:
GridCollection = collection
break
if GridCollection is None:
GridCollection = Domain[0]
logEvent("Trying GridCollection.tag= %s" % (GridCollection.tag),4)
if GridCollection.attrib['GridType'] == 'Collection':
Grid = GridCollection[-1]
elif GridCollection.attrib['GridType'] == 'Uniform':
Grid = GridCollection
assert Grid.tag == 'Grid'
assert Grid.attrib['GridType'] == 'Uniform'
return Grid
def extractPropertiesFromXdmfGridNode(Grid):
"""unpack the Topology, Geometry, NodeMaterials, and ElementMaterials
nodes from xdmf node for a uniform grid
"""
#Geometry first
Topology = None; Geometry = None; NodeMaterials= None; ElementMaterials = None
for i,leaf in enumerate(Grid):
logEvent("Grid leaf %d tag= %s " % (i,leaf.tag),4)
if leaf.tag == 'Topology':
Topology = Grid[i]
logEvent("Topology found in leaf %d " % i,4)
elif leaf.tag == 'Geometry':
Geometry = Grid[i]
logEvent("Geometry found in leaf %d " % i,4)
elif leaf.tag == 'Attribute' and leaf.attrib['Name'] == 'nodeMaterialTypes':
NodeMaterials = Grid[i]
logEvent("NodeMaterials found in leaf %d " % i,4)
elif leaf.tag == 'Attribute' and leaf.attrib['Name'] == 'elementMaterialTypes':
ElementMaterials = Grid[i]
logEvent("ElementMaterials found in leaf %d " % i,4)
return Topology,Geometry,NodeMaterials,ElementMaterials
def readUniformElementTopologyFromXdmf(elementTopologyName,Topology,hdf5,topologyid2name,topology2nodes):
"""
    Read xdmf element topology information when there are uniform elements in the mesh
Type of element given by elementTopologyName
Heavy data stored in hdf5
topologyid2name -- lookup for number of nodes in a given element type
returns
nElements_global -- the number of elements in the mesh
nNodes_element -- number of nodes per element
elementNodesArray -- element --> node connectivity stored as flattened array accessed using elementNodes_offset
elementNodes_offset -- offsets into the elementNodesArray storage for element connectivity,
element eN nodes are in elementNodesArray[elementNodes_offset[eN]:elementNodes_offset[eN+1]]
"""
nNodes_element = topology2nodes[elementTopologyName]
entry = Topology[0].text.split(':')[-1]
logEvent("Reading elementNodesArray from %s " % entry,3)
elementNodesArray = hdf5["/"+entry][:]
assert elementNodesArray.shape[1] == nNodes_element
nElements_global = elementNodesArray.shape[0]
logEvent("nElements_global,nNodes_element= (%d,%d) " % (nElements_global,nNodes_element),3)
elementNodes_offset = np.arange(nElements_global*nNodes_element+1,step=nNodes_element,dtype='i')
return nElements_global, nNodes_element, elementNodesArray, elementNodes_offset
def readMixedElementTopologyFromXdmf(elementTopologyName,Topology,hdf5,topologyid2name,topology2nodes):
"""
    Read xdmf element topology information when there are mixed elements in the mesh
Heavy data stored in hdf5
topologyid2name -- lookup for number of nodes in a given element type
returns
nElements_global -- the number of elements in the mesh
elementNodesArray -- element --> node connectivity stored as flattened
array accessed using elementNodes_offset
elementNodes_offset -- offsets into the elementNodesArray storage for element
connectivity, element eN nodes are
inelementNodesArray[elementNodes_offset[eN]:elementNodes_offset[eN+1]]
"""
assert elementTopologyName == 'Mixed'
entry = Topology[0].text.split(':')[-1]
logEvent("Reading xdmf_topology from %s " % entry,3)
xdmf_topology = hdf5["/"+entry][:]
#build elementNodesArray and offsets now
nElements_global = 0
i = 0
while i < len(xdmf_topology):
nElements_global += 1
nNodes_local = topology2nodes[topologyid2name[xdmf_topology[i]]]
i += nNodes_local+1
#
logEvent("Mixed topology found %s elements " % nElements_global,3)
elementNodes_offset = np.zeros((nElements_global+1,),'i')
i = 0; eN = 0
while i < len(xdmf_topology):
nNodes_local = topology2nodes[topologyid2name[xdmf_topology[i]]]
elementNodes_offset[eN+1] = elementNodes_offset[eN] + nNodes_local
eN += 1; i += nNodes_local+1
elementNodesArray = np.zeros((elementNodes_offset[nElements_global],),'i')
i = 0; eN = 0
    while i < len(xdmf_topology):
nNodes_local = topology2nodes[topologyid2name[xdmf_topology[i]]]
elementNodesArray[elementNodes_offset[eN]:elementNodes_offset[eN+1]][:] = xdmf_topology[i+1:i+1+nNodes_local][:]
eN += 1; i += nNodes_local+1
return nElements_global, elementNodesArray, elementNodes_offset
def readMeshXdmf(xmf_archive_base,heavy_file_base,MeshTag="Spatial_Domain",hasHDF5=True,verbose=0):
"""Read in a mesh from XDMF, assuming heavy data is in hdf5
:return: a BasicMeshInfo object with the minimal information read
"""
# start trying to read an xdmf archive with name xmf_archive_base.xmf
# assumes heavy_file_base.h5 has heavy data
# root Element is Xdmf
# last child of Xdmf which should be a Domain Element
# find child of Domain that is a Temporal Grid Collection with a name containing MeshTag, if None use first collection
# last child of Temporal Grid Collection should be a Uniform Grid at final time
# Attribute (usually 1) of child is Topology
# set elementTopologyName to Type
# if Type != Mixed
# get text attribute and read this entry from hdf5 file
# set nNodes_element based on Type, nElements_global from leading dimension of elementNodesArray
# create elementNodes_offset from Type and flatten elementNodesArray
# else
# get text attribute and read this entry from hdf5 file to place in into xdmf_topology
# generate elementNodesArray from xdmf_topology, calculating the number of elements using
# walk through xdmf_topology
# Attribute (usually 2) of child is Geometry --> load data into nodeArray
# set nNodes_global from nodeArray
# If has Attribute nodeMaterials read this from hdf file, else set to default of all zeros
# If has Attribute elementMaterialTypes, read this from hdf file, else set to default of all zeros
assert os.path.isfile(xmf_archive_base+'.xmf')
assert os.path.isfile(heavy_file_base+'.h5')
###information about allowed Xdmf topologies
#Xdmf cell type id to Name
topologyid2name = {2:'Polyline',4:'Triangle',5:'Quadrilateral',6:'Tetrahedron',8:'Wedge',9:'Hexahedron',
112:'Mixed'} #Mixed isn't actually used 0x070
#Topology name to number of local nodes
topology2nodes = {'Polyline':2,'Triangle':3,'Quadrilateral':4,'Tetrahedron':4,'Wedge':6,'Hexahedron':8}
#for output
class BasicMeshInfo(object):
def __init__(self):
self.nNodes_global = None
self.nodeArray = None
self.nodeMaterialTypes = None
self.nNodes_element = None
self.nElements_global = None
self.elementTopologyName = None
self.elementNodesArray = None
self.elementNodes_offset = None
self.elementMaterialTypes = None
self.nNodes_owned = None
self.nElements_owned = None
#
#
MeshInfo = BasicMeshInfo()
xmf = ET.parse(xmf_archive_base+'.xmf')
hdf5= h5py.File(heavy_file_base+'.h5',"r")
assert hasHDF5
Grid = findXMLgridElement(xmf,MeshTag,id_in_collection=-1,verbose=verbose)
Topology,Geometry,NodeMaterials,ElementMaterials = extractPropertiesFromXdmfGridNode(Grid)
assert Geometry is not None
entry = Geometry[0].text.split(':')[-1]
logEvent("Reading nodeArray from %s " % entry,3)
MeshInfo.nodeArray = hdf5["/"+entry][:]
MeshInfo.nNodes_global = MeshInfo.nodeArray.shape[0]
if NodeMaterials is not None:
entry = NodeMaterials[0].text.split(':')[-1]
logEvent("Reading nodeMaterialTypes from %s " % entry,4)
MeshInfo.nodeMaterialTypes = hdf5["/"+entry][:]
else:
MeshInfo.nodeMaterialTypes = np.zeros((MeshInfo.nNodes_global,),'i')
assert Topology is not None
if 'Type' in Topology.attrib:
MeshInfo.elementTopologyName = Topology.attrib['Type']
elif 'TopologyType' in Topology.attrib:
MeshInfo.elementTopologyName = Topology.attrib['TopologyType']
assert MeshInfo.elementTopologyName is not None
logEvent("elementTopologyName= %s " % MeshInfo.elementTopologyName,3)
assert MeshInfo.elementTopologyName in list(topologyid2name.values())
if MeshInfo.elementTopologyName != 'Mixed':
MeshInfo.nElements_global, MeshInfo.nNodes_element, \
MeshInfo.elementNodesArray, MeshInfo.elementNodes_offset = readUniformElementTopologyFromXdmf(MeshInfo.elementTopologyName,Topology,
hdf5,topologyid2name,topology2nodes)
else:
MeshInfo.nElements_global, MeshInfo.elementNodesArray, \
MeshInfo.elementNodes_offset = readMixedElementTopologyFromXdmf(MeshInfo.elementTopologyName,Topology,hdf5,topologyid2name,topology2nodes)
#
if ElementMaterials is not None:
entry = ElementMaterials[0].text.split(':')[-1]
logEvent("Reading elementMaterialTypes from %s " % entry,3)
MeshInfo.elementMaterialTypes = hdf5["/"+entry][:]
else:
MeshInfo.elementMaterialTypes = np.zeros((MeshInfo.nElements_global,),'i')
#
###only serial for now
MeshInfo.nNodes_owned = MeshInfo.nNodes_global
MeshInfo.nElements_owned = MeshInfo.nElements_global
hdf5.close()
return MeshInfo
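# Illustrative usage sketch for readMeshXdmf (commented out, not executed here); the archive
# and heavy-data base names below are assumptions, not files provided by this module:
#
#   info = readMeshXdmf("my_run", "my_run", MeshTag="Spatial_Domain")
#   print(info.nElements_global, info.nNodes_global, info.elementTopologyName)
#   # info.elementNodesArray / info.elementNodes_offset hold the flattened connectivity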
#
def writeHexMesh(mesh_info,hexfile_base,index_base=0):
"""
Write a hex mesh in Ido's format with base numbering index_base
HEX
nNodes_global nElements_global
x0 y0 z0
x1 y1 z1
...
xN yN zN
[n0 n1 n2 n3 n4 n5 n6 n7 mat0]
[n0 n1 n2 n3 n4 n5 n6 n7 mat1]
"""
assert mesh_info.elementTopologyName=='Hexahedron'
header="""HEX
{nNodes_global} {nElements_global}
""".format(nNodes_global=mesh_info.nNodes_global,nElements_global=mesh_info.nElements_global)
with open(hexfile_base+'.mesh','w') as mout:
mout.write(header)
np.savetxt(mout,mesh_info.nodeArray)
#format the elements, appending element material type
elems_with_mat = np.append(mesh_info.elementNodesArray,mesh_info.elementMaterialTypes.reshape(mesh_info.nElements_global,1),axis=1)
elems_with_mat[:,:-1] += index_base
np.savetxt(mout,elems_with_mat,fmt='%d')
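# Illustrative sketch (commented out, not executed): a BasicMeshInfo read from an Xdmf archive
# that contains hexahedra can be written back out in the simple HEX format above; the file
# names are assumptions:
#
#   hex_info = readMeshXdmf("hex_run", "hex_run")
#   writeHexMesh(hex_info, "hex_run_out", index_base=1)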
class MultilevelNURBSMesh(MultilevelMesh):
def __init__(self,
nx, ny, nz,
x=0.0, y=0.0, z=0.0,
px=1, py=1, pz=1,
Lx=1.0, Ly=1.0, Lz=1.0,
refinementLevels=1,
skipInit=False,
nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from .import cmeshTools
from . import Comm
MultilevelMesh.__init__(self)
self.useC = True
self.nLayersOfOverlap = nLayersOfOverlap; self.parallelPartitioningType = parallelPartitioningType
logEvent("Generating NURBS mesh")
if not skipInit:
self.meshList.append(NURBSMesh())
self.meshList[0].generateNURBSMeshFromRectangularGrid(nx,ny,nz,px,py,pz,Lx,Ly,Lz)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].nodeArray[:,0] += x
self.meshList[0].nodeArray[:,1] += y
self.meshList[0].nodeArray[:,2] += z
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(NURBSMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.cmeshList[l])
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
def generateFromExistingCoarseMesh(self,mesh0,refinementLevels,nLayersOfOverlap=1,
parallelPartitioningType=MeshParallelPartitioningTypes.node):
from .import cmeshTools
#blow away or just trust garbage collection
self.nLayersOfOverlap=nLayersOfOverlap;self.parallelPartitioningType=parallelPartitioningType
self.meshList = []
self.elementParents = None
self.cmultilevelMesh = None
self.meshList.append(mesh0)
self.cmultilevelMesh = cmeshTools.CMultilevelMesh(self.meshList[0].cmesh,refinementLevels)
self.buildFromC(self.cmultilevelMesh)
self.meshList[0].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
for l in range(1,refinementLevels):
self.meshList.append(NURBSMesh())
self.meshList[l].cmesh = self.cmeshList[l]
self.meshList[l].buildFromC(self.meshList[l].cmesh)
self.meshList[l].partitionMesh(nLayersOfOverlap=nLayersOfOverlap,parallelPartitioningType=parallelPartitioningType)
def refine(self):
self.meshList.append(NURBSMesh())
childrenDict = self.meshList[-1].refine(self.meshList[-2])
self.elementChildren.append(childrenDict)
def computeGeometricInfo(self):
for m in self.meshList:
m.computeGeometricInfo()
class NURBSMesh(HexahedralMesh):
"""A mesh consisting of NURBS.
"""
def __init__(self):
HexahedralMesh.__init__(self)
def generateHexahedralMeshFromRectangularGrid(self,nx,ny,nz,Lx,Ly,Lz):
        self.generateNURBSMeshFromRectangularGrid(nx,ny,nz,1,1,1,Lx,Ly,Lz)
def generateNURBSMeshFromRectangularGrid(self,nx,ny,nz,px,py,pz,Lx,Ly,Lz):
from .import cmeshTools
self.cmesh = cmeshTools.CMesh()
cmeshTools.generateNURBSMeshFromRectangularGrid(nx,ny,nz,px,py,pz,Lx,Ly,Lz,self.cmesh)
cmeshTools.allocateGeometricInfo_NURBS(self.cmesh)
cmeshTools.computeGeometricInfo_NURBS(self.cmesh)
self.buildFromC(self.cmesh)
def distance(a, b):
norm = np.linalg.norm
return norm(b - a)
def triangleVerticesToNormals(elementVertices):
"""
Given a set of vertices to a triangle, return normals and a point corresponding to each normal
"""
norm = np.linalg.norm
elementVertices = np.asarray(elementVertices)
if norm(elementVertices[:,2]) > 0:
raise ValueError("Expected triangles in 2D plane, got something else")
sets = ((0, 1), (0, 2), (1, 2))
outs = (2, 1, 0)
faces = []
rotate = np.asarray(((0., -1., 0.),
(1., 0., 0.),
(0., 0., 0.)))
for seti, out in zip(sets, outs):
vertices = elementVertices[np.array(seti,'i')]
ab = vertices[1] - vertices[0]
v_out = vertices[0] - elementVertices[out]
normal = rotate.dot(ab)
# normal should point *away* from remaining point
if normal.dot(v_out) < 0:
normal = -1*normal
faces.append((normal, vertices[0]))
return faces
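# Illustrative sketch (commented out, not executed): outward normals of the reference triangle
# lying in the z=0 plane:
#
#   verts = [[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]]
#   for normal, point in triangleVerticesToNormals(verts):
#       print(normal, point)  # each normal points away from the vertex opposite its edge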
def tetrahedronVerticesToNormals(elementVertices):
"""
Given a set of vertices to a tetrahedron, return normals and a point corresponding to each normal
"""
elementVertices = np.asarray(elementVertices)
sets = ((0, 1, 2), (0, 1, 3), (0, 2, 3), (1, 2, 3))
outs = (3, 2, 1, 0)
faces = []
for seti, out in zip(sets, outs):
vertices = elementVertices[np.array(seti,'i')]
ab = vertices[1] - vertices[0]
ac = vertices[2] - vertices[0]
normal = np.cross(ab, ac)
v_out = vertices[0] - elementVertices[out]
# normal should point *away* from remaining point
if normal.dot(v_out) < 0:
normal = -1*normal
faces.append((normal, vertices[0]))
return faces
def intersectPoints(line, points):
"""
Given a line segment (defined as two points), identify all points that the line segment intersects.
This hasn't been vectorized.
"""
a, b = line
a = np.asarray(a)
b = np.asarray(b)
distanceAB = distance(a, b)
def onAB(p):
p = np.asarray(p)
eps = 2*np.max((np.max(np.spacing(a)), np.max(np.spacing(b)), np.max(np.spacing(p))))
distancePA = distance(a, p)
distancePB = distance(p, b)
return p if abs(distancePA + distancePB - distanceAB) < eps else None
return [onAB(p) for p in points]
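# Illustrative sketch (commented out, not executed): points lying on the segment are returned,
# all others map to None:
#
#   intersectPoints(([0., 0., 0.], [1., 0., 0.]),
#                   [[0.5, 0., 0.], [2., 0., 0.]])  # -> [array([0.5, 0., 0.]), None]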
def intersectEdges(line, edges):
"""
Given a line segment (defined as two points), identify the locations of its intersections with all
given edges (defined as line segments). If the line and an edge overlap, the *furthest* point
along the line (closest to the second point) that is still on each edge is returned.
This hasn't been vectorized.
"""
norm = np.linalg.norm
def intersectEdge(line, edge):
line = np.asarray(line)
edge = np.asarray(edge)
a, b = line
c, d = edge
v_l = b - a
v_e = d - c
vl_cross_ve = np.cross(v_l, v_e)
mag_vl_cross_ve = norm(vl_cross_ve)
if mag_vl_cross_ve == 0:
# lines are parallel, check for overlap
intersects = intersectPoints(line, edge) + intersectPoints(edge, line)
# test for an intersect in intersectPoints
intersect = next((i for i in intersects if i is not None), None)
if intersect is not None:
# farthest endpoint is a, so start from there
closest_endpoint = a
closest_distance = distance(closest_endpoint, b)
# could reuse iterator from above, but it's confusing enough as it is :)
for intersect in intersects:
if intersect is None:
continue
intersect_distance = distance(intersect, b)
if intersect_distance < closest_distance:
closest_endpoint = intersect
closest_distance = intersect_distance
return closest_endpoint
else:
return None
        # lines are not parallel, check for intersection
        # if the line and the edge intersect, there is a scalar x with a + x*(b - a) on the edge;
        # that requires (c - a) x v_e to be parallel to v_l x v_e
        x_vl_cross_ve = np.cross((c - a), v_e)
        if norm(np.cross(vl_cross_ve, x_vl_cross_ve)) > 1e-8:
            return None
        # the cross products are parallel, so the lines intersect; solve for x
        x = old_div(norm(x_vl_cross_ve),norm(vl_cross_ve))
intersect = a + x*(b-a)
# and verify intersection is on the line
points = intersectPoints(line, [intersect])
assert(len(points) == 1)
return points[0]
return [intersectEdge(line, edge) for edge in edges]
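# Illustrative sketch (commented out, not executed): a crossing edge returns the crossing point,
# while a parallel overlapping edge returns the furthest overlapping point along the line:
#
#   line = ([0., 0., 0.], [2., 0., 0.])
#   intersectEdges(line, [([1., -1., 0.], [1., 1., 0.]),     # crosses at (1, 0, 0)
#                         ([0.5, 0., 0.], [1.5, 0., 0.])])   # overlaps; returns (1.5, 0, 0)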
def intersectPolyhedron(line, polyhedron):
"""
Given a line (defined as two points), identify the locations that it enters and exits the
polyhedron (defined as a collection of half-planes in three-space in normal, vertex form)
If the facets of the polyhedron are in edge form, the normal can be computed by taking the cross product of any
two non-parallel edges of the facet (in three-space). Any vertex of the facet will work.
Implementation of algorithm described here: http://geomalgorithms.com/a13-_intersect-4.html
This hasn't been vectorized.
"""
a, b = line
a, b = np.asarray(a), np.asarray(b)
if distance(a, b) == 0:
raise ValueError("Line segment must not have length 0")
v_l = b - a
t_e = 0 # location along line entering polyhedron (initial value 0)
t_l = 1 # location along line leaving polyhedron (initial value 1)
for plane in polyhedron:
n, v = plane
n, v = np.asarray(n), np.asarray(v)
ndotba = -n.dot(a - v)
d = n.dot(v_l)
if d == 0:
# the line segment is parallel to this face
if ndotba < 0:
# the line is outside the face
return None
else:
# the line is in or on the face, ignore this face
continue
t = old_div(ndotba, float(d))
if d < 0:
# segment is entering polyhedron across this facet
t_e = max(t_e, t)
if t_e > t_l:
# segment enters polyhedron after leaving, no intersection
return None
else:
# segment is exiting polyhedron across this facet
t_l = min(t_l, t)
if t_l < t_e:
# segment exits polyhedron before entering, no intersection
return None
assert(t_e <= t_l)
return [a + t_e*v_l, a + t_l*v_l]
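# Illustrative sketch (commented out, not executed): clip a segment against the unit cube given
# as (outward normal, vertex) half-space pairs; the result is the entry and exit points:
#
#   unit_cube = [((-1., 0., 0.), (0., 0., 0.)), ((1., 0., 0.), (1., 1., 1.)),
#                ((0., -1., 0.), (0., 0., 0.)), ((0., 1., 0.), (1., 1., 1.)),
#                ((0., 0., -1.), (0., 0., 0.)), ((0., 0., 1.), (1., 1., 1.))]
#   intersectPolyhedron(([-1., 0.5, 0.5], [2., 0.5, 0.5]), unit_cube)
#   # -> [array([0., 0.5, 0.5]), array([1., 0.5, 0.5])]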
def getMeshIntersections(mesh, toPolyhedron, endpoints):
"""
Return all intersections between a line segment and a Proteus mesh
:param mesh - a Proteus mesh
:param toPolyhedron - a method for converting Proteus element vertices to polyhedra in normal/point form
:param endpoints - a pair of points in 3-space defining the line segment
:return a list of pairs of intersections through the mesh
"""
intersections = set()
for element in mesh.elementNodesArray:
# map nodes to physical vertices
elementVertices = mesh.nodeArray[element]
# get plane normals
polyhedron = toPolyhedron(elementVertices)
elementIntersections = intersectPolyhedron(endpoints, polyhedron)
if elementIntersections:
if np.array_equal(elementIntersections[0], elementIntersections[1]):
continue
intersections.update(((tuple(elementIntersections[0]), tuple(elementIntersections[1])),),)
return intersections
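# Illustrative sketch (commented out, not executed): intersect a segment with a tetrahedral
# Proteus mesh; 'mesh' is an assumed, already-built mesh object:
#
#   endpoints = ((0., 0., 0.), (1., 1., 1.))
#   segments = getMeshIntersections(mesh, tetrahedronVerticesToNormals, endpoints)
#   # each entry is an (entry point, exit point) pair for one element crossed by the segment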
def runTriangle(polyfile,
baseFlags="Yp",
name = ""):
"""
    Generate triangle mesh files (.ele/.node/.edge) from a .poly file.
    Arguments
    ---------
    polyfile : str
        Filename base of the .poly input for triangle.
    baseFlags : str
        Standard triangle options for generation
    name : str
        Name of the mesh (currently unused)
"""
from subprocess import check_call
tricmd = "triangle -%s -e %s.poly" % (baseFlags, polyfile)
check_call(tricmd,shell=True)
logEvent("Done running triangle")
elefile = "%s.1.ele" % polyfile
nodefile = "%s.1.node" % polyfile
edgefile = "%s.1.edge" % polyfile
assert os.path.exists(elefile), "no 1.ele"
tmp = "%s.ele" % polyfile
os.rename(elefile,tmp)
assert os.path.exists(tmp), "no .ele"
assert os.path.exists(nodefile), "no 1.node"
tmp = "%s.node" % polyfile
os.rename(nodefile,tmp)
assert os.path.exists(tmp), "no .node"
if os.path.exists(edgefile):
tmp = "%s.edge" % polyfile
os.rename(edgefile,tmp)
assert os.path.exists(tmp), "no .edge"
def runTetgen(polyfile,
baseFlags="Yp",
name = ""):
"""
    Generate tetgen mesh files (.ele/.node/.face/.edge) from a .poly file.
    Arguments
    ---------
    polyfile : str
        Filename base of the .poly input for tetgen.
    baseFlags : str
        Standard tetgen options for generation
    name : str
        Name of the mesh (currently unused)
"""
from subprocess import check_call
tetcmd = "tetgen - %s %s.poly" % (baseFlags, polyfile)
check_call(tetcmd,shell=True)
logEvent("Done running tetgen")
elefile = "%s.1.ele" % polyfile
nodefile = "%s.1.node" % polyfile
facefile = "%s.1.face" % polyfile
edgefile = "%s.1.edge" % polyfile
assert os.path.exists(elefile), "no 1.ele"
tmp = "%s.ele" % polyfile
os.rename(elefile,tmp)
assert os.path.exists(tmp), "no .ele"
assert os.path.exists(nodefile), "no 1.node"
tmp = "%s.node" % polyfile
os.rename(nodefile,tmp)
assert os.path.exists(tmp), "no .node"
if os.path.exists(facefile):
tmp = "%s.face" % polyfile
os.rename(facefile,tmp)
assert os.path.exists(tmp), "no .face"
if os.path.exists(edgefile):
tmp = "%s.edge" % polyfile
os.rename(edgefile,tmp)
assert os.path.exists(tmp), "no .edge"
def genMeshWithTriangle(polyfile,
nbase=1):
"""
Generate a mesh from a set of triangle files.
Arguments
---------
polyfile : str
Filename base for triangle files
    nbase : int
        Base for the node/element numbering in the triangle files (0 or 1)
Returns
--------
mesh : :class:`proteus.MeshTools.TriangularMesh`
Simplex mesh
"""
elefile = "%s.ele" % polyfile
nodefile = "%s.node" % polyfile
edgefile = "%s.edge" % polyfile
assert os.path.exists(elefile), "no .ele file"
assert os.path.exists(nodefile), "no .node file"
assert os.path.exists(edgefile), "no .edge"
mesh = TriangularMesh()
mesh.generateFromTriangleFiles(polyfile,
base=nbase)
return mesh
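# Illustrative sketch (commented out, not executed): mesh an existing 'mesh.poly' with triangle
# and load the result; the file name and flags are assumptions:
#
#   runTriangle("mesh", baseFlags="VApq30Dena")
#   trimesh = genMeshWithTriangle("mesh", nbase=1)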
def genMeshWithTetgen(polyfile,
nbase=1):
"""
Generate a mesh from a set of tetgen files.
Arguments
---------
polyfile : str
Filename base for tetgen files
    nbase : int
        Base for the node/element numbering in the tetgen files (0 or 1)
Returns
--------
mesh : :class:`proteus.MeshTools.TetrahedralMesh`
Simplex mesh
"""
elefile = "%s.ele" % polyfile
nodefile = "%s.node" % polyfile
facefile = "%s.face" % polyfile
edgefile = "%s.edge" % polyfile
assert os.path.exists(elefile), "no .ele file"
assert os.path.exists(nodefile), "no .node file"
assert os.path.exists(facefile), "no .face file"
mesh=TetrahedralMesh()
mesh.generateFromTetgenFiles(polyfile,
base=nbase)
return mesh
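# Illustrative sketch (commented out, not executed): the 3D analogue of the triangle pipeline
# above, starting from an existing 'domain.poly'; file name and flags are assumptions:
#
#   runTetgen("domain", baseFlags="VApq1.35q12feena")
#   tetmesh = genMeshWithTetgen("domain", nbase=1)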
class MeshOptions(object):
"""
Mesh options for the domain
Parameters
----------
nd: 2 for 2D, 3 for 3D
"""
def __init__(self, nd=None):
self.nd = nd
self.he = 1.
self.use_gmsh = False
self.genMesh = True
self.outputFiles_name = 'mesh'
self.outputFiles = {'poly': True,
'ply': False,
'asymptote': False,
'geo': False}
self.restrictFineSolutionToAllMeshes = False
self.parallelPartitioningType = MeshParallelPartitioningTypes.node
self.nLayersOfOverlapForParallel = 1
self.triangleOptions = None # defined when setTriangleOptions called
self.nLevels = 1
self.structured = False
self.nn = None
self.nnx = None
self.nny = None
self.nnz = None
self.triangleFlag = 1
self.nd = nd
if nd is not None:
if nd == 2:
self.triangle_string = 'VApq30Dena'
if nd == 3:
self.triangle_string = 'VApq1.35q12feena'
else:
self.triangle_string = None
def setElementSize(self, he):
"""
Sets element size for uniform mesh.
Parameters
----------
he: float
mesh characteristic element size
"""
self.he = he
def setParallelPartitioningType(self, partitioning_type='node', layers_overlap=0):
"""
Changes parallel partitioning type
Parameters
----------
partitioning_type: Optional[str, int]
parallel partitioning type (default: 'node' (1))
        layers_overlap: int
            layers of overlap for parallel (default: 0)
"""
if partitioning_type == 'element' or partitioning_type == 0:
self.parallelPartitioningType = MeshParallelPartitioningTypes.element
if partitioning_type == 'node' or partitioning_type == 1:
self.parallelPartitioningType = MeshParallelPartitioningTypes.node
self.nLayersOfOverlapForParallel = layers_overlap
def setTriangleOptions(self, triangleOptions=None):
"""
        Sets the triangle options
Parameters
----------
        triangleOptions: Optional[str]
string for triangle options. If not passed, it will be
set with triangle_string attribute and 'he' value, with
default for 2D: he**2/2; default for 3D: he**3/6
"""
if triangleOptions is not None:
self.triangleOptions = triangleOptions
else:
if self.triangleOptions is None:
assert self.he is not None, 'Element size (he) must be set before setting triangle options'
assert self.triangle_string is not None, 'triangle_string must be set before setting triangle options'
if self.nd == 2:
self.triangleOptions = self.triangle_string + '%8.8f' \
% (old_div(self.he**2,2.),)
elif self.nd == 3:
self.triangleOptions = self.triangle_string + '%21.16e' \
% (old_div(self.he**3,6.),)
def setMeshGenerator(self, generator):
"""
Indicates mesh generator to use
Parameters
----------
generator: str
options: 'gmsh', 'triangle', 'tetgen'
(!) Only has an effect when setting to 'gmsh' in current
implementation (triangle is default for 2D, tetgen for 3D)
"""
generators = ['gmsh', 'triangle', 'tetgen']
assert generator in generators, 'Unknown mesh generator'
if generator == 'gmsh':
self.use_gmsh = True
else:
self.use_gmsh = False
def setOutputFiles(self, name='mesh', poly=True, ply=False, asymptote=False, geo=False):
"""
Output files to be created
Parameters
----------
name: Optional[str]
name of the mesh files (prefix) (default: 'mesh')
poly: Optional[bool]
create a poly file
ply: Optional[bool]
create a ply file
asymptote: Optional[bool]
create an asymptote file
        geo: Optional[bool]
create a geofile
"""
self.outputFiles_name = name
self.outputFiles['poly'] = poly
self.outputFiles['ply'] = ply
self.outputFiles['asymptote'] = asymptote
self.outputFiles['geo'] = geo
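# Illustrative sketch (commented out, not executed): typical configuration of MeshOptions for a
# 2D domain; the element size and generator choice are assumptions:
#
#   opts = MeshOptions(nd=2)
#   opts.setElementSize(he=0.1)
#   opts.setTriangleOptions()  # -> 'VApq30Dena' plus the max-area constraint he**2/2
#   opts.setParallelPartitioningType('node', layers_overlap=1)
#   opts.setOutputFiles(name='mesh', poly=True)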
def msh2simplex(fileprefix, nd):
"""
Converts a .msh file (Gmsh) to .ele .edge .node files (triangle).
(!) Works only with triangle elements in 2D and tetrahedral elements in 3D.
Parameters
----------
fileprefix: str
        prefix of the .msh file (e.g. 'mesh' if file called 'mesh.msh')
    nd: int
        number of space dimensions (2 or 3)
    """
assert nd == 2 or nd == 3, 'nd must be 2 or 3'
mshfile = open(fileprefix+'.msh', 'r')
nodes = []
edges_msh = []
triangles = []
tetrahedra = []
tetrahedron_nb = 0
triangle_nb = 0
edge_nb = 0
switch = None
switch_count = -1
logEvent('msh2simplex: getting nodes and elements')
for i, line in enumerate(mshfile):
if 'Nodes' in line:
switch = 'nodes'
switch_count = -1
if 'Elements' in line:
switch = 'elements'
switch_count = -1
if switch == 'nodes' and switch_count >= 0:
words = line.split()
if switch_count == 0:
node_nb = int(words[0])
else:
nid = int(words[0])
if nd == 2:
x, y, z = float(words[1]), float(words[2]), 0
elif nd == 3:
x, y, z = float(words[1]), float(words[2]), float(words[3])
nodes += [[nid, x, y, z, 0]]
if switch == 'elements' and switch_count >= 0:
words = line.split()
if switch_count == 0:
el_nb = int(words[0])
else:
el_id = int(words[0])
el_type = int(words[1])
nb_tags = int(words[2])
if nb_tags == 2:
flag = int(words[3])
else:
flag = 0
s = 3+nb_tags # starting index on words for element info
if el_type == 1: # segment
edge_nb += 1
edges_msh += [[edge_nb, int(words[s]), int(words[s+1]), flag]]
elif el_type == 2: # triangle
triangle_nb += 1
triangles += [[triangle_nb, int(words[s]), int(words[s+1]), int(words[s+2]), flag]]
# update nodes flags
if nd == 3:
for i in range(3):
if nodes[int(words[s+i])-1][4] == 0:
nodes[int(words[s+i])-1][4] = flag
elif el_type == 4: # tetrahedron
tetrahedron_nb += 1
tetrahedra += [[tetrahedron_nb, int(words[s]), int(words[s+1]), int(words[s+2]), int(words[s+3]), flag]]
elif el_type == 15: # node
nodes[el_id-1][4] = flag
switch_count += 1
mshfile.close()
# construct ALL edges with flags and add flags to nodes
edges_dict = {}
triangles = np.array(triangles)
edge_nb = 0
edges = []
logEvent('msh2simplex: constructing edges')
for triangle in triangles[:,1:4]: # take only vertices index
for i in range(len(triangle)):
edge = Edge(edgeNumber=edge_nb, nodes=[triangle[i-1], triangle[i]])
edge_exist = bool(edges_dict.get(edge.nodes))
if not edge_exist:
edge_nb += 1
edges_dict[edge.nodes] = edge
edges += [[edge_nb, edge.nodes[0], edge.nodes[1], 0]]
logEvent('msh2simplex: updating edges and nodes flags')
edges = np.array(edges)
for edge in edges_msh:
edge_nodes = [edge[1], edge[2]]
edge_nodes.sort()
edge_nodes = tuple(edge_nodes)
edge_class = edges_dict.get(edge_nodes)
edges[edge_class.N, 3] = edge[3]
# ! edge nodes are indexed from 1 with gmsh
if nodes[edge[1]-1][-1] == 0: # update node flags
nodes[edge[1]-1][-1] = edge[3]
        if nodes[edge[2]-1][-1] == 0: # update node flags
            nodes[edge[2]-1][-1] = edge[3]
if nd == 2:
logEvent('msh2simplex: writing .node .ele .edge files')
elif nd == 3:
logEvent('msh2simplex: writing .node .ele .edge .face files')
header = '{0:d} {1:d} 0 1'.format(node_nb, nd)
if nd == 2:
nodes = np.array(nodes)
nodes = np.delete(nodes, 3, 1)
fmt = ['%d', '%f', '%f', '%d']
elif nd == 3:
fmt = ['%d', '%f', '%f', '%f', '%d']
np.savetxt(fileprefix+'.node', nodes, fmt=fmt, header=header, comments='')
header = '{0:d} 1'.format(edge_nb)
np.savetxt(fileprefix+'.edge', edges, fmt='%d', header=header, comments='')
if nd == 2:
header = '{0:d} 3 1'.format(triangle_nb)
np.savetxt(fileprefix+'.ele', triangles, fmt='%d', header=header, comments='')
elif nd == 3:
        header = '{0:d} 1'.format(triangle_nb)
np.savetxt(fileprefix+'.face', triangles, fmt='%d', header=header, comments='')
header = '{0:d} 4 1'.format(tetrahedron_nb)
np.savetxt(fileprefix+'.ele', tetrahedra, fmt='%d', header=header, comments='')
logEvent('msh2simplex: finished converting .msh to simplex files')
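# Illustrative sketch (commented out, not executed): convert a gmsh mesh 'mesh.msh' to
# triangle-style files; the file prefix is an assumption:
#
#   msh2simplex('mesh', nd=2)  # writes mesh.node, mesh.ele and mesh.edge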
def generateMesh(physics,numerics,generatePartitionedMeshFromFiles=False):
try:
meshOptions = physics.domain.MeshOptions
assert(not (meshOptions.genMesh == True and
meshOptions.nn == None and
meshOptions.nnx == None and
meshOptions.nny == None and
meshOptions.nnz == None and
meshOptions.triangleOptions == None))
mlMesh = _generateMesh(physics.domain, meshOptions, generatePartitionedMeshFromFiles)
except:
meshOptions = numerics
meshOptions.genMesh = physics.genMesh
mlMesh = _generateMesh(physics.domain, meshOptions, generatePartitionedMeshFromFiles)
return mlMesh
def _generateMesh(domain,meshOptions,generatePartitionedMeshFromFiles=False):
# convenience function to generate a mesh using triangle/tetgen/gmsh
comm = Comm.get()
name = domain.name
# this is the perfect place to create a factory function which takes in an instance and outputs a corresponding mesh
# support for old-style domain input
# now generate meshes, could move to Domain and use polymorphism or MeshTools
if isinstance(domain, Domain.RectangularDomain):
if domain.nd == 1:
mlMesh = MultilevelEdgeMesh(meshOptions.nn, 1, 1,
domain.x[0], 0.0, 0.0,
domain.L[0], 1.0, 1.0,
refinementLevels=meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif domain.nd == 2:
if (meshOptions.nnx == meshOptions.nny is None):
nnx = nny = meshOptions.nn
else:
nnx = meshOptions.nnx
nny = meshOptions.nny
logEvent("Building %i x %i rectangular mesh for %s" % (nnx, nny,name))
if not hasattr(meshOptions, 'quad'):
meshOptions.quad = False
if (meshOptions.quad):
mlMesh = MultilevelQuadrilateralMesh(nnx, nny,1,
domain.x[0], domain.x[1], 0.0,
domain.L[0], domain.L[1],1,
refinementLevels=meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
fileprefix = domain.polyfile
if fileprefix is None:
fileprefix = "regular_{0}x{1}_grid".format(nnx,nny)
nbase = 1
if hasattr(meshOptions,'triangleFlag') ==True:
triangleFlag = meshOptions.triangleFlag
else:
triangleFlag = 0
if generatePartitionedMeshFromFiles:
if comm.isMaster():
globalMesh = TriangularMesh()
logEvent(Profiling.memory("Before Generating Mesh", className="NumericalSolution", memSaved=Profiling.memLast))
memBeforeMesh = Profiling.memLast
logEvent("Generating triangular mesh from regular grid")
globalMesh.generateTriangularMeshFromRectangularGrid(nnx, nny,domain.L[0],domain.L[1],triangleFlag=triangleFlag)
logEvent("Writing triangle files to {0:s}.ele, etc.".format(fileprefix))
globalMesh.writeTriangleFiles(fileprefix, nbase)
globalMesh.cmesh.deleteCMesh()
del globalMesh
import gc
gc.collect()
logEvent(Profiling.memory("After Generating Mesh", className="NumericalSolution", memSaved=memBeforeMesh))
memAfterMesh = Profiling.memLast
logEvent(Profiling.memory("After deleting mesh", className="NumericalSolution", memSaved=memAfterMesh))
comm.barrier()
logEvent(Profiling.memory("Before partitioning", className="NumericalSolution"))
memBeforePart = Profiling.memLast
logEvent("Generating partitioned mesh from Triangle files")
mesh = TriangularMesh()
mlMesh = MultilevelTriangularMesh(0,0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
mlMesh.generatePartitionedMeshFromTriangleFiles(fileprefix, nbase,mesh,meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
mlMesh.meshList[0].subdomainMesh.nodeArray[:, 0] += domain.x[0]
mlMesh.meshList[0].subdomainMesh.nodeArray[:, 1] += domain.x[1]
logEvent(Profiling.memory("After partitioning", className="NumericalSolution", memSaved=memBeforePart))
else:
mlMesh = MultilevelTriangularMesh(nnx, nny,1,
domain.x[0], domain.x[1], 0.0,
domain.L[0], domain.L[1],1,
refinementLevels=meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType,
triangleFlag=triangleFlag)
elif domain.nd == 3:
if (meshOptions.nnx == meshOptions.nny == meshOptions.nnz is None):
nnx = nny = nnz = meshOptions.nn
else:
nnx = meshOptions.nnx
nny = meshOptions.nny
nnz = meshOptions.nnz
logEvent("Building %i x %i x %i rectangular mesh for %s" % (nnx, nny,nnz,name))
if not hasattr(meshOptions,'NURBS'):
meshOptions.NURBS = False
if not hasattr(meshOptions, 'hex'):
meshOptions.hex = False
if (meshOptions.NURBS):
mlMesh = MultilevelNURBSMesh(nnx,nny,nnz,
meshOptions.px,meshOptions.py,meshOptions.pz,
domain.x[0], domain.x[1], domain.x[2],
domain.L[0], domain.L[1], domain.L[2],
refinementLevels=meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif (meshOptions.hex):
if not hasattr(meshOptions, 'px'):
meshOptions.px = 0
meshOptions.py = 0
meshOptions.pz = 0
mlMesh = MultilevelHexahedralMesh(nnx, nny, nnz,
meshOptions.px, meshOptions.py,meshOptions.pz,
domain.x[0], domain.x[1], domain.x[2],
domain.L[0], domain.L[1], domain.L[2],
refinementLevels=meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
fileprefix = domain.polyfile
if fileprefix is None:
fileprefix = "regular_{0}x{1}x{2}_grid".format(nnx,nny,nnz)
nbase = 1
if meshOptions.genMesh:
if generatePartitionedMeshFromFiles:
if comm.isMaster():
globalMesh = TetrahedralMesh()
logEvent(Profiling.memory("Before Generating Mesh", className="NumericalSolution", memSaved=Profiling.memLast))
memBeforeMesh = Profiling.memLast
logEvent("Generating tetrahedral mesh from regular grid")
globalMesh.generateTetrahedralMeshFromRectangularGrid(nnx, nny,nnz,domain.L[0],domain.L[1],domain.L[2])
logEvent("Writing tetgen files to {0:s}.ele, etc.".format(fileprefix))
globalMesh.writeTetgenFiles(fileprefix, nbase)
globalMesh.cmesh.deleteCMesh()
del globalMesh
import gc
gc.collect()
logEvent("Writing tetgen edge files to {0:s}.edge".format(fileprefix))
check_call("rm -f {0:s}.1.edge {0:s}.edge".format(fileprefix), shell=True)
check_call("tetgen -Vfeen {0:s}.ele".format(fileprefix), shell=True)
check_call("mv -f {0:s}.1.ele {0:s}.ele".format(fileprefix), shell=True)
check_call("mv -f {0:s}.1.node {0:s}.node".format(fileprefix), shell=True)
check_call("mv -f {0:s}.1.face {0:s}.face".format(fileprefix), shell=True)
check_call("mv -f {0:s}.1.neigh {0:s}.neigh".format(fileprefix), shell=True)
check_call("mv -f {0:s}.1.edge {0:s}.edge".format(fileprefix), shell=True)
logEvent(Profiling.memory("After Generating Mesh", className="NumericalSolution", memSaved=memBeforeMesh))
memAfterMesh = Profiling.memLast
logEvent(Profiling.memory("After deleting mesh", className="NumericalSolution", memSaved=memAfterMesh))
comm.barrier()
logEvent(Profiling.memory("Before partitioning", className="NumericalSolution"))
memBeforePart = Profiling.memLast
logEvent("Generating partitioned mesh from Tetgen files")
mesh = TetrahedralMesh()
mlMesh = MultilevelTetrahedralMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
mlMesh.generatePartitionedMeshFromTetgenFiles(fileprefix, nbase,mesh,meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
mlMesh.meshList[0].subdomainMesh.nodeArray[:, 0] += domain.x[0]
mlMesh.meshList[0].subdomainMesh.nodeArray[:, 1] += domain.x[1]
mlMesh.meshList[0].subdomainMesh.nodeArray[:, 2] += domain.x[2]
logEvent(Profiling.memory("After partitioning", className="NumericalSolution", memSaved=memBeforePart))
else:
mlMesh = MultilevelTetrahedralMesh(nnx, nny, nnz,
domain.x[0], domain.x[1], domain.x[2],
domain.L[0], domain.L[1], domain.L[2],
refinementLevels=meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
mesh = TetrahedralMesh()
mlMesh = MultilevelTetrahedralMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
if generatePartitionedMeshFromFiles:
logEvent("Generating partitioned mesh from Tetgen files")
mlMesh.generatePartitionedMeshFromTetgenFiles(fileprefix, nbase,mesh,meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
logEvent("Generating coarse global mesh from Tetgen files")
mesh.generateFromTetgenFiles(fileprefix, nbase,parallel = comm.size() > 1)
logEvent("Generating partitioned %i-level mesh from coarse global Tetgen mesh" % (meshOptions.nLevels,))
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif isinstance(domain, Domain.PUMIDomain):
import sys
if(comm.size() >1 and meshOptions.parallelPartitioningType!=MeshParallelPartitioningTypes.element):
sys.exit("The mesh must be partitioned by elements and NOT nodes for adaptivity functionality. Do this with: `meshOptions.setParallelPartitioningType('element')'.")
if comm.size() > 1 and n.conservativeFlux != None:
sys.exit("ERROR: Element based partitions don't have a functioning conservative flux calculation. Set conservativeFlux to None in twp_navier_stokes")
# ibaned: PUMI conversion #1
if domain.nd == 3:
mesh = TetrahedralMesh()
else:
mesh = TriangularMesh()
logEvent("Converting PUMI mesh to Proteus")
mesh.convertFromPUMI(domain, domain.AdaptManager.PUMIAdapter, domain.faceList,
domain.regList,
parallel = comm.size() > 1, dim = domain.nd)
if domain.nd == 3:
mlMesh = MultilevelTetrahedralMesh(
0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
if domain.nd == 2:
mlMesh = MultilevelTriangularMesh(
0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
logEvent("Generating %i-level mesh from PUMI mesh" % (meshOptions.nLevels,))
if comm.size() ==1:
mlMesh.generateFromExistingCoarseMesh(
mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
mlMesh.generatePartitionedMeshFromPUMI(
mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel)
elif isinstance(domain, Domain.MeshTetgenDomain):
nbase = 1
mesh = TetrahedralMesh()
logEvent("Reading coarse mesh from tetgen file")
mlMesh = MultilevelTetrahedralMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
if generatePartitionedMeshFromFiles:
logEvent("Generating partitioned mesh from Tetgen files")
mlMesh.generatePartitionedMeshFromTetgenFiles(domain.meshfile, nbase,mesh,meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
logEvent("Generating coarse global mesh from Tetgen files")
mesh.generateFromTetgenFiles(domain.polyfile, nbase,parallel = comm.size() > 1)
logEvent("Generating partitioned %i-level mesh from coarse global Tetgen mesh" % (meshOptions.nLevels,))
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif isinstance(domain, Domain.Mesh3DMDomain):
mesh = TetrahedralMesh()
logEvent("Reading coarse mesh from 3DM file")
mesh.generateFrom3DMFile(domain.meshfile)
mlMesh = MultilevelTetrahedralMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
logEvent("Generating %i-level mesh from coarse 3DM mesh" % (meshOptions.nLevels,))
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif isinstance(domain, Domain.Mesh2DMDomain):
mesh = TriangularMesh()
logEvent("Reading coarse mesh from 2DM file")
mesh.generateFrom2DMFile(domain.meshfile)
mlMesh = MultilevelTriangularMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
logEvent("Generating %i-level mesh from coarse 2DM mesh" % (meshOptions.nLevels,))
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif isinstance(domain, Domain.MeshHexDomain):
mesh = HexahedralMesh()
logEvent("Reading coarse mesh from file")
mesh.generateFromHexFile(domain.meshfile)
mlMesh = MultilevelHexahedralMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
logEvent("Generating %i-level mesh from coarse mesh" % (meshOptions.nLevels,))
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif isinstance(domain, Domain.GMSH_3D_Domain):
        from subprocess import check_call
import sys
if comm.rank() == 0 and (meshOptions.genMesh or not (os.path.exists(domain.polyfile+".ele") and
os.path.exists(domain.polyfile+".node") and
os.path.exists(domain.polyfile+".face"))):
logEvent("Running gmsh to generate 3D mesh for "+name, level=1)
gmsh_cmd = "time gmsh {0:s} -v 10 -3 -o {1:s} -format mesh -clmax {2:f}".format(domain.geofile, domain.name+".mesh", 0.5*domain.he)
logEvent("Calling gmsh on rank 0 with command %s" % (gmsh_cmd,))
check_call(gmsh_cmd, shell=True)
logEvent("Done running gmsh; converting to tetgen")
gmsh2tetgen_cmd = "gmsh2tetgen {0} {1:f} {2:d} {3:d} {4:d}".format(
domain.name+".mesh",
domain.length_scale,
domain.permute_dims[0]+1, # switch to base 1 index...
domain.permute_dims[1]+1,
domain.permute_dims[2]+1)
check_call(gmsh2tetgen_cmd, shell=True)
fileprefix = "mesh"
check_call("rm -f {0:s}.1.ele {0:s}.ele".format(fileprefix), shell=True)
check_call("rm -f {0:s}.1.node {0:s}.node".format(fileprefix), shell=True)
check_call("rm -f {0:s}.1.face {0:s}.face".format(fileprefix), shell=True)
check_call("rm -f {0:s}.1.neigh {0:s}.neigh".format(fileprefix), shell=True)
check_call("rm -f {0:s}.1.edge {0:s}.edge".format(fileprefix), shell=True)
check_call("tetgen -Vfeen %s.ele" % ("mesh",), shell=True)
check_call("mv %s.1.ele %s.ele" % ("mesh", "mesh"), shell=True)
check_call("mv %s.1.node %s.node" % ("mesh", "mesh"), shell=True)
check_call("mv %s.1.face %s.face" % ("mesh", "mesh"), shell=True)
check_call("mv %s.1.neigh %s.neigh" % ("mesh", "mesh"), shell=True)
check_call("mv %s.1.edge %s.edge" % ("mesh", "mesh"), shell=True)
elefile = "mesh.ele"
nodefile = "mesh.node"
facefile = "mesh.face"
edgefile = "mesh.edge"
assert os.path.exists(elefile), "no mesh.ele"
tmp = "%s.ele" % domain.polyfile
os.rename(elefile, tmp)
assert os.path.exists(tmp), "no .ele"
assert os.path.exists(nodefile), "no mesh.node"
tmp = "%s.node" % domain.polyfile
os.rename(nodefile, tmp)
assert os.path.exists(tmp), "no .node"
if os.path.exists(facefile):
tmp = "%s.face" % domain.polyfile
os.rename(facefile, tmp)
assert os.path.exists(tmp), "no .face"
if os.path.exists(edgefile):
tmp = "%s.edge" % domain.polyfile
os.rename(edgefile, tmp)
assert os.path.exists(tmp), "no .edge"
comm.barrier()
logEvent("Initializing mesh and MultilevelMesh")
nbase = 1
mesh = TetrahedralMesh()
mlMesh = MultilevelTetrahedralMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
if generatePartitionedMeshFromFiles:
logEvent("Generating partitioned mesh from Tetgen files")
mlMesh.generatePartitionedMeshFromTetgenFiles(domain.polyfile, nbase,mesh,meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
logEvent("Generating coarse global mesh from Tetgen files")
mesh.generateFromTetgenFiles(domain.polyfile, nbase,parallel = comm.size() > 1)
logEvent("Generating partitioned %i-level mesh from coarse global Tetgen mesh" % (meshOptions.nLevels,))
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif isinstance(domain, Domain.PlanarStraightLineGraphDomain):
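            # 2D domain described by a planar straight line graph: mesh it with
            # gmsh or triangle, then read the triangle files back and partition.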
fileprefix = None
# run mesher
if domain.use_gmsh is True:
fileprefix = domain.geofile
if comm.isMaster() and (not (os.path.exists(fileprefix+".ele") and
os.path.exists(fileprefix+".node") and
os.path.exists(fileprefix+".edge"))):
if not os.path.exists(fileprefix+".msh"):
logEvent("Running gmsh to generate 2D mesh for "+name, level=1)
gmsh_cmd = "time gmsh {0:s} -v 10 -2 -o {1:s} -format msh2".format(fileprefix+".geo", fileprefix+".msh")
logEvent("Calling gmsh on rank 0 with command %s" % (gmsh_cmd,))
check_call(gmsh_cmd, shell=True)
logEvent("Done running gmsh; converting to triangle")
else:
logEvent("Using "+fileprefix+".msh to convert to triangle")
# convert gmsh to triangle format
msh2simplex(fileprefix=fileprefix, nd=2)
else:
fileprefix = domain.polyfile
if comm.isMaster() and meshOptions.genMesh:
logEvent("Calling Triangle to generate 2D mesh for "+name)
tricmd = "triangle -{0} -e {1}.poly".format(meshOptions.triangleOptions, fileprefix)
logEvent("Calling triangle on rank 0 with command %s" % (tricmd,))
output = check_output(tricmd,shell=True)
logEvent(str(output, 'utf-8'))
logEvent("Done running triangle")
check_call("mv {0:s}.1.ele {0:s}.ele".format(fileprefix), shell=True)
check_call("mv {0:s}.1.node {0:s}.node".format(fileprefix), shell=True)
check_call("mv {0:s}.1.edge {0:s}.edge".format(fileprefix), shell=True)
comm.barrier()
assert fileprefix is not None, 'did not find mesh file name'
# convert mesh to proteus format
nbase = 1
mesh = TriangularMesh()
mlMesh = MultilevelTriangularMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
logEvent("Generating %i-level mesh from coarse Triangle mesh" % (meshOptions.nLevels,))
if generatePartitionedMeshFromFiles:
logEvent("Generating partitioned mesh from Triangle files")
mlMesh.generatePartitionedMeshFromTriangleFiles(fileprefix, nbase,mesh,meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
mesh.generateFromTriangleFiles(filebase=fileprefix,
base=1)
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
elif isinstance(domain, Domain.PiecewiseLinearComplexDomain):
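            # 3D piecewise linear complex: mesh it with gmsh or tetgen, then
            # construct and partition the multilevel tetrahedral mesh.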
import sys
if domain.use_gmsh is True:
fileprefix = domain.geofile
else:
fileprefix = domain.polyfile
if comm.rank() == 0 and (meshOptions.genMesh or (
not (os.path.exists(fileprefix+".ele") and
os.path.exists(fileprefix+".node") and
os.path.exists(fileprefix+".face")))):
if domain.use_gmsh is True:
if not os.path.exists(fileprefix+".msh"):
logEvent("Running gmsh to generate 3D mesh for "+name, level=1)
gmsh_cmd = "time gmsh {0:s} -v 10 -3 -o {1:s} -format msh2".format(fileprefix+'.geo', domain.geofile+'.msh')
logEvent("Calling gmsh on rank 0 with command %s" % (gmsh_cmd,))
check_call(gmsh_cmd, shell=True)
logEvent("Done running gmsh; converting to tetgen")
else:
logEvent("Using "+domain.geofile+".msh to convert to tetgen")
msh2simplex(fileprefix=fileprefix, nd=3)
check_call("tetgen -Vfeen {0:s}.ele".format(fileprefix), shell=True)
else:
logEvent("Running tetgen to generate 3D mesh for "+name, level=1)
check_call("rm -f {0:s}.ele".format(fileprefix), shell=True)
check_call("rm -f {0:s}.node".format(fileprefix), shell=True)
check_call("rm -f {0:s}.face".format(fileprefix), shell=True)
check_call("rm -f {0:s}.neigh".format(fileprefix), shell=True)
check_call("rm -f {0:s}.edge".format(fileprefix), shell=True)
tetcmd = "tetgen -{0} {1}.poly".format(meshOptions.triangleOptions, fileprefix)
logEvent("Calling tetgen on rank 0 with command %s" % (tetcmd,))
check_call(tetcmd, shell=True)
logEvent("Done running tetgen")
check_call("mv {0:s}.1.ele {0:s}.ele".format(fileprefix), shell=True)
check_call("mv {0:s}.1.node {0:s}.node".format(fileprefix), shell=True)
check_call("mv {0:s}.1.face {0:s}.face".format(fileprefix), shell=True)
try:
check_call("mv {0:s}.1.neigh {0:s}.neigh".format(fileprefix), shell=True)
except:
logEvent("Warning: couldn't move {0:s}.1.neigh".format(fileprefix))
pass
try:
check_call("mv {0:s}.1.edge {0:s}.edge".format(fileprefix), shell=True)
except:
logEvent("Warning: couldn't move {0:s}.1.edge".format(fileprefix))
pass
comm.barrier()
logEvent("Initializing mesh and MultilevelMesh")
nbase = 1
mesh = TetrahedralMesh()
mlMesh = MultilevelTetrahedralMesh(0, 0,0,skipInit=True,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
if generatePartitionedMeshFromFiles:
logEvent("Generating partitioned mesh from Tetgen files")
if("f" not in meshOptions.triangleOptions or "ee" not in meshOptions.triangleOptions):
sys.exit("ERROR: Remake the mesh with the `f` flag and `ee` flags in triangleOptions.")
mlMesh.generatePartitionedMeshFromTetgenFiles(fileprefix, nbase,mesh,meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
else:
logEvent("Generating coarse global mesh from Tetgen files")
mesh.generateFromTetgenFiles(fileprefix, nbase,parallel = comm.size() > 1)
logEvent("Generating partitioned %i-level mesh from coarse global Tetgen mesh" % (meshOptions.nLevels,))
mlMesh.generateFromExistingCoarseMesh(mesh, meshOptions.nLevels,
nLayersOfOverlap=meshOptions.nLayersOfOverlapForParallel,
parallelPartitioningType=meshOptions.parallelPartitioningType)
meshOptions.genMesh = False
return mlMesh
| mit | -1,103,119,450,410,455,700 | 47.195437 | 233 | 0.561488 | false | 3.853686 | false | false | false |
3rduncle/knowledgeflow | knowledgeflow/utility/qa_utils.py | 1 | 12373 | #coding:utf8
import sys
import re
import logging
import random
import itertools
from keras.preprocessing import sequence
from utility import build_vocab
from letor_metrics import ndcg_score
import numpy as np
def extract_sentences(fname):
sentences = []
start_r = re.compile('<\d+>')
end_r = re.compile('</\d+>')
    phrase = []
    hit = False
    for line in open(fname):
line = line.rstrip('\n')
if start_r.match(line):
phrase = []
hit = True
continue
elif end_r.match(line):
sentences.append(' '.join(phrase))
phrase = []
continue
elif not line:
hit = True
continue
else:
pass
if hit:
hit = False
phrase.append(line)
return sentences
def generate_neg(question, answer):
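    # Build negative QA pairs by randomly re-pairing questions with answers
    # taken from different rows of the corpus.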
qsize = len(question)
asize = len(answer)
assert qsize == asize
neg_q = []
neg_a = []
for i in xrange(qsize):
while True:
qindex = random.randint(0, qsize - 1)
aindex = random.randint(0, asize - 1)
if qindex != aindex and question[qindex] != question[aindex]:
break
neg_q.append(question[qindex])
neg_a.append(answer[aindex])
return neg_q, neg_a
def extract_qapair(fname, index=[0,1,2]):
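    # Read tab-separated rows and return (questions, answers, labels); `index`
    # gives the column positions of the question, answer and label fields.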
questions = []
answers = []
labels = []
for line in open(fname):
line = line.rstrip('\n')
terms = line.split('\t')
question = terms[index[0]]
answer = terms[index[1]]
label = terms[index[2]]
questions.append(map(lambda x:x.lower(), question.split()))
answers.append(map(lambda x:x.lower(), answer.split()))
labels.append(int(label))
return questions, answers, labels
def extract_balanced_qapair(fname):
questions = []
answers = []
labels = []
for line in open(fname):
line = line.rstrip('\n')
        url, question, answer1, score1, answer2, score2 = line.split('\t')
question = map(lambda x:x.lower(), question.split())
questions.append(question)
answers.append(map(lambda x:x.lower(), answer1.split()))
labels.append(1)
questions.append(question)
answers.append(map(lambda x:x.lower(), answer2.split()))
labels.append(0)
return questions, answers, labels
# data format: url \t question \t answer \t label
# total C{2,n}/2 pairwise
def extract_ranked_qapair(fname, shuffle=True):
entries = {}
for line in open(fname):
line = line.rstrip('\n')
url, question, answer, label = line.split('\t')
question = map(lambda x:x.lower(), question.split())
        answer = map(lambda x: x.lower(), answer.split())
entry = entries.setdefault(url, {})
entry.setdefault('question', question)
labels = entry.setdefault('label', {})
labels.setdefault(label, []).append(answer)
question_answer_pair = []
for url, entry in entries.items():
question = entry['question']
labels = entry['label']
keys = labels.keys()
assert len(keys) > 1
keys = sorted(keys, reverse=False)
label_pair = zip(keys, keys[1:])
for high, low in label_pair:
for ans1, ans2 in itertools.product(labels[high], labels[low]):
question_answer_pair.append((question, ans1, ans2))
if shuffle:
random.shuffle(question_answer_pair)
questions = []
answers = []
for question, ans1, ans2 in question_answer_pair:
questions.append(question)
answers.append(ans1)
questions.append(question)
answers.append(ans2)
return questions, answers, [1] * len(questions)
class QaPairs(object):
def __init__(self, path, loader=extract_qapair):
self.xq_data, self.xa_data, self.labels = loader(path)
self.qmax = len(max(self.xq_data, key=lambda x:len(x)))
self.amax = len(max(self.xa_data, key=lambda x:len(x)))
self.questions = {}
self.pos2neg = {}
self.neg2pos = {}
self.makeQuestions()
def makeQuestions(self):
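        # Group samples by question text and, for every positive sample, record
        # the indices of its negatives (and vice versa) for later sampling.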
for idx, (question, answer, label) in enumerate(zip(self.xq_data, self.xa_data, self.labels)):
entry = self.questions.setdefault(' '.join(question), {})
entry.setdefault('idx', []).append(idx)
entry.setdefault('label', []).append(label)
entry.setdefault('answer', []).append(' '.join(answer))
for _, entry in self.questions.items():
pos, neg = [], []
for idx, label in zip(entry['idx'], entry['label']):
if label == 1:
pos.append(idx)
else:
neg.append(idx)
for idx in pos:
self.pos2neg[idx] = neg
for idx in neg:
self.neg2pos[idx] = pos
def build(self, vocabulary, q_length, a_length):
self.xq_data = [map(lambda x: vocabulary[x], terms) for terms in self.xq_data]
self.xa_data = [map(lambda x: vocabulary[x], terms) for terms in self.xa_data]
self.xq_np = sequence.pad_sequences(self.xq_data, maxlen = q_length)
self.xa_np = sequence.pad_sequences(self.xa_data, maxlen = a_length)
self.y_np = np.array(self.labels)
self.built = True
def shuffle(self):
idx = np.arange(self.xq_np.shape[0])
random.shuffle(idx)
self.xq_np = self.xq_np[idx]
self.xa_np = self.xa_np[idx]
self.y_np = self.y_np[idx]
def sampling(self, batch = None):
assert self.built
if not batch:
yield self.xq_np, self.xa_np, self.y_np
return
total = self.xq_np.shape[0]
batches = total / batch + 1
for i in xrange(batches):
start = i * batch
end = (i + 1) * batch
yield self.xq_np[start:end], self.xa_np[start:end], self.y_np[start:end]
return
class QaPairsTrain(QaPairs):
def __init__(self, path, **kvargs):
super(QaPairsTrain, self).__init__(path, **kvargs)
def partiteSamples(self):
self.idx_neg = self.y_np == 0
self.idx_pos = self.y_np == 1
self.xq_np_neg, self.xq_np_pos = self.xq_np[self.idx_neg], self.xq_np[self.idx_pos]
self.xa_np_neg, self.xa_np_pos = self.xa_np[self.idx_neg], self.xa_np[self.idx_pos]
self.y_np_neg, self.y_np_pos = self.y_np[self.idx_neg], self.y_np[self.idx_pos]
self.isPartited = True
def underSampling(self):
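        # Balance classes by sampling as many negatives as there are positives.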
assert self.isPartited
idx = np.arange(self.xq_np_neg.shape[0])
idx = np.random.choice(idx, self.xq_np_pos.shape[0])
xq_epoch = np.concatenate((self.xq_np_pos, self.xq_np_neg[idx]))
xa_epoch = np.concatenate((self.xa_np_pos, self.xa_np_neg[idx]))
y_epoch = np.concatenate((self.y_np_pos, self.y_np_neg[idx]))
return xq_epoch, xa_epoch, y_epoch
def pairwiseSampling(self, batch = None):
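        # Yield interleaved (positive, negative) answer pairs for the same
        # question, as expected by a pairwise ranking loss.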
assert self.isPartited
candidate = np.arange(self.xq_np.shape[0])[self.idx_pos]
random.shuffle(candidate)
posids = []
negids = []
for pidx in candidate:
neg = self.pos2neg[pidx]
if not neg: continue
nidx = np.random.choice(neg)
posids.append(pidx)
negids.append(nidx)
pairs = len(posids)
total = pairs * 2
qshape = list(self.xq_np.shape)
ashape = list(self.xa_np.shape)
qshape[0] = total
ashape[0] = total
xq_epoch = np.zeros(qshape)
xa_epoch = np.zeros(ashape)
xq_epoch[0::2] = self.xq_np[posids]
xq_epoch[1::2] = self.xq_np[posids]
xa_epoch[0::2] = self.xa_np[posids]
xa_epoch[1::2] = self.xa_np[negids]
y_epoch = np.array([1,0] * pairs)
if not batch:
yield xq_epoch, xa_epoch, y_epoch
return
batches = total / batch + 1
for i in xrange(batches):
start = i * batch
end = (i + 1) * batch
yield xq_epoch[start:end], xa_epoch[start:end], y_epoch[start:end]
return
class QaPairsTest(QaPairs):
def __init__(self, path, **kvargv):
super(QaPairsTest, self).__init__(path, **kvargv)
self.last_predict = {}
def label_ranking_average_precision_score(self, predictor, batch_size=50):
from sklearn.metrics import label_ranking_average_precision_score
        # compute the predictions
p = []
for xq_batch, xa_batch, _ in super(QaPairsTest, self).sampling(batch_size):
delta = predictor(xq_batch, xa_batch)
p += delta[0].tolist()
p = np.array(p)
        # select the samples that can actually be evaluated:
        # 1. a question with no positive answer has no computable score
        # 2. a question with no negative answer makes the score meaningless
map_record = []
skip1 = 0
skip2 = 0
for question, entry in self.questions.items():
idx = np.array(entry['idx'])
if self.y_np[idx].max() == 0:
skip1 += 1
continue
if self.y_np[idx].min() != 0:
skip2 += 1
#continue
score = p[idx].reshape(idx.shape).tolist()
map = label_ranking_average_precision_score(np.array([entry['label']]), np.array([score]))
map_record.append(map)
logging.info('Skip1 %d Skip2 %d' % (skip1, skip2))
return np.array(map_record).mean()
def label_ranking_average_precision_score2(self, model, batch_size=50):
def label_ranking_average_precision_score(label, score):
assert len(label) == len(score)
data = zip(label, score)
data = sorted(data, key=lambda x:x[1],reverse=True)
count = 0.0
values = []
for i in range(len(data)):
if data[i][0]:
count += 1
values.append(count / (i + 1))
assert len(values)
return sum(values) / count, values[0]
p = model.predict(
{'q_input': self.xq_np, 'a_input':self.xa_np},
batch_size=batch_size
)
map_record = []
for question, entry in self.questions.items():
idx = np.array(entry['idx'])
if self.y_np[idx].max() == 0:
continue
score = p[idx].reshape(idx.shape).tolist()
map, _ = label_ranking_average_precision_score(entry['label'], score)
map_record.append(map)
self.saveResult(question, map, score)
map = np.array(map_record).mean()
self.saveResult('__TOTAL_MAP__', map)
return map
def ndcg_score(self, model, k=10, batch_size=50):
p = model.predict(
{'q_input': self.xq_np, 'a_input':self.xa_np},
batch_size=batch_size
)
records = []
for question, entry in self.questions.items():
idx = np.array(entry['idx'])
if self.y_np[idx].max() == 0:
continue
score = p[idx].reshape(idx.shape).tolist()
record = ndcg_score(entry['label'], score, k=k)
records.append(record)
self.saveResult(question, record, score)
result = np.array(records).mean()
self.saveResult('__TOTAL_RESULT__', result)
return result
def saveResult(self, question, map, score=None):
entry = self.last_predict.setdefault(question, {})
entry['map'] = map
if score:
entry['score'] = score
def dumpResult(self, path):
with open(path, 'w') as f:
entry = self.last_predict['__TOTAL_MAP__']
print >>f, '%s\tNULL\t%f' % ('__TOTAL_MAP__', entry['map'])
for question, entry in self.questions.items():
answers = entry['answer']
predict = self.last_predict.get(question)
if not predict:
continue
for answer, label, score in zip(answers, entry['label'], predict['score']):
print >>f,'%s\t%s\t%d\t%f' % (question, answer, label, score)
if __name__ == '__main__':
    a = extract_sentences('./data/qg/train.answer')
    b = extract_sentences('./data/qg/train.question')
c, d = generate_neg(a, b)
print len(a), len(b), len(c), len(d)
| mit | 179,556,366,745,157,000 | 35.090909 | 102 | 0.550012 | false | 3.444444 | false | false | false |
Lemma1/MAC-POSTS | doc_builder/sphinx-contrib/swf/sphinxcontrib/swf/__init__.py | 2 | 7134 | '''
sphinxcontrib.swf
~~~~~~~~~~~~~~~~~
This module provides :rst:directive:`swf`, which you can use to embed
flash objects into your documentation.
'''
from docutils import nodes
from docutils.parsers.rst import Directive, directives
import os, sys, re, shutil
__version__ = '0.3'
def bool_option(argument):
return directives.choice(argument,
('yes', 'no', 'true', 'false', '0', '1'))
def quality_option(argument):
return directives.choice(argument,
('low', 'autolow', 'autohigh', 'medium', 'high', 'best'))
def scale_option(argument):
return directives.choice(argument,
('default', 'noborder', 'exactfit', 'noscale'))
def align_option(argument):
return directives.choice(argument,
('l', 'r', 't'))
def salign_option(argument):
return directives.choice(argument,
('l', 'r', 't', 'tl', 'tr'))
def wmode_option(argument):
return directives.choice(argument,
('window', 'direct', 'opaque', 'transparent', 'gpu'))
color_re = re.compile('#[A-Fa-f0-9]{6}')
def color_option(argument):
argument = argument.strip()
if not color_re.match(argument):
raise ValueError("color must have form #HHHHHH where H is a hexvalue")
return argument
def aspectratio_option(argument):
return directives.choice(argument, ('portrait', 'landscape'))
class swf(nodes.General, nodes.Inline, nodes.Element): pass
# http://helpx.adobe.com/flash/kb/flash-object-embed-tag-attributes.html
FLASH_PARAMS = {
'width': directives.nonnegative_int,
'height': directives.nonnegative_int,
'loop': bool_option,
'menu': bool_option,
'width': directives.length_or_percentage_or_unitless,
'height': directives.length_or_percentage_or_unitless,
'play': bool_option,
'quality': quality_option,
'scale': scale_option,
'align': align_option,
'salign': salign_option,
'wmode': wmode_option,
'bgcolor': color_option,
'base': directives.uri,
'allowFullScreen': bool_option,
'allowfullscreen': bool_option,
'fullScreenAspectRatio': aspectratio_option,
'fullscreenaspectratio': aspectratio_option,
'flashvars': directives.unchanged
}
class ShockWaveFlash(Directive):
'''This directive handles flash content.
Example::
.. swf:: path/to/file.swf
:width:
:height:
:allowfullscreen: true
:class:
:zoom-to-fit: yes
'''
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = dict({
'class': directives.class_option,
'zoom-to-fit': bool_option,
},
**FLASH_PARAMS)
has_content = False
def run(self):
env = self.state.document.settings.env
if 'width' not in self.options:
self.options['width'] = env.config.swf_width_default
if 'height' not in self.options:
self.options['height'] = env.config.swf_height_default
if 'zoom-to-fit' not in self.options:
self.options['zoom-to-fit'] = env.config.swf_zoom_to_fit_default
if 'allowfullscreen' not in self.options and \
'allowFullScreen' not in self.options:
self.options['allowfullscreen'] = env.config.swf_allowfullscreen_default
for opt in self.options:
typ = self.option_spec.get(opt, None)
if typ is bool_option:
if self.options[opt].lower() in ('yes', 'true', '1'):
self.options[opt] = True
else:
self.options[opt] = False
if 'allowfullscreen' in self.options:
self.options['allowFullScreen'] = self.options['allowfullscreen']
del self.options['allowfullscreen']
if 'fullscreenaspectratio' in self.options:
            self.options['fullScreenAspectRatio'] = \
                self.options['fullscreenaspectratio']
            del self.options['fullscreenaspectratio']
reference = directives.uri(self.arguments[0])
self.options['uri'] = reference
env.config.swf_flash_files.append(reference)
return [ swf(rawsource=self.block_text, **self.options) ]
def html_visit_swf(self, node):
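    # Render the swf node as nested <object> tags: the outer one targets IE
    # via classid/params, the inner one is the standards-compliant fallback.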
result = ''
width = node['width']
height = node['height']
src = self.attval(node['uri'])
params = ''
for k in node.attlist():
if k in FLASH_PARAMS:
val = node[k]
if val is True or val is False: val = str(val).lower()
params += '<param name="%s" value="%s">\n' % (k, self.attval(val))
classes = list(node['classes'])
if len(classes):
result += '<span class="sphinxcontrib-swf %s">'%' '.join(classes)
else:
result += '<span class="sphinxcontrib-swf">'
# zoom-to-fit onload event of object, for now only for non-IE browsers
zoom_to_fit = ''
if node['zoom-to-fit']:
classes.append('swf-zoom-to-fit')
attrs = ''
if classes:
attrs += ' class="%s"'%' '.join(classes)
result += '<object%s classid="clsid:D27CDB6E-AE6D-11cf-' \
'96B8-444553540000" width="%s" height="%s">\n' \
% (attrs, width, height)
result += '<param name="src" value="%s">\n' % src
result += params
result += '''<!--[if !IE]>-->
<object
type="application/x-shockwave-flash"
data="%s"
width="%s"
height="%s"
>''' %(src, width, height)
result += params
result += '''
<!--<![endif]-->
<span>
<a href="http://www.adobe.com/go/getflashplayer"><img
src="http://www.adobe.com/images/shared/download_buttons/get_flash_player.gif" alt="Get Adobe Flash player"></a>
</span>
<!--[if !IE]>-->
</object>
<!--<![endif]-->
</object>
</span>'''
self.body.append(result)
raise nodes.SkipNode
def on_builder_inited(app):
app.config.html_static_path.append( os.path.relpath(
os.path.join(os.path.dirname(__file__), 'javascript'),
app.confdir
))
app.config.swf_flash_files[:] = []
def on_html_collect_pages(app):
for f in app.config.swf_flash_files:
src = os.path.join(app.srcdir, f)
if os.path.exists(src):
shutil.copy(src, os.path.join(app.builder.outdir, f))
return []
def setup(app):
app.add_config_value('swf_zoom_to_fit_default', 'yes', 'html')
app.add_config_value('swf_allowfullscreen_default', 'yes', 'html')
app.add_config_value('swf_width_default', 400, 'html')
app.add_config_value('swf_height_default', 300, 'html')
# for internal use
app.add_config_value('swf_flash_files', [], 'html')
app.connect('builder-inited', on_builder_inited)
app.connect('html-collect-pages', on_html_collect_pages)
sys.stderr.write("path: %s\n" % app.config.html_static_path)
app.add_javascript('swf_zoom_to_fit.js')
app.add_directive('swf', ShockWaveFlash)
app.add_node(swf, html=(html_visit_swf, None))
| mit | -3,679,307,147,774,893,600 | 30.017391 | 133 | 0.589851 | false | 3.549254 | true | false | false |
bluenote10/SimpleLanguageBenchmarks | bench_suite/specs.py | 1 | 3588 | #!/usr/bin/env python
from __future__ import division, print_function
import re
import subprocess
import platform
from .utils import print_warn
def match_line_in_text(text, pattern):
for line in text.split("\n"):
m = re.match(pattern, line)
if m is not None:
return m.group(1)
return None
def match_line_from_file(filename, pattern):
text = open(filename).read()
return match_line_in_text(text, pattern)
def match_line_from_command(command, pattern):
p = subprocess.Popen(
[command],
stdout=subprocess.PIPE,
)
stdout, stderr = p.communicate()
return match_line_in_text(stdout, pattern)
def secure_execution(func, label):
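    # Run a single spec probe, returning a placeholder string (and logging a
    # warning) when the underlying command or file is unavailable.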
try:
result = func()
if result is not None:
return result
else:
return "failed to determine"
except Exception as exc:
print_warn("Failed to get information for '{}':".format(label))
print(exc)
return "failed to determine"
def get_system_specs():
def get_mem():
mem_total_kB = float(match_line_from_file('/proc/meminfo', 'MemTotal:\s+(\d+)'))
if mem_total_kB is not None:
return "{:.1f} MB".format(mem_total_kB / 1024)
def get_distribution():
return match_line_from_file("/etc/lsb-release", 'DISTRIB_DESCRIPTION="(.*)"')
def get_cpu_model():
return match_line_from_file("/proc/cpuinfo", 'model name\s+:\s+(.*)')
def get_cpu_cores():
return match_line_from_file("/proc/cpuinfo", 'cpu cores\s+:\s+(.*)')
def get_cpu_cache_size_l1d():
return match_line_from_command("lscpu", 'L1d cache:\s+(.*)')
def get_cpu_cache_size_l1i():
return match_line_from_command("lscpu", 'L1i cache:\s+(.*)')
def get_cpu_cache_size_l2():
return match_line_from_command("lscpu", 'L2 cache:\s+(.*)')
def get_cpu_cache_size_l3():
return match_line_from_command("lscpu", 'L3 cache:\s+(.*)')
spec_getters = [
("OS", platform.system),
("Distribution", get_distribution),
("Kernel", platform.release),
("CPU", get_cpu_model),
("Number of cores", get_cpu_cores),
("L1 data cache size", get_cpu_cache_size_l1d),
("L1 instruction cache size", get_cpu_cache_size_l1i),
("L2 cache size", get_cpu_cache_size_l2),
("L3 cache size", get_cpu_cache_size_l3),
("Memory", get_mem)
]
specs = [
(label, secure_execution(func, label))
for label, func in spec_getters
]
return specs
def get_line_from_command(command, lineno=0):
p = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
)
stdout, stderr = p.communicate()
# some programs write version info to stderr
if stdout == '':
stdout = stderr
lines = stdout.split("\n")
return lines[lineno]
def get_software_specs():
spec_getters = [
("GCC", lambda: get_line_from_command("gcc --version")),
("Clang", lambda: get_line_from_command("clang++-3.8 --version")),
("JVM", lambda: get_line_from_command("java -version", 1)),
("Python", lambda: get_line_from_command("python --version")),
("Go", lambda: get_line_from_command("go version")),
("Rust", lambda: get_line_from_command("rustc --version")),
("Nim", lambda: get_line_from_command("nim --version")),
]
specs = [
(label, secure_execution(func, label))
for label, func in spec_getters
]
return specs
| mit | 792,032,795,738,131,300 | 27.47619 | 88 | 0.58835 | false | 3.480116 | false | false | false |
yaricom/brainhash | src/cA.py | 1 | 9264 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
The contractive autoencoder implementation taken from Deeplearning Theano
tutorial, see: https://github.com/lisa-lab/DeepLearningTutorials/blob/master/code/cA.py
Copyright (c) 2010--2015, Deep Learning Tutorials Development Team
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Theano nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import numpy
import theano
import theano.tensor as T
class cA(object):
""" Contractive Auto-Encoder class (cA)
The contractive autoencoder tries to reconstruct the input with an
additional constraint on the latent space. With the objective of
obtaining a robust representation of the input space, we
regularize the L2 norm(Froebenius) of the jacobian of the hidden
representation with respect to the input. Please refer to Rifai et
al.,2011 for more details.
If x is the input then equation (1) computes the projection of the
input into the latent space h. Equation (2) computes the jacobian
of h with respect to x. Equation (3) computes the reconstruction
of the input, while equation (4) computes the reconstruction
error and the added regularization term from Eq.(2).
.. math::
h_i = s(W_i x + b_i) (1)
J_i = h_i (1 - h_i) * W_i (2)
x' = s(W' h + b') (3)
L = -sum_{k=1}^d [x_k \log x'_k + (1-x_k) \log( 1-x'_k)]
+ lambda * sum_{i=1}^d sum_{j=1}^n J_{ij}^2 (4)
"""
def __init__(self, numpy_rng, input=None, n_visible=784, n_hidden=100,
n_batchsize=1, W=None, bhid=None, bvis=None):
"""Initialize the cA class by specifying the number of visible units
(the dimension d of the input), the number of hidden units (the
dimension d' of the latent or hidden space) and the contraction level.
The constructor also receives symbolic variables for the input, weights
and bias.
:type numpy_rng: numpy.random.RandomState
:param numpy_rng: number random generator used to generate weights
:type theano_rng: theano.tensor.shared_randomstreams.RandomStreams
:param theano_rng: Theano random generator; if None is given
one is generated based on a seed drawn from `rng`
:type input: theano.tensor.TensorType
:param input: a symbolic description of the input or None for
standalone cA
:type n_visible: int
:param n_visible: number of visible units
:type n_hidden: int
:param n_hidden: number of hidden units
:type n_batchsize int
:param n_batchsize: number of examples per batch
:type W: theano.tensor.TensorType
:param W: Theano variable pointing to a set of weights that should be
shared belong the dA and another architecture; if dA should
be standalone set this to None
:type bhid: theano.tensor.TensorType
:param bhid: Theano variable pointing to a set of biases values (for
hidden units) that should be shared belong dA and another
architecture; if dA should be standalone set this to None
:type bvis: theano.tensor.TensorType
:param bvis: Theano variable pointing to a set of biases values (for
visible units) that should be shared belong dA and another
architecture; if dA should be standalone set this to None
"""
self.n_visible = n_visible
self.n_hidden = n_hidden
self.n_batchsize = n_batchsize
# note : W' was written as `W_prime` and b' as `b_prime`
if not W:
# W is initialized with `initial_W` which is uniformely sampled
# from -4*sqrt(6./(n_visible+n_hidden)) and
# 4*sqrt(6./(n_hidden+n_visible))the output of uniform if
# converted using asarray to dtype
# theano.config.floatX so that the code is runable on GPU
initial_W = numpy.asarray(
numpy_rng.uniform(
low=-4 * numpy.sqrt(6. / (n_hidden + n_visible)),
high=4 * numpy.sqrt(6. / (n_hidden + n_visible)),
size=(n_visible, n_hidden)
),
dtype=theano.config.floatX
)
W = theano.shared(value=initial_W, name='W', borrow=True)
if not bvis:
bvis = theano.shared(value=numpy.zeros(n_visible,
dtype=theano.config.floatX),
borrow=True)
if not bhid:
bhid = theano.shared(value=numpy.zeros(n_hidden,
dtype=theano.config.floatX),
name='b',
borrow=True)
self.W = W
# b corresponds to the bias of the hidden
self.b = bhid
# b_prime corresponds to the bias of the visible
self.b_prime = bvis
# tied weights, therefore W_prime is W transpose
self.W_prime = self.W.T
# if no input is given, generate a variable representing the input
if input is None:
# we use a matrix because we expect a minibatch of several
# examples, each example being a row
self.x = T.dmatrix(name='input')
else:
self.x = input
self.params = [self.W, self.b, self.b_prime]
def get_hidden_values(self, input):
""" Computes the values of the hidden layer """
return T.nnet.sigmoid(T.dot(input, self.W) + self.b)
def get_jacobian(self, hidden, W):
"""Computes the jacobian of the hidden layer with respect to
the input, reshapes are necessary for broadcasting the
element-wise product on the right axis
"""
return T.reshape(hidden * (1 - hidden),
(self.n_batchsize, 1, self.n_hidden)) * T.reshape(
W, (1, self.n_visible, self.n_hidden))
def get_reconstructed_input(self, hidden):
"""Computes the reconstructed input given the values of the
hidden layer
"""
return T.nnet.sigmoid(T.dot(hidden, self.W_prime) + self.b_prime)
def get_cost_updates(self, contraction_level, learning_rate):
""" This function computes the cost and the updates for one trainng
step of the cA """
y = self.get_hidden_values(self.x)
z = self.get_reconstructed_input(y)
J = self.get_jacobian(y, self.W)
# note : we sum over the size of a datapoint; if we are using
# minibatches, L will be a vector, with one entry per
# example in minibatch
self.L_rec = - T.sum(self.x * T.log(z) +
(1 - self.x) * T.log(1 - z),
axis=1)
# Compute the jacobian and average over the number of samples/minibatch
        self.L_jacob = T.sum(J ** 2) / self.n_batchsize
# note : L is now a vector, where each element is the
# cross-entropy cost of the reconstruction of the
# corresponding example of the minibatch. We need to
# compute the average of all these to get the cost of
# the minibatch
cost = T.mean(self.L_rec) + contraction_level * T.mean(self.L_jacob)
# compute the gradients of the cost of the `cA` with respect
# to its parameters
gparams = T.grad(cost, self.params)
# generate the list of updates
updates = []
for param, gparam in zip(self.params, gparams):
updates.append((param, param - learning_rate * gparam))
return (cost, updates) | gpl-3.0 | 4,133,111,761,884,029,000 | 46.757732 | 87 | 0.611939 | false | 4.197553 | false | false | false |
googleapis/googleapis-gen | google/cloud/asset/v1p7beta1/asset-v1p7beta1-py/google/cloud/asset_v1p7beta1/types/asset_service.py | 1 | 14878 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.cloud.asset.v1p7beta1',
manifest={
'ContentType',
'ExportAssetsRequest',
'ExportAssetsResponse',
'OutputConfig',
'OutputResult',
'GcsOutputResult',
'GcsDestination',
'BigQueryDestination',
'PartitionSpec',
},
)
class ContentType(proto.Enum):
r"""Asset content type."""
CONTENT_TYPE_UNSPECIFIED = 0
RESOURCE = 1
IAM_POLICY = 2
ORG_POLICY = 4
ACCESS_POLICY = 5
RELATIONSHIP = 7
class ExportAssetsRequest(proto.Message):
r"""Export asset request.
Attributes:
parent (str):
Required. The relative name of the root
asset. This can only be an organization number
(such as "organizations/123"), a project ID
(such as "projects/my-project-id"), or a project
number (such as "projects/12345"), or a folder
number (such as "folders/123").
read_time (google.protobuf.timestamp_pb2.Timestamp):
Timestamp to take an asset snapshot. This can
only be set to a timestamp between the current
time and the current time minus 35 days
(inclusive). If not specified, the current time
will be used. Due to delays in resource data
collection and indexing, there is a volatile
window during which running the same query may
get different results.
asset_types (Sequence[str]):
A list of asset types to take a snapshot for. For example:
"compute.googleapis.com/Disk".
Regular expressions are also supported. For example:
- "compute.googleapis.com.*" snapshots resources whose
asset type starts with "compute.googleapis.com".
- ".*Instance" snapshots resources whose asset type ends
with "Instance".
- ".*Instance.*" snapshots resources whose asset type
contains "Instance".
See `RE2 <https://github.com/google/re2/wiki/Syntax>`__ for
all supported regular expression syntax. If the regular
expression does not match any supported asset type, an
INVALID_ARGUMENT error will be returned.
If specified, only matching assets will be returned,
otherwise, it will snapshot all asset types. See
`Introduction to Cloud Asset
Inventory <https://cloud.google.com/asset-inventory/docs/overview>`__
for all supported asset types.
content_type (google.cloud.asset_v1p7beta1.types.ContentType):
Asset content type. If not specified, no
content but the asset name will be returned.
output_config (google.cloud.asset_v1p7beta1.types.OutputConfig):
Required. Output configuration indicating
where the results will be output to.
relationship_types (Sequence[str]):
A list of relationship types to export, for example:
``INSTANCE_TO_INSTANCEGROUP``. This field should only be
specified if content_type=RELATIONSHIP. If specified, it
will snapshot [asset_types]' specified relationships, or
give errors if any relationship_types' supported types are
not in [asset_types]. If not specified, it will snapshot all
[asset_types]' supported relationships. An unspecified
[asset_types] field means all supported asset_types. See
`Introduction to Cloud Asset
Inventory <https://cloud.google.com/asset-inventory/docs/overview>`__
for all supported asset types and relationship types.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
read_time = proto.Field(
proto.MESSAGE,
number=2,
message=timestamp_pb2.Timestamp,
)
asset_types = proto.RepeatedField(
proto.STRING,
number=3,
)
content_type = proto.Field(
proto.ENUM,
number=4,
enum='ContentType',
)
output_config = proto.Field(
proto.MESSAGE,
number=5,
message='OutputConfig',
)
relationship_types = proto.RepeatedField(
proto.STRING,
number=6,
)
class ExportAssetsResponse(proto.Message):
r"""The export asset response. This message is returned by the
[google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation]
method in the returned
[google.longrunning.Operation.response][google.longrunning.Operation.response]
field.
Attributes:
read_time (google.protobuf.timestamp_pb2.Timestamp):
Time the snapshot was taken.
output_config (google.cloud.asset_v1p7beta1.types.OutputConfig):
Output configuration indicating where the
results were output to.
output_result (google.cloud.asset_v1p7beta1.types.OutputResult):
Output result indicating where the assets were exported to.
For example, a set of actual Google Cloud Storage object
uris where the assets are exported to. The uris can be
different from what [output_config] has specified, as the
service will split the output object into multiple ones once
it exceeds a single Google Cloud Storage object limit.
"""
read_time = proto.Field(
proto.MESSAGE,
number=1,
message=timestamp_pb2.Timestamp,
)
output_config = proto.Field(
proto.MESSAGE,
number=2,
message='OutputConfig',
)
output_result = proto.Field(
proto.MESSAGE,
number=3,
message='OutputResult',
)
class OutputConfig(proto.Message):
r"""Output configuration for export assets destination.
Attributes:
gcs_destination (google.cloud.asset_v1p7beta1.types.GcsDestination):
Destination on Cloud Storage.
bigquery_destination (google.cloud.asset_v1p7beta1.types.BigQueryDestination):
Destination on BigQuery. The output table
stores the fields in asset proto as columns in
BigQuery.
"""
gcs_destination = proto.Field(
proto.MESSAGE,
number=1,
oneof='destination',
message='GcsDestination',
)
bigquery_destination = proto.Field(
proto.MESSAGE,
number=2,
oneof='destination',
message='BigQueryDestination',
)
class OutputResult(proto.Message):
r"""Output result of export assets.
Attributes:
gcs_result (google.cloud.asset_v1p7beta1.types.GcsOutputResult):
Export result on Cloud Storage.
"""
gcs_result = proto.Field(
proto.MESSAGE,
number=1,
oneof='result',
message='GcsOutputResult',
)
class GcsOutputResult(proto.Message):
r"""A Cloud Storage output result.
Attributes:
uris (Sequence[str]):
List of uris of the Cloud Storage objects. Example:
"gs://bucket_name/object_name".
"""
uris = proto.RepeatedField(
proto.STRING,
number=1,
)
class GcsDestination(proto.Message):
r"""A Cloud Storage location.
Attributes:
uri (str):
The uri of the Cloud Storage object. It's the same uri that
is used by gsutil. Example: "gs://bucket_name/object_name".
See `Viewing and Editing Object
Metadata <https://cloud.google.com/storage/docs/viewing-editing-metadata>`__
for more information.
uri_prefix (str):
The uri prefix of all generated Cloud Storage objects.
Example: "gs://bucket_name/object_name_prefix". Each object
uri is in format:
"gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER}
            and only contains assets for that type. {SHARD_NUMBER} starts from 0.
Example:
"gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0"
is the first shard of output objects containing all
compute.googleapis.com/Disk assets. An INVALID_ARGUMENT
error will be returned if file with the same name
"gs://bucket_name/object_name_prefix" already exists.
"""
uri = proto.Field(
proto.STRING,
number=1,
oneof='object_uri',
)
uri_prefix = proto.Field(
proto.STRING,
number=2,
oneof='object_uri',
)
class BigQueryDestination(proto.Message):
r"""A BigQuery destination for exporting assets to.
Attributes:
dataset (str):
Required. The BigQuery dataset in format
"projects/projectId/datasets/datasetId", to which the
snapshot result should be exported. If this dataset does not
exist, the export call returns an INVALID_ARGUMENT error.
table (str):
Required. The BigQuery table to which the
snapshot result should be written. If this table
does not exist, a new table with the given name
will be created.
force (bool):
If the destination table already exists and this flag is
``TRUE``, the table will be overwritten by the contents of
assets snapshot. If the flag is ``FALSE`` or unset and the
destination table already exists, the export call returns an
            INVALID_ARGUMENT error.
partition_spec (google.cloud.asset_v1p7beta1.types.PartitionSpec):
[partition_spec] determines whether to export to partitioned
table(s) and how to partition the data.
If [partition_spec] is unset or
[partition_spec.partition_key] is unset or
``PARTITION_KEY_UNSPECIFIED``, the snapshot results will be
exported to non-partitioned table(s). [force] will decide
whether to overwrite existing table(s).
If [partition_spec] is specified. First, the snapshot
results will be written to partitioned table(s) with two
additional timestamp columns, readTime and requestTime, one
of which will be the partition key. Secondly, in the case
when any destination table already exists, it will first try
to update existing table's schema as necessary by appending
additional columns. Then, if [force] is ``TRUE``, the
corresponding partition will be overwritten by the snapshot
results (data in different partitions will remain intact);
if [force] is unset or ``FALSE``, it will append the data.
An error will be returned if the schema update or data
            appending fails.
separate_tables_per_asset_type (bool):
If this flag is ``TRUE``, the snapshot results will be
written to one or multiple tables, each of which contains
results of one asset type. The [force] and [partition_spec]
fields will apply to each of them.
Field [table] will be concatenated with "*" and the asset
type names (see
https://cloud.google.com/asset-inventory/docs/supported-asset-types
for supported asset types) to construct per-asset-type table
names, in which all non-alphanumeric characters like "." and
"/" will be substituted by "*". Example: if field [table] is
"mytable" and snapshot results contain
"storage.googleapis.com/Bucket" assets, the corresponding
table name will be "mytable_storage_googleapis_com_Bucket".
If any of these tables does not exist, a new table with the
concatenated name will be created.
When [content_type] in the ExportAssetsRequest is
``RESOURCE``, the schema of each table will include
RECORD-type columns mapped to the nested fields in the
Asset.resource.data field of that asset type (up to the 15
nested level BigQuery supports
(https://cloud.google.com/bigquery/docs/nested-repeated#limitations)).
The fields in >15 nested levels will be stored in JSON
format string as a child column of its parent RECORD column.
If error occurs when exporting to any table, the whole
export call will return an error but the export results that
already succeed will persist. Example: if exporting to
table_type_A succeeds when exporting to table_type_B fails
during one export call, the results in table_type_A will
persist and there will not be partial results persisting in
a table.
"""
dataset = proto.Field(
proto.STRING,
number=1,
)
table = proto.Field(
proto.STRING,
number=2,
)
force = proto.Field(
proto.BOOL,
number=3,
)
partition_spec = proto.Field(
proto.MESSAGE,
number=4,
message='PartitionSpec',
)
separate_tables_per_asset_type = proto.Field(
proto.BOOL,
number=5,
)
class PartitionSpec(proto.Message):
r"""Specifications of BigQuery partitioned table as export
destination.
Attributes:
partition_key (google.cloud.asset_v1p7beta1.types.PartitionSpec.PartitionKey):
The partition key for BigQuery partitioned
table.
"""
class PartitionKey(proto.Enum):
r"""This enum is used to determine the partition key column when
exporting assets to BigQuery partitioned table(s). Note that, if the
partition key is a timestamp column, the actual partition is based
on its date value (expressed in UTC. see details in
https://cloud.google.com/bigquery/docs/partitioned-tables#date_timestamp_partitioned_tables).
"""
PARTITION_KEY_UNSPECIFIED = 0
READ_TIME = 1
REQUEST_TIME = 2
partition_key = proto.Field(
proto.ENUM,
number=1,
enum=PartitionKey,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 3,185,242,482,510,889,000 | 36.857506 | 101 | 0.631268 | false | 4.547066 | true | false | false |
BROADSoftware/hadeploy | lib/hadeploy/plugins/yarn/code.py | 1 | 6452 | # Copyright (C) 2018 BROADSoftware
#
# This file is part of HADeploy
#
# HADeploy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HADeploy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HADeploy. If not, see <http://www.gnu.org/licenses/>.
import logging
import hadeploy.core.misc as misc
import os
from hadeploy.core.plugin import Plugin
from hadeploy.core.const import SRC,DEFAULT_TOOLS_FOLDER,ACTION_STATUS,ACTION_START,ACTION_STOP,ACTION_DEPLOY,ACTION_REMOVE,SCOPE_YARN,DATA
logger = logging.getLogger("hadeploy.plugins.yarn")
YARN_RELAY="yarn_relay"
YARN_SERVICES="yarn_services"
# Our private space in data model
YARN="yarn"
ALL_SERVICES="allServices"
SERVICES_TO_KILL="servicesToKill"
class YarnPlugin(Plugin):
def __init__(self, name, path, context):
Plugin.__init__(self, name, path, context)
def onNewSnippet(self, snippetPath):
model = self.context.model
if YARN_RELAY in model[SRC]:
if LOCAL_KEYTAB_PATH in model[SRC][YARN_RELAY]:
model[SRC][YARN_RELAY][LOCAL_KEYTAB_PATH] = misc.snippetRelocate(snippetPath, model[SRC][YARN_RELAY][LOCAL_KEYTAB_PATH])
def getGroomingPriority(self):
return 2520
def getSupportedScopes(self):
return [SCOPE_YARN]
def getSupportedActions(self):
if self.context.toExclude(SCOPE_YARN):
return []
else:
# NB: We need to add ACTION_DEPLOY, as we need role 'yarn_modules' to be added in the playbook of deployment, for files notifications
return [ACTION_START, ACTION_STOP, ACTION_STATUS, ACTION_DEPLOY, ACTION_REMOVE]
def getPriority(self, action):
if action == ACTION_START:
return 6400
elif action == ACTION_STOP:
return 3600
elif action == ACTION_STATUS:
return 5000
elif action == ACTION_DEPLOY:
return 7050
elif action == ACTION_REMOVE:
return 1550
else:
misc.ERROR("Plugin 'yarn' called with invalid action: '{0}'".format(action))
def onGrooming(self):
if self.context.toExclude(SCOPE_YARN):
return
misc.applyWhenOnSingle(self.context.model[SRC], YARN_RELAY)
misc.applyWhenOnList(self.context.model[SRC], YARN_SERVICES)
misc.ensureObjectInMaps(self.context.model[DATA], [YARN], {})
groomYarnRelay(self.context.model)
groomYarnServices(self.context.model)
HOST="host"
DEFAULT_TIMEOUT_SECS="default_timeout_secs"
PRINCIPAL="principal"
LOCAL_KEYTAB_PATH="local_keytab_path"
RELAY_KEYTAB_PATH="relay_keytab_path"
TOOLS_FOLDER="tools_folder"
KDEBUG="kdebug"
_RELAY_KEYTAB_FOLDER_="_relayKeytabFolder_"
_KERBEROS_="_kerberos_"
def groomYarnRelay(model):
if YARN_RELAY in model[SRC]:
if not YARN_SERVICES in model[SRC] or len(model[SRC][YARN_SERVICES]) == 0:
del(model[SRC][YARN_RELAY])
else:
misc.setDefaultInMap(model[SRC][YARN_RELAY], DEFAULT_TIMEOUT_SECS, 90)
misc.setDefaultInMap(model[SRC][YARN_RELAY], TOOLS_FOLDER, DEFAULT_TOOLS_FOLDER)
if PRINCIPAL in model[SRC][YARN_RELAY]:
if LOCAL_KEYTAB_PATH not in model[SRC][YARN_RELAY] and RELAY_KEYTAB_PATH not in model[SRC][YARN_RELAY]:
misc.ERROR("yarn_relay: Please provide a 'local_keytab_path' and/or a 'relay_keytab_path' if you want to use a Kerberos 'principal'")
model[SRC][YARN_RELAY][_KERBEROS_] = True
if LOCAL_KEYTAB_PATH in model[SRC][YARN_RELAY]:
if not os.path.exists(model[SRC][YARN_RELAY][LOCAL_KEYTAB_PATH]):
misc.ERROR("yarn_relay: local_keytab_file '{0}' does not exists!".format(model[SRC][YARN_RELAY][LOCAL_KEYTAB_PATH]))
if RELAY_KEYTAB_PATH not in model[SRC][YARN_RELAY]:
model[SRC][YARN_RELAY][_RELAY_KEYTAB_FOLDER_] = os.path.join(model[SRC][YARN_RELAY][TOOLS_FOLDER], "keytabs")
model[SRC][YARN_RELAY][RELAY_KEYTAB_PATH] = os.path.join( model[SRC][YARN_RELAY][_RELAY_KEYTAB_FOLDER_], os.path.basename(model[SRC][YARN_RELAY][LOCAL_KEYTAB_PATH]))
misc.setDefaultInMap(model[SRC][YARN_RELAY], KDEBUG, False)
else:
if LOCAL_KEYTAB_PATH in model[SRC][YARN_RELAY] or RELAY_KEYTAB_PATH in model[SRC][YARN_RELAY]:
misc.ERROR("yarn_relay: Please, provide a 'principal' if you need to use a keytab")
model[SRC][YARN_RELAY][_KERBEROS_] = False
LAUNCHING_CMD="launching_cmd"
KILLING_CMD="killing_cmd"
LAUNCHING_DIR="launching_dir"
TIMEOUT_SECS="timeout_secs"
NAME="name"
def groomYarnServices(model):
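    # Validate every declared yarn_services entry and build the aggregated,
    # comma-separated service name lists stored in the shared data model.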
if YARN_SERVICES in model[SRC] and len(model[SRC][YARN_SERVICES]) > 0 :
if not YARN_RELAY in model[SRC]:
misc.ERROR("A yarn_relay must be defined if at least one yarn_services is defined")
for service in model[SRC][YARN_SERVICES]:
misc.setDefaultInMap(service, LAUNCHING_DIR, "~")
misc.setDefaultInMap(service, TIMEOUT_SECS, model[SRC][YARN_RELAY][DEFAULT_TIMEOUT_SECS])
if LAUNCHING_DIR in service:
if not os.path.isabs(service[LAUNCHING_DIR]) and not service[LAUNCHING_DIR].startswith("~"):
misc.ERROR("yarn_services '{}': launching_dir must be an absolute path".format(service[NAME]))
if ALL_SERVICES in model[DATA][YARN]:
model[DATA][YARN][ALL_SERVICES] = model[DATA][YARN][ALL_SERVICES] + "," + service[NAME]
else:
model[DATA][YARN][ALL_SERVICES] = service[NAME]
if not KILLING_CMD in service:
if SERVICES_TO_KILL in model[DATA][YARN]:
model[DATA][YARN][SERVICES_TO_KILL] = model[DATA][YARN][SERVICES_TO_KILL] + "," + service[NAME]
else:
model[DATA][YARN][SERVICES_TO_KILL] = service[NAME]
| gpl-3.0 | -8,574,510,820,610,604,000 | 42.013333 | 185 | 0.643211 | false | 3.43009 | false | false | false |
KarolBedkowski/photocat | photocat/lib/wxtools/dialogs/message_boxes.py | 1 | 6488 | #!/usr/bin/python2.4
# -*- coding: utf-8 -*-
"""
message_boxes
KPyLibs
Copyright (c) Karol Będkowski, 2004, 2005, 2006
This file is part of KPyLibs
"""
__author__ = 'Karol Będkowski'
__copyright__ = 'Copyright (C) Karol Będkowski 2006'
__revision__ = '$Id$'
__all__ = ['message_box_error', 'message_box_info',
'message_box_question_yesno', 'message_box_warning_yesno',
'message_box_warning_yesnocancel', 'message_box_not_save_confirm',
'message_box_error_ex', 'message_box_info_ex', 'message_box_delete_confirm',
'message_box_question']
import wx
class MyMessageDialog(wx.Dialog):
"""docstring for MyMessageDialog"""
def __init__(self, parent, primary_text, secondary_text, buttons=None,
icon=None):
wx.Dialog.__init__(self, parent, -1, '')
sizer = wx.BoxSizer(wx.VERTICAL)
sizer_inner = wx.BoxSizer(wx.HORIZONTAL)
if icon:
bmp = wx.ArtProvider.GetBitmap(icon, wx.ART_MESSAGE_BOX)
sizer_inner.Add(wx.StaticBitmap(self, -1, bmp), 0, wx.EXPAND)
sizer_inner.Add((12, 12))
sizer_text = wx.BoxSizer(wx.VERTICAL)
if primary_text and secondary_text:
fstyle = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
fstyle.SetWeight(wx.FONTWEIGHT_BOLD)
fstyle.SetPointSize(fstyle.GetPointSize() + 2)
ptext = wx.StaticText(self, -1, primary_text)
ptext.SetFont(fstyle)
sizer_text.Add(ptext, 0, wx.EXPAND)
sizer_text.Add((12, 12))
elif not secondary_text:
ptext = wx.StaticText(self, -1, primary_text)
sizer_text.Add(ptext, 0, wx.EXPAND)
if secondary_text:
ptext = wx.StaticText(self, -1, secondary_text)
sizer_text.Add(ptext, 0, wx.EXPAND)
sizer_inner.Add(sizer_text, 0)
sizer.Add(sizer_inner, 0, wx.EXPAND | wx.ALL, 12)
buttons_grid = self._create_buttons(buttons)
sizer.Add(buttons_grid, 0, wx.EXPAND | wx.ALL, 12)
self.SetSizerAndFit(sizer)
self.Bind(wx.EVT_BUTTON, self._on_btn_no, id=wx.ID_NO)
self.Bind(wx.EVT_BUTTON, self._on_btn_yes, id=wx.ID_YES)
def _on_btn_no(self, evt):
self.EndModal(wx.ID_NO)
def _on_btn_yes(self, evt):
self.EndModal(wx.ID_YES)
def _create_buttons(self, buttons):
return self.CreateStdDialogButtonSizer(buttons or wx.ID_OK)
class DialogConfirmSave(MyMessageDialog):
"""docstring for DialogConfirmSave"""
def __init__(self, parent, doc_name, time_period=None, saveas=False):
primary_text = _("Save the changes to\n%(doc_name)s before closing?") \
% dict(doc_name=doc_name)
if time_period is None:
secondary_text = _('If you close without saving, changes will be discarded')
else:
secondary_text = _('If you close without saving, changes from the last\n'
'%(time)s will be discarded''') % dict(time=time_period)
self.saveas = saveas
MyMessageDialog.__init__(self, parent, primary_text, secondary_text, None,
wx.ART_WARNING)
def _create_buttons(self, buttons):
grid = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_NO, _('Close &without Saving'))
grid.AddButton(btn)
btn = wx.Button(self, wx.ID_CANCEL)
grid.AddButton(btn)
btn_save_text = _('Save &As') if self.saveas else _('Save')
btn = wx.Button(self, wx.ID_YES, btn_save_text)
btn.SetDefault()
grid.AddButton(btn)
grid.Realize()
return grid
class DialogConfirmDelete(MyMessageDialog):
"""docstring for DialogConfirmSave"""
def __init__(self, parent, name):
primary_text = _("Delete %s?") % name
secondary_text = _('After removal, it cannot be recovered.')
MyMessageDialog.__init__(self, parent, primary_text, secondary_text, None,
wx.ART_QUESTION)
def _create_buttons(self, buttons):
grid = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_CANCEL)
grid.AddButton(btn)
btn = wx.Button(self, wx.ID_YES, _("Delete"))
btn.SetDefault()
grid.AddButton(btn)
grid.Realize()
return grid
class DialogQuestion(MyMessageDialog):
"""docstring for DialogConfirmSave"""
def __init__(self, parent, primary_text, secondary_text, affirmative_button,
cancel_button):
self.affirmative_button = affirmative_button
self.cancel_button = cancel_button
MyMessageDialog.__init__(self, parent, primary_text, secondary_text, None,
wx.ART_QUESTION)
def _create_buttons(self, buttons):
grid = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_CANCEL, self.cancel_button)
grid.AddButton(btn)
btn = wx.Button(self, wx.ID_YES, self.affirmative_button)
btn.SetDefault()
grid.AddButton(btn)
grid.Realize()
return grid
def message_box_error(parent, msg, title=''):
dlg = wx.MessageDialog(parent, str(msg), title,
wx.OK | wx.CENTRE | wx.ICON_ERROR)
dlg.ShowModal()
dlg.Destroy()
def message_box_error_ex(parent, header, message):
dlg = MyMessageDialog(parent, header, message, wx.OK, wx.ART_ERROR)
dlg.ShowModal()
dlg.Destroy()
def message_box_info(parent, msg, title=''):
dlg = wx.MessageDialog(parent, str(msg), title,
wx.OK | wx.CENTRE | wx.ICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
def message_box_info_ex(parent, header, message):
dlg = MyMessageDialog(parent, header, message, wx.OK, wx.ART_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
def message_box_question_yesno(parent, msg, title=''):
dlg = wx.MessageDialog(parent, msg, title,
wx.YES_NO | wx.NO_DEFAULT | wx.CENTRE | wx.ICON_QUESTION)
res = dlg.ShowModal()
dlg.Destroy()
return res == wx.ID_YES
def message_box_warning_yesno(parent, msg, title=''):
dlg = wx.MessageDialog(parent, msg, title,
wx.YES_NO | wx.NO_DEFAULT | wx.CENTRE | wx.ICON_WARNING)
res = dlg.ShowModal()
dlg.Destroy()
return res == wx.ID_YES
def message_box_warning_yesnocancel(parent, msg, title=''):
dlg = wx.MessageDialog(parent, msg, title,
wx.YES_NO | wx.CANCEL | wx.YES_DEFAULT | wx.CENTRE | wx.ICON_WARNING)
res = dlg.ShowModal()
dlg.Destroy()
return res
def message_box_not_save_confirm(parent, doc_name, time_period=None,
saveas=False):
dlg = DialogConfirmSave(parent, doc_name, time_period, saveas)
res = dlg.ShowModal()
dlg.Destroy()
return res
def message_box_delete_confirm(parent, name):
dlg = DialogConfirmDelete(parent, name)
res = dlg.ShowModal()
dlg.Destroy()
return res == wx.ID_YES
def message_box_question(parent, primary_text, secondary_text,
affirmative_button=None, cancel_button=None):
affirmative_button = affirmative_button or _('Ok')
cancel_button = cancel_button or _("Cancel")
dlg = DialogQuestion(parent, primary_text, secondary_text, affirmative_button,
cancel_button)
res = dlg.ShowModal()
dlg.Destroy()
return res == wx.ID_YES
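# Illustrative sketch (not part of the original module): typical use of the
# helpers above from a wx event handler. "parent" and "doc_name" are assumed
# to be a wx window and a document title supplied by the caller.
def _example_confirm_close(parent, doc_name):
    """Ask whether to save before closing and return the chosen button id."""
    res = message_box_not_save_confirm(parent, doc_name, time_period='5 minutes')
    if res == wx.ID_YES:
        pass  # the caller would save the document here
    elif res == wx.ID_NO:
        pass  # the caller would discard the changes here
    return res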
# vim: encoding=utf8:
| gpl-2.0 | 8,468,368,418,460,517,000 | 28.080717 | 79 | 0.698843 | false | 2.815892 | false | false | false |
amboycharlie/Child-Friendly-LCMS | leonardo/module/web/page/dimension/forms.py | 1 | 1262 |
import copy
import floppyforms as forms
from crispy_forms.bootstrap import Tab, TabHolder
from crispy_forms.layout import Field, HTML, Layout
from django.utils.translation import ugettext_lazy as _
from horizon.utils.memoized import memoized
from horizon_contrib.forms import SelfHandlingModelForm
from leonardo.module.web.models import PageDimension
class Slider(forms.RangeInput):
min = 0
max = 12
step = 1
class PageDimensionForm(SelfHandlingModelForm):
col1_width = forms.CharField(widget=Slider(), initial=4)
col2_width = forms.CharField(widget=Slider(), initial=4)
col3_width = forms.CharField(widget=Slider(), initial=4)
def _check_dimensions(self):
col1_width = self.cleaned_data['col1_width']
col2_width = self.cleaned_data['col2_width']
col3_width = self.cleaned_data['col3_width']
if not (int(col1_width) + int(col2_width) + int(col3_width)) <= 12:
            raise forms.ValidationError(_("Sum of column widths must not exceed 12"))
def clean(self):
cleaned = super(PageDimensionForm, self).clean()
self._check_dimensions()
return cleaned
class Meta:
model = PageDimension
exclude = tuple()
widgets = {'page': forms.HiddenInput}
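# Illustrative note (not part of the original app): clean() above rejects
# column widths whose sum exceeds 12, e.g. submitting
#   {'page': page.pk, 'col1_width': 6, 'col2_width': 4, 'col3_width': 4}
# makes is_valid() return False because 6 + 4 + 4 > 12.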
| apache-2.0 | -8,510,855,863,465,506,000 | 29.780488 | 78 | 0.692552 | false | 3.755952 | false | false | false |
nextstrain/augur | setup.py | 1 | 3412 | from pathlib import Path
import setuptools
import sys
min_version = (3, 6)
if sys.version_info < min_version:
error = """
Beginning with augur 7.0.0, Python {0} or above is required.
This may be due to an out of date pip.
Make sure you have pip >= 9.0.1.
""".format('.'.join(str(n) for n in min_version)),
sys.exit(error)
base_dir = Path(__file__).parent.resolve()
version_file = base_dir / "augur/__version__.py"
readme_file = base_dir / "README.md"
# Eval the version file to get __version__; avoids importing our own package
with version_file.open() as f:
exec(f.read())
# Get the long description from the README file
with readme_file.open(encoding = "utf-8") as f:
long_description = f.read()
setuptools.setup(
name = "nextstrain-augur",
version = __version__,
author = "Nextstrain developers",
author_email = "[email protected], [email protected]",
description = "A bioinformatics toolkit for phylogenetic analysis",
long_description = long_description,
long_description_content_type = "text/markdown",
keywords = "nextstrain, molecular epidemiology",
url = "https://github.com/nextstrain/augur",
project_urls = {
"Bug Reports": "https://github.com/nextstrain/augur/issues",
"Change Log": "https://github.com/nextstrain/augur/blob/master/CHANGES.md#next",
"Source": "https://github.com/nextstrain/augur",
},
packages = setuptools.find_packages(),
package_data = {'augur': ['data/*']},
python_requires = '>={}'.format('.'.join(str(n) for n in min_version)),
install_requires = [
"bcbio-gff >=0.6.0, ==0.6.*",
"biopython >=1.67, <=1.76",
"jsonschema >=3.0.0, ==3.*",
"packaging >=19.2",
"pandas >=1.0.0, ==1.*",
"phylo-treetime ==0.8.*",
"xopen >=1.0.1, ==1.*"
],
extras_require = {
'full': [
"cvxopt >=1.1.9, ==1.*",
"matplotlib >=2.0, ==2.*",
"seaborn >=0.9.0, ==0.9.*"
],
'dev': [
"cram >=0.7, ==0.*",
"deepdiff >=4.3.2, ==4.3.*",
"freezegun >=0.3.15, ==0.3.*",
"nextstrain-sphinx-theme >=2020.3",
"pylint >=1.7.6, ==1.7.*",
"pytest >=5.4.1, ==5.4.*",
"pytest-cov >=2.8.1, ==2.8.*",
"pytest-mock >= 2.0.0, ==2.0.*",
"recommonmark >=0.5.0, ==0.*",
"snakemake >=5.4.0, <5.27",
"Sphinx >=2.0.1, ==2.*",
"sphinx-argparse >=0.2.5, ==0.*",
"sphinx-markdown-tables >= 0.0.9",
"sphinx-rtd-theme >=0.4.3, ==0.*",
"wheel >=0.32.3, ==0.32.*",
"ipdb >=0.10.1, ==0.*"
]
},
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"License :: OSI Approved :: GNU Affero General Public License v3",
# Python 3 only
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
# Install an "augur" program which calls augur.__main__.main()
# https://setuptools.readthedocs.io/en/latest/setuptools.html#automatic-script-creation
entry_points = {
"console_scripts": [
"augur = augur.__main__:main",
]
}
)
| agpl-3.0 | -5,344,811,685,061,659,000 | 32.782178 | 93 | 0.540152 | false | 3.127406 | false | false | false |
helenjin/scanalysis | src/scanalysis/sca_gui.py | 1 | 7517 | #!/usr/local/bin/python3
import tkinter as tk
from tkinter import filedialog, ttk
from functools import partial  # required by the button callbacks defined below
class sca_gui(tk.Tk):
def __init__(self, parent):
tk.Tk.__init__(self, parent)
self.parent = parent
self.initialize()
def initialize(self):
self.grid()
self.vals = None
self.currentPlot = None
self.data = {}
#set up menu bar
self.menubar = tk.Menu(self)
self.fileMenu = tk.Menu(self.menubar, tearoff=0)
self.menubar.add_cascade(label="File", menu=self.fileMenu)
self.fileMenu.add_command(label="Load csv file", command=self.loadCSV)
# self.fileMenu.add_command(label="Load sparse data file", command=self.loadMTX)
# self.fileMenu.add_command(label="Load 10x file", command=self.load10x)
# self.fileMenu.add_command(label="Load saved session from pickle file", command=self.loadPickle)
# self.fileMenu.add_command(label="Save data", state='disabled', command=self.saveData)
# self.fileMenu.add_command(label="Exit", command=self.quitMAGIC)
# self.analysisMenu = tk.Menu(self.menubar, tearoff=0)
# self.menubar.add_cascade(label="Analysis", menu=self.analysisMenu)
# self.analysisMenu.add_command(label="Principal component analysis", state='disabled', command=self.runPCA)
# self.analysisMenu.add_command(label="tSNE", state='disabled', command=self.runTSNE)
# self.analysisMenu.add_command(label="Diffusion map", state='disabled', command=self.runDM)
# self.analysisMenu.add_command(label="MAGIC", state='disabled', command=self.runMagic)
# self.visMenu = tk.Menu(self.menubar, tearoff=0)
# self.menubar.add_cascade(label="Visualization", menu=self.visMenu)
# self.visMenu.add_command(label="Scatter plot", state='disabled', command=self.scatterPlot)
# self.visMenu.add_command(label="PCA-variance plot", state='disabled', command=self.plotPCAVariance)
self.config(menu=self.menubar)
#intro screen
tk.Label(self, text=u"SCAnalysis", font=('Helvetica', 48), fg="black", bg="white", padx=100, pady=20).grid(row=0)
tk.Label(self, text=u"Single Cell Analysis", font=('Helvetica', 25), fg="black", bg="white", padx=100, pady=40).grid(row=1)
tk.Label(self, text=u"Includes Wishbone, MAGIC, and Palantir", font=('Helvetica', 20), fg="black", bg="white", padx=100, pady=40).grid(row=2)
tk.Label(self, text=u"To get started, select a data file by clicking File > Load Data", fg="black", bg="white", padx=100, pady=25).grid(row=3)
#update
self.protocol('WM_DELETE_WINDOW', self.quitGUI)
self.grid_columnconfigure(0,weight=1)
self.resizable(True,True)
self.update()
self.geometry(self.geometry())
self.focus_force()
def loadCSV(self):
self.dataFileName = filedialog.askopenfilename(title='Load data file', initialdir='~/.magic/data')
if(self.dataFileName != ""):
#pop up data options menu
self.fileInfo = tk.Toplevel()
self.fileInfo.title("Data options")
tk.Label(self.fileInfo, text=u"File name: ").grid(column=0, row=0)
tk.Label(self.fileInfo, text=self.dataFileName.split('/')[-1]).grid(column=1, row=0)
tk.Label(self.fileInfo,text=u"Name:" ,fg="black",bg="white").grid(column=0, row=1)
self.fileNameEntryVar = tk.StringVar()
self.fileNameEntryVar.set('Data ' + str(len(self.data)))
tk.Entry(self.fileInfo, textvariable=self.fileNameEntryVar).grid(column=1,row=1)
tk.Label(self.fileInfo, text=u"Delimiter:").grid(column=0, row=2)
self.delimiter = tk.StringVar()
self.delimiter.set(',')
tk.Entry(self.fileInfo, textvariable=self.delimiter).grid(column=1, row=2)
tk.Label(self.fileInfo, text=u"Rows:", fg="black",bg="white").grid(column=0, row=3)
self.rowVar = tk.IntVar()
self.rowVar.set(0)
tk.Radiobutton(self.fileInfo, text="Cells", variable=self.rowVar, value=0).grid(column=1, row=3)
tk.Radiobutton(self.fileInfo, text="Genes", variable=self.rowVar, value=1).grid(column=2, row=3)
tk.Label(self.fileInfo, text=u"Number of additional rows/columns to skip after gene/cell names").grid(column=0, row=4, columnspan=3)
tk.Label(self.fileInfo, text=u"Number of rows:").grid(column=0, row=5)
self.rowHeader = tk.IntVar()
self.rowHeader.set(0)
tk.Entry(self.fileInfo, textvariable=self.rowHeader).grid(column=1, row=5)
tk.Label(self.fileInfo, text=u"Number of columns:").grid(column=0, row=6)
self.colHeader = tk.IntVar()
self.colHeader.set(0)
tk.Entry(self.fileInfo, textvariable=self.colHeader).grid(column=1, row=6)
tk.Button(self.fileInfo, text="Compute data statistics", command=partial(self.showRawDataDistributions, file_type='csv')).grid(column=1, row=7)
#filter parameters
self.filterCellMinVar = tk.StringVar()
tk.Label(self.fileInfo,text=u"Filter by molecules per cell. Min:" ,fg="black",bg="white").grid(column=0, row=8)
tk.Entry(self.fileInfo, textvariable=self.filterCellMinVar).grid(column=1,row=8)
self.filterCellMaxVar = tk.StringVar()
tk.Label(self.fileInfo, text=u" Max:" ,fg="black",bg="white").grid(column=2, row=8)
tk.Entry(self.fileInfo, textvariable=self.filterCellMaxVar).grid(column=3,row=8)
self.filterGeneNonzeroVar = tk.StringVar()
tk.Label(self.fileInfo,text=u"Filter by nonzero cells per gene. Min:" ,fg="black",bg="white").grid(column=0, row=9)
tk.Entry(self.fileInfo, textvariable=self.filterGeneNonzeroVar).grid(column=1,row=9)
self.filterGeneMolsVar = tk.StringVar()
tk.Label(self.fileInfo,text=u"Filter by molecules per gene. Min:" ,fg="black",bg="white").grid(column=0, row=10)
tk.Entry(self.fileInfo, textvariable=self.filterGeneMolsVar).grid(column=1,row=10)
#normalize
self.normalizeVar = tk.BooleanVar()
self.normalizeVar.set(True)
tk.Checkbutton(self.fileInfo, text=u"Normalize by library size", variable=self.normalizeVar).grid(column=0, row=11, columnspan=4)
#log transform
self.logTransform = tk.BooleanVar()
self.logTransform.set(False)
tk.Checkbutton(self.fileInfo, text=u"Log-transform data", variable=self.logTransform).grid(column=0, row=12)
self.pseudocount = tk.DoubleVar()
self.pseudocount.set(0.1)
tk.Label(self.fileInfo, text=u"Pseudocount (for log-transform)", fg="black",bg="white").grid(column=1, row=12)
tk.Entry(self.fileInfo, textvariable=self.pseudocount).grid(column=2, row=12)
tk.Button(self.fileInfo, text="Cancel", command=self.fileInfo.destroy).grid(column=1, row=13)
tk.Button(self.fileInfo, text="Load", command=partial(self.processData, file_type='csv')).grid(column=2, row=13)
self.wait_window(self.fileInfo)
def quitGUI(self):
self.quit()
self.destroy()
def launch():
app = sca_gui(None)
app.title('SCAnalysis')
try:
app.mainloop()
except UnicodeDecodeError:
pass
if __name__ == "__main__":
launch()
| gpl-2.0 | 659,605,362,825,649,500 | 49.449664 | 155 | 0.634562 | false | 3.390618 | false | false | false |
andredalton/bcc | 2014/MAC0242/miniep5/miniep5.py | 1 | 4921 | #!/usr/bin/env python3
# Only needed to handle the command line arguments
import sys
# Command-history handling, to make interactive testing easier.
import readline
histfile = ".history"
try:
readline.read_history_file(histfile)
except IOError:
pass
import atexit
atexit.register(readline.write_history_file, histfile)
del histfile
# Import the lexical and syntax analyzers
import ply.lex as lex
import ply.yacc as yacc
class Calc:
def __init__(self, transmite_erros=False):
""" Inicializador, pode receber True como segundo argumento permitindo o não tratamento de erros. """
self.transmite_erros = transmite_erros # Caso queira rodar por linha de comando os erros são repassados ao invés de tratados.
self.resposta = None # Resposta
self.names = { } # Nomes de variáveis
lex.lex(module=self)
yacc.yacc(module=self)
def analisar(self, s):
""" Inicializa uma análise """
self.resposta = None
self.nao_vazio = False
yacc.parse(s)
return self.resposta
    # Token definitions
tokens = (
'NOME','NUMERO',
'SOM','SUB', 'MUL','DIV','ATRIB',
)
def t_SOM(self, t):
r'\+'
self.nao_vazio = True
return t
def t_SUB(self, t):
r'-'
self.nao_vazio = True
return t
def t_MUL(self, t):
r'\*'
self.nao_vazio = True
return t
def t_DIV(self, t):
r'/'
self.nao_vazio = True
return t
def t_ATRIB(self, t):
r'='
self.nao_vazio = True
return t
def t_NOME(self, t):
r'[a-zA-Z_][a-zA-Z0-9_]*'
self.nao_vazio = True
try:
self.names[t.value]
except KeyError:
self.names[t.value] = None
return t
def t_NUMERO(self, t):
r'\d+\.?\d*[eE][+\-]?\d+|\d+\.?\d*'
self.nao_vazio = True
try:
t.value = float(t.value)
except ValueError:
if self.transmite_erros: raise
print("Por favor, digite apenas numeros validos:", t.value[0])
t.value = 0
return t
    # Ignore spaces and tabs
t_ignore = " \t\v"
def t_error(self, t):
if self.transmite_erros: raise TypeError
print("Caracter incorreto '%s'" % t.value)
t.lexer.skip(1)
    # Syntax analysis (grammar) rules
precedence = (
('right', 'NOME', 'NUMERO'),
)
def p_expressao(self, p):
'statement : expression'
self.resposta = p[1]
def p_expressao_dupla(self, p):
"""
expression : expression expression SOM
| expression expression SUB
| expression expression MUL
| expression expression DIV
| NOME expression ATRIB
"""
if p[1] is not None and p[2] is not None:
if p[3] == '=': p[0] = self.names[p[1]] = p[2]
elif p[3] == '+': p[0] = p[1] + p[2]
elif p[3] == '-': p[0] = p[1] - p[2]
elif p[3] == '*': p[0] = p[1] * p[2]
elif p[3] == '/':
try:
p[0] = p[1] / p[2]
except ZeroDivisionError:
if self.transmite_erros: raise
print("Divisao por zero!")
def p_expressao_dupla2(self, p):
"""
expression : NOME expression SOM
| NOME expression SUB
| NOME expression MUL
| NOME expression DIV
"""
if p[1] is not None and p[2] is not None:
if p[3] == '+': p[0] = self.names[p[1]] + p[2]
elif p[3] == '-': p[0] = self.names[p[1]] - p[2]
elif p[3] == '*': p[0] = self.names[p[1]] * p[2]
elif p[3] == '/':
try:
p[0] = self.names[p[1]] / p[2]
except ZeroDivisionError:
if self.transmite_erros: raise
print("Divisao por zero!")
def p_nome(self, p):
'expression : NOME'
p[0] = self.names[p[1]]
if self.names[p[1]] is None:
if self.transmite_erros: raise KeyError
print("Variavel '%s' sem atribuicao" % p[1])
def p_numero(self, p):
'expression : NUMERO'
p[0] = p[1]
def p_error(self, p):
if self.nao_vazio:
if self.transmite_erros: raise LookupError
print("Erro de sintaxe!", p)
if __name__ == '__main__':
s = ' '.join(sys.argv[1:])
f = len(s) != 0
calc = Calc(f)
while 1:
r = calc.analisar(s)
if r is not None:
print("%.4g" % r)
pass
if f: break
try:
s = input('calc > ')
except (EOFError, KeyboardInterrupt):
print()
break | apache-2.0 | 343,323,799,782,712,000 | 27.091429 | 134 | 0.49176 | false | 3.257124 | false | false | false |
freddyb/nodeKnock | nodeKnock-Client.py | 1 | 1119 | #!/usr/bin/env python
"""****************************************************************************
* nodeKnock-Client 0.2 by freddyb
*
* this is one of many possible nodeKnock clients, and is meant as a
* proof of concept.
*
****************************************************************************"""
from time import time
from hashlib import sha1
from commands import getoutput
from sys import exit
try:
execfile('nodeKnock.cfg')
except:
print "Error: File nodeKnock.cfg does not exist!"
exit(1)
# prepare command
cmd1 = "ping -c 1 -p "
cmd2 = " "+ config['host']
# build pattern
header = "4e444b" # i.e. "NDK"
t = int(time()) # timestamp-integer
p_timestamp = hex(t)[2:] # hex, because ping demands this.
#sha1(client_ip + secret + timestamp')
p_hash = sha1( config['own_ip'] + config['secret'] + str(t)).hexdigest()
# build string for execution
pattern = header + p_timestamp + p_hash
execStr = cmd1 + pattern + cmd2
# ping -c1 -p^ ^bytes ^host
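# Illustrative layout of the payload pattern built above (values are made up):
#   "4e444b"            - 6 hex chars, the ASCII marker "NDK"
#   hex(timestamp)[2:]  - the current Unix time, typically 8 hex chars
#   sha1 hexdigest      - 40 hex chars of sha1(own_ip + secret + timestamp)
# e.g. "4e444b" + "4d9f2c3a" + "<40-hex-char sha1 digest>"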
print "Executing", repr(execStr)
print getoutput(execStr)
#DEBUG: print [int(pattern[2*i:2*i+2],16) for i in xrange(len(pattern)/2)]
| gpl-2.0 | -5,145,565,155,360,544,000 | 26.975 | 79 | 0.585344 | false | 3.262391 | false | false | false |
downpat/django-cms-example | pkg/cms_example/settings.py | 1 | 4859 | """
Django settings for cms_example project.
Generated by 'django-admin startproject' using Django 1.9.13.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@(l@&t1_9u@5)2^0%_8jdg7yk00v^hqzj(%o$3yh9$$)xw+3a('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
#Site ID for Django CMS
SITE_ID = 1
# Application definition
INSTALLED_APPS = [
'djangocms_admin_style',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'cms',
'menus',
'treebeard',
'sekizai',
'filer',
'easy_thumbnails',
'mptt',
'djangocms_text_ckeditor',
'djangocms_link',
'djangocms_file',
'djangocms_picture',
'djangocms_video',
'djangocms_googlemap',
'djangocms_snippet',
'djangocms_style',
'djangocms_column',
'example_plugins',
]
MIDDLEWARE_CLASSES = [
'cms.middleware.utils.ApphookReloadMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.locale.LocaleMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
'cms.middleware.language.LanguageCookieMiddleware',
]
ROOT_URLCONF = 'cms_example.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'sekizai.context_processors.sekizai',
'cms.context_processors.cms_settings',
],
},
},
]
WSGI_APPLICATION = 'cms_example.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en'
LANGUAGES = [
('en', 'English'),
('de', 'German')
]
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
#Media is about uploads from users. In production, I'd normally use S3 for this
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
CMS_TEMPLATES = [
('home.html', 'Home page template'),
]
#Easy Thumbnails Settings
THUMBNAIL_HIGH_RESOLUTION = True
THUMBNAIL_PROCESSORS = (
'easy_thumbnails.processors.colorspace',
'easy_thumbnails.processors.autocrop',
'filer.thumbnail_processors.scale_and_crop_with_subject_location',
'easy_thumbnails.processors.filters'
)
#Tableau Settings
TABLEAU_USERNAME = '[email protected]'
TABLEAU_PASSWORD = 'q9zOoyulCISLYAXSxtIwH2n'
TABLEAU_SITE = 'downpatproductions'
TABLEAU_SERVER = 'https://us-east-1.online.tableau.com'
| mit | 2,701,996,148,175,975,400 | 25.551913 | 91 | 0.683268 | false | 3.409825 | false | false | false |
coteyr/home-assistant | homeassistant/components/media_player/snapcast.py | 2 | 2333 | """
homeassistant.components.media_player.snapcast
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides functionality to interact with Snapcast clients.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.snapcast/
"""
import logging
import socket
from homeassistant.components.media_player import (
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, MediaPlayerDevice)
from homeassistant.const import STATE_OFF, STATE_ON
SUPPORT_SNAPCAST = SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE
DOMAIN = 'snapcast'
REQUIREMENTS = ['snapcast==1.1.1']
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Snapcast platform. """
import snapcast.control
host = config.get('host')
port = config.get('port', snapcast.control.CONTROL_PORT)
if not host:
_LOGGER.error('No snapserver host specified')
return
try:
server = snapcast.control.Snapserver(host, port)
except socket.gaierror:
_LOGGER.error('Could not connect to Snapcast server at %s:%d',
host, port)
return
add_devices([SnapcastDevice(client) for client in server.clients])
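# Illustrative configuration.yaml entry for this platform (host and port
# values are assumptions; port defaults to snapcast.control.CONTROL_PORT):
#
# media_player:
#   - platform: snapcast
#     host: 192.168.0.10
#     port: 1705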
class SnapcastDevice(MediaPlayerDevice):
""" Represents a Snapcast client device. """
# pylint: disable=abstract-method
def __init__(self, client):
self._client = client
@property
def name(self):
""" Device name. """
return self._client.identifier
@property
def volume_level(self):
""" Volume level. """
return self._client.volume / 100
@property
def is_volume_muted(self):
""" Volume muted. """
return self._client.muted
@property
def supported_media_commands(self):
""" Flags of media commands that are supported. """
return SUPPORT_SNAPCAST
@property
def state(self):
""" State of the player. """
if self._client.connected:
return STATE_ON
return STATE_OFF
def mute_volume(self, mute):
""" Mute status. """
self._client.muted = mute
def set_volume_level(self, volume):
""" Volume level. """
self._client.volume = round(volume * 100)
| mit | 5,035,044,712,423,639,000 | 27.45122 | 74 | 0.638234 | false | 4.050347 | false | false | false |
courtarro/gnuradio | gr-qtgui/examples/pyqt_const_c.py | 58 | 6058 | #!/usr/bin/env python
#
# Copyright 2011,2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, filter
from gnuradio import blocks
import sys
try:
from gnuradio import qtgui
from PyQt4 import QtGui, QtCore
import sip
except ImportError:
sys.stderr.write("Error: Program requires PyQt4 and gr-qtgui.\n")
sys.exit(1)
try:
from gnuradio import analog
except ImportError:
sys.stderr.write("Error: Program requires gr-analog.\n")
sys.exit(1)
try:
from gnuradio import channels
except ImportError:
sys.stderr.write("Error: Program requires gr-channels.\n")
sys.exit(1)
class dialog_box(QtGui.QWidget):
def __init__(self, display, control):
QtGui.QWidget.__init__(self, None)
self.setWindowTitle('PyQt Test GUI')
self.boxlayout = QtGui.QBoxLayout(QtGui.QBoxLayout.LeftToRight, self)
self.boxlayout.addWidget(display, 1)
self.boxlayout.addWidget(control)
self.resize(800, 500)
class control_box(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
self.setWindowTitle('Control Panel')
self.setToolTip('Control the signals')
QtGui.QToolTip.setFont(QtGui.QFont('OldEnglish', 10))
self.layout = QtGui.QFormLayout(self)
# Control the first signal
self.freq1Edit = QtGui.QLineEdit(self)
self.freq1Edit.setMinimumWidth(100)
self.layout.addRow("Signal 1 Frequency:", self.freq1Edit)
self.connect(self.freq1Edit, QtCore.SIGNAL("editingFinished()"),
self.freq1EditText)
self.amp1Edit = QtGui.QLineEdit(self)
self.amp1Edit.setMinimumWidth(100)
self.layout.addRow("Signal 1 Amplitude:", self.amp1Edit)
self.connect(self.amp1Edit, QtCore.SIGNAL("editingFinished()"),
self.amp1EditText)
# Control the second signal
self.freq2Edit = QtGui.QLineEdit(self)
self.freq2Edit.setMinimumWidth(100)
self.layout.addRow("Signal 2 Frequency:", self.freq2Edit)
self.connect(self.freq2Edit, QtCore.SIGNAL("editingFinished()"),
self.freq2EditText)
self.amp2Edit = QtGui.QLineEdit(self)
self.amp2Edit.setMinimumWidth(100)
self.layout.addRow("Signal 2 Amplitude:", self.amp2Edit)
self.connect(self.amp2Edit, QtCore.SIGNAL("editingFinished()"),
self.amp2EditText)
self.quit = QtGui.QPushButton('Close', self)
self.quit.setMinimumWidth(100)
self.layout.addWidget(self.quit)
self.connect(self.quit, QtCore.SIGNAL('clicked()'),
QtGui.qApp, QtCore.SLOT('quit()'))
def attach_signal1(self, signal):
self.signal1 = signal
self.freq1Edit.setText(QtCore.QString("%1").arg(self.signal1.frequency()))
self.amp1Edit.setText(QtCore.QString("%1").arg(self.signal1.amplitude()))
def attach_signal2(self, signal):
self.signal2 = signal
self.freq2Edit.setText(QtCore.QString("%1").arg(self.signal2.frequency()))
self.amp2Edit.setText(QtCore.QString("%1").arg(self.signal2.amplitude()))
def freq1EditText(self):
try:
newfreq = float(self.freq1Edit.text())
self.signal1.set_frequency(newfreq)
except ValueError:
print "Bad frequency value entered"
def amp1EditText(self):
try:
newamp = float(self.amp1Edit.text())
self.signal1.set_amplitude(newamp)
except ValueError:
print "Bad amplitude value entered"
def freq2EditText(self):
try:
newfreq = float(self.freq2Edit.text())
self.signal2.set_frequency(newfreq)
except ValueError:
print "Bad frequency value entered"
def amp2EditText(self):
try:
newamp = float(self.amp2Edit.text())
self.signal2.set_amplitude(newamp)
except ValueError:
print "Bad amplitude value entered"
class my_top_block(gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
Rs = 8000
f1 = 100
f2 = 200
npts = 2048
self.qapp = QtGui.QApplication(sys.argv)
src1 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f1, 0.5, 0)
src2 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f2, 0.5, 0)
src = blocks.add_cc()
channel = channels.channel_model(0.001)
thr = blocks.throttle(gr.sizeof_gr_complex, 100*npts)
self.snk1 = qtgui.const_sink_c(npts, "Constellation Example", 1)
self.connect(src1, (src,0))
self.connect(src2, (src,1))
self.connect(src, channel, thr, (self.snk1, 0))
self.ctrl_win = control_box()
self.ctrl_win.attach_signal1(src1)
self.ctrl_win.attach_signal2(src2)
# Get the reference pointer to the SpectrumDisplayForm QWidget
pyQt = self.snk1.pyqwidget()
# Wrap the pointer as a PyQt SIP object
# This can now be manipulated as a PyQt4.QtGui.QWidget
pyWin = sip.wrapinstance(pyQt, QtGui.QWidget)
self.main_box = dialog_box(pyWin, self.ctrl_win)
self.main_box.show()
if __name__ == "__main__":
tb = my_top_block();
tb.start()
tb.qapp.exec_()
tb.stop()
| gpl-3.0 | 4,865,081,544,152,751,000 | 31.745946 | 82 | 0.642126 | false | 3.66707 | false | false | false |
crazyhottommy/manta | src/python/lib/workflowUtil.py | 2 | 10379 | #
# Manta - Structural Variant and Indel Caller
# Copyright (c) 2013-2015 Illumina, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
"""
util -- simple utilities shared by bwa/gatk workflow objects
"""
__author__ = "Chris Saunders"
import os
import re
def ensureDir(d):
"""
make directory if it doesn't already exist, raise exception if something else is in the way:
"""
if os.path.exists(d):
if not os.path.isdir(d) :
raise Exception("Can't create directory: %s" % (d))
else :
os.makedirs(d)
def skipJoin(sep,a,b) :
if a == "" : return b
elif b == "" : return a
return a+sep+b
def preJoin(a,b) :
return skipJoin('_',a,b)
def checkFile(filename,label="") :
if os.path.isfile(filename) : return
if label is None : label=""
if label != "" : label=" "+label.strip()
raise Exception("Can't find%s file '%s'" % (label,filename) )
def checkDir(dirname,label="") :
if os.path.isdir(dirname) : return
if label is None : label=""
if label != "" : label=" "+label.strip()
raise Exception("Can't find%s directory '%s'" % (label,dirname) )
def which(searchFile) :
"""
search the PATH for searchFile
result should be the similar to *nix 'which' utility
"""
for searchPath in os.environ["PATH"].split(os.pathsep):
test=os.path.join(searchPath,searchFile)
if os.path.isfile(test): return test
return None
def parseGenomeRegion(regionStr) :
"""
parse a samtools region string and return a (chrom,start,end) tuple
missing start and end values will be entered as None
"""
assert(regionStr is not None)
word=regionStr.strip().split(':')
if (len(word) < 1) or (len(word) > 2) :
raise Exception("Unexpected format in genome region string: %s" % (regionStr))
chrom=word[0]
if len(chrom) == 0 :
raise Exception("Unexpected format in genome region string: %s" % (regionStr))
start=None
end=None
if (len(word) > 1) :
rangeWord=word[1].split('-')
if len(rangeWord) != 2 :
raise Exception("Unexpected format in genome region string: %s" % (regionStr))
start = int(rangeWord[0])
end = int(rangeWord[1])
if (end < start) or (start < 1) or (end < 1) :
raise Exception("Unexpected format in genome region string: %s" % (regionStr))
return {"chrom":chrom, "start":start, "end":end}
def isValidSampleId(sampleId) :
return re.match("^[A-Za-z0-9_-]+$", sampleId)
def getBaiFileNames(bamFile) :
"return (picard bai filename,samtools bai filename)"
return (bamFile[:-(len(".bam"))]+".bai",bamFile+".bai")
def javaHeapMemReqest(self,javaMb,javaMinMb=None,overheadMb=None) :
"""
Input is the amount of memory requested for the java heap, output is the
amount of java heap memory you're going to actually get, and the total process memory
(heap+overhead), to request for the task.
    If javaMinMb is not defined, it is assumed you need the full request
If overheadMb is not defined, it is set to the global javaTaskHeapOverheadMb value
return (javaMb,taskMb)
"""
if javaMinMb is None : javaMinMb=javaMb
if overheadMb is None : overheadMb=self.params.javaTaskHeapOverheadMb
javaMb=(self.limitMemMb(javaMb+overheadMb)-overheadMb)
if javaMb < javaMinMb :
raise Exception("Could not provide minimum java heap memory request for task. Minimum requested: %s Available: %s" % (str(javaMinMb),str(javaMb)))
assert (javaMb>0)
taskMb=(javaMb+overheadMb)
return (javaMb,taskMb)
def getFastaChromOrderSize(faiFile) :
"""
given a fasta index file,
returns
(chromOrder,chromSizes)
where:
chromOrder -- list of chromosomes in fasta order
chromSizes -- hash of chromosome sizes
"""
assert os.path.isfile(faiFile)
chromOrder=[]
chromSizes={}
for line in open(faiFile) :
(chrom,size)=line.strip().split("\t",2)[:2]
chromOrder.append(chrom)
chromSizes[chrom]=int(size)
return (chromOrder,chromSizes)
def getChromIntervals(chromOrder,chromSizes,segmentSize, genomeRegion = None) :
"""
generate chromosome intervals no greater than segmentSize
chromOrder - iterable object of chromosome names
chromSizes - a hash of chrom sizes
genomeRegionList - optionally restrict chrom intervals to only cover a list of specified chromosome region
return chromIndex,chromLabel,start,end,chromSegment
where start and end are formated for use with samtools
chromSegment is 0-indexed number of segment along each chromosome
"""
for (chromIndex, chromLabel) in enumerate(chromOrder) :
chromStart=1
chromEnd=chromSizes[chromLabel]
# adjust for the custom genome subsegment case:
if genomeRegion is not None :
if genomeRegion["chrom"] is not None :
if genomeRegion["chrom"] != chromLabel : continue
if genomeRegion["start"] is not None :
chromStart=genomeRegion["start"]
if genomeRegion["end"] is not None :
chromEnd=genomeRegion["end"]
chromSize=(chromEnd-chromStart+1)
chromSegments=1+((chromSize-1)/segmentSize)
segmentBaseSize=chromSize/chromSegments
nPlusOne=chromSize%chromSegments
start=chromStart
for i in xrange(chromSegments) :
segSize=segmentBaseSize
if i<nPlusOne : segSize += 1
end=min(start+(segSize-1),chromStart+chromSize)
yield (chromIndex,chromLabel,start,end,i,genomeRegion)
start=end+1
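# Minimal sketch (not part of the original module) of driving the generator
# above from a fasta index; the file name and segment size are assumptions.
def _example_chrom_intervals(faiFile="genome.fa.fai", segmentSize=10000000):
    (chromOrder, chromSizes) = getFastaChromOrderSize(faiFile)
    for (chromIndex, chromLabel, start, end, segIndex, region) in \
            getChromIntervals(chromOrder, chromSizes, segmentSize):
        print("%s:%i-%i (segment %i)" % (chromLabel, start, end, segIndex))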
class PathDigger(object) :
"""
Digs into a well-defined directory structure with prefixed
folder names to extract all files associated with
combinations of directory names.
This is written primarily to go through the CASAVA 1.8 output
structure.
#casava 1.8 fastq example:
fqDigger=FileDigger(['Project_','Sample_'],".fastq.gz")
"""
def __init__(self,prefixList,targetExtension=None) :
"""
if no target extension, then list directories at the tip of the prefix list
"""
self.prefixList=prefixList
self.targetExtension=targetExtension
def getNextPath(self,basePath,depth=0,ans=tuple()) :
"""
"""
if depth < len(self.prefixList) :
for d in os.listdir(basePath) :
nextDir=os.path.join(basePath,d)
if not os.path.isdir(nextDir) : continue
if not d.startswith(self.prefixList[depth]) : continue
value=d[len(self.prefixList[depth]):]
for val in self.getNextPath(nextDir,depth+1,ans+tuple([value])) :
yield val
else:
if self.targetExtension is None :
yield ans+tuple([basePath])
else :
for f in os.listdir(basePath) :
nextPath=os.path.join(basePath,f)
if not os.path.isfile(nextPath) : continue
if not f.endswith(self.targetExtension) : continue
yield ans+tuple([nextPath])
def cleanId(input_id) :
"""
    filter id so that it's safe to use as a pyflow identifier
"""
import re
return re.sub(r'([^a-zA-Z0-9_\-])', "_", input_id)
def getRobustChromId(chromIndex,chromLabel):
return "%s_%s" % (str(chromIndex).zfill(3),cleanId(chromLabel))
class GenomeSegment(object) :
"""
organizes all variables which can change
with each genomic segment.
The genomic segment is defined by:
1. chromosome
2. begin position (1-indexed closed)
3. end position (1-indexed closed)
4. chromosome segment (ie. bin) number (0-indexed)
"""
def __init__(self,chromIndex,chromLabel,beginPos,endPos,binId,genomeRegion) :
"""
arguments are the 4 genomic interval descriptors detailed in class documentation
"""
self.chromLabel = chromLabel
self.beginPos = beginPos
self.endPos = endPos
self.bamRegion = chromLabel + ':' + str(beginPos) + '-' + str(endPos)
self.binId = binId
self.binStr = str(binId).zfill(4)
self.id = chromLabel + "_" + self.binStr
regionId=cleanId(chromLabel)
if genomeRegion is not None :
if genomeRegion['start'] is not None :
regionId += "-"+str(genomeRegion['start'])
if genomeRegion['end'] is not None :
regionId += "-"+str(genomeRegion['end'])
self.pyflowId = "chromId_%s_%s_%s" % (str(chromIndex).zfill(3), regionId, self.binStr)
def getNextGenomeSegment(params) :
"""
generator which iterates through all genomic segments and
returns a segmentValues object for each one.
"""
MEGABASE = 1000000
scanSize = params.scanSizeMb * MEGABASE
if params.genomeRegionList is None :
for segval in getChromIntervals(params.chromOrder,params.chromSizes, scanSize) :
yield GenomeSegment(*segval)
else :
for genomeRegion in params.genomeRegionList :
for segval in getChromIntervals(params.chromOrder,params.chromSizes, scanSize, genomeRegion) :
yield GenomeSegment(*segval)
def cleanPyEnv() :
"""
clear out some potentially destabilizing env variables:
"""
clearList = [ "PYTHONPATH", "PYTHONHOME"]
for key in clearList :
if key in os.environ :
del os.environ[key]
def isLocalSmtp() :
"""
return true if a local smtp server is available
"""
import smtplib
try :
smtplib.SMTP('localhost')
except :
return False
return True
| gpl-3.0 | 8,957,493,526,275,949,000 | 28.654286 | 154 | 0.637345 | false | 3.827065 | false | false | false |
janusnic/21v-python | unit_17/3.py | 2 | 1068 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from PyQt4 import QtGui, QtCore
class Example(QtGui.QWidget):
def __init__(self):
super(Example, self).__init__()
self.initUI()
def initUI(self):
self.setGeometry(300, 300, 350, 100)
self.setWindowTitle('Colours')
self.show()
def paintEvent(self, e):
qp = QtGui.QPainter()
qp.begin(self)
self.drawRectangles(qp)
qp.end()
def drawRectangles(self, qp):
color = QtGui.QColor(0, 0, 0)
color.setNamedColor('#d4d4d4')
qp.setPen(color)
qp.setBrush(QtGui.QColor(200, 0, 0))
qp.drawRect(10, 15, 90, 60)
qp.setBrush(QtGui.QColor(255, 80, 0, 160))
qp.drawRect(130, 15, 90, 60)
qp.setBrush(QtGui.QColor(25, 0, 90, 200))
qp.drawRect(250, 15, 90, 60)
def main():
app = QtGui.QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_())
if __name__ == '__main__':
main() | mit | 6,275,884,260,520,210,000 | 19.557692 | 50 | 0.519663 | false | 3.276074 | false | false | false |
maxime-beck/compassion-modules | message_center_compassion/models/ir_http.py | 1 | 2150 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Yannick Vaucher, Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo.http import request
from odoo import models, _
from odoo.exceptions import UserError
from werkzeug.exceptions import Unauthorized
try:
import jwt
except ImportError:
raise UserError(_("Please install python jwt"))
logger = logging.getLogger(__name__)
class IrHTTP(models.AbstractModel):
_inherit = 'ir.http'
@classmethod
def _auth_method_oauth2(self):
if request.httprequest.method == 'GET':
mode = 'read'
if request.httprequest.method == 'POST':
mode = 'write'
token_data = request.httprequest.headers.get('Authorization')
if not token_data:
raise Unauthorized()
token_authorization = token_data.split()[0]
if token_authorization != 'Bearer':
raise Unauthorized()
access_token = token_data.split()[1]
# Token validation
options = {
            # not sure why, but this may be needed if the token is not encrypted
'verify_signature': False,
'verify_aud': False
}
jwt_decoded = jwt.decode(access_token, options=options)
# validation
# is the iss = to Compassions IDP ?
if jwt_decoded.get('iss') != 'https://esther.ci.org':
raise Unauthorized()
# is scope read or write in scopes ?
if mode not in jwt_decoded.get('scope'):
raise Unauthorized()
client_id = jwt_decoded.get('client_id')
logger.info("TOKEN CLIENT IS -----------------> " + client_id)
user = request.env['res.users'].sudo().search(
[('login', '=', client_id)])
if user:
request.uid = user.id
else:
raise Unauthorized()
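        # Illustrative shape of a token payload accepted by the checks above
        # (field values are made up):
        #   {"iss": "https://esther.ci.org",
        #    "scope": ["read", "write"],
        #    "client_id": "some_odoo_login", ...}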
| agpl-3.0 | -2,890,648,468,614,530,000 | 32.59375 | 79 | 0.552093 | false | 4.142582 | false | false | false |
vortex-exoplanet/VIP | vip_hci/metrics/dust_distribution.py | 2 | 15666 | # -*- coding: utf-8 -*-
"""
Created on Wed May 6 17:07:00 2015
"""
import numpy as np
from scipy.optimize import newton
__author__ = 'Julien Milli'
class Dust_distribution(object):
"""This class represents the dust distribution
"""
def __init__(self,density_dico={'name':'2PowerLaws', 'ain':5, 'aout':-5,
'a':60, 'e':0, 'ksi0':1., 'gamma':2.,
'beta':1.,'amin':0.,'dens_at_r0':1.}):
"""
Constructor for the Dust_distribution class.
We assume the dust density is 0 radially after it drops below 0.5%
(the accuracy variable) of the peak density in
the midplane, and vertically whenever it drops below 0.5% of the
peak density in the midplane
"""
self.accuracy = 5.e-3
if not isinstance(density_dico, dict):
errmsg = 'The parameters describing the dust density distribution' \
                     ' must be a Python dictionary'
raise TypeError(errmsg)
if 'name' not in density_dico.keys():
            errmsg = 'The dictionary describing the dust density ' \
'distribution must contain the key "name"'
raise TypeError(errmsg)
self.type = density_dico['name']
if self.type == '2PowerLaws':
self.dust_distribution_calc = DustEllipticalDistribution2PowerLaws(
self.accuracy, density_dico)
else:
errmsg = 'The only dust distribution implemented so far is the' \
' "2PowerLaws"'
raise TypeError(errmsg)
def set_density_distribution(self,density_dico):
"""
Update the parameters of the density distribution.
"""
self.dust_distribution_calc.set_density_distribution(density_dico)
def density_cylindrical(self, r, costheta, z):
"""
Return the particule volume density at r, theta, z.
"""
return self.dust_distribution_calc.density_cylindrical(r, costheta, z)
def density_cartesian(self, x, y, z):
"""
Return the particule volume density at x,y,z, taking into account the
offset of the disk.
"""
return self.dust_distribution_calc.density_cartesian(x, y, z)
def print_info(self, pxInAu=None):
"""
Utility function that displays the parameters of the radial distribution
of the dust
Input:
- pxInAu (optional): the pixel size in au
"""
print('----------------------------')
print('Dust distribution parameters')
print('----------------------------')
self.dust_distribution_calc.print_info(pxInAu)
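# Illustrative sketch (not part of the original module): building a disk and
# querying its density. All parameter values below are arbitrary assumptions.
def _example_dust_distribution():
    dust = Dust_distribution({'name': '2PowerLaws', 'ain': 5, 'aout': -5,
                              'a': 60, 'e': 0.1, 'ksi0': 1., 'gamma': 2.,
                              'beta': 1., 'amin': 0., 'dens_at_r0': 1.})
    # density at x=60 au, y=0, z=0 (in the disk midplane)
    return dust.density_cartesian(60., 0., 0.)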
class DustEllipticalDistribution2PowerLaws:
"""
"""
def __init__(self, accuracy=5.e-3, density_dico={'ain':5,'aout':-5,\
'a':60,'e':0,'ksi0':1.,\
'gamma':2.,'beta':1.,\
'amin':0.,'dens_at_r0':1.}):
"""
Constructor for the Dust_distribution class.
We assume the dust density is 0 radially after it drops below 0.5%
(the accuracy variable) of the peak density in
the midplane, and vertically whenever it drops below 0.5% of the
peak density in the midplane
"""
self.accuracy = accuracy
self.set_density_distribution(density_dico)
def set_density_distribution(self,density_dico):
"""
"""
if 'ksi0' not in density_dico.keys():
ksi0 = 1.
else:
ksi0 = density_dico['ksi0']
if 'beta' not in density_dico.keys():
beta = 1.
else:
beta = density_dico['beta']
if 'gamma' not in density_dico.keys():
gamma = 1.
else:
gamma = density_dico['gamma']
if 'aout' not in density_dico.keys():
aout = -5.
else:
aout = density_dico['aout']
if 'ain' not in density_dico.keys():
ain = 5.
else:
ain = density_dico['ain']
if 'e' not in density_dico.keys():
e = 0.
else:
e = density_dico['e']
if 'a' not in density_dico.keys():
a = 60.
else:
a = density_dico['a']
if 'amin' not in density_dico.keys():
amin = 0.
else:
amin = density_dico['amin']
if 'dens_at_r0' not in density_dico.keys():
dens_at_r0=1.
else:
dens_at_r0=density_dico['dens_at_r0']
self.set_vertical_density(ksi0=ksi0, gamma=gamma, beta=beta)
self.set_radial_density(ain=ain, aout=aout, a=a, e=e,amin=amin,dens_at_r0=dens_at_r0)
def set_vertical_density(self, ksi0=1., gamma=2., beta=1.):
"""
Sets the parameters of the vertical density function
Parameters
----------
ksi0 : float
scale height in au at the reference radius (default 1 a.u.)
gamma : float
exponent (2=gaussian,1=exponential profile, default 2)
beta : float
flaring index (0=no flaring, 1=linear flaring, default 1)
"""
if gamma < 0.:
print('Warning the vertical exponent gamma is negative')
print('Gamma was changed from {0:6.2f} to 0.1'.format(gamma))
gamma = 0.1
if ksi0 < 0.:
print('Warning the scale height ksi0 is negative')
print('ksi0 was changed from {0:6.2f} to 0.1'.format(ksi0))
ksi0 = 0.1
if beta < 0.:
print('Warning the flaring coefficient beta is negative')
print('beta was changed from {0:6.2f} to 0 (flat disk)'.format(beta))
beta = 0.
self.ksi0 = float(ksi0)
self.gamma = float(gamma)
self.beta = float(beta)
self.zmax = ksi0*(-np.log(self.accuracy))**(1./gamma)
def set_radial_density(self, ain=5., aout=-5., a=60., e=0.,amin=0.,dens_at_r0=1.):
"""
Sets the parameters of the radial density function
Parameters
----------
ain : float
slope of the power-low distribution in the inner disk. It
must be positive (default 5)
aout : float
slope of the power-low distribution in the outer disk. It
must be negative (default -5)
a : float
reference radius in au (default 60)
e : float
eccentricity (default 0)
amin: float
minimim semi-major axis: the dust density is 0 below this
value (default 0)
"""
if ain < 0.1:
            print('Warning the inner slope is lower than 0.1')
print('ain was changed from {0:6.2f} to 0.1'.format(ain))
ain = 0.1
if aout > -0.1:
print('Warning the outer slope is greater than -0.1')
print('aout was changed from {0:6.2f} to -0.1'.format(aout))
aout = -0.1
if e < 0:
print('Warning the eccentricity is negative')
print('e was changed from {0:6.2f} to 0'.format(e))
e = 0.
if e >= 1:
print('Warning the eccentricity is greater or equal to 1')
print('e was changed from {0:6.2f} to 0.99'.format(e))
e = 0.99
if a < 0:
raise ValueError('Warning the semi-major axis a is negative')
if amin < 0:
raise ValueError('Warning the minimum radius a is negative')
print('amin was changed from {0:6.2f} to 0.'.format(amin))
amin = 0.
if dens_at_r0 <0:
raise ValueError('Warning the reference dust density at r0 is negative')
print('It was changed from {0:6.2f} to 1.'.format(dens_at_r0))
dens_at_r0 = 1.
self.ain = float(ain)
self.aout = float(aout)
self.a = float(a)
self.e = float(e)
self.p = self.a*(1-self.e**2)
self.amin = float(amin)
self.pmin = self.amin*(1-self.e**2) ## we assume the inner hole is also elliptic (convention)
self.dens_at_r0 = float(dens_at_r0)
try:
# maximum distance of integration, AU
self.rmax = self.a*self.accuracy**(1/self.aout)
if self.ain != self.aout:
self.apeak = self.a * np.power(-self.ain/self.aout,
1./(2.*(self.ain-self.aout)))
Gamma_in = self.ain+self.beta
Gamma_out = self.aout+self.beta
self.apeak_surface_density = self.a * np.power(-Gamma_in/Gamma_out,
1./(2.*(Gamma_in-Gamma_out)))
else:
self.apeak = self.a
self.apeak_surface_density = self.a
except OverflowError:
print('The error occured during the calculation of rmax or apeak')
print('Inner slope: {0:.6e}'.format(self.ain))
print('Outer slope: {0:.6e}'.format(self.aout))
print('Accuracy: {0:.6e}'.format(self.accuracy))
raise OverflowError
except ZeroDivisionError:
print('The error occured during the calculation of rmax or apeak')
print('Inner slope: {0:.6e}'.format(self.ain))
print('Outer slope: {0:.6e}'.format(self.aout))
print('Accuracy: {0:.6e}'.format(self.accuracy))
raise ZeroDivisionError
self.itiltthreshold = np.rad2deg(np.arctan(self.rmax/self.zmax))
def print_info(self, pxInAu=None):
"""
Utility function that displays the parameters of the radial distribution
of the dust
Input:
- pxInAu (optional): the pixel size in au
"""
def rad_density(r):
return np.sqrt(2/(np.power(r/self.a,-2*self.ain) +
np.power(r/self.a,-2*self.aout)))
half_max_density = lambda r:rad_density(r)/rad_density(self.apeak)-1./2.
try:
if self.aout < -3:
a_plus_hwhm = newton(half_max_density,self.apeak*1.04)
else:
a_plus_hwhm = newton(half_max_density,self.apeak*1.1)
except RuntimeError:
a_plus_hwhm = np.nan
try:
if self.ain < 2:
a_minus_hwhm = newton(half_max_density,self.apeak*0.5)
else:
a_minus_hwhm = newton(half_max_density,self.apeak*0.95)
except RuntimeError:
a_minus_hwhm = np.nan
if pxInAu is not None:
msg = 'Reference semi-major axis: {0:.1f}au or {1:.1f}px'
print(msg.format(self.a,self.a/pxInAu))
msg2 = 'Semi-major axis at maximum dust density in plane z=0: {0:.1f}au or ' \
'{1:.1f}px (same as ref sma if ain=-aout)'
print(msg2.format(self.apeak,self.apeak/pxInAu))
msg3 = 'Semi-major axis at half max dust density in plane z=0: {0:.1f}au or ' \
'{1:.1f}px for the inner edge ' \
'/ {2:.1f}au or {3:.1f}px for the outer edge, with a FWHM of ' \
'{4:.1f}au or {5:.1f}px'
print(msg3.format(a_minus_hwhm,a_minus_hwhm/pxInAu,a_plus_hwhm,\
a_plus_hwhm/pxInAu,a_plus_hwhm-a_minus_hwhm,\
(a_plus_hwhm-a_minus_hwhm)/pxInAu))
msg4 = 'Semi-major axis at maximum dust surface density: {0:.1f}au or ' \
'{1:.1f}px (same as ref sma if ain=-aout)'
print(msg4.format(self.apeak_surface_density,self.apeak_surface_density/pxInAu))
msg5 = 'Ellipse p parameter: {0:.1f}au or {1:.1f}px'
print(msg5.format(self.p,self.p/pxInAu))
else:
print('Reference semi-major axis: {0:.1f}au'.format(self.a))
msg = 'Semi-major axis at maximum dust density in plane z=0: {0:.1f}au (same ' \
'as ref sma if ain=-aout)'
print(msg.format(self.apeak))
msg3 = 'Semi-major axis at half max dust density: {0:.1f}au ' \
'/ {1:.1f}au for the inner/outer edge, or a FWHM of ' \
'{2:.1f}au'
print(msg3.format(a_minus_hwhm,a_plus_hwhm,a_plus_hwhm-a_minus_hwhm))
print('Ellipse p parameter: {0:.1f}au'.format(self.p))
print('Ellipticity: {0:.3f}'.format(self.e))
print('Inner slope: {0:.2f}'.format(self.ain))
print('Outer slope: {0:.2f}'.format(self.aout))
        print('Density at the reference semi-major axis: {0:4.3e} (arbitrary unit)'.format(self.dens_at_r0))
if self.amin>0:
print('Minimum radius (sma): {0:.2f}au'.format(self.amin))
if pxInAu is not None:
msg = 'Scale height: {0:.1f}au or {1:.1f}px at {2:.1f}'
print(msg.format(self.ksi0,self.ksi0/pxInAu,self.a))
else:
print('Scale height: {0:.2f} au at {1:.2f}'.format(self.ksi0,
self.a))
print('Vertical profile index: {0:.2f}'.format(self.gamma))
msg = 'Disc vertical FWHM: {0:.2f} at {1:.2f}'
print(msg.format(2.*self.ksi0*np.power(np.log10(2.), 1./self.gamma),
self.a))
print('Flaring coefficient: {0:.2f}'.format(self.beta))
print('------------------------------------')
print('Properties for numerical integration')
print('------------------------------------')
print('Requested accuracy {0:.2e}'.format(self.accuracy))
# print('Minimum radius for integration: {0:.2f} au'.format(self.rmin))
print('Maximum radius for integration: {0:.2f} au'.format(self.rmax))
print('Maximum height for integration: {0:.2f} au'.format(self.zmax))
msg = 'Inclination threshold: {0:.2f} degrees'
print(msg.format(self.itiltthreshold))
return
def density_cylindrical(self, r, costheta, z):
""" Returns the particule volume density at r, theta, z
"""
radial_ratio = r/(self.p/(1-self.e*costheta))
den = (np.power(radial_ratio, -2*self.ain) +
np.power(radial_ratio,-2*self.aout))
radial_density_term = np.sqrt(2./den)*self.dens_at_r0
if self.pmin>0:
radial_density_term[r/(self.pmin/(1-self.e*costheta)) <= 1]=0
den2 = (self.ksi0*np.power(radial_ratio,self.beta))
vertical_density_term = np.exp(-np.power(np.abs(z)/den2, self.gamma))
return radial_density_term*vertical_density_term
def density_cartesian(self, x, y, z):
""" Returns the particule volume density at x,y,z, taking into account
the offset of the disk
"""
r = np.sqrt(x**2+y**2)
if r == 0:
costheta=0
else:
costheta = x/r
return self.density_cylindrical(r,costheta,z)
if __name__ == '__main__':
"""
Main of the class for debugging
"""
test = DustEllipticalDistribution2PowerLaws()
test.set_radial_density(ain=5., aout=-5., a=60., e=0.)
test.print_info()
costheta = 0.
z = 0.
for a in np.linspace(60-5,60+5,11):
t = test.density_cylindrical(a, costheta, z)
print('r={0:.1f} density={1:.4f}'.format(a, t)) | mit | -3,831,389,070,457,644,000 | 41.923288 | 120 | 0.524512 | false | 3.541139 | false | false | false |
plotly/python-api | packages/python/plotly/plotly/tests/test_core/test_graph_objs/test_instantiate_hierarchy.py | 2 | 1460 | from __future__ import absolute_import
from unittest import TestCase
import os
import importlib
import inspect
from plotly.basedatatypes import BasePlotlyType, BaseFigure
datatypes_root = "plotly/graph_objs"
datatype_modules = [
dirpath.replace("/", ".")
for dirpath, _, _ in os.walk(datatypes_root)
if not dirpath.endswith("__pycache__")
]
class HierarchyTest(TestCase):
def test_construct_datatypes(self):
for datatypes_module in datatype_modules:
module = importlib.import_module(datatypes_module)
for name in getattr(module, "__all__", []):
if name.startswith("_") or name[0].islower() or name == "FigureWidget":
continue
obj = getattr(module, name)
try:
v = obj()
except Exception:
print(
"Failed to construct {obj} in module {module}".format(
obj=obj, module=datatypes_module
)
)
raise
if obj.__module__ == "plotly.graph_objs._deprecations":
self.assertTrue(isinstance(v, list) or isinstance(v, dict))
obj()
elif name in ("Figure", "FigureWidget"):
self.assertIsInstance(v, BaseFigure)
else:
self.assertIsInstance(v, BasePlotlyType)
| mit | -6,968,331,776,134,214,000 | 34.609756 | 87 | 0.532192 | false | 4.74026 | false | false | false |
geodelic/arke | arke/plugins/collect/system.py | 1 | 5592 | #
# Copyright 2013 Geodelic
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import logging
logger = logging.getLogger(__name__)
import psutil
from psutil._pslinux import wrap_exceptions
from arke.collect import Collect
class ExProcess(psutil.Process):
@property
def _process_name(self):
return self._platform_impl._process_name
@property
@wrap_exceptions
def oom_score(self):
with open('/proc/%i/oom_score' % self.pid, 'r') as f:
return int(f.readline().strip())
class system(Collect):
default_config = {'interval': 30,
}
def collect(self):
return dict(
cpu_times=psutil.cpu_times()._asdict(),
mem=dict(
total_phymem=psutil.TOTAL_PHYMEM,
avail_phymem=psutil.avail_phymem(),
avail_virtmem=psutil.avail_virtmem(),
cached_phymem=psutil.cached_phymem(),
phymem_buffers=psutil.phymem_buffers(),
total_virtmem=psutil.total_virtmem(),
used_phymem=psutil.used_phymem(),
used_virtmem=psutil.used_virtmem(),
),
processes=list(self._processes()),
net=dict(
ifaces=self._net_dev(),
proto=self._net_proto()
),
io=self._io_stats(),
fs=dict(self._fs_usage()),
fh=self._file_handles(),
)
def _processes(self):
for pid in psutil.get_pid_list():
try:
process = ExProcess(pid)
if not process.cmdline:
continue
yield dict(
name=process.name,
cmdline=' '.join(process.cmdline),
status=str(process.status),
ppid=process.ppid,
pid=process.pid,
cpu_times=process.get_cpu_times()._asdict(),
io_counters=process.get_io_counters()._asdict(),
memory=process.get_memory_info()._asdict(),
oom_score=process.oom_score,
num_threads=process.get_num_threads(),
connections=[c._asdict() for c in process.get_connections()],
open_files=[f.path for f in process.get_open_files()],
)
except psutil.NoSuchProcess:
continue
def _file_handles(self):
        # /proc/sys/fs/file-nr reports: allocated handles, free handles, maximum
        with open('/proc/sys/fs/file-nr', 'r') as fh:
            allocated, free, maximum = map(int, fh.readline().split())
        return dict(
            open=allocated,
            free=free,
            max=maximum
)
def _net_proto(self):
protocols = {}
def _parse(fn):
with open(fn, 'r') as f:
for line in f:
proto, cols = line.split(':')
cols = cols.split()
nproto, data = f.next().split(':')
assert proto == nproto, "the format of %s has changed!" % fn
proto_data = dict(zip(cols, map(int, data.split())))
protocols[proto] = proto_data
_parse('/proc/net/snmp')
_parse('/proc/net/netstat')
return protocols
def _net_dev(self):
ifaces = {}
with open('/proc/net/dev', 'r') as f:
f.readline()
columnLine = f.readline()
_, receiveCols , transmitCols = columnLine.split("|")
receiveCols = map(lambda a:"recv_"+a, receiveCols.split())
transmitCols = map(lambda a:"trans_"+a, transmitCols.split())
cols = receiveCols+transmitCols
for line in f:
if ':' not in line: continue
iface, stats = line.split(":")
ifaceData = map(int, stats.split())
if not any(ifaceData):
continue
ifaces[iface.strip()] = dict(zip(cols, ifaceData))
return ifaces
def _io_stats(self):
cols = ('read_count',
'reads_merged',
'read_sectors',
'reading_ms',
'write_count',
'writes_merged',
'write_sectors',
'writing_ms',
'io_run',
'io_rms',
'io_twms')
results = {}
with open('/proc/diskstats', 'r') as f:
for line in f:
data = line.split()[2:]
disk = data.pop(0)
if disk.startswith('ram') or disk.startswith('loop'):
continue
results[disk] = dict(zip(cols, map(int, data)))
return results
def _fs_usage(self):
for partition in psutil.disk_partitions():
usage = psutil.disk_usage(partition.mountpoint)._asdict()
usage['filesystem'] = partition.device
yield (partition.mountpoint, usage)
if __name__ == '__main__':
from pprint import pprint
pprint(system(None,None,None,None).collect())
| apache-2.0 | -6,312,197,700,102,997,000 | 31.137931 | 81 | 0.50912 | false | 4.236364 | false | false | false |
Glottotopia/aagd | moin/local/moin/build/lib.linux-x86_64-2.6/MoinMoin/stats/useragents.py | 2 | 5462 | # -*- coding: iso-8859-1 -*-
"""
MoinMoin - User-Agent Statistics
This macro creates a pie chart of the type of user agents
accessing the wiki.
TODO: should be refactored after hitcounts.
@copyright: 2002-2004 Juergen Hermann <[email protected]>,
2007 MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
_debug = 0
from MoinMoin import wikiutil, caching, logfile
from MoinMoin.Page import Page
from MoinMoin.logfile import eventlog
def linkto(pagename, request, params=''):
_ = request.getText
if not request.cfg.chart_options:
return text(pagename, request)
if _debug:
return draw(pagename, request)
page = Page(request, pagename)
# Create escaped query string from dict and params
querystr = {'action': 'chart', 'type': 'useragents'}
querystr = wikiutil.makeQueryString(querystr)
querystr = wikiutil.escape(querystr)
if params:
querystr += '&' + params
data = {'url': page.url(request, querystr)}
data.update(request.cfg.chart_options)
result = ('<img src="%(url)s" width="%(width)d" height="%(height)d"'
' alt="useragents chart">') % data
return result
def get_data(request):
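    """Return a list of (count, useragent) tuples, sorted by count descending."""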
# get results from cache
cache = caching.CacheEntry(request, 'charts', 'useragents', scope='wiki', use_pickle=True)
cache_date, data = 0, {}
if cache.exists():
try:
cache_date, data = cache.content()
except:
cache.remove() # cache gone bad
log = eventlog.EventLog(request)
try:
new_date = log.date()
except logfile.LogMissing:
new_date = None
if new_date is not None:
log.set_filter(['VIEWPAGE', 'SAVEPAGE'])
for event in log.reverse():
if event[0] <= cache_date:
break
ua = event[2].get('HTTP_USER_AGENT')
if ua:
try:
pos = ua.index(" (compatible; ")
ua = ua[pos:].split(';')[1].strip()
except ValueError:
ua = ua.split()[0]
#ua = ua.replace(';', '\n')
data[ua] = data.get(ua, 0) + 1
# write results to cache
cache.update((new_date, data))
data = [(cnt, ua) for ua, cnt in data.items()]
data.sort()
data.reverse()
return data
def text(pagename, request):
from MoinMoin.util.dataset import TupleDataset, Column
from MoinMoin.widget.browser import DataBrowserWidget
_ = request.getText
data = get_data(request)
total = 0.0
for cnt, ua in data:
total += cnt
agents = TupleDataset()
agents.columns = [Column('agent', label=_("User agent"), align='left'),
Column('value', label='%', align='right')]
cnt_printed = 0
data = data[:10]
if total:
for cnt, ua in data:
try:
ua = unicode(ua)
agents.addRow((ua, "%.2f" % (100.0 * cnt / total)))
cnt_printed += cnt
except UnicodeError:
pass
if total > cnt_printed:
agents.addRow((_('Others'), "%.2f" % (100 * (total - cnt_printed) / total)))
table = DataBrowserWidget(request)
table.setData(agents)
return table.render(method="GET")
def draw(pagename, request):
import shutil, cStringIO
from MoinMoin.stats.chart import Chart, ChartData, Color
_ = request.getText
style = Chart.GDC_3DPIE
# get data
colors = ['red', 'mediumblue', 'yellow', 'deeppink', 'aquamarine', 'purple', 'beige',
'blue', 'forestgreen', 'orange', 'cyan', 'fuchsia', 'lime']
colors = ([Color(c) for c in colors])
data = get_data(request)
maxdata = len(colors) - 1
if len(data) > maxdata:
others = [x[0] for x in data[maxdata:]]
data = data[:maxdata] + [(sum(others), _('Others').encode('iso-8859-1', 'replace'))] # gdchart can't do utf-8
# shift front to end if others is very small
if data[-1][0] * 10 < data[0][0]:
data = data[1:] + data[0:1]
labels = [x[1] for x in data]
data = [x[0] for x in data]
# give us a chance to develop this
if _debug:
return "<p>data = %s</p>" % \
'<br>'.join([wikiutil.escape(repr(x)) for x in [labels, data]])
# create image
image = cStringIO.StringIO()
c = Chart()
c.addData(data)
title = ''
if request.cfg.sitename: title = "%s: " % request.cfg.sitename
title = title + _('Distribution of User-Agent Types')
c.option(
pie_color=colors,
label_font=Chart.GDC_SMALL,
label_line=1,
label_dist=20,
threed_depth=20,
threed_angle=225,
percent_labels=Chart.GDCPIE_PCT_RIGHT,
title_font=c.GDC_GIANT,
title=title.encode('iso-8859-1', 'replace')) # gdchart can't do utf-8
labels = [label.encode('iso-8859-1', 'replace') for label in labels]
c.draw(style,
(request.cfg.chart_options['width'], request.cfg.chart_options['height']),
image, labels)
request.content_type = 'image/gif'
request.content_length = len(image.getvalue())
# copy the image
image.reset()
shutil.copyfileobj(image, request, 8192)
| mit | 2,951,261,090,398,807,000 | 28.010989 | 117 | 0.554925 | false | 3.678114 | false | false | false |
acassis/lintouch | lsp-modbus/tests/test_modbusmaster.py | 1 | 3241 | #!/usr/bin/env python
# $Id: $
#
# FILE: test_mobusmaster.py --
# AUTHOR: Jiri Barton <[email protected]>
# DATE: 14 June 2005
#
# Copyright (c) 2001-2005 S.W.A.C. GmbH, Germany.
# All Rights Reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
import time
from modlib import *
def server():
c = ModbusServerContext(
coils_address=1, coils_count=3,
discrete_inputs_address=0, discrete_inputs_count=16,
holding_registers_address=17, holding_registers_count=2,
input_registers_address=0, input_registers_count=8)
s = ModbusTCPServer(context=c, port=10502)
try:
s.serve_forever()
except KeyboardInterrupt:
print "Ctrl+C pressed - exiting..."
s.server_close()
def client():
cn = TCPMasterConnection('127.0.0.1', 10502)
try:
c = 0
while True:
#print the address space of the server
for req in [ReadCoilsRequest(address=1, count=3),
ReadDiscreteInputsRequest(address=0, count=16),
ReadHoldingRegistersRequest(address=17, count=2),
ReadInputRegistersRequest(address=0, count=8)]:
name = req.__class__.__name__[4:-7].lower()
tr = cn.createTransaction(req)
res = tr.execute()
print ' %s:' % name,
if isinstance(res, ExceptionResponse):
print 'exception, %d' % res.exception_code,
else:
if 'registers' in name:
print res.registers,
else:
print [[0, 1][i] for i in res.bits],
print
time.sleep(1)
#flash with a coil
req = WriteSingleCoilRequest(address=3, value=bool(c%2))
tr = cn.createTransaction(req)
res = tr.execute()
#display the number in a holding register
req = WriteSingleRegisterRequest(address=17, value=c)
tr = cn.createTransaction(req)
res = tr.execute()
c += 1
except KeyboardInterrupt:
print "Ctrl+C pressed - exiting..."
cn.close()
if __name__ == '__main__':
if len(sys.argv) < 2 or sys.argv[1] in ['--server', '-s']:
print 'Starting the ModbusTCP server...'
server()
elif sys.argv[1] in ['--client', '-c']:
print 'Starting the ModbusTCP client...'
client()
else:
print 'usage: %s [--server|-s|--client|-c]' % sys.argv[0]
# vim: set et ts=4 sw=4 tw=76 si:
# $Id: $
| gpl-2.0 | -3,508,030,124,394,234,000 | 33.115789 | 75 | 0.594878 | false | 3.821934 | false | false | false |
ngautam0/keras-pro-bar | keras_tqdm/tqdm_notebook_callback.py | 1 | 1601 | from tqdm import tqdm_notebook
from .tqdm_callback import TQDMCallback
import sys
class TQDMNotebookCallback(TQDMCallback):
def __init__(self,
outer_description="Training",
inner_description_initial="Epoch {epoch}",
inner_description_update="[{metrics}] ",
metric_format="{name}: {value:0.3f}",
separator=", ",
leave_inner=False,
leave_outer=True,
output_file=sys.stderr, **kwargs):
super(TQDMNotebookCallback, self).__init__(outer_description=outer_description,
inner_description_initial=inner_description_initial,
inner_description_update=inner_description_update,
metric_format=metric_format,
separator=separator,
leave_inner=leave_inner,
leave_outer=leave_outer,
output_file=output_file, **kwargs)
def tqdm(self, desc, total, leave):
"""
Extension point. Override to provide custom options to tqdm_notebook initializer.
:param desc: Description string
:param total: Total number of updates
:param leave: Leave progress bar when done
:return: new progress bar
"""
return tqdm_notebook(desc=desc, total=total, leave=leave)
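# Minimal usage sketch (not part of this module): pass the callback to Keras'
# fit() and silence the default progress output; the model and data names are
# assumptions for illustration.
#
#   from keras_tqdm import TQDMNotebookCallback
#   model.fit(x_train, y_train, epochs=10, verbose=0,
#             callbacks=[TQDMNotebookCallback()])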
| mit | -171,157,851,222,273,600 | 47.515152 | 103 | 0.501562 | false | 5.301325 | false | false | false |
kordless/zoto-server | aztk/servers/ZSPServer.py | 1 | 16722 | from twisted.python import usage
from ConfigParser import ConfigParser
from AZTKServer import AZTKServer
import Image, ImageFile, sys, errors, validation, pprint, md5, time, socket, zsp_packets
from twisted.internet.protocol import Factory
from twisted.internet.app import Application
from twisted.internet import defer, reactor
from twisted.protocols import basic
from twisted.protocols import policies
from cStringIO import StringIO
from display_sizes import display_sizes
import traceback
class ZSP(basic.LineReceiver, policies.TimeoutMixin):
"""
Connection Protocol
"""
TIMEOUT = 60 #seconds
def connectionMade(self):
"""
This is called once per instance when a client has connected to us.
It sets up the timeout for this connection, and defines the available commands
@return: Nothing
@rtype: Nothing
"""
# get current version information for Zulu client
self.major = int(self.factory.app.servers.zspserver._cfg_version_major)
self.minor = int(self.factory.app.servers.zspserver._cfg_version_minor)
self.build = int(self.factory.app.servers.zspserver._cfg_version_build)
self.tag = str(self.factory.app.servers.zspserver._cfg_version_tag)
self.storing = False
# get a local reference to the ZSP server's log instance
self.log = self.factory.log
self.bin = {
"buffer": StringIO(),
"bytes_in_buffer": 0,
"bytes_wanted": 3,
}
self.action_list = []
self.setTimeout(self.TIMEOUT)
self.header = zsp_packets.zsp_header()
remote = self.transport.getPeer()
self.log.debug("got a connection from %s:%s [%s]" % (remote.host, remote.port, remote.type))
self.setRawMode()
##remote_server = self.transport.getPeer()
## self.factory.app.servers.zspserver.log.info("Received a connection from %s. Authenticating..." % remote_server[1])
self.die = False # they are in good standing so far, if we determine not to talk to them (True) kill connection
self.sync_list = [] # a list of approved files (from FLAG requests)
self.file = {} # a dict with info for the file ready to be received or currently receiving (from FILE requests)
def connectionLost(self, reason="not sure"):
"""
This is called once per connection, when the socket has closed (for any reason)
@param reason: reason for disconnection
@type reason: String
@return: Nothing
@rtype: Nothing
"""
self.setTimeout(None)
## self.factory.app.servers.zspserver.log.info("Lost Connection: %s" % reason.getErrorMessage())
try:
self.log.debug("Lost Connection: %s" % reason.getErrorMessage())
except:
self.log.debug("Lost Connection: %s" % reason)
def timeoutConnection(self):
"""
Called when the client times out for some reason
@return: Nothing
@rtype: Nothing
"""
self.setTimeout(None)
self.factory.app.servers.zspserver.log.info("Client Timed out. Later, sucker.")
def send(self, string):
"""
Simple method for sending a string to the client.
It tacks on the line terminator for you.
@param string: the message to send to the client
@type string: string
@return: Nothing
@rtype: Nothing
"""
self.transport.write(str(string))
def rawDataReceived(self, data):
"""
This is used after the protocol mode has been switched from Line to Raw.
L{do_FILE()} switches the mode to raw. This mode does not trim
line terminators from the incoming data
Data is stacked up here, until the buffer reaches the size limit defined for
this file in the ITEM and FILE requests. Once the buffer hits its limit, this
method will call L{do_DONE()} and reset the protocol to line mode
@param data: raw data
@type: String
@return: Nothing
@rtype: Nothing
"""
self.resetTimeout()
if self.die:
self.transport.loseConnection("TEST")
return
if len(self.file.keys()) > 0:
# we have a file to receive...
if self.file["bytes_received"] >= self.file["filesize"]:
# we have the whole file, let the normal buffer get the next request
# should be a DONE packet...
pass
else:
if len(data) + self.file["bytes_received"] > self.file["filesize"]:
# we got too much data, roll the buffer back a bit
remainder = self.file["filesize"] - self.file["bytes_received"]
assert remainder > 0
partial_packet = data[0:remainder]
data = data[remainder:]
self.file["bytes_received"] += len(partial_packet)
self.file["buffer"].write(partial_packet)
else:
self.file["bytes_received"] += len(data)
self.file["buffer"].write(data)
return
self.bin["buffer"].write(data)
self.bin["bytes_in_buffer"] = len(self.bin["buffer"].getvalue())
if self.bin["bytes_in_buffer"] >= self.bin["bytes_wanted"]:
try:
self.header.parse(self.bin["buffer"].getvalue())
except Exception, ex:
# if for any reason this doesn't work the client is sending crap
# cut them off and close connection
self.transport.loseConnection('OUT OF SYNC')
return
self.bin["bytes_wanted"] = self.header.length
if self.bin["bytes_in_buffer"] >= self.header.length:
d = False
if self.header.type == zsp_packets.ZSP_AUTH:
d = self.do_AUTH()
elif self.header.type == zsp_packets.ZSP_VERSION:
d = self.do_VERSION()
elif self.header.type == zsp_packets.ZSP_FLAG:
d = self.do_FLAG()
elif self.header.type == zsp_packets.ZSP_FILE:
d = self.do_FILE()
elif self.header.type == zsp_packets.ZSP_DONE:
d = self.do_DONE()
else:
error = zsp_packets.zsp_error()
error.error_code = 500
error.error_string = "Unknown Packet Request"
self.send(error.build())
self.transport.loseConnection()
# send the response
if d: d.addCallback(self.send)
def do_HEARTBEAT(self):
"""
Sends a heartbeat packet to the client, maintaining the connection.
"""
if self.storing:
self.log.debug("sending a heartbeat packet to the client")
packet = zsp_packets.zsp_heartbeat()
self.send(packet.build())
self.factory.app.reactor.callLater(2, self.do_HEARTBEAT)
def do_AUTH(self):
"""
Checks to see if a user is authorized
@return: response object
@rtype: response object
"""
packet = zsp_packets.zsp_auth()
resp = zsp_packets.zsp_auth_resp()
error = zsp_packets.zsp_error()
# buffer now contains any leftover data
buffer = packet.parse(self.bin["buffer"].getvalue())
self.bin["buffer"] = StringIO()
self.bin["buffer"].write(buffer)
self.bin["bytes_wanted"] = 3
# really auth the user here...
def handle_user_id(result):
if result[0] != 0:
resp.return_code = zsp_packets.ZSP_AUTH_BAD
resp.response = "USERNAME OR PASSWORD INVALID"
self.die = True
self.userid = result[1]
d_user = self.factory.app.api.users.get_info(self.userid, self.userid)
d_user.addCallback(print_user)
return d_user
def print_user(result):
if result[0] != 0:
resp.return_code = zsp_packets.ZSP_AUTH_BAD
resp.response = "USERNAME OR PASSWORD INVALID"
self.die = True
if not result[1]:
resp.return_code = zsp_packets.ZSP_AUTH_BAD
resp.response = "USERNAME OR PASSWORD INVALID"
self.die = True
self.user_info = result[1]
if self.user_info['password'] == packet.pswd_hash:
self.log.debug("user authed as %s" % packet.user_name)
self.username = packet.user_name
resp.return_code = zsp_packets.ZSP_AUTH_OK
resp.response = "USER OK"
else:
self.log.debug("couldn't auth %s" % (packet.username))
resp.return_code = zsp_packets.ZSP_AUTH_BAD
resp.response = "USERNAME OR PASSWORD INVALID"
self.die = True
def bad_auth(reason):
self.log.debug("bad auth maybe a not found? %s" % reason)
error.error_code = 500
error.error_string = "INTERNAL SERVER ERROR"
self.die = True
return error.build()
db_d = self.factory.app.api.users.get_user_id(packet.user_name)
db_d.addCallback(handle_user_id)
db_d.addErrback(bad_auth)
db_d.addCallback(lambda _: resp.build())
return db_d
def do_VERSION(self):
"""
Checks the version of the client
@return: response
@rtype: (deferred) response object
"""
packet = zsp_packets.zsp_version()
resp = zsp_packets.zsp_version_resp()
# buffer now contains any leftover data
buffer = packet.parse(self.bin["buffer"].getvalue())
self.bin["buffer"] = StringIO()
self.bin["buffer"].write(buffer)
self.bin["bytes_wanted"] = 3
client_version = "%s.%s.%s" % (packet.vers_maj, packet.vers_min, packet.vers_build)
if self.factory.app.servers.zspserver.cfg.has_option("versions", client_version):
response = self.factory.app.servers.zspserver.cfg.getint("versions", client_version)
else:
response = 0
if response == 410:
# they're ok
resp.return_code = zsp_packets.ZSP_VERS_GOOD
resp.comment = "VERSION OK [%s]" % socket.gethostname()
elif response == 415:
# newer version is available, but still ok
resp.return_code = zsp_packets.ZSP_VERS_OLD
resp.comment = "%s.%s.%s %s" % (self.major, self.minor, self.build, self.tag)
elif response == 420:
# obsolete, forced update
resp.return_code = zsp_packets.ZSP_VERS_BAD
resp.comment = "%s.%s.%s %s" % (self.major, self.minor, self.build, self.tag)
self.die = True
elif response == 425:
# obsolete, show error dialog (no auto-update)
resp.return_code = zsp_packets.ZSP_VERS_FUBAR
resp.comment = "%s.%s.%s %s" % (self.major, self.minor, self.build, self.tag)
self.die = True
elif response == -1:
# drop connection
self.log.debug("unknown version")
self.transport.loseConnection("")
self.die = True
return None
else:
self.factory.app.log.warning("client version %s tried to connect, but isn't in the config file" % client_version)
self.log.warning("someone connected with version %s which isn't in the config file" % client_version)
# drop connection
self.transport.loseConnection("")
self.die = True
return None
d = defer.Deferred()
d.addCallback(lambda _: resp.build())
d.callback(0)
return d
def do_FLAG(self):
"""
foo
"""
packet = zsp_packets.zsp_flag()
resp = zsp_packets.zsp_flag_resp()
error = zsp_packets.zsp_error()
# buffer now contains any leftover data
buffer = packet.parse(self.bin["buffer"].getvalue())
self.bin["buffer"] = StringIO()
self.bin["buffer"].write(buffer)
self.bin["bytes_wanted"] = 3
try:
checksum = packet.image_id
filesize = packet.image_size
filetype = packet.image_format
filename = packet.image_name
filedate = packet.image_date
except:
# we couldn't get the right info from the packet...
error.error_code = 314
error.error_string = "Malformed FLAG request"
if error.error_code:
d = defer.Deferred()
d.addCallback(lambda _: error.build())
d.callback(0)
return d
else:
resp.image_id = checksum
self.sync_list.append(checksum)
d_flags = self.factory.app.api.images.image_exists(checksum)
def check_exists(exists):
if exists:
self.log.debug("Image [%s] already uploaded" % checksum)
resp.image_needed = 0
self.storing = True
self.do_HEARTBEAT()
d_set = self.factory.app.api.images.set_user_image(self.userid, checksum, filename, 'ZULU Client', '', '')
d_set.addCallback(check_set_success)
return d_set
else:
resp.image_needed = 1
def check_set_success(result):
self.storing = False
if result[0] != 0:
raise Exception(result[1])
d_flags.addCallback(check_exists)
d_flags.addCallback(lambda _: resp.build())
return d_flags
def do_FILE(self):
"""
foo
"""
packet = zsp_packets.zsp_file()
resp = zsp_packets.zsp_file_resp()
# buffer now contains any leftover data
buffer = packet.parse(self.bin["buffer"].getvalue())
self.bin["buffer"] = StringIO()
self.bin["buffer"].write(buffer)
self.bin["bytes_wanted"] = 3
error = zsp_packets.zsp_error()
try:
image_id = packet.image_id
filetype = packet.image_format
filesize = packet.image_size
filedate = packet.image_date
filename = packet.image_name
except:
# we couldn't get the right info from the packet...
error.error_code = zsp_packets.ZSP_FILE_BAD
error.error_string = "Malformed FILE request"
if image_id not in self.sync_list:
error.error_code = zsp_packets.ZSP_FILE_NO_FLAG
error.error_string = "image %s was not approved via FLAG request" % image_id
# setup a struct for the incoming file
self.file = {
"image_id": image_id,
"buffer": StringIO(),
"filename": filename,
"filedate": filedate,
"filesize": filesize,
"bytes_received": 0,
}
#pprint.pprint(self.file)
d = defer.Deferred()
if error.error_code:
d.addCallback(lambda _: error.build())
else:
resp.return_code = zsp_packets.ZSP_FILE_OK
resp.image_id = image_id
resp.return_string = "FILE OK"
d.addCallback(lambda _: resp.build())
d.callback(0)
return d
def do_DONE(self):
"""
DONT FORGET TO CLEAR self.sync_list and self.files!
@return: Unknown
@rtype: Unknown
"""
packet = zsp_packets.zsp_done()
resp = zsp_packets.zsp_done_resp()
# buffer now contains any leftover data
buffer = packet.parse(self.bin["buffer"].getvalue())
binary = self.file['buffer']
binary.seek(0)
self.bin["buffer"] = StringIO()
self.bin["buffer"].write(buffer)
self.bin["bytes_wanted"] = 3
error = zsp_packets.zsp_error()
checksum = md5.md5(binary.getvalue()).hexdigest()
if checksum != self.file["image_id"]:
error.error_code = zsp_packets.ZSP_DONE_BAD_SUM
error.error_string = "CHECKSUM MISMATCH"
try:
parser = ImageFile.Parser()
parser.feed(binary.getvalue())
img = parser.close()
except IOError, e:
self.factory.app.servers.zspserver.log.critical("Image couldn't be parsed %s" % e)
error.error_code = zsp_packets.ZSP_DONE_BAD_SYNC
error.error_string = "FILE OUT OF SYNC"
try:
img.verify()
except:
self.factory.app.servers.zspserver.log.critical("Decoded Image is corrupted")
error.error_code = zsp_packets.ZSP_DONE_BAD_SYNC2
error.error_string = "FILE OUT OF SYNC"
try:
image = Image.open(binary)
except (TypeError, IOError, ValueError):
self.log.warning("Image can't be loaded: %s [%d bytes]" % (checksum, len(binary.getvalue())))
error.error_code = zsp_packets.ZSP_DONE_CORRUPT
error.error_string = "UNSUPPORTED/CORRUPT FILE"
try:
exif = validation.exifdata(image._getexif())
except:
exif = {}
if not exif:
exif = {}
if exif:
has_exif = 1
else:
has_exif = 0
if error.error_code:
d = defer.Deferred()
d.callback(error.build())
return d
def worked(result):
if result[0] == 0:
self.log.debug("Successfully stored image")
resp.return_code = zsp_packets.ZSP_DONE_OK
resp.return_string = "FILE SYNCHRONIZED OK"
self.sync_list.remove(self.file["image_id"])
self.file = {}
self.storing = False
return resp.build()
else:
raise Exception(result[1])
def failed(failure):
stack = failure.value
self.log.warning("file not inserted: %s %s\n%s" % (stack.exception, stack.message, stack.trace()))
resp.return_code = zsp_packets.ZSP_DONE_BAD_WRITE
#resp.return_string = "FILE ALREADY EXISTS ON SERVER"
resp.return_string = failure.getErrorMessage()
# dont remove from the sync list incase they want to retry
self.file = {}
return error.build()
self.storing = True
self.do_HEARTBEAT()
d_insert = self.factory.app.api.images.add(
self.userid, self.file["filename"], binary.getvalue(), \
'ZULU Client', '', '')
d_insert.addCallback(worked)
d_insert.addErrback(failed)
return d_insert
def do_QUIT(self):
"""
The client calls this command when they are exiting. As a courtesy,
tell the client goodbye.
@return: Nothing
@rtype: Nothing
"""
		self.send("190 GOODBYE")
self.transport.loseConnection()
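# Wire protocol summary: a client authenticates with an AUTH packet (username
# plus password hash) and a VERSION packet, then announces each image with a
# FLAG packet (md5 id, size, format, name, date); if the server replies that
# the image is needed, the client sends a FILE packet followed by the raw
# image bytes and a DONE packet, after which the server verifies the md5
# checksum, parses the image and stores it through the images API.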
class ZSPFactory(Factory):
protocol = ZSP
def __init__(self, application, log):
"""
		Start the factory with our given protocol
@param application: our reference to the running app (so we can use the DB and API)
@type application: L{Application}
@param log: a reference to the ZSP server's log instance
@type log: L{Logger} instance (see lib/aztk_main.py)
@return: Nothing
@rtype: Nothing
"""
self.app = application
self.log = log
class ZSPServer(AZTKServer):
enable_broker = False
enable_node = True
def start(self):
"""
Starts L{ZSPFactory}, reactor and configuration
"""
self.cfg = ConfigParser()
#TODO: NO HARDCODED PATHS!
self.cfg.read("/zoto/aztk/etc/uploader_versions.cfg")
factory = ZSPFactory(self.app, self.log)
ip = self.app.cfg_setup.get("interfaces", self.app.host)
self.app.reactor.listenTCP(int(self._cfg_port), factory, interface=ip)
| bsd-3-clause | -3,120,618,513,269,569,000 | 29.184116 | 119 | 0.679345 | false | 3.08524 | true | false | false |
Keripo/Beats | scripts/images/holds.py | 2 | 2285 | import Image, glob, ImageChops, ImageMath, ImageEnhance
files = glob.glob("arrow_*_4.png")
def incrange(start, stop, step):
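	# Inclusive version of range(): the stop value itself is included, and
	# negative steps count downwards.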
if step > 0:
return range(start, stop+1, step)
else:
return range(start, stop-1, step)
def channelMap(f, image):
return Image.merge(image.mode, [f(chan) for chan in image.split()])
#pun not intended
class NotBrokenRGBA:
""" grrr PIL. """
def __init__(self, size):
self.white = Image.new("RGB", size, (255,255,255))
self.black = Image.new("RGB", size, (0,0,0))
def paste(self, *args):
self.white.paste(*args)
self.black.paste(*args)
def pasteRGBA(self, pasta, location):
self.paste(pasta, location, pasta)
def create(self):
mask = ImageChops.subtract(self.white, self.black, -1, 255)
maskgray = mask.convert("L")
#out = self.black.convert("RGBA") # this doesn't premultiply alpha correctly?
#this better?
def divide(ch):
env = {"val": ch, "mask": maskgray}
return ImageMath.eval("val*255/mask",env).convert("L")
out = channelMap(divide, self.black).convert("RGBA")
out.putalpha(maskgray)
return out
def save(self, *args):
self.create().save(*args)
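# How the alpha recovery above works (rough numbers for illustration): the same
# content is composited over a white and a black background, so for a pixel with
# colour c and opacity a the two results differ by 255*(1-a).  ImageChops.subtract
# with scale=-1 and offset=255 therefore yields the alpha directly, and dividing
# the black composite by that alpha restores the un-premultiplied colour.  E.g.
# c=200 at 50% opacity gives roughly 227 over white and 100 over black, so the
# mask is about 128 and 100*255/128 is about 199, close to the original 200.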
for filename in files:
filesuffix = filename.replace("arrow","",1)
im = Image.open(filename)
im_alpha = (im.copy().split())[-1]
im_bright = ImageEnhance.Brightness(im).enhance(2)
im_dark = ImageEnhance.Brightness(im).enhance(.5)
im_weak = ImageEnhance.Color(im_dark).enhance(.2)
im_bright.putalpha(im_alpha)
im_weak.putalpha(im_alpha)
im_dark.putalpha(im_alpha)
im_middle = im_weak # depends on active/inactive?
(imw, imh) = im.size
skippix = 4 # should be factor of imh (typically 64)
holdtypes = {"_active": im_bright, "_inactive": im_dark, "_dead": im_weak}
for im_middle_name, im_middle in holdtypes.iteritems():
# stepmania has bottomcap, but we're scrolling the other direction
htopcap = NotBrokenRGBA((imw, imh)) #Image.new("RGBA", (imw, imh))
hbody = NotBrokenRGBA((imw, 2*imh)) # Image.new("RGB", (imw, 2*imh))
for y in incrange(imh*2, 0, -skippix):
htopcap.pasteRGBA(im if y==0 else im_middle, (0, y))
for y in incrange(imh*4, -imh*4, -skippix):
hbody.pasteRGBA(im_middle, (0, y))
hbody.save("hbody" + im_middle_name + filesuffix, "PNG")
htopcap.save("htopcap" + im_middle_name + filesuffix, "PNG")
| bsd-3-clause | -5,256,766,923,649,650,000 | 29.878378 | 79 | 0.674398 | false | 2.66007 | false | false | false |
wummel/linkchecker | linkcheck/plugins/locationinfo.py | 9 | 3883 | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Store and retrieve country names for IPs.
"""
from . import _ConnectionPlugin
import os
import sys
import socket
from ..lock import get_lock
from ..decorators import synchronized
from ..strformat import unicode_safe
from .. import log, LOG_PLUGIN
class LocationInfo(_ConnectionPlugin):
"""Adds the country and if possible city name of the URL host as info.
Needs GeoIP or pygeoip and a local country or city lookup DB installed."""
def __init__(self, config):
"""Check for geoip module."""
if not geoip:
log.warn(LOG_PLUGIN, "GeoIP or pygeoip not found for LocationInfo plugin.")
super(LocationInfo, self).__init__(config)
def applies_to(self, url_data):
"""Check for validity, host existence and geoip module."""
return url_data.valid and url_data.host and geoip
def check(self, url_data):
"""Try to ask GeoIP database for country info."""
location = get_location(url_data.host)
if location:
url_data.add_info(_("URL is located in %(location)s.") %
{"location": _(location)})
# It is unknown if the geoip library is already thread-safe, so
# no risks should be taken here by using a lock.
_lock = get_lock("geoip")
def get_geoip_dat ():
"""Find a GeoIP database, preferring city over country lookup."""
datafiles = ("GeoIPCity.dat", "GeoIP.dat")
if os.name == 'nt':
paths = (sys.exec_prefix, r"c:\geoip")
else:
paths = ("/usr/local/share/GeoIP", "/usr/share/GeoIP")
for path in paths:
for datafile in datafiles:
filename = os.path.join(path, datafile)
if os.path.isfile(filename):
return filename
# try importing both the C-library GeoIP and the pure-python pygeoip
geoip_dat = get_geoip_dat()
geoip = None
if geoip_dat:
try:
import GeoIP
geoip = GeoIP.open(geoip_dat, GeoIP.GEOIP_STANDARD)
geoip_error = GeoIP.error
except ImportError:
try:
import pygeoip
geoip = pygeoip.GeoIP(geoip_dat)
geoip_error = pygeoip.GeoIPError
except ImportError:
pass
if geoip_dat.endswith('GeoIPCity.dat'):
get_geoip_record = lambda host: geoip.record_by_name(host)
else:
get_geoip_record = lambda host: {'country_name': geoip.country_name_by_name(host)}
@synchronized(_lock)
def get_location (host):
"""Get translated country and optional city name.
@return: country with optional city or an boolean False if not found
"""
if geoip is None:
# no geoip available
return None
try:
record = get_geoip_record(host)
except (geoip_error, socket.error):
log.debug(LOG_PLUGIN, "Geoip error for %r", host, exception=True)
# ignore lookup errors
return None
value = u""
if record and record.get("city"):
value += unicode_safe(record["city"])
if record and record.get("country_name"):
if value:
value += u", "
value += unicode_safe(record["country_name"])
return value
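# Minimal usage sketch (not part of the plugin): with a GeoIP database
# installed, get_location() resolves a hostname to "City, Country"; without
# one it simply returns None.  The hostname below is only an example.
#
#   location = get_location("www.example.com")
#   if location:
#       print("URL is located in %s." % location)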
| gpl-2.0 | 1,782,142,971,426,514,000 | 34.3 | 90 | 0.654906 | false | 3.938134 | false | false | false |
joaonvfigueiredo/killbill | troika/killbill.py | 2 | 1869 | from xlrd import open_workbook
from person import Person
from bill import Bill
from datetime import date, timedelta
billWorkbook = open_workbook('../bills.xls')
def readPeople(sheet, columns, rows):
people = []
for column in range(columns):
values = []
for row in range(rows):
values.append(sheet.cell(row,column).value)
if values[0] == 'Name':
print 'Ignoring column...'
continue
people += [Person(values[0])]
return people
def readBills(sheet, columns, rows):
bills = []
for column in range(columns):
values = []
for row in range(rows):
values.append(sheet.cell(row,column).value)
if values[0] == 'Name':
print 'Ignoring column...'
continue
bills += [Bill(values[0],values[1],values[2],values[3],values[4],values[5])]
return bills
def calculateMonthlyExpenses(bills, people):
monthBillStarts = [ bill.start.month for bill in bills]
monthBillEnds = [ bill.end.month for bill in bills]
    # MonthlyExpense is expected to be provided by another module of this
    # project; it is not imported in this file.
    monthsWithBills = [(str(month), MonthlyExpense(month)) for month in monthBillStarts]
    if monthBillEnds[-1] not in monthBillStarts:
        monthsWithBills += [(str(monthBillEnds[-1]), MonthlyExpense(monthBillEnds[-1]))]
    monthlyExpenses = dict(monthsWithBills)
    for bill in bills:
        monthlyExpenses[str(bill.start.month)] = MonthlyExpense(bill.start)
    return monthlyExpenses
def calculateDebts(bills, people):
    # Placeholder body: the debt calculation is left unimplemented in this source.
    pass
for sheet in billWorkbook.sheets():
sheetColumns = sheet.ncols
sheetRows = sheet.nrows
print 'Sheet: ',sheet.name
print ' with ' + str(sheetRows) + ' rows and ' + str(sheetColumns) + ' columns'
if sheet.name == 'Bills':
the_bills = readBills(sheet, sheetColumns, sheetRows)
elif sheet.name == 'People':
        the_people = readPeople(sheet, sheetColumns, sheetRows)
debts = calculateDebts(the_bills, the_people)
print debts
| mit | 3,542,677,353,861,659,600 | 30.677966 | 90 | 0.639914 | false | 3.448339 | false | false | false |
daonb/Open-Knesset | mmm/migrations/0001_initial.py | 14 | 17515 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Document'
db.create_table('mmm_document', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('url', self.gf('django.db.models.fields.URLField')(unique=True, max_length=200)),
('title', self.gf('django.db.models.fields.CharField')(max_length=2000)),
('publication_date', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('author_names', self.gf('django.db.models.fields.CharField')(max_length=500, blank=True)),
))
db.send_create_signal('mmm', ['Document'])
# Adding M2M table for field req_committee on 'Document'
db.create_table('mmm_document_req_committee', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('document', models.ForeignKey(orm['mmm.document'], null=False)),
('committee', models.ForeignKey(orm['committees.committee'], null=False))
))
db.create_unique('mmm_document_req_committee', ['document_id', 'committee_id'])
# Adding M2M table for field req_mks on 'Document'
db.create_table('mmm_document_req_mks', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('document', models.ForeignKey(orm['mmm.document'], null=False)),
('member', models.ForeignKey(orm['mks.member'], null=False))
))
db.create_unique('mmm_document_req_mks', ['document_id', 'member_id'])
def backwards(self, orm):
# Deleting model 'Document'
db.delete_table('mmm_document')
# Removing M2M table for field req_committee on 'Document'
db.delete_table('mmm_document_req_committee')
# Removing M2M table for field req_mks on 'Document'
db.delete_table('mmm_document_req_mks')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'committees.committee': {
'Meta': {'object_name': 'Committee'},
'aliases': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'chairpersons': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'chaired_committees'", 'blank': 'True', 'to': "orm['mks.Member']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'committees'", 'blank': 'True', 'to': "orm['mks.Member']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'portal_knesset_broadcasts_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'blank': 'True'}),
'replacements': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'replacing_in_committees'", 'blank': 'True', 'to': "orm['mks.Member']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'events.event': {
'Meta': {'object_name': 'Event'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'what': ('django.db.models.fields.TextField', [], {}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'when_over': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'when_over_guessed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'where': ('django.db.models.fields.TextField', [], {}),
'which_pk': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'which_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event_for_event'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'who': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['persons.Person']", 'symmetrical': 'False'})
},
'mks.member': {
'Meta': {'ordering': "['name']", 'object_name': 'Member'},
'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'average_monthly_committee_presence': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'average_weekly_presence_hours': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'backlinks_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'bills_stats_approved': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_first': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_pre': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_proposed': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'blog': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['planet.Blog']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}),
'current_role_descriptions': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parties': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'all_members'", 'symmetrical': 'False', 'through': "orm['mks.Membership']", 'to': "orm['mks.Party']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lat': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lon': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'residence_centrality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'residence_economy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mks.membership': {
'Meta': {'object_name': 'Membership'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'party': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Party']"}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mks.party': {
'Meta': {'ordering': "('-number_of_seats',)", 'object_name': 'Party'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mmm.document': {
'Meta': {'object_name': 'Document'},
'author_names': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'publication_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'req_committee': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'mmm_documents'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['committees.Committee']"}),
'req_mks': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'mmm_documents'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2000'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'})
},
'persons.person': {
'Meta': {'ordering': "('name',)", 'object_name': 'Person'},
'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'mk': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'person'", 'null': 'True', 'to': "orm['mks.Member']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lat': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lon': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'residence_centrality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'residence_economy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'titles': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'persons'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['persons.Title']"}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'persons.title': {
'Meta': {'object_name': 'Title'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'planet.blog': {
'Meta': {'ordering': "('title', 'url')", 'object_name': 'Blog'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '1024', 'db_index': 'True'})
}
}
complete_apps = ['mmm'] | bsd-3-clause | -4,221,528,581,501,139,000 | 81.234742 | 216 | 0.552726 | false | 3.544829 | false | false | false |
krukas/Mage2Gen | mage2gen/snippets/model.py | 1 | 69809 | # A Magento 2 module generator library
# Copyright (C) 2016 Maikel Martens
#
# This file is part of Mage2Gen.
#
# Mage2Gen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os, locale
from collections import OrderedDict
from .. import Module, Phpclass, Phpmethod, Xmlnode, StaticFile, Snippet, SnippetParam, Readme
from ..utils import upperfirst, lowerfirst
from ..module import TEMPLATE_DIR
# Long boring code to add a lot of PHP classes and xml, only go here if you feel like bringing your happiness down.
# Or be happy that you don't have to maintain this code :)
class InterfaceClass(Phpclass):
template_file = os.path.join(TEMPLATE_DIR,'interface.tmpl')
class InterfaceMethod(Phpmethod):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.template_file = os.path.join(TEMPLATE_DIR,'interfacemethod.tmpl')
class ModelSnippet(Snippet):
description = """
	Model is used to create an easy CRUD interface to the database
	- **Model name:** The name of the model; the table name will be <module_name>_<model_name>.
- **Field name:** The name of the database table field.
- **Field type:** The type of database field.
- **Adminhtml grid:** Add this field to the adminhtml grid layout
**Model ID field**: The snippet will auto add the model id field to the database table, the field name is <model_name>_id.
"""
FIELD_TYPE_CHOISES = [
('boolean','Boolean'),
('smallint','Smallint'),
('integer','Integer'),
('bigint', 'Bigint'),
('float', 'Float'),
('numeric', 'Numeric'),
('decimal', 'Decimal'),
('date', 'Date'),
('timestamp', 'Timestamp'),
('datetime', 'Datetime'),
('text', 'Text'),
('blob', 'Blob'),
('varchar','Varchar')
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.count = 0
def add(self, model_name, field_name, field_type='text', adminhtml_grid=False, adminhtml_form=False,web_api=False, extra_params=False):
self.count += 1
extra_params = extra_params if extra_params else {}
model_table = '{}_{}_{}'.format(self._module.package.lower(), self._module.name.lower(), model_name.lower())
model_id = '{}_id'.format(model_name.lower())
field_element_type = 'input'
split_field_name = field_name.split('_')
field_name_capitalized = ''.join(upperfirst(item) for item in split_field_name)
split_model_name = model_name.split('_')
model_name_capitalized = ''.join(upperfirst(item) for item in split_model_name)
model_name_capitalized_after = model_name_capitalized[0].lower() + model_name_capitalized[1:]
split_model_id = model_id.split('_')
model_id_capitalized = ''.join(upperfirst(item) for item in split_model_id)
model_id_capitalized_after = model_id_capitalized[0].lower() + model_id_capitalized[1:]
collection_model_class_name = "\\{}\\{}\\Model\\ResourceModel\\{}\\Collection".format(self._module.package,
self._module.name,
model_name_capitalized.replace('_', '\\')
)
extension_interface_class_name = "\\{}\\{}\\Api\\Data\\{}ExtensionInterface".format(self._module.package,
self._module.name,
model_name_capitalized.replace('_', '\\')
)
if field_type == 'boolean':
field_element_type = 'checkbox'
elif field_type == 'date' or field_type == 'timestamp':
field_element_type = 'date'
elif field_type == 'text':
field_element_type = 'textarea'
top_level_menu = extra_params.get('top_level_menu', True)
column_nodes = []
# add model id field
column_nodes.append(Xmlnode('column', attributes={
'xsi:type': "{}".format('smallint'),
'name': "{}".format(model_id),
'padding': "{}".format('6'),
'unsigned': "{}".format('true'),
'nullable': "{}".format('false'),
'identity': "{}".format('true'),
'comment': "{}".format('Entity Id')
}))
# create options
required = False
attributes = {
'name': "{}".format(field_name),
'nullable': "true",
'xsi:type': field_type
}
if field_type == 'integer' or field_type == 'bigint':
attributes['xsi:type'] = "int"
elif field_type == 'numeric':
attributes['xsi:type'] = "real"
if extra_params.get('default'):
attributes['default'] = "{}".format(extra_params.get('default'))
if not extra_params.get('nullable'):
attributes['nullable'] = 'false'
required = not attributes['nullable']
		if field_type in {'smallint','integer','bigint'}:
attributes['identity'] = 'false'
if extra_params.get('identity'):
attributes['identity'] = 'true'
if extra_params.get('unsigned'):
attributes['unsigned'] = 'true'
if extra_params.get('precision'):
attributes['precision'] = extra_params.get('precision')
if extra_params.get('scale'):
attributes['scale'] = extra_params.get('scale')
if extra_params.get('field_size'):
attributes['length'] = '{}'.format(extra_params.get('field_size'))
elif field_type == 'decimal':
attributes['scale'] = '4'
attributes['precision'] = '12'
elif field_type == 'varchar' and not extra_params.get('field_size'):
attributes['length'] = '255'
# Create db_schema.xml declaration
self.add_xml('etc/db_schema.xml', Xmlnode('schema', attributes={
'xsi:noNamespaceSchemaLocation': "urn:magento:framework:Setup/Declaration/Schema/etc/schema.xsd"}, nodes=[
Xmlnode('table', attributes={
'name': "{}".format(model_table),
'resource': "default",
'engine': "innodb",
'comment': "{} Table".format(model_table)
}, nodes=[
Xmlnode('column', attributes={
'xsi:type': "{}".format('smallint'),
'name': "{}".format(model_id),
'padding': "{}".format('6'),
'unsigned': "{}".format('true'),
'nullable': "{}".format('false'),
'identity': "{}".format('true'),
'comment': "{}".format('Entity Id')
}),
Xmlnode('constraint', attributes={
'xsi:type': "primary",
'referenceId': "PRIMARY".format(model_id)
}, nodes=[
Xmlnode('column', attributes={
'name': "{}".format(model_id)
})
]),
Xmlnode('column', attributes=attributes)
])
]))
# Create resource class
resource_model_class = Phpclass('Model\\ResourceModel\\' + model_name_capitalized.replace('_', '\\'), extends='\\Magento\\Framework\\Model\\ResourceModel\\Db\\AbstractDb')
resource_model_class.add_method(Phpmethod('_construct',
access=Phpmethod.PROTECTED,
body="$this->_init('{}', '{}');".format(model_table, model_id),
docstring=[
'Define resource model',
'',
'@return void',
]))
self.add_class(resource_model_class)
# Create api data interface class
api_data_class = InterfaceClass('Api\\Data\\' + model_name_capitalized.replace('_', '\\') + 'Interface',
extends='\\Magento\\Framework\\Api\\ExtensibleDataInterface',
attributes=[
"const {} = '{}';".format(field_name.upper(),field_name),"const {} = '{}';".format(model_id.upper(),model_id)
])
api_data_class.add_method(InterfaceMethod('get'+model_id_capitalized,docstring=['Get {}'.format(model_id),'@return {}'.format('string|null')]))
self.add_class(api_data_class)
api_data_class.add_method(InterfaceMethod('set'+model_id_capitalized,params=['${}'.format(model_id_capitalized_after)],docstring=['Set {}'.format(model_id),'@param string ${}'.format(model_id_capitalized_after),'@return \{}'.format(api_data_class.class_namespace)]))
self.add_class(api_data_class)
api_data_class.add_method(InterfaceMethod('get'+field_name_capitalized,docstring=['Get {}'.format(field_name),'@return {}'.format('string|null')]))
self.add_class(api_data_class)
api_data_class.add_method(InterfaceMethod('set'+field_name_capitalized,params=['${}'.format(lowerfirst(field_name_capitalized))],docstring=['Set {}'.format(field_name),'@param string ${}'.format(lowerfirst(field_name_capitalized)),'@return \{}'.format(api_data_class.class_namespace)]))
self.add_class(api_data_class)
api_data_class.add_method(InterfaceMethod('getExtensionAttributes', docstring=['Retrieve existing extension attributes object or create a new one.','@return ' + extension_interface_class_name + '|null']))
api_data_class.add_method(InterfaceMethod('setExtensionAttributes', params=[extension_interface_class_name + ' $extensionAttributes'], docstring=['Set an extension attributes object.','@param ' + extension_interface_class_name +' $extensionAttributes','@return $this']))
self.add_class(api_data_class)
# Create api data interface class
api_data_search_class = InterfaceClass('Api\\Data\\' + model_name_capitalized.replace('_', '\\') + 'SearchResultsInterface',extends='\Magento\Framework\Api\SearchResultsInterface')
api_data_search_class.add_method(InterfaceMethod('getItems',docstring=['Get {} list.'.format(model_name),'@return \{}[]'.format(api_data_class.class_namespace)]))
api_data_search_class.add_method(InterfaceMethod('setItems',params=['array $items'],docstring=['Set {} list.'.format(field_name),'@param \{}[] $items'.format(api_data_class.class_namespace),'@return $this']))
self.add_class(api_data_search_class)
		# Create api repository interface class
api_repository_class = InterfaceClass('Api\\' + model_name_capitalized.replace('_', '\\') + 'RepositoryInterface',dependencies=['Magento\Framework\Api\SearchCriteriaInterface'])
api_repository_class.add_method(InterfaceMethod('save',params=['\{} ${}'.format(api_data_class.class_namespace,model_name_capitalized_after)],docstring=['Save {}'.format(model_name),'@param \{} ${}'.format(api_data_class.class_namespace,model_name_capitalized_after),'@return \{}'.format(api_data_class.class_namespace),'@throws \Magento\Framework\Exception\LocalizedException']))
api_repository_class.add_method(InterfaceMethod('get',params=['${}'.format(model_id_capitalized_after)],docstring=['Retrieve {}'.format(model_name),'@param string ${}'.format(model_id_capitalized_after),'@return \{}'.format(api_data_class.class_namespace),'@throws \Magento\Framework\Exception\LocalizedException']))
api_repository_class.add_method(InterfaceMethod('getList',params= ['\Magento\Framework\Api\SearchCriteriaInterface $searchCriteria'], docstring=['Retrieve {} matching the specified criteria.'.format(model_name),'@param \Magento\Framework\Api\SearchCriteriaInterface $searchCriteria','@return \{}'.format(api_data_search_class.class_namespace),'@throws \Magento\Framework\Exception\LocalizedException']))
api_repository_class.add_method(InterfaceMethod('delete',params=['\{} ${}'.format(api_data_class.class_namespace,model_name_capitalized_after)],docstring=['Delete {}'.format(model_name),'@param \{} ${}'.format(api_data_class.class_namespace,model_name_capitalized_after),'@return bool true on success','@throws \Magento\Framework\Exception\LocalizedException']))
api_repository_class.add_method(InterfaceMethod('deleteById',params=['${}'.format(model_id_capitalized_after)],docstring=['Delete {} by ID'.format(model_name),'@param string ${}'.format(model_id_capitalized_after),'@return bool true on success','@throws \\Magento\\Framework\\Exception\\NoSuchEntityException','@throws \\Magento\\Framework\\Exception\\LocalizedException']))
self.add_class(api_repository_class)
# Create model class
model_class = Phpclass('Model\\' + model_name_capitalized.replace('_', '\\'),
dependencies=[
api_data_class.class_namespace,
api_data_class.class_namespace + 'Factory',
'Magento\\Framework\\Api\\DataObjectHelper',
],
extends='\\Magento\\Framework\\Model\\AbstractModel',
attributes=[
'protected ${}DataFactory;\n'.format(model_name.lower()),
'protected $dataObjectHelper;\n',
'protected $_eventPrefix = \'{}\';'.format(model_table)
])
model_class.add_method(Phpmethod('__construct', access=Phpmethod.PUBLIC,
params=[
"\Magento\Framework\Model\Context $context",
"\Magento\Framework\Registry $registry",
"{}InterfaceFactory ${}DataFactory".format(model_name_capitalized, model_name.lower()),
"DataObjectHelper $dataObjectHelper",
"\\" + resource_model_class.class_namespace + " $resource",
collection_model_class_name + " $resourceCollection",
"array $data = []",
],
body="""$this->{variable}DataFactory = ${variable}DataFactory;
$this->dataObjectHelper = $dataObjectHelper;
parent::__construct($context, $registry, $resource, $resourceCollection, $data);
""".format(variable=model_name.lower()),
docstring=[
"@param \Magento\Framework\Model\Context $context",
"@param \Magento\Framework\Registry $registry",
"@param {}InterfaceFactory ${}DataFactory".format(model_name_capitalized, model_name.lower()),
"@param DataObjectHelper $dataObjectHelper",
"@param \\" + resource_model_class.class_namespace + " $resource",
"@param " + collection_model_class_name + " $resourceCollection",
"@param array $data",
]
))
model_class.add_method(Phpmethod('getDataModel', access=Phpmethod.PUBLIC,
body="""${variable}Data = $this->getData();
${variable}DataObject = $this->{variable}DataFactory->create();
$this->dataObjectHelper->populateWithArray(
${variable}DataObject,
${variable}Data,
{variable_upper}Interface::class
);
return ${variable}DataObject;
""".format(variable=model_name.lower(), variable_upper=model_name_capitalized),
docstring=[
"Retrieve {} model with {} data".format(model_name.lower(), model_name.lower()),
"@return {}Interface".format(model_name_capitalized),
]
))
self.add_class(model_class)
# Create collection
collection_model_class = Phpclass('Model\\ResourceModel\\' + model_name_capitalized.replace('_', '\\') + '\\Collection',
extends='\\Magento\\Framework\\Model\\ResourceModel\\Db\\Collection\\AbstractCollection',
attributes=[
"/**\n\t * @var string\n\t */\n\tprotected $_idFieldName = '{}';".format(model_id),
])
collection_model_class.add_method(Phpmethod('_construct',
access=Phpmethod.PROTECTED,
body="$this->_init(\n \{}::class,\n \{}::class\n);".format(
model_class.class_namespace ,resource_model_class.class_namespace),
docstring=[
'Define resource model',
'',
'@return void',
]))
self.add_class(collection_model_class)
# Create Repository Class
model_repository_class = Phpclass('Model\\' + model_name_capitalized.replace('_', '\\') + 'Repository',
dependencies=[
api_repository_class.class_namespace,
api_data_search_class.class_namespace + 'Factory',
api_data_class.class_namespace + 'Factory',
'Magento\\Framework\\Api\\DataObjectHelper',
'Magento\\Framework\\Exception\\CouldNotDeleteException',
'Magento\\Framework\\Exception\\NoSuchEntityException',
'Magento\\Framework\\Exception\\CouldNotSaveException',
'Magento\\Framework\\Reflection\\DataObjectProcessor',
'Magento\\Framework\\Api\\SearchCriteria\\CollectionProcessorInterface',
resource_model_class.class_namespace + ' as Resource' + model_name_capitalized,
collection_model_class.class_namespace + 'Factory as '+ model_name_capitalized +'CollectionFactory',
'Magento\\Store\\Model\\StoreManagerInterface',
'Magento\\Framework\\Api\\ExtensionAttribute\\JoinProcessorInterface',
'Magento\\Framework\\Api\\ExtensibleDataObjectConverter'
],
attributes=[
'protected $resource;\n',
'protected ${}Factory;\n'.format(model_name_capitalized_after),
'protected ${}CollectionFactory;\n'.format(model_name_capitalized_after),
'protected $searchResultsFactory;\n',
'protected $dataObjectHelper;\n',
'protected $dataObjectProcessor;\n',
'protected $data{}Factory;\n'.format(model_name_capitalized),
'protected $extensionAttributesJoinProcessor;\n',
'private $storeManager;\n',
'private $collectionProcessor;\n',
'protected $extensibleDataObjectConverter;'
],
implements=[model_name_capitalized.replace('_', '\\') + 'RepositoryInterface']
)
model_repository_class.add_method(Phpmethod('__construct', access=Phpmethod.PUBLIC,
params=[
"Resource{} $resource".format(model_name_capitalized),
"{}Factory ${}Factory".format(model_name_capitalized,model_name_capitalized_after),
"{}InterfaceFactory $data{}Factory".format(model_name_capitalized,model_name_capitalized),
"{}CollectionFactory ${}CollectionFactory".format(model_name_capitalized,model_name_capitalized_after),
"{}SearchResultsInterfaceFactory $searchResultsFactory".format(model_name_capitalized),
"DataObjectHelper $dataObjectHelper",
"DataObjectProcessor $dataObjectProcessor",
"StoreManagerInterface $storeManager",
"CollectionProcessorInterface $collectionProcessor",
"JoinProcessorInterface $extensionAttributesJoinProcessor",
"ExtensibleDataObjectConverter $extensibleDataObjectConverter"
],
body="""$this->resource = $resource;
$this->{variable}Factory = ${variable}Factory;
$this->{variable}CollectionFactory = ${variable}CollectionFactory;
$this->searchResultsFactory = $searchResultsFactory;
$this->dataObjectHelper = $dataObjectHelper;
$this->data{variable_upper}Factory = $data{variable_upper}Factory;
$this->dataObjectProcessor = $dataObjectProcessor;
$this->storeManager = $storeManager;
$this->collectionProcessor = $collectionProcessor;
$this->extensionAttributesJoinProcessor = $extensionAttributesJoinProcessor;
$this->extensibleDataObjectConverter = $extensibleDataObjectConverter;
""".format(variable=model_name_capitalized_after,variable_upper=model_name_capitalized),
docstring=[
"@param Resource{} $resource".format(model_name_capitalized),
"@param {}Factory ${}Factory".format(model_name_capitalized,model_name_capitalized_after),
"@param {}InterfaceFactory $data{}Factory".format(model_name_capitalized,model_name_capitalized),
"@param {}CollectionFactory ${}CollectionFactory".format(model_name_capitalized,model_name_capitalized_after),
"@param {}SearchResultsInterfaceFactory $searchResultsFactory".format(model_name_capitalized),
"@param DataObjectHelper $dataObjectHelper",
"@param DataObjectProcessor $dataObjectProcessor",
"@param StoreManagerInterface $storeManager",
"@param CollectionProcessorInterface $collectionProcessor",
"@param JoinProcessorInterface $extensionAttributesJoinProcessor",
"@param ExtensibleDataObjectConverter $extensibleDataObjectConverter",
]
))
model_repository_class.add_method(Phpmethod('save', access=Phpmethod.PUBLIC,
params=['\\' + api_data_class.class_namespace + ' $' + model_name_capitalized_after],
body="""/* if (empty(${variable}->getStoreId())) {{
$storeId = $this->storeManager->getStore()->getId();
${variable}->setStoreId($storeId);
}} */
${variable}Data = $this->extensibleDataObjectConverter->toNestedArray(
${variable},
[],
\{data_interface}::class
);
${variable}Model = $this->{variable}Factory->create()->setData(${variable}Data);
try {{
$this->resource->save(${variable}Model);
}} catch (\Exception $exception) {{
throw new CouldNotSaveException(__(
'Could not save the {variable}: %1',
$exception->getMessage()
));
}}
return ${variable}Model->getDataModel();
""".format(data_interface=api_data_class.class_namespace, variable=model_name_capitalized_after),
docstring=['{@inheritdoc}']
))
model_repository_class.add_method(Phpmethod('get', access=Phpmethod.PUBLIC,
params=['${}Id'.format(model_name_capitalized_after)],
body="""${variable} = $this->{variable}Factory->create();
$this->resource->load(${variable}, ${variable}Id);
if (!${variable}->getId()) {{
throw new NoSuchEntityException(__('{model_name} with id "%1" does not exist.', ${variable}Id));
}}
return ${variable}->getDataModel();
""".format(variable=model_name_capitalized_after,model_name=model_name),
docstring=['{@inheritdoc}']
))
model_repository_class.add_method(Phpmethod('getList', access=Phpmethod.PUBLIC,
params=['\Magento\Framework\Api\SearchCriteriaInterface $criteria'],
body="""$collection = $this->{variable}CollectionFactory->create();
$this->extensionAttributesJoinProcessor->process(
$collection,
\{data_interface}::class
);
$this->collectionProcessor->process($criteria, $collection);
$searchResults = $this->searchResultsFactory->create();
$searchResults->setSearchCriteria($criteria);
$items = [];
foreach ($collection as $model) {{
$items[] = $model->getDataModel();
}}
$searchResults->setItems($items);
$searchResults->setTotalCount($collection->getSize());
return $searchResults;
""".format(variable=model_name_capitalized_after,data_interface=api_data_class.class_namespace,variable_upper=model_name_capitalized),
docstring=['{@inheritdoc}']
))
model_repository_class.add_method(Phpmethod('delete', access=Phpmethod.PUBLIC,
params=['\{} ${}'.format(api_data_class.class_namespace,model_name_capitalized_after)],
body="""try {{
${variable}Model = $this->{variable}Factory->create();
$this->resource->load(${variable}Model, ${variable}->get{model_id}());
$this->resource->delete(${variable}Model);
}} catch (\Exception $exception) {{
throw new CouldNotDeleteException(__(
'Could not delete the {model_name}: %1',
$exception->getMessage()
));
}}
return true;
""".format(variable=model_name_capitalized_after,model_name=model_name,model_id=model_id_capitalized),
docstring=['{@inheritdoc}']
))
model_repository_class.add_method(Phpmethod('deleteById', access=Phpmethod.PUBLIC,
params=['${}Id'.format(model_name_capitalized_after)],
body="""return $this->delete($this->get(${variable}Id));
""".format(variable=model_name_capitalized_after,model_name=model_name),
docstring=['{@inheritdoc}']
))
self.add_class(model_repository_class)
# Create Data Model Class
data_model_class = Phpclass('Model\\Data\\' + model_name_capitalized.replace('_', '\\'),
dependencies=[api_data_class.class_namespace],
extends='\\Magento\\Framework\\Api\\AbstractExtensibleObject',
implements=[
api_data_class.class_name
])
data_model_class.add_method(Phpmethod('get' + model_id_capitalized,
docstring=['Get {}'.format(model_id),'@return {}'.format('string|null')],
body="""return $this->_get({});
""".format('self::'+model_id.upper()),
))
data_model_class.add_method(Phpmethod('set' + model_id_capitalized,
params=['${}'.format(model_id_capitalized_after)],
docstring=['Set {}'.format(model_id),'@param string ${}'.format(model_id_capitalized_after),'@return \{}'.format(api_data_class.class_namespace)],
body="""return $this->setData({}, ${});
""".format('self::' + model_id.upper(), model_id_capitalized_after)
))
data_model_class.add_method(Phpmethod('get' + field_name_capitalized,
docstring=['Get {}'.format(field_name),'@return {}'.format('string|null')],
body="""return $this->_get({});
""".format('self::' + field_name.upper()),
))
data_model_class.add_method(Phpmethod('set' + field_name_capitalized,
params=['${}'.format(lowerfirst(field_name_capitalized))],
docstring=['Set {}'.format(field_name),'@param string ${}'.format(lowerfirst(field_name_capitalized)),'@return \{}'.format(api_data_class.class_namespace)],
body="""return $this->setData({}, ${});
""".format('self::' + field_name.upper(), lowerfirst(field_name_capitalized))
))
data_model_class.add_method(Phpmethod('getExtensionAttributes',
docstring=['Retrieve existing extension attributes object or create a new one.','@return '+ extension_interface_class_name +'|null'],
body="""return $this->_getExtensionAttributes();
"""
))
data_model_class.add_method(Phpmethod('setExtensionAttributes',
params=[extension_interface_class_name + ' $extensionAttributes'],
docstring=['Set an extension attributes object.','@param ' + extension_interface_class_name +' $extensionAttributes','@return $this'],
body="""return $this->_setExtensionAttributes($extensionAttributes);
"""
))
self.add_class(data_model_class)
# Create di.xml preferences
self.add_xml('etc/di.xml', Xmlnode('config', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:framework:ObjectManager/etc/config.xsd"}, nodes=[
Xmlnode('preference', attributes={
'for': "{}\\{}\\Api\\{}RepositoryInterface".format(self._module.package, self._module.name, model_name_capitalized),
'type': model_repository_class.class_namespace
}),
Xmlnode('preference', attributes={
'for': "{}\\{}\\Api\\Data\\{}Interface".format(self._module.package, self._module.name, model_name_capitalized),
'type': "{}\\{}\\Model\\Data\\{}".format(self._module.package, self._module.name, model_name_capitalized)
}),
Xmlnode('preference', attributes={
'for': "{}\\{}\\Api\\Data\\{}SearchResultsInterface".format(self._module.package, self._module.name, model_name_capitalized),
'type': 'Magento\Framework\Api\SearchResults'
})
]))
# add grid
if adminhtml_grid:
self.add_adminhtml_grid(model_name, field_name, model_table, model_id, collection_model_class, field_element_type, top_level_menu, adminhtml_form)
if adminhtml_form:
self.add_adminhtml_form(model_name, field_name, model_table, model_id, collection_model_class, model_class, required, field_element_type)
self.add_acl(model_name)
if web_api:
self.add_web_api(model_name, field_name, model_table, model_id, collection_model_class, model_class, required, field_element_type, api_repository_class, model_id_capitalized_after)
if web_api | adminhtml_form | adminhtml_grid:
self.add_acl(model_name)
def add_adminhtml_grid(self, model_name, field_name, model_table, model_id, collection_model_class, field_element_type, top_level_menu, adminhtml_form):
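		# Builds the admin grid: an Index controller, menu.xml and routes.xml entries, a di.xml
		# grid collection virtual type, the grid layout handle and the listing UI component.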
frontname = self.module_name.lower()
data_source_id = '{}_listing_data_source'.format(model_table)
# create controller
index_controller_class = Phpclass('Controller\\Adminhtml\\' + model_name.replace('_', '') + '\\Index', extends='\\Magento\\Backend\\App\\Action',
attributes=[
'protected $resultPageFactory;'
])
index_controller_class.add_method(Phpmethod('__construct',
params=['\\Magento\\Backend\\App\\Action\\Context $context', '\\Magento\\Framework\\View\\Result\\PageFactory $resultPageFactory'],
body='$this->resultPageFactory = $resultPageFactory;\nparent::__construct($context);',
docstring=[
'Constructor',
'',
'@param \\Magento\\Backend\\App\\Action\\Context $context',
'@param \\Magento\\Framework\\View\\Result\\PageFactory $resultPageFactory',
]))
index_controller_class.add_method(Phpmethod('execute',
body_return="""
$resultPage = $this->resultPageFactory->create();
$resultPage->getConfig()->getTitle()->prepend(__("{model_name}"));
return $resultPage;
""".format(model_name=model_name),
docstring=[
'Index action',
'',
'@return \Magento\Framework\Controller\ResultInterface',
]))
self.add_class(index_controller_class)
# create menu.xml
top_level_menu_node = False
if top_level_menu:
top_level_menu_node = Xmlnode('add', attributes={
'id': "{}::top_level".format(self._module.package),
'title': self._module.package,
'module': self.module_name,
'sortOrder': 9999,
'resource': 'Magento_Backend::content',
})
self.add_xml('etc/adminhtml/menu.xml', Xmlnode('config', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:module:Magento_Backend:etc/menu.xsd"}, nodes=[
Xmlnode('menu', nodes=[
top_level_menu_node,
Xmlnode('add', attributes={
'id': "{}::{}".format(self.module_name, model_table),
'title': model_name.replace('_', ' '),
'module': self.module_name,
'sortOrder': 9999,
'resource': 'Magento_Backend::content',
'parent': '{}::top_level'.format(self._module.package),
'action': '{}/{}/index'.format(frontname, model_name.lower().replace('_', ''))
})
])
]))
# Create routes.xml
self.add_xml('etc/adminhtml/routes.xml', Xmlnode('config', attributes={'xsi:noNamespaceSchemaLocation': 'urn:magento:framework:App/etc/routes.xsd'}, nodes=[
Xmlnode('router', attributes={'id': 'admin'}, nodes=[
Xmlnode('route', attributes={'frontName': frontname, 'id':frontname}, nodes=[
Xmlnode('module', attributes={'before': 'Magento_Backend', 'name': self.module_name})
])
])
]))
		# di.xml: register the grid collection as a virtual SearchResult type and set it as the listing data source
self.add_xml('etc/di.xml', Xmlnode('config', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:framework:ObjectManager/etc/config.xsd"}, nodes=[
Xmlnode('virtualType', attributes={
'name': collection_model_class.class_namespace.replace('Collection', 'Grid\\Collection'),
'type': 'Magento\\Framework\\View\\Element\\UiComponent\\DataProvider\\SearchResult',
}, nodes=[
Xmlnode('arguments', nodes=[
Xmlnode('argument', attributes={'name': 'mainTable', 'xsi:type': 'string'}, node_text=model_table),
Xmlnode('argument', attributes={'name': 'resourceModel', 'xsi:type': 'string'}, node_text= collection_model_class.class_namespace),
])
]),
Xmlnode('type', attributes={'name': 'Magento\\Framework\\View\\Element\\UiComponent\\DataProvider\\CollectionFactory'}, nodes=[
Xmlnode('arguments', nodes=[
Xmlnode('argument', attributes={'name': 'collections', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': data_source_id, 'xsi:type': 'string'}, node_text=collection_model_class.class_namespace.replace('Collection', 'Grid\\Collection'))
])
])
])
]))
# create layout.xml
self.add_xml('view/adminhtml/layout/{}_{}_index.xml'.format(frontname, model_name.replace('_', '').lower()),
Xmlnode('page', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:framework:View/Layout/etc/page_configuration.xsd"}, nodes=[
Xmlnode('update', attributes={'handle': 'styles'}),
Xmlnode('body', nodes=[
Xmlnode('referenceContainer', attributes={'name': 'content'}, nodes=[
Xmlnode('uiComponent', attributes={'name': '{}_listing'.format(model_table)})
])
])
]))
		# create the listing UI component XML
data_source_xml = Xmlnode('dataSource', attributes={'name': data_source_id, 'component': 'Magento_Ui/js/grid/provider'}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('updateUrl', attributes={'path': 'mui/index/render'})
]),
Xmlnode('aclResource', node_text='{}_{}::{}'.format(self._module.package, self._module.name, model_name)),
Xmlnode('dataProvider', attributes={'name': data_source_id,'class': 'Magento\\Framework\\View\\Element\\UiComponent\\DataProvider\\DataProvider'}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('requestFieldName', node_text='id'),
Xmlnode('primaryFieldName', node_text=model_id)
])
])
])
if adminhtml_form:
columns_settings_xml = Xmlnode('settings', nodes=[
Xmlnode('editorConfig', nodes=[
Xmlnode('param', attributes={'name': 'selectProvider', 'xsi:type': 'string'}, node_text='{0}_listing.{0}_listing.{0}_columns.ids'.format(model_table)),
Xmlnode('param', attributes={'name': 'enabled', 'xsi:type': 'boolean'}, node_text='true'),
Xmlnode('param', attributes={'name': 'indexField', 'xsi:type': 'string'}, node_text=model_id),
Xmlnode('param', attributes={'name': 'clientConfig', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'saveUrl', 'xsi:type': 'url', 'path': '{}/{}/inlineEdit'.format(frontname, model_name.replace('_', ''))}),
Xmlnode('item', attributes={'name': 'validateBeforeSave', 'xsi:type': 'boolean'}, node_text='false'),
]),
]),
Xmlnode('childDefaults', nodes=[
Xmlnode('param', attributes={'name': 'fieldAction', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'provider', 'xsi:type': 'string'}, node_text='{0}_listing.{0}_listing.{0}_columns_editor'.format(model_table)),
Xmlnode('item', attributes={'name': 'target', 'xsi:type': 'string'}, node_text='startEdit'),
Xmlnode('item', attributes={'name': 'params', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': '0', 'xsi:type': 'string'}, node_text='${ $.$data.rowIndex }'),
Xmlnode('item', attributes={'name': '1', 'xsi:type': 'boolean'}, node_text='true'),
]),
]),
]),
])
columns_xml = Xmlnode('columns', attributes={'name': '{}_columns'.format(model_table)}, nodes=[
Xmlnode('selectionsColumn', attributes={'name': 'ids'}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('indexField', node_text=model_id)
]),
]),
Xmlnode('column', attributes={'name': model_id}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('filter', node_text='text'),
Xmlnode('sorting', node_text='asc'),
Xmlnode('label', attributes={'translate': 'true'}, node_text='ID')
])
]),
Xmlnode('column', attributes={'name': field_name}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('filter', node_text='text'),
Xmlnode('label', attributes={'translate': 'true'}, node_text=field_name)
])
])
])
if adminhtml_form:
columns_xml = Xmlnode('columns', attributes={'name': '{}_columns'.format(model_table)}, nodes=[
columns_settings_xml,
Xmlnode('selectionsColumn', attributes={'name': 'ids'}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('indexField', node_text=model_id)
]),
]),
Xmlnode('column', attributes={'name': model_id}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('filter', node_text='text'),
Xmlnode('sorting', node_text='asc'),
Xmlnode('label', attributes={'translate': 'true'}, node_text='ID')
])
]),
Xmlnode('column', attributes={'name': field_name}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('filter', node_text='text'),
Xmlnode('label', attributes={'translate': 'true'}, node_text=field_name)
])
])
])
self.add_xml('view/adminhtml/ui_component/{}_listing.xml'.format(model_table),
Xmlnode('listing', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:module:Magento_Ui:etc/ui_configuration.xsd"}, nodes=[
Xmlnode('argument', attributes={'name': 'data', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'js_config', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'provider', 'xsi:type': 'string'}, node_text='{}_listing.{}'.format(model_table, data_source_id)),
]),
]),
Xmlnode('settings', nodes=[
Xmlnode('spinner', node_text='{}_columns'.format(model_table)),
Xmlnode('deps', nodes=[
Xmlnode('dep',node_text='{}_listing.{}'.format(model_table, data_source_id))
])
]),
data_source_xml,
Xmlnode('listingToolbar', attributes={'name': 'listing_top'}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('sticky', node_text='true'),
]),
Xmlnode('bookmark', attributes={'name': 'bookmarks'}),
Xmlnode('columnsControls', attributes={'name': 'columns_controls'}),
Xmlnode('filters', attributes={'name': 'listing_filters'}),
Xmlnode('paging', attributes={'name': 'listing_paging'})
]),
columns_xml
]))
def add_adminhtml_form(self, model_name, field_name, model_table, model_id, collection_model_class, model_class, required, field_element_type):
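		# Builds the admin edit form: form buttons, the edit/save/delete/inline-edit/new controllers,
		# a form DataProvider, the grid row actions column and the form layout / UI component XML.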
frontname = self.module_name.lower()
# Add block buttons
# Back button
back_button = Phpclass('Block\\Adminhtml\\' + model_name.replace('_', '\\') + '\\Edit\\BackButton', implements=['ButtonProviderInterface'],
extends='GenericButton',
dependencies=['Magento\\Framework\\View\\Element\\UiComponent\\Control\\ButtonProviderInterface'])
back_button.add_method(Phpmethod('getButtonData',
body="""return [
'label' => __('Back'),
'on_click' => sprintf("location.href = '%s';", $this->getBackUrl()),
'class' => 'back',
'sort_order' => 10
];""",
docstring=['@return array']))
back_button.add_method(Phpmethod('getBackUrl',
body="""return $this->getUrl('*/*/');""",
docstring=[
'Get URL for back (reset) button',
'',
'@return string'
]))
self.add_class(back_button)
# Delete button
delete_button = Phpclass('Block\\Adminhtml\\' + model_name.replace('_', '\\') + '\\Edit\\DeleteButton', implements=['ButtonProviderInterface'],
extends='GenericButton',
dependencies=['Magento\\Framework\\View\\Element\\UiComponent\\Control\\ButtonProviderInterface'])
delete_button.add_method(Phpmethod('getButtonData',
body="""$data = [];
if ($this->getModelId()) {{
$data = [
'label' => __('Delete {}'),
'class' => 'delete',
'on_click' => 'deleteConfirm(\\'' . __(
'Are you sure you want to do this?'
) . '\\', \\'' . $this->getDeleteUrl() . '\\')',
'sort_order' => 20,
];
}}
return $data;""".format(model_name.replace('_', ' ').title()),
docstring=['@return array']))
delete_button.add_method(Phpmethod('getDeleteUrl',
body="""return $this->getUrl('*/*/delete', ['{}' => $this->getModelId()]);""".format(model_id),
docstring=[
'Get URL for delete button',
'',
'@return string'
]))
self.add_class(delete_button)
# Generic button
generic_button = Phpclass('Block\\Adminhtml\\' + model_name.replace('_', '\\') + '\\Edit\\GenericButton',
dependencies=['Magento\\Backend\\Block\Widget\\Context'],
attributes=[
'protected $context;'
],
abstract=True)
generic_button.add_method(Phpmethod('__construct',
params=['Context $context'],
body="""$this->context = $context;""",
docstring=['@param \\Magento\\Backend\\Block\Widget\\Context $context']))
generic_button.add_method(Phpmethod('getModelId',
body="""return $this->context->getRequest()->getParam('{}');""".format(model_id),
docstring=[
'Return model ID',
'',
'@return int|null'
]))
generic_button.add_method(Phpmethod('getUrl', params=["$route = ''","$params = []"],
body="""return $this->context->getUrlBuilder()->getUrl($route, $params);""",
docstring=[
'Generate url by route and parameters',
'',
'@param string $route',
'@param array $params',
'@return string'
]
))
self.add_class(generic_button)
		# Save and continue button
save_continue_button = Phpclass('Block\\Adminhtml\\' + model_name.replace('_', '\\') + '\\Edit\\SaveAndContinueButton', implements=['ButtonProviderInterface'],
extends='GenericButton',
dependencies=['Magento\\Framework\\View\\Element\\UiComponent\\Control\\ButtonProviderInterface'])
save_continue_button.add_method(Phpmethod('getButtonData',
body="""return [
'label' => __('Save and Continue Edit'),
'class' => 'save',
'data_attribute' => [
'mage-init' => [
'button' => ['event' => 'saveAndContinueEdit'],
],
],
'sort_order' => 80,
];""",
docstring=[
'@return array'
]))
self.add_class(save_continue_button)
# Save button
save_button = Phpclass('Block\\Adminhtml\\' + model_name.replace('_', '\\') + '\\Edit\\SaveButton', implements=['ButtonProviderInterface'],
extends='GenericButton',
dependencies=['Magento\\Framework\\View\\Element\\UiComponent\\Control\\ButtonProviderInterface'])
save_button.add_method(Phpmethod('getButtonData',
body="""return [
'label' => __('Save {}'),
'class' => 'save primary',
'data_attribute' => [
'mage-init' => ['button' => ['event' => 'save']],
'form-role' => 'save',
],
'sort_order' => 90,
];""".format(model_name.replace('_', ' ').title()),
docstring=[
'@return array'
]))
self.add_class(save_button)
# Add controllers
###########################################################################################
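		# Key used to stash submitted data in the registry / data persistor between the edit and save controllers.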
register_model = self.module_name.lower() + '_' + model_name.lower()
		# Abstract parent controller shared by the edit/delete/new admin actions
link_controller = Phpclass('Controller\\Adminhtml\\' + model_name.replace('_', ''), extends='\\Magento\\Backend\\App\\Action', abstract=True,
attributes=[
"const ADMIN_RESOURCE = '{}::top_level';".format(self.module_name),
'protected $_coreRegistry;'])
link_controller.add_method(Phpmethod('__construct',
params=['\\Magento\\Backend\\App\\Action\\Context $context', '\\Magento\\Framework\\Registry $coreRegistry'],
body="""$this->_coreRegistry = $coreRegistry;\nparent::__construct($context);""",
docstring=[
'@param \\Magento\\Backend\\App\\Action\\Context $context',
'@param \\Magento\\Framework\\Registry $coreRegistry'
]))
link_controller.add_method(Phpmethod('initPage', params=['$resultPage'],
body="""$resultPage->setActiveMenu(self::ADMIN_RESOURCE)
->addBreadcrumb(__('{namespace}'), __('{namespace}'))
->addBreadcrumb(__('{model_name}'), __('{model_name}'));
return $resultPage;""".format(
namespace = self._module.package,
model_name = model_name.replace('_', ' ').title()
),
docstring=[
'Init page',
'',
'@param \Magento\Backend\Model\View\Result\Page $resultPage',
'@return \Magento\Backend\Model\View\Result\Page'
]))
self.add_class(link_controller)
# Delete controller
delete_controller = Phpclass('Controller\\Adminhtml\\' + model_name.replace('_', '') + '\\Delete', extends='\\' + link_controller.class_namespace)
delete_controller.add_method(Phpmethod('execute',
body="""/** @var \Magento\Backend\Model\View\Result\Redirect $resultRedirect */
$resultRedirect = $this->resultRedirectFactory->create();
// check if we know what should be deleted
$id = $this->getRequest()->getParam('{model_id}');
if ($id) {{
try {{
// init model and delete
$model = $this->_objectManager->create(\{model_class}::class);
$model->load($id);
$model->delete();
// display success message
$this->messageManager->addSuccessMessage(__('You deleted the {model_name}.'));
// go to grid
return $resultRedirect->setPath('*/*/');
}} catch (\Exception $e) {{
// display error message
$this->messageManager->addErrorMessage($e->getMessage());
// go back to edit form
return $resultRedirect->setPath('*/*/edit', ['{model_id}' => $id]);
}}
}}
// display error message
$this->messageManager->addErrorMessage(__('We can\\\'t find a {model_name} to delete.'));
// go to grid
return $resultRedirect->setPath('*/*/');""".format(
model_id = model_id,
model_class = model_class.class_namespace,
model_name = model_name.replace('_', ' ').title()),
docstring=[
'Delete action',
'',
'@return \Magento\Framework\Controller\ResultInterface',
]
))
self.add_class(delete_controller)
# Edit controller
edit_controller = Phpclass('Controller\\Adminhtml\\' + model_name.replace('_', '') + '\\Edit', extends= '\\' + link_controller.class_namespace,
attributes=[
'protected $resultPageFactory;'
])
edit_controller.add_method(Phpmethod('__construct',
params=['\\Magento\\Backend\\App\\Action\\Context $context',
'\\Magento\\Framework\\Registry $coreRegistry',
'\\Magento\\Framework\\View\\Result\\PageFactory $resultPageFactory'],
body="""$this->resultPageFactory = $resultPageFactory;\nparent::__construct($context, $coreRegistry);""",
docstring=[
'@param \\Magento\\Backend\\App\\Action\\Context $context',
'@param \\Magento\\Framework\\Registry $coreRegistry',
'@param \\Magento\\Framework\\View\\Result\\PageFactory $resultPageFactory',
]))
edit_controller.add_method(Phpmethod('execute',
body="""// 1. Get ID and create model
$id = $this->getRequest()->getParam('{model_id}');
$model = $this->_objectManager->create(\{model_class}::class);
// 2. Initial checking
if ($id) {{
$model->load($id);
if (!$model->getId()) {{
$this->messageManager->addErrorMessage(__('This {model_name} no longer exists.'));
/** @var \Magento\Backend\Model\View\Result\Redirect $resultRedirect */
$resultRedirect = $this->resultRedirectFactory->create();
return $resultRedirect->setPath('*/*/');
}}
}}
$this->_coreRegistry->register('{register_model}', $model);
// 3. Build edit form
/** @var \Magento\Backend\Model\View\Result\Page $resultPage */
$resultPage = $this->resultPageFactory->create();
$this->initPage($resultPage)->addBreadcrumb(
$id ? __('Edit {model_name}') : __('New {model_name}'),
$id ? __('Edit {model_name}') : __('New {model_name}')
);
$resultPage->getConfig()->getTitle()->prepend(__('{model_name}s'));
$resultPage->getConfig()->getTitle()->prepend($model->getId() ? __('Edit {model_name} %1', $model->getId()) : __('New {model_name}'));
return $resultPage;""".format(
model_id = model_id,
model_class = model_class.class_namespace,
model_name = model_name.replace('_', ' ').title(),
register_model = register_model
),
docstring=[
'Edit action',
'',
'@return \Magento\Framework\Controller\ResultInterface',
]))
self.add_class(edit_controller)
# Inline Controller
inline_edit_controller = Phpclass('Controller\\Adminhtml\\' + model_name.replace('_', '') + '\\InlineEdit', extends='\\Magento\\Backend\\App\\Action',
attributes=[
'protected $jsonFactory;'
])
inline_edit_controller.add_method(Phpmethod('__construct',
params=['\\Magento\\Backend\\App\\Action\\Context $context',
'\\Magento\\Framework\\Controller\\Result\\JsonFactory $jsonFactory'],
body="""parent::__construct($context);\n$this->jsonFactory = $jsonFactory;""",
docstring=[
'@param \\Magento\\Backend\\App\\Action\\Context $context',
'@param \\Magento\\Framework\\Controller\\Result\\JsonFactory $jsonFactory',
]))
inline_edit_controller.add_method(Phpmethod('execute',
body="""/** @var \Magento\Framework\Controller\Result\Json $resultJson */
$resultJson = $this->jsonFactory->create();
$error = false;
$messages = [];
if ($this->getRequest()->getParam('isAjax')) {{
$postItems = $this->getRequest()->getParam('items', []);
if (!count($postItems)) {{
$messages[] = __('Please correct the data sent.');
$error = true;
}} else {{
foreach (array_keys($postItems) as $modelid) {{
/** @var \{model_class} $model */
$model = $this->_objectManager->create(\{model_class}::class)->load($modelid);
try {{
$model->setData(array_merge($model->getData(), $postItems[$modelid]));
$model->save();
}} catch (\Exception $e) {{
$messages[] = "[{model_name} ID: {{$modelid}}] {{$e->getMessage()}}";
$error = true;
}}
}}
}}
}}
return $resultJson->setData([
'messages' => $messages,
'error' => $error
]);""".format(
model_class = model_class.class_namespace,
model_name = model_name.replace('_', ' ').title(),
),
docstring=[
'Inline edit action',
'',
'@return \Magento\Framework\Controller\ResultInterface',
]))
self.add_class(inline_edit_controller)
# new Controller
new_controller = Phpclass('Controller\\Adminhtml\\' + model_name.replace('_', '') + '\\NewAction', extends='\\' + link_controller.class_namespace,
attributes=[
'protected $resultForwardFactory;'
])
new_controller.add_method(Phpmethod('__construct',
params=['\\Magento\\Backend\\App\\Action\\Context $context',
'\\Magento\\Framework\\Registry $coreRegistry',
'\\Magento\\Backend\\Model\\View\\Result\\ForwardFactory $resultForwardFactory'],
body="""$this->resultForwardFactory = $resultForwardFactory;\nparent::__construct($context, $coreRegistry);""",
docstring=[
'@param \\Magento\\Backend\\App\\Action\\Context $context',
'@param \\Magento\\Framework\\Registry $coreRegistry',
'@param \\Magento\\Backend\\Model\\View\\Result\\ForwardFactory $resultForwardFactory',
]))
new_controller.add_method(Phpmethod('execute',
body="""/** @var \Magento\Framework\Controller\Result\Forward $resultForward */
$resultForward = $this->resultForwardFactory->create();
return $resultForward->forward('edit');""",
docstring=[
'New action',
'',
'@return \Magento\Framework\Controller\ResultInterface',
]))
self.add_class(new_controller)
# Save Controller
new_controller = Phpclass('Controller\\Adminhtml\\' + model_name.replace('_', '') + '\\Save', dependencies=['Magento\Framework\Exception\LocalizedException'], extends='\\Magento\\Backend\\App\\Action',
attributes=[
'protected $dataPersistor;'])
new_controller.add_method(Phpmethod('__construct',
params=['\\Magento\\Backend\\App\\Action\\Context $context',
'\\Magento\\Framework\\App\\Request\\DataPersistorInterface $dataPersistor'],
body="""$this->dataPersistor = $dataPersistor;\nparent::__construct($context);""",
docstring=[
'@param \\Magento\\Backend\\App\\Action\\Context $context',
'@param \\Magento\\Framework\\App\\Request\\DataPersistorInterface $dataPersistor',
]))
new_controller.add_method(Phpmethod('execute',
body="""/** @var \Magento\Backend\Model\View\Result\Redirect $resultRedirect */
$resultRedirect = $this->resultRedirectFactory->create();
$data = $this->getRequest()->getPostValue();
if ($data) {{
$id = $this->getRequest()->getParam('{model_id}');
$model = $this->_objectManager->create(\{model_class}::class)->load($id);
if (!$model->getId() && $id) {{
$this->messageManager->addErrorMessage(__('This {model_name} no longer exists.'));
return $resultRedirect->setPath('*/*/');
}}
$model->setData($data);
try {{
$model->save();
$this->messageManager->addSuccessMessage(__('You saved the {model_name}.'));
$this->dataPersistor->clear('{register_model}');
if ($this->getRequest()->getParam('back')) {{
return $resultRedirect->setPath('*/*/edit', ['{model_id}' => $model->getId()]);
}}
return $resultRedirect->setPath('*/*/');
}} catch (LocalizedException $e) {{
$this->messageManager->addErrorMessage($e->getMessage());
}} catch (\Exception $e) {{
$this->messageManager->addExceptionMessage($e, __('Something went wrong while saving the {model_name}.'));
}}
$this->dataPersistor->set('{register_model}', $data);
return $resultRedirect->setPath('*/*/edit', ['{model_id}' => $this->getRequest()->getParam('{model_id}')]);
}}
return $resultRedirect->setPath('*/*/');""".format(
model_id = model_id,
model_class = model_class.class_namespace,
model_name = model_name.replace('_', ' ').title(),
register_model = register_model
),
docstring=[
'Save action',
'',
'@return \Magento\Framework\Controller\ResultInterface',
]))
self.add_class(new_controller)
# Add model provider
data_provider = Phpclass('Model\\' + model_name.replace('_', '') + '\\DataProvider', extends='\\Magento\\Ui\\DataProvider\\AbstractDataProvider',
attributes=[
'protected $collection;\n',
'protected $dataPersistor;\n',
'protected $loadedData;'
],
dependencies=[collection_model_class.class_namespace + 'Factory', 'Magento\\Framework\\App\\Request\\DataPersistorInterface'])
data_provider.add_method(Phpmethod('__construct',
params=['$name',
'$primaryFieldName',
'$requestFieldName',
'CollectionFactory $collectionFactory',
'DataPersistorInterface $dataPersistor',
'array $meta = []',
'array $data = []'],
body="""$this->collection = $collectionFactory->create();
$this->dataPersistor = $dataPersistor;
parent::__construct($name, $primaryFieldName, $requestFieldName, $meta, $data);""",
docstring=[
'Constructor',
'',
'@param string $name',
'@param string $primaryFieldName',
'@param string $requestFieldName',
'@param CollectionFactory $collectionFactory',
'@param DataPersistorInterface $dataPersistor',
'@param array $meta',
'@param array $data'
]))
data_provider.add_method(Phpmethod('getData',
body="""if (isset($this->loadedData)) {{
return $this->loadedData;
}}
$items = $this->collection->getItems();
foreach ($items as $model) {{
$this->loadedData[$model->getId()] = $model->getData();
}}
$data = $this->dataPersistor->get('{register_model}');
if (!empty($data)) {{
$model = $this->collection->getNewEmptyItem();
$model->setData($data);
$this->loadedData[$model->getId()] = $model->getData();
$this->dataPersistor->clear('{register_model}');
}}
return $this->loadedData;""".format(
register_model = register_model
),
docstring=[
'Get data',
'',
'@return array',
]))
self.add_class(data_provider)
# Add model actions
actions = Phpclass('Ui\Component\Listing\Column\\' + model_name.replace('_', '') + 'Actions', extends='\\Magento\\Ui\\Component\\Listing\\Columns\Column',
attributes=[
"const URL_PATH_EDIT = '{}/{}/edit';".format(frontname, model_name.replace('_', '').lower()),
"const URL_PATH_DELETE = '{}/{}/delete';".format(frontname, model_name.replace('_', '').lower()),
"const URL_PATH_DETAILS = '{}/{}/details';".format(frontname, model_name.replace('_', '').lower()),
'protected $urlBuilder;',
])
actions.add_method(Phpmethod('__construct',
params=['\\Magento\\Framework\\View\\Element\\UiComponent\\ContextInterface $context',
'\\Magento\\Framework\\View\\Element\\UiComponentFactory $uiComponentFactory',
'\\Magento\\Framework\\UrlInterface $urlBuilder',
'array $components = []',
'array $data = []'],
body="""$this->urlBuilder = $urlBuilder;\nparent::__construct($context, $uiComponentFactory, $components, $data);""",
docstring=[
'@param \\Magento\\Framework\\View\\Element\\UiComponent\\ContextInterface $context',
'@param \\Magento\\Framework\\View\\Element\\UiComponentFactory $uiComponentFactory',
'@param \\Magento\\Framework\\UrlInterface $urlBuilder',
'@param array $components',
'@param array $data'
]))
actions.add_method(Phpmethod('prepareDataSource', params=['array $dataSource'],
body="""if (isset($dataSource['data']['items'])) {{
foreach ($dataSource['data']['items'] as & $item) {{
if (isset($item['{model_id}'])) {{
$item[$this->getData('name')] = [
'edit' => [
'href' => $this->urlBuilder->getUrl(
static::URL_PATH_EDIT,
[
'{model_id}' => $item['{model_id}']
]
),
'label' => __('Edit')
],
'delete' => [
'href' => $this->urlBuilder->getUrl(
static::URL_PATH_DELETE,
[
'{model_id}' => $item['{model_id}']
]
),
'label' => __('Delete'),
'confirm' => [
'title' => __('Delete "${{ $.$data.title }}"'),
                        'message' => __('Are you sure you want to delete a "${{ $.$data.title }}" record?')
]
]
];
}}
}}
}}
return $dataSource;""".format(
model_id = model_id
),
docstring=[
'Prepare Data Source',
'',
'@param array $dataSource',
'@return array'
]))
self.add_class(actions)
# Edit layout
self.add_xml('view/adminhtml/layout/{}_{}_edit.xml'.format(frontname, model_name.replace('_', '').lower()),
Xmlnode('page', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:framework:View/Layout/etc/page_configuration.xsd"}, nodes=[
Xmlnode('update', attributes={'handle': 'styles'}),
Xmlnode('body', nodes=[
Xmlnode('referenceContainer', attributes={'name': 'content'}, nodes=[
Xmlnode('uiComponent', attributes={'name': '{}_form'.format(model_table)})
])
])
]))
# New layout
self.add_xml('view/adminhtml/layout/{}_{}_new.xml'.format(frontname, model_name.replace('_', '').lower()),
Xmlnode('page', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:framework:View/Layout/etc/page_configuration.xsd"}, nodes=[
Xmlnode('update', attributes={'handle': '{}_{}_edit'.format(frontname, model_name.lower())})
]))
# UI Component Form
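		# The data source name ties the form component to the DataProvider declared below.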
data_source = '{}_form_data_source'.format(model_name.lower())
ui_form = Xmlnode('form', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:module:Magento_Ui:etc/ui_configuration.xsd"}, nodes=[
Xmlnode('argument', attributes={'name': 'data', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'js_config', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'provider', 'xsi:type': 'string'}, node_text='{}_form.{}'.format(model_table, data_source)),
]),
Xmlnode('item', attributes={'name': 'label', 'xsi:type': 'string', 'translate': 'true'}, node_text='General Information'),
Xmlnode('item', attributes={'name': 'template', 'xsi:type': 'string'}, node_text='templates/form/collapsible'),
]),
Xmlnode('settings', nodes=[
Xmlnode('buttons', nodes=[
Xmlnode('button', attributes={'name': 'back', 'class': back_button.class_namespace}),
Xmlnode('button', attributes={'name': 'delete', 'class': delete_button.class_namespace}),
Xmlnode('button', attributes={'name': 'save', 'class': save_button.class_namespace}),
Xmlnode('button', attributes={'name': 'save_and_continue', 'class': save_continue_button.class_namespace}),
]),
Xmlnode('namespace', node_text='{}_form'.format(model_table)),
Xmlnode('dataScope', node_text='data'),
Xmlnode('deps', nodes=[
Xmlnode('dep', node_text='{}_form.{}'.format(model_table, data_source)),
]),
]),
Xmlnode('dataSource', attributes={'name': data_source}, nodes=[
Xmlnode('argument', attributes={'name': 'data', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'js_config', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'component', 'xsi:type': 'string'}, node_text='Magento_Ui/js/form/provider'),
]),
]),
Xmlnode('settings', nodes=[
Xmlnode('submitUrl', attributes={'path': '*/*/save'}),
]),
Xmlnode('dataProvider', attributes={'name': data_source, 'class': data_provider.class_namespace}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('requestFieldName', node_text=model_id),
Xmlnode('primaryFieldName', node_text=model_id),
]),
]),
]),
Xmlnode('fieldset', attributes={'name': 'general'}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('label', node_text='General'),
]),
Xmlnode('field', attributes={'name': field_name, 'formElement': field_element_type, 'sortOrder': str(10 * self.count)}, nodes=[
Xmlnode('argument', attributes={'name': 'data', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'config', 'xsi:type': 'array'}, nodes=[
Xmlnode('item', attributes={'name': 'source', 'xsi:type': 'string'}, node_text=model_name),
]),
]),
Xmlnode('settings', nodes=[
Xmlnode('dataType', node_text='text'),
Xmlnode('label', attributes={'translate': 'true'}, node_text=field_name),
Xmlnode('dataScope', node_text=field_name),
Xmlnode('validation', nodes=[
Xmlnode('rule', attributes={'name': 'required-entry', 'xsi:type': 'boolean'}, node_text= 'true' if required else 'false'),
]),
]),
]),
]),
])
self.add_xml('view/adminhtml/ui_component/{}_form.xml'.format(model_table), ui_form)
# Set UI Component Listing
ui_listing = Xmlnode('listing', attributes={
'xsi:noNamespaceSchemaLocation': "urn:magento:module:Magento_Ui:etc/ui_configuration.xsd"}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('buttons', nodes=[
Xmlnode('button', attributes={'name': 'add'}, nodes=[
Xmlnode('url', attributes={'path': '*/*/new'}),
Xmlnode('class', node_text='primary'),
Xmlnode('label', attributes={'translate': 'true'}, node_text='Add new {}'.format(model_name)),
]),
]),
]),
Xmlnode('columns', attributes={'name': '{}_columns'.format(model_table)}, nodes=[
Xmlnode('column', attributes={'name': field_name}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('editor', nodes=[
Xmlnode('editorType',
node_text=field_element_type if field_element_type == 'date' else 'text'),
Xmlnode('validation', nodes=[
Xmlnode('rule', attributes={'name': 'required-entry', 'xsi:type': 'boolean'},
node_text='true' if required else 'false'),
]),
]),
]),
]),
Xmlnode('actionsColumn', attributes={'name': 'actions', 'class': actions.class_namespace}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('indexField', node_text=model_id),
Xmlnode('resizeEnabled', node_text='false'),
Xmlnode('resizeDefaultWidth', node_text='107'),
]),
]),
]),
])
self.add_xml('view/adminhtml/ui_component/{}_listing.xml'.format(model_table), ui_listing)
# Update UI Component Listing
ui_listing = Xmlnode('listing', attributes={'xsi:noNamespaceSchemaLocation': "urn:magento:module:Magento_Ui:etc/ui_configuration.xsd"}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('buttons', nodes=[
Xmlnode('button', attributes={'name': 'add'}, nodes=[
Xmlnode('url', attributes={'path':'*/*/new'}),
Xmlnode('class', node_text='primary'),
Xmlnode('label', attributes={'translate': 'true'}, node_text='Add new {}'.format(model_name)),
]),
]),
]),
Xmlnode('columns', attributes={'name': '{}_columns'.format(model_table)}, nodes=[
Xmlnode('column', attributes={'name': field_name}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('editor', nodes=[
Xmlnode('editorType', node_text=field_element_type if field_element_type == 'date' else 'text'),
Xmlnode('validation', nodes=[
Xmlnode('rule', attributes={'name': 'required-entry', 'xsi:type': 'boolean'}, node_text='true' if required else 'false'),
]),
]),
]),
]),
Xmlnode('actionsColumn', attributes={'name': 'actions', 'class': actions.class_namespace}, nodes=[
Xmlnode('settings', nodes=[
Xmlnode('indexField', node_text=model_id),
Xmlnode('resizeEnabled', node_text='false'),
Xmlnode('resizeDefaultWidth', node_text='107'),
]),
]),
]),
])
self.add_xml('view/adminhtml/ui_component/{}_listing.xml'.format(model_table), ui_listing)
def add_web_api(self, model_name, field_name, model_table, model_id, collection_model_class, model_class, required, field_element_type, api_repository_class, model_id_capitalized_after):
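		# Exposes the repository over the REST API: one route per CRUD operation, each guarded by its own ACL resource.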
resource = '{}_{}::{}_'.format(self._module.package,self._module.name,model_name);
api_url = '/V1/{}-{}/'.format(self._module.package.lower(),self._module.name.lower())
webapi_xml = Xmlnode('routes', attributes={'xmlns:xsi':'http://www.w3.org/2001/XMLSchema-instance','xsi:noNamespaceSchemaLocation':"urn:magento:module:Magento_Webapi:etc/webapi.xsd"}, nodes=[
Xmlnode('route', attributes={'url': api_url + model_name.lower(), 'method': 'POST'},match_attributes={'url','method'},nodes=[
Xmlnode('service',attributes={'class':api_repository_class.class_namespace,'method':'save'}),
Xmlnode('resources',nodes=[
Xmlnode('resource', attributes={'ref':resource + 'save'})
])
]),
Xmlnode('route', attributes={'url': api_url + model_name.lower() + '/search', 'method': 'GET'},match_attributes={'url','method'},nodes=[
Xmlnode('service',attributes={'class':api_repository_class.class_namespace,'method':'getList'}),
Xmlnode('resources',nodes=[
Xmlnode('resource', attributes={'ref':resource + 'view'})
])
]),
Xmlnode('route', attributes={'url': api_url + model_name.lower() + '/:' + model_id_capitalized_after, 'method': 'GET'},match_attributes={'url','method'},nodes=[
Xmlnode('service',attributes={'class':api_repository_class.class_namespace,'method':'get'}),
Xmlnode('resources',nodes=[
Xmlnode('resource', attributes={'ref':resource + 'view'})
])
]),
Xmlnode('route', attributes={'url': api_url + model_name.lower() + '/:' + model_id_capitalized_after, 'method': 'PUT'},match_attributes={'url','method'},nodes=[
Xmlnode('service',attributes={'class':api_repository_class.class_namespace,'method':'save'}),
Xmlnode('resources',nodes=[
Xmlnode('resource', attributes={'ref':resource + 'update'})
])
]),
Xmlnode('route', attributes={'url': api_url + model_name.lower() + '/:' + model_id_capitalized_after, 'method': 'DELETE'},match_attributes={'url','method'},nodes=[
Xmlnode('service',attributes={'class':api_repository_class.class_namespace,'method':'deleteById'}),
Xmlnode('resources',nodes=[
Xmlnode('resource', attributes={'ref':resource + 'delete'})
])
])
])
self.add_xml('etc/webapi.xml', webapi_xml)
self.add_static_file(
'.',
Readme(
specifications=" - Model\n\t- {}".format(model_name),
)
)
def add_acl(self,model_name):
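		# Registers the save/delete/update/view ACL resources for this model under Magento_Backend::admin.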
namespace = '{}_{}'.format(self._module.package,self._module.name)
acl_xml = Xmlnode('config', attributes={'xmlns:xsi':'http://www.w3.org/2001/XMLSchema-instance','xsi:noNamespaceSchemaLocation':"urn:magento:framework:Acl/etc/acl.xsd"}, nodes=[
Xmlnode('acl',nodes=[
Xmlnode('resources',nodes=[
Xmlnode('resource',attributes={'id':'Magento_Backend::admin'},nodes=[
Xmlnode('resource',attributes={'id':'{}::{}'.format(namespace,model_name),'title':'{}'.format(model_name),'sortOrder':"10"}, nodes=[
Xmlnode('resource',attributes={'id':'{}::{}_{}'.format(namespace,model_name,'save'),'title':'Save {}'.format(model_name),'sortOrder':"10"}),
Xmlnode('resource',attributes={'id':'{}::{}_{}'.format(namespace,model_name,'delete'),'title':'Delete {}'.format(model_name),'sortOrder':"20"}),
Xmlnode('resource',attributes={'id':'{}::{}_{}'.format(namespace,model_name,'update'),'title':'Update {}'.format(model_name),'sortOrder':"30"}),
Xmlnode('resource',attributes={'id':'{}::{}_{}'.format(namespace,model_name,'view'),'title':'View {}'.format(model_name),'sortOrder':"40"})
])
])
])
])
])
self.add_xml('etc/acl.xml', acl_xml)
@classmethod
def params(cls):
return [
SnippetParam(
name='model_name',
description='Example: Blog',
required=True,
regex_validator= r'^[a-zA-Z]{1}\w+$',
				error_message='Only alphanumeric and underscore characters are allowed, and the name needs to start with an alphabetic character.',
repeat=True
),
SnippetParam(
name='field_name',
description='Example: content',
required=True,
regex_validator= r'^[a-zA-Z]{1}\w+$',
				error_message='Only alphanumeric and underscore characters are allowed, and the name needs to start with an alphabetic character.'
),
SnippetParam(
name='field_type',
choises=cls.FIELD_TYPE_CHOISES,
default='text',
),
SnippetParam(name='adminhtml_grid', yes_no=True),
SnippetParam(name='adminhtml_form', yes_no=True),
SnippetParam(name='web_api', yes_no=True),
]
@classmethod
def extra_params(cls):
return [
SnippetParam('comment', required=False, description='Description of database field'),
SnippetParam('default', required=False, description='Default value of field'),
SnippetParam('nullable', yes_no=True, default=True),
SnippetParam('identity', yes_no=True, depend={'field_type': r'smallint|integer|bigint'}),
'Extra',
SnippetParam(
name='field_size',
description='Size of field, Example: 512 for max chars',
required=False,
regex_validator= r'^\d+$',
error_message='Only numeric value allowed.',
depend={'field_type': r'text|blob|decimal|numeric'}
),
SnippetParam(
name='precision',
required=False,
regex_validator= r'^\d+$',
error_message='Only numeric value allowed.',
depend={'field_type': r'decimal|numeric'}
),
SnippetParam(
name='scale',
required=False,
regex_validator= r'^\d+$',
error_message='Only numeric value allowed.',
depend={'field_type': r'decimal|numeric'}
),
SnippetParam(
name='unsigned',
yes_no=True,
depend={'field_type': r'smallint|integer|bigint|float|decimal|numeric'}
),
SnippetParam(
name='top_level_menu',
yes_no=True,
default=True,
repeat=True
),
]
| gpl-3.0 | -7,379,776,745,117,829,000 | 43.951062 | 405 | 0.638156 | false | 3.421004 | false | false | false |
teampheenix/StarCraft-Casting-Tool | scctool/settings/history.py | 1 | 4381 | """Provide history manager for SCCTool."""
import json
import logging
import scctool.settings.translation
from scctool.settings import getJsonFile, idx2race, race2idx
module_logger = logging.getLogger(__name__)
_ = scctool.settings.translation.gettext
class HistoryManager:
"""History manager for SCCTool."""
__max_length = 100
def __init__(self):
"""Init the history manager."""
self.loadJson()
self.updateDataStructure()
def loadJson(self):
"""Read json data from file."""
try:
with open(getJsonFile('history'), 'r',
encoding='utf-8-sig') as json_file:
data = json.load(json_file)
except Exception:
data = dict()
self.__player_history = data.get('player', [])
self.__team_history = data.get('team', [])
def dumpJson(self):
"""Write json data to file."""
data = dict()
data['player'] = self.__player_history
data['team'] = self.__team_history
try:
with open(getJsonFile('history'), 'w',
encoding='utf-8-sig') as outfile:
json.dump(data, outfile)
except Exception:
module_logger.exception("message")
def updateDataStructure(self):
"""Update the data structure (from a previous version)."""
for idx, item in enumerate(self.__team_history):
if isinstance(item, str):
self.__team_history[idx] = {'team': item, 'logo': '0'}
def insertPlayer(self, player, race):
"""Insert a player into the history."""
player = player.strip()
if not player or player.lower() == "tbd":
return
        if isinstance(race, str):
race = race2idx(race)
race = idx2race(race)
for item in self.__player_history:
if item.get('player', '').lower() == player.lower():
self.__player_history.remove(item)
if race == "Random":
race = item.get('race', 'Random')
break
self.__player_history.insert(0, {"player": player, "race": race})
# self.enforeMaxLength("player")
def insertTeam(self, team, logo='0'):
"""Insert a team into the history."""
team = team.strip()
if not team or team.lower() == "tbd":
return
for item in self.__team_history:
if item.get('team', '').lower() == team.lower():
self.__team_history.remove(item)
if logo == '0':
logo = item.get('logo', '0')
break
self.__team_history.insert(0, {"team": team, "logo": logo})
# self.enforeMaxLength("team")
def enforeMaxLength(self, scope=None):
"""Delete old history elements."""
if not scope or scope == "player":
while len(self.__player_history) > self.__max_length:
self.__player_history.pop()
if not scope or scope == "team":
while len(self.__team_history) > self.__max_length:
self.__team_history.pop()
def getPlayerList(self):
"""Return a list of all players in history."""
playerList = list()
for item in self.__player_history:
player = item['player']
if player not in playerList:
playerList.append(player)
return playerList
def getTeamList(self):
"""Return a list of all teams in history."""
teamList = list()
for item in self.__team_history:
team = item.get('team')
if team not in teamList:
teamList.append(team)
return teamList
def getRace(self, player):
"""Look up the race of a player in the history."""
player = player.lower().strip()
race = "Random"
for item in self.__player_history:
if item.get('player', '').lower() == player:
race = item.get('race', 'Random')
break
return race
def getLogo(self, team):
"""Look up the logo of a team in history."""
team = team.lower().strip()
logo = '0'
for item in self.__team_history:
if item.get('team', '').lower() == team:
logo = item.get('logo', '0')
break
return logo
| gpl-3.0 | -1,711,385,107,263,561,700 | 33.226563 | 73 | 0.530701 | false | 4.241045 | false | false | false |
knowledgecommonsdc/kcdc3 | kcdc3/apps/classes/migrations/0018_auto__chg_field_event_location_address1__chg_field_event_location_addr.py | 1 | 9954 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Event.location_address1'
db.alter_column('classes_event', 'location_address1', self.gf('django.db.models.fields.CharField')(max_length=60))
# Changing field 'Event.location_address2'
db.alter_column('classes_event', 'location_address2', self.gf('django.db.models.fields.CharField')(max_length=60))
# Changing field 'Event.location_city'
db.alter_column('classes_event', 'location_city', self.gf('django.db.models.fields.CharField')(max_length=60))
# Changing field 'Event.location_name'
db.alter_column('classes_event', 'location_name', self.gf('django.db.models.fields.CharField')(max_length=100))
# Changing field 'Event.location_state'
db.alter_column('classes_event', 'location_state', self.gf('django.db.models.fields.CharField')(max_length=2))
# Changing field 'Event.location_zip'
db.alter_column('classes_event', 'location_zip', self.gf('django.db.models.fields.CharField')(max_length=5))
def backwards(self, orm):
# Changing field 'Event.location_address1'
db.alter_column('classes_event', 'location_address1', self.gf('django.db.models.fields.TextField')(max_length=60))
# Changing field 'Event.location_address2'
db.alter_column('classes_event', 'location_address2', self.gf('django.db.models.fields.TextField')(max_length=60))
# Changing field 'Event.location_city'
db.alter_column('classes_event', 'location_city', self.gf('django.db.models.fields.TextField')(max_length=60))
# Changing field 'Event.location_name'
db.alter_column('classes_event', 'location_name', self.gf('django.db.models.fields.TextField')(max_length=100))
# Changing field 'Event.location_state'
db.alter_column('classes_event', 'location_state', self.gf('django.db.models.fields.TextField')(max_length=2))
# Changing field 'Event.location_zip'
db.alter_column('classes_event', 'location_zip', self.gf('django.db.models.fields.TextField')(max_length=5))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'classes.event': {
'Meta': {'ordering': "['date']", 'object_name': 'Event'},
'additional_dates_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'documentation': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_reminder': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'email_reminder_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_welcome_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'facilitators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'facilitators'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location_address1': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'location_address2': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'location_city': ('django.db.models.fields.CharField', [], {'default': "'Washington'", 'max_length': '60', 'blank': 'True'}),
'location_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'location_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'location_show_exact': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'location_state': ('django.db.models.fields.CharField', [], {'default': "'DC'", 'max_length': '2', 'blank': 'True'}),
'location_zip': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'main_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'max_students': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'registration_status': ('django.db.models.fields.CharField', [], {'default': "'AUTO'", 'max_length': '7'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PUBLISHED'", 'max_length': '9'}),
'students': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'students'", 'to': "orm['auth.User']", 'through': "orm['classes.Registration']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'teachers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'teachers'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'CLASS'", 'max_length': '9'}),
'waitlist_status': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'classes.registration': {
'Meta': {'ordering': "['date_registered']", 'object_name': 'Registration'},
'attended': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'cancelled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date_cancelled': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_registered': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classes.Event']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'waitlist': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['classes'] | mit | -3,789,741,378,202,330,000 | 73.849624 | 237 | 0.582078 | false | 3.677133 | false | false | false |
jantman/awslimitchecker | awslimitchecker/tests/support.py | 1 | 13142 | """
awslimitchecker/tests/support.py
The latest version of this package is available at:
<https://github.com/jantman/awslimitchecker>
################################################################################
Copyright 2015-2018 Jason Antman <[email protected]>
This file is part of awslimitchecker, also known as awslimitchecker.
awslimitchecker is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
awslimitchecker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with awslimitchecker. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
################################################################################
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/awslimitchecker> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
################################################################################
AUTHORS:
Jason Antman <[email protected]> <http://www.jasonantman.com>
################################################################################
"""
from awslimitchecker.limit import AwsLimit
import logging
from botocore.exceptions import EndpointConnectionError
class LogRecordHelper(object):
"""class to help working with an array of LogRecords"""
levelmap = {
logging.CRITICAL: 'critical',
logging.ERROR: 'error',
logging.WARNING: 'warning',
logging.INFO: 'info',
logging.DEBUG: 'debug',
logging.NOTSET: 'notset'
}
def __init__(self, logcapture):
"""
Initialize LogRecord helper.
:param logcapture: testfixtures.logcapture.LogCapture object
"""
self._logcapture = logcapture
self.records = logcapture.records
def get_at_level(self, lvl):
"""
Return a list of all records in order for a given numeric logging level
:param lvl: the level to get
:type lvl: int
:returns: list of LogRecord objects
"""
res = []
for rec in self.records:
if rec.levelno == lvl:
res.append(rec)
return res
def get_at_or_above_level(self, lvl):
"""
Return a list of all records in order, at OR ABOVE a given numeric
logging level
:param lvl: the level to get
:type lvl: int
:returns: list of LogRecord objects
"""
res = []
for rec in self.records:
if rec.levelno >= lvl:
res.append(rec)
return res
def assert_failed_message(self, records):
"""
        Return a newline-joined string representation of the given log
        records, for use in assertion failure messages.
        :param records: list of LogRecord objects
        :return: string with one formatted record per line
"""
res = ""
for r in records:
res += '%s:%s.%s (%s:%s) %s - %s %s\n' % (
r.name,
r.module,
r.funcName,
r.filename,
r.lineno,
r.levelname,
r.msg,
r.args
)
return res
def unexpected_logs(self, allow_endpoint_error=False):
"""
Return a list of strings representing awslimitchecker log messages
in this object's log records, that shouldn't be encountered in normal
operation.
:param allow_endpoint_error: if true, will ignore any WARN messages
containing 'Could not connect to the endpoint URL:' in their first
argument
:type allow_endpoint_error: bool
:return: list of strings representing log records
"""
res = []
msg = 'Cannot check TrustedAdvisor: %s'
args = ('AWS Premium Support Subscription is required to use this '
'service.', )
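        # Walk all WARN-and-above records, skipping the ones that are
        # known/benign; anything left over is reported as unexpected.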
for r in self.get_at_or_above_level(logging.WARN):
if (r.levelno == logging.WARN and r.module == 'trustedadvisor' and
r.funcName == '_get_limit_check_id' and r.msg == msg and
r.args == args):
continue
if (r.levelno == logging.WARN and r.module == 'ec2' and
r.funcName == '_find_usage_spot_instances' and
'spot instance support is experimental' in r.msg):
continue
if (
allow_endpoint_error and r.levelno == logging.WARN and
len(r.args) > 0
):
if isinstance(r.args[0], EndpointConnectionError):
continue
if 'Could not connect to the endpoint URL:' in r.args[0]:
continue
if (r.levelno == logging.ERROR and r.module == 'vpc' and
r.funcName == '_find_usage_nat_gateways' and
'perhaps NAT service does not exist in this regi' in r.msg):
continue
if (r.levelno == logging.WARNING and r.module == 'firehose' and
r.funcName == 'find_usage' and 'perhaps the Firehose '
'service is not available in this region' in r.msg):
continue
if (r.levelno == logging.WARNING and r.module == 'quotas' and
r.funcName == 'quotas_for_service' and
'Attempted to retrieve Service Quotas' in r.msg):
continue
if (
r.levelno == logging.WARNING and r.module == 'base' and
r.funcName == '_get_cloudwatch_usage_latest' and
'No data points found for AWS/Usage metric' in r.msg
):
continue
res.append('%s:%s.%s (%s:%s) %s - %s %s' % (
r.name,
r.module,
r.funcName,
r.filename,
r.lineno,
r.levelname,
r.msg,
r.args
))
return res
def verify_region(self, region_name):
"""
Verify that all connection logs are to the specified region. Raise
an AssertionError otherwise.
:param region_name: expected region name
:type region_name: str
"""
overall_region = None
support_region = None
service_regions = {}
for r in self.records:
if r.msg == 'Connected to %s in region %s':
if r.args[0] == 'support':
support_region = r.args[1]
else:
service_regions[r.args[0]] = r.args[1]
elif r.msg in [
'Connecting to region %s',
'Connecting to STS in region %s'
]:
overall_region = r.args[0]
assert overall_region == region_name, "Expected overall connection " \
"region to be %s but got %s" \
"" % (region_name,
overall_region)
assert support_region == 'us-east-1', "Expected Support API region " \
"to be us-east-1 but got %s" \
"" % support_region
for svc, rname in service_regions.items():
if svc == 'route53':
continue
assert rname == region_name, "Expected service %s to connect to " \
"region %s, but connected to %s" % (
svc, region_name, rname)
@property
def num_ta_polls(self):
"""
Return the number of times Trusted Advisor polled.
:return: number of times Trusted Advisor polled
:rtype: int
"""
count = 0
for r in self.records:
if 'Beginning TrustedAdvisor poll' in r.msg:
count += 1
return count
def sample_limits():
limits = {
'SvcBar': {
'barlimit1': AwsLimit(
'barlimit1',
'SvcBar',
1,
2,
3,
limit_type='ltbar1',
limit_subtype='sltbar1',
),
'bar limit2': AwsLimit(
'bar limit2',
'SvcBar',
2,
2,
3,
limit_type='ltbar2',
limit_subtype='sltbar2',
),
},
'SvcFoo': {
'foo limit3': AwsLimit(
'foo limit3',
'SvcFoo',
3,
2,
3,
limit_type='ltfoo3',
limit_subtype='sltfoo3',
),
},
}
limits['SvcBar']['bar limit2'].set_limit_override(99)
limits['SvcFoo']['foo limit3']._set_ta_limit(10)
return limits
def sample_limits_api():
limits = {
'SvcBar': {
'barlimit1': AwsLimit(
'barlimit1',
'SvcBar',
1,
2,
3,
limit_type='ltbar1',
limit_subtype='sltbar1',
),
'bar limit2': AwsLimit(
'bar limit2',
'SvcBar',
2,
2,
3,
limit_type='ltbar2',
limit_subtype='sltbar2',
),
},
'SvcFoo': {
'foo limit3': AwsLimit(
'foo limit3',
'SvcFoo',
3,
2,
3,
limit_type='ltfoo3',
limit_subtype='sltfoo3',
),
'zzz limit4': AwsLimit(
'zzz limit4',
'SvcFoo',
4,
1,
5,
limit_type='ltfoo4',
limit_subtype='sltfoo4',
),
'limit with usage maximums': AwsLimit(
'limit with usage maximums',
'SvcFoo',
4,
1,
5,
limit_type='ltfoo5',
limit_subtype='sltfoo5',
),
'zzz limit5': AwsLimit(
'zzz limit5',
'SvcFoo',
4,
1,
5,
limit_type='ltfoo5',
limit_subtype='sltfoo5',
),
},
}
limits['SvcBar']['bar limit2']._set_api_limit(2)
limits['SvcBar']['bar limit2'].set_limit_override(99)
limits['SvcFoo']['foo limit3']._set_ta_limit(10)
limits['SvcFoo']['zzz limit4']._set_api_limit(34)
limits['SvcFoo']['zzz limit5']._set_quotas_limit(60.0)
limits['SvcFoo']['limit with usage maximums']._add_current_usage(
1,
maximum=10,
aws_type='res_type',
resource_id='res_id')
return limits
def quotas_response():
return (
[
{
'Quotas': [
{
'QuotaName': 'qname1',
'QuotaCode': 'qcode1',
'Value': 1.1
},
{
'QuotaName': 'qname2',
'QuotaCode': 'qcode2',
'Value': 2.2
}
]
},
{
'Quotas': [
{
'QuotaName': 'qname3',
'QuotaCode': 'qcode3',
'Value': 3.3
},
{
'QuotaName': 'qname2',
'QuotaCode': 'qcode2',
'Value': 2.4 # triggers the error log for dupe
}
]
}
],
{
'qname1': {
'QuotaName': 'qname1',
'QuotaCode': 'qcode1',
'Value': 1.1
},
'qname2': {
'QuotaName': 'qname2',
'QuotaCode': 'qcode2',
'Value': 2.4
},
'qname3': {
'QuotaName': 'qname3',
'QuotaCode': 'qcode3',
'Value': 3.3
}
}
)
| agpl-3.0 | -5,978,919,376,088,293,000 | 32.35533 | 80 | 0.458302 | false | 4.528601 | false | false | false |
kiritoe/pokeapi | pokemon/models.py | 12 | 11645 | from __future__ import unicode_literals
from django.db import models
from imagekit.models.fields import ProcessedImageField
from imagekit.processors import ResizeToFill
from .utils import unique_filename
class DateTimeModel(models.Model):
class Meta:
abstract = True
modified = models.DateTimeField(auto_now=True)
created = models.DateTimeField(auto_now_add=True)
class Ability(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=50)
description = models.TextField(max_length=200)
class Type(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=50)
def _build_dict(self, items):
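        # Build a list of {name, resource_uri} dicts from the given TypeChart
        # relations; used by the weakness/resistance/etc. properties below.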
lst = []
for i in items:
lst.append(dict(
name=i.to.name,
resource_uri='/api/v1/type/' + str(i.to.id) + '/'
))
return lst
def weakness_list(self):
items = TypeChart.objects.filter(
frm__name=self.name,
ttype='weak')
if items.exists():
return self._build_dict(items)
return []
weaknesses = property(fget=weakness_list)
def resistances_list(self):
items = TypeChart.objects.filter(
frm__name=self.name,
ttype='resist')
if items.exists():
return self._build_dict(items)
return []
resistances = property(fget=resistances_list)
def super_list(self):
items = TypeChart.objects.filter(
frm__name=self.name,
ttype='super effective')
if items.exists():
return self._build_dict(items)
return []
supers = property(fget=super_list)
def ineffective_list(self):
items = TypeChart.objects.filter(
frm__name=self.name,
ttype='ineffective')
if items.exists():
return self._build_dict(items)
return []
ineffectives = property(fget=ineffective_list)
def no_list(self):
items = TypeChart.objects.filter(
frm__name=self.name,
ttype='noeffect')
if items.exists():
return self._build_dict(items)
return []
no_effects = property(fget=no_list)
class TypeChart(DateTimeModel):
def __unicode__(self):
return ' '.join([self.frm.name, self.ttype, 'against', self.to.name])
frm = models.ForeignKey(
Type, blank=True, null=True, related_name='type_frm')
to = models.ForeignKey(
Type, blank=True, null=True, related_name='type_to')
TYPES = (
('weak', 'weak'),
('super effective', 'super effective'),
('resistant', 'resistant'),
('ineffective', 'ineffective'),
('noeffect', 'noeffect'),
('resist', 'resist'),
)
ttype = models.CharField(
max_length=15, choices=TYPES, blank=True, null=True)
class EggGroup(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=50)
def get_pokes(self):
pokes = Pokemon.objects.filter(
egg_group=self
)
lst = []
if pokes.exists():
for p in pokes:
lst.append(dict(
name=p.name.capitalize(),
resource_uri='/api/v1/pokemon/' + str(p.pkdx_id) + '/'
))
return lst
pokemon = property(fget=get_pokes)
class Game(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=50)
generation = models.IntegerField(max_length=4)
release_year = models.IntegerField(max_length=6)
class Description(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=50)
description = models.TextField(max_length=200)
game = models.ManyToManyField(Game, blank=True, null=True)
def get_game_details(self):
lst = []
for g in self.game.all():
lst.append(dict(
name=g.name,
resource_uri='/api/v1/game/' + str(g.id) + '/')
)
return lst
n_game = property(fget=get_game_details)
def get_pokemon(self):
nm = self.name.split('_')[0]
pokes = Pokemon.objects.filter(
name=nm.lower()
)
if pokes.exists():
return dict(
name=pokes[0].name,
resource_uri='/api/v1/pokemon/' + str(pokes[0].pkdx_id) + '/')
return []
pokemon = property(fget=get_pokemon)
class Move(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=50)
description = models.TextField(max_length=200)
etype = models.ManyToManyField(Type, null=True)
pp = models.IntegerField(max_length=5)
CATEGORY = (
('physical', 'physical'),
('special', 'special'),
('status', 'status'),
)
category = models.CharField(choices=CATEGORY, max_length=10)
power = models.IntegerField(max_length=6)
accuracy = models.IntegerField(max_length=6)
class Sprite(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=50)
image = ProcessedImageField(
[ResizeToFill(96, 96)],
upload_to=unique_filename,
format='PNG',
options={'quality': 80})
def get_pokemon(self):
nm = self.name.split('_')[0]
pokes = Pokemon.objects.filter(
name=nm.lower()
)
if pokes.exists():
return dict(
name=pokes[0].name,
resource_uri='/api/v1/pokemon/' + str(pokes[0].pkdx_id) + '/')
return []
pokemon = property(fget=get_pokemon)
class Pokemon(DateTimeModel):
def __unicode__(self):
return ' - '.join([str(self.pkdx_id), self.name])
name = models.CharField(max_length=50)
pkdx_id = models.IntegerField(max_length=4, blank=True)
species = models.CharField(max_length=30)
height = models.CharField(max_length=10)
weight = models.CharField(max_length=10)
ev_yield = models.CharField(max_length=20)
catch_rate = models.IntegerField(max_length=4)
happiness = models.IntegerField(max_length=4)
exp = models.IntegerField(max_length=5)
GROWTHS = (
('slow', 'slow'),
('medium slow', 'medium slow'),
('medium', 'medium'),
('medium fast', 'medium fast'),
('fast', 'fast'),
)
growth_rate = models.CharField(choices=GROWTHS, max_length=15)
male_female_ratio = models.CharField(max_length=10)
hp = models.IntegerField(max_length=4)
attack = models.IntegerField(max_length=4)
defense = models.IntegerField(max_length=4)
sp_atk = models.IntegerField(max_length=4)
sp_def = models.IntegerField(max_length=4)
speed = models.IntegerField(max_length=4)
total = models.IntegerField(max_length=6)
egg_cycles = models.IntegerField(max_length=6)
abilities = models.ManyToManyField(
Ability, blank=True, null=True)
def ability_names(self):
lst = []
for a in self.abilities.all():
lst.append(dict(
resource_uri='/api/v1/ability/' + str(a.id) + '/',
name=a.name.lower())
)
return lst
ability_list = property(fget=ability_names)
def get_evolution_details(self):
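        # Collect every evolution where this Pokemon is the source ("frm"),
        # including level/method/detail information when present.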
evols = Evolution.objects.filter(
frm=self
)
if evols.exists():
lst = []
for e in evols:
d = dict(
to=e.to.name.capitalize(),
resource_uri='/api/v1/pokemon/' + str(e.to.pkdx_id) + '/',
method=e.method,
)
if e.level > 0:
d['level'] = e.level
if e.detail:
d['detail'] = e.detail
lst.append(d)
return lst
return []
evolutions = property(fget=get_evolution_details)
types = models.ManyToManyField(
Type, blank=True, null=True)
def type_list(self):
lst = []
for t in self.types.all():
lst.append(dict(
resource_uri='/api/v1/type/' + str(t.id) + '/',
name=t.name.lower())
)
return lst
type_list = property(fget=type_list)
egg_group = models.ManyToManyField(
EggGroup, blank=True, null=True)
def get_eggs(self):
lst = []
for e in self.egg_group.all():
lst.append(dict(
name=e.name.capitalize(),
resource_uri='/api/v1/egg/' + str(e.id) + '/'
))
return lst
eggs = property(fget=get_eggs)
descriptions = models.ManyToManyField(
Description, blank=True, null=True)
def get_sprites(self):
lst = []
for s in self.sprites.all():
lst.append(dict(
name=self.name,
resource_uri='/api/v1/sprite/' + str(s.id) + '/')
)
return lst
my_sprites = property(fget=get_sprites)
sprites = models.ManyToManyField(
Sprite, blank=True, null=True)
def get_moves(self):
moves = MovePokemon.objects.filter(
pokemon=self
)
lst = []
if moves.exists():
for m in moves:
d = dict(
name=m.move.name.capitalize(),
resource_uri='/api/v1/move/' + str(m.move.id) + '/',
learn_type=m.learn_type
)
if m.level > 0:
d['level'] = m.level
lst.append(d)
return lst
moves = property(fget=get_moves)
class Evolution(DateTimeModel):
def __unicode__(self):
return self.frm.name + ' to ' + self.to.name
frm = models.ForeignKey(
Pokemon, null=True, blank=True,
related_name='frm_evol_pokemon')
to = models.ForeignKey(
Pokemon, null=True, blank=True,
related_name='to_evol_pokemon')
EVOLV_METHODS = (
('level up', 'level_up'),
('stone', 'stone'),
('trade', 'trade'),
('other', 'other'),
)
level = models.IntegerField(max_length=3, default=0)
method = models.CharField(
choices=EVOLV_METHODS, max_length=10, default=0)
detail = models.CharField(max_length=10, null=True, blank=True)
class MovePokemon(DateTimeModel):
def __unicode__(self):
return self.pokemon.name + ' - ' + self.move.name
pokemon = models.ForeignKey(
Pokemon, related_name='move', null=True, blank=True)
move = models.ForeignKey(
Move, related_name='pokemon', null=True, blank=True)
LEARN = (
('level up', 'level up'),
('machine', 'machine'),
('egg move', 'egg move'),
('tutor', 'tutor'),
('other', 'other'),
)
learn_type = models.CharField(
choices=LEARN, max_length=15, default='level up')
level = models.IntegerField(
max_length=6, default=0, null=True, blank=True)
class Pokedex(DateTimeModel):
def __unicode__(self):
return self.name
name = models.CharField(max_length=60)
def _all_pokes(self):
lst = []
for p in Pokemon.objects.all():
lst.append(dict(
name=p.name,
                resource_uri='/api/v1/pokemon/' + str(p.pkdx_id) + '/'
))
return lst
pokemon = property(fget=_all_pokes)
| bsd-3-clause | 2,845,050,407,254,119,000 | 23.260417 | 78 | 0.552168 | false | 3.768608 | false | false | false |
labase/brython_crafty | fabfile.py | 1 | 1183 | # -*- coding: utf-8 -*-
"""
############################################################
Brython Crafty - - Fabric deployment
############################################################
:Author: *Carlo E. T. Oliveira*
:Contact: [email protected]
:Date: $Date: 2014/10/10 $
:Status: This is a "work in progress"
:Revision: $Revision: 0.01 $
:Home: `Labase <http://labase.nce.ufrj.br/>`__
:Copyright: ©2013, `GPL <http://is.gd/3Udt>`__.
"""
from fabric.api import local # , settings, cd, run, lcd
#from tempfile import mkdtemp
KG_ORIGIN = '/home/carlo/Documentos/dev/brython_crafty/src/crafty/'
KG_DEST = '/home/carlo/Documentos/dev/lib/Brython2.2/Lib/site-packages/crafty'
PN_DEST = '/home/carlo/Documentos/dev/brython-in-the-classroom/pyschool/static/external/brython/Lib/site-packages/crafty'
SOURCES = '*.py'
FILENAMES = 'base.py entity.py extra.py __init__.py utils.py core.py graphics.py jscrafty.py'.split()
def _do_copy(source, targ):
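    # Copy `source` into the target directory, only copying files that are
    # newer than the destination (-u).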
#local("mkdir -p %s" % targ)
local("cp -u %s -t %s" % (source, targ))
def _k_copy(targ):
for part in FILENAMES:
_do_copy(KG_ORIGIN+part, targ)
def deploy():
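    # Copy the crafty modules into both Brython site-packages destinations.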
_k_copy(KG_DEST)
_k_copy(PN_DEST)
#kzip()
| gpl-2.0 | 8,053,135,288,582,555,000 | 30.945946 | 121 | 0.594755 | false | 2.742459 | false | false | false |
Tocknicsu/nctuoj | backend/service/tag.py | 1 | 2375 | from req import Service
from map import *
from service.base import BaseService
from utils.form import form_validation
class TagService(BaseService):
def __init__(self, db, rs):
super().__init__(db, rs)
TagService.inst = self
def get_tag_list(self):
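        # Return all tag rows; the redis cache lookup is currently disabled.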
# res = self.rs.get('tag_list')
# if res: return (None, res)
res = yield self.db.execute('SELECT * FROM tags;')
res = res.fetchall()
print('RES: ',res)
# self.rs.set('tag_list', res)
return (None, res)
def get_tag(self, data={}):
required_args = [{
'name': '+id',
'type': int,
}]
err = form_validation(data, required_args)
if err: return (err, None)
# res = self.rs.get('tag@%s'%(str(data['id'])))
res = yield self.db.execute('SELECT * FROM tags WHERE id=%s;', (data['id'],))
if res.rowcount == 0:
return ((404, 'No tag ID'), None)
res = res.fetchone()
# self.rs.set('tag@%s'%(str(data['id'])), res)
return (None, res)
    def put_tag(self, data={}):
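        # Update the fields of an existing tag, identified by id.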
required_args = [{
'name': '+id',
'type': int,
}, {
'name': '+tag',
'type': str,
}]
err = form_validation(data, required_args)
if err: return (err, None)
id = data.pop('id')
# self.rs.delete('tag@%s'%(id))
        sql, param = self.gen_update_sql('tags', data)
yield self.db.execute(sql, param)
return (None, id)
def post_tag(self, data={}):
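        # Insert a new tag and return its generated id.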
required_args = [{
'name': '+tag',
'type': str,
}]
err = form_validation(data, required_args)
if err: return (err, None)
sql, param = self.gen_insert_sql('tags', data)
id = yield self.db.execute(sql, param)
id = id.fetchone()['id']
return (None, id)
def delete_tag(self, data={}):
        required_args = [{
'name': '+id',
'type': int,
}]
# err = self.check_required_args(required_args, data)
err = form_validation(data, required_args)
if err: return (err, None)
# self.rs.delete('tag@%s'%(str(data['id'])))
yield self.db.execute('DELETE FROM tags WHERE id=%s', (data['id'],))
return (None, None)
| mit | 2,419,667,435,661,461,500 | 30.25 | 85 | 0.501895 | false | 3.603945 | false | false | false |
mbway/Bayesian-Optimisation | turbo/recorder.py | 1 | 11887 | #!/usr/bin/env python3
import os
import time
import datetime
import numpy as np
import turbo as tb
import turbo.modules as tm
#TODO: I have decided that pickling is a totally inappropriate serialisation method for this data for the following reasons (note them down in the documentation)
# - from Gpy: Pickling is meant to serialize models within the same environment, and not to store models on disk to be used later on.
# - if the code which created the optimiser changes (eg file deleted) then the pickled data CANNOT BE LOADED!
# as a fix, can use sys.modules['old_module'] = new_module
# if the module has moved and hasn't changed much
# - the data isn't human readable
# - the data isn't readable by any other tool
# - the data isn't guaranteed to last between python versions
# - it introduces dill as a dependency
# - the data isn't readable if turbo changes particular classes (like Trial for example)
# - the save cannot be loaded easily from another directory because the modules for the original source code will not be present!
#
# Potential fix:
# write a way for the optimiser to be initialised from a static configuration file / dictionary. That way only static data has to be stored
# then it will be trivial to store using JSON or a binary serialisation method
# can use inspect to get the source code for functions like `beta = lambda trial_num: np.log(trial_num)` and save as strings
# could use dis to get the bytecode instead perhaps? Either way, save as something which can be re-loaded. Or perhaps pickle just the function stuff and store it as a string inside the JSON file or whatever
class Recorder(tm.Listener):
""" A listener which records data about the trials of an optimiser for plotting later
Note: design justification: recorders are required because the optimiser
only keeps track of the data it needs to perform the optimisation (for
the sake of simplicity). Different data (and different formats and
arrangements of the same data) is required for plotting, and so the
cleanest solution is to completely separate this recording of data for
plotting and the optimisation process itself.
Attributes:
trials: a list of Trial objects recorded from the optimiser
"""
class Trial:
def __init__(self):
self.trial_num = None
self.x = None
self.y = None
self.selection_info = None
self.selection_time = None
self.eval_info = None
self.eval_time = None
def __repr__(self):
attrs = ('trial_num', 'x', 'y', 'selection_info', 'selection_time', 'eval_info', 'eval_time')
return 'Trial({})'.format(', '.join('{}={}'.format(k, getattr(self, k)) for k in attrs))
def is_pre_phase(self):
return self.selection_info['type'] == 'pre_phase'
def is_bayes(self):
return self.selection_info['type'] == 'bayes'
def is_fallback(self):
return self.selection_info['type'] == 'fallback'
class Run:
def __init__(self, previously_finished, max_trials):
self.start_date = datetime.datetime.now()
self.finish_date = None
self.previously_finished = previously_finished
self.max_trials = max_trials
def finish(self):
self.finish_date = datetime.datetime.now()
def is_finished(self):
return self.finish_date is not None
@property
def duration(self):
return None if self.finish_date is None else (self.finish_date-self.start_date).total_seconds()
@property
def num_trials(self):
return self.max_trials-self.previously_finished
def __init__(self, optimiser=None, description=None, autosave_filename=None):
"""
Args:
optimiser: (optional) the optimiser to register with, otherwise
`Optimiser.register_listener()` should be called with this
object as an argument in order to receive messages.
description (str): a long-form explanation of what was being done
during this run (eg the details of the experiment set up) to
give more information than the filename alone can provide.
autosave_filename (str): when provided, save the recorder to this
file every time a trial is finished. This allows the user to do
plotting before the optimisation process has finished. The file
must not already exist.
"""
self.runs = []
self.trials = {}
self.description = description
self.unfinished_trial_nums = set()
self.optimiser = optimiser
if optimiser is not None:
optimiser.register_listener(self)
if autosave_filename is not None and os.path.exists(autosave_filename):
base_filename = autosave_filename
suffix = 1
while os.path.exists(autosave_filename):
autosave_filename = '{}_{}'.format(base_filename, suffix)
suffix += 1
print('the file "{}" already exists, using "{}" instead'.format(base_filename, autosave_filename))
self.autosave_filename = autosave_filename
def save_compressed(self, path, overwrite=False):
""" save the recorder to a file which can be loaded later and used for plotting
Args:
path: the path where the recording will be saved to
overwrite (bool): whether to overwrite the file if it already exists
Note:
this method is necessary as `utils.save_compressed()` will crash
otherwise due to the circular reference between the recorder and the
optimiser
"""
opt = self.optimiser
assert opt is not None
listeners = opt._listeners
objective = opt.objective
try:
# since self is a listener of the optimiser, if the listeners are saved
# then there is a circular reference!
opt._listeners = []
# The objective function could be arbitrarily complex and so may not pickle
opt.objective = None
problems = tb.utils.detect_pickle_problems(self, quiet=True)
assert not problems, 'problems detected: {}'.format(problems)
while True:
try:
tb.utils.save_compressed(self, path, overwrite)
break
except Exception as e:
print('in save_compressed:')
print(e)
input('press enter to try again')
finally:
opt._listeners = listeners
opt.objective = objective
@staticmethod
def load_compressed(filename, quiet=False):
rec = tb.utils.load_compressed(filename)
if not quiet:
print('Recorder loaded:')
print(rec.get_summary())
return rec
def get_summary(self):
s = '{} trials over {} run{}\n'.format(len(self.trials), len(self.runs), 's' if len(self.runs) > 1 else '')
for i, r in enumerate(self.runs):
if r.is_finished():
s += 'run {}: {} trials in {}, started {}\n'.format(i, r.num_trials, tb.utils.duration_string(r.duration), r.start_date)
else:
s += 'run {}: unfinished\n'.format(i)
if self.unfinished_trial_nums:
s += 'unfinished trials: {}\n'.format(self.unfinished_trial_nums)
s += 'description:\n{}\n'.format(self.description)
return s
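    # Listener callbacks, invoked by the optimiser during a run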
def registered(self, optimiser):
assert self.optimiser is None or self.optimiser == optimiser, \
'cannot use the same Recorder with multiple optimisers'
self.optimiser = optimiser
def run_started(self, finished_trials, max_trials):
r = Recorder.Run(finished_trials, max_trials)
self.runs.append(r)
def selection_started(self, trial_num):
assert trial_num not in self.trials.keys()
t = Recorder.Trial()
t.trial_num = trial_num
t.selection_time = time.time() # use as storage for the start time until selection has finished
self.trials[trial_num] = t
self.unfinished_trial_nums.add(trial_num)
def selection_finished(self, trial_num, x, selection_info):
t = self.trials[trial_num]
t.selection_time = time.time() - t.selection_time
t.x = x
t.selection_info = selection_info
def evaluation_started(self, trial_num):
t = self.trials[trial_num]
t.eval_time = time.time() # use as storage for the start time until evaluation has finished
def evaluation_finished(self, trial_num, y, eval_info):
t = self.trials[trial_num]
t.y = y
t.eval_info = eval_info
t.eval_time = time.time() - t.eval_time
self.unfinished_trial_nums.remove(trial_num)
if self.autosave_filename is not None:
self.save_compressed(self.autosave_filename, overwrite=True)
def run_finished(self):
r = self.runs[-1]
r.finish()
if self.autosave_filename is not None:
self.save_compressed(self.autosave_filename, overwrite=True)
# Utility functions
def get_sorted_trials(self):
""" return a list of (trial_num, Trial) sorted by trial_num (and so sorted by start time) """
return sorted(self.trials.items())
def get_ranked_trials(self):
""" return a list of (trial_num, Trial) sorted by cost (best first) """
maximising = self.optimiser.is_maximising()
return sorted(self.trials.items(), key=lambda item: -item[1].y if maximising else item[1].y)
def get_incumbent(self, up_to=None):
"""
Args:
up_to (int): the incumbent for the trials up to and including this trial number. Pass None to include all trials.
"""
trials = self.get_sorted_trials()
if up_to is not None:
trials = trials[:up_to+1]
assert trials, 'no trials'
costs = [t.y for n, t in trials]
i = int(np.argmax(costs)) if self.optimiser.is_maximising() else int(np.argmin(costs))
return trials[i] # (trial_num, trial)
def get_data_for_trial(self, trial_num):
#TODO: for async cannot assume that finished == all trials before trial_num
finished = [self.trials[n] for n in range(trial_num)]
trial = self.trials[trial_num]
return finished, trial
def get_acquisition_function(self, trial_num):
""" see `Optimiser._get_acquisition_function()` """
opt = self.optimiser
acq_type = opt.acquisition.get_type()
finished, t = self.get_data_for_trial(trial_num)
assert 'model' in t.selection_info, 'the trial doesn\'t have a model'
acq_args = [trial_num, t.selection_info['model'], opt.desired_extremum]
if acq_type == 'optimism':
pass # no extra arguments needed
elif acq_type == 'improvement':
ys = [f.y for f in finished]
incumbent_cost = max(ys) if opt.is_maximising() else min(ys)
acq_args.append(incumbent_cost)
else:
raise NotImplementedError('unsupported acquisition function type: {}'.format(acq_type))
acq_fun, acq_info = opt.acquisition.construct_function(*acq_args)
return acq_fun
def remove_unfinished(self):
""" remove any unfinished trials. This is useful for still being able to
plot after interrupting an Optimiser before it finished
"""
for trial_num in self.unfinished_trial_nums:
del self.trials[trial_num]
self.unfinished_trial_nums = set()
def has_unfinished_trials(self):
return len(self.unfinished_trial_nums) > 0
| gpl-3.0 | 4,236,832,310,468,843,000 | 42.068841 | 206 | 0.625894 | false | 4.007755 | false | false | false |
LeonhardFS/flightbbq | src/extractBigTable.py | 1 | 7425 |
# coding: utf-8
# ### Linear Regression Model
# In[1]:
# import required modules for prediction tasks
import numpy as np
import pandas as pd
import math
import random
import requests
import zipfile
import StringIO
import re
import json
import os
# In[3]:
# first step: Accumulate the data
# given a list of column labels, keep only those columns that actually exist in the df
def filterDF(df, cols):
colsToKeep = list(set(df.columns) & set(cols))
return df[colsToKeep]
# given a dataframe this function groups all manufacturers into one category whose market share is low (default: 1%)
# also groups together some companies
def compressManufacturers(df, percentage=1.):
df['AIRCRAFT_MFR'] = df['AIRCRAFT_MFR'].map(lambda x: x.strip())
mfr_stats = df['AIRCRAFT_MFR'].value_counts()
market_share = mfr_stats.values * 100. / np.sum(mfr_stats.values)
idxs = np.where(market_share < percentage)
names = np.array([el for el in list(mfr_stats.keys())])
# get labels for small manufacturers
smallMFR = names[idxs]
# perform merging for the big companies
# Douglas airplanes
df.loc[df['AIRCRAFT_MFR'] == 'MCDONNELL DOUGLAS AIRCRAFT CO', 'AIRCRAFT_MFR'] = 'MCDONNELL DOUGLAS'
df.loc[df['AIRCRAFT_MFR'] == 'MCDONNELL DOUGLAS CORPORATION', 'AIRCRAFT_MFR'] = 'MCDONNELL DOUGLAS'
df.loc[df['AIRCRAFT_MFR'] == 'MCDONNELL DOUGLAS CORPORATION', 'AIRCRAFT_MFR'] = 'DOUGLAS'
# Embraer
df.loc[df['AIRCRAFT_MFR'] == 'EMBRAER S A', 'AIRCRAFT_MFR'] = 'EMBRAER'
# Airbus
df.loc[df['AIRCRAFT_MFR'] == 'AIRBUS INDUSTRIE', 'AIRCRAFT_MFR'] = 'AIRBUS'
# the small manufacturers
for name in smallMFR:
df.loc[df['AIRCRAFT_MFR'] == name, 'AIRCRAFT_MFR'] = 'SMALL'
return df
# In[4]:
# MAIN PART:
# In[ ]:
print('loading helper files...')
# load helper files
z = zipfile.ZipFile('../externalData/AircraftInformation.zip')
df_master = pd.DataFrame.from_csv(z.open('MASTER.txt'))
df_aircrafts = pd.DataFrame.from_csv(z.open('ACFTREF.txt'))
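# MASTER.txt maps aircraft tail numbers to a manufacturer/model code and the
# year of manufacture; ACFTREF.txt maps that code to a manufacturer name.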
master = df_master[['MFR MDL CODE', 'YEAR MFR']].reset_index()
aircrafts = df_aircrafts['MFR'].reset_index()
master.columns = ['TAIL_NUM', 'CODE', 'YEAR']
aircrafts.columns = ['CODE', 'MFR']
joinedAircraftInfo = pd.merge(master, aircrafts, how='left', on='CODE')
joinedAircraftInfo.TAIL_NUM = joinedAircraftInfo.TAIL_NUM.apply(lambda x: x.strip())
# possible fields
# [u'YEAR', u'QUARTER', u'MONTH', u'DAY_OF_MONTH', u'DAY_OF_WEEK',
# u'FL_DATE', u'UNIQUE_CARRIER', u'AIRLINE_ID', u'CARRIER', u'TAIL_NUM',
# u'FL_NUM', u'ORIGIN', u'ORIGIN_CITY_NAME', u'ORIGIN_STATE_ABR',
# u'ORIGIN_STATE_FIPS', u'ORIGIN_STATE_NM', u'ORIGIN_WAC', u'DEST',
# u'DEST_CITY_NAME', u'DEST_STATE_ABR', u'DEST_STATE_FIPS',
# u'DEST_STATE_NM', u'DEST_WAC', u'CRS_DEP_TIME', u'DEP_TIME',
# u'DEP_DELAY', u'DEP_DELAY_NEW', u'DEP_DEL15', u'DEP_DELAY_GROUP',
# u'DEP_TIME_BLK', u'TAXI_OUT', u'WHEELS_OFF', u'WHEELS_ON', u'TAXI_IN',
# u'CRS_ARR_TIME', u'ARR_TIME', u'ARR_DELAY', u'ARR_DELAY_NEW',
# u'ARR_DEL15', u'ARR_DELAY_GROUP', u'ARR_TIME_BLK', u'CANCELLED',
# u'CANCELLATION_CODE', u'DIVERTED', u'CRS_ELAPSED_TIME',
# u'ACTUAL_ELAPSED_TIME', u'AIR_TIME', u'FLIGHTS', u'DISTANCE',
# u'DISTANCE_GROUP', u'CARRIER_DELAY', u'WEATHER_DELAY', u'NAS_DELAY',
# u'SECURITY_DELAY', u'LATE_AIRCRAFT_DELAY', u'FIRST_DEP_TIME',
# u'TOTAL_ADD_GTIME', u'LONGEST_ADD_GTIME', u'DIV_AIRPORT_LANDINGS',
# u'DIV_REACHED_DEST', u'DIV_ACTUAL_ELAPSED_TIME', u'DIV_ARR_DELAY',
# u'DIV_DISTANCE', u'DIV1_AIRPORT', u'DIV1_WHEELS_ON',
# u'DIV1_TOTAL_GTIME', u'DIV1_LONGEST_GTIME', u'DIV1_WHEELS_OFF',
# u'DIV1_TAIL_NUM', u'DIV2_AIRPORT', u'DIV2_WHEELS_ON',
# u'DIV2_TOTAL_GTIME', u'DIV2_LONGEST_GTIME', u'DIV2_WHEELS_OFF',
# u'DIV2_TAIL_NUM', u'DIV3_AIRPORT', u'DIV3_WHEELS_ON',
# u'DIV3_TOTAL_GTIME', u'DIV3_LONGEST_GTIME', u'DIV3_WHEELS_OFF',
# u'DIV3_TAIL_NUM', u'DIV4_AIRPORT', u'DIV4_WHEELS_ON',
# u'DIV4_TOTAL_GTIME', u'DIV4_LONGEST_GTIME', u'DIV4_WHEELS_OFF',
# u'DIV4_TAIL_NUM', u'DIV5_AIRPORT', u'DIV5_WHEELS_ON',
# u'DIV5_TOTAL_GTIME', u'DIV5_LONGEST_GTIME', u'DIV5_WHEELS_OFF',
# u'DIV5_TAIL_NUM', u'Unnamed: 93', u'AIRCRAFT_YEAR', u'AIRCRAFT_AGE',
# u'AIRCRAFT_MFR']
# define here which columns to include in the data extraction process
columnsToUse = [u'YEAR', u'QUARTER', u'MONTH', u'DAY_OF_MONTH', u'DAY_OF_WEEK', u'DEST_CITY_NAME', u'ORIGIN_CITY_NAME',
                u'FL_DATE', u'UNIQUE_CARRIER', u'AIRLINE_ID', u'TAIL_NUM',
u'FL_NUM', u'ORIGIN', u'ORIGIN_CITY_NAME',
u'ORIGIN_STATE_NM', u'ORIGIN_WAC', u'DEST',
u'DEST_CITY_NAME',u'ARR_DELAY', u'ARR_DELAY_NEW',
u'ARR_DEL15', u'CANCELLED', u'DIVERTED', u'DISTANCE',u'AIRCRAFT_YEAR', u'AIRCRAFT_AGE',
u'AIRCRAFT_MFR', u'ARR_TIME', u'DEP_TIME']
# given the raw BTS data, this function filters it and returns
# a filtered version along with the fraction of rows that was removed
def processData(rawData):
# filter to exclude diverted and cancelled flights
filteredData = rawData[(rawData.DIVERTED == 0) & (rawData.CANCELLED == 0)]
    # remember the row count before the remaining cleaning steps so the
    # fraction removed can be computed at the end
cleaned_away = filteredData.count()[0]
# remove columns that are not needed for the model
filteredData = filterDF(filteredData, columnsToUse)
filteredData.reset_index(inplace=True)
# perform as next step join to amend information by aircraftdata
delayFinal = filteredData[['TAIL_NUM','UNIQUE_CARRIER']]
delayFinal.TAIL_NUM = delayFinal.TAIL_NUM.str.strip('N')
delaymfr = pd.merge(delayFinal, joinedAircraftInfo, how='left', on=['TAIL_NUM'])
filteredData.TAIL_NUM = delaymfr.TAIL_NUM
filteredData['AIRCRAFT_YEAR'] = delaymfr.YEAR
filteredData['AIRCRAFT_MFR'] = delaymfr.MFR
# get rid of NAN values
filteredData.dropna(axis = 0, inplace = True)
# get rid of empty year values
filteredData = filteredData[filteredData['AIRCRAFT_YEAR'] != ' ']
# compute age of aircraft
filteredData['AIRCRAFT_AGE'] = filteredData.YEAR.astype(int) - filteredData.AIRCRAFT_YEAR.astype(int)
# now, compress manufacturers to only a small amount of companies
filteredData = compressManufacturers(filteredData)
cleaned_away = 1. - filteredData.count()[0] * 1. / cleaned_away
return filteredData, cleaned_away
# In[ ]:
# the dataframe to store everything in
bigdf = None
ca_statistic = []
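# Process each cached monthly BTS on-time zip, clean it, append the result to
# one big table and track how much of each month was cleaned away.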
years = ['2005', '2006', '2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014']
months = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']
print('starting processing')
for y in years:
for m in months:
print 'reading {y}{mo}.zip'.format(y=y, mo = m)
z = zipfile.ZipFile('../cache/{y}{mo}.zip'.format(y=y, mo = m))
rawData = pd.read_csv(z.open(z.namelist()[0]), low_memory=False)
print 'processing {y}{mo}.zip'.format(y=y, mo = m)
df, ca = processData(rawData)
if bigdf is None:
bigdf = df
else:
bigdf = bigdf.append(df, ignore_index=True)
ca_statistic.append(('{y}{mo}.zip'.format(y=y, mo = m), ca))
print '==> cleaned away {pc}%'.format(pc=ca)
print '==> added entries: {ne}'.format(ne=df.count()[0])
# save to csv
bigdf.to_csv('../cache/Big10FlightTable.csv')
| mit | 5,420,441,894,893,552,000 | 39.135135 | 118 | 0.646195 | false | 2.715801 | false | false | false |
stuartarchibald/numba | numba/tests/test_byteflow.py | 3 | 2426 | """
Test byteflow.py specific issues
"""
import unittest
from numba.tests.support import TestCase
from numba.core.compiler import run_frontend
class TestByteFlowIssues(TestCase):
def test_issue_5087(self):
# This is an odd issue. The exact number of print below is
# necessary to trigger it. Too many or too few will alter the behavior.
# Also note that the function below will not be executed. The problem
# occurs at compilation. The definition below is invalid for execution.
# The problem occurs in the bytecode analysis.
def udt():
print
print
print
for i in range:
print
print
print
print
print
print
print
print
print
print
print
print
print
print
print
print
print
print
for j in range:
print
print
print
print
print
print
print
for k in range:
for l in range:
print
print
print
print
print
print
print
print
print
print
if print:
for n in range:
print
else:
print
run_frontend(udt)
def test_issue_5097(self):
# Inspired by https://github.com/numba/numba/issues/5097
def udt():
for i in range(0):
if i > 0:
pass
a = None # noqa: F841
run_frontend(udt)
def test_issue_5680(self):
# From https://github.com/numba/numba/issues/5680#issuecomment-625351336
def udt():
for k in range(0):
if 1 == 1:
...
if 'a' == 'a':
...
run_frontend(udt)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | -2,437,444,889,852,700,700 | 24.808511 | 80 | 0.395301 | false | 5.708235 | true | false | false |
mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/nnvm/tests/python/frontend/onnx/model_zoo/super_resolution.py | 2 | 2037 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""NNVM symbol corresponding to super_resolution.onnx example."""
from nnvm import sym
def get_super_resolution():
factor = 3
size = 224
data = sym.Variable(name='9')
conv1 = sym.conv2d(data, channels=64, kernel_size=(5, 5), padding=(2, 2), use_bias=False)
relu1 = sym.relu(conv1 + sym.expand_dims(sym.Variable(name='2', shape=(64)), axis=1, num_newaxis=2))
conv2 = sym.conv2d(relu1, channels=64, kernel_size=(3, 3), padding=(1, 1), use_bias=False)
relu2 = sym.relu(conv2 + sym.expand_dims(sym.Variable(name='4', shape=(64)), axis=1, num_newaxis=2))
conv3 = sym.conv2d(relu2, channels=32, kernel_size=(3, 3), padding=(1, 1), use_bias=False)
relu3 = sym.relu(conv3 + sym.expand_dims(sym.Variable(name='6', shape=(32)), axis=1, num_newaxis=2))
conv4 = sym.conv2d(relu3, channels=factor**2, kernel_size=(3, 3), padding=(1, 1), use_bias=False)
conv4 = conv4 + sym.expand_dims(sym.Variable(name='8', shape=(factor**2)), axis=1, num_newaxis=2)
# TODO(zhreshold): allow shape inference for batch size > 1
r1 = sym.reshape(conv4, shape=(1, 1, factor, factor, size, size))
t1 = sym.transpose(r1, axes=(0, 1, 4, 2, 5, 3))
r2 = sym.reshape(t1, shape=(1, 1, size * factor, size * factor))
return r2
| apache-2.0 | -2,807,129,401,254,200,300 | 55.583333 | 104 | 0.697104 | false | 3.233333 | false | false | false |
64studio/pdk | utest.py | 1 | 1519 | # $Progeny$
#
# Copyright 2005 Progeny Linux Systems, Inc.
#
# This file is part of PDK.
#
# PDK is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# PDK is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PDK; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"Test runner for the PDK unit tests."
import unittest
from pdk.test.test_package import *
from pdk.test.test_compile import *
from pdk.test.test_util import *
from pdk.test.test_component import *
from pdk.test.test_cache import *
from pdk.test.test_audit import *
from pdk.test.test_channels import *
from pdk.test.test_rules import *
from pdk.test.test_component_tree_builder import *
from pdk.test.test_diff import *
from pdk.test.test_progress import *
from pdk.test.test_meta import *
from pdk.test.test_index_file import *
from pdk.test.test_debish_condition import *
from pdk.test.test_media import *
from pdk.test.test_commands import *
if __name__ == "__main__":
unittest.main()
# vim:set ai et sw=4 ts=4 tw=75:
| gpl-2.0 | 210,661,105,084,248,740 | 32.755556 | 70 | 0.736669 | false | 3.302174 | true | false | false |
sghai/robottelo | tests/foreman/api/test_permission.py | 1 | 15179 | # -*- coding: utf-8 -*-
"""Unit tests for the ``permissions`` paths.
Each ``APITestCase`` subclass tests a single URL. A full list of URLs to be
tested can be found here: http://theforeman.org/api/apidoc/v2/permissions.html
:Requirement: Permission
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: API
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import json
import re
from fauxfactory import gen_alphanumeric
from itertools import chain
from nailgun import entities
from nailgun.entity_fields import OneToManyField
from requests.exceptions import HTTPError
from robottelo import ssh
from robottelo.constants import PERMISSIONS
from robottelo.decorators import run_only_on, tier1, upgrade
from robottelo.helpers import get_nailgun_config, get_server_software
from robottelo.test import APITestCase
class PermissionTestCase(APITestCase):
"""Tests for the ``permissions`` path."""
@classmethod
def setUpClass(cls):
super(PermissionTestCase, cls).setUpClass()
cls.permissions = PERMISSIONS.copy()
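        # Adjust the expected permission set for upstream Foreman and for
        # optional plugins (openscap, remote execution) that may not be
        # installed on the server under test.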
if get_server_software() == 'upstream':
cls.permissions[None].extend(cls.permissions.pop('DiscoveryRule'))
cls.permissions[None].remove('app_root')
cls.permissions[None].remove('attachments')
cls.permissions[None].remove('configuration')
cls.permissions[None].remove('logs')
cls.permissions[None].remove('view_cases')
cls.permissions[None].remove('view_log_viewer')
result = ssh.command('rpm -qa | grep rubygem-foreman_openscap')
if result.return_code != 0:
cls.permissions.pop('ForemanOpenscap::Policy')
cls.permissions.pop('ForemanOpenscap::ScapContent')
cls.permissions[None].remove('destroy_arf_reports')
cls.permissions[None].remove('view_arf_reports')
cls.permissions[None].remove('create_arf_reports')
result = ssh.command(
'rpm -qa | grep rubygem-foreman_remote_execution'
)
if result.return_code != 0:
cls.permissions.pop('JobInvocation')
cls.permissions.pop('JobTemplate')
cls.permissions.pop('RemoteExecutionFeature')
cls.permissions.pop('TemplateInvocation')
#: e.g. ['Architecture', 'Audit', 'AuthSourceLdap', …]
cls.permission_resource_types = list(cls.permissions.keys())
#: e.g. ['view_architectures', 'create_architectures', …]
cls.permission_names = list(
chain.from_iterable(cls.permissions.values()))
@run_only_on('sat')
@tier1
def test_positive_search_by_name(self):
"""Search for a permission by name.
:id: 1b6117f6-599d-4b2d-80a8-1e0764bdc04d
:expectedresults: Only one permission is returned, and the permission
returned is the one searched for.
:CaseImportance: Critical
"""
failures = {}
for permission_name in self.permission_names:
results = entities.Permission(name=permission_name).search()
if (len(results) != 1 or
len(results) == 1 and results[0].name != permission_name):
failures[permission_name] = {
'length': len(results),
'returned_names': [result.name for result in results]
}
if failures:
self.fail(json.dumps(failures, indent=True, sort_keys=True))
@run_only_on('sat')
@tier1
def test_positive_search_by_resource_type(self):
"""Search for permissions by resource type.
:id: 29d9362b-1bf3-4722-b40f-a5e8b4d0d9ba
:expectedresults: The permissions returned are equal to what is listed
for that resource type in :data:`robottelo.constants.PERMISSIONS`.
:CaseImportance: Critical
"""
failures = {}
for resource_type in self.permission_resource_types:
if resource_type is None:
continue
perm_group = entities.Permission(
resource_type=resource_type).search()
permissions = {perm.name for perm in perm_group}
expected_permissions = set(self.permissions[resource_type])
added = tuple(permissions - expected_permissions)
removed = tuple(expected_permissions - permissions)
if added or removed:
failures[resource_type] = {}
            if added:
failures[resource_type]['added'] = added
if removed:
failures[resource_type]['removed'] = removed
if failures:
self.fail(json.dumps(failures, indent=True, sort_keys=True))
@run_only_on('sat')
@tier1
def test_positive_search(self):
"""search with no parameters return all permissions
:id: e58308df-19ec-415d-8fa1-63ebf3cd0ad6
:expectedresults: Search returns a list of all expected permissions
:CaseImportance: Critical
"""
permissions = entities.Permission().search(query={'per_page': 1000})
names = {perm.name for perm in permissions}
resource_types = {perm.resource_type for perm in permissions}
expected_names = set(self.permission_names)
expected_resource_types = set(self.permission_resource_types)
added_resource_types = tuple(resource_types - expected_resource_types)
removed_resource_types = tuple(
expected_resource_types - resource_types)
added_names = tuple(names - expected_names)
removed_names = tuple(expected_names - names)
diff = {}
if added_resource_types:
diff['added_resource_types'] = added_resource_types
if removed_resource_types:
diff['removed_resource_types'] = removed_resource_types
if added_names:
diff['added_names'] = added_names
if removed_names:
diff['removed_names'] = removed_names
if diff:
self.fail(json.dumps(diff, indent=True, sort_keys=True))
# FIXME: This method is a hack. This information should somehow be tied
# directly to the `Entity` classes.
def _permission_name(entity, which_perm):
"""Find a permission name.
Attempt to locate a permission in :data:`robottelo.constants.PERMISSIONS`.
For example, return 'view_architectures' if ``entity`` is ``Architecture``
and ``which_perm`` is 'read'.
:param entity: A ``nailgun.entity_mixins.Entity`` subclass.
:param str which_perm: Either the word "create", "read", "update" or
"delete".
:raise: ``LookupError`` if a relevant permission cannot be found, or if
multiple results are found.
"""
pattern = {
'create': '^create_',
'delete': '^destroy_',
'read': '^view_',
'update': '^edit_',
}[which_perm]
perm_names = []
permissions = (PERMISSIONS.get(entity.__name__) or
PERMISSIONS.get('Katello::{0}'.format(entity.__name__)))
for permission in permissions:
match = re.match(pattern, permission)
if match is not None:
perm_names.append(permission)
if len(perm_names) != 1:
raise LookupError(
'Could not find the requested permission. Found: {0}'
.format(perm_names)
)
return perm_names[0]
# This class might better belong in module test_multiple_paths.
class UserRoleTestCase(APITestCase):
"""Give a user various permissions and see if they are enforced."""
@classmethod
def setUpClass(cls):
"""Create common entities"""
super(UserRoleTestCase, cls).setUpClass()
cls.org = entities.Organization().create()
cls.loc = entities.Location().create()
def setUp(self): # noqa
"""Create a set of credentials and a user."""
super(UserRoleTestCase, self).setUp()
self.cfg = get_nailgun_config()
self.cfg.auth = (gen_alphanumeric(), gen_alphanumeric()) # user, pass
self.user = entities.User(
login=self.cfg.auth[0],
password=self.cfg.auth[1],
organization=[self.org],
location=[self.loc],
).create()
def give_user_permission(self, perm_name):
"""Give ``self.user`` the ``perm_name`` permission.
This method creates a role and filter to accomplish the above goal.
        When complete, the relevant relationships look like this:
user → role ← filter → permission
:param str perm_name: The name of a permission. For example:
'create_architectures'.
:raises: ``AssertionError`` if more than one permission is found when
searching for the permission with name ``perm_name``.
:raises: ``requests.exceptions.HTTPError`` if an error occurs when
updating ``self.user``'s roles.
:returns: Nothing.
"""
role = entities.Role().create()
permissions = entities.Permission(name=perm_name).search()
self.assertEqual(len(permissions), 1)
entities.Filter(permission=permissions, role=role).create()
self.user.role += [role]
self.user = self.user.update(['role'])
def set_taxonomies(self, entity, organization=None, location=None):
"""Set organization and location for entity if it supports them.
Only administrator can choose empty taxonomies or taxonomies that
they aren't assigned to, other users can select only taxonomies they
are granted to assign and they can't leave the selection empty.
:param entity: Initialised nailgun's Entity object
:param organization: Organization object or id
:param location: Location object or id
:return: nailgun's Entity object with updated fields
"""
entity_fields = entity.get_fields()
if 'organization' in entity_fields:
if isinstance(entity_fields['organization'], OneToManyField):
entity.organization = [organization]
else:
entity.organization = organization
if 'location' in entity_fields:
if isinstance(entity_fields['location'], OneToManyField):
entity.location = [location]
else:
entity.location = location
return entity
@tier1
def test_positive_check_create(self):
"""Check whether the "create_*" role has an effect.
:id: e4c92365-58b7-4538-9d1b-93f3cf51fbef
:expectedresults: A user cannot create an entity when missing the
"create_*" role, and they can create an entity when given the
"create_*" role.
:CaseImportance: Critical
:BZ: 1464137
"""
for entity_cls in (
entities.Architecture,
entities.Domain,
entities.ActivationKey):
with self.subTest(entity_cls):
with self.assertRaises(HTTPError):
entity_cls(self.cfg).create()
self.give_user_permission(
_permission_name(entity_cls, 'create')
)
entity = self.set_taxonomies(
entity_cls(self.cfg), self.org, self.loc)
# Entities with both org and loc require
# additional permissions to set them.
fields = set(['organization', 'location'])
if fields.issubset(set(entity.get_fields())):
self.give_user_permission('assign_organizations')
self.give_user_permission('assign_locations')
entity = entity.create_json()
entity_cls(id=entity['id']).read() # As admin user.
@tier1
def test_positive_check_read(self):
"""Check whether the "view_*" role has an effect.
:id: 55689121-2646-414f-beb1-dbba5973c523
:expectedresults: A user cannot read an entity when missing the
"view_*" role, and they can read an entity when given the "view_*"
role.
:CaseImportance: Critical
"""
for entity_cls in (
entities.Architecture,
entities.Domain,
entities.ActivationKey):
with self.subTest(entity_cls):
entity = self.set_taxonomies(entity_cls(), self.org, self.loc)
entity = entity.create()
with self.assertRaises(HTTPError):
entity_cls(self.cfg, id=entity.id).read()
self.give_user_permission(_permission_name(entity_cls, 'read'))
entity_cls(self.cfg, id=entity.id).read()
@upgrade
@tier1
def test_positive_check_delete(self):
"""Check whether the "destroy_*" role has an effect.
:id: 71365147-51ef-4602-948f-78a5e78e32b4
        :expectedresults: A user cannot delete an entity when missing the
            "destroy_*" role, and they can delete an entity when given the
            "destroy_*" role.
:CaseImportance: Critical
"""
for entity_cls in (
entities.Architecture,
entities.Domain,
entities.ActivationKey):
with self.subTest(entity_cls):
entity = self.set_taxonomies(entity_cls(), self.org, self.loc)
entity = entity.create()
with self.assertRaises(HTTPError):
entity_cls(self.cfg, id=entity.id).delete()
self.give_user_permission(
_permission_name(entity_cls, 'delete')
)
entity_cls(self.cfg, id=entity.id).delete()
with self.assertRaises(HTTPError):
entity.read() # As admin user
@tier1
def test_positive_check_update(self):
"""Check whether the "edit_*" role has an effect.
:id: b5de2115-b031-413e-8e5b-eac8cb714174
:expectedresults: A user cannot update an entity when missing the
"edit_*" role, and they can update an entity when given the
"edit_*" role.
NOTE: This method will only work if ``entity`` has a name.
:CaseImportance: Critical
"""
for entity_cls in (
entities.Architecture,
entities.Domain,
entities.ActivationKey
):
with self.subTest(entity_cls):
entity = self.set_taxonomies(entity_cls(), self.org, self.loc)
entity = entity.create()
name = entity.get_fields()['name'].gen_value()
with self.assertRaises(HTTPError):
entity_cls(self.cfg, id=entity.id, name=name).update(
['name']
)
self.give_user_permission(
_permission_name(entity_cls, 'update')
)
# update() calls read() under the hood, which triggers
# permission error
entity_cls(self.cfg, id=entity.id, name=name).update_json(
['name']
)
| gpl-3.0 | -6,009,887,279,699,730,000 | 36.82793 | 79 | 0.598062 | false | 4.239519 | true | false | false |
alfred82santa/envsense | envsense/logic/__init__.py | 1 | 5155 | import asyncio
import weakref
from collections import OrderedDict
from envsense.devices import BaseDeviceManager, BaseDevice
class LogicDeviceManager(BaseDeviceManager):
CONFIG_KEY = 'logics'
def __init__(self, app):
super(LogicDeviceManager, self).__init__(app)
# Add sensors
self.items['AlertLogic'] = AlertLogic(app, refresh=0.1)
self.items['BuzzerAlertLogic'] = BuzzerAlertLogic(app, refresh=0.5)
self.items['LedAlertLogic'] = LedAlertLogic(app, refresh=0.5)
from .upm import GasLogic, SoundLogic, UVLogic, LightLogic, TempLogic, TouchLogic
self.items['GasLogic'] = GasLogic(app, refresh=0.5)
self.items['SoundLogic'] = SoundLogic(app, refresh=0.5)
self.items['UVLogic'] = UVLogic(app, refresh=0.5)
self.items['LightLogic'] = LightLogic(app, refresh=0.5)
self.items['TempLogic'] = TempLogic(app, refresh=0.5)
self.items['TouchLogic'] = TouchLogic(app, refresh=0.1)
@asyncio.coroutine
def start(self):
for logic in self.items.values():
asyncio.async(logic.start(), loop=asyncio.get_event_loop())
def factory(app):
return LogicDeviceManager(app)
class BaseLogic(BaseDevice):
def __init__(self, app, refresh=1, *args, **kwargs):
self.refresh = refresh
self._app = weakref.ref(app)
@property
def app(self):
return self._app()
@asyncio.coroutine
def start(self):
while True:
yield from self.do_process()
yield from asyncio.sleep(self.refresh)
@asyncio.coroutine
def do_process(self):
pass
class BuzzerAlertLogic(BaseLogic):
def __init__(self, *args, **kwargs):
super(BuzzerAlertLogic, self).__init__(*args, **kwargs)
self.active = False
self.time = 3
@asyncio.coroutine
def do_process(self):
actuator = self.app.actuator_manager.items['BuzzerActuator']
import pyupm_buzzer as buzzer
if self.active:
actuator.chord = buzzer.DO
else:
actuator.chord = None
class LedAlertLogic(BaseLogic):
def __init__(self, *args, **kwargs):
super(LedAlertLogic, self).__init__(*args, **kwargs)
self.active = False
@asyncio.coroutine
def do_process(self):
led_green = self.app.actuator_manager.items['GreenLedActuator']
led_red = self.app.actuator_manager.items['RedLedActuator']
if self.active:
led_green.status = False
else:
led_green.status = True
led_red.status = not led_green.status
class AlertLogic(BaseLogic):
ALERT = 'alert'
WARN = 'warning'
INFO = 'information'
def __init__(self, *args, **kwargs):
super(AlertLogic, self).__init__(*args, **kwargs)
self.alerts = OrderedDict()
self.stop_buffer = False
def set_alert(self, device_name, level, text, buzzer=False):
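        # When an already-registered alert escalates to ALERT level, re-arm the
        # buzzer (stop_buffer presumably acts as a "silence" latch).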
if device_name in self.alerts and level == self.ALERT and self.alerts[device_name]['level'] != self.ALERT:
self.stop_buffer = False
self.alerts[device_name] = {'level': level, 'text': text, 'buzzer': buzzer}
def remove_alert(self, device_name):
del self.alerts[device_name]
@asyncio.coroutine
def do_process(self):
alerts = [alrt for alrt in self.alerts.values() if alrt['level'] == self.ALERT]
warns = [alrt for alrt in self.alerts.values() if alrt['level'] == self.WARN]
infos = [alrt for alrt in self.alerts.values() if alrt['level'] == self.INFO]
actuator = self.app.actuator_manager.items['DisplayActuator']
if len(alerts):
actuator.color = (255, 0, 0)
alrt = alerts[0]
actuator.line_1 = alrt['text']
if len([a for a in alerts if a['buzzer']]) and not self.stop_buffer:
self.app.logic_manager.items['BuzzerAlertLogic'].active = True
self.app.logic_manager.items['LedAlertLogic'].active = True
elif len(warns):
actuator.color = (255, 255, 0)
alrt = warns[0]
actuator.line_1 = alrt['text']
self.app.logic_manager.items['LedAlertLogic'].active = True
self.app.logic_manager.items['BuzzerAlertLogic'].active = False
else:
actuator.color = (0, 255, 0)
if len(infos):
alrt = infos[0]
actuator.line_1 = alrt['text']
self.app.logic_manager.items['LedAlertLogic'].active = False
self.app.logic_manager.items['BuzzerAlertLogic'].active = False
actuator.line_2 = "A:{};W:{};I:{}".format(len(alerts), len(warns), len(infos))
def get_structure(self):
struct = super(AlertLogic, self).get_structure()
struct['functions']['set_alert'] = {'device_name': 'string',
'level': [self.ALERT, self.WARN, self.INFO],
'text': 'string',
'buzzer': 'bool'}
struct['functions']['remove_alert'] = {'device_name': 'string'}
return struct
| mit | -1,424,948,867,478,193,400 | 32.914474 | 114 | 0.592435 | false | 3.612474 | false | false | false |
Hedde/django-networth | networth/unittests.py | 1 | 1434 | __author__ = 'heddevanderheide'
import unittest
# App specific
from networth.mixins import NetworthMixin
class TestObject(NetworthMixin):
first_name = ''
last_name = ''
tags = None
class Networth:
fields = (
('first_name', (True, 1)),
('last_name', (lambda f: f.startswith('P'), 5)),
('tags', (lambda f: len(f), 'result')),
)
def __init__(self, **kwargs):
self.first_name = kwargs.get('first_name', '')
self.last_name = kwargs.get('last_name', '')
self.tags = filter(None, kwargs.get('tags', '').split(','))
def add_tag(self, tag):
if self.tags:
self.tags.append(tag)
else:
self.tags = [tag]
class TestNetworthMixin(unittest.TestCase):
def setUp(self):
self.obj_1 = TestObject(
first_name='Pete'
)
self.obj_2 = TestObject(
first_name='Pete',
last_name='Philly'
)
self.obj_3 = TestObject(
first_name='Pete',
last_name='Philly',
tags='foo'
)
def test_obj_1(self):
self.assertEqual(self.obj_1.networth(), 2)
def test_obj_2(self):
self.assertEqual(self.obj_2.networth(), 7)
def test_obj_3(self):
self.assertEqual(self.obj_3.networth(), 8)
self.obj_3.add_tag('bar')
self.assertEqual(self.obj_3.networth(), 9) | mit | 4,209,273,522,948,263,400 | 22.916667 | 67 | 0.528591 | false | 3.374118 | true | false | false |
jrosebr1/imutils | demos/picamera_fps_demo.py | 1 | 2980 | # author: Adrian Rosebrock
# website: http://www.pyimagesearch.com
# USAGE
# BE SURE TO INSTALL 'imutils' PRIOR TO EXECUTING THIS COMMAND
# python picamera_fps_demo.py
# python picamera_fps_demo.py --display 1
# import the necessary packages
from __future__ import print_function
from imutils.video import VideoStream
from imutils.video import FPS
from picamera.array import PiRGBArray
from picamera import PiCamera
import argparse
import imutils
import time
import cv2
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-n", "--num-frames", type=int, default=100,
help="# of frames to loop over for FPS test")
ap.add_argument("-d", "--display", type=int, default=-1,
help="Whether or not frames should be displayed")
args = vars(ap.parse_args())
# initialize the camera and stream
camera = PiCamera()
camera.resolution = (320, 240)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(320, 240))
stream = camera.capture_continuous(rawCapture, format="bgr",
use_video_port=True)
# allow the camera to warmup and start the FPS counter
print("[INFO] sampling frames from `picamera` module...")
time.sleep(2.0)
fps = FPS().start()
# loop over some frames
for (i, f) in enumerate(stream):
# grab the frame from the stream and resize it to have a maximum
# width of 400 pixels
frame = f.array
frame = imutils.resize(frame, width=400)
# check to see if the frame should be displayed to our screen
if args["display"] > 0:
cv2.imshow("Frame", frame)
key = cv2.waitKey(1) & 0xFF
# clear the stream in preparation for the next frame and update
# the FPS counter
rawCapture.truncate(0)
fps.update()
# check to see if the desired number of frames have been reached
if i == args["num_frames"]:
break
# stop the timer and display FPS information
fps.stop()
print("[INFO] elasped time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
# do a bit of cleanup
cv2.destroyAllWindows()
stream.close()
rawCapture.close()
camera.close()
# created a *threaded *video stream, allow the camera sensor to warmup,
# and start the FPS counter
print("[INFO] sampling THREADED frames from `picamera` module...")
vs = VideoStream(usePiCamera=True).start()
time.sleep(2.0)
fps = FPS().start()
# loop over some frames...this time using the threaded stream
while fps._numFrames < args["num_frames"]:
# grab the frame from the threaded video stream and resize it
# to have a maximum width of 400 pixels
frame = vs.read()
frame = imutils.resize(frame, width=400)
# check to see if the frame should be displayed to our screen
if args["display"] > 0:
cv2.imshow("Frame", frame)
key = cv2.waitKey(1) & 0xFF
# update the FPS counter
fps.update()
# stop the timer and display FPS information
fps.stop()
print("[INFO] elasped time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop() | mit | -895,183,849,479,864,700 | 28.22549 | 71 | 0.723154 | false | 3.225108 | false | false | false |
weissercn/MLTools | Dalitz_simplified/classifier_eval_simplified_with_MLP_code.py | 1 | 8748 |
#adapted from the example at http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html
"""
This script can be used to get the p value for classifiers. It takes input files with column vectors corresponding to features and labels.
There are then two hyperparameter-search routes one can go down. When mode has a value of 1, a grid search is performed on
one set of input files. If it is 2, the hyperparameter search is performed by spearmint. When the mode is turned off (0),
the p value is computed for multiple sets of input files and the p value distribution is plotted. One sets all the variables,
including the classifier, in the "args" list. The classifier provided is ignored if keras_mode is on (1), in which case a Keras neural
network is used.
"""
from __future__ import print_function
print(__doc__)
import os
import p_value_scoring_object
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize
from sklearn import cross_validation
from sklearn.svm import SVC
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import load_iris
from sklearn.cross_validation import StratifiedShuffleSplit
from sklearn.grid_search import GridSearchCV
##############################################################################
# Utility function to move the midpoint of a colormap to be around
# the values of interest.
class MidpointNormalize(Normalize):
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
def classifier_eval(mode,keras_mode,args):
##############################################################################
# Setting parameters
#
name=args[0]
sample1_name= args[1]
sample2_name= args[2]
shuffling_seed = args[3]
#mode =0 if you want evaluation of a model =1 if grid hyperparameter search =2 if spearmint hyperparameter search
comp_file_list=args[4]
print(comp_file_list)
cv_n_iter = args[5]
clf = args[6]
C_range = args[7]
gamma_range = args[8]
if mode==0:
#For standard evaluation
score_list=[]
print("standard evaluation mode")
elif mode==1:
#For grid search
print("grid hyperparameter search mode")
param_grid = dict(gamma=gamma_range, C=C_range)
elif mode==2:
#For spearmint hyperparameter search
print("spearmint hyperparameter search mode")
else:
print("No valid mode chosen")
return 1
##############################################################################
# Load and prepare data set
#
# dataset for grid search
for comp_file_0,comp_file_1 in comp_file_list:
print("Operating of files :"+comp_file_0+" "+comp_file_1)
#extracts data from the files
features_0=np.loadtxt(comp_file_0,dtype='d')
features_1=np.loadtxt(comp_file_1,dtype='d')
#determine how many data points are in each sample
no_0=features_0.shape[0]
no_1=features_1.shape[0]
#Give all samples in file 0 the label 0 and in file 1 the feature 1
label_0=np.zeros((no_0,1))
label_1=np.ones((no_1,1))
#Create an array containing samples and features.
data_0=np.c_[features_0,label_0]
data_1=np.c_[features_1,label_1]
data=np.r_[data_0,data_1]
np.random.shuffle(data)
X=data[:,:-1]
y=data[:,-1]
acv = StratifiedShuffleSplit(y, n_iter=cv_n_iter, test_size=0.2, random_state=42)
print(X)
print(y)
# It is usually a good idea to scale the data for SVM training.
# We are cheating a bit in this example in scaling all of the data,
# instead of fitting the transformation on the training set and
# just applying it on the test set.
scaler = StandardScaler()
X = scaler.fit_transform(X)
if mode==1:
##############################################################################
# Grid Search
#
# Train classifiers
#
# For an initial search, a logarithmic grid with basis
# 10 is often helpful. Using a basis of 2, a finer
# tuning can be achieved but at a much higher cost.
grid = GridSearchCV(clf, scoring=p_value_scoring_object.p_value_scoring_object ,param_grid=param_grid, cv=acv)
grid.fit(X, y)
print("The best parameters are %s with a score of %0.2f"
% (grid.best_params_, grid.best_score_))
# Now we need to fit a classifier for all parameters in the 2d version
# (we use a smaller set of parameters here because it takes a while to train)
C_2d_range = [1e-2, 1, 1e2]
gamma_2d_range = [1e-1, 1, 1e1]
classifiers = []
for C in C_2d_range:
for gamma in gamma_2d_range:
clf = SVC(C=C, gamma=gamma)
                # use the full feature matrix here (the sklearn example this was
                # adapted from fitted on a 2-feature subset called X_2d)
                clf.fit(X, y)
classifiers.append((C, gamma, clf))
##############################################################################
# visualization
#
# draw visualization of parameter effects
plt.figure(figsize=(8, 6))
xx, yy = np.meshgrid(np.linspace(-3, 3, 200), np.linspace(-3, 3, 200))
for (k, (C, gamma, clf)) in enumerate(classifiers):
# evaluate decision function in a grid
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# visualize decision function for these parameters
plt.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
plt.title("gamma=10^%d, C=10^%d" % (np.log10(gamma), np.log10(C)),size='medium')
# visualize parameter's effect on decision function
plt.pcolormesh(xx, yy, -Z, cmap=plt.cm.RdBu)
            plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.axis('tight')
plt.savefig('prediction_comparison.png')
# plot the scores of the grid
# grid_scores_ contains parameter settings and scores
# We extract just the scores
scores = [x[1] for x in grid.grid_scores_]
scores = np.array(scores).reshape(len(C_range), len(gamma_range))
# Draw heatmap of the validation accuracy as a function of gamma and C
#
            # The scores are encoded as colors with the hot colormap, which varies from
            # dark red to bright yellow. A custom normalizer shifts the colormap mid-point
            # (here close to zero, matching the p-value based scores) so that the small
            # variations of score values in the interesting range stay visible without
            # collapsing all the low score values to the same color.
plt.figure(figsize=(8, 6))
plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
norm=MidpointNormalize(vmin=-1.0, midpoint=-0.0001))
plt.xlabel('gamma')
plt.ylabel('C')
plt.colorbar()
plt.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45)
plt.yticks(np.arange(len(C_range)), C_range)
plt.title('Validation accuracy')
plt.savefig('Heat_map.png')
else:
if keras_mode==1:
from keras.utils import np_utils, generic_utils
dimof_output = len(set(y.flat))
y = np_utils.to_categorical(y, dimof_output)
print("delete this line")
print(X)
print(y)
scores = cross_validation.cross_val_score(clf,X,y,cv=acv,scoring=p_value_scoring_object.p_value_scoring_object)
print(scores)
            if mode==2:
                return (-1)* np.mean(scores)
            # only mode 0 accumulates scores across the file pairs
            score_list.append(np.mean(scores))
############################################################################################################################################################
############################################################### Evaluation of results ####################################################################
############################################################################################################################################################
if mode==0:
# The score list has been computed. Let's plot the distribution
print(score_list)
print("I havent implemented plotting of the distribution")
if __name__ == "__main__":
print("Executing classifier_eval_simplified as a stand-alone script")
print()
comp_file_list=[]
for i in range(1,100):
comp_file_list.append((os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data.{0}.0.txt".format(i), os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data.2{0}.1.txt".format(str(i).zfill(2))))
#clf = SVC(C=100,gamma=0.1,probability=True, cache_size=7000)
from model import MLP
clf = MLP(n_hidden=500, n_deep=7, l1_norm=0, drop=0.5, verbose=0)
args=["dalitz","particle","antiparticle",100,comp_file_list,2,clf,np.logspace(-2, 10, 13),np.logspace(-9, 3, 13)]
#classifier_eval_simplified(aC,agamma)
classifier_eval(0,0,args)
| mit | -6,771,905,230,740,556,000 | 35.298755 | 188 | 0.624714 | false | 3.311128 | false | false | false |
mutanthost/plexhole | PlexConnect.py | 1 | 5291 | #!/usr/bin/env python
"""
PlexConnect
Sources:
inter-process-communication (queue): http://pymotw.com/2/multiprocessing/communication.html
"""
import sys, time
from os import sep
import socket
from multiprocessing import Process, Pipe
from multiprocessing.managers import BaseManager
import signal, errno
from Version import __VERSION__
import DNSServer, WebServer
import Settings, ATVSettings
from PILBackgrounds import isPILinstalled
from Debug import * # dprint()
def getIP_self():
cfg = param['CSettings']
if cfg.getSetting('enable_plexconnect_autodetect')=='True':
# get public ip of machine running PlexConnect
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
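        # connect() on a UDP socket sends no traffic; it only makes the OS choose
        # the outgoing interface, whose address is then read back below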
s.connect(('1.2.3.4', 1000))
IP = s.getsockname()[0]
dprint('PlexConnect', 0, "IP_self: "+IP)
else:
# manual override from "settings.cfg"
IP = cfg.getSetting('ip_plexconnect')
dprint('PlexConnect', 0, "IP_self (from settings): "+IP)
return IP
# initializer for Manager, proxy-ing ATVSettings to WebServer/XMLConverter
def initProxy():
signal.signal(signal.SIGINT, signal.SIG_IGN)
procs = {}
pipes = {}
param = {}
running = False
def startup():
global procs
global pipes
global param
global running
# Settings
cfg = Settings.CSettings()
param['CSettings'] = cfg
# Logfile
if cfg.getSetting('logpath').startswith('.'):
# relative to current path
logpath = sys.path[0] + sep + cfg.getSetting('logpath')
else:
# absolute path
logpath = cfg.getSetting('logpath')
param['LogFile'] = logpath + sep + 'PlexConnect.log'
param['LogLevel'] = cfg.getSetting('loglevel')
dinit('PlexConnect', param, True) # init logging, new file, main process
dprint('PlexConnect', 0, "Version: {0}", __VERSION__)
dprint('PlexConnect', 0, "Python: {0}", sys.version)
dprint('PlexConnect', 0, "Host OS: {0}", sys.platform)
dprint('PlexConnect', 0, "PILBackgrounds: Is PIL installed? {0}", isPILinstalled())
# more Settings
param['IP_self'] = getIP_self()
param['HostToIntercept'] = cfg.getSetting('hosttointercept')
param['baseURL'] = 'http://'+ param['HostToIntercept']
# proxy for ATVSettings
proxy = BaseManager()
proxy.register('ATVSettings', ATVSettings.CATVSettings)
proxy.start(initProxy)
param['CATVSettings'] = proxy.ATVSettings()
running = True
# init DNSServer
if cfg.getSetting('enable_dnsserver')=='True':
master, slave = Pipe() # endpoint [0]-PlexConnect, [1]-DNSServer
proc = Process(target=DNSServer.Run, args=(slave, param))
proc.start()
time.sleep(0.1)
if proc.is_alive():
procs['DNSServer'] = proc
pipes['DNSServer'] = master
else:
dprint('PlexConnect', 0, "DNSServer not alive. Shutting down.")
running = False
# init WebServer
if running:
master, slave = Pipe() # endpoint [0]-PlexConnect, [1]-WebServer
proc = Process(target=WebServer.Run, args=(slave, param))
proc.start()
time.sleep(0.1)
if proc.is_alive():
procs['WebServer'] = proc
pipes['WebServer'] = master
else:
dprint('PlexConnect', 0, "WebServer not alive. Shutting down.")
running = False
# init WebServer_SSL
if running and \
cfg.getSetting('enable_webserver_ssl')=='True':
master, slave = Pipe() # endpoint [0]-PlexConnect, [1]-WebServer
proc = Process(target=WebServer.Run_SSL, args=(slave, param))
proc.start()
time.sleep(0.1)
if proc.is_alive():
procs['WebServer_SSL'] = proc
pipes['WebServer_SSL'] = master
else:
dprint('PlexConnect', 0, "WebServer_SSL not alive. Shutting down.")
running = False
# not started successful - clean up
if not running:
cmdShutdown()
shutdown()
return running
def run(timeout=60):
# do something important
try:
time.sleep(timeout)
except IOError as e:
if e.errno == errno.EINTR and not running:
pass # mask "IOError: [Errno 4] Interrupted function call"
else:
raise
return running
def shutdown():
for slave in procs:
procs[slave].join()
param['CATVSettings'].saveSettings()
dprint('PlexConnect', 0, "shutdown")
def cmdShutdown():
global running
running = False
# send shutdown to all pipes
for slave in pipes:
pipes[slave].send('shutdown')
dprint('PlexConnect', 0, "Shutting down.")
def sighandler_shutdown(signum, frame):
signal.signal(signal.SIGINT, signal.SIG_IGN) # we heard you!
cmdShutdown()
if __name__=="__main__":
signal.signal(signal.SIGINT, sighandler_shutdown)
signal.signal(signal.SIGTERM, sighandler_shutdown)
dprint('PlexConnect', 0, "***")
dprint('PlexConnect', 0, "PlexConnect")
dprint('PlexConnect', 0, "Press CTRL-C to shut down.")
dprint('PlexConnect', 0, "***")
running = startup()
while running:
running = run()
shutdown()
| gpl-2.0 | 4,669,393,885,179,552,000 | 26.414508 | 91 | 0.609904 | false | 3.765836 | false | false | false |
mat128/python-ubersmith-remote-module-server | ubersmith_remote_module_server/router.py | 1 | 1221 | # Copyright 2016 Internap
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Router(object):
def __init__(self, env_as_kwarg=True):
self.env_as_kwarg = env_as_kwarg
def invoke_method(self, module, method, params=None, env=None, callback=None):
if params is None:
params = []
if env is None:
env = {}
if self.env_as_kwarg:
additional_kwargs = {'env': env}
else:
additional_kwargs = {}
return getattr(module, method)(*params, **additional_kwargs)
def list_implemented_methods(self, module):
return [method for method in dir(module) if callable(getattr(module, method)) and not method.startswith('_')]
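# Minimal usage sketch ("billing_module" and the method name are hypothetical;
# any object exposing public callables works):
#   router = Router(env_as_kwarg=True)
#   router.list_implemented_methods(billing_module)
#   router.invoke_method(billing_module, 'client_add', params=['Acme'], env={'user': 'admin'})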
| apache-2.0 | 3,638,908,354,356,112,000 | 36 | 117 | 0.671581 | false | 4.056478 | false | false | false |
emnh/namingmuse | namingmuse/filepath.py | 2 | 2931 | """
Contains filepath, a path representation class
$Id:
"""
import os
from musexceptions import *
class FilePath(object):
"""A class that represents a file path.
It also provides some useful and common methods regarding paths.
We pass around paths as encoded strings (not unicode) by default because
then we can work with files with unknown encoding.
"""
def __init__(self, path, *filep, **kwargs):
'''
Pass in unicode keyword to set encoding for returning unicode strings on
unicode(fp) or getName(unicode=True).
'''
assert not isinstance(path, unicode), 'FilePaths should be passed around encoded'
for p in filep:
assert not isinstance(p, unicode), 'FilePaths should be passed around encoded'
if 'encoding' in kwargs:
self.encoding = kwargs['encoding']
else:
self.encoding = 'ascii'
if isinstance(path, FilePath):
path = path.fullpath
else:
path = os.path.abspath(path)
if len(filep) > 0:
path = os.path.join(path, *filep)
self.fullpath = path
def getName(self, unicode=False):
s = os.path.basename(self.fullpath)
if unicode:
s = self.decode(s)
return s
def getParent(self):
return FilePath(os.path.dirname(self.fullpath))
def getExt(self):
return os.path.splitext(self.fullpath)[1]
def getFileType(self):
return self.getExt()[1:].lower()
def __add__(self, other):
return FilePath(self, other)
def __len__(self):
return len(self.fullpath)
def __str__(self):
s = self.fullpath
return s
def decode(self, s):
try:
s = s.decode(self.encoding)
except Exception, e:
print NamingMuseWarning('failed to decode path %s with encoding %s' % (s, self.encoding))
s = s.decode(self.encoding, 'ignore')
return s
def __unicode__(self):
return self.decode(self.fullpath)
def __repr__(self):
return self.__str__()
def __cmp__(self, other):
if not isinstance(other, (FilePath, basestring)):
raise TypeError(\
"can't compare FilePath with non-FilePath/string object")
if isinstance(other, FilePath):
other = other.fullpath
return cmp(self.fullpath, other)
def rename(self, dst):
return os.rename(str(self), str(dst))
def mkdir(self):
return os.mkdir(str(self))
def rmdir(self):
return os.rmdir(str(self))
def exists(self):
return os.path.exists(str(self))
def isdir(self):
return os.path.isdir(str(self))
def listdir(self):
return os.listdir(str(self))
def walk(self):
for x in os.walk(str(self)):
yield x
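# Illustrative usage (paths are hypothetical; FilePath expects byte strings and
# only decodes with the given encoding when unicode output is requested):
#   fp = FilePath('/music/album', 'track01.flac', encoding='utf-8')
#   fp.getExt()          # '.flac'
#   fp.getFileType()     # 'flac'
#   fp.getName(unicode=True)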
| gpl-2.0 | -8,755,508,739,750,030,000 | 26.914286 | 101 | 0.57523 | false | 4.169275 | false | false | false |
Zeght/Prass | tools.py | 2 | 3735 | from common import PrassError
import bisect
import math
def parse_scxvid_keyframes(text):
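    # Lines starting with 'i' in an SCXvid/XviD first-pass log mark keyframes
    # (I-frames); the -3 presumably compensates for the log's header lines so
    # that the returned values map to zero-based frame numbers.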
return [i-3 for i,line in enumerate(text.splitlines()) if line and line[0] == 'i']
def parse_keyframes(path):
with open(path) as file_object:
text = file_object.read()
if text.find('# XviD 2pass stat file')>=0:
frames = parse_scxvid_keyframes(text)
else:
raise PrassError('Unsupported keyframes type')
if 0 not in frames:
frames.insert(0, 0)
return frames
class Timecodes(object):
TIMESTAMP_END = 1
TIMESTAMP_START = 2
def __init__(self, times, default_fps):
super(Timecodes, self).__init__()
self.times = times
self.default_frame_duration = 1000.0 / default_fps if default_fps else None
def get_frame_time(self, number, kind=None):
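        # With kind=TIMESTAMP_START/TIMESTAMP_END the returned time is the midpoint
        # between this frame and the previous/next one, which is how subtitle start
        # and end times are typically snapped to frame boundaries.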
if kind == self.TIMESTAMP_START:
prev = self.get_frame_time(number-1)
curr = self.get_frame_time(number)
return prev + int(round((curr - prev) / 2.0))
elif kind == self.TIMESTAMP_END:
curr = self.get_frame_time(number)
after = self.get_frame_time(number+1)
return curr + int(round((after - curr) / 2.0))
try:
return self.times[number]
except IndexError:
if not self.default_frame_duration:
raise ValueError("Cannot calculate frame timestamp without frame duration")
past_end, last_time = number, 0
if self.times:
past_end, last_time = (number - len(self.times) + 1), self.times[-1]
return int(round(past_end * self.default_frame_duration + last_time))
def get_frame_number(self, ms, kind=None):
if kind == self.TIMESTAMP_START:
return self.get_frame_number(ms - 1) + 1
elif kind == self.TIMESTAMP_END:
return self.get_frame_number(ms - 1)
if self.times and self.times[-1] >= ms:
return bisect.bisect_left(self.times, ms)
if not self.default_frame_duration:
raise ValueError("Cannot calculate frame for this timestamp without frame duration")
if ms < 0:
return int(math.floor(ms / self.default_frame_duration))
last_time = self.times[-1] if self.times else 0
return int((ms - last_time) / self.default_frame_duration) + len(self.times)
@classmethod
def _convert_v1_to_v2(cls, default_fps, overrides):
# start, end, fps
overrides = [(int(x[0]), int(x[1]), float(x[2])) for x in overrides]
if not overrides:
return []
        # per-frame fps table; use a separate name so the "fps" loop variable
        # below cannot shadow it
        frame_fps = [default_fps] * (overrides[-1][1] + 1)
        for start, end, fps in overrides:
            frame_fps[start:end + 1] = [fps] * (end - start + 1)
        v2 = [0]
        for d in (1000.0 / f for f in frame_fps):
v2.append(v2[-1] + d)
return v2
@classmethod
def parse(cls, text):
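        # Two timecode layouts are handled:
        #   v2: a header line followed by one timestamp (in milliseconds) per line
        #   v1: "Assume <fps>" followed by "start,end,fps" override ranges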
lines = text.splitlines()
if not lines:
return []
first = lines[0].lower().lstrip()
if first.startswith('# timecode format v2'):
            tcs = [float(x) for x in lines[1:] if x]
return Timecodes(tcs, None)
elif first.startswith('# timecode format v1'):
default = float(lines[1].lower().replace('assume ', ""))
overrides = (x.split(',') for x in lines[2:])
return Timecodes(cls._convert_v1_to_v2(default, overrides), default)
else:
raise PrassError('This timecodes format is not supported')
@classmethod
def from_file(cls, path):
with open(path) as file:
return cls.parse(file.read())
@classmethod
def cfr(cls, fps):
return Timecodes([], default_fps=fps)
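# Illustrative usage (file name and frame rates are placeholders):
#   tc = Timecodes.from_file("timecodes_v2.txt")    # variable-frame-rate source
#   cfr = Timecodes.cfr(23.976)                     # constant-frame-rate fallback
#   frame = cfr.get_frame_number(1001, kind=Timecodes.TIMESTAMP_START)
#   start_ms = cfr.get_frame_time(frame, kind=Timecodes.TIMESTAMP_START)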
| mit | -9,155,276,446,894,454,000 | 33.266055 | 96 | 0.578313 | false | 3.69802 | false | false | false |
drtuxwang/system-config | bin/txz.py | 1 | 3751 | #!/usr/bin/env python3
"""
Make a compressed archive in TAR.XZ format.
"""
import argparse
import glob
import os
import shutil
import signal
import sys
from typing import List
import command_mod
import subtask_mod
class Options:
"""
Options class
"""
def __init__(self) -> None:
self._args: argparse.Namespace = None
self.parse(sys.argv)
def get_archive(self) -> str:
"""
Return archive location.
"""
return self._archive
def get_tar(self) -> command_mod.Command:
"""
Return tar Command class object.
"""
return self._tar
def _parse_args(self, args: List[str]) -> None:
parser = argparse.ArgumentParser(
description='Make a compressed archive in TAR.XZ format.',
)
parser.add_argument(
'archive',
nargs=1,
metavar='file.tar.xz|file.txz',
help='Archive file.'
)
parser.add_argument(
'files',
nargs='*',
metavar='file',
help='File or directory.'
)
self._args = parser.parse_args(args)
def parse(self, args: List[str]) -> None:
"""
Parse arguments
"""
self._parse_args(args[1:])
if os.path.isdir(self._args.archive[0]):
self._archive = os.path.abspath(self._args.archive[0]) + '.tar.xz'
else:
self._archive = self._args.archive[0]
if not self._archive.endswith(('.tar.xz', '.txz')):
raise SystemExit(
sys.argv[0] + ': Unsupported "' + self._archive +
'" archive format.'
)
if self._args.files:
self._files = self._args.files
else:
self._files = os.listdir()
self._tar = command_mod.Command('tar', errors='stop')
self._tar.set_args(['cfv', self._archive+'.part'] + self._files)
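        # xz -9 -e gives maximum compression, --x86 applies the BCJ filter for
        # executables, and the fixed owner/group plus name-sorting below keep the
        # archive reproducible across runs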
self._tar.extend_args([
'--use-compress-program',
'xz -9 -e --x86 --lzma2=dict=128MiB --threads=1 --verbose',
'--owner=0:0',
'--group=0:0',
'--sort=name',
])
class Main:
"""
Main class
"""
def __init__(self) -> None:
try:
self.config()
sys.exit(self.run())
except (EOFError, KeyboardInterrupt):
sys.exit(114)
except SystemExit as exception:
sys.exit(exception)
@staticmethod
def config() -> None:
"""
Configure program
"""
if hasattr(signal, 'SIGPIPE'):
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
if os.name == 'nt':
argv = []
for arg in sys.argv:
files = glob.glob(arg) # Fixes Windows globbing bug
if files:
argv.extend(files)
else:
argv.append(arg)
sys.argv = argv
@staticmethod
def run() -> int:
"""
Start program
"""
options = Options()
archive = options.get_archive()
os.umask(int('022', 8))
task = subtask_mod.Task(options.get_tar().get_cmdline())
task.run()
try:
if task.get_exitcode():
raise OSError
shutil.move(archive+'.part', archive)
except OSError as exception:
raise SystemExit(
'{0:s}: Cannot create "{1:s}" archive file.'.format(
sys.argv[0],
archive
)
) from exception
return 0
if __name__ == '__main__':
if '--pydoc' in sys.argv:
help(__name__)
else:
Main()
| gpl-2.0 | -5,209,942,590,577,605,000 | 23.51634 | 78 | 0.486804 | false | 4.17706 | false | false | false |
vkgrd/instabot | instabot/bot/bot.py | 1 | 12502 | import datetime
import atexit
import signal
from ..api import API
from .bot_get import get_media_owner, get_your_medias, get_user_medias
from .bot_get import get_timeline_medias, get_hashtag_medias, get_user_info
from .bot_get import get_geotag_medias, get_timeline_users, get_hashtag_users
from .bot_get import get_media_commenters, get_userid_from_username
from .bot_get import get_user_followers, get_user_following, get_media_likers
from .bot_get import get_media_comments, get_geotag_users, convert_to_user_id
from .bot_get import get_comment, get_media_info, get_user_likers
from .bot_like import like, like_medias, like_timeline, like_user, like_users
from .bot_like import like_hashtag, like_geotag, like_followers, like_following
from .bot_unlike import unlike, unlike_medias, unlike_user
from .bot_follow import follow, follow_users, follow_followers, follow_following
from .bot_unfollow import unfollow, unfollow_users, unfollow_non_followers
from .bot_unfollow import unfollow_everyone
from .bot_comment import comment, comment_medias, comment_geotag, comment_users
from .bot_comment import comment_hashtag, is_commented
from .bot_block import block, unblock, block_users, unblock_users, block_bots
from .bot_checkpoint import save_checkpoint, load_checkpoint
from .bot_filter import filter_medias, check_media, filter_users, check_user
from .bot_filter import check_not_bot
from .bot_support import check_if_file_exists, read_list_from_file
from .bot_support import add_whitelist, add_blacklist
from .bot_stats import save_user_stats
class Bot(API):
def __init__(self,
whitelist=False,
blacklist=False,
comments_file=False,
max_likes_per_day=1000,
max_unlikes_per_day=1000,
max_follows_per_day=350,
max_unfollows_per_day=350,
max_comments_per_day=100,
max_blocks_per_day=100,
max_unblocks_per_day=100,
max_likes_to_like=100,
filter_users=True,
max_followers_to_follow=2000,
min_followers_to_follow=10,
max_following_to_follow=2000,
min_following_to_follow=10,
max_followers_to_following_ratio=10,
max_following_to_followers_ratio=2,
min_media_count_to_follow=3,
max_following_to_block=2000,
like_delay=10,
unlike_delay=10,
follow_delay=30,
unfollow_delay=30,
comment_delay=60,
block_delay=30,
unblock_delay=30,
stop_words=['shop', 'store', 'free']):
super(self.__class__, self).__init__()
self.total_liked = 0
self.total_unliked = 0
self.total_followed = 0
self.total_unfollowed = 0
self.total_commented = 0
self.total_blocked = 0
self.total_unblocked = 0
self.start_time = datetime.datetime.now()
# limits - follow
self.filter_users = filter_users
self.max_likes_per_day = max_likes_per_day
self.max_unlikes_per_day = max_unlikes_per_day
self.max_follows_per_day = max_follows_per_day
self.max_unfollows_per_day = max_unfollows_per_day
self.max_comments_per_day = max_comments_per_day
self.max_blocks_per_day = max_blocks_per_day
self.max_unblocks_per_day = max_unblocks_per_day
self.max_likes_to_like = max_likes_to_like
self.max_followers_to_follow = max_followers_to_follow
self.min_followers_to_follow = min_followers_to_follow
self.max_following_to_follow = max_following_to_follow
self.min_following_to_follow = min_following_to_follow
self.max_followers_to_following_ratio = max_followers_to_following_ratio
self.max_following_to_followers_ratio = max_following_to_followers_ratio
self.min_media_count_to_follow = min_media_count_to_follow
self.stop_words = stop_words
# limits - block
self.max_following_to_block = max_following_to_block
# delays
self.like_delay = like_delay
self.unlike_delay = unlike_delay
self.follow_delay = follow_delay
self.unfollow_delay = unfollow_delay
self.comment_delay = comment_delay
self.block_delay = block_delay
self.unblock_delay = unblock_delay
# current following
self.following = []
# white and blacklists
self.whitelist = []
if whitelist:
self.whitelist = read_list_from_file(whitelist)
self.blacklist = []
if blacklist:
self.blacklist = read_list_from_file(blacklist)
# comment file
self.comments = []
if comments_file:
self.comments = read_list_from_file(comments_file)
self.logger.info('Instabot Started')
def version(self):
from pip._vendor import pkg_resources
return next((p.version for p in pkg_resources.working_set if p.project_name.lower() == 'instabot'), "No match")
def logout(self):
save_checkpoint(self)
super(self.__class__, self).logout()
self.logger.info("Bot stopped. "
"Worked: %s" % (datetime.datetime.now() - self.start_time))
self.print_counters()
def login(self, **args):
super(self.__class__, self).login(**args)
self.prepare()
        # signal handlers receive (signum, frame); wrap logout() so the call signature matches
        signal.signal(signal.SIGTERM, lambda signum, frame: self.logout())
atexit.register(self.logout)
def prepare(self):
storage = load_checkpoint(self)
if storage is not None:
self.total_liked, self.total_unliked, self.total_followed, self.total_unfollowed, self.total_commented, self.total_blocked, self.total_unblocked, self.total_requests, self.start_time = storage
self.whitelist = list(
filter(None, map(self.convert_to_user_id, self.whitelist)))
self.blacklist = list(
filter(None, map(self.convert_to_user_id, self.blacklist)))
def print_counters(self):
if self.total_liked:
self.logger.info("Total liked: %d" % self.total_liked)
if self.total_unliked:
self.logger.info("Total unliked: %d" % self.total_unliked)
if self.total_followed:
self.logger.info("Total followed: %d" % self.total_followed)
if self.total_unfollowed:
self.logger.info("Total unfollowed: %d" % self.total_unfollowed)
if self.total_commented:
self.logger.info("Total commented: %d" % self.total_commented)
if self.total_blocked:
self.logger.info("Total blocked: %d" % self.total_blocked)
if self.total_unblocked:
self.logger.info("Total unblocked: %d" % self.total_unblocked)
self.logger.info("Total requests: %d" % self.total_requests)
# getters
def get_your_medias(self):
return get_your_medias(self)
def get_timeline_medias(self):
return get_timeline_medias(self)
def get_user_medias(self, user_id, filtration=True):
return get_user_medias(self, user_id, filtration)
def get_hashtag_medias(self, hashtag, filtration=True):
return get_hashtag_medias(self, hashtag, filtration)
def get_geotag_medias(self, geotag, filtration=True):
return get_geotag_medias(self, geotag, filtration)
def get_media_info(self, media_id):
return get_media_info(self, media_id)
def get_timeline_users(self):
return get_timeline_users(self)
def get_hashtag_users(self, hashtag):
return get_hashtag_users(self, hashtag)
def get_geotag_users(self, geotag):
return get_geotag_users(self, geotag)
def get_userid_from_username(self, username):
return get_userid_from_username(self, username)
def get_user_info(self, user_id):
return get_user_info(self, user_id)
def get_user_followers(self, user_id):
return get_user_followers(self, user_id)
def get_user_following(self, user_id):
return get_user_following(self, user_id)
def get_media_likers(self, media_id):
return get_media_likers(self, media_id)
def get_media_comments(self, media_id):
return get_media_comments(self, media_id)
def get_comment(self):
return get_comment(self)
def get_media_commenters(self, media_id):
return get_media_commenters(self, media_id)
def get_media_owner(self, media):
return get_media_owner(self, media)
def get_user_likers(self, user_id, media_count=10):
return get_user_likers(self, user_id, media_count)
def convert_to_user_id(self, usernames):
return convert_to_user_id(self, usernames)
# like
def like(self, media_id):
return like(self, media_id)
def like_medias(self, media_ids):
return like_medias(self, media_ids)
def like_timeline(self, amount=None):
return like_timeline(self, amount)
def like_user(self, user_id, amount=None):
return like_user(self, user_id, amount)
def like_hashtag(self, hashtag, amount=None):
return like_hashtag(self, hashtag, amount)
def like_geotag(self, geotag, amount=None):
return like_geotag(self, geotag, amount)
def like_users(self, user_ids, nlikes=None):
return like_users(self, user_ids, nlikes)
def like_followers(self, user_id, nlikes=None):
return like_followers(self, user_id, nlikes)
def like_following(self, user_id, nlikes=None):
return like_following(self, user_id, nlikes)
# unlike
def unlike(self, media_id):
return unlike(self, media_id)
def unlike_medias(self, media_ids):
return unlike_medias(self, media_ids)
def unlike_user(self, user):
return unlike_user(self, user)
# follow
def follow(self, user_id):
return follow(self, user_id)
def follow_users(self, user_ids):
return follow_users(self, user_ids)
def follow_followers(self, user_id):
return follow_followers(self, user_id)
def follow_following(self, user_id):
return follow_following(self, user_id)
# unfollow
def unfollow(self, user_id):
return unfollow(self, user_id)
def unfollow_users(self, user_ids):
return unfollow_users(self, user_ids)
def unfollow_non_followers(self):
return unfollow_non_followers(self)
def unfollow_everyone(self):
return unfollow_everyone(self)
# comment
def comment(self, media_id, comment_text):
return comment(self, media_id, comment_text)
def comment_hashtag(self, hashtag):
return comment_hashtag(self, hashtag)
def comment_medias(self, medias):
return comment_medias(self, medias)
def comment_users(self, user_ids):
return comment_users(self, user_ids)
def comment_geotag(self, geotag):
return comment_geotag(self, geotag)
def is_commented(self, media_id):
return is_commented(self, media_id)
# block
def block(self, user_id):
return block(self, user_id)
def unblock(self, user_id):
return unblock(self, user_id)
def block_users(self, user_ids):
return block_users(self, user_ids)
def unblock_users(self, user_ids):
return unblock_users(self, user_ids)
def block_bots(self):
return block_bots(self)
# filter
def filter_medias(self, media_items, filtration=True):
return filter_medias(self, media_items, filtration)
def check_media(self, media):
return check_media(self, media)
def check_user(self, user, filter_closed_acc=False):
return check_user(self, user, filter_closed_acc)
def check_not_bot(self, user):
return check_not_bot(self, user)
def filter_users(self, user_id_list):
return filter_users(self, user_id_list)
# support
def check_if_file_exists(self, file_path):
return check_if_file_exists(file_path)
def read_list_from_file(self, file_path):
return read_list_from_file(file_path)
def add_whitelist(self, file_path):
return add_whitelist(self, file_path)
def add_blacklist(self, file_path):
return add_blacklist(self, file_path)
# stats
def save_user_stats(self, username):
return save_user_stats(self, username)
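# Illustrative usage (credentials and the hashtag are placeholders; login keyword
# names follow the underlying API's login()):
#   bot = Bot(comments_file="comments.txt", max_likes_per_day=500)
#   bot.login(username="user", password="pass")
#   bot.like_hashtag("nature", amount=20)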
| apache-2.0 | -5,600,884,494,373,340,000 | 32.25 | 204 | 0.638218 | false | 3.458368 | false | false | false |
cjforman/pele | pele/potentials/xyspin.py | 5 | 4232 | import numpy as np
from copy import copy
from pele.potentials import BasePotential
import networkx as nx
__all__ = ["XYModel"]
def angle_to_2dvector(theta):
return np.cos(theta), np.sin(theta)
class XYModel(BasePotential):
"""
XY model of 2d spins on a lattice
"""
def __init__(self, dim=None, phi=np.pi, periodic=True, phases=None):
if not dim: dim = [4, 4]
dim = copy(dim)
self.dim = copy(dim)
self.nspins = np.prod(dim)
self.G = nx.grid_graph(dim, periodic)
if phases is not None:
self.phases = phases
else:
self.phases = dict()
binary_disorder = True
if binary_disorder:
for edge in self.G.edges():
self.phases[edge] = phi * np.random.random_integers(0, 1)
else:
for edge in self.G.edges():
self.phases[edge] = np.random.uniform(-phi, phi)
nx.set_edge_attributes(self.G, "phase", self.phases)
self.indices = dict()
self.index2node = dict()
nodes = sorted(self.G.nodes())
for i, node in enumerate(nodes):
self.indices[node] = i
self.index2node[i] = node
self.num_edges = self.G.number_of_edges()
self.set_up_neighborlists()
def get_phases(self):
return self.phases.copy()
def set_up_neighborlists(self):
neighbors = []
self.phase_matrix = np.zeros([self.nspins, self.nspins])
for edge in self.G.edges():
u = self.indices[edge[0]]
v = self.indices[edge[1]]
neighbors.append([u, v])
self.phase_matrix[u, v] = self.phases[edge]
self.phase_matrix[v, u] = self.phases[edge]
self.neighbors = np.array(neighbors).reshape([-1, 2])
def get_spin_energies(self, angles):
"""return the local energy of each spin"""
energies = np.zeros(angles.size)
for edge in self.G.edges():
phase = self.phases[edge]
u = self.indices[edge[0]]
v = self.indices[edge[1]]
E = -np.cos(-angles[u] + angles[v] + phase)
energies[u] += E
energies[v] += E
return energies
def getEnergy(self, angles):
e, g = self.getEnergyGradient(angles)
return e
def getEnergyGradient(self, angles):
import _cython_tools
return _cython_tools.xymodel_energy_gradient(angles, self.phase_matrix, self.neighbors)
# def getEnergyGradient(self, angles):
# # do internal energies first
# E = 0.
# grad = np.zeros(self.nspins)
# for edge in self.G.edges():
# phase = self.phases[edge]
# u = self.indices[edge[0]]
# v = self.indices[edge[1]]
# E += np.cos( -angles[u] + angles[v] + phase )
#
# g = -np.sin( -angles[u] + angles[v] + phase )
# grad[u] += g
# grad[v] += -g
# E = - E
# return E, grad
#def test_basin_hopping(pot, angles):
# from pele.basinhopping import BasinHopping
# from pele.takestep.displace import RandomDisplacement
# from pele.takestep.adaptive import AdaptiveStepsize
#
# takestep = RandomDisplacement(stepsize = np.pi/4)
# takestepa = AdaptiveStepsize(takestep, frequency = 20)
#
# bh = BasinHopping( angles, pot, takestepa, temperature = 1.01)
# bh.run(20)
#
#def test():
# pi = np.pi
# L = 3
# nspins = L**2
#
# #phases = np.zeros(nspins)
# pot = XYModel( dim = [L,L], phi = np.pi) #, phases=phases)
#
#
# angles = np.random.uniform(-pi, pi, nspins)
# print angles
#
# e = pot.getEnergy(angles)
# print "energy ", e
#
# print "numerical gradient"
# ret = pot.getEnergyGradientNumerical(angles)
# print ret[1]
# print "analytical gradient"
# ret2 = pot.getEnergyGradient(angles)
# print ret2[1]
# print ret[0]
# print ret2[0]
#
#
#
# #try a quench
# from pele.optimize import mylbfgs
# ret = mylbfgs(angles, pot)
#
# print "quenched e = ", ret.energy
# print ret.coords
#
# test_basin_hopping(pot, angles)
#
#if __name__ == "__main__":
# test()
| gpl-3.0 | -4,005,206,605,871,008,300 | 26.660131 | 95 | 0.557892 | false | 3.174794 | false | false | false |
gautam1858/tensorflow | tensorflow/tools/compatibility/update/generate_v2_renames_map.py | 10 | 7161 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""Script for updating tensorflow/tools/compatibility/renames_v2.py.
To update renames_v2.py, run:
bazel build tensorflow/tools/compatibility/update:generate_v2_renames_map
bazel-bin/tensorflow/tools/compatibility/update/generate_v2_renames_map
"""
# pylint: enable=line-too-long
import sys
import tensorflow as tf
# This import is needed so that TensorFlow python modules are in sys.modules.
from tensorflow import python as tf_python # pylint: disable=unused-import
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import app
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_export
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
from tensorflow.tools.compatibility import tf_upgrade_v2
_OUTPUT_FILE_PATH = 'third_party/tensorflow/tools/compatibility/renames_v2.py'
_FILE_HEADER = """# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
\"\"\"List of renames to apply when converting from TF 1.0 to TF 2.0.
THIS FILE IS AUTOGENERATED: To update, please run:
bazel build tensorflow/tools/compatibility/update:generate_v2_renames_map
bazel-bin/tensorflow/tools/compatibility/update/generate_v2_renames_map
This file should be updated whenever endpoints are deprecated.
\"\"\"
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
"""
def get_canonical_name(v2_names, v1_name):
if v2_names:
return v2_names[0]
return 'compat.v1.%s' % v1_name
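# Illustration: with v2_names == ['math.argmax'], get_canonical_name returns
# 'math.argmax'; with an empty list it falls back to 'compat.v1.<v1_name>'.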
def get_all_v2_names():
"""Get a set of function/class names available in TensorFlow 2.0."""
v2_names = set() # All op names in TensorFlow 2.0
def visit(unused_path, unused_parent, children):
"""Visitor that collects TF 2.0 names."""
for child in children:
_, attr = tf_decorator.unwrap(child[1])
api_names_v2 = tf_export.get_v2_names(attr)
for name in api_names_v2:
v2_names.add(name)
visitor = public_api.PublicAPIVisitor(visit)
visitor.do_not_descend_map['tf'].append('contrib')
traverse.traverse(tf.compat.v2, visitor)
return v2_names
def collect_constant_renames():
"""Looks for constants that need to be renamed in TF 2.0.
Returns:
Set of tuples of the form (current name, new name).
"""
renames = set()
for module in sys.modules.values():
constants_v1_list = tf_export.get_v1_constants(module)
constants_v2_list = tf_export.get_v2_constants(module)
# _tf_api_constants attribute contains a list of tuples:
# (api_names_list, constant_name)
# We want to find API names that are in V1 but not in V2 for the same
# constant_names.
# First, we convert constants_v1_list and constants_v2_list to
# dictionaries for easier lookup.
constants_v1 = {constant_name: api_names
for api_names, constant_name in constants_v1_list}
constants_v2 = {constant_name: api_names
for api_names, constant_name in constants_v2_list}
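    # Illustration (hypothetical entries): a constants_v1_list such as
    # [(['AUTO_REUSE'], 'AUTO_REUSE')] becomes {'AUTO_REUSE': ['AUTO_REUSE']},
    # so the two mappings can be compared per constant_name below.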
# Second, we look for names that are in V1 but not in V2.
for constant_name, api_names_v1 in constants_v1.items():
api_names_v2 = constants_v2[constant_name]
for name in api_names_v1:
if name not in api_names_v2:
renames.add((name, get_canonical_name(api_names_v2, name)))
return renames
def collect_function_renames():
"""Looks for functions/classes that need to be renamed in TF 2.0.
Returns:
Set of tuples of the form (current name, new name).
"""
# Set of rename lines to write to output file in the form:
# 'tf.deprecated_name': 'tf.canonical_name'
renames = set()
def visit(unused_path, unused_parent, children):
"""Visitor that collects rename strings to add to rename_line_set."""
for child in children:
_, attr = tf_decorator.unwrap(child[1])
api_names_v1 = tf_export.get_v1_names(attr)
api_names_v2 = tf_export.get_v2_names(attr)
deprecated_api_names = set(api_names_v1) - set(api_names_v2)
for name in deprecated_api_names:
renames.add((name, get_canonical_name(api_names_v2, name)))
visitor = public_api.PublicAPIVisitor(visit)
visitor.do_not_descend_map['tf'].append('contrib')
visitor.do_not_descend_map['tf.compat'] = ['v1', 'v2']
traverse.traverse(tf, visitor)
# It is possible that a different function is exported with the
# same name. For e.g. when creating a different function to
# rename arguments. Exclude it from renames in this case.
v2_names = get_all_v2_names()
renames = set((name, new_name) for name, new_name in renames
if name not in v2_names)
return renames
def get_rename_line(name, canonical_name):
return ' \'tf.%s\': \'tf.%s\'' % (name, canonical_name)
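# For example, get_rename_line('arg_max', 'math.argmax') yields the entry
# "'tf.arg_max': 'tf.math.argmax'" (with the leading indentation used in the
# generated file).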
def update_renames_v2(output_file_path):
"""Writes a Python dictionary mapping deprecated to canonical API names.
Args:
output_file_path: File path to write output to. Any existing contents
would be replaced.
"""
function_renames = collect_function_renames()
constant_renames = collect_constant_renames()
all_renames = function_renames.union(constant_renames)
manual_renames = set(
tf_upgrade_v2.TFAPIChangeSpec().manual_symbol_renames.keys())
# List of rename lines to write to output file in the form:
# 'tf.deprecated_name': 'tf.canonical_name'
rename_lines = [
get_rename_line(name, canonical_name)
for name, canonical_name in all_renames
if 'tf.' + name not in manual_renames]
renames_file_text = '%srenames = {\n%s\n}\n' % (
_FILE_HEADER, ',\n'.join(sorted(rename_lines)))
file_io.write_string_to_file(output_file_path, renames_file_text)
def main(unused_argv):
update_renames_v2(_OUTPUT_FILE_PATH)
if __name__ == '__main__':
app.run(main=main)
| apache-2.0 | 1,969,881,131,430,991,400 | 36.689474 | 80 | 0.69669 | false | 3.531065 | false | false | false |
codepython/CollectorCity-Market-Place | marketplaces/apps/market_sell/tests.py | 2 | 3626 | """
Tests for the market_sell application: exercises buyer signup and the
multi-step shop signup wizard (run with "manage.py test").
"""
import logging
from django.test import TestCase
from django.core.urlresolvers import reverse
from auth.models import User
from shops.models import Shop
class MarketSellTest(TestCase):
fixtures = ['greatcoins_market.json', 'greatcoins_subscriptions.json']
def setUp(self):
try:
user = User.objects.get(username="test_shop_signup")
user.delete()
except User.DoesNotExist:
pass
def test_sign_up(self):
"""
Test shop signup
"""
response = self.client.get(reverse("market_buy_signup"))
        self.assertEqual(response.status_code, 200)
users_count = User.objects.count()
params = {
'username': 'test_shop_signup',
'email': '[email protected]',
'password1': 'test',
'password2': 'test',
}
response = self.client.post(reverse("market_buy_signup"), params)
        self.assertEqual(response.status_code, 302)
self.assertEquals(User.objects.count(), users_count + 1)
shops_count = Shop.objects.count()
response = self.client.get(reverse("market_sell_signup"))
        self.assertEqual(response.status_code, 200)
print response.context
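        # The three POSTs below appear to walk the shop signup form wizard step
        # by step; wizard_step and the hard-coded hash_N values look like the
        # wizard's per-step security hashes.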
#SignUp step 0
params = {
'csrfmiddlewaretoken': str(response.context['csrf_token']),
'0-name_store': 'test2',
'0-shop_name': 'test2',
'0-street': 'test',
'0-city': 'test',
'0-state': 'NY',
'0-zip': '10001',
'wizard_step': '0'
}
response = self.client.post(reverse("market_sell_signup"), params)
        self.assertEqual(response.status_code, 200)
params = {
'csrfmiddlewaretoken': str(response.context['csrf_token']),
'0-name_store': 'test2',
'0-shop_name': 'test2',
'0-street': 'test',
'0-city': 'test',
'0-state': 'NY',
'0-zip': '10001',
'1-plan_id': '1',
'wizard_step': '1',
'hash_0':'22267e8560569a5bba749a8f54aab54a',
}
response = self.client.post(reverse("market_sell_signup"), params)
        self.assertEqual(response.status_code, 200)
params = {
'csrfmiddlewaretoken': str(response.context['csrf_token']),
'0-name_store': 'test2',
'0-shop_name': 'test2',
'0-street': 'test',
'0-city': 'test',
'0-state': 'NY',
'0-zip': '10001',
'1-plan_id': '1',
'2-billing_street': 'el billing street',
'2-billing_city': 'el billing city',
'2-billing_state': 'NY',
'2-billing_zip': '10001',
'2-cc_number': '4111111111111111',
'2-cc_expiration_month': '03',
'2-cc_expiration_year': '2012',
'2-card_security_number': '123',
'2-terms': 'on',
'wizard_step': '2',
'hash_0':'22267e8560569a5bba749a8f54aab54a',
'hash_1':'e0341a56bf5d7baa6d13e9b72e831098'
}
response = self.client.post(reverse("market_sell_signup"), params)
        self.assertEqual(response.status_code, 200)
self.assertEquals(Shop.objects.count(), shops_count + 1)
| apache-2.0 | 3,334,057,743,875,397,000 | 31.375 | 74 | 0.533922 | false | 3.722793 | true | false | false |
sam-m888/addons-source | Form/selectform.py | 2 | 5153 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2015 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Form selector.
"""
#------------------------------------------------------------------------
#
# GTK modules
#
#------------------------------------------------------------------------
from gi.repository import Gtk
from gi.repository import Gdk
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gui.display import display_help  # used for the Help response in SelectForm.run()
from form import get_form_ids, get_form_id, get_form_type
#------------------------------------------------------------------------
#
# Internationalisation
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
try:
_trans = glocale.get_addon_translator(__file__)
except ValueError:
_trans = glocale.translation
_ = _trans.gettext
#------------------------------------------------------------------------
#
# SelectForm class
#
#------------------------------------------------------------------------
class SelectForm(object):
"""
Form Selector.
"""
def __init__(self, dbstate, uistate, track):
self.dbstate = dbstate
self.uistate = uistate
self.top = self._create_dialog()
def _create_dialog(self):
"""
Create a dialog box to select a form.
"""
# pylint: disable-msg=E1101
title = _("%(title)s - Gramps") % {'title': _("Select Form")}
top = Gtk.Dialog(title)
top.set_default_size(400, 350)
top.set_modal(True)
top.set_transient_for(self.uistate.window)
top.vbox.set_spacing(5)
label = Gtk.Label(label='<span size="larger" weight="bold">%s</span>'
% _("Select Form"))
label.set_use_markup(True)
top.vbox.pack_start(label, 0, 0, 5)
box = Gtk.Box()
top.vbox.pack_start(box, 1, 1, 5)
self.model = Gtk.TreeStore(str, str)
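        # column 0 holds the source handle (not displayed); column 1 holds the
        # source title shown in the tree view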
self.tree = Gtk.TreeView(model=self.model)
self.tree.connect('button-press-event', self.__button_press)
renderer = Gtk.CellRendererText()
column = Gtk.TreeViewColumn("Source", renderer, text=1)
column.set_sort_column_id(1)
self.tree.append_column(column)
slist = Gtk.ScrolledWindow()
slist.add(self.tree)
slist.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
box.pack_start(slist, 1, 1, 5)
top.add_button(_('_Cancel'), Gtk.ResponseType.CANCEL)
top.add_button(_('_OK'), Gtk.ResponseType.OK)
top.show_all()
return top
def _populate_model(self):
"""
Populate the model.
"""
self.model.clear()
form_types = {}
for handle in self.dbstate.db.get_source_handles():
source = self.dbstate.db.get_source_from_handle(handle)
form_id = get_form_id(source)
if form_id in get_form_ids():
form_type = get_form_type(form_id)
if _(form_type) in form_types:
parent = form_types[_(form_type)]
else:
parent = self.model.append(None, (None, _(form_type)))
form_types[_(form_type)] = parent
self.model.append(parent, (source.handle, source.title))
self.model.set_sort_column_id(1, Gtk.SortType.ASCENDING)
self.tree.expand_all()
def __button_press(self, obj, event):
"""
Called when a button press is executed
"""
if event.type == Gdk.EventType._2BUTTON_PRESS:
model, iter_ = self.tree.get_selection().get_selected()
if iter_:
source_handle = model.get_value(iter_, 0)
if source_handle:
self.top.response(Gtk.ResponseType.OK)
def run(self):
"""
Run the dialog and return the result.
"""
self._populate_model()
source_handle = None
while True:
response = self.top.run()
if response == Gtk.ResponseType.HELP:
display_help(webpage='Form_Addons')
else:
model, iter_ = self.tree.get_selection().get_selected()
if iter_:
                    source_handle = model.get_value(iter_, 0)
                break
self.top.destroy()
return source_handle
| gpl-2.0 | -24,963,507,568,043,970 | 33.583893 | 79 | 0.523384 | false | 4.115815 | false | false | false |
SRI-CSL/PLambda | plambda/antlr4/PLambdaParser.py | 1 | 63514 | # Generated from PLambda.g4 by ANTLR 4.8
# encoding: utf-8
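# NOTE: this module is machine-generated from the PLambda.g4 grammar; changes
# belong in the grammar and should be re-emitted with the ANTLR tool rather
# than edited here by hand.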
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3D")
buf.write("\u00d0\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3\2\6\2\30\n\2\r\2")
buf.write("\16\2\31\3\3\3\3\3\3\6\3\37\n\3\r\3\16\3 \3\3\3\3\3\3")
buf.write("\3\3\3\3\3\3\6\3)\n\3\r\3\16\3*\3\3\3\3\3\3\3\3\3\3\3")
buf.write("\3\5\3\63\n\3\3\3\6\3\66\n\3\r\3\16\3\67\3\3\3\3\3\3\3")
buf.write("\3\3\3\3\3\6\3@\n\3\r\3\16\3A\3\3\3\3\3\3\3\3\3\3\3\3")
buf.write("\3\3\7\3K\n\3\f\3\16\3N\13\3\3\3\3\3\3\3\3\3\3\3\3\3\7")
buf.write("\3V\n\3\f\3\16\3Y\13\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3")
buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3")
buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3x\n\3\3\3\3\3\3\3\3")
buf.write("\3\3\3\3\3\3\3\5\3\u0081\n\3\3\3\3\3\3\3\3\3\3\3\7\3\u0088")
buf.write("\n\3\f\3\16\3\u008b\13\3\3\3\3\3\3\3\3\3\6\3\u0091\n\3")
buf.write("\r\3\16\3\u0092\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\6\3\u009d")
buf.write("\n\3\r\3\16\3\u009e\3\3\3\3\3\3\3\3\3\3\5\3\u00a6\n\3")
buf.write("\3\4\3\4\7\4\u00aa\n\4\f\4\16\4\u00ad\13\4\3\4\3\4\3\5")
buf.write("\3\5\3\6\3\6\6\6\u00b5\n\6\r\6\16\6\u00b6\3\6\3\6\3\7")
buf.write("\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\6\b\u00c4\n\b\r\b\16")
buf.write("\b\u00c5\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\13\2\2\f")
buf.write("\2\4\6\b\n\f\16\20\22\24\2\4\3\2<=\4\2\5\5<<\2\u00e6\2")
buf.write("\27\3\2\2\2\4\u00a5\3\2\2\2\6\u00a7\3\2\2\2\b\u00b0\3")
buf.write("\2\2\2\n\u00b2\3\2\2\2\f\u00ba\3\2\2\2\16\u00bf\3\2\2")
buf.write("\2\20\u00c9\3\2\2\2\22\u00cb\3\2\2\2\24\u00cd\3\2\2\2")
buf.write("\26\30\5\4\3\2\27\26\3\2\2\2\30\31\3\2\2\2\31\27\3\2\2")
buf.write("\2\31\32\3\2\2\2\32\3\3\2\2\2\33\34\7\3\2\2\34\36\7\16")
buf.write("\2\2\35\37\5\4\3\2\36\35\3\2\2\2\37 \3\2\2\2 \36\3\2\2")
buf.write("\2 !\3\2\2\2!\"\3\2\2\2\"#\7\4\2\2#\u00a6\3\2\2\2$%\7")
buf.write("\3\2\2%&\7\20\2\2&(\5\n\6\2\')\5\4\3\2(\'\3\2\2\2)*\3")
buf.write("\2\2\2*(\3\2\2\2*+\3\2\2\2+,\3\2\2\2,-\7\4\2\2-\u00a6")
buf.write("\3\2\2\2./\7\3\2\2/\60\7\21\2\2\60\62\7<\2\2\61\63\5\6")
buf.write("\4\2\62\61\3\2\2\2\62\63\3\2\2\2\63\65\3\2\2\2\64\66\5")
buf.write("\4\3\2\65\64\3\2\2\2\66\67\3\2\2\2\67\65\3\2\2\2\678\3")
buf.write("\2\2\289\3\2\2\29:\7\4\2\2:\u00a6\3\2\2\2;<\7\3\2\2<=")
buf.write("\7\22\2\2=?\5\6\4\2>@\5\4\3\2?>\3\2\2\2@A\3\2\2\2A?\3")
buf.write("\2\2\2AB\3\2\2\2BC\3\2\2\2CD\7\4\2\2D\u00a6\3\2\2\2EF")
buf.write("\7\3\2\2FG\7\24\2\2GH\5\4\3\2HL\5\4\3\2IK\5\4\3\2JI\3")
buf.write("\2\2\2KN\3\2\2\2LJ\3\2\2\2LM\3\2\2\2MO\3\2\2\2NL\3\2\2")
buf.write("\2OP\7\4\2\2P\u00a6\3\2\2\2QR\7\3\2\2RS\7\23\2\2SW\5\4")
buf.write("\3\2TV\5\4\3\2UT\3\2\2\2VY\3\2\2\2WU\3\2\2\2WX\3\2\2\2")
buf.write("XZ\3\2\2\2YW\3\2\2\2Z[\7\4\2\2[\u00a6\3\2\2\2\\]\7\3\2")
buf.write("\2]^\7\6\2\2^_\5\22\n\2_`\7\4\2\2`\u00a6\3\2\2\2ab\7\3")
buf.write("\2\2bc\7\7\2\2cd\5\4\3\2de\7\4\2\2e\u00a6\3\2\2\2fg\7")
buf.write("\3\2\2gh\7\b\2\2hi\5\4\3\2ij\5\4\3\2jk\7\4\2\2k\u00a6")
buf.write("\3\2\2\2lm\7\3\2\2mn\7\t\2\2no\5\4\3\2op\5\4\3\2pq\5\4")
buf.write("\3\2qr\7\4\2\2r\u00a6\3\2\2\2st\7\3\2\2tu\7\13\2\2uw\5")
buf.write("\4\3\2vx\5\4\3\2wv\3\2\2\2wx\3\2\2\2xy\3\2\2\2yz\7\4\2")
buf.write("\2z\u00a6\3\2\2\2{|\7\3\2\2|}\7\f\2\2}~\5\4\3\2~\u0080")
buf.write("\5\4\3\2\177\u0081\5\4\3\2\u0080\177\3\2\2\2\u0080\u0081")
buf.write("\3\2\2\2\u0081\u0082\3\2\2\2\u0082\u0083\7\4\2\2\u0083")
buf.write("\u00a6\3\2\2\2\u0084\u0085\7\3\2\2\u0085\u0089\7\n\2\2")
buf.write("\u0086\u0088\5\4\3\2\u0087\u0086\3\2\2\2\u0088\u008b\3")
buf.write("\2\2\2\u0089\u0087\3\2\2\2\u0089\u008a\3\2\2\2\u008a\u008c")
buf.write("\3\2\2\2\u008b\u0089\3\2\2\2\u008c\u00a6\7\4\2\2\u008d")
buf.write("\u008e\7\3\2\2\u008e\u0090\7\27\2\2\u008f\u0091\5\4\3")
buf.write("\2\u0090\u008f\3\2\2\2\u0091\u0092\3\2\2\2\u0092\u0090")
buf.write("\3\2\2\2\u0092\u0093\3\2\2\2\u0093\u0094\3\2\2\2\u0094")
buf.write("\u0095\5\16\b\2\u0095\u0096\7\4\2\2\u0096\u00a6\3\2\2")
buf.write("\2\u0097\u0098\7\3\2\2\u0098\u0099\7\26\2\2\u0099\u009a")
buf.write("\7<\2\2\u009a\u009c\5\20\t\2\u009b\u009d\5\4\3\2\u009c")
buf.write("\u009b\3\2\2\2\u009d\u009e\3\2\2\2\u009e\u009c\3\2\2\2")
buf.write("\u009e\u009f\3\2\2\2\u009f\u00a0\3\2\2\2\u00a0\u00a1\7")
buf.write("\4\2\2\u00a1\u00a6\3\2\2\2\u00a2\u00a6\7\5\2\2\u00a3\u00a6")
buf.write("\7<\2\2\u00a4\u00a6\7\r\2\2\u00a5\33\3\2\2\2\u00a5$\3")
buf.write("\2\2\2\u00a5.\3\2\2\2\u00a5;\3\2\2\2\u00a5E\3\2\2\2\u00a5")
buf.write("Q\3\2\2\2\u00a5\\\3\2\2\2\u00a5a\3\2\2\2\u00a5f\3\2\2")
buf.write("\2\u00a5l\3\2\2\2\u00a5s\3\2\2\2\u00a5{\3\2\2\2\u00a5")
buf.write("\u0084\3\2\2\2\u00a5\u008d\3\2\2\2\u00a5\u0097\3\2\2\2")
buf.write("\u00a5\u00a2\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a5\u00a4\3")
buf.write("\2\2\2\u00a6\5\3\2\2\2\u00a7\u00ab\7\3\2\2\u00a8\u00aa")
buf.write("\5\b\5\2\u00a9\u00a8\3\2\2\2\u00aa\u00ad\3\2\2\2\u00ab")
buf.write("\u00a9\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00ae\3\2\2\2")
buf.write("\u00ad\u00ab\3\2\2\2\u00ae\u00af\7\4\2\2\u00af\7\3\2\2")
buf.write("\2\u00b0\u00b1\7<\2\2\u00b1\t\3\2\2\2\u00b2\u00b4\7\3")
buf.write("\2\2\u00b3\u00b5\5\f\7\2\u00b4\u00b3\3\2\2\2\u00b5\u00b6")
buf.write("\3\2\2\2\u00b6\u00b4\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7")
buf.write("\u00b8\3\2\2\2\u00b8\u00b9\7\4\2\2\u00b9\13\3\2\2\2\u00ba")
buf.write("\u00bb\7\3\2\2\u00bb\u00bc\5\b\5\2\u00bc\u00bd\5\4\3\2")
buf.write("\u00bd\u00be\7\4\2\2\u00be\r\3\2\2\2\u00bf\u00c0\7\3\2")
buf.write("\2\u00c0\u00c1\7\30\2\2\u00c1\u00c3\5\b\5\2\u00c2\u00c4")
buf.write("\5\4\3\2\u00c3\u00c2\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5")
buf.write("\u00c3\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6\u00c7\3\2\2\2")
buf.write("\u00c7\u00c8\7\4\2\2\u00c8\17\3\2\2\2\u00c9\u00ca\5\4")
buf.write("\3\2\u00ca\21\3\2\2\2\u00cb\u00cc\t\2\2\2\u00cc\23\3\2")
buf.write("\2\2\u00cd\u00ce\t\3\2\2\u00ce\25\3\2\2\2\23\31 *\62\67")
buf.write("ALWw\u0080\u0089\u0092\u009e\u00a5\u00ab\u00b6\u00c5")
return buf.getvalue()
class PLambdaParser ( Parser ):
grammarFileName = "PLambda.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'('", "')'", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "'None'", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "'-'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "STRING", "PRIMITIVE_DATA_OP",
"UNARY_OP", "BINARY_OP", "TERNARY_OP", "N_ARY_OP",
"AMBI1_OP", "AMBI2_OP", "NONE", "SEQ", "DO", "LET",
"DEFINE", "LAMBDA", "APPLY", "INVOKE", "SINVOKE",
"FOR", "TRY", "CATCH", "BOOLEAN", "FLOAT", "INT",
"LOAD", "IMPORT", "ISNONE", "ISOBJECT", "ISINT", "ISFLOAT",
"GETUID", "GLOBAL", "NOT", "THROW", "FETCH", "NARROW",
"INSTANCEOF", "GET", "IN", "IS", "LOOKUP", "SETUID",
"KWAPPLY", "MODIFY", "UPDATE", "SUPDATE", "SETATTR",
"CONCAT", "AND", "OR", "MKTUPLE", "MKLIST", "MKDICT",
"MINUS", "IF", "GETATTR", "ID", "NUMBER", "STRING_SQ",
"STRING_DQ", "SYMBOL", "LINE_COMMENT", "NEW_LINE_COMMENT",
"NEW_COMMENT", "WHITE_SPACE" ]
RULE_unit = 0
RULE_expression = 1
RULE_parameterList = 2
RULE_parameter = 3
RULE_bindingList = 4
RULE_bindingPair = 5
RULE_catchExpression = 6
RULE_rangeExpression = 7
RULE_data = 8
RULE_token = 9
ruleNames = [ "unit", "expression", "parameterList", "parameter", "bindingList",
"bindingPair", "catchExpression", "rangeExpression",
"data", "token" ]
EOF = Token.EOF
T__0=1
T__1=2
STRING=3
PRIMITIVE_DATA_OP=4
UNARY_OP=5
BINARY_OP=6
TERNARY_OP=7
N_ARY_OP=8
AMBI1_OP=9
AMBI2_OP=10
NONE=11
SEQ=12
DO=13
LET=14
DEFINE=15
LAMBDA=16
APPLY=17
INVOKE=18
SINVOKE=19
FOR=20
TRY=21
CATCH=22
BOOLEAN=23
FLOAT=24
INT=25
LOAD=26
IMPORT=27
ISNONE=28
ISOBJECT=29
ISINT=30
ISFLOAT=31
GETUID=32
GLOBAL=33
NOT=34
THROW=35
FETCH=36
NARROW=37
INSTANCEOF=38
GET=39
IN=40
IS=41
LOOKUP=42
SETUID=43
KWAPPLY=44
MODIFY=45
UPDATE=46
SUPDATE=47
SETATTR=48
CONCAT=49
AND=50
OR=51
MKTUPLE=52
MKLIST=53
MKDICT=54
MINUS=55
IF=56
GETATTR=57
ID=58
NUMBER=59
STRING_SQ=60
STRING_DQ=61
SYMBOL=62
LINE_COMMENT=63
NEW_LINE_COMMENT=64
NEW_COMMENT=65
WHITE_SPACE=66
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.8")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class UnitContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def getRuleIndex(self):
return PLambdaParser.RULE_unit
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUnit" ):
listener.enterUnit(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUnit" ):
listener.exitUnit(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitUnit" ):
return visitor.visitUnit(self)
else:
return visitor.visitChildren(self)
def unit(self):
localctx = PLambdaParser.UnitContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_unit)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 21
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 20
self.expression()
self.state = 23
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0)):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ExpressionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def getRuleIndex(self):
return PLambdaParser.RULE_expression
def copyFrom(self, ctx:ParserRuleContext):
super().copyFrom(ctx)
class NaryExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def N_ARY_OP(self):
return self.getToken(PLambdaParser.N_ARY_OP, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNaryExpression" ):
listener.enterNaryExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNaryExpression" ):
listener.exitNaryExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitNaryExpression" ):
return visitor.visitNaryExpression(self)
else:
return visitor.visitChildren(self)
class ForExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def FOR(self):
return self.getToken(PLambdaParser.FOR, 0)
def ID(self):
return self.getToken(PLambdaParser.ID, 0)
def rangeExpression(self):
return self.getTypedRuleContext(PLambdaParser.RangeExpressionContext,0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterForExpression" ):
listener.enterForExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitForExpression" ):
listener.exitForExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitForExpression" ):
return visitor.visitForExpression(self)
else:
return visitor.visitChildren(self)
class LambdaExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def LAMBDA(self):
return self.getToken(PLambdaParser.LAMBDA, 0)
def parameterList(self):
return self.getTypedRuleContext(PLambdaParser.ParameterListContext,0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterLambdaExpression" ):
listener.enterLambdaExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitLambdaExpression" ):
listener.exitLambdaExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitLambdaExpression" ):
return visitor.visitLambdaExpression(self)
else:
return visitor.visitChildren(self)
class OneOrMoreExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def AMBI1_OP(self):
return self.getToken(PLambdaParser.AMBI1_OP, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterOneOrMoreExpression" ):
listener.enterOneOrMoreExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitOneOrMoreExpression" ):
listener.exitOneOrMoreExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitOneOrMoreExpression" ):
return visitor.visitOneOrMoreExpression(self)
else:
return visitor.visitChildren(self)
class NoneLiteralContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def NONE(self):
return self.getToken(PLambdaParser.NONE, 0)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNoneLiteral" ):
listener.enterNoneLiteral(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNoneLiteral" ):
listener.exitNoneLiteral(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitNoneLiteral" ):
return visitor.visitNoneLiteral(self)
else:
return visitor.visitChildren(self)
class SeqExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def SEQ(self):
return self.getToken(PLambdaParser.SEQ, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSeqExpression" ):
listener.enterSeqExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSeqExpression" ):
listener.exitSeqExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitSeqExpression" ):
return visitor.visitSeqExpression(self)
else:
return visitor.visitChildren(self)
class ApplyExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def APPLY(self):
return self.getToken(PLambdaParser.APPLY, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterApplyExpression" ):
listener.enterApplyExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitApplyExpression" ):
listener.exitApplyExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitApplyExpression" ):
return visitor.visitApplyExpression(self)
else:
return visitor.visitChildren(self)
class BinaryExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def BINARY_OP(self):
return self.getToken(PLambdaParser.BINARY_OP, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBinaryExpression" ):
listener.enterBinaryExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBinaryExpression" ):
listener.exitBinaryExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitBinaryExpression" ):
return visitor.visitBinaryExpression(self)
else:
return visitor.visitChildren(self)
class TwoOrMoreExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def AMBI2_OP(self):
return self.getToken(PLambdaParser.AMBI2_OP, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTwoOrMoreExpression" ):
listener.enterTwoOrMoreExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTwoOrMoreExpression" ):
listener.exitTwoOrMoreExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitTwoOrMoreExpression" ):
return visitor.visitTwoOrMoreExpression(self)
else:
return visitor.visitChildren(self)
class TryExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def TRY(self):
return self.getToken(PLambdaParser.TRY, 0)
def catchExpression(self):
return self.getTypedRuleContext(PLambdaParser.CatchExpressionContext,0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTryExpression" ):
listener.enterTryExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTryExpression" ):
listener.exitTryExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitTryExpression" ):
return visitor.visitTryExpression(self)
else:
return visitor.visitChildren(self)
class StringLiteralContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def STRING(self):
return self.getToken(PLambdaParser.STRING, 0)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStringLiteral" ):
listener.enterStringLiteral(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStringLiteral" ):
listener.exitStringLiteral(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitStringLiteral" ):
return visitor.visitStringLiteral(self)
else:
return visitor.visitChildren(self)
class DefineExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def DEFINE(self):
return self.getToken(PLambdaParser.DEFINE, 0)
def ID(self):
return self.getToken(PLambdaParser.ID, 0)
def parameterList(self):
return self.getTypedRuleContext(PLambdaParser.ParameterListContext,0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefineExpression" ):
listener.enterDefineExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefineExpression" ):
listener.exitDefineExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitDefineExpression" ):
return visitor.visitDefineExpression(self)
else:
return visitor.visitChildren(self)
class LetExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def LET(self):
return self.getToken(PLambdaParser.LET, 0)
def bindingList(self):
return self.getTypedRuleContext(PLambdaParser.BindingListContext,0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterLetExpression" ):
listener.enterLetExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitLetExpression" ):
listener.exitLetExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitLetExpression" ):
return visitor.visitLetExpression(self)
else:
return visitor.visitChildren(self)
class IdentifierLiteralContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def ID(self):
return self.getToken(PLambdaParser.ID, 0)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterIdentifierLiteral" ):
listener.enterIdentifierLiteral(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitIdentifierLiteral" ):
listener.exitIdentifierLiteral(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitIdentifierLiteral" ):
return visitor.visitIdentifierLiteral(self)
else:
return visitor.visitChildren(self)
class UnaryExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def UNARY_OP(self):
return self.getToken(PLambdaParser.UNARY_OP, 0)
def expression(self):
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,0)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUnaryExpression" ):
listener.enterUnaryExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUnaryExpression" ):
listener.exitUnaryExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitUnaryExpression" ):
return visitor.visitUnaryExpression(self)
else:
return visitor.visitChildren(self)
class TernaryExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def TERNARY_OP(self):
return self.getToken(PLambdaParser.TERNARY_OP, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTernaryExpression" ):
listener.enterTernaryExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTernaryExpression" ):
listener.exitTernaryExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitTernaryExpression" ):
return visitor.visitTernaryExpression(self)
else:
return visitor.visitChildren(self)
class InvokeExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def INVOKE(self):
return self.getToken(PLambdaParser.INVOKE, 0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInvokeExpression" ):
listener.enterInvokeExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInvokeExpression" ):
listener.exitInvokeExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitInvokeExpression" ):
return visitor.visitInvokeExpression(self)
else:
return visitor.visitChildren(self)
class DataExpressionContext(ExpressionContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a PLambdaParser.ExpressionContext
super().__init__(parser)
self.copyFrom(ctx)
def PRIMITIVE_DATA_OP(self):
return self.getToken(PLambdaParser.PRIMITIVE_DATA_OP, 0)
def data(self):
return self.getTypedRuleContext(PLambdaParser.DataContext,0)
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDataExpression" ):
listener.enterDataExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDataExpression" ):
listener.exitDataExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitDataExpression" ):
return visitor.visitDataExpression(self)
else:
return visitor.visitChildren(self)
def expression(self):
localctx = PLambdaParser.ExpressionContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_expression)
self._la = 0 # Token type
try:
self.state = 163
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,13,self._ctx)
if la_ == 1:
localctx = PLambdaParser.SeqExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 1)
self.state = 25
self.match(PLambdaParser.T__0)
self.state = 26
self.match(PLambdaParser.SEQ)
self.state = 28
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 27
self.expression()
self.state = 30
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0)):
break
self.state = 32
self.match(PLambdaParser.T__1)
pass
elif la_ == 2:
localctx = PLambdaParser.LetExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 2)
self.state = 34
self.match(PLambdaParser.T__0)
self.state = 35
self.match(PLambdaParser.LET)
self.state = 36
self.bindingList()
self.state = 38
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 37
self.expression()
self.state = 40
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0)):
break
self.state = 42
self.match(PLambdaParser.T__1)
pass
elif la_ == 3:
localctx = PLambdaParser.DefineExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 3)
self.state = 44
self.match(PLambdaParser.T__0)
self.state = 45
self.match(PLambdaParser.DEFINE)
self.state = 46
self.match(PLambdaParser.ID)
self.state = 48
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,3,self._ctx)
if la_ == 1:
self.state = 47
self.parameterList()
self.state = 51
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 50
self.expression()
self.state = 53
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0)):
break
self.state = 55
self.match(PLambdaParser.T__1)
pass
elif la_ == 4:
localctx = PLambdaParser.LambdaExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 4)
self.state = 57
self.match(PLambdaParser.T__0)
self.state = 58
self.match(PLambdaParser.LAMBDA)
self.state = 59
self.parameterList()
self.state = 61
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 60
self.expression()
self.state = 63
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0)):
break
self.state = 65
self.match(PLambdaParser.T__1)
pass
elif la_ == 5:
localctx = PLambdaParser.InvokeExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 5)
self.state = 67
self.match(PLambdaParser.T__0)
self.state = 68
self.match(PLambdaParser.INVOKE)
self.state = 69
self.expression()
self.state = 70
self.expression()
self.state = 74
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0):
self.state = 71
self.expression()
self.state = 76
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 77
self.match(PLambdaParser.T__1)
pass
elif la_ == 6:
localctx = PLambdaParser.ApplyExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 6)
self.state = 79
self.match(PLambdaParser.T__0)
self.state = 80
self.match(PLambdaParser.APPLY)
self.state = 81
self.expression()
self.state = 85
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0):
self.state = 82
self.expression()
self.state = 87
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 88
self.match(PLambdaParser.T__1)
pass
elif la_ == 7:
localctx = PLambdaParser.DataExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 7)
self.state = 90
self.match(PLambdaParser.T__0)
self.state = 91
self.match(PLambdaParser.PRIMITIVE_DATA_OP)
self.state = 92
self.data()
self.state = 93
self.match(PLambdaParser.T__1)
pass
elif la_ == 8:
localctx = PLambdaParser.UnaryExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 8)
self.state = 95
self.match(PLambdaParser.T__0)
self.state = 96
self.match(PLambdaParser.UNARY_OP)
self.state = 97
self.expression()
self.state = 98
self.match(PLambdaParser.T__1)
pass
elif la_ == 9:
localctx = PLambdaParser.BinaryExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 9)
self.state = 100
self.match(PLambdaParser.T__0)
self.state = 101
self.match(PLambdaParser.BINARY_OP)
self.state = 102
self.expression()
self.state = 103
self.expression()
self.state = 104
self.match(PLambdaParser.T__1)
pass
elif la_ == 10:
localctx = PLambdaParser.TernaryExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 10)
self.state = 106
self.match(PLambdaParser.T__0)
self.state = 107
self.match(PLambdaParser.TERNARY_OP)
self.state = 108
self.expression()
self.state = 109
self.expression()
self.state = 110
self.expression()
self.state = 111
self.match(PLambdaParser.T__1)
pass
elif la_ == 11:
localctx = PLambdaParser.OneOrMoreExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 11)
self.state = 113
self.match(PLambdaParser.T__0)
self.state = 114
self.match(PLambdaParser.AMBI1_OP)
self.state = 115
self.expression()
self.state = 117
self._errHandler.sync(self)
_la = self._input.LA(1)
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0):
self.state = 116
self.expression()
self.state = 119
self.match(PLambdaParser.T__1)
pass
elif la_ == 12:
localctx = PLambdaParser.TwoOrMoreExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 12)
self.state = 121
self.match(PLambdaParser.T__0)
self.state = 122
self.match(PLambdaParser.AMBI2_OP)
self.state = 123
self.expression()
self.state = 124
self.expression()
self.state = 126
self._errHandler.sync(self)
_la = self._input.LA(1)
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0):
self.state = 125
self.expression()
self.state = 128
self.match(PLambdaParser.T__1)
pass
elif la_ == 13:
localctx = PLambdaParser.NaryExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 13)
self.state = 130
self.match(PLambdaParser.T__0)
self.state = 131
self.match(PLambdaParser.N_ARY_OP)
self.state = 135
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0):
self.state = 132
self.expression()
self.state = 137
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 138
self.match(PLambdaParser.T__1)
pass
elif la_ == 14:
localctx = PLambdaParser.TryExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 14)
self.state = 139
self.match(PLambdaParser.T__0)
self.state = 140
self.match(PLambdaParser.TRY)
self.state = 142
self._errHandler.sync(self)
_alt = 1
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt == 1:
self.state = 141
self.expression()
else:
raise NoViableAltException(self)
self.state = 144
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,11,self._ctx)
self.state = 146
self.catchExpression()
self.state = 147
self.match(PLambdaParser.T__1)
pass
elif la_ == 15:
localctx = PLambdaParser.ForExpressionContext(self, localctx)
self.enterOuterAlt(localctx, 15)
self.state = 149
self.match(PLambdaParser.T__0)
self.state = 150
self.match(PLambdaParser.FOR)
self.state = 151
self.match(PLambdaParser.ID)
self.state = 152
self.rangeExpression()
self.state = 154
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 153
self.expression()
self.state = 156
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0)):
break
self.state = 158
self.match(PLambdaParser.T__1)
pass
elif la_ == 16:
localctx = PLambdaParser.StringLiteralContext(self, localctx)
self.enterOuterAlt(localctx, 16)
self.state = 160
self.match(PLambdaParser.STRING)
pass
elif la_ == 17:
localctx = PLambdaParser.IdentifierLiteralContext(self, localctx)
self.enterOuterAlt(localctx, 17)
self.state = 161
self.match(PLambdaParser.ID)
pass
elif la_ == 18:
localctx = PLambdaParser.NoneLiteralContext(self, localctx)
self.enterOuterAlt(localctx, 18)
self.state = 162
self.match(PLambdaParser.NONE)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParameterListContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parameter(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ParameterContext)
else:
return self.getTypedRuleContext(PLambdaParser.ParameterContext,i)
def getRuleIndex(self):
return PLambdaParser.RULE_parameterList
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParameterList" ):
listener.enterParameterList(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParameterList" ):
listener.exitParameterList(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitParameterList" ):
return visitor.visitParameterList(self)
else:
return visitor.visitChildren(self)
def parameterList(self):
localctx = PLambdaParser.ParameterListContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_parameterList)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 165
self.match(PLambdaParser.T__0)
self.state = 169
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==PLambdaParser.ID:
self.state = 166
self.parameter()
self.state = 171
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 172
self.match(PLambdaParser.T__1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParameterContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def ID(self):
return self.getToken(PLambdaParser.ID, 0)
def getRuleIndex(self):
return PLambdaParser.RULE_parameter
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParameter" ):
listener.enterParameter(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParameter" ):
listener.exitParameter(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitParameter" ):
return visitor.visitParameter(self)
else:
return visitor.visitChildren(self)
def parameter(self):
localctx = PLambdaParser.ParameterContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_parameter)
try:
self.enterOuterAlt(localctx, 1)
self.state = 174
self.match(PLambdaParser.ID)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class BindingListContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def bindingPair(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.BindingPairContext)
else:
return self.getTypedRuleContext(PLambdaParser.BindingPairContext,i)
def getRuleIndex(self):
return PLambdaParser.RULE_bindingList
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBindingList" ):
listener.enterBindingList(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBindingList" ):
listener.exitBindingList(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitBindingList" ):
return visitor.visitBindingList(self)
else:
return visitor.visitChildren(self)
def bindingList(self):
localctx = PLambdaParser.BindingListContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_bindingList)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 176
self.match(PLambdaParser.T__0)
self.state = 178
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 177
self.bindingPair()
self.state = 180
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==PLambdaParser.T__0):
break
self.state = 182
self.match(PLambdaParser.T__1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class BindingPairContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parameter(self):
return self.getTypedRuleContext(PLambdaParser.ParameterContext,0)
def expression(self):
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,0)
def getRuleIndex(self):
return PLambdaParser.RULE_bindingPair
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBindingPair" ):
listener.enterBindingPair(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBindingPair" ):
listener.exitBindingPair(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitBindingPair" ):
return visitor.visitBindingPair(self)
else:
return visitor.visitChildren(self)
def bindingPair(self):
localctx = PLambdaParser.BindingPairContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_bindingPair)
try:
self.enterOuterAlt(localctx, 1)
self.state = 184
self.match(PLambdaParser.T__0)
self.state = 185
self.parameter()
self.state = 186
self.expression()
self.state = 187
self.match(PLambdaParser.T__1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class CatchExpressionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CATCH(self):
return self.getToken(PLambdaParser.CATCH, 0)
def parameter(self):
return self.getTypedRuleContext(PLambdaParser.ParameterContext,0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(PLambdaParser.ExpressionContext)
else:
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,i)
def getRuleIndex(self):
return PLambdaParser.RULE_catchExpression
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCatchExpression" ):
listener.enterCatchExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCatchExpression" ):
listener.exitCatchExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitCatchExpression" ):
return visitor.visitCatchExpression(self)
else:
return visitor.visitChildren(self)
def catchExpression(self):
localctx = PLambdaParser.CatchExpressionContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_catchExpression)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 189
self.match(PLambdaParser.T__0)
self.state = 190
self.match(PLambdaParser.CATCH)
self.state = 191
self.parameter()
self.state = 193
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 192
self.expression()
self.state = 195
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PLambdaParser.T__0) | (1 << PLambdaParser.STRING) | (1 << PLambdaParser.NONE) | (1 << PLambdaParser.ID))) != 0)):
break
self.state = 197
self.match(PLambdaParser.T__1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class RangeExpressionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def expression(self):
return self.getTypedRuleContext(PLambdaParser.ExpressionContext,0)
def getRuleIndex(self):
return PLambdaParser.RULE_rangeExpression
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterRangeExpression" ):
listener.enterRangeExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitRangeExpression" ):
listener.exitRangeExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitRangeExpression" ):
return visitor.visitRangeExpression(self)
else:
return visitor.visitChildren(self)
def rangeExpression(self):
localctx = PLambdaParser.RangeExpressionContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_rangeExpression)
try:
self.enterOuterAlt(localctx, 1)
self.state = 199
self.expression()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class DataContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def ID(self):
return self.getToken(PLambdaParser.ID, 0)
def NUMBER(self):
return self.getToken(PLambdaParser.NUMBER, 0)
def getRuleIndex(self):
return PLambdaParser.RULE_data
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterData" ):
listener.enterData(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitData" ):
listener.exitData(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitData" ):
return visitor.visitData(self)
else:
return visitor.visitChildren(self)
def data(self):
localctx = PLambdaParser.DataContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_data)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 201
_la = self._input.LA(1)
if not(_la==PLambdaParser.ID or _la==PLambdaParser.NUMBER):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class TokenContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def ID(self):
return self.getToken(PLambdaParser.ID, 0)
def STRING(self):
return self.getToken(PLambdaParser.STRING, 0)
def getRuleIndex(self):
return PLambdaParser.RULE_token
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterToken" ):
listener.enterToken(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitToken" ):
listener.exitToken(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitToken" ):
return visitor.visitToken(self)
else:
return visitor.visitChildren(self)
def token(self):
localctx = PLambdaParser.TokenContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_token)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 203
_la = self._input.LA(1)
if not(_la==PLambdaParser.STRING or _la==PLambdaParser.ID):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
| mit | -6,695,734,416,499,598,000 | 36.738562 | 188 | 0.558664 | false | 3.61697 | false | false | false |
Xi-Plus/Xiplus-Wikipedia-Bot | policy-newsletter/policy-newsletter.py | 1 | 11697 | #!/usr/bin/env python
# coding: utf-8
import argparse
import os
import re
from datetime import date
import pymysql
os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__))
import pywikibot
from dateutil.relativedelta import relativedelta
from config import host, password, user # pylint: disable=E0611,W0614
parser = argparse.ArgumentParser()
parser.add_argument('--page')
parser.add_argument('--months', type=int, default=0)
args = parser.parse_args()
print(args)
title = args.page
if title is None:
rundate = date.today() + relativedelta(months=args.months)
title = 'Wikipedia:維基百科政策簡報/存檔/{:04}-{:02}'.format(rundate.year, rundate.month)
site = pywikibot.Site('zh', 'wikipedia')
site.login()
print(title)
page = pywikibot.Page(site, title)
if not page.exists():
    print('page does not exist')
exit()
text = page.text
# print(text)
m = re.search(r'過去一個月((\d+)年(\d+)月(\d+)日至(\d+)年(\d+)月(\d+)日)內', text)
if m:
time1 = '{:04d}{:02d}{:02d}000000'.format(int(m.group(1)), int(m.group(2)), int(m.group(3)))
time2 = '{:04d}{:02d}{:02d}235959'.format(int(m.group(4)), int(m.group(5)), int(m.group(6)))
print(time1, time2)
else:
exit('Failed to get date range')
if not page.botMayEdit():
print('page is locked')
exit()
ignoreRevids = []
pos1 = text.index("'''方針與指引重要變動'''")
pos2 = text.index("'''其他方針與指引雜項修訂'''")
policyText = text[pos1:pos2]
# print(policyText)
for temp in re.findall(r'\[\[Special:Diff/(\d+)/(\d+)\|', policyText):
ignoreRevids.append((int(temp[0]), int(temp[1])))
for temp in re.findall(r'\[\[Special:Permalink/(\d+)\|', policyText):
ignoreRevids.append((0, int(temp)))
talkPage = page.toggleTalkPage()
if talkPage.exists():
talkText = talkPage.text
try:
pos1 = talkText.index('<!-- bot ignore start -->')
pos2 = talkText.index('<!-- bot ignore end -->')
talkText = talkText[pos1:pos2]
print(talkText)
for temp in re.findall(r'\[\[Special:Diff/(\d+)/(\d+)(?:\||\]\])', talkText):
ignoreRevids.append((int(temp[0]), int(temp[1])))
except ValueError:
print('cannot find flag')
print('ignoreRevids', ignoreRevids)
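# Added note: the block below connects to the zhwiki database replica and pulls
# every revision made during the newsletter period to pages linked from page id
# 1608664 (presumably the policy/guideline index), excluding two
# transclusion/documentation pages.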
conn = pymysql.connect(
host=host,
user=user,
password=password,
charset="utf8"
)
# https://quarry.wmflabs.org/query/33421
with conn.cursor() as cur:
cur.execute('use zhwiki_p')
cur.execute("""
SELECT
rev_id, rev_parent_id, rev_timestamp,
page_id, page_title, comment_text
FROM revision
LEFT JOIN page ON revision.rev_page = page.page_id
LEFT JOIN comment ON revision.rev_comment_id = comment.comment_id
WHERE
revision.rev_timestamp >= '{}' AND revision.rev_timestamp <= '{}'
AND revision.rev_page IN
(
SELECT page.page_id
FROM pagelinks
LEFT JOIN page ON pagelinks.pl_title = page.page_title AND pagelinks.pl_namespace = page.page_namespace
WHERE pl_from = 1608664 AND pl_namespace = 4
AND page_id NOT IN (
590741, # 嵌入包含
977277 # 模板文檔頁模式
)
)
ORDER BY revision.rev_timestamp ASC
""".format(time1, time2))
res = cur.fetchall()
record = {}
revid2page_id = {}
for row in res:
rev_id = row[0]
rev_parent_id = row[1]
rev_timestamp = row[2].decode()
page_id = row[3]
page_title = row[4].decode()
revid2page_id[rev_id] = page_id
revid2page_id[rev_parent_id] = page_id
if page_id not in record:
record[page_id] = {
'page_title': page_title,
'history': [],
}
record[page_id]['history'].append({
'revid': rev_id,
'rev_parent_id': rev_parent_id,
'rev_timestamp': rev_timestamp,
'minor': True
})
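# Added note: ignoreRevids collects the diffs already reported in the "important
# changes" section (or explicitly listed on the talk page); the loop below marks
# those revision ranges as non-minor so they are excluded from the
# miscellaneous-changes summary built afterwards.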
for revids in ignoreRevids:
if revids[1] not in revid2page_id:
continue
page_id = revid2page_id[revids[1]]
idx1 = 0
if revids[0] != 0:
while record[page_id]['history'][idx1]['rev_parent_id'] != revids[0]:
idx1 += 1
idx2 = 0
while record[page_id]['history'][idx2]['revid'] != revids[1]:
idx2 += 1
for i in range(idx1, idx2 + 1):
record[page_id]['history'][i]['minor'] = False
# print(json.dumps(record, indent=4, ensure_ascii=False))
policyList = [
1040126, # IP封禁例外
661388, # 新頁面巡查
35, # 方針與指引
138006, # 五大支柱
140143, # 忽略所有规则
314, # 中立的观点
1007580, # 可供查證
1007588, # 非原创研究
3036, # 避免地域中心
796, # 维基百科不是什么
22766, # 维基百科不是词典
621588, # 自傳
1165683, # 生者傳記
586519, # 用戶查核方針
70668, # 快速删除方针
351, # 文件使用方针
1089503, # 侵犯著作权
121628, # 保護方針
311, # 命名常规
318685, # 命名常规_(人名)
6023660, # 命名常规_(化学)
3570009, # 命名常规_(电子游戏)
6628518, # 命名常规_(页面分类)
104452, # 文明
142344, # 共识
139444, # 不要人身攻击
40126, # 編輯戰
1187041, # 編輯禁制方針
16795, # 编辑方针
1497462, # 修訂巡查
828098, # 騷擾
122511, # 破坏
138734, # 条目所有权
1050650, # 封禁方针
1041919, # 删除方针
1279762, # 修訂版本刪除
3905475, # 存廢覆核方針
7426, # 用户名
5757315, # 機械人方針
1454, # 管理员
160825, # 管理員的離任
503284, # 管理戰
6890438, # 权限申请
5902631, # 解除權限方針
1001002, # 回退功能
919595, # 基金會行動
1082699, # 傀儡
6134707, # 儿童保护
1038748, # 监督
1696159, # 人事任免投票資格
1139217, # 志愿者回复团队
1466707, # 机器用户
282654, # 行政员
5323514, # 大量帳號建立者
6108916, # 檔案移動員
6213290, # 介面管理員
5373689, # 使用条款
5373678, # 有償編輯方針
267252, # 誹謗
6786601, # 版权信息
5307465, # 非歧视方针
1077124, # 非自由内容使用准则
5723648, # 模板編輯員
]
minorPolicyChanges = {}
minorGuidelineChanges = {}
for page_id in record:
idx1 = 0
while idx1 < len(record[page_id]['history']):
if record[page_id]['history'][idx1]['minor']:
idx2 = idx1
while idx2 < len(record[page_id]['history']) and record[page_id]['history'][idx2]['minor']:
idx2 += 1
if page_id in policyList:
if page_id not in minorPolicyChanges:
minorPolicyChanges[page_id] = {
'page_title': record[page_id]['page_title'],
'first_time': int(record[page_id]['history'][idx1]['rev_timestamp']),
'changes': [],
}
minorPolicyChanges[page_id]['changes'].append((
record[page_id]['history'][idx1]['rev_parent_id'],
record[page_id]['history'][idx2 - 1]['revid'],
))
else:
if page_id not in minorGuidelineChanges:
minorGuidelineChanges[page_id] = {
'page_title': record[page_id]['page_title'],
'first_time': int(record[page_id]['history'][idx1]['rev_timestamp']),
'changes': [],
}
minorGuidelineChanges[page_id]['changes'].append((
record[page_id]['history'][idx1]['rev_parent_id'],
record[page_id]['history'][idx2 - 1]['revid'],
))
idx1 = idx2
idx1 += 1
# print(minorPolicyChanges)
# print(minorGuidelineChanges)
minorPolicyChanges = list(minorPolicyChanges.values())
minorPolicyChanges.sort(key=lambda v: v['first_time'])
minorGuidelineChanges = list(minorGuidelineChanges.values())
minorGuidelineChanges.sort(key=lambda v: v['first_time'])
# print(minorPolicyChanges)
# print(minorGuidelineChanges)
chineseNumber = ['一', '二', '三', '四', '五']
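# Added note: formatTitle() below normalises page titles for display -- subpages
# such as "A/B" become "A(B)", and "方針" (policy) or "指引" (guideline) is
# appended when the title does not already contain such a marker.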
def formatTitle(title, isPolicy):
if title == '可靠来源/布告板/评级指引':
return '可靠来源布告板评级指引'
title = re.sub(r'/(条目指引)', r'\1', title)
title = re.sub(r'^(.+)/(.+)$', r'\g<1>(\g<2>)', title)
title = re.sub(r'^(.+)_\((.+)\)$', r'\g<1>(\g<2>)', title)
if not re.search(r'方[針针]|指引|格式手[冊册]|五大支柱|维基百科不是什么|命名常规|忽略所有规则', title):
if isPolicy:
title = re.sub(r'^(.+?)((.+?))?$', r'\g<1>方針\g<2>', title)
else:
title = re.sub(r'^(.+?)((.+?))?$', r'\g<1>指引\g<2>', title)
title = re.sub(r'名字空[间間]', '命名空間', title)
return title
policyTextList = []
for change in minorPolicyChanges:
title = formatTitle(change['page_title'], True)
if len(change['changes']) == 1:
policyTextList.append('《[[Special:Diff/{}/{}|{}]]》'.format(
change['changes'][0][0],
change['changes'][0][1],
title,
))
else:
diffList = []
for i, revids in enumerate(change['changes']):
diffList.append('[[Special:Diff/{}/{}|{}]]'.format(
revids[0],
revids[1],
chineseNumber[i],
))
policyTextList.append('《{}》({})'.format(
title,
'、'.join(diffList),
))
# print('policyTextList', policyTextList)
guidelineTextList = []
for change in minorGuidelineChanges:
title = formatTitle(change['page_title'], False)
if len(change['changes']) == 1:
guidelineTextList.append('《[[Special:Diff/{}/{}|{}]]》'.format(
change['changes'][0][0],
change['changes'][0][1],
title,
))
else:
diffList = []
for i, revids in enumerate(change['changes']):
diffList.append('[[Special:Diff/{}/{}|{}]]'.format(
revids[0],
revids[1],
chineseNumber[i],
))
guidelineTextList.append('《{}》({})'.format(
title,
'、'.join(diffList),
))
# print('guidelineTextList', guidelineTextList)
newPolicyText = ''
if len(policyTextList) >= 2:
newPolicyText = '、'.join(policyTextList[:-1]) + '及' + policyTextList[-1]
elif len(policyTextList) == 1:
newPolicyText = policyTextList[0]
else:
newPolicyText = '無'
# print('newPolicyText', newPolicyText)
newGuidelineText = ''
if len(guidelineTextList) >= 2:
newGuidelineText = '、'.join(guidelineTextList[:-1]) + '及' + guidelineTextList[-1]
elif len(guidelineTextList) == 1:
newGuidelineText = guidelineTextList[0]
else:
newGuidelineText = '無'
# print('newGuidelineText', newGuidelineText)
text = re.sub(r'(\[\[Special:链出更改/Category:维基百科方针\|方針]]:).*', r'\1' + newPolicyText + '。', text)
text = re.sub(r'(\[\[Special:链出更改/Category:维基百科指引\|指引]]:).*', r'\1' + newGuidelineText + '。', text)
# print(text)
if page.text == text:
print('No diff')
exit()
print('Diff:')
pywikibot.showDiff(page.text, text)
print('-' * 50)
page.text = text
page.save(summary='[[User:A2093064-bot/task/36|機器人36]]:自動更新雜項修訂', minor=False)
| mit | -2,312,043,178,374,885,000 | 26.774026 | 115 | 0.560086 | false | 2.562425 | false | false | false |
alexlib/openpiv-python | openpiv/lib.py | 2 | 6691 | import numpy as np
def replace_nans(array, max_iter, tol, kernel_size=2, method="disk"):
"""Replace NaN elements in an array using an iterative image inpainting
algorithm.
The algorithm is the following:
1) For each element in the input array, replace it by a weighted average
of the neighbouring elements which are not NaN themselves. The weights
depend on the method type. See Methods below.
2) Several iterations are needed if there are adjacent NaN elements.
If this is the case, information is "spread" from the edges of the
missing regions iteratively, until the variation is below a certain
threshold.
Methods:
localmean - A square kernel where all elements have the same value,
weights are equal to n/( (2*kernel_size+1)**2 -1 ),
where n is the number of non-NaN elements.
disk - A circular kernel where all elements have the same value,
kernel is calculated by::
if ((S-i)**2 + (S-j)**2)**0.5 <= S:
kernel[i,j] = 1.0
else:
kernel[i,j] = 0.0
where S is the kernel radius.
distance - A circular inverse distance kernel where elements are
weighted proportional to their distance away from the
center of the kernel, elements farther away have less
weight. Elements outside the specified radius are set
to 0.0 as in 'disk', the remaining of the weights are
calculated as::
maxDist = ((S)**2 + (S)**2)**0.5
kernel[i,j] = -1*(((S-i)**2 + (S-j)**2)**0.5 - maxDist)
where S is the kernel radius.
Parameters
----------
array : 2d or 3d np.ndarray
an array containing NaN elements that have to be replaced
if array is a masked array (numpy.ma.MaskedArray), then
the mask is reapplied after the replacement
max_iter : int
the number of iterations
tol : float
On each iteration check if the mean square difference between
values of replaced elements is below a certain tolerance `tol`
kernel_size : int
        the size of the kernel, default is 2
method : str
the method used to replace invalid values. Valid options are
`localmean`, `disk`, and `distance`.
Returns
-------
filled : 2d or 3d np.ndarray
a copy of the input array, where NaN elements have been replaced.
"""
kernel_size = int(kernel_size)
filled = array.copy()
n_dim = len(array.shape)
# generating the kernel
kernel = np.zeros([2 * kernel_size + 1] * len(array.shape), dtype=int)
if method == "localmean":
kernel += 1
elif method == "disk":
dist, dist_inv = get_dist(kernel, kernel_size)
kernel[dist <= kernel_size] = 1
elif method == "distance":
dist, dist_inv = get_dist(kernel, kernel_size)
kernel[dist <= kernel_size] = dist_inv[dist <= kernel_size]
else:
raise ValueError(
"Known methods are: `localmean`, `disk` or `distance`."
)
# list of kernel array indices
# kernel_indices = np.indices(kernel.shape)
# kernel_indices = np.reshape(kernel_indices,
# (n_dim, (2 * kernel_size + 1) ** n_dim),
# order="C").T
# indices where array is NaN
nan_indices = np.array(np.nonzero(np.isnan(array))).T.astype(int)
# number of NaN elements
n_nans = len(nan_indices)
# arrays which contain replaced values to check for convergence
replaced_new = np.zeros(n_nans)
replaced_old = np.zeros(n_nans)
# make several passes
# until we reach convergence
for _ in range(max_iter):
        # note: identifying new NaN indices and looping over only those indices
        # would give a slightly different result
# for each NaN element
for k in range(n_nans):
ind = nan_indices[
k
] # 2 or 3 indices indicating the position of a nan element
# init to 0.0
replaced_new[k] = 0.0
# generating a list of indices of the convolution window in the
# array
slice_indices = np.array(np.meshgrid(*[range(i - kernel_size,
i + kernel_size + 1) for i in ind]))
# identifying all indices strictly inside the image edges:
in_mask = np.array(
[
np.logical_and(
slice_indices[i] < array.shape[i],
slice_indices[i] >= 0
)
for i in range(n_dim)
]
)
# logical and over x,y (and z) indices
in_mask = np.prod(in_mask, axis=0).astype(bool)
# extract window from array
win = filled[tuple(slice_indices[:, in_mask])]
# selecting the same points from the kernel
kernel_in = kernel[in_mask]
# sum of elements of the kernel that are not nan in the window
non_nan = np.sum(kernel_in[~np.isnan(win)])
if non_nan > 0:
# convolution with the kernel
replaced_new[k] = np.nansum(win * kernel_in) / non_nan
else:
# don't do anything if there is only nans around
replaced_new[k] = np.nan
# bulk replace all new values in array
filled[tuple(nan_indices.T)] = replaced_new
# check if replaced elements are below a certain tolerance
if np.mean((replaced_new - replaced_old) ** 2) < tol:
break
else:
replaced_old = replaced_new
return filled
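# A minimal usage sketch (added for illustration; the values are made up and not
# part of the original module):
#
#     import numpy as np
#     field = np.random.rand(32, 32)
#     field[10:13, 10:13] = np.nan
#     filled = replace_nans(field, max_iter=10, tol=1e-6,
#                           kernel_size=2, method="disk")
#     assert not np.isnan(filled).any()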
def get_dist(kernel, kernel_size):
# generates a map of distances to the center of the kernel. This is later
# used to generate disk-shaped kernels and
# to fill in distance based weights
if len(kernel.shape) == 2:
        # x and y coordinates for each point
xs, ys = np.indices(kernel.shape)
        # maximal distance from center - distance to center (of each point)
dist = np.sqrt((ys - kernel_size) ** 2 + (xs - kernel_size) ** 2)
dist_inv = np.sqrt(2) * kernel_size - dist
if len(kernel.shape) == 3:
xs, ys, zs = np.indices(kernel.shape)
dist = np.sqrt(
(ys - kernel_size) ** 2 +
(xs - kernel_size) ** 2 +
(zs - kernel_size) ** 2
)
dist_inv = np.sqrt(3) * kernel_size - dist
return dist, dist_inv
| gpl-3.0 | 1,186,867,909,054,422,300 | 34.590426 | 79 | 0.562995 | false | 4.213476 | false | false | false |
betterlife/psi | psi/app/utils/format_util.py | 2 | 3006 | # coding=utf-8
from decimal import Decimal, ROUND_HALF_UP
from pypinyin import pinyin, lazy_pinyin
import pypinyin
def format_decimal(value):
"""
    Format a decimal to two decimal places using rounding mode ROUND_HALF_UP.
    :param value: the decimal to format
"""
return Decimal(
Decimal(value).quantize(Decimal('.01'), rounding=ROUND_HALF_UP))
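# Illustrative behaviour (added note, assuming standard Decimal semantics):
#   format_decimal('1.005') -> Decimal('1.01')   # halves round up
#   format_decimal(2)       -> Decimal('2.00')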
def decimal_to_percent(value, decimal_place=2):
"""
    Format a decimal as a percentage string with the given number of decimal places.
    :param value: the value to be formatted
    :param decimal_place: number of decimal places, defaults to 2
    :return: a string in percentage format, e.g. '20.50%'
"""
    format_str = '{:.' + str(decimal_place) + '%}'
return format_str.format(value)
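# Illustrative behaviour (added note): decimal_to_percent(0.205) -> '20.50%'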
def get_name(last_name, first_name):
"""
    Build a full name from a last name and a first name. If both parts are
    alphabetic, a space is used to join them; otherwise they are concatenated
    directly.
    :param last_name: Last name
    :param first_name: First name
    :return: last name + separator + first name
"""
connect = ''
if str(last_name).isalpha() and str(first_name).isalpha():
connect = ' '
return last_name + connect + first_name
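# Illustrative behaviour (added note): get_name('John', 'Smith') returns
# 'John Smith'; non-alphabetic parts are concatenated without a space.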
def get_pinyin_first_letters(chinese_characters):
"""
    Get the first letters of the pinyin of Chinese characters. If any character
    is polyphonic (多音字), all reading combinations are returned; for example,
    "调向" yields "dx|tx".
    :param chinese_characters: Chinese characters to get pinyin for.
    :return: first letters of the pinyin, combinations separated by "|"
"""
pys = _get_pinyin_all([], chinese_characters)
result = ''
for py in pys:
for p in py:
result += p
result += "|"
result = result.rstrip('|') # <- Remove last "|"
return result
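# Illustrative behaviour (added note): get_pinyin_first_letters('调向') returns
# 'dx|tx', one entry per combination of readings of the polyphonic character.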
def _get_pinyin_all(existing_combinations, characters):
"""
    Recursively collect all pinyin combinations for the given Chinese characters.
    pinyin() returns results shaped like [['a'], ['b']], so a two-level loop is
    needed to expand every reading of each character.
    :param existing_combinations: combinations already built for the preceding characters
    :param characters: remaining characters to get pinyin combinations for
    :return: a flat list of all combinations, covering polyphonic characters (多音字)
"""
first_character, other_characters = characters[0:1], characters[1:]
if len(first_character) > 0:
py = pinyin(first_character, style=pypinyin.FIRST_LETTER, heteronym=True)
new_existing = []
for p in py:
for a in p:
if len(existing_combinations) > 0:
for e in existing_combinations:
ne = e[:]
ne.append(a)
new_existing.append(ne)
else:
ne = existing_combinations[:]
ne.append(a)
new_existing.append(ne)
return _get_pinyin_all(new_existing, other_characters)
return existing_combinations
| mit | -6,693,992,227,477,102,000 | 33.767442 | 93 | 0.625084 | false | 3.853093 | false | false | false |