Dataset columns (type and observed range):

- blob_id: string, length 40
- directory_id: string, length 40
- path: string, length 3 to 616
- content_id: string, length 40
- detected_licenses: list, length 0 to 112
- license_type: string, 2 classes
- repo_name: string, length 5 to 115
- snapshot_id: string, length 40
- revision_id: string, length 40
- branch_name: string, 777 classes
- visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
- revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
- committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
- github_id: int64, 4.92k to 681M, nullable
- star_events_count: int64, 0 to 209k
- fork_events_count: int64, 0 to 110k
- gha_license_id: string, 22 classes, nullable
- gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable
- gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable
- gha_language: string, 149 classes
- src_encoding: string, 26 classes
- language: string, 1 value
- is_vendor: bool, 2 classes
- is_generated: bool, 2 classes
- length_bytes: int64, 3 to 10.2M
- extension: string, 188 classes
- content: string, length 3 to 10.2M
- authors: list, length 1
- author_id: string, length 1 to 132

Rows (pipe-separated, in schema order):
f8c70c1da41cfea53b6d1f02569fd71e0439f618 | 35e00d1996515ccf3151067ff28ff3357078f0b6 | /samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py | add86c6fa8941035f2205bc30efda5abdc9894e2 | [
"Apache-2.0"
]
| permissive | googleapis/python-pubsub | 5bb18674307bd89236a61c0d7c5079f10e19467e | 1b9724324c58d27bcee42020b751cda58d80fddb | refs/heads/main | 2023-09-03T13:14:22.894233 | 2023-08-28T13:18:36 | 2023-08-28T13:18:36 | 226,992,581 | 321 | 195 | Apache-2.0 | 2023-09-10T23:29:10 | 2019-12-10T00:09:52 | Python | UTF-8 | Python | false | false | 1,876 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ValidateMessage
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-pubsub
# [START pubsub_v1_generated_SchemaService_ValidateMessage_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google import pubsub_v1


async def sample_validate_message():
    # Create a client
    client = pubsub_v1.SchemaServiceAsyncClient()

    # Initialize request argument(s)
    request = pubsub_v1.ValidateMessageRequest(
        name="name_value",
        parent="parent_value",
    )

    # Make the request
    response = await client.validate_message(request=request)

    # Handle the response
    print(response)

# [END pubsub_v1_generated_SchemaService_ValidateMessage_async]
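
# Editor's sketch (assumption, not part of the generated sample): the sample
# above is a coroutine, so a small driver is needed to execute it locally.
import asyncio

if __name__ == "__main__":
    # Runs the generated coroutine to completion on a fresh event loop.
    asyncio.run(sample_validate_message())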
| [
"[email protected]"
]
| |
8152f5de1e216e50d57f2ee029225b5144c4beb2 | ed2be337ce4b8a3c772862fce99ec99416784a62 | /play/models.py | c889f87afcafa52f0ca12af45ece8a4485629983 | [
"MIT"
]
| permissive | fraferra/PlayPaloAltoServer | e5ecc7557a02b2b14750e929f656a121984a560f | a7128d363efd6059007df2c9da77f7bd033f7987 | refs/heads/master | 2020-05-20T05:30:19.020450 | 2014-07-08T02:34:14 | 2014-07-08T02:34:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,899 | py | from django.db import models
from datetime import date
from django import forms
from django.contrib.auth.models import User, UserManager
from django.utils import timezone
from django.db.models.signals import post_save
from django.utils.translation import ugettext as _
from utils import *
from social_auth.models import UserSocialAuth
import constants
from django.core.exceptions import *
import charity.models
import shop.models
# Create your models here.
import requests
import datetime
#from social_auth.backends.pipeline.user import update_user_details
class Player(models.Model):
    user = models.ForeignKey(User)
    # Fixed from forms.BooleanField(initial=False): a form field on a model
    # creates no database column, so a model field is used instead.
    custom_auth = models.BooleanField(default=False)
    token = models.CharField(max_length=100, null=True, default=None)
    score = models.DecimalField(max_digits=4, decimal_places=0, null=True, default=20)
    experience = models.DecimalField(max_digits=5, decimal_places=0, null=True, default=0)
    level = models.DecimalField(max_digits=4, decimal_places=0, null=True, default=0)
    picture_url = models.CharField(max_length=400, null=True, default='/static/img/avatar-1.png')
    facebook_pic = models.BooleanField(default=True)

    def __unicode__(self):  # Python 3: def __str__(self):
        return unicode(self.user) or u''


def create_user_profile(sender, instance, created, **kwargs):
    if created:
        Player.objects.create(user=instance)

post_save.connect(create_user_profile, sender=User)
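
# Editor's illustration (assumption, not in the original file): the signal
# above means creating a User transparently creates its Player profile.
def _demo_create_user_profile():
    # Requires configured Django settings and a database.
    u = User.objects.create_user('demo', 'demo@example.com', 'secret')
    assert Player.objects.filter(user=u).exists()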

class CouponHistory(models.Model):
    title = models.CharField(max_length=100, null=True)
    # coupon = models.ForeignKey(Coupon, related_name='coupon')
    shop = models.CharField(max_length=100, null=True)
    player = models.ForeignKey(Player)
    # shop = models.ForeignKey(Shop, related_name='created')


class EventHistory(models.Model):
    date = models.DateTimeField(null=True)
    title = models.CharField(max_length=100, null=True)
    # event_done = models.ForeignKey(Event, related_name='created')
    organization = models.CharField(max_length=100, null=True)
    player = models.ForeignKey(Player)
    points = models.DecimalField(max_digits=4, decimal_places=0)
    event_type = models.CharField(max_length=50, choices=constants.TYPE, default=None, null=True)
    # organization = models.ForeignKey(Organization, related_name='organization')


class Idea(models.Model):
    title = models.CharField(max_length=100, null=True)
    author = models.CharField(max_length=100, null=True)
    description = models.TextField(max_length=500, null=True)
    points = models.DecimalField(max_digits=4, decimal_places=0)
    experience = models.DecimalField(max_digits=5, decimal_places=0, null=True, default=0)


class Comment(models.Model):
    comment = models.TextField(max_length=500, null=True)
    commenter = models.ForeignKey(Player)
    event = models.ForeignKey('charity.Event')
    date = models.DateTimeField(null=True, default=datetime.datetime.now)


class Feed(models.Model):
    player = models.ForeignKey(Player)
    event = models.ForeignKey('charity.Event')
    likes = models.DecimalField(max_digits=4, decimal_places=0, default=0)
    date = models.DateTimeField(null=True, default=datetime.datetime.now)


class CommentFeed(models.Model):
    comment = models.TextField(max_length=500, null=True)
    commenter = models.ForeignKey(Player)
    feed = models.ForeignKey(Feed)
    date = models.DateTimeField(null=True, default=datetime.datetime.now)


class Badge(models.Model):
    player = models.ForeignKey(Player)
    title = models.CharField(max_length=100, null=True, default='Beginner!')
    icon = models.CharField(max_length=50, choices=constants.ICON, default='fa-thumbs-o-up')


'''
def assign_badge(sender, instance, created, **kwargs):
    if created:
        badge = Badge.objects.create(player=instance.player)
        type_event = ['Animals', 'Food', 'Art', 'Shopping', 'Elders', 'Environment']
        for tt in type_event:
            post_save.connect(assign_badge, sender=EventHistory) ''' | [
"[email protected]"
]
| |
fec6a3aa31a220c668b93a5b34d034e735fbae41 | 233087c1eb99e1d13f80de6f43d2cc3264aa9ca6 | /polyaxon_cli/cli/version.py | e1a7f0433468d235fe651db2f75bb5fd16ca9f7f | [
"MIT"
]
| permissive | DXist/polyaxon-cli | e33cd3b3633df5b21b9eb3cc48d7a6affed8e4ec | 0b01512548f9faea77fb60cb7c6bd327e0638b13 | refs/heads/master | 2020-07-08T07:02:43.248549 | 2019-08-15T16:00:05 | 2019-08-15T16:04:31 | 203,601,306 | 0 | 0 | MIT | 2019-08-21T14:27:56 | 2019-08-21T14:27:56 | null | UTF-8 | Python | false | false | 5,988 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function

import sys

import click

from polyaxon_deploy.operators.pip import PipOperator

from polyaxon_cli import pkg
from polyaxon_cli.client import PolyaxonClient
from polyaxon_cli.client.exceptions import (
    AuthorizationError,
    PolyaxonHTTPError,
    PolyaxonShouldExitError
)
from polyaxon_cli.logger import clean_outputs, logger
from polyaxon_cli.managers.auth import AuthConfigManager
from polyaxon_cli.managers.cli import CliConfigManager
from polyaxon_cli.utils import indentation
from polyaxon_cli.utils.formatting import Printer, dict_tabulate
from polyaxon_client.exceptions import PolyaxonClientException

PROJECT_CLI_NAME = "polyaxon-cli"

def pip_upgrade(project_name=PROJECT_CLI_NAME):
    PipOperator.execute(['install', '--upgrade', project_name], stream=True)
    click.echo('polyaxon-cli upgraded.')


def session_expired():
    AuthConfigManager.purge()
    CliConfigManager.purge()
    click.echo('Session has expired, please try again.')
    sys.exit(1)


def get_version(package):
    import pkg_resources

    try:
        return pkg_resources.get_distribution(package).version
    except pkg_resources.DistributionNotFound:
        logger.error('`%s` is not installed', package)


def get_current_version():
    return pkg.VERSION

def get_server_version():
    try:
        return PolyaxonClient().version.get_cli_version()
    except AuthorizationError:
        session_expired()
        sys.exit(1)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not get cli version.')
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)


def get_log_handler():
    try:
        return PolyaxonClient().version.get_log_handler()
    except AuthorizationError:
        session_expired()
        sys.exit(1)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        # Message fixed from 'Could not get cli version.' (copy-paste slip).
        Printer.print_error('Could not get log handler.')
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)

def check_cli_version():
    """Check if the current cli version satisfies the server requirements."""
    if not CliConfigManager.should_check():
        return

    from distutils.version import LooseVersion  # pylint:disable=import-error

    server_version = get_server_version()
    current_version = get_current_version()
    CliConfigManager.reset(current_version=current_version,
                           min_version=server_version.min_version)

    if LooseVersion(current_version) < LooseVersion(server_version.min_version):
        click.echo("""Your version of CLI ({}) is no longer compatible with server.""".format(
            current_version))
        if click.confirm("Do you want to upgrade to "
                         "version {} now?".format(server_version.latest_version)):
            pip_upgrade()
            sys.exit(0)
        else:
            indentation.puts("You can manually run:")
            with indentation.indent(4):
                indentation.puts("pip install -U polyaxon-cli")
            indentation.puts(
                "to upgrade to the latest version `{}`".format(server_version.latest_version))
            sys.exit(0)
    elif LooseVersion(current_version) < LooseVersion(server_version.latest_version):
        indentation.puts("New version of CLI ({}) is now available. To upgrade run:".format(
            server_version.latest_version
        ))
        with indentation.indent(4):
            indentation.puts("pip install -U polyaxon-cli")
    elif LooseVersion(current_version) > LooseVersion(server_version.latest_version):
        indentation.puts("Your version of CLI ({}) is ahead of the latest version "
                         "supported by Polyaxon Platform ({}) on your cluster, "
                         "and might be incompatible.".format(current_version,
                                                             server_version.latest_version))
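
# Editor's note (illustrative, not in the original module): LooseVersion
# compares dotted version strings segment by segment, which is what the
# checks above rely on. A quick sanity check:
#
#   from distutils.version import LooseVersion
#   assert LooseVersion('0.3.9') < LooseVersion('0.4.0')
#   assert LooseVersion('1.0.0') < LooseVersion('1.0.1')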

@click.command()
@click.option('--cli', is_flag=True, default=False, help='Version of the Polyaxon cli.')
@click.option('--platform', is_flag=True, default=False, help='Version of the Polyaxon platform.')
@clean_outputs
def version(cli, platform):
    """Print the current version of the cli and platform."""
    version_client = PolyaxonClient().version
    cli = cli or not any([cli, platform])
    if cli:
        try:
            server_version = version_client.get_cli_version()
        except AuthorizationError:
            session_expired()
            sys.exit(1)
        except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
            Printer.print_error('Could not get cli version.')
            Printer.print_error('Error message `{}`.'.format(e))
            sys.exit(1)
        cli_version = get_current_version()
        Printer.print_header('Current cli version: {}.'.format(cli_version))
        Printer.print_header('Supported cli versions:')
        dict_tabulate(server_version.to_dict())

    if platform:
        try:
            platform_version = version_client.get_platform_version()
        except AuthorizationError:
            session_expired()
            sys.exit(1)
        except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
            Printer.print_error('Could not get platform version.')
            Printer.print_error('Error message `{}`.'.format(e))
            sys.exit(1)
        chart_version = version_client.get_chart_version()
        Printer.print_header('Current platform version: {}.'.format(chart_version.version))
        Printer.print_header('Supported platform versions:')
        dict_tabulate(platform_version.to_dict())
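
# Editor's note (assumption): once registered on the main click group, these
# commands are invoked from a shell as, e.g.:
#
#   polyaxon version --cli
#   polyaxon version --platform
#   polyaxon upgrade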

@click.command()
@clean_outputs
def upgrade():
    """Install/Upgrade polyaxon-cli."""
    try:
        pip_upgrade(PROJECT_CLI_NAME)
    except Exception as e:
        logger.error(e)
| [
"[email protected]"
]
| |
6736e09edf00b418607e71443127f44af7ec6cea | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/DXS3326GSR-SWL3MGMT-MIB.py | b6967e401b97f5dc7cbce020ce7e6f82e3f21569 | [
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 43,774 | py | #
# PySNMP MIB module DXS3326GSR-SWL3MGMT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DXS3326GSR-SWL3MGMT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:40:25 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
DesignatedRouterPriority, RouterID, Status, TOSType, HelloRange, Metric, UpToMaxAge, AreaID, PositiveInteger = mibBuilder.importSymbols("OSPF-MIB", "DesignatedRouterPriority", "RouterID", "Status", "TOSType", "HelloRange", "Metric", "UpToMaxAge", "AreaID", "PositiveInteger")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ModuleIdentity, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Integer32, ObjectIdentity, Counter32, MibIdentifier, Unsigned32, Bits, NotificationType, Counter64, IpAddress, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Integer32", "ObjectIdentity", "Counter32", "MibIdentifier", "Unsigned32", "Bits", "NotificationType", "Counter64", "IpAddress", "TimeTicks")
TruthValue, PhysAddress, TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "PhysAddress", "TextualConvention", "RowStatus", "DisplayString")
dxs3326GSR, = mibBuilder.importSymbols("SW-PROJECTX-SRPRIMGMT-MIB", "dxs3326GSR")
swL3MgmtMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3))
if mibBuilder.loadTexts: swL3MgmtMIB.setLastUpdated('0007150000Z')
if mibBuilder.loadTexts: swL3MgmtMIB.setOrganization(' ')
class NodeAddress(OctetString):
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
    fixedLength = 6


class NetAddress(OctetString):
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(4, 4)
    fixedLength = 4
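
# Editor's note (illustrative, not part of the generated module): these two
# textual conventions are fixed-size octet strings. NodeAddress carries a
# 6-octet (MAC-style) node identifier and NetAddress a 4-octet (IPv4-style)
# network number; e.g. NodeAddress(hexValue='0050ba000001') satisfies the
# size constraint, while shorter values are rejected.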
swL3DevMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1))
swL3IpMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2))
swL3RelayMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3))
swL3IpCtrlMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1))
swL3IpFdbMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2))
swL3RelayBootpMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1))
swL3RelayDnsMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2))
swL3DevCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1))
swL3DevCtrlRIPState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlRIPState.setStatus('current')
swL3DevCtrlOSPFState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlOSPFState.setStatus('current')
swL3DevCtrlDVMRPState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlDVMRPState.setStatus('current')
swL3DevCtrlPIMState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlPIMState.setStatus('current')
swL3DevCtrlVRRPState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlVRRPState.setStatus('current')
swL3IpCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1), )
if mibBuilder.loadTexts: swL3IpCtrlTable.setStatus('current')
swL3IpCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpCtrlIpAddr"))
if mibBuilder.loadTexts: swL3IpCtrlEntry.setStatus('current')
swL3IpCtrlIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 1), IpAddress())
if mibBuilder.loadTexts: swL3IpCtrlIpAddr.setStatus('current')
swL3IpCtrlIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpCtrlIfIndex.setStatus('current')
swL3IpCtrlInterfaceName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 12))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlInterfaceName.setStatus('current')
swL3IpCtrlIpSubnetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlIpSubnetMask.setStatus('current')
swL3IpCtrlVlanName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlVlanName.setStatus('current')
swL3IpCtrlMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("manual", 2), ("bootp", 3), ("dhcp", 4)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlMode.setStatus('current')
swL3IpCtrlSecondary = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 7), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlSecondary.setStatus('current')
swL3IpCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlState.setStatus('current')
swL3IpCtrlOperState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("up", 2), ("down", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpCtrlOperState.setStatus('current')
swL3IpFdbInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1), )
if mibBuilder.loadTexts: swL3IpFdbInfoTable.setStatus('current')
swL3IpFdbInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpFdbInfoIpAddr"))
if mibBuilder.loadTexts: swL3IpFdbInfoEntry.setStatus('current')
swL3IpFdbInfoIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoIpAddr.setStatus('current')
swL3IpFdbInfoIpSubnetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoIpSubnetMask.setStatus('current')
swL3IpFdbInfoPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoPort.setStatus('current')
swL3IpFdbInfoType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("static", 2), ("dynamic", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoType.setStatus('current')
swL3IpArpAgingTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3IpArpAgingTime.setStatus('current')
swL3IpStaticRouteTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5), )
if mibBuilder.loadTexts: swL3IpStaticRouteTable.setStatus('current')
swL3IpStaticRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpStaticRouteDest"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpStaticRouteMask"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpStaticRouteBkupState"))
if mibBuilder.loadTexts: swL3IpStaticRouteEntry.setStatus('current')
swL3IpStaticRouteDest = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpStaticRouteDest.setStatus('current')
swL3IpStaticRouteMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpStaticRouteMask.setStatus('current')
swL3IpStaticRouteBkupState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("backup", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpStaticRouteBkupState.setStatus('current')
swL3IpStaticRouteNextHop = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpStaticRouteNextHop.setStatus('current')
swL3IpStaticRouteMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpStaticRouteMetric.setStatus('current')
swL3IpStaticRouteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("valid", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpStaticRouteStatus.setStatus('current')
swL3RelayBootpState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpState.setStatus('current')
swL3RelayBootpHopCount = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpHopCount.setStatus('current')
swL3RelayBootpTimeThreshold = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpTimeThreshold.setStatus('current')
swL3RelayBootpCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4), )
if mibBuilder.loadTexts: swL3RelayBootpCtrlTable.setStatus('current')
swL3RelayBootpCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayBootpCtrlInterfaceName"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayBootpCtrlServer"))
if mibBuilder.loadTexts: swL3RelayBootpCtrlEntry.setStatus('current')
swL3RelayBootpCtrlInterfaceName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 12))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayBootpCtrlInterfaceName.setStatus('current')
swL3RelayBootpCtrlServer = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayBootpCtrlServer.setStatus('current')
swL3RelayBootpCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("valid", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpCtrlState.setStatus('current')
swL3RelayDnsState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsState.setStatus('current')
swL3RelayDnsPrimaryServer = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsPrimaryServer.setStatus('current')
swL3RelayDnsSecondaryServer = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsSecondaryServer.setStatus('current')
swL3RelayDnsCacheState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsCacheState.setStatus('current')
swL3RelayDnsStaticTableState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsStaticTableState.setStatus('current')
swL3RelayDnsCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6), )
if mibBuilder.loadTexts: swL3RelayDnsCtrlTable.setStatus('current')
swL3RelayDnsCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayDnsCtrlDomainName"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayDnsCtrlIpAddr"))
if mibBuilder.loadTexts: swL3RelayDnsCtrlEntry.setStatus('current')
swL3RelayDnsCtrlDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayDnsCtrlDomainName.setStatus('current')
swL3RelayDnsCtrlIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayDnsCtrlIpAddr.setStatus('current')
swL3RelayDnsCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("valid", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsCtrlState.setStatus('current')
swL3Md5Table = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4), )
if mibBuilder.loadTexts: swL3Md5Table.setStatus('current')
swL3Md5Entry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3Md5KeyId"))
if mibBuilder.loadTexts: swL3Md5Entry.setStatus('current')
swL3Md5KeyId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3Md5KeyId.setStatus('current')
swL3Md5Key = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3Md5Key.setStatus('current')
swL3Md5RowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3Md5RowStatus.setStatus('current')
swL3RouteRedistriTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5), )
if mibBuilder.loadTexts: swL3RouteRedistriTable.setStatus('current')
swL3RouteRedistriEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RouteRedistriSrcProtocol"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RouteRedistriDstProtocol"))
if mibBuilder.loadTexts: swL3RouteRedistriEntry.setStatus('current')
swL3RouteRedistriSrcProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("other", 1), ("rip", 2), ("ospf", 3), ("static", 4), ("local", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RouteRedistriSrcProtocol.setStatus('current')
swL3RouteRedistriDstProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("rip", 2), ("ospf", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RouteRedistriDstProtocol.setStatus('current')
swL3RouteRedistriType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("other", 1), ("all", 2), ("type-1", 3), ("type-2", 4), ("internal", 5), ("external", 6), ("inter-E1", 7), ("inter-E2", 8), ("extType1", 9), ("extType2", 10)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3RouteRedistriType.setStatus('current')
swL3RouteRedistriMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777214))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3RouteRedistriMetric.setStatus('current')
swL3RouteRedistriRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3RouteRedistriRowStatus.setStatus('current')
swL3OspfHostTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6), )
if mibBuilder.loadTexts: swL3OspfHostTable.setStatus('current')
swL3OspfHostEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3OspfHostIpAddress"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3OspfHostTOS"))
if mibBuilder.loadTexts: swL3OspfHostEntry.setStatus('current')
swL3OspfHostIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3OspfHostIpAddress.setStatus('current')
swL3OspfHostTOS = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 2), TOSType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3OspfHostTOS.setStatus('current')
swL3OspfHostMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 3), Metric()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3OspfHostMetric.setStatus('current')
swL3OspfHostAreaID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 4), AreaID()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3OspfHostAreaID.setStatus('current')
swL3OspfHostStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3OspfHostStatus.setStatus('current')
swL3ospfVirtIfTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7), )
if mibBuilder.loadTexts: swL3ospfVirtIfTable.setStatus('current')
swL3ospfVirtIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfVirtIfAreaId"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfVirtIfNeighbor"))
if mibBuilder.loadTexts: swL3ospfVirtIfEntry.setStatus('current')
swL3ospfVirtIfAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfAreaId.setStatus('current')
swL3ospfVirtIfNeighbor = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 2), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfNeighbor.setStatus('current')
swL3ospfVirtIfTransitDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 3), UpToMaxAge().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfTransitDelay.setStatus('current')
swL3ospfVirtIfRetransInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 4), UpToMaxAge().clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfRetransInterval.setStatus('current')
swL3ospfVirtIfHelloInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 5), HelloRange().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfHelloInterval.setStatus('current')
swL3ospfVirtIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 6), PositiveInteger().clone(60)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfRtrDeadInterval.setStatus('current')
swL3ospfVirtIfState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4))).clone(namedValues=NamedValues(("down", 1), ("pointToPoint", 4))).clone('down')).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfState.setStatus('current')
swL3ospfVirtIfEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfEvents.setStatus('current')
swL3ospfVirtIfAuthType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfAuthType.setStatus('current')
swL3ospfVirtIfAuthKey = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256)).clone(hexValue="0000000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfAuthKey.setStatus('current')
swL3ospfVirtIfAuthKeyID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfAuthKeyID.setStatus('current')
swL3ospfVirtIfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 12), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfStatus.setStatus('current')
swL3ospfIfTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8), )
if mibBuilder.loadTexts: swL3ospfIfTable.setStatus('current')
swL3ospfIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfIfIpAddress"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfAddressLessIf"))
if mibBuilder.loadTexts: swL3ospfIfEntry.setStatus('current')
swL3ospfIfIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfIpAddress.setStatus('current')
swL3ospfAddressLessIf = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfAddressLessIf.setStatus('current')
swL3ospfIfAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 3), AreaID().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAreaId.setStatus('current')
swL3ospfIfType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 5))).clone(namedValues=NamedValues(("broadcast", 1), ("nbma", 2), ("pointToPoint", 3), ("pointToMultipoint", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfType.setStatus('current')
swL3ospfIfAdminStat = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 5), Status().clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAdminStat.setStatus('current')
swL3ospfIfRtrPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 6), DesignatedRouterPriority().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfRtrPriority.setStatus('current')
swL3ospfIfTransitDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 7), UpToMaxAge().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfTransitDelay.setStatus('current')
swL3ospfIfRetransInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 8), UpToMaxAge().clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfRetransInterval.setStatus('current')
swL3ospfIfHelloInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 9), HelloRange().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfHelloInterval.setStatus('current')
swL3ospfIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 10), PositiveInteger().clone(40)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfRtrDeadInterval.setStatus('current')
swL3ospfIfPollInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 11), PositiveInteger().clone(120)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfPollInterval.setStatus('current')
swL3ospfIfState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("down", 1), ("loopback", 2), ("waiting", 3), ("pointToPoint", 4), ("designatedRouter", 5), ("backupDesignatedRouter", 6), ("otherDesignatedRouter", 7))).clone('down')).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfState.setStatus('current')
swL3ospfIfDesignatedRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 13), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfDesignatedRouter.setStatus('current')
swL3ospfIfBackupDesignatedRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 14), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfBackupDesignatedRouter.setStatus('current')
swL3ospfIfEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfEvents.setStatus('current')
swL3ospfIfMulticastForwarding = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("blocked", 1), ("multicast", 2), ("unicast", 3))).clone('blocked')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfMulticastForwarding.setStatus('current')
swL3ospfIfDemand = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 17), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfDemand.setStatus('current')
swL3ospfIfAuthType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAuthType.setStatus('current')
swL3ospfIfAuthKey = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256)).clone(hexValue="0000000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAuthKey.setStatus('current')
swL3ospfIfAuthKeyID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAuthKeyID.setStatus('current')
swL3ospfIfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 21), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfStatus.setStatus('current')
swL3RoutePreference = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9))
swL3RoutePreferenceRIP = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceRIP.setStatus('current')
swL3RoutePreferenceOSPFIntra = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFIntra.setStatus('current')
swL3RoutePreferenceStatic = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceStatic.setStatus('current')
swL3RoutePreferenceLocal = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 999))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RoutePreferenceLocal.setStatus('current')
swL3RoutePreferenceOSPFInter = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFInter.setStatus('current')
swL3RoutePreferenceOSPFExtT1 = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFExtT1.setStatus('current')
swL3RoutePreferenceOSPFExtT2 = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFExtT2.setStatus('current')
swL3PimMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11))
swL3PimCbsrInfoMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1))
swL3pimCbsrBootStrapPeriod = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCbsrBootStrapPeriod.setStatus('current')
swL3pimCbsrHashMaskLen = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCbsrHashMaskLen.setStatus('current')
swL3pimCbsrTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3), )
if mibBuilder.loadTexts: swL3pimCbsrTable.setStatus('current')
swL3pimCbsrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimCbsrInterface"))
if mibBuilder.loadTexts: swL3pimCbsrEntry.setStatus('current')
swL3pimCbsrInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: swL3pimCbsrInterface.setStatus('current')
swL3pimCbsrPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCbsrPriority.setStatus('current')
swL3pimCandidateRPMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2))
swL3pimCandidateRPHoldtime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCandidateRPHoldtime.setStatus('current')
swL3pimCandidateRPPriority = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCandidateRPPriority.setStatus('current')
swL3pimCandidateRPWildcardPrefixCnt = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCandidateRPWildcardPrefixCnt.setStatus('current')
swL3pimSptMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 3))
swL3pimLastHopSptThreshold = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 3, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimLastHopSptThreshold.setStatus('current')
swL3pimRPSptThreshold = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 3, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimRPSptThreshold.setStatus('current')
swL3pimRegChksumIncDataTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4), )
if mibBuilder.loadTexts: swL3pimRegChksumIncDataTable.setStatus('current')
swL3pimRegChksumIncDataEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3SwL3pimRegChksumIncDataRpAddr"))
if mibBuilder.loadTexts: swL3pimRegChksumIncDataEntry.setStatus('current')
swL3SwL3pimRegChksumIncDataRpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4, 1, 1), IpAddress())
if mibBuilder.loadTexts: swL3SwL3pimRegChksumIncDataRpAddr.setStatus('current')
swL3SwL3pimRegChksumIncDataState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3SwL3pimRegChksumIncDataState.setStatus('current')
swL3PimInfoMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5))
swL3pimRegisterProbeTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimRegisterProbeTime.setStatus('current')
swL3pimRegisterSuppressionTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimRegisterSuppressionTime.setStatus('current')
swL3pimInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3), )
if mibBuilder.loadTexts: swL3pimInfoTable.setStatus('current')
swL3pimInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimInterface"))
if mibBuilder.loadTexts: swL3pimInfoEntry.setStatus('current')
swL3pimInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: swL3pimInterface.setStatus('current')
swL3pimDRPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967294))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimDRPriority.setStatus('current')
swL3pimStaticRPTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6), )
if mibBuilder.loadTexts: swL3pimStaticRPTable.setStatus('current')
swL3pimStaticRPEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimStaticRPGroupAddress"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimStaticRPGroupMask"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimStaticRPAddress"))
if mibBuilder.loadTexts: swL3pimStaticRPEntry.setStatus('current')
swL3pimStaticRPGroupAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 1), IpAddress())
if mibBuilder.loadTexts: swL3pimStaticRPGroupAddress.setStatus('current')
swL3pimStaticRPGroupMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 2), IpAddress())
if mibBuilder.loadTexts: swL3pimStaticRPGroupMask.setStatus('current')
swL3pimStaticRPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 3), IpAddress())
if mibBuilder.loadTexts: swL3pimStaticRPAddress.setStatus('current')
swL3pimStaticRPRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3pimStaticRPRowStatus.setStatus('current')
mibBuilder.exportSymbols("DXS3326GSR-SWL3MGMT-MIB", swL3IpCtrlOperState=swL3IpCtrlOperState, swL3PimInfoMgmt=swL3PimInfoMgmt, swL3pimCbsrBootStrapPeriod=swL3pimCbsrBootStrapPeriod, swL3RouteRedistriRowStatus=swL3RouteRedistriRowStatus, swL3pimCandidateRPPriority=swL3pimCandidateRPPriority, swL3MgmtMIB=swL3MgmtMIB, swL3ospfIfDemand=swL3ospfIfDemand, swL3ospfIfDesignatedRouter=swL3ospfIfDesignatedRouter, swL3DevCtrlPIMState=swL3DevCtrlPIMState, swL3RelayDnsState=swL3RelayDnsState, swL3OspfHostEntry=swL3OspfHostEntry, swL3pimCbsrHashMaskLen=swL3pimCbsrHashMaskLen, PYSNMP_MODULE_ID=swL3MgmtMIB, swL3RouteRedistriDstProtocol=swL3RouteRedistriDstProtocol, swL3ospfIfPollInterval=swL3ospfIfPollInterval, swL3IpCtrlSecondary=swL3IpCtrlSecondary, swL3pimCandidateRPWildcardPrefixCnt=swL3pimCandidateRPWildcardPrefixCnt, swL3DevCtrlRIPState=swL3DevCtrlRIPState, swL3Md5Entry=swL3Md5Entry, swL3ospfIfAreaId=swL3ospfIfAreaId, NetAddress=NetAddress, swL3IpStaticRouteDest=swL3IpStaticRouteDest, swL3RelayDnsCtrlIpAddr=swL3RelayDnsCtrlIpAddr, swL3ospfIfBackupDesignatedRouter=swL3ospfIfBackupDesignatedRouter, swL3pimCandidateRPHoldtime=swL3pimCandidateRPHoldtime, swL3pimRegChksumIncDataTable=swL3pimRegChksumIncDataTable, swL3IpStaticRouteTable=swL3IpStaticRouteTable, swL3pimRPSptThreshold=swL3pimRPSptThreshold, swL3ospfIfAuthType=swL3ospfIfAuthType, swL3ospfIfAuthKeyID=swL3ospfIfAuthKeyID, swL3ospfIfMulticastForwarding=swL3ospfIfMulticastForwarding, swL3RoutePreference=swL3RoutePreference, swL3IpFdbInfoEntry=swL3IpFdbInfoEntry, swL3pimRegisterProbeTime=swL3pimRegisterProbeTime, swL3ospfVirtIfRetransInterval=swL3ospfVirtIfRetransInterval, swL3pimStaticRPTable=swL3pimStaticRPTable, swL3RoutePreferenceOSPFIntra=swL3RoutePreferenceOSPFIntra, swL3IpStaticRouteMask=swL3IpStaticRouteMask, swL3RelayDnsCtrlEntry=swL3RelayDnsCtrlEntry, swL3ospfIfState=swL3ospfIfState, swL3ospfIfAuthKey=swL3ospfIfAuthKey, swL3RoutePreferenceStatic=swL3RoutePreferenceStatic, swL3RelayDnsSecondaryServer=swL3RelayDnsSecondaryServer, swL3RouteRedistriMetric=swL3RouteRedistriMetric, swL3PimCbsrInfoMgmt=swL3PimCbsrInfoMgmt, swL3OspfHostTable=swL3OspfHostTable, swL3ospfIfAdminStat=swL3ospfIfAdminStat, swL3IpStaticRouteBkupState=swL3IpStaticRouteBkupState, swL3RelayBootpCtrlState=swL3RelayBootpCtrlState, swL3IpCtrlMgmt=swL3IpCtrlMgmt, swL3IpCtrlIpSubnetMask=swL3IpCtrlIpSubnetMask, swL3RelayBootpCtrlServer=swL3RelayBootpCtrlServer, swL3RelayDnsCtrlDomainName=swL3RelayDnsCtrlDomainName, swL3IpStaticRouteStatus=swL3IpStaticRouteStatus, swL3RouteRedistriSrcProtocol=swL3RouteRedistriSrcProtocol, swL3RelayBootpCtrlTable=swL3RelayBootpCtrlTable, swL3pimCbsrInterface=swL3pimCbsrInterface, swL3ospfIfStatus=swL3ospfIfStatus, swL3IpStaticRouteMetric=swL3IpStaticRouteMetric, swL3RelayDnsMgmt=swL3RelayDnsMgmt, swL3ospfVirtIfTransitDelay=swL3ospfVirtIfTransitDelay, swL3DevCtrlVRRPState=swL3DevCtrlVRRPState, swL3IpCtrlEntry=swL3IpCtrlEntry, swL3IpFdbMgmt=swL3IpFdbMgmt, swL3pimInterface=swL3pimInterface, swL3DevCtrlOSPFState=swL3DevCtrlOSPFState, swL3RelayBootpMgmt=swL3RelayBootpMgmt, swL3RouteRedistriType=swL3RouteRedistriType, swL3pimCbsrPriority=swL3pimCbsrPriority, swL3RelayDnsCtrlState=swL3RelayDnsCtrlState, swL3RelayBootpHopCount=swL3RelayBootpHopCount, swL3OspfHostStatus=swL3OspfHostStatus, swL3PimMgmt=swL3PimMgmt, swL3IpFdbInfoIpAddr=swL3IpFdbInfoIpAddr, NodeAddress=NodeAddress, swL3RelayBootpCtrlInterfaceName=swL3RelayBootpCtrlInterfaceName, swL3RoutePreferenceOSPFExtT2=swL3RoutePreferenceOSPFExtT2, swL3pimCbsrEntry=swL3pimCbsrEntry, 
swL3RoutePreferenceRIP=swL3RoutePreferenceRIP, swL3ospfVirtIfTable=swL3ospfVirtIfTable, swL3pimCbsrTable=swL3pimCbsrTable, swL3IpArpAgingTime=swL3IpArpAgingTime, swL3RouteRedistriEntry=swL3RouteRedistriEntry, swL3IpCtrlMode=swL3IpCtrlMode, swL3pimStaticRPGroupMask=swL3pimStaticRPGroupMask, swL3DevCtrl=swL3DevCtrl, swL3pimRegChksumIncDataEntry=swL3pimRegChksumIncDataEntry, swL3OspfHostAreaID=swL3OspfHostAreaID, swL3ospfVirtIfState=swL3ospfVirtIfState, swL3ospfIfEvents=swL3ospfIfEvents, swL3Md5KeyId=swL3Md5KeyId, swL3ospfIfIpAddress=swL3ospfIfIpAddress, swL3ospfIfRtrDeadInterval=swL3ospfIfRtrDeadInterval, swL3ospfVirtIfNeighbor=swL3ospfVirtIfNeighbor, swL3SwL3pimRegChksumIncDataState=swL3SwL3pimRegChksumIncDataState, swL3RelayDnsCacheState=swL3RelayDnsCacheState, swL3IpStaticRouteNextHop=swL3IpStaticRouteNextHop, swL3ospfIfHelloInterval=swL3ospfIfHelloInterval, swL3RoutePreferenceOSPFInter=swL3RoutePreferenceOSPFInter, swL3SwL3pimRegChksumIncDataRpAddr=swL3SwL3pimRegChksumIncDataRpAddr, swL3pimStaticRPEntry=swL3pimStaticRPEntry, swL3RelayDnsCtrlTable=swL3RelayDnsCtrlTable, swL3DevCtrlDVMRPState=swL3DevCtrlDVMRPState, swL3ospfIfEntry=swL3ospfIfEntry, swL3IpCtrlInterfaceName=swL3IpCtrlInterfaceName, swL3RelayDnsPrimaryServer=swL3RelayDnsPrimaryServer, swL3OspfHostMetric=swL3OspfHostMetric, swL3Md5Table=swL3Md5Table, swL3ospfVirtIfAuthType=swL3ospfVirtIfAuthType, swL3pimCandidateRPMgmt=swL3pimCandidateRPMgmt, swL3pimInfoEntry=swL3pimInfoEntry, swL3ospfVirtIfHelloInterval=swL3ospfVirtIfHelloInterval, swL3ospfIfRtrPriority=swL3ospfIfRtrPriority, swL3RelayBootpCtrlEntry=swL3RelayBootpCtrlEntry, swL3pimDRPriority=swL3pimDRPriority, swL3RelayBootpState=swL3RelayBootpState, swL3ospfAddressLessIf=swL3ospfAddressLessIf, swL3IpStaticRouteEntry=swL3IpStaticRouteEntry, swL3pimStaticRPGroupAddress=swL3pimStaticRPGroupAddress, swL3pimStaticRPAddress=swL3pimStaticRPAddress, swL3pimStaticRPRowStatus=swL3pimStaticRPRowStatus, swL3IpCtrlVlanName=swL3IpCtrlVlanName, swL3pimLastHopSptThreshold=swL3pimLastHopSptThreshold, swL3ospfVirtIfEvents=swL3ospfVirtIfEvents, swL3pimInfoTable=swL3pimInfoTable, swL3pimSptMgmt=swL3pimSptMgmt, swL3IpFdbInfoTable=swL3IpFdbInfoTable, swL3ospfIfType=swL3ospfIfType, swL3ospfVirtIfAreaId=swL3ospfVirtIfAreaId, swL3ospfIfTransitDelay=swL3ospfIfTransitDelay, swL3RouteRedistriTable=swL3RouteRedistriTable, swL3IpCtrlTable=swL3IpCtrlTable, swL3ospfIfTable=swL3ospfIfTable, swL3IpCtrlIpAddr=swL3IpCtrlIpAddr, swL3Md5RowStatus=swL3Md5RowStatus, swL3IpFdbInfoPort=swL3IpFdbInfoPort, swL3ospfVirtIfEntry=swL3ospfVirtIfEntry, swL3ospfVirtIfAuthKey=swL3ospfVirtIfAuthKey, swL3ospfVirtIfRtrDeadInterval=swL3ospfVirtIfRtrDeadInterval, swL3IpFdbInfoIpSubnetMask=swL3IpFdbInfoIpSubnetMask, swL3ospfIfRetransInterval=swL3ospfIfRetransInterval, swL3ospfVirtIfStatus=swL3ospfVirtIfStatus, swL3RelayMgmt=swL3RelayMgmt, swL3IpCtrlIfIndex=swL3IpCtrlIfIndex, swL3OspfHostIpAddress=swL3OspfHostIpAddress, swL3RoutePreferenceLocal=swL3RoutePreferenceLocal, swL3DevMgmt=swL3DevMgmt, swL3ospfVirtIfAuthKeyID=swL3ospfVirtIfAuthKeyID, swL3RelayBootpTimeThreshold=swL3RelayBootpTimeThreshold, swL3Md5Key=swL3Md5Key, swL3IpFdbInfoType=swL3IpFdbInfoType, swL3RoutePreferenceOSPFExtT1=swL3RoutePreferenceOSPFExtT1, swL3IpMgmt=swL3IpMgmt, swL3OspfHostTOS=swL3OspfHostTOS, swL3pimRegisterSuppressionTime=swL3pimRegisterSuppressionTime, swL3RelayDnsStaticTableState=swL3RelayDnsStaticTableState, swL3IpCtrlState=swL3IpCtrlState)
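
# Editor's sketch (assumption, not part of the generated MIB): modules like
# this are executed by pysnmp's MibBuilder rather than imported directly.
# A typical way to resolve one of the objects defined above:
#
#   from pysnmp.smi import builder
#   mib_builder = builder.MibBuilder()
#   mib_builder.addMibSources(builder.DirMibSource('.'))  # dir with this file
#   mib_builder.loadModules('DXS3326GSR-SWL3MGMT-MIB')
#   (aging_time,) = mib_builder.importSymbols(
#       'DXS3326GSR-SWL3MGMT-MIB', 'swL3IpArpAgingTime')
#   print(aging_time.getName())  # the object's OID tuple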
| [
"[email protected]"
]
| |
1be13eebadb30837a50498bf56c567f3ae17a166 | 4f00c6a08db5755b294bd519b9377866f5ff6c19 | /src/tests/google/appengine/api/xmpp/xmpp_service_stub.py | 8071acee686aa1637f430255e904c727b3a3af37 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | cooljeanius/cauliflowervest | 02035a8455b1dde469ebfd0b202c02456820a679 | a9bc209b610a927083bf16274d8451c6c45227bf | refs/heads/main | 2022-12-24T15:28:30.616604 | 2020-09-25T23:55:15 | 2020-09-25T23:55:15 | 303,812,548 | 1 | 0 | Apache-2.0 | 2023-09-04T16:48:46 | 2020-10-13T19:46:58 | Python | UTF-8 | Python | false | false | 5,161 | py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the XMPP API, writes messages to logs."""
import logging
import os
from google.appengine.api import apiproxy_stub
from google.appengine.api import app_identity
from google.appengine.api import xmpp
from google.appengine.api.xmpp import xmpp_service_pb
class XmppServiceStub(apiproxy_stub.APIProxyStub):
"""Python only xmpp service stub.
This stub does not use an XMPP network. It prints messages to the console
instead of sending any stanzas.
"""
def __init__(self, log=logging.info, service_name='xmpp'):
"""Initializer.
Args:
log: A logger, used for dependency injection.
service_name: Service name expected for all calls.
"""
super(XmppServiceStub, self).__init__(service_name)
self.log = log
def _Dynamic_GetPresence(self, request, response):
"""Implementation of XmppService::GetPresence.
Returns online if the first character of the JID comes before 'm' in the
alphabet, otherwise returns offline.
Args:
request: A PresenceRequest.
response: A PresenceResponse.
"""
jid = request.jid()
self._GetFrom(request.from_jid())
if jid[0] < 'm':
response.set_is_available(True)
else:
response.set_is_available(False)
def _Dynamic_SendMessage(self, request, response):
"""Implementation of XmppService::SendMessage.
Args:
request: An XmppMessageRequest.
response: An XmppMessageResponse .
"""
from_jid = self._GetFrom(request.from_jid())
self.log('Sending an XMPP Message:')
self.log(' From:')
self.log(' ' + from_jid)
self.log(' Body:')
self.log(' ' + request.body())
self.log(' Type:')
self.log(' ' + request.type())
self.log(' Raw Xml:')
self.log(' ' + str(request.raw_xml()))
self.log(' To JIDs:')
for jid in request.jid_list():
self.log(' ' + jid)
for jid in request.jid_list():
response.add_status(xmpp_service_pb.XmppMessageResponse.NO_ERROR)
def _Dynamic_SendInvite(self, request, response):
"""Implementation of XmppService::SendInvite.
Args:
request: An XmppInviteRequest.
response: An XmppInviteResponse .
"""
from_jid = self._GetFrom(request.from_jid())
self.log('Sending an XMPP Invite:')
self.log(' From:')
self.log(' ' + from_jid)
self.log(' To: ' + request.jid())
def _Dynamic_SendPresence(self, request, response):
"""Implementation of XmppService::SendPresence.
Args:
request: An XmppSendPresenceRequest.
response: An XmppSendPresenceResponse .
"""
from_jid = self._GetFrom(request.from_jid())
self.log('Sending an XMPP Presence:')
self.log(' From:')
self.log(' ' + from_jid)
self.log(' To: ' + request.jid())
if request.type():
self.log(' Type: ' + request.type())
if request.show():
self.log(' Show: ' + request.show())
if request.status():
self.log(' Status: ' + request.status())
def _GetFrom(self, requested):
"""Validates that the from JID is valid.
Args:
requested: The requested from JID.
Returns:
string, The from JID.
Raises:
xmpp.InvalidJidError if the requested JID is invalid.
"""
appid = app_identity.get_application_id()
if requested == None or requested == '':
return appid + '@appspot.com/bot'
node, domain, resource = ('', '', '')
at = requested.find('@')
if at == -1:
self.log('Invalid From JID: No \'@\' character found. JID: %s', requested)
raise xmpp.InvalidJidError()
node = requested[:at]
rest = requested[at+1:]
if rest.find('@') > -1:
self.log('Invalid From JID: Second \'@\' character found. JID: %s',
requested)
raise xmpp.InvalidJidError()
slash = rest.find('/')
if slash == -1:
domain = rest
resource = 'bot'
else:
domain = rest[:slash]
resource = rest[slash+1:]
if resource.find('/') > -1:
self.log('Invalid From JID: Second \'/\' character found. JID: %s',
requested)
raise xmpp.InvalidJidError()
if domain == 'appspot.com' and node == appid:
return node + '@' + domain + '/' + resource
elif domain == appid + '.appspotchat.com':
return node + '@' + domain + '/' + resource
self.log('Invalid From JID: Must be [email protected][/resource] or '
'[email protected][/resource]. JID: %s', requested)
raise xmpp.InvalidJidError()
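# Illustrative sketch (the app id "myapp" is hypothetical), derived from the
# checks above, of how _GetFrom normalizes "from" JIDs:
#   _GetFrom('')                           -> '[email protected]/bot'
#   _GetFrom('[email protected]')          -> '[email protected]/bot'
#   _GetFrom('[email protected]/room') -> '[email protected]/room'
#   _GetFrom('[email protected]')          -> raises xmpp.InvalidJidError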
| [
"[email protected]"
]
| |
2f50c5d8fbaf7359990e0e5264f56327e41de7cc | 0e2a58dce33bb412f19d019b98168d68af9bdeec | /model.py | 9d1eb91c29159a77b3e6e6ab01503bc436ef9099 | []
| no_license | Naveenprabaharan/Salary_Prediction | 0ea2810a177b7c0d3de8f4044970f35d51efa820 | c36cf19545667c4e330cb08bb273c45afa74b06a | refs/heads/master | 2023-08-23T16:16:10.834688 | 2021-10-23T15:39:28 | 2021-10-23T15:39:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import pickle
# MODEL TRAINING:
# dataset = pd.read_csv('__demo\Salary_Data.csv')
# X = dataset.iloc[:, :-1].values
# y = dataset.iloc[:, -1].values
# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 1/3, random_state = 0)
# regressor = LinearRegression()
# regressor.fit(X_train, y_train)
# X_test = input("enter year:")
# y_pred = regressor.predict([[X_test]])
# print(y_pred)
# MODEL DEPLOYEMENT:
def salaryPrediction(hrs):
model = pickle.load(open('__demo/reg_model.p','rb'))
year = hrs
y_out = model.predict([[year]])
return y_out
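# Minimal usage sketch (assumes __demo/reg_model.p holds the pickled
# LinearRegression from the commented-out training block above):
#   predicted_salary = salaryPrediction(5)  # -> 1-element array with the prediction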
| [
"[email protected]"
]
| |
0547381d2254684900dcf79141d5b76f75c00912 | 65fce73a1e6a36718238cdef09a17493b19532a0 | /8/swagger_client/__init__.py | d6ffda23006c1131e1217a75ca10767be0046ebb | [
"Apache-2.0"
]
| permissive | apitore/apitore-sdk-python | eb419589609efb86bd279cd1733c2a03cdc03680 | c0814c5635ddd09e9a20fcb155b62122bee41d33 | refs/heads/master | 2020-03-21T10:06:34.557781 | 2018-06-23T21:26:27 | 2018-06-23T21:26:27 | 138,434,217 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | # coding: utf-8
# flake8: noqa
"""
Word2Vec APIs
Word2Vec.<BR />[Endpoint] https://api.apitore.com/api/8 # noqa: E501
OpenAPI spec version: 1.0.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import apis into sdk package
from swagger_client.api.word_2_vec_controller_api import Word2VecControllerApi
# import ApiClient
from swagger_client.api_client import ApiClient
from swagger_client.configuration import Configuration
# import models into sdk package
from swagger_client.models.analogy_response_entity import AnalogyResponseEntity
from swagger_client.models.distance_entity import DistanceEntity
from swagger_client.models.distance_response_entity import DistanceResponseEntity
from swagger_client.models.similarity_response_entity import SimilarityResponseEntity
from swagger_client.models.vector_distance_response_entity import VectorDistanceResponseEntity
from swagger_client.models.word_vector_response_entity import WordVectorResponseEntity
| [
"[email protected]"
]
| |
82a8b3eab92e0cf6cf8a43a66206a1eef88a20d0 | 2212a32833776a5d5d2164d8efd11bd18bd3f768 | /tf_agents/networks/sequential_test.py | 001323efb5ecede3876c8fdbfa391b5b484a47d9 | [
"Apache-2.0"
]
| permissive | tensorflow/agents | f39805fb98ef9af712dcaff3ba49e1ac6d42804b | eca1093d3a047e538f17f6ab92ab4d8144284f23 | refs/heads/master | 2023-08-14T04:56:30.774797 | 2023-08-02T17:43:44 | 2023-08-02T17:44:09 | 157,936,206 | 2,755 | 848 | Apache-2.0 | 2023-07-26T02:35:32 | 2018-11-17T00:29:12 | Python | UTF-8 | Python | false | false | 9,811 | py | # coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.networks.sequential."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from tf_agents.distributions import utils as distribution_utils
from tf_agents.keras_layers import dynamic_unroll_layer
from tf_agents.keras_layers import inner_reshape
from tf_agents.networks import nest_map
from tf_agents.networks import network
from tf_agents.networks import sequential as sequential_lib
from tf_agents.policies import actor_policy
from tf_agents.policies import policy_saver
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import common
from tf_agents.utils import test_utils
FLAGS = flags.FLAGS
tfd = tfp.distributions
class ActorNetwork(network.Network):
def __init__(self, input_tensor_spec, output_tensor_spec):
num_actions = output_tensor_spec.shape.num_elements()
self._sequential = sequential_lib.Sequential(
[
tf.keras.layers.Dense(50),
tf.keras.layers.Dense(10),
tf.keras.layers.Dense(num_actions),
],
input_spec=input_tensor_spec,
) # pytype: disable=wrong-arg-types
super(ActorNetwork, self).__init__(
input_tensor_spec=input_tensor_spec,
state_spec=self._sequential.state_spec,
name='TestActorNetwork',
)
def call(self, observations, step_type=(), network_state=(), training=False):
return self._sequential(observations, network_state)
class SequentialTest(test_utils.TestCase):
def setUp(self):
if not common.has_eager_been_enabled():
self.skipTest('Only supported in TF2.x.')
super(SequentialTest, self).setUp()
def testCall(self):
sequential = sequential_lib.Sequential(
[tf.keras.layers.Dense(4, use_bias=False), tf.keras.layers.ReLU()],
input_spec=tf.TensorSpec((3,), tf.float32),
) # pytype: disable=wrong-arg-types
inputs = np.ones((2, 3))
out, state = sequential(inputs)
self.assertEqual(state, ())
self.evaluate(tf.compat.v1.global_variables_initializer())
out = self.evaluate(out)
weights = self.evaluate(sequential.layers[0].weights[0])
expected = np.dot(inputs, weights)
expected[expected < 0] = 0
self.assertAllClose(expected, out)
def testMixOfNonRecurrentAndRecurrent(self):
sequential = sequential_lib.Sequential(
[
tf.keras.layers.Dense(2),
tf.keras.layers.LSTM(2, return_state=True, return_sequences=True),
tf.keras.layers.RNN(
tf.keras.layers.StackedRNNCells(
[
tf.keras.layers.LSTMCell(1),
tf.keras.layers.LSTMCell(32),
],
),
return_state=True,
return_sequences=True,
),
# Convert inner dimension to [4, 4, 2] for convolution.
inner_reshape.InnerReshape([32], [4, 4, 2]),
tf.keras.layers.Conv2D(2, 3),
# Convert 3 inner dimensions to [?] for RNN.
inner_reshape.InnerReshape([None] * 3, [-1]),
tf.keras.layers.GRU(2, return_state=True, return_sequences=True),
dynamic_unroll_layer.DynamicUnroll(tf.keras.layers.LSTMCell(2)),
tf.keras.layers.Lambda(
lambda x: tfd.MultivariateNormalDiag(loc=x, scale_diag=x)
),
],
input_spec=tf.TensorSpec((3,), tf.float32),
) # pytype: disable=wrong-arg-types
self.assertEqual(
sequential.input_tensor_spec, tf.TensorSpec((3,), tf.float32)
)
output_spec = sequential.create_variables()
self.assertIsInstance(output_spec, distribution_utils.DistributionSpecV2)
output_event_spec = output_spec.event_spec
self.assertEqual(output_event_spec, tf.TensorSpec((2,), dtype=tf.float32))
tf.nest.map_structure(
self.assertEqual,
sequential.state_spec,
(
( # LSTM
tf.TensorSpec((2,), tf.float32),
tf.TensorSpec((2,), tf.float32),
),
( # RNN(StackedRNNCells)
[
tf.TensorSpec((1,), tf.float32),
tf.TensorSpec((1,), tf.float32),
],
[
tf.TensorSpec((32,), tf.float32),
tf.TensorSpec((32,), tf.float32),
],
),
# GRU
tf.TensorSpec((2,), tf.float32),
( # DynamicUnroll
tf.TensorSpec((2,), tf.float32),
tf.TensorSpec((2,), tf.float32),
),
),
)
inputs = tf.ones((8, 10, 3), dtype=tf.float32)
dist, _ = sequential(inputs)
outputs = dist.sample()
self.assertEqual(outputs.shape, tf.TensorShape([8, 10, 2]))
def testBuild(self):
sequential = sequential_lib.Sequential(
[tf.keras.layers.Dense(4, use_bias=False), tf.keras.layers.ReLU()]
)
inputs = np.ones((2, 3))
out, _ = sequential(inputs)
self.evaluate(tf.compat.v1.global_variables_initializer())
out = self.evaluate(out)
weights = self.evaluate(sequential.layers[0].weights[0])
expected = np.dot(inputs, weights)
expected[expected < 0] = 0
self.assertAllClose(expected, out)
def testAllZeroLengthStateSpecsShowAsEmptyState(self):
sequential = sequential_lib.Sequential(
[
nest_map.NestMap({
'a': tf.keras.layers.Dense(2),
'b': tf.keras.layers.Dense(3),
})
]
)
self.assertEqual(sequential.state_spec, ())
def testTrainableVariables(self):
sequential = sequential_lib.Sequential(
[tf.keras.layers.Dense(3), tf.keras.layers.Dense(4)]
)
sequential.create_variables(tf.TensorSpec(shape=(3, 2)))
self.evaluate(tf.compat.v1.global_variables_initializer())
variables = self.evaluate(sequential.trainable_variables)
self.assertLen(variables, 4)
self.assertLen(sequential.variables, 4)
self.assertTrue(sequential.trainable)
sequential.trainable = False
self.assertFalse(sequential.trainable)
self.assertEmpty(sequential.trainable_variables)
self.assertLen(sequential.variables, 4)
def testTrainableVariablesWithNonTrainableLayer(self):
non_trainable_layer = tf.keras.layers.Dense(4)
non_trainable_layer.trainable = False
sequential = sequential_lib.Sequential(
[tf.keras.layers.Dense(3), non_trainable_layer]
)
sequential.create_variables(tf.TensorSpec(shape=(3, 2)))
self.evaluate(tf.compat.v1.global_variables_initializer())
variables = self.evaluate(sequential.trainable_variables)
self.assertLen(variables, 2)
self.assertLen(sequential.variables, 4)
self.assertTrue(sequential.trainable)
sequential.trainable = False
self.assertFalse(sequential.trainable)
self.assertEmpty(sequential.trainable_variables)
self.assertLen(sequential.variables, 4)
def testTrainableVariablesNestedNetwork(self):
sequential_inner = sequential_lib.Sequential(
[tf.keras.layers.Dense(3), tf.keras.layers.Dense(4)]
)
sequential = sequential_lib.Sequential(
[tf.keras.layers.Dense(3), sequential_inner]
)
sequential.create_variables(tf.TensorSpec(shape=(3, 2)))
self.evaluate(tf.compat.v1.global_variables_initializer())
variables = self.evaluate(sequential.trainable_variables)
self.assertLen(variables, 6)
self.assertLen(sequential.variables, 6)
self.assertLen(sequential_inner.variables, 4)
self.assertTrue(sequential.trainable)
sequential.trainable = False
self.assertFalse(sequential.trainable)
self.assertEmpty(sequential.trainable_variables)
self.assertLen(sequential.variables, 6)
def testCopy(self):
sequential = sequential_lib.Sequential(
[tf.keras.layers.Dense(3), tf.keras.layers.Dense(4, use_bias=False)]
)
clone = type(sequential).from_config(sequential.get_config())
self.assertLen(clone.layers, 2)
for l1, l2 in zip(sequential.layers, clone.layers):
self.assertEqual(l1.dtype, l2.dtype)
self.assertEqual(l1.units, l2.units)
self.assertEqual(l1.use_bias, l2.use_bias)
def testPolicySaverCompatibility(self):
observation_spec = tensor_spec.TensorSpec(shape=(100,), dtype=tf.float32)
action_spec = tensor_spec.TensorSpec(shape=(5,), dtype=tf.float32)
time_step_tensor_spec = ts.time_step_spec(observation_spec)
net = ActorNetwork(observation_spec, action_spec)
net.create_variables()
policy = actor_policy.ActorPolicy(time_step_tensor_spec, action_spec, net)
sample = tensor_spec.sample_spec_nest(
time_step_tensor_spec, outer_dims=(5,)
)
policy.action(sample)
train_step = common.create_variable('train_step')
saver = policy_saver.PolicySaver(policy, train_step=train_step)
self.initialize_v1_variables()
with self.cached_session():
saver.save(os.path.join(FLAGS.test_tmpdir, 'sequential_model'))
if __name__ == '__main__':
test_utils.main()
| [
"[email protected]"
]
| |
d973653f84166354990b4df25cb162438aa56b9e | ed9b286cc1fba177abae3449540e95cde558b7e3 | /tests/unit/test_logging.py | 57a6cff2087deaf7e117e341b0311904534212d9 | [
"Apache-2.0"
]
| permissive | AndrewNg/anchore | e706f0a0c47e298be3295d1aa6d167ec58788cd2 | 308e91881be65dd546dbfc79b9d3982b501252a8 | refs/heads/master | 2020-09-29T04:58:03.114023 | 2019-12-09T20:07:08 | 2019-12-09T20:07:08 | 226,957,427 | 0 | 0 | Apache-2.0 | 2019-12-09T20:06:16 | 2019-12-09T20:06:15 | null | UTF-8 | Python | false | false | 2,428 | py | import logging
import unittest
import anchore.cli.logs
import anchore.cli.common
import anchore.util
class TestLogging (unittest.TestCase):
@staticmethod
def do_generic(some_logger, name=None):
assert isinstance(some_logger, logging.Logger)
some_logger.debug('debug message - ' + name)
some_logger.info('info message - ' + name)
some_logger.warn('warn message - ' + name)
some_logger.error('error message - ' + name)
try:
raise KeyError('Some key not found')
except KeyError:
some_logger.exception('Some exception caught - ' + name)
@staticmethod
def do_anchore_logging():
print '--ANCHORE LOGGER'
anchore_logger = logging.getLogger('anchore')
TestLogging.do_generic(anchore_logger, 'anchore')
@staticmethod
def do_non_anchore_logging():
print '--NON-ANCHORE LOGGER'
rand_logger = logging.getLogger('somepackage.somemodule')
TestLogging.do_generic(rand_logger, 'non-anchore')
@staticmethod
def reset_logging_config():
logging.root.setLevel('NOTSET')
for f in logging.root.filters:
logging.root.filters.remove(f)
for f in logging.root.handlers:
print 'Removing handler %s' % str(f)
logging.root.handlers.remove(f)
def test_quiet(self):
print '--STARTING TEST: quiet'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='quiet')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
def test_normal(self):
print '--STARTING TEST: normal'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='normal')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
def test_verbose(self):
print '--STARTING TEST: verbose'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='verbose')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
def test_debug(self):
print '--STARTING TEST: debug'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='debug')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
| [
"[email protected]"
]
| |
630a17eceb74a3892bd59ab00b61f09ff63f75c5 | 949ebd7bc2ab1526b3d535def4c90c80fab907f0 | /Decision_Tree_Classification/decision_tree_classification_f1score.py | 543282a5320bd6834cdfb946ee193307187f8799 | []
| no_license | mbhushan/ml | 1c5c0d79f56dbc374f5163a032900da14ca5bc58 | 89441760c489bb265339bcdcbe975888686fc8a5 | refs/heads/master | 2021-05-15T05:31:47.801454 | 2018-05-12T17:34:23 | 2018-05-12T17:34:23 | 116,192,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,358 | py | # Decision Tree Classification
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values
y = dataset.iloc[:, 4].values
# Splitting the dataset into the Training set and Test set
from sklearn.cross_validation import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.20, random_state = 0)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
# Fitting Decision Tree Classification to the Training set
from sklearn.tree import DecisionTreeClassifier
classifier = DecisionTreeClassifier(criterion = 'entropy', min_samples_split=25,
random_state = 1, max_depth=3,
min_samples_leaf=5, splitter='best')
classifier.fit(X_train, y_train)
# Predicting the Test set results
y_pred = classifier.predict(X_test)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_recall_fscore_support
from sklearn.metrics import f1_score
cm = confusion_matrix(y_test, y_pred)
tn, fp, fn, tp = cm.ravel()
print ('TN: %s, FP: %s, FN: %s, TP: %s' %(tn, fp, fn, tp))
precision, recall, fscore, support = precision_recall_fscore_support(y_test, y_pred, average='binary')
print ('fscore: %s' % (fscore))
print ('precision: %s' % (precision))
print ('recall: %s' % (recall))
# f1_score = f1_score(y_test, y_pred, average='binary')
# print ('F1 Score: ', str(f1_score))
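# How these relate to the confusion matrix above (worked out for reference):
#   precision = tp / (tp + fp)
#   recall    = tp / (tp + fn)
#   fscore    = 2 * precision * recall / (precision + recall)
# so each printed metric can be re-derived from tn, fp, fn, tp.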
# Visualising the Training set results
from matplotlib.colors import ListedColormap
X_set, y_set = X_train, y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Decision Tree Classification (Training set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
# Visualising the Test set results
from matplotlib.colors import ListedColormap
X_set, y_set = X_test, y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Decision Tree Classification (Test set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show() | [
"[email protected]"
]
| |
7cae145eeb1765e1dc1249a7c25c4f9b5a5a80c0 | 2612f336d667a087823234daf946f09b40d8ca3d | /python/lib/Lib/site-packages/django/utils/formats.py | c23a37cb2b51c0f4fca23725608c0e1326cc71ee | [
"Apache-2.0"
]
| permissive | tnorbye/intellij-community | df7f181861fc5c551c02c73df3b00b70ab2dd589 | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | refs/heads/master | 2021-04-06T06:57:57.974599 | 2018-03-13T17:37:00 | 2018-03-13T17:37:00 | 125,079,130 | 2 | 0 | Apache-2.0 | 2018-03-13T16:09:41 | 2018-03-13T16:09:41 | null | UTF-8 | Python | false | false | 6,513 | py | import decimal
import datetime
from django.conf import settings
from django.utils.translation import get_language, to_locale, check_for_language
from django.utils.importlib import import_module
from django.utils.encoding import smart_str
from django.utils import dateformat, numberformat, datetime_safe
from django.utils.safestring import mark_safe
# format_cache is a mapping from (format_type, lang) to the format string.
# By using the cache, it is possible to avoid running get_format_modules
# repeatedly.
_format_cache = {}
_format_modules_cache = {}
def iter_format_modules(lang):
"""
Does the heavy lifting of finding format modules.
"""
if check_for_language(lang) or settings.USE_L10N:
format_locations = ['django.conf.locale.%s']
if settings.FORMAT_MODULE_PATH:
format_locations.append(settings.FORMAT_MODULE_PATH + '.%s')
format_locations.reverse()
locale = to_locale(lang)
locales = set((locale, locale.split('_')[0]))
for location in format_locations:
for loc in locales:
try:
yield import_module('.formats', location % loc)
except ImportError:
pass
def get_format_modules(reverse=False):
"""
Returns an iterator over the format modules found
"""
lang = get_language()
modules = _format_modules_cache.setdefault(lang, list(iter_format_modules(lang)))
if reverse:
modules.reverse()
return modules
def get_format(format_type, lang=None, use_l10n=None):
"""
For a specific format type, returns the format for the current
language (locale), defaults to the format in the settings.
format_type is the name of the format, e.g. 'DATE_FORMAT'
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
format_type = smart_str(format_type)
if use_l10n or (use_l10n is None and settings.USE_L10N):
if lang is None:
lang = get_language()
cache_key = (format_type, lang)
try:
return _format_cache[cache_key] or getattr(settings, format_type)
except KeyError:
for module in get_format_modules():
try:
val = getattr(module, format_type)
_format_cache[cache_key] = val
return val
except AttributeError:
pass
_format_cache[cache_key] = None
return getattr(settings, format_type)
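# Example (sketch): with USE_L10N = True and active language 'de',
# get_format('DECIMAL_SEPARATOR') consults the locale format modules first
# (django.conf.locale.de.formats defines ','), falls back to
# settings.DECIMAL_SEPARATOR, and caches the result per (format_type, lang).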
def date_format(value, format=None, use_l10n=None):
"""
Formats a datetime.date or datetime.datetime object using a
localizable format
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
return dateformat.format(value, get_format(format or 'DATE_FORMAT', use_l10n=use_l10n))
def time_format(value, format=None, use_l10n=None):
"""
Formats a datetime.time object using a localizable format
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
return dateformat.time_format(value, get_format(format or 'TIME_FORMAT', use_l10n=use_l10n))
def number_format(value, decimal_pos=None, use_l10n=None):
"""
Formats a numeric value using localization settings
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
if use_l10n or (use_l10n is None and settings.USE_L10N):
lang = get_language()
else:
lang = None
return numberformat.format(
value,
get_format('DECIMAL_SEPARATOR', lang, use_l10n=use_l10n),
decimal_pos,
get_format('NUMBER_GROUPING', lang, use_l10n=use_l10n),
get_format('THOUSAND_SEPARATOR', lang, use_l10n=use_l10n),
)
def localize(value, use_l10n=None):
"""
Checks if value is a localizable type (date, number...) and returns it
formatted as a string using current locale format.
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
if isinstance(value, bool):
return mark_safe(unicode(value))
elif isinstance(value, (decimal.Decimal, float, int, long)):
return number_format(value, use_l10n=use_l10n)
elif isinstance(value, datetime.datetime):
return date_format(value, 'DATETIME_FORMAT', use_l10n=use_l10n)
elif isinstance(value, datetime.date):
return date_format(value, use_l10n=use_l10n)
elif isinstance(value, datetime.time):
return time_format(value, 'TIME_FORMAT', use_l10n=use_l10n)
else:
return value
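# Example (sketch, stock settings): localize(datetime.date(2014, 7, 9))
# formats with the default DATE_FORMAT 'N j, Y' -> 'July 9, 2014'.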
def localize_input(value, default=None):
"""
Checks if an input value is a localizable type and returns it
formatted with the appropriate formatting string of the current locale.
"""
if isinstance(value, (decimal.Decimal, float, int, long)):
return number_format(value)
elif isinstance(value, datetime.datetime):
value = datetime_safe.new_datetime(value)
format = smart_str(default or get_format('DATETIME_INPUT_FORMATS')[0])
return value.strftime(format)
elif isinstance(value, datetime.date):
value = datetime_safe.new_date(value)
format = smart_str(default or get_format('DATE_INPUT_FORMATS')[0])
return value.strftime(format)
elif isinstance(value, datetime.time):
format = smart_str(default or get_format('TIME_INPUT_FORMATS')[0])
return value.strftime(format)
return value
def sanitize_separators(value):
"""
Sanitizes a value according to the current decimal and
thousand separator setting. Used with form field input.
"""
if settings.USE_L10N:
decimal_separator = get_format('DECIMAL_SEPARATOR')
if isinstance(value, basestring):
parts = []
if decimal_separator in value:
value, decimals = value.split(decimal_separator, 1)
parts.append(decimals)
if settings.USE_THOUSAND_SEPARATOR:
parts.append(value.replace(get_format('THOUSAND_SEPARATOR'), ''))
else:
parts.append(value)
value = '.'.join(reversed(parts))
return value
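# Example (sketch): with USE_L10N = True, USE_THOUSAND_SEPARATOR = True and a
# locale using ',' as decimal and '.' as thousand separator,
# sanitize_separators('1.234,56') -> '1234.56'.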
| [
"[email protected]"
]
| |
112d2d8320692eba6ef70e6342254ab8abb37bd3 | 875921eb2b486923cfef0c2af249e8f456bdf0c9 | /config.py | 71df786a370703f818458fa7a87ac6837cb8727c | [
"Apache-2.0"
]
| permissive | baozixifan/ASRFrame | c0d3d477409b0e262fbf760860c6c7b6ddd59caf | 307596dc729f7611b270b9f6d279fefa05ef488d | refs/heads/master | 2020-06-23T07:06:07.973172 | 2019-07-18T16:26:07 | 2019-07-18T16:26:07 | 198,550,805 | 1 | 0 | null | 2019-07-24T03:23:48 | 2019-07-24T03:23:47 | null | UTF-8 | Python | false | false | 1,964 | py | import platform
import os
project_path = os.path.split(os.path.realpath(__file__))[0] #
thu_datapath = None # directory should contain data/, dev/, etc.
z200_datapath = None # directory should contain a pile of G..-style subdirectories
aishell_datapath = None # directory should contain wav/ and transcript/ subdirectories
prime_datapath = None # directory should contain one json file and one subdirectory
stcmd_datapath = None # directory should contain the audio files directly
wiki_datapath = None
if platform.system() == "Linux":
thu_datapath = "/data/voicerec/thchs30/data_thchs30"
z200_datapath = "/data/voicerec/z200"
aishell_datapath = "/data/voicerec/ALShell-1/data_aishell"
prime_datapath = "/data/voicerec/Primewords Chinese Corpus Set 1/primewords_md_2018_set1"
stcmd_datapath = "/data/voicerec/Free ST Chinese Mandarin Corpus/ST-CMDS-20170001_1-OS"
wiki_datapath = "/data/voicerec/wiki/wiki_corpus_2"
elif platform.system() == "Windows":
thu_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\thchs30"
z200_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\z200"
aishell_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\data_aishell"
prime_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\primewords_md_2018_set1"
stcmd_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\ST-CMDS-20170001_1-OS"
model_dir = os.path.join(project_path,"model") # ./model
dict_dir = os.path.join(project_path,"util","dicts") #./util/dicts
acoustic_model_dir = os.path.join(model_dir, "acoustic") # ./acoustic
language_model_dir = os.path.join(model_dir, "language") # ./language
loss_dir = "./loss_plot/"
acoustic_loss_dir = os.path.join(loss_dir,"acoustic") # ./loss_plot/acoustic
language_loss_dir = os.path.join(loss_dir,"language") # ./loss_plot/language
join_model_path = lambda x:os.path.join(model_dir, x)
chs_dict_path = os.path.join(dict_dir,"pure_chs.txt") # ./util/dicts/...
py_dict_path = os.path.join(dict_dir,"pure_py.txt") # ./util/dicts/... | [
"[email protected]"
]
| |
977922ac36268edcaa041e79fd97eed215a5b6ac | 179577ecdd7fda84ad970b3aad573a575fef56bc | /exercicios/ex034.py | cc2175d2d31399159743980d7251f1a8965d04fb | []
| no_license | Elvis-Lopes/Curso-em-video-Python | 6c12fa17a5c38c722a7c8e9677f6d9596bc5653c | 65f093975af9bd59c8aaa37606ba648b7ba1e1c4 | refs/heads/master | 2021-02-11T12:15:13.580496 | 2020-05-05T21:55:06 | 2020-05-05T21:55:06 | 244,490,886 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | sal = float(input('Insira o salário: '))
aumento = float()
if sal > 1250:
aumento = (sal*15)/100
sal = sal + aumento
else:
aumento = (sal*10)/100
sal = sal + aumento
print(f'New salary: R${sal:.2f}')
| [
"[email protected]"
]
| |
3c53e42d5a2371b1683e62b91621f013f2474ebd | 7e50b94379132a4156fd693bc73d640ff6752ed9 | /tests/conftest.py | 6981e1f250018bce62a66937c9462a5ed171ebab | [
"MIT"
]
| permissive | Pylons/plaster_pastedeploy | 145ac4c5310babf78ea7a0f7ad0639cc1b3f8a33 | c0a146cdfac61781057ecaaa1b7938ef53dae9af | refs/heads/main | 2023-06-12T04:08:37.382145 | 2023-01-03T02:44:28 | 2023-01-03T02:44:28 | 60,292,293 | 7 | 8 | MIT | 2023-09-09T04:19:56 | 2016-06-02T19:40:32 | Python | UTF-8 | Python | false | false | 515 | py | import os.path
import sys
import pkg_resources
import pytest
@pytest.fixture(scope="session")
def fake_packages():
# we'd like to keep this scope more focused but it's proven really
# difficult to fully monkeypatch pkg_resources and so for now we just
# install the packages for the duration of the test suite
test_dir = os.path.dirname(__file__)
info_dir = os.path.join(test_dir, "fake_packages", "FakeApp")
sys.path.insert(0, info_dir)
pkg_resources.working_set.add_entry(info_dir)
| [
"[email protected]"
]
| |
a73f8302a9249594d2ed5b77f6688c6768dc5b63 | 6a2b0db7d6c4ecef8434f3b35fcaef71eeb0d896 | /VENV/py3_venv/lib/python3.6/site-packages/pyntc/templates/__init__.py | f9a12282a24b39159158a59ac474ea95c08b289c | []
| no_license | pseudonode/nornircourse | 9bf890ecfadd1a08691f113e0cd2acadd4b9bffa | 1ad0372f9673de784233937cc15779bc2391e267 | refs/heads/master | 2022-11-09T20:18:22.714703 | 2019-10-04T08:06:42 | 2019-10-04T08:06:42 | 211,856,983 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 926 | py | import os
import textfsm
TEMPLATE_PATH_ENV_VAR = "NTC_TEMPLATES"
def get_structured_data(template_name, rawtxt):
"""Returns structured data given raw text using
TextFSM templates
"""
template_file = get_template(template_name)
with open(template_file) as template:
fsm = textfsm.TextFSM(template)
table = fsm.ParseText(rawtxt)
structured_data = []
for row in table:
temp_dict = {}
for index, element in enumerate(row):
temp_dict[fsm.header[index].lower()] = element
structured_data.append(temp_dict)
return structured_data
def get_template(template_name):
template_dir = get_template_dir()
return os.path.join(template_dir, template_name)
def get_template_dir():
try:
return os.environ[TEMPLATE_PATH_ENV_VAR]
except KeyError:
return os.path.realpath(os.path.dirname(__file__))
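# Usage sketch (the template filename and raw CLI output are hypothetical):
#   rows = get_structured_data('cisco_ios_show_version.template', raw_output)
#   # each row is a dict keyed by the template's lower-cased TextFSM headers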
| [
"[email protected]"
]
| |
3eb6d193517b5ddaa0e343c16513ad7fff94180c | 216ee8ab7ca468638aa2dc6ccb7f89ea76dd0b35 | /Project/Solutions/b_print_to_csv/scraper.py | 53d02a6e14c5fe2a789323404aefe9f094fd9c4d | []
| no_license | satishkbe/python-level-2 | 7b44d0f676bc830f0a94f823aeb6e0f628215628 | 834411f74d54019b9675a87004fd39072dc5fba0 | refs/heads/master | 2023-03-13T20:32:17.993938 | 2021-03-16T00:19:07 | 2021-03-16T00:19:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | py | import csv
import requests
from bs4 import BeautifulSoup
URL = "https://en.wikipedia.org/wiki/Member_states_of_the_United_Nations"
# Todo: Update with your info
name = None
email = None
assert name and email
headers = {'User-Agent': f'{name} ({email})'}
response = requests.get(URL, headers=headers)
assert response.status_code == 200, f'Response got {response.status_code}'
html_doc = response.text
soup = BeautifulSoup(html_doc, 'html.parser')
table = soup.find('table', class_='wikitable')
countries = []
for row in table.find_all('tr'):
name_column = row.find('td')
if name_column:
country_dict = {}
name_link = name_column.find_all('a')[1]
name = name_link.string
country_dict['Name'] = name
date_column = row.find_all('td')[1]
date_joined = date_column.span.text
country_dict['Date Joined'] = date_joined
countries.append(country_dict)
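# Each entry ends up shaped like, e.g.
#   {'Name': 'Afghanistan', 'Date Joined': '19 November 1946'}
# (the exact strings depend on the live Wikipedia table).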
with open('data/countries.csv', 'w') as file:
writer = csv.DictWriter(file, fieldnames=('Name', 'Date Joined'))
writer.writeheader()
writer.writerows(countries)
| [
"[email protected]"
]
| |
7b36da46e9525ecd5334eed1e12a60755f7ecced | 3691f0b571612fd550095af0d7c93f22d5a8061c | /ERP/ERP/urls.py | c6682a389f1e01fb60f097995d0cfef7300d1560 | []
| no_license | sambapython/db16 | 29db8c6be5a3628cd3063cc0d8e092ae8ea69d60 | 98d751ffc7277bb4e28f90b7cb470d667ab47593 | refs/heads/master | 2021-02-11T22:02:34.251113 | 2020-03-03T03:43:29 | 2020-03-03T03:43:29 | 244,532,780 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 931 | py | """ERP URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
path('admin/', admin.site.urls),
path('sales/', include("sales.urls")),
path('pur/', include("pur.urls")),
path('accounting/', include("accounting.urls")),
path('stock/', include("stock.urls")),
]
| [
"[email protected]"
]
| |
1571db30fcb1897a7c23bbc6da84249efffefe12 | 13cf11440998376d3b52a49f1e4fb8936c360ac4 | /chainer_chemistry/saliency/visualizer/table_visualizer.py | 4e27c19c5a807d63f9d5844832ecaecdfb772adc | [
"MIT"
]
| permissive | k-ishiguro/chainer-chemistry | 87e3db724de0e99042d9585cd4bd5fff38169339 | aec33496def16e76bdfbefa508ba01ab9f79a592 | refs/heads/master | 2021-07-06T22:58:20.127907 | 2019-02-04T02:51:34 | 2019-02-04T02:51:34 | 169,345,375 | 1 | 1 | MIT | 2020-07-30T06:04:13 | 2019-02-06T02:27:39 | Python | UTF-8 | Python | false | false | 3,289 | py | import numpy
import matplotlib.pyplot as plt
from chainer_chemistry.saliency.visualizer.base_visualizer import BaseVisualizer # NOQA
from chainer_chemistry.saliency.visualizer.visualizer_utils import abs_max_scaler # NOQA
class TableVisualizer(BaseVisualizer):
"""Saliency visualizer for table data"""
def visualize(self, saliency, feature_names=None, save_filepath=None,
num_visualize=-1, scaler=abs_max_scaler,
sort='descending', title='Feature Importance', color='b',
xlabel='Importance', bbox_inches='tight'):
"""Visualize or save `saliency` in bar plot.
Args:
saliency (numpy.ndarray): 1-dim saliency array (num_feature,)
feature_names (list or numpy.ndarray): Feature names of `saliency`
save_filepath (str or None): If specified, file is saved to path.
num_visualize (int): If positive value is set, only plot specified
number of features.
scaler (callable): function which takes `x` as input and outputs
scaled `x`, for plotting.
sort (str): Below sort options are supported.
none: not sort
ascending: plot in ascending order
descending: plot in descending order
title (str or None): title of plot
color (str): color of bar in plot
xlabel (str): x label legend
bbox_inches (str or Bbox or None): used for `plt.savefig` option.
"""
# --- type check ---
if saliency.ndim != 1:
raise ValueError("[ERROR] Unexpected value saliency.shape={}"
.format(saliency.shape))
num_total_feat = saliency.shape[0]
if feature_names is not None:
# type check
if len(feature_names) != num_total_feat:
raise ValueError(
"feature_names={} must have same length with `saliency`"
.format(feature_names))
else:
feature_names = numpy.arange(num_total_feat)
if sort == 'none':
indices = numpy.arange(num_total_feat)
elif sort == 'ascending':
indices = numpy.argsort(saliency)[::-1]
elif sort == 'descending':
indices = numpy.argsort(saliency)
else:
raise ValueError("[ERROR] Unexpected value sort={}".format(sort))
saliency = saliency[indices]
feature_names = numpy.asarray(feature_names)[indices]
if scaler is not None:
# Normalize to [-1, 1] or [0, 1]
saliency = scaler(saliency)
if num_visualize > 0:
saliency = saliency[:num_visualize]
if feature_names is not None:
feature_names = feature_names[:num_visualize]
else:
num_visualize = num_total_feat
plt.figure()
plt.clf()
if title is not None:
plt.title(title)
plt.barh(range(num_visualize), saliency, color=color, align='center')
plt.yticks(range(num_visualize), feature_names)
plt.xlabel(xlabel)
if save_filepath:
plt.savefig(save_filepath, bbox_inches=bbox_inches)
else:
plt.show()
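# Usage sketch with made-up numbers:
#   TableVisualizer().visualize(
#       saliency=numpy.array([0.9, -0.5, 0.2]),
#       feature_names=['dose', 'age', 'weight'],
#       save_filepath='importance.png')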
| [
"[email protected]"
]
| |
af23bfe2581b749cad1c35dc75d23d8ece968b41 | e756bfb5290cd336d20f0cf0cde04eec2a35caae | /src/actions/custom/utils/pick_card.py | 9847ba74cd50d6deef5415fb9cb4dde04b52eee6 | [
"MIT"
]
| permissive | StetHD/Lonabot | ff1b9113f1e8d6618a271a17752e86679e0c6274 | 615ce2c176607d6da71c84d38644d8aaaf0d3a0b | refs/heads/master | 2021-01-22T10:40:55.989293 | 2016-08-24T10:17:29 | 2016-08-24T10:17:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,101 | py | from actions.action_base import ActionBase
from random import choice, randint
class PickCardAction(ActionBase):
def __init__(self):
super().__init__(name="PICK A CARD FROM THE DECK",
keywords=['pick a card (INT)',
'pick (INT) cards?'])
def act(self, data):
times = data.get_match_int(1, fallback=1)
if times > 48: # Avoid too many
self.send_msg(data,
"there are 48 cards in a deck (no joker here), "
"how am i supposed to pick {}?!".format(times))
return
if times == 48:
self.send_msg(data, "there are 48 cards in the deck, BUT, if that makes you happy:".format(times))
# Add unique choices until we have enough
result = []
while len(result) < times:
# Pick a random value
value = randint(2, 14)
if value == 11:
value = 'jack'
elif value == 12:
value = 'queen'
elif value == 13:
value = 'king'
elif value == 14:
value = 'ace'
# And a random suit
suit = choice(['♠️', '♣️', '♥️', '♦️'])
current = '{}{}'.format(suit, value)
# Add the random value with the choice if it wasn't in yet
if current not in result:
result.append(current)
if times > 4: # If too many times, let's make a pretty table!
row_size = 4
spacing = 7
msg = '```\n'
for i in range(0, times, row_size):
# Join the results from i..i+row_size with a '.'
msg += '.'.join(str(result[j]).ljust(spacing, '.')
for j in range(i, i + row_size) if j < times)
msg += '\n'
msg += '```'
self.send_msg(data, msg, markdown=True)
else: # Else just join multiline
self.send_msg(data, '\n'.join(result), markdown=True)
| [
"[email protected]"
]
| |
b26f46287c34e1c977675f1a1da4680ab338880a | d578dc0955028ee86656f06423ceaa4a50c5ba92 | /Final Project/Centroid.py | 6915378daa05fc205509cf703953b537c1f5ae35 | []
| no_license | ohsuz/CSI4106-Repository | 1b17482d9215c0dcfff60edb90494833d11e069a | d81c5a2600b7c8bf67dd02fbd30138a7f8245e47 | refs/heads/master | 2023-06-11T10:29:33.009843 | 2021-06-26T18:02:50 | 2021-06-26T18:02:50 | 230,781,524 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,509 | py | #!/usr/bin/env python
# coding: utf-8
# In[33]:
#import packages
import glob
import os
import imutils
import cv2
#folder holding the input images (also used for the output file below)
path = "C:/Users/cjtol/CSI4106/Pocket"
#read the images from the folder
images = [cv2.imread(file) for file in glob.glob(os.path.join(path, "*.png"))]
#work on the first image in the folder
image = images[0]
#convert image to grayscale
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#blur to reduce high frequency noise
blurred = cv2.GaussianBlur(gray, (5, 5), 0)
#binarize the image with a threshold
thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
#thresh = cv2.adaptiveThreshold(blurred,255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 7, 2)[1]
#get the rock
cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
cnts = imutils.grab_contours(cnts)
# In[34]:
# compute the center of the contour
def get_contour(image):
for c in cnts:
M = cv2.moments(c)
if M["m00"] != 0:
cX = int(M["m10"] / M["m00"])
cY = int(M["m01"] / M["m00"])
else:
cX, cY = 0, 0
#draw contour and center of shape
cv2.drawContours(image, [c], -1, (0, 255, 0), 2)
cv2.circle(image, (cX, cY), 7, (255, 255, 255), -1)
cv2.putText(image, "center", (cX - 20, cY - 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
cv2.imwrite(os.path.join(path , "output.png"),image)
#display modified image
cv2.imshow("Image", image)
cv2.waitKey(0)
# In[ ]:
| [
"[email protected]"
]
| |
cd6c93b19fecb396cb0458d2561de26a3b8b110a | f40ad51a600e64f12710fc4847c356a35cd0a3d2 | /S08/oop/geometry.py | 69181f8bd1a3113ef4a969527338183d111a722a | []
| no_license | pymft/py-mft1 | 0aa1b854ea80e17e18c0eacc6f4dc7428a71af39 | f4657fe17e56b6f54bdc8b1076edfc388b85cb05 | refs/heads/master | 2020-05-09T09:32:59.020361 | 2019-07-05T13:59:19 | 2019-07-05T13:59:19 | 181,006,072 | 1 | 5 | null | 2019-05-03T20:06:03 | 2019-04-12T12:42:38 | Python | UTF-8 | Python | false | false | 940 | py | import math
class Parallelogram:
def __init__(self, a, b, angle):
        print("parallelogram", self.__class__)
self.a = a
self.b = b
self.angle = angle
@property
def area(self):
return self.a * self.b * math.sin(math.radians(self.angle))
@property
def perimeter(self):
return (self.a + self.b) * 2
class Diamond(Parallelogram):
def __init__(self, a, angle):
print("diamond")
super().__init__(a, a, angle)
class Rectangle(Parallelogram):
def __init__(self, w, h):
print("rect")
super().__init__(w, h, 90)
#
# class Square(Rectangle):
# def __init__(self, a):
# super().__init__(a, a)
class Square(Diamond):
def __init__(self, a):
print("square")
super().__init__(a, 90)
#
# r = Rectangle(10, 4)
# print(r.area, r.perimeter)
s = Diamond(7, 45)
print(s.area, s.perimeter)
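# Sanity check (sketch): Diamond(7, 45) is a rhombus with side 7, so
# area = 7 * 7 * sin(45°) ≈ 34.65 and perimeter = (7 + 7) * 2 = 28.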
#
# print(s, hex(id(s)))
| [
"[email protected]"
]
| |
a40183d91dc5ab741e0f4a91dfb2c05c5b73b66f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02628/s785161467.py | 3f9c154f8635eed3d0cb56f4538868a10a28d93d | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | n,k,*p=map(int,open(0).read().split());print(sum(sorted(p)[:k])) | [
"[email protected]"
]
| |
836236628e6676f74198897e9bb09b26ef6e8926 | 37b0de1e37bc313ad5c4735d288f0f2ccc6bca88 | /tests/test_paired.py | 9cf3c361cabf9ee88f9f5d935c87a3044e68aeb9 | [
"MIT"
]
| permissive | kingdynasty/cutadapt | a65b46eb192fbff00ab404324d5960f1ab22cb79 | 49aa33ac46c5183a39acddbe85d58103ff7eecb8 | refs/heads/master | 2020-03-28T14:04:13.798989 | 2018-09-10T09:21:48 | 2018-09-10T09:21:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,702 | py | import os.path
import shutil
import tempfile
from itertools import product
import pytest
from cutadapt.__main__ import main
from utils import run, assert_files_equal, datapath, cutpath, redirect_stderr, temporary_path
@pytest.fixture(params=[1, 2])
def cores(request):
return request.param
def run_paired(params, in1, in2, expected1, expected2, cores):
if type(params) is str:
params = params.split()
params += ['--cores', str(cores), '--buffer-size=512']
with temporary_path('tmp1-' + expected1) as p1:
with temporary_path('tmp2-' + expected2) as p2:
params += ['-o', p1, '-p', p2]
params += [datapath(in1), datapath(in2)]
assert main(params) is None
assert_files_equal(cutpath(expected1), p1)
assert_files_equal(cutpath(expected2), p2)
def run_interleaved(params, inpath1, inpath2=None, expected1=None, expected2=None, cores=1):
"""
Interleaved input or output (or both)
"""
assert not (inpath1 and inpath2 and expected1 and expected2)
assert not (expected2 and not expected1)
assert not (inpath2 and not inpath1)
if type(params) is str:
params = params.split()
params += ['--interleaved', '--cores', str(cores), '--buffer-size=512']
with temporary_path('tmp1-' + expected1) as tmp1:
params += ['-o', tmp1]
paths = [datapath(inpath1)]
if inpath2:
paths += [datapath(inpath2)]
if expected2:
with temporary_path('tmp2-' + expected2) as tmp2:
params += ['-p', tmp2]
assert main(params + paths) is None
assert_files_equal(cutpath(expected2), tmp2)
else:
assert main(params + paths) is None
assert_files_equal(cutpath(expected1), tmp1)
def test_paired_separate():
"""test separate trimming of paired-end reads"""
run('-a TTAGACATAT', 'paired-separate.1.fastq', 'paired.1.fastq')
run('-a CAGTGGAGTA', 'paired-separate.2.fastq', 'paired.2.fastq')
def test_paired_end_legacy(cores):
"""--paired-output, not using -A/-B/-G"""
# the -m 14 filters out one read, which should then also be filtered out in the second output file
# -q 10 should not change anything: qualities in file 1 are high enough,
# qualities in file 2 should not be inspected.
run_paired(
'-a TTAGACATAT -m 14 -q 10',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired.m14.1.fastq', expected2='paired.m14.2.fastq',
cores=cores
)
def test_untrimmed_paired_output():
with temporary_path("tmp-untrimmed.1.fastq") as untrimmed1:
with temporary_path("tmp-untrimmed.2.fastq") as untrimmed2:
run_paired(
['-a', 'TTAGACATAT',
'--untrimmed-output', untrimmed1,
'--untrimmed-paired-output', untrimmed2],
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired-trimmed.1.fastq', expected2='paired-trimmed.2.fastq',
cores=1
)
assert_files_equal(cutpath('paired-untrimmed.1.fastq'), untrimmed1)
assert_files_equal(cutpath('paired-untrimmed.2.fastq'), untrimmed2)
def test_explicit_format_with_paired():
# Use --format=fastq with input files whose extension is .txt
with temporary_path("paired.1.txt") as txt1:
with temporary_path("paired.2.txt") as txt2:
shutil.copyfile(datapath("paired.1.fastq"), txt1)
shutil.copyfile(datapath("paired.2.fastq"), txt2)
run_paired(
'--format=fastq -a TTAGACATAT -m 14',
in1=txt1, in2=txt2,
expected1='paired.m14.1.fastq',
expected2='paired.m14.2.fastq',
cores=1
)
def test_no_trimming_legacy():
# make sure that this doesn't divide by zero
main([
'-a', 'XXXXX', '-o', '/dev/null', '-p', '/dev/null',
datapath('paired.1.fastq'), datapath('paired.2.fastq')])
def test_no_trimming():
# make sure that this doesn't divide by zero
main([
'-a', 'XXXXX', '-A', 'XXXXX', '-o', '/dev/null', '-p', '/dev/null',
datapath('paired.1.fastq'), datapath('paired.2.fastq')])
def test_missing_file():
with pytest.raises(SystemExit):
with redirect_stderr():
main(['-a', 'XX', '--paired-output', 'out.fastq', datapath('paired.1.fastq')])
def test_first_too_short(cores):
with pytest.raises(SystemExit):
with temporary_path("truncated.1.fastq") as trunc1:
# Create a truncated file in which the last read is missing
with open(datapath('paired.1.fastq')) as f:
lines = f.readlines()
lines = lines[:-4]
with open(trunc1, 'w') as f:
f.writelines(lines)
with redirect_stderr():
main(
'-a XX -o /dev/null --paired-output out.fastq'.split()
+ ['--cores', str(cores)]
+ [trunc1, datapath('paired.2.fastq')]
)
def test_second_too_short(cores):
with pytest.raises(SystemExit):
with temporary_path("truncated.2.fastq") as trunc2:
# Create a truncated file in which the last read is missing
with open(datapath('paired.2.fastq')) as f:
lines = f.readlines()
lines = lines[:-4]
with open(trunc2, 'w') as f:
f.writelines(lines)
with redirect_stderr():
main('-a XX -o /dev/null --paired-output out.fastq'.split()
+ ['--cores', str(cores)]
+ [datapath('paired.1.fastq'), trunc2])
def test_unmatched_read_names(cores):
with pytest.raises(SystemExit):
with temporary_path("swapped.1.fastq") as swapped:
# Create a file in which reads 2 and 1 are swapped
with open(datapath('paired.1.fastq')) as f:
lines = f.readlines()
lines = lines[0:4] + lines[8:12] + lines[4:8] + lines[12:]
with open(swapped, 'w') as f:
f.writelines(lines)
with redirect_stderr():
main('-a XX -o out1.fastq --paired-output out2.fastq'.split()
+ ['--cores', str(cores)]
+ [swapped, datapath('paired.2.fastq')])
def test_p_without_o(cores):
"""Option -p given but -o missing"""
with pytest.raises(SystemExit):
main('-a XX -p /dev/null'.split()
+ ['--cores', str(cores)]
+ [datapath('paired.1.fastq'), datapath('paired.2.fastq')])
def test_paired_but_only_one_input_file(cores):
"""Option -p given but only one input file"""
with pytest.raises(SystemExit):
main('-a XX -o /dev/null -p /dev/null'.split()
+ ['--cores', str(cores)]
+ [datapath('paired.1.fastq')])
def test_legacy_minlength(cores):
"""Ensure -m is not applied to second read in a pair in legacy mode"""
run_paired(
'-a XXX -m 27',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired-m27.1.fastq', expected2='paired-m27.2.fastq',
cores=cores
)
def test_paired_end(cores):
"""single-pass paired-end with -m"""
run_paired(
'-a TTAGACATAT -A CAGTGGAGTA -m 14',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired.1.fastq', expected2='paired.2.fastq',
cores=cores
)
def test_paired_anchored_back_no_indels():
run_paired(
'-a BACKADAPTER$ -A BACKADAPTER$ -N --no-indels',
in1='anchored-back.fasta', in2='anchored-back.fasta',
expected1='anchored-back.fasta', expected2="anchored-back.fasta",
cores=1
)
def test_paired_end_qualtrim(cores):
"""single-pass paired-end with -q and -m"""
run_paired(
'-q 20 -a TTAGACATAT -A CAGTGGAGTA -m 14 -M 90',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='pairedq.1.fastq', expected2='pairedq.2.fastq',
cores=cores
)
def test_paired_end_qualtrim_swapped(cores):
"""single-pass paired-end with -q and -m, but files swapped"""
run_paired(
'-q 20 -a CAGTGGAGTA -A TTAGACATAT -m 14',
in1='paired.2.fastq', in2='paired.1.fastq',
expected1='pairedq.2.fastq', expected2='pairedq.1.fastq',
cores=cores
)
def test_paired_end_cut(cores):
run_paired(
'-u 3 -u -1 -U 4 -U -2',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='pairedu.1.fastq', expected2='pairedu.2.fastq',
cores=cores
)
def test_paired_end_upper_a_only(cores):
run_paired(
'-A CAGTGGAGTA',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired-onlyA.1.fastq', expected2='paired-onlyA.2.fastq',
cores=cores
)
def test_discard_untrimmed(cores):
# issue #146
# the first adapter is a sequence cut out from the first read
run_paired(
'-a CTCCAGCTTAGACATATC -A XXXXXXXX --discard-untrimmed',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='empty.fastq', expected2='empty.fastq',
cores=cores
)
def test_discard_trimmed(cores):
run_paired(
'-A C -O 1 --discard-trimmed', # applies everywhere
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='empty.fastq', expected2='empty.fastq',
cores=cores
)
def test_interleaved_in_and_out(cores):
"""Single-pass interleaved paired-end with -q and -m"""
run_interleaved(
'-q 20 -a TTAGACATAT -A CAGTGGAGTA -m 14 -M 90',
inpath1='interleaved.fastq', expected1='interleaved.fastq',
cores=cores
)
def test_interleaved_in(cores):
"""Interleaved input, two files output"""
run_interleaved(
'-q 20 -a TTAGACATAT -A CAGTGGAGTA -m 14 -M 90',
inpath1='interleaved.fastq',
expected1='pairedq.1.fastq', expected2='pairedq.2.fastq',
cores=cores
)
def test_interleaved_out(cores):
"""Two files input, interleaved output"""
run_interleaved(
'-q 20 -a TTAGACATAT -A CAGTGGAGTA -m 14 -M 90',
inpath1='paired.1.fastq', inpath2='paired.2.fastq',
expected1='interleaved.fastq',
cores=cores
)
def test_interleaved_neither_nor():
"""Option --interleaved used, but pairs of files given for input and output"""
with temporary_path("temp-paired.1.fastq") as p1:
with temporary_path("temp-paired.2.fastq") as p2:
params = '-a XX --interleaved'.split()
with redirect_stderr():
                params += ['-o', p1, '-p', p2, 'paired.1.fastq', 'paired.2.fastq']
with pytest.raises(SystemExit):
main(params)
def test_pair_filter_both(cores):
run_paired(
'--pair-filter=both -a TTAGACATAT -A GGAGTA -m 14',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired-filterboth.1.fastq', expected2='paired-filterboth.2.fastq',
cores=cores
)
def test_pair_filter_first(cores):
run_paired(
'--pair-filter=first -a TTAGACATAT -A GGAGTA -m 14',
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired-filterfirst.1.fastq', expected2='paired-filterfirst.2.fastq',
cores=cores
)
def test_too_short_paired_output():
with temporary_path("temp-too-short.1.fastq") as p1:
with temporary_path("temp-too-short.2.fastq") as p2:
run_paired(
'-a TTAGACATAT -A CAGTGGAGTA -m 14 --too-short-output '
'{0} --too-short-paired-output {1}'.format(p1, p2),
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired.1.fastq', expected2='paired.2.fastq',
cores=1
)
assert_files_equal(cutpath('paired-too-short.1.fastq'), p1)
assert_files_equal(cutpath('paired-too-short.2.fastq'), p2)
def test_too_long_output():
with temporary_path('temp-too-long.1.fastq') as p1:
with temporary_path('temp-too-long.2.fastq') as p2:
run_paired(
'-a TTAGACATAT -A CAGTGGAGTA -M 14 --too-long-output '
'{0} --too-long-paired-output {1}'.format(p1, p2),
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired-too-short.1.fastq', expected2='paired-too-short.2.fastq',
cores=1
)
assert_files_equal(cutpath('paired.1.fastq'), p1)
assert_files_equal(cutpath('paired.2.fastq'), p2)
def test_too_short_output_paired_option_missing():
with temporary_path('temp-too-short.1.fastq') as p1:
with pytest.raises(SystemExit):
run_paired(
'-a TTAGACATAT -A CAGTGGAGTA -m 14 --too-short-output '
'{0}'.format(p1),
in1='paired.1.fastq', in2='paired.2.fastq',
expected1='paired.1.fastq', expected2='paired.2.fastq',
cores=1
)
def test_nextseq_paired(cores):
run_paired('--nextseq-trim 22', in1='nextseq.fastq', in2='nextseq.fastq',
expected1='nextseq.fastq', expected2='nextseq.fastq',
cores=cores)
def test_paired_demultiplex():
tempdir = tempfile.mkdtemp(prefix='cutadapt-tests.')
multiout1 = os.path.join(tempdir, 'demultiplexed.{name}.1.fastq')
multiout2 = os.path.join(tempdir, 'demultiplexed.{name}.2.fastq')
params = [
'-a', 'first=AACATTAGACA', '-a', 'second=CATTAGACATATCGG',
'-A', 'ignored=CAGTGGAGTA', '-A', 'alsoignored=AATAACAGTGGAGTA',
'-o', multiout1, '-p', multiout2,
datapath('paired.1.fastq'), datapath('paired.2.fastq')]
assert main(params) is None
assert_files_equal(cutpath('demultiplexed.first.1.fastq'), multiout1.format(name='first'))
assert_files_equal(cutpath('demultiplexed.second.1.fastq'), multiout1.format(name='second'))
assert_files_equal(cutpath('demultiplexed.unknown.1.fastq'), multiout1.format(name='unknown'))
assert_files_equal(cutpath('demultiplexed.first.2.fastq'), multiout2.format(name='first'))
assert_files_equal(cutpath('demultiplexed.second.2.fastq'), multiout2.format(name='second'))
assert_files_equal(cutpath('demultiplexed.unknown.2.fastq'), multiout2.format(name='unknown'))
shutil.rmtree(tempdir)
@pytest.mark.parametrize('name_op,l1,l2,m', list(product(
(('m', lambda x, y: x >= y), ('M', lambda x, y: x <= y)),
range(1, 5),
range(1, 5),
[(2, 3), (2, None), (None, 3)]
)))
def test_separate_minmaxlength(tmpdir, name_op, l1, l2, m):
"""Separate minimum lengths for R1 and R2"""
m1, m2 = m
name, func = name_op
inpath = str(tmpdir.join('separate_minlength.fasta'))
expected = str(tmpdir.join('separate_minlength_expected.fasta'))
outpath = str(tmpdir.join('out.fasta'))
record = '>r{}:{}\n{}\n'.format(l1, l2, 'A' * l1)
record += '>r{}:{}\n{}'.format(l1, l2, 'A' * l2)
with open(inpath, 'w') as f:
print(record, file=f)
with open(expected, 'w') as f:
if (m1 is None or func(l1, m1)) and (m2 is None or func(l2, m2)):
print(record, file=f)
assert os.path.exists(inpath)
assert os.path.exists(expected)
if m1 is None:
m1 = ''
if m2 is None:
m2 = ''
main(['--interleaved', '-o', outpath, '-' + name, '{}:{}'.format(m1, m2), inpath])
assert_files_equal(expected, outpath)
def test_separate_minlength_single():
"""Using separate minlengths for single-end data"""
with pytest.raises(SystemExit):
main(['-m', '5:7', datapath('small.fastq')])

# === solutions_python/Problem_116/379.py (dr-dos-ok/Code_Jam_Webscraper) ===
def hasLine(squares, symbol):
for i in range(4):
if squares[i][0] == squares[i][1] == squares[i][2] == squares[i][3] == symbol:
return True
for i in range(4):
if squares[0][i] == squares[1][i] == squares[2][i] == squares[3][i] == symbol:
return True
if squares[0][0] == squares[1][1] == squares[2][2] == squares[3][3] == symbol:
return True
if squares[0][3] == squares[1][2] == squares[2][1] == squares[3][0] == symbol:
return True
return False
def hasEmpty(squares):
for i in range(4):
for j in range(4):
if squares[i][j] == '.':
return True
return False
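# Quick sanity check (a sketch on a hand-made 4x4 board): a full row of 'X'
# is a winning line, and any '.' cell means the board still has empty squares.
_demo = [list('XXXX'), list('OO..'), list('T...'), list('....')]
assert hasLine(_demo, 'X')
assert hasEmpty(_demo)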
file = open("A-large.in")
n = int(file.readline())
for case in range(n):
squares = [list(file.readline()) for i in range(4)]
file.readline()
print("Case #{:d}:".format(case+1)),
Tpos = None
for i in range(4):
if 'T' in squares[i]:
index = squares[i].index('T')
Tpos = (i, index)
break
if Tpos != None:
squares[Tpos[0]][Tpos[1]] = 'X'
if hasLine(squares, 'X'):
print("X won")
else:
if Tpos != None:
squares[Tpos[0]][Tpos[1]] = 'O'
if hasLine(squares, 'O'):
print("O won")
else:
if hasEmpty(squares):
print("Game has not completed")
else:
print("Draw")
file.close()

# === pbase/day12/code/text2.py (zuigehulu/AID1811) ===
# Write a function `fun` that computes the sum of the following series:
# Sn = 1 + 1/1! + 1/2! + 1/3! + .... + 1/n!
# (hint: use factorial from the math module)
# Find the value of Sn when n = 20,
# i.e.:
# print(fun(20)) # 2.718281828...
import math
# def sumfun(n):
# Sn = 1
# for x in range(1,n+1):
# Sn += 1/math.factorial(x)
# return Sn
# print(sumfun(20))
def sumfun(n):
s = sum(map(lambda x: 1/math.factorial(x), range(n+1)))
print(s)
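# Sanity check (a sketch; `math` is already imported above): the series
# 1 + 1/1! + ... + 1/n! converges to e, so the n = 20 sum should match
# math.e to double precision.
assert abs(sum(1.0/math.factorial(k) for k in range(21)) - math.e) < 1e-12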
sumfun(20)

# === merge_pedmap.py (inambioinfo/bioinformatics_scripts) ===
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import OptionParser
import os
__author__ = "Raony Guimarães"
__copyright__ = "Copyright 2012, Filter Analysis"
__credits__ = ["Raony Guimarães"]
__license__ = "GPL"
__version__ = "1.0.1"
__maintainer__ = "Raony Guimarães"
__email__ = "[email protected]"
__status__ = "Production"
#run example
#python gatk.py -i alignment/exome.sorted.bam
parser = OptionParser()
parser.add_option("-p", dest="p1",
help="PED File", metavar="pedfile")
parser.add_option("-q", dest="p2",
help="PED File", metavar="pedfile")
parser.add_option("-o", dest="outfile",
help="PED File", metavar="pedfile")
(options, args) = parser.parse_args()
f1 = ".".join(options.p1.split("/")[-1].split(".")[:-1])
f1 = options.p1.replace('.ped','')
f2 = ".".join(options.p2.split("/")[-1].split(".")[:-1])
f2 = options.p2.replace('.ped','')
outfile = options.outfile
plink_dir = '/projects/relatedness/plink-1.07-x86_64'
#first identify the ones to remove
command = '%s/plink --file %s --merge %s.ped %s.map --recode --out %s --noweb --geno 0' % (plink_dir, f1, f2, f2, outfile)
os.system(command)
#commando remove snps
command = 'mv %s.missnp removesnps' % (outfile)
os.system(command)
print 'remove snps in file one'
command = '%s/plink --file %s --recode --out %s.snpsless --noweb --exclude removesnps' % (plink_dir, f1, f1)
os.system(command)
print 'remove snps in file two'
command = '%s/plink --file %s --recode --out %s.snpsless --noweb --exclude removesnps' % (plink_dir, f2, f2)
os.system(command)
print 'finally merge'
command = '%s/plink --file %s.snpsless --merge %s.snpsless.ped %s.snpsless.map --recode --out %s --noweb --geno 0' % (plink_dir, f1, f2, f2, options.outfile)
os.system(command)
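# Example invocation (hypothetical file names):
#   python merge_pedmap.py -p cohortA.ped -q cohortB.ped -o merged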

# === result/migrations/0004_auto_20170331_2017.py (pratulyab/ipu-results-bot) ===
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-03-31 14:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('result', '0003_auto_20170331_1806'),
]
operations = [
migrations.RemoveField(
model_name='score',
name='verdict',
),
migrations.AddField(
model_name='score',
name='passed',
field=models.BooleanField(default=True),
),
]

# === demo/sjh_web/demo55.py (befallenStar/python) ===
# -*- encoding: utf-8 -*-
import urllib3
pcUserAgent = {
'IE-agent': 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;',
'firefox-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0',
'chrome-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36',
}
mobileUserAgent = {
'Touch capable Windows 8 device': 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0; Touch)',
'Kindle Fire': 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us; Silk/1.1.0-80) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16 Silk-Accelerated=true',
'iPad': 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10',
'Samsung Galaxy S3': 'Mozilla/5.0 (Linux; U; Android 4.0.4; en-gb; GT-I9300 Build/IMM76D) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30',
'BlackBerry': 'BlackBerry9700/5.0.0.862 Profile/MIDP-2.1 Configuration/CLDC-1.1 VendorID/331 UNTRUSTED/1.0 3gpp-gba',
'iPhone': 'Mozilla/5.0 (iPhone; CPU iPhone OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B179 Safari/7534.48.3',
'UC standard': 'NOKIA5700/ UCWEB7.0.2.37/28/999'
}
http = urllib3.PoolManager()
r = http.request('GET', 'http://www.baidu.com/s', fields={'wd': 'hello'},
headers={'User-Agent': pcUserAgent['firefox-agent']})  # spoof the User-Agent header so the server sees a browser
print(r) # <urllib3.response.HTTPResponse object at 0x000002A0FB49EE88>
print(r.status) # 200
print(r.data.decode('utf-8'))
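# Follow-up sketch: the mobile agents work the same way; urllib3 expects
# headers as a mapping, with the agent string under the 'User-Agent' key.
r2 = http.request('GET', 'http://www.baidu.com/s', fields={'wd': 'hello'},
                  headers={'User-Agent': mobileUserAgent['iPhone']})
print(r2.status)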

# === katas/kyu_7/linked_lists_get_nth_node.py (mveselov/CodeWars, MIT) ===
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
def get_nth(node, index):
current = node
dex = -1
while current is not None:
dex += 1
if dex == index:
return current
current = current.next
raise IndexError("index out of range")
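# Quick usage sketch: build 1 -> 2 -> 3 and fetch the node at index 1 (0-based).
first = Node(1)
first.next = Node(2)
first.next.next = Node(3)
assert get_nth(first, 1).data == 2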

# === libdemo/bs_demo.py (srikanthpragada/DEMO_PYTHON_19_NOV_2019) ===
from bs4 import BeautifulSoup
st = "<html><body><h1>Title1</h1><h2>Title2.1</h2><h2>Title2.2</h2></body></html>"
bs = BeautifulSoup(st, 'html.parser')
for tag in bs.find_all("h2"):
print(tag.text)
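# Hedged extension of the same parse tree: find() returns only the first match.
print(bs.find('h1').text)  # Title1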

# === medium/trees/next_right_pointer.py (nicokuzak/leetcode) ===
"""You are given a perfect binary tree where all leaves are on the same level, and every parent has two children. The binary tree has the following definition:
struct Node {
int val;
Node *left;
Node *right;
Node *next;
}
Populate each next pointer to point to its next right node. If there is no next right node, the next pointer should be set to NULL.
Initially, all next pointers are set to NULL.
Follow up:
You may only use constant extra space.
Recursive approach is fine, you may assume implicit stack space does not count as extra space for this problem.
Example 1:
Input: root = [1,2,3,4,5,6,7]
Output: [1,#,2,3,#,4,5,6,7,#]
Explanation: Given the above perfect binary tree (Figure A), your function should populate each next pointer to point to its next right node, just like in Figure B. The serialized output is in level order as connected by the next pointers, with '#' signifying the end of each level.
"""
class Solution:
def connect(self, root: 'Node') -> 'Node':
if root is None or root.left is None:
return root
root.left.next = root.right #Child left -> Child Right
if root.next: #If it is a left node that has something to the right
root.right.next = root.next.left #Child right next is parent right's left
self.connect(root.left)
self.connect(root.right)
return root

# === Ch06/p157.py (pkc-3/python) ===
# Example of using `self`
class multiply3:
# no member variables
# no constructor
# member variables are created/initialized dynamically
def data(self,x,y):
self.x = x
self.y = y
# multiplication operation
def mul(self):
result = self.x * self.y
self.display(result)  # method call
# print the result
def display(self, result):
print("곱셈 = %d" % (result))
obj = multiply3()  # default constructor
obj.data(10, 20)
obj.mul()

# === output_cog_tags/initial_859.py (batxes/exocyst_scripts) ===
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog1_Anch" not in marker_sets:
s=new_marker_set('Cog1_Anch')
marker_sets["Cog1_Anch"]=s
s= marker_sets["Cog1_Anch"]
mark=s.place_marker((740, 588, 378), (0, 0, 1), 21.9005)
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((934, 253, 192), (1, 0.5, 0), 21.9005)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((18, 558, 379), (1, 0.5, 0), 21.9005)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((553, 818, 131), (1, 0.5, 0), 21.9005)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((756, 296, 36), (1, 0.87, 0), 21.9005)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((816, 91, 319), (1, 0.87, 0), 21.9005)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((649, 924, 860), (1, 0.87, 0), 21.9005)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((341, 421, 253), (0.97, 0.51, 0.75), 21.9005)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((623, 816, 736), (0.97, 0.51, 0.75), 21.9005)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((88, 643, 970), (0.97, 0.51, 0.75), 21.9005)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((302, 317, 967), (0.39, 0.31, 0.14), 21.9005)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((635, 925, 161), (0.39, 0.31, 0.14), 21.9005)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((490, 53, 130), (0.39, 0.31, 0.14), 21.9005)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((933, 833, 769), (0.6, 0.31, 0.64), 21.9005)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((362, 701, 371), (0.6, 0.31, 0.64), 21.9005)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((190, 600, 839), (0.6, 0.31, 0.64), 21.9005)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((310, 511, 365), (0.89, 0.1, 0.1), 21.9005)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((295, 883, 14), (0.89, 0.1, 0.1), 21.9005)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((639, 840, 123), (0.89, 0.1, 0.1), 21.9005)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((761, 18, 329), (0.3, 0.69, 0.29), 21.9005)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((107, 498, 442), (0.3, 0.69, 0.29), 21.9005)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
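# Hedged refactor sketch: the repeated place-marker blocks above could be
# driven from (name, xyz, rgb) tuples instead, e.g.:
# def add_marker(name, xyz, rgb, radius=21.9005):
#     if name not in marker_sets:
#         marker_sets[name] = new_marker_set(name)
#     marker_sets[name].place_marker(xyz, rgb, radius)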

# === plugins/microsoft_intune/icon_microsoft_intune/actions/get_managed_apps/schema.py (rapid7/insightconnect-plugins, MIT) ===
# GENERATED BY KOMAND SDK - DO NOT EDIT
import insightconnect_plugin_runtime
import json
class Component:
DESCRIPTION = "Returns InTune manageable apps"
class Input:
APP = "app"
class Output:
MANAGED_APPS = "managed_apps"
class GetManagedAppsInput(insightconnect_plugin_runtime.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"app": {
"type": "string",
"title": "App",
"description": "Application ID or name, if empty returns all applications",
"order": 1
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
class GetManagedAppsOutput(insightconnect_plugin_runtime.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"managed_apps": {
"type": "array",
"title": "Managed Apps",
"description": "Application details",
"items": {
"$ref": "#/definitions/value"
},
"order": 1
}
},
"definitions": {
"value": {
"type": "object",
"title": "value",
"properties": {
"@odata.context": {
"type": "string",
"title": "Odata Context",
"description": "Odata context",
"order": 2
},
"@odata.type": {
"type": "string",
"title": "Odata Type",
"description": "Odata type",
"order": 1
},
"appAvailability": {
"type": "string",
"title": "App Availability",
"description": "App availability",
"order": 17
},
"appStoreUrl": {
"type": "string",
"title": "App Store URL",
"description": "App store URL",
"order": 20
},
"createdDateTime": {
"type": "string",
"title": "Created Datetime",
"description": "Created datetime",
"order": 8
},
"description": {
"type": "string",
"title": "Description",
"description": "Description",
"order": 5
},
"developer": {
"type": "string",
"title": "Developer",
"description": "Developer",
"order": 14
},
"displayName": {
"type": "string",
"title": "Display Name",
"description": "Display Name",
"order": 4
},
"id": {
"type": "string",
"title": "ID",
"description": "ID",
"order": 3
},
"informationUrl": {
"type": "string",
"title": "Information URL",
"description": "Information URL",
"order": 12
},
"isFeatured": {
"type": "boolean",
"title": "Is Featured",
"description": "Is featured",
"order": 10
},
"largeIcon": {
"type": "object",
"title": "Large Icon",
"description": "Large icon",
"order": 7
},
"lastModifiedDateTime": {
"type": "string",
"title": "Last Modified Datetime",
"description": "Last modified datetime",
"order": 9
},
"minimumSupportedOperatingSystem": {
"type": "object",
"title": "Minimum Supported Operating System",
"description": "Minimum supported operating system",
"order": 21
},
"notes": {
"type": "string",
"title": "Notes",
"description": "Notes",
"order": 15
},
"owner": {
"type": "string",
"title": "Owner",
"description": "Owner",
"order": 13
},
"packageId": {
"type": "string",
"title": "Package ID",
"description": "Package ID",
"order": 19
},
"privacyInformationUrl": {
"type": "string",
"title": "Privacy Information URL",
"description": "Privacy information URL",
"order": 11
},
"publisher": {
"type": "string",
"title": "Publisher",
"description": "Publisher",
"order": 6
},
"publishingState": {
"type": "string",
"title": "Publishing State",
"description": "Publishing state",
"order": 16
},
"version": {
"type": "string",
"title": "Version",
"description": "Version",
"order": 18
}
}
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
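# Hedged usage sketch: a plugin action would typically validate its parameters
# against this schema through the runtime, roughly (method name assumed):
# GetManagedAppsInput().validate({"app": "Excel"})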

# === node/src/fuzzers/peach_fuzzbang.py (hatRiot/PeachOrchard, MIT) ===
from src.core.log import *
from src.core import config
from src.core import utility
from src.core.fuzzer import Fuzzer
from re import findall
import os
class peach_fuzzbang(Fuzzer):
""" Class implements the interface for the Peach fuzzer. This has
been tested with FuzzBang as well as regular ol' Peach.
"""
def __init__(self):
self.name = "Peach FuzzBang"
def fetch_crashes(self):
"""
"""
base = config.MONITOR_DIR + '/' + config.SESSION
crashes = {}
# build a list of files from session root
pot_files = []
for (root, subFolders, files) in os.walk(base):
for file in files:
f = os.path.join(root, file)
pot_files.append(f.replace('\\', '/'))
# massage these into our crashes dictionary
for entry in pot_files:
if '_description.txt' in entry:
# found description entry, parse it
e = entry.rsplit('/', 2)
crashes[e[1]] = entry
return crashes
def get_status(self):
""" Parse the status file and pull the latest iteration update
"""
try:
data = None
spath = config.MONITOR_DIR + '/' + config.SESSION + '/' + 'status.txt'
with open(spath) as f:
data = f.read().split('\n')
# chop it up
status = None
data = [x for x in data if len(x) > 0]
if 'Test finished' in data[:-1]:
status = 'Completed'
else:
(cidx, total) = findall("Iteration (.*?) of (.*?) :", data[-1])[0]
status = '%s/%s' % (cidx, total)
except Exception, e:
utility.msg("Failed to parse status update: %s" % e, ERROR)
status = "Error"
return status
def check_session(self):
"""
"""
valid = False
try:
if config.MONITOR_DIR and os.path.isdir(config.MONITOR_DIR):
if config.SESSION:
# validate session
if config.SESSION not in os.listdir(config.MONITOR_DIR):
utility.msg("Session %s not found in %s" % (config.SESSION, config.MONITOR_DIR))
else:
valid = True
else:
# fetch latest version
tmp = os.listdir(config.MONITOR_DIR)
if len(tmp) <= 0:
utility.msg("No running sessions found", ERROR)
valid = False
else:
config.SESSION = tmp[-1]
utility.msg("Setting session to %s" % config.SESSION, LOG)
valid = True
else:
utility.msg("Directory '%s' not found" % config.MONITOR_DIR, ERROR)
valid = False
except Exception, e:
utility.msg("Error checking session: %s" % e, ERROR)
valid = False
return valid
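# Hedged usage sketch (Python 2): the node daemon would drive this fuzzer like:
# fuzzer = peach_fuzzbang()
# if fuzzer.check_session():
#     print fuzzer.get_status()
#     print fuzzer.fetch_crashes()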

# === src/trees/isBalanced.py (way2arun/datastructures_algorithms, CC0-1.0) ===
"""
Balanced Binary Tree
Given a binary tree, determine if it is height-balanced.
For this problem, a height-balanced binary tree is defined as:
a binary tree in which the left and right subtrees of every node differ in height by no more than 1.
Example 1:
Input: root = [3,9,20,null,null,15,7]
Output: true
Example 2:
Input: root = [1,2,2,3,3,null,null,4,4]
Output: false
Example 3:
Input: root = []
Output: true
Constraints:
The number of nodes in the tree is in the range [0, 5000].
-104 <= Node.val <= 104
"""
# Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def isBalanced(self, root: TreeNode) -> bool:
# Solution 1 - 48 ms
#return self.dfs(root)[1]
# Solution 2 - 28 ms
h, is_b = self.helper(root)
return is_b
def helper(self, root):
if root is None:
return 0, True
hl, lb = self.helper(root.left)
hr, rb = self.helper(root.right)
if lb and rb and abs(hl - hr) <= 1:
return max(hl, hr) + 1, True
else:
return -1, False
def dfs(self, root): # return (depth, isBalance)
if root is None:
return 0, True
leftH, leftB = self.dfs(root.left) # left height, left balance
rightH, rightB = self.dfs(root.right) # right height, right balance
return max(leftH, rightH) + 1, abs(leftH - rightH) <= 1 and leftB and rightB
# Main Call
root = TreeNode(3)
root.left = TreeNode(9)
root.right = TreeNode(20)
root.right.left = TreeNode(15)
root.right.right = TreeNode(7)
solution = Solution()
print(solution.isBalanced(root))
root = TreeNode(1)
root.right = TreeNode(2)
root.left = TreeNode(2)
root.left.right = TreeNode(3)
root.left.left = TreeNode(3)
root.left.left.right = TreeNode(4)
root.left.left.left = TreeNode(4)
print(solution.isBalanced(root))
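# Example 3 from the problem statement: an empty tree is balanced.
print(solution.isBalanced(None))  # expected: True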

# === perso_arabic_norm/describe_splits.py (ishine/google-research, CC-BY-4.0 / Apache-2.0) ===
# coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Collects basic stats for training and test splits from the results file.
Example:
--------
LANGUAGE=...
cat data/ngrams/results/reading/00/baselines/${LANGUAGE}.*.tsv > /tmp/${LANGUAGE}.tsv
python describe_splits.py \
--results_tsv_file /tmp/${LANGUAGE}.tsv
Dependencies:
-------------
absl
pandas
statsmodels
"""
from typing import Sequence
import logging
from absl import app
from absl import flags
import pandas as pd
import statsmodels.stats.api as sms
flags.DEFINE_string(
"results_tsv_file", "",
"Results text file in tab-separated (tsv) format.")
FLAGS = flags.FLAGS
def _to_str(stats):
"""Retrieves basic stats from the object."""
return f"mean: {stats.mean} var: {stats.var} std: {stats.std}"
def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")
if not FLAGS.results_tsv_file:
raise app.UsageError("Specify --results_tsv_file [FILE]!")
logging.info(f"Reading metrics from {FLAGS.results_tsv_file} ...")
df = pd.read_csv(FLAGS.results_tsv_file, sep="\t", header=None)
logging.info(f"Read {df.shape[0]} samples")
num_train_toks = list(df[0]) # Token can be char or word.
train_stats = sms.DescrStatsW(num_train_toks)
logging.info(f"Train stats: {_to_str(train_stats)}")
num_test_toks = list(df[1])
test_stats = sms.DescrStatsW(num_test_toks)
logging.info(f"Test stats: {_to_str(test_stats)}")
if __name__ == "__main__":
app.run(main)

# === 620-not-boring-movies/620-not-boring-movies.py (congve1/leetcode) ===
# Write your MySQL query statement below
select id,movie,description,rating
from cinema
where description != 'boring' and id%2 = 1
order by rating DESC

# === intersight/models/hyperflex_server_model_all_of.py (CiscoUcs/intersight-python, Apache-2.0) ===
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from intersight.configuration import Configuration
class HyperflexServerModelAllOf(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'server_model_entries': 'list[HyperflexServerModelEntry]',
'app_catalog': 'HyperflexAppCatalog'
}
attribute_map = {
'server_model_entries': 'ServerModelEntries',
'app_catalog': 'AppCatalog'
}
def __init__(self,
server_model_entries=None,
app_catalog=None,
local_vars_configuration=None): # noqa: E501
"""HyperflexServerModelAllOf - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._server_model_entries = None
self._app_catalog = None
self.discriminator = None
if server_model_entries is not None:
self.server_model_entries = server_model_entries
if app_catalog is not None:
self.app_catalog = app_catalog
@property
def server_model_entries(self):
"""Gets the server_model_entries of this HyperflexServerModelAllOf. # noqa: E501
:return: The server_model_entries of this HyperflexServerModelAllOf. # noqa: E501
:rtype: list[HyperflexServerModelEntry]
"""
return self._server_model_entries
@server_model_entries.setter
def server_model_entries(self, server_model_entries):
"""Sets the server_model_entries of this HyperflexServerModelAllOf.
:param server_model_entries: The server_model_entries of this HyperflexServerModelAllOf. # noqa: E501
:type: list[HyperflexServerModelEntry]
"""
self._server_model_entries = server_model_entries
@property
def app_catalog(self):
"""Gets the app_catalog of this HyperflexServerModelAllOf. # noqa: E501
:return: The app_catalog of this HyperflexServerModelAllOf. # noqa: E501
:rtype: HyperflexAppCatalog
"""
return self._app_catalog
@app_catalog.setter
def app_catalog(self, app_catalog):
"""Sets the app_catalog of this HyperflexServerModelAllOf.
:param app_catalog: The app_catalog of this HyperflexServerModelAllOf. # noqa: E501
:type: HyperflexAppCatalog
"""
self._app_catalog = app_catalog
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict()
if hasattr(x, "to_dict") else x, value))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, HyperflexServerModelAllOf):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, HyperflexServerModelAllOf):
return True
return self.to_dict() != other.to_dict()
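# Hedged usage sketch for this generated model:
# model = HyperflexServerModelAllOf(server_model_entries=[])
# print(model.to_dict())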

# === DjangoLearn/bgfaith/urls.py (Faithlmy/Python_base) ===
"""bgfaith URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
# from bgapp.views import *
urlpatterns = [
# url(r'^admin/', include(admin.site.urls)),
url(r'^bgapp/', include('bgapp.urls', namespace='', app_name=''))
]

# === reference/ucmdb/discovery/nnmi_api.py (madmonkyang/cda-record) ===
#!/usr/bin/env python
# coding: utf8
import re
import logger
import types
import time
import os
import ip_addr
import itertools
import nnmi_filters
import java.net
from java.lang import System, String
from com.hp.ucmdb.discovery.library.clients.recorder import ExecutionRecorderManager
import com.hp.ov.nms.sdk
from javax.xml.ws import BindingProvider
from javax.xml.ws import WebServiceException
from com.hp.ucmdb.discovery.clients.nnm import SoapHeadersHandler
from com.hp.ucmdb.discovery.clients.nnm import SoapHandlerResolver
from java.io import FileOutputStream, FileInputStream
from com.esotericsoftware.kryo import Kryo
from com.esotericsoftware.kryo.io import Output, Input
FF = nnmi_filters.get_jaxws_filter_factory()
# Default page sizes. To tune actual page sizes, refer to NmsAPI constructor
DEFAULT_PAGESIZE_NODE = 500
DEFAULT_PAGESIZE_L2CONNECTION = 200
DEFAULT_PAGESIZE_VLAN = 50
DEFAULT_CONDITIONS_IN_FILTER = 100
_DEFAULT_RELATED_TOPOLOGY_PAGESIZE = 1000
DEFAULT_PAGESIZE_INTERFACE = _DEFAULT_RELATED_TOPOLOGY_PAGESIZE
DEFAULT_PAGESIZE_IPADDRESS = _DEFAULT_RELATED_TOPOLOGY_PAGESIZE
DEFAULT_PAGESIZE_IPSUBNET = _DEFAULT_RELATED_TOPOLOGY_PAGESIZE
DEFAULT_PAGESIZE_PORT = _DEFAULT_RELATED_TOPOLOGY_PAGESIZE
DEFAULT_PAGESIZE_CARD = _DEFAULT_RELATED_TOPOLOGY_PAGESIZE
NO_PAGE_SIZE = -1
FETCH_DELAY = 0
FETCH_RETRY_COUNT = 3
FETCH_RETRY_DELAY = 20
RECORD_FOLDER_PATH = ExecutionRecorderManager.RECORD_FOLDER_PATH
class StoreConfig:
def __init__(self, read, write, fallback_to_live):
self._read = read
self._write = write
self._fallback_to_live = fallback_to_live
def read(self):
return self._read
def write(self):
return self._write
def fallback_to_live(self):
return self._fallback_to_live
_STORE_CONFIG = None  # StoreConfig(True, True, True)
_STORE_NAMESPACE = 'default'
def not_empty(x):
return not((x is None) or (x == ''))
class NmsServices:
Node = 'Node'
Interface = 'Interface'
IPAddress = 'IPAddress'
IPSubnet = 'IPSubnet'
L2Connection = 'L2Connection'
L2Node = 'L2Node'
VLAN = 'VLAN'
Port = 'Port'
Card = 'Card'
class RestorableItem:
def __init__(self, cmdbId, id_):
if not cmdbId:
raise ValueError('Invalid cmdbId')
if not id_:
raise ValueError('Invalid id_')
self.id = id_
self.cmdbId = cmdbId
def _restore_items(fetcher, id_map, ids_to_restore):
r'@types: BaseNmsFetcher, dict[str, str], set[str] -> list[BaseNmsEntity]'
cls = fetcher.collection_class.item_class
restorable_items = []
for id_ in ids_to_restore:
item = cls(RestorableItem(id_map.get(id_), id_), fetcher)
restorable_items.append(item)
return restorable_items
def is_restorable(entity):
return hasattr(entity, 'cmdbId')
def is_not_restorable(entity):
return not is_restorable(entity)
class _HasOsh:
''' Mixin which holds generated OSH object '''
def __init__(self):
self._osh = None
def get_osh(self):
return self._osh
def set_osh(self, osh):
if osh is None:
raise ValueError("osh is None")
self._osh = osh
def has_osh(entity):
''' entity with _HasOsh mixin -> boolean '''
return entity is not None and entity.get_osh() is not None
def to_osh(entity):
''' entity with _HasOsh mixin -> OSH '''
return entity.get_osh()
last_action = System.currentTimeMillis()
def ensure_delay(delay=0):
def decorator_fn(real_fn):
def wrapper(*args, **kwargs):
global last_action
current_time = System.currentTimeMillis()
difference = int((current_time - last_action) / 1000)
if difference < delay:
sleep_time = delay-difference
logger.debug("Delaying by %s seconds" % sleep_time)
time.sleep(sleep_time)
last_action = System.currentTimeMillis()
return real_fn(*args, **kwargs)
return wrapper
return decorator_fn
def retry_on(exceptions, times, with_delay=0, rethrow_exception=True, reload=False):
if not exceptions: raise ValueError("exceptions are not specified")
if not times: raise ValueError("times is not specified")
def decorator_fn(real_fn):
def wrapper(*args, **kwargs):
# Rebind the decorator arguments into fresh locals first: assigning to
# `times`/`with_delay` here would make them wrapper-locals and raise
# UnboundLocalError whenever reload is False.
retry_times, retry_delay = times, with_delay
if reload:
retry_times = FETCH_RETRY_COUNT
retry_delay = FETCH_RETRY_DELAY
local_retries = retry_times
while local_retries >= 0:
try:
return real_fn(*args, **kwargs)
except exceptions, ex:
local_retries -= 1
if local_retries >= 0:
logger.debug("(%s) Retrying call after exception %r" % (local_retries, ex))
if retry_delay > 0:
logger.debug("after delay of %s seconds" % retry_delay)
time.sleep(retry_delay)
else:
if rethrow_exception:
raise ex
else:
logger.debug('Ignoring the exception: %s' % ex)
return wrapper
return decorator_fn
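# Hedged usage sketch: the two decorators stack, so a SOAP fetch is throttled
# and retried on transient faults (function name and stub call illustrative):
# @ensure_delay(FETCH_DELAY)
# @retry_on((WebServiceException,), FETCH_RETRY_COUNT, with_delay=FETCH_RETRY_DELAY)
# def _fetch_nodes(stub, filter_):
#     return stub.getNodes(filter_)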
# assumptions/limitations: the 1st arg is self, the return value is countable
def log_self_calls():
def decorator_fn(real_fn):
def wrapper(*args, **kwargs):
logger.debug(" ---> %s.%s(%s, %s)" % (args[0].__class__.__name__, real_fn.__name__, args[1:], kwargs))
r = real_fn(*args, **kwargs)
if r is not None:
logger.debug(" <--- returning %s items" % len(r))
else:
logger.debug(" <--- returning None")
return r
return wrapper
return decorator_fn
class BaseNmsEntity(_HasOsh):
'''
Flag enables querying custom attributes. Each specific entity is expected to be modified to
support them and this flag to be set to True. Otherwise custom attributes are not requested
even if enabled globally.
'''
includes_custom_attrs = False
def __init__(self, item, fetcher):
self.fetcher = fetcher
self.id = None
if is_restorable(item):
self.cmdbId = item.cmdbId
self.id = item.id
_HasOsh.__init__(self)
def __repr__(self):
fields_repr = []
for field_name in self.field_names:
field_value = getattr(self, field_name)
field_value_repr = repr(field_value)
fields_repr.append('%s = %s' % (field_name, field_value_repr))
return '%s(%s)' % (self.__class__.__name__, ', '.join(fields_repr))
def __str__(self):
fields_str = []
for field_name in self.field_names:
field_value = getattr(self, field_name)
field_value_str = repr(field_value)
fields_str.append('%s=%s' % (field_name, field_value_str))
return '<%s %s at 0x%.8X>' % (self.__class__.__name__, ' '.join(fields_str), id(self))
class BaseManagementNmsEntity(BaseNmsEntity):
def __init__(self, item, fetcher):
BaseNmsEntity.__init__(self, item, fetcher)
def _get_management_mode(self, item):
management_mode = item.getManagementMode()
if management_mode:
return management_mode.value()
else:
return None
class NmsNodeEntity(BaseManagementNmsEntity):
DEV_PREFIX_LEN = len('com.hp.ov.nms.devices.')
LAN_SWITCH_CAPABILITY = 'com.hp.nnm.capability.node.lanswitching'
IP_FORWARDING_CAPABILITY = 'com.hp.nnm.capability.node.ipforwarding'
field_names = (
'id',
'name',
'is_lan_switch',
'is_router',
'system_name',
'system_contact',
'system_description',
'system_location',
'system_object_id',
'long_name',
'snmp_version',
'device_model',
'device_vendor',
'device_family',
'device_description',
'device_category',
'uuid',
'management_mode',
# 'customAttributes',
)
def __init__(self, item, fetcher):
BaseManagementNmsEntity.__init__(self, item, fetcher)
self.management_mode = None
if not is_restorable(item):
self.id = item.getId()
self.name = item.getName()
self.is_lan_switch = self._get_is_lan_switch(item)
self.is_router = self._get_is_router(item)
self.system_name = item.getSystemName()
self.system_contact = item.getSystemContact()
self.system_description = item.getSystemDescription()
self.system_location = item.getSystemLocation()
self.system_object_id = item.getSystemObjectId()
self.long_name = self._get_long_name(item)
self.snmp_version = item.getSnmpVersion()
self.device_model = self._get_device_model(item)
self.device_vendor = self._get_device_vendor(item)
self.device_family = self._get_device_family(item)
self.device_description = item.getDeviceDescription()
self.device_category = self._get_device_category(item)
self.uuid = item.getUuid()
self.management_mode = self._get_management_mode(item)
# self.customAttributes = item.getCustomAttributes()
self._report_all = False # indicates whether all related information should be reported, including interfaces, ips etc
def _get_is_lan_switch(self, item):
caps = item.getCapabilities()
if caps:
for cap in caps:
cap_key = cap.getKey()
if cap_key:
cap_key = cap_key.strip()
if cap_key == self.LAN_SWITCH_CAPABILITY:
return 1
return 0
def _get_is_router(self, item):
caps = item.getCapabilities()
if caps:
for cap in caps:
cap_key = cap.getKey()
if cap_key:
cap_key = cap_key.strip()
if cap_key == self.IP_FORWARDING_CAPABILITY:
return 1
return 0
def _get_device_family(self, item):
device_family = item.getDeviceFamily()
if device_family and (device_family != '<No SNMP>'):
return device_family[self.DEV_PREFIX_LEN:]
else:
return ''
def _get_device_vendor(self, item):
device_vendor = item.getDeviceVendor()
if device_vendor and (device_vendor != 'com.hp.ov.nms.devices.nosnmp'):
return device_vendor[self.DEV_PREFIX_LEN:]
else:
return ''
def _get_device_model(self, item):
device_model = item.getDeviceModel()
if device_model and (device_model != 'com.hp.ov.nms.devices.<No SNMP>'):
return device_model
else:
return ''
def _get_device_category(self, item):
device_category = item.getDeviceCategory()
if device_category:
return device_category[self.DEV_PREFIX_LEN:]
else:
return ''
def _get_long_name(self, item):
long_name = item.getLongName()
return long_name or ''
class NmsInterfaceEntity(BaseManagementNmsEntity):
field_names = (
'id',
'name',
'hosted_on_id',
'connection_id',
'if_index',
'if_alias',
'if_descr',
'if_name',
'if_speed',
'physical_address',
'if_type',
'uuid',
'status',
'admin_status',
'oper_status',
'management_mode',
)
def __init__(self, item, fetcher):
r'@types: com.hp.ov.nms.sdk.iface._interface, NmsInterfaceFetcher'
BaseManagementNmsEntity.__init__(self, item, fetcher)
self.hosted_on_id = None
self.management_mode = None
if not is_restorable(item):
self.id = item.getId()
self.name = item.getName()
self.hosted_on_id = item.getHostedOnId()
self.connection_id = item.getConnectionId()
self.if_index = item.getIfIndex()
self.if_alias = item.getIfAlias()
self.if_descr = item.getIfDescr()
self.if_name = item.getIfName()
self.if_speed = item.getIfSpeed()
self.admin_status = item.getAdministrativeState()
self.oper_status = item.getOperationalState()
self.physical_address = self._get_physical_address(item)
self.if_type = self._get_interface_type(item)
self.uuid = item.getUuid()
self.management_mode = self._get_management_mode(item)
self.status = self._get_status(item)
def _get_status(self, item):
status = item.getStatus()
return status.value()
def _get_physical_address(self, item):
physical_address = item.getPhysicalAddress()
if physical_address:
return physical_address
else:
return None
def _get_interface_type(self, item):
typeStr = item.getIfType()
if typeStr:
try:
typeValue = int(typeStr)
if typeValue > 0 and typeValue < 252:
return typeValue
except:
pass
return None
class NmsIPAddressEntity(BaseManagementNmsEntity):
field_names = (
'id',
'hosted_on_id',
'ip_subnet_id',
'in_interface_id',
'ip_value',
'prefix_length',
'uuid',
'management_mode',
)
def __init__(self, item, fetcher):
BaseManagementNmsEntity.__init__(self, item, fetcher)
self.hosted_on_id = None
self.management_mode = None
if not is_restorable(item):
self.id = item.getId()
self.hosted_on_id = item.getHostedOnId()
self.ip_subnet_id = item.getIpSubnetId()
self.in_interface_id = item.getInInterfaceId()
self.ip_value = item.getIpValue()
self.prefix_length = item.getPrefixLength()
self.uuid = item.getUuid()
self.management_mode = self._get_management_mode(item)
class NmsIPSubnetEntity(BaseNmsEntity):
field_names = (
'id',
'name',
'prefix_length',
'prefix',
'uuid',
)
def __init__(self, item, fetcher):
BaseNmsEntity.__init__(self, item, fetcher)
if not is_restorable(item):
self.id = item.getId()
self.name = item.getName()
self.prefix_length = item.getPrefixLength()
self.prefix = item.getPrefix()
self.uuid = item.getUuid()
class NmsL2ConnectionEntity(BaseNmsEntity):
field_names = (
'id',
'name',
'interfaces',
'uuid',
)
def __init__(self, item, fetcher):
BaseNmsEntity.__init__(self, item, fetcher)
if not is_restorable(item):
self.id = item.getId()
self.name = item.getName()
self.uuid = item.getUuid()
interfaces = item.getInterfaces()
if interfaces is not None:
self.interfaces = list(interfaces)
else:
self.interfaces = self._getInterfacesIdsByL2Name(item.name)
def _getHostInterface(self, id):
interfaceFetcher = self.fetcher.api.get_fetcher(NmsServices.Interface)
name_filter = FF.CONDITION('hostedOnId', '==', id)
return interfaceFetcher.filtered(name_filter).all()
def _findInterfaceIdByHostAndName(self, hostName, interfaceName):
hostFetcher = self.fetcher.api.get_fetcher(NmsServices.Node)
name_filter = FF.CONDITION('name', '==', hostName)
hosts = hostFetcher.filtered(name_filter).all()
if hosts:
hostList = hosts.items()
if hostList:
# our NNM API returns a (hostId, hostObject) tuple for each host;
# we need the host object
host = hostList[0][1]
if len(hostList) > 1:
logger.warn("Non unique host was found. Host name: %s " % host.name)
else:
hostInterfaces = self._getHostInterface(host.id)
for interface in hostInterfaces:
if interface.name == interfaceName:
return interface.id
return None
def _getInterfacesIdsByL2Name(self, name):
interfaceInfoList = name.split(",")
interfaces = []
for interfaceInfo in interfaceInfoList:
interfaceId = self._getInterfaceId(interfaceInfo)
if interfaceId:
interfaces.append(interfaceId)
return interfaces
def _getInterfaceId(self, interfaceInfo):
"""
Try to extract the interface endpoints from the Layer2Connection name.
In NNMi a Layer2Connection name encodes its endpoints in the format "Hostname[InterfaceName]".
"""
match = re.match("(.*)\[(.*)\]", interfaceInfo.strip())
if match:
hostName = match.group(1)
interfaceName = match.group(2)
return self._findInterfaceIdByHostAndName(hostName, interfaceName)
class NmsVLANEntity(BaseNmsEntity):
field_names = (
'id',
'name',
'uuid',
'vlan_id',
)
def __init__(self, item, fetcher):
BaseNmsEntity.__init__(self, item, fetcher)
if not is_restorable(item):
self.id = item.getId()
self.name = item.getName()
self.uuid = item.getUuid()
self.vlan_id = item.getVlanId()
self.ports = self._get_ports()
def _get_ports(self):
port_objects = self.fetcher._get_stub().getPortsForVLANbyId(self.id).getItem()
if port_objects is not None:
return [port_object.getId() for port_object in port_objects]
else:
return []
class NmsPortEntity(BaseNmsEntity):
PORT_DUPLEX_TYPE = {
'FULL': 'full',
'HALF': 'half',
'AUTO': 'auto-negotiated',
'UNKNOWN': 'other',
}
field_names = (
'id',
'name',
'hosted_on_id',
'interface',
'card',
'speed',
'type',
'duplex_setting',
'index',
'uuid',
)
def __init__(self, item, fetcher):
BaseNmsEntity.__init__(self, item, fetcher)
self.hosted_on_id = None
if not is_restorable(item):
self.id = item.getId()
self.name = item.getName()
self.hosted_on_id = self._get_hosted_on_id(item)
self.interface = self._get_interface(item)
self.card = self._get_card(item)
self.speed = self._get_speed(item)
self.type = self._get_type(item)
self.duplex_setting = self._get_duplex_setting(item)
self.index = item.getIndex()
self.uuid = item.getUuid()
def _get_hosted_on_id(self, item):
hosted_on_id = item.getHostedOnId()
if hosted_on_id:
return hosted_on_id
else:
return ''
def _get_interface(self, item):
interface = item.getIface()
if interface:
return interface
else:
return ''
def _get_card(self, item):
try:
card = item.getCard()
if card:
return card
except AttributeError:
pass
return ''
def _get_speed(self, item):
speed = item.getSpeed()
if speed:
return speed
else:
return ''
def _get_type(self, item):
_type = item.getType()
if _type:
return _type
else:
return ''
def _get_duplex_setting(self, item):
duplex_setting = item.getDuplexSetting()
if duplex_setting:
return self.PORT_DUPLEX_TYPE.get(duplex_setting.value())
else:
return ''
class NmsCardEntity(BaseManagementNmsEntity):
field_names = (
'id',
'name',
'hosted_on_id',
'card_descr',
'firmware_version',
'hardware_version',
'software_version',
'hosting_card',
'serial_number',
'type',
'index',
'uuid',
'management_mode',
)
def __init__(self, item, fetcher):
BaseManagementNmsEntity.__init__(self, item, fetcher)
self.hosted_on_id = None
self.management_mode = None
if not is_restorable(item):
self.id = item.getId()
self.name = item.getName()
self.hosted_on_id = self._get_hosted_on_id(item)
self.card_descr = self._get_card_descr(item)
self.firmware_version = self._get_firmware_version(item)
self.hardware_version = self._get_hardware_version(item)
self.software_version = self._get_software_version(item)
self.hosting_card = self._get_hosting_card(item)
self.serial_number = self._get_serial_number(item)
self.type = self._get_type(item)
self.index = self._get_index(item)
self.uuid = item.getUuid()
self.management_mode = self._get_management_mode(item)
def _get_hosted_on_id(self, item):
hosted_on_id = item.getHostedOnId()
if hosted_on_id:
return hosted_on_id
else:
return ''
def _get_card_descr(self, item):
card_descr = item.getCardDescr()
if card_descr:
return card_descr
else:
return ''
def _get_firmware_version(self, item):
firmware_version = item.getFirmwareVersion()
if firmware_version:
return firmware_version
else:
return ''
def _get_hardware_version(self, item):
hardware_version = item.getHardwareVersion()
if hardware_version:
return hardware_version
else:
return ''
def _get_software_version(self, item):
software_version = item.getSoftwareVersion()
if software_version:
return software_version
else:
return ''
def _get_hosting_card(self, item):
hosting_card = item.getHostingCard()
if hosting_card:
return hosting_card
else:
return ''
def _get_serial_number(self, item):
serial_number = item.getSerialNumber()
if serial_number:
return serial_number
else:
return ''
def _get_type(self, item):
_type = item.getType()
if _type:
return _type
else:
return ''
def _get_index(self, item):
index = item.getIndex()
if index:
return index
else:
return ''
class BaseNmsCollection:
def __init__(self, fetcher, items):
r'@types: BaseNmsFetcher, list[BaseNmsEntity]'
self.fetcher = fetcher
self.api = fetcher.api
idmap = {}
for item in items:
if not isinstance(item, self.item_class):
raise ValueError('expected instances of %r class, but %r instance occurred' % (self.item_class, item.__class__.__name__))
idmap[item.id] = item
self._items = idmap
def __len__(self):
return len(self._items)
def __getitem__(self, item_id):
if isinstance(item_id, types.IntType) or isinstance(item_id, types.LongType):
return self.values()[item_id]
elif isinstance(item_id, types.SliceType):
return self.__getslice__(item_id.start, item_id.stop)
else:
return self._items[item_id]
def __getslice__(self, start, end):
cls = self.__class__
return cls(self.fetcher, self.values()[start:end])
def __contains__(self, item_id):
return item_id in self._items.keys()
def filter_restorable_items(self):
'@types: -> list[BaseNmsEntity]'
return filter(is_not_restorable, self.itervalues())
def items(self):
return self._items.items()
def keys(self):
return self._items.keys()
def values(self):
return self._items.values()
def iteritems(self):
return self._items.iteritems()
def itervalues(self):
return self._items.itervalues()
def iterkeys(self):
return self._items.iterkeys()
def get(self, item_id, default=None):
return self._items.get(item_id, default)
def merge(self, collection):
if collection.item_class != self.item_class:
raise ValueError('cannot merge collections with different item types')
if collection.fetcher.__class__ != self.fetcher.__class__:
raise ValueError('cannot merge collections with different fetcher types')
cls = self.__class__
return cls(self.fetcher, itertools.chain(self.itervalues(), collection.itervalues()))
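    # Example (sketch; the collections are illustrative): merging two pages of
    # the same collection type yields a new collection, and because __init__
    # keys items by id, duplicates between the inputs collapse to one entry:
    #   merged = nodes_page1.merge(nodes_page2)
    #   assert len(merged) <= len(nodes_page1) + len(nodes_page2)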
def _get_partitioned_topology_by_field(self, nms_service, field_name, values):
fetcher = self.api.get_fetcher(nms_service)
if fetcher:
if values:
idMap, discovered_ids, undiscovered_ids = self.api.ucmdb_api.partitionIds(values)
restorable_items = _restore_items(fetcher, idMap, discovered_ids)
restorable_items_collection = fetcher.collection_class(fetcher,
restorable_items)
if undiscovered_ids:
undiscovered_ids = sorted(undiscovered_ids)
fullCollection = fetcher.collection_class(fetcher, [])
for id_chunk_index in xrange(0, len(undiscovered_ids), DEFAULT_CONDITIONS_IN_FILTER):
filter_ = FF.EMPTY
                        for undiscovered_id in undiscovered_ids[id_chunk_index:id_chunk_index + DEFAULT_CONDITIONS_IN_FILTER]:
filter_ |= FF.CONDITION(field_name, '==', undiscovered_id)
fullCollection = fullCollection.merge(fetcher.filtered(filter_).all())
return fullCollection.merge(restorable_items_collection)
return restorable_items_collection
return fetcher.collection_class(fetcher, [])
def _get_partitioned_topology_by_id(self, nms_service, ids):
r'@types: NmsServices, set[str]->BaseNmsCollection'
return self._get_partitioned_topology_by_field(nms_service, 'id', ids)
def _get_related_topology(self, nms_service, field_name, values):
fetcher = self.api.get_fetcher(nms_service)
if fetcher:
if values:
values = sorted(values)
fullCollection = fetcher.collection_class(fetcher, [])
for values_index in xrange(0, len(values), DEFAULT_CONDITIONS_IN_FILTER):
filter_ = FF.EMPTY
for value in values[values_index:values_index+DEFAULT_CONDITIONS_IN_FILTER]:
filter_ |= FF.CONDITION(field_name, '==', value)
                    fullCollection = fullCollection.merge(fetcher.filtered(filter_).all())
return fullCollection
return fetcher.collection_class(fetcher, [])
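    # Sketch of the chunking above: with DEFAULT_CONDITIONS_IN_FILTER == 2 and
    # values == ['a', 'b', 'c'], two requests are issued and merged:
    #   (field == 'a') | (field == 'b')
    #   (field == 'c')
    # which keeps any single web-service filter reasonably small.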
class NmsNodeCollection(BaseNmsCollection):
item_class = NmsNodeEntity
def _get_rt_interface(self):
# Interface.hostedOnId <== Node.id
return self._get_related_topology(NmsServices.Interface,
'hostedOnId',
self.keys())
def _get_rt_ip_address(self):
# IPAddress.hostedOnId <== Node.id
return self._get_related_topology(NmsServices.IPAddress,
'hostedOnId',
self.keys())
def _get_rt_port(self):
# Port.hostedOnId <== Node.id
return self._get_related_topology(NmsServices.Port,
'hostedOnId',
self.keys())
def _get_rt_card(self):
# Card.hostedOnId <== Node.id
return self._get_related_topology(NmsServices.Card,
'hostedOnId',
self.keys())
class NmsInterfaceCollection(BaseNmsCollection):
item_class = NmsInterfaceEntity
def _get_rt_node(self):
ids = set([entity.hosted_on_id for entity in self.filter_restorable_items() if entity.hosted_on_id])
return self._get_partitioned_topology_by_id(NmsServices.Node, ids)
class NmsIPAddressCollection(BaseNmsCollection):
item_class = NmsIPAddressEntity
def _get_rt_ip_subnet(self):
# IPSubnet.id ==> IPAddress.ipSubnetId
ids = set([entity.ip_subnet_id for entity in self.filter_restorable_items() if entity.ip_subnet_id])
return self._get_partitioned_topology_by_field(NmsServices.IPSubnet,
'ipSubnetId',
ids)
class NmsIPSubnetCollection(BaseNmsCollection):
item_class = NmsIPSubnetEntity
class NmsL2NodeCollection(NmsNodeCollection):
item_class = NmsL2ConnectionEntity
class NmsL2ConnectionCollection(BaseNmsCollection):
item_class = NmsL2ConnectionEntity
def _get_rt_interface(self):
# L2Connection.interfaces[] ==> Interface.id
interface_ids = []
for entity in self:
interface_ids.extend(entity.interfaces)
return self._get_partitioned_topology_by_id(NmsServices.Interface,
set(interface_ids))
class NmsVLANCollection(BaseNmsCollection):
item_class = NmsVLANEntity
def _get_rt_port(self):
# VLAN.ports[] ==> Port.id
port_ids = []
for entity in self.filter_restorable_items():
port_ids.extend(entity.ports)
port_ids = set(port_ids)
return self._get_partitioned_topology_by_id(NmsServices.Port, port_ids)
class NmsPortCollection(BaseNmsCollection):
item_class = NmsPortEntity
def _get_rt_node(self):
# Port.hostedOnId ==> Node.id
ids = set([entity.hosted_on_id for entity in self.filter_restorable_items()])
return self._get_partitioned_topology_by_id(NmsServices.Node, ids)
class NmsCardCollection(BaseNmsCollection):
item_class = NmsCardEntity
class StorageFileDoesNotExist(Exception):
pass
class StorageOperationException(Exception):
pass
class ResultStorage:
def __init__(self, fetcher, namespace=_STORE_NAMESPACE):
self.fetcher = fetcher
self.namespace = namespace
def get_storage_key(self, final_filter, page_index, page_size):
filter_hash = nnmi_filters.filter_hash(final_filter)
key = "%s_%s_i%s_p%s" % (self.fetcher.__class__.__name__, filter_hash, page_index, page_size)
return key
def get_trigger_id(self):
return self.fetcher.api.configuration.triggerId
def get_store_file_name(self, storage_key):
path = RECORD_FOLDER_PATH
triggerId = self.get_trigger_id()
filePath = '%snnm_store/%s_%s' % (path, triggerId, self.namespace)
fileName = '%s.ser' % storage_key
fullFileName = "%s/%s" % (filePath, fileName)
return filePath, fullFileName
def serialize(self, items, fullFileName):
stream = None
try:
try:
kryo = Kryo()
stream = Output(FileOutputStream(fullFileName))
kryo.writeObject(stream, items)
            except Exception, ex:
                raise StorageOperationException("Serialization failed: %s" % ex)
finally:
if stream is not None:
try:
stream.close()
except:
pass
def deserialize(self, fullFileName):
stream = None
try:
try:
kryo = Kryo()
stream = Input(FileInputStream(fullFileName))
return kryo.readObject(stream, java.util.ArrayList)
            except Exception, ex:
                raise StorageOperationException("Deserialization failed: %s" % ex)
finally:
if stream is not None:
try:
stream.close()
except:
pass
def store_items(self, items, storage_key):
filePath, fullFileName = self.get_store_file_name(storage_key)
if not os.path.exists(filePath):
os.makedirs(filePath)
logger.debug(" -- Saving items to file '%s'" % fullFileName)
self.serialize(items, fullFileName)
def read_items(self, storage_key):
_, fullFileName = self.get_store_file_name(storage_key)
logger.debug(" -- Reading items from file '%s'" % fullFileName)
if os.path.isfile(fullFileName):
return self.deserialize(fullFileName)
else:
raise StorageFileDoesNotExist()
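# Usage sketch for ResultStorage (key and filter values are illustrative):
#   storage = ResultStorage(fetcher)
#   key = storage.get_storage_key(final_filter, page_index=0, page_size=500)
#   storage.store_items(items, key)   # Kryo-serializes under RECORD_FOLDER_PATH
#   items = storage.read_items(key)   # raises StorageFileDoesNotExist if absent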
class BaseNmsFetcher:
def __init__(self, api, endpoint_proto, endpoint_host, endpoint_port,
auth_username, auth_password, default_filter=None):
self.api = api
self.endpoint_proto = endpoint_proto
self.endpoint_host = endpoint_host
self.endpoint_port = endpoint_port
self.auth_username = auth_username
self.auth_password = auth_password
self.default_filter = default_filter
self._connection_host = endpoint_host
try:
ip_addr.IPv6Address(endpoint_host)
self._connection_host = "[%s]" % endpoint_host
except:
pass
self._storage = ResultStorage(self)
def _create_stub(self):
service = self.stub_class(java.net.URL('%s://%s:%d%s' % (self.endpoint_proto, self._connection_host, int(self.endpoint_port), self.endpoint_path)))
service.setHandlerResolver(SoapHandlerResolver())
port = self._get_port(service)
        port.getRequestContext().put(BindingProvider.USERNAME_PROPERTY, self.auth_username)
        port.getRequestContext().put(BindingProvider.PASSWORD_PROPERTY, self.auth_password)
return port
def _get_stub(self):
return self._create_stub()
def _get_port(self, service):
        raise NotImplementedError("_get_port")
def __getitem__(self, index):
if isinstance(index, types.TupleType):
page_index, page_size = index
else:
page_index, page_size = index, self.page_size
result = self.fetch(page_index=page_index, page_size=page_size)
if result is None:
raise IndexError()
return result
def __repr__(self):
return '%s(endpoint_proto = %r, endpoint_host = %r, endpoint_port = %r, auth_username = %r, auth_password = %r, default_filter = %r)' % (self.__class__.__name__, self.endpoint_proto, self.endpoint_host, self.endpoint_port, self.auth_username, self.auth_password, self.default_filter)
def __str__(self):
return '<%s endpoint_proto=%r endpoint_host=%r endpoint_port=%r auth_username=%r auth_password=%r default_filter=%r>' % (self.__class__.__name__, self.endpoint_proto, self.endpoint_host, self.endpoint_port, self.auth_username, self.auth_password, self.default_filter)
@retry_on((java.net.SocketException, WebServiceException), FETCH_RETRY_COUNT, with_delay=FETCH_RETRY_DELAY, rethrow_exception=False, reload=True)
@ensure_delay(FETCH_DELAY)
@log_self_calls()
def fetch(self, page_index, page_size=None, subfilter=None):
item_class = self.collection_class.item_class
includes_custom_attrs = item_class.includes_custom_attrs
configuration = self.api.configuration
if page_size is None:
page_size = self.page_size
final_filter = FF.EMPTY
if self.default_filter is not None:
final_filter &= self.default_filter
if page_size != NO_PAGE_SIZE: #explicitly unlimited
final_filter &= FF.PAGER(page_index, page_size)
if subfilter is not None:
final_filter &= subfilter
if includes_custom_attrs and configuration.requestCustomAttributes:
final_filter &= FF.CUSTOM_ATTRS
result_items = []
storage_key = None
if _STORE_CONFIG is not None:
storage_key = self._storage.get_storage_key(final_filter, page_index, page_size)
items = None
items_updated = False
if _STORE_CONFIG and _STORE_CONFIG.read():
try:
items = self._storage.read_items(storage_key)
except (StorageFileDoesNotExist, StorageOperationException), ex:
logger.debug("Failed to read from storage or no previous results exist")
if _STORE_CONFIG.fallback_to_live():
items = self._get_stub_items(final_filter.nr())
items_updated = True
else:
raise ex
else:
items = self._get_stub_items(final_filter.nr())
items_updated = True
if _STORE_CONFIG and _STORE_CONFIG.write() and items_updated:
self._storage.store_items(items, storage_key)
if not items:
return None
for item in items:
if self._is_valid_item(item):
item_entity = item_class(item, self)
result_items.append(item_entity)
return self.collection_class(self, result_items)
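    # The record/replay branching above in brief (pseudocode mirroring fetch):
    #   if _STORE_CONFIG.read():  use stored items, falling back to a live
    #                             request only if fallback_to_live() allows it
    #   else:                     request items live via _get_stub_items()
    #   if _STORE_CONFIG.write() and the items came from a live request:
    #       persist them for later replay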
def all(self):
result = []
for page in self:
for item in page:
result.append(item)
            # a page shorter than page_size must be the last one; stop before
            # issuing an extra (empty) fetch
            if len(page) < self.page_size:
                break
return self.collection_class(self, result)
def filtered(self, subfilter):
cls = self.__class__
_filter = self.default_filter
if subfilter is not None:
if _filter is not None:
_filter &= subfilter
else:
_filter = subfilter
return cls(api=self.api, endpoint_proto=self.endpoint_proto,
endpoint_host=self.endpoint_host,
endpoint_port=self.endpoint_port,
auth_username=self.auth_username,
auth_password=self.auth_password,
default_filter=_filter)
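    # Example (sketch; the field name and value are illustrative only):
    #   node_fetcher = api.get_fetcher(NmsServices.Node)
    #   subset = node_fetcher.filtered(FF.CONDITION('name', '==', 'router1')).all()
    # The derived fetcher AND-combines the subfilter with any default filter.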
class NmsNodeFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.node.NodeBeanService
collection_class = NmsNodeCollection
endpoint_path = '/NodeBeanService/NodeBean'
page_size = DEFAULT_PAGESIZE_NODE
def _get_stub_items(self, subfilter):
return self._get_stub().getNodes(subfilter).getItem()
def _get_port(self, service):
return service.getNodeBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getName())
)
class NmsInterfaceFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.iface.InterfaceBeanService
endpoint_path = '/InterfaceBeanService/InterfaceBean'
collection_class = NmsInterfaceCollection
page_size = DEFAULT_PAGESIZE_INTERFACE
def _get_stub_items(self, subfilter):
return self._get_stub().getInterfaces(subfilter).getItem()
def _get_port(self, service):
return service.getInterfaceBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId())
)
class NmsIPAddressFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.ipaddress.IPAddressBeanService
endpoint_path = '/IPAddressBeanService/IPAddressBean'
collection_class = NmsIPAddressCollection
page_size = DEFAULT_PAGESIZE_IPADDRESS
def _get_stub_items(self, subfilter):
return self._get_stub().getIPAddresses(subfilter).getItem()
def _get_port(self, service):
return service.getIPAddressBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getHostedOnId()) and
not_empty(item.getIpValue())
)
class NmsIPSubnetFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.ipsubnet.IPSubnetBeanService
endpoint_path = '/IPSubnetBeanService/IPSubnetBean'
collection_class = NmsIPSubnetCollection
page_size = DEFAULT_PAGESIZE_IPSUBNET
def _get_stub_items(self, subfilter):
return self._get_stub().getIPSubnets(subfilter).getItem()
def _get_port(self, service):
return service.getIPSubnetBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getPrefix()) and
not_empty(item.getPrefixLength()) and
(0 <= item.getPrefixLength() <= 32)
)
class NmsL2ConnectionFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.l2connection.L2ConnectionBeanService
endpoint_path = '/L2ConnectionBeanService/L2ConnectionBean'
collection_class = NmsL2ConnectionCollection
page_size = DEFAULT_PAGESIZE_L2CONNECTION
def _get_stub_items(self, subfilter):
return self._get_stub().getL2Connections(subfilter).getItem()
def _get_port(self, service):
return service.getL2ConnectionBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getName())
)
class NmsL2NodeFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.l2connection.L2ConnectionBeanService
endpoint_path = '/L2ConnectionBeanService/L2ConnectionBean'
collection_class = NmsL2NodeCollection
page_size = DEFAULT_PAGESIZE_L2CONNECTION
def _get_stub_items(self, subfilter):
return self._get_stub().getL2Connections(subfilter).getItem()
def _get_port(self, service):
return service.getL2ConnectionBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getName())
)
class NmsVLANFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.vlan.VLANBeanService
endpoint_path = '/VLANBeanService/VLANBean'
collection_class = NmsVLANCollection
page_size = DEFAULT_PAGESIZE_VLAN
def _get_stub_items(self, subfilter):
return self._get_stub().getVLANs(subfilter).getItem()
def _get_port(self, service):
return service.getVLANBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getVlanId())
)
class NmsPortFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.phys.PortBeanService
endpoint_path = '/NmsSdkService/PortBean'
collection_class = NmsPortCollection
page_size = DEFAULT_PAGESIZE_PORT
def _get_stub_items(self, subfilter):
return self._get_stub().getPorts(subfilter).getItem()
def _get_port(self, service):
return service.getPortBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getName()) and
not_empty(item.getIndex()) and
not_empty(item.getHostedOnId())
)
class NmsCardFetcher(BaseNmsFetcher):
stub_class = com.hp.ov.nms.sdk.phys.CardBeanService
endpoint_path = '/NmsSdkService/CardBean'
collection_class = NmsCardCollection
page_size = DEFAULT_PAGESIZE_CARD
def _get_stub_items(self, subfilter):
return self._get_stub().getCards(subfilter).getItem()
def _get_port(self, service):
return service.getCardBeanPort()
def _is_valid_item(self, item):
return (
not_empty(item.getId()) and
not_empty(item.getHostedOnId()) and (
not_empty(item.getSerialNumber()) or
not_empty(item.getEntityPhysicalIndex())
)
)
class NmsAPI:
SERVICE_TO_FETCHER = {
NmsServices.Node: NmsNodeFetcher,
NmsServices.Interface: NmsInterfaceFetcher,
NmsServices.IPAddress: NmsIPAddressFetcher,
NmsServices.IPSubnet: NmsIPSubnetFetcher,
NmsServices.L2Connection: NmsL2ConnectionFetcher,
NmsServices.L2Node: NmsL2NodeFetcher,
NmsServices.VLAN: NmsVLANFetcher,
NmsServices.Port: NmsPortFetcher,
NmsServices.Card: NmsCardFetcher,
}
def __init__(self, endpoint_proto, endpoint_host, endpoint_port,
auth_username, auth_password, ucmdb_api, configuration):
self.endpoint_proto = endpoint_proto
self.endpoint_host = endpoint_host
self.endpoint_port = endpoint_port
self.auth_username = auth_username
self.auth_password = auth_password
self.ucmdb_api = ucmdb_api
self.configuration = configuration
def __repr__(self):
return '%s(endpoint_proto = %r, endpoint_host = %r, endpoint_port = %r, auth_username = %r, auth_password = %r)' % (self.__class__.__name__, self.endpoint_proto, self.endpoint_host, self.endpoint_port, self.auth_username, self.auth_password)
def __str__(self):
return '<%s endpoint_proto=%r endpoint_host=%r endpoint_port=%r auth_username=%r auth_password=%r at 0x%.8X>' % (self.__class__.__name__, self.endpoint_proto, self.endpoint_host, self.endpoint_port, self.auth_username, self.auth_password, id(self))
def __getitem__(self, service):
return self.get_fetcher(service)
def get_fetcher(self, service):
return self.SERVICE_TO_FETCHER[service](self, self.endpoint_proto,
self.endpoint_host,
self.endpoint_port,
self.auth_username,
self.auth_password)
def get_related_topology_nodes(self, page_size=None, sub_filter=None):
return NmsNodeRelatedTopologyPager(self, page_size, sub_filter)
def get_related_topology_l2_connections(self, l2_connections=None, page_size=None):
if l2_connections:
return NmsL2OfflineConnectionRelatedTopologyPager(self, l2_connections, page_size)
return NmsL2ConnectionRelatedTopologyPager(self, page_size)
def get_related_topology_l2_node(self, page_size=None):
return NmsL2NodeRelatedTopologyPager(self, page_size=page_size)
def get_related_topology_vlans(self, page_size=None):
return NmsVLANRelatedTopologyPager(self, page_size=page_size)
def get_nodes(self, page_size=None, sub_filter=None):
''' Get nodes topology, split by pages '''
return NmsNodeTopologyPager(self, page_size, sub_filter)
def get_interfaces(self, page_size=None, sub_filter=None):
''' Get interfaces, split by pages '''
return NmsInterfaceTopologyPager(self, page_size, sub_filter)
def get_ip_adresses(self, page_size=None, sub_filter=None):
        ''' Get IP addresses, split by pages '''
return NmsIpAddressTopologyPager(self, page_size, sub_filter)
def get_ip_subnets(self, page_size=None, sub_filter=None):
''' Get subnets, split by pages '''
return NmsIpSubnetTopologyPager(self, page_size, sub_filter)
def get_l2_connections(self, page_size=None, sub_filter=None):
''' Get l2 connections, split by pages '''
return NmsL2ConnectionTopologyPager(self, page_size, sub_filter)
def get_vlans(self, page_size=None, sub_filter=None):
''' Get vlans, split by pages '''
return NmsVlanTopologyPager(self, page_size, sub_filter)
def get_ports(self, page_size=None, sub_filter=None):
''' Get ports, split by pages '''
return NmsPortTopologyPager(self, page_size, sub_filter)
def get_cards(self, page_size=None, sub_filter=None):
        ''' Get cards, split by pages '''
return NmsCardTopologyPager(self, page_size, sub_filter)
def get_empty_collection(self, service):
        ''' -> BaseNmsCollection '''
fetcher = self.get_fetcher(service)
if fetcher is not None:
return fetcher.collection_class(fetcher, [])
def get_interfaces_non_paged(self, sub_filter=None):
''' -> NmsInterfaceCollection
Get interfaces with no pages '''
fetcher = self.get_fetcher(NmsServices.Interface)
collection = fetcher.fetch(0, page_size=NO_PAGE_SIZE, subfilter=sub_filter)
return collection
def getStringSizeInBytes(str_):
return len(String(str(str_)).getBytes('ASCII'))
def property_equals_condition(property_name, value):
condition_filter = FF.CONDITION(property_name, '==', value)
return condition_filter
def name_equals_condition(name_value):
return property_equals_condition('name', name_value)
def hosted_on_id_condition(id_value):
return property_equals_condition('hostedOnId', id_value)
FILTER_STR_LENGTH_LIMIT = 4 * 1024
def conditions_filter_generator_by_max_str(values, condition_fn, max_filter_str_length = FILTER_STR_LENGTH_LIMIT):
'''
iterable(values), func(value -> condition) -> filter
    Generator producing subfilters whose string form stays below
    max_filter_str_length, OR-combining the conditions built by
    condition_fn from each value.
'''
if not values:
return
current_subfilter = FF.EMPTY
current_length = getStringSizeInBytes(str(current_subfilter))
for value in values:
condition = condition_fn(value)
condition_length = getStringSizeInBytes(str(condition))
if current_length + condition_length < max_filter_str_length:
# append condition
current_subfilter |= condition
current_length = getStringSizeInBytes(str(current_subfilter))
else:
# return
yield current_subfilter
current_subfilter = condition
current_length = condition_length
yield current_subfilter
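# Example (sketch): with a limit that only fits two conditions per filter,
#   gen = conditions_filter_generator_by_max_str(['a', 'b', 'c'],
#                                                name_equals_condition)
# yields (name == 'a') | (name == 'b') first, then (name == 'c').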
FILTER_MAX_COUNT = 25
def conditions_filter_generator_by_count(values, condition_fn, max_count = FILTER_MAX_COUNT):
'''
iterable(values), func(value -> condition) -> filter
    Generator producing subfilters holding at most max_count conditions,
    OR-combining the conditions built by condition_fn from each value.
'''
if not values:
return
current_subfilter = FF.EMPTY
current_count = 0
for value in values:
condition = condition_fn(value)
if current_count < max_count:
# append condition
current_subfilter |= condition
current_count += 1
else:
# return
yield current_subfilter
current_subfilter = condition
current_count = 1
yield current_subfilter
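# Example (sketch): conditions_filter_generator_by_count(['1', '2', '3'],
# hosted_on_id_condition, max_count=2) yields two subfilters: the first
# OR-combines the conditions for '1' and '2', the second holds '3' alone.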
class BaseNmsTopology:
def __init__(self, collection):
self.collection = collection
def get_collection(self):
return self.collection
class NmsNodeRelatedTopology(BaseNmsTopology):
entry_service = NmsServices.Node
entry_collection_class = NmsNodeCollection
def __init__(self, nodes):
self.nodes = nodes
self.interfaces = self.nodes._get_rt_interface()
self.ip_addresses = self.nodes._get_rt_ip_address()
api = nodes.api
self.ports = api.get_empty_collection(NmsServices.Port)
self.cards = api.get_empty_collection(NmsServices.Card)
if api.configuration.discoverPhysicalPorts:
self.ports = self.nodes._get_rt_port()
self.cards = self.nodes._get_rt_card()
self.ip_subnets = self.ip_addresses._get_rt_ip_subnet()
class NmsL2ConnectionRelatedTopology(BaseNmsTopology):
entry_service = NmsServices.L2Connection
entry_collection_class = NmsL2ConnectionCollection
def __init__(self, l2_connections):
self.l2_connections = l2_connections
self.interfaces = self.l2_connections._get_rt_interface()
if self.interfaces:
self.nodes = self.interfaces._get_rt_node()
# need to get related interfaces of the nodes to be able to report
# nodes of layer 2 connection which have equal interface macs
self.interfaces = self.interfaces.merge(self.nodes._get_rt_interface())
self.ip_addresses = self.nodes._get_rt_ip_address()
else:
self.nodes = None
self.ip_addresses = None
class NmsL2NodeRelatedTopology(BaseNmsTopology):
entry_service = NmsServices.L2Node
entry_collection_class = NmsL2NodeCollection
def __init__(self, l2_connections):
self.l2_connections = l2_connections
class NmsVLANRelatedTopology(BaseNmsTopology):
entry_service = NmsServices.VLAN
entry_collection_class = NmsVLANCollection
def __init__(self, vlans):
self.vlans = vlans
self.ports = self.vlans._get_rt_port()
if self.ports:
self.nodes = self.ports._get_rt_node()
self.ports = self.ports.merge(self.nodes._get_rt_port())
self.interfaces = self.nodes._get_rt_interface()
self.cards = self.nodes._get_rt_card()
self.ip_addresses = self.nodes._get_rt_ip_address()
else:
self.nodes = None
self.interfaces = None
self.cards = None
self.ip_addresses = None
class NmsNodesTopology(BaseNmsTopology):
entry_service = NmsServices.Node
entry_collection_class = NmsNodeCollection
class NmsInterfacesTopology(BaseNmsTopology):
entry_service = NmsServices.Interface
entry_collection_class = NmsInterfaceCollection
class NmsIpAddressTopology(BaseNmsTopology):
entry_service = NmsServices.IPAddress
entry_collection_class = NmsIPAddressCollection
class NmsIpSubnetTopology(BaseNmsTopology):
entry_service = NmsServices.IPSubnet
entry_collection_class = NmsIPSubnetCollection
class NmsL2ConnectionTopology(BaseNmsTopology):
entry_service = NmsServices.L2Connection
entry_collection_class = NmsL2ConnectionCollection
class NmsVlanTopology(BaseNmsTopology):
entry_service = NmsServices.VLAN
entry_collection_class = NmsVLANCollection
class NmsPortTopology(BaseNmsTopology):
entry_service = NmsServices.Port
entry_collection_class = NmsPortCollection
class NmsCardTopology(BaseNmsTopology):
entry_service = NmsServices.Card
entry_collection_class = NmsCardCollection
class NmsFullTopology(BaseNmsTopology):
def __init__(self, nodes=None, interfaces=None, ip_addresses=None, ip_subnets=None, l2_connections=None,
vlans=None, ports=None, cards=None):
self.nodes = nodes
self.interfaces = interfaces
self.ip_addresses = ip_addresses
self.ip_subnets = ip_subnets
self.l2_connections = l2_connections
self.vlans = vlans
self.ports = ports
self.cards = cards
class BaseNmsRelatedTopologyPager:
def __init__(self, api, page_size=None, sub_filter=None):
self.api = api
self.page_size = page_size
self.sub_filter = sub_filter
def __getitem__(self, index):
if isinstance(index, types.TupleType):
page_index, page_size = index
else:
page_index, page_size = index, None
result = self.fetch(page_index, page_size, self.sub_filter)
if result is None:
raise IndexError()
return result
def fetch(self, page_index, page_size=None, subfilter=None):
        fetcher = self.api.get_fetcher(self.related_topology_class.entry_service)
        if not fetcher:
            return None
        if page_size is None:
            page_size = self.page_size
        if page_size is None:
            page_size = fetcher.page_size
        collection = fetcher.fetch(page_index=page_index,
                                   page_size=page_size,
                                   subfilter=subfilter)
        if collection is None:
            return None
        return self.related_topology_class(collection)
class NmsNodeRelatedTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsNodeRelatedTopology
class NmsL2ConnectionRelatedTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsL2ConnectionRelatedTopology
class NmsL2OfflineConnectionRelatedTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsL2ConnectionRelatedTopology
def __init__(self, api, l2_connections, page_size=None, sub_filter=None):
BaseNmsRelatedTopologyPager.__init__(self, api, page_size, sub_filter)
self.l2_connections = l2_connections
def fetch(self, page_index, page_size=None, subfilter=None):
fetcher = self.api.get_fetcher(self.related_topology_class.entry_service)
if page_size is None:
page_size = self.page_size
if page_size is None:
page_size = fetcher.page_size
collection_class = self.related_topology_class.entry_collection_class
start_index = page_index * page_size
end_index = start_index + page_size
l2_connection_chunk = self.l2_connections[start_index:end_index]
if l2_connection_chunk:
collection = collection_class(fetcher,
l2_connection_chunk)
return self.related_topology_class(collection)
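    # Unlike the other pagers, this fetch() never calls the web service: page N
    # is simply l2_connections[N * page_size:(N + 1) * page_size], re-wrapped in
    # a collection so callers see the same interface as the live pagers.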
class NmsVLANRelatedTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsVLANRelatedTopology
class NmsL2NodeRelatedTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsL2NodeRelatedTopology
class NmsNodeTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsNodesTopology
class NmsInterfaceTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsInterfacesTopology
class NmsIpAddressTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsIpAddressTopology
class NmsIpSubnetTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsIpSubnetTopology
class NmsL2ConnectionTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsL2ConnectionTopology
class NmsVlanTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsVlanTopology
class NmsPortTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsPortTopology
class NmsCardTopologyPager(BaseNmsRelatedTopologyPager):
related_topology_class = NmsCardTopology
# ===== file: /mcl1_input/L54/54-23_MD_NVT_rerun/set_4.py (repo: AnguseZhang/Input_TI) =====
import os
dir = '/mnt/scratch/songlin3/run/mcl1/L54/MD_NVT_rerun/ti_one-step/54_23/'
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_4.in'
temp_pbs = filesdir + 'temp_4.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
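# One directory per TI lambda window: each window gets a production input file
# and a PBS script rendered from the templates above by substituting 'XXX'.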
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#prodin
prodin = workdir + "%6.5f_prod_4.in" %(j)
os.system("cp %s %s" %(temp_prodin, prodin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
#PBS
pbs = workdir + "%6.5f_4.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
# ===== file: /muddery/typeclasses/players.py (repo: externIE/muddery, license: BSD-3-Clause) =====
"""
This is adapted from evennia/evennia/players/players.py.
The license of Evennia can be found in evennia/LICENSE.txt.
Player
The Player represents the game "account" and each login has only one
Player object. A Player is what chats on default channels but has no
other in-game-world existance. Rather the Player puppets Objects (such
as Characters) in order to actually participate in the game world.
Guest
Guest players are simple low-level accounts that are created/deleted
    on the fly and allow users to test the game without the commitment
of a full registration. Guest accounts are deactivated by default; to
activate them, add the following line to your settings file:
GUEST_ENABLED = True
You will also need to modify the connection screen to reflect the
possibility to connect with a guest account. The setting file accepts
several more options for customizing the Guest account system.
"""
import json
from evennia.utils.utils import make_iter, to_str
# Assumed location of the log_tracemsg helper used below; adjust this import
# if the logger wrapper lives elsewhere in this codebase.
from muddery.utils import logger
from evennia.players.players import DefaultPlayer, DefaultGuest
class MudderyPlayer(DefaultPlayer):
"""
This class describes the actual OOC player (i.e. the user connecting
to the MUD). It does NOT have visual appearance in the game world (that
is handled by the character which is connected to this). Comm channels
are attended/joined using this object.
It can be useful e.g. for storing configuration options for your game, but
should generally not hold any character-related info (that's best handled
on the character level).
Can be set using BASE_PLAYER_TYPECLASS.
"""
def msg(self, text=None, from_obj=None, session=None, **kwargs):
"""
Evennia -> User
This is the main route for sending data back to the user from the
server.
Args:
text (str, optional): text data to send
from_obj (Object or Player, optional): Object sending. If given,
its at_msg_send() hook will be called.
session (Session or list, optional): Session object or a list of
Sessions to receive this send. If given, overrules the
default send behavior for the current
MULTISESSION_MODE.
Notes:
All other keywords are passed on to the protocol.
"""
raw = kwargs.get("raw", False)
if not raw:
try:
text = json.dumps(text)
            except Exception, e:
                text = json.dumps({"err": "An error occurred while outputting messages."})
                logger.log_tracemsg("json.dumps failed: %s" % e)
else:
text = to_str(text, force_string=True) if text else ""
# set raw=True
        kwargs["raw"] = True
if from_obj:
# call hook
try:
from_obj.at_msg_send(text=text, to_obj=self, **kwargs)
except Exception:
pass
# session relay
sessions = make_iter(session) if session else self.sessions.all()
for session in sessions:
session.msg(text=text, **kwargs)
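    # Flow sketch for msg(): outgoing text is JSON-encoded exactly once here;
    # forwarding raw=True in kwargs tells downstream msg() implementations not
    # to encode the payload a second time.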
class MudderyGuest(DefaultGuest):
"""
This class is used for guest logins. Unlike Players, Guests and their
characters are deleted after disconnection.
"""
pass
# ===== file: /2020/01_1/solution.py (repo: budavariam/advent_of_code, license: MIT) =====
""" Advent of code 2020 day 1/1 """
from os import path
def solution(data):
""" Solution to the problem """
lines = data.split("\n")
precalculate = dict()
for line_value_str in lines:
precalculate[2020 - int(line_value_str)] = True
for line_value_str in lines:
current_value = int(line_value_str)
inverse = 2020 - current_value
        if precalculate.get(current_value):
return current_value * inverse
return None
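# The dictionary built above is the usual two-sum complement trick: it maps
# 2020 - x to True for every input x, so any value later found as a key has a
# partner in the input, and the answer is value * (2020 - value).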
if __name__ == "__main__":
    with open(path.join(path.dirname(__file__), 'input.txt'), 'r') as input_file:
print(solution(input_file.read()))
# ===== file: /database/mysql_connector.py (repo: webbpinner/openrvdas, license: MIT and others) =====
#!/usr/bin/env python3
"""Tables:
data: pk timestamp field_name field_value source_record
We don't know what type each value will have, so have a column for
int, float, str and bool and leave all but the appropriate value type
NULL. Docs claim that NULL values take no space, so...
Still so many ways we could make this more space efficient, most
obviously by partitioning field_name (and even timestamp?) into
foreign keys.
field_name - could store this in a separate table so that it's only
a foreign key in the data table. Something like:
fields: id field_name field_type
source_record - an id indexing a table where raw source records are
stored, so that we can re-parse and recreate whatever data we want
if needed.
Current implementation is simple and inefficient in both computation
and storage.
TODO: Allow wildcarding field selection, so client can specify 'S330*,Knud*'
"""
import logging
import sys
from os.path import dirname, realpath
sys.path.append(dirname(dirname(realpath(__file__))))
from logger.utils.das_record import DASRecord # noqa: E402
try:
import mysql.connector
MYSQL_ENABLED = True
except ImportError:
MYSQL_ENABLED = False
################################################################################
class MySQLConnector:
# Name of table in which we will store mappings from record field
# names to the tnames of the tables containing those fields.
DATA_TABLE = 'data'
FIELD_TABLE = 'fields'
SOURCE_TABLE = 'source'
def __init__(self, database, host, user, password,
tail=False, save_source=True):
"""Interface to MySQLConnector, to be imported by, e.g. DatabaseWriter."""
if not MYSQL_ENABLED:
logging.warning('MySQL not found, so MySQL functionality not available.')
return
self.connection = mysql.connector.connect(database=database, host=host,
user=user, password=password,
auth_plugin='mysql_native_password')
self.save_source = save_source
# What's the next id we're supposed to read? Or if we've been
# reading by timestamp, what's the last timestamp we've seen?
self.next_id = 1
self.last_timestamp = 0
self.exec_sql_command('set autocommit = 1')
# Create tables if they don't exist yet
if not self.table_exists(self.SOURCE_TABLE):
table_cmd = 'CREATE TABLE %s (id INT PRIMARY KEY AUTO_INCREMENT, ' \
'record TEXT)' % self.SOURCE_TABLE
logging.info('Creating table with command: %s', table_cmd)
self.exec_sql_command(table_cmd)
if not self.table_exists(self.DATA_TABLE):
table_cmd = ['CREATE TABLE %s ' % self.DATA_TABLE,
'(',
'id INT PRIMARY KEY AUTO_INCREMENT,',
'timestamp DOUBLE,',
'field_name VARCHAR(255),',
'int_value INT,',
'float_value DOUBLE,',
'str_value TEXT,',
'bool_value INT,',
'source INT,',
'INDEX (timestamp),',
'FOREIGN KEY (source) REFERENCES %s(id)'
% self.SOURCE_TABLE,
')'
]
logging.info('Creating table with command: %s', ' '.join(table_cmd))
self.exec_sql_command(' '.join(table_cmd))
# Once tables are initialized, seek to end if tail is True
if tail:
self.seek(offset=0, origin='end')
############################
def exec_sql_command(self, command):
cursor = self.connection.cursor()
try:
cursor.execute(command)
self.connection.commit()
cursor.close()
except mysql.connector.errors.Error as e:
logging.error('Executing command: "%s", encountered error "%s"',
command, str(e))
############################
def table_exists(self, table_name):
"""Does the specified table exist in the database?"""
cursor = self.connection.cursor()
cursor.execute('SHOW TABLES LIKE "%s"' % table_name)
if cursor.fetchone():
exists = True
else:
exists = False
cursor.close()
return exists
############################
def write_record(self, record):
"""Write record to table."""
# First, check that we've got something we can work with
if not record:
return
    if not isinstance(record, DASRecord):
logging.error('write_record() received non-DASRecord as input. '
'Type: %s', type(record))
return
# If we're saving source records, we have to do a little
# legerdemain: after we've saved the record, we need to retrieve
# the id of the record we've just saved so that we can attach it
# to the data values we're about to save.
if self.save_source:
write_cmd = 'insert into `%s` (record) values (\'%s\')' % \
(self.SOURCE_TABLE, record.as_json())
logging.debug('Inserting source into table with command: %s', write_cmd)
self.exec_sql_command(write_cmd)
# Get the id of the saved source record. Note: documentation
# *claims* that this is kept on a per-client basis, so it's safe
# even if another client does an intervening write.
      query = 'select last_insert_id()'
      cursor = self.connection.cursor()
      cursor.execute(query)
      source_id = next(cursor)[0]
      cursor.close()
    else:
      source_id = None
if not record.fields:
logging.info('DASRecord has no parsed fields. Skipping record.')
return
# Write one row for each field-value pair. Columns are:
# timestamp
# field_name
# int_value \
# float_value, \ Only one of these fields will be non-NULL,
# str_value / depending on the type of the value.
# bool_value /
timestamp = record.timestamp
values = []
for field_name, value in record.fields.items():
value_array = ['%f' % timestamp, '"%s"' % field_name,
'NULL', 'NULL', 'NULL', 'NULL']
if type(value) is int:
value_array[2] = '%d' % value
elif type(value) is float:
value_array[3] = '%f' % value
elif type(value) is str:
value_array[4] = '"%s"' % value
elif type(value) is bool:
        value_array[5] = '%d' % (1 if value else 0)
elif value is None:
value_array[4] = '""'
else:
logging.error('Unknown record value type (%s) for %s: %s',
type(value), field_name, value)
continue
# If we've saved this field's source record, append source's
# foreign key to row so we can look it up.
if source_id:
value_array.append('%d' % source_id)
# Join entries into a string, append to list of other values
# we've already saved.
value_str = '(%s)' % ','.join(value_array)
values.append(value_str)
# Build the SQL query
fields = ['timestamp',
'field_name',
'int_value',
'float_value',
'str_value',
'bool_value']
if source_id:
fields.append('source')
    if not values:
      logging.warning('No values found in record %s', str(record))
      return
write_cmd = 'insert into `%s` (%s) values %s' % \
(self.DATA_TABLE, ','.join(fields), ','.join(values))
logging.debug('Inserting record into table with command: %s', write_cmd)
self.exec_sql_command(write_cmd)
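  # Example (sketch): a DASRecord with fields {'Pitch': 1.5, 'Flag': True}
  # produces two rows in the data table -- one with float_value set, one with
  # bool_value set -- sharing the record's timestamp and, when save_source is
  # enabled, the same source foreign key.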
############################
def read(self, field_list=None, start=None, num_records=1):
"""Read the next record from table. If start is specified, reset read
to start at that position."""
if start is None:
start = self.next_id
condition = 'id >= %d' % start
# If they haven't given us any fields, retrieve everything
if field_list:
field_conditions = ['field_name="%s"' % f for f in field_list.split(',')]
condition += ' and (%s)' % ' or '.join(field_conditions)
condition += ' order by id'
if num_records is not None:
condition += ' limit %d' % num_records
query = 'select * from `%s` where %s' % (self.DATA_TABLE, condition)
logging.debug('read query: %s', query)
return self._process_query(query)
############################
def read_time(self, field_list=None, start_time=None, stop_time=None):
"""Read the next records from table based on timestamps. If start_time
is None, use the timestamp of the last read record. If stop_time is None,
read all records since then."""
if start_time is None:
condition = 'timestamp > %f' % self.last_timestamp
else:
condition = 'timestamp > %f' % start_time
if stop_time is not None:
condition = '(%s and timestamp < %f)' % (condition, stop_time)
# If they haven't given us any fields, retrieve everything
if field_list:
field_conditions = ['field_name="%s"' % f for f in field_list]
condition += ' and (%s)' % ' or '.join(field_conditions)
condition += ' order by timestamp'
query = 'select * from `%s` where %s' % (self.DATA_TABLE, condition)
logging.debug('read query: %s', query)
return self._process_query(query)
############################
def seek(self, offset=0, origin='current'):
"""Behavior is intended to mimic file seek() behavior but with
respect to records: 'offset' means number of records, and origin
is either 'start', 'current' or 'end'."""
num_rows = self._num_rows(self.DATA_TABLE)
if origin == 'current':
self.next_id += offset
elif origin == 'start':
self.next_id = offset + 1
elif origin == 'end':
self.next_id = num_rows + offset + 1
    # clamp to the valid range (num_rows + 1 == positioned just past the end)
    self.next_id = max(1, min(num_rows + 1, self.next_id))
logging.debug('Seek: next position %d', self.next_id)
############################
def _num_rows(self, table_name):
query = 'select count(1) from `%s`' % table_name
cursor = self.connection.cursor()
cursor.execute(query)
    num_rows = next(cursor)[0]
    cursor.close()
    return num_rows
############################
def _process_query(self, query):
cursor = self.connection.cursor()
cursor.execute(query)
results = {}
for values in cursor:
(id, timestamp, field_name,
int_value, float_value, str_value, bool_value,
source) = values
if field_name not in results:
results[field_name] = []
      if int_value is not None:
        val = int_value
      elif float_value is not None:
        val = float_value
      elif str_value is not None:
        val = str_value
      elif bool_value is not None:
        val = bool(bool_value)
      else:
        val = None
results[field_name].append((timestamp, val))
self.next_id = id + 1
self.last_timestamp = timestamp
cursor.close()
return results
############################
def delete_table(self, table_name):
"""Delete a table."""
delete_cmd = 'drop table `%s`' % table_name
logging.info('Dropping table with command: %s', delete_cmd)
self.exec_sql_command(delete_cmd)
############################
def close(self):
"""Close connection."""
self.connection.close()
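# Usage sketch (connection parameters are illustrative):
#   conn = MySQLConnector(database='data', host='localhost',
#                         user='rvdas', password='rvdas')
#   conn.write_record(das_record)              # one row per field/value pair
#   results = conn.read('Pitch,Roll', num_records=10)
#   # -> {'Pitch': [(timestamp, value), ...], 'Roll': [...]}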
# ===== file: /pylib/options.py (repo: chyser/bin) =====
#!/usr/bin/env python
"""
Library: command-line option parsing via mopt() and the OptionClass result object.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
import sys
import glob
class MOptException(Exception): pass
#-------------------------------------------------------------------------------
class OptionClass(object):
#-------------------------------------------------------------------------------
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def __init__(self, usageStr):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
object.__init__(self)
self._usageStr_ = usageStr
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def __contains__(self, x):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
return self[x] is not None
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def __getitem__(self, x):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
return getattr(self, x)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def get(self, attr, default=None, cls=None):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
val = getattr(self, attr)
if val is None:
return default
if cls is not None:
try:
val = cls(val)
except ValueError as ex:
                self.usage(101, "option '%s': invalid value '%s' (%s)" % (attr, val, ex))
return val
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def usage(self, rc, s=''):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
"""
"""
if self._usageStr_ is None:
print('No help provided\n', file=sys.stderr)
sys.exit(rc)
if isinstance(self._usageStr_, (unicode, str)):
print(self._usageStr_ + '\n' + str(s), file=sys.stderr)
sys.exit(rc)
else:
self._usageStr_(rc, s)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def logError(self, __d__, d, v=None):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#import pprint
print('OptionClass.validate():')
print(' ', self.__dict__, '\n')
#pprint.pprint(self.__dict__)
#pprint.pprint(d, '\n')
if v is not None:
print(v, '\n')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def validate(self, d):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
v = set(self.__dict__) - set(d)
if v:
return self.logError(self.__dict__, d, v)
v = set(d) - set(self.__dict__)
if v:
return self.logError(self.__dict__, d, v)
noFirstError = True
for key, val in self.__dict__.items():
if d[key] != val:
if noFirstError:
noFirstError = self.logError(self.__dict__, d)
print(' key:', key, ', d:', d[key], ', __dict__:', val)
if not noFirstError: print()
return noFirstError
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def __str__(self):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
return str(self.__dict__)
#-------------------------------------------------------------------------------
def mopt(cmdLineArgs, oneOnlyFlags, oneOnlyParams, *args, **kwds):
#-------------------------------------------------------------------------------
""" parses cmdLineArgs for options and arguments. options are normally
identified by a leading '-'.
mopt(cmdArgs, oneOnlyFlags, oneOnlyParams, usageStr="", **keyword)
mopt(opt, oneOnlyFlags, oneOnlyParams, usageStr="", **keyword)
mopt(cmdArgs, oneOnlyFlags, oneOnlyParams, multipleFlags, multipleParams, usageStr="", **keywords)
mopt(opt, oneOnlyFlags, oneOnlyParams, multipleFlags, multipleParams, usageStr="", **keywords)
Keyword arguments:
        addHelp        : automatically call usage for -? or --help, default: True
nonOpStopOp : '-'s are ignored after first non-option, default: True
skipUnknownOps : if True, put unknown options into arg list, else call usage with error. default: False
allowMultiChar : if False, '-abcd' means options a, b, c, and d, else it is option 'abcd'. default: False
shortOpMarker : marker when leading char used to identify short options. default: '-'
longOpMarker : marker when leading char used to identify long options. default: '--'
expandWildCards : expand wildcards in arguments (assume they are files). default: True
oneOnlyFlags, oneOnlyParams, multipleFlags and multipleParams are lists of:
- tuples (<short form>, <long form>)
- string
if single char is short form, else long form
        usageStr may be either a string or a function used to display
        a usage message on stderr.
The long form value (or short if short form only) becomes an
attribute of the option class and will be set to None or [] if not
        explicitly set. If an option is listed both as a flag and as a
        param, then it always tries to fill the param with the next command
        line arg unless it is last, in which case it does not generate an
        error (usage call).
Arguments are checked for wildcards and expanded if expandWildCards
        is True. Expansion mimics unix shells (*.py, co??l.py, abc[123].py)
        and can be escaped by quotes ['"].
If mopt() is called multiple times with a prior OptionClass 'opt'
instead of a cmd line, further processing can occur on the remaining
command line options. This usually implies the first call had
skipUnknownOps = True.
Returns tuple (list of arguments, OptionClass instance)
"""
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def cvt(options, opts, sopt, lopt, val=None):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
shortVals = {}; longVals = set()
for opt in options:
            if isinstance(opt, (unicode, str)):
shortV, longV = (opt, None) if sopt and len(opt) == 1 else (None, opt)
else:
shortV, longV = opt
if shortV:
                name = shortV if longV is None else longV
                if not hasattr(opts, name):
                    ## note: each option gets its own distinct list instance
                    setattr(opts, name, None if val is None else [])
                shortVals[sopt + shortV] = name
if longV:
if not hasattr(opts, longV):
                    ## note: each option gets its own distinct list instance
setattr(opts, longV, None if val is None else [])
longVals.add(lopt + longV)
return shortVals, longVals
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def expandWCs(arg):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
""" expand wildcards like "*.py", "c?c?.py", "tmp[1234].py"
won't expand if surrounded ' or "
"""
if arg[0] in set(['"', "'"]) and arg[-1] in set(['"', "'"]):
return [arg[1:-1]]
if '*' in arg or '?' in arg or ('[' in arg and ']' in arg):
return glob.glob(arg)
return [arg]
## allow multiple calls by passing in the prior opt class
if isinstance(cmdLineArgs, OptionClass):
opts = cmdLineArgs
cmdLineArgs = opts._args_
else:
opts = None
## parse out keyword arguments
addHelp = kwds.get('addHelp', True)
nonOpStopOp = kwds.get('nonOpStopOp', True)
skipUnknownOps = kwds.get('skipUnknownOps', False)
allowMultiChar = kwds.get('allowMultiChar', False)
shortOpMarker = kwds.get('shortOpMarker', '-')
longOpMarker = kwds.get('longOpMarker', '--')
expandWildCards = kwds.get('expandWildCards', True)
k = set(['addHelp', 'nonOpStopOp', 'skipUnknownOps', 'allowMultiChar', 'shortOpMarker', 'longOpMarker', 'expandWildCards'])
if set(kwds) - k:
raise MOptException("illegal keyword(s): " + str(set(kwds) - k))
lopt = shortOpMarker if allowMultiChar else longOpMarker
## parse out arguments
la = len(args)
if la == 0 or la == 1:
usageStr = '' if la == 0 else args[0]
assert not isinstance(usageStr, list)
if opts is None:
opts = OptionClass(usageStr)
shortMultipleFlags = longMultipleFlags = shortMultipleParams = longMultipleParams = set()
elif la == 2 or la == 3:
usageStr = '' if la == 2 else args[2]
assert not isinstance(usageStr, list)
if opts is None:
opts = OptionClass(usageStr)
if not (isinstance(args[0], (list, tuple)) and isinstance(args[1], (list, tuple))):
raise TypeError('mopt() takes either 3-4 or 5-6 arguments (not counting keyword only args')
shortMultipleFlags, longMultipleFlags = cvt(args[0], opts, shortOpMarker, lopt, 'list')
shortMultipleParams, longMultipleParams = cvt(args[1], opts, shortOpMarker, lopt, 'list')
else:
raise TypeError('mopt() takes either 3-4 or 5-6 arguments (not counting keyword only args')
shortSingleFlags, longSingleFlags = cvt(oneOnlyFlags, opts, shortOpMarker, lopt)
shortSingleParams, longSingleParams = cvt(oneOnlyParams, opts, shortOpMarker, lopt)
opts._cmdline_ = cmdLineArgs
opts._args_ = oargs = []
if not allowMultiChar:
## convert ['-acbd'] to ['-a', '-c', '-b', '-d']
cargs = []
for arg in cmdLineArgs:
if arg.startswith(lopt):
cargs.append(arg)
elif arg.startswith(shortOpMarker) and len(arg) > 2:
for c in arg[1:]:
cargs.append(shortOpMarker + c)
else:
cargs.append(arg)
else:
cargs = cmdLineArgs
#print('cargs:', cargs)
idx = 0
while idx < len(cargs):
arg = cargs[idx]
if addHelp:
if arg == shortOpMarker + '?' or arg == lopt + 'help':
opts.usage(0)
if arg in shortSingleParams:
idx += 1
try:
val = cargs[idx]
except IndexError:
## allows the last option to also be a flag if no following parameter
if arg not in shortSingleFlags:
                    opts.usage(10001, 'option "%s" is missing its argument' % arg)
val = True
setattr(opts, shortSingleParams[arg], val)
elif arg in longSingleParams:
idx += 1
try:
val = cargs[idx]
except IndexError:
## allows the last option to also be a flag if no following parameter
if arg not in longSingleFlags:
                    opts.usage(10001, 'option "%s" is missing its argument' % arg)
val = True
setattr(opts, arg[len(lopt):], val)
elif arg in shortMultipleParams:
idx += 1
try:
val = cargs[idx]
except IndexError:
## allows the last option to also be a flag if no following parameter
if arg not in shortMultipleFlags:
                    opts.usage(10001, 'option "%s" is missing its argument' % arg)
val = True
getattr(opts, shortMultipleParams[arg]).append(val)
elif arg in longMultipleParams:
idx += 1
try:
val = cargs[idx]
except IndexError:
## allows the last option to also be a flag if no following parameter
if arg not in longMultipleFlags:
                    opts.usage(10001, 'option "%s" is missing its argument' % arg)
val = True
getattr(opts, arg[len(lopt):]).append(val)
elif arg in shortSingleFlags:
setattr(opts, shortSingleFlags[arg], True)
elif arg in longSingleFlags:
setattr(opts, arg[len(lopt):], True)
elif arg in shortMultipleFlags:
getattr(opts, shortMultipleFlags[arg]).append(True)
elif arg in longMultipleFlags:
getattr(opts, arg[len(lopt):]).append(True)
## signal to stop option parsing is an 'empty' long option
elif arg == lopt:
if expandWildCards:
for arg in cargs[idx+1:]:
oargs.extend(expandWCs(arg))
else:
oargs.extend(cargs[idx+1:])
break
## must have found a negative number
        elif len(arg) > 1 and arg[0] == '-' and arg[1] in set('0123456789'):
oargs.append(arg)
## must have found an unknown option
elif arg.startswith(shortOpMarker):
if not skipUnknownOps:
opts.usage(10000, 'Unknown option: "%s"' % arg)
oargs.append(arg)
## must be an argument
else:
if nonOpStopOp:
if expandWildCards:
for arg in cargs[idx:]:
oargs.extend(expandWCs(arg))
else:
oargs.extend(cargs[idx:])
break
if expandWildCards:
oargs.extend(expandWCs(arg))
else:
oargs.append(arg)
idx += 1
return oargs, opts
#-------------------------------------------------------------------------------
def __test__(verbose=False):
#-------------------------------------------------------------------------------
"""
used for automated module testing. see L{tester}
"""
import pylib.tester as tester
class TException(Exception): pass
def usage(rc, s=''):
raise TException(s)
t = ['-caa', '--sara', 'cool', 'filename', '--cool', '-5', '-a', '-a']
args, opts = mopt(t, [('c', 'cat')], ['cool', 'sara'], ['a'], [], "cool")
tester.Assert(opts.get('cool', 0, int) == 0)
tester.Assert(len(opts.a) == 2)
args, opts = mopt(t, [('c', 'cat')], ['cool', 'sara'], ['a'], [], 'this is the prgm', nonOpStopOp=False)
tester.Assert(opts.get('cool', 0, int) == -5)
tester.Assert(len(opts.a) == 4)
args, opts = mopt(t, [('c', 'cat'), 'a'], ['cool', 'sara'], 'this is the prgm', nonOpStopOp=False)
tester.Assert(opts.get('cool', 0, int) == -5)
tester.AssertRecvException(AttributeError, opts.get, ('b', ))
tester.AssertRecvException(TException, mopt, (t, [('c', 'cat')], ['cool', 'sara'], usage))
tester.AssertRecvException(TException, mopt, (['--help'], [('c', 'cat')], ['cool', 'sara'], usage))
tester.AssertRecvException(TException, mopt, (['-?'], [('c', 'cat')], ['cool', 'sara'], usage))
args, opts = mopt(t, [('c', 'cat')], ['cool'], 'this is the prgm', nonOpStopOp=False, skipUnknownOps=True)
tester.Assert(opts.get('cool', 0, int) == -5)
tester.Assert(args == ['-a', '-a', '--sara', 'cool', 'filename', '-a', '-a'])
# test opts as first param
arg, opts = mopt(opts, [], [], ['a'], ['sara'], '', nonOpStopOp=False)
tester.Assert(opts.validate({'a': [True, True, True, True], 'sara': ['cool'], 'cat': True, '_usageStr_': 'this is the prgm', 'cool': u'-5', '_args_': [u'filename'],
'_cmdline_': ['-a', '-a', '--sara', 'cool', 'filename', '-a', '-a']}))
arg, opts = mopt(opts, [], [], ['a'], ['sara'], nonOpStopOp=False)
tester.Assert(opts.validate({'a': [True, True, True, True], 'sara': ['cool'],
'cat': True, '_usageStr_': 'this is the prgm', '_args_': ['filename'], 'cool': '-5', '_cmdline_': ['filename']}))
arg, opts = mopt(opts, [], [], ['a'], ['sara'], '', nonOpStopOp=False)
tester.Assert(opts.validate({'a': [True, True, True, True], 'sara': ['cool'],
'cat': True, '_usageStr_': 'this is the prgm', '_args_': ['filename'], 'cool': '-5', '_cmdline_': ['filename']}))
arg, opts = mopt(opts, [], [], ['a'], ['sara'], nonOpStopOp=False)
tester.Assert(opts.validate({'a': [True, True, True, True], 'sara': ['cool'],
'cat': True, '_usageStr_': 'this is the prgm', '_args_': ['filename'], 'cool': '-5', '_cmdline_': ['filename']}))
arg, opts = mopt(opts, ['c', 'cool'], [])
tester.Assert(opts.validate({'a': [True, True, True, True], 'c': None, 'sara': ['cool'], 'cat': True,
'_usageStr_': u'this is the prgm', '_args_': ['filename'],
'cool': '-5', '_cmdline_': ['filename']}))
tester.Assert('c' not in opts)
tester.Assert('cat' in opts)
tester.Assert(opts['cat'] is True)
t = ['-cool', '-run', '5', 'stuff']
args, opts = mopt(t, ['cool'], ['run'], 'this is the prgm', allowMultiChar=True)
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 'run': '5',
'_args_': ['stuff'], 'cool': True, '_cmdline_': ['-cool', '-run', '5', 'stuff']}))
t = ['/cool', '/run', '5', 'stuff']
args, opts = mopt(t, ['cool'], ['run'], 'this is the prgm', allowMultiChar=True, shortOpMarker='/')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 'run': '5',
'_args_': ['stuff'], 'cool': True, '_cmdline_': ['/cool', '/run', '5', 'stuff']}))
t = ['--sara', 'boo']
args, opts = mopt(t, ['sara'], ['sara'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 'sara': 'boo',
'_args_': [], '_cmdline_': ['--sara', 'boo']}))
args, opts = mopt(t, ['sara'], ['sara'], usage)
tester.AssertRecvException(TException, opts.get, ('sara', '', float))
t = ['--sara']
args, opts = mopt(t, ['sara'], ['sara'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 'sara': True, '_args_': [], '_cmdline_': ['--sara']}))
args, opts = mopt(['*.py'], ['sara'], ['sara'], 'this is the prgm')
tester.AssertRecvException(TypeError, mopt, (['--help'], [('c', 'cat')]))
args, opts = mopt(['*.py'], ['sara'], ['sara'], 'this is the prgm')
tester.Assert('options.py' in args)
args, opts = mopt(['"*.py"'], ['sara'], ['sara'], 'this is the prgm')
tester.Assert('*.py' in args)
args, opts = mopt(['coo[123].py'], ['sara'], ['sara'], 'this is the prgm')
args, opts = mopt(['*.py'], ['sara'], ['sara'], 'this is the prgm', expandWildCards=False)
tester.Assert('*.py' in args)
args, opts = mopt(['*.py'], ['sara'], ['sara'], 'this is the prgm', expandWildCards=False, nonOpStopOp=False)
tester.Assert('*.py' in args)
t = ['-c', '-r', '5', 'stuff']
args, opts = mopt(t, [('c', 'cool')], [('r','run')], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 'run': '5',
'_args_': ['stuff'], 'cool': True, '_cmdline_': ['-c', '-r', '5', 'stuff']}))
t = ['-s']
args, opts = mopt(t, ['s'], ['s'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 's': True, '_args_': [], '_cmdline_': ['-s']}))
t = ['-s', 'boo']
args, opts = mopt(t, ['s'], ['s'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 's': 'boo', '_args_': [], '_cmdline_': ['-s', 'boo']}))
t = ['-s']
args, opts = mopt(t, [], [], ['s'], ['s'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 's': [True], '_args_': [], '_cmdline_': ['-s']}))
t = ['-s', 'boo']
args, opts = mopt(t, [], [], ['s'], ['s'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 's': ['boo'], '_args_': [], '_cmdline_': ['-s', 'boo']}))
t = ['--sara']
args, opts = mopt(t, [], [], ['sara'], ['sara'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 'sara': [True], '_args_': [], '_cmdline_': ['--sara']}))
t = ['--sara', 'boo']
args, opts = mopt(t, [], [], ['sara'], ['sara'], 'this is the prgm')
tester.Assert(opts.validate({'_usageStr_': 'this is the prgm', 'sara': ['boo'], '_args_': [], '_cmdline_': ['--sara', 'boo']}))
t = ['--sara', 'boo', '--', '--cool']
args, opts = mopt(t, [], [], ['sara'], ['sara'])
tester.Assert(opts.validate({'_usageStr_': '', 'sara': ['boo'], '_args_': ['--cool'],
'_cmdline_': ['--sara', 'boo', '--', '--cool']}))
t = ['--sara', 'boo', '--', '--cool']
args, opts = mopt(t, [], [], ['sara'], ['sara'], expandWildCards=False)
tester.Assert(opts.validate({'_usageStr_': '', 'sara': ['boo'], '_args_': ['--cool'],
'_cmdline_': ['--sara', 'boo', '--', '--cool']}))
t = ['--sara', '--', '--cool']
args, opts = mopt(t, [], [], ['sara'], [], expandWildCards=False)
tester.Assert(opts.validate({'_usageStr_': '', 'sara': [True], '_args_': ['--cool'],
'_cmdline_': ['--sara', '--', '--cool']}))
t = ['--sara']
tester.AssertRecvException(TException, mopt, (t, [], [('s', 'sara')], usage))
t = ['-s']
tester.AssertRecvException(TException, mopt, (t, [], [('s', 'sara')], usage))
t = ['--sara']
tester.AssertRecvException(TException, mopt, (t, [], [], [], [('s', 'sara')], usage))
t = ['-s']
tester.AssertRecvException(TException, mopt, (t, [], [], [], [('s', 'sara')], usage))
tester.AssertRecvException(TypeError, mopt, (t, [], [], [('s', 'sara')], usage))
t = ['---sara', '---', '---cool']
args, opts = mopt(t, [], [], ['sara'], [], expandWildCards=False, longOpMarker='---')
tester.Assert(opts.validate({'_usageStr_': '', 'sara': [True], '_args_': ['---cool'],
'_cmdline_': ['---sara', '---', '---cool']}))
t = ['---sara', '-s', '-d']
args, opts = mopt(t, [], [], ['sara'], [], expandWildCards=False, longOpMarker='---', shortOpMarker='---')
tester.Assert(opts.validate({'_usageStr_': '', 'sara': [True], '_args_': ['-s', '-d'],
'_cmdline_': ['---sara', '-s', '-d']}))
return 0
#-------------------------------------------------------------------------------
if __name__ == "__main__":
#-------------------------------------------------------------------------------
import pylib.osscripts as oss
args, opts = mopt(oss.argv[1:], [], [], __test__.__doc__)
print(oss.argv[1:])
print(args)
print('-'*40)
#mopt([], [], [], '', cool=5)
t = ['---cc', 'cl9', '---lib', r'C:\libcpp\lib;C:\Program Files\Microsoft Visual Studio\VC98\lib', '-c', 'msh.cpp']
args, opts = mopt(t, [], [('Z', 'cc'), ('L', 'lib')], [], [], expandWildCards=False, longOpMarker='---', shortOpMarker='---')
print(args)
print(opts)
res = not __test__(verbose=True)
oss.exit(res)
| [
"[email protected]"
]
| |
92c59a1156df87073eec8744b9a4011e1e6fd657 | f07e66293cc41a9fe71fc44f765b432fd7a0997c | /selfdrive/controls/lib/cluster/SConscript | 97eb4300d4da6618962e0430ca534fc43fb0640f | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
]
| permissive | kegman/openpilot | c9ba96a72d905956f02c684e065091e023942883 | b35291c91783657a5fc83abfff012d3bb49dd89f | refs/heads/kegman-ultimate | 2022-05-22T17:07:16.656336 | 2021-10-25T13:35:28 | 2021-10-25T13:35:28 | 229,979,925 | 105 | 212 | MIT | 2022-03-13T05:47:51 | 2019-12-24T17:27:11 | C | UTF-8 | Python | false | false | 185 | Import('env')
fc = env.SharedLibrary("fastcluster", "fastcluster.cpp")
# TODO: how do I gate on test
#env.Program("test", ["test.cpp"], LIBS=[fc])
#valgrind --leak-check=full ./test
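# A possible way to gate the test build (sketch only, not an option this repo
# defines; the '--test' flag name is an assumption):
#
#   AddOption('--test', dest='test', action='store_true', default=False)
#   if GetOption('test'):
#       env.Program("test", ["test.cpp"], LIBS=[fc])
#       # then run the binary under valgrind as above to check for leaks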
| [
"[email protected]"
]
| ||
f68c67977383e7d333b30f0ea34c322410459cb5 | 4fee75068edcf2fb64074e84b150ad7a744e55df | /stock_market.py | 68549a9b79b0e8db27072edb80be07758f86993f | []
| no_license | satee143/zebuapi | 5311e1b7011dc86e311fddc0355f02cc89474205 | 1fa57ffc1802fac2bfa6bee06125a2ea68c0756a | refs/heads/master | 2022-12-07T00:20:42.126203 | 2020-08-26T18:15:28 | 2020-08-26T18:15:28 | 280,687,836 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | from nsetools import Nse
nse = Nse()
q = nse.get_quote('infy')
print(type(q))
# pprint(q)
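# Added illustrative sketch: inspect the returned quote. Key names like
# 'companyName' and 'lastPrice' are assumptions about nsetools' quote dict
# (they vary between versions), hence the defensive .get() lookups.
from pprint import pprint
pprint(q)  # dump the full quote dictionary
print(q.get('companyName'), q.get('lastPrice'))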
| [
"[email protected]"
]
| |
e9529238cbc47916e001451674d12f106fbd8037 | 4dd5dbebc7b7f6dbfcbd6cc662311c91ad6d47e9 | /AtCoder/AGC030A.py | 641bf990c39cb0b7b2f7bfded4df6569c61e550e | []
| no_license | sourjp/programming_contest | aa6925b3317bd3aeb646df93a611af1199bfc7aa | 2a50e1be45441789e81eb49bfdfc0c598d2a534b | refs/heads/master | 2021-04-01T05:08:44.097226 | 2020-08-20T13:01:55 | 2020-08-20T13:01:55 | 248,158,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | a, b, c = map(int, input().split())
# AGC030A "Poisonous Cookies": eat all b tasty antidote cookies, plus at most
# a + b + 1 tasty poisonous ones (each stomachache needs an antidote cookie to
# cure it, and one final poisonous cookie can be eaten last).
# Equivalently: ans = b + min(c, a + b + 1)
if a + b + 1 >= c:
    ans = b + c
else:
    ans = b + (a + b + 1)
print(ans)
| [
"[email protected]"
]
| |
99d58cfffec18317f497271c87e04c101c9d5fbf | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/rdbms/azure-mgmt-rdbms/generated_samples/mysql/server_security_alerts_create_max.py | 702f9e0bb6a8a7da00508fb08c8a992824a0c71c | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 2,227 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.rdbms.mysql import MySQLManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-rdbms
# USAGE
python server_security_alerts_create_max.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = MySQLManagementClient(
credential=DefaultAzureCredential(),
subscription_id="00000000-1111-2222-3333-444444444444",
)
response = client.server_security_alert_policies.begin_create_or_update(
resource_group_name="securityalert-4799",
server_name="securityalert-6440",
security_alert_policy_name="Default",
parameters={
"properties": {
"disabledAlerts": ["Access_Anomaly", "Usage_Anomaly"],
"emailAccountAdmins": True,
"emailAddresses": ["[email protected]"],
"retentionDays": 5,
"state": "Enabled",
"storageAccountAccessKey": "sdlfkjabc+sdlfkjsdlkfsjdfLDKFTERLKFDFKLjsdfksjdflsdkfD2342309432849328476458/3RSD==",
"storageEndpoint": "https://mystorage.blob.core.windows.net",
}
},
).result()
print(response)
# x-ms-original-file: specification/mysql/resource-manager/Microsoft.DBforMySQL/legacy/stable/2017-12-01/examples/ServerSecurityAlertsCreateMax.json
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
db123b88102b476b1f7aa06f89b3742fe4ef29c6 | 805a795ea81ca8b5cee1dec638585011da3aa12f | /MAIN/2.79/python/lib/test/test_asyncio/test_events.py | 28d92a9f4e3eac21e2aed7993f0e60dbd7ff1e89 | [
"Apache-2.0"
]
| permissive | josipamrsa/Interactive3DAnimation | 5b3837382eb0cc2ebdee9ee69adcee632054c00a | a4b7be78514b38fb096ced5601f25486d2a1d3a4 | refs/heads/master | 2022-10-12T05:48:20.572061 | 2019-09-26T09:50:49 | 2019-09-26T09:50:49 | 210,919,746 | 0 | 1 | Apache-2.0 | 2022-10-11T01:53:36 | 2019-09-25T19:03:51 | Python | UTF-8 | Python | false | false | 102,515 | py | """Tests for events.py."""
import collections.abc
import functools
import gc
import io
import os
import platform
import re
import signal
import socket
try:
import ssl
except ImportError:
ssl = None
import subprocess
import sys
import threading
import time
import errno
import unittest
from unittest import mock
import weakref
if sys.platform != 'win32':
import tty
import asyncio
from asyncio import coroutines
from asyncio import proactor_events
from asyncio import selector_events
from asyncio import sslproto
from asyncio import test_utils
try:
from test import support
except ImportError:
from asyncio import test_support as support
def data_file(filename):
if hasattr(support, 'TEST_HOME_DIR'):
fullname = os.path.join(support.TEST_HOME_DIR, filename)
if os.path.isfile(fullname):
return fullname
fullname = os.path.join(os.path.dirname(__file__), filename)
if os.path.isfile(fullname):
return fullname
raise FileNotFoundError(filename)
def osx_tiger():
"""Return True if the platform is Mac OS 10.4 or older."""
if sys.platform != 'darwin':
return False
version = platform.mac_ver()[0]
version = tuple(map(int, version.split('.')))
return version < (10, 5)
ONLYCERT = data_file('ssl_cert.pem')
ONLYKEY = data_file('ssl_key.pem')
SIGNED_CERTFILE = data_file('keycert3.pem')
SIGNING_CA = data_file('pycacert.pem')
PEERCERT = {'serialNumber': 'B09264B1F2DA21D1',
'version': 1,
'subject': ((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),)),
'issuer': ((('countryName', 'XY'),),
(('organizationName', 'Python Software Foundation CA'),),
(('commonName', 'our-ca-server'),)),
'notAfter': 'Nov 13 19:47:07 2022 GMT',
'notBefore': 'Jan 4 19:47:07 2013 GMT'}
class MyBaseProto(asyncio.Protocol):
connected = None
done = None
def __init__(self, loop=None):
self.transport = None
self.state = 'INITIAL'
self.nbytes = 0
if loop is not None:
self.connected = asyncio.Future(loop=loop)
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'CONNECTED'
if self.connected:
self.connected.set_result(None)
def data_received(self, data):
assert self.state == 'CONNECTED', self.state
self.nbytes += len(data)
def eof_received(self):
assert self.state == 'CONNECTED', self.state
self.state = 'EOF'
def connection_lost(self, exc):
assert self.state in ('CONNECTED', 'EOF'), self.state
self.state = 'CLOSED'
if self.done:
self.done.set_result(None)
class MyProto(MyBaseProto):
def connection_made(self, transport):
super().connection_made(transport)
transport.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')
class MyDatagramProto(asyncio.DatagramProtocol):
done = None
def __init__(self, loop=None):
self.state = 'INITIAL'
self.nbytes = 0
if loop is not None:
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'INITIALIZED'
def datagram_received(self, data, addr):
assert self.state == 'INITIALIZED', self.state
self.nbytes += len(data)
def error_received(self, exc):
assert self.state == 'INITIALIZED', self.state
def connection_lost(self, exc):
assert self.state == 'INITIALIZED', self.state
self.state = 'CLOSED'
if self.done:
self.done.set_result(None)
class MyReadPipeProto(asyncio.Protocol):
done = None
def __init__(self, loop=None):
self.state = ['INITIAL']
self.nbytes = 0
self.transport = None
if loop is not None:
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == ['INITIAL'], self.state
self.state.append('CONNECTED')
def data_received(self, data):
assert self.state == ['INITIAL', 'CONNECTED'], self.state
self.nbytes += len(data)
def eof_received(self):
assert self.state == ['INITIAL', 'CONNECTED'], self.state
self.state.append('EOF')
def connection_lost(self, exc):
if 'EOF' not in self.state:
self.state.append('EOF') # It is okay if EOF is missed.
assert self.state == ['INITIAL', 'CONNECTED', 'EOF'], self.state
self.state.append('CLOSED')
if self.done:
self.done.set_result(None)
class MyWritePipeProto(asyncio.BaseProtocol):
done = None
def __init__(self, loop=None):
self.state = 'INITIAL'
self.transport = None
if loop is not None:
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'CONNECTED'
def connection_lost(self, exc):
assert self.state == 'CONNECTED', self.state
self.state = 'CLOSED'
if self.done:
self.done.set_result(None)
class MySubprocessProtocol(asyncio.SubprocessProtocol):
def __init__(self, loop):
self.state = 'INITIAL'
self.transport = None
self.connected = asyncio.Future(loop=loop)
self.completed = asyncio.Future(loop=loop)
self.disconnects = {fd: asyncio.Future(loop=loop) for fd in range(3)}
self.data = {1: b'', 2: b''}
self.returncode = None
self.got_data = {1: asyncio.Event(loop=loop),
2: asyncio.Event(loop=loop)}
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'CONNECTED'
self.connected.set_result(None)
def connection_lost(self, exc):
assert self.state == 'CONNECTED', self.state
self.state = 'CLOSED'
self.completed.set_result(None)
def pipe_data_received(self, fd, data):
assert self.state == 'CONNECTED', self.state
self.data[fd] += data
self.got_data[fd].set()
def pipe_connection_lost(self, fd, exc):
assert self.state == 'CONNECTED', self.state
if exc:
self.disconnects[fd].set_exception(exc)
else:
self.disconnects[fd].set_result(exc)
def process_exited(self):
assert self.state == 'CONNECTED', self.state
self.returncode = self.transport.get_returncode()
class EventLoopTestsMixin:
def setUp(self):
super().setUp()
self.loop = self.create_event_loop()
self.set_event_loop(self.loop)
def tearDown(self):
# just in case if we have transport close callbacks
if not self.loop.is_closed():
test_utils.run_briefly(self.loop)
self.loop.close()
gc.collect()
super().tearDown()
def test_run_until_complete_nesting(self):
@asyncio.coroutine
def coro1():
yield
@asyncio.coroutine
def coro2():
self.assertTrue(self.loop.is_running())
self.loop.run_until_complete(coro1())
self.assertRaises(
RuntimeError, self.loop.run_until_complete, coro2())
# Note: because of the default Windows timing granularity of
# 15.6 msec, we use fairly long sleep times here (~100 msec).
def test_run_until_complete(self):
t0 = self.loop.time()
self.loop.run_until_complete(asyncio.sleep(0.1, loop=self.loop))
t1 = self.loop.time()
self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
def test_run_until_complete_stopped(self):
@asyncio.coroutine
def cb():
self.loop.stop()
yield from asyncio.sleep(0.1, loop=self.loop)
task = cb()
self.assertRaises(RuntimeError,
self.loop.run_until_complete, task)
def test_call_later(self):
results = []
def callback(arg):
results.append(arg)
self.loop.stop()
self.loop.call_later(0.1, callback, 'hello world')
t0 = time.monotonic()
self.loop.run_forever()
t1 = time.monotonic()
self.assertEqual(results, ['hello world'])
self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
def test_call_soon(self):
results = []
def callback(arg1, arg2):
results.append((arg1, arg2))
self.loop.stop()
self.loop.call_soon(callback, 'hello', 'world')
self.loop.run_forever()
self.assertEqual(results, [('hello', 'world')])
def test_call_soon_threadsafe(self):
results = []
lock = threading.Lock()
def callback(arg):
results.append(arg)
if len(results) >= 2:
self.loop.stop()
def run_in_thread():
self.loop.call_soon_threadsafe(callback, 'hello')
lock.release()
lock.acquire()
t = threading.Thread(target=run_in_thread)
t.start()
with lock:
self.loop.call_soon(callback, 'world')
self.loop.run_forever()
t.join()
self.assertEqual(results, ['hello', 'world'])
def test_call_soon_threadsafe_same_thread(self):
results = []
def callback(arg):
results.append(arg)
if len(results) >= 2:
self.loop.stop()
self.loop.call_soon_threadsafe(callback, 'hello')
self.loop.call_soon(callback, 'world')
self.loop.run_forever()
self.assertEqual(results, ['hello', 'world'])
def test_run_in_executor(self):
def run(arg):
return (arg, threading.get_ident())
f2 = self.loop.run_in_executor(None, run, 'yo')
res, thread_id = self.loop.run_until_complete(f2)
self.assertEqual(res, 'yo')
self.assertNotEqual(thread_id, threading.get_ident())
def test_reader_callback(self):
r, w = test_utils.socketpair()
r.setblocking(False)
bytes_read = bytearray()
def reader():
try:
data = r.recv(1024)
except BlockingIOError:
# Spurious readiness notifications are possible
# at least on Linux -- see man select.
return
if data:
bytes_read.extend(data)
else:
self.assertTrue(self.loop.remove_reader(r.fileno()))
r.close()
self.loop.add_reader(r.fileno(), reader)
self.loop.call_soon(w.send, b'abc')
test_utils.run_until(self.loop, lambda: len(bytes_read) >= 3)
self.loop.call_soon(w.send, b'def')
test_utils.run_until(self.loop, lambda: len(bytes_read) >= 6)
self.loop.call_soon(w.close)
self.loop.call_soon(self.loop.stop)
self.loop.run_forever()
self.assertEqual(bytes_read, b'abcdef')
def test_writer_callback(self):
r, w = test_utils.socketpair()
w.setblocking(False)
def writer(data):
w.send(data)
self.loop.stop()
data = b'x' * 1024
self.loop.add_writer(w.fileno(), writer, data)
self.loop.run_forever()
self.assertTrue(self.loop.remove_writer(w.fileno()))
self.assertFalse(self.loop.remove_writer(w.fileno()))
w.close()
read = r.recv(len(data) * 2)
r.close()
self.assertEqual(read, data)
def _basetest_sock_client_ops(self, httpd, sock):
if not isinstance(self.loop, proactor_events.BaseProactorEventLoop):
# in debug mode, socket operations must fail
# if the socket is not in blocking mode
self.loop.set_debug(True)
sock.setblocking(True)
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_connect(sock, httpd.address))
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n'))
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_recv(sock, 1024))
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_accept(sock))
# test in non-blocking mode
sock.setblocking(False)
self.loop.run_until_complete(
self.loop.sock_connect(sock, httpd.address))
self.loop.run_until_complete(
self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n'))
data = self.loop.run_until_complete(
self.loop.sock_recv(sock, 1024))
# consume data
self.loop.run_until_complete(
self.loop.sock_recv(sock, 1024))
sock.close()
self.assertTrue(data.startswith(b'HTTP/1.0 200 OK'))
def test_sock_client_ops(self):
with test_utils.run_test_server() as httpd:
sock = socket.socket()
self._basetest_sock_client_ops(httpd, sock)
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_unix_sock_client_ops(self):
with test_utils.run_test_unix_server() as httpd:
sock = socket.socket(socket.AF_UNIX)
self._basetest_sock_client_ops(httpd, sock)
def test_sock_client_fail(self):
# Make sure that we will get an unused port
address = None
try:
s = socket.socket()
s.bind(('127.0.0.1', 0))
address = s.getsockname()
finally:
s.close()
sock = socket.socket()
sock.setblocking(False)
with self.assertRaises(ConnectionRefusedError):
self.loop.run_until_complete(
self.loop.sock_connect(sock, address))
sock.close()
def test_sock_accept(self):
listener = socket.socket()
listener.setblocking(False)
listener.bind(('127.0.0.1', 0))
listener.listen(1)
client = socket.socket()
client.connect(listener.getsockname())
f = self.loop.sock_accept(listener)
conn, addr = self.loop.run_until_complete(f)
self.assertEqual(conn.gettimeout(), 0)
self.assertEqual(addr, client.getsockname())
self.assertEqual(client.getpeername(), listener.getsockname())
client.close()
conn.close()
listener.close()
@unittest.skipUnless(hasattr(signal, 'SIGKILL'), 'No SIGKILL')
def test_add_signal_handler(self):
caught = 0
def my_handler():
nonlocal caught
caught += 1
# Check error behavior first.
self.assertRaises(
TypeError, self.loop.add_signal_handler, 'boom', my_handler)
self.assertRaises(
TypeError, self.loop.remove_signal_handler, 'boom')
self.assertRaises(
ValueError, self.loop.add_signal_handler, signal.NSIG+1,
my_handler)
self.assertRaises(
ValueError, self.loop.remove_signal_handler, signal.NSIG+1)
self.assertRaises(
ValueError, self.loop.add_signal_handler, 0, my_handler)
self.assertRaises(
ValueError, self.loop.remove_signal_handler, 0)
self.assertRaises(
ValueError, self.loop.add_signal_handler, -1, my_handler)
self.assertRaises(
ValueError, self.loop.remove_signal_handler, -1)
self.assertRaises(
RuntimeError, self.loop.add_signal_handler, signal.SIGKILL,
my_handler)
# Removing SIGKILL doesn't raise, since we don't call signal().
self.assertFalse(self.loop.remove_signal_handler(signal.SIGKILL))
# Now set a handler and handle it.
self.loop.add_signal_handler(signal.SIGINT, my_handler)
os.kill(os.getpid(), signal.SIGINT)
test_utils.run_until(self.loop, lambda: caught)
# Removing it should restore the default handler.
self.assertTrue(self.loop.remove_signal_handler(signal.SIGINT))
self.assertEqual(signal.getsignal(signal.SIGINT),
signal.default_int_handler)
# Removing again returns False.
self.assertFalse(self.loop.remove_signal_handler(signal.SIGINT))
@unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM')
def test_signal_handling_while_selecting(self):
# Test with a signal actually arriving during a select() call.
caught = 0
def my_handler():
nonlocal caught
caught += 1
self.loop.stop()
self.loop.add_signal_handler(signal.SIGALRM, my_handler)
signal.setitimer(signal.ITIMER_REAL, 0.01, 0) # Send SIGALRM once.
self.loop.run_forever()
self.assertEqual(caught, 1)
@unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM')
def test_signal_handling_args(self):
some_args = (42,)
caught = 0
def my_handler(*args):
nonlocal caught
caught += 1
self.assertEqual(args, some_args)
self.loop.add_signal_handler(signal.SIGALRM, my_handler, *some_args)
signal.setitimer(signal.ITIMER_REAL, 0.1, 0) # Send SIGALRM once.
self.loop.call_later(0.5, self.loop.stop)
self.loop.run_forever()
self.assertEqual(caught, 1)
def _basetest_create_connection(self, connection_fut, check_sockname=True):
tr, pr = self.loop.run_until_complete(connection_fut)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, asyncio.Protocol)
self.assertIs(pr.transport, tr)
if check_sockname:
self.assertIsNotNone(tr.get_extra_info('sockname'))
self.loop.run_until_complete(pr.done)
self.assertGreater(pr.nbytes, 0)
tr.close()
def test_create_connection(self):
with test_utils.run_test_server() as httpd:
conn_fut = self.loop.create_connection(
lambda: MyProto(loop=self.loop), *httpd.address)
self._basetest_create_connection(conn_fut)
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_connection(self):
# Issue #20682: On Mac OS X Tiger, getsockname() returns a
# zero-length address for UNIX socket.
check_sockname = not osx_tiger()
with test_utils.run_test_unix_server() as httpd:
conn_fut = self.loop.create_unix_connection(
lambda: MyProto(loop=self.loop), httpd.address)
self._basetest_create_connection(conn_fut, check_sockname)
def test_create_connection_sock(self):
with test_utils.run_test_server() as httpd:
sock = None
infos = self.loop.run_until_complete(
self.loop.getaddrinfo(
*httpd.address, type=socket.SOCK_STREAM))
for family, type, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type, proto=proto)
sock.setblocking(False)
self.loop.run_until_complete(
self.loop.sock_connect(sock, address))
except:
pass
else:
break
else:
assert False, 'Can not create socket.'
f = self.loop.create_connection(
lambda: MyProto(loop=self.loop), sock=sock)
tr, pr = self.loop.run_until_complete(f)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, asyncio.Protocol)
self.loop.run_until_complete(pr.done)
self.assertGreater(pr.nbytes, 0)
tr.close()
def check_ssl_extra_info(self, client, check_sockname=True,
peername=None, peercert={}):
if check_sockname:
self.assertIsNotNone(client.get_extra_info('sockname'))
if peername:
self.assertEqual(peername,
client.get_extra_info('peername'))
else:
self.assertIsNotNone(client.get_extra_info('peername'))
self.assertEqual(peercert,
client.get_extra_info('peercert'))
# test SSL cipher
cipher = client.get_extra_info('cipher')
self.assertIsInstance(cipher, tuple)
self.assertEqual(len(cipher), 3, cipher)
self.assertIsInstance(cipher[0], str)
self.assertIsInstance(cipher[1], str)
self.assertIsInstance(cipher[2], int)
# test SSL object
sslobj = client.get_extra_info('ssl_object')
self.assertIsNotNone(sslobj)
self.assertEqual(sslobj.compression(),
client.get_extra_info('compression'))
self.assertEqual(sslobj.cipher(),
client.get_extra_info('cipher'))
self.assertEqual(sslobj.getpeercert(),
client.get_extra_info('peercert'))
self.assertEqual(sslobj.compression(),
client.get_extra_info('compression'))
def _basetest_create_ssl_connection(self, connection_fut,
check_sockname=True,
peername=None):
tr, pr = self.loop.run_until_complete(connection_fut)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, asyncio.Protocol)
self.assertTrue('ssl' in tr.__class__.__name__.lower())
self.check_ssl_extra_info(tr, check_sockname, peername)
self.loop.run_until_complete(pr.done)
self.assertGreater(pr.nbytes, 0)
tr.close()
def _test_create_ssl_connection(self, httpd, create_connection,
check_sockname=True, peername=None):
conn_fut = create_connection(ssl=test_utils.dummy_ssl_context())
self._basetest_create_ssl_connection(conn_fut, check_sockname,
peername)
# ssl.Purpose was introduced in Python 3.4
if hasattr(ssl, 'Purpose'):
def _dummy_ssl_create_context(purpose=ssl.Purpose.SERVER_AUTH, *,
cafile=None, capath=None,
cadata=None):
"""
A ssl.create_default_context() replacement that doesn't enable
cert validation.
"""
self.assertEqual(purpose, ssl.Purpose.SERVER_AUTH)
return test_utils.dummy_ssl_context()
# With ssl=True, ssl.create_default_context() should be called
with mock.patch('ssl.create_default_context',
side_effect=_dummy_ssl_create_context) as m:
conn_fut = create_connection(ssl=True)
self._basetest_create_ssl_connection(conn_fut, check_sockname,
peername)
self.assertEqual(m.call_count, 1)
# With the real ssl.create_default_context(), certificate
# validation will fail
with self.assertRaises(ssl.SSLError) as cm:
conn_fut = create_connection(ssl=True)
# Ignore the "SSL handshake failed" log in debug mode
with test_utils.disable_logger():
self._basetest_create_ssl_connection(conn_fut, check_sockname,
peername)
self.assertEqual(cm.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_ssl_connection(self):
with test_utils.run_test_server(use_ssl=True) as httpd:
create_connection = functools.partial(
self.loop.create_connection,
lambda: MyProto(loop=self.loop),
*httpd.address)
self._test_create_ssl_connection(httpd, create_connection,
peername=httpd.address)
def test_legacy_create_ssl_connection(self):
with test_utils.force_legacy_ssl_support():
self.test_create_ssl_connection()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_ssl_unix_connection(self):
# Issue #20682: On Mac OS X Tiger, getsockname() returns a
# zero-length address for UNIX socket.
check_sockname = not osx_tiger()
with test_utils.run_test_unix_server(use_ssl=True) as httpd:
create_connection = functools.partial(
self.loop.create_unix_connection,
lambda: MyProto(loop=self.loop), httpd.address,
server_hostname='127.0.0.1')
self._test_create_ssl_connection(httpd, create_connection,
check_sockname,
peername=httpd.address)
def test_legacy_create_ssl_unix_connection(self):
with test_utils.force_legacy_ssl_support():
self.test_create_ssl_unix_connection()
def test_create_connection_local_addr(self):
with test_utils.run_test_server() as httpd:
port = support.find_unused_port()
f = self.loop.create_connection(
lambda: MyProto(loop=self.loop),
*httpd.address, local_addr=(httpd.address[0], port))
tr, pr = self.loop.run_until_complete(f)
expected = pr.transport.get_extra_info('sockname')[1]
self.assertEqual(port, expected)
tr.close()
def test_create_connection_local_addr_in_use(self):
with test_utils.run_test_server() as httpd:
f = self.loop.create_connection(
lambda: MyProto(loop=self.loop),
*httpd.address, local_addr=httpd.address)
with self.assertRaises(OSError) as cm:
self.loop.run_until_complete(f)
self.assertEqual(cm.exception.errno, errno.EADDRINUSE)
self.assertIn(str(httpd.address), cm.exception.strerror)
def test_connect_accepted_socket(self, server_ssl=None, client_ssl=None):
loop = self.loop
class MyProto(MyBaseProto):
def connection_lost(self, exc):
super().connection_lost(exc)
loop.call_soon(loop.stop)
def data_received(self, data):
super().data_received(data)
self.transport.write(expected_response)
lsock = socket.socket()
lsock.bind(('127.0.0.1', 0))
lsock.listen(1)
addr = lsock.getsockname()
message = b'test data'
response = None
expected_response = b'roger'
def client():
nonlocal response
try:
csock = socket.socket()
if client_ssl is not None:
csock = client_ssl.wrap_socket(csock)
csock.connect(addr)
csock.sendall(message)
response = csock.recv(99)
csock.close()
except Exception as exc:
print(
"Failure in client thread in test_connect_accepted_socket",
exc)
thread = threading.Thread(target=client, daemon=True)
thread.start()
conn, _ = lsock.accept()
proto = MyProto(loop=loop)
proto.loop = loop
loop.run_until_complete(
loop.connect_accepted_socket(
(lambda: proto), conn, ssl=server_ssl))
loop.run_forever()
proto.transport.close()
lsock.close()
thread.join(1)
self.assertFalse(thread.is_alive())
self.assertEqual(proto.state, 'CLOSED')
self.assertEqual(proto.nbytes, len(message))
self.assertEqual(response, expected_response)
@unittest.skipIf(ssl is None, 'No ssl module')
def test_ssl_connect_accepted_socket(self):
if (sys.platform == 'win32' and
sys.version_info < (3, 5) and
isinstance(self.loop, proactor_events.BaseProactorEventLoop)
):
raise unittest.SkipTest(
'SSL not supported with proactor event loops before Python 3.5'
)
server_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
server_context.load_cert_chain(ONLYCERT, ONLYKEY)
if hasattr(server_context, 'check_hostname'):
server_context.check_hostname = False
server_context.verify_mode = ssl.CERT_NONE
client_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
if hasattr(server_context, 'check_hostname'):
client_context.check_hostname = False
client_context.verify_mode = ssl.CERT_NONE
self.test_connect_accepted_socket(server_context, client_context)
@mock.patch('asyncio.base_events.socket')
def create_server_multiple_hosts(self, family, hosts, mock_sock):
@asyncio.coroutine
def getaddrinfo(host, port, *args, **kw):
if family == socket.AF_INET:
return [(family, socket.SOCK_STREAM, 6, '', (host, port))]
else:
return [(family, socket.SOCK_STREAM, 6, '', (host, port, 0, 0))]
def getaddrinfo_task(*args, **kwds):
return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
unique_hosts = set(hosts)
if family == socket.AF_INET:
mock_sock.socket().getsockbyname.side_effect = [
(host, 80) for host in unique_hosts]
else:
mock_sock.socket().getsockbyname.side_effect = [
(host, 80, 0, 0) for host in unique_hosts]
self.loop.getaddrinfo = getaddrinfo_task
self.loop._start_serving = mock.Mock()
self.loop._stop_serving = mock.Mock()
f = self.loop.create_server(lambda: MyProto(self.loop), hosts, 80)
server = self.loop.run_until_complete(f)
self.addCleanup(server.close)
server_hosts = {sock.getsockbyname()[0] for sock in server.sockets}
self.assertEqual(server_hosts, unique_hosts)
def test_create_server_multiple_hosts_ipv4(self):
self.create_server_multiple_hosts(socket.AF_INET,
['1.2.3.4', '5.6.7.8', '1.2.3.4'])
def test_create_server_multiple_hosts_ipv6(self):
self.create_server_multiple_hosts(socket.AF_INET6,
['::1', '::2', '::1'])
def test_create_server(self):
proto = MyProto(self.loop)
f = self.loop.create_server(lambda: proto, '0.0.0.0', 0)
server = self.loop.run_until_complete(f)
self.assertEqual(len(server.sockets), 1)
sock = server.sockets[0]
host, port = sock.getsockname()
self.assertEqual(host, '0.0.0.0')
client = socket.socket()
client.connect(('127.0.0.1', port))
client.sendall(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('sockname'))
self.assertEqual('127.0.0.1',
proto.transport.get_extra_info('peername')[0])
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards to avoid ECONNRESET upon
# recv()/send() on the serving socket
client.close()
# close server
server.close()
@unittest.skipUnless(hasattr(socket, 'SO_REUSEPORT'), 'No SO_REUSEPORT')
def test_create_server_reuse_port(self):
proto = MyProto(self.loop)
f = self.loop.create_server(
lambda: proto, '0.0.0.0', 0)
server = self.loop.run_until_complete(f)
self.assertEqual(len(server.sockets), 1)
sock = server.sockets[0]
self.assertFalse(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEPORT))
server.close()
test_utils.run_briefly(self.loop)
proto = MyProto(self.loop)
f = self.loop.create_server(
lambda: proto, '0.0.0.0', 0, reuse_port=True)
server = self.loop.run_until_complete(f)
self.assertEqual(len(server.sockets), 1)
sock = server.sockets[0]
self.assertTrue(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEPORT))
server.close()
def _make_unix_server(self, factory, **kwargs):
path = test_utils.gen_unix_socket_path()
self.addCleanup(lambda: os.path.exists(path) and os.unlink(path))
f = self.loop.create_unix_server(factory, path, **kwargs)
server = self.loop.run_until_complete(f)
return server, path
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server(self):
proto = MyProto(loop=self.loop)
server, path = self._make_unix_server(lambda: proto)
self.assertEqual(len(server.sockets), 1)
client = socket.socket(socket.AF_UNIX)
client.connect(path)
client.sendall(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards to avoid ECONNRESET upon
# recv()/send() on the serving socket
client.close()
# close server
server.close()
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_path_socket_error(self):
proto = MyProto(loop=self.loop)
sock = socket.socket()
with sock:
f = self.loop.create_unix_server(lambda: proto, '/test', sock=sock)
with self.assertRaisesRegex(ValueError,
'path and sock can not be specified '
'at the same time'):
self.loop.run_until_complete(f)
def _create_ssl_context(self, certfile, keyfile=None):
sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext.options |= ssl.OP_NO_SSLv2
sslcontext.load_cert_chain(certfile, keyfile)
return sslcontext
def _make_ssl_server(self, factory, certfile, keyfile=None):
sslcontext = self._create_ssl_context(certfile, keyfile)
f = self.loop.create_server(factory, '127.0.0.1', 0, ssl=sslcontext)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
host, port = sock.getsockname()
self.assertEqual(host, '127.0.0.1')
return server, host, port
def _make_ssl_unix_server(self, factory, certfile, keyfile=None):
sslcontext = self._create_ssl_context(certfile, keyfile)
return self._make_unix_server(factory, ssl=sslcontext)
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, ONLYCERT, ONLYKEY)
f_c = self.loop.create_connection(MyBaseProto, host, port,
ssl=test_utils.dummy_ssl_context())
client, pr = self.loop.run_until_complete(f_c)
client.write(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# extra info is available
self.check_ssl_extra_info(client, peername=(host, port))
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards to avoid ECONNRESET upon
# recv()/send() on the serving socket
client.close()
# stop serving
server.close()
def test_legacy_create_server_ssl(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_ssl(self):
proto = MyProto(loop=self.loop)
server, path = self._make_ssl_unix_server(
lambda: proto, ONLYCERT, ONLYKEY)
f_c = self.loop.create_unix_connection(
MyBaseProto, path, ssl=test_utils.dummy_ssl_context(),
server_hostname='')
client, pr = self.loop.run_until_complete(f_c)
client.write(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards to avoid ECONNRESET upon
# recv()/send() on the serving socket
client.close()
# stop serving
server.close()
def test_legacy_create_unix_server_ssl(self):
with test_utils.force_legacy_ssl_support():
self.test_create_unix_server_ssl()
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl_verify_failed(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# no CA loaded
f_c = self.loop.create_connection(MyProto, host, port,
ssl=sslcontext_client)
with mock.patch.object(self.loop, 'call_exception_handler'):
with test_utils.disable_logger():
with self.assertRaisesRegex(ssl.SSLError,
'(?i)certificate.verify.failed'):
self.loop.run_until_complete(f_c)
# execute the loop to log the connection error
test_utils.run_briefly(self.loop)
# close connection
self.assertIsNone(proto.transport)
server.close()
def test_legacy_create_server_ssl_verify_failed(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl_verify_failed()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_ssl_verify_failed(self):
proto = MyProto(loop=self.loop)
server, path = self._make_ssl_unix_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# no CA loaded
f_c = self.loop.create_unix_connection(MyProto, path,
ssl=sslcontext_client,
server_hostname='invalid')
with mock.patch.object(self.loop, 'call_exception_handler'):
with test_utils.disable_logger():
with self.assertRaisesRegex(ssl.SSLError,
'(?i)certificate.verify.failed'):
self.loop.run_until_complete(f_c)
# execute the loop to log the connection error
test_utils.run_briefly(self.loop)
# close connection
self.assertIsNone(proto.transport)
server.close()
def test_legacy_create_unix_server_ssl_verify_failed(self):
with test_utils.force_legacy_ssl_support():
self.test_create_unix_server_ssl_verify_failed()
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl_match_failed(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
sslcontext_client.load_verify_locations(
cafile=SIGNING_CA)
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# incorrect server_hostname
f_c = self.loop.create_connection(MyProto, host, port,
ssl=sslcontext_client)
with mock.patch.object(self.loop, 'call_exception_handler'):
with test_utils.disable_logger():
with self.assertRaisesRegex(
ssl.CertificateError,
"hostname '127.0.0.1' doesn't match 'localhost'"):
self.loop.run_until_complete(f_c)
# close connection
proto.transport.close()
server.close()
def test_legacy_create_server_ssl_match_failed(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl_match_failed()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_ssl_verified(self):
proto = MyProto(loop=self.loop)
server, path = self._make_ssl_unix_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
sslcontext_client.load_verify_locations(cafile=SIGNING_CA)
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# Connection succeeds with correct CA and server hostname.
f_c = self.loop.create_unix_connection(MyProto, path,
ssl=sslcontext_client,
server_hostname='localhost')
client, pr = self.loop.run_until_complete(f_c)
# close connection
proto.transport.close()
client.close()
server.close()
self.loop.run_until_complete(proto.done)
def test_legacy_create_unix_server_ssl_verified(self):
with test_utils.force_legacy_ssl_support():
self.test_create_unix_server_ssl_verified()
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl_verified(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
sslcontext_client.load_verify_locations(cafile=SIGNING_CA)
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# Connection succeeds with correct CA and server hostname.
f_c = self.loop.create_connection(MyProto, host, port,
ssl=sslcontext_client,
server_hostname='localhost')
client, pr = self.loop.run_until_complete(f_c)
# extra info is available
        self.check_ssl_extra_info(client, peername=(host, port),
peercert=PEERCERT)
# close connection
proto.transport.close()
client.close()
server.close()
self.loop.run_until_complete(proto.done)
def test_legacy_create_server_ssl_verified(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl_verified()
def test_create_server_sock(self):
proto = asyncio.Future(loop=self.loop)
class TestMyProto(MyProto):
def connection_made(self, transport):
super().connection_made(transport)
proto.set_result(self)
sock_ob = socket.socket(type=socket.SOCK_STREAM)
sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock_ob.bind(('0.0.0.0', 0))
f = self.loop.create_server(TestMyProto, sock=sock_ob)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
self.assertIs(sock, sock_ob)
host, port = sock.getsockname()
self.assertEqual(host, '0.0.0.0')
client = socket.socket()
client.connect(('127.0.0.1', port))
client.send(b'xxx')
client.close()
server.close()
def test_create_server_addr_in_use(self):
sock_ob = socket.socket(type=socket.SOCK_STREAM)
sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock_ob.bind(('0.0.0.0', 0))
f = self.loop.create_server(MyProto, sock=sock_ob)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
host, port = sock.getsockname()
f = self.loop.create_server(MyProto, host=host, port=port)
with self.assertRaises(OSError) as cm:
self.loop.run_until_complete(f)
self.assertEqual(cm.exception.errno, errno.EADDRINUSE)
server.close()
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled')
def test_create_server_dual_stack(self):
f_proto = asyncio.Future(loop=self.loop)
class TestMyProto(MyProto):
def connection_made(self, transport):
super().connection_made(transport)
f_proto.set_result(self)
try_count = 0
while True:
try:
port = support.find_unused_port()
f = self.loop.create_server(TestMyProto, host=None, port=port)
server = self.loop.run_until_complete(f)
except OSError as ex:
if ex.errno == errno.EADDRINUSE:
try_count += 1
self.assertGreaterEqual(5, try_count)
continue
else:
raise
else:
break
client = socket.socket()
client.connect(('127.0.0.1', port))
client.send(b'xxx')
proto = self.loop.run_until_complete(f_proto)
proto.transport.close()
client.close()
f_proto = asyncio.Future(loop=self.loop)
client = socket.socket(socket.AF_INET6)
client.connect(('::1', port))
client.send(b'xxx')
proto = self.loop.run_until_complete(f_proto)
proto.transport.close()
client.close()
server.close()
def test_server_close(self):
f = self.loop.create_server(MyProto, '0.0.0.0', 0)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
host, port = sock.getsockname()
client = socket.socket()
client.connect(('127.0.0.1', port))
client.send(b'xxx')
client.close()
server.close()
client = socket.socket()
self.assertRaises(
ConnectionRefusedError, client.connect, ('127.0.0.1', port))
client.close()
def test_create_datagram_endpoint(self):
class TestMyDatagramProto(MyDatagramProto):
def __init__(inner_self):
super().__init__(loop=self.loop)
def datagram_received(self, data, addr):
super().datagram_received(data, addr)
self.transport.sendto(b'resp:'+data, addr)
coro = self.loop.create_datagram_endpoint(
TestMyDatagramProto, local_addr=('127.0.0.1', 0))
s_transport, server = self.loop.run_until_complete(coro)
host, port = s_transport.get_extra_info('sockname')
self.assertIsInstance(s_transport, asyncio.Transport)
self.assertIsInstance(server, TestMyDatagramProto)
self.assertEqual('INITIALIZED', server.state)
self.assertIs(server.transport, s_transport)
coro = self.loop.create_datagram_endpoint(
lambda: MyDatagramProto(loop=self.loop),
remote_addr=(host, port))
transport, client = self.loop.run_until_complete(coro)
self.assertIsInstance(transport, asyncio.Transport)
self.assertIsInstance(client, MyDatagramProto)
self.assertEqual('INITIALIZED', client.state)
self.assertIs(client.transport, transport)
transport.sendto(b'xxx')
test_utils.run_until(self.loop, lambda: server.nbytes)
self.assertEqual(3, server.nbytes)
test_utils.run_until(self.loop, lambda: client.nbytes)
# received
self.assertEqual(8, client.nbytes)
# extra info is available
self.assertIsNotNone(transport.get_extra_info('sockname'))
# close connection
transport.close()
self.loop.run_until_complete(client.done)
self.assertEqual('CLOSED', client.state)
server.transport.close()
def test_create_datagram_endpoint_sock(self):
if (sys.platform == 'win32' and
isinstance(self.loop, proactor_events.BaseProactorEventLoop)):
raise unittest.SkipTest(
'UDP is not supported with proactor event loops')
sock = None
local_address = ('127.0.0.1', 0)
infos = self.loop.run_until_complete(
self.loop.getaddrinfo(
*local_address, type=socket.SOCK_DGRAM))
for family, type, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type, proto=proto)
sock.setblocking(False)
sock.bind(address)
except:
pass
else:
break
else:
assert False, 'Can not create socket.'
f = self.loop.create_datagram_endpoint(
lambda: MyDatagramProto(loop=self.loop), sock=sock)
tr, pr = self.loop.run_until_complete(f)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, MyDatagramProto)
tr.close()
self.loop.run_until_complete(pr.done)
def test_internal_fds(self):
loop = self.create_event_loop()
if not isinstance(loop, selector_events.BaseSelectorEventLoop):
loop.close()
self.skipTest('loop is not a BaseSelectorEventLoop')
self.assertEqual(1, loop._internal_fds)
loop.close()
self.assertEqual(0, loop._internal_fds)
self.assertIsNone(loop._csock)
self.assertIsNone(loop._ssock)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_read_pipe(self):
proto = MyReadPipeProto(loop=self.loop)
rpipe, wpipe = os.pipe()
pipeobj = io.open(rpipe, 'rb', 1024)
@asyncio.coroutine
def connect():
t, p = yield from self.loop.connect_read_pipe(
lambda: proto, pipeobj)
self.assertIs(p, proto)
self.assertIs(t, proto.transport)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(0, proto.nbytes)
self.loop.run_until_complete(connect())
os.write(wpipe, b'1')
test_utils.run_until(self.loop, lambda: proto.nbytes >= 1)
self.assertEqual(1, proto.nbytes)
os.write(wpipe, b'2345')
test_utils.run_until(self.loop, lambda: proto.nbytes >= 5)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(5, proto.nbytes)
os.close(wpipe)
self.loop.run_until_complete(proto.done)
self.assertEqual(
['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_unclosed_pipe_transport(self):
# This test reproduces the issue #314 on GitHub
loop = self.create_event_loop()
read_proto = MyReadPipeProto(loop=loop)
write_proto = MyWritePipeProto(loop=loop)
rpipe, wpipe = os.pipe()
rpipeobj = io.open(rpipe, 'rb', 1024)
wpipeobj = io.open(wpipe, 'w', 1024)
@asyncio.coroutine
def connect():
read_transport, _ = yield from loop.connect_read_pipe(
lambda: read_proto, rpipeobj)
write_transport, _ = yield from loop.connect_write_pipe(
lambda: write_proto, wpipeobj)
return read_transport, write_transport
# Run and close the loop without closing the transports
read_transport, write_transport = loop.run_until_complete(connect())
loop.close()
# These 'repr' calls used to raise an AttributeError
# See Issue #314 on GitHub
self.assertIn('open', repr(read_transport))
self.assertIn('open', repr(write_transport))
# Clean up (avoid ResourceWarning)
rpipeobj.close()
wpipeobj.close()
read_transport._pipe = None
write_transport._pipe = None
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
# Issue #20495: The test hangs on FreeBSD 7.2 but pass on FreeBSD 9
@support.requires_freebsd_version(8)
def test_read_pty_output(self):
proto = MyReadPipeProto(loop=self.loop)
master, slave = os.openpty()
master_read_obj = io.open(master, 'rb', 0)
@asyncio.coroutine
def connect():
t, p = yield from self.loop.connect_read_pipe(lambda: proto,
master_read_obj)
self.assertIs(p, proto)
self.assertIs(t, proto.transport)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(0, proto.nbytes)
self.loop.run_until_complete(connect())
os.write(slave, b'1')
test_utils.run_until(self.loop, lambda: proto.nbytes)
self.assertEqual(1, proto.nbytes)
os.write(slave, b'2345')
test_utils.run_until(self.loop, lambda: proto.nbytes >= 5)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(5, proto.nbytes)
os.close(slave)
self.loop.run_until_complete(proto.done)
self.assertEqual(
['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_write_pipe(self):
rpipe, wpipe = os.pipe()
pipeobj = io.open(wpipe, 'wb', 1024)
proto = MyWritePipeProto(loop=self.loop)
connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
transport, p = self.loop.run_until_complete(connect)
self.assertIs(p, proto)
self.assertIs(transport, proto.transport)
self.assertEqual('CONNECTED', proto.state)
transport.write(b'1')
data = bytearray()
def reader(data):
chunk = os.read(rpipe, 1024)
data += chunk
return len(data)
test_utils.run_until(self.loop, lambda: reader(data) >= 1)
self.assertEqual(b'1', data)
transport.write(b'2345')
test_utils.run_until(self.loop, lambda: reader(data) >= 5)
self.assertEqual(b'12345', data)
self.assertEqual('CONNECTED', proto.state)
os.close(rpipe)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_write_pipe_disconnect_on_close(self):
rsock, wsock = test_utils.socketpair()
rsock.setblocking(False)
pipeobj = io.open(wsock.detach(), 'wb', 1024)
proto = MyWritePipeProto(loop=self.loop)
connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
transport, p = self.loop.run_until_complete(connect)
self.assertIs(p, proto)
self.assertIs(transport, proto.transport)
self.assertEqual('CONNECTED', proto.state)
transport.write(b'1')
data = self.loop.run_until_complete(self.loop.sock_recv(rsock, 1024))
self.assertEqual(b'1', data)
rsock.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
def test_write_pty(self):
master, slave = os.openpty()
slave_write_obj = io.open(slave, 'wb', 0)
proto = MyWritePipeProto(loop=self.loop)
connect = self.loop.connect_write_pipe(lambda: proto, slave_write_obj)
transport, p = self.loop.run_until_complete(connect)
self.assertIs(p, proto)
self.assertIs(transport, proto.transport)
self.assertEqual('CONNECTED', proto.state)
transport.write(b'1')
data = bytearray()
def reader(data):
chunk = os.read(master, 1024)
data += chunk
return len(data)
test_utils.run_until(self.loop, lambda: reader(data) >= 1,
timeout=10)
self.assertEqual(b'1', data)
transport.write(b'2345')
test_utils.run_until(self.loop, lambda: reader(data) >= 5,
timeout=10)
self.assertEqual(b'12345', data)
self.assertEqual('CONNECTED', proto.state)
os.close(master)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
def test_bidirectional_pty(self):
master, read_slave = os.openpty()
write_slave = os.dup(read_slave)
tty.setraw(read_slave)
slave_read_obj = io.open(read_slave, 'rb', 0)
read_proto = MyReadPipeProto(loop=self.loop)
read_connect = self.loop.connect_read_pipe(lambda: read_proto,
slave_read_obj)
read_transport, p = self.loop.run_until_complete(read_connect)
self.assertIs(p, read_proto)
self.assertIs(read_transport, read_proto.transport)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual(0, read_proto.nbytes)
slave_write_obj = io.open(write_slave, 'wb', 0)
write_proto = MyWritePipeProto(loop=self.loop)
write_connect = self.loop.connect_write_pipe(lambda: write_proto,
slave_write_obj)
write_transport, p = self.loop.run_until_complete(write_connect)
self.assertIs(p, write_proto)
self.assertIs(write_transport, write_proto.transport)
self.assertEqual('CONNECTED', write_proto.state)
data = bytearray()
def reader(data):
chunk = os.read(master, 1024)
data += chunk
return len(data)
write_transport.write(b'1')
test_utils.run_until(self.loop, lambda: reader(data) >= 1, timeout=10)
self.assertEqual(b'1', data)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual('CONNECTED', write_proto.state)
os.write(master, b'a')
test_utils.run_until(self.loop, lambda: read_proto.nbytes >= 1,
timeout=10)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual(1, read_proto.nbytes)
self.assertEqual('CONNECTED', write_proto.state)
write_transport.write(b'2345')
test_utils.run_until(self.loop, lambda: reader(data) >= 5, timeout=10)
self.assertEqual(b'12345', data)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual('CONNECTED', write_proto.state)
os.write(master, b'bcde')
test_utils.run_until(self.loop, lambda: read_proto.nbytes >= 5,
timeout=10)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual(5, read_proto.nbytes)
self.assertEqual('CONNECTED', write_proto.state)
os.close(master)
read_transport.close()
self.loop.run_until_complete(read_proto.done)
self.assertEqual(
['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], read_proto.state)
write_transport.close()
self.loop.run_until_complete(write_proto.done)
self.assertEqual('CLOSED', write_proto.state)
def test_prompt_cancellation(self):
r, w = test_utils.socketpair()
r.setblocking(False)
f = self.loop.sock_recv(r, 1)
ov = getattr(f, 'ov', None)
if ov is not None:
self.assertTrue(ov.pending)
@asyncio.coroutine
def main():
try:
self.loop.call_soon(f.cancel)
yield from f
except asyncio.CancelledError:
res = 'cancelled'
else:
res = None
finally:
self.loop.stop()
return res
start = time.monotonic()
t = asyncio.Task(main(), loop=self.loop)
self.loop.run_forever()
elapsed = time.monotonic() - start
self.assertLess(elapsed, 0.1)
self.assertEqual(t.result(), 'cancelled')
self.assertRaises(asyncio.CancelledError, f.result)
if ov is not None:
self.assertFalse(ov.pending)
self.loop._stop_serving(r)
r.close()
w.close()
def test_timeout_rounding(self):
def _run_once():
self.loop._run_once_counter += 1
orig_run_once()
orig_run_once = self.loop._run_once
self.loop._run_once_counter = 0
self.loop._run_once = _run_once
@asyncio.coroutine
def wait():
loop = self.loop
yield from asyncio.sleep(1e-2, loop=loop)
yield from asyncio.sleep(1e-4, loop=loop)
yield from asyncio.sleep(1e-6, loop=loop)
yield from asyncio.sleep(1e-8, loop=loop)
yield from asyncio.sleep(1e-10, loop=loop)
self.loop.run_until_complete(wait())
# The ideal number of calls is 12, but on some platforms the selector
# may sleep a little bit less than the timeout, depending on the resolution
# of the clock used by the kernel. Tolerate a few useless calls on
# these platforms.
self.assertLessEqual(self.loop._run_once_counter, 20,
{'clock_resolution': self.loop._clock_resolution,
'selector': self.loop._selector.__class__.__name__})
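# Rough accounting (an assumption, not from the original suite): each awaited
# sleep normally costs two _run_once iterations -- one that computes the
# timeout and polls, one that fires the timer callback -- so five sleeps plus
# task start/finish overhead lands near the "ideal" count of 12 noted above.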
def test_remove_fds_after_closing(self):
loop = self.create_event_loop()
callback = lambda: None
r, w = test_utils.socketpair()
self.addCleanup(r.close)
self.addCleanup(w.close)
loop.add_reader(r, callback)
loop.add_writer(w, callback)
loop.close()
self.assertFalse(loop.remove_reader(r))
self.assertFalse(loop.remove_writer(w))
def test_add_fds_after_closing(self):
loop = self.create_event_loop()
callback = lambda: None
r, w = test_utils.socketpair()
self.addCleanup(r.close)
self.addCleanup(w.close)
loop.close()
with self.assertRaises(RuntimeError):
loop.add_reader(r, callback)
with self.assertRaises(RuntimeError):
loop.add_writer(w, callback)
def test_close_running_event_loop(self):
@asyncio.coroutine
def close_loop(loop):
self.loop.close()
coro = close_loop(self.loop)
with self.assertRaises(RuntimeError):
self.loop.run_until_complete(coro)
def test_close(self):
self.loop.close()
@asyncio.coroutine
def test():
pass
func = lambda: False
coro = test()
self.addCleanup(coro.close)
# operation blocked when the loop is closed
with self.assertRaises(RuntimeError):
self.loop.run_forever()
with self.assertRaises(RuntimeError):
fut = asyncio.Future(loop=self.loop)
self.loop.run_until_complete(fut)
with self.assertRaises(RuntimeError):
self.loop.call_soon(func)
with self.assertRaises(RuntimeError):
self.loop.call_soon_threadsafe(func)
with self.assertRaises(RuntimeError):
self.loop.call_later(1.0, func)
with self.assertRaises(RuntimeError):
self.loop.call_at(self.loop.time() + .0, func)
with self.assertRaises(RuntimeError):
self.loop.run_in_executor(None, func)
with self.assertRaises(RuntimeError):
self.loop.create_task(coro)
with self.assertRaises(RuntimeError):
self.loop.add_signal_handler(signal.SIGTERM, func)
class SubprocessTestsMixin:
def check_terminated(self, returncode):
if sys.platform == 'win32':
self.assertIsInstance(returncode, int)
# expect 1 but sometimes get 0
else:
self.assertEqual(-signal.SIGTERM, returncode)
def check_killed(self, returncode):
if sys.platform == 'win32':
self.assertIsInstance(returncode, int)
# expect 1 but sometimes get 0
else:
self.assertEqual(-signal.SIGKILL, returncode)
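# On POSIX platforms, subprocess return codes encode fatal signals as
# negative numbers: a child terminated by SIGTERM (15) reports a returncode
# of -15 and one killed by SIGKILL (9) reports -9, which is exactly what the
# two helpers above assert.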
def test_subprocess_exec(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
stdin = transp.get_pipe_transport(0)
stdin.write(b'Python The Winner')
self.loop.run_until_complete(proto.got_data[1].wait())
with test_utils.disable_logger():
transp.close()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
self.assertEqual(b'Python The Winner', proto.data[1])
def test_subprocess_interactive(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
stdin = transp.get_pipe_transport(0)
stdin.write(b'Python ')
self.loop.run_until_complete(proto.got_data[1].wait())
proto.got_data[1].clear()
self.assertEqual(b'Python ', proto.data[1])
stdin.write(b'The Winner')
self.loop.run_until_complete(proto.got_data[1].wait())
self.assertEqual(b'Python The Winner', proto.data[1])
with test_utils.disable_logger():
transp.close()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
def test_subprocess_shell(self):
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'echo Python')
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.get_pipe_transport(0).close()
self.loop.run_until_complete(proto.completed)
self.assertEqual(0, proto.returncode)
self.assertTrue(all(f.done() for f in proto.disconnects.values()))
self.assertEqual(proto.data[1].rstrip(b'\r\n'), b'Python')
self.assertEqual(proto.data[2], b'')
transp.close()
def test_subprocess_exitcode(self):
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'exit 7', stdin=None, stdout=None, stderr=None)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.completed)
self.assertEqual(7, proto.returncode)
transp.close()
def test_subprocess_close_after_finish(self):
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'exit 7', stdin=None, stdout=None, stderr=None)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.assertIsNone(transp.get_pipe_transport(0))
self.assertIsNone(transp.get_pipe_transport(1))
self.assertIsNone(transp.get_pipe_transport(2))
self.loop.run_until_complete(proto.completed)
self.assertEqual(7, proto.returncode)
self.assertIsNone(transp.close())
def test_subprocess_kill(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.kill()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
transp.close()
def test_subprocess_terminate(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.terminate()
self.loop.run_until_complete(proto.completed)
self.check_terminated(proto.returncode)
transp.close()
@unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP")
def test_subprocess_send_signal(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.send_signal(signal.SIGHUP)
self.loop.run_until_complete(proto.completed)
self.assertEqual(-signal.SIGHUP, proto.returncode)
transp.close()
def test_subprocess_stderr(self):
prog = os.path.join(os.path.dirname(__file__), 'echo2.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
stdin = transp.get_pipe_transport(0)
stdin.write(b'test')
self.loop.run_until_complete(proto.completed)
transp.close()
self.assertEqual(b'OUT:test', proto.data[1])
self.assertTrue(proto.data[2].startswith(b'ERR:test'), proto.data[2])
self.assertEqual(0, proto.returncode)
def test_subprocess_stderr_redirect_to_stdout(self):
prog = os.path.join(os.path.dirname(__file__), 'echo2.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog, stderr=subprocess.STDOUT)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
stdin = transp.get_pipe_transport(0)
self.assertIsNotNone(transp.get_pipe_transport(1))
self.assertIsNone(transp.get_pipe_transport(2))
stdin.write(b'test')
self.loop.run_until_complete(proto.completed)
self.assertTrue(proto.data[1].startswith(b'OUT:testERR:test'),
proto.data[1])
self.assertEqual(b'', proto.data[2])
transp.close()
self.assertEqual(0, proto.returncode)
def test_subprocess_close_client_stream(self):
prog = os.path.join(os.path.dirname(__file__), 'echo3.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
stdin = transp.get_pipe_transport(0)
stdout = transp.get_pipe_transport(1)
stdin.write(b'test')
self.loop.run_until_complete(proto.got_data[1].wait())
self.assertEqual(b'OUT:test', proto.data[1])
stdout.close()
self.loop.run_until_complete(proto.disconnects[1])
stdin.write(b'xxx')
self.loop.run_until_complete(proto.got_data[2].wait())
if sys.platform != 'win32':
self.assertEqual(b'ERR:BrokenPipeError', proto.data[2])
else:
# After closing the read-end of a pipe, writing to the
# write-end using os.write() fails with errno==EINVAL and
# GetLastError()==ERROR_INVALID_NAME on Windows!?! (Using
# WriteFile() we get ERROR_BROKEN_PIPE as expected.)
self.assertEqual(b'ERR:OSError', proto.data[2])
with test_utils.disable_logger():
transp.close()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
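# A minimal standalone sketch (not part of this test suite; the helper name
# is made up) of the behavior described in the comment above: once the read
# end of a pipe is closed, os.write() on the write end raises
# BrokenPipeError on POSIX, because Python ignores SIGPIPE by default.
def _demo_broken_pipe():
    r, w = os.pipe()
    os.close(r)
    try:
        os.write(w, b'x')
    except OSError as exc:
        # BrokenPipeError (EPIPE) on POSIX; plain OSError (EINVAL) on Windows
        return exc
    finally:
        os.close(w)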
def test_subprocess_wait_no_same_group(self):
# start the new process in a new session
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'exit 7', stdin=None, stdout=None, stderr=None,
start_new_session=True)
_, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.completed)
self.assertEqual(7, proto.returncode)
def test_subprocess_exec_invalid_args(self):
@asyncio.coroutine
def connect(**kwds):
yield from self.loop.subprocess_exec(
asyncio.SubprocessProtocol,
'pwd', **kwds)
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(universal_newlines=True))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(bufsize=4096))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(shell=True))
def test_subprocess_shell_invalid_args(self):
@asyncio.coroutine
def connect(cmd=None, **kwds):
if not cmd:
cmd = 'pwd'
yield from self.loop.subprocess_shell(
asyncio.SubprocessProtocol,
cmd, **kwds)
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(['ls', '-l']))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(universal_newlines=True))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(bufsize=4096))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(shell=False))
if sys.platform == 'win32':
class SelectEventLoopTests(EventLoopTestsMixin, test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop()
class ProactorEventLoopTests(EventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.ProactorEventLoop()
if not sslproto._is_sslproto_available():
def test_create_ssl_connection(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl_verify_failed(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl_match_failed(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl_verified(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_legacy_create_ssl_connection(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl_verify_failed(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl_match_failed(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl_verified(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_reader_callback(self):
raise unittest.SkipTest("IocpEventLoop does not have add_reader()")
def test_reader_callback_cancel(self):
raise unittest.SkipTest("IocpEventLoop does not have add_reader()")
def test_writer_callback(self):
raise unittest.SkipTest("IocpEventLoop does not have add_writer()")
def test_writer_callback_cancel(self):
raise unittest.SkipTest("IocpEventLoop does not have add_writer()")
def test_create_datagram_endpoint(self):
raise unittest.SkipTest(
"IocpEventLoop does not have create_datagram_endpoint()")
def test_remove_fds_after_closing(self):
raise unittest.SkipTest("IocpEventLoop does not have add_reader()")
else:
from asyncio import selectors
class UnixEventLoopTestsMixin(EventLoopTestsMixin):
def setUp(self):
super().setUp()
watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(self.loop)
asyncio.set_child_watcher(watcher)
def tearDown(self):
asyncio.set_child_watcher(None)
super().tearDown()
if hasattr(selectors, 'KqueueSelector'):
class KqueueEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(
selectors.KqueueSelector())
# kqueue doesn't support character devices (PTY) on Mac OS X older
# than 10.9 (Mavericks)
@support.requires_mac_ver(10, 9)
# Issue #20667: KqueueEventLoopTests.test_read_pty_output()
# hangs on OpenBSD 5.5
@unittest.skipIf(sys.platform.startswith('openbsd'),
'test hangs on OpenBSD')
def test_read_pty_output(self):
super().test_read_pty_output()
# kqueue doesn't support character devices (PTY) on Mac OS X older
# than 10.9 (Mavericks)
@support.requires_mac_ver(10, 9)
def test_write_pty(self):
super().test_write_pty()
if hasattr(selectors, 'EpollSelector'):
class EPollEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(selectors.EpollSelector())
if hasattr(selectors, 'PollSelector'):
class PollEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(selectors.PollSelector())
# Should always exist.
class SelectEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(selectors.SelectSelector())
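# Hypothetical helper (not in the original file) spelling out the pattern
# behind the hasattr() checks above: prefer the most scalable selector the
# platform offers and fall back to select(), which always exists.
def _best_available_selector():
    for name in ('KqueueSelector', 'EpollSelector', 'PollSelector',
                 'SelectSelector'):
        cls = getattr(selectors, name, None)
        if cls is not None:
            return cls()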
def noop(*args, **kwargs):
pass
class HandleTests(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = mock.Mock()
self.loop.get_debug.return_value = True
def test_handle(self):
def callback(*args):
return args
args = ()
h = asyncio.Handle(callback, args, self.loop)
self.assertIs(h._callback, callback)
self.assertIs(h._args, args)
self.assertFalse(h._cancelled)
h.cancel()
self.assertTrue(h._cancelled)
def test_callback_with_exception(self):
def callback():
raise ValueError()
self.loop = mock.Mock()
self.loop.call_exception_handler = mock.Mock()
h = asyncio.Handle(callback, (), self.loop)
h._run()
self.loop.call_exception_handler.assert_called_with({
'message': test_utils.MockPattern('Exception in callback.*'),
'exception': mock.ANY,
'handle': h,
'source_traceback': h._source_traceback,
})
def test_handle_weakref(self):
wd = weakref.WeakValueDictionary()
h = asyncio.Handle(lambda: None, (), self.loop)
wd['h'] = h # Would fail without __weakref__ slot.
def test_handle_repr(self):
self.loop.get_debug.return_value = False
# simple function
h = asyncio.Handle(noop, (1, 2), self.loop)
filename, lineno = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<Handle noop(1, 2) at %s:%s>'
% (filename, lineno))
# cancelled handle
h.cancel()
self.assertEqual(repr(h),
'<Handle cancelled>')
# decorated function
cb = asyncio.coroutine(noop)
h = asyncio.Handle(cb, (), self.loop)
self.assertEqual(repr(h),
'<Handle noop() at %s:%s>'
% (filename, lineno))
# partial function
cb = functools.partial(noop, 1, 2)
h = asyncio.Handle(cb, (3,), self.loop)
regex = (r'^<Handle noop\(1, 2\)\(3\) at %s:%s>$'
% (re.escape(filename), lineno))
self.assertRegex(repr(h), regex)
# partial function with keyword args
cb = functools.partial(noop, x=1)
h = asyncio.Handle(cb, (2, 3), self.loop)
regex = (r'^<Handle noop\(x=1\)\(2, 3\) at %s:%s>$'
% (re.escape(filename), lineno))
self.assertRegex(repr(h), regex)
# partial method
if sys.version_info >= (3, 4):
method = HandleTests.test_handle_repr
cb = functools.partialmethod(method)
filename, lineno = test_utils.get_function_source(method)
h = asyncio.Handle(cb, (), self.loop)
cb_regex = r'<function HandleTests.test_handle_repr .*>'
cb_regex = (r'functools.partialmethod\(%s, , \)\(\)' % cb_regex)
regex = (r'^<Handle %s at %s:%s>$'
% (cb_regex, re.escape(filename), lineno))
self.assertRegex(repr(h), regex)
def test_handle_repr_debug(self):
self.loop.get_debug.return_value = True
# simple function
create_filename = __file__
create_lineno = sys._getframe().f_lineno + 1
h = asyncio.Handle(noop, (1, 2), self.loop)
filename, lineno = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<Handle noop(1, 2) at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
# cancelled handle
h.cancel()
self.assertEqual(
repr(h),
'<Handle cancelled noop(1, 2) at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
# double cancellation won't overwrite _repr
h.cancel()
self.assertEqual(
repr(h),
'<Handle cancelled noop(1, 2) at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
def test_handle_source_traceback(self):
loop = asyncio.get_event_loop_policy().new_event_loop()
loop.set_debug(True)
self.set_event_loop(loop)
def check_source_traceback(h):
lineno = sys._getframe(1).f_lineno - 1
self.assertIsInstance(h._source_traceback, list)
self.assertEqual(h._source_traceback[-1][:3],
(__file__,
lineno,
'test_handle_source_traceback'))
# call_soon
h = loop.call_soon(noop)
check_source_traceback(h)
# call_soon_threadsafe
h = loop.call_soon_threadsafe(noop)
check_source_traceback(h)
# call_later
h = loop.call_later(0, noop)
check_source_traceback(h)
# call_at
h = loop.call_at(loop.time(), noop)
check_source_traceback(h)
@unittest.skipUnless(hasattr(collections.abc, 'Coroutine'),
'No collections.abc.Coroutine')
def test_coroutine_like_object_debug_formatting(self):
# Test that asyncio can format coroutines that are instances of
# collections.abc.Coroutine, but lack cr_code or gi_code attributes
# (such as ones compiled with Cython).
class Coro:
def send(self, v):
pass
def throw(self, *exc):
pass
def close(self):
pass
def __await__(self):
pass
coro = Coro()
coro.__name__ = 'AAA'
self.assertTrue(asyncio.iscoroutine(coro))
self.assertEqual(coroutines._format_coroutine(coro), 'AAA()')
coro.__qualname__ = 'BBB'
self.assertEqual(coroutines._format_coroutine(coro), 'BBB()')
coro.cr_running = True
self.assertEqual(coroutines._format_coroutine(coro), 'BBB() running')
coro = Coro()
# Some coroutines might not have '__name__', such as
# built-in async_gen.asend().
self.assertEqual(coroutines._format_coroutine(coro), 'Coro()')
class TimerTests(unittest.TestCase):
def setUp(self):
super().setUp()
self.loop = mock.Mock()
def test_hash(self):
when = time.monotonic()
h = asyncio.TimerHandle(when, lambda: False, (),
mock.Mock())
self.assertEqual(hash(h), hash(when))
def test_timer(self):
def callback(*args):
return args
args = (1, 2, 3)
when = time.monotonic()
h = asyncio.TimerHandle(when, callback, args, mock.Mock())
self.assertIs(h._callback, callback)
self.assertIs(h._args, args)
self.assertFalse(h._cancelled)
# cancel
h.cancel()
self.assertTrue(h._cancelled)
self.assertIsNone(h._callback)
self.assertIsNone(h._args)
# when cannot be None
self.assertRaises(AssertionError,
asyncio.TimerHandle, None, callback, args,
self.loop)
def test_timer_repr(self):
self.loop.get_debug.return_value = False
# simple function
h = asyncio.TimerHandle(123, noop, (), self.loop)
src = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<TimerHandle when=123 noop() at %s:%s>' % src)
# cancelled handle
h.cancel()
self.assertEqual(repr(h),
'<TimerHandle cancelled when=123>')
def test_timer_repr_debug(self):
self.loop.get_debug.return_value = True
# simple function
create_filename = __file__
create_lineno = sys._getframe().f_lineno + 1
h = asyncio.TimerHandle(123, noop, (), self.loop)
filename, lineno = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<TimerHandle when=123 noop() '
'at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
# cancelled handle
h.cancel()
self.assertEqual(repr(h),
'<TimerHandle cancelled when=123 noop() '
'at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
def test_timer_comparison(self):
def callback(*args):
return args
when = time.monotonic()
h1 = asyncio.TimerHandle(when, callback, (), self.loop)
h2 = asyncio.TimerHandle(when, callback, (), self.loop)
# TODO: Use assertLess etc.
self.assertFalse(h1 < h2)
self.assertFalse(h2 < h1)
self.assertTrue(h1 <= h2)
self.assertTrue(h2 <= h1)
self.assertFalse(h1 > h2)
self.assertFalse(h2 > h1)
self.assertTrue(h1 >= h2)
self.assertTrue(h2 >= h1)
self.assertTrue(h1 == h2)
self.assertFalse(h1 != h2)
h2.cancel()
self.assertFalse(h1 == h2)
h1 = asyncio.TimerHandle(when, callback, (), self.loop)
h2 = asyncio.TimerHandle(when + 10.0, callback, (), self.loop)
self.assertTrue(h1 < h2)
self.assertFalse(h2 < h1)
self.assertTrue(h1 <= h2)
self.assertFalse(h2 <= h1)
self.assertFalse(h1 > h2)
self.assertTrue(h2 > h1)
self.assertFalse(h1 >= h2)
self.assertTrue(h2 >= h1)
self.assertFalse(h1 == h2)
self.assertTrue(h1 != h2)
h3 = asyncio.Handle(callback, (), self.loop)
self.assertIs(NotImplemented, h1.__eq__(h3))
self.assertIs(NotImplemented, h1.__ne__(h3))
class AbstractEventLoopTests(unittest.TestCase):
def test_not_implemented(self):
f = mock.Mock()
loop = asyncio.AbstractEventLoop()
self.assertRaises(
NotImplementedError, loop.run_forever)
self.assertRaises(
NotImplementedError, loop.run_until_complete, None)
self.assertRaises(
NotImplementedError, loop.stop)
self.assertRaises(
NotImplementedError, loop.is_running)
self.assertRaises(
NotImplementedError, loop.is_closed)
self.assertRaises(
NotImplementedError, loop.close)
self.assertRaises(
NotImplementedError, loop.create_task, None)
self.assertRaises(
NotImplementedError, loop.call_later, None, None)
self.assertRaises(
NotImplementedError, loop.call_at, f, f)
self.assertRaises(
NotImplementedError, loop.call_soon, None)
self.assertRaises(
NotImplementedError, loop.time)
self.assertRaises(
NotImplementedError, loop.call_soon_threadsafe, None)
self.assertRaises(
NotImplementedError, loop.run_in_executor, f, f)
self.assertRaises(
NotImplementedError, loop.set_default_executor, f)
self.assertRaises(
NotImplementedError, loop.getaddrinfo, 'localhost', 8080)
self.assertRaises(
NotImplementedError, loop.getnameinfo, ('localhost', 8080))
self.assertRaises(
NotImplementedError, loop.create_connection, f)
self.assertRaises(
NotImplementedError, loop.create_server, f)
self.assertRaises(
NotImplementedError, loop.create_datagram_endpoint, f)
self.assertRaises(
NotImplementedError, loop.add_reader, 1, f)
self.assertRaises(
NotImplementedError, loop.remove_reader, 1)
self.assertRaises(
NotImplementedError, loop.add_writer, 1, f)
self.assertRaises(
NotImplementedError, loop.remove_writer, 1)
self.assertRaises(
NotImplementedError, loop.sock_recv, f, 10)
self.assertRaises(
NotImplementedError, loop.sock_sendall, f, 10)
self.assertRaises(
NotImplementedError, loop.sock_connect, f, f)
self.assertRaises(
NotImplementedError, loop.sock_accept, f)
self.assertRaises(
NotImplementedError, loop.add_signal_handler, 1, f)
self.assertRaises(
NotImplementedError, loop.remove_signal_handler, 1)
self.assertRaises(
NotImplementedError, loop.remove_signal_handler, 1)
self.assertRaises(
NotImplementedError, loop.connect_read_pipe, f,
mock.sentinel.pipe)
self.assertRaises(
NotImplementedError, loop.connect_write_pipe, f,
mock.sentinel.pipe)
self.assertRaises(
NotImplementedError, loop.subprocess_shell, f,
mock.sentinel)
self.assertRaises(
NotImplementedError, loop.subprocess_exec, f)
self.assertRaises(
NotImplementedError, loop.set_exception_handler, f)
self.assertRaises(
NotImplementedError, loop.default_exception_handler, f)
self.assertRaises(
NotImplementedError, loop.call_exception_handler, f)
self.assertRaises(
NotImplementedError, loop.get_debug)
self.assertRaises(
NotImplementedError, loop.set_debug, f)
class ProtocolsAbsTests(unittest.TestCase):
def test_empty(self):
f = mock.Mock()
p = asyncio.Protocol()
self.assertIsNone(p.connection_made(f))
self.assertIsNone(p.connection_lost(f))
self.assertIsNone(p.data_received(f))
self.assertIsNone(p.eof_received())
dp = asyncio.DatagramProtocol()
self.assertIsNone(dp.connection_made(f))
self.assertIsNone(dp.connection_lost(f))
self.assertIsNone(dp.error_received(f))
self.assertIsNone(dp.datagram_received(f, f))
sp = asyncio.SubprocessProtocol()
self.assertIsNone(sp.connection_made(f))
self.assertIsNone(sp.connection_lost(f))
self.assertIsNone(sp.pipe_data_received(1, f))
self.assertIsNone(sp.pipe_connection_lost(1, f))
self.assertIsNone(sp.process_exited())
class PolicyTests(unittest.TestCase):
def test_event_loop_policy(self):
policy = asyncio.AbstractEventLoopPolicy()
self.assertRaises(NotImplementedError, policy.get_event_loop)
self.assertRaises(NotImplementedError, policy.set_event_loop, object())
self.assertRaises(NotImplementedError, policy.new_event_loop)
self.assertRaises(NotImplementedError, policy.get_child_watcher)
self.assertRaises(NotImplementedError, policy.set_child_watcher,
object())
def test_get_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
self.assertIsNone(policy._local._loop)
loop = policy.get_event_loop()
self.assertIsInstance(loop, asyncio.AbstractEventLoop)
self.assertIs(policy._local._loop, loop)
self.assertIs(loop, policy.get_event_loop())
loop.close()
def test_get_event_loop_calls_set_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
with mock.patch.object(
policy, "set_event_loop",
wraps=policy.set_event_loop) as m_set_event_loop:
loop = policy.get_event_loop()
# policy._local._loop must be set through .set_event_loop()
# (the unix DefaultEventLoopPolicy needs this call to attach
# the child watcher correctly)
m_set_event_loop.assert_called_with(loop)
loop.close()
def test_get_event_loop_after_set_none(self):
policy = asyncio.DefaultEventLoopPolicy()
policy.set_event_loop(None)
self.assertRaises(RuntimeError, policy.get_event_loop)
@mock.patch('asyncio.events.threading.current_thread')
def test_get_event_loop_thread(self, m_current_thread):
def f():
policy = asyncio.DefaultEventLoopPolicy()
self.assertRaises(RuntimeError, policy.get_event_loop)
th = threading.Thread(target=f)
th.start()
th.join()
def test_new_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
loop = policy.new_event_loop()
self.assertIsInstance(loop, asyncio.AbstractEventLoop)
loop.close()
def test_set_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
old_loop = policy.get_event_loop()
self.assertRaises(AssertionError, policy.set_event_loop, object())
loop = policy.new_event_loop()
policy.set_event_loop(loop)
self.assertIs(loop, policy.get_event_loop())
self.assertIsNot(old_loop, policy.get_event_loop())
loop.close()
old_loop.close()
def test_get_event_loop_policy(self):
policy = asyncio.get_event_loop_policy()
self.assertIsInstance(policy, asyncio.AbstractEventLoopPolicy)
self.assertIs(policy, asyncio.get_event_loop_policy())
def test_set_event_loop_policy(self):
self.assertRaises(
AssertionError, asyncio.set_event_loop_policy, object())
old_policy = asyncio.get_event_loop_policy()
policy = asyncio.DefaultEventLoopPolicy()
asyncio.set_event_loop_policy(policy)
self.assertIs(policy, asyncio.get_event_loop_policy())
self.assertIsNot(policy, old_policy)
def test_get_event_loop_returns_running_loop(self):
class Policy(asyncio.DefaultEventLoopPolicy):
def get_event_loop(self):
raise NotImplementedError
loop = None
old_policy = asyncio.get_event_loop_policy()
try:
asyncio.set_event_loop_policy(Policy())
loop = asyncio.new_event_loop()
self.assertIs(asyncio._get_running_loop(), None)
async def func():
self.assertIs(asyncio.get_event_loop(), loop)
self.assertIs(asyncio._get_running_loop(), loop)
loop.run_until_complete(func())
finally:
asyncio.set_event_loop_policy(old_policy)
if loop is not None:
loop.close()
self.assertIs(asyncio._get_running_loop(), None)
if __name__ == '__main__':
unittest.main()
| [ "[email protected]" ] | |
f7a2225fb76bf9459f66776aa69b4cf0239c4723 | 64d4e61c73d158a81300b4c43767971a512f66e9 | /KingPhisherServer | bfe4504a4c49194e930b70de7a8939fcf7654b47 | [ "BSD-3-Clause" ] | permissive | nebooben/king-phisher | 82f384da8686149f270d0a117a5536fc56bc949a | 23ea1f2749cd7af031025802557e9b84d5c74ece | refs/heads/master | 2021-01-18T12:17:15.088018 | 2015-09-22T20:37:42 | 2015-09-22T20:37:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,103 | |
#!/usr/bin/python -B
# -*- coding: utf-8 -*-
#
# KingPhisherServer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# pylint: disable=R0914
import argparse
import logging
import os
import pwd
import signal
import sys
import threading
from king_phisher import color
from king_phisher import errors
from king_phisher import find
from king_phisher import geoip
from king_phisher import its
from king_phisher import utilities
from king_phisher import version
from king_phisher.server import server
from boltons import strutils
from smoke_zephyr.configuration import Configuration
from smoke_zephyr.requirements import check_requirements
__requirements__ = [
'alembic>=0.6.7',
'boltons>=0.6.4',
"dns{0}>=1.12.0".format('python' if its.py_v2 else 'python3'),
'geoip2>=2.1.0',
'Jinja2>=2.7.3',
'markupsafe>=0.23',
'msgpack-python>=0.4.5',
'psycopg2>=2.6',
'PyYAML>=3.11',
'requests>=2.7.0',
'SQLAlchemy>=1.0.2'
]
if its.py_v2:
__requirements__.append('py2-ipaddress>=3.4')
def main():
parser = argparse.ArgumentParser(description='King Phisher Server', conflict_handler='resolve')
utilities.argp_add_args(parser)
parser.add_argument('-f', '--foreground', dest='foreground', action='store_true', default=False, help='run in the foreground (do not fork)')
parser.add_argument('--verify-config', dest='verify_config', action='store_true', default=False, help='verify the configuration and exit')
parser.add_argument('--update-geoip-db', dest='update_geoip_db', action='store_true', default=False, help='update the geoip database and exit')
parser.add_argument('config_file', action='store', type=argparse.FileType('r'), help='configuration file to use')
arguments = parser.parse_args()
console_log_handler = utilities.configure_stream_logger(arguments.loglvl, arguments.logger)
config_file = arguments.config_file
del parser
logger = logging.getLogger('KingPhisher.Server.CLI')
missing_requirements = check_requirements(__requirements__)
if missing_requirements:
color.print_error('the following package requirements are missing or incompatible:')
for missing_req in missing_requirements:
color.print_error(' - ' + missing_req)
color.print_error('please install the missing requirements with pip')
return os.EX_SOFTWARE
if os.getuid():
color.print_error('the server must be started as root, configure the')
color.print_error('\'server.setuid_username\' option in the config file to drop privileges')
return os.EX_NOPERM
try:
config = Configuration(config_file.name)
except Exception as error:
color.print_error('an error occurred while parsing the server configuration file')
error_name = "{0}.{1}".format(error.__module__, error.__class__.__name__)
if error_name != 'yaml.parser.ParserError':
raise
for line in str(error).split('\n'):
color.print_error(line.rstrip())
return os.EX_CONFIG
# configure environment variables
data_path = os.path.dirname(__file__)
data_path = os.path.join(data_path, 'data', 'server')
data_path = os.path.abspath(data_path)
find.data_path_append(data_path)
data_path = os.getcwd()
data_path = os.path.join(data_path, 'data', 'server')
data_path = os.path.abspath(data_path)
find.data_path_append(data_path)
if config.has_option('server.data_path'):
find.data_path_append(config.get('server.data_path'))
# check the configuration for missing and incompatible options
verify_config = find.find_data_file('server_config_verification.yml')
if not verify_config:
color.print_error('could not load server config verification data')
return os.EX_NOINPUT
missing_options = config.get_missing(verify_config)
if missing_options:
if 'missing' in missing_options:
color.print_error('the following required options are missing from the server configuration:')
for option in missing_options['missing']:
color.print_error(' - ' + option)
if 'incompatible' in missing_options:
color.print_error('the following options are of an incompatible data type in the server configuration:')
for option in missing_options['incompatible']:
color.print_error(" - {0} (type: {1})".format(option[0], option[1]))
return os.EX_CONFIG
if arguments.verify_config:
color.print_good('configuration verification passed')
return os.EX_OK
if arguments.update_geoip_db:
color.print_status('downloading a new geoip database')
size = geoip.download_geolite2_city_db(config.get('server.geoip.database'))
color.print_good("download complete, file size: {0}".format(strutils.bytes2human(size)))
return os.EX_OK
# setup logging based on the configuration
log_file_path = None
if config.has_section('logging'):
log_level = min(getattr(logging, arguments.loglvl), getattr(logging, config.get('logging.level').upper()))
if config.has_option('logging.file') and config.get('logging.file'):
log_file_path = config.get('logging.file')
file_handler = logging.FileHandler(log_file_path)
file_handler.setFormatter(logging.Formatter("%(asctime)s %(name)-50s %(levelname)-8s %(message)s"))
logging.getLogger('').addHandler(file_handler)
file_handler.setLevel(log_level)
if config.has_option('logging.console') and config.get('logging.console'):
console_log_handler.setLevel(log_level)
logger.debug("king phisher version: {0} python version: {1}.{2}.{3}".format(version.version, sys.version_info[0], sys.version_info[1], sys.version_info[2]))
# fork into the background
should_fork = True
if arguments.foreground:
should_fork = False
elif config.has_option('server.fork'):
should_fork = bool(config.get('server.fork'))
if should_fork and os.fork():
return os.EX_OK
try:
king_phisher_server = server.build_king_phisher_server(config)
except errors.KingPhisherError as error:
logger.critical(error.message)
return os.EX_SOFTWARE
server_pid = os.getpid()
logger.info("server running in process: {0} main tid: 0x{1:x}".format(server_pid, threading.current_thread().ident))
if should_fork and config.has_option('server.pid_file'):
pid_file = open(config.get('server.pid_file'), 'w')
pid_file.write(str(server_pid))
pid_file.close()
if config.has_option('server.setuid_username'):
setuid_username = config.get('server.setuid_username')
try:
user_info = pwd.getpwnam(setuid_username)
except KeyError:
logger.critical('an invalid username was specified as \'server.setuid_username\'')
king_phisher_server.shutdown()
return os.EX_NOUSER
if log_file_path:
os.chown(log_file_path, user_info.pw_uid, user_info.pw_gid)
os.setregid(user_info.pw_gid, user_info.pw_gid)
os.setreuid(user_info.pw_uid, user_info.pw_uid)
logger.info("dropped privileges to the {0} account".format(setuid_username))
else:
logger.warning('running with root privileges is dangerous, drop them by configuring \'server.setuid_username\'')
db_engine_url = king_phisher_server.database_engine.url
if db_engine_url.drivername == 'sqlite':
logger.warning('sqlite is no longer fully supported, see https://github.com/securestate/king-phisher/wiki/Database#sqlite for more details')
database_dir = os.path.dirname(db_engine_url.database)
if not os.access(database_dir, os.W_OK):
logger.critical('sqlite requires write permissions to the folder containing the database')
king_phisher_server.shutdown()
return os.EX_NOPERM
sighup_handler = lambda: threading.Thread(target=king_phisher_server.shutdown).start()
signal.signal(signal.SIGHUP, lambda signum, frame: sighup_handler())
try:
king_phisher_server.serve_forever(fork=False)
except KeyboardInterrupt:
pass
king_phisher_server.shutdown()
logging.shutdown()
return os.EX_OK
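# Hedged sketch (not part of this script; the function name is made up)
# isolating the privilege-drop order used in main() above: the group ID must
# be changed while the process still has root privileges, because after
# os.setreuid() it no longer has permission to call os.setregid().
def _drop_privileges(username):
    user_info = pwd.getpwnam(username)  # raises KeyError for unknown users
    os.setregid(user_info.pw_gid, user_info.pw_gid)
    os.setreuid(user_info.pw_uid, user_info.pw_uid)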
if __name__ == '__main__':
sys.exit(main())
| [ "[email protected]" ] | |
98d4b751487188eba562b6871a2298cb5ea68deb | 34d5ebe9e6de9d6742c234dabfa9b38f0adb7774 | /carriage_return/ui.py | 6b4e0f2958482b03ef044f9c62842f0bd045a463 | [] | no_license | campagnola/return-to-carriage | f37acaf8e41ccf04e7089018574732a1fdcd2a64 | eeb7f31b16e2c236c875c867a0295173fa6f4b0a | refs/heads/master | 2021-08-05T14:02:49.988526 | 2021-07-31T08:38:17 | 2021-07-31T08:38:17 | 84,014,684 | 0 | 2 | null | 2021-07-30T02:48:13 | 2017-03-06T00:55:55 | Python | UTF-8 | Python | false | false | 4,518 | py |
import numpy as np
import vispy.scene, vispy.app
import vispy.util.ptime as ptime
from .input import InputDispatcher, CommandInputHandler
from .graphics import TextBox
from .console import CommandInterpreter
class MainWindow:
"""Implements user interface: graphical panels, key input handling
"""
def __init__(self):
self.canvas = vispy.scene.SceneCanvas()
self.canvas.show()
self.canvas.size = 1400, 900
self.debug_line_of_sight = False
self.debug_los_tex = False
# Setup input event handling
self.input_dispatcher = InputDispatcher(self.canvas)
self.command_mode = False
# setup UI
self.view = self.canvas.central_widget.add_view()
self.view.camera = 'panzoom'
self.view.camera.rect = [0, -5, 120, 60]
self.view.camera.aspect = 0.6
self.view.events.key_press.disconnect()
self.camera_target = self.view.camera.rect
self._last_camera_update = ptime.time()
self.scroll_timer = vispy.app.Timer(start=True, connect=self._scroll_camera, interval=0.016)
self.console_grid = self.canvas.central_widget.add_grid()
self.stats_box = TextBox((2, 160))
self.console_grid.add_widget(self.stats_box.view, 1, 0, 1, 2)
self.stats_box.write(
"HP:17/33 Food:56% Water:34% Sleep:65% Weight:207(45) Level:3 Int:12 Str:9 Wis:11 Cha:2")
self.stats_box.view.height_max = 30
self.stats_box.view.stretch = (1, 10)
self.info_box = TextBox((15, 80))
self.console_grid.add_widget(self.info_box.view, 2, 0)
self.info_box.write("There is a scroll of infinite recursion here.")
self.info_box.view.height_max = 200
self.info_box.view.stretch = (1, 1)
self.console = TextBox((15, 80))
self.console_grid.add_widget(self.console.view, 2, 1)
self.console.view.stretch = (1, 10)
# self.console.view.parent = self.canvas.scene
self.console.view.rect = vispy.geometry.Rect(30, 620, 1350, 250)
self.console.transform = vispy.visuals.transforms.STTransform((0, 0, -0.5))
# self.console.view.camera.aspect = 0.6
self.console.view.height_max = 200
self.console.write('Hello?')
self.console.write('Is anybody\n there?')
self.console.write(''.join([chr(i) for i in range(0x20, 128)]))
# self.console.view.camera.rect = [-1, -1, 30, 3]
self.command = CommandInterpreter(self)
self.cmd_input_handler = CommandInputHandler(self.console, self.command)
self._follow_entity = None
def follow_entity(self, entity):
if self._follow_entity is not None:
self._follow_entity.location.global_changed.disconnect(self._update_camera_target)
self._follow_entity = entity
entity.location.global_changed.connect(self._update_camera_target)
self._update_camera_target()
def toggle_command_mode(self):
# todo: visual cue
self.command_mode = not self.command_mode
if self.command_mode:
self.cmd_input_handler.activate()
else:
self.cmd_input_handler.deactivate()
def _scroll_camera(self, ev):
now = ptime.time()
dt = now - self._last_camera_update
self._last_camera_update = now
cr = vispy.geometry.Rect(self.view.camera.rect)
tr = self.camera_target
crv = np.array(cr.pos + cr.size, dtype='float32')
trv = np.array(tr.pos + tr.size, dtype='float32')
if not np.any(abs(trv - crv) > 1e-2):
return
s = np.exp(-dt / 0.4) # 400 ms settling time constant
nrv = crv * s + trv * (1.0 - s)
cr.pos = nrv[:2]
cr.size = nrv[2:]
self.view.camera.rect = cr
def _update_camera_target(self, event=None):
location = self._follow_entity.location
pp = np.array(location.global_location.slot)
cr = vispy.geometry.Rect(self.view.camera.rect)
cc = np.array(cr.center)
cs = np.array(cr.size)
cp = np.array(cr.pos)
dif = pp - cc
maxdif = 0.1 * cs # start correcting camera at 10% width from center
for ax in (0, 1):
if dif[ax] < -maxdif[ax]:
cp[ax] += dif[ax] + maxdif[ax]
elif dif[ax] > maxdif[ax]:
cp[ax] += dif[ax] - maxdif[ax]
cr.pos = cp
self.camera_target = cr
def quit(self):
self.canvas.close()
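# A standalone sketch (not from the original module; the function name is
# made up) of the frame-rate-independent smoothing used in _scroll_camera
# above: with s = exp(-dt / tau), the remaining error decays by the same
# fraction per unit of real time no matter how often the timer fires.
def _smooth_toward(current, target, dt, tau=0.4):
    s = np.exp(-dt / tau)  # fraction of the old error kept after dt seconds
    return current * s + target * (1.0 - s)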
| [ "[email protected]" ] | |
e2df404f234ab3d108cbda675d9190679f716fdd | feccf7588777becba68921c0bfade3e21f5210ce | /airflow/providers/google/ads/_vendor/googleads/v12/services/services/feed_mapping_service/client.py | 47bde342b33c5b65eb11046e4f5b390987d25d06 | [ "BSD-3-Clause", "MIT", "Apache-2.0" ] | permissive | francescomucio/airflow | f17ed9abd8b41d8a2227deca052508edf12f1cbf | c199b1a10563a11cf24436e38cb167ae82c01601 | refs/heads/master | 2023-04-14T17:44:53.438246 | 2023-04-06T06:44:23 | 2023-04-06T06:44:23 | 217,327,641 | 0 | 0 | Apache-2.0 | 2020-09-09T13:26:47 | 2019-10-24T15:06:52 | Python | UTF-8 | Python | false | false | 20,872 | py |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from airflow.providers.google.ads._vendor.googleads.v12.services.types import feed_mapping_service
from google.rpc import status_pb2 # type: ignore
from .transports.base import FeedMappingServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import FeedMappingServiceGrpcTransport
class FeedMappingServiceClientMeta(type):
"""Metaclass for the FeedMappingService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[FeedMappingServiceTransport]]
_transport_registry["grpc"] = FeedMappingServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[FeedMappingServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class FeedMappingServiceClient(metaclass=FeedMappingServiceClientMeta):
"""Service to manage feed mappings."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeedMappingServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeedMappingServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> FeedMappingServiceTransport:
"""Returns the transport used by the client instance.
Returns:
FeedMappingServiceTransport: The transport used by the client
instance.
"""
return self._transport
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
@staticmethod
def feed_path(customer_id: str, feed_id: str,) -> str:
"""Returns a fully-qualified feed string."""
return "customers/{customer_id}/feeds/{feed_id}".format(
customer_id=customer_id, feed_id=feed_id,
)
@staticmethod
def parse_feed_path(path: str) -> Dict[str, str]:
"""Parses a feed path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/feeds/(?P<feed_id>.+?)$", path
)
return m.groupdict() if m else {}
@staticmethod
def feed_mapping_path(
customer_id: str, feed_id: str, feed_mapping_id: str,
) -> str:
"""Returns a fully-qualified feed_mapping string."""
return "customers/{customer_id}/feedMappings/{feed_id}~{feed_mapping_id}".format(
customer_id=customer_id,
feed_id=feed_id,
feed_mapping_id=feed_mapping_id,
)
@staticmethod
def parse_feed_mapping_path(path: str) -> Dict[str, str]:
"""Parses a feed_mapping path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/feedMappings/(?P<feed_id>.+?)~(?P<feed_mapping_id>.+?)$",
path,
)
return m.groupdict() if m else {}
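# Assumed round-trip example for the two helpers above (IDs are made up):
#   feed_mapping_path("123", "456", "789")
#       -> "customers/123/feedMappings/456~789"
#   parse_feed_mapping_path("customers/123/feedMappings/456~789")
#       -> {"customer_id": "123", "feed_id": "456", "feed_mapping_id": "789"}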
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, FeedMappingServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the feed mapping service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, FeedMappingServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, FeedMappingServiceTransport):
# transport is a FeedMappingServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
def mutate_feed_mappings(
self,
request: Union[
feed_mapping_service.MutateFeedMappingsRequest, dict
] = None,
*,
customer_id: str = None,
operations: Sequence[feed_mapping_service.FeedMappingOperation] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> feed_mapping_service.MutateFeedMappingsResponse:
r"""Creates or removes feed mappings. Operation statuses are
returned.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `DatabaseError <>`__
`DistinctError <>`__ `FeedMappingError <>`__ `FieldError <>`__
`HeaderError <>`__ `IdError <>`__ `InternalError <>`__
`MutateError <>`__ `NotEmptyError <>`__
`OperationAccessDeniedError <>`__ `OperatorError <>`__
`QuotaError <>`__ `RangeError <>`__ `RequestError <>`__
`SizeLimitError <>`__ `StringFormatError <>`__
`StringLengthError <>`__
Args:
request (Union[google.ads.googleads.v12.services.types.MutateFeedMappingsRequest, dict]):
The request object. Request message for
[FeedMappingService.MutateFeedMappings][google.ads.googleads.v12.services.FeedMappingService.MutateFeedMappings].
customer_id (str):
Required. The ID of the customer
whose feed mappings are being modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operations (Sequence[google.ads.googleads.v12.services.types.FeedMappingOperation]):
Required. The list of operations to
perform on individual feed mappings.
This corresponds to the ``operations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v12.services.types.MutateFeedMappingsResponse:
Response message for a feed mapping
mutate.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([customer_id, operations])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a feed_mapping_service.MutateFeedMappingsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, feed_mapping_service.MutateFeedMappingsRequest
):
request = feed_mapping_service.MutateFeedMappingsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operations is not None:
request.operations = operations
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.mutate_feed_mappings
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
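    # A hedged usage sketch -- the customer ID below is a placeholder and
    # building the FeedMappingOperation messages is elided:
    #
    #     client = FeedMappingServiceClient()
    #     response = client.mutate_feed_mappings(
    #         customer_id="1234567890", operations=[operation],
    #     )
    #     for result in response.results:
    #         print(result.resource_name)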
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("FeedMappingServiceClient",)
| [
"[email protected]"
]
| |
89a83059cc975cbb899bcbf35c4ce9000b7da5e0 | 59166105545cdd87626d15bf42e60a9ee1ef2413 | /test/test_watermill_api.py | 9b7b62efa75df5f6b212c5921dfa2cb31da4fd6a | []
| no_license | mosoriob/dbpedia_api_client | 8c594fc115ce75235315e890d55fbf6bd555fa85 | 8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc | refs/heads/master | 2022-11-20T01:42:33.481024 | 2020-05-12T23:22:54 | 2020-05-12T23:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 957 | py | # coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import dbpedia
from dbpedia.api.watermill_api import WatermillApi # noqa: E501
from dbpedia.rest import ApiException
class TestWatermillApi(unittest.TestCase):
"""WatermillApi unit test stubs"""
def setUp(self):
self.api = dbpedia.api.watermill_api.WatermillApi() # noqa: E501
def tearDown(self):
pass
def test_watermills_get(self):
"""Test case for watermills_get
List all instances of Watermill # noqa: E501
"""
pass
def test_watermills_id_get(self):
"""Test case for watermills_id_get
Get a single Watermill by its id # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
ce66f81dd62ef4c454b93bada3202dfdabc764a2 | adbb2b958296815f9485bab60c0d38827befeeeb | /build/lib.linux-i686-2.7/gdrivefs/change.py | 394f5bedbdc47e5902688e014679cddbd2e96977 | [
"MIT"
]
| permissive | gryphius/GDriveFS | 4b4619e1eefceb562ded6ae13dcc9a2c5b4c0a1b | fadfbdea019cfa4c2a821f4636380edbc8be32bc | refs/heads/master | 2021-01-18T14:14:32.028542 | 2013-04-24T06:17:03 | 2013-04-24T06:17:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,892 | py | import logging
from threading import Lock, Timer
from gdrivefs.gdtool import AccountInfo, drive_proxy
from gdrivefs.conf import Conf
from gdrivefs.cache import PathRelations, EntryCache
from gdrivefs.timer import Timers
def _sched_check_changes():
logging.debug("Doing scheduled check for changes.")
get_change_manager().process_updates()
# Schedule next invocation.
t = Timer(Conf.get('change_check_frequency_s'), _sched_check_changes)
t.start()
Timers.get_instance().register_timer('change', t)
class _ChangeManager(object):
__log = None
at_change_id = None
def __init__(self):
self.__log = logging.getLogger().getChild('ChangeMan')
try:
self.at_change_id = AccountInfo.get_instance().largest_change_id
except:
self.__log.exception("Could not get largest change-ID.")
raise
self.__log.info("Latest change-ID at startup is (%d)." %
(self.at_change_id))
def mount_init(self):
"""Called when filesystem is first mounted."""
self.__log.debug("Change init.")
_sched_check_changes()
def mount_destroy(self):
"""Called when the filesystem is unmounted."""
self.__log.debug("Change destroy.")
def process_updates(self):
"""Process any changes to our files. Return True if everything is up to
date or False if we need to be run again.
"""
start_at_id = (self.at_change_id + 1)
try:
result = drive_proxy('list_changes', start_change_id=start_at_id)
except:
self.__log.exception("Could not retrieve updates. Skipped.")
return True
(largest_change_id, next_page_token, changes) = result
self.__log.debug("The latest reported change-ID is (%d) and we're "
"currently at change-ID (%d)." % (largest_change_id,
self.at_change_id))
if largest_change_id == self.at_change_id:
self.__log.debug("No entries have changed.")
return True
self.__log.info("(%d) changes will now be applied." % (len(changes)))
for change_id, change_tuple in changes.iteritems():
# Apply the changes. We expect to be running them from oldest to
# newest.
self.__log.info("Change with ID (%d) will now be applied." %
(change_id))
try:
self.__apply_change(change_id, change_tuple)
except:
self.__log.exception("There was a problem while processing change"
" with ID (%d). No more changes will be "
"applied." % (change_id))
return False
self.at_change_id = change_id
return (next_page_token == None)
def __apply_change(self, change_id, change_tuple):
"""Apply changes to our filesystem reported by GD. All we do is remove
the current record components, if it's valid, and then reload it with
what we were given. Note that since we don't necessarily know
about the entries that have been changed, this also allows us to slowly
increase our knowledge of the filesystem (of, obviously, only those
things that change).
"""
(entry_id, was_deleted, entry) = change_tuple
is_visible = entry.is_visible if entry else None
self.__log.info("Applying change with change-ID (%d), entry-ID [%s], and "
"is-visible of [%s]" % (change_id, entry_id, is_visible))
# First, remove any current knowledge from the system.
self.__log.debug("Removing all trace of entry with ID [%s]." % (entry_id))
try:
PathRelations.get_instance().remove_entry_all(entry_id)
except:
self.__log.exception("There was a problem remove entry with ID [%s] "
"from the caches." % (entry_id))
raise
# If it wasn't deleted, add it back.
self.__log.debug("Registering changed entry with ID [%s]." % (entry_id))
if is_visible:
path_relations = PathRelations.get_instance()
try:
path_relations.register_entry(entry)
except:
self.__log.exception("Could not register changed entry with ID "
"[%s] with path-relations cache." %
(entry_id))
raise
def get_change_manager():
with get_change_manager.lock:
if not get_change_manager.instance:
get_change_manager.instance = _ChangeManager()
return get_change_manager.instance
get_change_manager.instance = None
get_change_manager.lock = Lock()
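# A minimal usage sketch: the singleton is normally driven by the scheduled
# timer registered in _sched_check_changes(), but it can also be polled
# directly (a False return value means more change pages are still pending):
#
#     get_change_manager().mount_init()
#     up_to_date = get_change_manager().process_updates()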
| [
"[email protected]"
]
| |
5260e5f6e9e62dff2851c2a69b0d9942a5673c04 | ccbb7fb8fda4d936e765263f05a435058b397bd9 | /src/guiltytargets/ppi_network_annotation/pipeline.py | 4556892fb1e8316cdaac58aa4319506234f86649 | [
"MIT"
]
| permissive | GuiltyTargets/guiltytargets | 5a5d3ba9e45867a64c81a91529ae6689f8be447f | c20a5cae6c9cc71c2ca73080a862abe986bc34c0 | refs/heads/master | 2022-02-13T03:30:49.705239 | 2021-12-22T12:51:20 | 2021-12-22T12:51:20 | 154,318,881 | 10 | 5 | null | null | null | null | UTF-8 | Python | false | false | 3,627 | py | # -*- coding: utf-8 -*-
"""Functions to easily set up the network."""
import logging
from typing import List, Optional
from .model.gene import Gene
from .model.network import Network
from .parsers import parse_csv, parse_disease_associations, parse_disease_ids, parse_excel, parse_ppi_graph
__all__ = [
'generate_ppi_network',
'parse_dge',
]
logger = logging.getLogger(__name__)
def generate_ppi_network(
ppi_graph_path: str,
dge_list: List[Gene],
max_adj_p: float,
max_log2_fold_change: float,
min_log2_fold_change: float,
ppi_edge_min_confidence: Optional[float] = None,
current_disease_ids_path: Optional[str] = None,
disease_associations_path: Optional[str] = None,
) -> Network:
"""Generate the protein-protein interaction network.
:return Network: Protein-protein interaction network with information on differential expression.
"""
# Compilation of a protein-protein interaction (PPI) graph (HIPPIE)
protein_interactions = parse_ppi_graph(ppi_graph_path, ppi_edge_min_confidence)
protein_interactions = protein_interactions.simplify()
if disease_associations_path is not None and current_disease_ids_path is not None:
current_disease_ids = parse_disease_ids(current_disease_ids_path)
disease_associations = parse_disease_associations(disease_associations_path,
current_disease_ids)
else:
disease_associations = None
# Build an undirected weighted graph with the remaining interactions based on Entrez gene IDs
network = Network(
protein_interactions,
max_adj_p=max_adj_p,
max_l2fc=max_log2_fold_change,
min_l2fc=min_log2_fold_change,
)
network.set_up_network(dge_list, disease_associations=disease_associations)
return network
def parse_dge(
dge_path: str,
entrez_id_header: str,
log2_fold_change_header: str,
adj_p_header: str,
entrez_delimiter: str,
base_mean_header: Optional[str] = None,
) -> List[Gene]:
"""Parse a differential expression file.
:param dge_path: Path to the file.
:param entrez_id_header: Header for the Entrez identifier column
:param log2_fold_change_header: Header for the log2 fold change column
:param adj_p_header: Header for the adjusted p-value column
:param entrez_delimiter: Delimiter between Entrez ids.
:param base_mean_header: Header for the base mean column.
:return: A list of genes.
"""
if dge_path.endswith('.xlsx'):
return parse_excel(
dge_path,
entrez_id_header=entrez_id_header,
log_fold_change_header=log2_fold_change_header,
adjusted_p_value_header=adj_p_header,
entrez_delimiter=entrez_delimiter,
base_mean_header=base_mean_header,
)
if dge_path.endswith('.csv'):
return parse_csv(
dge_path,
entrez_id_header=entrez_id_header,
log_fold_change_header=log2_fold_change_header,
adjusted_p_value_header=adj_p_header,
entrez_delimiter=entrez_delimiter,
base_mean_header=base_mean_header,
)
if dge_path.endswith('.tsv'):
return parse_csv(
dge_path,
entrez_id_header=entrez_id_header,
log_fold_change_header=log2_fold_change_header,
adjusted_p_value_header=adj_p_header,
entrez_delimiter=entrez_delimiter,
base_mean_header=base_mean_header,
sep="\t",
)
raise ValueError(f'Unsupported extension: {dge_path}')
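# A hedged end-to-end sketch; the file paths, column headers, and thresholds
# below are illustrative placeholders, not project defaults:
#
#     genes = parse_dge(
#         'dge.csv', entrez_id_header='entrez', log2_fold_change_header='l2fc',
#         adj_p_header='padj', entrez_delimiter=';',
#     )
#     network = generate_ppi_network(
#         'ppi.edgelist', genes, max_adj_p=0.05,
#         max_log2_fold_change=-1.0, min_log2_fold_change=1.0,
#     )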
| [
"[email protected]"
]
| |
8f15048573ae6cf53c784fe29bb50ef7345fb154 | 99701affb7ae46c42c55484f3301d59f79294a10 | /project/Examples/Examples/PP2E/Dstruct/Basic/inter2.py | 200364cc5828b3f08ae4bba0989169e3e39861b8 | []
| no_license | inteljack/EL6183-Digital-Signal-Processing-Lab-2015-Fall | 1050b9e9bddb335bf42b7debf2abebe51dd9f9e0 | 0f650a97d8fbaa576142e5bb1745f136b027bc73 | refs/heads/master | 2021-01-21T21:48:21.326372 | 2016-04-06T20:05:19 | 2016-04-06T20:05:19 | 42,902,523 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | def intersect(*args):
res = []
for x in args[0]: # scan the first list
for other in args[1:]: # for all other arguments
if x not in other: break # this item in each one?
else:
res.append(x) # add common items to the end
return res
def union(*args):
res = []
for seq in args: # for all sequence-arguments
for x in seq: # for all nodes in argument
if not x in res:
res.append(x) # add new items to result
return res
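# Quick self-test (a sketch added for illustration; any sequence types work,
# since both functions only iterate and use the `in` operator):
if __name__ == '__main__':
    print(intersect("SPAM", "SCAM"))               # ['S', 'A', 'M']
    print(union("SPAM", "SCAM"))                   # ['S', 'P', 'A', 'M', 'C']
    print(intersect([1, 2, 3], (1, 4), range(5)))  # [1]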
| [
"[email protected]"
]
| |
5582e0c04ffcb5fecce6af3812ec4c05c1be9fb2 | 6219e6536774e8eeb4cadc4a84f6f2bea376c1b0 | /scraper/storage_spiders/thnhatrangvn.py | 220429db599deaabf7822d301bccd557a783a259 | [
"MIT"
]
| permissive | nguyenminhthai/choinho | 109d354b410b92784a9737f020894d073bea1534 | d2a216fe7a5064d73cdee3e928a7beef7f511fd1 | refs/heads/master | 2023-05-07T16:51:46.667755 | 2019-10-22T07:53:41 | 2019-10-22T07:53:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,077 | py | # Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
XPATH = {
'name' : "//div[@class='home-center']/div[@class='product-info']/div[@class='product-name']/h1",
'price' : "//div[@class='product-right']/div[@class='product-price']/p[@class='cssPriceSpecial']/b",
'category' : "//div[@class='wrap']/div[@class='home-content']/div[@class='category-path']/a",
'description' : "//div[@id='pro_content_desc']/div//span",
'images' : "//div[@id='pro_big']/a/@href",
'canonical' : "//link[@rel='canonical']/@href",
'base_url' : "",
'brand' : ""
}
name = 'thnhatrang.vn'
allowed_domains = ['thnhatrang.vn']
start_urls = ['http://thnhatrang.vn']
tracking_url = ''
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = []
rules = [
Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+-p\d+\.html']), 'parse_item'),
Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+-c\d+\.html']), 'parse'),
#Rule(LinkExtractor(), 'parse_item_and_links'),
]
| [
"[email protected]"
]
| |
9cbdd8a6c6170a9d1d5a9ca37e428a2e16bc6c22 | 309d17b81cea038713ba67bee72a41d2df4d6869 | /Python/Python_basic/Python_OOP/OOP21_composition2.py | 86f25cef9fcfcf5256d11e83738ff6e7e74ed70b | []
| no_license | Bongkot-Kladklaen/Programming_tutorial_code | ac07e39da2bce396e670611884436b360536cdc5 | cda7508c15c3e3d179c64b9aac163b6173ef3519 | refs/heads/master | 2023-06-20T13:14:17.077809 | 2021-07-18T04:41:04 | 2021-07-18T04:41:04 | 387,081,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | class Printer:
def print_page(self, data):
print("printing {}".format(data))
class Scanner:
def scan_page(self):
print("scanning...")
class Fax:
    def fax_page(self, number):
print("faxing to {}".format(number))
class Aio: # All in one printer
    def __init__(self, p, s, f):
self.p = p
self.s = s
self.f = f
if __name__ == '__main__':
a = Aio(Printer(), Scanner(), Fax())
a.p.print_page("hello")
a.s.scan_page()
a.f.fax_page("02848248") | [
"[email protected]"
]
| |
7bd0879f9babbc70ad3e7b46acda567a0352685e | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /pkgs/nltk-3.2-py27_0/lib/python2.7/site-packages/nltk/chunk/util.py | 0027fab667b850ac00ae34418c66c68f13313f1e | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
]
| permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 21,251 | py | # Natural Language Toolkit: Chunk format conversions
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Edward Loper <[email protected]>
# Steven Bird <[email protected]> (minor additions)
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, unicode_literals, division
import re
from nltk.tree import Tree
from nltk.tag.mapping import map_tag
from nltk.tag.util import str2tuple
from nltk.compat import python_2_unicode_compatible
##//////////////////////////////////////////////////////
## EVALUATION
##//////////////////////////////////////////////////////
from nltk.metrics import accuracy as _accuracy
def accuracy(chunker, gold):
"""
Score the accuracy of the chunker against the gold standard.
Strip the chunk information from the gold standard and rechunk it using
the chunker, then compute the accuracy score.
:type chunker: ChunkParserI
:param chunker: The chunker being evaluated.
:type gold: tree
:param gold: The chunk structures to score the chunker on.
:rtype: float
"""
gold_tags = []
test_tags = []
for gold_tree in gold:
test_tree = chunker.parse(gold_tree.flatten())
gold_tags += tree2conlltags(gold_tree)
test_tags += tree2conlltags(test_tree)
# print 'GOLD:', gold_tags[:50]
# print 'TEST:', test_tags[:50]
return _accuracy(gold_tags, test_tags)
# Patched for increased performance by Yoav Goldberg <[email protected]>, 2006-01-13
# -- statistics are evaluated only on demand, instead of at every sentence evaluation
#
# SB: use nltk.metrics for precision/recall scoring?
#
class ChunkScore(object):
"""
A utility class for scoring chunk parsers. ``ChunkScore`` can
evaluate a chunk parser's output, based on a number of statistics
    (precision, recall, f-measure, missed chunks, incorrect chunks).
It can also combine the scores from the parsing of multiple texts;
this makes it significantly easier to evaluate a chunk parser that
operates one sentence at a time.
Texts are evaluated with the ``score`` method. The results of
evaluation can be accessed via a number of accessor methods, such
as ``precision`` and ``f_measure``. A typical use of the
``ChunkScore`` class is::
>>> chunkscore = ChunkScore() # doctest: +SKIP
>>> for correct in correct_sentences: # doctest: +SKIP
... guess = chunkparser.parse(correct.leaves()) # doctest: +SKIP
... chunkscore.score(correct, guess) # doctest: +SKIP
>>> print('F Measure:', chunkscore.f_measure()) # doctest: +SKIP
F Measure: 0.823
:ivar kwargs: Keyword arguments:
        - max_tp_examples: The maximum number of actual examples of true
positives to record. This affects the ``correct`` member
function: ``correct`` will not return more than this number
of true positive examples. This does *not* affect any of
the numerical metrics (precision, recall, or f-measure)
        - max_fp_examples: The maximum number of actual examples of false
positives to record. This affects the ``incorrect`` member
function and the ``guessed`` member function: ``incorrect``
will not return more than this number of examples, and
``guessed`` will not return more than this number of true
positive examples. This does *not* affect any of the
numerical metrics (precision, recall, or f-measure)
        - max_fn_examples: The maximum number of actual examples of false
negatives to record. This affects the ``missed`` member
function and the ``correct`` member function: ``missed``
will not return more than this number of examples, and
``correct`` will not return more than this number of true
negative examples. This does *not* affect any of the
numerical metrics (precision, recall, or f-measure)
- chunk_label: A regular expression indicating which chunks
should be compared. Defaults to ``'.*'`` (i.e., all chunks).
:type _tp: list(Token)
:ivar _tp: List of true positives
:type _fp: list(Token)
:ivar _fp: List of false positives
:type _fn: list(Token)
:ivar _fn: List of false negatives
:type _tp_num: int
:ivar _tp_num: Number of true positives
:type _fp_num: int
:ivar _fp_num: Number of false positives
:type _fn_num: int
:ivar _fn_num: Number of false negatives.
"""
def __init__(self, **kwargs):
self._correct = set()
self._guessed = set()
self._tp = set()
self._fp = set()
self._fn = set()
self._max_tp = kwargs.get('max_tp_examples', 100)
self._max_fp = kwargs.get('max_fp_examples', 100)
self._max_fn = kwargs.get('max_fn_examples', 100)
self._chunk_label = kwargs.get('chunk_label', '.*')
self._tp_num = 0
self._fp_num = 0
self._fn_num = 0
self._count = 0
self._tags_correct = 0.0
self._tags_total = 0.0
self._measuresNeedUpdate = False
def _updateMeasures(self):
if (self._measuresNeedUpdate):
self._tp = self._guessed & self._correct
self._fn = self._correct - self._guessed
self._fp = self._guessed - self._correct
self._tp_num = len(self._tp)
self._fp_num = len(self._fp)
self._fn_num = len(self._fn)
self._measuresNeedUpdate = False
def score(self, correct, guessed):
"""
Given a correctly chunked sentence, score another chunked
version of the same sentence.
:type correct: chunk structure
:param correct: The known-correct ("gold standard") chunked
sentence.
:type guessed: chunk structure
:param guessed: The chunked sentence to be scored.
"""
self._correct |= _chunksets(correct, self._count, self._chunk_label)
self._guessed |= _chunksets(guessed, self._count, self._chunk_label)
self._count += 1
self._measuresNeedUpdate = True
# Keep track of per-tag accuracy (if possible)
try:
correct_tags = tree2conlltags(correct)
guessed_tags = tree2conlltags(guessed)
except ValueError:
# This exception case is for nested chunk structures,
# where tree2conlltags will fail with a ValueError: "Tree
# is too deeply nested to be printed in CoNLL format."
correct_tags = guessed_tags = ()
self._tags_total += len(correct_tags)
self._tags_correct += sum(1 for (t,g) in zip(guessed_tags,
correct_tags)
if t==g)
def accuracy(self):
"""
Return the overall tag-based accuracy for all text that have
been scored by this ``ChunkScore``, using the IOB (conll2000)
tag encoding.
:rtype: float
"""
if self._tags_total == 0: return 1
return self._tags_correct/self._tags_total
def precision(self):
"""
Return the overall precision for all texts that have been
scored by this ``ChunkScore``.
:rtype: float
"""
self._updateMeasures()
div = self._tp_num + self._fp_num
if div == 0: return 0
else: return self._tp_num / div
def recall(self):
"""
Return the overall recall for all texts that have been
scored by this ``ChunkScore``.
:rtype: float
"""
self._updateMeasures()
div = self._tp_num + self._fn_num
if div == 0: return 0
else: return self._tp_num / div
def f_measure(self, alpha=0.5):
"""
Return the overall F measure for all texts that have been
scored by this ``ChunkScore``.
:param alpha: the relative weighting of precision and recall.
Larger alpha biases the score towards the precision value,
while smaller alpha biases the score towards the recall
value. ``alpha`` should have a value in the range [0,1].
:type alpha: float
:rtype: float
"""
self._updateMeasures()
p = self.precision()
r = self.recall()
if p == 0 or r == 0: # what if alpha is 0 or 1?
return 0
return 1/(alpha/p + (1-alpha)/r)
def missed(self):
"""
Return the chunks which were included in the
correct chunk structures, but not in the guessed chunk
structures, listed in input order.
:rtype: list of chunks
"""
self._updateMeasures()
chunks = list(self._fn)
return [c[1] for c in chunks] # discard position information
def incorrect(self):
"""
Return the chunks which were included in the guessed chunk structures,
but not in the correct chunk structures, listed in input order.
:rtype: list of chunks
"""
self._updateMeasures()
chunks = list(self._fp)
return [c[1] for c in chunks] # discard position information
def correct(self):
"""
Return the chunks which were included in the correct
chunk structures, listed in input order.
:rtype: list of chunks
"""
chunks = list(self._correct)
return [c[1] for c in chunks] # discard position information
def guessed(self):
"""
Return the chunks which were included in the guessed
chunk structures, listed in input order.
:rtype: list of chunks
"""
chunks = list(self._guessed)
return [c[1] for c in chunks] # discard position information
def __len__(self):
self._updateMeasures()
return self._tp_num + self._fn_num
def __repr__(self):
"""
Return a concise representation of this ``ChunkScoring``.
:rtype: str
"""
return '<ChunkScoring of '+repr(len(self))+' chunks>'
def __str__(self):
"""
Return a verbose representation of this ``ChunkScoring``.
This representation includes the precision, recall, and
f-measure scores. For other information about the score,
use the accessor methods (e.g., ``missed()`` and ``incorrect()``).
:rtype: str
"""
return ("ChunkParse score:\n" +
(" IOB Accuracy: %5.1f%%\n" % (self.accuracy()*100)) +
(" Precision: %5.1f%%\n" % (self.precision()*100)) +
(" Recall: %5.1f%%\n" % (self.recall()*100))+
(" F-Measure: %5.1f%%" % (self.f_measure()*100)))
# extract chunks, and assign unique id, the absolute position of
# the first word of the chunk
def _chunksets(t, count, chunk_label):
pos = 0
chunks = []
for child in t:
if isinstance(child, Tree):
if re.match(chunk_label, child.label()):
chunks.append(((count, pos), child.freeze()))
pos += len(child.leaves())
else:
pos += 1
return set(chunks)
def tagstr2tree(s, chunk_label="NP", root_label="S", sep='/',
source_tagset=None, target_tagset=None):
"""
Divide a string of bracketted tagged text into
chunks and unchunked tokens, and produce a Tree.
Chunks are marked by square brackets (``[...]``). Words are
delimited by whitespace, and each word should have the form
``text/tag``. Words that do not contain a slash are
assigned a ``tag`` of None.
:param s: The string to be converted
:type s: str
:param chunk_label: The label to use for chunk nodes
:type chunk_label: str
:param root_label: The label to use for the root of the tree
:type root_label: str
:rtype: Tree
"""
WORD_OR_BRACKET = re.compile(r'\[|\]|[^\[\]\s]+')
stack = [Tree(root_label, [])]
for match in WORD_OR_BRACKET.finditer(s):
text = match.group()
if text[0] == '[':
if len(stack) != 1:
raise ValueError('Unexpected [ at char %d' % match.start())
chunk = Tree(chunk_label, [])
stack[-1].append(chunk)
stack.append(chunk)
elif text[0] == ']':
if len(stack) != 2:
raise ValueError('Unexpected ] at char %d' % match.start())
stack.pop()
else:
if sep is None:
stack[-1].append(text)
else:
word, tag = str2tuple(text, sep)
if source_tagset and target_tagset:
tag = map_tag(source_tagset, target_tagset, tag)
stack[-1].append((word, tag))
if len(stack) != 1:
raise ValueError('Expected ] at char %d' % len(s))
return stack[0]
### CONLL
_LINE_RE = re.compile('(\S+)\s+(\S+)\s+([IOB])-?(\S+)?')
def conllstr2tree(s, chunk_types=('NP', 'PP', 'VP'), root_label="S"):
"""
Return a chunk structure for a single sentence
encoded in the given CONLL 2000 style string.
This function converts a CoNLL IOB string into a tree.
It uses the specified chunk types
(defaults to NP, PP and VP), and creates a tree rooted at a node
labeled S (by default).
:param s: The CoNLL string to be converted.
:type s: str
:param chunk_types: The chunk types to be converted.
:type chunk_types: tuple
:param root_label: The node label to use for the root.
:type root_label: str
:rtype: Tree
"""
stack = [Tree(root_label, [])]
for lineno, line in enumerate(s.split('\n')):
if not line.strip(): continue
# Decode the line.
match = _LINE_RE.match(line)
if match is None:
raise ValueError('Error on line %d' % lineno)
(word, tag, state, chunk_type) = match.groups()
# If it's a chunk type we don't care about, treat it as O.
if (chunk_types is not None and
chunk_type not in chunk_types):
state = 'O'
# For "Begin"/"Outside", finish any completed chunks -
# also do so for "Inside" which don't match the previous token.
mismatch_I = state == 'I' and chunk_type != stack[-1].label()
if state in 'BO' or mismatch_I:
if len(stack) == 2: stack.pop()
# For "Begin", start a new chunk.
if state == 'B' or mismatch_I:
chunk = Tree(chunk_type, [])
stack[-1].append(chunk)
stack.append(chunk)
# Add the new word token.
stack[-1].append((word, tag))
return stack[0]
def tree2conlltags(t):
"""
Return a list of 3-tuples containing ``(word, tag, IOB-tag)``.
Convert a tree to the CoNLL IOB tag format.
:param t: The tree to be converted.
:type t: Tree
:rtype: list(tuple)
"""
tags = []
for child in t:
try:
category = child.label()
prefix = "B-"
for contents in child:
if isinstance(contents, Tree):
raise ValueError("Tree is too deeply nested to be printed in CoNLL format")
tags.append((contents[0], contents[1], prefix+category))
prefix = "I-"
except AttributeError:
tags.append((child[0], child[1], "O"))
return tags
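# For example (a sketch using a hand-built chunk tree):
#
#     >>> t = Tree('S', [Tree('NP', [('the', 'DT'), ('book', 'NN')]), ('fell', 'VBD')])
#     >>> tree2conlltags(t)
#     [('the', 'DT', 'B-NP'), ('book', 'NN', 'I-NP'), ('fell', 'VBD', 'O')]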
def conlltags2tree(sentence, chunk_types=('NP','PP','VP'),
root_label='S', strict=False):
"""
Convert the CoNLL IOB format to a tree.
"""
tree = Tree(root_label, [])
for (word, postag, chunktag) in sentence:
if chunktag is None:
if strict:
raise ValueError("Bad conll tag sequence")
else:
# Treat as O
tree.append((word,postag))
elif chunktag.startswith('B-'):
tree.append(Tree(chunktag[2:], [(word,postag)]))
elif chunktag.startswith('I-'):
if (len(tree)==0 or not isinstance(tree[-1], Tree) or
tree[-1].label() != chunktag[2:]):
if strict:
raise ValueError("Bad conll tag sequence")
else:
# Treat as B-*
tree.append(Tree(chunktag[2:], [(word,postag)]))
else:
tree[-1].append((word,postag))
elif chunktag == 'O':
tree.append((word,postag))
else:
raise ValueError("Bad conll tag %r" % chunktag)
return tree
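# The inverse direction (a sketch matching the tree2conlltags example above;
# conlltags2tree undoes it for trees that are one chunk-level deep):
#
#     >>> conlltags2tree([('the', 'DT', 'B-NP'), ('book', 'NN', 'I-NP'), ('fell', 'VBD', 'O')])
#     Tree('S', [Tree('NP', [('the', 'DT'), ('book', 'NN')]), ('fell', 'VBD')])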
def tree2conllstr(t):
"""
Return a multiline string where each line contains a word, tag and IOB tag.
Convert a tree to the CoNLL IOB string format
:param t: The tree to be converted.
:type t: Tree
:rtype: str
"""
lines = [" ".join(token) for token in tree2conlltags(t)]
return '\n'.join(lines)
### IEER
_IEER_DOC_RE = re.compile(r'<DOC>\s*'
r'(<DOCNO>\s*(?P<docno>.+?)\s*</DOCNO>\s*)?'
r'(<DOCTYPE>\s*(?P<doctype>.+?)\s*</DOCTYPE>\s*)?'
r'(<DATE_TIME>\s*(?P<date_time>.+?)\s*</DATE_TIME>\s*)?'
r'<BODY>\s*'
r'(<HEADLINE>\s*(?P<headline>.+?)\s*</HEADLINE>\s*)?'
r'<TEXT>(?P<text>.*?)</TEXT>\s*'
r'</BODY>\s*</DOC>\s*', re.DOTALL)
_IEER_TYPE_RE = re.compile('<b_\w+\s+[^>]*?type="(?P<type>\w+)"')
def _ieer_read_text(s, root_label):
stack = [Tree(root_label, [])]
# s will be None if there is no headline in the text
# return the empty list in place of a Tree
if s is None:
return []
for piece_m in re.finditer('<[^>]+>|[^\s<]+', s):
piece = piece_m.group()
try:
if piece.startswith('<b_'):
m = _IEER_TYPE_RE.match(piece)
if m is None: print('XXXX', piece)
chunk = Tree(m.group('type'), [])
stack[-1].append(chunk)
stack.append(chunk)
elif piece.startswith('<e_'):
stack.pop()
# elif piece.startswith('<'):
# print "ERROR:", piece
# raise ValueError # Unexpected HTML
else:
stack[-1].append(piece)
except (IndexError, ValueError):
raise ValueError('Bad IEER string (error at character %d)' %
piece_m.start())
if len(stack) != 1:
raise ValueError('Bad IEER string')
return stack[0]
def ieerstr2tree(s, chunk_types = ['LOCATION', 'ORGANIZATION', 'PERSON', 'DURATION',
'DATE', 'CARDINAL', 'PERCENT', 'MONEY', 'MEASURE'], root_label="S"):
"""
Return a chunk structure containing the chunked tagged text that is
encoded in the given IEER style string.
Convert a string of chunked tagged text in the IEER named
entity format into a chunk structure. Chunks are of several
types, LOCATION, ORGANIZATION, PERSON, DURATION, DATE, CARDINAL,
PERCENT, MONEY, and MEASURE.
:rtype: Tree
"""
# Try looking for a single document. If that doesn't work, then just
# treat everything as if it was within the <TEXT>...</TEXT>.
m = _IEER_DOC_RE.match(s)
if m:
return {
'text': _ieer_read_text(m.group('text'), root_label),
'docno': m.group('docno'),
'doctype': m.group('doctype'),
'date_time': m.group('date_time'),
#'headline': m.group('headline')
# we want to capture NEs in the headline too!
'headline': _ieer_read_text(m.group('headline'), root_label),
}
else:
return _ieer_read_text(s, root_label)
def demo():
s = "[ Pierre/NNP Vinken/NNP ] ,/, [ 61/CD years/NNS ] old/JJ ,/, will/MD join/VB [ the/DT board/NN ] ./."
import nltk
t = nltk.chunk.tagstr2tree(s, chunk_label='NP')
t.pprint()
print()
s = """
These DT B-NP
research NN I-NP
protocols NNS I-NP
offer VBP B-VP
to TO B-PP
the DT B-NP
patient NN I-NP
not RB O
only RB O
the DT B-NP
very RB I-NP
best JJS I-NP
therapy NN I-NP
which WDT B-NP
we PRP B-NP
have VBP B-VP
established VBN I-VP
today NN B-NP
but CC B-NP
also RB I-NP
the DT B-NP
hope NN I-NP
of IN B-PP
something NN B-NP
still RB B-ADJP
better JJR I-ADJP
. . O
"""
conll_tree = conllstr2tree(s, chunk_types=('NP', 'PP'))
conll_tree.pprint()
# Demonstrate CoNLL output
print("CoNLL output:")
print(nltk.chunk.tree2conllstr(conll_tree))
print()
if __name__ == '__main__':
demo()
| [
"[email protected]"
]
| |
ea8ca2060f2262c3ecaf0c88506fad93bb81a001 | eb54d732b5f14f03d9bf2988c6157605c80bbdd5 | /bubble_sort.py | e599bb7065016d2e01b3e67d5e93e3dc4947d828 | []
| no_license | tngo0508/practice_coding | 2e60519fed83a9b3c28b52c2d5ec1ee1d2a609ed | 453c9a7b9a8aa80f37b245f9df447525a9b0a2d1 | refs/heads/master | 2022-03-27T01:44:56.589650 | 2020-01-05T18:58:31 | 2020-01-05T18:58:31 | 225,294,423 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | def bubble_sort(nums):
for i in range(len(nums) - 1, 0, -1):
for j in range(i):
if nums[j] > nums[j + 1]:
nums[j], nums[j+1] = nums[j+1], nums[j]
print(nums)
return nums
print(bubble_sort([4, 1, 0, 3, 5, 1, 2, 6]))
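# A common refinement (a sketch, not part of the original exercise): track
# whether a pass made any swap and stop early once the list is already sorted.
def bubble_sort_early_exit(nums):
    for i in range(len(nums) - 1, 0, -1):
        swapped = False
        for j in range(i):
            if nums[j] > nums[j + 1]:
                nums[j], nums[j + 1] = nums[j + 1], nums[j]
                swapped = True
        if not swapped:  # no swaps in this pass: already sorted
            break
    return nums
print(bubble_sort_early_exit([4, 1, 0, 3, 5, 1, 2, 6]))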
| [
"[email protected]"
]
| |
86a2d304179a0d4d021966bafce213f4365d57c2 | 84290c584128de3e872e66dc99b5b407a7a4612f | /Statistical Thinking in Python (Part 2)/Bootstrap confidence intervals/Visualizing bootstrap samples.py | 325418de26f528e09ecafe5c6554c241dae959c8 | []
| no_license | BautizarCodigo/DataAnalyticEssentials | 91eddc56dd1b457e9e3e1e3db5fbbb2a85d3b789 | 7f5f3d8936dd4945ee0fd854ef17f04a04eb7b57 | refs/heads/main | 2023-04-11T04:42:17.977491 | 2021-03-21T19:05:17 | 2021-03-21T19:05:17 | 349,784,608 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 534 | py | for _ in range(50):
# Generate bootstrap sample: bs_sample
bs_sample = np.random.choice(rainfall, size=len(rainfall))
# Compute and plot ECDF from bootstrap sample
x, y = ecdf(bs_sample)
_ = plt.plot(x, y, marker='.', linestyle='none',
color='gray', alpha=0.1)
# Compute and plot ECDF from original data
x, y = ecdf(rainfall)
_ = plt.plot(x, y, marker='.')
# Make margins and label axes
plt.margins(0.02)
_ = plt.xlabel('yearly rainfall (mm)')
_ = plt.ylabel('ECDF')
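# Note: this DataCamp-style exercise assumes `np`, `plt`, `rainfall`, and an
# `ecdf` helper are pre-defined. A minimal sketch of that helper:
#
#     import numpy as np
#     def ecdf(data):
#         x = np.sort(data)
#         y = np.arange(1, len(data) + 1) / len(data)
#         return x, y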
# Show the plot
plt.show() | [
"[email protected]"
]
| |
391a306f78fe5c96c880603c95534afa317eb828 | 874f8db726d5ce5da971dbd54aac58f0b3176d78 | /aa通用的工具类或方法/一个通用的mongodb类.py | 688e8d6c4c304fd0c6613395dc49c4fed7d13fcf | []
| no_license | Social-Engineering-OrigData/python | a8442ab5b3a772ddfc568eb5e386b11074c5bf93 | 6dde78f75e2a3306bccdc0085a44751cf2b901ca | refs/heads/master | 2021-09-09T12:56:09.781127 | 2018-03-16T09:34:17 | 2018-03-16T09:34:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,035 | py | #!/usr/bin/env python
# encoding: utf-8
'''
@contact: wersonliugmail.com
@File : 一个通用的mongodb类.py
'''
from pymongo import MongoClient
"""
Used when saving data from a crawler: there is no need to create the database in advance, just insert dicts directly.
"""
class MyMongo:
def __init__(self, dbname, colname):
"""
        :param dbname: name of the database to use
        :param colname: name of the collection (table) to use
"""
        # Override host/port here to match your own setup
self.host = "127.0.0.1"
self.port = 27017
# self.dbname = dbname
# self.colname = colname
self.client = MongoClient(host=self.host, port=self.port)
self.db = self.client[dbname]
self.col = self.db[colname]
def process_data(self, data):
self.col.insert(data)
print("成功插入%s" % data)
def close_mongo(self):
self.client.close()
    # Other CRUD (insert/delete/update/query) operations go here
my = MyMongo("wnagyi", "info")
my.process_data({"姓名": "刘伟", "工资": 1800})
my.close_mongo()
| [
"[email protected]"
]
| |
35380b0997d3dc37aa77773fe400ca9768d179f3 | 9c05ec071dda2aa98ea1b12d9703dd91df19c87d | /quantum/hooks.py | 2c6a587a6d593503d2bbf9fee3977197c254c5db | [
"Apache-2.0"
]
| permissive | DestinyOneSystems/quantum | af6ff44dd5e8cff944e53946f60adb11efb47bd5 | d7eafd8ffa719d91108b230221ecf27531a3530d | refs/heads/master | 2020-04-05T18:59:00.584768 | 2013-10-21T02:41:15 | 2013-10-21T02:41:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,109 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
def setup_hook(config):
"""Filter config parsed from a setup.cfg to inject our defaults."""
metadata = config['metadata']
if sys.platform == 'win32':
requires = metadata.get('requires_dist', list()).split('\n')
requires.append('pywin32')
requires.append('wmi')
requires.remove('pyudev')
metadata['requires_dist'] = "\n".join(requires)
config['metadata'] = metadata
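# Illustrative effect on win32 (values are placeholders): a requires_dist of
# "pyudev\neventlet" becomes "eventlet\npywin32\nwmi" -- pyudev is dropped and
# the Windows-only dependencies are appended. Note that requires_dist must
# already be a string here; the list() default would break the .split() call.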
| [
"[email protected]"
]
| |
30f7edb35eef5cce6d855b50be7fff21042a064c | 39257f596d4ec7840e39c8267b3444443e89ebba | /src/pyff/pipes.py | b76c3853b4c2b50602eb6f904f16bc38eee2d586 | [
"BSD-2-Clause"
]
| permissive | lhoekenga/pyFF | ff6921410d46687528d84e416cbdafa6af46b164 | a0413d34744ddbf95904d0d933524589a039c025 | refs/heads/master | 2021-08-22T09:49:56.160558 | 2017-11-27T09:58:08 | 2017-11-27T09:58:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,028 | py | """
Pipes and plumbing. Plumbing instances are sequences of pipes. Each pipe is called in order to load, select,
transform, sign or output SAML metadata.
"""
import traceback
try:
from cStringIO import StringIO
except ImportError: # pragma: no cover
print(" *** install cStringIO for better performance")
from StringIO import StringIO
import os
import yaml
from .utils import resource_string, PyffException
from .logs import log
__author__ = 'leifj'
registry = dict()
def pipe(*args, **kwargs):
"""
Register the decorated function in the pyff pipe registry
:param name: optional name - if None, use function name
"""
def deco_none(f):
return f
def deco_pipe(f):
f_name = kwargs.get('name', f.__name__)
registry[f_name] = f
return f
if 1 == len(args):
f = args[0]
registry[f.__name__] = f
return deco_none
else:
return deco_pipe
class PipeException(PyffException):
pass
class PluginsRegistry(dict):
"""
The plugin registry uses pkg_resources.iter_entry_points to list all EntryPoints in the group 'pyff.pipe'. All pipe
entry_points must have the following prototype:
def the_something_func(req,*opts):
pass
Referencing this function as an entry_point using something = module:the_somethig_func in setup.py allows the
function to be referenced as 'something' in a pipeline.
"""
# def __init__(self):
# for entry_point in iter_entry_points('pyff.pipe'):
# if entry_point.name in self:
# log.warn("Duplicate entry point: %s" % entry_point.name)
# else:
# log.debug("Registering entry point: %s" % entry_point.name)
# self[entry_point.name] = entry_point.load()
def load_pipe(d):
"""Return a triple callable,name,args of the pipe specified by the object d.
:param d: The following alternatives for d are allowed:
- d is a string (or unicode) in which case the pipe is named d called with None as args.
- d is a dict of the form {name: args} (i.e one key) in which case the pipe named *name* is called with args
- d is an iterable (eg tuple or list) in which case d[0] is treated as the pipe name and d[1:] becomes the args
"""
def _n(_d):
lst = _d.split()
_name = lst[0]
_opts = lst[1:]
return _name, _opts
name = None
args = None
opts = []
if type(d) is str or type(d) is unicode:
name, opts = _n(d)
elif hasattr(d, '__iter__') and not type(d) is dict:
if not len(d):
raise PipeException("This does not look like a length of pipe... \n%s" % repr(d))
name, opts = _n(d[0])
elif type(d) is dict:
k = d.keys()[0]
name, opts = _n(k)
args = d[k]
else:
raise PipeException("This does not look like a length of pipe... \n%s" % repr(d))
if name is None:
raise PipeException("Anonymous length of pipe... \n%s" % repr(d))
func = None
if name in registry:
func = registry[name]
if func is None or not hasattr(func, '__call__'):
raise PipeException('No pipe named %s is installed' % name)
return func, opts, name, args
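# For example (a sketch; 'select' must be a registered pipe for load_pipe to
# resolve it):
#
#     load_pipe('select')
#     # -> (select_func, [], 'select', None)
#     load_pipe({'select': ['#md:EntityDescriptor']})
#     # -> (select_func, [], 'select', ['#md:EntityDescriptor'])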
class PipelineCallback(object):
"""
A delayed pipeline callback used as a post for parse_metadata
"""
def __init__(self, entry_point, req):
self.entry_point = entry_point
self.plumbing = Plumbing(req.plumbing.pipeline, "%s-via-%s" % (req.plumbing.id, entry_point))
self.req = req
def __call__(self, *args, **kwargs):
t = args[0]
if t is None:
raise ValueError("PipelineCallback must be called with a parse-tree argument")
try:
return self.plumbing.process(self.req.md, state={self.entry_point: True}, t=t)
except Exception as ex:
traceback.print_exc(ex)
raise ex
class Plumbing(object):
"""
A plumbing instance represents a basic processing chain for SAML metadata. A simple, yet reasonably complete example:
.. code-block:: yaml
- load:
- /var/metadata/registry
- http://md.example.com
- select:
- #md:EntityDescriptor[md:IDPSSODescriptor]
- xslt:
stylesheet: tidy.xsl
- fork:
- finalize:
Name: http://example.com/metadata.xml
cacheDuration: PT1H
validUntil: PT1D
- sign:
key: signer.key
cert: signer.crt
- publish: /var/metadata/public/metadata.xml
Running this plumbing would bake all metadata found in /var/metadata/registry and at http://md.example.com into an
EntitiesDescriptor element with @Name http://example.com/metadata.xml, @cacheDuration set to 1hr and @validUntil
1 day from the time the 'finalize' command was run. The tree woud be transformed using the "tidy" stylesheets and
would then be signed (using signer.key) and finally published in /var/metadata/public/metadata.xml
"""
def __init__(self, pipeline, pid):
self._id = pid
self.pipeline = pipeline
@property
def id(self):
return self._id
@property
def pid(self):
return self._id
def __iter__(self):
return self.pipeline
def __str__(self):
out = StringIO()
yaml.dump(self.pipeline, stream=out)
return out.getvalue()
class Request(object):
"""
Represents a single request. When processing a set of pipelines a single request is used. Any part of the pipeline
may modify any of the fields.
"""
def __init__(self, pl, md, t, name=None, args=None, state=None):
if not state:
state = dict()
if not args:
args = []
self.plumbing = pl
self.md = md
self.t = t
self.name = name
self.args = args
self.state = state
self.done = False
def process(self, pl):
"""The inner request pipeline processor.
:param pl: The plumbing to run this request through
"""
log.debug('Processing \n%s' % pl)
for p in pl.pipeline:
cb, opts, name, args = load_pipe(p)
# log.debug("traversing pipe %s,%s,%s using %s" % (pipe,name,args,opts))
if type(args) is str or type(args) is unicode:
args = [args]
if args is not None and type(args) is not dict and type(args) is not list and type(args) is not tuple:
raise PipeException("Unknown argument type %s" % repr(args))
self.args = args
self.name = name
ot = cb(self, *opts)
if ot is not None:
self.t = ot
if self.done:
break
return self.t
def process(self, md, state=None, t=None):
"""
The main entrypoint for processing a request pipeline. Calls the inner processor.
:param md: The current metadata repository
:param state: The active request state
:param t: The active working document
:return: The result of applying the processing pipeline to t.
"""
if not state:
state = dict()
# req = Plumbing.Request(self, md, t, state=state)
# self._process(req)
# return req.t
return Plumbing.Request(self, md, t, state=state).process(self)
def _process(self, req):
"""The inner request pipeline processor.
:param req: The request to run through the pipeline
"""
log.debug('Processing \n%s' % self)
for p in self.pipeline:
try:
pipe, opts, name, args = load_pipe(p)
# log.debug("traversing pipe %s,%s,%s using %s" % (pipe,name,args,opts))
if type(args) is str or type(args) is unicode:
args = [args]
if args is not None and type(args) is not dict and type(args) is not list and type(args) is not tuple:
raise PipeException("Unknown argument type %s" % repr(args))
req.args = args
req.name = name
ot = pipe(req, *opts)
if ot is not None:
req.t = ot
if req.done:
break
except PipeException as ex:
log.error(ex)
break
return req.t
def plumbing(fn):
"""
Create a new plumbing instance by parsing yaml from the filename.
:param fn: A filename containing the pipeline.
:return: A plumbing object
This uses the resource framework to locate the yaml file which means that pipelines can be shipped as plugins.
"""
pid = os.path.splitext(fn)[0]
ystr = resource_string(fn)
if ystr is None:
raise PipeException("Plumbing not found: %s" % fn)
pipeline = yaml.safe_load(ystr)
return Plumbing(pipeline=pipeline, pid=pid)
| [
"[email protected]"
]
| |
233285c17f75cb0cf8903cbacdeb74bbe001281d | 8fcdcec1bf0f194d23bba4acd664166a04dc128f | /packages/gcTool.py | bd22a08d82b189ff60330613fa6b6795e709fd48 | []
| no_license | grid-control/grid-control | e51337dd7e5d158644a8da35923443fb0d232bfb | 1f5295cd6114f3f18958be0e0618ff6b35aa16d7 | refs/heads/master | 2022-11-13T13:29:13.226512 | 2021-10-01T14:37:59 | 2021-10-01T14:37:59 | 13,805,261 | 32 | 30 | null | 2023-02-19T16:22:47 | 2013-10-23T14:39:28 | Python | UTF-8 | Python | false | false | 19 | py | grid_control_api.py | [
"[email protected]"
]
| |
f138655f1c273477db99f1f85129ea718053c624 | 1a2cbc44bfcda1eafe4e8513de8541d8cd49bd08 | /fts/test_t1_amend_user2.py | f8bebf459f343d1e016841a1993e789e179cfd24 | [
"LicenseRef-scancode-public-domain"
]
| permissive | DonaldMc/gdms | d62d34585a3914330cc933476dcb0d3ab750b7d8 | 7bfdf40d929afab2e204256c781c3700f6e24443 | refs/heads/master | 2021-01-18T12:38:55.798638 | 2016-05-30T18:59:55 | 2016-05-30T18:59:55 | 56,460,151 | 0 | 0 | null | 2016-05-30T20:59:22 | 2016-04-17T21:44:40 | Python | UTF-8 | Python | false | false | 3,486 | py | from functional_tests import FunctionalTest, ROOT, USERS
from ddt import ddt, data, unpack
from selenium.webdriver.support.ui import WebDriverWait
import time
from selenium.webdriver.support.ui import Select
# Testuser1 - stays as unspecified
# Testuser2 - specifies Africa and unspecified country and subdivision
# Testuser3 - specifies Africa and South Africa and unspecified subdivision
# Testuser4 - specifies Europe and unspecified country
# Testuser5 - specifies Europe and Switzerland and unspecified subdivision
# Testuser6 - specifies North America and unspecified country
# Testuser7 - specifies North America, Canada and unspecified subdivision
# Testuser8 - specifies North America, Canada and Alberta
# Testuser9 - specifies North America, Canada and Saskatchewan
@ddt
class TestRegisterPage (FunctionalTest):
def setUp(self):
self.url = ROOT + '/default/user/login'
get_browser=self.browser.get(self.url)
    # The setup below sets user7 twice, which looks redundant; however, for reasons
    # that escape me, setting the subdivision to unspecified does not work when done
    # in a single step, so Manitoba is temporarily brought into play.
@data((USERS['USER7'], USERS['PASSWORD7'], 'North America (NA)', 'Canada (NA)', 'Manitoba'),
(USERS['USER6'], USERS['PASSWORD6'], 'North America (NA)', 'Unspecified', 'Unspecified'),
(USERS['USER8'], USERS['PASSWORD8'], 'North America (NA)', 'Canada (NA)', 'Alberta'),
(USERS['USER9'], USERS['PASSWORD9'], 'North America (NA)', 'Canada (NA)', 'Saskatchewan'),
(USERS['USER7'], USERS['PASSWORD7'], 'North America (NA)', 'Canada (NA)', 'Unspecified'))
@unpack
def test_put_values_in_register_form(self, user, passwd, continent, country, subdivision):
mailstring = user + '@user.com'
email = WebDriverWait(self, 10).until(lambda self: self.browser.find_element_by_name("email"))
email.send_keys(mailstring)
password = self.browser.find_element_by_name("password")
password.send_keys(passwd)
time.sleep(1)
submit_button = self.browser.find_element_by_css_selector("#submit_record__row input")
time.sleep(1)
submit_button.click()
time.sleep(1)
self.url = ROOT + '/default/user/profile'
get_browser=self.browser.get(self.url)
time.sleep(1)
select = Select(self.browser.find_element_by_id("auth_user_continent"))
time.sleep(1)
select.select_by_visible_text(continent)
time.sleep(1)
select = Select(self.browser.find_element_by_id("countryopt"))
time.sleep(2)
select.select_by_visible_text(country)
time.sleep(3)
select = Select(self.browser.find_element_by_id("subdivopt"))
time.sleep(3)
select.select_by_visible_text(subdivision)
time.sleep(3)
self.browser.find_element_by_xpath("//input[@value='Apply changes']").click()
# TODO get this changed to changes applied after working
resultstring = 'Welcome'
time.sleep(2)
body = WebDriverWait(self, 10).until(lambda self: self.browser.find_element_by_tag_name('body'))
self.assertIn(resultstring, body.text)
#welcome_message = self.browser.find_element_by_css_selector(".flash")
#self.assertEqual(resultstring, welcome_message.text)
self.url = ROOT + '/default/user/logout'
get_browser = self.browser.get(self.url)
time.sleep(1) | [
"[email protected]"
]
| |
c85113890b4775751eea8a0787ac818401ea92d5 | c660fdd49861211926a9dac0206d3856002ff2a8 | /smbl/prog/plugins/samtools.py | e203b8094d9a9201ecb7919fbc2f9595a2242875 | [
"MIT"
]
| permissive | hermanzhaozzzz/smbl | d493a8b7ecfaf961c7ca7280d94c945a3e4e3b92 | 5922fa2fc4060d86172e991361a1cceb0af51af8 | refs/heads/master | 2021-06-23T11:27:57.869235 | 2017-08-19T02:21:51 | 2017-08-19T02:21:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | import smbl
import snakemake
import os
from ._program import *
SAMTOOLS = get_bin_file_path("samtools")
##########################################
##########################################
class SamTools(Program):
@classmethod
def get_installation_files(cls):
return [
SAMTOOLS,
]
@classmethod
def install(cls):
gitdir_samtools=cls.git_clone("http://github.com/samtools/samtools","samtools")
gitdir_htslib=cls.git_clone("http://github.com/samtools/htslib","htslib")
smbl.prog.correct_samtools_make(os.path.join(gitdir_samtools,"Makefile"))
cls.run_make("samtools")
cls.install_file("samtools/samtools",SAMTOOLS)
@classmethod
def supported_platforms(cls):
return ["cygwin","osx","linux"]
| [
"[email protected]"
]
| |
ee8aad80ea9fe488f536a12acb866395bcbdfc70 | c26dc7928b1facac2c0912f6532076d35c19e835 | /devel/lib/python2.7/dist-packages/cob_object_detection_msgs/srv/__init__.py | 8d01ac1030bab33d482fd8bc39a91912a52446bc | []
| no_license | mattedminster/inmoov_ros | 33c29a2ea711f61f15ad5e2c53dd9db65ef6437f | e063a90b61418c3612b8df7876a633bc0dc2c428 | refs/heads/master | 2021-01-23T02:39:36.090746 | 2017-08-09T02:56:42 | 2017-08-09T02:56:42 | 85,995,826 | 0 | 0 | null | 2017-03-23T20:45:32 | 2017-03-23T20:45:32 | null | UTF-8 | Python | false | false | 309 | py | from ._AcquireObjectImage import *
from ._BaTestEnvironment import *
from ._BagTrainObject import *
from ._ComputeGraspsVacuumGripper import *
from ._DetectObjects import *
from ._SaveRecordedObject import *
from ._StartObjectRecording import *
from ._StopObjectRecording import *
from ._TrainObject import *
| [
"[email protected]"
]
| |
1390212b59f1a609de481080cfa340b8b55b6dfd | b144928d199550e0fd2a0a0a21224e4f463e4bc6 | /src/cmssh/filemover.py | bbc8b2abf265ef902f6c600bfe786fe4a2ff5e37 | []
| no_license | dmwm/cmssh | 84f91ca1bb401dc052dcde1f58f42ecee48a3438 | 0cd6e104185938d21b10b053479e890c9f4f3b57 | refs/heads/master | 2016-09-10T19:13:29.567153 | 2014-03-06T20:45:05 | 2014-03-06T20:45:05 | 2,615,169 | 2 | 0 | null | 2012-11-28T18:24:41 | 2011-10-20T18:23:41 | Python | UTF-8 | Python | false | false | 27,912 | py | #!/usr/bin/env python
"""Filemover cli equivalent"""
# system modules
import os
import re
import sys
import json
import stat
import time
import thread
import urllib
import urllib2
import datetime
from multiprocessing import Process
# for DBS2 XML parsing
import xml.etree.ElementTree as ET
# cmssh modules
from cmssh.iprint import print_error, print_info, print_warning
from cmssh.utils import size_format
from cmssh.ddict import DotDict
from cmssh.cms_urls import phedex_url, dbs_url, dbs_instances
from cmssh.cms_objects import CMSObj
from cmssh.utils import execmd
from cmssh.utils import PrintProgress, qlxml_parser
from cmssh.url_utils import get_data
from cmssh.sitedb import SiteDBManager
from cmssh.srmls import srmls_printer, srm_ls_printer
def get_dbs_se(lfn):
"Get original SE from DBS for given LFN"
# TODO: should have transparent access to DBS2/DBS3
query = 'find site where file=%s' % lfn
params = {"api":"executeQuery", "apiversion": "DBS_2_0_9", "query":query}
default_instance = os.environ.get('DBS_INSTANCE')
for inst in dbs_instances():
params.update({"query":query})
os.environ['DBS_INSTANCE'] = inst
data = urllib2.urlopen(dbs_url(), urllib.urlencode(params))
try:
rec = [f for f in qlxml_parser(data, 'site')][0]
sename = rec['site']['site']
except:
continue
os.environ['DBS_INSTANCE'] = default_instance
return sename
os.environ['DBS_INSTANCE'] = default_instance
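# Example (the LFN is illustrative):
#
# se = get_dbs_se('/store/data/Run2011A/Cosmics/RAW/v1/000/160/960/file.root')
#
# Each configured DBS instance is tried in turn; the first SE that knows the
# file is returned and DBS_INSTANCE is restored in either case.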
def file_size(ifile):
"Return file size"
if os.path.isfile(ifile):
return os.stat(ifile)[stat.ST_SIZE]
return 0
def check_permission(dst, verbose=None):
"""
Check permission to write to given destination area
"""
if verbose:
print "Check permission to write to %s" % dst
srmmkdir = os.environ.get('SRM_MKDIR', '')
if not srmmkdir:
print_error('Unable to find srm mkdir command')
sys.exit(1)
cmd = '%s %s' % (srmmkdir, dst)
stdout, stderr = execmd(cmd)
if stderr.find('command not found') != -1:
print 'Unable to find srm mkdir tool'
print help
sys.exit(1)
if stdout.find('SRM-DIR: directory not created') != -1 or\
stdout.find('SRM_FAILURE') != -1:
msg = "Unable to access %s:" % dst
print msg
print "-" * len(msg)
print
print stdout
sys.exit(1)
def check_software(softlist):
"""
Perform the check that Grid middleware is installed on a node
"""
help = 'Please run with --help for more options'
for cmd in softlist:
stdout, stderr = execmd(cmd)
if not stdout:
print 'Unable to find %s' % cmd
print help
sys.exit(1)
def parser(data):
"""Parser DBS2 listFiles output"""
elem = ET.fromstring(data)
for i in elem:
if i.tag == 'file':
yield i.attrib['lfn']
def parse_srmls(data):
"""Parse srm-ls XML output"""
data = data.split('<?xml version="1.0" encoding="UTF-8"?>')
data = '<?xml version="1.0" encoding="UTF-8"?>' + data[-1]
elem = ET.fromstring(data)
for i in elem:
if i.tag == 'file' and i.attrib.has_key('size'):
return i.attrib['size']
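# srm-ls is expected to emit XML roughly of this shape (simplified sketch,
# not a verbatim srm-ls payload):
#
# <?xml version="1.0" encoding="UTF-8"?>
# <srm><file size="1048576" ...>...</file></srm>
#
# so the size attribute of the first <file> element is returned as a string.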
def lfns(run=None, dataset=None):
"""
Get lfns list for provided run/dataset
"""
url = dbs_url('files') # DBS3
params = {'detail':'True'}
args = {}
if run:
args['minrun'] = run
args['maxrun'] = run
if dataset:
args['dataset'] = dataset
params.update(args)
json_dict = get_data(url, params)
for row in json_dict:
yield row['logical_file_name']
def get_username(verbose=None):
"""
Get user name from provided DN
"""
# get DN from grid-proxy-info
cmd = 'grid-proxy-info'
stdout, stderr = execmd(cmd)
if stderr.find('command not found') != -1:
raise Exception(stderr)
userdn = None
try:
for line in stdout.split('\n'):
if line.find('issuer') != -1:
issuer, userdn = line.split(' : ')
except:
raise Exception('Unable to parse grid-proxy-info:\n%s' % stdout)
if verbose:
print "userdn :", userdn
if not userdn:
msg = 'Unable to determine your DN, please run grid-proxy-init'
raise Exception(msg)
mgr = SiteDBManager()
user = mgr.get_user(userdn)
return user
def nodes(select=True):
"""
Yield list of Phedex nodes, I only select T2 and below
"""
result = get_data(phedex_url('nodes'), {})
pat = re.compile('^T[0-1]_[A-Z]+(_)[A-Z]+')
lnodes = []
for row in result['phedex']['node']:
if select and pat.match(row['name']):
continue
msg = "%s, SE: %s, description %s/%s" \
% (row['name'], row['se'], row['technology'], row['kind'])
lnodes.append(msg)
lnodes.sort()
for row in lnodes:
print row
def resolve_srm_path(node, verbose=None):
"""
Use TFC phedex API to resolve srm path for given node
"""
params = {'node':node}
result = get_data(phedex_url('tfc'), params)
for row in result['phedex']['storage-mapping']['array']:
if row['protocol'] == 'srmv2' and row['element_name'] == 'lfn-to-pfn':
yield (row['result'], row['path-match'])
def resolve_user_srm_path(node, ldir='/store/user', verbose=None):
"""
Use TFC phedex API to resolve srm path for given node
"""
# change ldir if user supplied full path, e.g. /xrootdfs/cms/store/...
ldir = '/store/' + ldir.split('/store/')[-1]
params = {'node':node, 'lfn':ldir, 'protocol': 'srmv2'}
result = get_data(phedex_url('lfn2pfn'), params)
for row in result['phedex']['mapping']:
yield row['pfn']
def lfn2pfn(lfn, sename, mgr=None):
"Find PFN for given LFN and SE"
pfnlist = []
if not mgr:
mgr = SiteDBManager()
cmsname = mgr.get_name(sename)
if cmsname:
params = {'protocol':'srmv2', 'lfn':lfn, 'node':cmsname}
result = get_data(phedex_url('lfn2pfn'), params)
try:
for item in result['phedex']['mapping']:
pfn = item['pfn']
if pfn not in pfnlist:
pfnlist.append(pfn)
except:
msg = "Fail to look-up PFNs in Phedex\n" + str(result)
print msg
return pfnlist
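# Usage sketch (LFN and SE hostname are illustrative):
#
# pfns = lfn2pfn('/store/user/alice/file.root', 'srm.example.edu')
# for pfn in pfns:
# print pfn
#
# The SE hostname is mapped to a CMS site name via SiteDB before the Phedex
# lfn2pfn API is queried.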
def get_pfns(lfn, verbose=None):
"""
Look-up LFN in Phedex and get corresponding list of PFNs
"""
pfnlist = []
selist = []
params = {'se':'*', 'lfn':lfn}
json_dict = get_data(phedex_url('fileReplicas'), params)
ddict = DotDict(json_dict)
if not json_dict['phedex']['block']:
return pfnlist, selist
for fname in ddict.get('phedex.block.file'):
for replica in fname['replica']:
cmsname = replica['node']
se = replica['se']
if se not in selist:
selist.append(se)
# query Phedex for PFN
params = {'protocol':'srmv2', 'lfn':lfn, 'node':cmsname}
result = get_data(phedex_url('lfn2pfn'), params)
try:
for item in result['phedex']['mapping']:
pfn = item['pfn']
if pfn not in pfnlist:
pfnlist.append(pfn)
except:
msg = "Fail to look-up PFNs in Phedex\n" + str(result)
print msg
continue
return pfnlist, selist
def pfn_dst(lfn, dst, verbose=None):
"""
Look-up LFN in Phedex and return pfn dst for further processing
"""
dstfname = None
pat = re.compile('^T[0-9]_[A-Z]+(_)[A-Z]+')
if pat.match(dst):
dst_split = dst.split(':')
dst = dst_split[0]
if len(dst_split) == 2: # copy to a specific path on the node
local_path = dst_split[1]
for srm_path, lfn_match in resolve_srm_path(dst, verbose):
lfn_pat = re.compile(lfn_match)
if lfn_pat.match(lfn):
srm_path = srm_path.replace('\?', '?').replace('$1', local_path)
if verbose:
print "Resolve %s into %s" % (dst, srm_path)
dst = srm_path
else:
paths = [p for p in resolve_user_srm_path(dst, verbose=verbose)]
dst = '%s/%s' % (paths[0], get_username())
check_permission(dst, verbose)
else:
if dst.find('file:///') == -1:
dstfname = dst.split('/')[-1]
if dstfname == '.':
dstfname = None
if dst[0] == '/': # absolute path
if os.path.isdir(dst):
ddir = dst
dstfname = None
else:
ddir = '/'.join(dst.split('/')[:-1])
if not os.path.isdir(ddir):
msg = 'Provided destination directory %s does not exists' % ddir
raise Exception(msg)
dst = 'file:///%s' % ddir
else:
ddir = '/'.join(dst.split('/')[:-1]).replace('$PWD', os.getcwd())
if os.path.isdir(ddir):
dst = 'file:///%s' % os.path.join(os.getcwd(), ddir)
else:
dst = 'file:///%s' % os.getcwd()
pfnlist = []
if os.path.isfile(lfn) or lfn.find('file:///') != -1: # local file
pfn = lfn.replace('file:///', '')
if pfn[0] != '/':
pfn = 'file:///%s' % os.path.join(os.getcwd(), pfn)
else:
pfn = 'file:///%s' % pfn
pfnlist = [pfn]
else:
if lfn.find(':') != -1:
node, lfn = lfn.split(':')
params = {'node':node, 'lfn':lfn, 'protocol':'srmv2'}
method = 'lfn2pfn'
else:
params = {'se':'*', 'lfn':lfn}
method = 'fileReplicas'
json_dict = get_data(phedex_url(method), params)
ddict = DotDict(json_dict)
if verbose:
print "Look-up LFN:"
print lfn
phedex = json_dict['phedex']
if phedex.has_key('mapping'):
if not phedex['mapping']:
msg = "LFN: %s\n" % lfn
msg += 'No replicas found\n'
msg += str(json_dict)
raise Exception(msg)
filelist = ddict.get('phedex.mapping.pfn')
if not filelist:
filelist = []
if isinstance(filelist, basestring):
filelist = [filelist]
for fname in filelist:
pfnlist.append(fname)
elif phedex.has_key('block') and not phedex['block']:
msg = 'No replicas found in PhEDEx, will try to get original SE from DBS'
print_warning(msg)
sename = get_dbs_se(lfn)
msg = 'Original LFN site %s' % sename
print_info(msg)
mgr = SiteDBManager()
pfnlist = lfn2pfn(lfn, sename, mgr)
filelist = ddict.get('phedex.block.file')
if not filelist:
filelist = []
for fname in filelist:
for replica in fname['replica']:
cmsname = replica['node']
se = replica['se']
if verbose:
print "found LFN on node=%s, se=%s" % (cmsname, se)
if cmsname.count('T0', 0, 2) == 1:
continue # skip T0's
# query Phedex for PFN
params = {'protocol':'srmv2', 'lfn':lfn, 'node':cmsname}
result = get_data(phedex_url('lfn2pfn'), params)
try:
for item in result['phedex']['mapping']:
pfn = item['pfn']
if pfn not in pfnlist:
pfnlist.append(pfn)
except:
msg = "Fail to look-up PFNs in Phedex\n" + str(result)
print msg
continue
if verbose > 1:
print "PFN list:"
for pfn in pfnlist:
print pfn
# finally return pfn and dst paths w/ file for further processing
for item in pfnlist:
ifile = item.split("/")[-1] if not dstfname else dstfname
yield item, '%s/%s' % (dst, ifile)
def get_size(surl, verbose=None):
"""
Execute srm-ls <surl> command and retrieve file size information
"""
srmls = os.environ.get('SRM_LS', '')
if not srmls:
print_error('Unable to find srm ls tool')
sys.exit(1)
if srmls.find('srm-ls') != -1:
srmargs = ''
else:
srmargs = '-2'
cmd = '%s %s %s' % (srmls, srmargs, surl)
if verbose:
print_info(cmd)
if cmd.find('file:///') != -1:
return file_size(cmd.split('file:///')[-1])
stdout, stderr = execmd(cmd)
if verbose:
print_info(stdout + stderr)
orig_size = 0
if cmd.find('file:///') != -1: # srm-ls returns XML
if srmls.find('srm-ls') != -1:
orig_size = parse_srmls(stdout)
else:
try:
orig_size = stdout.split()[0].strip()
except:
return 0
else:
if srmls.find('srm-ls') != -1:
for line in stdout.split('\n'):
if line.find('Bytes') != -1:
orig_size = line.replace('\n', '').split('=')[-1]
else:
try:
orig_size = stdout.split()[0].strip()
except:
return 0
return orig_size
def check_file(src, dst, verbose):
"""
Check if file is transfered and return dst, dst_size upon success.
"""
# find file size from replica
orig_size = get_size(src, verbose)
if verbose:
print "%s, size %s" % (src, orig_size)
if not orig_size or orig_size == 'null':
return False
# find file size from destination (if any)
dst_size = get_size(dst, verbose)
if verbose:
print "%s, size %s" % (dst, dst_size)
if not dst_size or dst_size == 'null':
return False
if int(orig_size) == int(dst_size):
return (dst, int(dst_size))
return False
def execute(cmds, src, dst, verbose):
"""
Execute given command, but also check if file is in place at dst
"""
status = check_file(src, dst, verbose)
if status:
return status
else:
if isinstance(cmds, basestring):
stdout, stderr = execmd(cmds)
if verbose:
print_info('Output of %s' % cmds)
print stdout + stderr
status = check_file(src, dst, verbose)
elif isinstance(cmds, list):
for cmd in cmds:
if not cmd:
continue
stdout, stderr = execmd(cmd)
if verbose:
print_info('Output of %s' % cmd)
print stdout + stderr
status = check_file(src, dst, verbose)
if status:
return status
return status
def active_jobs(queue):
"Return number of active jobs in a queue"
njobs = 0
for _, (proc, _status) in queue.items():
if proc.is_alive():
njobs += 1
return njobs
def worker(queue, threshold):
"""
Worker which start processes in a queue and monitor that number of
jobs does not exceed a given threshold
"""
while True:
njobs = active_jobs(queue)
if njobs < threshold:
# start process
for lfn, (proc, status) in queue.items():
if active_jobs(queue) >= threshold:
break
if not status and not proc.is_alive():
proc.start()
queue[lfn] = (proc, 'started')
time.sleep(5)
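# Minimal sketch of the queue protocol (names are illustrative):
#
# queue = {}
# thread.start_new_thread(worker, (queue, 3))
# queue[lfn] = (Process(target=execute, args=(cmds, pfn, pdst, 0)), None)
#
# worker() flips the status to 'started' right after launching a process, so
# each job is started at most once and at most `threshold` run concurrently.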
class FileMover(object):
def __init__(self):
self.instance = "Instance at %d" % self.__hash__()
self.queue = {} # download queue
threshold = os.environ.get('CMSSH_TRANSFER_LIMIT', 3)
thread.start_new_thread(worker, (self.queue, threshold))
self.methods = ['xrdcp', 'lcgcp', 'srmcp']
def transfer_cmds(self, lfn, dst, verbose=0):
"Generate transfer commands"
xrdcmd = 'xrdcp root://cms-xrd-global.cern.ch/%s %s' % (lfn, dst)
if not os.path.isdir(dst):
xrdcmd = ''
srmcp = os.environ.get('SRM_CP', '')
if srmcp.find('srm-copy') != -1:
srmargs = '-pushmode -statuswaittime 30 -3partycopy -delegation false -dcau false'
else:
srmargs = '-srm_protocol_version=2 -retry_num=1 -streams_num=1 -debug'
for pfn, pdst in pfn_dst(lfn, dst, 0): # last zero is verbose=0
lcg = os.environ.get('LCG_CP', '')
if lcg:
if verbose:
vflag = '-v'
else:
vflag = ''
lcgcmd = '%s %s -b -D srmv2 %s %s' % (lcg, vflag, pfn, pdst)
else:
lcgcmd = ''
if srmcp.find('srm-copy') != -1:
srmcmd = '%s %s %s %s' % (srmcp, pfn, pdst, srmargs)
else:
srmcmd = '%s %s %s %s' % (srmcp, srmargs, pfn, pdst)
yield xrdcmd, lcgcmd, srmcmd, pfn, pdst
def copy(self, lfn, dst, method='xrdcp', verbose=0, background=False):
"""Copy LFN to given destination"""
if method not in self.methods:
print_error('Unknown transfer method "%s"' % method)
return 'fail'
for xrdcmd, lcgcmd, srmcmd, pfn, pdst in self.transfer_cmds(lfn, dst, verbose):
if method == 'xrdcp':
cmd = xrdcmd
elif method == 'lcgcp':
cmd = lcgcmd
else:
cmd = srmcmd
if not cmd:
return 'fail'
if background:
# I need to pass list of commands for transfer method
# for that I'll use background variable
background = [xrdcmd, lcgcmd, srmcmd]
status = self.transfer(cmd, lfn, pfn, pdst, verbose, background)
if status == 'success' or status == 'accepted':
return status
return 'fail'
def transfer(self, cmd, lfn, pfn, pdst, verbose=0, background=False):
"""Copy LFN to given destination"""
err = 'Unable to identify total size of the file,'
err += ' the GRID middleware failed.'
if not background:
bar = PrintProgress('Fetching LFN info')
if verbose:
print_info(cmd)
if background:
# here background is a list of commands
if not isinstance(background, list):
return 'fail'
proc = Process(target=execute, args=(background, pfn, pdst, 0))
self.queue[lfn] = (proc, None)
return 'accepted'
elif verbose:
status = execute(cmd, pfn, pdst, verbose)
if not status:
return 'fail'
else:
dst, dst_size = status
size = size_format(dst_size)
if not size or not dst_size:
print_error(err)
print "Status of transfer:\n", status
return 'fail'
else:
print "\nDone, file located at %s (%s)" \
% (dst, size_format(dst_size))
return 'success'
else:
ifile = pdst
pfn_size = get_size(pfn)
if pfn_size and pfn_size != 'null':
tot_size = float(pfn_size)
bar.print_msg('LFN size=%s' % size_format(tot_size))
bar.init('Download in progress:')
proc = Process(target=execute, args=(cmd, pfn, pdst, verbose))
proc.start()
while True:
if proc.is_alive():
size = get_size(ifile)
if not size or size == 'null':
bar.refresh('')
pass
else:
progress = float(size)*100/tot_size
bar.refresh(progress)
if progress == 100:
break
else:
break
time.sleep(0.5)
bar.clear()
status = check_file(pfn, pdst, verbose)
if status:
return 'success'
else:
print_error(err)
return 'fail'
return 'fail'
def list_lfn(self, lfn, verbose=0):
"""List LFN"""
pat_lfn = re.compile('^/.*\.root$')
if pat_lfn.match(lfn):
pfnlist, selist = get_pfns(lfn, verbose)
for pfn in pfnlist:
print '%s %s' % (lfn, get_size(pfn, verbose))
def list_se(self, arg, verbose=0):
"""list content of given directory on SE"""
try:
node, ldir = arg.split(':')
except:
msg = 'Given argument "%s" does not represent SE:dir' % arg
raise Exception(msg)
srmls = os.environ.get('SRM_LS', '')
if not srmls:
print_error('Unable to find srm ls tool')
sys.exit(1)
dst = [r for r in resolve_user_srm_path(node, ldir)][0]
if os.environ.get('LCG_LS', ''):
cmd = "%s -l -v -b -D srmv2 %s" % (os.environ['LCG_LS'], dst)
else:
if srmls.find('srm-ls') != -1:
cmd = "%s %s -fulldetailed" % (srmls, dst)
else:
cmd = "%s -2 -l %s" % (srmls, dst)
if verbose:
print cmd
stdout, stderr = execmd(cmd)
if stderr:
print_error(stderr)
output = []
row = {}
if os.environ.get('LCG_LS', ''):
for line in stdout.split('\n'):
if line.find('SE type') != -1:
continue
output.append(line)
return '\n'.join(output)
elif srmls.find('srmls') != -1:
for line in srmls_printer(stdout, dst.split('=')[-1]):
output.append(line)
return '\n'.join(output)
else:
for line in srm_ls_printer(stdout, dst.split('=')[-1]):
output.append(line)
return '\n'.join(output)
def rm_lfn(self, arg, verbose=0):
"""Remove user lfn from a node"""
try:
node, lfn = arg.split(':')
except:
msg = 'Given argument "%s" does not represent SE:LFN' % arg
raise Exception(msg)
cmd = os.environ.get('SRM_RM', '')
dst = [r for r in resolve_user_srm_path(node)][0]
dst, path = dst.split('=')
if dst[-1] != '=':
dst += '='
for item in lfn.split('/'):
if not item or item in path:
continue
path += '/%s' % item
cmd = "%s %s" % (cmd, dst+path)
if verbose:
print cmd
try:
stdout, stderr = execmd(cmd)
if verbose:
print_info(stdout + stderr)
except:
return 'fail'
return 'success'
def rmdir(self, path, verbose=0):
"""rmdir command"""
spath = path.split(':')
if len(spath) == 1:
node = spath[0]
ldir = '/store/user'
else:
node = spath[0]
ldir = spath[1]
dst = [r for r in resolve_user_srm_path(node, ldir)][0]
cmd = '%s %s' % (os.environ.get('SRM_RMDIR', ''), dst)
if verbose:
print_info(cmd)
try:
stdout, stderr = execmd(cmd)
if verbose:
print_info(stdout + stderr)
except:
return 'fail'
return 'success'
def mkdir(self, path, verbose=0):
"""mkdir command"""
spath = path.split(':')
if len(spath) == 1:
node = spath[0]
ldir = '/store/user'
else:
node = spath[0]
ldir = spath[1]
dst = [r for r in resolve_user_srm_path(node, ldir)][0]
cmd = '%s %s' % (os.environ.get('SRM_MKDIR', ''), dst)
if verbose:
print_info(cmd)
try:
stdout, stderr = execmd(cmd)
if verbose:
print_info(stdout + stderr)
except:
return 'fail'
return 'success'
def lfn_exists(lfn, dst):
"Check if given LFN exists at local destination"
if dst[0] == '/' or dst[0] == '.':
fname = lfn.split('/')[-1]
if os.path.isdir(dst):
if os.path.exists(os.path.join(dst, fname)):
return True
if os.path.exists(dst):
return True
return False
FM_SINGLETON = FileMover()
def copy_lfn(lfn, dst, verbose=0, background=False, overwrite=False):
"""Copy lfn to destination"""
if overwrite:
if os.path.isfile(dst):
os.remove(dst)
if lfn_exists(lfn, dst):
if os.path.isdir(dst):
fname = lfn.split('/')[-1]
if os.path.exists(os.path.join(dst, fname)):
os.remove(os.path.join(dst, fname))
else:
if lfn_exists(lfn, dst):
if os.path.isdir(dst):
fname = os.path.join(dst, lfn.split('/')[-1])
if not os.path.exists(fname):
fname = None
elif os.path.isfile(dst) and os.path.exists(dst):
fname = dst
else:
fname = None
print_warning('Destination %s is not local disk' % dst)
if fname:
print_warning('File %s already exists' % fname)
return 'fail'
method = os.environ.get('CMSSH_TRANSFER_METHOD', 'xrdcp')
status = FM_SINGLETON.copy(lfn, dst, method, verbose, background)
if status == 'fail':
print_warning('xrdcp failed to copy the file, falling back to GRID middleware')
if os.environ.get('LCG_CP', ''):
status = FM_SINGLETON.copy(lfn, dst, 'lcgcp', verbose, background)
else:
status = FM_SINGLETON.copy(lfn, dst, 'srmcp', verbose, background)
return status
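# Typical interactive calls (LFN and destination are illustrative):
#
# copy_lfn('/store/user/alice/file.root', '.') # foreground, progress bar
# copy_lfn('/store/user/alice/file.root', '.', background=True) # queued
# dqueue('list') # inspect waiting/active/finished transfers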
def dqueue(arg=None):
"""Return download queue"""
download_queue = FM_SINGLETON.queue
alive = []
waiting = []
ended = []
for lfn, (proc, status) in download_queue.items():
if not status:
waiting.append(lfn)
elif proc.is_alive():
alive.append(lfn)
else:
ended.append((lfn, proc.exitcode))
del download_queue[lfn]
print "In progress: %s jobs" % len(alive)
if arg and arg == 'list':
for lfn in alive:
print lfn
if len(alive): print
print "Waiting : %s jobs" % len(waiting)
if arg and arg == 'list':
for lfn in waiting:
print lfn
if len(waiting): print
print "Finished : %s jobs" % len(ended)
if arg and arg == 'list':
for lfn, code in ended:
print "%s, exit code %s" % (lfn, code)
def list_lfn(lfn, verbose=0):
"""List lfn info"""
return FM_SINGLETON.list_lfn(lfn, verbose)
def list_se(arg, verbose=0):
"""List SE content"""
return FM_SINGLETON.list_se(arg, verbose)
def rm_lfn(lfn, verbose=0):
"""Remove lfn from destination"""
return FM_SINGLETON.rm_lfn(lfn, verbose)
def mkdir(dst, verbose=0):
"""mkdir command"""
return FM_SINGLETON.mkdir(dst, verbose)
def rmdir(dst, verbose=0):
"""rmdir command"""
return FM_SINGLETON.rmdir(dst, verbose)
| [
"[email protected]"
]
| |
df75da1c4cd20551725bfd015ced8926d141dbca | 62e4030268aa2835a4806864cb70055675724471 | /docs/conf.py | 1016182d33de160fe22db4afafdff318e041a7bc | []
| no_license | aisis/FoxDotCode | 291507fe16f5a56b8fed312827712db213d78e83 | 186175f76873771e13b4aa1fa714201ab98c4efe | refs/heads/master | 2021-01-15T13:35:20.524593 | 2016-03-13T13:16:44 | 2016-03-13T13:16:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,385 | py | # -*- coding: utf-8 -*-
#
# FoxDot documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 21 22:04:59 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'FoxDot'
copyright = u'2016, Ryan Kirkbride'
author = u'Ryan Kirkbride'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0'
# The full version, including alpha/beta/rc tags.
release = u'1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'FoxDotdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'FoxDot.tex', u'FoxDot Documentation',
u'Ryan Kirkbride', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'foxdot', u'FoxDot Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'FoxDot', u'FoxDot Documentation',
author, 'FoxDot', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Path to Python files
sys.path.insert(0,"D:\\Google Drive\\GitHub\\FoxDotCode\\")
| [
"[email protected]"
]
| |
47ede935441605d7d56f33de91b7e10d1f544291 | 930309163b930559929323647b8d82238724f392 | /sumitb2019_c.py | 8ebf6c2adc23f64ec6e3e5122b0e1896defd65e2 | []
| no_license | GINK03/atcoder-solvers | 874251dffc9f23b187faa77c439b445e53f8dfe1 | b1e7ac6e9d67938de9a85df4a2f9780fb1fbcee7 | refs/heads/master | 2021-11-07T14:16:52.138894 | 2021-09-12T13:32:29 | 2021-09-12T13:32:29 | 11,724,396 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | import itertools
X = int(input())
ps = [100, 101, 102, 103, 104, 105]
dp=[0]*(X+1)
dp[0] = 1
for p in ps:
for i in range(len(dp)):
if i >= p:
dp[i] = max(dp[i], dp[i-p])
print(dp[X])
| [
"[email protected]"
]
| |
56583f3316a24edddd70b4a0f9c935cbd4ceb946 | 3b79a802f8dd9f26bee0bfde4630ac0cab932803 | /srcSegcls/getEventSegDF.py | b004b92f2a243f693794a4efdb8cca0d07350ef9 | []
| no_license | qolina/Twevent | 87fc4706564088361e9db6ddc44efc10647e67fe | 4b90b0604493b20dee90448c17e0a8e0d557165e | refs/heads/master | 2021-06-24T19:06:02.022882 | 2017-08-15T05:20:09 | 2017-08-15T05:20:09 | 100,341,172 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,338 | py | #! /usr/bin/env python
#coding=utf-8
import time
import re
import os
import math
import cPickle
############################
## load tweetID-usrID
def loadUsrId(filepath):
usrFile = file(filepath,"r")
tweIdToUsrIdHash = cPickle.load(usrFile)
usrFile.close()
return tweIdToUsrIdHash
############################
## load event segments from file
def loadEvtseg(filePath):
unitHash = {}#segment:segmentID(count from 0)
inFile = file(filePath)
unitID = 0
while True:
lineStr = inFile.readline()
lineStr = re.sub(r'\n', ' ', lineStr)
lineStr = lineStr.strip()
if len(lineStr) <= 0:
break
contentArr = lineStr.split("\t")
unit = contentArr[2]
unitHash[unit] = unitID
unitID += 1
inFile.close()
print "### " + str(len(unitHash)) + " event " + UNIT + "s are loaded from " + inFile.name
return unitHash
############################
## getEventSegment's df
def getEventSegmentDF(dataFilePath, toolDirPath):
fileList = os.listdir(dataFilePath)
for item in sorted(fileList):
if item.find("segged") != 0:
continue
print "### Processing " + item
seggedFile = file(dataFilePath + item)
tStr = item[len(item)-2:len(item)]
print "Time window: " + tStr
eventSegFilePath = dataFilePath + "event" + UNIT + tStr
unitHash = loadEvtseg(eventSegFilePath)
eventSegDFFile = file(dataFilePath + "event" + UNIT + "DF" + tStr, "w")
unitDFHash = {} # unit:dfhash
N_t = 0
Usr_t = 0
usrHash = {}
unitUsrHash = {}
tweToUsrFilePath = toolDirPath + "tweIdToUsrId" + tStr
tweIdToUsrIdHash = loadUsrId(tweToUsrFilePath)
while True:
lineStr = seggedFile.readline()
lineStr = re.sub(r'\n', " ", lineStr)
lineStr = lineStr.strip()
if len(lineStr) <= 0:
break
contentArr = lineStr.split("\t")
tweetIDstr = contentArr[0]
tweetText = contentArr[2]
usrIDstr = tweIdToUsrIdHash[tweetIDstr]
if len(tweetText)*len(tweetIDstr) == 0:
print "Error: empty id or text: " + tweetIDstr + "#" + tweetText
exit
N_t += 1
if usrIDstr not in usrHash:
usrHash[usrIDstr] = 1
textArr = tweetText.split("|")
for segment in textArr:
wordArr = segment.split(" ")
containslang = False
if useSegmentFlag:
unit = segment
if unit not in unitHash:
continue
# segment df
df_t_hash = {}
if unit in unitDFHash:
df_t_hash = unitDFHash[unit]
df_t_hash[tweetIDstr] = 1
unitDFHash[unit] = df_t_hash
# segment users
usr_hash = {}
if unit in unitUsrHash:
usr_hash = unitUsrHash[unit]
usr_hash[usrIDstr] = 1
unitUsrHash[unit] = usr_hash
else:
for word in wordArr:
unit = word
if unit not in unitHash:
continue
# word df
df_t_hash = {}
if unit in unitDFHash:
df_t_hash = unitDFHash[unit]
df_t_hash[tweetIDstr] = 1
unitDFHash[unit] = df_t_hash
# word users
usr_hash = {}
if unit in unitUsrHash:
usr_hash = unitUsrHash[unit]
usr_hash[usrIDstr] = 1
unitUsrHash[unit] = usr_hash
if N_t % 100000 == 0:
print "### " + str(time.asctime()) + " " + str(N_t) + " tweets are processed!"
windowHash[tStr] = N_t
Usr_t = len(usrHash)
cPickle.dump(N_t, eventSegDFFile)
cPickle.dump(Usr_t, eventSegDFFile)
cPickle.dump(unitDFHash, eventSegDFFile)
cPickle.dump(unitUsrHash, eventSegDFFile)
for unit in unitDFHash:
print unit + "\t" + str(len(unitDFHash[unit]))
print "### " + str(time.asctime()) + " " + str(len(unitHash)) + " event " + UNIT + "s DF/UsrDF are calculated and writen to " + eventSegDFFile.name
seggedFile.close()
eventSegDFFile.close()
############################
## main Function
global useSegmentFlag, UNIT
print "###program starts at " + str(time.asctime())
#dataFilePath = r"../Data_hfmon/segged_qtwe/"
dataFilePath = r"../Data_hfmon/segged_ltwe/"
#dataFilePath = r"../Data_hfmon/segged_ltwe_hash/"
# use segment or word as unit
useSegmentFlag = True
if useSegmentFlag:
UNIT = "segment"
else:
UNIT = "word"
toolDirPath = r"../Tools/"
windowHash = {} # timeSliceIdStr:tweetNum
getEventSegmentDF(dataFilePath, toolDirPath)
print "###program ends at " + str(time.asctime())
| [
"[email protected]"
]
| |
b6c7bc0863d3be11b0c5fdaf4028d0651061b62a | 3ee0418421955d01558b1c623def251932bcfc01 | /python-examples/marble_sort/write_json.py | b3388c9cc682286c4a2476f1d08641cbb8ddb79c | [
"MIT"
]
| permissive | pep-dortmund/mindstorms | 89f426930516155bb75f52b9fdd24a0b64fc0951 | 9e6be52545e21ab8ba3bca7e1b0e64ed2320366d | refs/heads/master | 2021-01-01T19:19:26.508803 | 2017-04-29T11:39:35 | 2017-04-29T11:39:35 | 38,932,641 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | from argparse import ArgumentParser
import zmq
import json
parser = ArgumentParser()
parser.add_argument('outputfile')
parser.add_argument('-p', '--port', type=int, default=5000)
context = zmq.Context()
socket = context.socket(zmq.REP)
def main():
args = parser.parse_args()
socket.bind('tcp://0.0.0.0:{}'.format(args.port))
events = 0
with open(args.outputfile, 'a') as f:
while True:
data = socket.recv_pyobj()
socket.send_string('ok')
events += 1
print('Events:', events)
f.write(json.dumps(data))
f.write('\n')
if __name__ == '__main__':
main()
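# Example run (file name and port are illustrative):
#
# python write_json.py events.jsonl --port 5000
#
# A matching producer would open a zmq.REQ socket, send_pyobj() a dict per
# event and wait for the 'ok' reply before sending the next one.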
| [
"[email protected]"
]
| |
6afad1eb9a9749a808aa04ff852f4ed7cf4fb72b | 889d13d15084f12e84731f48f50c72169f4ca45f | /public/class03demos/class03p10.py | d49c82eb8a80a9c4ac35087d43a3a802aada5e9c | []
| no_license | puneet-khatod/ml4us | 1bb4a661f3d59d8d0b7ff9e959b2f51324c7a9c9 | 917cdac85086bfc82f03e3db3ba8e7b15f9c407b | refs/heads/master | 2021-05-06T15:59:13.646649 | 2017-12-09T08:03:30 | 2017-12-09T08:03:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | """
class03p10.py
This script should use Pandas to plot prices of GSPC for 2016.
"""
import pandas as pd
import matplotlib.pyplot as plt
csvfile = 'http://spy611.herokuapp.com/csv/allpredictions.csv'
# Goog: In pandas how to sort a dataframe?
cp_df = pd.read_csv(csvfile).sort_values(['cdate'])
# Goog: In pandas how to filter?
cp2016_sr = (cp_df.cdate > '2016') & (cp_df.cdate < '2017')
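# boolean mask: True for rows whose ISO-format cdate string falls in 2016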
cp2016_df = cp_df[['cdate','cp']][cp2016_sr]
# Plot closing prices indexed by date
cpdate2016_df = cp2016_df.set_index(['cdate'])
# Goog: In Pandas what is an index?
# Goog: In Pandas what does set_index do?
cpdate2016_df.plot.line(title="GSPC 2016")
plt.show() # This line might be slow
'bye'
| [
"[email protected]"
]
| |
cd4907ec3488eeaa4af0b6adb78c6fe463d8811d | 4142b8c513d87361da196631f7edd82f11465abb | /python/round135/219A.py | 84c2546d1739cabe735229c97479d28929b9d4e4 | []
| no_license | npkhanhh/codeforces | b52b66780426682ea1a3d72c66aedbe6dc71d7fe | 107acd623b0e99ef0a635dfce3e87041347e36df | refs/heads/master | 2022-02-08T17:01:01.731524 | 2022-02-07T10:29:52 | 2022-02-07T10:29:52 | 228,027,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py | from collections import Counter
k = int(input())
s = input()
d = Counter(s)
res = ''
for t in d:
val = d[t]
if val % k == 0:
res += t * (val // k)
else:
res = '-1'
break
if res != '-1':
res *= k
print(res)
| [
"[email protected]"
]
| |
9d7639d189d421797740d682aac51312abee9e92 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/225/users/4011/codes/1745_1531.py | f673af6f93026f7831e6b2d8cc72542a9d884f67 | []
| no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 244 | py | from math import*
x = eval(input("radian: "))
k = int(input("Number of series terms: "))
n = 0
soma = 0
while(n < k):
n = n + 1
sinal = (x**(2 + 2*n)/factorial(2*n))
if n % 2 == 1:
sinal = -sinal
soma = soma + sinal
print(round(soma, 10))
"[email protected]"
]
| |
f5dd2bb68d941f22a8ece354d5ebe4a7ff628fca | 736250d9d14552c5fa0aca25b25d9c8a28fcd1a0 | /mtmpro/mtmapp/migrations/0001_initial.py | 42368fcde0eff7c119ef4b9236f3139dcdb96da7 | []
| no_license | maheswatapradhan/feedback | 57f052a2082902cb8a72b474e0b863b7a00d1c9c | 31c7dcb113a38e29b3a56481fcb9ae2fce7d61a2 | refs/heads/master | 2020-09-15T23:42:32.041306 | 2019-11-23T12:54:25 | 2019-11-23T12:54:25 | 223,585,900 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,254 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2019-09-16 11:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cno', models.IntegerField()),
('cname', models.CharField(max_length=100)),
('fee', models.IntegerField()),
],
),
migrations.CreateModel(
name='Student',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sno', models.IntegerField()),
('sname', models.CharField(max_length=100)),
('location', models.CharField(max_length=100)),
('marks', models.IntegerField()),
],
),
migrations.AddField(
model_name='course',
name='student',
field=models.ManyToManyField(to='mtmapp.Student'),
),
]
| [
"[email protected]"
]
| |
e92bb7009b48dbf53be81f216d049bab6787cdce | 5d61565651b7ba5fa8fade3313a5e82fca8b6686 | /login/migrations/0003_auto_20190709_2213.py | 58c72a12c002fd6586fd9fbdb94b2ed1aaacc6c2 | []
| no_license | lonelyxmas/ISMS | d597b00072bfa77907875f575b866fbb1fb53295 | 08c5e2f3518fc639cf1a1f2869f4b2f3ae58e306 | refs/heads/master | 2023-08-14T12:02:59.001215 | 2021-03-22T03:34:58 | 2021-03-22T03:34:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py | # Generated by Django 2.1.4 on 2019-07-09 14:13
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('login', '0002_auto_20190704_0826'),
]
operations = [
migrations.AlterField(
model_name='user',
name='FID',
field=models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='user',
name='FType',
field=models.IntegerField(choices=[(0, '企业账户'), (1, '合作伙伴'), (2, '管理员')], default=0, verbose_name='用户类型'),
),
]
| [
"[email protected]"
]
| |
db9503f8d4917677b10f97a48c4f912d05a9290a | acc244c97a943d8e2074339afa1bff1274ae4cfc | /CGATPipelines/PipelineMedip.py | 3f12a921f960aaedb163d725a83b325930f8e7fb | []
| no_license | eromasko/cgat | 00114f4c95b439ba6595ddf2092d1a3307347401 | d82d197f3913b8d65b656c0b205ca48854fdb2a6 | refs/heads/master | 2021-01-17T09:37:17.168278 | 2015-02-20T09:03:31 | 2015-02-20T09:03:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,824 | py | '''
PipelineMedip.py - tasks associated with MedipSeq analysis
==========================================================
'''
import re
import os
import collections
import sqlite3
import CGAT.Experiment as E
import CGAT.Pipeline as P
import CGAT.Database as Database
import CGAT.IOTools as IOTools
from rpy2.robjects import r as R
import rpy2.robjects as ro
PARAMS = {}
def buildDMRStats(tables, method, outfile):
'''build dmr summary statistics.
Creates some diagnostic plots in
<exportdir>/<method> directory.
Tables should be labeled <tileset>_<design>_<method>.
'''
dbhandle = sqlite3.connect(PARAMS["database"])
def togeneset(tablename):
return re.match("([^_]+)_", tablename).groups()[0]
keys_status = "OK", "NOTEST", "FAIL", "NOCALL"
outf = IOTools.openFile(outfile, "w")
outf.write("\t".join(("tileset", "design", "track1", "track2", "tested",
"\t".join(["status_%s" % x for x in keys_status]),
"significant",
"up", "down",
"twofold",
"twofold_up", "twofold_down",
)) + "\n")
all_tables = set(Database.getTables(dbhandle))
outdir = os.path.join(PARAMS["exportdir"], "diff_methylation")
for tablename in tables:
prefix = P.snip(tablename, "_%s" % method)
tileset, design = prefix.split("_")
def toDict(vals, l=2):
return collections.defaultdict(int, [(tuple(x[:l]), x[l]) for x in vals])
E.info("collecting data from %s" % tablename)
tested = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, COUNT(*) FROM %(tablename)s
GROUP BY treatment_name,control_name""" % locals() ).fetchall() )
status = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, status, COUNT(*) FROM %(tablename)s
GROUP BY treatment_name,control_name,status""" % locals() ).fetchall(), 3 )
signif = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, COUNT(*) FROM %(tablename)s
WHERE significant
GROUP BY treatment_name,control_name""" % locals() ).fetchall() )
fold2 = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, COUNT(*) FROM %(tablename)s
WHERE (l2fold >= 1 or l2fold <= -1) AND significant
GROUP BY treatment_name,control_name,significant""" % locals() ).fetchall() )
up = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, COUNT(*) FROM %(tablename)s
WHERE l2fold > 0 AND significant
GROUP BY treatment_name,control_name,significant""" % locals() ).fetchall() )
down = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, COUNT(*) FROM %(tablename)s
WHERE l2fold < 0 AND significant
GROUP BY treatment_name,control_name,significant""" % locals() ).fetchall() )
fold2up = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, COUNT(*) FROM %(tablename)s
WHERE l2fold > 1 AND significant
GROUP BY treatment_name,control_name,significant""" % locals() ).fetchall() )
fold2down = toDict(Database.executewait(dbhandle,
"""SELECT treatment_name, control_name, COUNT(*) FROM %(tablename)s
WHERE l2fold < -1 AND significant
GROUP BY treatment_name,control_name,significant""" % locals() ).fetchall() )
groups = tested.keys()
for treatment_name, control_name in groups:
k = (treatment_name, control_name)
outf.write("\t".join(map(str, (
tileset,
design,
treatment_name,
control_name,
tested[k],
"\t".join([str(status[(treatment_name, control_name, x)])
for x in keys_status]),
signif[(k)],
up[k], down[k],
fold2[k],
fold2up[k], fold2down[k]))) + "\n")
###########################################
###########################################
###########################################
# plot length versus P-Value
data = Database.executewait(dbhandle,
'''SELECT end - start, pvalue
FROM %(tablename)s
WHERE significant''' % locals() ).fetchall()
# require at least 10 datapoints - otherwise smooth scatter fails
if len(data) > 10:
data = zip(*data)
pngfile = "%(outdir)s/%(tileset)s_%(design)s_%(method)s_pvalue_vs_length.png" % locals()
R.png(pngfile)
R.smoothScatter(R.log10(ro.FloatVector(data[0])),
R.log10(ro.FloatVector(data[1])),
xlab='log10( length )',
ylab='log10( pvalue )',
log="x", pch=20, cex=.1)
R['dev.off']()
outf.close()
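# Hypothetical call from a pipeline task (table and file names illustrative);
# tables must follow the <tileset>_<design>_<method> naming used above:
#
# PARAMS.update({'database': 'csvdb', 'exportdir': 'export'})
# buildDMRStats(['tiles_design1_deseq'], 'deseq', 'dmr_stats.tsv')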
| [
"[email protected]"
]
| |
7a821db6e73317f1eda8b4668d934a936b9bc173 | efb3d0c2f9fcc5be631323e31f4b8dfcdd0ab676 | /compiler/tests/14_replica_column_test.py | c8d50a539879db74ee9e9e7d09880960e2cc6270 | [
"BSD-3-Clause"
]
| permissive | kanokkorn/OpenRAM | 5f30beb35e3c161fbf0d233b59fe7d7805d3c348 | 3a9693e37fd3afbd52001839966b0f2811fb4ccd | refs/heads/master | 2022-06-03T12:53:47.750245 | 2022-05-27T15:53:05 | 2022-05-27T15:53:05 | 189,780,330 | 0 | 0 | BSD-3-Clause | 2021-04-07T06:49:08 | 2019-06-01T21:47:50 | Python | UTF-8 | Python | false | false | 1,291 | py | #!/usr/bin/env python3
# See LICENSE for licensing information.
#
# Copyright (c) 2016-2021 Regents of the University of California
# All rights reserved.
#
import unittest
from testutils import *
import sys, os
sys.path.append(os.getenv("OPENRAM_HOME"))
import globals
from globals import OPTS
from sram_factory import factory
import debug
class replica_column_test(openram_test):
def runTest(self):
config_file = "{}/tests/configs/config".format(os.getenv("OPENRAM_HOME"))
globals.init_openram(config_file)
if OPTS.tech_name == "sky130":
num_spare_rows = 1
num_spare_cols = 1
else:
num_spare_rows = 0
num_spare_cols = 0
debug.info(2, "Testing replica column for single port")
a = factory.create(module_type="replica_column",
rows=4 + num_spare_rows,
rbl=[1, 0],
replica_bit=1,
column_offset=num_spare_cols)
self.local_check(a)
globals.end_openram()
# run the test from the command line
if __name__ == "__main__":
(OPTS, args) = globals.parse_args()
del sys.argv[1:]
header(__file__, OPTS.tech_name)
unittest.main(testRunner=debugTestRunner())
| [
"[email protected]"
]
| |
7b205e91d3d2e6bea20b6b48b78dc7bf2b245db8 | c908dacdc0006e247aa529dddb98bc1d67fbf7c8 | /user.py | c2f9669f15bbddd02c3b88046a27e25547ba194d | []
| no_license | TomeCirun/flask_blog | 40e3bd041fd7ba376c181073c92e19f296aca928 | de34ac14e2e3e2044e3f327e288eefadf34b7faf | refs/heads/main | 2023-03-05T13:51:37.335673 | 2021-02-17T12:04:00 | 2021-02-17T12:04:00 | 339,709,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py |
class User():
def __init__(self,id,username,password):
self.id = id
self.username = username
self.password = password
def __str__(self):
return f'User Id: {self.id}' | [
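# Minimal usage sketch:
#
# user = User(1, 'alice', 'secret')
# print(user) # -> User Id: 1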
"[email protected]"
]
| |
7332bb72184308f1c755b9859e825e727dc18a52 | 2205363ea412aae36aa2c5f8b7d608cd8a158a03 | /Personal_Blog/Pb/Pb/settings.py | d3f8de8c66bb1455f934b84f6bb3190cd42b086b | []
| no_license | Akanksha2403/HacktoberFest2020 | 986ef7ba5595679085e5159d35c5a30d9e91ebc5 | 789762e3a4a3ad23fd2c1ca3b6cc3bc8f39eed82 | refs/heads/master | 2023-08-28T04:25:07.466359 | 2021-10-20T10:16:46 | 2021-10-20T10:16:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,674 | py | """
Django settings for Pb project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-0r=r9##5pcrhvdnxxoblg4uj7#@^n$z3t%+a7&t@1_4ebckoxo'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Blog.apps.BlogConfig',
'chat.apps.ChatConfig',
'resume',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Pb.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR/'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Pb.wsgi.application'
ASGI_APPLICATION = 'chatty.asgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
STATIC_ROOT = os.path.join(BASE_DIR, "static")
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"[email protected]"
]
| |
38ffeefe71c4acb79a5a838efeb26765465afa7f | 159d4ae61f4ca91d94e29e769697ff46d11ae4a4 | /venv/bin/iptest | 62ac6036fc5f261d69ea933bb91ed9bee7ded5ca | [
"MIT"
]
| permissive | davidycliao/bisCrawler | 729db002afe10ae405306b9eed45b782e68eace8 | f42281f35b866b52e5860b6a062790ae8147a4a4 | refs/heads/main | 2023-05-24T00:41:50.224279 | 2023-01-22T23:17:51 | 2023-01-22T23:17:51 | 411,470,732 | 8 | 0 | MIT | 2023-02-09T16:28:24 | 2021-09-28T23:48:13 | Python | UTF-8 | Python | false | false | 269 | #!/Users/yenchiehliao/Dropbox/bisCrawler/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from IPython.testing.iptestcontroller import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
412d94ad7ce1d9d7b92b6406d8aa5350f3f77fe9 | 2e79b8f2e4cc5ea10789de787f787fdc56137993 | /leetcode/438.找到字符串中所有字母异味词.py | e1f674a878c118e39a1c3fa3bfafdb8b51fc9564 | []
| no_license | wangye707/Test | d486ccb0947f6a83662a73fb56554260d1445c30 | 0d5fb8ea7da79d7d168d99f7158c8aa5757a1d35 | refs/heads/master | 2020-06-04T05:48:46.132054 | 2020-04-28T14:53:30 | 2020-04-28T14:53:30 | 191,894,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,025 | py | #!D:/workplace/python
# -*- coding: utf-8 -*-
# @File : 438.找到字符串中所有字母异味词.py
# @Author: WangYe
# @Date : 2019/9/25
# @Software: PyCharm
def findAnagrams(s, p):
"""
:type s: str
:type p: str
:rtype: List[int]
"""
out = []
need = {}
for i in p:
if i in need:
need[i] += 1
else:
need[i] = 1
l = len(p)
win = {}
for i in range(len(s)-l+1):
if i == 0:
win = {}
for k in s[i:i + l]:
if k in win:
win[k] += 1
else:
win[k] = 1
else:
# print(s[i-1],win)
if win[s[i-1]] >1:
win[s[i-1]] -=1
else:
del win[s[i-1]]
if s[i+l-1] in win:
win[s[i+l-1]] +=1
else:
win[s[i+l-1]] = 1
if win==need:
out.append(i)
return out
s = "cbaebabacd"
p = "abc"
print(findAnagrams(s,p)) | [
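# Expected output for the sample above: [0, 6]
# (s[0:3] == 'cba' and s[6:9] == 'bac' are both permutations of 'abc')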
"[email protected]"
]
| |
e170f688e59520f390ab02a6b3e1b52b161b747b | 66bfac516682bc8c3c804a5b7414cfc8b3440186 | /leads/apps/leads/serializers.py | 5e9d220555f6c26071a166a7b386b109ee1a7eb8 | []
| no_license | Izaiasjun1Dev/leads | 190d1bf01f1809c34cb53582e0f1020c3d704b58 | 22a209b43fd0eb60218deba731c9bf189ea9568a | refs/heads/master | 2023-04-05T15:15:39.834194 | 2021-03-30T11:35:36 | 2021-03-30T11:35:36 | 352,966,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | from rest_framework import serializers
from .models import Lead
# Serializador lead
class LeadSerializer(serializers.ModelSerializer):
class Meta:
model = Lead
fields = '__all__'
| [
"[email protected]"
]
| |
71f27e6f44fc1dfef7571b27982acccf33236218 | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/ring/siren.py | 7f1b147471d271411715ee41520529c0afef4805 | [
"Apache-2.0"
]
| permissive | home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | Python | UTF-8 | Python | false | false | 1,678 | py | """Component providing HA Siren support for Ring Chimes."""
import logging
from typing import Any
from ring_doorbell.const import CHIME_TEST_SOUND_KINDS, KIND_DING
from homeassistant.components.siren import ATTR_TONE, SirenEntity, SirenEntityFeature
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import DOMAIN
from .entity import RingEntityMixin
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Create the sirens for the Ring devices."""
devices = hass.data[DOMAIN][config_entry.entry_id]["devices"]
sirens = []
for device in devices["chimes"]:
sirens.append(RingChimeSiren(config_entry, device))
async_add_entities(sirens)
class RingChimeSiren(RingEntityMixin, SirenEntity):
"""Creates a siren to play the test chimes of a Chime device."""
_attr_available_tones = CHIME_TEST_SOUND_KINDS
_attr_supported_features = SirenEntityFeature.TURN_ON | SirenEntityFeature.TONES
_attr_translation_key = "siren"
def __init__(self, config_entry: ConfigEntry, device) -> None:
"""Initialize a Ring Chime siren."""
super().__init__(config_entry.entry_id, device)
# Entity class attributes
self._attr_unique_id = f"{self._device.id}-siren"
def turn_on(self, **kwargs: Any) -> None:
"""Play the test sound on a Ring Chime device."""
tone = kwargs.get(ATTR_TONE) or KIND_DING
self._device.test_sound(kind=tone)
| [
"[email protected]"
]
| |
f9d21162737f40168c323f56d4a303bf6211ce0c | c6d89d2507efe02ead1802649a769e021795b2b6 | /categories/context_processors.py | cb9c2687489bdc34c2746a89d05b11c34a37b16c | []
| no_license | ikonitas/pleasuresallmine | b671b05d2f13428973cc19d39e58d0b56d1914f0 | 875e6067a202be801a9b1fddb27c4d313fd133f4 | refs/heads/master | 2021-05-29T19:50:39.812885 | 2014-11-27T21:22:22 | 2014-11-27T21:22:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | # coding=utf-8
from django.core.cache import cache
from models import Category
def list_categories(request):
categories = cache.get('list_categories')
if not categories:
categories = Category.objects.filter(
is_active=True).order_by('sort_order')
cache.set('list_categories', categories, 60)
return {'list_categories': categories}
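# To enable this processor, add "categories.context_processors.list_categories"
# to the context-processors list in the Django settings (the
# TEMPLATE_CONTEXT_PROCESSORS tuple on the old Django this repo targets,
# TEMPLATES["OPTIONS"]["context_processors"] on modern Django).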
| [
"[email protected]"
]
| |
29511c1e8bcf903725d957b2e420756cc1908ad8 | 29d7ba390d4b6046666f783e682ea248108ea900 | /cbagent/__main__.py | 09669d3c032cc65f432c457e3e7024f81dfcc2cd | [
"Apache-2.0"
]
| permissive | pavel-paulau/cbagent | 5f289fbaf08b997b55d270944d67f716ec1a127a | f905974d663e0320e55a00076d292cbf489e53d9 | refs/heads/master | 2020-04-26T09:55:43.761203 | 2014-07-31T12:41:18 | 2014-07-31T12:41:18 | 13,084,444 | 2 | 1 | null | 2014-06-19T02:15:22 | 2013-09-25T04:52:21 | Python | UTF-8 | Python | false | false | 2,380 | py | import sys
from optparse import OptionParser
from cbagent.collectors.active_tasks import ActiveTasks
from cbagent.collectors.iostat import IO
from cbagent.collectors.latency import Latency
from cbagent.collectors.observe import ObserveLatency
from cbagent.collectors.net import Net
from cbagent.collectors.ns_server import NSServer
from cbagent.collectors.ps import PS
from cbagent.collectors.sync_gateway import SyncGateway
from cbagent.collectors.xdcr_lag import XdcrLag
from cbagent.settings import Settings
def main():
parser = OptionParser(prog="cbagent")
parser.add_option("--at", action="store_true", dest="active_tasks",
help="Active tasks")
parser.add_option("--io", action="store_true", dest="iostat",
help="iostat")
parser.add_option("--l", action="store_true", dest="latency",
help="Latency")
parser.add_option("--o", action="store_true", dest="observe",
help="Observe latency")
parser.add_option("--n", action="store_true", dest="net",
help="Net")
parser.add_option("--ns", action="store_true", dest="ns_server",
help="ns_server")
parser.add_option("--ps", action="store_true", dest="ps",
help="ps CPU, RSS and VSIZE")
parser.add_option("--sg", action="store_true", dest="sync_gateway",
help="Sync Gateway")
parser.add_option("--x", action="store_true", dest="xdcr_lag",
help="XDCR lag")
options, args = parser.parse_args()
if not args:
sys.exit("No configuration provided")
if options.active_tasks:
collector = ActiveTasks
elif options.iostat:
collector = IO
elif options.latency:
collector = Latency
elif options.observe:
collector = ObserveLatency
elif options.net:
collector = Net
elif options.ns_server:
collector = NSServer
elif options.ps:
collector = PS
elif options.sync_gateway:
collector = SyncGateway
elif options.xdcr_lag:
collector = XdcrLag
else:
sys.exit("No collector selected")
settings = Settings()
settings.read_cfg(args[0])
collector = collector(settings)
collector.update_metadata()
collector.collect()
if __name__ == '__main__':
main()
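# Example invocation from a checkout (the config file name is illustrative):
#   python -m cbagent --ns cluster.cfg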
| [
"[email protected]"
]
| |
1fa53956af9d567b5bb6cde0572f8a7cb11d736f | 70121257e52e0fd2f0895414fcee3c991737443a | /python_recipes/tfpreprocess_cifar.py | 33aaef3fdca4998831ffa5306a3bf25f080ae646 | []
| no_license | OlgaBelitskaya/cookbooks | 2e54208bb5e5157814deea6ff71cd7ce5b1e4972 | 216dde3e5617203371ed4c4bb7d9e8391640c588 | refs/heads/master | 2021-07-11T15:56:44.923442 | 2021-03-25T08:38:46 | 2021-03-25T08:38:46 | 99,447,645 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,672 | py | import warnings; warnings.filterwarnings('ignore')
import tensorflow as tf,numpy as np,pandas as pd
import tensorflow_datasets as tfds
from IPython.display import display,HTML
pd.set_option('precision',3)
tf.keras.backend.set_floatx('float64')
tfds.disable_progress_bar()
img_size=32
buffer_size,batch_size=10000,64
c1,c2,f1,f2,fs1,fs2=\
'#11ff66','#6611ff','Wallpoet','Orbitron',20,10
def dhtml(string,fontcolor=c1,font=f1,fontsize=fs1):
display(HTML("""<style>
@import 'https://fonts.googleapis.com/css?family="""\
+font+"""&effect=3d-float';</style>
<h1 class='font-effect-3d-float'
style='font-family:"""+font+\
"""; color:"""+fontcolor+\
"""; font-size:"""+str(fontsize)+"""px;'>
%s</h1>"""%string))
def load_cifar():
cifar=tfds.builder('cifar10')
cifar.download_and_prepare()
ds=cifar.as_dataset(shuffle_files=False,
split=['train','test'])
cifar_train,cifar_test=ds[0],ds[1]
dhtml(cifar.info.features['image'],c2,f2,fs2)
dhtml(cifar.info.features['label'],c2,f2,fs2)
cifar_train=cifar_train.map(
lambda item:(tf.cast(item['image'],tf.float32)/255.,
tf.cast(item['label'],tf.int32)))
cifar_test=cifar_test.map(
lambda item:(tf.cast(item['image'],tf.float32)/255.,
tf.cast(item['label'],tf.int32)))
tf.random.set_seed(123)
cifar_train=cifar_train.shuffle(
buffer_size=buffer_size,
reshuffle_each_iteration=False)
cifar_valid=cifar_train.take(buffer_size).batch(batch_size)
cifar_train=cifar_train.skip(buffer_size).batch(batch_size)
return cifar_train,cifar_valid,cifar_test
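# typical notebook use:
#   cifar_train,cifar_valid,cifar_test=load_cifar()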
| [
"[email protected]"
]
| |
c623380ca8277769f08041e14cc66374a1963eb7 | 5be7afab3f57b7b5365053700386c01bad7031e6 | /quotes.toscrape.com/1.2.quote_web_scraping/spiders/quotes_spider.py | 16b47d33a8206b7bb7caf819229b34ef62e264fb | []
| no_license | enji-coder/SCRAPY-PROJECTS | c0c76e1ef8697320a0cb9b3fa9155a158574a5c1 | bd65e6f3cf83912bc082ef39aba702db6cc4465c | refs/heads/main | 2023-06-20T19:11:36.764847 | 2021-08-04T04:39:08 | 2021-08-04T04:39:08 | 386,542,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 707 | py | import scrapy
class ExampleSpider(scrapy.Spider):
name = 'quotes'
    allowed_domains = ['quotes.toscrape.com']  # must match start_urls, or followed requests get filtered
start_urls = ['http://quotes.toscrape.com']
def parse(self, response):
all_quotes = response.css('div.quote')
        # retrieve each quote's text, author and tags
        # note: this parses only the first page of results
for quotes in all_quotes:
desc = quotes.css('span.text::text').extract()
author = quotes.css('.author::text').extract()
tag = quotes.css('div.tags a::text').extract()
yield{
'--->> desc': desc,
'author': author,
'tag': tag,
}
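# run from the Scrapy project root, writing items to a feed file, e.g.:
#   scrapy crawl quotes -o quotes.json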
| [
"[email protected]"
]
| |
e6ea0a18c418751b3458be9dd1196e1a7f5514d0 | 2d13b3206b04d663eed9c5cfe7b6d273abaab33e | /2.Algorithm/pycharm/SW Academy/20200309/harvest.py | 89098f8eaff5f7281c33299f947b60d69d741907 | []
| no_license | hdp0545/TIL | 0ba5378274f0076cd2b029581b292785a77207da | 6d6e5e54373bd71606823e97b3a5fb2d63a2784e | refs/heads/master | 2023-05-24T12:37:33.690750 | 2023-05-19T06:57:49 | 2023-05-19T06:57:49 | 235,004,133 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 316 | py | for test_case in range(1, int(input())+1):
N = int(input())
matrix = [list(map(int, [n for n in input()])) for _ in range(N)]
result = 0
c = N // 2
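    # row i keeps the 2*di + 1 cells centred on the middle column c;
    # di shrinks by one per row away from the centre row, tracing a diamond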
for i in range(N):
di = (N//2) - abs(i - (N//2))
result += sum(matrix[i][c-di:c+di+1])
print('#{} {}'.format(test_case, result)) | [
"[email protected]"
]
| |
8cd2cc4ef6bde6bb958a5160732122d1e4d5c2af | b46513de1a1fe8eadbd01518fc6b8067de277aee | /vdbm/dumb.py | 242e9f060e0925f6dc82f8bc9f9bc41d641c8dc1 | []
| no_license | vrthra/taint-demo | b4b83f28727341d1723df1157e8a8ac67fc69097 | 9eb50f214dc5178b27ba7e4945441b31091037f9 | refs/heads/master | 2021-08-30T23:21:00.371936 | 2017-12-19T20:16:05 | 2017-12-19T20:16:05 | 114,804,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,187 | py | """A dumb and slow but simple dbm clone.
For database spam, spam.dir contains the index (a text file),
spam.bak *may* contain a backup of the index (also a text file),
while spam.dat contains the data (a binary file).
XXX TO DO:
- seems to contain a bug when updating...
- reclaim free space (currently, space once occupied by deleted or expanded
items is never reused)
- support concurrent access (currently, if two processes take turns making
updates, they can mess up the index)
- support efficient access to large databases (currently, the whole index
is read when the database is opened, and some updates rewrite the whole index)
- support opening for read-only (flag = 'm')
"""
import ast as _ast  # the stdlib original parses index lines with _ast.literal_eval; this demo swaps in eval below
import io
import os
import collections.abc
import vdbm.taint
def sanitize(src): return src
for m in [io]: vdbm.taint.mark_sources(m)
sanitize = vdbm.taint.sanitizer(sanitize)
for m in [os]: vdbm.taint.mark_sinks(m)
eval = vdbm.taint.sink(eval)
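# Note on the taint wiring above (behaviour assumed from this repo's
# vdbm.taint module): reads going through ``io`` are marked as taint sources,
# ``sanitize`` clears the mark, and ``os`` calls plus ``eval`` are sinks that
# are expected to check the taint of their arguments.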
__all__ = ["error", "open"]
_BLOCKSIZE = 512
error = OSError
class _Database(collections.abc.MutableMapping):  # the bare collections alias was removed in Python 3.10
# The on-disk directory and data files can remain in mutually
# inconsistent states for an arbitrarily long time (see comments
# at the end of __setitem__). This is only repaired when _commit()
# gets called. One place _commit() gets called is from __del__(),
# and if that occurs at program shutdown time, module globals may
# already have gotten rebound to None. Since it's crucial that
# _commit() finish successfully, we can't ignore shutdown races
# here, and _commit() must not reference any globals.
_os = os # for _commit()
_io = io # for _commit()
def __init__(self, filebasename, mode, flag='c'):
self._mode = mode
self._readonly = (flag == 'r')
# The directory file is a text file. Each line looks like
# "%r, (%d, %d)\n" % (key, pos, siz)
# where key is the string key, pos is the offset into the dat
# file of the associated value's first byte, and siz is the number
# of bytes in the associated value.
self._dirfile = filebasename + '.dir'
# The data file is a binary file pointed into by the directory
# file, and holds the values associated with keys. Each value
# begins at a _BLOCKSIZE-aligned byte offset, and is a raw
# binary 8-bit string value.
self._datfile = filebasename + '.dat'
self._bakfile = filebasename + '.bak'
# The index is an in-memory dict, mirroring the directory file.
self._index = None # maps keys to (pos, siz) pairs
# Handle the creation
self._create(flag)
self._update()
def _create(self, flag):
if flag == 'n':
for filename in (self._datfile, self._bakfile, self._dirfile):
try:
                    self._os.remove(filename)  # bare _os is undefined here; use the class-level alias
except OSError:
pass
# Mod by Jack: create data file if needed
try:
f = io.open(self._datfile, 'r', encoding="Latin-1")
except OSError:
if flag not in ('c', 'n'):
import warnings
warnings.warn("The database file is missing, the "
"semantics of the 'c' flag will be used.",
DeprecationWarning, stacklevel=4)
with io.open(self._datfile, 'w', encoding="Latin-1") as f:
self._chmod(self._datfile)
else:
f.close()
# Read directory file into the in-memory index dict.
def _update(self):
self._index = {}
try:
f = io.open(self._dirfile, 'r', encoding="Latin-1")
except OSError:
self._modified = not self._readonly
else:
self._modified = False
with f:
for line in f:
line = sanitize(line.rstrip())
key, pos_and_siz_pair = eval(line, globals(), locals())
key = key.encode('Latin-1')
self._index[key] = pos_and_siz_pair
# Write the index dict to the directory file. The original directory
# file (if any) is renamed with a .bak extension first. If a .bak
# file currently exists, it's deleted.
def _commit(self):
# CAUTION: It's vital that _commit() succeed, and _commit() can
# be called from __del__(). Therefore we must never reference a
# global in this routine.
if self._index is None or not self._modified:
return # nothing to do
try:
self._os.unlink(self._bakfile)
except OSError:
pass
try:
self._os.rename(self._dirfile, self._bakfile)
except OSError:
pass
with self._io.open(self._dirfile, 'w', encoding="Latin-1") as f:
self._chmod(self._dirfile)
for key, pos_and_siz_pair in self._index.items():
# Use Latin-1 since it has no qualms with any value in any
# position; UTF-8, though, does care sometimes.
entry = "%r, %r\n" % (key.decode('Latin-1'), pos_and_siz_pair)
f.write(entry)
sync = _commit
def _verify_open(self):
if self._index is None:
raise error('DBM object has already been closed')
def __getitem__(self, key):
if isinstance(key, str):
key = key.encode('utf-8')
self._verify_open()
pos, siz = self._index[key] # may raise KeyError
with io.open(self._datfile, 'rb') as f:
f.seek(pos)
dat = f.read(siz)
return dat
# Append val to the data file, starting at a _BLOCKSIZE-aligned
# offset. The data file is first padded with NUL bytes (if needed)
# to get to an aligned offset. Return pair
# (starting offset of val, len(val))
def _addval(self, val):
with io.open(self._datfile, 'rb+') as f:
f.seek(0, 2)
pos = int(f.tell())
npos = ((pos + _BLOCKSIZE - 1) // _BLOCKSIZE) * _BLOCKSIZE
f.write(b'\0'*(npos-pos))
pos = npos
f.write(val)
return (pos, len(val))
# Write val to the data file, starting at offset pos. The caller
# is responsible for ensuring that there's enough room starting at
# pos to hold val, without overwriting some other value. Return
# pair (pos, len(val)).
def _setval(self, pos, val):
with io.open(self._datfile, 'rb+') as f:
f.seek(pos)
f.write(val)
return (pos, len(val))
# key is a new key whose associated value starts in the data file
# at offset pos and with length siz. Add an index record to
# the in-memory index dict, and append one to the directory file.
def _addkey(self, key, pos_and_siz_pair):
self._index[key] = pos_and_siz_pair
with io.open(self._dirfile, 'a', encoding="Latin-1") as f:
self._chmod(self._dirfile)
f.write("%r, %r\n" % (key.decode("Latin-1"), pos_and_siz_pair))
def __setitem__(self, key, val):
if self._readonly:
import warnings
warnings.warn('The database is opened for reading only',
DeprecationWarning, stacklevel=2)
if isinstance(key, str):
key = key.encode('utf-8')
elif not isinstance(key, (bytes, bytearray)):
raise TypeError("keys must be bytes or strings")
if isinstance(val, str):
val = val.encode('utf-8')
elif not isinstance(val, (bytes, bytearray)):
raise TypeError("values must be bytes or strings")
self._verify_open()
self._modified = True
if key not in self._index:
self._addkey(key, self._addval(val))
else:
# See whether the new value is small enough to fit in the
# (padded) space currently occupied by the old value.
pos, siz = self._index[key]
oldblocks = (siz + _BLOCKSIZE - 1) // _BLOCKSIZE
newblocks = (len(val) + _BLOCKSIZE - 1) // _BLOCKSIZE
if newblocks <= oldblocks:
self._index[key] = self._setval(pos, val)
else:
# The new value doesn't fit in the (padded) space used
# by the old value. The blocks used by the old value are
# forever lost.
self._index[key] = self._addval(val)
# Note that _index may be out of synch with the directory
# file now: _setval() and _addval() don't update the directory
# file. This also means that the on-disk directory and data
# files are in a mutually inconsistent state, and they'll
# remain that way until _commit() is called. Note that this
# is a disaster (for the database) if the program crashes
# (so that _commit() never gets called).
def __delitem__(self, key):
if self._readonly:
import warnings
warnings.warn('The database is opened for reading only',
DeprecationWarning, stacklevel=2)
if isinstance(key, str):
key = key.encode('utf-8')
self._verify_open()
self._modified = True
# The blocks used by the associated value are lost.
del self._index[key]
# XXX It's unclear why we do a _commit() here (the code always
# XXX has, so I'm not changing it). __setitem__ doesn't try to
# XXX keep the directory file in synch. Why should we? Or
# XXX why shouldn't __setitem__?
self._commit()
def keys(self):
try:
return list(self._index)
except TypeError:
raise error('DBM object has already been closed') from None
def items(self):
self._verify_open()
return [(key, self[key]) for key in self._index.keys()]
def __contains__(self, key):
if isinstance(key, str):
key = key.encode('utf-8')
try:
return key in self._index
except TypeError:
if self._index is None:
raise error('DBM object has already been closed') from None
else:
raise
def iterkeys(self):
try:
return iter(self._index)
except TypeError:
raise error('DBM object has already been closed') from None
__iter__ = iterkeys
def __len__(self):
try:
return len(self._index)
except TypeError:
raise error('DBM object has already been closed') from None
def close(self):
try:
self._commit()
finally:
self._index = self._datfile = self._dirfile = self._bakfile = None
__del__ = close
def _chmod(self, file):
if hasattr(self._os, 'chmod'):
self._os.chmod(file, self._mode)
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def open(file, flag='c', mode=0o666):
"""Open the database file, filename, and return corresponding object.
The flag argument, used to control how the database is opened in the
other DBM implementations, supports only the semantics of 'c' and 'n'
values. Other values will default to the semantics of 'c' value:
the database will always opened for update and will be created if it
does not exist.
The optional mode argument is the UNIX mode of the file, used only when
the database has to be created. It defaults to octal code 0o666 (and
will be modified by the prevailing umask).
"""
# Modify mode depending on the umask
try:
um = os.umask(0)
os.umask(um)
except AttributeError:
pass
else:
# Turn off any bits that are set in the umask
mode = mode & (~um)
if flag not in ('r', 'w', 'c', 'n'):
import warnings
warnings.warn("Flag must be one of 'r', 'w', 'c', or 'n'",
DeprecationWarning, stacklevel=2)
return _Database(file, mode, flag=flag)
| [
"[email protected]"
]
| |
569c0fe40b397c4990eb34ce4716eead233cf51f | e0ede722874d222a789411070f76b50026bbe3d8 | /practice/solution/0040_combination_sum_ii.py | 522d0209dcadd27bc9829d15d2270d94bb200cd4 | []
| no_license | kesarb/leetcode-summary-python | cd67456cb57bdff7ee227dab3930aaf9c2a6ad00 | dc45210cb2cc50bfefd8c21c865e6ee2163a022a | refs/heads/master | 2023-05-26T06:07:25.943854 | 2021-06-06T20:02:13 | 2021-06-06T20:02:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 761 | py | class Solution(object):
def combinationSum2(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
self.res = []
self.dfs(0, sorted(candidates), target, [])
return self.res
def dfs(self, start, candidates, target, value_list):
if target < 0:
return
        if not target:
            self.res.append(value_list)
            return  # combination complete; all candidates are positive, so stop here
for i in range(start, len(candidates)):
if i > start and candidates[i] == candidates[i - 1]:
continue
self.dfs(i + 1, candidates, target - candidates[i], value_list + [candidates[i]]) | [
"[email protected]"
]
| |
b7f7294d6eed3c6580709c80a3bbdedfde794b91 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03767/s342121711.py | e279603e0ad4d33f1c70bcc3c868122d20a4b586 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | n = int(input())
a = list(map(int, input().split()))
a.sort(reverse=True)
# take every second element of the descending order (indices 1, 3, 5, ...)
# and sum the first n of them
picked = []
for i, j in enumerate(a):
    if i % 2 == 1:
        picked.append(j)
answer = sum(picked[:n])
print(answer) | [
"[email protected]"
]
| |
798efca679f2d54fa6c1a967b92fe1d157e03f55 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnfaulti.py | 0db9ec8c46bcffabd20b375fd10825d23728bee3 | []
| no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 624 | py | ii = [('CookGHP3.py', 1), ('SadlMLP.py', 2), ('MartHSI2.py', 1), ('LeakWTI2.py', 1), ('KembFJ1.py', 1), ('WilkJMC3.py', 1), ('WilbRLW5.py', 1), ('GellWPT2.py', 1), ('SeniNSP.py', 1), ('LyttELD.py', 1), ('AdamHMM.py', 1), ('ClarGE.py', 3), ('DaltJMA.py', 1), ('NewmJLP.py', 3), ('CoopJBT.py', 1), ('LeakWTI4.py', 1), ('MereHHB3.py', 1), ('HowiWRL2.py', 1), ('BailJD3.py', 1), ('MartHRW.py', 1), ('KembFJ2.py', 1), ('BellCHM.py', 1), ('HaliTBC.py', 2), ('WilbRLW3.py', 1), ('AinsWRR2.py', 1), ('ClarGE3.py', 2), ('MartHRW2.py', 2), ('DibdTRL.py', 2), ('MartHSI.py', 1), ('LyelCPG3.py', 1), ('TaylIF.py', 3), ('WaylFEP.py', 1)] | [
"[email protected]"
]
| |
37d16ddeb7663fc42eb684c9fd238cc1286dc69c | 6c202bfadef2d80242567be70e2437e384a96b20 | /IO/IO.py | b7ef9cb9e50b01ac979792f2d15848dbfe3327fd | []
| no_license | hoylemd/Python | 257acecc1bc6c0e21d7c9bffc2d3b8861add9dab | 39e3863c87d83883d15e5db39a5fd5ce605b6ebb | refs/heads/master | 2018-12-28T20:33:16.497465 | 2012-10-15T02:44:36 | 2012-10-15T02:44:36 | 1,898,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | # read a string from the terminal
s = raw_input()
# print it back out!
print s | [
"[email protected]"
]
| |
0d497de579e262500807394359bad38278397bee | 90ea49bb872623a0fc117632df0232f26e078033 | /redis_main.py | 6e22c3d257bc2ca5b18745dc8e70d73601aefcc6 | [
"MIT"
]
| permissive | JX-Wang/Redis-servcie-T | e4612967a30c8c18ba5fa51aac91482e5f4f591a | 26005d0b15defa8628220512046aadc94765bd5b | refs/heads/master | 2020-06-17T04:39:28.779495 | 2019-07-09T12:35:38 | 2019-07-09T12:35:38 | 195,799,949 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | py | #!/usr/bin/env python
# coding:utf-8
"""
redis Notes
============
Date@2019/7/9
Author@Wangjunxiong
"""
import redis
try:
r = redis.Redis(host="39.106.165.57", port=6379, db=0)
r.get("msg")
except Exception as e:
print "Connect Error as -> ", str(e)
| [
"[email protected]"
]
| |
77e62c1c823d2937af521648a473b6f93b4731f7 | 2834298c6a50ff7cfada61fb028b9fd3fc796e85 | /desenvolvimento/programas/magic_square.py | bc855866757b1a2961b256809c6df378cdebb02e | []
| no_license | ernestojfcosta/IPRP_LIVRO_2013_06 | 73841c45d000dee7fc898279d4b10d008c039fd0 | a7bb48745ad2fbfeb5bd4bc334cb7203d8f204a4 | refs/heads/master | 2021-01-22T05:00:57.868387 | 2013-06-07T11:00:55 | 2013-06-07T11:00:55 | 10,548,127 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,291 | py | # =============
# magic_square_
# =============
#
# *Simple operations with magic squares.*
#
# Copyright (c) 2007 `Alec Mihailovs`_ <[email protected]>
# All rights reserved. Licensed under the `MIT License`_ .
#
# .. _magic_square: http://mihailovs.com/Alec/Python/magic_square.html
#
# .. _`Alec Mihailovs`: http://mihailovs.com/Alec/
#
# .. _`MIT License`: http://opensource.org/licenses/mit-license.php
#
########################################################################
r"""
*Simple operations with magic squares.*
**Prerequisites:**
- NumPy_
**Functions:**
- `ismagic(A)` -- test whether *A* is a magic square.
- `magic(N)` -- create an *N* by *N* magic square.
- `magic_constant(A)` -- calculate the magic constant of *A*.
**Examples:**
>>> from magic_square import *
>>> print magic(3)
[[8 1 6]
[3 5 7]
[4 9 2]]
>>> magic_constant()
15
>>> ismagic(magic(4))
True
>>> magic_constant()
34
>>> magic_constant([1, 1, 1, 1])
2
>>> ismagic([[1, 2], [3, 4]])
False
**Notes:**
(1) Function `magic(N)` produces the same magic squares as Matlab
and Octave command ``magic(N)``. The speed of calculations for *N*
close to 1000 is about 100--200 times faster than in Octave.
(2) Integer arithmetic in NumPy_ is done modulo ``2**32``. That
can give a false positive in `ismagic(A)` for integer arrays with
overflowing in row, column, or diagonal sums. To avoid that and to
avoid wrong answers in `magic_constant(A)`, in such cases the
array's ``dtype`` should be changed to ``'int64'``, or if
``'int64'`` also overflows, to ``'object'``.
**Screenshots:**
That's how it looks in SAGE_:
|SAGE|
And that's how it looks in IDLE_:
|IDLE|
**Author:**
`Alec Mihailovs`_ <[email protected]>
**Last Updated:**
February 22, 2007.
.. _NumPy: http://www.scipy.org/Download
.. _SAGE: http://sage.math.washington.edu/sage/
.. _IDLE: http://www.python.org/idle/
.. |SAGE| image:: sage.png
.. |IDLE| image:: idle.png
.. _`Alec Mihailovs`: http://mihailovs.com/Alec/
"""
__version__ = "0.2"
"""Development Status :: 3 - Alpha"""
__author__ = "Alec Mihailovs <[email protected]>"
"""
`Alec Mihailovs`_ <[email protected]>
.. _`Alec Mihailovs`: http://mihailovs.com/Alec/
"""
__docformat__ = "restructuredtext"
"""http://docutils.sourceforge.net/rst.html"""
from numpy import arange, asarray, flipud, r_, tile
from math import sqrt
_constant = None # to avoid an exception in magic_square._constant
"""Last calculated magic constant."""
def ismagic(A):
r"""
Test whether the given array is a magic square.
**Input:**
*A* -- 2D array, or a sequence that can be interpreted as such.
**Output:**
``bool`` or ``NotImplementedType`` -- ``True`` if *A* is a
magic square, ``NotImplemented`` if the number of dimensions of
*A* is not 2 or 1, or the size is not a perfect square in the
1D case, and ``False`` otherwise.
**Examples:**
>>> from magic_square import *
>>> ismagic(magic(3))
True
>>> ismagic([1, 1, 1, 1])
True
>>> ismagic([[8, 1, 6], [3, 5, 7], [4, 9, 2]])
True
>>> ismagic(1) # 0 dimensions
NotImplemented
>>> ismagic('[[1]]') # a string gives 0 dimensions
NotImplemented
>>> ismagic([[[1]]]) # 3 dimensions
NotImplemented
    >>> from numpy import array
    >>> ismagic(array([[1, 2], [3, 4]]))
False
**Notes:**
Integer arithmetic in NumPy_ is done modulo ``2**32`` as in the
following example:
>>> from numpy import array
>>> array([2**16])
array([65536])
>>> _*_
array([0])
That can give a false positive in `ismagic(A)` for integer
arrays with overflowing in row, column, or diagonal sums.
To avoid that, in such cases the array's ``dtype`` should be
changed to either ``'int64'``, or ``'object'``, see
`magic_constant(A)` Notes.
.. _NumPy: http://www.scipy.org/Download
"""
global _constant
_constant = None # may be commented out if desirable
a = asarray(A)
if a.ndim == 2:
m = flipud(a).trace()
t = (r_[a.sum(axis=0), a.sum(axis=1), a.trace()] == m).all()
if t == True: # not "is" because t is a NumPy boolean
_constant = m
return True # not "return t",
else: # to make sure that
return False # the return value is of the bool type
elif a.ndim == 1:
        s = int(round(sqrt(a.size)))  # integer side length; reshape rejects floats
        if a.size == s*s:
            return ismagic(a.reshape(s, s))
else:
return NotImplemented
else:
return NotImplemented
def magic_constant(A=None):
r"""
Magic constant of the magic square.
**Input:**
*A* -- 2D array, or a sequence that can be interpreted as such.
If not entered, the last constructed `magic(n)` or last array
*A* tested in `ismagic(A)` is used.
**Output:**
``dtype`` of the array, or Python ``long``, or ``NoneType`` --
the magic constant if the array is a magic square, or ``None``
otherwise. Python ``long`` can occur if *A* is ``None`` and the
magic constant is calculated for the last constructed
`magic(n)` with large *n*.
**Examples:**
>>> from magic_square import *
>>> magic_constant([1, 1, 1, 1])
2
>>> print magic_constant([1, 2, 3, 4])
None
>>> ismagic(magic(6))
True
>>> magic_constant()
111
>>> a = magic(5000)
>>> magic_constant()
62500002500L
**Notes:**
Integer arithmetic in NumPy_ is done modulo ``2**32``. That
makes `magic_constant(A)` to return wrong answers for integer
arrays with overflowing in row, column, or diagonal sums. For
example,
>>> magic_constant(magic(5000))
-1924506940
>>> ismagic(magic(5000))
True
>>> magic_constant()
-1924506940
Note that
>>> 62500002500L % 2**32 == -1924506940 % 2**32
True
To avoid such wrong answers, the array's ``dtype`` can be
changed to ``'int64'``, or if ``'int64'`` also overflows, to
``'object'`` (that one significantly slows down the
calculations.) In this example,
>>> from numpy import array
>>> magic_constant(array(magic(5000), dtype='int64'))
62500002500
>>> magic_constant(array(magic(5000), dtype='object')) # long
62500002500L
.. _NumPy: http://www.scipy.org/Download
"""
if A is None or ismagic(A) is True: # avoiding NotImplemented
return _constant
def magic(N):
r"""
Create an *N* by *N* magic square.
**Input:**
*N* -- an integer in some form, may be float or quotted.
**Output:**
an ``'int32'`` *N* by *N* array -- the same magic square as in
Matlab and Octave ``magic(N)`` commands. In particular, the
Siamese method is used for odd *N* (but with a different
implementation.)
**Examples:**
>>> from magic_square import *
>>> magic(4)
array([[16, 2, 3, 13],
[ 5, 11, 10, 8],
[ 9, 7, 6, 12],
[ 4, 14, 15, 1]])
>>> magic_constant()
34
>>> magic(5.0) # can be float
array([[17, 24, 1, 8, 15],
[23, 5, 7, 14, 16],
[ 4, 6, 13, 20, 22],
[10, 12, 19, 21, 3],
[11, 18, 25, 2, 9]])
>>> print magic('6') # can be quotted
[[35 1 6 26 19 24]
[ 3 32 7 21 23 25]
[31 9 2 22 27 20]
[ 8 28 33 17 10 15]
[30 5 34 12 14 16]
[ 4 36 29 13 18 11]]
>>> magic(2) # consistent with Octave
Traceback (most recent call last):
TypeError: No such magic squares exist.
>>> magic(0)
array([], shape=(0, 0), dtype=int32)
>>> magic_constant() # the empty sum is 0
0
**Notes:**
The calculations for *n* close to 1000 are about 100--200
times faster than in Octave.
"""
global _constant
n = int(N)
if n < 0 or n == 2: # consistent with Octave
raise TypeError("No such magic squares exist.")
elif n%2 == 1:
m = n>>1
b = n*n + 1
_constant = n*b>>1
return (tile(arange(1,b,n),n+2)[m:-m-1].reshape(n,n+1)[...,1:]+
tile(arange(n),n+2).reshape(n,n+2)[...,1:-1]).transpose()
elif n%4 == 0:
b = n*n + 1
_constant = n*b>>1
d=arange(1, b).reshape(n, n)
d[0:n:4, 0:n:4] = b - d[0:n:4, 0:n:4]
d[0:n:4, 3:n:4] = b - d[0:n:4, 3:n:4]
d[3:n:4, 0:n:4] = b - d[3:n:4, 0:n:4]
d[3:n:4, 3:n:4] = b - d[3:n:4, 3:n:4]
d[1:n:4, 1:n:4] = b - d[1:n:4, 1:n:4]
d[1:n:4, 2:n:4] = b - d[1:n:4, 2:n:4]
d[2:n:4, 1:n:4] = b - d[2:n:4, 1:n:4]
d[2:n:4, 2:n:4] = b - d[2:n:4, 2:n:4]
return d
else:
m = n>>1
k = m>>1
b = m*m
d = tile(magic(m), (2,2)) # that changes the _constant
_constant = _constant*8 - n - m
d[:m, :k] += 3*b
d[m:,k:m] += 3*b
d[ k, k] += 3*b
d[ k, 0] -= 3*b
d[m+k, 0] += 3*b
d[m+k, k] -= 3*b
d[:m,m:n-k+1] += b+b
d[m:,m:n-k+1] += b
d[:m, n-k+1:] += b
d[m:, n-k+1:] += b+b
return d
##################################################################
# Python 2.5 (r25:51908, Sep 19 2006, 09:52:17) [MSC v.1310 32 bit
# (Intel)] on win32
#
# >>> from magic_square import *
# >>> from time import clock
# >>> t=clock(); a=magic(1000); clock()-t
# 0.0191592494101839
# >>> t=clock(); a=magic(1001); clock()-t
# 0.018718461322123403
# >>> t=clock(); a=magic(1002); clock()-t
# 0.027449660797152831
# >>> t=clock(); ismagic(a); clock()-t
# True
# 0.021589410496389405
#################################################################
# $ ipython
# Python 2.5 (r25:51908, Jan 11 2007, 22:47:00)
# IPython 0.7.3.svn -- An enhanced Interactive Python.
#
# In [1]: from magic_square import *
#
# In [2]: time a=magic(1000)
# CPU times: user 0.02 s, sys: 0.00 s, total: 0.02 s
# Wall time: 0.02
#
# In [3]: time a=magic(1001)
# CPU times: user 0.00 s, sys: 0.01 s, total: 0.01 s
# Wall time: 0.02
#
# In [4]: time a=magic(1002)
# CPU times: user 0.00 s, sys: 0.02 s, total: 0.02 s
# Wall time: 0.03
#
# In [5]: time ismagic(a)
# CPU times: user 0.01 s, sys: 0.00 s, total: 0.01 s
# Wall time: 0.02
################################################################
# $ octave
# GNU Octave, version 2.1.73 (i686-pc-cygwin).
#
# octave:1> t=cputime();a=magic(1000);cputime()-t
# ans = 2
# octave:2> t=cputime();a=magic(1001);cputime()-t
# ans = 4.1410
# octave:3> t=cputime();a=magic(1002);cputime()-t
# ans = 4.9840
################################################################
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.