Dataset schema (each record below lists these fields in order, pipe-separated):

| field | dtype | min | max |
|---|---|---|---|
| blob_id | string | 40 | 40 |
| directory_id | string | 40 | 40 |
| path | string | 3 | 616 |
| content_id | string | 40 | 40 |
| detected_licenses | sequence | 0 | 112 |
| license_type | string (2 classes) | | |
| repo_name | string | 5 | 115 |
| snapshot_id | string | 40 | 40 |
| revision_id | string | 40 | 40 |
| branch_name | string (777 classes) | | |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 | 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 | 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 | 2023-09-06 01:08:06 |
| github_id | int64, nullable | 4.92k | 681M |
| star_events_count | int64 | 0 | 209k |
| fork_events_count | int64 | 0 | 110k |
| gha_license_id | string (22 classes) | | |
| gha_event_created_at | timestamp[us], nullable | 2012-06-04 01:52:49 | 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable | 2008-05-22 07:58:19 | 2023-08-21 12:35:19 |
| gha_language | string (149 classes) | | |
| src_encoding | string (26 classes) | | |
| language | string (1 class) | | |
| is_vendor | bool (2 classes) | | |
| is_generated | bool (2 classes) | | |
| length_bytes | int64 | 3 | 10.2M |
| extension | string (188 classes) | | |
| content | string | 3 | 10.2M |
| authors | sequence | 1 | 1 |
| author_id | string | 1 | 132 |

---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c85f6d190b977c7efe18e3b3eafff96dd5697bcc | 32ce121ca829a50fd4786b2c1470c25ccb980487 | /examples/subscribe_1.py | 9d17f9d0bf71c69ef3763322b770cc44aad7bbeb | [
"MIT"
] | permissive | py-zoid/harmony | 5aa42b0665a8624627a3ed2d7271847f2a3df7b6 | 8a94b253c36302ee6d52fd2a0748e6b91879bbef | refs/heads/main | 2023-06-23T08:15:28.610600 | 2021-05-30T01:40:04 | 2021-05-30T01:40:04 | 387,935,695 | 3 | 0 | MIT | 2021-07-20T23:16:23 | 2021-07-20T23:16:22 | null | UTF-8 | Python | false | false | 736 | py | #!/usr/bin/python3
from python_graphql_client import GraphqlClient
from json import dumps
import asyncio
def prettyPrint(data):
print(dumps(data, sort_keys=True, indent=2))
try:
client = GraphqlClient(endpoint="ws://localhost:7000/v1/graphql")
query = """
subscription {
newPendingTx {
from
to
nonce
gasPrice
queuedFor
pendingFor
pool
}
}
"""
print('Listening for any new tx, entering pending pool')
asyncio.run(client.subscribe(query=query, handle=prettyPrint))
except Exception as e:
print(e)
except KeyboardInterrupt:
print('\nStopping')
| [
"[email protected]"
] | |
0b393a21f7951461e0b7dc197f6ee0790223b2a5 | 2bdedcda705f6dcf45a1e9a090377f892bcb58bb | /src/main/output/head_father/thing_end/question_water_right/fact.py | d42798930b1c8ad06369f29d10e59c00e1537245 | [] | no_license | matkosoric/GenericNameTesting | 860a22af1098dda9ea9e24a1fc681bb728aa2d69 | 03f4a38229c28bc6d83258e5a84fce4b189d5f00 | refs/heads/master | 2021-01-08T22:35:20.022350 | 2020-02-21T11:28:21 | 2020-02-21T11:28:21 | 242,123,053 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,391 | py | using CategoriesPOC.TranslatorService;
using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
namespace CategoriesPOC.Helpers
{
public static class TranslatorHelper
{
private const string SubscriptionKey = "ec5892dd4dbc7efdd4227cd0291300f5"; //Enter here the Key from your Microsoft Translator Text subscription on http://portal.azure.com
public static Task<string> Translate(string word, string lang="")
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
lang = string.IsNullOrEmpty(lang) ? DetectLanguage(word).Result : lang;
if (lang == "en") return Task.FromResult<string>(word);
try
{
token = authTokenSource.GetAccessToken();
return translatorService.TranslateAsync(token, word, lang, "en", "text/plain", "general", string.Empty);
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
//Console.WriteLine("Translated to French: {0}", translatorService.Translate(token, "Hello World", "en", "fr", "text/plain", "general", string.Empty));
}
public static Task<GetTranslationsResponse> GetTranslations(string word, string lang = "")
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
lang = string.IsNullOrEmpty(lang) ? DetectLanguage(word).Result : lang;
try
{
token = authTokenSource.GetAccessToken();
var options = new TranslateOptions();
return translatorService.GetTranslationsAsync(token, word, lang, "en", 20, options);
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
}
public static Task<string> DetectLanguage(string str)
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
try
{
token = authTokenSource.GetAccessToken();
return translatorService.DetectAsync(token, str);
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
//translatorService.Detect(token, str);
}
}
}
| [
"[email protected]"
] | |
c82533b14aad2bc70cb7f0d32c0a011ac1ba5058 | 98810fbf90a42028915a88bfac9fb8cb8681008e | /azure-devops/azext_devops/devops_sdk/v6_0/token_administration/__init__.py | d0c5658b8f53ee15939375e036f993b970fc95b2 | [
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unicode",
"LGPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"PSF-2.0",
"PostgreSQL",
"LicenseRef-scancode-python-cwi",
"LGPL-2.1-or-later",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.1-only",
"CC-BY-4.0",
"Python-2.0",
"MPL-1.1",
"OpenSSL",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference",
"MPL-1.0",
"ISC",
"GPL-2.0-only",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"GPL-1.0-or-later",
"MPL-2.0"
] | permissive | Azure/azure-devops-cli-extension | ba87357a8243e1318f100791fc32acbb59448d05 | bd34a6fd0658a15dadf6c09c7f6217ca5ffa662b | refs/heads/master | 2023-08-29T10:56:54.228674 | 2023-07-17T04:37:06 | 2023-07-17T04:37:06 | 107,708,057 | 419 | 208 | MIT | 2023-08-02T02:10:10 | 2017-10-20T17:39:11 | Python | UTF-8 | Python | false | false | 815 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .token_administration_client import TokenAdministrationClient
__all__ = [
'SessionToken',
'TokenAdministrationRevocation',
'TokenAdminPagedSessionTokens',
'TokenAdminRevocation',
'TokenAdministrationClient'
]
| [
"[email protected]"
] | |
d9891b24891a2da8e8a76e6058e6a19b83a963c5 | ef5f369a8fb3978dbb57cdab2c0f83880fa43c36 | /amatino/tests/primary/entity.py | c47f450533c5f4325bdca50830a582096037333a | [
"MIT"
] | permissive | pypi-buildability-project/amatino-python | c8a93c849d9e97ea907d411511a0c732ee51b29e | 9178e0883b735f882729c19a7a68df68b49e057b | refs/heads/master | 2022-07-19T12:24:06.587840 | 2020-05-21T05:28:08 | 2020-05-21T05:28:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,034 | py | """
Amatino API Python Bindings
Entity Test Module
Author: [email protected]
"""
from amatino.entity import Entity
from amatino.tests.ancillary.session import SessionTest
from amatino import Session
class EntityTest(SessionTest):
"""
Test the Entity primary object
"""
def __init__(self, name='Create, retrieve, update an Entity') -> None:
self.entity = None
super().__init__(name)
self.create_session()
if not isinstance(self.session, Session):
raise RuntimeError(
'Session creation failed, consider running Session tests'
)
return
def create_entity(self) -> Entity:
entity = Entity.create(
self.session,
'Test Entity',
None,
None
)
self.entity = entity
return entity
def execute(self) -> None:
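        # exercise the full entity lifecycle: create, retrieve, update,
        # then list, recording the first failure (if any) on this test case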
assert self.session is not None
try:
entity = self.create_entity()
except Exception as error:
self.record_failure(error)
return
assert isinstance(self.entity, Entity)
try:
entity = Entity.retrieve(
self.session,
entity.id_
)
except Exception as error:
self.record_failure(error)
return
if entity.id_ != self.entity.id_:
self.record_failure('Entity ids do not match')
return
new_name = 'Updated Entity Name'
try:
updated_entity = entity.update(new_name)
except Exception as error:
self.record_failure(error)
return
if updated_entity.name != new_name:
self.record_failure('Entity name not updated: ' + str(entity.name))
return
listed_entities = Entity.retrieve_list(
session=self.session
)
assert isinstance(listed_entities, list)
assert len(listed_entities) > 0
self.record_success()
return
| [
"[email protected]"
] | |
29eb2562ec4c47d302e9848afa25bb9fe02ea5ef | 380848070205bf5cb119071eb2b32e98caca0253 | /two by two/convo.py | e7e591289ce9079002f4202d804241a043f07220 | [] | no_license | qynglang/Algorithm-intelligence | a3b3720ec8f2475457875d38cdde858c1805e910 | 8e410b87cea6abd18a8bcd45ed89cb5f436748b3 | refs/heads/master | 2020-07-03T03:30:53.923930 | 2019-09-17T09:25:21 | 2019-09-17T09:25:21 | 201,769,566 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,434 | py | # Create some wrappers for simplicity
import tensorflow as tf
def conv2d(x, W, strides=1):
    # Conv2D wrapper with ReLU activation (the bias add is disabled below)
x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')
#x = tf.nn.bias_add(x, b)
return tf.nn.relu(x)
def maxpool2d(x, k=2):
# MaxPool2D wrapper
return tf.nn.max_pool(x, ksize=[1, k, k, 1], strides=[1, k, k, 1],
padding='SAME')
# Create model
def conv_net(x, weights, biases, dropout):
    # Input arrives as a flat vector of 2000 features (40*50 pixels)
    # Reshape to match picture format [Height x Width x Channel]
    # Tensor input becomes 4-D: [Batch Size, Height, Width, Channel]
x = tf.reshape(x, shape=[-1, 40, 50, 1])
# Convolution Layer
conv1 = conv2d(x, weights['wc1'])
# Max Pooling (down-sampling)
conv1 = maxpool2d(conv1, k=3)
# Convolution Layer
conv2 = conv2d(conv1, weights['wc2'])
# Max Pooling (down-sampling)
conv2 = maxpool2d(conv2, k=3)
# Fully connected layer
# Reshape conv2 output to fit fully connected layer input
fc1 = tf.reshape(conv2, [-1, weights['wd1'].get_shape().as_list()[0]])
fc1 = tf.add(tf.matmul(fc1, weights['wd1']), biases['bd1'])
fc1 = tf.nn.relu(fc1)
# Apply Dropout
fc1 = tf.nn.dropout(fc1, dropout)
# Output, class prediction
out = tf.add(tf.matmul(fc1, weights['out']), biases['out'])
return out | [
"[email protected]"
] | |
1e4bee103070178cb11759b33a9988d636e01631 | bf26ed0b9ef5a6d846df05a748dcc7d4799f1164 | /chapter-2/bhp.py | 030e9f74b45e2cd6fb2c75dd983a94d776a09543 | [] | no_license | cornh0lio/blackhat-python | 41cd694c845c982ff3384a3620017e64a799afe8 | b1373b759435cc50a53ce7b05bca906523c924b9 | refs/heads/master | 2021-06-15T20:04:46.897711 | 2017-01-16T15:46:38 | 2017-01-16T15:46:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,874 | py | import sys
import socket
import getopt
import threading
import subprocess
# define some global variables
listen = False
command = False
upload = False
execute = ""
target = ""
upload_destination = ""
port = 0
# Print the help for the tool
def usage():
print "BHP Net Tool"
print
print "Usage: bhp.py -t <target> -p <port>"
print "-l --listen - listen on [host]:[port] for incoming connections"
print "-e --execute=<file_to_run> - execute the given file upon receiving a connection"
print "-c --command - initialize a command shell"
print "-u --upload=<destination> - upon receiving a connection upolad a file and write it to [destination]"
print
print
print "Examples:"
print "bhp.py -t 192.168.0.1 -p 5555 -l -c"
print "bhp.py -t 192.168.0.1 -p 5555 -l -u=c:\\target.exe"
print "bhp.py -t 192.168.0.1 -p 5555 -l -e=\"cat /etc/passwd\""
print "echo 'ABCDEFGHI' | ./bhp.py -t 192.168.11.12 -p 135"
sys.exit(0)
def main():
global listen
global port
global execute
global command
global upload_destination
global target
if not len(sys.argv[1:]):
usage()
# read the commandline options
try:
        opts, args = getopt.getopt(sys.argv[1:],"hle:t:p:cu:", ["help","listen","execute=","target=","port=","command","upload="])
except getopt.GetoptError as err:
print str(err)
usage()
for o,a in opts:
if o in ("-h","--help"):
usage()
elif o in ("-l","--listen"):
listen = True
elif o in ("-e","--execute"):
execute = a
elif o in ("-c","--command"):
command = True
elif o in ("-t","--target"):
target = a
elif o in ("-p","--port"):
port = int(a)
else:
assert False, "Unhandled Option"
# are we going to listen or just send data from stdin?
if not listen and len(target) and port > 0:
# read in the buffer from the commandline
# this will block, so send CTRL-D if not sending input
# to stdin
buffer = sys.stdin.read()
#send data off
client_sender(buffer)
# we are going to listen and potentially
# upload things, execute commands, and drop a shell back
# depending on our command line options above
if listen:
server_loop()
def client_sender(buffer):
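    # connect to the target, push any stdin data, then loop shuttling
    # responses from the socket to the console and user input back out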
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
# connect to our target host
client.connect((target,port))
if len(buffer):
client.send(buffer)
while True:
# now wait for data back
recv_len = 1
response = ""
while recv_len:
data = client.recv(4096)
recv_len = len(data)
response += data
if recv_len < 4096:
break
print response
# wait for more input
buffer = raw_input("")
buffer += "\n"
# send it off
client.send(buffer)
except:
print "[*] Exception! Exiting!."
client.close()
def server_loop():
global target
# if no target is defined, we listen on all interfaces
if not len(target):
target = "0.0.0.0"
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((target,port))
server.listen(5)
# this is the while where we manage incoming connections
while True:
client_socket, addr = server.accept()
        # spin off a thread to handle our new client
client_thread = threading.Thread(target=client_handler,args=(client_socket,))
client_thread.start()
def run_command(command):
# trim the newline
command = command.rstrip()
# run the command and get the output back
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
except:
output = "Failed to execute command.\r\n"
# send the output back to the client
return output
def client_handler(client_socket):
    global upload_destination
global execute
global command
# check for upload
if len(upload_destination):
# read in all of the bytes and write to our destination
file_buffer = ""
# keep reading data until none is available
while True:
data = client_socket.recv(1024)
if not data:
break
else:
file_buffer += data
# now we take these bytes and try to write them out
try:
# We open a file descriptor in write + binary mode
file_descriptor = open(upload_destination, "wb")
file_descriptor.write(file_buffer)
file_descriptor.close()
# send ack to the client to confirm that we wrote the file
client_socket.send("Successfully saved the file to %s\r\n" % upload_destination)
except:
client_socket.send("Failed to save the file to %s\r\n" % upload_destination)
if len(execute):
output = run_command(execute)
client_socket.send(output)
# now we go into another loop if a command shell is requested
if command:
while True:
# show a simple prompt
client_socket.send("<BHP:#> ")
# now we receive until we see a linefeed
cmd_buffer = ""
while "\n" not in cmd_buffer:
cmd_buffer += client_socket.recv(1024)
# get back the command output
response = run_command(cmd_buffer)
# send back the response
client_socket.send(response)
main()
| [
"[email protected]"
] | |
3a446d64643255b8eed4cfce2ad8f4db60a1e0f3 | 48d0cfbe1ba313740a94ef75f25e685bbc8aa7f6 | /santa/content/tests/base.py | cc92b8a7faf8d2364a79342c3604ce91a1dbb1af | [] | no_license | taito-zz/santa.content | 72995e455b3ceec7842fc5923a607ba5963268cd | dd497f48918212c61bd429e1e7130a9b1c4620f5 | refs/heads/master | 2021-05-27T14:58:47.513815 | 2012-10-30T19:10:14 | 2012-10-30T19:10:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,326 | py | from plone.app.testing import FunctionalTesting
from plone.app.testing import IntegrationTesting
from plone.app.testing import PLONE_FIXTURE
from plone.app.testing import PloneSandboxLayer
from plone.testing import z2
import unittest2 as unittest
class SantaContentLayer(PloneSandboxLayer):
defaultBases = (PLONE_FIXTURE,)
def setUpZope(self, app, configurationContext):
"""Set up Zope."""
# Load ZCML
import santa.content
self.loadZCML(package=santa.content)
z2.installProduct(app, 'santa.content')
def setUpPloneSite(self, portal):
"""Set up Plone."""
# Install into Plone site using portal_setup
self.applyProfile(portal, 'santa.content:default')
def tearDownZope(self, app):
"""Tear down Zope."""
z2.uninstallProduct(app, 'santa.content')
FIXTURE = SantaContentLayer()
INTEGRATION_TESTING = IntegrationTesting(
bases=(FIXTURE,), name="SantaContentLayer:Integration")
FUNCTIONAL_TESTING = FunctionalTesting(
bases=(FIXTURE,), name="SantaContentLayer:Functional")
class IntegrationTestCase(unittest.TestCase):
"""Base class for integration tests."""
layer = INTEGRATION_TESTING
class FunctionalTestCase(unittest.TestCase):
"""Base class for functional tests."""
layer = FUNCTIONAL_TESTING
| [
"[email protected]"
] | |
d6082f52df1a5cd5cf6235e03479e09e57a2afe2 | e4cae3759a053ca88a936e87e3329aec203608db | /sdk/compute/azure-mgmt-compute/tests/test_mgmt_compute_disks.py | b92c07deeb502022a7dd5f1016afe72894d51615 | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | a-santamaria/azure-sdk-for-python | c9413858747ccfcec2fbbefd50922c515cb4f634 | 9dec418ad621ac75f217e56e901f15b6624800b0 | refs/heads/master | 2022-05-19T00:01:07.604118 | 2021-02-01T22:52:25 | 2021-02-01T22:52:25 | 202,599,021 | 0 | 0 | MIT | 2019-08-15T19:22:33 | 2019-08-15T19:22:32 | null | UTF-8 | Python | false | false | 15,792 | py | # coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# covered ops:
# snapshots: 8/8
# disks: 8/8
# disk_encryption_sets: 6/6
# images: 6/6
import unittest
import azure.mgmt.compute
from azure.profiles import ProfileDefinition
from devtools_testutils import AzureMgmtTestCase, RandomNameResourceGroupPreparer
AZURE_LOCATION = 'eastus'
class MgmtComputeTestMultiVersion(AzureMgmtTestCase):
def setUp(self):
super(MgmtComputeTestMultiVersion, self).setUp()
self.mgmt_client = self.create_mgmt_client(
azure.mgmt.compute.ComputeManagementClient
)
self.mgmt_client.profile = ProfileDefinition({
self.mgmt_client._PROFILE_TAG: {
None: "2019-07-01",
'availability_sets': '2019-07-01',
'dedicated_host_groups': '2019-07-01',
'dedicated_hosts': '2019-07-01',
'disk_encryption_sets': '2019-11-01',
'disks': '2019-03-01', # test old version
'images': '2019-07-01',
'log_analytics': '2019-07-01',
'operations': '2019-07-01',
'proximity_placement_groups': '2019-07-01',
'resource_skus': '2019-04-01',
'snapshots': '2019-11-01',
'usage': '2019-07-01',
'virtual_machine_extension_images': '2019-07-01',
'virtual_machine_extensions': '2019-07-01',
'virtual_machine_images': '2019-07-01',
'virtual_machine_run_commands': '2019-07-01',
'virtual_machine_scale_set_extensions': '2019-07-01',
'virtual_machine_scale_set_rolling_upgrades': '2019-07-01',
'virtual_machine_scale_set_vm_extensions': '2019-07-01',
'virtual_machine_scale_set_vms': '2019-07-01',
'virtual_machine_scale_sets': '2019-07-01',
'virtual_machine_sizes': '2019-07-01',
'virtual_machines': '2019-07-01',
}},
self.mgmt_client._PROFILE_TAG + " test"
)
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
def test_compute_disks_multi(self, resource_group):
DISK_NAME = self.get_resource_name("disknamex")
# Create an empty managed disk.[put]
BODY = {
"location": "eastus",
"creation_data": {
"create_option": "Empty"
},
"disk_size_gb": "200"
}
result = self.mgmt_client.disks.begin_create_or_update(resource_group.name, DISK_NAME, BODY)
result = result.result()
# Get information about a managed disk.[get]
result = self.mgmt_client.disks.get(resource_group.name, DISK_NAME)
# List all managed disks in a resource group.[get]
result = self.mgmt_client.disks.list_by_resource_group(resource_group.name)
# List all managed disks in a subscription.[get]
result = self.mgmt_client.disks.list()
# Update disk.[patch]
BODY = {
"disk_size_gb": "200"
}
result = self.mgmt_client.disks.begin_update(resource_group.name, DISK_NAME, BODY)
result = result.result()
        # Grant access disk
BODY = {
"access": "Read",
"duration_in_seconds": "1800"
}
result = self.mgmt_client.disks.begin_grant_access(resource_group.name, DISK_NAME, BODY)
result = result.result()
# Revoke access disk
result = self.mgmt_client.disks.begin_revoke_access(resource_group.name, DISK_NAME)
result = result.result()
# Delete disk
result = self.mgmt_client.disks.begin_delete(resource_group.name, DISK_NAME)
result = result.result()
class MgmtComputeTest(AzureMgmtTestCase):
def setUp(self):
super(MgmtComputeTest, self).setUp()
self.mgmt_client = self.create_mgmt_client(
azure.mgmt.compute.ComputeManagementClient
)
if self.is_live:
from azure.mgmt.keyvault import KeyVaultManagementClient
self.keyvault_client = self.create_mgmt_client(
KeyVaultManagementClient
)
# self.network_client = self.create_mgmt_client(
# azure.mgmt.network.NetworkManagementClient
# )
def create_key(self, group_name, location, key_vault, tenant_id, object_id):
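        # On live runs, provision a Key Vault that grants the test principal
        # full key permissions and create an RSA key inside it; recorded
        # (playback) runs skip the service calls and return placeholder ids.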
if self.is_live:
result = self.keyvault_client.vaults.begin_create_or_update(
group_name,
key_vault,
{
'location': location,
'properties': {
'sku': {
'family': "A",
'name': 'standard'
},
'tenant_id': tenant_id,
"access_policies": [
{
"tenant_id": tenant_id,
"object_id": object_id,
"permissions": {
"keys": [
"encrypt",
"decrypt",
"wrapKey",
"unwrapKey",
"sign",
"verify",
"get",
"list",
"create",
"update",
"import",
"delete",
"backup",
"restore",
"recover",
"purge"
]
}
}
],
'enabled_for_disk_encryption': True,
}
}
).result()
vault_url = result.properties.vault_uri
vault_id = result.id
from azure.keyvault.keys import KeyClient
credentials = self.settings.get_azure_core_credentials()
key_client = KeyClient(vault_url, credentials)
# [START create_key]
from dateutil import parser as date_parse
expires_on = date_parse.parse("2050-02-02T08:00:00.000Z")
key = key_client.create_key(
"testkey",
"RSA",
size=2048,
expires_on=expires_on
)
return (vault_id, key.id)
else:
return ('000', '000')
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
def test_compute_disk_encryption(self, resource_group):
SUBSCRIPTION_ID = self.settings.SUBSCRIPTION_ID
TENANT_ID = self.settings.TENANT_ID
CLIENT_OID = self.settings.CLIENT_OID if self.is_live else "000"
RESOURCE_GROUP = resource_group.name
KEY_VAULT_NAME = self.get_resource_name("keyvaultxmmx")
DISK_ENCRYPTION_SET_NAME = self.get_resource_name("diskencryptionset")
VAULT_ID, KEY_URI = self.create_key(RESOURCE_GROUP, AZURE_LOCATION, KEY_VAULT_NAME, TENANT_ID, CLIENT_OID)
# Create a disk encryption set.[put]
BODY = {
"location": "eastus",
"identity": {
"type": "SystemAssigned"
},
"active_key": {
"source_vault": {
# "id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.KeyVault/vaults/" + VAULT_NAME + ""
"id": VAULT_ID
},
# "key_url": "https://myvmvault.vault-int.azure-int.net/keys/{key}/{key_version}"
"key_url": KEY_URI
}
}
result = self.mgmt_client.disk_encryption_sets.begin_create_or_update(resource_group.name, DISK_ENCRYPTION_SET_NAME, BODY)
result = result.result()
# # Get information about a disk encryption set.[get]
result = self.mgmt_client.disk_encryption_sets.get(resource_group.name, DISK_ENCRYPTION_SET_NAME)
# List all disk encryption sets in a resource group.[get]
result = self.mgmt_client.disk_encryption_sets.list_by_resource_group(resource_group.name)
# List all disk encryption sets in a subscription.[get]
result = self.mgmt_client.disk_encryption_sets.list()
# Update a disk encryption set.[patch]
BODY = {
"active_key": {
"source_vault": {
# "id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.KeyVault/vaults/" + VAULT_NAME + ""
"id": VAULT_ID
},
"key_url": KEY_URI
# "key_url": "https://myvmvault.vault-int.azure-int.net/keys/{key}/{key_version}"
},
"tags": {
"department": "Development",
"project": "Encryption"
}
}
result = self.mgmt_client.disk_encryption_sets.begin_update(resource_group.name, DISK_ENCRYPTION_SET_NAME, BODY)
result = result.result()
# # Delete a disk encryption set.[delete]
result = self.mgmt_client.disk_encryption_sets.begin_delete(resource_group.name, DISK_ENCRYPTION_SET_NAME)
result = result.result()
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
def test_compute_shot(self, resource_group):
SUBSCRIPTION_ID = self.settings.SUBSCRIPTION_ID
RESOURCE_GROUP = resource_group.name
DISK_NAME = self.get_resource_name("disknamex")
SNAPSHOT_NAME = self.get_resource_name("snapshotx")
IMAGE_NAME = self.get_resource_name("imagex")
# Create an empty managed disk.[put]
BODY = {
"location": "eastus",
"creation_data": {
"create_option": "Empty"
},
"disk_size_gb": "200"
}
result = self.mgmt_client.disks.begin_create_or_update(resource_group.name, DISK_NAME, BODY)
result = result.result()
# Create a snapshot by copying a disk.
BODY = {
"location": "eastus",
"creation_data": {
"create_option": "Copy",
"source_uri": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Compute/disks/" + DISK_NAME
}
}
result = self.mgmt_client.snapshots.begin_create_or_update(resource_group.name, SNAPSHOT_NAME, BODY)
result = result.result()
# Create a virtual machine image from a snapshot.[put]
BODY = {
"location": "eastus",
"storage_profile": {
"os_disk": {
"os_type": "Linux",
"snapshot": {
"id": "subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Compute/snapshots/" + SNAPSHOT_NAME
},
"os_state": "Generalized"
},
"zone_resilient": False
},
"hyper_v_generation": "V1" # TODO: required
}
result = self.mgmt_client.images.begin_create_or_update(resource_group.name, IMAGE_NAME, BODY)
result = result.result()
# Get information about a snapshot.[get]
result = self.mgmt_client.snapshots.get(resource_group.name, SNAPSHOT_NAME)
# Get information about a virtual machine image.[get]
result = self.mgmt_client.images.get(resource_group.name, IMAGE_NAME)
# List all virtual machine images in a resource group.[get]
result = self.mgmt_client.images.list_by_resource_group(resource_group.name)
# List all snapshots in a resource group.[get]
result = self.mgmt_client.snapshots.list_by_resource_group(resource_group.name)
# List all virtual machine images in a subscription.[get]
result = self.mgmt_client.images.list()
# List all snapshots in a subscription.[get]
result = self.mgmt_client.snapshots.list()
# Updates tags of an Image.[patch]
BODY = {
# "properties": {
# "source_virtual_machine": {
# "id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Compute/virtualMachines/" + VIRTUAL_MACHINE_NAME + ""
# },
# "hyper_vgeneration": "V1"
# },
"tags": {
"department": "HR"
}
}
result = self.mgmt_client.images.begin_update(resource_group.name, IMAGE_NAME, BODY)
result = result.result()
# Update a snapshot by
BODY = {
"creation_data": {
"create_option": "Copy",
"source_uri": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Compute/disks/" + DISK_NAME
}
}
result = self.mgmt_client.snapshots.begin_update(resource_group.name, SNAPSHOT_NAME, BODY)
result = result.result()
        # Grant access snapshot (TODO: need swagger file)
BODY = {
"access": "Read",
"duration_in_seconds": "1800"
}
result = self.mgmt_client.snapshots.begin_grant_access(resource_group.name, SNAPSHOT_NAME, BODY)
result = result.result()
# Revoke access snapshot (TODO: need swagger file)
result = self.mgmt_client.snapshots.begin_revoke_access(resource_group.name, SNAPSHOT_NAME)
result = result.result()
# Delete a image. (TODO: need a swagger file)
result = self.mgmt_client.images.begin_delete(resource_group.name, IMAGE_NAME)
result = result.result()
# Delete snapshot (TODO: need swagger file)
result = self.mgmt_client.snapshots.begin_delete(resource_group.name, SNAPSHOT_NAME)
result = result.result()
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
def test_compute_disks(self, resource_group):
DISK_NAME = self.get_resource_name("disknamex")
# Create an empty managed disk.[put]
BODY = {
"location": "eastus",
"creation_data": {
"create_option": "Empty"
},
"disk_size_gb": "200"
}
result = self.mgmt_client.disks.begin_create_or_update(resource_group.name, DISK_NAME, BODY)
result = result.result()
# Get information about a managed disk.[get]
result = self.mgmt_client.disks.get(resource_group.name, DISK_NAME)
# List all managed disks in a resource group.[get]
result = self.mgmt_client.disks.list_by_resource_group(resource_group.name)
# List all managed disks in a subscription.[get]
result = self.mgmt_client.disks.list()
# Update disk.[patch]
BODY = {
"disk_size_gb": "200"
}
result = self.mgmt_client.disks.begin_update(resource_group.name, DISK_NAME, BODY)
result = result.result()
        # Grant access disk
BODY = {
"access": "Read",
"duration_in_seconds": "1800"
}
result = self.mgmt_client.disks.begin_grant_access(resource_group.name, DISK_NAME, BODY)
result = result.result()
# Revoke access disk
result = self.mgmt_client.disks.begin_revoke_access(resource_group.name, DISK_NAME)
result = result.result()
# Delete disk
result = self.mgmt_client.disks.begin_delete(resource_group.name, DISK_NAME)
result = result.result()
| [
"[email protected]"
] | |
e9583dfd136ae69e44da411101e8d5ef314a7351 | e446c2c600fbe6e279acf05eac3079643b4c3cf3 | /14_3_21_algorithms_data_structures/recursion.py | cc1a8f708ffee28156c8cb439c8770e67c427f73 | [] | no_license | solomoniosif/SDA_Python_Exercises | 2208298240c7788a2ddd93adb68870d5d5265683 | 691cd5328bbec8fa53f6a6f26bc8071d3e70ef58 | refs/heads/master | 2023-03-28T15:02:49.689022 | 2021-04-03T09:53:26 | 2021-04-03T09:53:26 | 328,112,039 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 979 | py | from timer import time_execution
# import sys
#
#
# sys.setrecursionlimit(10 ** 6)
@time_execution
def recursive_factorial(n):
def factorial(n):
if n == 0:
return 1
return n * factorial(n - 1)
return factorial(n)
@time_execution
def iterative_factorial(n):
if n < 0:
return 0
elif n == 0 or n == 1:
return 1
else:
fact = 1
while n > 1:
fact *= n
n -= 1
return fact
# print(f"5! = {recursive_factorial(777)}")
# print(f"5! = {iterative_factorial(777)}")
@time_execution
def recursive_fibonacci(n):
def inner(n):
if n in [0, 1]:
return n
return inner(n - 1) + inner(n - 2)
return inner(n)
@time_execution
def iterative_fibonacci(n):
i = 0
b = 1
a = 0
while i < n:
c = b + a
b = a
a = c
i += 1
    return a  # a ends as fib(n); also valid when the loop never runs (n == 0)
print(recursive_fibonacci(32))
print(iterative_fibonacci(32))
| [
"[email protected]"
] | |
e811ec107a083b1f682d0ad79cbf097409f2116a | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/Control/PerformanceMonitoring/PerfMonTests/python/IoTestsLib.py | 2a4ec4de377d084a030efa749b09c1a7a575b8d4 | [] | no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,626 | py | # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
## @file PerfMonTests.IoTestsLib
## @date April 2009
__author__ = "Sebastien Binet <[email protected]>"
__version__ = "$Revision: 1.1 $"
__doc__ = """
a set of simple-minded functions to test ROOT I/O (from python)
"""
from array import array as carray
import random
# set some dummy seed, for reproducibility
random.seed(20080910) # first LHC startup :)
from os import sysconf
_pagesz = sysconf('SC_PAGE_SIZE') / 1024 # in kb
_py_dtype_to_root = {
'i' : 'I',
'f' : 'F',
}
"""translates the usual python 'dtype' codes to the ROOT/CINT ones
"""
from PyUtils.Decorators import forking
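# @forking runs each decorated function in a forked child process,
# presumably so each I/O test's memory usage stays isolated from the parent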
def pymon():
"""returns (cpu[ms], vmem[kb], rss[kb])
"""
from resource import getrusage, RUSAGE_SELF
from string import split as ssplit
cpu = getrusage(RUSAGE_SELF)
mem = open('/proc/self/statm','r')
cpu = (cpu.ru_utime+cpu.ru_stime) * 1e3 # in milliseconds
mem = ssplit(mem.readlines()[0])
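    # /proc/self/statm reports sizes in pages: field 0 is the total program
    # size (vmem), field 1 the resident set size (rss); convert both to kB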
vmem = int(mem[0])*_pagesz
rss = int(mem[1])*_pagesz
return cpu,vmem,rss
def comp_delta(d, verbose=False):
assert 'start' in d
assert 'stop' in d
assert len(d['start']) == 3
assert len(d['stop']) == 3
if verbose:
print repr(d)
delta = { 'cpu' : d['stop'][0] - d['start'][0],
'vmem': d['stop'][1] - d['start'][1],
'rss' : d['stop'][2] - d['start'][2],
'nbytes': -1
}
if 'nbytes' in d:
delta['nbytes'] = d['nbytes']
print "==> cpu: %(cpu)8.3f ms vmem: %(vmem)i kB rss: %(rss)i kB nbytes: %(nbytes)i kB" % delta
return delta
def import_ROOT():
import sys
# for ROOT...
if not '-b' in sys.argv:
sys.argv.insert(1, '-b')
import ROOT
return ROOT
ROOT = import_ROOT()
@forking
def io_test1_write(fname, nevts=1000, sz=1000, dtype='i'):
"""testing writing 1000 evts with arrays of 1000- integers
"""
f = ROOT.TFile.Open(fname, 'RECREATE')
t = ROOT.TTree('t', 't')
nevts= nevts
imax = sz
data = carray(dtype, imax*[ 0 ] )
#t.Branch( 'mynum', n, 'mynum/I' )
t.Branch( 'i', data, 'data[%d]/%s'%(imax, _py_dtype_to_root[dtype]) )
from random import randint
fill = t.Fill
for i in xrange(nevts):
for j in xrange(sz):
data[j] = randint(0, sz)
fill()
f.Write()
f.Close()
return
@forking
def io_test1_read(fname, verbose=False):
f = ROOT.TFile.Open(fname, 'READ')
t = f.Get('t')
assert t, "could not find tree 't'"
nevts = t.GetEntries()
if verbose:
print "::: reading [%s] (%i events) [sz=%s kB]" % (fname, nevts,
f.GetSize()/1024)
tot_bytes = 0
get_entry = t.GetEntry
start = pymon()
for ievt in xrange(nevts):
# copy next entry into memory and verify
nb = get_entry(ievt)
if nb <= 0:
continue
tot_bytes += nb
# use the values directly from the tree
data = getattr(t, 'data')
sz = len(data)
assert sz > 0
#print "::: ievt [%3i] : #data = %s" % (ievt, sz)
stop = pymon()
del t
f.Close()
return {'start' : start,
'stop' : stop,
'nbytes': tot_bytes/1024}
@forking
def io_test2_write(fname, nevts=1000, sz=1000, dtype='i'):
"""testing writing 1000 evts with arrays of (variable length) 1000- ints
"""
f = ROOT.TFile.Open(fname, 'RECREATE')
t = ROOT.TTree('t', 't')
nevts= nevts
imax = sz
n = carray( 'i', [ 0 ] )
data = carray( dtype, imax*[ 0 ] )
t.Branch( 'sz', n, 'sz/I' )
t.Branch( 'data', data, 'data[sz]/%s'%_py_dtype_to_root[dtype])
from random import randint
fill = t.Fill
for i in xrange(nevts):
jmax = randint(1, sz)
n[0] = jmax
for j in xrange(jmax):
data[j] = randint(0, sz)
fill()
f.Write()
f.Close()
return
@forking
def io_test2_read(fname, verbose=False):
f = ROOT.TFile.Open(fname, 'READ')
t = f.Get('t')
assert t, "could not find tree 't'"
nevts = t.GetEntries()
if verbose:
print "::: reading [%s] (%i events) [sz=%s kB]" % (fname, nevts,
f.GetSize()/1024)
tot_bytes = 0
get_entry = t.GetEntry
start = pymon()
for ievt in xrange(nevts):
# copy next entry into memory and verify
nb = get_entry(ievt)
if nb <= 0:
continue
tot_bytes += nb
# use the values directly from the tree
data = getattr(t, 'data')
sz = len(data)
assert sz > 0
#print "::: ievt [%3i] : #data = %s" % (ievt, sz)
stop = pymon()
del t
f.Close()
return {'start' : start,
'stop' : stop,
'nbytes': tot_bytes/1024}
### tests ---------------------------------------------------------------------
if __name__ == "__main__":
# FIXME: use 'nose' instead... for automatical test discovery
print "::: running all tests..."
nreads = 10 # nbr of times to repeat each 'read' test
mon_data = {}
# -----
# io_test1
# -----
# io_test1 - ints
fname = '/tmp/out_test1_ints.root'
w = io_test1_write(fname=fname,
nevts=100000, sz=1000,
dtype='i')
mon_data['io_test1-ints'] = []
for _ in xrange(nreads):
mon_data['io_test1-ints'].append(comp_delta(io_test1_read(fname=fname)))
# io_test1 - floats
fname = '/tmp/out_test1_flts.root'
w = io_test1_write(fname=fname,
nevts=100000, sz=1000,
dtype='f')
mon_data['io_test1-flts'] = []
for _ in xrange(nreads):
mon_data['io_test1-flts'].append(comp_delta(io_test1_read(fname=fname)))
# -----
# io_test2
# -----
# io_test2 - ints
fname = '/tmp/out_test2_ints.root'
w = io_test2_write(fname=fname,
nevts=100000, sz=1000,
dtype='i')
mon_data['io_test2-ints'] = []
for _ in xrange(nreads):
mon_data['io_test2-ints'].append(comp_delta(io_test2_read(fname=fname)))
# io_test2 - floats
fname = '/tmp/out_test2_floats.root'
w = io_test2_write(fname=fname,
nevts=100000, sz=1000,
dtype='f')
mon_data['io_test2-flts'] = []
for _ in xrange(nreads):
mon_data['io_test2-flts'].append(comp_delta(io_test2_read(fname=fname)))
print mon_data
| [
"[email protected]"
] | |
96dc1b0790b37b38c91a4371bce1044a9a8221dc | cb95d669749407510b9dd87518bea60d10cd478d | /migration/change_uq.py | 38a176fff51b7662fedf44cea3ac89921c8ccc94 | [] | no_license | patarapolw/zhlib | 465af0898912afe57ea99595bde6faf562124851 | 66b61c2a607eb0bff2cfe7f51c45789d865db044 | refs/heads/master | 2020-04-02T03:45:57.039084 | 2018-11-01T02:57:37 | 2018-11-01T02:57:37 | 153,982,936 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | from playhouse.migrate import SqliteMigrator, migrate
from zhlib import zh
if __name__ == '__main__':
migrator = SqliteMigrator(zh.database)
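    # drop the old sentence_chinese unique index and replace it with a
    # composite UNIQUE index over (sentence, pinyin)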
migrate(
migrator.drop_index('sentence', 'sentence_chinese'),
migrator.add_index('sentence', ('sentence', 'pinyin'), True)
)
| [
"[email protected]"
] | |
02daa1468251ba4567e1b5a2cf22a54aae0bebef | 4e29395020ce78f435e75e0b3f1e09b227f6f4d8 | /ataraxia/inference/ocr/recognition/crann/src/crannRec/recurrent.py | 63481a0bd9fe0a199e952dd6ae3f352fa5fef01b | [] | no_license | luoyangustc/argus | 8b332d94af331a2594f5b1715ef74a4dd98041ad | 2ad0df5d7355c3b81484f6625b82530b38b248f3 | refs/heads/master | 2020-05-25T21:57:37.815370 | 2019-05-22T09:42:40 | 2019-05-22T09:42:40 | 188,005,059 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,461 | py | #coding:UTF-8
import torch.nn as nn
import time
class CompositeLSTM(nn.Module):
def __init__(self, nIn, nHidden, nOut, multi_gpu=False):
super(CompositeLSTM, self).__init__()
self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True)
self.embedding = nn.Linear(nHidden * 2, nOut)
self.multi_gpu = multi_gpu
initrange = 0.08
print("Initializing Bidirectional LSTM...")
for weight in self.rnn.parameters():
weight.data.uniform_(-initrange, initrange)
def forward(self, input):
if self.multi_gpu:
self.rnn.flatten_parameters()
start = time.time()
recurrent, _ = self.rnn(input)
print('Recurrent Net cost: {:.3f}'.format(time.time() - start))
T, b, h = recurrent.size()
t_rec = recurrent.view(T*b, h)
output = self.embedding(t_rec)
output = output.view(T, b, -1)
return output
class MLayerLSTM(nn.Module):
def __init__(self, nIn, nHidden, nLayer, nClass, dropout, multi_gpu=False):
super(MLayerLSTM, self).__init__()
self.rnn = nn.LSTM(nIn, nHidden, nLayer, dropout=dropout, bidirectional=True)
self.embedding = nn.Linear(nHidden * 2, nClass)
self.multi_gpu = multi_gpu
initrange = 0.08
print("Initializing Bidirectional LSTM...")
for weight in self.rnn.parameters():
weight.data.uniform_(-initrange, initrange)
def forward(self, input):
if self.multi_gpu:
self.rnn.flatten_parameters()
recurrent, _ = self.rnn(input)
T, b, h = recurrent.size()
t_rec = recurrent.view(T*b, h)
output = self.embedding(t_rec)
output = output.view(T, b, -1)
return output
def compositelstm(rnn_conf, n_class):
in_dim = rnn_conf['n_In']
n_hidden = rnn_conf['n_Hidden']
multi_gpu = rnn_conf['multi_gpu']
model = nn.Sequential(
CompositeLSTM(in_dim, n_hidden, n_hidden, multi_gpu),
CompositeLSTM(n_hidden, n_hidden, n_class, multi_gpu)
)
return model
def lstm_2layer(rnn_conf, n_class):
in_dim = rnn_conf['n_In']
n_hidden = rnn_conf['n_Hidden']
n_layer = rnn_conf['n_Layer']
dropout = rnn_conf['dropout']
multi_gpu = rnn_conf['multi_gpu']
model = MLayerLSTM(in_dim, n_hidden, n_layer, n_class, dropout, multi_gpu)
return model
#TODO Implement Seq2Seq model
#class Seq2Seq(nn.Module):
| [
"[email protected]"
] | |
1306b4cb3c6da529dce11dc8c45647ba1081ed1c | e42a61b7be7ec3412e5cea0ffe9f6e9f34d4bf8d | /a10sdk/core/network/network.py | 5ee559ace168f3318a4fd1021519bc80c203ad66 | [
"Apache-2.0"
] | permissive | amwelch/a10sdk-python | 4179565afdc76cdec3601c2715a79479b3225aef | 3e6d88c65bd1a2bf63917d14be58d782e06814e6 | refs/heads/master | 2021-01-20T23:17:07.270210 | 2015-08-13T17:53:23 | 2015-08-13T17:53:23 | 40,673,499 | 0 | 0 | null | 2015-08-13T17:51:35 | 2015-08-13T17:51:34 | null | UTF-8 | Python | false | false | 6,810 | py | from a10sdk.common.A10BaseClass import A10BaseClass
class Network(A10BaseClass):
""" :param vlan_list: {"minItems": 1, "items": {"type": "vlan"}, "uniqueItems": true, "array": [{"required": ["vlan-num"], "properties": {"uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "ve": {"description": "ve number", "format": "number", "type": "number", "maximum": 4094, "minimum": 2, "optional": true}, "untagged-trunk-list": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"untagged-trunk-start": {"type": "number", "description": "Trunk groups", "format": "number"}, "optional": true, "untagged-trunk-end": {"type": "number", "description": "Trunk Group", "format": "number"}}}]}, "untagged-lif": {"description": "Logical tunnel interface (Logical tunnel interface number)", "format": "number", "type": "number", "maximum": 128, "minimum": 1, "optional": true}, "untagged-eth-list": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"untagged-ethernet-end": {"type": "number", "description": "Ethernet port", "format": "interface"}, "untagged-ethernet-start": {"$ref": "/axapi/v3/interface/ethernet", "type": "number", "description": "Ethernet port (Interface number)", "format": "interface"}, "optional": true}}]}, "tagged-eth-list": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"tagged-ethernet-end": {"type": "number", "description": "Ethernet port", "format": "interface"}, "optional": true, "tagged-ethernet-start": {"$ref": "/axapi/v3/interface/ethernet", "type": "number", "description": "Ethernet port (Interface number)", "format": "interface"}}}]}, "tagged-trunk-list": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "tagged-trunk-start": {"type": "number", "description": "Trunk groups", "format": "number"}, "tagged-trunk-end": {"type": "number", "description": "Trunk Group", "format": "number"}}}]}, "vlan-num": {"description": "VLAN number", "format": "number", "type": "number", "maximum": 4094, "minimum": 2, "optional": false}, "name": {"description": "VLAN name", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}}}], "type": "array", "$ref": "/axapi/v3/network/vlan/{vlan-num}"}
:param lacp_passthrough_list: {"minItems": 1, "items": {"type": "lacp-passthrough"}, "uniqueItems": true, "array": [{"required": ["peer-from", "peer-to"], "properties": {"peer-from": {"optional": false, "type": "number", "description": "Peer member to forward received LACP packets", "format": "interface"}, "peer-to": {"optional": false, "type": "number", "description": "Peer member to forward received LACP packets", "format": "interface"}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}}}], "type": "array", "$ref": "/axapi/v3/network/lacp-passthrough/{peer-from}+{peer-to}"}
:param bpdu_fwd_group_list: {"minItems": 1, "items": {"type": "bpdu-fwd-group"}, "uniqueItems": true, "array": [{"required": ["bpdu-fwd-group-number"], "properties": {"bpdu-fwd-group-number": {"optional": false, "minimum": 1, "type": "number", "maximum": 8, "format": "number"}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "ethernet-list": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ethernet-start": {"type": "number", "description": "Ethernet Port (Interface number)", "format": "interface"}, "ethernet-end": {"type": "number", "description": "Ethernet Port", "format": "interface"}, "optional": true}}]}}}], "type": "array", "$ref": "/axapi/v3/network/bpdu-fwd-group/{bpdu-fwd-group-number}"}
:param bridge_vlan_group_list: {"minItems": 1, "items": {"type": "bridge-vlan-group"}, "uniqueItems": true, "array": [{"required": ["bridge-vlan-group-number"], "properties": {"vlan-list": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "vlan-start": {"$ref": "/axapi/v3/network/vlan", "type": "number", "description": "VLAN id", "format": "number"}, "vlan-end": {"type": "number", "description": "VLAN id", "format": "number"}}}]}, "ve": {"description": "Virtual Ethernet Port (Virtual Ethernet Port number)", "format": "number", "type": "number", "maximum": 4094, "minimum": 2, "optional": true}, "forward-traffic": {"description": "'forward-all-traffic': Forward all traffic between bridge members; 'forward-ip-traffic': Forward only IP traffic between bridge members (default); ", "format": "enum", "default": "forward-ip-traffic", "type": "string", "enum": ["forward-all-traffic", "forward-ip-traffic"], "optional": true}, "name": {"description": "Bridge Group Name", "format": "string", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}, "bridge-vlan-group-number": {"description": "Bridge VLAN Group Number", "format": "number", "type": "number", "maximum": 255, "minimum": 1, "optional": false}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}}}], "type": "array", "$ref": "/axapi/v3/network/bridge-vlan-group/{bridge-vlan-group-number}"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
Class Description::
Configure Network Command.
Class network supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/network`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "network"
self.a10_url="/axapi/v3/network"
self.DeviceProxy = ""
self.arp = {}
self.vlan_list = []
self.lacp_passthrough_list = []
self.bpdu_fwd_group_list = []
self.vlan_global = {}
self.ve_stats = {}
self.mac_age_time = {}
self.icmpv6_rate_limit = {}
self.lacp = {}
self.arp_timeout = {}
self.bfd = {}
self.icmp_rate_limit = {}
self.bridge_vlan_group_list = []
self.mac_address = {}
self.lldp = {}
for keys, value in kwargs.items():
setattr(self,keys, value)
| [
"[email protected]"
] | |
0b3ce2bb646fbb0331575ede06a06288df241849 | 5864e86954a221d52d4fa83a607c71bacf201c5a | /eveclientqatools/explosions.py | b7df83a42b9b24a0ca2895a5e0776b27306a98bb | [] | no_license | connoryang/1v1dec | e9a2303a01e5a26bf14159112b112be81a6560fd | 404f2cebf13b311e754d45206008918881496370 | refs/heads/master | 2021-05-04T02:34:59.627529 | 2016-10-19T08:56:26 | 2016-10-19T08:56:26 | 71,334,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,958 | py | #Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\packages\eveclientqatools\explosions.py
import uicontrols
import carbonui.const as uiconst
import evetypes
import util
from carbonui.primitives.gridcontainer import GridContainer
from eve.client.script.ui.control.eveCombo import Combo
from eve.client.script.ui.control.buttons import ButtonIcon
from carbonui.primitives.container import Container
from evegraphics.explosions.spaceObjectExplosionManager import SpaceObjectExplosionManager
from evegraphics.fsd.explosionBuckets import GetExplosionBucketIDByTypeID, GetExplosionRaces
SEARCH_DISTANCE = 1000000
class ExplosionDebugger(object):
def __init__(self):
self.name = 'Explosions'
self.windowID = 'Explosions_ ' + self.name
self._sceneManager = sm.GetService('sceneManager')
self._michelle = sm.GetService('michelle')
self.scroll = None
self.selectedBallsToExplosionBucketID = {}
self.ballIDToExplosion = {}
self.explosionBucketsUsedWhenWindowOpened = False
def GetBall(self, ballID = None):
if ballID is None:
ballID = self.shipId
return sm.GetService('michelle').GetBall(ballID)
def _OnClose(self):
SpaceObjectExplosionManager.USE_EXPLOSION_BUCKETS = self.explosionBucketsUsedWhenWindowOpened
def ShowUI(self):
self.explosionBucketsUsedWhenWindowOpened = SpaceObjectExplosionManager.USE_EXPLOSION_BUCKETS
wnd = uicontrols.Window.Open(windowID=self.windowID)
wnd.SetTopparentHeight(0)
wnd.SetMinSize([500, 250])
wnd.SetCaption(self.name)
wnd._OnClose = self._OnClose
main = wnd.GetMainArea()
bottomCont = Container(name='bottomCont', parent=main, align=uiconst.TOBOTTOM, height=30, width=50, padBottom=10)
explosionSelectionContainer = Container(name='explosionSelectionCont', parent=main, align=uiconst.TOBOTTOM, height=30, padTop=10, padBottom=10)
explosionContainer = Container(name='explosionContainer', parent=main, align=uiconst.TOALL, padBottom=10)
self.scroll = uicontrols.Scroll(parent=explosionContainer)
self.scroll.sr.id = 'explosionDebugScroll'
self.scroll.OnSelectionChange = self.OnSelectionChange
self.explosionCombo = Combo(name='myCombo', parent=explosionSelectionContainer, label='Set explosion to selected items', options=[('Random', None)], callback=self.OnExplosionSelected, align=uiconst.TOTOP, padRight=12, padLeft=12)
buttonGrid = GridContainer(name='buttonGrid', parent=bottomCont, align=uiconst.CENTER, width=150, height=20, lines=1, columns=3)
ButtonIcon(name='Play', parent=buttonGrid, align=uiconst.TORIGHT, width=20, height=20, iconSize=24, padRight=15, texturePath='res:/UI/Texture/Icons/play.png', func=self.Explode, hint='Play Explosions (the exploding ships will not survive)')
ButtonIcon(name='Refresh', parent=buttonGrid, align=uiconst.CENTER, width=20, height=20, iconSize=24, texturePath='res:/UI/Texture/Icons/replay.png', func=self.UpdateTable, hint='Update table')
ButtonIcon(name='ClearWrecks', parent=buttonGrid, align=uiconst.TOLEFT, width=20, height=20, iconSize=32, padLeft=15, texturePath='res:/UI/Texture/Icons/44_32_37.png', func=self.ClearWrecks, hint='Clear wrecks')
self.UpdateTable()
def UpdateTable(self):
layout = '%s<t>%s<t>%s<t>%s<t>%s<t>%s'
headers = ['distance (m)',
'itemID',
'Type Name',
'Group Name',
'Explosion Bucket ID',
'Selected Explosion']
content = []
ballpark = sm.GetService('michelle').GetBallpark()
balls = ballpark.GetBallsInRange(session.shipid, SEARCH_DISTANCE)
selectedEntries = []
for ballID in balls:
ball = sm.GetService('michelle').GetBall(ballID)
if not hasattr(ball, 'typeData') or getattr(ball, 'exploded', False):
continue
typeID = ball.typeData['typeID']
explosionBucketID = GetExplosionBucketIDByTypeID(typeID)
if explosionBucketID is None:
continue
typeName = evetypes.GetName(typeID)
groupName = evetypes.GetGroupName(typeID)
explosionRes = 'Random'
dist = util.FmtAmt(ballpark.DistanceBetween(session.shipid, ballID))
info = (dist,
ballID,
typeName,
groupName,
explosionBucketID,
explosionRes)
label = layout % info
entry = uicontrols.ScrollEntryNode(decoClass=uicontrols.SE_GenericCore, label=label)
if ballID in self.selectedBallsToExplosionBucketID:
selectedEntries.append(entry)
content.append(entry)
self.scroll.Load(contentList=content, headers=headers, fixedEntryHeight=18)
self.scroll.SelectNodes(selectedEntries)
def OnSelectionChange(self, selection):
self.selectedBallsToExplosionBucketID = {}
for item in selection:
itemInfo = item.label.split('<t>')
itemID = int(itemInfo[1])
explosionBucketID = int(itemInfo[4])
self.selectedBallsToExplosionBucketID[itemID] = explosionBucketID
explosionBuckets = set(self.selectedBallsToExplosionBucketID.values())
options = [('Random', None)]
for explosionBucketID in explosionBuckets:
for race, explosions in GetExplosionRaces(int(explosionBucketID)).iteritems():
for explosion in explosions:
options.append((explosion.filePath, explosion))
self.explosionCombo.LoadOptions(options)
def OnExplosionSelected(self, combobox, key, value):
selectedBalls = self.selectedBallsToExplosionBucketID.keys()
for ballID in selectedBalls:
if value is None:
del self.ballIDToExplosion[ballID]
else:
self.ballIDToExplosion[ballID] = value
for row in self.scroll.sr.nodes:
if not row.get('selected', 0):
continue
label = row.label
splitLabel = label.split('<t>')
splitLabel[5] = value.filePath
row.label = '<t>'.join(splitLabel)
self.scroll.ReloadNodes()
def Explode(self):
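        # enable bucket-driven explosion selection, register the explosion
        # chosen for each ball, then /kill the selected balls so they detonate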
SpaceObjectExplosionManager.USE_EXPLOSION_BUCKETS = True
for ballID, explosion in self.ballIDToExplosion.iteritems():
SpaceObjectExplosionManager.SetPreferredExplosion(ballID, explosion)
for ballID in self.selectedBallsToExplosionBucketID:
sm.GetService('slash').SlashCmd('/kill %s' % ballID)
if ballID in self.ballIDToExplosion:
del self.ballIDToExplosion[ballID]
self.selectedBallsToExplosionBucketID = {}
def ClearWrecks(self):
sm.GetService('slash').SlashCmd('/unspawn range=%s only=groupWreck' % SEARCH_DISTANCE)
| [
"[email protected]"
] | |
9bb2e4a7ed40ed97b5149b0f6f1e2ac1f704ad6b | 63d3a6255f2677f9d92205d62163b9d22a74c5c7 | /modules/dynadb/migrations/0063_auto_20161221_1826.py | c58f76604e77a21a599a46e02764f5ddf4cef3f0 | [
"Apache-2.0"
] | permissive | GPCRmd/GPCRmd | 9204f39b1bfbc800b13512b316e05e54ddd8af23 | 47d7a4e71025b70e15a0f752760873249932c54e | refs/heads/main | 2023-09-04T11:13:44.285629 | 2023-08-29T13:43:01 | 2023-08-29T13:43:01 | 260,036,875 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 850 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-12-21 17:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dynadb', '0062_merge'),
]
operations = [
migrations.AlterField(
model_name='dyndbmodel',
name='type',
field=models.SmallIntegerField(choices=[(0, 'Apoform'), (1, 'Complex')], default=0),
),
migrations.AlterField(
model_name='dyndbsubmissionmolecule',
name='type',
field=models.SmallIntegerField(blank=True, choices=[(0, 'Orthosteric ligand'), (1, 'Allosteric ligand'), (2, 'Crystallographic waters'), (3, 'Crystallographic lipids'), (4, 'Crystallographic ions'), (5, 'Other')], default=0, null=True),
),
]
| [
"[email protected]"
] | |
c5ae45a375095336c401e1f966e0b4e474d46e8a | 0b793bce2da8c3d09b7956c0672ddbffd46feaed | /atcoder/corp/codefes2016_qc_c.py | 06989a728b75a41345f62c26b25d84e5a15ae4aa | [
"MIT"
] | permissive | knuu/competitive-programming | c6c4e08fb231937d988bdc5a60a8ad6b31b97616 | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | refs/heads/master | 2021-01-17T09:39:02.647688 | 2020-11-07T03:17:22 | 2020-11-07T03:17:22 | 27,886,732 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | N = int(input())
A = [int(x) for x in input().split()]
B = [int(x) for x in input().split()]
INF = 10**9
record_A = []
record_B = []
record_A.append((A[0], A[0]))
for i in range(1, N):
if A[i] == A[i-1]:
record_A.append((1, record_A[-1][1]))
else:
record_A.append((A[i], A[i]))
record_B.append((B[-1], B[-1]))
for i in reversed(range(N-1)):
if B[i] == B[i+1]:
record_B.append((1, record_B[-1][1]))
else:
record_B.append((B[i], B[i]))
ans = 1
mod = 10**9 + 7
for (lb_a, ub_a), (lb_b, ub_b) in zip(record_A, reversed(record_B)):
lb, ub = max(lb_a, lb_b), min(ub_a, ub_b)
if ub - lb < 0:
print(0)
break
ans *= ub - lb + 1
ans %= mod
else:
print(ans)
| [
"[email protected]"
] | |
d9e648577a84d88311e187435c4adda1b002de3f | 35fa8925e63f2b0f62ef6bfc1ff4e03cf42bd923 | /tests/models/test_category.py | 03d5ee1020c13013d8c46e00d4cfc63d278f2993 | [
"Apache-2.0"
] | permissive | TheLabbingProject/django_analyses | 9e6f8b9bd2a84e8efe6dda6a15de6a3ecdf48ec1 | 5642579660fd09dde4a23bf02ec98a7ec264bceb | refs/heads/master | 2023-02-26T07:53:53.142552 | 2023-02-17T08:12:17 | 2023-02-17T08:12:17 | 225,623,958 | 1 | 2 | Apache-2.0 | 2023-02-17T08:12:18 | 2019-12-03T13:15:29 | Python | UTF-8 | Python | false | false | 3,594 | py | from django.test import TestCase
from django_analyses.models.category import Category
from tests.factories.category import CategoryFactory
class CategoryTestCase(TestCase):
"""
Tests for the :class:`~django_analyses.models.category.Category` model.
"""
def setUp(self):
"""
Adds the created instances to the tests' contexts.
For more information see unittest's :meth:`~unittest.TestCase.setUp` method.
"""
self.category = CategoryFactory()
##########
# Meta #
##########
def test_verbose_name_plural(self):
"""
Validate the `verbose name plural`_ of the
:class:`~django_analyses.models.category.Category` model.
.. _verbose name plural: https://docs.djangoproject.com/en/2.2/ref/models/options/#verbose-name-plural
"""
self.assertEqual(Category._meta.verbose_name_plural, "Categories")
def test_ordering(self):
"""
Validate the `ordering`_ of the
:class:`~django_analyses.models.category.Category` model.
.. _ordering: https://docs.djangoproject.com/en/2.2/ref/models/options/#ordering
"""
self.assertTupleEqual(Category._meta.ordering, ("title",))
##########
# Fields #
##########
# title
def test_title_max_length(self):
"""
Validate the max_length of the *title* field.
"""
field = self.category._meta.get_field("title")
self.assertEqual(field.max_length, 255)
def test_title_is_unique(self):
"""
Validates that the *title* field is unique.
"""
field = self.category._meta.get_field("title")
self.assertTrue(field.unique)
def test_title_blank_and_null(self):
"""
Validates that the *title* field may not be blank or null.
"""
field = self.category._meta.get_field("title")
self.assertFalse(field.blank)
self.assertFalse(field.null)
# description
def test_description_is_not_unique(self):
"""
Validates that the *description* field is not set to unique.
"""
field = self.category._meta.get_field("description")
self.assertFalse(field.unique)
def test_description_blank_and_null(self):
"""
Validates that the *description* field may be blank or null.
"""
field = self.category._meta.get_field("description")
self.assertTrue(field.blank)
self.assertTrue(field.null)
# parent
def test_parent_is_nullable(self):
"""
Validates that the *parent* field is nullable.
"""
field = self.category._meta.get_field("parent")
self.assertTrue(field.null)
def test_creation_with_parent_category(self):
"""
Tests creating a category with an existing category as the parent.
"""
new_category = CategoryFactory(parent=self.category)
self.assertEqual(new_category.parent, self.category)
def test_settings_a_parent_category(self):
"""
Tests setting a parent category.
"""
parent = CategoryFactory()
self.category.parent = parent
self.category.save()
self.assertEqual(self.category.parent, parent)
###########
# Methods #
###########
def test_string(self):
"""
Validate the string output of the
        :class:`~django_analyses.models.category.Category` model.
"""
self.assertEqual(str(self.category), self.category.title)
| [
"[email protected]"
] | |
6845f29a5c09f0a2ad3e965b6e8a97e5f2963dbc | c2fd9c421b225862633f74f99a7a0dad635c5c67 | /tree/RangeSumofBST.py | 0b06571e618c824e7fd428daebeaebde12112bc8 | [] | no_license | yuhangxiaocs/LeetCodePy | 3751881dbd78b581a1d75beea737aed28765988b | 31012a004ba14ddfb468a91925d86bc2dfb60dd4 | refs/heads/master | 2020-12-20T19:36:55.421295 | 2020-11-24T17:01:15 | 2020-11-24T17:01:15 | 236,190,313 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,401 | py | # Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def construct(self, t):
return 0
class Solution(object):
# 利用二叉搜索树的性质 适当剪枝
def rangeSumBST(self, root, L, R):
"""
:type root: TreeNode
:type L: int
:type R: int
:rtype: int
"""
if root == None:
return 0
if root.val < L:
return self.rangeSumBST(root.right, L, R)
elif root.val > R:
return self.rangeSumBST(root.left, L, R)
else:
return root.val + self.rangeSumBST(root.right, L, R) + self.rangeSumBST(root.left, L, R)
# 用stack来模拟递归 节约递归调用代价
# python中用list的append和pop操作轻松实现stack
def rangeSumBST2(self, root, L, R):
stack = []
stack.append(root)
rangeSum = 0
while (len(stack) > 0):
node = stack.pop()
if node == None:
continue
if node.val < L:
stack.append(node.right)
elif node.val > R:
stack.append(node.left)
else:
rangeSum += node.val
stack.append(node.left)
stack.append(node.right)
return rangeSum
| [
"[email protected]"
] | |
372952efec21a12b8261f6363b873755ecc62eed | 3ba0de5f13f6eae9434cd09964a9d69a6dbda636 | /mako/lib/MemoryConfiguration.py | 25db0ef519aed8814320b45d63efa19b9cfe7b46 | [] | no_license | fantastic001/Mako | 513f43f4170896a807c4e297573e19125dc2066c | eb51f163b127f9c273ff9179d6ed55092fed369f | refs/heads/master | 2022-01-18T20:10:33.141618 | 2022-01-02T12:30:03 | 2022-01-02T12:30:03 | 85,867,290 | 8 | 3 | null | null | null | null | UTF-8 | Python | false | false | 250 | py |
from . import Configuration
class MemoryConfiguration(Configuration):
    def __init__(self, data=None):
        # avoid the shared mutable default-argument pitfall
        self.data = data if data is not None else {}
def open(self) -> dict:
return self.data
def save(self, params: dict):
self.data = params
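
# Minimal usage sketch (illustrative):
#   cfg = MemoryConfiguration({"verbose": True})
#   params = cfg.open()        # -> {"verbose": True}
#   params["level"] = 3
#   cfg.save(params)           # kept in memory only, nothing hits disk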
| [
"[email protected]"
] | |
51e65b7efa8aca4a4d89a8d1aaa1076f921df158 | 7455dcf23ca3c8d74abcb4ef223bf0506ccb1eb9 | /PMD/map-pipeline/src/main/python/run.py | e6986ed77cda45243f56ac59bf06dfbec808a5cb | [] | no_license | ResearchSoftwareInstitute/Duke-TIC | 2e2ca9cadd52d672b5614aa6d661afb0ab0bf25d | f481103adc68b883cf70c101901f296b031954aa | refs/heads/master | 2020-04-05T02:13:10.849193 | 2019-01-15T16:32:05 | 2019-01-15T16:32:05 | 156,468,435 | 0 | 1 | null | 2018-11-21T17:10:10 | 2018-11-07T00:41:49 | Scala | UTF-8 | Python | false | false | 149 | py | from utils import submit
import sys
host = sys.argv[1]
cache_dir = sys.argv[2]
args = sys.argv[3:]
submit(host, cache_dir, "tic.Transform", *args)
| [
"[email protected]"
] | |
09d87b4f24a30478585165a9e590a4f858680692 | 2293c76c3d18e2fcd44ded90bd40113d26285663 | /pyeccodes/defs/grib2/local/1098/centres_table.py | a1437b6b11a2846a97eca02ab304aaac8681e911 | [
"Apache-2.0"
] | permissive | ecmwf/pyeccodes | b1f121dbddf68d176a03805ed5144ba0b37ac211 | dce2c72d3adcc0cb801731366be53327ce13a00b | refs/heads/master | 2022-04-23T10:37:40.524078 | 2020-04-18T06:30:29 | 2020-04-18T06:30:29 | 255,554,540 | 9 | 3 | null | null | null | null | UTF-8 | Python | false | false | 867 | py | def load(h):
return ({'abbr': 'eggr', 'code': 0, 'title': 'UK Met Office - UK'},
{'abbr': 'aemet', 'code': 1, 'title': 'AEMET- Spain HIRLAM'},
{'abbr': 'arpasim', 'code': 2, 'title': 'ARPA-SIM - Italy COSMO'},
{'abbr': 'metno', 'code': 3, 'title': 'Met.NO'},
{'abbr': 'zamg', 'code': 4, 'title': 'ZAMG / Austria'},
{'abbr': 'dwd', 'code': 5, 'title': 'DWD - Germany SRNWP'},
{'abbr': 'dnmi', 'code': 6, 'title': 'DNMI/Univ Oslo - Norway HIRLAM ALADIN'},
{'abbr': 'meteofrance', 'code': 7, 'title': 'Meteo-France / France'},
{'abbr': 'dmi', 'code': 8, 'title': 'DMI'},
{'abbr': 'hungary', 'code': 9, 'title': 'Hungary'},
{'abbr': 'czech', 'code': 10, 'title': 'Czech Republic'},
{'abbr': 'croatia', 'code': 11, 'title': 'Croatia'})
| [
"[email protected]"
] | |
829f985edf125ed9a87152f34ea4882a305f6192 | 55540f3e86f1d5d86ef6b5d295a63518e274efe3 | /toolchain/riscv/MSYS/python/Lib/test/test_selectors.py | 58afb0eb0988d9e4c2965c78485e6d0d6c85a779 | [
"Apache-2.0",
"bzip2-1.0.6",
"LicenseRef-scancode-proprietary-license",
"OpenSSL",
"Python-2.0",
"LicenseRef-scancode-newlib-historical",
"TCL",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | bouffalolab/bl_iot_sdk | bc5eaf036b70f8c65dd389439062b169f8d09daa | b90664de0bd4c1897a9f1f5d9e360a9631d38b34 | refs/heads/master | 2023-08-31T03:38:03.369853 | 2023-08-16T08:50:33 | 2023-08-18T09:13:27 | 307,347,250 | 244 | 101 | Apache-2.0 | 2023-08-28T06:29:02 | 2020-10-26T11:16:30 | C | UTF-8 | Python | false | false | 18,779 | py | import errno
import os
import random
import selectors
import signal
import socket
import sys
from test import support
from time import sleep
import unittest
import unittest.mock
import tempfile
from time import monotonic as time
try:
import resource
except ImportError:
resource = None
if hasattr(socket, 'socketpair'):
socketpair = socket.socketpair
else:
def socketpair(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0):
with socket.socket(family, type, proto) as l:
l.bind((support.HOST, 0))
l.listen()
c = socket.socket(family, type, proto)
try:
c.connect(l.getsockname())
caddr = c.getsockname()
while True:
a, addr = l.accept()
# check that we've got the correct client
if addr == caddr:
return c, a
a.close()
except OSError:
c.close()
raise
def find_ready_matching(ready, flag):
match = []
for key, events in ready:
if events & flag:
match.append(key.fileobj)
return match
class BaseSelectorTestCase(unittest.TestCase):
def make_socketpair(self):
rd, wr = socketpair()
self.addCleanup(rd.close)
self.addCleanup(wr.close)
return rd, wr
def test_register(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
key = s.register(rd, selectors.EVENT_READ, "data")
self.assertIsInstance(key, selectors.SelectorKey)
self.assertEqual(key.fileobj, rd)
self.assertEqual(key.fd, rd.fileno())
self.assertEqual(key.events, selectors.EVENT_READ)
self.assertEqual(key.data, "data")
# register an unknown event
self.assertRaises(ValueError, s.register, 0, 999999)
# register an invalid FD
self.assertRaises(ValueError, s.register, -10, selectors.EVENT_READ)
# register twice
self.assertRaises(KeyError, s.register, rd, selectors.EVENT_READ)
# register the same FD, but with a different object
self.assertRaises(KeyError, s.register, rd.fileno(),
selectors.EVENT_READ)
def test_unregister(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
s.register(rd, selectors.EVENT_READ)
s.unregister(rd)
# unregister an unknown file obj
self.assertRaises(KeyError, s.unregister, 999999)
# unregister twice
self.assertRaises(KeyError, s.unregister, rd)
def test_unregister_after_fd_close(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
r, w = rd.fileno(), wr.fileno()
s.register(r, selectors.EVENT_READ)
s.register(w, selectors.EVENT_WRITE)
rd.close()
wr.close()
s.unregister(r)
s.unregister(w)
@unittest.skipUnless(os.name == 'posix', "requires posix")
def test_unregister_after_fd_close_and_reuse(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
r, w = rd.fileno(), wr.fileno()
s.register(r, selectors.EVENT_READ)
s.register(w, selectors.EVENT_WRITE)
rd2, wr2 = self.make_socketpair()
rd.close()
wr.close()
os.dup2(rd2.fileno(), r)
os.dup2(wr2.fileno(), w)
self.addCleanup(os.close, r)
self.addCleanup(os.close, w)
s.unregister(r)
s.unregister(w)
def test_unregister_after_socket_close(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
s.register(rd, selectors.EVENT_READ)
s.register(wr, selectors.EVENT_WRITE)
rd.close()
wr.close()
s.unregister(rd)
s.unregister(wr)
def test_modify(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
key = s.register(rd, selectors.EVENT_READ)
# modify events
key2 = s.modify(rd, selectors.EVENT_WRITE)
self.assertNotEqual(key.events, key2.events)
self.assertEqual(key2, s.get_key(rd))
s.unregister(rd)
# modify data
d1 = object()
d2 = object()
key = s.register(rd, selectors.EVENT_READ, d1)
key2 = s.modify(rd, selectors.EVENT_READ, d2)
self.assertEqual(key.events, key2.events)
self.assertNotEqual(key.data, key2.data)
self.assertEqual(key2, s.get_key(rd))
self.assertEqual(key2.data, d2)
# modify unknown file obj
self.assertRaises(KeyError, s.modify, 999999, selectors.EVENT_READ)
# modify use a shortcut
d3 = object()
s.register = unittest.mock.Mock()
s.unregister = unittest.mock.Mock()
s.modify(rd, selectors.EVENT_READ, d3)
self.assertFalse(s.register.called)
self.assertFalse(s.unregister.called)
def test_modify_unregister(self):
# Make sure the fd is unregister()ed in case of error on
# modify(): http://bugs.python.org/issue30014
if self.SELECTOR.__name__ == 'EpollSelector':
patch = unittest.mock.patch(
'selectors.EpollSelector._selector_cls')
elif self.SELECTOR.__name__ == 'PollSelector':
patch = unittest.mock.patch(
'selectors.PollSelector._selector_cls')
elif self.SELECTOR.__name__ == 'DevpollSelector':
patch = unittest.mock.patch(
'selectors.DevpollSelector._selector_cls')
else:
raise self.skipTest("")
with patch as m:
m.return_value.modify = unittest.mock.Mock(
side_effect=ZeroDivisionError)
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
s.register(rd, selectors.EVENT_READ)
self.assertEqual(len(s._map), 1)
with self.assertRaises(ZeroDivisionError):
s.modify(rd, selectors.EVENT_WRITE)
self.assertEqual(len(s._map), 0)
def test_close(self):
s = self.SELECTOR()
self.addCleanup(s.close)
mapping = s.get_map()
rd, wr = self.make_socketpair()
s.register(rd, selectors.EVENT_READ)
s.register(wr, selectors.EVENT_WRITE)
s.close()
self.assertRaises(RuntimeError, s.get_key, rd)
self.assertRaises(RuntimeError, s.get_key, wr)
self.assertRaises(KeyError, mapping.__getitem__, rd)
self.assertRaises(KeyError, mapping.__getitem__, wr)
def test_get_key(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
key = s.register(rd, selectors.EVENT_READ, "data")
self.assertEqual(key, s.get_key(rd))
# unknown file obj
self.assertRaises(KeyError, s.get_key, 999999)
def test_get_map(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
keys = s.get_map()
self.assertFalse(keys)
self.assertEqual(len(keys), 0)
self.assertEqual(list(keys), [])
key = s.register(rd, selectors.EVENT_READ, "data")
self.assertIn(rd, keys)
self.assertEqual(key, keys[rd])
self.assertEqual(len(keys), 1)
self.assertEqual(list(keys), [rd.fileno()])
self.assertEqual(list(keys.values()), [key])
# unknown file obj
with self.assertRaises(KeyError):
keys[999999]
# Read-only mapping
with self.assertRaises(TypeError):
del keys[rd]
def test_select(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
s.register(rd, selectors.EVENT_READ)
wr_key = s.register(wr, selectors.EVENT_WRITE)
result = s.select()
for key, events in result:
self.assertTrue(isinstance(key, selectors.SelectorKey))
self.assertTrue(events)
self.assertFalse(events & ~(selectors.EVENT_READ |
selectors.EVENT_WRITE))
self.assertEqual([(wr_key, selectors.EVENT_WRITE)], result)
def test_context_manager(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
with s as sel:
sel.register(rd, selectors.EVENT_READ)
sel.register(wr, selectors.EVENT_WRITE)
self.assertRaises(RuntimeError, s.get_key, rd)
self.assertRaises(RuntimeError, s.get_key, wr)
def test_fileno(self):
s = self.SELECTOR()
self.addCleanup(s.close)
if hasattr(s, 'fileno'):
fd = s.fileno()
self.assertTrue(isinstance(fd, int))
self.assertGreaterEqual(fd, 0)
def test_selector(self):
s = self.SELECTOR()
self.addCleanup(s.close)
NUM_SOCKETS = 12
MSG = b" This is a test."
MSG_LEN = len(MSG)
readers = []
writers = []
r2w = {}
w2r = {}
for i in range(NUM_SOCKETS):
rd, wr = self.make_socketpair()
s.register(rd, selectors.EVENT_READ)
s.register(wr, selectors.EVENT_WRITE)
readers.append(rd)
writers.append(wr)
r2w[rd] = wr
w2r[wr] = rd
bufs = []
while writers:
ready = s.select()
ready_writers = find_ready_matching(ready, selectors.EVENT_WRITE)
if not ready_writers:
self.fail("no sockets ready for writing")
wr = random.choice(ready_writers)
wr.send(MSG)
for i in range(10):
ready = s.select()
ready_readers = find_ready_matching(ready,
selectors.EVENT_READ)
if ready_readers:
break
# there might be a delay between the write to the write end and
# the read end is reported ready
sleep(0.1)
else:
self.fail("no sockets ready for reading")
self.assertEqual([w2r[wr]], ready_readers)
rd = ready_readers[0]
buf = rd.recv(MSG_LEN)
self.assertEqual(len(buf), MSG_LEN)
bufs.append(buf)
s.unregister(r2w[rd])
s.unregister(rd)
writers.remove(r2w[rd])
self.assertEqual(bufs, [MSG] * NUM_SOCKETS)
@unittest.skipIf(sys.platform == 'win32',
'select.select() cannot be used with empty fd sets')
def test_empty_select(self):
# Issue #23009: Make sure EpollSelector.select() works when no FD is
# registered.
s = self.SELECTOR()
self.addCleanup(s.close)
self.assertEqual(s.select(timeout=0), [])
def test_timeout(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
s.register(wr, selectors.EVENT_WRITE)
t = time()
self.assertEqual(1, len(s.select(0)))
self.assertEqual(1, len(s.select(-1)))
self.assertLess(time() - t, 0.5)
s.unregister(wr)
s.register(rd, selectors.EVENT_READ)
t = time()
self.assertFalse(s.select(0))
self.assertFalse(s.select(-1))
self.assertLess(time() - t, 0.5)
t0 = time()
self.assertFalse(s.select(1))
t1 = time()
dt = t1 - t0
# Tolerate 2.0 seconds for very slow buildbots
self.assertTrue(0.8 <= dt <= 2.0, dt)
@unittest.skipUnless(hasattr(signal, "alarm"),
"signal.alarm() required for this test")
def test_select_interrupt_exc(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
class InterruptSelect(Exception):
pass
def handler(*args):
raise InterruptSelect
orig_alrm_handler = signal.signal(signal.SIGALRM, handler)
self.addCleanup(signal.signal, signal.SIGALRM, orig_alrm_handler)
try:
signal.alarm(1)
s.register(rd, selectors.EVENT_READ)
t = time()
# select() is interrupted by a signal which raises an exception
with self.assertRaises(InterruptSelect):
s.select(30)
# select() was interrupted before the timeout of 30 seconds
self.assertLess(time() - t, 5.0)
finally:
signal.alarm(0)
@unittest.skipUnless(hasattr(signal, "alarm"),
"signal.alarm() required for this test")
def test_select_interrupt_noraise(self):
s = self.SELECTOR()
self.addCleanup(s.close)
rd, wr = self.make_socketpair()
orig_alrm_handler = signal.signal(signal.SIGALRM, lambda *args: None)
self.addCleanup(signal.signal, signal.SIGALRM, orig_alrm_handler)
try:
signal.alarm(1)
s.register(rd, selectors.EVENT_READ)
t = time()
# select() is interrupted by a signal, but the signal handler doesn't
# raise an exception, so select() should by retries with a recomputed
# timeout
self.assertFalse(s.select(1.5))
self.assertGreaterEqual(time() - t, 1.0)
finally:
signal.alarm(0)
class ScalableSelectorMixIn:
# see issue #18963 for why it's skipped on older OS X versions
@support.requires_mac_ver(10, 5)
@unittest.skipUnless(resource, "Test needs resource module")
def test_above_fd_setsize(self):
# A scalable implementation should have no problem with more than
# FD_SETSIZE file descriptors. Since we don't know the value, we just
# try to set the soft RLIMIT_NOFILE to the hard RLIMIT_NOFILE ceiling.
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
self.addCleanup(resource.setrlimit, resource.RLIMIT_NOFILE,
(soft, hard))
NUM_FDS = min(hard, 2**16)
except (OSError, ValueError):
NUM_FDS = soft
# guard for already allocated FDs (stdin, stdout...)
NUM_FDS -= 32
s = self.SELECTOR()
self.addCleanup(s.close)
for i in range(NUM_FDS // 2):
try:
rd, wr = self.make_socketpair()
except OSError:
# too many FDs, skip - note that we should only catch EMFILE
# here, but apparently *BSD and Solaris can fail upon connect()
# or bind() with EADDRNOTAVAIL, so let's be safe
self.skipTest("FD limit reached")
try:
s.register(rd, selectors.EVENT_READ)
s.register(wr, selectors.EVENT_WRITE)
except OSError as e:
if e.errno == errno.ENOSPC:
# this can be raised by epoll if we go over
# fs.epoll.max_user_watches sysctl
self.skipTest("FD limit reached")
raise
try:
fds = s.select()
except OSError as e:
if e.errno == errno.EINVAL and sys.platform == 'darwin':
# unexplainable errors on macOS don't need to fail the test
self.skipTest("Invalid argument error calling poll()")
raise
self.assertEqual(NUM_FDS // 2, len(fds))
class DefaultSelectorTestCase(BaseSelectorTestCase):
SELECTOR = selectors.DefaultSelector
class SelectSelectorTestCase(BaseSelectorTestCase):
SELECTOR = selectors.SelectSelector
@unittest.skipUnless(hasattr(selectors, 'PollSelector'),
"Test needs selectors.PollSelector")
class PollSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn):
SELECTOR = getattr(selectors, 'PollSelector', None)
@unittest.skipUnless(hasattr(selectors, 'EpollSelector'),
"Test needs selectors.EpollSelector")
class EpollSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn):
SELECTOR = getattr(selectors, 'EpollSelector', None)
def test_register_file(self):
# epoll(7) returns EPERM when given a file to watch
s = self.SELECTOR()
with tempfile.NamedTemporaryFile() as f:
with self.assertRaises(IOError):
s.register(f, selectors.EVENT_READ)
# the SelectorKey has been removed
with self.assertRaises(KeyError):
s.get_key(f)
@unittest.skipUnless(hasattr(selectors, 'KqueueSelector'),
"Test needs selectors.KqueueSelector)")
class KqueueSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn):
SELECTOR = getattr(selectors, 'KqueueSelector', None)
def test_register_bad_fd(self):
# a file descriptor that's been closed should raise an OSError
# with EBADF
s = self.SELECTOR()
bad_f = support.make_bad_fd()
with self.assertRaises(OSError) as cm:
s.register(bad_f, selectors.EVENT_READ)
self.assertEqual(cm.exception.errno, errno.EBADF)
# the SelectorKey has been removed
with self.assertRaises(KeyError):
s.get_key(bad_f)
@unittest.skipUnless(hasattr(selectors, 'DevpollSelector'),
"Test needs selectors.DevpollSelector")
class DevpollSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn):
SELECTOR = getattr(selectors, 'DevpollSelector', None)
def test_main():
tests = [DefaultSelectorTestCase, SelectSelectorTestCase,
PollSelectorTestCase, EpollSelectorTestCase,
KqueueSelectorTestCase, DevpollSelectorTestCase]
support.run_unittest(*tests)
support.reap_children()
if __name__ == "__main__":
test_main()
| [
"[email protected]"
] | |
817aa994789d584285af1b87544401eee6f12db6 | f6f5db03e5f0fc43bf466730650fc2923d438189 | /feedjack_wp_export/migrations/0005_auto__chg_field_taxonomyterm_term_name__chg_field_export_url.py | e20a4935fbbe851552188c40ef257e8fc19951be | [
"WTFPL"
] | permissive | mk-fg/feedjack-wordpress-export | bd7e97adf5793067e909d7eaf14804eafaee5beb | 72f034872d65cb0d10ff097a13627f7b86b13843 | refs/heads/master | 2023-08-23T03:55:01.381404 | 2012-08-29T11:04:32 | 2012-08-29T11:04:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,070 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'TaxonomyTerm.term_name'
db.alter_column('feedjack_wp_export_taxonomyterm', 'term_name', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'Export.url'
db.alter_column('feedjack_wp_export_export', 'url', self.gf('django.db.models.fields.CharField')(max_length=255))
def backwards(self, orm):
# Changing field 'TaxonomyTerm.term_name'
db.alter_column('feedjack_wp_export_taxonomyterm', 'term_name', self.gf('django.db.models.fields.CharField')(max_length=254))
# Changing field 'Export.url'
db.alter_column('feedjack_wp_export_export', 'url', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
'feedjack.feed': {
'Meta': {'ordering': "('name', 'feed_url')", 'object_name': 'Feed'},
'etag': ('django.db.models.fields.CharField', [], {'max_length': '127', 'blank': 'True'}),
'feed_url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'filters': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'feeds'", 'blank': 'True', 'to': "orm['feedjack.Filter']"}),
'filters_logic': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shortname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'skip_errors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tagline': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
'feedjack.filter': {
'Meta': {'object_name': 'Filter'},
'base': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filters'", 'to': "orm['feedjack.FilterBase']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'})
},
'feedjack.filterbase': {
'Meta': {'object_name': 'FilterBase'},
'crossref': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'crossref_rebuild': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'crossref_span': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'crossref_timeline': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'handler_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
},
'feedjack_wp_export.export': {
'Meta': {'ordering': "('url', 'blog_id', 'username')", 'unique_together': "(('url', 'blog_id'),)", 'object_name': 'Export'},
'blog_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '63'})
},
'feedjack_wp_export.exportsubscriber': {
'Meta': {'ordering': "('export', '-is_active', 'feed')", 'object_name': 'ExportSubscriber'},
'export': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriber_set'", 'to': "orm['feedjack_wp_export.Export']"}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exports'", 'to': "orm['feedjack.Feed']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'processors': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'taxonomies': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['feedjack_wp_export.TaxonomyTerm']", 'null': 'True', 'blank': 'True'})
},
'feedjack_wp_export.taxonomyterm': {
'Meta': {'ordering': "('taxonomy', 'term_name', 'term_id')", 'unique_together': "(('taxonomy', 'term_name'), ('taxonomy', 'term_id'))", 'object_name': 'TaxonomyTerm'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'taxonomy': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
'term_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'term_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['feedjack_wp_export']
| [
"[email protected]"
] | |
472335cc898b7324390d655b4501a7343208b8cd | e6432bc17447989e950fbe5d68fcb3ac06599d4d | /library/management/commands/markamama.py | 6326608d9fb3a94d5c0609ffe1f7ec45e2e5c55c | [] | no_license | egitimplus/petproject | eb1a5dd72f9113b55bdd346868e5ba56bcce920e | a3e860499aae626756131f1f0c4a7eb0aabf7d93 | refs/heads/master | 2023-01-25T05:21:19.393652 | 2020-12-05T23:58:21 | 2020-12-05T23:58:21 | 276,755,718 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,305 | py | from django.core.management.base import BaseCommand
from bs4 import BeautifulSoup
import requests
from library.models import ProductLink
from django.utils import timezone
from food.models import FoodSite, FoodComment
import json
from django.db.models import Max
from datetime import datetime
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.food_type = None
self.food = None
self.dry_brands = [
'124', #acana
'168', #bozita
'42', #brit-care
'162', #dr-sachi
'121', #felicia
'51', #golosi
'66', #hills
'114', #lavital
'77', # luis
'89', #matisse
'112', # nd
'7', #obivan
'38', #orijen
'83', #pro-plan
'108', #pro-choice
'371', #pro-performance
'50', #proline
'65', #purina-one
'61', #reflex
'79', #royal-canin
'40', #sanabelle
]
self.wet_brands = [
'44', #animonda
'122',#best-pet
'168', # bozita
'42', # brit-care
'86', #chefs-choice
'162', #dr-sachi
'56', #felix
'247', #gim-cat
'51', # golosi
'87', #gourmet-gold
'66', #hills
'89', #matisse
'381', #me-o
'52', #miglior-gatto
'112', #nd
'7', #obivan
'50', #proline
'83', #pro-plan
'61', #reflex
'79', #royal-canin
'85', #schesir
'91', #vitacraft
]
self.brands = []
# --type page
def _page(self):
for brand in self.brands:
source = self._page_content(brand)
self._page_products(source, brand)
self._page_children(brand)
def _page_content(self, brand, page=1):
url = 'https://www.markamama.com.tr/srv/service/product/loader?' + self.food_type + '&link=' + self.food_type + '&brand=' + str(brand) + '&pg=' + str(page)
r = requests.get(url)
return BeautifulSoup(r.content, "lxml")
def _page_products(self, source, brand):
products = source.findAll("div", {"class": "col col-3 col-md-4 col-sm-6 col-xs-6 btm productItem ease"})
if products:
for product in products:
br = product.find("a", {"class": "col col-12 productBrand"})
url = product.a.get('href')
title = product.img.get('alt')
link, created = ProductLink.objects.get_or_create(
url='https://www.markamama.com.tr' + url,
defaults={
'brand': brand,
'name': title,
'food_type': self.food,
'petshop_id': 4
}
)
else:
ProductLink.objects.filter(brand=brand, food_type=self.food).update(down=1)
def _page_children(self, brand):
for i in range(2, 100):
source = self._page_content(brand, i)
products = source.findAll("div", {"class": "col col-3 col-md-4 col-sm-6 col-xs-6 btm productItem ease"})
if products:
self._page_products(source, brand)
else:
break
# --type product
def _product(self):
#last_update = timezone.now().date() - timedelta(0)
#links = ProductLink.objects.filter(updated__lte=last_update, petshop_id=4, down=0, active=1, food__isnull=False).all()
links = ProductLink.objects.filter(petshop_id=4, down=0, active=1, food__isnull=False).all()
for link in links:
if link.food_id is not None:
try:
source = self._product_content(link.url)
shippings = source.findAll("div", {"class": "box col-10 col-ml-1 krg"})
free_cargo = False
for shipping in shippings:
divs = shipping.findAll("div", {"class": "box col-8"})
for div in divs:
if div.text.strip() == 'Ücretsiz Kargo':
free_cargo = True
new_price = source.find("span", {"class": "product-price"})
if new_price:
new_price = new_price.text.strip().replace('.', '').replace(',', '.')
else:
new_price = 0
old_price = source.find("span", {"class": "product-price-not-discounted"})
if old_price:
old_price = old_price.text.strip().replace('.', '').replace(',', '.')
else:
old_price = new_price
in_stock = source.find("div", {"class": "fl col-12 add-to-cart-win inStock"})
if in_stock:
in_stock = True
else:
in_stock = False
skt = source.find("div", {"class": "sonkullanma"})
if skt:
try:
skt = skt.strong.text
skt = skt.replace(',', '.').replace('/', '.').replace('-', '.')
check_date = skt.split('.')
if len(check_date) == 2:
if len(check_date[1]) == 2:
skt = datetime.strptime(skt, '%m.%y')
skt = timezone.make_aware(skt, timezone.get_current_timezone())
else:
skt = datetime.strptime(skt, '%m.%Y')
skt = timezone.make_aware(skt, timezone.get_current_timezone())
else:
skt = datetime.strptime(skt, '%d.%m.%Y')
skt = timezone.make_aware(skt, timezone.get_current_timezone())
except:
skt = None
foodsite = FoodSite.objects.filter(url=link.url).first()
if foodsite is None:
new_site = FoodSite(
name=link.name,
food=link.food,
petshop=link.petshop,
url=link.url,
old_price=old_price,
price=new_price,
stock=in_stock,
cargo=free_cargo,
best_before=skt,
updated=timezone.now(),
)
new_site.save()
else:
foodsite.old_price = old_price
foodsite.price = new_price
foodsite.stock = in_stock
foodsite.cargo = free_cargo
foodsite.best_before = skt
foodsite.save()
self._product_comments(source, link.food)
ProductLink.objects.filter(id=link.id).update(down=0, updated=timezone.now())
except Exception as e:
print(e)
ProductLink.objects.filter(id=link.id).update(down=1, updated=timezone.now())
def _product_comments(self, source, food):
comments_li = source.find(id="commentTab")
comments_li = comments_li['data-href'].split('comment/')
comment_data = self._product_content(
'https://www.markamama.com.tr/srv/service/product-detail/comments/' + comments_li[1])
comment_json = json.loads(comment_data.text)
comments = comment_json.get('COMMENTS')
if comments:
c = FoodComment.objects.filter(food_id=food.id, petshop_id=9).aggregate(max_date=Max('created'))
for comment in comments:
published = datetime.fromtimestamp(int(comment['DATE']))
published = timezone.make_aware(published, timezone.get_current_timezone())
                save = 1  # will later be set to 0 so that only new comments are saved
if c['max_date'] is None:
save = 1
elif published > c['max_date']:
save = 1
if save == 1:
fc = FoodComment(
food=food,
name=comment['NAME'],
created=published,
content=comment['COMMENT'],
rating=round(comment['RATE'] / 4),
petshop_id=9,
)
fc.save()
def _product_content(self, url):
r = requests.get(url)
return BeautifulSoup(r.content, "lxml")
# command
def add_arguments(self, parser):
parser.add_argument('-t', '--type', type=str, help='Define a username prefix', )
parser.add_argument('-f', '--food', type=str, help='Define a food prefix', )
def handle(self, *args, **options):
crawl_type = options.get('type', None)
food = options.get('food', None)
if food == 'wet':
self.food_type = 'kedi-konserve-mamalari'
self.brands = self.wet_brands
elif food == 'dry':
self.food_type = 'kedi-mamasi'
self.brands = self.dry_brands
if crawl_type is not None:
if crawl_type == 'product':
self._product()
elif crawl_type == 'page':
if self.food_type is not None:
self.food = food
self._page()
else:
                    print('No selection made for --food')
            else:
                print('Invalid choice for --type')
        else:
            print('No selection made for --type')
"""
--food : wet, dry
--type : product, page
"""
| [
"[email protected]"
] | |
2ee8846e5a2086e11df153514d9ed5676a0b0ba3 | d5ad13232e3f1ced55f6956bc4cbda87925c8085 | /RNAseqMSMS/2-sv/2-split-mapped-sv/2-type.py | 194578ce6452976b1ac7d6adbf8c5f41fddece1f | [] | no_license | arvin580/SIBS | c0ba9a8a41f59cb333517c286f7d80300b9501a2 | 0cc2378bf62359ec068336ea4de16d081d0f58a4 | refs/heads/master | 2021-01-23T21:57:35.658443 | 2015-04-09T23:11:34 | 2015-04-09T23:11:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,415 | py | import sys
import os
files = os.listdir('.')
ouFile1 = open('split-mapped-translocation','w')
ouFile2 = open('split-mapped-inversion','w')
ouFile3 = open('split-mapped-duplication','w')
ouFile4 = open('split-mapped-deletion','w')
for f in files:
if f[-12:] =='not-splicing':
inFile = open(f)
while True:
line1 = inFile.readline()
line2 = inFile.readline()
if line1:
fields = line1.split()
ch1 = fields[3]
ch2 = fields[15]
pos1 = float(fields[10])
pos2 = float(fields[11])
pos3 = float(fields[22])
pos4 = float(fields[23])
qpos1 = float(fields[8])
qpos2 = float(fields[9])
qpos3 = float(fields[20])
qpos4 = float(fields[21])
mid1 = (pos1+pos2)/2
mid2 = (pos3+pos4)/2
qmid1 = (qpos1+qpos2)/2
qmid2 = (qpos3+qpos4)/2
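                # classification sketch of the split-mapped pair:
                #   different chromosomes         -> translocation
                #   opposite mapping orientations -> inversion
                #   same orientation: compare the segment order on the
                #   genome (mid1 vs mid2) with the order on the read
                #   (qmid1 vs qmid2) to separate duplication from deletion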
if ch1 != ch2:
ouFile1.write(line1)
ouFile1.write(line2)
elif (pos1 - pos2)*(pos3-pos4) < 0:
ouFile2.write(line1)
ouFile2.write(line2)
else:
if (pos1 - pos2) < 0 and (pos3 - pos4) <0 :
if (mid1 - mid2)*(qmid1 - qmid2) < 0:
ouFile3.write(line1)
ouFile3.write(line2)
else:
ouFile4.write(line1)
ouFile4.write(line2)
elif (pos1 -pos2) >0 and (pos3 - pos4) > 0:
if (mid1 - mid2)*(qmid1 - qmid2) > 0:
ouFile3.write(line1)
ouFile3.write(line2)
else:
ouFile4.write(line1)
ouFile4.write(line2)
else:
ouFile4.write(line1)
ouFile4.write(line2)
#elif (mid1 - mid2)*(qmid1 - qmid2) < 0:
# ouFile3.write(line1)
# ouFile3.write(line2)
# print(str(mid1)+'\t'+str(mid2)+'\t'+str(qmid1)+'\t'+str(qmid2))
else:
break
inFile.close()
| [
"[email protected]"
] | |
1fcd177241175f152741cc56ddfb300b6eea02db | 179d8aae260d20443e6e87613cff55d42587bc16 | /examples/oneflow2onnx/models/test_resnet50.py | a1c5ff6baefa1dda15f6499ddd4777b30db9293f | [] | no_license | 666DZY666/oneflow_convert_tools | 3b1f9d6ebaf154d7218236c332c6f9613b89a860 | bb38c52954facbfe977e09c7e4706b7563a7b50c | refs/heads/main | 2023-06-04T10:16:08.786531 | 2021-06-24T08:38:24 | 2021-06-24T08:38:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,920 | py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import oneflow as flow
import oneflow.typing as tp
import onnx
import onnxruntime as ort
import numpy as np
from oneflow_onnx.oneflow2onnx.util import convert_to_onnx_and_check
BLOCK_COUNTS = [3, 4, 6, 3]
BLOCK_FILTERS = [256, 512, 1024, 2048]
BLOCK_FILTERS_INNER = [64, 128, 256, 512]
g_trainable = False
def _conv2d(
name,
input,
filters,
kernel_size,
strides=1,
padding="SAME",
data_format="NCHW",
dilations=1,
trainable=True,
# weight_initializer=flow.variance_scaling_initializer(data_format="NCHW"),
weight_initializer=flow.variance_scaling_initializer(
2, "fan_in", "random_normal", data_format="NCHW"
),
weight_regularizer=flow.regularizers.l2(1.0 / 32768),
):
weight = flow.get_variable(
name + "-weight",
shape=(filters, input.shape[1], kernel_size, kernel_size),
dtype=input.dtype,
initializer=weight_initializer,
regularizer=weight_regularizer,
model_name="weight",
trainable=trainable,
)
return flow.nn.conv2d(
input, weight, strides, padding, data_format, dilations, name=name
)
def _batch_norm(inputs, name=None, trainable=True):
return flow.layers.batch_normalization(
inputs=inputs,
axis=1,
momentum=0.9, # 97,
epsilon=1.001e-5,
center=True,
scale=True,
trainable=trainable,
training=trainable,
name=name,
)
def conv2d_affine(input, name, filters, kernel_size, strides, activation=None):
# input data_format must be NCHW, cannot check now
padding = "SAME" if strides > 1 or kernel_size > 1 else "VALID"
output = _conv2d(
name, input, filters, kernel_size, strides, padding, trainable=g_trainable
)
output = _batch_norm(output, name + "_bn", trainable=g_trainable)
if activation == "Relu":
output = flow.math.relu(output)
return output
def bottleneck_transformation(input, block_name, filters, filters_inner, strides):
a = conv2d_affine(
input, block_name + "_branch2a", filters_inner, 1, 1, activation="Relu",
)
b = conv2d_affine(
a, block_name + "_branch2b", filters_inner, 3, strides, activation="Relu",
)
c = conv2d_affine(b, block_name + "_branch2c", filters, 1, 1)
return c
def residual_block(input, block_name, filters, filters_inner, strides_init):
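    # project the shortcut with a 1x1 conv whenever the spatial size changes
    # (stride > 1) or at the first block of the first stage ("res2_0"), where
    # the channel count changes; otherwise use the identity shortcut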
if strides_init != 1 or block_name == "res2_0":
shortcut = conv2d_affine(
input, block_name + "_branch1", filters, 1, strides_init
)
else:
shortcut = input
bottleneck = bottleneck_transformation(
input, block_name, filters, filters_inner, strides_init
)
return flow.math.relu(bottleneck + shortcut)
def residual_stage(input, stage_name, counts, filters, filters_inner, stride_init=2):
output = input
for i in range(counts):
block_name = "%s_%d" % (stage_name, i)
output = residual_block(
output, block_name, filters, filters_inner, stride_init if i == 0 else 1,
)
return output
def resnet_conv_x_body(input, on_stage_end=lambda x: x):
output = input
for i, (counts, filters, filters_inner) in enumerate(
zip(BLOCK_COUNTS, BLOCK_FILTERS, BLOCK_FILTERS_INNER)
):
stage_name = "res%d" % (i + 2)
output = residual_stage(
output, stage_name, counts, filters, filters_inner, 1 if i == 0 else 2,
)
on_stage_end(output)
return output
def resnet_stem(input):
conv1 = _conv2d("conv1", input, 1, 1, 2)
tmp = _batch_norm(conv1, "conv1_bn", trainable=g_trainable)
conv1_bn = flow.math.relu(tmp)
pool1 = flow.nn.max_pool2d(
conv1_bn, ksize=3, strides=2, padding="VALID", data_format="NCHW", name="pool1",
)
return pool1
def resnet50(images, trainable=True, need_transpose=False):
# note: images.shape = (N C H W) in cc's new dataloader, transpose is not needed anymore
if need_transpose:
images = flow.transpose(images, name="transpose", perm=[0, 3, 1, 2])
with flow.scope.namespace("Resnet"):
stem = resnet_stem(images)
body = resnet_conv_x_body(stem, lambda x: x)
pool5 = flow.nn.avg_pool2d(
body, ksize=7, strides=1, padding="VALID", data_format="NCHW", name="pool5",
)
fc1001 = flow.layers.dense(
flow.reshape(pool5, (pool5.shape[0], -1)),
units=1000,
use_bias=True,
kernel_initializer=flow.variance_scaling_initializer(
2, "fan_in", "random_normal"
),
# kernel_initializer=flow.xavier_uniform_initializer(),
bias_initializer=flow.random_uniform_initializer(),
kernel_regularizer=flow.regularizers.l2(1.0 / 32768),
trainable=trainable,
name="fc1001",
)
return fc1001
def test_resnet50():
@flow.global_function()
def InferenceNet(images: tp.Numpy.Placeholder((1, 3, 224, 224))):
logits = resnet50(images)
predictions = flow.nn.softmax(logits)
return predictions
convert_to_onnx_and_check(InferenceNet, flow_weight_dir=None, onnx_model_path="/tmp")
| [
"[email protected]"
] | |
f7af2abc696098cdcf7342806fe9a1fca0e927f0 | 9a7a7e43902b6bc5a9e96933da8814acf3f318a3 | /Python3接口测试/Demo/requests_basic_demo.py | eae7986e0055a59d4e3ea0bcc34b73ba0340f15e | [] | no_license | liuchangfu/python_script | 9684d512f4bb09f37585e3fc56329be2ea8d6eb5 | 73f0e71364fc2271626e0deff54b4079ad92390c | refs/heads/master | 2020-03-15T16:05:47.624545 | 2018-06-08T10:44:17 | 2018-06-08T10:44:17 | 132,226,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 915 | py | #-*- coding:utf-8 -*-
__author__ = "苦叶子"
# import the required module
import requests
if __name__ == "__main__":
    print("开源优测 - basic requests example")
    # send an HTTP GET request to fetch the GitHub API index
    r = requests.get("https://api.github.com")
    # response status code
    status_code = r.status_code
    # full response headers
    headers = r.headers
    # value of the content-type response header
    content_type = r.headers["content-type"]
    # encoding of the response content
    code = r.encoding
    # response body as text
    text = r.text
    # if the response is in JSON format, we can get its parsed JSON content
    json_data = r.json()
    # print all of the values obtained above
    print("status code: ", status_code)
    print("response headers: ", headers)
    print("content-type: ", content_type)
    print("encoding:", code)
    print("text content: ", text)
    print("json content: ", json_data)
| [
"[email protected]"
] | |
b574c638e632c2c9acb969482d20a6e3aff555da | f3b233e5053e28fa95c549017bd75a30456eb50c | /p38a_input/L3FN/3FN-2S_MD_NVT_rerun/set_1ns_equi_1.py | b69dbe2d0723c7e0f6d2cdc6d7d1ae094c03f431 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 928 | py | import os
dir = '/mnt/scratch/songlin3/run/p38a/L3FN/MD_NVT_rerun/ti_one-step/3FN_2S/'
filesdir = dir + 'files/'
temp_equiin = filesdir + 'temp_equi_1.in'
temp_pbs = filesdir + 'temp_1ns_equi_1.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
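# note: these 12 lambda windows appear to match the standard 12-point
# Gaussian quadrature abscissas on [0, 1], as commonly used for AMBER
# thermodynamic integration (an observation, not stated in the original)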
for j in lambd:
os.system("rm -r %6.5f" %(j))
os.system("mkdir %6.5f" %(j))
os.chdir("%6.5f" %(j))
os.system("rm *")
workdir = dir + "%6.5f" %(j) + '/'
#equiin
eqin = workdir + "%6.5f_equi_1.in" %(j)
os.system("cp %s %s" %(temp_equiin, eqin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, eqin))
#PBS
pbs = workdir + "%6.5f_1ns_equi_1.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#top
os.system("cp ../3FN-2S_merged.prmtop .")
os.system("cp ../0.5_equi_0.rst .")
#submit pbs
os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"[email protected]"
] | |
7fe2b984bb64556c73259340aa07d9b479af10c0 | 781e2692049e87a4256320c76e82a19be257a05d | /assignments/python/wc/src/475.py | 7044ba9d04fd4df419719828541451ec5195f793 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 133 | py | def word_count(phrase):
words = {}
for word in phrase.split():
words[word] = words.get(word, 0) + 1
return words
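
# Example (illustrative):
#   word_count("olly olly in come free")
#   -> {'olly': 2, 'in': 1, 'come': 1, 'free': 1}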
| [
"[email protected]"
] | |
01cf0d870aefe802fe4b97ed4766e1610c28530b | 75dcb56e318688499bdab789262839e7f58bd4f6 | /_algorithms_challenges/leetcode/LeetcodePythonProject/leetcode_0651_0700/LeetCode668_KthSmallestNumberInMultiplicationTable.py | 3d0c274f6424ca868adade8603128f21123179a1 | [] | no_license | syurskyi/Algorithms_and_Data_Structure | 9a1f358577e51e89c862d0f93f373b7f20ddd261 | 929dde1723fb2f54870c8a9badc80fc23e8400d3 | refs/heads/master | 2023-02-22T17:55:55.453535 | 2022-12-23T03:15:00 | 2022-12-23T03:15:00 | 226,243,987 | 4 | 1 | null | 2023-02-07T21:01:45 | 2019-12-06T04:14:10 | Jupyter Notebook | UTF-8 | Python | false | false | 1,148 | py | '''
Created on Oct 11, 2017
@author: MT
'''
class Solution(object):
def findKthNumber(self, m, n, k):
"""
:type m: int
:type n: int
:type k: int
:rtype: int
"""
low, high = 1, m*n+1
while low < high:
mid = (low+high)//2
c = self.count(mid, m, n)
if c >= k:
high = mid
else:
low = mid+1
return high
def count(self, val, m, n):
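        # row i (1-indexed) holds i, 2i, ..., n*i, so it contributes
        # min(val // i, n) entries that are <= val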
count = 0
for i in range(1, m+1):
tmp = min(val//i, n)
count += tmp
return count
def test(self):
testCases = [
[
3,
3,
5,
],
[
2,
3,
6,
],
]
for m, n, k in testCases:
print('m: %s' % m)
print('n: %s' % n)
print('k: %s' % k)
result = self.findKthNumber(m, n, k)
print('result: %s' % result)
print('-='*30+'-')
if __name__ == '__main__':
Solution().test()
| [
"[email protected]"
] | |
9c7b59a1671696fa7b1c125de069f0b0d8bdb923 | feed4c22eae892271e29a401c0527bf440c6ecf1 | /models.py | 92952e4396c37bba20c5fa244517538d72c04de5 | [
"Apache-2.0"
] | permissive | XrosLiang/Object_Detection_Tracking | 41cda98fba4f3ff1dc83d31c9f885590c044ea7c | 81bf17483211ba807133f097fc4d662cd9aab7d4 | refs/heads/master | 2023-01-05T06:46:21.040640 | 2020-11-03T14:17:32 | 2020-11-03T14:17:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74,348 | py | # coding=utf-8
"""model graph.
"""
import cv2
import json
import math
import itertools
import random
import sys
import os
import tensorflow as tf
import numpy as np
from PIL import Image
from utils import Dataset
from utils import get_all_anchors
from utils import draw_boxes
from utils import box_wh_to_x1x2
from utils import get_op_tensor_name
#import tensorflow.contrib.slim as slim
from nn import pretrained_resnet_conv4
from nn import conv2d
from nn import deconv2d
from nn import resnet_conv5
from nn import dense
from nn import pairwise_iou
from nn import get_iou_callable
from nn import resizeImage
from nn import resnet_fpn_backbone
from nn import fpn_model
from nn import decode_bbox_target
from nn import generate_rpn_proposals
from nn import sample_fast_rcnn_targets
from nn import roi_align
from nn import encode_bbox_target
from nn import focal_loss
from nn import wd_cost
from nn import clip_boxes
from nn import person_object_relation
from nn import np_iou
# this is for ugly batch norm
from nn import is_training
from nn import add_wd
#from nn import get_so_labels
from nn import group_norm
from efficientdet_wrapper import EfficientDet
from efficientdet_wrapper import EfficientDet_frozen
# need this, otherwise there is no TRTEngineOp when loading a TRT graph
# (currently unused: TensorRT doesn't support FPN ops yet)
#import tensorflow.contrib.tensorrt as trt
# ------------------------------ multi gpu stuff
PS_OPS = [
"Variable", "VariableV2", "AutoReloadVariable", "MutableHashTable",
"MutableHashTableOfTensors", "MutableDenseHashTable"
]
# see https://github.com/tensorflow/tensorflow/issues/9517
def assign_to_device(compute_device, controller_device): # ps: parameter server
  """Returns a function to place variables on the controller (ps) device.
  Args:
    compute_device: Device for everything but variables.
    controller_device: Device to put the variables on. Example values are
    /GPU:0 and /CPU:0.
  If controller_device is not set then the variables will be placed on the
  default device.
  The best device for shared variables depends on the platform as well as the
  model. Start with CPU:0 and then test GPU:0 to see if there is an
  improvement.
  """
def _assign(op):
node_def = op if isinstance(op, tf.NodeDef) else op.node_def
if node_def.op in PS_OPS:
return controller_device
else:
return compute_device
return _assign
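
# Usage sketch (illustrative): build compute ops on the GPU while keeping
# variable ops on the CPU "parameter server":
#   with tf.device(assign_to_device("/gpu:0", "/cpu:0")):
#     ...  # ops whose types are in PS_OPS land on /cpu:0, the rest on /gpu:0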
#----------------------------------
# 05/2019, the code will still use other gpu even if we have set visible list;
# seems a v1.13 bug
# yes it is a v1.13 bug, something to do with XLA:
# https://github.com/horovod/horovod/issues/876
def get_model(config, gpuid=0, task=0, controller="/cpu:0"):
with tf.device(assign_to_device("/gpu:%s"%(gpuid), controller)):
# load from frozen model
if config.is_load_from_pb:
if config.is_efficientdet:
model = EfficientDet_frozen(config, config.load_from, gpuid)
else:
model = Mask_RCNN_FPN_frozen(config.load_from, gpuid,
add_mask=config.add_mask)
else:
with tf.variable_scope(tf.get_variable_scope(), reuse=tf.AUTO_REUSE):
#tf.get_variable_scope().reuse_variables()
if config.is_efficientdet:
model = EfficientDet(config)
else:
model = Mask_RCNN_FPN(config, gpuid=gpuid)
return model
def get_model_feat(config, gpuid=0, task=0, controller="/cpu:0"):
# task is not used
#with tf.device("/gpu:%s"%gpuid):
with tf.device(assign_to_device("/gpu:%s"%(gpuid), controller)):
with tf.variable_scope(tf.get_variable_scope(), reuse=tf.AUTO_REUSE):
#tf.get_variable_scope().reuse_variables()
model = RCNN_FPN_givenbox(config, gpuid=gpuid)
return model
# updated 05/29, pack model
# simple tf frozen graph or TensorRT optimized model
def pack(config):
# the graph var names to be saved
vars_ = [
"final_boxes",
"final_labels",
"final_probs",
"fpn_box_feat"]
if config.add_mask:
vars_ = [
"final_boxes",
"final_labels",
"final_probs",
"final_masks",
"fpn_box_feat"]
model = get_model(config)
tfconfig = tf.ConfigProto(allow_soft_placement=True)
tfconfig.gpu_options.allow_growth = True
with tf.Session(config=tfconfig) as sess:
initialize(load=True, load_best=config.load_best, config=config, sess=sess)
# also save all the model config and note into the model
assert config.note != "", "please add some note for the model"
# remove some param?
config_json = vars(config)
for k in config_json:
if type(config_json[k]) == type(np.array([1])):
config_json[k] = config_json[k].tolist()
if type(config_json[k]) == type(np.array([1])[0]):
config_json[k] = int(config_json[k])
if type(config_json[k]) == type(np.array([1.0])[0]):
config_json[k] = float(config_json[k])
if type(config_json[k]) == type({}.keys()): # python3 dict_keys
config_json[k] = list(config_json[k])
with open(config.pack_modelconfig_path, "w") as f:
json.dump(config_json, f)
print("saving packed model...")
# put into one big file to save
input_graph_def = tf.get_default_graph().as_graph_def()
#print [n.name for n in input_graph_def.node]
# We use a built-in TF helper to export variables to constants
# output node names
output_graph_def = tf.graph_util.convert_variables_to_constants(
sess, # The session is used to retrieve the weights
input_graph_def, # The graph_def is used to retrieve the nodes
vars_,
)
output_graph = config.pack_model_path
# Finally we serialize and dump the output graph to the filesystem
with tf.gfile.GFile(output_graph, "wb") as f:
f.write(output_graph_def.SerializeToString())
print("%d ops in the final graph." % len(output_graph_def.node))
print("model saved in %s, config record is in %s" % (
config.pack_model_path, config.pack_modelconfig_path))
# load the weights at init time
# this class has the same interface as Mask_RCNN_FPN
class Mask_RCNN_FPN_frozen():
def __init__(self, modelpath, gpuid, add_mask=False):
self.graph = tf.get_default_graph()
# save path is one.pb file
with tf.gfile.GFile(modelpath, "rb") as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
#print [n.name for n in graph_def.node]
# need this to load different stuff for different gpu
self.var_prefix = "model_%s" % gpuid
tf.import_graph_def(
graph_def,
name=self.var_prefix,
return_elements=None
)
# input place holders
self.image = self.graph.get_tensor_by_name("%s/image:0" % self.var_prefix)
self.final_boxes = self.graph.get_tensor_by_name(
"%s/final_boxes:0" % self.var_prefix)
self.final_labels = self.graph.get_tensor_by_name(
"%s/final_labels:0" % self.var_prefix)
self.final_probs = self.graph.get_tensor_by_name(
"%s/final_probs:0" % self.var_prefix)
if add_mask:
self.final_masks = self.graph.get_tensor_by_name(
"%s/final_masks:0" % self.var_prefix)
self.fpn_box_feat = self.graph.get_tensor_by_name(
"%s/fpn_box_feat:0" % self.var_prefix)
def get_feed_dict_forward(self, imgdata):
feed_dict = {}
feed_dict[self.image] = imgdata
return feed_dict
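
# Minimal usage sketch for the frozen model (model path and session setup
# are illustrative assumptions):
#   model = Mask_RCNN_FPN_frozen("packed_model.pb", gpuid=0, add_mask=False)
#   with tf.Session() as sess:
#     boxes, labels, probs = sess.run(
#         [model.final_boxes, model.final_labels, model.final_probs],
#         feed_dict=model.get_feed_dict_forward(imgdata))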
class Mask_RCNN_FPN():
def __init__(self, config, gpuid=0):
self.gpuid = gpuid
# for batch_norm
global is_training
is_training = config.is_train # change this before building model
self.config = config
self.num_class = config.num_class
self.global_step = tf.get_variable(
"global_step", shape=[], dtype="int32",
initializer=tf.constant_initializer(0), trainable=False)
# current model get one image at a time
self.image = tf.placeholder(tf.float32, [None, None, 3], name="image")
if not config.is_pack_model:
self.is_train = tf.placeholder("bool", [], name="is_train")
# for training
self.anchor_labels = []
self.anchor_boxes = []
num_anchors = len(config.anchor_ratios)
for k in range(len(config.anchor_strides)):
self.anchor_labels.append(
tf.placeholder(tf.int32, [None, None, num_anchors],
name="anchor_labels_lvl%s" % (k+2)))
self.anchor_boxes.append(
tf.placeholder(tf.float32, [None, None, num_anchors, 4],
name="anchor_boxes_lvl%s" % (k+2)))
self.gt_boxes = tf.placeholder(tf.float32, [None, 4], name="gt_boxes")
self.gt_labels = tf.placeholder(tf.int64, [None, ], name="gt_labels")
self.so_gt_boxes = []
self.so_gt_labels = []
for i in range(len(config.small_objects)):
self.so_gt_boxes.append(
tf.placeholder(tf.float32, [None, 4], name="so_gt_boxes_c%s" % (i+1)))
self.so_gt_labels.append(
tf.placeholder(tf.int64, [None,], name="so_gt_labels_c%s" % (i+1)))
# H,W,v -> {0,1}
self.gt_mask = tf.placeholder(tf.uint8, [None, None, None], name="gt_masks")
# the following will be added in the build_forward and loss
self.logits = None
self.yp = None
self.loss = None
self.build_preprocess()
self.build_forward()
# get feature map anchor and preprocess image
def build_preprocess(self):
config = self.config
image = self.image
# get feature map anchors first
# slower if put on cpu # 1.5it/s vs 1.2it/s
self.multilevel_anchors = []
with tf.name_scope("fpn_anchors"):#,tf.device("/cpu:0"):
#fm_h,fm_w = tf.shape(image)[0] // config.anchor_stride,tf.shape(image)[1]
#// config.anchor_stride
# all possible anchor box coordinates for a given max_size image,
# so for a 1920 x 1920 image, 1920/16 = 120, so (120,120,NA,4) boxes, NA is
# scale*ratio boxes
self.multilevel_anchors = self.get_all_anchors_fpn()
bgr = True # cv2 load image is bgr
p_image = tf.expand_dims(image, 0) # [1,H,W,C]
with tf.name_scope("image_preprocess"): # tf.device("/cpu:0"):
if p_image.dtype.base_dtype != tf.float32:
p_image = tf.cast(p_image, tf.float32)
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
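# mean/std are the standard ImageNet per-channel RGB statistics; they are
# reversed below when the input was loaded as BGR by cv2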
p_image = p_image * (1.0/255)
if bgr:
mean = mean[::-1]
std = std[::-1]
image_mean = tf.constant(mean, dtype=tf.float32)
image_std = tf.constant(std, dtype=tf.float32)
p_image = (p_image - image_mean) / image_std
p_image = tf.transpose(p_image, [0, 3, 1, 2])
self.p_image = p_image
def get_all_anchors_fpn(self):
config = self.config
anchors = []
assert len(config.anchor_strides) == len(config.anchor_sizes)
for stride, size in zip(config.anchor_strides, config.anchor_sizes):
anchors_np = get_all_anchors(
stride=stride, sizes=[size], ratios=config.anchor_ratios,
max_size=config.max_size)
anchors.append(anchors_np)
return anchors
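# For example, assuming the common FPN setting of strides (4, 8, 16, 32, 64)
# with one size per stride and 3 aspect ratios (an assumption -- the actual
# values come from config), a max_size of 1920 gives a P2 anchor field of
# (1920/4) x (1920/4) x 3 = 480 x 480 x 3 = 691,200 candidate boxes.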
# make the numpy anchor match to the feature shape
def slice_feature_and_anchors(self, image_shape2d, p23456, anchors):
# anchors is the numpy anchors for different levels
config = self.config
# the anchor labels and boxes are grouped into
gt_anchor_labels = self.anchor_labels
gt_anchor_boxes = self.anchor_boxes
self.sliced_anchor_labels = []
self.sliced_anchor_boxes = []
for i, stride in enumerate(config.anchor_strides):
with tf.name_scope("FPN_slice_lvl%s" % (i)):
if i < 3:
# Images are padded for p5, which are too large for p2-p4.
pi = p23456[i]
target_shape = tf.to_int32(tf.ceil(tf.to_float(image_shape2d) * \
(1.0 / stride)))
p23456[i] = tf.slice(
pi, [0, 0, 0, 0], tf.concat([[-1, -1], target_shape], axis=0))
p23456[i].set_shape([1, pi.shape[1], None, None])
shape2d = tf.shape(p23456[i])[2:] # h,W
slice3d = tf.concat([shape2d, [-1]], axis=0)
slice4d = tf.concat([shape2d, [-1, -1]], axis=0)
anchors[i] = tf.slice(anchors[i], [0, 0, 0, 0], slice4d)
self.sliced_anchor_labels.append(
tf.slice(gt_anchor_labels[i], [0, 0, 0], slice3d))
self.sliced_anchor_boxes.append(tf.slice(
gt_anchor_boxes[i], [0, 0, 0, 0], slice4d))
def generate_fpn_proposals(self, multilevel_anchors, multilevel_label_logits,
multilevel_box_logits, image_shape2d):
config = self.config
num_lvl = len(config.anchor_strides)
assert num_lvl == len(multilevel_anchors)
assert num_lvl == len(multilevel_box_logits)
assert num_lvl == len(multilevel_label_logits)
all_boxes = []
all_scores = []
fpn_nms_topk = config.rpn_train_post_nms_topk \
if config.is_train else config.rpn_test_post_nms_topk
for lvl in range(num_lvl):
with tf.name_scope("Lvl%s"%(lvl+2)):
anchors = multilevel_anchors[lvl]
pred_boxes_decoded = decode_bbox_target(
multilevel_box_logits[lvl], anchors,
decode_clip=config.bbox_decode_clip)
this_fpn_nms_topk = fpn_nms_topk
proposal_boxes, proposal_scores = generate_rpn_proposals(
tf.reshape(pred_boxes_decoded, [-1, 4]),
tf.reshape(multilevel_label_logits[lvl], [-1]), image_shape2d,
config, pre_nms_topk=this_fpn_nms_topk)
all_boxes.append(proposal_boxes)
all_scores.append(proposal_scores)
proposal_boxes = tf.concat(all_boxes, axis=0) # nx4
proposal_scores = tf.concat(all_scores, axis=0) # n
proposal_topk = tf.minimum(tf.size(proposal_scores), fpn_nms_topk)
proposal_scores, topk_indices = tf.nn.top_k(proposal_scores,
k=proposal_topk, sorted=False)
proposal_boxes = tf.gather(proposal_boxes, topk_indices)
return tf.stop_gradient(proposal_boxes, name="boxes"), \
tf.stop_gradient(proposal_scores, name="scores")
# based on box sizes
def fpn_map_rois_to_levels(self, boxes):
def tf_area(boxes):
x_min, y_min, x_max, y_max = tf.split(boxes, 4, axis=1)
return tf.squeeze((y_max - y_min) * (x_max - x_min), [1])
sqrtarea = tf.sqrt(tf_area(boxes))
level = tf.to_int32(tf.floor(4 + tf.log(sqrtarea * (1. / 224) + 1e-6) * \
(1.0 / np.log(2))))
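# Worked example of the level formula: a box with sqrt(area) = 224 maps to
# floor(4 + log2(224/224)) = 4 (P4); 112 gives level 3, 448 gives level 5,
# and 56 gives level 2. The 224 constant is the canonical ImageNet crop
# size used by the FPN paper.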
# RoI levels range from 2~5 (not 6)
level_ids = [
tf.where(level <= 2),
tf.where(tf.equal(level, 3)),# problems with ==?
tf.where(tf.equal(level, 4)),
tf.where(level >= 5)]
level_ids = [tf.reshape(x, [-1], name="roi_level%s_id" % (i + 2))
for i, x in enumerate(level_ids)]
#num_in_levels = [tf.size(x, name="num_roi_level%s" % (i + 2))
# for i, x in enumerate(level_ids)]
level_boxes = [tf.gather(boxes, ids) for ids in level_ids]
return level_ids, level_boxes
# output_shape is the output feature HxW
def multilevel_roi_align(self, features, rcnn_boxes, output_shape):
config = self.config
assert len(features) == 4
# Reassign rcnn_boxes to levels # based on box area size
level_ids, level_boxes = self.fpn_map_rois_to_levels(rcnn_boxes)
all_rois = []
# Crop patches from corresponding levels
for i, boxes, featuremap in zip(itertools.count(), level_boxes, features):
with tf.name_scope("roi_level%s" % (i + 2)):
boxes_on_featuremap = boxes * (1.0 / config.anchor_strides[i])
all_rois.append(
roi_align(featuremap, boxes_on_featuremap, output_shape))
# this can fail if using TF<=1.8 with MKL build
all_rois = tf.concat(all_rois, axis=0) # NCHW
# Unshuffle to the original order, to match the original samples
level_id_perm = tf.concat(level_ids, axis=0) # A permutation of 1~N
level_id_invert_perm = tf.invert_permutation(level_id_perm)
all_rois = tf.gather(all_rois, level_id_invert_perm)
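# e.g. if level_id_perm is [2, 0, 1] (row 0 of all_rois came from box 2),
# invert_permutation yields [1, 2, 0] and the gather restores rows to the
# original box order 0, 1, 2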
return all_rois
def build_forward(self):
config = self.config
image = self.p_image # [1, C, H, W]
image_shape2d = tf.shape(image)[2:]
# a list of numpy anchors, not sliced
multilevel_anchors = self.multilevel_anchors
# the feature map shared by RPN and fast RCNN
# TODO: fix the batch norm mess
# TODO: fix global param like data_format and
# [1,C,FS,FS]
c2345 = resnet_fpn_backbone(
image, config.resnet_num_block, use_gn=config.use_gn,
resolution_requirement=config.fpn_resolution_requirement,
use_dilations=config.use_dilations,
use_deformable=config.use_deformable, tf_pad_reverse=True,
freeze=config.freeze, use_basic_block=config.use_basic_block,
use_se=config.use_se, use_resnext=config.use_resnext)
# include lateral 1x1 conv and final 3x3 conv
# -> [7, 7, 256]
p23456 = fpn_model(c2345, num_channel=config.fpn_num_channel,
use_gn=config.use_gn, scope="fpn")
if config.freeze_rpn or config.freeze_fastrcnn:
p23456 = [tf.stop_gradient(p) for p in p23456]
# [1, H, W, channel]
self.fpn_feature = tf.image.resize_images(tf.transpose(
p23456[3], perm=[0, 2, 3, 1]), (7, 7)) # p5 # default bilinear
if config.no_obj_detect: # pair with extract_feat, so only extract feature
print("no object detect branch..")
return True
# given the numpy anchor for each stride,
# slice the anchor box and label against the feature map size on each
#level. Again?
self.slice_feature_and_anchors(image_shape2d, p23456, multilevel_anchors)
# now multilevel_anchors are sliced and tf type
# added sliced gt anchor labels and boxes
# so we have each fpn level's anchor boxes, and the ground truth anchor
# boxes & labels if training
# given [1,256,FS,FS] feature, each level got len(anchor_ratios) anchor
# outputs
rpn_outputs = [
self.rpn_head(pi, config.fpn_num_channel, len(config.anchor_ratios),
data_format="NCHW", scope="rpn") for pi in p23456]
multilevel_label_logits = [k[0] for k in rpn_outputs]
multilevel_box_logits = [k[1] for k in rpn_outputs]
if config.freeze_rpn:
multilevel_label_logits = [tf.stop_gradient(o)
for o in multilevel_label_logits]
multilevel_box_logits = [tf.stop_gradient(o)
for o in multilevel_box_logits]
# each H,W location has a box regression and classification score,
# here combine all positive boxes using NMS
# [N,4]/[N] , N is the number of proposal boxes
proposal_boxes, proposal_scores = self.generate_fpn_proposals(
multilevel_anchors, multilevel_label_logits, multilevel_box_logits,
image_shape2d)
# for getting RPN performance
# K depends on rpn_test_post_nms_topk during testing
# K = 1000
self.proposal_boxes = proposal_boxes # [K, 4]
self.proposal_scores = proposal_scores # [K]
if config.is_train:
gt_boxes = self.gt_boxes
gt_labels = self.gt_labels
# for training, use gt_box and some proposal box as pos and neg
# rcnn_sampled_boxes [N_FG+N_NEG,4]
# fg_inds_wrt_gt -> [N_FG], each is index of gt_boxes
rcnn_boxes, rcnn_labels, fg_inds_wrt_gt = sample_fast_rcnn_targets(
proposal_boxes, gt_boxes, gt_labels, config=config)
else:
rcnn_boxes = proposal_boxes
# NxCx7x7 # (?, 256, 7, 7)
roi_feature_fastrcnn = self.multilevel_roi_align(p23456[:4], rcnn_boxes, 7)
if config.use_frcnn_class_agnostic:
# (N,num_class), (N, 1, 4)
fastrcnn_label_logits, fastrcnn_box_logits = \
self.fastrcnn_2fc_head_class_agnostic(
roi_feature_fastrcnn, config.num_class,
boxes=rcnn_boxes, scope="fastrcnn")
else:
# (N,num_class), (N, num_class - 1, 4)
fastrcnn_label_logits, fastrcnn_box_logits = self.fastrcnn_2fc_head(
roi_feature_fastrcnn, config.num_class,
boxes=rcnn_boxes, scope="fastrcnn")
if config.freeze_fastrcnn:
fastrcnn_label_logits, fastrcnn_box_logits = tf.stop_gradient(
fastrcnn_label_logits), tf.stop_gradient(fastrcnn_box_logits)
if config.use_small_object_head:
# 1. get all the actual boxes coordinates
anchors = tf.tile(tf.expand_dims(rcnn_boxes, 1),
[1, config.num_class-1, 1])
boxes = decode_bbox_target(fastrcnn_box_logits / \
tf.constant(config.fastrcnn_bbox_reg_weights, dtype=tf.float32),
anchors)
probs = tf.nn.softmax(fastrcnn_label_logits)
boxes = tf.transpose(boxes, [1, 0, 2]) # [num_class-1, N, 4]
probs = tf.transpose(probs[:, 1:], [1, 0]) # [num_class-1, N]
small_object_class_ids = [config.classname2id[name] - 1
for name in config.small_objects]
# C is the number of small object class
# [C, N, 4], [C, N]
so_boxes, so_scores = tf.gather(boxes, small_object_class_ids), \
tf.gather(probs, small_object_class_ids)
# 1. we do NMS for each class to get topk
# for each category get the top K
# [C, K, 4] / [C, K]
so_boxes, so_scores = tf.map_fn(
self.nms_return_boxes, (so_scores, so_boxes),
dtype=(tf.float32, tf.float32), parallel_iterations=10)
self.so_boxes = so_boxes
so_boxes = tf.reshape(so_boxes, [-1, 4]) # [C*K, 4]
so_scores = tf.reshape(so_scores, [-1]) # [C*K]
# [C*K, 256, 7, 7]
so_feature = self.multilevel_roi_align(p23456[:4], so_boxes, 7)
# share the fc part with fast rcnn head
with tf.variable_scope("fastrcnn", reuse=tf.AUTO_REUSE):
dim = config.fpn_frcnn_fc_head_dim # 1024
initializer = tf.variance_scaling_initializer()
# sharing features
# [C*K, dim]
hidden = dense(so_feature, dim, W_init=initializer,
activation=tf.nn.relu, scope="fc6")
hidden = dense(hidden, dim, W_init=initializer,
activation=tf.nn.relu, scope="fc7")
# [C, K, dim]
hidden = tf.reshape(hidden, [len(config.small_objects), -1, dim])
if config.freeze_fastrcnn:
hidden = tf.stop_gradient(hidden)
if config.use_so_association:
ref_class_id = config.classname2id["Person"] - 1
# [N, 4], [N]
ref_boxes, ref_scores = boxes[ref_class_id], probs[ref_class_id]
# NMS to get a few person boxes
ref_topk = config.so_person_topk # 10
ref_selection = tf.image.non_max_suppression(
ref_boxes, ref_scores, max_output_size=ref_topk,
iou_threshold=config.fastrcnn_nms_iou_thres)
# [Rr, 4]
ref_boxes = tf.gather(ref_boxes, ref_selection)
ref_scores = tf.gather(ref_scores, ref_selection)
ref_feat = self.multilevel_roi_align(p23456[:4], ref_boxes, 7)
# share the same fc
ref_feat = dense(ref_feat, dim, W_init=initializer,
activation=tf.nn.relu, scope="fc6")
ref_feat = dense(ref_feat, dim, W_init=initializer,
activation=tf.nn.relu, scope="fc7")
if config.freeze_fastrcnn:
ref_feat = tf.stop_gradient(ref_feat)
# new variable for small object
with tf.variable_scope("small_objects"):
so_label_logits = [] # each class a head
for i in range(len(config.small_objects)):
if config.use_so_association:
asso_hidden = hidden[i] + person_object_relation(
hidden[i], self.so_boxes[i], ref_boxes, ref_feat,
group=16, geo_feat_dim=64, scope="person_object_relation")
so_label_logits.append(dense(
asso_hidden, 2,
W_init=tf.random_normal_initializer(stddev=0.01),
scope="small_object_classification_c%s" % (i+1)))
else:
so_label_logits.append(dense(
hidden[i], 2,
W_init=tf.random_normal_initializer(stddev=0.01),
scope="small_object_classification_c%s"%(i+1)))
add_wd(0.0001)
# [C, K, 2]
so_label_logits = tf.stack(so_label_logits, axis=0)
if config.is_train:
rpn_label_loss, rpn_box_loss = self.multilevel_rpn_losses(
multilevel_anchors, multilevel_label_logits, multilevel_box_logits)
# rcnn_labels [N_FG + N_NEG] <- index in [N_FG]
fg_inds_wrt_sample = tf.reshape(tf.where(rcnn_labels > 0), [-1])
# for training, maskRCNN only apply on positive box
# [N_FG, num_class, 14, 14]
# [N_FG, 4]
# sampled boxes are at least iou with a gt_boxes
fg_sampled_boxes = tf.gather(rcnn_boxes, fg_inds_wrt_sample)
fg_fastrcnn_box_logits = tf.gather(fastrcnn_box_logits,
fg_inds_wrt_sample)
# [N_FG, 4] # each proposal box assigned gt box, may repeat
matched_gt_boxes = tf.gather(gt_boxes, fg_inds_wrt_gt)
# fastrcnn also need to regress box (just the FG box)
encoded_boxes = encode_bbox_target(matched_gt_boxes, fg_sampled_boxes) * \
tf.constant(config.fastrcnn_bbox_reg_weights) # [10,10,5,5]?
# fastrcnn input is fg and bg proposal box, do classification to
# num_class(include bg) and then regress on fg boxes
# [N_FG+N_NEG,4] & [N_FG,4]
fastrcnn_label_loss, fastrcnn_box_loss = self.fastrcnn_losses(
rcnn_labels, fastrcnn_label_logits, encoded_boxes,
fg_fastrcnn_box_logits)
# ---------------------------------------------------------
# for debug
self.rpn_label_loss = rpn_label_loss
self.rpn_box_loss = rpn_box_loss
self.fastrcnn_label_loss = fastrcnn_label_loss
self.fastrcnn_box_loss = fastrcnn_box_loss
losses = [rpn_label_loss, rpn_box_loss, fastrcnn_label_loss,
fastrcnn_box_loss]
if config.use_small_object_head:
# assume we have the small gt boxes and labels
# so_boxes [C, K, 4]
# so_label_logits [C, K, 2]
# so_labels [C, K] # [0, 1]
so_labels = get_so_labels(self.so_boxes, self.so_gt_boxes,
self.so_gt_labels, config=config)
so_label_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=so_labels, logits=so_label_logits)
so_label_loss = tf.reduce_mean(so_label_loss, name="label_loss")
self.so_label_loss = so_label_loss
losses.append(so_label_loss)
# mask rcnn loss
if config.add_mask:
fg_inds_wrt_sample = tf.reshape(tf.where(rcnn_labels > 0), [-1])
fg_labels = tf.gather(rcnn_labels, fg_inds_wrt_sample)
# NxCx14x14
# only the fg boxes
roi_feature_fastrcnn = self.multilevel_roi_align(
p23456[:4], fg_sampled_boxes, 14)
mask_logits = self.maskrcnn_up4conv_head(
roi_feature_fastrcnn, config.num_class, scope="maskrcnn")
# [N_FG, H,W]
gt_mask = self.gt_mask
gt_mask_for_fg = tf.gather(gt_mask, fg_inds_wrt_gt)
# crop each fg box from its matched gt mask -> [N_FG, 28, 28]
target_masks_for_fg = crop_and_resize(
tf.expand_dims(gt_mask, 1),
fg_sampled_boxes,
fg_inds_wrt_gt, 28, pad_border=False) # fg x 1x28x28
target_masks_for_fg = tf.squeeze(target_masks_for_fg, 1)
mrcnn_loss = self.maskrcnn_loss(mask_logits, fg_labels,
target_masks_for_fg)
losses += [mrcnn_loss]
self.wd = None
if config.wd is not None:
wd = wd_cost(".*/W", config.wd, scope="wd_cost")
self.wd = wd
losses.append(wd)
self.loss = tf.add_n(losses, "total_loss")
# l2loss
else:
# inferencing
# K -> proposal box
# [K,num_class]
# image_shape2d, rcnn_boxes, fastrcnn_label_logits, fastrcnn_box_logits
# get the regressed actual boxes
if config.use_frcnn_class_agnostic:
# box regress logits [K, 1, 4], so we tile it to num_class-1 so
# the rest is the same
fastrcnn_box_logits = tf.tile(fastrcnn_box_logits,
[1, config.num_class - 1, 1])
num_class = config.num_class
# COCO has 81 classes, we only need a few
if config.use_partial_classes:
needed_object_classids = [config.classname2id[name]
for name in config.partial_classes]
needed_object_classids_minus_1 = [o - 1 for o in needed_object_classids]
# (N, num_class), (N, num_class - 1, 4)
# -> (num_class, N), (num_class - 1, N, 4)
label_logits_t = tf.transpose(fastrcnn_label_logits, [1, 0])
box_logits_t = tf.transpose(fastrcnn_box_logits, [1, 0, 2])
# [C + 1, N] # 1 is the BG class
partial_label_logits_t = tf.gather(label_logits_t,
[0] + needed_object_classids)
# [C, N, 4]
partial_box_logits_t = tf.gather(box_logits_t,
needed_object_classids_minus_1)
partial_label_logits = tf.transpose(partial_label_logits_t, [1, 0])
partial_box_logits = tf.transpose(partial_box_logits_t, [1, 0, 2])
fastrcnn_label_logits = partial_label_logits
fastrcnn_box_logits = partial_box_logits
num_class = len(needed_object_classids) + 1
# anchor box [K,4] -> [K, num_class - 1, 4] <-
# box regress logits [K, num_class-1, 4]
anchors = tf.tile(tf.expand_dims(rcnn_boxes, 1), [1, num_class-1, 1])
# [K, num_class-1, 4]/ [K, 1, 4]
decoded_boxes = decode_bbox_target(fastrcnn_box_logits / \
tf.constant(config.fastrcnn_bbox_reg_weights, dtype=tf.float32),
anchors)
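# the reg weights here are typically [10, 10, 5, 5] (an assumption; the
# actual values come from config, matching the training-side comment
# above): dx, dy logits are divided by 10 and dw, dh by 5 before decoding
# back to absolute coordinates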
decoded_boxes = clip_boxes(decoded_boxes, image_shape2d,
name="fastrcnn_all_boxes")
label_probs = tf.nn.softmax(fastrcnn_label_logits)
if config.use_small_object_head:
# so_label_logits: [C, N, 2]
"""
if config.replace_small_object:
# replace some of the scores
small_object_class_ids = [config.classname2id[name]
for name in config.small_objects]
# [N, num_class]
# put each label logit for each class then stack
new_label_logits = []
for classid in config.classid2name:
if classid in small_object_class_ids:
so_idx = small_object_class_ids.index(classid)
# 1 is the class score and 0 is score for BG
new_label_logits.append(so_label_logits[so_idx, :, 1])
else:
new_label_logits.append(fastrcnn_label_logits[:, classid])
fastrcnn_label_logits = tf.stack(new_label_logits, axis=1)
"""
# output the small object boxes separately
# K is result_per_im=100
# 1. so_label_logits is [C, K, 2]
# so_boxes [C, K, 4]
# reconstruct label logit to be [K, C+1]
new_label_logits = []
# BG is ignored anyway
new_label_logits.append(
tf.reduce_mean(so_label_logits[:, :, 0], axis=0)) # [K]
for i in range(len(config.small_objects)):
new_label_logits.append(so_label_logits[i, :, 1])
# [K, C+1]
so_label_logits = tf.stack(new_label_logits, axis=1)
# [K, C, 4]
so_boxes = tf.transpose(self.so_boxes, [1, 0, 2])
so_decoded_boxes = clip_boxes(
so_boxes, image_shape2d, name="so_all_boxes")
so_pred_indices, so_final_probs = self.fastrcnn_predictions(
so_decoded_boxes, so_label_logits,
no_score_filter=not config.use_so_score_thres)
so_final_boxes = tf.gather_nd(
so_decoded_boxes, so_pred_indices, name="so_final_boxes")
so_final_labels = tf.add(
so_pred_indices[:, 1], 1, name="so_final_labels")
# [R,4]
self.so_final_boxes = so_final_boxes
# [R]
self.so_final_labels = so_final_labels
self.so_final_probs = so_final_probs
if config.use_cpu_nms:
boxes = decoded_boxes
probs = label_probs
assert boxes.shape[1] == config.num_class - 1, \
(boxes.shape, config.num_class)
assert probs.shape[1] == config.num_class, \
(probs.shape[1], config.num_class)
# transpose to map_fn along each class
boxes = tf.transpose(boxes, [1, 0, 2]) # [num_class-1, K,4]
probs = tf.transpose(probs[:, 1:], [1, 0]) # [num_class-1, K]
self.final_boxes = boxes
self.final_probs = probs
# just kept for compatibility with the non-CPU NMS mode
self.final_labels = rcnn_boxes
return None # so no TF GPU NMS
# decoded boxes are [K, num_class-1, 4]: from each proposal box we
# generate all classes' boxes, with probs, then do NMS on these
# pred_indices: [R, 2], each entry is (#proposal [1-K],
# catid [0, num_class-1])
# final_probs [R]
# here do nms,
pred_indices, final_probs = self.fastrcnn_predictions(
decoded_boxes, label_probs)
# [R,4]
final_boxes = tf.gather_nd(
decoded_boxes, pred_indices, name="final_boxes")
# [R], each is a 1-based category id
final_labels = tf.add(pred_indices[:, 1], 1, name="final_labels")
if config.add_mask:
roi_feature_maskrcnn = self.multilevel_roi_align(
p23456[:4], final_boxes, 14)
# [R, num_class - 1, 14, 14]
mask_logits = self.maskrcnn_up4conv_head(
roi_feature_maskrcnn, config.num_class, scope="maskrcnn")
if config.use_partial_classes:
# need to select the classes as final_labels
mask_logits_t = tf.transpose(mask_logits, [1, 0, 2, 3])
# [C, R, 14, 14]
partial_mask_logits_t = tf.gather(
mask_logits_t, needed_object_classids)
# [R, C, 14, 14]
partial_mask_logits = tf.transpose(
partial_mask_logits_t, [1, 0, 2, 3])
indices = tf.stack(
[tf.range(tf.size(final_labels)), tf.to_int32(final_labels) - 1],
axis=1)
final_mask_logits = tf.gather_nd(mask_logits, indices)
final_masks = tf.sigmoid(final_mask_logits)
# [R,14,14]
self.final_masks = final_masks
# [R,4]
self.final_boxes = final_boxes
# [R]
self.final_labels = final_labels
# add a name so the frozen graph will have that name
self.final_probs = tf.identity(final_probs, name="final_probs")
# [R, 256, 7, 7]
fpn_box_feat = self.multilevel_roi_align(p23456[:4], final_boxes, 7)
self.fpn_box_feat = tf.identity(fpn_box_feat, name="fpn_box_feat")
# ----some model component
# feature map -> [1,1024,FS1,FS2] , FS1 = H/16.0, FS2 = W/16.0
# channel -> 1024
def rpn_head(self, featuremap, channel, num_anchors, data_format,
scope="rpn"):
with tf.variable_scope(scope):
# [1, channel, FS1, FS2] # channel = 1024
# conv0:W -> [3,3,1024,1024]
h = conv2d(
featuremap, channel, kernel=3, activation=tf.nn.relu,
data_format=data_format,
W_init=tf.random_normal_initializer(stddev=0.01), scope="conv0")
# h -> [1,1024(channel),FS1,FS2]
# 1x1 kernel conv to classification on each grid
# [1, 1024, FS1, FS2] -> # [1, num_anchors, FS1, FS2]
label_logits = conv2d(
h, num_anchors, 1, data_format=data_format,
W_init=tf.random_normal_initializer(stddev=0.01), scope="class")
# [1, 1024, FS1, FS2] -> # [1, 4 * num_anchors, FS1, FS2]
box_logits = conv2d(
h, 4*num_anchors, 1, data_format=data_format,
W_init=tf.random_normal_initializer(stddev=0.01), scope="box")
# [1, NA, FS1, FS2] -> [FS1, FS2, NA]
label_logits = tf.squeeze(tf.transpose(label_logits, [0, 2, 3, 1]), 0)
box_shape = tf.shape(box_logits)
box_logits = tf.transpose(box_logits, [0, 2, 3, 1]) # [1, FS1, FS2, NA*4]
# [FS1, FS2, NA, 4]
box_logits = tf.reshape(
box_logits, [box_shape[2], box_shape[3], num_anchors, 4])
return label_logits, box_logits
def small_object_classification_head(
self, feature, num_class, scope="small_object_classification"):
config = self.config
dim = config.fpn_frcnn_fc_head_dim # 1024
initializer = tf.variance_scaling_initializer()
with tf.variable_scope(scope):
hidden = dense(
feature, dim, W_init=initializer, activation=tf.nn.relu, scope="fc6")
hidden = dense(
hidden, dim, W_init=initializer, activation=tf.nn.relu, scope="fc7")
classification = dense(
hidden, num_class, W_init=tf.random_normal_initializer(stddev=0.01),
scope="class") # [K,num_class]
return classification
# feature: [K,C,7,7] # feature for each roi
def fastrcnn_2fc_head(
self, feature, num_class=None, boxes=None, scope="fastrcnn_head"):
config = self.config
dim = config.fpn_frcnn_fc_head_dim # 1024
initializer = tf.variance_scaling_initializer()
with tf.variable_scope(scope):
if config.use_conv_frcnn_head:
hidden = self.conv_frcnn_head(
feature, dim, config.conv_frcnn_head_dim, num_conv=4,
use_gn=config.use_gn)
else:
# dense will reshape to [k,C*7*7] first
if config.add_relation_nn:
hidden = dense(
feature, dim, W_init=initializer, activation=tf.nn.relu,
scope="fc6")
hidden = hidden + relation_network(
hidden, boxes, group=16, geo_feat_dim=64, scope="RM_r1")
hidden = dense(
hidden, dim, W_init=initializer, activation=tf.nn.relu,
scope="fc7")
hidden = hidden + relation_network(
hidden, boxes, group=16, geo_feat_dim=64, scope="RM_r2")
else:
hidden = dense(
feature, dim, W_init=initializer, activation=tf.nn.relu,
scope="fc6")
hidden = dense(
hidden, dim, W_init=initializer, activation=tf.nn.relu,
scope="fc7")
# hidden -> [K, dim]
if config.use_att_frcnn_head:
# changes: 1. conv2d kernel size; 2. softmax/sigmoid;
# 3. sum or gating?; 4. convert to dim first then attention?;
# 5. attend then two fc, no use of previous hidden
# [K, 7, 7, C]
feature = tf.transpose(feature, perm=[0, 2, 3, 1])
H, W, feat_dim = feature.get_shape()[1:]
# 1. simple conv attention
# [K, 7, 7, 1]
attention = conv2d(
feature, 1, kernel=3, padding="SAME", stride=1,
activation=tf.nn.softmax, use_bias=True, data_format="NHWC",
W_init=initializer, scope="attention")
# [K,7*7, C]
feature = tf.reshape(feature, [-1, H*W, feat_dim])
attention = tf.reshape(attention, [-1, H*W, 1])
# [K, C]
attended = tf.reduce_sum(feature * attention, 1)
# match the dimension
attended_feat = dense(
attended, dim, W_init=initializer, activation=tf.nn.relu,
scope="att_trans")
# sum with original feature
hidden = hidden + attended_feat
with tf.variable_scope("outputs"):
classification = dense(
hidden, num_class, W_init=tf.random_normal_initializer(stddev=0.01),
scope="class") # [K,num_class]
box_regression = dense(
hidden, num_class*4,
W_init=tf.random_normal_initializer(stddev=0.001),
scope="box")
box_regression = tf.reshape(box_regression, (-1, num_class, 4))
box_regression = box_regression[:, 1:, :]
box_regression.set_shape([None, num_class-1, 4])
return classification, box_regression
def conv_frcnn_head(self, feature, fc_dim, conv_dim, num_conv, use_gn=False):
l = feature
for k in range(num_conv):
l = conv2d(
l, conv_dim, kernel=3, activation=tf.nn.relu,
data_format="NCHW",
W_init=tf.variance_scaling_initializer(
scale=2.0, mode="fan_out", distribution="truncated_normal"),
scope="conv%s" % (k))
if use_gn:
l = group_norm(l, scope="gn%s" % (k))
l = dense(
l, fc_dim, W_init=tf.variance_scaling_initializer(),
activation=tf.nn.relu, scope="fc")
return l
def fastrcnn_2fc_head_class_agnostic(
self, feature, num_class, boxes=None, scope="head"):
config = self.config
dim = config.fpn_frcnn_fc_head_dim # 1024
initializer = tf.variance_scaling_initializer()
with tf.variable_scope(scope):
if config.use_conv_frcnn_head:
hidden = self.conv_frcnn_head(
feature, dim, config.conv_frcnn_head_dim, num_conv=4,
use_gn=config.use_gn)
else:
# dense will reshape to [k,C*7*7] first
if config.add_relation_nn:
hidden = dense(
feature, dim, W_init=initializer,
activation=tf.nn.relu, scope="fc6")
hidden = hidden + relation_network(
hidden, boxes, group=16, geo_feat_dim=64, scope="RM_r1")
hidden = dense(hidden, dim, W_init=initializer,
activation=tf.nn.relu, scope="fc7")
hidden = hidden + relation_network(
hidden, boxes, group=16, geo_feat_dim=64, scope="RM_r2")
else:
hidden = dense(
feature, dim, W_init=initializer, activation=tf.nn.relu,
scope="fc6")
hidden = dense(
hidden, dim, W_init=initializer, activation=tf.nn.relu,
scope="fc7")
with tf.variable_scope("outputs"):
classification = dense(
hidden, num_class,
W_init=tf.random_normal_initializer(stddev=0.01),
scope="class") # [K,num_class]
num_class = 1 # just for box
box_regression = dense(
hidden, num_class*4,
W_init=tf.random_normal_initializer(stddev=0.001), scope="box")
box_regression = tf.reshape(box_regression, (-1, num_class, 4))
return classification, box_regression
def maskrcnn_up4conv_head(self, feature, num_class, scope="maskrcnn_head"):
# feature [R, 256, 7, 7]
config = self.config
num_conv = 4 # C4 model this is 0
l = feature
with tf.variable_scope(scope):
for k in range(num_conv):
l = conv2d(
l, config.mrcnn_head_dim, kernel=3, activation=tf.nn.relu,
data_format="NCHW",
W_init=tf.variance_scaling_initializer(
scale=2.0, mode="fan_out", distribution="truncated_normal"),
scope="fcn%s"%(k))
l = deconv2d(
l, config.mrcnn_head_dim, kernel=2, stride=2, activation=tf.nn.relu,
data_format="NCHW",
W_init=tf.variance_scaling_initializer(
scale=2.0, mode="fan_out", distribution="truncated_normal"),
scope="deconv")
# [R, num_class-1, 14, 14]
l = conv2d(
l, num_class - 1, kernel=1, data_format="NCHW",
W_init=tf.variance_scaling_initializer(
scale=2.0, mode="fan_out", distribution="normal"),
scope="conv")
return l
def nms_return_masks(self, X):
config = self.config
prob, box = X # [K], [K,4]
output_shape = tf.shape(prob)
# [K]
ids = tf.reshape(tf.where(prob > config.result_score_thres), [-1])
prob_ = tf.gather(prob, ids)
box_ = tf.gather(box, ids)
# NMS
selection = tf.image.non_max_suppression(
box_, prob_, max_output_size=config.result_per_im,
iou_threshold=config.fastrcnn_nms_iou_thres)
selection = tf.to_int32(tf.gather(ids, selection))
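# tf.nn.top_k sorts descending, so negating twice sorts the selected
# indices ascending, as required by sparse_to_dense below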
sorted_selection = -tf.nn.top_k(-selection, k=tf.size(selection))[0]
mask = tf.sparse_to_dense(
sparse_indices=sorted_selection,
output_shape=output_shape,
sparse_values=True,
default_value=False)
return mask
def nms_return_masks_no_score_filter(self, X):
config = self.config
prob, box = X # [K], [K,4]
output_shape = tf.shape(prob)
# NMS
selection = tf.image.non_max_suppression(
box, prob, max_output_size=config.result_per_im,
iou_threshold=config.fastrcnn_nms_iou_thres)
sorted_selection = -tf.nn.top_k(-selection, k=tf.size(selection))[0]
mask = tf.sparse_to_dense(
sparse_indices=sorted_selection, output_shape=output_shape,
sparse_values=True, default_value=False)
return mask
def nms_return_boxes(self, X):
config = self.config
prob, box = X # [K], [K,4]
output_shape = tf.shape(prob)
# NMS
selection = tf.image.non_max_suppression(
box, prob, max_output_size=config.result_per_im,
iou_threshold=config.fastrcnn_nms_iou_thres)
selected_prob = tf.gather(prob, selection)
selected_box = tf.gather(box, selection)
return selected_box, selected_prob
# given all proposal box prediction, based on score thres , get final
# NMS resulting box
# [K,num_class-1,4] -> decoded_boxes
# [K,num_class] label_probs
# each proposal box has prob and box to all class
# here using nms for each class, -> [R]
def fastrcnn_predictions(self, boxes, probs, no_score_filter=False,
scope="fastrcnn_predictions"):
with tf.variable_scope(scope):
config = self.config
if config.use_bg_score: # use the BG score to filter out boxes
# probs: [K, num_class]
box_classes = tf.argmax(probs, axis=1) # [K]
# [N]
nonBG_box_indices = tf.reshape(
tf.where(tf.greater(box_classes, 0)), [-1])
probs = tf.gather(probs, nonBG_box_indices)
boxes = tf.gather(boxes, nonBG_box_indices)
# note if use partial class, config.num_class is not the
# actual num_class here
# transpose to map_fn along each class
boxes = tf.transpose(boxes, [1, 0, 2]) # [num_class-1, K,4]
probs = tf.transpose(probs[:, 1:], [1, 0]) # [num_class-1, K]
# for each category get the top K
# [num_class-1, K]
if no_score_filter:
masks = tf.map_fn(
self.nms_return_masks_no_score_filter, (probs, boxes),
dtype=tf.bool, parallel_iterations=10)
else:
masks = tf.map_fn(
self.nms_return_masks, (probs, boxes), dtype=tf.bool,
parallel_iterations=10)
# [R*(num_class-1),2], each entry is [cat_id,box_id]
selected_indices = tf.where(masks)
# [num_class-1, K] -> [R*(num_class-1)]
probs = tf.boolean_mask(probs, masks)
# topk_indices [R]
topk_probs, topk_indices = tf.nn.top_k(
probs, tf.minimum(config.result_per_im, tf.size(probs)), sorted=False)
# [K,2] <- select [act_num,R]
filtered_selection = tf.gather(selected_indices, topk_indices)
filtered_selection = tf.reverse(
filtered_selection, axis=[1], name="filtered")
# [R,2], [R,]
return filtered_selection, topk_probs
# ---- losses
def maskrcnn_loss(self, mask_logits, fg_labels, fg_target_masks,
scope="maskrcnn_loss"):
with tf.variable_scope(scope):
# mask_logits: [N_FG, num_cat, 14, 14]
# fg_labels: [N_FG]
# fg_target_masks: [N_FG, 14, 14]
num_fg = tf.size(fg_labels)
# [N_FG, 2] # these indices are used to get the positive category's logit
indices = tf.stack([tf.range(num_fg), tf.to_int32(fg_labels) - 1], axis=1)
# ignore the other classes' logits
# [N_FG, 14, 14]
mask_logits = tf.gather_nd(mask_logits, indices)
mask_probs = tf.sigmoid(mask_logits)
loss = tf.nn.sigmoid_cross_entropy_with_logits(
labels=fg_target_masks, logits=mask_logits)
loss = tf.reduce_mean(loss, name="maskrcnn_loss")
return loss
def multilevel_rpn_losses(self, multilevel_anchors, multilevel_label_logits,
multilevel_box_logits, scope="rpn_losses"):
config = self.config
sliced_anchor_labels = self.sliced_anchor_labels
sliced_anchor_boxes = self.sliced_anchor_boxes
num_lvl = len(config.anchor_strides)
assert num_lvl == len(multilevel_label_logits)
assert num_lvl == len(multilevel_box_logits)
assert num_lvl == len(multilevel_anchors)
losses = []
with tf.variable_scope(scope):
for lvl in range(num_lvl):
anchors = multilevel_anchors[lvl]
gt_labels = sliced_anchor_labels[lvl]
gt_boxes = sliced_anchor_boxes[lvl]
# get the ground truth T_xywh
encoded_gt_boxes = encode_bbox_target(gt_boxes, anchors)
label_loss, box_loss = self.rpn_losses(
gt_labels, encoded_gt_boxes, multilevel_label_logits[lvl],
multilevel_box_logits[lvl], scope="level%s" % (lvl+2))
losses.extend([label_loss, box_loss])
total_label_loss = tf.add_n(losses[::2], name="label_loss")
total_box_loss = tf.add_n(losses[1::2], name="box_loss")
return total_label_loss, total_box_loss
def rpn_losses(self, anchor_labels, anchor_boxes, label_logits,
box_logits, scope="rpn_losses"):
config = self.config
with tf.variable_scope(scope):
# anchor_label is in {-1, 0, 1}: -1 means ignore, 0 neg, 1 pos
# label_logits [FS,FS,num_anchors]
# box_logits [FS,FS,num_anchors,4]
#with tf.device("/cpu:0"):
# 1,0|pos/neg
valid_mask = tf.stop_gradient(tf.not_equal(anchor_labels, -1))
pos_mask = tf.stop_gradient(tf.equal(anchor_labels, 1))
nr_valid = tf.stop_gradient(
tf.count_nonzero(valid_mask, dtype=tf.int32), name="num_valid_anchor")
nr_pos = tf.identity(
tf.count_nonzero(pos_mask, dtype=tf.int32), name="num_pos_anchor")
# [nr_valid]
valid_anchor_labels = tf.boolean_mask(anchor_labels, valid_mask)
# [nr_valid]
valid_label_logits = tf.boolean_mask(label_logits, valid_mask)
placeholder = 0.
# label loss for all valid anchor box
if config.focal_loss:
valid_label_logits = tf.reshape(valid_label_logits, [-1, 1])
valid_anchor_labels = tf.reshape(valid_anchor_labels, [-1, 1])
label_loss = focal_loss(
logits=valid_label_logits, labels=tf.to_float(valid_anchor_labels))
else:
label_loss = tf.nn.sigmoid_cross_entropy_with_logits(
logits=valid_label_logits, labels=tf.to_float(valid_anchor_labels))
label_loss = tf.reduce_sum(label_loss) * (1. / config.rpn_batch_per_im)
label_loss = tf.where(
tf.equal(nr_valid, 0), placeholder, label_loss, name="label_loss")
# box loss for positive anchor
pos_anchor_boxes = tf.boolean_mask(anchor_boxes, pos_mask)
pos_box_logits = tf.boolean_mask(box_logits, pos_mask)
delta = 1.0/9
# the smooth l1 loss
box_loss = tf.losses.huber_loss(
pos_anchor_boxes, pos_box_logits, delta=delta,
reduction=tf.losses.Reduction.SUM) / delta
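# dividing the Huber loss by delta recovers the standard smooth-L1 loss
# with beta = delta = 1/9 (i.e. sigma = 3): huber(x)/delta = 0.5*x^2/delta
# for |x| < delta, and |x| - 0.5*delta otherwise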
box_loss = box_loss * (1. / config.rpn_batch_per_im)
box_loss = tf.where(
tf.equal(nr_pos, 0), placeholder, box_loss, name="box_loss")
return label_loss, box_loss
def fastrcnn_losses(self, labels, label_logits, fg_boxes, fg_box_logits,
scope="fastrcnn_losses"):
config = self.config
with tf.variable_scope(scope):
# label -> label for roi [N_FG + N_NEG], the fg labels are 1-num_class,
# 0 is bg
# label_logits [N_FG + N_NEG,num_class]
# fg_boxes_logits -> [N_FG,num_class-1,4]
# so the label is int [0-num_class], 0 being background
if config.focal_loss:
# [N, num_classes]
onehot_label = tf.one_hot(labels, label_logits.get_shape()[-1])
# here uses sigmoid
label_loss = focal_loss(
logits=label_logits, labels=tf.to_float(onehot_label))
else:
label_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=label_logits)
label_loss = tf.reduce_mean(label_loss, name="label_loss")
fg_inds = tf.where(labels > 0)[:, 0]
fg_labels = tf.gather(labels, fg_inds) # [N_FG]
num_fg = tf.size(fg_inds) # N_FG
if int(fg_box_logits.shape[1]) > 1:
# [N_FG, 2]
indices = tf.stack(
[tf.range(num_fg), tf.to_int32(fg_labels) - 1], axis=1)
# gather the logits from [N_FG, num_class-1, 4] to [N_FG, 4],
# only the gt class's logit
fg_box_logits = tf.gather_nd(fg_box_logits, indices)
else:
# class agnostic for cascade rcnn
fg_box_logits = tf.reshape(fg_box_logits, [-1, 4])
box_loss = tf.losses.huber_loss(
fg_boxes, fg_box_logits, reduction=tf.losses.Reduction.SUM)
# / N_FG + N_NEG ?
box_loss = tf.truediv(
box_loss, tf.to_float(tf.shape(labels)[0]), name="box_loss")
return label_loss, box_loss
# given the image path, and the label for it
# preprocess
def get_feed_dict(self, batch, is_train=False):
#{"imgs":[],"gt":[]}
config = self.config
N = len(batch.data["imgs"])
assert N == 1 # only 1 image for now
feed_dict = {}
if "imgdata" in batch.data:
image = batch.data["imgdata"][0]
else:
image = batch.data["imgs"][0]
if config.use_mixup:
img1, img2 = image
use_mixup = random.random() <= config.mixup_chance
if use_mixup:
weight = batch.data["mixup_weights"][0]
img1 = Image.open(img1)
img2 = Image.open(img2)
trans_alpha = int(255.0*weight)
for mixup_box in batch.data["gt"][0]["mixup_boxes"]:
box_img = img2.crop(mixup_box)
box_img_sizes = [int(a) for a in box_img.size[::-1]]
# unit8 and "L" are needed
mask = Image.fromarray(
np.zeros(box_img_sizes, dtype="uint8") + trans_alpha, mode="L")
img1.paste(box_img, mixup_box, mask=mask)
# PIL to cv2 image
img1 = np.array(img1)
img1 = img1[:, :, ::-1].copy()
# now add the annotation
batch.data["gt"][0]["boxes"] = np.concatenate(
[batch.data["gt"][0]["boxes"],
batch.data["gt"][0]["mixup_boxes"]],
axis=0)
batch.data["gt"][0]["labels"].extend(
batch.data["gt"][0]["mixup_labels"])
image = img1
else:
image = cv2.imread(img1, cv2.IMREAD_COLOR)
else:
image = cv2.imread(image, cv2.IMREAD_COLOR)
assert image is not None, image
image = image.astype("float32")
h, w = image.shape[:2] # original width/height
# resize image, boxes
short_edge_size = config.short_edge_size
if config.scale_jitter and is_train:
short_edge_size = random.randint(
config.short_edge_size_min, config.short_edge_size_max)
if "resized_image" in batch.data:
resized_image = batch.data["resized_image"][0]
else:
resized_image = resizeImage(image, short_edge_size, config.max_size)
newh, neww = resized_image.shape[:2]
#print newh,neww, batch.data["imgs"][0]
#sys.exit()
if is_train:
anno = batch.data["gt"][0] # "boxes" -> [K,4], "labels" -> [K]
# now the box is in [x1,y1,x2,y2] format, not coco box
o_boxes = anno["boxes"]
labels = anno["labels"]
assert len(labels) == len(o_boxes)
# boxes # (x,y,w,h)
"""
boxes = o_boxes[:,[0,2,1,3]] #(x,w,y,h)
boxes = boxes.reshape((-1,2,2)) #
boxes[:,0] = boxes[:,0] * (neww*1.0/w) # x,w
boxes[:,1] = boxes[:,1] * (newh*1.0/h) # y,h
"""
# boxes # (x1,y1,x2,y2)
boxes = o_boxes[:, [0, 2, 1, 3]] #(x1,x2,y1,y2)
boxes = boxes.reshape((-1, 2, 2)) # (x1,x2),(y1,y2)
boxes[:, 0] = boxes[:, 0] * (neww*1.0/w) # x1,x2
boxes[:, 1] = boxes[:, 1] * (newh*1.0/h) # y1,y2
# random horizontal flip
# no flip for surveillance video?
if config.flip_image:
prob = 0.5
rand = random.random()
if rand > prob:
resized_image = cv2.flip(resized_image, 1) # 1 for horizontal
#boxes[:,0,0] = neww - boxes[:,0,0] - boxes[:,0,1] # for (x,y,w,h)
boxes[:, 0] = neww - boxes[:, 0]
boxes[:, 0, :] = boxes[:, 0, ::-1]# (x_min will be x_max after flip)
boxes = boxes.reshape((-1, 4))
boxes = boxes[:, [0, 2, 1, 3]] #(x1,y1,x2,y2)
# visualize?
if config.vis_pre:
label_names = [config.classId_to_class[i] for i in labels]
o_boxes_x1x2 = np.asarray([box_wh_to_x1x2(box) for box in o_boxes])
boxes_x1x2 = np.asarray([box for box in boxes])
ori_vis = draw_boxes(image, o_boxes_x1x2, labels=label_names)
new_vis = draw_boxes(resized_image, boxes_x1x2, labels=label_names)
imgname = os.path.splitext(os.path.basename(batch.data["imgs"][0]))[0]
cv2.imwrite(
"%s.ori.jpg" % os.path.join(config.vis_path, imgname), ori_vis)
cv2.imwrite(
"%s.prepro.jpg" % os.path.join(config.vis_path, imgname), new_vis)
print("viz saved in %s" % config.vis_path)
sys.exit()
# get rpn anchor labels
# [fs_im,fs_im,num_anchor,4]
multilevel_anchor_inputs = self.get_multilevel_rpn_anchor_input(
resized_image, boxes)
multilevel_anchor_labels = [l for l, b in multilevel_anchor_inputs]
multilevel_anchor_boxes = [b for l, b in multilevel_anchor_inputs]
assert len(multilevel_anchor_labels) == len(multilevel_anchor_boxes) \
== len(self.anchor_labels) == len(self.anchor_boxes), \
(len(multilevel_anchor_labels), len(multilevel_anchor_boxes),
len(self.anchor_labels), len(self.anchor_boxes))
for pl_labels, pl_boxes, in_labels, in_boxes in zip(
self.anchor_labels, self.anchor_boxes, multilevel_anchor_labels,
multilevel_anchor_boxes):
feed_dict[pl_labels] = in_labels
feed_dict[pl_boxes] = in_boxes
assert len(boxes) > 0
feed_dict[self.gt_boxes] = boxes
feed_dict[self.gt_labels] = labels
if config.use_small_object_head:
for si in range(len(config.small_objects)):
# the class id in the all classes
small_object_class_id = config.classname2id[config.small_objects[si]]
# the box ids
so_ids = [i for i in range(len(labels))
if labels[i] == small_object_class_id]
# small object label id is different
# so_label is 0/1, so should be all 1s
feed_dict[self.so_gt_boxes[si]] = boxes[so_ids, :] # could be empty
feed_dict[self.so_gt_labels[si]] = [1 for i in range(len(so_ids))]
else:
pass
feed_dict[self.image] = resized_image
feed_dict[self.is_train] = is_train
return feed_dict
def get_feed_dict_forward(self, imgdata):
feed_dict = {}
feed_dict[self.image] = imgdata
feed_dict[self.is_train] = False
return feed_dict
# anchor related function for training--------------------
def filter_box_inside(self, im, boxes):
h, w = im.shape[:2]
indices = np.where(
(boxes[:, 0] >= 0) &
(boxes[:, 1] >= 0) &
(boxes[:, 2] <= w) &
(boxes[:, 3] <= h)
)[0]
return indices, boxes[indices, :]
# for training, given image and box, get anchor box labels
# [fs_im,fs_im,num_anchor,4] # not fs,
def get_rpn_anchor_input(self, im, boxes):
config = self.config
boxes = boxes.copy()
# [FS,FS,num_anchor,4] all possible anchor boxes given the max image size
all_anchors_np = np.copy(get_all_anchors(
stride=config.anchor_stride, sizes=config.anchor_sizes,
ratios=config.anchor_ratios, max_size=config.max_size))
h, w = im.shape[:2]
# so image may be smaller than the full anchor size
#featureh,featurew = h//config.anchor_stride,w//config.anchor_stride
anchorH, anchorW = all_anchors_np.shape[:2]
featureh, featurew = anchorH, anchorW
# [FS_im,FS_im,num_anchors,4] # the anchor field that the image is included
#featuremap_anchors = all_anchors_np[:featureh,:featurew,:,:]
#print featuremap_anchors.shape #(46,83,15,4)
#featuremap_anchors_flatten = featuremap_anchors.reshape((-1,4))
featuremap_anchors_flatten = all_anchors_np.reshape((-1, 4))
# num_in < FS_im*FS_im*num_anchors # [num_in,4]
inside_ind, inside_anchors = self.filter_box_inside(
im, featuremap_anchors_flatten) # the anchor box inside the image
# anchor labels is in {1,-1,0}, -1 means ignore
# N = num_in
# [N], [N,4] # only the fg anchor has box value
anchor_labels, anchor_boxes = self.get_anchor_labels(inside_anchors, boxes)
# fill back to [fs,fs,num_anchor,4]
# all anchor outside box is ignored (-1)
featuremap_labels = -np.ones(
(featureh * featurew*config.num_anchors,), dtype="int32")
featuremap_labels[inside_ind] = anchor_labels
featuremap_labels = featuremap_labels.reshape(
(featureh, featurew, config.num_anchors))
featuremap_boxes = np.zeros(
(featureh * featurew * config.num_anchors, 4), dtype="float32")
featuremap_boxes[inside_ind, :] = anchor_boxes
featuremap_boxes = featuremap_boxes.reshape(
(featureh, featurew, config.num_anchors, 4))
return featuremap_labels, featuremap_boxes
def get_multilevel_rpn_anchor_input(self, im, boxes):
config = self.config
boxes = boxes.copy()
# get anchor for each (anchor_stride,anchor_size) pair
anchors_per_level = self.get_all_anchors_fpn()
flatten_anchors_per_level = [k.reshape((-1, 4)) for k in anchors_per_level]
all_anchors_flatten = np.concatenate(flatten_anchors_per_level, axis=0)
# some image may not be resized to max size, could be shorter edge size
inside_ind, inside_anchors = self.filter_box_inside(im, all_anchors_flatten)
# given all these anchors, given the ground truth box, and their iou to
# each anchor, get the label to be 1 or 0.
anchor_labels, anchor_gt_boxes = self.get_anchor_labels(
inside_anchors, boxes)
# map back to all_anchors, then split to each level
num_all_anchors = all_anchors_flatten.shape[0]
all_labels = -np.ones((num_all_anchors, ), dtype="int32")
all_labels[inside_ind] = anchor_labels
all_boxes = np.zeros((num_all_anchors, 4), dtype="float32")
all_boxes[inside_ind] = anchor_gt_boxes
start = 0
multilevel_inputs = []
# put back to list for each level
for level_anchor in anchors_per_level:
assert level_anchor.shape[2] == len(config.anchor_ratios)
anchor_shape = level_anchor.shape[:3] # fHxfWxNUM_ANCHOR_RATIOS
num_anchor_this_level = np.prod(anchor_shape)
end = start + num_anchor_this_level
multilevel_inputs.append(
(all_labels[start: end].reshape(anchor_shape),
all_boxes[start:end, :].reshape(anchor_shape + (4,))))
start = end
assert end == num_all_anchors, \
("num all anchors:%s, end:%s" % (num_all_anchors, end))
return multilevel_inputs
def get_anchor_labels(self, anchors, gt_boxes):
config = self.config
# return max_num of index for labels equal val
def filter_box_label(labels, val, max_num):
cur_inds = np.where(labels == val)[0]
if len(cur_inds) > max_num:
disable_inds = np.random.choice(
cur_inds, size=(len(cur_inds) - max_num), replace=False)
labels[disable_inds] = -1
cur_inds = np.where(labels == val)[0]
return cur_inds
NA, NB = len(anchors), len(gt_boxes)
assert NB > 0
#bbox_iou_float = get_iou_callable() # tf op on cpu, nn.py
#box_ious = bbox_iou_float(anchors,gt_boxes) #[NA,NB]
box_ious = np_iou(anchors, gt_boxes)
#print box_ious.shape #(37607,7)
# NA: each anchor's max IoU over gt boxes, and the argmax gt box index [0, NB-1]
iou_argmax_per_anchor = box_ious.argmax(axis=1)
iou_max_per_anchor = box_ious.max(axis=1)
# 1 x NB, each gt box"s max iou to any anchor boxes
#iou_max_per_gt = box_ious.max(axis=1,keepdims=True)
#print iou_max_per_gt # all zero?
iou_max_per_gt = np.amax(box_ious, axis=0, keepdims=True) # 1xNB
# indices of the anchors that attain the max IoU for some gt box
anchors_with_max_iou_per_gt = np.where(box_ious == iou_max_per_gt)[0]
anchor_labels = -np.ones((NA,), dtype="int32")
anchor_labels[anchors_with_max_iou_per_gt] = 1
anchor_labels[iou_max_per_anchor >= config.positive_anchor_thres] = 1
anchor_labels[iou_max_per_anchor < config.negative_anchor_thres] = 0
# cap the number of fg anchor and bg anchor
target_num_fg = int(config.rpn_batch_per_im * config.rpn_fg_ratio)
# set the label==1 to -1 if the number exceeds
fg_inds = filter_box_label(anchor_labels, 1, target_num_fg)
#assert len(fg_inds) > 0
old_num_bg = np.sum(anchor_labels == 0)
if old_num_bg == 0:
raise Exception("No valid background for RPN!")
# the rest of 256 is negative
target_num_bg = config.rpn_batch_per_im - len(fg_inds)
# set some label to -1 if exceeds
filter_box_label(anchor_labels, 0, target_num_bg)
# only the fg anchor_boxes are filled with the corresponding gt_box
anchor_boxes = np.zeros((NA, 4), dtype="float32")
anchor_boxes[fg_inds, :] = gt_boxes[iou_argmax_per_anchor[fg_inds], :]
return anchor_labels, anchor_boxes
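# Worked example, assuming the typical 0.7/0.3 thresholds (the actual
# values come from config): an anchor whose best IoU with any gt box is
# 0.8 is labeled positive (1), 0.1 is labeled negative (0), and 0.5 stays
# ignored (-1) unless it is the single best anchor for some gt box.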
# given the box, just extract feature for each box
class RCNN_FPN_givenbox():
def __init__(self, config, gpuid=0):
self.gpuid = gpuid
# for batch_norm
global is_training
is_training = config.is_train # change this before building model
assert not config.is_train # only for inferencing
self.config = config
self.num_class = config.num_class
self.global_step = tf.get_variable(
"global_step", shape=[], dtype="int32",
initializer=tf.constant_initializer(0), trainable=False)
# current model get one image at a time
self.image = tf.placeholder(tf.float32, [None, None, 3], name="image")
# used for dropout switch
self.is_train = tf.placeholder("bool", [], name="is_train")
self.boxes = tf.placeholder(tf.float32, [None, 4], name="boxes")
# the following will be added in the build_forward and loss
self.logits = None
self.yp = None
self.loss = None
self.build_preprocess()
self.build_forward()
# get feature map anchor and preprocess image
def build_preprocess(self):
config = self.config
image = self.image
bgr = True # cv2 load image is bgr
p_image = tf.expand_dims(image, 0) # [1,H,W,C]
with tf.name_scope("image_preprocess"): # tf.device("/cpu:0"):
if p_image.dtype.base_dtype != tf.float32:
p_image = tf.cast(p_image, tf.float32)
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
p_image = p_image*(1.0/255)
if bgr:
mean = mean[::-1]
std = std[::-1]
image_mean = tf.constant(mean, dtype=tf.float32)
image_std = tf.constant(std, dtype=tf.float32)
p_image = (p_image - image_mean) / image_std
p_image = tf.transpose(p_image, [0, 3, 1, 2])
self.p_image = p_image
# based on box sizes
def fpn_map_rois_to_levels(self, boxes):
def tf_area(boxes):
x_min, y_min, x_max, y_max = tf.split(boxes, 4, axis=1)
return tf.squeeze((y_max - y_min) * (x_max - x_min), [1])
sqrtarea = tf.sqrt(tf_area(boxes))
level = tf.to_int32(tf.floor(4 + tf.log(sqrtarea * (1. / 224) + 1e-6) * (1.0 / np.log(2))))
# RoI levels range from 2~5 (not 6)
level_ids = [
tf.where(level <= 2),
tf.where(tf.equal(level, 3)),# problems with ==?
tf.where(tf.equal(level, 4)),
tf.where(level >= 5)]
level_ids = [tf.reshape(x, [-1], name="roi_level%s_id"%(i + 2))
for i, x in enumerate(level_ids)]
num_in_levels = [tf.size(x, name="num_roi_level%s"%(i + 2))
for i, x in enumerate(level_ids)]
level_boxes = [tf.gather(boxes, ids) for ids in level_ids]
return level_ids, level_boxes
# output_shape is the output feature HxW
def multilevel_roi_align(self, features, rcnn_boxes, output_shape):
config = self.config
assert len(features) == 4
# Reassign rcnn_boxes to levels # based on box area size
level_ids, level_boxes = self.fpn_map_rois_to_levels(rcnn_boxes)
all_rois = []
# Crop patches from corresponding levels
for i, boxes, featuremap in zip(itertools.count(), level_boxes, features):
with tf.name_scope("roi_level%s"%(i + 2)):
boxes_on_featuremap = boxes * (1.0 / config.anchor_strides[i])
all_rois.append(roi_align(featuremap, boxes_on_featuremap, output_shape))
# this can fail if using TF<=1.8 with MKL build
all_rois = tf.concat(all_rois, axis=0) # NCHW
# Unshuffle to the original order, to match the original samples
level_id_perm = tf.concat(level_ids, axis=0) # A permutation of 1~N
level_id_invert_perm = tf.invert_permutation(level_id_perm)
all_rois = tf.gather(all_rois, level_id_invert_perm)
return all_rois
def build_forward(self):
config = self.config
image = self.p_image # [1, C, H, W]
image_shape2d = tf.shape(image)[2:]
# the feature map shared by RPN and fast RCNN
# TODO: fix the batch norm mess
# TODO: fix global param like data_format and
# [1,C,FS,FS]
c2345 = resnet_fpn_backbone(
image, config.resnet_num_block, use_gn=config.use_gn,
resolution_requirement=config.fpn_resolution_requirement,
use_dilations=config.use_dilations,
use_deformable=config.use_deformable, tf_pad_reverse=True,
freeze=config.freeze, use_basic_block=config.use_basic_block,
use_se=config.use_se)
# include lateral 1x1 conv and final 3x3 conv
# -> [7, 7, 256]
p23456 = fpn_model(
c2345, num_channel=config.fpn_num_channel, use_gn=config.use_gn,
scope="fpn")
# here we assume N is not so big that the GPU can handle
rcnn_boxes = self.boxes # N, 4
# NxCx7x7 # (?, 256, 7, 7)
roi_feature_fastrcnn = self.multilevel_roi_align(p23456[:4], rcnn_boxes, 7)
# [N, 256]
self.final_box_features = tf.reduce_mean(roi_feature_fastrcnn, axis=[2, 3])
# given the image path, and the label for it
# preprocess
def get_feed_dict(self, im, boxes, is_train=False):
#{"imgs":[],"gt":[]}
config = self.config
feed_dict = {}
feed_dict[self.image] = im
feed_dict[self.boxes] = boxes
feed_dict[self.is_train] = is_train
return feed_dict
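# A minimal usage sketch (illustrative; the variable names below are
# assumptions, not part of this file):
#   model = RCNN_FPN_givenbox(config)
#   feed_dict = model.get_feed_dict(resized_image, boxes_x1y1x2y2)
#   feats = sess.run(model.final_box_features, feed_dict=feed_dict)  # [N, 256]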
def initialize(load, load_best, config, sess):
tf.global_variables_initializer().run()
if load:
print("restoring model...")
allvars = tf.global_variables()
allvars = [var for var in allvars if "global_step" not in var.name]
#restore_vars = allvars
opts = ["Adam", "beta1_power", "beta1_power_1", "beta2_power",
"beta2_power_1", "Adam_1", "Adadelta_1", "Adadelta", "Momentum"]
allvars = [var for var in allvars
if var.name.split(":")[0].split("/")[-1] not in opts]
# so allvars is actually the variables except things for training
if config.ignore_gn_vars:
allvars = [var for var in allvars if "/gn" not in var.name.split(":")[0]]
if config.ignore_vars is not None:
ignore_vars = config.ignore_vars.split(":")
ignore_vars.extend(opts)
# also these
#ignore_vars+=["global_step"]
restore_vars = []
for var in allvars:
ignore_it = False
for ivar in ignore_vars:
if ivar in var.name:
ignore_it = True
print("ignored %s" % var.name)
break
if not ignore_it:
restore_vars.append(var)
print("ignoring %s variables, original %s vars, restoring for %s vars" % \
(len(ignore_vars), len(allvars), len(restore_vars)))
else:
restore_vars = allvars
saver = tf.train.Saver(restore_vars, max_to_keep=5)
load_from = None
if config.load_from is not None:
load_from = config.load_from
else:
if load_best:
load_from = config.save_dir_best
else:
load_from = config.save_dir
ckpt = tf.train.get_checkpoint_state(load_from)
if ckpt and ckpt.model_checkpoint_path:
loadpath = ckpt.model_checkpoint_path
saver.restore(sess, loadpath)
print("Model:")
print("\tloaded %s"%loadpath)
print("")
else:
if os.path.exists(load_from):
if load_from.endswith(".ckpt"):
# load_from should be a single .ckpt file
saver.restore(sess, load_from)
elif load_from.endswith(".npz"):
# load from dict
weights = np.load(load_from)
params = {get_op_tensor_name(n)[1]:v
#for n, v in dict(weights).iteritems()}
for n, v in dict(weights).items()}
#param_names = set(params.iterkeys())
param_names = set(params.keys())
#variables = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
variables = restore_vars
variable_names = set([k.name for k in variables])
intersect = variable_names & param_names
restore_vars = [v for v in variables if v.name in intersect]
with sess.as_default():
for v in restore_vars:
vname = v.name
v.load(params[vname])
#print variables # all the model"s params
not_used = [(one, weights[one].shape) for one in weights.keys()
if get_op_tensor_name(one)[1] not in intersect]
if not_used:
print("warning, %s/%s in npz not restored:%s" % (
len(weights.keys()) - len(intersect),
len(weights.keys()), not_used))
#if config.show_restore:
# print "loaded %s vars:%s"%(len(intersect),intersect)
else:
raise Exception("Not recognized model type:%s" % load_from)
else:
raise Exception("Model not exists")
print("done.")
| ["[email protected]"] | |
e2b246ef45c75445029b5451c1379c4957530865 | e70276d10c1161e8594a9d03ca8d89f9491f5a90 | /example1.py | 1959895a822248e6aa892ea4fd2d1cfdcc7685bb | [] | no_license | py-yyc/twisted-postgres | 655f177c26d3503524eeb82e9d5ce0dc2cb4da18 | d45ad294d969ea60698021c4e63463596437a01c | refs/heads/master | 2021-01-23T07:34:30.234497 | 2017-03-28T20:36:29 | 2017-03-28T20:36:29 | 86,429,271 | 0 | 0 | null | 2017-03-28T14:27:07 | 2017-03-28T07:35:21 | JavaScript | UTF-8 | Python | false | false | 1,193 | py | ## <h1>txpostgres</h1>
from twisted.internet import defer, task
from txpostgres import txpostgres
_create_table = '''
DROP TABLE IF EXISTS todo;
CREATE TABLE todo
(
id SERIAL,
todo VARCHAR(254) NOT NULL,
created_at TIMESTAMP NOT NULL,
PRIMARY KEY (id)
);
'''
@task.react
@defer.inlineCallbacks
def main(reactor):
connections = []
for x in range(25):
conn = txpostgres.Connection()
db = yield conn.connect('dbname=postgres')
connections.append(db)
yield connections[0].runOperation(_create_table)
# a 'real' generator, round-robin all connections
def connection_generator():
while True:
for c in connections:
yield c
connect = connection_generator()
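# note: this is equivalent to itertools.cycle(connections)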
dl = []  # deferreds for the pending INSERTs
for item in range(1000):
db = next(connect)
d = db.runOperation(
'INSERT INTO todo (todo, created_at) '
'VALUES (%s, NOW());', [item],
)
dl.append(d)
start = reactor.seconds()
yield defer.DeferredList(dl)
diff = reactor.seconds() - start
print("Took {}s".format(diff))
## show-output
| ["[email protected]"] | |
f4b47a906a369d7bcfd40afb48b6c743a23732f8 | a6590941fea4880593d5b1cd23eedfe696f4e446 | /ABC01_99/ABC67/a.py | 95417fc790f2c6d0b3eca24dde4ac56729752ad4 | [] | no_license | cod4i3/MyAtcoder | 9fb92f2dd06c5b6217e925a82d8db4f91355a70f | 53bdac3fa7eb4ac48ca6d5c70461639beb6aa81d | refs/heads/master | 2023-02-17T09:15:16.282873 | 2021-01-15T13:34:03 | 2021-01-15T13:34:03 | 232,006,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | A, B = map(int, input().split())
print('Possible' if (A % 3 == 0 or B % 3 == 0 or (A + B) % 3 == 0) else 'Impossible')
| ["[email protected]"] | |
bd6592a7ebfba73acfd25c785fc3d4ee13584107 | c78278b3c60fabee38c3c7368895ab44cef6af0c | /neopantry/recipes/tests/test_views.py | 49227341c90c91166b2227a3dce1512eaba87053 | [] | no_license | ParentJA/neopantry | b3280a935d4cb82b6fb92cf781fab0fdbe109ad1 | c8929199742b804d8abd1ea7ac39d36a02608ece | refs/heads/master | 2023-06-09T13:37:17.633405 | 2018-08-29T01:41:30 | 2018-08-29T01:41:30 | 45,959,290 | 0 | 1 | null | 2018-03-23T03:49:08 | 2015-11-11T04:15:28 | JavaScript | UTF-8 | Python | false | false | 16,536 | py | # Standard library imports.
import json
from unittest.mock import patch
# Third-party imports.
from rest_framework.reverse import reverse
from rest_framework.test import APIClient, APITestCase
# Local imports.
from ..models import Recipe, RecipeNote, UserRecipe
from ..serializers import RecipeNoteSerializer
from ..factories import (
IngredientFactory, RecipeFactory, RecipeNoteFactory, RecipeReviewFactory, UserFactory, UserRecipeFactory
)
PASSWORD = 'pAssW0rd!'
__author__ = 'Jason Parent'
class RecipeViewTest(APITestCase):
def setUp(self):
self.user = UserFactory(password=PASSWORD)
self.client = APIClient()
self.client.login(username=self.user.username, password=PASSWORD)
def test_user_can_retrieve_recipe(self):
# Given.
ingredient = IngredientFactory()
recipe = ingredient.recipe
# When.
response = self.client.get(reverse('recipe', kwargs={'pk': recipe.pk}))
# Then.
self.assertEqual(200, response.status_code)
self.assertEqual(recipe.id, response.data['id'])
self.assertEqual(recipe.name, response.data['name'])
self.assertEqual(recipe.description, response.data['description'])
self.assertEqual(recipe.instructions, response.data['instructions'])
self.assertIn(recipe.photo.url, response.data['photo'])
self.assertEqual(recipe.average_rating, response.data['average_rating'])
self.assertEqual(recipe.num_reviews, response.data['num_reviews'])
self.assertEqual(ingredient.description, response.data['ingredients'][0].get('description'))
self.assertEqual(ingredient.rank, response.data['ingredients'][0].get('rank'))
class RecipeSearchViewTest(APITestCase):
def setUp(self):
self.user = UserFactory(password=PASSWORD)
self.client = APIClient()
self.client.login(username=self.user.username, password=PASSWORD)
def test_user_can_search_recipes(self):
# Given.
recipe1 = RecipeFactory(name='Chicken Pot Pie')
recipe2 = RecipeFactory(name='Apple Pie')
RecipeFactory.create_batch(3)
# When.
response = self.client.get(path=reverse('recipe-search'), data={'page': 1, 'query': 'pie'})
# Then.
self.assertEqual(200, response.status_code)
self.assertEqual(2, response.data['count'])
# Get expected recipe IDs.
exp = [recipe1.pk, recipe2.pk]
# Get actual recipe IDs.
act = [result.get('id') for result in response.data['results']]
self.assertCountEqual(exp, act)
def test_user_can_list_recipes(self):
# Given.
recipes = RecipeFactory.create_batch(10)
# When.
response = self.client.get(path=reverse('recipe-search'), data={'page': 1})
# Then.
self.assertEqual(200, response.status_code)
self.assertEqual(10, response.data['count'])
# Get expected recipe IDs.
exp = [recipe.pk for recipe in recipes]
# Get actual recipe IDs.
act = [result.get('id') for result in response.data['results']]
self.assertCountEqual(exp, act)
def test_user_can_list_recipes_with_pagination(self):
# Given.
recipes = RecipeFactory.create_batch(15)
# When.
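        # Patch the paginator to a page size of 10 so the 15 recipes span two pages.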
with patch('recipes.pagination.RecipePagination.get_page_size', return_value=10):
response = self.client.get(path=reverse('recipe-search'), data={'page': 1})
# Then.
self.assertEqual(200, response.status_code)
# Has 15 total results.
self.assertEqual(15, response.data['count'])
# Showing 10 results.
self.assertEqual(10, len(response.data['results']))
# Get expected recipe IDs.
exp = [recipe.pk for recipe in recipes[:10]]
# Get actual recipe IDs.
act = [result.get('id') for result in response.data['results']]
self.assertCountEqual(exp, act)
def test_user_can_list_recipes_with_reviews(self):
# Given.
recipe = RecipeFactory()
recipe_reviews = RecipeReviewFactory.create_batch(5, recipe=recipe)
# When.
response = self.client.get(reverse('recipe-search'))
# Then.
self.assertEqual(200, response.status_code)
# self.assertCountEqual(
# RecipeListSerializer(Recipe.objects.all(), many=True).data,
# response.data
# )
#
# def num_reviews(reviews):
# return len(reviews)
#
# self.assertEqual(num_reviews(recipe_reviews), response.data[0].get('num_reviews'))
# def test_search_vector_is_added_to_recipe_on_save(self):
# # When.
# recipe = Recipe(name='Recipe')
# recipe.save()
#
# # Then.
# self.assertIsNotNone(recipe.search_vector)
class RecipeNoteTest(APITestCase):
def setUp(self):
self.user = UserFactory(password=PASSWORD)
self.client = APIClient()
self.client.login(username=self.user.username, password=PASSWORD)
def test_user_can_list_recipe_notes(self):
# Given.
recipe = RecipeFactory()
RecipeNoteFactory(recipe=recipe, user=self.user)
# When.
response = self.client.get(reverse('recipe-note-list'), data={'recipe': recipe.pk})
# Then.
self.assertEqual(200, response.status_code)
# self.assertEqual(RecipeNoteSerializer(RecipeNote.objects.all(), many=True).data, response.data)
def test_user_can_only_list_own_notes(self):
# Given.
recipe = RecipeFactory()
other_user = UserFactory()
RecipeNoteFactory(recipe=recipe, user=other_user)
# When.
response = self.client.get(reverse('recipe-note-list'), data={'recipe': recipe.pk})
# Then.
self.assertEqual(200, response.status_code)
self.assertListEqual([], response.data)
def test_user_can_create_recipe_note(self):
# Given.
recipe = RecipeFactory()
# When.
response = self.client.post(reverse('recipe-note-list'), data={
'note': 'This is a note.',
'recipe': recipe.pk,
'user': self.user.pk,
})
# Then.
self.assertEqual(201, response.status_code)
# self.assertEqual(RecipeNoteSerializer(RecipeNote.objects.last()).data, response.data)
def test_user_can_retrieve_recipe_note(self):
# Given.
recipe = RecipeFactory()
recipe_note = RecipeNoteFactory(recipe=recipe, user=self.user)
# When.
response = self.client.get(reverse('recipe-note-detail', kwargs={'pk': recipe_note.pk}))
# Then.
self.assertEqual(200, response.status_code)
# self.assertEqual(RecipeNoteSerializer(RecipeNote.objects.get(pk=recipe_note.pk)).data, response.data)
def test_user_can_only_retrieve_own_note(self):
# Given.
recipe = RecipeFactory()
other_user = UserFactory()
recipe_note = RecipeNoteFactory(recipe=recipe, user=other_user)
# When.
response = self.client.get(reverse('recipe-note-detail', kwargs={'pk': recipe_note.pk}))
# Then.
self.assertEqual(404, response.status_code)
def test_user_can_update_recipe_note(self):
# Given.
recipe = RecipeFactory()
recipe_note = RecipeNoteFactory(recipe=recipe, user=self.user)
# When.
response = self.client.put(reverse('recipe-note-detail', kwargs={'pk': recipe_note.pk}), data={
**RecipeNoteSerializer(recipe_note).data,
'note': 'A new note.',
'user': self.user.pk,
})
# Then.
self.assertEqual(200, response.status_code)
# self.assertEqual(RecipeNoteSerializer(RecipeNote.objects.get(pk=recipe_note.pk)).data, response.data)
def test_user_can_only_update_own_note(self):
# Given.
recipe = RecipeFactory()
other_user = UserFactory()
recipe_note = RecipeNoteFactory(recipe=recipe, user=other_user)
# When.
response = self.client.put(reverse('recipe-note-detail', kwargs={'pk': recipe_note.pk}), data={
**RecipeNoteSerializer(recipe_note).data,
'note': 'A new note.',
'user': other_user.pk,
})
# Then.
self.assertEqual(403, response.status_code)
def test_user_can_destroy_recipe_note(self):
# Given.
recipe = RecipeFactory()
recipe_note = RecipeNoteFactory(recipe=recipe, user=self.user)
# When.
response = self.client.delete(reverse('recipe-note-detail', kwargs={'pk': recipe_note.pk}))
# Then.
self.assertEqual(204, response.status_code)
# self.assertIsNone(response.data)
self.assertFalse(RecipeNote.objects.filter(pk=recipe_note.pk).exists())
def test_user_can_only_destroy_own_note(self):
# Given.
recipe = RecipeFactory()
other_user = UserFactory()
recipe_note = RecipeNoteFactory(recipe=recipe, user=other_user)
# When.
response = self.client.delete(reverse('recipe-note-detail', kwargs={'pk': recipe_note.pk}))
# Then.
self.assertEqual(404, response.status_code)
self.assertTrue(RecipeNote.objects.filter(pk=recipe_note.pk).exists())
class RecipeReviewTest(APITestCase):
def setUp(self):
self.user1 = UserFactory(password=PASSWORD)
self.user2 = UserFactory(password=PASSWORD)
self.client = APIClient()
self.client.login(username=self.user1.username, password=PASSWORD)
def test_user_can_create_recipe_review(self):
# Given.
recipe = RecipeFactory(total_make_again=4, total_ratings=20, num_reviews=4)
review = RecipeReviewFactory.stub(recipe=recipe, user=self.user1, make_again=True, rating=5)
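        # Existing stats: 20 rating points over 4 reviews -> average 5.0; the new 5-star
        # review should keep the average at 5 and make the make-again rate 5/5 = 100%.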
# And.
self.assertEqual(5.0, recipe.average_rating)
# When.
response = self.client.post(reverse('recipe-review'), data={
'recipe': review.recipe.pk,
'user': review.user.pk,
'make_again': review.make_again,
'rating': review.rating,
'review': review.review,
})
# Then.
self.assertEqual(201, response.status_code)
self.assertEqual(review.recipe.pk, response.data['recipe'])
self.assertEqual(review.user.pk, response.data['user'])
self.assertEqual(review.make_again, response.data['make_again'])
self.assertEqual(review.rating, response.data['rating'])
self.assertEqual(review.review, response.data['review'])
self.assertEqual(review.user.username, response.data['username'])
# And.
recipe = Recipe.objects.get(pk=recipe.pk)
self.assertEqual(100, recipe.average_make_again)
self.assertEqual(5, recipe.average_rating)
self.assertEqual(5, recipe.num_reviews)
def test_user_can_only_create_recipe_review_for_self(self):
# Given.
recipe = RecipeFactory()
review = RecipeReviewFactory.stub(recipe=recipe, user=self.user2)
# When.
response = self.client.post(reverse('recipe-review'), data={
'recipe': review.recipe.pk,
'user': review.user.pk,
'make_again': review.make_again,
'rating': review.rating,
'review': review.review,
})
# Then.
self.assertEqual(403, response.status_code)
def test_user_can_only_create_one_review_per_recipe(self):
# Given.
recipe = RecipeFactory()
review = RecipeReviewFactory(recipe=recipe, user=self.user1)
# When.
response = self.client.post(reverse('recipe-review'), data={
'recipe': review.recipe.pk,
'user': review.user.pk,
'make_again': review.make_again,
'rating': review.rating,
'review': review.review,
})
# Then.
self.assertEqual(400, response.status_code)
def test_user_can_get_reviews_by_recipe(self):
# Given.
recipe1 = RecipeFactory()
review1 = RecipeReviewFactory(recipe=recipe1, user=self.user1)
recipe2 = RecipeFactory()
review2 = RecipeReviewFactory(recipe=recipe2, user=self.user2)
# When.
response = self.client.get(reverse('recipe-review'), data={
'recipe': recipe1.pk
})
# Then.
self.assertEqual(200, response.status_code)
self.assertCountEqual([recipe1.pk], [data.get('recipe') for data in response.data])
def test_user_can_get_reviews_by_user(self):
# Given.
recipe1 = RecipeFactory()
review1 = RecipeReviewFactory(recipe=recipe1, user=self.user1)
recipe2 = RecipeFactory()
review2 = RecipeReviewFactory(recipe=recipe2, user=self.user2)
# When.
response = self.client.get(reverse('recipe-review'), data={
'user': self.user1.pk
})
# Then.
self.assertEqual(200, response.status_code)
self.assertCountEqual([self.user1.pk], [data.get('user') for data in response.data])
class UserRecipeTest(APITestCase):
def setUp(self):
self.user1 = UserFactory(password=PASSWORD)
self.user2 = UserFactory(password=PASSWORD)
self.client = APIClient()
self.client.login(username=self.user1.username, password=PASSWORD)
def test_user_can_get_saved_recipes(self):
# Given.
UserRecipeFactory.create_batch(3, user=self.user1)
UserRecipeFactory.create_batch(2, user=self.user2)
# When.
response = self.client.get(reverse('user-recipe-search', kwargs={'user_pk': self.user1.pk}))
# Then.
self.assertEqual(200, response.status_code)
# Get 'user' and 'recipe' values from database records.
exp = UserRecipe.objects.filter(user=self.user1).values_list('user', 'recipe')
# Get 'user' and 'recipe' values from response data.
act = [(result['user'], result['recipe'].get('id')) for result in response.data['results']]
self.assertCountEqual(exp, act)
def test_user_cannot_get_other_users_saved_recipes(self):
# Given.
UserRecipeFactory.create_batch(3, user=self.user2)
# When.
# NOTE: Logged in as 'user1'.
response = self.client.get(reverse('user-recipe-search', kwargs={'user_pk': self.user2.pk}))
# Then.
self.assertEqual(403, response.status_code)
def test_user_can_save_recipes(self):
# Given.
recipe = RecipeFactory()
# Get 'recipe' count from database.
self.assertEqual(UserRecipe.objects.filter(user=self.user1).count(), 0)
# When.
response = self.client.post(reverse('user-recipe-search', kwargs={'user_pk': self.user1.pk}), data=json.dumps({
'user': self.user1.pk,
'recipe': {
'id': recipe.pk,
'name': recipe.name,
},
}), content_type='application/json')
# Then.
self.assertEqual(201, response.status_code)
self.assertEqual(self.user1.pk, response.data['user'])
self.assertEqual(recipe.pk, response.data['recipe'].get('id'))
# Get 'recipe' count from database.
self.assertEqual(UserRecipe.objects.filter(user=self.user1).count(), 1)
def test_user_cannot_save_recipe_more_than_once(self):
# Given.
user_recipe = UserRecipeFactory(user=self.user1)
# When.
response = self.client.post(reverse('user-recipe-search', kwargs={'user_pk': self.user1.pk}), data={
'recipe': user_recipe.recipe.pk
})
# Then.
self.assertEqual(400, response.status_code)
def test_user_can_delete_recipes(self):
# Given.
user_recipe = UserRecipeFactory(user=self.user1)
# When.
response = self.client.delete(reverse(
'user-recipe',
kwargs={'user_pk': self.user1.pk, 'recipe_pk': user_recipe.recipe.pk})
)
# Then.
self.assertEqual(204, response.status_code)
def test_user_cannot_delete_other_users_recipes(self):
# Given.
user_recipe = UserRecipeFactory(user=self.user2)
# When.
response = self.client.delete(reverse(
'user-recipe',
kwargs={'user_pk': user_recipe.user.pk, 'recipe_pk': user_recipe.recipe.pk})
)
# Then.
self.assertEqual(403, response.status_code)
# ===== File: /pardus/tags/2007/applications/network/grsync/actions.py (repo: aligulle1/kuller) =====
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2006 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def setup():
autotools.autoconf() # for turkish patch
autotools.configure()
def build():
autotools.make()
def install():
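    # Stage the install under the packaging root; every path below is prefixed with DESTDIR.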
autotools.rawInstall("DESTDIR=%(DESTINATION)s \
INSTALLDIR=%(DESTINATION)s/usr/bin \
MANDIR=%(DESTINATION)s/usr/share/man/man1 \
INCLUDEDIR=%(DESTINATION)s/usr/include \
LOCALEDIR=%(DESTINATION)s/usr/share/locale \
PKGCONFIGDIR=%(DESTINATION)s/usr/lib/pkgconfig" % {'DESTINATION': get.installDIR()})
pisitools.dodoc("AUTHORS", "COPYING", "README", "Changelog", "INSTALL", "NEWS")
# ===== File: /python_tutorial/excercise_13_oops_concept/multilevel_inhwrentance.py (repo: poojataksande9211/python_data) =====
# multilevel inheritance
class Phone:#base class/parent class
def __init__(self,model_name,brand,price):
self.model_name=model_name
self.brand=brand
self._price=max(price,0)
def full_name(self):
return f"{self.model_name} {self.brand}"
    def calling_no(self, phone_no):
return f"calling from {phone_no}"
class Smartphone(Phone):#derived class/child class
def __init__(self,model_name,brand,price,ram,internal_memory,rear_camera):
Phone.__init__(self,model_name,brand,price)
self.ram=ram
self.internal_memory=internal_memory
self.rear_camera=rear_camera
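# Third level of the chain: flagshipPhone -> Smartphone -> Phone.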
class flagshipPhone(Smartphone):
def __init__(self,model_name,brand,price,ram,internal_memory,rear_camera,front_camera):
Smartphone.__init__(self,model_name,brand,price,ram,internal_memory,rear_camera)
self.front_camera=front_camera
p1=flagshipPhone("1+","67ytr",78000,"16gb","4gb","7mp","9mp")
print(p1.full_name())
# ===== File: /oyS6TX4NXzpbfjL4a_12.py (repo: daniel-reich/turbo-robot) =====
"""
This challenge is based on the game Scrabble. Each word you play is scored
based on the point value of each tile/letter (see first table), as well as
additional points conferred by any special squares your tiles land on (see
second table).
Create a function that takes a list representing a row of squares in a
Scrabble board, and a string representing the word to be played. The list will
consist of `-` representing normal squares, and "DL", "TL", "DW" representing
special squares. Return the index of the list where the first letter of the
word should be placed to maximise the score of the word to be played. Return
the lowest index, if several exist.
Letter| Points
---|---
A| 1
B| 3
C| 3
D| 2
E| 1
F| 4
G| 2
H| 4
I| 1
J| 8
K| 5
L| 2
M| 3
N| 1
O| 1
P| 3
Q| 10
R| 1
S| 1
T| 1
U| 1
V| 4
W| 4
X| 8
Y| 4
Z| 10
Special Square| Meaning
---|---
DL| Double letter score - doubles the point value of a letter placed on the
square
TL| Triple letter score - triples the point value of a letter placed on the
square
DW| Double word score - doubles the score of an entire word if one of its
letters is placed on the square
### Examples
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quiz") ➞ 0
# Doubling the entire word maximises the score. Starting at
# indices 1,10, and 11 have the same effect, but the function
# should return the lowest index.
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quit") ➞ 5
# Tripling the first letter alone gives a higher score than
# doubling the entire word, as the other 3 letters have
# low point-values.
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quart") ➞ 9
# Tripling the first (high-scoring) letter, and doubling the word.
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quartz") ➞ 0
# Tripling the last (high-scoring) letter, and doubling the word.
# Index 9 has the same effect (tripling the first letter, doubling
# the word), but 0 is the lower index.
### Notes
N/A
"""
def best_start(lst, word):
points = [1,3,3,2,1,4,2,4,1,8,5,2,3,1,1,3,10,1,1,1,1,4,4,8,4,10]
lst2 = []
add = ['-','DL','TL']
for i in range(16-len(word)):
p = 0
multiple = 1
for j in range(len(word)):
if lst[i+j] == 'DW':
p += points[ord(word[j].lower())-97]
multiple *= 2
else:
p += (add.index(lst[i+j])+1)*points[ord(word[j].lower())-97]
lst2.append(multiple*p)
return lst2.index(max(lst2))
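# Hypothetical quick check mirroring the examples in the docstring above:
if __name__ == "__main__":
    row = ["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"]
    assert best_start(row, "quiz") == 0   # doubling the whole word wins
    assert best_start(row, "quit") == 5   # tripling the Q wins
    assert best_start(row, "quart") == 9  # triple letter on Q plus double word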
# ===== File: /CAAPR/CAAPR_AstroMagic/PTS/pts/modeling/misc/geometryplotter.py (repo: Stargrazer82301/CAAPR) =====
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.misc.geometryplotter Contains the GeometryPlotter class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
from textwrap import wrap
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse as plt_Ellipse
from collections import OrderedDict
# Import the relevant PTS classes and modules
from ...core.tools.logging import log
from ..basics.models import SersicModel, ExponentialDiskModel, DeprojectionModel
# -----------------------------------------------------------------
pretty_colors = ["r", "dodgerblue", "purple", "darkorange", "lawngreen", "yellow", "darkblue", "teal", "darkgreen", "lightcoral", "crimson", "saddlebrown"]
# -----------------------------------------------------------------
class GeometryPlotter(object):
"""
This class...
"""
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(GeometryPlotter, self).__init__()
# -- Attributes --
# The geometries
self.geometries = OrderedDict()
# The patches
self.patches = OrderedDict()
# The figure
self._figure = None
self._min_x = None
self._max_x = None
self._min_y = None
self._max_y = None
# Properties
self.title = None
self.format = None
self.transparent = False
# -----------------------------------------------------------------
def add_geometry(self, geometry, label):
"""
This function ...
:param geometry:
:param label:
:return:
"""
self.geometries[label] = geometry
# -----------------------------------------------------------------
def run(self, path):
"""
This function ...
:param path:
:return:
"""
# Create matplotlib patches from the geometries
self.create_patches()
# Plot
self.plot(path)
# -----------------------------------------------------------------
def create_patches(self):
"""
This function ...
:return:
"""
colors = iter(pretty_colors)
# Loop over the geometries
for label in self.geometries:
geometry = self.geometries[label]
x_center = 0.0
y_center = 0.0
major = None # 2 * major axis radius
minor = None # 2 * minor axis radius
angle = None # in degrees
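            # Map each supported model onto an ellipse: full axis lengths in pc plus a tilt in degrees.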
if isinstance(geometry, SersicModel):
major = 2.0 * geometry.effective_radius.to("pc").value
minor = geometry.flattening * major
angle = geometry.tilt.to("deg").value
elif isinstance(geometry, ExponentialDiskModel):
major = 2.0 * geometry.radial_scale.to("pc").value
minor = 2.0 * geometry.axial_scale.to("pc").value
angle = geometry.tilt.to("deg").value
elif isinstance(geometry, DeprojectionModel):
minor = 2.0 * geometry.scale_height.to("pc").value
major = 0.3 * (geometry.pixelscale * geometry.x_size).to("pc").value
angle = 0.0
if self._min_x is None or 0.5*major > abs(self._min_x): self._min_x = - 0.5*major
if self._max_x is None or 0.5*major > self._max_x: self._max_x = 0.5*major
if self._min_y is None or 0.5*minor > abs(self._min_y): self._min_y = - 0.5*minor
if self._max_y is None or 0.5*minor > self._max_y: self._max_y = 0.5*minor
# Create the patch
color = next(colors)
ell = plt_Ellipse((x_center, y_center), major, minor, angle, edgecolor='none', facecolor=color, lw=3, alpha=0.7)
# Add the patch
self.patches[label] = ell
# -----------------------------------------------------------------
def plot(self, path):
"""
This function ...
:param path:
:return:
"""
# Inform the user
log.info("Plotting ...")
# Create the figure
self._figure = plt.figure()
ax = self._figure.add_subplot(111, aspect='equal')
for label in self.patches:
ax.add_patch(self.patches[label])
# TODO: add text for label
#plt.grid('on')
ax.set_xlim(self._min_x, self._max_x)
ax.set_ylim(self._min_y, self._max_y)
# Set the title
if self.title is not None: self._figure.suptitle("\n".join(wrap(self.title, 60)))
# Finish
self.finish_plot(path)
# -----------------------------------------------------------------
def finish_plot(self, path):
"""
This function ...
:param path:
:return:
"""
# Debugging
if type(path).__name__ == "BytesIO":
log.debug("Saving the SED plot to a buffer ...")
elif path is None: log.debug("Showing the SED plot ...")
else: log.debug("Saving the SED plot to " + str(path) + " ...")
# Save the figure
if path is not None: plt.savefig(path, bbox_inches='tight', pad_inches=0.25, transparent=self.transparent, format=self.format)
else: plt.show()
plt.close()
# -----------------------------------------------------------------
# ===== File: /itkBSplineTransformInitializerPython.pyi (repo: hjmjohnson/itk-stubs) =====
import itk.itkRGBPixelPython
from typing import Any
class _SwigNonDynamicMeta(type):
__setattr__: Any = ...
def itkBSplineTransformInitializerBSTD23IF2_New(): ...
class itkBSplineTransformInitializerBSTD23IF2(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD23IF2___New_orig__: Any
itkBSplineTransformInitializerBSTD23IF2_cast: Any
def itkBSplineTransformInitializerBSTD23ISS2_New(): ...
class itkBSplineTransformInitializerBSTD23ISS2(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD23ISS2___New_orig__: Any
itkBSplineTransformInitializerBSTD23ISS2_cast: Any
def itkBSplineTransformInitializerBSTD23IUC2_New(): ...
class itkBSplineTransformInitializerBSTD23IUC2(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD23IUC2___New_orig__: Any
itkBSplineTransformInitializerBSTD23IUC2_cast: Any
def itkBSplineTransformInitializerBSTD33IF3_New(): ...
class itkBSplineTransformInitializerBSTD33IF3(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD33IF3___New_orig__: Any
itkBSplineTransformInitializerBSTD33IF3_cast: Any
def itkBSplineTransformInitializerBSTD33ISS3_New(): ...
class itkBSplineTransformInitializerBSTD33ISS3(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD33ISS3___New_orig__: Any
itkBSplineTransformInitializerBSTD33ISS3_cast: Any
def itkBSplineTransformInitializerBSTD33IUC3_New(): ...
class itkBSplineTransformInitializerBSTD33IUC3(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD33IUC3___New_orig__: Any
itkBSplineTransformInitializerBSTD33IUC3_cast: Any
# ===== File: /authentication/Newapi/views.py (repo: Coder339/V-django-newCRM) =====
import jwt
from django.conf import settings
from django.contrib import messages
from django.forms.models import model_to_dict
from django.http import Http404
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.views import View
from rest_framework import authentication, generics, mixins, permissions, status
from .serializer import *
from rest_framework.exceptions import NotFound
from rest_framework.permissions import (IsAuthenticated,
IsAuthenticatedOrReadOnly)
from rest_framework.response import Response
from rest_framework.views import APIView
# from authentication.models import (User, UserProfile, UserDevices)
from authentication.models import (User)
# from authentication.permissions import (
# IsClientAdmin,
# IsProfileOwner,
# IsOwnerOrAdmin)
from authentication.renderer import UserJSONRenderer, ClientJSONRenderer
from django.core.exceptions import ObjectDoesNotExist
from .serializer import (RegistrationSerializer, LoginSerializer)
from django.core.cache import cache
# from utils import BaseUtils
from utils.permissions import IsViewerOrReadOnly, IsReviewer, IsAdmin
# from utils.emailer import Emailer
from utils.util import generateOTP
# from utils.models import BaseAbstractModel
class RegistrationAPIView(generics.GenericAPIView):
"""Register new users."""
serializer_class = RegistrationSerializer
renderer_classes = (UserJSONRenderer,)
def post(self, request):
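        # Validate the payload, create the user, and wrap the serialized account in the response envelope.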
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
user_data = serializer.data
message = [
request,
user_data["email"]
]
response = {
"data": {
"user": dict(user_data),
"message": "Account created successfully",
"status": "success"
}
}
return Response(response, status=status.HTTP_201_CREATED)
class LoginAPIView(generics.GenericAPIView):
"""login a user via email"""
serializer_class = LoginSerializer
renderer_classes = (UserJSONRenderer,)
def post(self, request):
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user_data = serializer.data
response = {
"data": {
"user": dict(user_data),
"message": "You have successfully logged in",
"status": "success"
}
}
return Response(response, status=status.HTTP_200_OK)
class UserListCreateView(mixins.ListModelMixin,
mixins.CreateModelMixin,
generics.GenericAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = []
# authentication_classes = [SessionAuthentication]
lookup_field = 'pk'
def get(self,request,*args,**kwargs):
return self.list(request,*args,**kwargs)
def post(self,request,*args,**kwargs):
return self.create(request,*args,**kwargs)
# def perform_create(self,serializer):
# user = self.request.user
# serializer.save(user=user)
class UserDetailView(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
generics.GenericAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = []
# authentication_classes = [SessionAuthentication]
lookup_field = 'pk'
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
# ===== File: /scrap/migrations/0005_auto_20200523_1841.py (repo: NicolleLouis/scrap_freelance) =====
# Generated by Django 3.0.4 on 2020-05-23 18:41
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scrap', '0004_auto_20200523_1246'),
]
operations = [
migrations.AddField(
model_name='bike',
name='amortisseur',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='boitier_de_pedalier',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='cadre',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='cassette',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='chaine',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='cintre',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='coloris',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='derailleur_arriere',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='derailleur_avant',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='extras',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='fourche',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='freins',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='jantes',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='leviers_de_frein',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='manettes',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='moyeux',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='pedales',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='pedalier',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='pneus',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='poids',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='potence',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='rayons',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='selle',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='tailles',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='tige_de_selle',
field=models.TextField(blank=True, null=True),
),
]
# ===== File: /Users/A/awalias/european_league_tables_by_year.py (repo: BerilBBJ/scraperwiki-scraper-vault) =====
# Scraper for Premier League, Bundesliga, and Serie A league tables between 2009-2012
# Extra countries can be added (france) or Divisions (england2, germany2 etc.) in countries list
# Ant Wilson 2013
import scraperwiki
import lxml.html
countries = ['england', 'germany', 'italy']
class EOS_Table(object):
"""class representing the league table at the end of the season"""
fields = ["Position" ,
"Team" ,
"Matches played" ,
"Matches won" ,
"Draws" ,
"Matches lost" ,
"Goals For" ,
"Goals Against" ,
"Goal Difference",
"Points" ,
"League" ,
"Year" ]
def is_ascii(self,s):
return all(ord(c) < 128 for c in s)
# when initialised, entity will parse for selectors and save resulting dict
def __init__(self, element, year, league):
row = element.cssselect("tr")
for el in row:
td = el.cssselect("td")
store = {}
if (self.is_ascii(td[0].text_content())):
for i in range(0,10):
store[self.fields[i]] = td[i].text_content().strip()
store[self.fields[10]] = league
store[self.fields[11]] = year
store['Key'] = store['Team'] + '-' + str(store['Year'])
scraperwiki.sqlite.save(unique_keys=["Key"], data=store)
# main. Grabs league table for each combination of country-year. Leagues/Countries set at top of file.
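# Note: constructing EOS_Table(...) scrapes the row data and saves it as a side effect of __init__.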
for country in countries:
for year in range(2009,2013):
html = scraperwiki.scrape("http://www.soccerstats.com/latest.asp?league=%s_%s" % (country, year))
root = lxml.html.fromstring(html)
for element in root.cssselect("table.stat"):
EOS_Table(element, year, country)
# ===== File: /medium/algos/combination_sum_iii.py (repo: nicokuzak/leetcode) =====
"""Find all valid combinations of k numbers that sum up to n such that the following conditions are true:
Only numbers 1 through 9 are used.
Each number is used at most once.
Return a list of all possible valid combinations. The list must not contain the same combination twice, and the combinations may be returned in any order.
Example 1:
Input: k = 3, n = 7
Output: [[1,2,4]]
Explanation:
1 + 2 + 4 = 7
There are no other valid combinations.
Example 2:
Input: k = 3, n = 9
Output: [[1,2,6],[1,3,5],[2,3,4]]
Explanation:
1 + 2 + 6 = 9
1 + 3 + 5 = 9
2 + 3 + 4 = 9
There are no other valid combinations.
Example 3:
Input: k = 4, n = 1
Output: []
Explanation: There are no valid combinations. [1,2,1] is not valid because 1 is used twice.
Example 4:
Input: k = 3, n = 2
Output: []
Explanation: There are no valid combinations.
Example 5:
Input: k = 9, n = 45
Output: [[1,2,3,4,5,6,7,8,9]]
Explanation:
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 = 45
There are no other valid combinations.
Constraints:
2 <= k <= 9
1 <= n <= 60"""
from typing import List

class Solution:
def combinationSum3(self, k: int, n: int) -> List[List[int]]:
res = []
def dfs(cur, k, n, nxt):
if len(cur) == k:
if sum(cur) == n:
res.append(cur)
return
for j in range(len(nxt)):
dfs(cur[:]+[nxt[j]], k, n, nxt[j+1:])
for i in range(1, 10):
dfs([i], k, n, [num for num in range(i+1,10)])
        return res
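# Hypothetical quick check based on the docstring examples:
# Solution().combinationSum3(3, 9) -> [[1, 2, 6], [1, 3, 5], [2, 3, 4]] (order may vary)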
"[email protected]"
] | |
72e25a646457b19dbade0f1a472183493982b85a | b1c89709a76de2e5ed6ec3b1d38ad2214dbd6cfb | /treecorr/ggcorrelation.py | b8e7d23b00659a2f98301a651cdea8402adef0be | [
"BSD-2-Clause",
"BSD-2-Clause-Views"
] | permissive | kstoreyf/TreeCorr | 63f371dd8a28db7786135445353f4d39c40fbb3b | ca4864de39db7ecb78bf2f8c32f18e1e649d1395 | refs/heads/master | 2020-04-13T10:29:02.448238 | 2018-10-02T15:55:44 | 2018-10-02T15:55:44 | 163,141,491 | 0 | 0 | NOASSERTION | 2018-12-26T05:51:32 | 2018-12-26T05:51:32 | null | UTF-8 | Python | false | false | 29,640 | py | # Copyright (c) 2003-2015 by Mike Jarvis
#
# TreeCorr is free software: redistribution and use in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions, and the disclaimer given in the accompanying LICENSE
# file.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the disclaimer given in the documentation
# and/or other materials provided with the distribution.
"""
.. module:: ggcorrelation
"""
import treecorr
import numpy
class GGCorrelation(treecorr.BinnedCorr2):
"""This class handles the calculation and storage of a 2-point shear-shear correlation
function.
    Objects of this class hold the following attributes:
:nbins: The number of bins in logr
:bin_size: The size of the bins in logr
:min_sep: The minimum separation being considered
:max_sep: The maximum separation being considered
In addition, the following attributes are numpy arrays of length (nbins):
:logr: The nominal center of the bin in log(r) (the natural logarithm of r).
:rnom: The nominal center of the bin converted to regular distance.
i.e. r = exp(logr).
:meanr: The (weighted) mean value of r for the pairs in each bin.
If there are no pairs in a bin, then exp(logr) will be used instead.
:meanlogr: The (weighted) mean value of log(r) for the pairs in each bin.
If there are no pairs in a bin, then logr will be used instead.
:xip: The correlation function, :math:`\\xi_+(r)`.
        :xim:        The correlation function, :math:`\\xi_-(r)`.
:xip_im: The imaginary part of :math:`\\xi_+(r)`.
:xim_im: The imaginary part of :math:`\\xi_-(r)`.
:varxi: The variance of xip and xim, only including the shape noise propagated
into the final correlation. This does not include sample variance, so
it is always an underestimate of the actual variance.
:weight: The total weight in each bin.
:npairs: The number of pairs going into each bin.
If `sep_units` are given (either in the config dict or as a named kwarg) then the distances
will all be in these units. Note however, that if you separate out the steps of the
:func:`process` command and use :func:`process_auto` and/or :func:`process_cross`, then the
units will not be applied to :meanr: or :meanlogr: until the :func:`finalize` function is
called.
The typical usage pattern is as follows:
>>> gg = treecorr.GGCorrelation(config)
>>> gg.process(cat) # For auto-correlation.
>>> gg.process(cat1,cat2) # For cross-correlation.
>>> gg.write(file_name) # Write out to a file.
>>> xip = gg.xip # Or access the correlation function directly.
:param config: A configuration dict that can be used to pass in kwargs if desired.
This dict is allowed to have addition entries in addition to those listed
in :class:`~treecorr.BinnedCorr2`, which are ignored here. (default: None)
:param logger: If desired, a logger object for logging. (default: None, in which case
one will be built according to the config dict's verbose level.)
See the documentation for :class:`~treecorr.BinnedCorr2` for the list of other allowed kwargs,
which may be passed either directly or in the config dict.
"""
def __init__(self, config=None, logger=None, **kwargs):
treecorr.BinnedCorr2.__init__(self, config, logger, **kwargs)
self.xip = numpy.zeros(self.nbins, dtype=float)
self.xim = numpy.zeros(self.nbins, dtype=float)
self.xip_im = numpy.zeros(self.nbins, dtype=float)
self.xim_im = numpy.zeros(self.nbins, dtype=float)
self.varxi = numpy.zeros(self.nbins, dtype=float)
self.meanr = numpy.zeros(self.nbins, dtype=float)
self.meanlogr = numpy.zeros(self.nbins, dtype=float)
self.weight = numpy.zeros(self.nbins, dtype=float)
self.npairs = numpy.zeros(self.nbins, dtype=float)
self._build_corr()
self.logger.debug('Finished building GGCorr')
def _build_corr(self):
from treecorr.util import double_ptr as dp
self.corr = treecorr._lib.BuildGGCorr(
self._min_sep,self._max_sep,self.nbins,self.bin_size,self.b,
self.min_rpar, self.max_rpar,
dp(self.xip),dp(self.xip_im),dp(self.xim),dp(self.xim_im),
dp(self.meanr),dp(self.meanlogr),dp(self.weight),dp(self.npairs))
def __del__(self):
# Using memory allocated from the C layer means we have to explicitly deallocate it
# rather than being able to rely on the Python memory manager.
if hasattr(self,'corr'): # In case __init__ failed to get that far
treecorr._lib.DestroyGGCorr(self.corr)
def copy(self):
import copy
return copy.deepcopy(self)
def __getstate__(self):
d = self.__dict__.copy()
del d['corr']
del d['logger'] # Oh well. This is just lost in the copy. Can't be pickled.
return d
def __setstate__(self, d):
self.__dict__ = d
self._build_corr()
self.logger = treecorr.config.setup_logger(
treecorr.config.get(self.config,'verbose',int,0),
self.config.get('log_file',None))
def __repr__(self):
return 'GGCorrelation(config=%r)'%self.config
def process_auto(self, cat, metric=None, num_threads=None):
"""Process a single catalog, accumulating the auto-correlation.
This accumulates the weighted sums into the bins, but does not finalize
the calculation by dividing by the total weight at the end. After
calling this function as often as desired, the finalize() command will
finish the calculation.
:param cat: The catalog to process
:param metric: Which metric to use. See :meth:`~treecorr.GGCorrelation.process` for
details. (default: 'Euclidean'; this value can also be given in the
constructor in the config dict.)
:param num_threads: How many OpenMP threads to use during the calculation.
(default: use the number of cpu cores; this value can also be given in
the constructor in the config dict.) Note that this won't work if the
system's C compiler is clang prior to version 3.7.
"""
if cat.name == '':
self.logger.info('Starting process GG auto-correlations')
else:
self.logger.info('Starting process GG auto-correlations for cat %s.',cat.name)
self._set_metric(metric, cat.coords)
self._set_num_threads(num_threads)
min_size, max_size = self._get_minmax_size()
field = cat.getGField(min_size,max_size,self.split_method,self.max_top)
self.logger.info('Starting %d jobs.',field.nTopLevelNodes)
treecorr._lib.ProcessAutoGG(self.corr, field.data, self.output_dots,
self._coords, self._metric)
def process_cross(self, cat1, cat2, metric=None, num_threads=None):
"""Process a single pair of catalogs, accumulating the cross-correlation.
This accumulates the weighted sums into the bins, but does not finalize
the calculation by dividing by the total weight at the end. After
calling this function as often as desired, the finalize() command will
finish the calculation.
:param cat1: The first catalog to process
:param cat2: The second catalog to process
:param metric: Which metric to use. See :meth:`~treecorr.GGCorrelation.process` for
details. (default: 'Euclidean'; this value can also be given in the
constructor in the config dict.)
:param num_threads: How many OpenMP threads to use during the calculation.
(default: use the number of cpu cores; this value can also be given in
the constructor in the config dict.) Note that this won't work if the
system's C compiler is clang prior to version 3.7.
"""
if cat1.name == '' and cat2.name == '':
self.logger.info('Starting process GG cross-correlations')
else:
self.logger.info('Starting process GG cross-correlations for cats %s, %s.',
cat1.name, cat2.name)
self._set_metric(metric, cat1.coords, cat2.coords)
self._set_num_threads(num_threads)
min_size, max_size = self._get_minmax_size()
f1 = cat1.getGField(min_size,max_size,self.split_method,self.max_top)
f2 = cat2.getGField(min_size,max_size,self.split_method,self.max_top)
self.logger.info('Starting %d jobs.',f1.nTopLevelNodes)
treecorr._lib.ProcessCrossGG(self.corr, f1.data, f2.data, self.output_dots,
self._coords, self._metric)
def process_pairwise(self, cat1, cat2, metric=None, num_threads=None):
"""Process a single pair of catalogs, accumulating the cross-correlation, only using
the corresponding pairs of objects in each catalog.
This accumulates the weighted sums into the bins, but does not finalize
the calculation by dividing by the total weight at the end. After
calling this function as often as desired, the finalize() command will
finish the calculation.
:param cat1: The first catalog to process
:param cat2: The second catalog to process
:param metric: Which metric to use. See :meth:`~treecorr.GGCorrelation.process` for
details. (default: 'Euclidean'; this value can also be given in the
constructor in the config dict.)
:param num_threads: How many OpenMP threads to use during the calculation.
(default: use the number of cpu cores; this value can also be given in
the constructor in the config dict.) Note that this won't work if the
system's C compiler is clang prior to version 3.7.
"""
if cat1.name == '' and cat2.name == '':
self.logger.info('Starting process GG pairwise-correlations')
else:
self.logger.info('Starting process GG pairwise-correlations for cats %s, %s.',
cat1.name, cat2.name)
self._set_metric(metric, cat1.coords, cat2.coords)
self._set_num_threads(num_threads)
f1 = cat1.getGSimpleField()
f2 = cat2.getGSimpleField()
treecorr._lib.ProcessPairGG(self.corr, f1.data, f2.data, self.output_dots,
self._coords, self._metric)
def finalize(self, varg1, varg2):
"""Finalize the calculation of the correlation function.
The process_auto and process_cross commands accumulate values in each bin,
so they can be called multiple times if appropriate. Afterwards, this command
finishes the calculation by dividing each column by the total weight.
:param varg1: The shear variance per component for the first field.
:param varg2: The shear variance per component for the second field.
"""
mask1 = self.weight != 0
mask2 = self.weight == 0
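        # Normalize the accumulated sums in occupied bins; empty bins fall back to nominal values below.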
self.xip[mask1] /= self.weight[mask1]
self.xim[mask1] /= self.weight[mask1]
self.xip_im[mask1] /= self.weight[mask1]
self.xim_im[mask1] /= self.weight[mask1]
self.meanr[mask1] /= self.weight[mask1]
self.meanlogr[mask1] /= self.weight[mask1]
self.varxi[mask1] = varg1 * varg2 / self.weight[mask1]
# Update the units of meanr, meanlogr
self._apply_units(mask1)
# Use meanr, meanlogr when available, but set to nominal when no pairs in bin.
self.meanr[mask2] = self.rnom[mask2]
self.meanlogr[mask2] = self.logr[mask2]
self.varxi[mask2] = 0.
def clear(self):
"""Clear the data vectors
"""
self.xip[:] = 0
self.xim[:] = 0
self.xip_im[:] = 0
self.xim_im[:] = 0
self.meanr[:] = 0
self.meanlogr[:] = 0
self.weight[:] = 0
self.npairs[:] = 0
def __iadd__(self, other):
"""Add a second GGCorrelation's data to this one.
Note: For this to make sense, both Correlation objects should have been using
process_auto and/or process_cross, and they should not have had finalize called yet.
Then, after adding them together, you should call finalize on the sum.
"""
if not isinstance(other, GGCorrelation):
raise AttributeError("Can only add another GGCorrelation object")
if not (self.nbins == other.nbins and
self.min_sep == other.min_sep and
self.max_sep == other.max_sep):
raise ValueError("GGCorrelation to be added is not compatible with this one.")
self.xip[:] += other.xip[:]
self.xim[:] += other.xim[:]
self.xip_im[:] += other.xip_im[:]
self.xim_im[:] += other.xim_im[:]
self.meanr[:] += other.meanr[:]
self.meanlogr[:] += other.meanlogr[:]
self.weight[:] += other.weight[:]
self.npairs[:] += other.npairs[:]
return self
def process(self, cat1, cat2=None, metric=None, num_threads=None):
"""Compute the correlation function.
If only 1 argument is given, then compute an auto-correlation function.
If 2 arguments are given, then compute a cross-correlation function.
Both arguments may be lists, in which case all items in the list are used
for that element of the correlation.
:param cat1: A catalog or list of catalogs for the first G field.
:param cat2: A catalog or list of catalogs for the second G field, if any.
(default: None)
:param metric: Which metric to use for distance measurements. Options are:
- 'Euclidean' = straight line Euclidean distance between two points.
For spherical coordinates (ra,dec without r), this is the chord
distance between points on the unit sphere.
- 'Rperp' = the perpendicular component of the distance. For two points
with distance from Earth `r1, r2`, if `d` is the normal Euclidean
distance and :math:`Rparallel = |r1-r2|`, then we define
:math:`Rperp^2 = d^2 - Rparallel^2`.
- 'Rlens' = the projected distance perpendicular to the first point
in the pair (taken to be a lens) to the line of sight to the second
point (e.g. a lensed source galaxy).
- 'Arc' = the true great circle distance for spherical coordinates.
(default: 'Euclidean'; this value can also be given in the constructor
in the config dict.)
:param num_threads: How many OpenMP threads to use during the calculation.
(default: use the number of cpu cores; this value can also be given in
the constructor in the config dict.) Note that this won't work if the
system's C compiler is clang prior to version 3.7.
"""
import math
self.clear()
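        # Accept either single catalogs or lists of catalogs on each side.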
if not isinstance(cat1,list): cat1 = [cat1]
if cat2 is not None and not isinstance(cat2,list): cat2 = [cat2]
if len(cat1) == 0:
raise AttributeError("No catalogs provided for cat1")
if cat2 is None or len(cat2) == 0:
varg1 = treecorr.calculateVarG(cat1)
varg2 = varg1
self.logger.info("varg = %f: sig_sn (per component) = %f",varg1,math.sqrt(varg1))
self._process_all_auto(cat1, metric, num_threads)
else:
varg1 = treecorr.calculateVarG(cat1)
varg2 = treecorr.calculateVarG(cat2)
self.logger.info("varg1 = %f: sig_sn (per component) = %f",varg1,math.sqrt(varg1))
self.logger.info("varg2 = %f: sig_sn (per component) = %f",varg2,math.sqrt(varg2))
self._process_all_cross(cat1,cat2, metric, num_threads)
self.finalize(varg1,varg2)
def write(self, file_name, file_type=None, prec=None):
"""Write the correlation function to the file, file_name.
The output file will include the following columns:
:R_nom: The nominal center of the bin in R.
:meanR: The mean value :math:`\\langle R\\rangle` of pairs that fell into each bin.
:meanlogR: The mean value :math:`\\langle logR\\rangle` of pairs that fell into each
bin.
:xip: The real part of the :math:`\\xi_+` correlation function.
:xim: The real part of the :math:`\\xi_-` correlation function.
:xip_im: The imag part of the :math:`\\xi_+` correlation function.
:xim_im: The imag part of the :math:`\\xi_-` correlation function.
:sigma_xi: The sqrt of the variance estimate of :math:`\\xi_+`, :math:`\\xi_-`.
:weight: The total weight contributing to each bin.
:npairs: The number of pairs contributing to each bin.
If `sep_units` was given at construction, then the distances will all be in these units.
Otherwise, they will be in either the same units as x,y,z (for flat or 3d coordinates) or
radians (for spherical coordinates).
:param file_name: The name of the file to write to.
:param file_type: The type of file to write ('ASCII' or 'FITS'). (default: determine
the type automatically from the extension of file_name.)
:param prec: For ASCII output catalogs, the desired precision. (default: 4;
this value can also be given in the constructor in the config dict.)
"""
self.logger.info('Writing GG correlations to %s',file_name)
if prec is None:
prec = treecorr.config.get(self.config,'precision',int,4)
treecorr.util.gen_write(
file_name,
['R_nom','meanR','meanlogR','xip','xim','xip_im','xim_im','sigma_xi','weight','npairs'],
[ self.rnom, self.meanr, self.meanlogr,
self.xip, self.xim, self.xip_im, self.xim_im, numpy.sqrt(self.varxi),
self.weight, self.npairs ],
prec=prec, file_type=file_type, logger=self.logger)
def read(self, file_name, file_type=None):
"""Read in values from a file.
This should be a file that was written by TreeCorr, preferably a FITS file, so there
is no loss of information.
Warning: The GGCorrelation object should be constructed with the same configuration
parameters as the one being read. e.g. the same min_sep, max_sep, etc. This is not
checked by the read function.
:param file_name: The name of the file to read in.
:param file_type: The type of file ('ASCII' or 'FITS'). (default: determine the type
automatically from the extension of file_name.)
"""
self.logger.info('Reading GG correlations from %s',file_name)
data, _ = treecorr.util.gen_read(file_name, file_type=file_type)
self.rnom = data['R_nom']
self.logr = numpy.log(data['R_nom'])
self.meanr = data['meanR']
self.meanlogr = data['meanlogR']
self.xip = data['xip']
self.xim = data['xim']
self.xip_im = data['xip_im']
self.xim_im = data['xim_im']
self.varxi = data['sigma_xi']**2
self.weight = data['weight']
self.npairs = data['npairs']
self._build_corr()
def calculateMapSq(self, m2_uform=None):
"""Calculate the aperture mass statistics from the correlation function.
.. math::
\\langle M_{ap}^2 \\rangle(R) &= \\int_{0}^{rmax} \\frac{r dr}{2R^2}
\\left [ T_+\\left(\\frac{r}{R}\\right) \\xi_+(r) +
T_-\\left(\\frac{r}{R}\\right) \\xi_-(r) \\right] \\\\
\\langle M_\\times^2 \\rangle(R) &= \\int_{0}^{rmax} \\frac{r dr}{2R^2}
\\left [ T_+\\left(\\frac{r}{R}\\right) \\xi_+(r) -
T_-\\left(\\frac{r}{R}\\right) \\xi_-(r) \\right]
The m2_uform parameter sets which definition of the aperture mass to use.
The default is to use 'Crittenden'.
If m2_uform == 'Crittenden':
.. math::
U(r) &= \\frac{1}{2\\pi} (1-r^2) \\exp(-r^2/2) \\\\
Q(r) &= \\frac{1}{4\\pi} r^2 \\exp(-r^2/2) \\\\
T_+(s) &= \\frac{s^4 - 16s^2 + 32}{128} \\exp(-s^2/4) \\\\
T_-(s) &= \\frac{s^4}{128} \\exp(-s^2/4) \\\\
rmax &= \\infty
If m2_uform == 'Schneider':
.. math::
U(r) &= \\frac{9}{\\pi} (1-r^2) (1/3-r^2) \\\\
Q(r) &= \\frac{6}{\\pi} r^2 (1-r^2) \\\\
T_+(s) &= \\frac{12}{5\\pi} (2-15s^2) \\arccos(s/2)
+ \\frac{1}{100\\pi} s \\sqrt{4-s^2} (120 + 2320s^2 - 754s^4 + 132s^6 - 9s^8) \\\\
T_-(s) &= \\frac{3}{70\\pi} s^3 (4-s^2)^{7/2} \\\\
rmax &= 2R
cf. Schneider, et al (2001): http://xxx.lanl.gov/abs/astro-ph/0112441
:param m2_uform: Which form to use for the aperture mass, as described above.
(default: 'Crittenden'; this value can also be given in the
constructor in the config dict.)
:returns: (mapsq, mapsq_im, mxsq, mxsq_im, varmapsq) as a tuple
"""
if m2_uform is None:
m2_uform = treecorr.config.get(self.config,'m2_uform',str,'Crittenden')
if m2_uform not in ['Crittenden', 'Schneider']:
raise ValueError("Invalid m2_uform")
# Make s a matrix, so we can eventually do the integral by doing a matrix product.
r = self.rnom
s = numpy.outer(1./r, self.meanr)
ssq = s*s
if m2_uform == 'Crittenden':
exp_factor = numpy.exp(-ssq/4.)
Tp = (32. + ssq*(-16. + ssq)) / 128. * exp_factor
Tm = ssq * ssq / 128. * exp_factor
else:
Tp = numpy.zeros_like(s)
Tm = numpy.zeros_like(s)
sa = s[s<2.]
ssqa = ssq[s<2.]
Tp[s<2.] = 12./(5.*numpy.pi) * (2.-15.*ssqa) * numpy.arccos(sa/2.)
Tp[s<2.] += 1./(100.*numpy.pi) * sa * numpy.sqrt(4.-ssqa) * (
120. + ssqa*(2320. + ssqa*(-754. + ssqa*(132. - 9.*ssqa))))
Tm[s<2.] = 3./(70.*numpy.pi) * sa * ssqa * (4.-ssqa)**3.5
Tp *= ssq
Tm *= ssq
# Now do the integral by taking the matrix products.
# Note that dlogr = bin_size
Tpxip = Tp.dot(self.xip)
Tmxim = Tm.dot(self.xim)
mapsq = (Tpxip + Tmxim) * 0.5 * self.bin_size
mxsq = (Tpxip - Tmxim) * 0.5 * self.bin_size
Tpxip_im = Tp.dot(self.xip_im)
Tmxim_im = Tm.dot(self.xim_im)
mapsq_im = (Tpxip_im + Tmxim_im) * 0.5 * self.bin_size
mxsq_im = (Tpxip_im - Tmxim_im) * 0.5 * self.bin_size
# The variance of each of these is
# Var(<Map^2>(R)) = int_r=0..2R [1/4 s^4 dlogr^2 (T+(s)^2 + T-(s)^2) Var(xi)]
varmapsq = (Tp**2 + Tm**2).dot(self.varxi) * 0.25 * self.bin_size**2
return mapsq, mapsq_im, mxsq, mxsq_im, varmapsq
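# A minimal usage sketch for the aperture mass statistics above (hedged:
# `cat` is a hypothetical treecorr.Catalog with shear columns, and the
# constructor arguments are illustrative values, not taken from this file):
# gg = treecorr.GGCorrelation(min_sep=1., max_sep=100., nbins=20, sep_units='arcmin')
# gg.process(cat)
# mapsq, mapsq_im, mxsq, mxsq_im, varmapsq = gg.calculateMapSq(m2_uform='Crittenden')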
def calculateGamSq(self, eb=False):
"""Calculate the tophat shear variance from the correlation function.
.. math::
\\langle \\gamma^2 \\rangle(R) &= \\int_0^{2R} \\frac{r dr}{R^2} S_+(s) \\xi_+(r) \\\\
\\langle \\gamma^2 \\rangle_E(R) &= \\int_0^{2R} \\frac{r dr}{2 R^2}
\\left [ S_+\\left(\\frac{r}{R}\\right) \\xi_+(r) +
S_-\\left(\\frac{r}{R}\\right) \\xi_-(r) \\right ] \\\\
\\langle \\gamma^2 \\rangle_B(R) &= \\int_0^{2R} \\frac{r dr}{2 R^2}
\\left [ S_+\\left(\\frac{r}{R}\\right) \\xi_+(r) -
S_-\\left(\\frac{r}{R}\\right) \\xi_-(r) \\right ] \\\\
S_+(s) &= \\frac{1}{\\pi} \\left(4 \\arccos(s/2) - s \\sqrt{4-s^2} \\right) \\\\
S_-(s) &= \\begin{cases}
s<=2, & [ s \\sqrt{4-s^2} (6-s^2) - 8(3-s^2) \\arcsin(s/2) ] / (\\pi s^4) \\\\
s>=2, & 4(s^2-3)/(s^4)
\\end{cases}
cf Schneider, et al, 2001: http://adsabs.harvard.edu/abs/2002A%26A...389..729S
The default behavior is not to compute the E/B versions. They are calculated if
eb is set to True.
:param eb: Whether to include the E/B decomposition as well as the total
:math:`\\langle \\gamma^2\\rangle`. (default: False)
:returns: (gamsq, vargamsq) if `eb == False` or
(gamsq, vargamsq, gamsq_e, gamsq_b, vargamsq_e) if `eb == True`
"""
r = self.rnom
s = numpy.outer(1./r, self.meanr)
ssq = s*s
Sp = numpy.zeros_like(s)
sa = s[s<2]
ssqa = ssq[s<2]
Sp[s<2.] = 1./numpy.pi * (4.*numpy.arccos(sa/2.) - sa*numpy.sqrt(4.-ssqa))
Sp *= ssq
# Now do the integral by taking the matrix products.
# Note that dlogr = bin_size
Spxip = Sp.dot(self.xip)
gamsq = Spxip * self.bin_size
vargamsq = (Sp**2).dot(self.varxi) * self.bin_size**2
# Stop here if eb == False
if not eb: return gamsq, vargamsq
Sm = numpy.empty_like(s)
Sm[s<2.] = 1./(ssqa*numpy.pi) * (sa*numpy.sqrt(4.-ssqa)*(6.-ssqa)
-8.*(3.-ssqa)*numpy.arcsin(sa/2.))
Sm[s>=2.] = 4.*(ssq[s>=2]-3.)/ssq[s>=2]
# This already includes the extra ssq factor.
Smxim = Sm.dot(self.xim)
gamsq_e = (Spxip + Smxim) * 0.5 * self.bin_size
gamsq_b = (Spxip - Smxim) * 0.5 * self.bin_size
vargamsq_e = (Sp**2 + Sm**2).dot(self.varxi) * 0.25 * self.bin_size**2
return gamsq, vargamsq, gamsq_e, gamsq_b, vargamsq_e
def writeMapSq(self, file_name, m2_uform=None, file_type=None, prec=None):
"""Write the aperture mass statistics based on the correlation function to the
file, file_name.
See :meth:`~treecorr.GGCorrelation.calculateMapSq` for an explanation of the m2_uform
parameter.
The output file will include the following columns:
:R: The aperture radius
:Mapsq: The real part of :math:`\\langle M_{ap}^2\\rangle`.
cf. :meth:`~treecorr.GGCorrelation.calculateMapSq`.
:Mxsq: The real part of :math:`\\langle M_x^2\\rangle`.
:MMxa: The imag part of :math:`\\langle M_{ap}^2\\rangle`.
This is one of two estimators of :math:`\\langle M_{ap} M_x\\rangle`.
:MMxb: The imag part of :math:`-\\langle M_x^2\\rangle`.
This is the second estimator of :math:`\\langle M_{ap} M_x\\rangle`.
:sig_map: The sqrt of the variance estimate of :math:`\\langle M_{ap}^2\\rangle`
(which is equal to the variance of :math:`\\langle M_x^2\\rangle` as well).
:Gamsq: The tophat shear variance :math:`\\langle \\gamma^2\\rangle`.
cf. :meth:`~treecorr.GGCorrelation.calculateGamSq`.
:sig_gam: The sqrt of the variance estimate of :math:`\\langle \\gamma^2\\rangle`
:param file_name: The name of the file to write to.
:param m2_uform: Which form to use for the aperture mass. (default: 'Crittenden';
this value can also be given in the constructor in the config dict.)
:param file_type: The type of file to write ('ASCII' or 'FITS'). (default: determine
the type automatically from the extension of file_name.)
:param prec: For ASCII output catalogs, the desired precision. (default: 4;
this value can also be given in the constructor in the config dict.)
"""
self.logger.info('Writing Map^2 from GG correlations to %s',file_name)
mapsq, mapsq_im, mxsq, mxsq_im, varmapsq = self.calculateMapSq(m2_uform=m2_uform)
gamsq, vargamsq = self.calculateGamSq()
if prec is None:
prec = treecorr.config.get(self.config,'precision',int,4)
treecorr.util.gen_write(
file_name,
['R','Mapsq','Mxsq','MMxa','MMxb','sig_map','Gamsq','sig_gam'],
[ self.rnom,
mapsq, mxsq, mapsq_im, -mxsq_im, numpy.sqrt(varmapsq),
gamsq, numpy.sqrt(vargamsq) ],
prec=prec, file_type=file_type, logger=self.logger)
| [
"[email protected]"
] | |
67b9ba8f95fe5eb0985c03d506574f1bc41c9344 | 3c1639bccf3fc0abc9c82c00ab92ac3f25cf105e | /book/section-8-函数/02-函数的实参和形参(位置参数).py | f1632e35aef8b467e9d0352a5e544321e51d7496 | [
"Apache-2.0"
] | permissive | LiuJunb/PythonStudy | 783318a64496c2db41442ad66e0cc9253b392734 | 3386b9e3ccb398bfcfcd1a3402182811f9bb37ca | refs/heads/master | 2022-12-11T05:22:53.725166 | 2018-11-15T01:34:37 | 2018-11-15T01:34:37 | 143,956,065 | 1 | 0 | Apache-2.0 | 2022-11-22T01:58:23 | 2018-08-08T03:26:26 | JavaScript | UTF-8 | Python | false | false | 861 | py | # 1. Define a function (with two formal parameters)
def get_animal(animal_name, animal_type):
"""获取动画的姓名和类型"""
print('name: '+animal_name + ' --> type: ' + animal_type)
get_animal('🐱', 'animal') # pass two positional arguments
get_animal('animal', '🐱') # pass two positional arguments
# 2. Keyword arguments (avoid errors caused by passing arguments in the wrong order)
# get_animal() #get_animal() missing 2 required positional arguments: 'animal_name' and 'animal_type'
get_animal(animal_type='animal', animal_name='🐶')
get_animal(animal_name='🐷', animal_type='animal')
# 3. Default parameter values
def get_animal_info(animal_name='🐒', animal_type='animal'):
"""获取动画的姓名和类型"""
print('name: '+animal_name + ' --> type: ' + animal_type)
print('---------------')
get_animal_info()
get_animal_info('🐭')
get_animal_info(animal_type='Animal')
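# A small extra sketch (hedged: the values here are hypothetical): positional
# and keyword arguments can be mixed, as long as the positional ones come first.
get_animal_info('🐹', animal_type='pet')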
| [
"[email protected]"
] | |
66cd4531f9739fd1f61386fe7b7fddbd5984c01d | 80d50ea48e10674b1b7d3f583a1c4b7d0b01200f | /examples/v1/service-level-objective-corrections/ListSLOCorrection_2647266873.py | cf0d6f2f87fd6aab757bf5bbacf2f99c3ebb8689 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"MPL-2.0"
] | permissive | DataDog/datadog-api-client-python | 3e01fa630278ad0b5c7005f08b7f61d07aa87345 | 392de360e7de659ee25e4a6753706820ca7c6a92 | refs/heads/master | 2023-09-01T20:32:37.718187 | 2023-09-01T14:42:04 | 2023-09-01T14:42:04 | 193,793,657 | 82 | 36 | Apache-2.0 | 2023-09-14T18:22:39 | 2019-06-25T22:52:04 | Python | UTF-8 | Python | false | false | 514 | py | """
Get all SLO corrections returns "OK" response with pagination
"""
from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.service_level_objective_corrections_api import ServiceLevelObjectiveCorrectionsApi
configuration = Configuration()
with ApiClient(configuration) as api_client:
api_instance = ServiceLevelObjectiveCorrectionsApi(api_client)
items = api_instance.list_slo_correction_with_pagination(
limit=2,
)
for item in items:
print(item)
| [
"[email protected]"
] | |
87a9327b9cb6b76cb848b894aa4ab84fa356902f | 60ccf143ae59bd2aeb6b831499ba0d4045025588 | /Exercicios/Ex081.py | 0f9ccdfe4f7c1d4a879cb65c8905eca4d045c0af | [
"MIT"
] | permissive | RenanRibeiroDaSilva/Meu-Aprendizado-Python | 3283fa644214149d41777d6b23f6e98804bf30de | 280bf2ad132ae0d26255e70b894fa7dbb69a5d01 | refs/heads/main | 2023-07-07T22:59:11.725000 | 2021-08-11T16:47:32 | 2021-08-11T16:47:32 | 369,657,470 | 2 | 0 | MIT | 2021-06-01T17:51:28 | 2021-05-21T21:25:46 | Python | UTF-8 | Python | false | false | 1,312 | py | """ Ex - 081 - Write a program that reads several numbers and puts them in a list.
After that, show:
A) How many numbers were entered.
B) The list of values, sorted in descending order.
C) Whether the value 5 was entered and whether or not it is in the list."""
# How I did it
# List:
lista_num = list()
c = 0
# Loop:
while True:
num = int(input('Digite um valor: '))
lista_num.append(num)
c += 1
res = str(input('Quer continuar? [S/N] ')).strip()[0]
if res in 'Nn':
break
print('~-' * 25)
print(f'Foram digitados {c} valores!')
lista_num.sort(reverse=True)
print(f'Os valores digitados foram {lista_num} em ordem decrescente!')
if 5 in lista_num:
print(f'O número 5 foi digitado na lista!')
else:
print('O número 5 não foi digitado!')
# How Guanabara (the course author) did it
valores = []
while True:
valores.append(int(input('Digite um valor: ')))
resp = str(input('Quer continuar? [S/N] '))
if resp in 'Nn':
break
print('-=' * 30)
print(f'Você digitou {len(valores)} elementos.')
valores.sort(reverse=True)
print(f'Os valores em ordem decrescente são {valores}')
if 5 in valores:
print('O valor 5 faz parte da lista')
else:
print('O valor 5 não foi encontrado na lista!')
| [
"[email protected]"
] | |
94d1e06020318b09a89c9d4c41acb0483c13bd08 | e5897d5b5eb3b018bec8703f01cfc666acea5b38 | /isy994/items/scenes/scene_container.py | ae59b821b3b55b875757c06f7a97ad6bf95a8438 | [
"MIT"
] | permissive | mjcumming/ISY994v5 | 5de41ce7e12be44c35dc0818daf639bb8c0e5487 | 928d8359fd15363e15b8daa402fbb1f5f53f3c45 | refs/heads/master | 2022-05-19T06:10:59.788621 | 2022-05-08T13:16:29 | 2022-05-08T13:16:29 | 187,289,265 | 4 | 10 | MIT | 2021-06-26T13:34:23 | 2019-05-17T22:36:55 | Python | UTF-8 | Python | false | false | 2,059 | py | #! /usr/bin/env python
import xml.etree.ElementTree as ET
import traceback
from ..item_container import Item_Container
from .scene_info import Scene_Info
from .scene_insteon import Scene_Insteon
import logging
logger = logging.getLogger(__name__)
scene_classes = {
"6": Scene_Insteon,
}
class Scene_Container(Item_Container):
def __init__(self, controller):
Item_Container.__init__(self, controller, "Scene")
def start(self):
success, response = self.controller.send_request("nodes/scenes")
if success:
try:
root = ET.fromstring(response)
self.process_scene_nodes(root)
self.items_retrieved = True
return True
except Exception as ex:
logger.error("container manager Error {}".format(ex))
traceback.print_exc()
else:
return False
def process_scene_nodes(self, root):
for scene in root.iter("group"):
self.process_scene_node(scene)
def process_scene_node(self, node):
if "nodeDefId" in node.attrib:
scene_info = Scene_Info(node)
if scene_info.valid: # make sure we have the info we need
# print('process scene',scene_info)
if scene_info.family in scene_classes:
scene_class = scene_classes[scene_info.family]
scene = scene_class(self, scene_info)
scene.update_onoff()
self.add(scene, scene.address)
else:
logger.warn("Invalid scene info {}".format(scene_info))
else:
logger.warn("Invalid scene info, nodeDefId {}".format(node))
def device_event(
self, device
): # notification from controller about a device event, used to "track" scene state
for address, scene in self.list.items():
scene.device_event(device)
def get_device(self, address):
return self.controller.device_container.get(address)
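# A hedged sketch of extending the family dispatch: supporting another scene
# family would mean registering its class in the `scene_classes` table above.
# The "7" key and the Scene_ZWave class are hypothetical, not part of this package.
# scene_classes["7"] = Scene_ZWave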
| [
"[email protected]"
] | |
a7b66fcea4a6778e70e3557ce2b745bc6c6c7e1a | 4e2799eb806d66716283aa10be2682ea811a790c | /apps/exports/tests/test_scheduling.py | 9c4488d7569f802dad1a5c4dde3536f4206bba7e | [] | no_license | jhonandre/commcare-sync | 37851a1e1127ee1691ab42fbccdc301c96c4e12e | 28f07691bc26bb5d7a292f5201fe44fab739a1d5 | refs/heads/master | 2023-08-15T02:36:27.323577 | 2021-09-23T11:33:46 | 2021-09-23T11:33:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,492 | py | from datetime import timedelta
from apps.exports.models import ExportRun
from apps.exports.tests.test_utils import BaseExportTestCase
from django.utils import timezone
class TestSchedule(BaseExportTestCase):
def test_export_is_scheduled_to_run(self):
# A config with no export runs should be scheduled
self.assertTrue(self.export_config.is_scheduled_to_run())
# A config that has an export_run in the QUEUED state should be seen as "scheduled"
export_run = ExportRun.objects.create(
base_export_config=self.export_config,
)
self.addCleanup(export_run.delete)
self.assertTrue(self.export_config.is_scheduled_to_run())
# A completed export that is failed shouldn't be rescheduled
export_run.status = ExportRun.FAILED
export_run.completed_at = timezone.now() - timedelta(minutes=5)
export_run.save()
self.assertFalse(self.export_config.is_scheduled_to_run())
# Once time_between_runs delay has passed, the export should be scheduled to run again
self.export_config.time_between_runs = 10
export_run.completed_at = timezone.now() - timedelta(minutes=15)
export_run.save()
self.assertTrue(self.export_config.is_scheduled_to_run())
def test_should_spawn_task(self):
ExportRun.objects.create(
base_export_config=self.export_config,
)
self.assertFalse(self.export_config.should_create_export_run())
| [
"[email protected]"
] | |
37f8292ce8d081070a11d5ccce47ad4b706b32bf | ab574f7511fa15e5ea50a26f26e3e38f7e33505a | /win_2020/scipy/ndimage/_ni_label.py | 6d70d5b7204499785ff19226dd63600867b68a33 | [] | no_license | zclongpop123/maya_python_packages | 49d6b340512a2580bc8c14ae6281ca3f57017acd | 4dd4a48c41749443ac16053d20aec04e9d2db202 | refs/heads/master | 2021-11-30T01:49:41.846727 | 2021-11-17T01:47:08 | 2021-11-17T01:47:08 | 49,186,909 | 16 | 9 | null | 2017-03-07T00:13:41 | 2016-01-07T06:48:35 | Python | UTF-8 | Python | false | false | 286 | py | def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, '_ni_label.pyd')
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
| [
"[email protected]"
] | |
509b63483a3b8e451b0686b900f5b462f0f554f1 | 5db0a48428381223d2327b8ce17c5ba95f9fecf0 | /college_football_risk/models/territory.py | 9ea033d2d3ea1e27e034aa554ada9cb386f05e65 | [] | no_license | tuttlepower/college-football-risk-python | 7349215c7f1e1c8512b74526193021b0af49bcfc | 3014130991dc27eb69469a4ee2dac88b3f7ea498 | refs/heads/master | 2021-04-15T03:08:34.640525 | 2020-03-21T18:10:29 | 2020-03-21T18:10:29 | 249,290,397 | 0 | 0 | null | 2020-03-22T23:13:52 | 2020-03-22T23:13:52 | null | UTF-8 | Python | false | false | 5,754 | py | # coding: utf-8
"""
College Football Risk API
Companion API for College Football Risk # noqa: E501
The version of the OpenAPI document: 1.3.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from college_football_risk.configuration import Configuration
class Territory(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'id': 'int',
'name': 'str',
'short_name': 'str',
'owner': 'str',
'neighbors': 'list[TerritoryNeighbors]'
}
attribute_map = {
'id': 'id',
'name': 'name',
'short_name': 'shortName',
'owner': 'owner',
'neighbors': 'neighbors'
}
def __init__(self, id=None, name=None, short_name=None, owner=None, neighbors=None, local_vars_configuration=None): # noqa: E501
"""Territory - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._id = None
self._name = None
self._short_name = None
self._owner = None
self._neighbors = None
self.discriminator = None
if id is not None:
self.id = id
if name is not None:
self.name = name
if short_name is not None:
self.short_name = short_name
if owner is not None:
self.owner = owner
if neighbors is not None:
self.neighbors = neighbors
@property
def id(self):
"""Gets the id of this Territory. # noqa: E501
:return: The id of this Territory. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Territory.
:param id: The id of this Territory. # noqa: E501
:type: int
"""
self._id = id
@property
def name(self):
"""Gets the name of this Territory. # noqa: E501
:return: The name of this Territory. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Territory.
:param name: The name of this Territory. # noqa: E501
:type: str
"""
self._name = name
@property
def short_name(self):
"""Gets the short_name of this Territory. # noqa: E501
:return: The short_name of this Territory. # noqa: E501
:rtype: str
"""
return self._short_name
@short_name.setter
def short_name(self, short_name):
"""Sets the short_name of this Territory.
:param short_name: The short_name of this Territory. # noqa: E501
:type: str
"""
self._short_name = short_name
@property
def owner(self):
"""Gets the owner of this Territory. # noqa: E501
:return: The owner of this Territory. # noqa: E501
:rtype: str
"""
return self._owner
@owner.setter
def owner(self, owner):
"""Sets the owner of this Territory.
:param owner: The owner of this Territory. # noqa: E501
:type: str
"""
self._owner = owner
@property
def neighbors(self):
"""Gets the neighbors of this Territory. # noqa: E501
:return: The neighbors of this Territory. # noqa: E501
:rtype: list[TerritoryNeighbors]
"""
return self._neighbors
@neighbors.setter
def neighbors(self, neighbors):
"""Sets the neighbors of this Territory.
:param neighbors: The neighbors of this Territory. # noqa: E501
:type: list[TerritoryNeighbors]
"""
self._neighbors = neighbors
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Territory):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, Territory):
return True
return self.to_dict() != other.to_dict()
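# A minimal usage sketch of this generated model (hedged: all field values
# are hypothetical):
# territory = Territory(id=1, name="Example Territory", short_name="EXT", owner="Example Team")
# print(territory.to_dict())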
| [
"[email protected]"
] | |
3339fa9fa7c973a4244174dc6ce138593c73b2f8 | ccc4b6341676319c43a482d6322729d9172e8266 | /extra_annos/migrations/0001_initial.py | db0fc197abf80c29e82f33b3c8e54e26d2ff3a5e | [
"MIT"
] | permissive | Sumerian-Health/varfish-server | 87278fcbd3c4289e63b6cbd8140d8a454fa94853 | 152b23fa93c2ea685f51622e94bc8790479c2336 | refs/heads/master | 2023-06-12T14:17:41.065266 | 2021-07-08T11:17:31 | 2021-07-08T11:28:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,760 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-07-13 14:26
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="ExtraAnno",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("release", models.CharField(max_length=32)),
("chromosome", models.CharField(max_length=32)),
("start", models.IntegerField()),
("end", models.IntegerField()),
("bin", models.IntegerField()),
("reference", models.CharField(max_length=512)),
("alternative", models.CharField(max_length=512)),
("anno_data", django.contrib.postgres.fields.jsonb.JSONField(default={})),
],
),
migrations.CreateModel(
name="ExtraAnnoField",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("field", models.IntegerField()),
("label", models.CharField(max_length=128)),
],
),
migrations.AlterUniqueTogether(
name="extraanno",
unique_together=set([("release", "chromosome", "start", "reference", "alternative")]),
),
]
| [
"[email protected]"
] | |
7ce785ce5d5c5071581a2db86c31061bb9582cc0 | 18ea9b6e176be04f5d854dce1b75a9780d5052a7 | /dataduct/steps/upsert.py | b24f8a9c4280451dc687908b2316ba642fc72c4d | [
"Apache-2.0"
] | permissive | sungjuly/dataduct | fd89fbb82ae4cc87aa9651cdc8cd13c2c87c5212 | 3700d08a616820e5fecf22a6cf8aabac85a88cba | refs/heads/develop | 2021-04-15T07:50:16.998950 | 2015-02-26T22:33:45 | 2015-02-26T22:33:45 | 30,907,001 | 0 | 0 | null | 2015-02-17T07:32:37 | 2015-02-17T07:32:37 | null | UTF-8 | Python | false | false | 2,543 | py | """ETL step wrapper for Upsert SQL script
"""
from .etl_step import ETLStep
from ..pipeline import SqlActivity
from ..database import Table
from ..database import SqlScript
from ..database import SelectStatement
from ..database import HistoryTable
from ..s3 import S3File
from ..utils.helpers import parse_path
from ..utils.helpers import exactly_one
class UpsertStep(ETLStep):
"""Upsert Step class that helps run a step on the emr cluster
"""
def __init__(self, destination, redshift_database, sql=None,
script=None, source=None, enforce_primary_key=True,
delete_existing=False, history=None, **kwargs):
"""Constructor for the UpsertStep class
Args:
**kwargs(optional): Keyword arguments directly passed to base class
"""
assert exactly_one(sql, source, script), 'One of sql/source/script'
super(UpsertStep, self).__init__(**kwargs)
# Input formatting
dest = Table(SqlScript(filename=parse_path(destination)))
if source is not None:
source_relation = Table(SqlScript(filename=parse_path(source)))
else:
source_relation = SelectStatement(
SqlScript(sql=sql, filename=script).sql())
# Create the destination table if doesn't exist
script = dest.exists_clone_script()
script.append(dest.upsert_script(
source_relation, enforce_primary_key, delete_existing))
if history:
hist = HistoryTable(SqlScript(
filename=parse_path(history)))
script.append(hist.update_history_script(dest))
self.activity = self.create_pipeline_object(
object_class=SqlActivity,
resource=self.resource,
schedule=self.schedule,
depends_on=self.depends_on,
database=redshift_database,
max_retries=self.max_retries,
script=self.create_script(S3File(text=script.sql())))
@classmethod
def arguments_processor(cls, etl, input_args):
"""Parse the step arguments according to the ETL pipeline
Args:
etl(ETLPipeline): Pipeline object containing resources and steps
input_args(dict): Dictionary of the step arguments for the class
"""
step_args = cls.base_arguments_processor(etl, input_args)
cls.pop_inputs(step_args)
step_args['resource'] = etl.ec2_resource
step_args['redshift_database'] = etl.redshift_database
return step_args
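# A hedged sketch of how an upsert step might be declared in a dataduct
# pipeline definition (the YAML layout and the .sql paths below are
# assumptions for illustration, not taken from this module):
# - step_type: upsert
#   source: tables/staging_orders.sql
#   destination: tables/prod_orders.sql
#   enforce_primary_key: true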
| [
"[email protected]"
] | |
e1302ba5ff26724d8e3068658b3ad3b08a2a063a | c4a2ae65c06d53466e9db29430c5048ad9988c94 | /muddery/server/commands/general.py | c2fc7f1982968d4c9db9389c4409eeeeca26ca98 | [
"BSD-3-Clause"
] | permissive | muddery/muddery | ac5a0dcb16b034844d91360e8154c70daca4c9d0 | 5fa06b29bf800646dc4da5851fdf7a1f299f15a7 | refs/heads/master | 2023-03-08T22:01:09.434955 | 2023-01-20T14:06:14 | 2023-01-20T14:06:14 | 33,435,868 | 139 | 64 | NOASSERTION | 2022-11-05T14:45:11 | 2015-04-05T09:07:47 | Python | UTF-8 | Python | false | false | 16,485 | py | """
General Character commands usually available to all characters
"""
import traceback
from muddery.server.utils.logger import logger
from muddery.server.utils.localized_strings_handler import _
from muddery.common.utils.exception import MudderyError, ERR
from muddery.server.server import Server
from muddery.server.commands.command_set import CharacterCmd
@CharacterCmd.request("look_around")
async def look_around(character, args) -> dict or None:
"""
Get surroundings in the room.
Usage:
{
"cmd": "look_around",
}
"""
return character.look_around()
@CharacterCmd.request("inventory")
async def inventory(character, args) -> dict or None:
"""
Observe inventory
Usage:
{
"cmd": "inventory",
}
Show everything in your inventory.
"""
return character.get_inventory_appearance()
@CharacterCmd.request("inventory_obj")
async def inventory_obj(character, args) -> dict or None:
"""
look at an object in the inventory
Usage:
{
"cmd": "inventory_obj",
"args": <inventory's position>
}
Observes an object in your inventory.
"""
if not args:
raise MudderyError(ERR.missing_args, _("You should select something in your inventory."))
return await character.get_inventory_object_appearance(args)
@CharacterCmd.request("all_equipments")
async def all_equipments(character, args) -> dict or None:
"""
observe all equipments on the player's body
Usage:
{
"cmd": "equipments",
}
Show everything in your equipments.
"""
return character.get_equipments()
@CharacterCmd.request("equipments_obj")
async def equipments_obj(character, args) -> dict or None:
"""
look at an object in the equipments
Usage:
{
"cmd": "equipments_obj",
"args": <object's position>
}
Observes an object among your equipments.
"""
if not args:
raise MudderyError(ERR.missing_args, _("You should select something in your equipments."))
return await character.return_equipments_object(args)
@CharacterCmd.request("say")
async def say(character, args) -> dict or None:
"""
speak as your character
Usage:
{
"cmd": "say",
"args": {
"type": <target's type>,
"target": <target's id>,
"msg": <message>
}
}
Talk to those in your current location.
"""
if not args:
raise MudderyError(ERR.missing_args, _("You should say something."))
if "target" not in args:
raise MudderyError(ERR.missing_args, _("You should choose a target to say."))
if "message" not in args:
raise MudderyError(ERR.missing_args, _("You should say something."))
target_type = args["type"]
target = args["target"]
message = args["message"]
await Server.world.send_message(character, target_type, target, message)
return
@CharacterCmd.request("look_room_obj")
async def look_room_obj(character, args) -> dict or None:
"""
look at an object in the room
Usage:
{
"cmd": "look_room_obj",
"args": <object's key>
}
"""
if not character.is_alive:
raise MudderyError(ERR.died, _("You are dead."))
if not args:
raise MudderyError(ERR.missing_args, _("You should appoint an object."))
try:
room = character.get_location()
obj = room.get_object(args)
except Exception as e:
raise MudderyError(ERR.invalid_input, _("Can not find the object."))
return await obj.get_detail_appearance(character)
@CharacterCmd.request("look_room_char")
async def look_room_char(character, args) -> dict or None:
"""
look at a character in the room
Usage:
{
"cmd": "look_room_char",
"args": <character's id>
}
"""
if not character.is_alive:
raise MudderyError(ERR.died, _("You are died."))
if not args:
raise MudderyError(ERR.missing_args, _("You should appoint a character."))
try:
char_id = int(args)
room = character.get_location()
obj = room.get_character(char_id)
except Exception as e:
raise MudderyError(ERR.invalid_input, _("Can not find the character."))
return await obj.get_detail_appearance(character)
@CharacterCmd.request("traverse")
async def traverse(character, args) -> dict or None:
"""
traverse an exit
Usage:
{
"cmd": "traverse",
"args": <exit's key>
}
Traverse an exit, go to the destination of the exit.
"""
if not character.is_alive:
raise MudderyError(ERR.died, _("You are dead."))
if not args:
raise MudderyError(ERR.missing_args, _("Should appoint an exit to go."))
exit_key = args
try:
room = character.get_location()
exit_obj = room.get_exit(exit_key)
except Exception as e:
raise MudderyError(ERR.invalid_input, _("Can not find the exit."))
results = await exit_obj.traverse(character)
if results["traversed"]:
# the character moved to the new location
results.update({
"location": character.get_location_info()
})
else:
# can not traverse
results.update({
"exit": await exit_obj.get_detail_appearance(character)
})
return results
@CharacterCmd.request("talk")
async def talk(character, args) -> dict or None:
"""
Talk to an NPC.
Usage:
{
"cmd": "talk",
"args": <NPC's id>
}
Begin a talk with an NPC. Show all available dialogues of this NPC.
"""
if not character.is_alive:
raise MudderyError(ERR.died, _("You are dead."))
if not args:
raise MudderyError(ERR.missing_args, _("You should talk to someone."))
try:
npc_id = int(args)
room = character.get_location()
npc = room.get_character(npc_id)
except Exception as e:
raise MudderyError(ERR.invalid_input, _("Can not find the character."))
return await character.talk_to_npc(npc)
@CharacterCmd.request("finish_dialogue")
async def finish_dialogue(character, args) -> dict or None:
"""
Finish current dialogue.
Usage:
{
"cmd": "finish_dialogue",
"args": {
"dialogue": <current dialogue>,
"npc": <NPC's id>,
}
}
Dialogue and sentence refer to the current sentence. This command finishes
the current sentence and gets the next ones.
"""
if not args:
raise MudderyError(ERR.missing_args, _("You should talk to someone."))
# Get the dialogue.
if "dialogue" not in args:
raise MudderyError(ERR.missing_args, _("You should say something."))
dlg_key = args["dialogue"]
try:
# get NPC
npc_id = int(args["npc"])
room = character.get_location()
npc = room.get_character(npc_id)
except:
npc = None
return await character.finish_dialogue(dlg_key, npc)
@CharacterCmd.request("loot")
async def loot(character, args) -> dict or None:
"""
Loot from a specified object.
Usage:
{
"cmd": "loot",
"args": <object's key>
}
This command picks out random objects from the loot list and gives
them to the character.
"""
if not args:
raise MudderyError(ERR.missing_args, _("You should loot something."))
try:
room = character.get_location()
obj = room.get_object(args)
except:
raise MudderyError(ERR.invalid_input, _("Can not find the object."))
# loot
return await obj.loot(character)
@CharacterCmd.request("use")
async def use(character, args) -> dict or None:
"""
Use an object in the inventory.
Usage:
{
"cmd": "use",
"args": {
position: <object's position in the inventory>
}
}
Calls the caller's use_object function with the specified object.
Different objects can produce different results.
"""
if not character.is_alive:
raise MudderyError(ERR.died, _("You are dead."))
if not args or "position" not in args:
raise MudderyError(ERR.missing_args, _("You should use something."))
position = args["position"]
# Use the object and get the result.
return await character.use_object(int(position))
@CharacterCmd.request("discard")
async def discard(character, args) -> dict or None:
"""
Discard an object in the inventory.
Usage:
{
"cmd":"discard",
"args": {
position: <object's position in the inventory>
}
}
"""
if not character.is_alive:
raise MudderyError(ERR.died, _("You are dead."))
if not args or "position" not in args:
raise MudderyError(ERR.missing_args, _("You should discard something."))
position = args["position"]
# remove object
await character.remove_all_objects_by_position(int(position))
@CharacterCmd.request("equip")
async def equip(character, args) -> dict or None:
"""
Put on equipment.
Usage:
{
"cmd": "equip",
"args": {
position: <object's position in the inventory>
}
}
Put on equipment and add its attributes to the character.
"""
if not args or "position" not in args:
raise MudderyError(ERR.missing_args, _("You should equip something."))
position = args["position"]
# equip
await character.equip_object(int(position))
return {
"state": await character.get_state(),
}
@CharacterCmd.request("takeoff")
async def takeoff(character, args) -> dict or None:
"""
Take off an equipment and remove its attributes from the character.
Usage:
{
"cmd": "takeoff",
"args": {
position: <object's position in the equipments>
}
}
"""
if not args or "position" not in args:
raise MudderyError(ERR.missing_args, _("You should take off something."))
position = args["position"]
# Take off the equipment.
await character.take_off_equipment(position)
# Send the latest state to the player.
return {
"state": await character.get_state()
}
@CharacterCmd.request("cast_skill")
async def cast_skill(character, args) -> dict or None:
"""
Cast a skill when the caller is not in combat.
Usage:
{
"cmd": "cast_skill",
"args": {
"skill": <skill's key>,
"target": <skill's target>,
}
}
"""
if not character.is_alive:
raise MudderyError(ERR.died, _("You are dead."))
if character.is_in_combat():
raise MudderyError(ERR.invalid_input, _("You can not cast this skill in a combat."))
if not args:
raise MudderyError(ERR.missing_args, _("You should select a skill to cast."))
if "skill" not in args:
raise MudderyError(ERR.missing_args, _("You should select a skill to cast."))
skill_key = args["skill"]
# Get target
target = None
if "target" in args and args["target"]:
try:
target_id = int(args["target"])
room = character.get_location()
target = room.get_character(target_id)
except:
raise MudderyError(ERR.invalid_input, _("Can not get the target."))
return await character.cast_skill(skill_key, target)
@CharacterCmd.request("give_up_quest")
async def give_up_quest(character, args) -> dict or None:
"""
Give up a quest.
Usage:
{
"cmd": "give_up_quest",
"args": <quest's key>
}
"""
if not args:
raise MudderyError(ERR.missing_args, _("You should give up a quest."))
quest_key = args
# Give up the quest.
return await character.quest_handler.give_up(quest_key)
@CharacterCmd.request("unlock_exit")
async def unlock_exit(character, args) -> dict or None:
"""
Unlock a locked exit. A character must unlock a LockedExit before traversing it.
Usage:
{
"cmd": "unlock_exit",
"args": <exit's key>
}
"""
if not args:
raise MudderyError(ERR.missing_args, _("You should unlock something."))
exit_key = args
try:
room = character.get_location()
exit_obj = room.get_exit(exit_key)
except Exception as e:
raise MudderyError(ERR.invalid_input, _("Can not find the exit."))
# Unlock the exit.
if await character.unlock_exit(exit_key):
# The exit may have different appearance after unlocking.
# Send the latest appearance to the caller.
return {
"unlocked": True,
"exit": await exit_obj.get_detail_appearance(character)
}
else:
return {"unlocked": False}
@CharacterCmd.request("shopping")
async def shopping(character, args) -> dict or None:
"""
Open a shop from a character.
Usage:
{
"cmd": "shopping",
"args": {
"npc": <npc's id>,
"shop": <shop's key>,
}
}
"""
if not args or "npc" not in args or "shop" not in args:
raise MudderyError(ERR.missing_args, _("You should shopping in someplace."))
shop_key = args["shop"]
try:
npc_id = int(args["npc"])
room = character.get_location()
npc = room.get_character(npc_id)
except:
raise MudderyError(ERR.invalid_input, _("Can not find this NPC."))
return await npc.get_shop_info(shop_key, character)
@CharacterCmd.request("buy")
async def buy(character, args) -> dict or None:
"""
Buy a goods.
Usage:
{
"cmd": "buy",
"args": {
"npc": <npc's id>,
"shop": <shop's key>,
"goods": <goods' index>,
}
}
"""
if not args or "npc" not in args or "shop" not in args or "goods" not in args:
raise MudderyError(ERR.missing_args, _("You should buy something."))
try:
npc_id = int(args["npc"])
room = character.get_location()
npc = room.get_character(npc_id)
except:
raise MudderyError(ERR.invalid_input, _("Can not find this NPC."))
shop = args["shop"]
goods = args["goods"]
# buy goods
return await npc.sell_goods(shop, int(goods), character)
@CharacterCmd.request("all_quests")
async def all_quests(character, args) -> dict or None:
"""
Query the character's all quests.
Usage:
{
"cmd": "all_quests"
}
"""
return await character.get_quests()
@CharacterCmd.request("query_quest")
async def query_quest(character, args) -> dict or None:
"""
Query a quest's detail information.
Usage:
{
"cmd": "query_quest",
"args": {
"key": <quest's key>
}
}
"""
if not args or "key" not in args:
raise MudderyError(ERR.missing_args, _("Can not find the quest."))
quest_key = args["key"]
return await character.get_quest_info(quest_key)
@CharacterCmd.request("all_skills")
async def all_skills(character, args) -> dict or None:
"""
Query the character's all skills.
Usage:
{
"cmd": "all_skills"
}
"""
return character.get_skills()
@CharacterCmd.request("query_skill")
async def query_skill(character, args) -> dict or None:
"""
Query a skill's detail information.
Usage:
{
"cmd": "query_skill",
"args": {
"key": <skill's key>
}
}
"""
if not args or "key" not in args:
raise MudderyError(ERR.missing_args, _("Can not find the skill."))
skill_key = args["key"]
return await character.get_skill_info(skill_key)
@CharacterCmd.request("get_revealed_maps")
async def get_revealed_maps(character, args) -> dict or None:
"""
Get a character's revealed maps.
Usage:
{
"cmd": "get_revealed_maps"
}
"""
return character.get_revealed_maps()
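# A hedged sketch of registering an additional request handler, following the
# same decorator pattern used throughout this module (the "ping" command and
# its response payload are hypothetical, not part of the package):
# @CharacterCmd.request("ping")
# async def ping(character, args) -> dict or None:
#     """
#     Usage:
#         {
#             "cmd": "ping"
#         }
#     """
#     return {"alive": character.is_alive}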
| [
"[email protected]"
] | |
2d7d8a6afdec0a60e4fd6de6c6998cf8aa860009 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/BLACKBERRYSERVER-MIB.py | cda9b1b1c494f99ae16bd7b6558566582ed9a00e | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 35,454 | py | #
# PySNMP MIB module BLACKBERRYSERVER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BLACKBERRYSERVER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:22:00 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter64, ObjectIdentity, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Counter32, TimeTicks, NotificationType, Integer32, iso, ModuleIdentity, Gauge32, MibIdentifier, IpAddress, enterprises, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "ObjectIdentity", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Counter32", "TimeTicks", "NotificationType", "Integer32", "iso", "ModuleIdentity", "Gauge32", "MibIdentifier", "IpAddress", "enterprises", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
rim = MibIdentifier((1, 3, 6, 1, 4, 1, 3530))
blackBerryServer = MibIdentifier((1, 3, 6, 1, 4, 1, 3530, 5))
besTrapVariables = MibIdentifier((1, 3, 6, 1, 4, 1, 3530, 5, 9))
version = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: version.setStatus('mandatory')
besTotMsgsPending = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besTotMsgsPending.setStatus('mandatory')
besTotMsgsSent = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besTotMsgsSent.setStatus('mandatory')
besTotMsgsRecvd = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besTotMsgsRecvd.setStatus('mandatory')
besTotMsgsXpired = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besTotMsgsXpired.setStatus('mandatory')
besTotMsgsFiltered = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besTotMsgsFiltered.setStatus('mandatory')
besTotMsgsSentPerMin = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besTotMsgsSentPerMin.setStatus('mandatory')
besTotMsgsRecvdPerMin = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besTotMsgsRecvdPerMin.setStatus('mandatory')
besNumServerInfoAvailable = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besNumServerInfoAvailable.setStatus('mandatory')
besConfigTable = MibTable((1, 3, 6, 1, 4, 1, 3530, 5, 20), )
if mibBuilder.loadTexts: besConfigTable.setStatus('mandatory')
besConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1), ).setIndexNames((0, "BLACKBERRYSERVER-MIB", "besConfigServerInstance"))
if mibBuilder.loadTexts: besConfigEntry.setStatus('mandatory')
besConfigServerInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigServerInstance.setStatus('mandatory')
besConfigServerName = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigServerName.setStatus('mandatory')
besConfigVersionString = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigVersionString.setStatus('mandatory')
besConfigReleaseMaj = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigReleaseMaj.setStatus('mandatory')
besConfigReleaseMin = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigReleaseMin.setStatus('mandatory')
besConfigReleaseServicePack = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigReleaseServicePack.setStatus('mandatory')
besConfigReleaseBuild = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigReleaseBuild.setStatus('mandatory')
besConfigLicenceTotal = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigLicenceTotal.setStatus('mandatory')
besConfigLicenceUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigLicenceUsed.setStatus('mandatory')
besConfigLicenceRemaining = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigLicenceRemaining.setStatus('mandatory')
besConfigServerUID = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 30), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigServerUID.setStatus('mandatory')
besConfigSystemAttendant = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 40), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigSystemAttendant.setStatus('mandatory')
besConfigSRPHost = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 50), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigSRPHost.setStatus('mandatory')
besConfigSRPPort = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 51), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigSRPPort.setStatus('mandatory')
besConfigAutoBCCEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigAutoBCCEnabled.setStatus('mandatory')
besConfigAutoBCCAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 61), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigAutoBCCAddress.setStatus('mandatory')
besConfigForceSaveInSentEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 70), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigForceSaveInSentEnabled.setStatus('mandatory')
besConfigWirelessEmailRecoEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 20, 1, 80), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besConfigWirelessEmailRecoEnabled.setStatus('mandatory')
besSysHealthTable = MibTable((1, 3, 6, 1, 4, 1, 3530, 5, 25), )
if mibBuilder.loadTexts: besSysHealthTable.setStatus('mandatory')
besSysHealthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1), ).setIndexNames((0, "BLACKBERRYSERVER-MIB", "besSysHealthServerInstance"))
if mibBuilder.loadTexts: besSysHealthEntry.setStatus('mandatory')
besSysHealthServerInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthServerInstance.setStatus('mandatory')
besSysHealthSrpConnectedState = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthSrpConnectedState.setStatus('mandatory')
besSysHealthSrpLastConnectDate = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthSrpLastConnectDate.setStatus('mandatory')
besSysHealthSrpReconnectSuccess = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthSrpReconnectSuccess.setStatus('mandatory')
besSysHealthSrpReconnectsFail = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthSrpReconnectsFail.setStatus('mandatory')
besSysHealthSrpTotalSecNotConnected = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthSrpTotalSecNotConnected.setStatus('mandatory')
besSysHealthSrpLastErrorText = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 15), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthSrpLastErrorText.setStatus('mandatory')
besSysHealthSrpLastErrorTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 16), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthSrpLastErrorTime.setStatus('mandatory')
besSysHealthMsgTotalProc = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgTotalProc.setStatus('mandatory')
besSysHealthMsgToHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgToHandheld.setStatus('mandatory')
besSysHealthMsgFromHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgFromHandheld.setStatus('mandatory')
besSysHealthMsgFilteredByUser = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgFilteredByUser.setStatus('mandatory')
besSysHealthMsgFilteredByGlobal = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgFilteredByGlobal.setStatus('mandatory')
besSysHealthMsgPending = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 25), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgPending.setStatus('mandatory')
besSysHealthMsgExpired = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 26), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgExpired.setStatus('mandatory')
besSysHealthMsgErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 27), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgErrors.setStatus('mandatory')
besSysHealthMsgMoreRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 28), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMsgMoreRequests.setStatus('mandatory')
besSysHealthCalUsersOTACEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 40), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthCalUsersOTACEnabled.setStatus('mandatory')
besSysHealthCalEventToHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 41), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthCalEventToHandheld.setStatus('mandatory')
besSysHealthCalEventFromHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 42), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthCalEventFromHandheld.setStatus('mandatory')
besSysHealthWERUsersEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 50), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthWERUsersEnabled.setStatus('mandatory')
besSysHealthWERRequestsToHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 51), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthWERRequestsToHandheld.setStatus('mandatory')
besSysHealthWERRequestsFromHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 52), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthWERRequestsFromHandheld.setStatus('mandatory')
besSysHealthMdsDeviceConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsDeviceConnections.setStatus('mandatory')
besSysHealthMdsPushConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 61), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsPushConnections.setStatus('mandatory')
besSysHealthMdsTotalBytesFromDevices = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsTotalBytesFromDevices.setStatus('mandatory')
besSysHealthMdsMaxPacketSizeFromDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 63), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsMaxPacketSizeFromDevice.setStatus('mandatory')
besSysHealthMdsAvgPacketSizeFromDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 64), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsAvgPacketSizeFromDevice.setStatus('mandatory')
besSysHealthMdsTotalBytesToDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 65), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsTotalBytesToDevice.setStatus('mandatory')
besSysHealthMdsMaxPacketSizeToDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 66), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsMaxPacketSizeToDevice.setStatus('mandatory')
besSysHealthMdsAvgPacketSizeToDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 67), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsAvgPacketSizeToDevice.setStatus('mandatory')
besSysHealthMdsRefusedPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 68), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsRefusedPackets.setStatus('mandatory')
besSysHealthMdsInvalidPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 69), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsInvalidPackets.setStatus('mandatory')
besSysHealthMdsConnectionSuccess = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 70), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsConnectionSuccess.setStatus('mandatory')
besSysHealthMdsConnectionFailure = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 71), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsConnectionFailure.setStatus('mandatory')
besSysHealthMdsConnectionTruncated = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 72), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthMdsConnectionTruncated.setStatus('mandatory')
besSysHealthV1MsgsPending = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 202), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1MsgsPending.setStatus('mandatory')
besSysHealthV1TotalMsgsSent = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 203), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1TotalMsgsSent.setStatus('mandatory')
besSysHealthV1TotalMsgsReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 204), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1TotalMsgsReceived.setStatus('mandatory')
besSysHealthV1TotalMsgsExpired = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 205), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1TotalMsgsExpired.setStatus('mandatory')
besSysHealthV1TotalMsgsFiltered = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 206), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1TotalMsgsFiltered.setStatus('mandatory')
besSysHealthV1MsgsSentPerMin = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 207), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1MsgsSentPerMin.setStatus('mandatory')
besSysHealthV1MsgsRecvdPerMin = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 208), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1MsgsRecvdPerMin.setStatus('mandatory')
besSysHealthV1SRPConnectState = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 25, 1, 209), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSysHealthV1SRPConnectState.setStatus('mandatory')
besMailServerHealthTable = MibTable((1, 3, 6, 1, 4, 1, 3530, 5, 26), )
if mibBuilder.loadTexts: besMailServerHealthTable.setStatus('mandatory')
besMailServerHealthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3530, 5, 26, 1), ).setIndexNames((0, "BLACKBERRYSERVER-MIB", "besMailServerHealthServerInstance"), (0, "BLACKBERRYSERVER-MIB", "besMailServerHealthServerId"))
if mibBuilder.loadTexts: besMailServerHealthEntry.setStatus('mandatory')
besMailServerHealthServerInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 26, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besMailServerHealthServerInstance.setStatus('mandatory')
besMailServerHealthServerId = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 26, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besMailServerHealthServerId.setStatus('mandatory')
besMailServerHealthServerName = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 26, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besMailServerHealthServerName.setStatus('mandatory')
besMailServerHealthTotalUsers = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 26, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besMailServerHealthTotalUsers.setStatus('mandatory')
besMailServerHealthAvgResponceTime10min = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 26, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besMailServerHealthAvgResponceTime10min.setStatus('mandatory')
besMailServerHealthFailedConn10min = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 26, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besMailServerHealthFailedConn10min.setStatus('mandatory')
besUserHealthTable = MibTable((1, 3, 6, 1, 4, 1, 3530, 5, 30), )
if mibBuilder.loadTexts: besUserHealthTable.setStatus('mandatory')
besUserHealthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1), ).setIndexNames((0, "BLACKBERRYSERVER-MIB", "besUserHealthServerInstance"), (0, "BLACKBERRYSERVER-MIB", "besUserHealthUserId"))
if mibBuilder.loadTexts: besUserHealthEntry.setStatus('mandatory')
besUserHealthServerInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthServerInstance.setStatus('mandatory')
besUserHealthUserId = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthUserId.setStatus('mandatory')
besUserHealthUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthUserName.setStatus('mandatory')
besUserHealthLastErrorText = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthLastErrorText.setStatus('mandatory')
besUserHealthLastErrorTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthLastErrorTime.setStatus('mandatory')
besUserHealthDeviceNetwork = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 20), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthDeviceNetwork.setStatus('mandatory')
besUserHealthDevicePIN = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 21), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthDevicePIN.setStatus('mandatory')
besUserHealthDeviceInCradle = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthDeviceInCradle.setStatus('mandatory')
besUserHealthNumRedirectedFolders = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 30), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthNumRedirectedFolders.setStatus('mandatory')
besUserHealthSaveInSent = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthSaveInSent.setStatus('mandatory')
besUserHealthRedirectEnabledOnDesktop = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthRedirectEnabledOnDesktop.setStatus('mandatory')
besUserHealthDisableWhileInCradle = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 33), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthDisableWhileInCradle.setStatus('mandatory')
besUserHealthFullyConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 34), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthFullyConfigured.setStatus('mandatory')
besUserHealthEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 35), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthEnabled.setStatus('mandatory')
besUserHealthMsgTotalProc = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 40), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgTotalProc.setStatus('mandatory')
besUserHealthMsgToHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 41), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgToHandheld.setStatus('mandatory')
besUserHealthMsgFromHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 42), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgFromHandheld.setStatus('mandatory')
besUserHealthMsgFiltered = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 43), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgFiltered.setStatus('mandatory')
besUserHealthMsgPending = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 44), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgPending.setStatus('mandatory')
besUserHealthMsgExpired = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 45), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgExpired.setStatus('mandatory')
besUserHealthMsgErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 46), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgErrors.setStatus('mandatory')
besUserHealthMsgMoreRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 47), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgMoreRequests.setStatus('mandatory')
besUserHealthMsgForwardedFromDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 48), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgForwardedFromDevice.setStatus('mandatory')
besUserHealthMsgRepliedToWithText = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 49), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthMsgRepliedToWithText.setStatus('mandatory')
besUserHealthLastTimeInCradle = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 60), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthLastTimeInCradle.setStatus('mandatory')
besUserHealthLastInteractionWithDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 61), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthLastInteractionWithDevice.setStatus('mandatory')
besUserHealthLastMessageForwarded = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 62), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthLastMessageForwarded.setStatus('mandatory')
besUserHealthLastKeyDateGenerated = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 63), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthLastKeyDateGenerated.setStatus('mandatory')
besUserHealthAvgKBForwarded = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 70), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthAvgKBForwarded.setStatus('mandatory')
besUserHealthAvgKBReplyWithText = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 71), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthAvgKBReplyWithText.setStatus('mandatory')
besUserHealthAvgLatencyInSecLast10Msg = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 72), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthAvgLatencyInSecLast10Msg.setStatus('mandatory')
besUserHealthCalOTAEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 80), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthCalOTAEnabled.setStatus('mandatory')
besUserHealthCalEventToHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 81), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthCalEventToHandheld.setStatus('mandatory')
besUserHealthCalEventFromHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 82), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthCalEventFromHandheld.setStatus('mandatory')
besUserHealthWirelessEmailRecoEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 90), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthWirelessEmailRecoEnabled.setStatus('mandatory')
besUserHealthWERRequestsToHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 91), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthWERRequestsToHandheld.setStatus('mandatory')
besUserHealthWERRequestsFromHandheld = MibTableColumn((1, 3, 6, 1, 4, 1, 3530, 5, 30, 1, 92), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: besUserHealthWERRequestsFromHandheld.setStatus('mandatory')
besSRPConnectState = MibScalar((1, 3, 6, 1, 4, 1, 3530, 5, 9, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: besSRPConnectState.setStatus('mandatory')
besSRPConnectEvent = NotificationType((1, 3, 6, 1, 4, 1, 3530, 5) + (0,1)).setObjects(("BLACKBERRYSERVER-MIB", "besSRPConnectState"))
besHungThreadEvent = NotificationType((1, 3, 6, 1, 4, 1, 3530, 5) + (0,3))
besMailServerDownEvent = NotificationType((1, 3, 6, 1, 4, 1, 3530, 5) + (0,5))
besMDStoBESConnectionEvent = NotificationType((1, 3, 6, 1, 4, 1, 3530, 5) + (0,7))
besMDSStartStopEvent = NotificationType((1, 3, 6, 1, 4, 1, 3530, 5) + (0,11))
besMDStoDBConnectionEvent = NotificationType((1, 3, 6, 1, 4, 1, 3530, 5) + (0,13))
besCriticalEvent = NotificationType((1, 3, 6, 1, 4, 1, 3530, 5) + (0,21))
mibBuilder.exportSymbols("BLACKBERRYSERVER-MIB", besUserHealthMsgToHandheld=besUserHealthMsgToHandheld, besTotMsgsPending=besTotMsgsPending, besUserHealthLastInteractionWithDevice=besUserHealthLastInteractionWithDevice, besMailServerHealthServerName=besMailServerHealthServerName, besUserHealthMsgPending=besUserHealthMsgPending, besUserHealthMsgTotalProc=besUserHealthMsgTotalProc, besConfigServerUID=besConfigServerUID, besConfigTable=besConfigTable, besSysHealthCalUsersOTACEnabled=besSysHealthCalUsersOTACEnabled, besSRPConnectEvent=besSRPConnectEvent, besSysHealthCalEventFromHandheld=besSysHealthCalEventFromHandheld, besUserHealthMsgFromHandheld=besUserHealthMsgFromHandheld, besSysHealthMsgToHandheld=besSysHealthMsgToHandheld, besSysHealthMdsAvgPacketSizeFromDevice=besSysHealthMdsAvgPacketSizeFromDevice, besConfigAutoBCCAddress=besConfigAutoBCCAddress, besConfigReleaseBuild=besConfigReleaseBuild, besSysHealthWERRequestsToHandheld=besSysHealthWERRequestsToHandheld, besConfigVersionString=besConfigVersionString, besMailServerHealthServerId=besMailServerHealthServerId, besSysHealthV1TotalMsgsSent=besSysHealthV1TotalMsgsSent, besUserHealthServerInstance=besUserHealthServerInstance, besSysHealthMdsInvalidPackets=besSysHealthMdsInvalidPackets, besSysHealthMdsTotalBytesFromDevices=besSysHealthMdsTotalBytesFromDevices, besSysHealthEntry=besSysHealthEntry, besMailServerHealthServerInstance=besMailServerHealthServerInstance, besMailServerHealthFailedConn10min=besMailServerHealthFailedConn10min, besUserHealthMsgForwardedFromDevice=besUserHealthMsgForwardedFromDevice, besUserHealthRedirectEnabledOnDesktop=besUserHealthRedirectEnabledOnDesktop, besTotMsgsSent=besTotMsgsSent, besSysHealthMdsTotalBytesToDevice=besSysHealthMdsTotalBytesToDevice, besMDSStartStopEvent=besMDSStartStopEvent, besUserHealthFullyConfigured=besUserHealthFullyConfigured, besTotMsgsRecvd=besTotMsgsRecvd, besConfigServerInstance=besConfigServerInstance, besSysHealthMdsConnectionFailure=besSysHealthMdsConnectionFailure, besSysHealthV1TotalMsgsExpired=besSysHealthV1TotalMsgsExpired, besUserHealthAvgKBReplyWithText=besUserHealthAvgKBReplyWithText, besSysHealthV1TotalMsgsFiltered=besSysHealthV1TotalMsgsFiltered, besUserHealthWERRequestsFromHandheld=besUserHealthWERRequestsFromHandheld, besSysHealthV1TotalMsgsReceived=besSysHealthV1TotalMsgsReceived, besConfigReleaseMaj=besConfigReleaseMaj, besUserHealthWERRequestsToHandheld=besUserHealthWERRequestsToHandheld, besSysHealthMdsMaxPacketSizeFromDevice=besSysHealthMdsMaxPacketSizeFromDevice, besUserHealthLastKeyDateGenerated=besUserHealthLastKeyDateGenerated, besUserHealthEnabled=besUserHealthEnabled, besUserHealthMsgExpired=besUserHealthMsgExpired, besSRPConnectState=besSRPConnectState, besUserHealthMsgMoreRequests=besUserHealthMsgMoreRequests, besNumServerInfoAvailable=besNumServerInfoAvailable, besSysHealthV1MsgsSentPerMin=besSysHealthV1MsgsSentPerMin, besUserHealthDeviceNetwork=besUserHealthDeviceNetwork, besMailServerHealthAvgResponceTime10min=besMailServerHealthAvgResponceTime10min, besTotMsgsFiltered=besTotMsgsFiltered, besUserHealthAvgLatencyInSecLast10Msg=besUserHealthAvgLatencyInSecLast10Msg, besConfigServerName=besConfigServerName, besSysHealthTable=besSysHealthTable, besSysHealthSrpReconnectsFail=besSysHealthSrpReconnectsFail, besUserHealthLastMessageForwarded=besUserHealthLastMessageForwarded, blackBerryServer=blackBerryServer, besSysHealthMdsDeviceConnections=besSysHealthMdsDeviceConnections, besConfigSRPPort=besConfigSRPPort, besConfigEntry=besConfigEntry, 
besConfigLicenceRemaining=besConfigLicenceRemaining, besUserHealthCalEventFromHandheld=besUserHealthCalEventFromHandheld, besSysHealthSrpLastErrorTime=besSysHealthSrpLastErrorTime, besSysHealthMsgPending=besSysHealthMsgPending, besSysHealthMdsAvgPacketSizeToDevice=besSysHealthMdsAvgPacketSizeToDevice, besConfigAutoBCCEnabled=besConfigAutoBCCEnabled, besSysHealthSrpReconnectSuccess=besSysHealthSrpReconnectSuccess, besTotMsgsRecvdPerMin=besTotMsgsRecvdPerMin, besConfigSRPHost=besConfigSRPHost, besCriticalEvent=besCriticalEvent, besSysHealthSrpConnectedState=besSysHealthSrpConnectedState, besUserHealthWirelessEmailRecoEnabled=besUserHealthWirelessEmailRecoEnabled, besMDStoDBConnectionEvent=besMDStoDBConnectionEvent, besUserHealthUserId=besUserHealthUserId, besSysHealthSrpLastErrorText=besSysHealthSrpLastErrorText, besUserHealthUserName=besUserHealthUserName, besSysHealthCalEventToHandheld=besSysHealthCalEventToHandheld, besUserHealthCalEventToHandheld=besUserHealthCalEventToHandheld, besSysHealthV1MsgsPending=besSysHealthV1MsgsPending, besConfigWirelessEmailRecoEnabled=besConfigWirelessEmailRecoEnabled, besSysHealthWERRequestsFromHandheld=besSysHealthWERRequestsFromHandheld, besUserHealthEntry=besUserHealthEntry, besSysHealthMsgFilteredByGlobal=besSysHealthMsgFilteredByGlobal, besUserHealthDisableWhileInCradle=besUserHealthDisableWhileInCradle, besMailServerHealthTotalUsers=besMailServerHealthTotalUsers, besMailServerDownEvent=besMailServerDownEvent, besMailServerHealthTable=besMailServerHealthTable, besConfigReleaseServicePack=besConfigReleaseServicePack, rim=rim, besConfigLicenceUsed=besConfigLicenceUsed, besSysHealthV1SRPConnectState=besSysHealthV1SRPConnectState, besSysHealthSrpLastConnectDate=besSysHealthSrpLastConnectDate, besUserHealthMsgFiltered=besUserHealthMsgFiltered, besUserHealthCalOTAEnabled=besUserHealthCalOTAEnabled, besUserHealthNumRedirectedFolders=besUserHealthNumRedirectedFolders, besHungThreadEvent=besHungThreadEvent, besConfigReleaseMin=besConfigReleaseMin, besConfigLicenceTotal=besConfigLicenceTotal, besConfigSystemAttendant=besConfigSystemAttendant, besSysHealthMsgTotalProc=besSysHealthMsgTotalProc, besSysHealthMsgExpired=besSysHealthMsgExpired, besSysHealthServerInstance=besSysHealthServerInstance, besSysHealthWERUsersEnabled=besSysHealthWERUsersEnabled, besSysHealthSrpTotalSecNotConnected=besSysHealthSrpTotalSecNotConnected, besSysHealthMdsPushConnections=besSysHealthMdsPushConnections, besUserHealthLastTimeInCradle=besUserHealthLastTimeInCradle, besUserHealthLastErrorTime=besUserHealthLastErrorTime, besSysHealthMdsMaxPacketSizeToDevice=besSysHealthMdsMaxPacketSizeToDevice, besUserHealthDeviceInCradle=besUserHealthDeviceInCradle, besSysHealthV1MsgsRecvdPerMin=besSysHealthV1MsgsRecvdPerMin, besSysHealthMsgErrors=besSysHealthMsgErrors, besUserHealthMsgRepliedToWithText=besUserHealthMsgRepliedToWithText, besMDStoBESConnectionEvent=besMDStoBESConnectionEvent, besSysHealthMsgFromHandheld=besSysHealthMsgFromHandheld, besTotMsgsSentPerMin=besTotMsgsSentPerMin, besUserHealthSaveInSent=besUserHealthSaveInSent, besTrapVariables=besTrapVariables, version=version, besTotMsgsXpired=besTotMsgsXpired, besSysHealthMdsRefusedPackets=besSysHealthMdsRefusedPackets, besSysHealthMsgMoreRequests=besSysHealthMsgMoreRequests, besMailServerHealthEntry=besMailServerHealthEntry, besConfigForceSaveInSentEnabled=besConfigForceSaveInSentEnabled, besSysHealthMdsConnectionSuccess=besSysHealthMdsConnectionSuccess, besUserHealthDevicePIN=besUserHealthDevicePIN, 
besSysHealthMdsConnectionTruncated=besSysHealthMdsConnectionTruncated, besUserHealthAvgKBForwarded=besUserHealthAvgKBForwarded, besUserHealthTable=besUserHealthTable, besUserHealthLastErrorText=besUserHealthLastErrorText, besUserHealthMsgErrors=besUserHealthMsgErrors, besSysHealthMsgFilteredByUser=besSysHealthMsgFilteredByUser)
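# --- usage sketch (not generated) -------------------------------------------
# A module built this way is typically consumed through pysnmp's high-level
# API; the community string and host below are placeholder assumptions:
#
# from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                           ContextData, ObjectType, ObjectIdentity, getCmd)
# errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#     SnmpEngine(), CommunityData('public'),
#     UdpTransportTarget(('bes-host.example.com', 161)), ContextData(),
#     ObjectType(ObjectIdentity('BLACKBERRYSERVER-MIB',
#                               'besSRPConnectState', 0))))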
| [
"[email protected]"
] | |
0d9abe4cfde90729fa8def7c6aeeca70aa7f3509 | 16cc8f796eac98e9a475da11e4bc0aa26317e894 | /panasonic3-14/a.py | 8305feb550b7ca853ac551b0a9dcfc8c01baae23 | [] | no_license | amaguri0408/AtCoder-python | 2f3fcdd82c52f5ddee88627fb99466c9e003164f | ab8ec04b8e434939e9f7035f3a280b30c0682427 | refs/heads/master | 2022-10-30T00:07:03.560011 | 2020-06-13T10:41:36 | 2020-06-13T10:41:36 | 271,954,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | lst = [1, 1, 1, 2, 1, 2, 1, 5, 2, 2, 1, 5, 1, 2, 1, 14, 1, 5, 1, 5, 2, 2, 1, 15, 2, 2, 5, 4, 1, 4, 1, 51]
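# print the k-th (1-indexed) term of the fixed sequence given in the problem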
k = int(input())
print(lst[k-1]) | [
"[email protected]"
] | |
91804d19aa6c1c9b19c33c3e7ef311da19bcbb76 | a5a4cee972e487512275c34f308251e6cc38c2fa | /dev/MgO_kde_sampling/dev__PyposmatMonteCarloSampler__buck_MgO.py | 8162dc48b47abcf6ab343640ec30b58aa6b9519e | [
"MIT"
] | permissive | eragasa/pypospack | 4f54983b33dcd2dce5b602bc243ea8ef22fee86b | 21cdecaf3b05c87acc532d992be2c04d85bfbc22 | refs/heads/master | 2021-06-16T09:24:11.633693 | 2019-12-06T16:54:02 | 2019-12-06T16:54:02 | 99,282,824 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,317 | py | import copy,yaml
from collections import OrderedDict
from pypospack.pyposmat import PyposmatMonteCarloSampler
from pypospack.pyposmat import PyposmatDataFile
from pypospack.pyposmat import PyposmatEngine
from pypospack.pyposmat import PyposmatConfigurationFile
#from pypospack.pyposmat import QoiDatabase
from pypospack.qoi import QoiDatabase
from pypospack.io.filesystem import OrderedDictYAMLLoader
import MgO
calc_elastic_properties = False
calc_point_defects = True
# <---------------- making a configuration file
MgO_qoi_db = QoiDatabase()
MgO_qoi_db.add_qoi(
qoi_name='MgO_NaCl.a0',
qoi_type='a11_min_all',
structures=OrderedDict([('ideal','MgO_NaCl')]),
target=4.246)
# <----------------- ELASTIC PROPERTIES
if calc_elastic_properties:
MgO_qoi_db.add_qoi(
qoi_name='MgO_NaCl.c11',
qoi_type='c11',
structures=OrderedDict([('ideal','MgO_NaCl')]),
target=277.00)
MgO_qoi_db.add_qoi(
qoi_name='MgO_NaCl.c12',
qoi_type='c12',
structures=OrderedDict([('ideal','MgO_NaCl')]),
target=91.67)
MgO_qoi_db.add_qoi(
qoi_name='MgO_NaCl.c44',
qoi_type='c44',
structures=OrderedDict([('ideal','MgO_NaCl')]),
target=144.01)
MgO_qoi_db.add_qoi(
qoi_name='MgO_NaCl.B',
qoi_type='bulk_modulus',
structures=OrderedDict([('ideal','MgO_NaCl')]),
target=153.45)
MgO_qoi_db.add_qoi(
qoi_name='MgO_NaCl.G',
qoi_type='shear_modulus',
structures=OrderedDict([('ideal','MgO_NaCl')]),
target=92.66)
#if calc_point_defects:
# MgO_qoi_db.add_qoi(
# qoi_name='MgO_NaCl.fr_a',
# qoi_type='point_defect',
# structures=OrderedDict([
# ('defect','MgO_NaCl_fr_a'),
# ('ideal','MgO_NaCl')]),
# target=10.978)
#MgO_qoi_db.add_qoi(
# qoi_name='MgO_NaCl.fr_c',
# qoi_type='point_defect',
# structures=OrderedDict([
# ('defect','MgO_NaCl_fr_c'),
# ('ideal','MgO_NaCl')]),
# target=8.986)
#MgO_qoi_db.add_qoi(
# qoi_name='MgO_NaCl.sch',
# qoi_type='point_defect',
# structures=OrderedDict([
# ('defect','MgO_NaCl_sch'),
# ('ideal','MgO_NaCl')]),
# target=5.067)
#MgO_qoi_db.add_qoi(
# qoi_name='MgO_NaCl.001s',
# qoi_type='surface_energy',
# structures=OrderedDict([
# ('slab','MgO_NaCl_001s'),
# ('ideal','MgO_NaCl')]),
# target=0.05595)
# <---------------- define potential formalism
MgO_potential = OrderedDict()
MgO_potential['potential_type'] = 'buckingham'
MgO_potential['symbols'] = ['Mg','O']
MgO_potential['cutoff_global'] = 10.0
# <---------------- Define Sampling Requirements
MgO_param_dist = OrderedDict()
MgO_param_dist['mc_sampling'] = OrderedDict()
MgO_param_dist['mc_sampling']['seed'] = 0
MgO_param_dist['mc_sampling']['n_iterations'] = 10
n_iterations = MgO_param_dist['mc_sampling']['n_iterations']
n_samples_per_iteration = 100
for i in range(n_iterations):
MgO_param_dist['mc_sampling'][i] = OrderedDict()
MgO_param_dist['mc_sampling'][i]['type'] = 'kde'
MgO_param_dist['mc_sampling'][i]['n_samples'] = n_samples_per_iteration
MgO_param_dist['mc_sampling'][0]['type'] = 'parametric'
#----
#MgO_param_dist['mc_sampling'][0]['type'] = 'kde'
#MgO_param_dist['kde_samples_file'][0] = 'culled_009_part_1.dat'
#<----------------- determine parameters
MgO_param_dist['parameters'] = OrderedDict()
#<----------------- free parameters
# For uniform distributions,
#     a is the low end of the range,
#     b is the high end of the range
MgO_param_dist['parameters']['chrg_Mg'] = ['uniform',{'a':+1.5, 'b':+2.5}]
MgO_param_dist['parameters']['chrg_O'] = ['equals','-chrg_Mg']
MgO_param_dist['parameters']['MgMg_A'] = ['equals',0.000]
MgO_param_dist['parameters']['MgMg_rho'] = ['equals',0.500]
MgO_param_dist['parameters']['MgMg_C'] = ['equals',0.000]
MgO_param_dist['parameters']['MgO_A'] = ['uniform',{'a':800.00,'b':1300.00}]
MgO_param_dist['parameters']['MgO_rho'] = ['uniform',{'a':0.2900,'b':0.3300}]
MgO_param_dist['parameters']['MgO_C'] = ['equals',0.000]
MgO_param_dist['parameters']['OO_A'] = ['uniform',{'a':500.00,'b':25000.00}]
MgO_param_dist['parameters']['OO_rho'] = ['uniform',{'a':0.1000,'b':0.4000}]
MgO_param_dist['parameters']['OO_C'] = ['uniform',{'a':25.00, 'b':77.00}]
#<----------------- constrained parameters
#<----------------- parameter constriants
MgO_parameter_constraints = OrderedDict()
MgO_parameter_constraints['chrgMg_gt_0'] = ['chrg_Mg > 0']
MgO_parameter_constraints['chrgO_lt_0'] = ['chrg_O < 0']
MgO_parameter_constraints['MgMg_A_gt_0'] = ['MgMg_A > 0']
MgO_parameter_constraints['MgMg_rho_gt_0'] = ['MgMg_rho > 0']
MgO_parameter_constraints['MgMg_C_gt_0'] = ['MgMg_C > 0']
MgO_parameter_constraints['MgO_A_gt_0'] = ['MgO_A > 0']
MgO_parameter_constraints['MgO_rho_gt_0'] = ['MgO_rho > 0']
MgO_parameter_constraints['MgO_C_gt_0'] = ['MgO_C > 0']
MgO_parameter_constraints['OO_A_gt_0'] = ['OO_A > 0']
MgO_parameter_constraints['OO_rho_gt_0'] = ['OO_rho > 0']
MgO_parameter_constraints['OO_C_gt_0'] = ['OO_C > 0']
#<----------------- qoi performance constraints
MgO_qoi_constraints = OrderedDict()
# define performance constraints as 20% of the qoi target value
for qoi_name, qoi_info in MgO_qoi_db.qois.items():
MgO_qoi_constraints[qoi_name] = qoi_info['target'] * 0.20
# print out qoi performance constraints
print(80*'-')
print('{:^80}'.format('QOI PERFORMANCE CONSTRAINTS'))
print(80*'-')
for qoi_name, value in MgO_qoi_constraints.items():
print('{:>20} {:>10}'.format(qoi_name,value))
MgO_structures = OrderedDict()
MgO_structures['structure_directory'] = 'test__PyposmatMonteCarloSampler'
MgO_structures['structures'] = OrderedDict()
MgO_structures['structures']['MgO_NaCl'] = 'MgO_NaCl_unit.gga.relax.vasp'
MgO_configuration = PyposmatConfigurationFile()
MgO_configuration.qois = MgO_qoi_db.qois
MgO_configuration.potential = MgO_potential
MgO_configuration.structures = MgO_structures
MgO_configuration.parameter_distribution_definitions = MgO_param_dist
assert isinstance(MgO_configuration.configuration,OrderedDict)
MgO_configuration.write(filename='pypospack.config.in')
MgO_configuration.read(filename='pypospack.config.in')
# <---------------- end make configuration file
filename_in='pypospack.config.in'
filename_out='pypospack.results.out'
engine = PyposmatMonteCarloSampler(
filename_in=filename_in,
filename_out=filename_out)
# <---------------- printout for debugging purposes
print('base_directory:{}'.format(engine.base_directory))
print('input_filename:{}'.format(engine.pyposmat_filename_in))
print('output_filename:{}'.format(engine.pyposmat_filename_out))
# <---------------- the steps of engine.configure() tested individually
# this is the step which configures the object from the
# configuration file
# engine.configure()
engine.create_base_directories()
engine.read_configuration_file()
engine.configure_qoi_manager()
engine.configure_task_manager()
n_iterations = engine.configuration.parameter_distribution_definitions['mc_sampling']['n_iterations']
n_samples = engine.configuration.parameter_distribution_definitions['mc_sampling'][0]['n_samples']
param_dist_def = engine.configuration.parameter_distribution_definitions['parameters']
parameter_names = [p for p in param_dist_def]
free_parameter_names = [k for k,v in param_dist_def.items() if v[0] != 'equals']
for p in param_dist_def:
if p in free_parameter_names:
str_free = 'free'
print('{:^10} {:^10} {:^10} {:^10} {:^10}'.format(
p,
str_free,
param_dist_def[p][0],
param_dist_def[p][1]['a'],
param_dist_def[p][1]['b']))
else:
str_free = 'not_free'
print('{:^10} {:^10}'.format(p,str_free))
import scipy.stats
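# note: scipy parametrizes uniform(loc, scale) over [loc, loc + scale], so a
# range like {'a': 800.0, 'b': 1300.0} becomes loc=800.0, scale=500.0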
_rv_generators = OrderedDict()
for p in free_parameter_names:
if param_dist_def[p][0] == 'uniform':
a = param_dist_def[p][1]['a']
b = param_dist_def[p][1]['b']
_loc = a
_scale = b-a
_rv_generators[p] = scipy.stats.uniform(loc=_loc,scale=_scale)
    # kde sampling: build the kernel from a prior-iteration results file.
    # Minimal sketch -- the file name comes from the commented-out
    # 'kde_samples_file' setting above and is an assumption here, as is the
    # comma-separated, one-named-column-per-parameter layout.
    elif param_dist_def[p][0] == 'kde':
        import pandas as pd
        _kde_data = pd.read_csv('culled_009_part_1.dat')
        _kde_kernel = scipy.stats.gaussian_kde(
            _kde_data[free_parameter_names].values.T)
else:
pass
for i_sample in range(n_samples):
    # generate parameter set
    _parameters = OrderedDict([(p,None) for p in parameter_names])
    if all(param_dist_def[fp][0] == 'uniform' for fp in free_parameter_names):
        for fp in free_parameter_names:
            # _rv_generators maps each free parameter name to the
            # scipy.stats.uniform generator built above
            _parameters[fp] = _rv_generators[fp].rvs(size=1)[0]
    elif any(param_dist_def[fp][0] == 'kde' for fp in free_parameter_names):
        # a single kde draw yields all free parameters at once
        _free_parameters = _kde_kernel.resample(size=1)
        for i, fp in enumerate(free_parameter_names):
            _parameters[fp] = _free_parameters[i, 0]
    else:
        raise ValueError("unknown parameter distribution type")
    # fill in _parameters for constrained values
_constrained_parameter_names = [
p for p in _parameters if p not in free_parameter_names]
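    # substitute each sampled free-parameter value into the 'equals'
    # expression and evaluate it; e.g. chrg_O's '-chrg_Mg' becomes
    # '-<sampled chrg_Mg>' before eval()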
for p in _constrained_parameter_names:
_str_eval = str(param_dist_def[p][1])
for fp in free_parameter_names:
if fp in _str_eval:
_str_eval = _str_eval.replace(fp,str(_parameters[fp]))
_parameters[p] = eval(_str_eval)
#_parameters = MgO.MgO_LewisCatlow['parameters']
_results = engine.evaluate_parameter_set(parameters=_parameters)
_strout = str(i_sample) + ","\
+ ",".join([str(v) for k,v in _results['parameters'].items()]) + ","\
+ ",".join([str(v) for k,v in _results['qois'].items()]) + ","\
+ ",".join([str(v) for k,v in _results['errors'].items()])
#print(_strout)
print(i_sample)
print(_results)
print(_results['parameters'])
print(_results['qois'])
print(_results['errors'])
print(_results['parameters']['MgMg_A'])
| [
"[email protected]"
] | |
6e86f87f488cd7eabf25543983656750a69ab9a7 | 3e9bf87895b31e42c25f5d52bc15cd64aaad2fca | /Landscapes/Landscapes/wsgi.py | 55bd19a563b63bb6d6e7554f5ded084c49dfb346 | [] | no_license | kevinpav/Django | e5bd39d3c7c856fbecabff92d5655d73683e5180 | 38f53fe679b584084bfb7e355271059d6d335edd | refs/heads/master | 2021-01-01T06:01:58.448599 | 2017-07-27T23:20:21 | 2017-07-27T23:20:21 | 97,335,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | """
WSGI config for Landscapes project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Landscapes.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
95cc420ac962966131d93517456112712e8d5895 | c7603730fe2e0615cb8af85360f4270c6e519dcd | /eu-structural-funds/common/processors/MT/mt_malta_scraper.py | 08fdc146452c6d2de38c6d173f333a4873a0187f | [
"MIT"
] | permissive | transpresupuestaria/os-data-importers | b58266d03274901bf6104dc10ab725fa97a22d18 | 929e07aefc98ae4788e75c682d4c3adc014bf6ce | refs/heads/master | 2022-07-02T16:21:34.023556 | 2020-05-18T18:48:08 | 2020-05-18T18:48:08 | 112,221,613 | 0 | 0 | MIT | 2018-08-07T00:26:10 | 2017-11-27T16:40:20 | Python | UTF-8 | Python | false | false | 4,034 | py | """A scraper for Malta 2007-2013."""
from datapackage_pipelines.wrapper import spew, ingest
from logging import info, debug
from lxml.html import fromstring
from requests import Session
BASE_URL = 'https://investinginyourfuture.gov.mt'
PAGINATION_URL = BASE_URL + '/ajax/loadProjects.ashx?page={counter}'
PROJECT_URLS_XPATH = './/div[@class="project-listing-item-title"]/a'
FIELD_XPATHS = {
'Code': './/span[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectRefCode"]',
'Title': './/span[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectTitle"]',
'Project Cost': ".//*[@id='mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectCostBeneficiaryItem_divCostValue']",
'Beneficiary': './/span[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectCostBeneficiaryItem_divBeneficiaryValue"]',
'Line Ministry': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdLineMinistry"]',
'Start Date': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdStartDate"]',
'End Date': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdEndDate"]',
'Non Technical Short Summary Of Project': ".//*[@id='mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_divNonTechnicalShortSummaryContent']/p",
'Operational Programme': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdOperationalProgramme"]',
'Fund': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdFund"]',
'Operational Objective': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdOperationalObjective"]/p',
'Priority Axis': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdPriorityAxis"]',
'Focus Area Of Intervention': './/td[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_tdFocusAreaOfIntervention1"]',
'Project Objectives': './/div[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_divProjectObjectives"]/p',
'Project Results': './/div[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_divProjectResults"]/p',
'Project Purpose': './/div[@id="mainPlaceHolder_coreContentPlaceHolder_mainContentPlaceHolder_projectDetails_divProjectPurpose"]/p',
}
session = Session()
def scrape_project(url):
"""Return project data as a generator of tuples."""
response = session.get(url)
doc = fromstring(response.content)
def get_text(html_node):
if html_node is not None:
return html_node.text
for key, xpath in FIELD_XPATHS.items():
node = doc.find(xpath)
value = get_text(node)
debug('Extracted %s = %s', key, value)
yield key, value
def scrape_projects(paths):
"""Return generator of project dictionaries."""
for path in paths:
url = BASE_URL + path
project_row = dict(scrape_project(url))
info('Scraped %s', project_row)
yield project_row
def get_project_urls():
"""Return the complete list of project URLS."""
counter = 0
paths = []
while True:
counter += 1
        page_url = PAGINATION_URL.format(counter=counter)
        response = session.get(page_url)
if response.text:
doc = fromstring(response.content)
more_links = doc.findall(PROJECT_URLS_XPATH)
more_paths = list(map(lambda x: x.get('href'), more_links))
paths.extend(more_paths)
info('Collected %s urls on page %s', len(more_paths), counter)
else:
return paths
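
def preview_projects(limit=3):
    """Convenience sketch for interactive use, not part of the pipeline:
    scrape only the first `limit` projects and return them as a list.
    `limit` is an illustrative parameter, not from the original module."""
    paths = get_project_urls()
    return list(scrape_projects(paths[:limit]))
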
if __name__ == '__main__':
_, datapackage, _ = ingest()
project_paths = get_project_urls()
project_rows = scrape_projects(project_paths)
spew(datapackage, [project_rows])
| [
"[email protected]"
] | |
0d49d73ccb4f93db186fffd39c53b3d8f1cccc1b | 1670dca534ef4fd7e8d9ca9e6d55b5885e4071f9 | /CodeChef/CodeChef55.py | eb3b8f431333456c3d272f468c5b583c2b9a8353 | [] | no_license | Tejas1510/Pythonary | 24512a6c5abfee17457397aa37849f3a5a739002 | 55c11f74d9f540bf696acecaa78febecd14d8422 | refs/heads/master | 2022-11-23T23:27:32.219513 | 2020-08-02T17:22:17 | 2020-08-02T17:22:17 | 264,151,076 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | """"""""""""""""""""""""""""""""""""""
Name of Question:Chef and his Student
Link of Question:https://www.codechef.com/problems/CHEFSTUD
"""""""""""""""""""""""""""""""""""""""
t=int(input())
for i in range(t):
s=input()
a=list(s)
for i in range(len(a)):
if(a[i]=="<"):
a[i]=">"
elif(a[i]==">"):
a[i]="<"
s="".join(a)
print(s.count("><"))
| [
"[email protected]"
] | |
5c1cb0eab885f6d3fcd93a1780c6fe08a79db9cc | 0594725345fc65cfd3e5b60beffcda9ce4ee2a2c | /mainapp/apps.py | f6834ebaeb5f0e0b1ee9b71b86f2bc3e4859478a | [] | no_license | disenQF/StoreOA | 667fadd06f218c295a0774cb8f868aadbe6314d8 | 01c90b28b3f6a9bf94effb606760e89f63f02cdf | refs/heads/master | 2020-07-11T18:35:25.494730 | 2019-08-30T08:35:11 | 2019-08-30T08:35:11 | 204,616,179 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | from django.apps import AppConfig
class MainappConfig(AppConfig):
app_label = '主要模块'
name = 'mainapp'
| [
"[email protected]"
] | |
027a5979fa94e310120de50128b49b537fb1fa40 | bec60c149e879666de11bd1bcf47ab0dc2225d49 | /RF_MicroPython/main.py | ac8c2e72620466f6e070dbab064d15a2621d6230 | [] | no_license | KipCrossing/OpenEM | 7fee5f3d98bb931209999a8dca41295c1412308e | 0572d3697b1c8299c29e31840e6ec1f9e08c172c | refs/heads/master | 2021-07-17T02:58:15.385369 | 2020-07-02T12:16:39 | 2020-07-02T12:16:39 | 186,344,444 | 0 | 0 | null | 2019-09-06T02:25:56 | 2019-05-13T04:19:59 | Python | UTF-8 | Python | false | false | 5,823 | py | import sht31
import machine
import pyb
import array
import math
import utime
from pyb import Pin, Timer
from ad9833 import AD9833
from pyb import SPI
from specialmath import SpecialMath
print("(Main program started)")
blueled = pyb.LED(4)
# Wave gen
ss = Pin('Y5', Pin.OUT_PP)
spi = SPI(2, SPI.MASTER, baudrate=9600, polarity=1, phase=0, firstbit=SPI.MSB)
wave = AD9833(spi, ss)
# Bluetooth
blue_uart = pyb.UART(6, 9600)
blue_uart.init(9600, bits=8, stop=1, parity=None)
# Temp sensor
SCLpin = 'Y9'
SDApin = 'Y10'
i2c = machine.I2C(sda=machine.Pin(SDApin), scl=machine.Pin(SCLpin), freq=400000)
sht31sensor = sht31.SHT31(i2c)
# Initial variables
spw = 10 # Samples per wave
WAVES = 1000 # Number of waves to take an average from
freq = 16000 # Frequency in Hz
# send wave
wave.set_freq(freq)
wave.set_type(0)
wave.send()
wait = True
while wait:
print('Blue Out:')
if b'BTM-U' == blue_uart.read():
print("Start")
wait = False
pyb.delay(1000)
# pyb.repl_uart(blue_uart)
blue_uart.write("Warming up!")
blue_uart.write("Started")
utime.sleep(2)
wave.set_freq(freq)
wave.set_type(0)
wave.send()
# Timers for ADC's
adc1 = pyb.ADC(pyb.Pin.board.Y11) # create an ADC on pin Y11
adc2 = pyb.ADC(pyb.Pin.board.X4) # create an ADC on pin X4
adc_voltage = pyb.ADC(pyb.Pin.board.Y12)
voltage = (adc_voltage.read()/4096)*14.12
adcall = pyb.ADCAll(12, 0x70000) # 12 bit resolution, internal channels
coretemp = adcall.read_core_temp()
# tim = pyb.Timer(8, freq=200000) # Create timer
# buf1 = bytearray(WAVES*spw) # create a buffer
# buf2 = bytearray(WAVES*spw) # create a buffe
# # read analog values into buffers at 100Hz (takes one second)
# pyb.ADC.read_timed_multi((adc1, adc2), (buf1, buf2), tim)
sm = SpecialMath()
(sm.hp_amp, sm.hp_sft) = (0, 0)
# Output File
outfile = open('out.csv', 'w')
outfile.write("i0,i1,i2,i3,i4,i5,i6,i7,i8,i9,\n")
outfile.close()
def record(f):
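    """Sample adc1/adc2 for WAVES cycles at f*spw Hz, fold the raw buffers
    into one averaged cycle of spw bins, and return (ref_amp, amp, shift):
    the fitted amplitude of the adc1 (reference) wave, the fitted amplitude
    of the mean-subtracted adc2 wave, and their relative shift in samples,
    wrapped to [0, spw)."""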
tim = pyb.Timer(8, freq=f*spw) # Create timer
buf1 = bytearray(WAVES*spw) # create a buffer
buf2 = bytearray(WAVES*spw) # create a buffe
# read analog values into buffers at 100Hz (takes one second)
pyb.ADC.read_timed_multi((adc1, adc2), (buf1, buf2), tim)
listc = []
for i in range(spw):
listc.append(0)
count = 0
for n in range(len(buf1)):
if count > spw-1:
count = 0
listc[count] += buf1[n]
count += 1
listd = []
for i in range(spw):
listd.append(0)
count = 0
for n in range(len(buf2)):
if count > spw-1:
count = 0
listd[count] += buf2[n]
count += 1
# (a,s) = sm.fit_sin(listd,10)
(a1, s1) = sm.fit_sin(listc, 3)
# print("-")
data_mean = sm.mean(listd)
for d in range(0, len(listd)):
listd[d] -= data_mean
# total wave - Hp to get Hs
# sm.hp = sm.gen_sin(10, sm.hp_amp, s1 + sm.hp_sft)
listout = listd # [x - y for x, y in zip(listd, sm.hp)]
# print(listout)
outtext = ''
for d in listout:
outtext += str(d)+','
outfile = open('out.csv', 'a')
outfile.write(outtext+"\n")
outfile.close()
(a2, s2) = sm.fit_sin(listout, 3)
# print(listout)
# print('Hp - Amp: %f Sft: %f' % (a1,s1))
# print('Hs - Amp: %f Sft: %f' % (a2,s2))
# print(s2-s1)
if s2-s1 < 0:
return(a1, a2, s2-s1 + spw)
else:
return(a1, a2, s2-s1)
'''
outfile = open('RF_calibrate.csv', 'w')
outfile.write("Freq,Amp,Shift\n")
mul = 10
for i in range(900, 2000):
freq = i*mul
wave.set_freq(freq)
wave.send()
pyb.delay(50)
ampl = []
sftl = []
for j in range(4):
(or_amp, amp, sft) = record(freq)
ampl.append(amp)
sftl.append(sft)
output = "{},{},{}".format(wave.freq, int(sm.mean(ampl)), round(sm.mean(sftl), 3))
outfile.write(output+"\n")
blue_uart.write(output)
print(output)
blueled.toggle()
outfile.close()
'''
# Output File
outfile = open('OpenEM_data.csv', 'w')
outfile.write("ID,Amp,Shift,Shift_out,Voltage,Temp,Humidity,CoreTemp,Hs,Hp\n")
outfile.close()
count = 0
callibrate = []
Hp_prev = 0
calivbate = True
c_amp = 0
c_sft = 0
amp_roll = []
sft_roll = []
while True:
print("------------------------------" + str(freq))
blueled.toggle()
(or_amp, amp, sft) = record(freq)
sht31_t, sht31_h = sht31sensor.get_temp_humi()
coretemp = adcall.read_core_temp()
voltage = (adc_voltage.read()/4096)*14.12
sm.hp_sft = 9.54 - 0.25
if sft - sm.hp_sft < 0:
sft_out = sft - sm.hp_sft + spw
else:
sft_out = sft - sm.hp_sft
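    # resolve the measured wave into quadrature (Hs) and in-phase (Hp)
    # components; sft_out is in samples and spw samples span one full cycle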
Hs = amp*math.sin(math.pi*2*sft_out/spw)
Hp = amp*math.cos(math.pi*2*sft_out/spw)
amp_roll.append(amp)
sft_roll.append(sft)
if len(amp_roll) > 4:
amp_roll.pop(0)
sft_roll.pop(0)
out_string = "%s, %s, %s, %s, %s, %s, %s, %s, %s, %s\n" % (count,
amp,
sft,
sft_out,
voltage,
sht31_t,
sht31_h,
coretemp,
Hs,
Hp)
print(out_string)
outfile = open('OpenEM_data.csv', 'a')
outfile.write(out_string)
outfile.close()
blue_uart.write('%s, %s, %s' % (
count,
int(sm.mean(amp_roll)),
sm.mean(sft_roll)))
count += 1
| [
"[email protected]"
] | |
34767046d1b574b160cf38d2d476cabea85b10fa | 03a79c4bef915a566f597d75d0d4a5bacc44c16e | /blog/posts/utils.py | 6472726e6b46f32a26b82a86c24f8a8a488e7891 | [] | no_license | TarekCsePust/Blog-Apps-with-Django-Rest-Framework-Postgresql | d2bb77d4427b2dc791fc6761487d83b8821d8550 | 750a4918825100e2e3fd761844fa8b235bef687a | refs/heads/master | 2020-04-02T04:38:27.263459 | 2019-01-24T07:08:20 | 2019-01-24T07:08:20 | 154,026,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | import datetime
import re
import math
from django.utils.html import strip_tags
def count_words(html_string):
# html_string = """
# <h1>This is a title</h1>
# """
word_string = strip_tags(html_string)
matching_words = re.findall(r'\w+', word_string)
count = len(matching_words) #joincfe.com/projects/
return count
def get_read_time(html_string):
count = count_words(html_string)
read_time_min = math.ceil(count/200.0) #assuming 200wpm reading
print("min: ",read_time_min)
# read_time_sec = read_time_min * 60
# read_time = str(datetime.timedelta(seconds=read_time_sec))
# read_time = str(datetime.timedelta(minutes=read_time_min))
return int(read_time_min) | [
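# sanity check (illustrative): 600 words of plain text -> ceil(600/200) = 3 min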
"[email protected]"
] | |
7771c441c900edf84030b5fa1d84a1b0c3051375 | b110fdc592315daeeec7b0ce48535dfada995d68 | /highlander/api/controllers/v1/validation.py | 395e8d223f8bf47ba0ac5963159629aa0f9ee73f | [
"Apache-2.0"
] | permissive | StephenTao/stephen | 1ee5c77b2b4c96d6118911cc8a4458cb94735851 | 06da7cbc93b40fcd089eeed2972adc1fe6bd3cb9 | refs/heads/master | 2021-01-10T15:46:40.109013 | 2016-02-25T06:52:57 | 2016-02-25T06:52:57 | 52,503,137 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,264 | py | # Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pecan
from pecan import rest
from highlander import exceptions as exc
from highlander.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class SpecValidationController(rest.RestController):
def __init__(self, parser):
super(SpecValidationController, self).__init__()
self._parse_func = parser
@pecan.expose('json')
def post(self):
"""Validate a spec."""
definition = pecan.request.text
try:
self._parse_func(definition)
except exc.DSLParsingException as e:
return {'valid': False, 'error': e.message}
return {'valid': True}
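
# Typical use (illustrative): POST a raw DSL definition as the request body to
# wherever this controller is mounted; the response is {'valid': True} or
# {'valid': False, 'error': '<parser message>'}.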
| [
"[email protected]"
] | |
29dd727b2f5a7952aa89561ac5cc127cab893549 | 81313cbd75bbd29cd48632d3cfc6b84884884650 | /HistFitterNtuples/MakeArtificialDataTree.py | 8988845de4059ffdce93eee5bd0f807f082a3fe8 | [] | no_license | UPenn-SUSY/PennSUSYFrame | ee93fd299e4e36ebc74e0065db0740451309682a | 41303b163dbc05451b22c19b00b436cc25440cf6 | refs/heads/master | 2019-01-19T10:28:47.417027 | 2015-05-08T15:07:24 | 2015-05-08T15:07:24 | 13,934,522 | 2 | 0 | null | 2015-05-08T15:07:24 | 2013-10-28T18:23:35 | C++ | UTF-8 | Python | false | false | 4,295 | py | import itertools
import ROOT
import array
import datetime
# ------------------------------------------------------------------------------
process_list = {'ttbar':1, 'ZGamma':1}
flavor_list = ['ee', 'mm', 'em']
region_list = ['cr_top', 'cr_z', 'sr']
hist_name = 'mbl_0'
hist_bins, hist_min, hist_max = 20, 0, 2000
lumi = 21.e3
rand = ROOT.TRandom3(datetime.datetime.now().microsecond)
# ------------------------------------------------------------------------------
def fillArtificialDataTree(in_file):
# create output file and tree
file_name_tag = '.'.join(['_'.join([k,str(v)]) for k, v in process_list.items()])
out_file_name = '.'.join(['ArtificialData', file_name_tag, 'root'])
out_file = ROOT.TFile(out_file_name, 'RECREATE')
# out_tree = ROOT.TTree('ArtificialData', 'ArtificialData')
out_tree = ROOT.TTree('data', 'data')
# create variables for branches
mbl_0 = array.array('d', [0])
is_ee = array.array('i', [0])
is_mm = array.array('i', [0])
is_em = array.array('i', [0])
is_sr = array.array('i', [0])
is_cr_top = array.array('i', [0])
is_cr_z = array.array('i', [0])
# connect branches
out_tree.Branch('mbl_0' , mbl_0 , 'mbl_0/D')
out_tree.Branch('is_ee' , is_ee , 'is_ee/I')
out_tree.Branch('is_mm' , is_mm , 'is_mm/I')
out_tree.Branch('is_em' , is_em , 'is_em/I')
out_tree.Branch('is_sr' , is_sr , 'is_sr/I')
out_tree.Branch('is_cr_top' , is_cr_top , 'is_cr_top/I')
out_tree.Branch('is_cr_z' , is_cr_z , 'is_cr_z/I')
# loop through processes and flavors
for pl, fl in itertools.product(process_list.keys(), flavor_list):
# get tree for this process and flavor channel
tree_name = '_'.join([fl, pl, 'NoSys'])
print 'process: ', pl, ' - flavor: ', fl, ' - tree: ', tree_name
t = in_file.Get(tree_name)
is_ee[0] = 1 if fl == 'ee' else 0
is_mm[0] = 1 if fl == 'mm' else 0
is_em[0] = 1 if fl == 'em' else 0
# loop through regions
for rl in region_list:
is_sr[0] = 1 if rl == 'sr' else 0
is_cr_top[0] = 1 if rl == 'cr_top' else 0
is_cr_z[0] = 1 if rl == 'cr_z' else 0
print 'is_sr[0] : ' , is_sr[0]
print 'is_cr_top[0]: ' , is_cr_top[0]
print 'is_cr_z[0] : ' , is_cr_z[0]
# create and fill histogram
this_hist_name = '_'.join([tree_name, rl, hist_name])
print ' region: ', rl, ' - hist name: ', this_hist_name
region_hist = ROOT.TH1F(this_hist_name,
'',
hist_bins,
hist_min,
hist_max)
t.Draw(' >> '.join([hist_name, this_hist_name]),
''.join([str(lumi),
'*weight*is_',
rl,
'*',
str(process_list[pl])]))
print ' integral: ', region_hist.Integral()
print ''
# find bin centers and content
bin_centers = [region_hist.GetBinCenter(this_bin) for this_bin in
xrange(hist_bins + 2)]
bin_content = [region_hist.GetBinContent(this_bin) for this_bin in
xrange(hist_bins + 2)]
print bin_centers
print bin_content
print sum(bin_content)
print ''
for center, content in itertools.izip(bin_centers, bin_content):
mbl_0[0] = center
print center, ' - ', content
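                # fluctuate the pseudo-data: draw this bin's observed count
                # from a Poisson distribution whose mean is the expected yield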
num_events = rand.Poisson(content)
print ' bin center: ', center, ' - exp content: ', content, ' - gen content: ', num_events
# for i in xrange(int(content)):
for i in xrange(num_events):
# print ' - filling entry ', i
out_tree.Fill()
print ''
# write and close file
out_file.Write()
out_file.Close()
if __name__ == '__main__':
# file to extract samples
bkg_file = ROOT.TFile('BackgroundHistFitterTrees.root', 'r')
fillArtificialDataTree(bkg_file)
| [
"[email protected]"
] | |
2b0cce52d9dac0de24d82954a5fc72a01db37e85 | bec2947aadb26bb3a5ecd102bd6270f30836ae9b | /backend/manage.py | 37888a98e44b390e4f50a898587b8c60fa490b1c | [] | no_license | crowdbotics-apps/plate-28244 | 0f7dabf2f9bf3c5f78a15dc69a4ab00f7ad1f408 | af968be757c3e41245fe271c38153cff2b1b0590 | refs/heads/master | 2023-06-10T07:30:04.372083 | 2021-06-26T13:28:56 | 2021-06-26T13:28:56 | 380,508,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 631 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'plate_28244.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
1810565238029931f0f8d33d7f786dce3eb2940b | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-1/a5c70dc6725c422fcccd37ea07e8655b6ecbc833-<main>-fix.py | 540ca1ec66339ceb0b9db883443a3a94f3ba9b5e | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,325 | py |
def main():
argument_spec = openstack_full_argument_spec(name=dict(required=True), password=dict(required=False, default=None, no_log=True), email=dict(required=False, default=None), default_project=dict(required=False, default=None), domain=dict(required=False, default=None), enabled=dict(default=True, type='bool'), state=dict(default='present', choices=['absent', 'present']), update_password=dict(default='always', choices=['always', 'on_create']))
module_kwargs = openstack_module_kwargs()
module = AnsibleModule(argument_spec, **module_kwargs)
if (not HAS_SHADE):
module.fail_json(msg='shade is required for this module')
name = module.params['name']
password = module.params.pop('password')
email = module.params['email']
default_project = module.params['default_project']
domain = module.params['domain']
enabled = module.params['enabled']
state = module.params['state']
update_password = module.params['update_password']
try:
cloud = shade.openstack_cloud(**module.params)
user = cloud.get_user(name)
domain_id = None
if domain:
opcloud = shade.operator_cloud(**module.params)
domain_id = _get_domain_id(opcloud, domain)
if (state == 'present'):
if (update_password in ('always', 'on_create')):
if (not password):
msg = ('update_password is %s but a password value is missing' % update_password)
module.fail_json(msg=msg)
default_project_id = None
if default_project:
default_project_id = _get_default_project_id(cloud, default_project, module)
if (user is None):
user = cloud.create_user(name=name, password=password, email=email, default_project=default_project_id, domain_id=domain_id, enabled=enabled)
changed = True
else:
params_dict = {
'email': email,
'enabled': enabled,
'password': password,
'update_password': update_password,
}
if (domain_id is not None):
params_dict['domain_id'] = domain_id
if (default_project_id is not None):
params_dict['default_project_id'] = default_project_id
if _needs_update(params_dict, user):
if (update_password == 'always'):
user = cloud.update_user(user['id'], password=password, email=email, default_project=default_project_id, domain_id=domain_id, enabled=enabled)
else:
user = cloud.update_user(user['id'], email=email, default_project=default_project_id, domain_id=domain_id, enabled=enabled)
changed = True
else:
changed = False
module.exit_json(changed=changed, user=user)
elif (state == 'absent'):
if (user is None):
changed = False
else:
cloud.delete_user(user['id'])
changed = True
module.exit_json(changed=changed)
except shade.OpenStackCloudException as e:
module.fail_json(msg=str(e), extra_data=e.extra_data)
| [
"[email protected]"
] | |
6504bc2fece9e2db5ffca7ae1fc4cb9dcc612d74 | 7d90d2ce27c6ee0af74391b09909edbd45fdc2f0 | /renix_py_api/api_gen/OfpMeterTableConfig_Autogen.py | d6dbee9cc6d4bfb5fc23919add3b4a32597c6f8d | [] | no_license | gaoxingyu-hub/54testframework-master-e284 | d7ea0d4a715b65c8652430e963a86b9522a7237a | 57dd2197e7d91b8ad8fb2bd0e3503f10afa08544 | refs/heads/master | 2023-04-30T05:50:41.542402 | 2021-05-28T09:19:37 | 2021-05-28T09:19:37 | 309,922,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,840 | py | """
Auto-generated File
Create Time: 2019-12-27 02:33:27
"""
from .ROMEnum_Autogen import *
from renix_py_api.renix_common_api import *
from renix_py_api import rom_manager
from .OfpGlobalConfig_Autogen import OfpGlobalConfig
@rom_manager.rom
class OfpMeterTableConfig(OfpGlobalConfig):
def __init__(self, ID=None, BandUnit=None, EnableBurstSize=None, EnableStatistics=None, **kwargs):
self._ID = ID # ID
self._BandUnit = BandUnit # Band Unit
self._EnableBurstSize = EnableBurstSize # Enable Burst Size
self._EnableStatistics = EnableStatistics # Enable Statistics
properties = kwargs.copy()
if ID is not None:
properties['ID'] = ID
if BandUnit is not None:
properties['BandUnit'] = BandUnit
if EnableBurstSize is not None:
properties['EnableBurstSize'] = EnableBurstSize
if EnableStatistics is not None:
properties['EnableStatistics'] = EnableStatistics
# call base class function, and it will send message to renix server to create a class.
super(OfpMeterTableConfig, self).__init__(**properties)
def delete(self):
"""
call to delete itself
"""
return self._finalize()
def edit(self, ID=None, BandUnit=None, EnableBurstSize=None, EnableStatistics=None, **kwargs):
properties = kwargs.copy()
if ID is not None:
self._ID = ID
properties['ID'] = ID
if BandUnit is not None:
self._BandUnit = BandUnit
properties['BandUnit'] = BandUnit
if EnableBurstSize is not None:
self._EnableBurstSize = EnableBurstSize
properties['EnableBurstSize'] = EnableBurstSize
if EnableStatistics is not None:
self._EnableStatistics = EnableStatistics
properties['EnableStatistics'] = EnableStatistics
super(OfpMeterTableConfig, self).edit(**properties)
@property
def ID(self):
"""
get the value of property _ID
"""
if self.force_auto_sync:
self.get('ID')
return self._ID
@property
def BandUnit(self):
"""
get the value of property _BandUnit
"""
if self.force_auto_sync:
self.get('BandUnit')
return self._BandUnit
@property
def EnableBurstSize(self):
"""
get the value of property _EnableBurstSize
"""
if self.force_auto_sync:
self.get('EnableBurstSize')
return self._EnableBurstSize
@property
def EnableStatistics(self):
"""
get the value of property _EnableStatistics
"""
if self.force_auto_sync:
self.get('EnableStatistics')
return self._EnableStatistics
@ID.setter
def ID(self, value):
self._ID = value
self.edit(ID=value)
@BandUnit.setter
def BandUnit(self, value):
self._BandUnit = value
self.edit(BandUnit=value)
@EnableBurstSize.setter
def EnableBurstSize(self, value):
self._EnableBurstSize = value
self.edit(EnableBurstSize=value)
@EnableStatistics.setter
def EnableStatistics(self, value):
self._EnableStatistics = value
self.edit(EnableStatistics=value)
def _set_id_with_str(self, value):
try:
self._ID = int(value)
except ValueError:
self._ID = hex(int(value, 16))
def _set_bandunit_with_str(self, value):
        separator = value.find(':')
        exec('self._BandUnit = EnumOfpBandUnit.%s' % value[:separator])
def _set_enableburstsize_with_str(self, value):
self._EnableBurstSize = (value == 'True')
def _set_enablestatistics_with_str(self, value):
self._EnableStatistics = (value == 'True')
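# Illustrative usage sketch (not part of the auto-generated file). The enum
# member name and a configured renix session are assumptions:
# meter = OfpMeterTableConfig(ID=1, EnableStatistics=True)
# meter.EnableBurstSize = True   # property setter issues edit() to the server
# meter.delete()                 # finalizes the object on the renix server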
| [
"[email protected]"
] | |
e81cea9a4f0cb5a1ef73fcf0a2db186d9a8a2073 | a2362576001e0f9e22dc69c623170e108908c1b4 | /testing_sys/testsys/migrations/0047_auto_20190524_2057.py | e21d7976bb73ac2e309fbf7b49eb37d9c68f8c49 | [] | no_license | mdigbazova/TestSystem | c1a694eb1877567bcc63a2cc3f615469ba4f8fd9 | e5cca7a3aa31f1af4e1f7807895124e36348b9af | refs/heads/master | 2022-12-15T22:20:14.812166 | 2019-06-11T08:14:24 | 2019-06-11T08:14:24 | 183,647,017 | 0 | 1 | null | 2022-11-22T03:50:12 | 2019-04-26T14:53:54 | Python | UTF-8 | Python | false | false | 1,072 | py | # Generated by Django 2.2 on 2019-05-24 17:57
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('testsys', '0046_auto_20190524_2050'),
]
operations = [
migrations.AlterField(
model_name='agent',
name='currentdefinitionsdate',
field=models.DateTimeField(default=datetime.datetime(2019, 5, 24, 17, 57, 33, 27108, tzinfo=utc), null=True, verbose_name='Current Definitions Date'),
),
migrations.AlterField(
model_name='alertsbody',
name='alerttimestamp',
field=models.DateTimeField(default=datetime.datetime(2019, 5, 24, 17, 57, 33, 28106, tzinfo=utc), verbose_name='Alert Timestamp'),
),
migrations.AlterField(
model_name='alertsbody',
name='createdat',
field=models.DateTimeField(default=datetime.datetime(2019, 5, 24, 17, 57, 33, 28106, tzinfo=utc), verbose_name='Creation Date'),
),
]
| [
"[email protected]"
] | |
36d556974768695b7e1e8d9f902557a81d9650f3 | 731c3f2f85f6002725322eedc0b2c8b5e74f610e | /sale_discount_total/reports/invoice_report.py | 2a6626455f4295c09de5b56c9dd0dd2afffc9203 | [] | no_license | babarlhr/project-0021 | 1ac824657f893c8f25d6eb3b839051f350d7cc9d | e30b8a9f5d2147d3ca5b56b69ec5dbd22f712a91 | refs/heads/master | 2021-09-22T15:45:47.431000 | 2018-09-11T14:59:49 | 2018-09-11T14:59:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | from openerp import fields, models
class AccountInvoiceReport(models.Model):
_inherit = 'account.invoice.report'
discount = fields.Float('Discount', readonly=True)
def _select(self):
        res = super(AccountInvoiceReport, self)._select()
select_str = res + """, sub.discount AS discount """
return select_str
def _sub_select(self):
        res = super(AccountInvoiceReport, self)._sub_select()
select_str = res + """,SUM(CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN - ((ail.quantity / u.factor * u2.factor) * ail.price_unit * (ail.discount) / 100.0)
ELSE ((ail.quantity / u.factor * u2.factor) * ail.price_unit * (ail.discount) / 100.0) END) as discount"""
return select_str | [
"[email protected]"
] | |
acfc8e328100a02bf944650a202675138090aec8 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=3.0_rd=0.65_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=32/sched.py | 3f94907f550578e3ad9bd176c4a0232307ccaf22 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 315 | py | -X FMLP -Q 0 -L 2 100 400
-X FMLP -Q 0 -L 2 70 250
-X FMLP -Q 0 -L 2 64 200
-X FMLP -Q 1 -L 1 53 175
-X FMLP -Q 1 -L 1 47 150
-X FMLP -Q 1 -L 1 42 200
-X FMLP -Q 2 -L 1 41 200
-X FMLP -Q 2 -L 1 40 200
-X FMLP -Q 3 -L 1 32 175
-X FMLP -Q 3 -L 1 25 100
22 150
21 200
16 200
12 150
9 125
| [
"[email protected]"
] | |
bb05c6d8f5cdb8e988bbb9b22fd2ca62a282ec17 | d2ec5cdf0c94ae429476b802f4ae133fc74d35c2 | /documents/management/commands/fixdocuments_remove_phantoms.py | cf875ee035bf0558687471075ab5f9eb28a2222f | [
"MIT"
] | permissive | Barolina/doc-versions | eb4e6f0ce087d7027dc1bbd0b5b53a7779efab8e | ae536892f6245206abb7145592cf61408bc1161c | refs/heads/master | 2021-01-12T10:27:25.218122 | 2013-02-23T18:34:55 | 2013-02-23T18:34:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,015 | py | # -*- encoding: utf-8 -*-
from django.core.management.base import NoArgsCommand
from django.db.models import get_models, F
from documents.models import Document
from documents.management.commands.documentscheck import \
info, warning, set_options
def fix_model(model):
mn = model.__name__
info('fixing model : ' + mn)
c = model.objects.filter(document_start__gte=F('document_end')).count()
if c:
model.objects.filter(document_start__gte=F('document_end')).delete()
warning(mn + ': %d phantom document(s) removed' % c)
else:
info(mn + ': no phantom documents found')
def fix(out, err, **options):
set_options(out, err, **options)
for m in get_models():
if issubclass(m, Document):
fix_model(m)
class Command(NoArgsCommand):
help = 'Remove all records with document_start >= document_end ' \
'on all Document subclasses'
def handle_noargs(self, **options):
fix(self.stdout, self.stderr, **options)
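# Invocation sketch (the management command name comes from this module's file name):
#   python manage.py fixdocuments_remove_phantoms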
| [
"[email protected]"
] | |
5b84b46750948531241467dbff1f604ee2a07454 | 9cebe39a7ed1bb813b2aebe1ae923821f3c08394 | /ndb/util.py | f5d250fd689812eb345c4479d626001a9c10ae0a | [] | no_license | argeweb/core | 1f6a53092544bc7b7c972d4aa505d5d6ef8f3b50 | bf78434714cdb5242b9b3b345666482b27d73528 | refs/heads/master | 2020-12-25T13:33:24.689677 | 2018-04-18T00:29:35 | 2018-04-18T00:29:35 | 67,552,917 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,701 | py | """
Utilities for working with both db and ndb models
"""
from google.appengine.ext import db, ndb
def list(Model, *args, **kwargs):
"""
Returns a query object for a db or ndb Model
"""
if issubclass(Model, db.Model):
return Model.all()
else:
return Model.query()
def decode_key(str, kind=None):
"""
    Makes an ndb.Key object from the given data
    and optionally a kind. The kind is only needed if
    str is a numeric id.
"""
if isinstance(str, ndb.Key):
return str
str = str.lstrip(':')
try:
id = long(str)
return ndb.Key(kind, id)
except ValueError:
return ndb.Key(urlsafe=str)
def encode_key(ins):
"""
Gets the urlsafe of a key for either a db or ndb instance
"""
try:
return new_key(ins).urlsafe()
except AttributeError:
return new_key(ins.key).urlsafe()
def new_key(ins_or_key):
"""
    Makes an ndb.Key from ndb or db instances or keys
"""
if isinstance(ins_or_key, ndb.Key):
return ins_or_key
elif isinstance(ins_or_key, db.Model):
return ndb.Key.from_old_key(ins_or_key.key())
elif isinstance(ins_or_key, db.Key):
return ndb.Key.from_old_key(ins_or_key)
elif isinstance(ins_or_key, ndb.Model):
return ins_or_key.key
return None
def old_key(ins_or_key):
"""
Makes a db.Key from ndb or db instances or keys
"""
if isinstance(ins_or_key, ndb.Model):
return ins_or_key.key.to_old_key()
elif isinstance(ins_or_key, ndb.Key):
return ins_or_key.to_old_key()
elif isinstance(ins_or_key, db.Model):
return ins_or_key.key()
else:
return ins_or_key
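# Illustrative round trip (assumes an ndb model kind named 'Widget'):
# key = ndb.Key('Widget', 42)
# token = encode_key(key)                        # urlsafe string, e.g. for URLs
# assert decode_key(token) == key                # urlsafe tokens need no kind
# assert decode_key('42', kind='Widget') == key  # numeric ids need the kind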
| [
"[email protected]"
] | |
5d189a253e3bc1ba72529d977c88c26e1a0f2eae | 623c915efdad396b9d40d0c46c9aed532839a383 | /sudoku/grid_values.py | 43a32971354cf454262ebe30e036f90496992ef3 | [] | no_license | KeithYJohnson/aind | f997aa20da2878b76a2950bed1452a826bcb11b5 | d70ca4fbf5a38e2aaddedfc1fb01b212c008309b | refs/heads/master | 2021-01-21T19:57:53.828896 | 2017-06-16T23:13:35 | 2017-06-16T23:13:35 | 92,176,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,532 | py | # from utils import *
boxes = ['A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7', 'A8', 'A9',
'B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B9',
'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9',
'D1', 'D2', 'D3', 'D4', 'D5', 'D6', 'D7', 'D8', 'D9',
'E1', 'E2', 'E3', 'E4', 'E5', 'E6', 'E7', 'E8', 'E9',
'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9',
'G1', 'G2', 'G3', 'G4', 'G5', 'G6', 'G7', 'G8', 'G9',
'H1', 'H2', 'H3', 'H4', 'H5', 'H6', 'H7', 'H8', 'H9',
'I1', 'I2', 'I3', 'I4', 'I5', 'I6', 'I7', 'I8', 'I9']
def grid_values(grid_string, boxes):
"""Convert grid string into {<box>: <value>} dict with '123456789' value for empties.
Args:
        grid_string: Sudoku grid in string form, 81 characters long
Returns:
Sudoku grid in dictionary form:
- keys: Box labels, e.g. 'A1'
- values: Value in corresponding box, e.g. '8', or '123456789' if it is empty.
"""
grid_dict = {}
for idx, char in enumerate(grid_string):
if char == '.':
grid_dict[boxes[idx]] = '123456789'
else:
grid_dict[boxes[idx]] = char
return grid_dict
# Credit to the course provider
def slicker_implementation(grid_string, boxes):
    assert len(grid_string) == 81, "Input grid must be a string of length 81 (9x9)"
    values = ['123456789' if char == '.' else char for char in grid_string]
    return dict(zip(boxes, values))
if __name__ == '__main__':
string_grid = '..3.2.6..9..3.5..1..18.64....81.29..7.......8..67.82....26.95..8..2.3..9..5.1.3..'
print(grid_values(string_grid, boxes))
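    # Sanity check (illustrative): both implementations should produce the same dict.
    assert grid_values(string_grid, boxes) == slicker_implementation(string_grid, boxes)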
| [
"[email protected]"
] | |
ae8352c390609e6e0fd1f97b581fdc749145f99b | d92c34d44d025ae7619bb3ec0e974647d86d715c | /02_gpio/gpio.py | abdb7cbed45345dc18faed606459a2751aea0340 | [] | no_license | braingram/bbb_pru_tests | 317ca0f0867f94cc27e00d7036f510cbe5affa16 | c19374251e4f628ed0fe78a88d7ce40057e78e41 | refs/heads/master | 2021-01-19T10:59:10.083272 | 2015-03-14T21:48:53 | 2015-03-14T21:48:53 | 31,833,847 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,216 | py | ''' gpio.py
blink an LED a certain number of times'''
import struct
import mmap
import pypruss
# PRU shared-RAM inputs: count (offset 0), then duration (offset 4)
PRU_ICSS = 0x4A300000
PRU_ICSS_LEN = 512*1024
SHAREDRAM_START = 0x00012000
count_value = 4
#duration_value = 1000 * 1000 * 100 # 500 ms
duration_value = 1000 * 1000 * 10 # 50 ms
print("Count : %s" % count_value)
print("Duration: %s" % duration_value)
with open("/dev/mem", "r+b") as f:
ddr_mem = mmap.mmap(f.fileno(), PRU_ICSS_LEN, offset=PRU_ICSS)
ddr_mem[SHAREDRAM_START:SHAREDRAM_START+4] = struct.pack('L', count_value)
ddr_mem[SHAREDRAM_START+4:SHAREDRAM_START+8] = struct.pack('L', duration_value)
    pypruss.modprobe() # This only has to be called once per boot
pypruss.init() # Init the PRU
pypruss.open(0) # Open PRU event 0 which is PRU0_ARM_INTERRUPT
pypruss.pruintc_init() # Init the interrupt controller
    pypruss.exec_program(0, "./gpio.bin") # Load firmware "gpio.bin" on PRU 0
pypruss.wait_for_event(0) # Wait for event 0 which is connected to PRU0_ARM_INTERRUPT
pypruss.clear_event(0) # Clear the event
pypruss.pru_disable(0) # Disable PRU 0, this is already done by the firmware
    pypruss.exit() # De-initialize the PRU subsystem and release the driver resources
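# Optional readback (illustrative; assumes the PRU firmware leaves the inputs
# in shared RAM untouched):
# with open("/dev/mem", "r+b") as f:
#     mem = mmap.mmap(f.fileno(), PRU_ICSS_LEN, offset=PRU_ICSS)
#     count, duration = struct.unpack('LL', mem[SHAREDRAM_START:SHAREDRAM_START+8])
#     print("Readback: count=%s duration=%s" % (count, duration))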
| [
"root@beaglebone.(none)"
] | root@beaglebone.(none) |
4196ac2dc9cfce344ae991d7e8f49bd052ce3e5e | 6c5ce1e621e0bd140d127527bf13be2093f4a016 | /ex075/venv/Scripts/pip3.7-script.py | e452b3b38862c357a618f44dd9740312f44bd5ab | [
"MIT"
] | permissive | ArthurAlesi/Python-Exercicios-CursoEmVideo | 124e2ee82c3476a5a49baafed657788591a232c1 | ed0f0086ddbc0092df9d16ec2d8fdbabcb480cdd | refs/heads/master | 2022-12-31T13:21:30.001538 | 2020-09-24T02:09:23 | 2020-09-24T02:09:23 | 268,917,509 | 0 | 0 | null | null | null | null | ISO-8859-2 | Python | false | false | 467 | py | #!C:\Users\User\Documents\github-MeusRepositórios\Python-Exercicios-CursoEmVideo\ex075\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
)
| [
"[email protected]"
] | |
58a810eb3bf799250724d7139f7bafde4a61ba14 | 3e35f5ab6e600d5c215eeecab8857ebebadf6ac4 | /my_app/models.py | 81d11b663f5810bb7dd6bd5dd09f301d0fc75288 | [] | no_license | jpisano99/my_app_template_r3 | c14135d81b7f66a8b72305f16111d247b09dee49 | dbdd9616c9cd86451e93a211a174a40dff31b3df | refs/heads/master | 2023-02-22T07:06:34.852386 | 2022-07-25T17:51:05 | 2022-07-25T17:51:05 | 226,744,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,379 | py | from my_app import db
class Test_Table(db.Model):
__tablename__ = 'test_table'
# Use this to specify a default schema/db for this table
# __table_args__ = {'schema': 'dev'}
    # Use this to specify a different bind/sql server for this table
# __bind_key__ = 'dev'
id = db.Column(db.Integer(), primary_key=True)
first_name = db.Column(db.String(40))
last_name = db.Column(db.String(40))
qty_on_hand = db.Column(db.Integer)
cost = db.Column(db.Float)
date_added = db.Column(db.DateTime)
password_hash = db.Column(db.String(128))
@staticmethod
def newest():
return Test_Table.query.all()
    @staticmethod
    def newest_name(num):
        return Test_Table.query.order_by(Test_Table.first_name).limit(num)
def __repr__(self):
return "<name {}: '{} , {}'>".format(self.id, self.pss_name,self.tsa_name)
# class Bookings(db.Model):
# __tablename__ = 'bookings'
#
# erp_end_customer_name = db.Column(db.String(100))
# total_bookings = db.Column(db.Float)
# product_id = db.Column(db.String(25))
# date_added = db.Column(db.DateTime)
# hash_value = db.Column(db.String(50), primary_key=True)
# class Customers(db.Model):
# __tablename__ = 'customers'
#
# id = db.Column(db.Integer(), primary_key=True)
# last_name = db.Column(db.String(45))
# first_name = db.Column(db.String(45))
| [
"[email protected]"
] | |
5d3af36631918afa519eae61c95e01e084b19684 | 1e84a9fec36deaf9a55a2734749ea035f72ac869 | /KAKAO BLIND RECRUITMENT/2017/3차/압축/main.py | 636e20aa1a11fd3166c11bef8a77b1a406c6023d | [] | no_license | mgh3326/programmers_algorithm | aa3afc91231550e1fec2d72d90e85b140f79d677 | b62f08ccccbdcac71e484d508985a5a9ce5f2434 | refs/heads/master | 2022-08-31T04:19:15.728666 | 2022-07-31T14:02:26 | 2022-07-31T14:02:26 | 201,747,526 | 0 | 0 | null | 2022-07-23T10:19:13 | 2019-08-11T10:02:15 | Python | UTF-8 | Python | false | false | 1,425 | py | def solution(msg):
answer = []
my_dict = {}
dict_index = 1
for i in ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U",
"V", "W", "X", "Y", "Z"]:
my_dict[i] = dict_index
dict_index += 1
temp_idx = 2
for idx, my_msg in enumerate(msg):
if temp_idx > 2:
temp_idx -= 1
continue
temp_idx = 1
while True:
if idx + temp_idx > len(msg):
answer.append(out_idx)
break
find_msg = msg[idx:idx + temp_idx]
if find_msg in my_dict:
temp_idx += 1
out_idx = my_dict[find_msg]
continue
else:
answer.append(out_idx)
my_dict[find_msg] = dict_index
dict_index += 1
break
return answer
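# Worked example (matches the test data below): solution("KAKAO") -> [11, 1, 27, 15]
# "K" -> 11 (add "KA" as 27), "A" -> 1 (add "AK" as 28), "KA" -> 27, "O" -> 15.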
msg_list = [
"K",
"KAKAO",
"TOBEORNOTTOBEORTOBEORNOT",
"ABABABABABABABAB"
]
return_list = [
[11],
[11, 1, 27, 15],
[20, 15, 2, 5, 15, 18, 14, 15, 20, 27, 29, 31, 36, 30, 32, 34],
[1, 2, 27, 29, 28, 31, 30]
]
for _input_data in zip(msg_list, return_list):
_0 = _input_data[0]
_r = _input_data[-1]
print(msg_list.index(_0))
result = solution(_0)
print(result)
print(_r)
if result == _r:
print("맞음")
else:
print("틀림")
| [
"[email protected]"
] | |
1d4c47022930b5b454743b7015afc67a9b6eab89 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2697/60747/258513.py | e919dcb1081dd6fbc54e37ba84292fa5f160b216 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | s=input()
s = s[1:len(s)-1].split(",")
# Level-order serialization: "null" marks a missing child.
nodes = [None if tok.strip() == "null" else int(tok) for tok in s]
def is_bst(i, lo, hi):
    # Node i must lie strictly inside the open interval (lo, hi).
    if i >= len(nodes) or nodes[i] is None:
        return True
    v = nodes[i]
    if (lo is not None and v <= lo) or (hi is not None and v >= hi):
        return False
    return is_bst(2 * i + 1, lo, v) and is_bst(2 * i + 2, v, hi)
print("true" if is_bst(0, None, None) else "false") | [
"[email protected]"
] | |
4f9a33939b547bc8b3418b780f4286fc835f7124 | de4e5524afba7331a6b00e0141bdf70e8d36e491 | /measure_mergers.py | 8b09b1cc59b89fc0ca2f86329575dbd9a57239c4 | [] | no_license | RaymondSimons/kinematic_mocks | 6a127f47386d82a396e95c0249554d65c87c6ec7 | 9fc966fd5a64dec0aa76e85b5de27ba2140899ec | refs/heads/master | 2020-12-25T15:17:51.995519 | 2018-02-16T01:27:47 | 2018-02-16T01:27:47 | 66,017,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,963 | py | import astropy
import pyfits
from glob import glob
import astrodendro
from astropy.convolution import Gaussian1DKernel, Gaussian2DKernel, convolve_fft
import photutils
from photutils import detect_sources
from photutils import *
from mpl_toolkits.axes_grid.anchored_artists import AnchoredText
from joblib import Parallel, delayed
from astropy.io import fits
import math
import numpy as np
from numpy import *
import matplotlib as mpl
import matplotlib.pyplot as plt
import os, sys, argparse
import random
from matplotlib.pyplot import *
plt.ioff()
def write_fits(fits_name, mom_data, merger_tag, x_stars_box , y_stars_box , z_stars_box, vx_stars_box , vy_stars_box , vz_stars_box):
print '\tGenerating fits for %s'%fits_name
master_hdulist = []
master_hdulist.append(mom_data['PRIMARY'])
colhdr = fits.Header()
master_hdulist.append(mom_data['nir_mstar_cat'])
master_hdulist.append(mom_data['nir_net_momentum'])
master_hdulist.append(mom_data['nir_net_momentum_s'])
master_hdulist.append(mom_data['stars_id'])
master_hdulist.append(fits.ImageHDU(data = np.stack((x_stars_box , y_stars_box , z_stars_box,)), header = colhdr, name = 'stars_xyz_box_position'))
master_hdulist.append(fits.ImageHDU(data = np.stack((vx_stars_box , vy_stars_box , vz_stars_box)), header = colhdr, name = 'stars_xyz_box_velocity'))
master_hdulist.append(mom_data['star_mass'])
master_hdulist.append(mom_data['star_age'])
master_hdulist.append(fits.ImageHDU(data = merger_tag, header = colhdr, name = 'star_merger_tag'))
print '\tSaving to ' + fits_name
thdulist = fits.HDUList(master_hdulist)
thdulist.writeto(fits_name, clobber = True)
return master_hdulist
def make_heatmap(ax, epsilon, zz_gas, min_z, max_z, weights = None, good = None, xlabel = 'z height (kpc)', ylabel = 'j$_z$/j$_{circ}$', bins_n = 200, eps_min = 2, eps_max = 2, segm = None, srt_labels = None, do_plot = True):
    if weights is None:
weights = np.ones(len(zz_gas))
if good:
epsilon = epsilon[good]
zz_gas = zz_gas[good]
weights = weights[good]
heatmap, xedges, yedges = np.histogram2d(epsilon, zz_gas, bins=[linspace(eps_min,eps_max,bins_n), linspace(min_z,max_z,bins_n)], weights = weights)
sorted_heatmap = argsort(heatmap.ravel())
vmn = 10.
vmx_scale = 0.998
vmx = heatmap.ravel()[sorted_heatmap[int(vmx_scale*len(sorted_heatmap))]]
heatmap = np.ma.masked_where((heatmap < 10), heatmap)
heatmap.data[heatmap.data < 10.] = nan
#heatmap.data[segm > 1] = 0
    if srt_labels is not None:
#for lbl in srt_labels[1:len(srt_labels)]:
# heatmap.data[segm == lbl] = 0
heatmap.data[segm!=srt_labels[0]] = 0
if do_plot:
ax.imshow(heatmap, interpolation = 'nearest', norm = mpl.colors.LogNorm(vmin = vmn, vmax = vmx), origin = 'lower', cmap = 'viridis')
kern = Gaussian2DKernel(1.)
kern.normalize()
heatmap_conv = convolve_fft(heatmap, kern)
heatmap_conv = np.ma.masked_where((heatmap_conv < 10), heatmap_conv)
heatmap_conv.data[heatmap_conv.data < 10.] = nan
X = arange(heatmap.data.shape[0])
Y = arange(heatmap.data.shape[1])
Z = log10(heatmap.data)
ax.contour(X, Y, Z, 8, colors = 'grey')
ax.set_yticks([0,bins_n/4,bins_n/2,3*bins_n/4,bins_n-1])
ax.set_xticks([0,bins_n/2,bins_n-1])
ax.set_xticklabels([format(yedges[0],'.0f'),format(yedges[bins_n/2],'.0f'),format(yedges[bins_n-1],'.0f')])
ax.set_yticklabels([''])
ax.set_yticklabels([format(xedges[0],'.0f'),format(xedges[bins_n/4],'.0f'), format(xedges[bins_n/2],'.0f'),format(xedges[3*bins_n/4.],'.0f'),format(xedges[bins_n-1],'.0f')])
#ax.set_xticklabels([''])
ax.set_xlabel(xlabel, fontsize = 15)
ax.set_ylabel(ylabel, fontsize = 20)
ax.minorticks_on()
ax.tick_params(axis="both", which='major', color='black', labelcolor='black',size=5, width=1.5)
ax.tick_params(axis="both", which='minor', color='black', labelcolor='black',size=3, width=1.5)
return ax, heatmap
else:
return heatmap
def add_at(ax, t, loc=2):
fp = dict(size=10)
_at = AnchoredText(t, loc=loc, prop=fp)
ax.add_artist(_at)
return _at
def weighted_avg_and_std(values, weights):
"""
Return the weighted average and standard deviation.
values, weights -- Numpy ndarrays with the same shape.
"""
    values = values[~isnan(weights)]
    weights = weights[~isnan(weights)]
average = np.average(values, weights=weights)
variance = np.average((values-average)**2, weights=weights) # Fast and numerically precise
return (average, math.sqrt(variance))
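# Worked example: weights (1, 1, 2) on values (1, 2, 3) give a mean of
# (1 + 2 + 2*3)/4 = 2.25 and a std of sqrt(2.75/4) ~= 0.83, i.e.
# weighted_avg_and_std(array([1., 2., 3.]), array([1., 1., 2.])) -> (2.25, 0.83)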
def find_thresh(mn, mx, npix, heatmap):
    nlabels = 0.
    segm_labels_prev = 0
    mr_prev2 = -99
    mr_prev = -99
    thresh = mn  # fallback so the function always returns a threshold, even if no split is found
kern = Gaussian2DKernel(0.2, x_size = 4*10, y_size = 4*10)
kern.normalize()
a = zeros(kern.array.shape)
    a[kern.array.shape[1]//2, kern.array.shape[1]//2] = 1
kern_2 = Gaussian1DKernel(8)
    a[:, kern.array.shape[1]//2] = convolve_fft(a[:, kern.array.shape[1]//2], kern_2)
a/=sum(a)
b = convolve_fft(a, kern)
b/=sum(b)
temp_heatmap = convolve_fft(heatmap.data, b)
temp_heatmap[temp_heatmap <= 0] = nan
for tt, t in enumerate(linspace(mn, mx, 1000)):
threshold = t
segm = detect_sources(log10(temp_heatmap), threshold = threshold, npixels = npix)
masses = array([sum(temp_heatmap[segm.array == lbl]) for lbl in arange(1, segm.nlabels+1)])
srt_masses = masses[argsort(masses)[::-1]]
if len(masses) > 1:
mass_ratio = srt_masses[0]/srt_masses[1]
if mr_prev == -99:
mr_prev = mass_ratio
thresh = threshold
if (log10(srt_masses[0]) > 7.5) & (log10(srt_masses[1]) > 7.5) & \
(mr_prev/mass_ratio > 10) & (mass_ratio < 100) & (nansum(srt_masses) > 0.50*nansum(temp_heatmap)):
thresh = threshold
mr_prev = mass_ratio
if len(masses) > 2:
mass_ratio2 = srt_masses[0]/srt_masses[2]
if mr_prev2 == -99:
mr_prev2 = mass_ratio2
thresh = threshold
if (log10(srt_masses[0]) > 7.5) & (log10(srt_masses[1]) > 7.5) & (mr_prev2/mass_ratio2 > 10) & (mass_ratio2 < 300) & (nansum(srt_masses) > 0.50*nansum(temp_heatmap)):
thresh = threshold
mr_prev2 = mass_ratio2
segm_labels_prev = segm.nlabels
return thresh, temp_heatmap
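# Usage sketch (mirrors the calls below): scan log10 surface-density thresholds
# and detect when the central source splits from a companion, e.g.
# thresh, smoothed = find_thresh(4, 8, 20, heatmap)
# segm = detect_sources(log10(smoothed), threshold=thresh, npixels=20)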
#This file will be used to store the profile of the momentum
def parse():
'''
Parse command line arguments
'''
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description='''\
                                Measure merger signatures for a VELA galaxy: detect distinct
                                sources in the stellar j_z/j_circ versus radius plane, tag the
                                stars of each source, and write per-snapshot catalogs and FITS files.
''')
parser.add_argument('gal', nargs='?', default=None, help='Galaxy to be analyzed')
#parser.add_argument('-s', '--snap_base', default='10MpcBox_csf512_',
# help='Base of the snapshots file names.')
#parser.add_argument('-d', '--distance', default=100000, type=float,
# help='Distance between cameras and the center of the galaxy (in [kpc]).')
#parser.add_argument('--no_export',action='store_true',
# help='Do not export data to fits for Sunrise.')
args = vars(parser.parse_args())
return args
#gals = ['VELA01', 'VELA06', 'VELA07', 'VELA11']
#gals = ['VELA20','VELA21']
#gals = ['VELA24', 'VELA27', 'VELA28']
#gals = ['VELA29', 'VELA33', 'VELA34']
#gals = ['VELA34']
#gals = ['VELA01', 'VELA06', 'VELA07', 'VELA11', 'VELA15', 'VELA17', 'VELA20', \
# 'VELA21', 'VELA24', 'VELA27', 'VELA28', 'VELA29', 'VELA33', 'VELA34']
#gals = ['VELA21']
#gals = ['VELA01', 'VELA07', 'VELA11']
#gals = ['VELA01']
#scales = arange(200, 550, 10)
#scales = arange(390, 550, 300)
def run_measure_merger(gal, scale, make_cat = True, do_plot = False):
eps_min = -2
eps_max = 2
rr_min = 0.
rr_max = 70
zz_min = -10
zz_max = 10
bins_n = 200
rec_cat = np.loadtxt('/nobackupp2/rcsimons/catalogs/recenter_%s.cat'%gal, skiprows = 12)
if make_cat:
m_cat = open('/nobackupp2/rcsimons/mergers/catalogs/individual/%s_%i.cat'%(gal,scale), 'w+')
print gal, '\t', scale
rec_c = rec_cat[(1000.*rec_cat[:,0]).astype('int') == scale]
if len(rec_c) > 0:
rec_c = rec_c[0]
max_nmergers = 15
masses_arr = zeros(max_nmergers)*nan
radii_arr = zeros(max_nmergers)*nan
jz_arr = zeros(max_nmergers)*nan
radii_std_arr = zeros(max_nmergers)*nan
jz_std_arr = zeros(max_nmergers)*nan
mn_box_pos = zeros((max_nmergers,3))*nan
mn_box_vel = zeros((max_nmergers,3))*nan
young_mn = nan
random.seed(1)
mom_fl = glob('/nobackupp2/rcsimons/momentum_measurements/%s/*%s*momentum.fits'%(gal, scale))
rec_fl = glob('/nobackupp2/rcsimons/recenter/%s_%s.fits'%(gal, scale))
if len(mom_fl) > 0:
mom_data = fits.open(mom_fl[0])
rec_data = fits.open(rec_fl[0])
epsilon_stars = mom_data['STARS_EPSILON'].data
rr_stars = mom_data['STARS_CYLINDRICAL_POSITION'].data[0]
zz_stars = mom_data['STARS_CYLINDRICAL_POSITION'].data[1]
r_stars = sqrt(sum(mom_data['STARS_XYZ_POSITION'].data**2., axis = 0))
epsilon_stars_digitized = np.digitize(epsilon_stars, bins = linspace(eps_min, eps_max, bins_n))
r_stars_digitized = np.digitize(r_stars, bins = linspace(rr_min, rr_max, bins_n))
empt_arr = np.empty((bins_n-1,bins_n-1), dtype = object)
for i in arange(bins_n-1):
good_r_stars = where(r_stars_digitized == i)[0]
r_stars_digitized_new = r_stars_digitized[good_r_stars]
epsilon_stars_digitized_new = epsilon_stars_digitized[good_r_stars]
for j in arange(bins_n-1):
good_eps_stars = good_r_stars[where(epsilon_stars_digitized_new == j)[0]]
empt_arr[i,j] = good_eps_stars
x_stars_box = rec_data['STARS_XYZ_POSITION_BOX'].data[0]
y_stars_box = rec_data['STARS_XYZ_POSITION_BOX'].data[1]
z_stars_box = rec_data['STARS_XYZ_POSITION_BOX'].data[2]
vx_stars_box = rec_data['STARS_XYZ_VELOCITY_BOX'].data[0]
vy_stars_box = rec_data['STARS_XYZ_VELOCITY_BOX'].data[1]
vz_stars_box = rec_data['STARS_XYZ_VELOCITY_BOX'].data[2]
star_age = mom_data['STAR_AGE'].data
star_mass= mom_data['STAR_MASS'].data
if do_plot:
plt.close('all')
fig = plt.figure(1, figsize = (25, 5))
clf()
ax1 = fig.add_subplot(151)
ax2 = fig.add_subplot(152)
ax3 = fig.add_subplot(153)
ax4 = fig.add_subplot(154)
ax5 = fig.add_subplot(155)
ax1.set_ylabel(r'$\frac{j_z}{j_{circ}}$', fontsize = 30, rotation = 0, labelpad = 20)
ax5.set_ylabel(r'$\frac{j_z}{j_{circ}}$', fontsize = 30, rotation = 0, labelpad = 20)
rand_arr = np.random.randint(0, len(r_stars), size = 40000)
ax1.scatter(r_stars[rand_arr], epsilon_stars[rand_arr], marker = 'o', s = star_mass[rand_arr]*1.e-3)
ax1.set_xlim(rr_min, rr_max)
ax1.set_ylim(eps_min, eps_max)
ax1.minorticks_on()
ax1.tick_params(axis="both", which='major', color='black', labelcolor='black',size=5, width=1.5)
ax1.tick_params(axis="both", which='minor', color='black', labelcolor='black',size=3, width=1.5)
ax2, heatmap = make_heatmap(ax2, epsilon_stars, r_stars, min_z = rr_min, max_z = rr_max, weights = star_mass,
good = None, xlabel = '', ylabel = '', bins_n = bins_n, eps_min = eps_min, eps_max = eps_max)
add_at(ax2, "stars", loc=1)
else:
heatmap = make_heatmap(None, epsilon_stars, r_stars, min_z = rr_min, max_z = rr_max, weights = star_mass,
good = None, xlabel = '', ylabel = '', bins_n = bins_n,
eps_min = eps_min, eps_max = eps_max, do_plot = do_plot)
npix = 20
#find_thresh
mn = 4
mx = 8
thresh, temp_heatmap = find_thresh(mn, mx, npix, heatmap)
segm = detect_sources(log10(temp_heatmap), threshold = thresh, npixels = npix)
m = segm.array
masked_m = np.ma.masked_where(m == 0, m)
masses = array([sum(temp_heatmap[segm.array == lbl]) for lbl in arange(1, segm.nlabels+1)])
st = argsort(masses)[::-1]
srt_masses = masses[st]
if sum(srt_masses)/nansum(heatmap.data) < 0.6:
mn = 4
mx = 6.5
thresh, temp_heatmap = find_thresh(mn, mx, npix, heatmap)
segm = detect_sources(log10(temp_heatmap), threshold = thresh, npixels = npix)
m = segm.array
masked_m = np.ma.masked_where(m == 0, m)
if do_plot:
pl = ax3.imshow(masked_m, cmap = 'Set1', origin = 'lower', interpolation = 'nearest', vmin = 0., vmax = 8)
ax3.set_xticklabels(ax2.get_xticklabels())
ax3.set_yticklabels(ax2.get_yticklabels())
ax3.set_xticks(ax2.get_xticks())
ax3.set_yticks(ax2.get_yticks())
ax1.set_xticks([0,35, 70])
ax1.set_yticks([-2, -1, 0, 1, 2])
ax3.minorticks_on()
ax3.tick_params(axis="both", which='major', color='black', labelcolor='black',size=5, width=1.5)
ax3.tick_params(axis="both", which='minor', color='black', labelcolor='black',size=3, width=1.5)
radii = array([weighted_avg_and_std(values = where(segm.array == lbl)[1], weights = temp_heatmap[segm.array == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
jz = array([weighted_avg_and_std(values = where(segm.array == lbl)[0], weights = temp_heatmap[segm.array == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
radii_std = array([weighted_avg_and_std(values = where(segm.array == lbl)[1], weights = temp_heatmap[segm.array == lbl])[1] for lbl in arange(1, segm.nlabels+1)])
jz_std = array([weighted_avg_and_std(values = where(segm.array == lbl)[0], weights = temp_heatmap[segm.array == lbl])[1] for lbl in arange(1, segm.nlabels+1)])
masses = array([sum(temp_heatmap[segm.array == lbl]) for lbl in arange(1, segm.nlabels+1)])
st = argsort(masses)[::-1]
srt_masses = masses[st]
srt_radii = radii[st]
srt_radii_std = radii_std[st]
srt_labels = segm.labels[st]
srt_jz = jz[st]
srt_jz_std = jz_std[st]
contours = segm.outline_segments()
masked_contours = np.ma.masked_where(contours == 0, contours)
#plot the correct stars
merger_tag = np.empty(len(r_stars))
for i in arange(200-1):
for j in arange(200-1):
for lll in srt_labels:
if masked_m[i,j] == lll:
                        id_list = empt_arr[j,i]  # indices swapped deliberately: empt_arr is (radius bin, epsilon bin) while masked_m is (epsilon bin, radius bin)
                        if id_list is not None and len(id_list) > 0:
merger_tag[id_list] = lll
rand_arr = np.random.randint(0, len(id_list), size = min(len(id_list), 1))
id_list = id_list[rand_arr]
if do_plot:
ax5.plot(r_stars[id_list], epsilon_stars[id_list], 'k.')
fits_name = '/nobackupp2/rcsimons/mergers/fits/'+gal+'_a0.'+str(scale)+'_starsmergers.fits'
master_hdulist = write_fits(fits_name, mom_data, merger_tag, x_stars_box , y_stars_box , z_stars_box, vx_stars_box , vy_stars_box , vz_stars_box)
mn_box_pos[0:len(masses),0] = array([weighted_avg_and_std(values = x_stars_box[merger_tag == lbl], weights = star_mass[merger_tag == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
mn_box_pos[0:len(masses),1] = array([weighted_avg_and_std(values = y_stars_box[merger_tag == lbl], weights = star_mass[merger_tag == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
mn_box_pos[0:len(masses),2] = array([weighted_avg_and_std(values = z_stars_box[merger_tag == lbl], weights = star_mass[merger_tag == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
mn_box_vel[0:len(masses),0] = array([weighted_avg_and_std(values = vx_stars_box[merger_tag == lbl], weights = star_mass[merger_tag == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
mn_box_vel[0:len(masses),1] = array([weighted_avg_and_std(values = vy_stars_box[merger_tag == lbl], weights = star_mass[merger_tag == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
mn_box_vel[0:len(masses),2] = array([weighted_avg_and_std(values = vz_stars_box[merger_tag == lbl], weights = star_mass[merger_tag == lbl])[0] for lbl in arange(1, segm.nlabels+1)])
if do_plot:
ax5.set_xlim(rr_min, rr_max)
ax5.set_ylim(eps_min, eps_max)
ax5.minorticks_on()
ax5.tick_params(axis="both", which='major', color='black', labelcolor='black',size=5, width=1.5)
ax5.tick_params(axis="both", which='minor', color='black', labelcolor='black',size=3, width=1.5)
ax5.set_xticks([0,35, 70])
ax5.set_yticks([-2, -1, 0, 1, 2])
#ax2.imshow(masked_contours, cmap = 'Set1', origin = 'lower', vmin = 0., vmax = 8)
ax3.annotate(r"%2s%5s%2s%.1f"%('M$_{sum}$','/M$_{tot}$','=',sum(srt_masses)/nansum(heatmap.data)), (107, 55), color = 'black', fontweight = 'bold')
if len(masses) > 1:
mass_ratio = srt_masses[0]/srt_masses[1]
ax3.annotate("%4s%6s%5s"%('m1','',''), (110, 40), color = cm.Set1(srt_labels[0]/8.), fontweight = 'bold')
ax3.annotate("%4s%6s%5s"%('','/m2',''), (110, 40), color = cm.Set1(srt_labels[1]/8.), fontweight = 'bold')
ax3.annotate("%4s%6s%5s%.1f"%('','','=',mass_ratio), (110, 40), color = 'black', fontweight = 'bold')
ax3.errorbar(srt_radii[0], srt_jz[0], xerr = srt_radii_std[0], yerr = srt_jz_std[0], fmt = 'o', color = 'black')
ax3.errorbar(srt_radii[1], srt_jz[1], xerr = srt_radii_std[1], yerr = srt_jz_std[1], fmt = 'o', color = 'black')
if len(masses) > 2:
mass_ratio = srt_masses[0]/srt_masses[2]
ax3.annotate("%4s%6s%5s"%('m1','',''), (110, 25), color = cm.Set1(srt_labels[0]/8.), fontweight = 'bold')
ax3.annotate("%4s%6s%5s"%('','/m3',''), (110, 25), color = cm.Set1(srt_labels[2]/8.), fontweight = 'bold')
ax3.annotate("%4s%6s%5s%.1f"%('','','=',mass_ratio), (110, 25), color = 'black', fontweight = 'bold')
ax3.errorbar(srt_radii[2], srt_jz[2], xerr = srt_radii_std[2], yerr = srt_jz_std[2], fmt = 'o', color = 'black')
if len(masses) > 3:
mass_ratio = srt_masses[0]/srt_masses[3]
ax3.annotate("%4s%6s%5s"%('m1','',''), (110, 10), color = cm.Set1(srt_labels[0]/8.), fontweight = 'bold')
ax3.annotate("%4s%6s%5s"%('','/m4',''), (110, 10), color = cm.Set1(srt_labels[3]/8.), fontweight = 'bold')
ax3.annotate("%4s%6s%5s%.1f"%('','','=',mass_ratio), (110, 10), color = 'black', fontweight = 'bold')
ax3.errorbar(srt_radii[3], srt_jz[3], xerr = srt_radii_std[3], yerr = srt_jz_std[3], fmt = 'o', color = 'black')
masses_arr[0:len(masses)] = srt_masses
radii_arr[0:len(masses)] = srt_radii*(rr_max - rr_min)/temp_heatmap.shape[1] +rr_min
jz_arr[0:len(masses)] = srt_jz*(eps_max - eps_min)/temp_heatmap.shape[1] +eps_min
radii_std_arr[0:len(masses)] = srt_radii_std*(rr_max - rr_min)/temp_heatmap.shape[1]
jz_std_arr[0:len(masses)] = srt_jz_std*(eps_max - eps_min)/temp_heatmap.shape[1]
#m = segm.array
#m_new = convolve_fft(m, kern).astype('int')
#ax4 = fig.add_subplot(144)
#masked_mmew = np.ma.masked_where(m_new == 0, m_new)
#ax4.imshow(masked_mmew, cmap = 'Set1', origin = 'lower', interpolation = 'nearest')
#ax4.set_xticklabels(ax2.get_xticklabels())
#ax4.set_yticklabels(ax2.get_yticklabels())
#ax4.set_xticks(ax2.get_xticks())
#ax4.set_yticks(ax2.get_yticks())
#ax4.minorticks_on()
#ax4.tick_params(axis="both", which='major', color='black', labelcolor='black',size=5, width=1.5)
#ax4.tick_params(axis="both", which='minor', color='black', labelcolor='black',size=3, width=1.5)
if do_plot:
ax4, heatmap_young = make_heatmap(ax4, epsilon_stars, r_stars, min_z = rr_min, max_z = rr_max, weights = star_mass,
good = where(star_age < 20), xlabel = '', ylabel = '', bins_n = bins_n,
eps_min = eps_min, eps_max = eps_max, segm = segm, srt_labels = srt_labels)
ax4.annotate("young stars (<20 Myr)\nof m1", (80, 170), color = 'blue', fontweight = 'bold')
else:
heatmap_young = make_heatmap(None, epsilon_stars, r_stars, min_z = rr_min, max_z = rr_max, weights = star_mass,
good = where(star_age < 20), xlabel = '', ylabel = '', bins_n = bins_n,
eps_min = eps_min, eps_max = eps_max, segm = segm, srt_labels = srt_labels, do_plot = do_plot)
#for lbl in srt_labels[1:len(srt_labels)]:
# heatmap_young[segm.array == lbl] = 0
#sm = nansum(heatmap_young.data, axis = 1)
#x = (arange(len(sm))-len(sm)/2.)*(eps_max-eps_min)/(1.*len(sm))
#young_mn, young_std = weighted_avg_and_std(values = x, weights = sm)
young_radii, young_radii_std = weighted_avg_and_std(values = where(heatmap_young!= 0)[1], weights = heatmap_young[heatmap_young!= 0])
young_jz, young_jz_std = weighted_avg_and_std(values = where(heatmap_young!= 0)[0], weights = heatmap_young[heatmap_young!= 0])
if do_plot:
ax4.errorbar(young_radii, young_jz, xerr = young_radii_std, yerr = young_jz_std, fmt = 'o', color = 'black')
young_rdi_mn = young_radii*(rr_max - rr_min)/temp_heatmap.shape[1] + rr_min
young_rdi_std = young_radii_std*(rr_max - rr_min)/temp_heatmap.shape[1]
young_jz_mn = young_jz*(eps_max - eps_min)/temp_heatmap.shape[1] + eps_min
young_jz_std = young_jz_std*(eps_max - eps_min)/temp_heatmap.shape[1]
if do_plot:
ax1.set_xlabel(r'radius (kpc)', fontsize = 18, rotation = 0, labelpad = 15)
ax2.set_xlabel(r'radius (kpc)', fontsize = 18, rotation = 0, labelpad = 15)
ax3.set_xlabel(r'radius (kpc)', fontsize = 18, rotation = 0, labelpad = 15)
ax4.set_xlabel(r'radius (kpc)', fontsize = 18, rotation = 0, labelpad = 15)
ax5.set_xlabel(r'radius (kpc)', fontsize = 18, rotation = 0, labelpad = 15)
fig.tight_layout()
savefig('/nobackupp2/rcsimons/mergers/figures/merger_maps/%s_%s.png'%(gal, scale), dpi = 300)
plt.close('all')
if make_cat:
#write young
        ngals = len(where(~isnan(masses_arr))[0])
m_cat.write('%.3i\t\t'%scale)
m_cat.write('%i\t\t'%ngals)
m_cat.write('%.2f\t'%young_jz_mn)
m_cat.write('%.2f\t'%young_jz_std)
m_cat.write('%.2f\t'%young_rdi_mn)
m_cat.write('%.2f\t'%young_rdi_std)
#write all
for m, mass in enumerate(masses_arr):
            if not isnan(mass):
m_cat.write('%.4f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t'%(mass/(1.e10), radii_arr[m], radii_std_arr[m], jz_arr[m], jz_std_arr[m],
mn_box_pos[m,0], mn_box_pos[m,1], mn_box_pos[m,2],
mn_box_vel[m,0], mn_box_vel[m,1], mn_box_vel[m,2]))
pass
else:
m_cat.write('%5s\t%5s\t%5s\t%5s\t%5s\t%5s\t%5s\t%5s\t%5s\t%5s\t%5s\t'%(mass, radii_arr[m], radii_std_arr[m],jz_arr[m], jz_std_arr[m],
mn_box_pos[m,0], mn_box_pos[m,1], mn_box_pos[m,2],
mn_box_vel[m,0], mn_box_vel[m,1], mn_box_vel[m,2]))
pass
if make_cat: m_cat.write('\n')
if make_cat: m_cat.close()
if __name__ == "__main__":
args = parse()
import yt
if args['gal'] is not None: gal = args['gal']
else: print 'no galaxy entered'
print "Generating Sunrise Input for: ", gal
scales = arange(200, 550, 10)
#scales = arange(450, 550, 10)
#scales = arange(350, 550, 50)
Parallel(n_jobs = -1, backend = 'threading')(delayed(run_measure_merger)(gal, scale) for scale in scales)
m_cat = open('/nobackupp2/rcsimons/mergers/catalogs/%s.cat'%gal, 'w+')
cat_hdrs = ['scale',
'number of central/satellites',
'mean jz/jcirc of young stars in central galaxy-- galaxy coordinates',
'std jz/jcirc of young stars in central galaxy-- galaxy coordinates',
'mean radial location of young stars in central galaxy (kpc)-- galaxy coordinates',
'std radial location of young stars in central galaxy (kpc)-- galaxy coordinates',
'central stellar mass (1.e10 Msun)',
'central mean radial location (kpc)-- galaxy coordinates',
'central std radial location (kpc)-- galaxy coordinates',
'central mean jz/jcirc-- galaxy coordinates',
'central std jz/jcirc-- galaxy coordinates',
'central mean x-position (kpc)-- simulation coordinates',
'central mean y-position (kpc)-- simulation coordinates',
'central mean z-position (kpc)-- simulation coordinates',
'central mean x-velocity (km/s)-- simulation coordinates',
'central mean y-velocity (km/s)-- simulation coordinates',
'central mean z-velocity (km/s)-- simulation coordinates',
'satellite 1 stellar mass (1.e10 Msun)',
'satellite 1 mean radial location (kpc)-- galaxy coordinates',
'satellite 1 std radial location (kpc)-- galaxy coordinates',
'satellite 1 mean jz/jcirc-- galaxy coordinates',
'satellite 1 std jz/jcirc-- galaxy coordinates',
'satellite 1 mean x-position (kpc)-- simulation coordinates',
'satellite 1 mean y-position (kpc)-- simulation coordinates',
'satellite 1 mean z-position (kpc)-- simulation coordinates',
'satellite 1 mean x-velocity (km/s)-- simulation coordinates',
'satellite 1 mean y-velocity (km/s)-- simulation coordinates',
'satellite 1 mean z-velocity (km/s)-- simulation coordinates',
'satellite 1 stellar mass (1.e10 Msun)',
'satellite 2 mean radial location (kpc)-- galaxy coordinates',
'satellite 2 std radial location (kpc)-- galaxy coordinates',
'satellite 2 mean jz/jcirc-- galaxy coordinates',
'satellite 2 std jz/jcirc-- galaxy coordinates',
'satellite 2 mean x-position (kpc)-- simulation coordinates',
'satellite 2 mean y-position (kpc)-- simulation coordinates',
'satellite 2 mean z-position (kpc)-- simulation coordinates',
'satellite 2 mean x-velocity (km/s)-- simulation coordinates',
'satellite 2 mean y-velocity (km/s)-- simulation coordinates',
'satellite 2 mean z-velocity (km/s)-- simulation coordinates',
'etc.']
for i in arange(len(cat_hdrs)):
        if i < len(cat_hdrs) - 1:
m_cat.write('#(%i) %s\n'%(i, cat_hdrs[i]))
else:
m_cat.write('#(%i:...) %s\n\n\n\n'%(i, cat_hdrs[i]))
m_cat.write('\n\n\n\n')
for s, scale in enumerate(scales):
cat_s = np.loadtxt('/nobackupp2/rcsimons/mergers/catalogs/individual/%s_%i.cat'%(gal,scale), dtype = 'str', delimiter = 'notarealword')
if size(cat_s) > 0: m_cat.write('%s\n'%cat_s)
else: os.system('rm /nobackupp2/rcsimons/mergers/catalogs/individual/%s_%i.cat'%(gal,scale))
m_cat.close()
| [
"[email protected]"
] | |
62d60230c1a889d8e64f09dc716744bb275ea099 | 0b79d66196e9bef7cf81c0c17b6baac025b0d7f1 | /apps/property/inventory/models/trans.py | 3878f3289ac3505812de4ef1c51e3ecffe04347e | [] | no_license | tsevindik/sis-back | bf0244a803ba9432980844ff35498780ac664564 | 4ba942fe38cc150c70898db4daf211213b84a61a | refs/heads/master | 2021-03-24T09:35:49.199712 | 2017-01-25T08:19:37 | 2017-01-25T08:19:37 | 73,540,756 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 640 | py | from django.utils.translation import ugettext_lazy as _
from django.db import models
from utils.models import trans as trans_models
from . import main
class InventoryTypeTrans(trans_models.Translation):
neutral = models.ForeignKey(
main.Inventory
)
name = models.CharField(
max_length=50,
verbose_name=_("İsim")
)
class InventoryTrans(trans_models.Translation):
neutral = models.ForeignKey(
main.Inventory
)
name = models.CharField(
max_length=150,
verbose_name=_("İsim")
)
description = models.TextField(
verbose_name=_("Açıklama")
)
| [
"[email protected]"
] | |
f1e72430ddeb7762b293af65083afe0d2fab8a65 | 21b4585de4a0eacdb0d1e488dfae53684bb6564e | /62. Unique Paths.py | e249ce1880d51a1f8063a5a08d7fbd9ee3cb1af7 | [] | no_license | gaosq0604/LeetCode-in-Python | de8d0cec3ba349d6a6462f66286fb3ddda970bae | 57ec95779a4109008dbd32e325cb407fcbfe5a52 | refs/heads/master | 2021-09-15T23:14:14.565865 | 2018-06-12T16:30:40 | 2018-06-12T16:30:40 | 113,881,474 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | class Solution:
def uniquePaths(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
"""
res=[1]*n
for _ in range(m-1):
for i in range(1,n):
res[i]+=res[i-1]
return res[-1] | [
"[email protected]"
] | |
57486d8950198e14b5fe481c20ed8c146bb9095e | a94089d207f9efc78d6d75736ba443f7b2d5f5b4 | /authsys/migrations/0001_initial.py | 756703c700f78e0a83def8299be51e8e8822e99c | [] | no_license | Larionov0/PyTest | 217526fcd19785d886d74d638173d3fc5f963b26 | a4ab75d4868845890ca2ffc117230a0b346f9c43 | refs/heads/master | 2023-02-18T04:09:16.745759 | 2021-01-15T14:50:26 | 2021-01-15T14:50:26 | 217,780,040 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,869 | py | # Generated by Django 2.2.6 on 2019-10-28 21:43
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('catalog', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Achievement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='name', max_length=100)),
('condition', models.TextField(default='text that describes achievement')),
],
),
migrations.CreateModel(
name='FailedPack',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(default=datetime.datetime(2019, 10, 28, 23, 43, 27, 54674))),
('pack', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, to='catalog.Pack')),
],
),
migrations.CreateModel(
name='MoneyAchievement',
fields=[
('achievement_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='authsys.Achievement')),
('paisons', models.IntegerField(default=1000000)),
],
bases=('authsys.achievement',),
),
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('paisons', models.IntegerField(default=0)),
('achievements', models.ManyToManyField(blank=True, to='authsys.Achievement')),
('completed_packs', models.ManyToManyField(blank=True, related_name='completed_users', to='catalog.Pack')),
('failed_packs', models.ManyToManyField(blank=True, related_name='failed_users', to='authsys.FailedPack')),
('user', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='PacksAchievement',
fields=[
('achievement_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='authsys.Achievement')),
('pack_set', models.ManyToManyField(to='catalog.Pack')),
],
bases=('authsys.achievement',),
),
]
| [
"[email protected]"
] | |
d006d0a8f2aa0dff9f11db31950f1157a03e345e | ad13583673551857615498b9605d9dcab63bb2c3 | /output/instances/nistData/atomic/unsignedByte/Schema+Instance/NISTXML-SV-IV-atomic-unsignedByte-minExclusive-2-2.py | 22a0009905fe1daee2186d1122a0e3f545d55a05 | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 302 | py | from output.models.nist_data.atomic.unsigned_byte.schema_instance.nistschema_sv_iv_atomic_unsigned_byte_min_exclusive_2_xsd.nistschema_sv_iv_atomic_unsigned_byte_min_exclusive_2 import NistschemaSvIvAtomicUnsignedByteMinExclusive2
obj = NistschemaSvIvAtomicUnsignedByteMinExclusive2(
value=190
)
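# Illustrative serialization of the generated instance (assumes xsdata's
# XmlSerializer is available in this environment):
# from xsdata.formats.dataclass.serializers import XmlSerializer
# print(XmlSerializer().render(obj))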
| [
"[email protected]"
] | |
99f39851b384d27161ca03df0afa00bc1feff198 | 95124b283d7f67b0a1b9737c921a1c80c3390b56 | /cookbook/migrations/0004_alter_chef_options.py | 5ae4bb611914b8670b26cbf4c4da0f351d5d85b4 | [] | no_license | Saviodiow95/Recipes | ad905605ee9f9c2fce2c2d7e3ed75e1b5dfa79d4 | 0e88968f92dde012c3eee3518367d7d9950d856a | refs/heads/main | 2023-08-28T05:03:03.798398 | 2021-10-30T01:39:53 | 2021-10-30T01:39:53 | 422,679,586 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | # Generated by Django 3.2.8 on 2021-10-28 14:06
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0003_auto_20211028_1048'),
]
operations = [
migrations.AlterModelOptions(
name='chef',
options={'verbose_name': 'Chef', 'verbose_name_plural': 'Chefes'},
),
]
| [
"[email protected]"
] | |
4a82e0f926a3e0cd84548bb25cce801091d6ee31 | fe5b4e7af9a4504437d33734de0ea62baf454b69 | /Learning/Python/Practices/mytimer.py | 2ee9f967be12962d3f12cf066fefd1e21540ae51 | [] | no_license | FelicxFoster/Sources | 937f2936b0fa3eef9dd2bbbde09e7f44755b8a8a | 3750c393088c281c000228d84fe619ba321bd5bc | refs/heads/master | 2020-04-22T09:37:05.191325 | 2016-08-06T07:02:50 | 2016-08-06T07:02:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 632 | py | import time
class Mytimer:
def __init__(self):
self.begin = 0
self.time = 0
def __repr__(self):
if self.time == 0:
return "请先调用stop结束计时!"
else:
return "总共运行了%.5f秒" % self.time
def start(self):
print("开始计时...")
self.begin = time.clock()
def stop(self):
if self.begin == 0:
print("请先调用start开始计时!")
else:
print("计时结束.")
self.time = time.clock() - self.begin
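# Illustrative usage (time.clock was removed in Python 3.8; this class assumes
# an older interpreter where it still exists):
if __name__ == '__main__':
    timer = Mytimer()
    timer.start()
    for _ in range(1000000):
        pass
    timer.stop()
    print(timer)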
| [
"[email protected]"
] | |
682690ba8a3c0eb20c5c8e1b7f765ac4fcbdb026 | 51345d1d33fbee88a7a3435d41b07333f2901c10 | /g12d/contraparte/migrations/0007_auto__add_output.py | 78dcc02443dfe9f32b670219f523ad116786a46a | [] | no_license | CARocha/trocaire-gob | 96832a02c1c52a6d8fb189fe0b81ae5322529e4a | 3c93ef3a55e589a17bd3de2c6d71fec860db2e07 | refs/heads/master | 2021-01-17T22:27:43.391145 | 2015-05-19T16:55:57 | 2015-05-19T16:55:57 | 4,053,839 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,830 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Output'
db.create_table('contraparte_output', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('date', self.gf('django.db.models.fields.DateField')()),
('time', self.gf('django.db.models.fields.TimeField')()),
('params', self.gf('django.db.models.fields.TextField')()),
('comment', self.gf('django.db.models.fields.TextField')(default='', blank=True)),
('file', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('contraparte', ['Output'])
def backwards(self, orm):
# Deleting model 'Output'
db.delete_table('contraparte_output')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'contraparte.actividad': {
'Meta': {'object_name': 'Actividad'},
'acuerdos': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'adultos': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'aprendizaje': ('django.db.models.fields.IntegerField', [], {}),
'autoridades': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'comentarios': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'comunidad': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': "orm['lugar.Comunidad']"}),
'efectividad': ('django.db.models.fields.IntegerField', [], {}),
'ejes_transversales': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trocaire.EjeTransversal']"}),
'empoderamiento': ('django.db.models.fields.IntegerField', [], {}),
'estudiantes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'fecha': ('django.db.models.fields.DateTimeField', [], {}),
'foto1': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'foto2': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'foto3': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'hombres': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jovenes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lideres': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'maestros': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mes': ('django.db.models.fields.IntegerField', [], {}),
'miembros': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mujeres': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'municipio': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': "orm['lugar.Municipio']"}),
'ninos': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'no_dato': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'no_dato1': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'nombre_actividad': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'organizacion': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trocaire.Organizacion']"}),
'participacion': ('django.db.models.fields.IntegerField', [], {}),
'persona_organiza': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contraparte.Organizador']"}),
'pobladores': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'proyecto': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': "orm['contraparte.Proyecto']"}),
'relevancia': ('django.db.models.fields.IntegerField', [], {}),
'resultado': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': "orm['contraparte.Resultado']"}),
'tema_actividad': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trocaire.Tema']"}),
'tipo_actividad': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trocaire.TipoActividad']"}),
'video': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
'contraparte.organizador': {
'Meta': {'object_name': 'Organizador'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'contraparte.output': {
'Meta': {'object_name': 'Output'},
'comment': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'file': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'contraparte.proyecto': {
'Meta': {'object_name': 'Proyecto'},
'aporta_trocaire': ('django.db.models.fields.IntegerField', [], {}),
'codigo': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'contacto': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'finalizacion': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inicio': ('django.db.models.fields.DateField', [], {}),
'monto_contrapartida': ('django.db.models.fields.IntegerField', [], {}),
'monto_trocaire': ('django.db.models.fields.IntegerField', [], {}),
'municipios': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['lugar.Municipio']", 'symmetrical': 'False'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'organizacion': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trocaire.Organizacion']"})
},
'contraparte.resultado': {
'Meta': {'object_name': 'Resultado'},
'aporta_a': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trocaire.ResultadoPrograma']"}),
'descripcion': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre_corto': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'proyecto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contraparte.Proyecto']"})
},
'lugar.comunidad': {
'Meta': {'object_name': 'Comunidad'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipio': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lugar.Municipio']"}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'lugar.departamento': {
'Meta': {'object_name': 'Departamento'},
'extension': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True', 'db_index': 'True'})
},
'lugar.municipio': {
'Meta': {'ordering': "['departamento__nombre']", 'object_name': 'Municipio'},
'departamento': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lugar.Departamento']"}),
'extension': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'latitud': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'longitud': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True', 'db_index': 'True'})
},
'trocaire.ejetransversal': {
'Meta': {'object_name': 'EjeTransversal'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
'trocaire.organizacion': {
'Meta': {'object_name': 'Organizacion'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'contacto': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'direccion': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'}),
'historia': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'nombre_corto': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'telefono': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '12', 'blank': 'True'}),
'web': ('django.db.models.fields.URLField', [], {'default': "'www.example.com'", 'max_length': '200', 'blank': 'True'})
},
'trocaire.resultadoprograma': {
'Meta': {'object_name': 'ResultadoPrograma'},
'descripcion': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre_corto': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'trocaire.tema': {
'Meta': {'object_name': 'Tema'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
'trocaire.tipoactividad': {
'Meta': {'object_name': 'TipoActividad'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '150'})
}
}
complete_apps = ['contraparte']
| [
"[email protected]"
] | |
0743849f184d5055155ee69ce3c1a52ebb1b4098 | cdb7bb6215cc2f362f2e93a040c7d8c5efe97fde | /Q/QueriesonaPermutationWithKey.py | 5f50edc964410d94953829c8957acc876e3cc808 | [] | no_license | bssrdf/pyleet | 8861bbac06dfe0f0f06f6ad1010d99f8def19b27 | 810575368ecffa97677bdb51744d1f716140bbb1 | refs/heads/master | 2023-08-20T05:44:30.130517 | 2023-08-19T21:54:34 | 2023-08-19T21:54:34 | 91,913,009 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,517 | py | '''
-Medium-
Given the array queries of positive integers between 1 and m, you have to process all queries[i]
(from i=0 to i=queries.length-1) according to the following rules:
In the beginning, you have the permutation P=[1,2,3,...,m].
For the current i, find the position of queries[i] in the permutation P (indexing from 0) and then
move it to the beginning of the permutation P. Notice that the position of queries[i] in P is
the result for queries[i].
Return an array containing the result for the given queries.
Example 1:
Input: queries = [3,1,2,1], m = 5
Output: [2,1,2,1]
Explanation: The queries are processed as follows:
For i=0: queries[i]=3, P=[1,2,3,4,5], position of 3 in P is 2, then we move 3 to the beginning of P resulting in P=[3,1,2,4,5].
For i=1: queries[i]=1, P=[3,1,2,4,5], position of 1 in P is 1, then we move 1 to the beginning of P resulting in P=[1,3,2,4,5].
For i=2: queries[i]=2, P=[1,3,2,4,5], position of 2 in P is 2, then we move 2 to the beginning of P resulting in P=[2,1,3,4,5].
For i=3: queries[i]=1, P=[2,1,3,4,5], position of 1 in P is 1, then we move 1 to the beginning of P resulting in P=[1,2,3,4,5].
Therefore, the array containing the result is [2,1,2,1].
Example 2:
Input: queries = [4,1,2,2], m = 4
Output: [3,1,2,0]
Example 3:
Input: queries = [7,5,5,8,3], m = 8
Output: [6,5,0,7,5]
Constraints:
1 <= m <= 10^3
1 <= queries.length <= m
1 <= queries[i] <= m
'''
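# Added baseline (not part of the original file): a literal simulation of the
# statement with a plain Python list. perm.index() makes each query O(m); the
# Fenwick-tree solution below returns the same answers in O(log m) per query.
class NaiveSolution(object):
    def processQueries(self, queries, m):
        perm = list(range(1, m + 1))
        ans = []
        for q in queries:
            pos = perm.index(q)            # position of q in the current permutation
            ans.append(pos)
            perm.insert(0, perm.pop(pos))  # move q to the front
        return ans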
class Fenwick:
    """1-indexed binary indexed tree: point update, prefix-sum query."""
def __init__(self, n):
sz = 1
while sz <= n:
sz *= 2
self.size = sz
self.data = [0] * sz
def sum(self, i):
s = 0
while i > 0:
s += self.data[i]
i -= i & -i
return s
def add(self, i, x):
while i < self.size:
self.data[i] += x
i += i & -i
class Solution(object):
def processQueries(self, queries, n):
"""
:type queries: List[int]
        :type n: int
:rtype: List[int]
"""
        # BIT over 2n slots: positions n+1..2n hold the initial permutation;
        # positions 1..n are free front slots, filled right-to-left as values move.
        fenw = Fenwick(2 * n)
        vimap = {}  # value -> its current position in the BIT
        for i in range(1, n + 1):
            fenw.add(i + n, 1)
            vimap[i] = i + n
        cur = n  # next free front slot
        ans = []
        for q in queries:
            i = vimap.pop(q)
            rank = fenw.sum(i - 1)  # number of values currently ahead of q
            ans.append(rank)
            vimap[q] = cur
            fenw.add(i, -1)   # remove q from its old slot
            fenw.add(cur, 1)  # place q at the front
            cur -= 1
        return ans
if __name__ == "__main__":
    print(Solution().processQueries([3,1,2,1], 5))  # expected: [2, 1, 2, 1] | [
"[email protected]"
] | |
05542e43a78dc07d7935c775597e82a11f69e451 | 9b32b795e45a572ae644ab515224b3c3f9836094 | /notify.py | 18ee6d1d22a7cc908e1e7ce990b0af5cce9a975a | [] | no_license | Ginkooo/notifier | 1a3cd49189400d5a25a95cc3e1518aaf88abd948 | fec05e305971e6d1bdff85139465b0b48483df21 | refs/heads/master | 2021-01-22T22:02:42.366126 | 2017-03-26T19:07:38 | 2017-03-26T19:07:38 | 85,500,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 995 | py | #!/usr/bin/python
import sys
import os
import socket
from socket import AF_INET, SOCK_STREAM
CONFIG_FILE = os.getenv('NOTIFY_CONFIG')
if not CONFIG_FILE:
print('You have no NOTIFY_CONFIG env variable set')
exit()
if len(sys.argv) < 2:
print('Too few arguments')
exit()
HOST = None
PORT = None
with open(CONFIG_FILE, 'r') as f:
    for line in f:
        c = line.strip().split('=')
        if c[0] == 'PORT':
            PORT = int(c[1])
        if c[0] == 'HOST':
            HOST = c[1]

if HOST is None or PORT is None:
    print('NOTIFY_CONFIG must define both HOST and PORT')
    exit()
def send_message(msg, host, port):
    sock = socket.socket(AF_INET, SOCK_STREAM)
    try:
        sock.connect((host, port))
        sock.sendall(msg)
    finally:
        sock.close()  # don't leak the connection

def send_and_recv(msg):
    sock = socket.socket(AF_INET, SOCK_STREAM)
    try:
        sock.connect((HOST, PORT))
        sock.sendall(msg)
        return sock.recv(1024)
    finally:
        sock.close()
msg = ' '.join(sys.argv[1:]).encode('utf-8')
sys.stdout.flush()
if msg == b'GET':
resp = send_and_recv(msg)
print(resp)
quit()
send_message(msg, HOST, PORT)
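# Example usage (added; assumes a notifier server is listening at the
# HOST/PORT read from the NOTIFY_CONFIG file):
#   $ export NOTIFY_CONFIG=~/.notifyrc   # file with HOST=... and PORT=... lines
#   $ ./notify.py build finished         # sends b'build finished' to the server
#   $ ./notify.py GET                    # prints whatever the server sends back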
| [
"[email protected]"
] | |
62a6f9325e708567dfd8ff11116c7fc187205b63 | 3c81687bb6cd84ea72dac1a160660dc9ee8d59b4 | /171.excel表列序号.py | 68526f53563f188ce8a9a0efdac7bc3cb7e76382 | [] | no_license | whuhenry/leetcode_solution | 59751b6f736117ce4c4d71c347161c18ffb86293 | 74e5add753a918437879154cbd3048ed47cc2e88 | refs/heads/master | 2023-02-09T06:06:06.623680 | 2023-02-04T06:43:42 | 2023-02-04T06:43:42 | 184,874,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | #
# @lc app=leetcode.cn id=171 lang=python3
#
# [171] Excel Sheet Column Number
#
# @lc code=start
class Solution:
def titleToNumber(self, s: str) -> int:
idx = 0
for ch in s:
idx = idx * 26 + ord(ch) - ord('A') + 1
return idx
# @lc code=end
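# Quick sanity check (added example): a column title is a base-26 numeral
# with digits A=1 .. Z=26.
if __name__ == "__main__":
    s = Solution()
    print(s.titleToNumber("A"))   # 1
    print(s.titleToNumber("AB"))  # 28
    print(s.titleToNumber("ZY"))  # 701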
| [
"[email protected]"
] | |
ce0c753cb1ba1ff73a477842dc11a7b50abf1e6f | 27044bb88c709e7ffa5278afc7c81f37e0b6e9e4 | /venv/lib/python3.10/site-packages/pygments/styles/pastie.py | 743f1d562108826edd620ec482fa3aadaecdf8c1 | [] | no_license | mesaye85/organika_Inventory_project-with-React- | 48c93efb6aba64d5e9310c57e4c5a06d3f2cc502 | 6aa3d29e8be3e22b8dc9163d558cdcc8c9122fd1 | refs/heads/main | 2023-02-19T10:11:47.880754 | 2023-02-14T01:55:43 | 2023-02-14T01:55:43 | 298,101,393 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | /home/runner/.cache/pip/pool/85/3e/15/ac45908932b6e9ec1eade05fd76e1243a9ef0515d05354106bc0c66fe2 | [
"[email protected]"
] | |
102a33fa88cc761820a152d7110ca283f14f05b7 | 9d0195aa83cc594a8c61f334b90375961e62d4fe | /JTTest/SL7/CMSSW_10_2_15/src/dataRunA/nano1852.py | 24c577e168a8d33484c328fe58ee91adb63ae33e | [] | no_license | rsk146/CMS | 4e49592fc64f6438051544c5de18598db36ed985 | 5f8dab8c59ae556598b9747b52b88205fffc4dbe | refs/heads/master | 2022-12-01T03:57:12.126113 | 2020-08-04T03:29:27 | 2020-08-04T03:29:27 | 284,863,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,292 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: nanoAOD_jetToolbox_cff -s NANO --data --eventcontent NANOAOD --datatier NANOAOD --no_exec --conditions 102X_dataRun2_Sep2018Rereco_v1 --era Run2_2018,run2_nanoAOD_102Xv1 --customise_commands=process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False))) --customise JMEAnalysis/JetToolbox/nanoAOD_jetToolbox_cff.nanoJTB_customizeMC --filein /users/h2/rsk146/JTTest/SL7/CMSSW_10_6_12/src/ttbarCutTest/dataReprocessing/0004A5E9-9F18-6B42-B31D-4206406CE423.root --fileout file:jetToolbox_nano_datatest.root
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('NANO',eras.Run2_2018,eras.run2_nanoAOD_102Xv1)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
process.load('PhysicsTools.NanoAOD.nano_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
# Input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('root://cms-xrd-global.cern.ch//store/data/Run2018A/EGamma/MINIAOD/17Sep2018-v2/60000/5CE5E9F9-932F-F54C-AFEB-D8C69612ADF2.root'),  # 'file:' prefix dropped: it would make CMSSW treat the xrootd URL as a local path
secondaryFileNames = cms.untracked.vstring()
)
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('nanoAOD_jetToolbox_cff nevts:1'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.NANOAODoutput = cms.OutputModule("NanoAODOutputModule",
compressionAlgorithm = cms.untracked.string('LZMA'),
compressionLevel = cms.untracked.int32(9),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('NANOAOD'),
filterName = cms.untracked.string('')
),
fileName = cms.untracked.string('file:jetToolbox_nano_datatest1852.root'),
outputCommands = process.NANOAODEventContent.outputCommands
)
# Additional output definition
# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '102X_dataRun2_Sep2018Rereco_v1', '')
# Path and EndPath definitions
process.nanoAOD_step = cms.Path(process.nanoSequence)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.NANOAODoutput_step = cms.EndPath(process.NANOAODoutput)
# Schedule definition
process.schedule = cms.Schedule(process.nanoAOD_step,process.endjob_step,process.NANOAODoutput_step)
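# How this auto-generated configuration is typically executed (standard CMSSW
# workflow; the exact release area is an assumption):
#   cd CMSSW_10_2_15/src && cmsenv
#   cmsRun nano1852.py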
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)
# customisation of the process.
# Automatic addition of the customisation function from PhysicsTools.NanoAOD.nano_cff
from PhysicsTools.NanoAOD.nano_cff import nanoAOD_customizeData
#call to customisation function nanoAOD_customizeData imported from PhysicsTools.NanoAOD.nano_cff
process = nanoAOD_customizeData(process)
# Automatic addition of the customisation function from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff import nanoJTB_customizeMC
#call to customisation function nanoJTB_customizeMC imported from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
process = nanoJTB_customizeMC(process)
# End of customisation functions
# Customisation from command line
process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False)))
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion | [
"[email protected]"
] | |
48405c6209f4df38f3a8111edb01761a4d084dc0 | 0bce7412d58675d6cc410fa7a81c294ede72154e | /Python3/0060. Permutation Sequence.py | 5c9cf1b5e02c8fb54da3ceaa99f1bbef418d215b | [] | no_license | yang4978/LeetCode | 9ddf010b0f1dda32cddc7e94c3f987509dea3214 | 6387d05b619d403414bad273fc3a7a2c58668db7 | refs/heads/master | 2022-01-15T04:21:54.739812 | 2021-12-28T12:28:28 | 2021-12-28T12:28:28 | 182,653,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | class Solution:
def getPermutation(self, n: int, k: int) -> str:
        import math  # needed when running this file outside the LeetCode judge
        num = [str(i) for i in range(1,n+1)]
res = ''
        step = math.factorial(n)           # permutations still possible
        while n:
            step //= n                     # permutations per choice of next digit
            n -= 1
            idx = math.ceil(k/step)-1      # block of size `step` that k falls into
            res += num.pop(idx)
            k -= idx*step                  # offset of k inside the chosen block
        return res
# num = [str(i) for i in range(1,n+1)]
# res = ''
# fac = math.factorial(n-1)
# n -= 1
# while k:
# if fac>k:
# fac//=n
# n -= 1
# res += num.pop(0)
# if fac==k:
# res += num.pop(0) + "".join(reversed(num))
# return res
# else:
# idx = math.ceil(k/fac)-1
# res += num.pop(idx)
# k -= idx*fac
# fac //= n
# n -= 1
# return res
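        # Added examples (k is 1-indexed over the n! lexicographic permutations):
        #   Solution().getPermutation(3, 3) -> "213"
        #   Solution().getPermutation(4, 9) -> "2314"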
| [
"[email protected]"
] | |
7ae72c913b5d5163ecd86671e670bb91b49497f5 | 9816f1460de340aac3de692a780197dc62c9056c | /manager_proj/manager_proj/wsgi.py | 19d6003fb648413f76e20639548702538df00179 | [] | no_license | LovelyHoltz/LovelyHoltz-django_courses | eb1a72040f0afab021b2f1b252ef874e1ba15576 | 7a933e86354a7f062c02ccdb393a1080cabc4eee | refs/heads/master | 2020-03-31T15:35:36.640226 | 2018-10-09T22:36:19 | 2018-10-09T22:36:19 | 152,342,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | """
WSGI config for manager_proj project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "manager_proj.settings")
application = get_wsgi_application()
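# Typical deployment usage (a sketch; the server choice and bind address are
# assumptions, not part of this project):
#   gunicorn manager_proj.wsgi:application --bind 0.0.0.0:8000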
| [
"[email protected]"
] | |
a9dfdae8d0e8118e70f99ba34f2c0fbf177aa6a2 | ff6248be9573caec94bea0fa2b1e4b6bf0aa682b | /StudentProblem/10.21.11.29/3/1569572235.py | 642c20c838b05b050f2e4c92c306eac8aa43970e | [] | no_license | LennartElbe/codeEvo | 0e41b1a7705204e934ef71a5a28c047366c10f71 | e89b329bc9edd37d5d9986f07ca8a63d50686882 | refs/heads/master | 2020-12-21T17:28:25.150352 | 2020-03-26T10:22:35 | 2020-03-26T10:22:35 | 236,498,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 424 | py | import functools
import typing
import string
import random
import pytest
def leap(year):
    n = year
    if n % 4 == 0:
        if n % 100 == 0:
            # century years are leap years only when divisible by 400
            return n % 400 == 0
        return True
    return False
######################################################################
## Solution Part 2 (Tests)
######################################################################
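# Minimal pytest cases (added; the test section above was left empty):
def test_leap():
    assert leap(2020) is True   # divisible by 4, not by 100
    assert leap(1900) is False  # century year not divisible by 400
    assert leap(2000) is True   # divisible by 400
    assert leap(2019) is False  # not divisible by 4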
| [
"[email protected]"
] | |
a5207bcd16f7acc0c7a5d00c75fe7233a5b232e4 | be838a8cc823ee2a1056aa94ac002462092b2ce0 | /src/beheerconsole/conf/ci.py | bd973df1e4c2e6bf8a02adecfeda8694bc5e8f8d | [] | no_license | GemeenteUtrecht/beheerconsole | 702b2f18dafa8602396cca7944fea089b1e0678a | 21ad66fa67ac23a8bd1e50d907fa09bd6ea9b3f1 | refs/heads/master | 2022-12-14T22:07:03.466320 | 2021-04-12T14:51:17 | 2021-04-12T14:51:17 | 225,420,641 | 0 | 0 | null | 2022-12-11T15:42:08 | 2019-12-02T16:31:58 | Python | UTF-8 | Python | false | false | 572 | py | """
Continuous integration settings module.
"""
import logging
import os
os.environ.setdefault("SECRET_KEY", "dummy")
from .includes.base import * # noqa isort:skip
CACHES = {
"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
# See: https://github.com/jazzband/django-axes/blob/master/docs/configuration.rst#cache-problems
"axes": {"BACKEND": "django.core.cache.backends.dummy.DummyCache"},
}
LOGGING = None # Quiet is nice
logging.disable(logging.CRITICAL)
ENVIRONMENT = "CI"
#
# Django-axes
#
AXES_BEHIND_REVERSE_PROXY = False
| [
"[email protected]"
] | |
0d1c5da15b52464aa619306e1fe553edbe3df3b6 | c632e6ba36598f34e6336d1cb5e2411c1e571da8 | /simple-rop/grader.py | 31e162c9f13abff79dd42b8e5051a0d3dc62f2b7 | [] | no_license | adbforlife/easyctf-2017-problems | 0f7e229d884d6d66c3d0ae1226e2e2e1826d4c17 | c19872a88080845fa4c5ac51a45ddaffbf40690b | refs/heads/master | 2021-01-22T23:34:09.525637 | 2017-03-21T03:30:53 | 2017-03-21T03:30:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | def grade(random, key):
    if "r0p_7o_v1ct0ry" in key:
return True, "Correct!"
return False, "Nope."
| [
"[email protected]"
] | |
41728b57e94b0d1fad0bbdb748558ddaa8d75399 | a0f0efaaaf69d6ccdc2a91596db29f04025f122c | /build/ca_driver/atomic_configure/_setup_util.py | c1c01bb865033f565d69f3a1695f7433a09b068e | [] | no_license | chiuhandsome/ros_ws_test-git | 75da2723154c0dadbcec8d7b3b1f3f8b49aa5cd6 | 619909130c23927ccc902faa3ff6d04ae0f0fba9 | refs/heads/master | 2022-12-24T05:45:43.845717 | 2020-09-22T10:12:54 | 2020-09-22T10:12:54 | 297,582,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,703 | py | #!/usr/bin/python2
# -*- coding: utf-8 -*-
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""This file generates shell code for the setup.SHELL scripts to set environment variables."""
from __future__ import print_function
import argparse
import copy
import errno
import os
import platform
import sys
CATKIN_MARKER_FILE = '.catkin'
system = platform.system()
IS_DARWIN = (system == 'Darwin')
IS_WINDOWS = (system == 'Windows')
PATH_TO_ADD_SUFFIX = ['bin']
if IS_WINDOWS:
# while catkin recommends putting dll's into bin, 3rd party packages often put dll's into lib
# since Windows finds dll's via the PATH variable, prepend it with path to lib
PATH_TO_ADD_SUFFIX.extend([['lib', os.path.join('lib', 'x86_64-linux-gnu')]])
# subfolder of workspace prepended to CMAKE_PREFIX_PATH
ENV_VAR_SUBFOLDERS = {
'CMAKE_PREFIX_PATH': '',
'LD_LIBRARY_PATH' if not IS_DARWIN else 'DYLD_LIBRARY_PATH': ['lib', os.path.join('lib', 'x86_64-linux-gnu')],
'PATH': PATH_TO_ADD_SUFFIX,
'PKG_CONFIG_PATH': [os.path.join('lib', 'pkgconfig'), os.path.join('lib', 'x86_64-linux-gnu', 'pkgconfig')],
'PYTHONPATH': 'lib/python2.7/dist-packages',
}
def rollback_env_variables(environ, env_var_subfolders):
"""
    Generate shell code to reset environment variables by unrolling
    modifications based on all workspaces in CMAKE_PREFIX_PATH.
This does not cover modifications performed by environment hooks.
"""
lines = []
unmodified_environ = copy.copy(environ)
for key in sorted(env_var_subfolders.keys()):
subfolders = env_var_subfolders[key]
if not isinstance(subfolders, list):
subfolders = [subfolders]
value = _rollback_env_variable(unmodified_environ, key, subfolders)
if value is not None:
environ[key] = value
lines.append(assignment(key, value))
if lines:
lines.insert(0, comment('reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH'))
return lines
def _rollback_env_variable(environ, name, subfolders):
"""
For each catkin workspace in CMAKE_PREFIX_PATH remove the first entry from env[NAME] matching workspace + subfolder.
:param subfolders: list of str '' or subfoldername that may start with '/'
:returns: the updated value of the environment variable.
"""
value = environ[name] if name in environ else ''
env_paths = [path for path in value.split(os.pathsep) if path]
value_modified = False
for subfolder in subfolders:
if subfolder:
if subfolder.startswith(os.path.sep) or (os.path.altsep and subfolder.startswith(os.path.altsep)):
subfolder = subfolder[1:]
if subfolder.endswith(os.path.sep) or (os.path.altsep and subfolder.endswith(os.path.altsep)):
subfolder = subfolder[:-1]
for ws_path in _get_workspaces(environ, include_fuerte=True, include_non_existing=True):
path_to_find = os.path.join(ws_path, subfolder) if subfolder else ws_path
path_to_remove = None
for env_path in env_paths:
env_path_clean = env_path[:-1] if env_path and env_path[-1] in [os.path.sep, os.path.altsep] else env_path
if env_path_clean == path_to_find:
path_to_remove = env_path
break
if path_to_remove:
env_paths.remove(path_to_remove)
value_modified = True
new_value = os.pathsep.join(env_paths)
return new_value if value_modified else None
def _get_workspaces(environ, include_fuerte=False, include_non_existing=False):
"""
Based on CMAKE_PREFIX_PATH return all catkin workspaces.
:param include_fuerte: The flag if paths starting with '/opt/ros/fuerte' should be considered workspaces, ``bool``
"""
# get all cmake prefix paths
env_name = 'CMAKE_PREFIX_PATH'
value = environ[env_name] if env_name in environ else ''
paths = [path for path in value.split(os.pathsep) if path]
# remove non-workspace paths
workspaces = [path for path in paths if os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE)) or (include_fuerte and path.startswith('/opt/ros/fuerte')) or (include_non_existing and not os.path.exists(path))]
return workspaces
def prepend_env_variables(environ, env_var_subfolders, workspaces):
"""Generate shell code to prepend environment variables for the all workspaces."""
lines = []
lines.append(comment('prepend folders of workspaces to environment variables'))
paths = [path for path in workspaces.split(os.pathsep) if path]
prefix = _prefix_env_variable(environ, 'CMAKE_PREFIX_PATH', paths, '')
lines.append(prepend(environ, 'CMAKE_PREFIX_PATH', prefix))
for key in sorted(key for key in env_var_subfolders.keys() if key != 'CMAKE_PREFIX_PATH'):
subfolder = env_var_subfolders[key]
prefix = _prefix_env_variable(environ, key, paths, subfolder)
lines.append(prepend(environ, key, prefix))
return lines
def _prefix_env_variable(environ, name, paths, subfolders):
"""
    Return the prefix to prepend to the environment variable NAME, adding
    any path in NEW_PATHS_STR without creating duplicate or empty items.
"""
value = environ[name] if name in environ else ''
environ_paths = [path for path in value.split(os.pathsep) if path]
checked_paths = []
for path in paths:
if not isinstance(subfolders, list):
subfolders = [subfolders]
for subfolder in subfolders:
path_tmp = path
if subfolder:
path_tmp = os.path.join(path_tmp, subfolder)
# skip nonexistent paths
if not os.path.exists(path_tmp):
continue
# exclude any path already in env and any path we already added
if path_tmp not in environ_paths and path_tmp not in checked_paths:
checked_paths.append(path_tmp)
prefix_str = os.pathsep.join(checked_paths)
if prefix_str != '' and environ_paths:
prefix_str += os.pathsep
return prefix_str
def assignment(key, value):
if not IS_WINDOWS:
return 'export %s="%s"' % (key, value)
else:
return 'set %s=%s' % (key, value)
def comment(msg):
if not IS_WINDOWS:
return '# %s' % msg
else:
return 'REM %s' % msg
def prepend(environ, key, prefix):
if key not in environ or not environ[key]:
return assignment(key, prefix)
if not IS_WINDOWS:
return 'export %s="%s$%s"' % (key, prefix, key)
else:
return 'set %s=%s%%%s%%' % (key, prefix, key)
def find_env_hooks(environ, cmake_prefix_path):
"""Generate shell code with found environment hooks for the all workspaces."""
lines = []
lines.append(comment('found environment hooks in workspaces'))
generic_env_hooks = []
generic_env_hooks_workspace = []
specific_env_hooks = []
specific_env_hooks_workspace = []
generic_env_hooks_by_filename = {}
specific_env_hooks_by_filename = {}
generic_env_hook_ext = 'bat' if IS_WINDOWS else 'sh'
specific_env_hook_ext = environ['CATKIN_SHELL'] if not IS_WINDOWS and 'CATKIN_SHELL' in environ and environ['CATKIN_SHELL'] else None
# remove non-workspace paths
workspaces = [path for path in cmake_prefix_path.split(os.pathsep) if path and os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE))]
for workspace in reversed(workspaces):
env_hook_dir = os.path.join(workspace, 'etc', 'catkin', 'profile.d')
if os.path.isdir(env_hook_dir):
for filename in sorted(os.listdir(env_hook_dir)):
if filename.endswith('.%s' % generic_env_hook_ext):
# remove previous env hook with same name if present
if filename in generic_env_hooks_by_filename:
i = generic_env_hooks.index(generic_env_hooks_by_filename[filename])
generic_env_hooks.pop(i)
generic_env_hooks_workspace.pop(i)
# append env hook
generic_env_hooks.append(os.path.join(env_hook_dir, filename))
generic_env_hooks_workspace.append(workspace)
generic_env_hooks_by_filename[filename] = generic_env_hooks[-1]
elif specific_env_hook_ext is not None and filename.endswith('.%s' % specific_env_hook_ext):
# remove previous env hook with same name if present
if filename in specific_env_hooks_by_filename:
i = specific_env_hooks.index(specific_env_hooks_by_filename[filename])
specific_env_hooks.pop(i)
specific_env_hooks_workspace.pop(i)
# append env hook
specific_env_hooks.append(os.path.join(env_hook_dir, filename))
specific_env_hooks_workspace.append(workspace)
specific_env_hooks_by_filename[filename] = specific_env_hooks[-1]
env_hooks = generic_env_hooks + specific_env_hooks
env_hooks_workspace = generic_env_hooks_workspace + specific_env_hooks_workspace
count = len(env_hooks)
lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_COUNT', count))
for i in range(count):
lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d' % i, env_hooks[i]))
lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d_WORKSPACE' % i, env_hooks_workspace[i]))
return lines
def _parse_arguments(args=None):
parser = argparse.ArgumentParser(description='Generates code blocks for the setup.SHELL script.')
parser.add_argument('--extend', action='store_true', help='Skip unsetting previous environment variables to extend context')
parser.add_argument('--local', action='store_true', help='Only consider this prefix path and ignore other prefix path in the environment')
return parser.parse_known_args(args=args)[0]
if __name__ == '__main__':
try:
try:
args = _parse_arguments()
except Exception as e:
print(e, file=sys.stderr)
sys.exit(1)
if not args.local:
# environment at generation time
            CMAKE_PREFIX_PATH = '/home/handsome/ros_ws_test/install/libcreate;/home/handsome/ros_ws_test/install/ca_msgs;/home/handsome/ros_ws_test/install/ca_description;/home/handsome/ros_ws/install/yocs_cmd_vel_mux;/home/handsome/ros_ws/install/urdf_tutorial;/home/handsome/ros_ws/install/tuw_waypoint_to_spline_msgs;/home/handsome/ros_ws/install/tuw_multi_robot_router;/home/handsome/ros_ws/install/tuw_voronoi_graph;/home/handsome/ros_ws/install/tuw_vehicle_msgs;/home/handsome/ros_ws/install/tuw_order_planner;/home/handsome/ros_ws/install/tuw_object_rviz;/home/handsome/ros_ws/install/tuw_object_msgs;/home/handsome/ros_ws/install/tuw_nav_rviz;/home/handsome/ros_ws/install/tuw_multi_robot_local_behavior_controller;/home/handsome/ros_ws/install/tuw_multi_robot_ctrl;/home/handsome/ros_ws/install/tuw_nav_msgs;/home/handsome/ros_ws/install/tuw_multi_robot_rviz;/home/handsome/ros_ws/install/tuw_multi_robot_goal_generator;/home/handsome/ros_ws/install/robot_scheduling_utility;/home/handsome/ros_ws/install/robot_scheduling_actions;/home/handsome/ros_ws/install/actionlib_modules_bridge;/home/handsome/ros_ws/install/tuw_multi_robot_msgs;/home/handsome/ros_ws/install/tuw_multi_robot_demo;/home/handsome/ros_ws/install/tuw_local_controller_msgs;/home/handsome/ros_ws/install/tuw_geometry_rviz;/home/handsome/ros_ws/install/tuw_geometry_msgs;/home/handsome/ros_ws/install/tuw_geometry;/home/handsome/ros_ws/install/tuw_gazebo_msgs;/home/handsome/ros_ws/install/tuw_control;/home/handsome/ros_ws/install/tuw_airskin_msgs;/home/handsome/ros_ws/install/turtlebot_teleop;/home/handsome/ros_ws/install/tug_example_pnp_server;/home/handsome/ros_ws/install/tug_example_actions;/home/handsome/ros_ws/install/tug_example_msgs;/home/handsome/ros_ws/install/timed_roslaunch;/home/handsome/ros_ws/install/teb2_local_planner;/home/handsome/ros_ws/install/stated_roslaunch;/home/handsome/ros_ws/install/spatio_temporal_voxel_layer;/home/handsome/ros_ws/install/robot_udp_bridge;/home/handsome/ros_ws/install/robot_database_bridge;/home/handsome/ros_ws/install/samsungcmd_msgs;/home/handsome/ros_ws/install/rplidar_ros;/home/handsome/ros_ws/install/rp_action;/home/handsome/ros_ws/install/rp_action_msgs;/home/handsome/ros_ws/install/rosserial_xbee;/home/handsome/ros_ws/install/rosserial_windows;/home/handsome/ros_ws/install/rosserial_tivac;/home/handsome/ros_ws/install/rosserial_test;/home/handsome/ros_ws/install/rosserial_server;/home/handsome/ros_ws/install/rosserial_python;/home/handsome/ros_ws/install/rosserial_mbed;/home/handsome/ros_ws/install/rosserial_embeddedlinux;/home/handsome/ros_ws/install/rosserial_arduino;/home/handsome/ros_ws/install/rosserial_client;/home/handsome/ros_ws/install/rosserial_msgs;/home/handsome/ros_ws/install/cellctrl_qtgui_bridge;/home/handsome/ros_ws/install/car_db_manager_qtgui;/home/handsome/ros_ws/install/car_db_manager_bridge;/home/handsome/ros_ws/install/car_db_manager_action;/home/handsome/ros_ws/install/ros_utility_tools;/home/handsome/ros_ws/install/ros_package_test;/home/handsome/ros_ws/install/ros_package_manager;/home/handsome/ros_ws/install/robot_scheduling_server;/home/handsome/ros_ws/install/robot_scheduling_msgs;/home/handsome/ros_ws/install/robot_localization;/home/handsome/ros_ws/install/robot_control_msgs;/home/handsome/ros_ws/install/reset_location;/home/handsome/ros_ws/install/razor_imu_9dof;/home/handsome/ros_ws/install/pnp_rosplan;/home/handsome/ros_ws/install/actionlib_pnp_controller;/home/handsome/ros_ws/install/actionlib_modules_controller;/home/handsome/ros_ws/install/pnp_ros;/home/handsome/ros_ws/install/pnp_msgs;/home/handsome/ros_ws/install/open_auto_dock;/home/handsome/ros_ws/install/open_auto_dock_msgs;/home/handsome/ros_ws/install/omron_os32c_driver;/home/handsome/ros_ws/install/dlux_plugins;/home/handsome/ros_ws/install/dlux_global_planner;/home/handsome/ros_ws/install/nav_grid_pub_sub;/home/handsome/ros_ws/install/dwb_critics;/home/handsome/ros_ws/install/nav_grid_iterators;/home/handsome/ros_ws/install/locomove_base;/home/handsome/ros_ws/install/locomotor;/home/handsome/ros_ws/install/nav_core_adapter;/home/handsome/ros_ws/install/dwb_plugins;/home/handsome/ros_ws/install/dwb_local_planner;/home/handsome/ros_ws/install/nav_2d_utils;/home/handsome/ros_ws/install/global_planner_tests;/home/handsome/ros_ws/install/costmap_queue;/home/handsome/ros_ws/install/nav_core2;/home/handsome/ros_ws/install/nav_grid;/home/handsome/ros_ws/install/car_schedule_msgs;/home/handsome/ros_ws/install/actionlib_modules_msgs;/home/handsome/ros_ws/install/locomotor_msgs;/home/handsome/ros_ws/install/dwb_msgs;/home/handsome/ros_ws/install/nav_2d_msgs;/home/handsome/ros_ws/install/mongodb_log;/home/handsome/ros_ws/install/mongodb_store;/home/handsome/ros_ws/install/mongodb_store_msgs;/home/handsome/ros_ws/install/ca_driver;/home/handsome/ros_ws/install/libcreate;/home/handsome/ros_ws/install/i_robot_stage;/home/handsome/ros_ws/install/i_robot_navigation;/home/handsome/ros_ws/install/hyc_order_planner;/home/handsome/ros_ws/install/hyc_multi_robot_msgs;/home/handsome/ros_ws/install/fetch_open_auto_dock;/home/handsome/ros_ws/install/fetch_auto_dock_msgs;/home/handsome/ros_ws/install/change_teb2_max_vel_x_onfly;/home/handsome/ros_ws/install/cellctrl_control_msgs;/home/handsome/ros_ws/install/car_db_manager_msgs;/home/handsome/ros_ws/install/ca_msgs;/home/handsome/ros_ws/install/ca_description;/home/handsome/ros_ws/install/botcmd_msgs;/opt/ros/melodic'.split(';')
else:
# don't consider any other prefix path than this one
CMAKE_PREFIX_PATH = []
# prepend current workspace if not already part of CPP
base_path = os.path.dirname(__file__)
# CMAKE_PREFIX_PATH uses forward slash on all platforms, but __file__ is platform dependent
# base_path on Windows contains backward slashes, need to be converted to forward slashes before comparison
if os.path.sep != '/':
base_path = base_path.replace(os.path.sep, '/')
if base_path not in CMAKE_PREFIX_PATH:
CMAKE_PREFIX_PATH.insert(0, base_path)
CMAKE_PREFIX_PATH = os.pathsep.join(CMAKE_PREFIX_PATH)
environ = dict(os.environ)
lines = []
if not args.extend:
lines += rollback_env_variables(environ, ENV_VAR_SUBFOLDERS)
lines += prepend_env_variables(environ, ENV_VAR_SUBFOLDERS, CMAKE_PREFIX_PATH)
lines += find_env_hooks(environ, CMAKE_PREFIX_PATH)
print('\n'.join(lines))
# need to explicitly flush the output
sys.stdout.flush()
except IOError as e:
# and catch potential "broken pipe" if stdout is not writable
# which can happen when piping the output to a file but the disk is full
if e.errno == errno.EPIPE:
print(e, file=sys.stderr)
sys.exit(2)
raise
sys.exit(0)
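# This script is normally not run by hand: the generated setup.sh evaluates
# its output to update the shell environment, e.g. (sketch, paths assumed):
#   eval "$(/path/to/install/_setup_util.py)"            # fresh environment
#   eval "$(/path/to/install/_setup_util.py --extend)"   # keep existing overlays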
| [
"[email protected]"
] | |
4d5bad39f9a0d575b58bf1cae7bbb513a1b3f018 | 5cb33f0b2f58145ccf9c183b6366af9284227957 | /home/migrations/0052_member_ards.py | 3fc16148533deffa6876b509720a866868eff12d | [] | no_license | joel081112/ArdsProject | a72b3038349d5cf949e55037989644d0f26fab65 | d7867be34cdd199d4c07f4a637b89f5f7305ac36 | refs/heads/main | 2023-04-24T04:55:40.296316 | 2021-04-29T09:30:41 | 2021-04-29T09:30:41 | 336,305,114 | 0 | 0 | null | 2021-04-29T09:30:42 | 2021-02-05T15:06:40 | HTML | UTF-8 | Python | false | false | 390 | py | # Generated by Django 3.1.5 on 2021-03-05 17:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0051_auto_20210305_1628'),
]
operations = [
migrations.AddField(
model_name='member',
name='ards',
field=models.BooleanField(blank=True, null=True),
),
]
| [
"[email protected]"
] | |
1861628aaba3bac8ca796df257d9f5249ec9eb96 | a60e81b51935fb53c0900fecdadba55d86110afe | /LeetCode/python/98_medium_Validate Binary Search Tree.py | 7312152c974df12e1c55d32eb657132f520cbf5e | [] | no_license | FrankieZhen/Lookoop | fab6855f5660467f70dc5024d9aa38213ecf48a7 | 212f8b83d6ac22db1a777f980075d9e12ce521d2 | refs/heads/master | 2020-07-27T08:12:45.887814 | 2019-09-16T11:48:20 | 2019-09-16T11:48:20 | 209,021,915 | 1 | 0 | null | 2019-09-17T10:10:46 | 2019-09-17T10:10:46 | null | UTF-8 | Python | false | false | 2,254 | py | """
Given a binary tree, determine if it is a valid binary search tree (BST).
Assume a BST is defined as follows:
The left subtree of a node contains only nodes with keys less than the node's key.
The right subtree of a node contains only nodes with keys greater than the node's key.
Both the left and right subtrees must also be binary search trees.
Example 1:
Input:
2
/ \
1 3
Output: true
Example 2:
5
/ \
1 4
/ \
3 6
Output: false
Explanation: The input is: [5,1,4,null,null,3,6]. The root node's value
is 5 but its right child's value is 4.
"""
# 2018-6-30
# Validate Binary Search Tree
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
# TLE: first attempt via in-order traversal (kept for reference; it can also
# fall through and return None on some inputs)
class Solution1:
def __init__(self):
self.lists = []
def isValidBST(self, root):
"""
:type root: TreeNode
:rtype: bool
"""
        if root is None:
return True
self.isValidBST(root.left)
self.lists.append(root.val)
# print(self.lists)
if len(self.lists) == 2:
if self.lists[1] <= self.lists[0]:
return False
else:
self.lists.pop(0)
self.isValidBST(root.right)
# print(self.lists)
if len(self.lists) == 2:
if self.lists[1] <= self.lists[0]:
return False
else:
return True
# root.left.val < root.val and root.right.val > root.val
# https://leetcode.com/problems/validate-binary-search-tree/discuss/32178/Clean-Python-Solution
class Solution2:
def isValidBST(self, root, floor=float('-inf'), ceiling=float('inf')):
"""
:type root: TreeNode
:rtype: bool
"""
# print(root,floor,ceiling)
        if root is None:
return True
if root.val <= floor or root.val >= ceiling:
return False
return self.isValidBST(root.left, floor, root.val) and self.isValidBST(root.right, root.val, ceiling)
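# Added alternative (an equivalent check, not from the original file): an
# iterative in-order traversal of a valid BST must visit strictly
# increasing values.
class Solution3:
    def isValidBST(self, root):
        prev = float('-inf')
        stack, node = [], root
        while stack or node:
            while node:              # descend to the leftmost unvisited node
                stack.append(node)
                node = node.left
            node = stack.pop()
            if node.val <= prev:     # in-order sequence must strictly increase
                return False
            prev = node.val
            node = node.right
        return True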
# test
root = TreeNode(1)
s = TreeNode(2)
s.left = TreeNode(3)
root.right = s
test = Solution2()
res = test.isValidBST(root)
print(res)  # expected: False, since 3 must lie strictly between 1 and 2 | [
"[email protected]"
] |