blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4b7c937f22f3014ec84bad9e620ce8522f0d431f | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /bin/jupyter-qtconsole | 488a1d74540d18578cde9d0aa14b719fbdb5f409 | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 101 | #!/usr/bin/env python
# Entry-point stub: delegates to the Jupyter Qt console's CLI entry point.
from qtconsole.qtconsoleapp import main
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | ||
352121d56b8a5bb9fa3eec78314000a59d9186b6 | b50508302647ad849029210bff200930b1902987 | /apps/articles/migrations/0001_initial.py | dcee0bc7423816df2b8733388e92bfed9f9a7652 | [] | no_license | tianjiajun123/myBlog | a46718ed3fde114bfa282428d0c8b7f36b5adce9 | 2cd67bc0e85974cda477c366db9f7051b8b11132 | refs/heads/master | 2023-02-15T11:12:37.266980 | 2021-01-06T10:58:50 | 2021-01-06T10:58:50 | 326,363,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,498 | py | # Generated by Django 3.1.4 on 2021-01-03 20:04
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration: creates the initial `Articles` table.
class Migration(migrations.Migration):
    initial = True
    dependencies = [
        # Depends on whichever user model settings.AUTH_USER_MODEL points at.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Articles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=128, verbose_name='文章标题')),  # article title
                ('img', models.ImageField(upload_to='', verbose_name='文章配图')),  # cover image
                ('abstract', models.TextField(verbose_name='文章摘要')),  # abstract / summary
                ('content', models.TextField(verbose_name='文章内容')),  # article body
                ('visited', models.IntegerField(verbose_name='文章访问量')),  # view count
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),  # creation time
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='修改时间')),  # last-modified time
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='文章作者')),  # author FK
            ],
            options={
                'verbose_name': '文章',
                'verbose_name_plural': '文章',
                'ordering': ('-created_at',),  # newest articles first
            },
        ),
    ]
| [
"[email protected]"
] | |
d0999586ccbd5cec385e34f8a7edbf19decb2542 | 4443d08048f9980045e5f0541c69db0d756391d1 | /partner_ngos/programs_management/doctype/project_indicator/test_project_indicator.py | 886c2b9e33e38f60e194f3c716e3dc39fa36f037 | [
"MIT"
] | permissive | mohsinalimat/partner_ngos | dea0db6e0f9718e7ffc69f7171bdb1603a055d72 | 4a345fb6989ff5a21db7fca07aa4e5174dca8f59 | refs/heads/master | 2023-03-15T13:15:40.571368 | 2020-07-09T07:22:59 | 2020-07-09T07:22:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2020, Akram Mutaher and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestProjectIndicator(unittest.TestCase):
    """Placeholder test case for the Project Indicator doctype (no tests yet)."""
    pass
| [
"[email protected]"
] | |
c5ef9f3c896720bfe3cbcd8bf8087394c0635cc3 | 343bdaddfc66c6316e2cee490e9cedf150e3a5b7 | /0101_0200/0140/0140.py | fcfbf5535dac6588d0fb41901a5501b3284bd7d6 | [] | no_license | dm-alexi/acmp | af7f6b4484b78f5922f3b464406a0ba5dea0d738 | 3fa0016d132adfeab7937b3e8c9687a34642c93a | refs/heads/master | 2021-07-09T15:14:25.857086 | 2020-10-20T19:08:54 | 2020-10-20T19:08:54 | 201,908,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py | from math import inf
# Detect a negative cycle in the weighted digraph read from input.txt
# (Floyd-Warshall relaxation; the sentinel weight "100000" marks a missing edge).
with open("input.txt", "r") as src, open("output.txt", "w") as dst:
    size = int(src.readline())
    dist = []
    for _ in range(size):
        row = [inf if tok == "100000" else int(tok) for tok in src.readline().split()]
        dist.append(row)
    # Standard triple loop; finiteness guards keep inf + inf out of the sums.
    for via in range(size):
        for a in range(size):
            for b in range(size):
                if dist[a][via] < inf and dist[via][b] < inf and dist[a][via] + dist[via][b] < dist[a][b]:
                    dist[a][b] = dist[a][via] + dist[via][b]
    # A vertex reachable from itself with negative total weight => negative cycle.
    dst.write("YES" if any(dist[v][v] < 0 for v in range(size)) else "NO")
| [
"[email protected]"
] | |
e3777872b94428267992a01b44c30ba2643b99bc | c91b68be796a9835c528856b6f5fa7b56d2af451 | /examples/mnist_convnet.py | d9e994d350811b397b81ced710890fceedbf32db | [
"Apache-2.0"
] | permissive | syzh1991/tensorpack | fe61cb46fd40aa0cb9f8a0a3ea4ea3bb833cb3c5 | 174c3fc9d60b0cbeccac2ae3e73e73d6e788dbe0 | refs/heads/master | 2021-01-17T00:24:08.366350 | 2016-04-19T06:25:57 | 2016-04-19T06:25:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,520 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: mnist_convnet.py
# Author: Yuxin Wu <[email protected]>
import tensorflow as tf
import numpy as np
import os, sys
import argparse
from tensorpack.train import *
from tensorpack.models import *
from tensorpack.utils import *
from tensorpack.tfutils.symbolic_functions import *
from tensorpack.tfutils.summary import *
from tensorpack.tfutils import *
from tensorpack.callbacks import *
from tensorpack.dataflow import *
"""
MNIST ConvNet example.
about 0.6% validation error after 30 epochs.
"""
BATCH_SIZE = 128
IMAGE_SIZE = 28
class Model(ModelDesc):
    """MNIST ConvNet (tensorpack ModelDesc): 4 conv layers + 2 FC layers."""
    def _get_input_vars(self):
        # Placeholders: 28x28 grayscale images and integer class labels.
        return [InputVar(tf.float32, (None, IMAGE_SIZE, IMAGE_SIZE), 'input'),
                InputVar(tf.int32, (None,), 'label')
               ]
    def _get_cost(self, input_vars, is_training):
        """Build the graph and return total cost = cross-entropy + weight decay."""
        is_training = bool(is_training)
        # Dropout only during training; keep_prob=1.0 disables it at test time.
        keep_prob = tf.constant(0.5 if is_training else 1.0)
        image, label = input_vars
        image = tf.expand_dims(image, 3)    # add a single channel
        nl = PReLU.f
        # Rescale pixels from [0, 1] to [-1, 1].
        image = image * 2 - 1
        l = Conv2D('conv0', image, out_channel=32, kernel_shape=3, nl=nl,
                   padding='VALID')
        l = MaxPooling('pool0', l, 2)
        l = Conv2D('conv1', l, out_channel=32, kernel_shape=3, nl=nl, padding='SAME')
        l = Conv2D('conv2', l, out_channel=32, kernel_shape=3, nl=nl, padding='VALID')
        l = MaxPooling('pool1', l, 2)
        l = Conv2D('conv3', l, out_channel=32, kernel_shape=3, nl=nl, padding='VALID')
        l = FullyConnected('fc0', l, 512)
        l = tf.nn.dropout(l, keep_prob)
        # fc will have activation summary by default. disable this for the output layer
        logits = FullyConnected('fc1', l, out_dim=10, nl=tf.identity)
        prob = tf.nn.softmax(logits, name='prob')
        cost = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, label)
        cost = tf.reduce_mean(cost, name='cross_entropy_loss')
        tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, cost)
        # compute the number of failed samples, for ClassificationError to use at test time
        wrong = prediction_incorrect(logits, label)
        nr_wrong = tf.reduce_sum(wrong, name='wrong')
        # monitor training error
        tf.add_to_collection(
            MOVING_SUMMARY_VARS_KEY, tf.reduce_mean(wrong, name='train_error'))
        # weight decay on all W of fc layers
        wd_cost = tf.mul(1e-5,
                         regularize_cost('fc.*/W', tf.nn.l2_loss),
                         name='regularize_loss')
        tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, wd_cost)
        add_param_summary([('.*/W', ['histogram'])]) # monitor histogram of all W
        return tf.add_n([wd_cost, cost], name='cost')
def get_config():
    """Assemble the tensorpack TrainConfig: data, optimizer, LR schedule, callbacks."""
    basename = os.path.basename(__file__)
    # Log under train_log/<script name without extension>.
    logger.set_logger_dir(
        os.path.join('train_log', basename[:basename.rfind('.')]))
    # prepare dataset
    dataset_train = BatchData(dataset.Mnist('train'), 128)
    dataset_test = BatchData(dataset.Mnist('test'), 256, remainder=True)
    step_per_epoch = dataset_train.size()
    # prepare session
    sess_config = get_default_sess_config()
    sess_config.gpu_options.per_process_gpu_memory_fraction = 0.5
    # Exponential LR decay: x0.3 every 10 epochs, starting from 1e-3.
    lr = tf.train.exponential_decay(
        learning_rate=1e-3,
        global_step=get_global_step_var(),
        decay_steps=dataset_train.size() * 10,
        decay_rate=0.3, staircase=True, name='learning_rate')
    tf.scalar_summary('learning_rate', lr)
    return TrainConfig(
        dataset=dataset_train,
        optimizer=tf.train.AdamOptimizer(lr),
        callbacks=Callbacks([
            StatPrinter(),
            ModelSaver(),
            InferenceRunner(dataset_test,
                [ScalarStats('cost'), ClassificationError() ])
        ]),
        session_config=sess_config,
        model=Model(),
        step_per_epoch=step_per_epoch,
        max_epoch=100,
    )
# CLI entry point: choose GPU(s), optionally restore a checkpoint, then train.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.') # nargs='*' in multi mode
    parser.add_argument('--load', help='load model')
    args = parser.parse_args()
    if args.gpu:
        os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
    else:
        # Default to the first GPU when none is specified.
        os.environ['CUDA_VISIBLE_DEVICES'] = '0'
    with tf.Graph().as_default():
        config = get_config()
        if args.load:
            config.session_init = SaverRestore(args.load)
        SimpleTrainer(config).train()
| [
"[email protected]"
] | |
d06d7c4a50a9d2ed62e1339c2c422ef078e2e611 | 7410903c6cd5ef35c592af00c934fb21c369cbf2 | /00_Code/01_LeetCode/69_Sqrt.py | 4f2aa947d9e808ddbc9837a59a51ea6e638dbf3b | [
"MIT"
] | permissive | KartikKannapur/Algorithms | f4e4726170599db0622d18e8c06a382e9bce9e77 | 66e3c8112826aeffb78bd74d02be1a8d1e478de8 | refs/heads/master | 2020-12-25T18:32:41.086518 | 2020-10-19T02:59:47 | 2020-10-19T02:59:47 | 93,961,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | # #Implement int sqrt(int x).
# #Compute and return the square root of x.
# #x is guaranteed to be a non-negative integer.
# #Your runtime beats 81.07 % of python submissions.
class Solution(object):
    def mySqrt(self, x):
        """Return the integer square root of x, i.e. floor(sqrt(x)).

        :type x: int  -- guaranteed non-negative per the problem statement
        :rtype: int

        Binary search on the answer, O(log x). The previous version used
        `high = mid` on overshoot (which does not shrink the interval) and
        had no return after the loop, so it could fall through returning
        None; this form always terminates and always returns an int.
        """
        low, high = 0, x
        ans = 0
        while low <= high:
            mid = (low + high) // 2
            if mid * mid <= x:
                ans = mid          # mid is a valid floor-sqrt candidate
                low = mid + 1      # try to find a larger one
            else:
                high = mid - 1
        return ans
"[email protected]"
] | |
873f832b9b4a502cdab6b718ab5f202b53555a0a | f4b60f5e49baf60976987946c20a8ebca4880602 | /lib64/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/datetime/rsntpprovtontpauthkey.py | a7e4bb2cc3d0c51d834f75f92edff686fd33660f | [] | no_license | cqbomb/qytang_aci | 12e508d54d9f774b537c33563762e694783d6ba8 | a7fab9d6cda7fadcc995672e55c0ef7e7187696e | refs/heads/master | 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 | Python | UTF-8 | Python | false | false | 8,541 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RsNtpProvToNtpAuthKey(Mo):
"""
The authentication key to apply to a specific provider. Keys can be shared with different providers.
"""
meta = NamedSourceRelationMeta("cobra.model.datetime.RsNtpProvToNtpAuthKey", "cobra.model.datetime.NtpAuthKey")
meta.targetNameProps["id"] = "tnDatetimeNtpAuthKeyId"
meta.cardinality = SourceRelationMeta.ONE_TO_M
meta.moClassName = "datetimeRsNtpProvToNtpAuthKey"
meta.rnFormat = "rsntpProvToNtpAuthKey-%(tnDatetimeNtpAuthKeyId)s"
meta.category = MoCategory.RELATIONSHIP_TO_LOCAL
meta.label = "Relation to Datetime Authentication Key"
meta.writeAccessMask = 0x10000000001
meta.readAccessMask = 0x10000000001
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = True
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.health.Inst")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.parentClasses.add("cobra.model.datetime.NtpProv")
meta.superClasses.add("cobra.model.reln.Inst")
meta.superClasses.add("cobra.model.reln.To")
meta.superClasses.add("cobra.model.pol.NToRef")
meta.rnPrefixes = [
('rsntpProvToNtpAuthKey-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "forceResolve", "forceResolve", 107, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = True
prop.defaultValueStr = "yes"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("forceResolve", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 14775, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "rType", "rType", 106, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "mo"
prop._addConstant("local", "local", 3)
prop._addConstant("mo", "mo", 1)
prop._addConstant("service", "service", 2)
meta.props.add("rType", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "state", "state", 103, PropCategory.REGULAR)
prop.label = "State"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unformed"
prop._addConstant("cardinality-violation", "cardinality-violation", 5)
prop._addConstant("formed", "formed", 1)
prop._addConstant("invalid-target", "invalid-target", 4)
prop._addConstant("missing-target", "missing-target", 2)
prop._addConstant("unformed", "unformed", 0)
meta.props.add("state", prop)
prop = PropMeta("str", "stateQual", "stateQual", 104, PropCategory.REGULAR)
prop.label = "State Qualifier"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("default-target", "default-target", 2)
prop._addConstant("mismatch-target", "mismatch-target", 1)
prop._addConstant("none", "none", 0)
meta.props.add("stateQual", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "tCl", "tCl", 13318, PropCategory.REGULAR)
prop.label = "Target-class"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 4527
prop.defaultValueStr = "datetimeNtpAuthKey"
prop._addConstant("datetimeNtpAuthKey", None, 4527)
prop._addConstant("unspecified", "unspecified", 0)
meta.props.add("tCl", prop)
prop = PropMeta("str", "tContextDn", "tContextDn", 4990, PropCategory.REGULAR)
prop.label = "Target-context"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("tContextDn", prop)
prop = PropMeta("str", "tDn", "tDn", 100, PropCategory.REGULAR)
prop.label = "Target-dn"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("tDn", prop)
prop = PropMeta("str", "tRn", "tRn", 4989, PropCategory.REGULAR)
prop.label = "Target-rn"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("tRn", prop)
prop = PropMeta("str", "tType", "tType", 4988, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "name"
prop._addConstant("all", "all", 2)
prop._addConstant("mo", "mo", 1)
prop._addConstant("name", "name", 0)
meta.props.add("tType", prop)
prop = PropMeta("str", "tnDatetimeNtpAuthKeyId", "tnDatetimeNtpAuthKeyId", 16589, PropCategory.REGULAR)
prop.label = "Auth Key Id"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.range = [(1, 65535)]
prop.defaultValue = 1
prop.defaultValueStr = "1"
meta.props.add("tnDatetimeNtpAuthKeyId", prop)
prop = PropMeta("str", "uid", "uid", 8, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("uid", prop)
meta.namingProps.append(getattr(meta.props, "tnDatetimeNtpAuthKeyId"))
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
def __init__(self, parentMoOrDn, tnDatetimeNtpAuthKeyId, markDirty=True, **creationProps):
namingVals = [tnDatetimeNtpAuthKeyId]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
74108a22b91ad3b4c6b46bc638f052f5195fb339 | e030b26ea0f45eda5a25bf18051e9231e604fdd5 | /doc/source/sphinxext/numpy_ext/docscrape_sphinx.py | bcf7e70731cc798b73e4f22a48c25d361f65c6d1 | [
"CECILL-B",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | neurospin/piws | d246dc1925c563964309e53f36fc44e48f929cf7 | 4ec6f60c6343623a82761c90c74642b4b372ffd1 | refs/heads/master | 2021-01-17T03:49:35.787846 | 2018-10-15T09:44:39 | 2018-10-15T09:44:39 | 32,163,903 | 0 | 17 | NOASSERTION | 2020-10-14T12:56:38 | 2015-03-13T15:29:57 | HTML | UTF-8 | Python | false | false | 8,004 | py | import re
import inspect
import textwrap
import pydoc
import sphinx
from docscrape import NumpyDocString
from docscrape import FunctionDoc
from docscrape import ClassDoc
class SphinxDocString(NumpyDocString):
def __init__(self, docstring, config=None):
config = {} if config is None else config
self.use_plots = config.get('use_plots', False)
NumpyDocString.__init__(self, docstring, config=config)
# string conversion routines
def _str_header(self, name, symbol='`'):
return ['.. rubric:: ' + name, '']
def _str_field_list(self, name):
return [':' + name + ':']
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' ' * indent + line]
return out
def _str_signature(self):
return ['']
if self['Signature']:
return ['``%s``' % self['Signature']] + ['']
else:
return ['']
def _str_summary(self):
return self['Summary'] + ['']
def _str_extended_summary(self):
return self['Extended Summary'] + ['']
def _str_param_list(self, name):
out = []
if self[name]:
out += self._str_field_list(name)
out += ['']
for param, param_type, desc in self[name]:
out += self._str_indent(['**%s** : %s' % (param.strip(),
param_type)])
out += ['']
out += self._str_indent(desc, 8)
out += ['']
return out
@property
def _obj(self):
if hasattr(self, '_cls'):
return self._cls
elif hasattr(self, '_f'):
return self._f
return None
def _str_member_list(self, name):
"""
Generate a member listing, autosummary:: table where possible,
and a table where not.
"""
out = []
if self[name]:
out += ['.. rubric:: %s' % name, '']
prefix = getattr(self, '_name', '')
if prefix:
prefix = '~%s.' % prefix
autosum = []
others = []
for param, param_type, desc in self[name]:
param = param.strip()
if not self._obj or hasattr(self._obj, param):
autosum += [" %s%s" % (prefix, param)]
else:
others.append((param, param_type, desc))
if autosum:
# GAEL: Toctree commented out below because it creates
# hundreds of sphinx warnings
# out += ['.. autosummary::', ' :toctree:', '']
out += ['.. autosummary::', '']
out += autosum
if others:
maxlen_0 = max([len(x[0]) for x in others])
maxlen_1 = max([len(x[1]) for x in others])
hdr = "=" * maxlen_0 + " " + "=" * maxlen_1 + " " + "=" * 10
fmt = '%%%ds %%%ds ' % (maxlen_0, maxlen_1)
n_indent = maxlen_0 + maxlen_1 + 4
out += [hdr]
for param, param_type, desc in others:
out += [fmt % (param.strip(), param_type)]
out += self._str_indent(desc, n_indent)
out += [hdr]
out += ['']
return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
out += ['']
content = textwrap.dedent("\n".join(self[name])).split("\n")
out += content
out += ['']
return out
def _str_see_also(self, func_role):
out = []
if self['See Also']:
see_also = super(SphinxDocString, self)._str_see_also(func_role)
out = ['.. seealso::', '']
out += self._str_indent(see_also[2:])
return out
def _str_warnings(self):
out = []
if self['Warnings']:
out = ['.. warning::', '']
out += self._str_indent(self['Warnings'])
return out
def _str_index(self):
idx = self['index']
out = []
if len(idx) == 0:
return out
out += ['.. index:: %s' % idx.get('default', '')]
for section, references in idx.iteritems():
if section == 'default':
continue
elif section == 'refguide':
out += [' single: %s' % (', '.join(references))]
else:
out += [' %s: %s' % (section, ','.join(references))]
return out
def _str_references(self):
out = []
if self['References']:
out += self._str_header('References')
if isinstance(self['References'], str):
self['References'] = [self['References']]
out.extend(self['References'])
out += ['']
# Latex collects all references to a separate bibliography,
# so we need to insert links to it
if sphinx.__version__ >= "0.6":
out += ['.. only:: latex', '']
else:
out += ['.. latexonly::', '']
items = []
for line in self['References']:
m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
if m:
items.append(m.group(1))
out += [' ' + ", ".join(["[%s]_" % item for item in items]), '']
return out
def _str_examples(self):
examples_str = "\n".join(self['Examples'])
if (self.use_plots and 'import matplotlib' in examples_str
and 'plot::' not in examples_str):
out = []
out += self._str_header('Examples')
out += ['.. plot::', '']
out += self._str_indent(self['Examples'])
out += ['']
return out
else:
return self._str_section('Examples')
def __str__(self, indent=0, func_role="obj"):
out = []
out += self._str_signature()
out += self._str_index() + ['']
out += self._str_summary()
out += self._str_extended_summary()
for param_list in ('Parameters', 'Returns', 'Raises'):
out += self._str_param_list(param_list)
out += self._str_warnings()
out += self._str_see_also(func_role)
out += self._str_section('Notes')
out += self._str_references()
out += self._str_examples()
for param_list in ('Attributes', 'Methods'):
out += self._str_member_list(param_list)
out = self._str_indent(out, indent)
return '\n'.join(out)
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
    """FunctionDoc rendered with the Sphinx string-conversion routines."""
    def __init__(self, obj, doc=None, config=None):
        # Was `config={}`: a shared mutable default argument. Normalize
        # None -> fresh dict instead; explicit callers are unaffected.
        config = {} if config is None else config
        self.use_plots = config.get('use_plots', False)
        FunctionDoc.__init__(self, obj, doc=doc, config=config)
class SphinxClassDoc(SphinxDocString, ClassDoc):
    """ClassDoc rendered with the Sphinx string-conversion routines."""
    def __init__(self, obj, doc=None, func_doc=None, config=None):
        # Was `config={}`: a shared mutable default argument. Normalize
        # None -> fresh dict instead; explicit callers are unaffected.
        config = {} if config is None else config
        self.use_plots = config.get('use_plots', False)
        # NOTE(review): `func_doc` is accepted but deliberately not forwarded
        # (ClassDoc receives func_doc=None), matching the original behavior.
        # Confirm before changing: forwarding it would alter method rendering.
        ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config)
class SphinxObjDoc(SphinxDocString):
    """Render the docstring of an arbitrary (non-class, non-function) object."""
    def __init__(self, obj, doc=None, config=None):
        # Stored as _f so SphinxDocString._obj can locate the wrapped object.
        self._f = obj
        SphinxDocString.__init__(self, doc, config=config)
def get_doc_object(obj, what=None, doc=None, config=None):
    """Return the appropriate Sphinx*Doc wrapper for *obj*.

    Parameters
    ----------
    obj : object
        The object to document.
    what : str, optional
        One of 'class', 'module', 'function', 'method' or 'object';
        inferred from *obj* when omitted.
    doc : str, optional
        Docstring override; fetched via pydoc for plain objects when omitted.
    config : dict, optional
        Rendering options (e.g. 'use_plots').
    """
    # Was `config={}`: a shared mutable default argument. Normalize
    # None -> fresh dict instead; explicit callers are unaffected.
    if config is None:
        config = {}
    if what is None:
        if inspect.isclass(obj):
            what = 'class'
        elif inspect.ismodule(obj):
            what = 'module'
        elif callable(obj):
            what = 'function'
        else:
            what = 'object'
    if what == 'class':
        return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
                              config=config)
    elif what in ('function', 'method'):
        return SphinxFunctionDoc(obj, doc=doc, config=config)
    else:
        if doc is None:
            doc = pydoc.getdoc(obj)
        return SphinxObjDoc(obj, doc, config=config)
| [
"[email protected]"
] | |
e4c2ae41b7aec6371b17182c26cbfda22f852b60 | b466a62a6b8151937212688c09b3a5704eaa7466 | /Python OOP - Exam Preparation - 2 April 2020/tests/test_battlefield.py | 86b729b594d2a13d2cc6756a5da43117a61aedc9 | [
"MIT"
] | permissive | DiyanKalaydzhiev23/OOP---Python | 89efa1a08056375496278dac3af97e10876f7728 | 7ac424d5fb08a6bd28dc36593e45d949b3ac0cd0 | refs/heads/main | 2023-07-08T08:23:05.148293 | 2021-08-13T12:09:12 | 2021-08-13T12:09:12 | 383,723,287 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,693 | py | from unittest import TestCase, main
from project.battle_field import BattleField
from project.controller import Controller
class TestBattleField(TestCase):
    """Unit tests for BattleField using a Controller-built fixture of two players."""
    def setUp(self):
        # Fixture: "pesho" (Beginner, one Magic card) vs "ivan" (Advanced, Trap + Magic).
        self.c = Controller()
        self.c.add_player("Beginner", "pesho")
        self.c.add_player("Advanced", "ivan")
        self.c.add_card("Magic", "boom")
        self.c.add_card("Trap", "oops")
        self.c.add_player_card("pesho", "boom")
        self.c.add_player_card("ivan", "oops")
        self.c.add_player_card("ivan", "boom")
        self.attacker = self.c.player_repository.find("pesho")
        self.enemy = self.c.player_repository.find("ivan")
        self.b = BattleField()
    def test_attacker_enemy_dead(self):
        # Fighting with both players at 0 health must raise with the exact message.
        self.attacker.health = 0
        self.enemy.health = 0
        with self.assertRaises(ValueError) as ve:
            self.c.fight("pesho", "ivan")
        self.assertEqual("Player is dead!", str(ve.exception))
    def test_increase_beginner(self):
        self.b.increase_beginner(self.attacker)
        self.assertEqual(90, self.attacker.health)
    def test_getting_bonus_points(self):
        self.b.get_bonus_points(self.attacker)
        self.b.get_bonus_points(self.enemy)
        self.assertEqual(130, self.attacker.health)
        self.assertEqual(335, self.enemy.health)
    def test_attacker_is_dead_after_fight(self):
        # Two rounds are enough to kill the Beginner player.
        self.c.fight("pesho", "ivan")
        self.c.fight("pesho", "ivan")
        self.assertTrue(self.attacker.is_dead)
    def test_enemy_is_dead_after_fight(self):
        # NOTE(review): asserts self.attacker.is_dead although the name says
        # "enemy" — here "pesho" (self.attacker) is the defender, so the
        # assertion target is correct but the naming is confusing; confirm.
        self.c.fight("ivan", "pesho")
        self.c.fight("ivan", "pesho")
        self.assertTrue(self.attacker.is_dead)
# Allow running this test module directly: `python test_battlefield.py`.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
ac4c91a50fd1f04ce141715e5289aa64f8765f8f | 0bb474290e13814c2498c086780da5096453da05 | /agc034/B/main.py | dcdc2a07ea70836db87eccb7f03314c35c2aad03 | [] | no_license | ddtkra/atcoder | 49b6205bf1bf6a50106b4ae94d2206a324f278e0 | eb57c144b5c2dbdd4abc432ecd8b1b3386244e30 | refs/heads/master | 2022-01-25T15:38:10.415959 | 2020-03-18T09:22:08 | 2020-03-18T09:22:08 | 208,825,724 | 1 | 0 | null | 2022-01-21T20:10:20 | 2019-09-16T14:51:01 | Python | UTF-8 | Python | false | false | 721 | py | #!/usr/bin/env python3
import sys
def solve(s: str):
    """Print the number of moves: each 'A' can hop over any later adjacent
    'BC' pair, so collapse 'BC' into one marker and sum, for every marker,
    how many consecutive 'A's immediately precede it."""
    collapsed = s.replace('BC', 'X')
    total = 0
    run = 0  # length of the current run of 'A's
    for ch in collapsed:
        if ch == 'A':
            run += 1
        elif ch == 'X':
            total += run  # every preceding 'A' in the run moves past this pair
        else:
            run = 0  # any other character breaks the run
    print(total)
# Generated by 1.1.4 https://github.com/kyuridenamida/atcoder-tools (tips: You use the default template now. You can remove this line by using your custom template)
def main():
    """Read the first whitespace-separated token from stdin and solve it."""
    def iterate_tokens():
        # Lazily stream whitespace-separated tokens from stdin.
        for line in sys.stdin:
            for word in line.split():
                yield word
    tokens = iterate_tokens()
    s = next(tokens) # type: str
    solve(s)
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
18169718282ec7bfbfb2b7d2c8bd1613b7b9aa52 | 9b8e2992a38f591032997b5ced290fe1acc3ad94 | /lcs4t.py | ede392018cce26478bbc4a6e676503d973b8be70 | [] | no_license | girishdhegde/aps-2020 | c694443c10d0d572c8022dad5a6ce735462aaa51 | fb43d8817ba16ff78f93a8257409d77dbc82ced8 | refs/heads/master | 2021-08-08T04:49:18.876187 | 2021-01-02T04:46:20 | 2021-01-02T04:46:20 | 236,218,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,238 | py | from collections import defaultdict
import math
# Interactive competitive-programming script: reads t test cases; each case has
# n coin values and a target `total`, and the script prints either "NO" or
# "YES" followed by a per-coin count.
# NOTE(review): the algorithm looks ad-hoc — duplicate coin values collapse into
# a single defaultdict slot, coins equal to 1 are silently dropped, and
# `print("coins:", coins)` appears to be leftover debug output. Verify against
# the original problem statement before relying on this code.
t=int(input())
for i in range(t):
    n, total=map(int,input().split())
    coin = []
    values = defaultdict(list)  # coin value -> [count]; one slot per distinct value
    y = list(map(int,input().split()))
    for j in range(n):
        coin.append(y[j])
        values[y[j]].append(0)
    # Keep only coins != 1 for the greedy phase.
    coins = []
    for j in range(n):
        if coin[j]!=1:
            coins.append(coin[j])
    print("coins:", coins)
    if(len(coins) == 1):
        if(total%coins[0]==0):
            print("NO")
        else:
            # Use ceil(total/coin) of the single coin; overshoot is intended here.
            values[coins[0]][0]=math.ceil(total/coins[0])
            print("YES",end=" ")
            x=list(values.values())
            for h in x:
                print(h[0],end=" ")
    else:
        # Greedy from the largest coin down; stop at the first non-divisor.
        coins=sorted(coins,reverse=True)
        flag=0
        for c in coins:
            if total%c==0:
                d=total/c-1
                values[c][0]=int(d)
                total-=d*c
            else:
                flag=1
                d=math.ceil(total/c)
                values[c][0]=int(d)
                break
        if flag==0:
            print("NO")
        else:
            print("YES",end=" ")
            x=list(values.values())
            for h in x:
                print(h[0],end=" ")
"[email protected]"
] | |
eda051d72d323b88e5d07e61bdabdbd16c2948e5 | d6a3186af0aaa86b3936f1d98730b7120918b962 | /testing_practice/tests_django/car_v2.py | 91228379ab91290fe1f4b03df8524ddd44bd8be1 | [] | no_license | kranthy09/testing | edd6376733723ef58a8a5ecece31cbaf030ca45d | ecdd5ce3b3688b42181d5ccb74003ed97e79fbc9 | refs/heads/master | 2022-07-02T23:58:09.308746 | 2020-05-05T16:58:45 | 2020-05-05T16:58:45 | 261,354,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,592 | py | class Car:
    def __init__(self,max_speed, acceleration, tyre_friction, color = None):
        """Create a car; raises ValueError if any physical attribute is <= 0."""
        self._color = color
        # Validate before storing; is_valid_data raises on non-positive values.
        self.is_valid_data("max_speed", max_speed)
        self.is_valid_data("acceleration", acceleration)
        self.is_valid_data("tyre_friction", tyre_friction)
        self._acceleration = acceleration
        self._tyre_friction = tyre_friction
        self._max_speed = max_speed
        self._is_engine_started = False
        self._current_speed = 0
    def start_engine(self):
        # NOTE(review): the message reads oddly, but it is user-facing output
        # that tests may match exactly — left unchanged.
        if self._is_engine_started:
            print("Stop the engine to start_engine")
        else:
            self._is_engine_started = True
    def accelerate(self):
        """Increase speed by one acceleration step, capped at max_speed."""
        if self._is_engine_started:
            self._current_speed += self._acceleration
            if self._current_speed > self._max_speed:
                self._current_speed = self._max_speed
        else:
            print("Start the engine to accelerate")
    def apply_brakes(self):
        """Decrease speed by tyre_friction, floored at 0."""
        if self._is_engine_started:
            self._current_speed -= self._tyre_friction
            if self._current_speed <= 0:
                self._current_speed = 0
        else:
            # NOTE(review): "apply_breaks" is misspelled in this user-facing
            # message; left unchanged since callers/tests may match it exactly.
            print("Start the engine to apply_breaks")
    def sound_horn(self):
        if self._is_engine_started:
            print("Beep Beep")
        else:
            print("Start the engine to sound_horn")
    def stop_engine(self):
        if self._is_engine_started:
            self._is_engine_started = False
        else:
            print("Start the engine to stop_engine")
    # Read-only accessors for the private attributes.
    @property
    def max_speed(self):
        return self._max_speed
    @property
    def acceleration(self):
        return self._acceleration
    @property
    def tyre_friction(self):
        return self._tyre_friction
    @property
    def color(self):
        return self._color
    @property
    def is_engine_started(self):
        return self._is_engine_started
    @property
    def current_speed(self):
        return self._current_speed
    @staticmethod
    def is_valid_data(args, value):
        """Return True if value > 0; otherwise raise ValueError naming the attribute."""
        if value > 0:
            return True
        else:
            raise ValueError(f"Invalid value for {args}")
class Truck(Car):
    """A cargo-carrying vehicle with a bounded load capacity."""
    def __init__(self, max_speed, acceleration, tyre_friction, max_cargo_weight, color=None):
        super().__init__(max_speed, acceleration, tyre_friction, color)
        self.is_valid_data("max_cargo_weight", max_cargo_weight)
        self._max_cargo_weight = max_cargo_weight
        self._weight_in_cargo = 0
    def sound_horn(self):
        """Trucks honk instead of beeping; requires a running engine."""
        if not self._is_engine_started:
            print("Start the engine to sound_horn")
        else:
            print("Honk Honk")
    def load(self, cargo_weight):
        """Add cargo while stationary, rejecting loads past capacity."""
        self.is_valid_data("cargo_weight", cargo_weight)
        if self._current_speed:
            print("Cannot load cargo during motion")
            return
        prospective = self._weight_in_cargo + cargo_weight
        if prospective > self._max_cargo_weight:
            print(f"Cannot load cargo more than max limit: {self._max_cargo_weight}")
        else:
            self._weight_in_cargo = prospective
    def unload(self, cargo_weight):
        """Remove cargo while stationary, never dropping below zero."""
        self.is_valid_data("cargo_weight", cargo_weight)
        if self._current_speed:
            print("Cannot unload cargo during motion")
            return
        prospective = self._weight_in_cargo - cargo_weight
        if prospective < 0:
            print(f"Cannot unload cargo less than min limit: {0}")
        else:
            self._weight_in_cargo = prospective
    @property
    def max_cargo_weight(self):
        # Hard upper bound on the cargo weight.
        return self._max_cargo_weight
    @property
    def weight_in_cargo(self):
        # Currently loaded weight (0 .. max_cargo_weight).
        return self._weight_in_cargo
class RaceCar(Car):
    """A car that banks nitro while braking at high speed and spends it to boost."""
    def __init__(self, max_speed, acceleration, tyre_friction, color=None):
        super().__init__(max_speed, acceleration, tyre_friction, color)
        self._nitro = 0
    def accelerate(self):
        """Accelerate normally, then add a 30% nitro boost when any is banked."""
        import math
        super().accelerate()
        if not self._nitro:
            return
        boost = math.ceil(self._acceleration * 0.3)
        self._nitro -= 10
        self._current_speed = min(self._current_speed + boost, self._max_speed)
    def apply_brakes(self):
        """Brake; braking from above half of max speed earns 10 nitro."""
        if self._current_speed > 0.5 * self._max_speed:
            self._nitro += 10
        super().apply_brakes()
    def sound_horn(self):
        """Race cars peep before they beep; requires a running engine."""
        if not self._is_engine_started:
            print("Start the engine to sound_horn")
        else:
            print("Peep Peep\nBeep Beep")
    @property
    def nitro(self):
        # Banked nitro units; earned in tens while braking at speed.
        return self._nitro
"[email protected]"
] | |
de7ce52b41660eee7eea8ff7603241674cd09c47 | 9da8754002fa402ad8e6f25659978bd269bbcec8 | /src/622A/cdf_622A.py | 696901db63211acbb043bb8a0098147f0db843e9 | [
"MIT"
] | permissive | kopok2/CodeforcesSolutionsPython | a00f706dbf368ba0846c8ae86d4145b5dd3e1613 | 35bec0dbcff47765b123b5fe60476014376153df | refs/heads/master | 2023-02-02T03:08:22.097651 | 2020-12-17T22:00:50 | 2020-12-17T22:00:50 | 196,035,812 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | import math
class CodeforcesTask622ASolution:
    """Codeforces 622A: value at 1-based position n of 1, 1 2, 1 2 3, 1 2 3 4, ..."""
    def __init__(self):
        self.result = ''
        self.n = 0
    def read_input(self):
        """Read the target position n from stdin."""
        self.n = int(input())
    def process_task(self):
        """Locate position n inside the triangular blocks.

        Uses exact integer arithmetic throughout: the original computed
        (k + k**2) / 2 in floats, which silently loses precision once the
        triangular numbers exceed 2**53 (the problem allows n up to 1e14).
        """
        k = int(math.sqrt(self.n))  # cheap lower-bound seed for the block index
        # Advance to the first block k whose cumulative length covers n.
        while k * (k + 1) // 2 < self.n:
            k += 1
        # The offset of n past the end of block k-1 is the value at that position.
        self.result = str(self.n - (k - 1) * k // 2)
    def get_result(self):
        """Return the answer computed by process_task, as a string."""
        return self.result
# Script entry point: read n from stdin, locate it in the triangular
# sequence 1, 1 2, 1 2 3, ... and print the value found there.
if __name__ == "__main__":
    Solution = CodeforcesTask622ASolution()
    Solution.read_input()
    Solution.process_task()
    print(Solution.get_result())
| [
"[email protected]"
] | |
249ce324bde793fd41492fa2f8d1d0c2ce88c9cd | ed97fb5c71da7ed89235432e3971bb0ef6064f8b | /algorithms/python/290.py | 3c1bbff0685733f3cd42f905b78b0d011cbfcd85 | [
"MIT"
] | permissive | viing937/leetcode | 8241be4f8bc9234a882b98ada2e5d13b0ebcca68 | b07f7ba69f3d2a7e294f915934db302f43c0848f | refs/heads/master | 2023-08-31T18:25:06.443397 | 2023-08-31T15:31:38 | 2023-08-31T15:31:38 | 37,374,931 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | class Solution(object):
def wordPattern(self, pattern, str):
"""
:type pattern: str
:type str: str
:rtype: bool
"""
arr = str.split(' ')
if len(pattern) != len(arr):
return False
hashmap = {}
for i in range(len(pattern)):
if pattern[i] in hashmap.keys() and hashmap[pattern[i]] != arr[i]:
return False
hashmap[pattern[i]] = arr[i]
if hashmap.values().count(arr[i]) > 1:
return False
return True
| [
"[email protected]"
] | |
f5fccd5cf37b249aa0bd6ec0df11050ccceac4ba | 226b1c73a706f4734834196d18305d4d2c873589 | /synlib/descriptions/INVX12.py | d9548d8ad876fcb48facd24d7fd0a2450a47ae9a | [] | no_license | ocakgun/vlsistuff | 43b4b07ae186b8d2360d11c57cd10b861e96bcbe | 776c07f5d0c40fe7d410b5c85e7381017d4dab64 | refs/heads/master | 2022-06-13T14:40:22.641310 | 2020-05-08T11:09:00 | 2020-05-08T11:09:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | Desc = cellDescClass("INVX12")
# Standard-cell description for INVX12, a drive-strength-12 inverter;
# ``Desc`` is the cellDescClass instance created on the preceding line.
# NOTE(review): cellDescClass and CellLib are assumed to be provided by the
# surrounding library framework -- confirm against the generator script.
Desc.properties["cell_leakage_power"] = "3253.878540"
Desc.properties["cell_footprint"] = "inv"
Desc.properties["area"] = "43.243200"
Desc.pinOrder = ['A', 'Y']
# Single combinational timing arc from input A to output Y.
Desc.add_arc("A","Y","combi")
Desc.set_job("inv") # (!A)
Desc.add_param("area",43.243200);
Desc.add_pin("A","input")
Desc.add_pin("Y","output")
Desc.add_pin_func("Y","unknown")
# Register the finished description in the global cell library.
CellLib["INVX12"]=Desc
| [
"[email protected]"
] | |
66cba7b1d697df1b112e0741f078b2d82f7853cf | a0801d0e7325b31f0383fc68517e208680bb36d6 | /ProjectEuler/113.py | 67180d3f1e179379f2c22641ec3d5bb209b71d03 | [] | no_license | conormccauley1999/CompetitiveProgramming | bd649bf04438817c7fa4755df2c2c7727273b073 | a7e188767364be40f625612af3d16182f2d8d4de | refs/heads/master | 2023-05-14T13:19:32.678134 | 2023-05-11T16:07:33 | 2023-05-11T16:07:33 | 179,089,010 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 523 | py | # Problem 113
def cnt(length, inc):
    """Count `length`-digit numbers with monotone digits.

    The two values of ``inc`` cover the two monotone directions; their sum,
    minus the 9 constant-digit numbers counted by both, gives the per-length
    non-bouncy count (Project Euler 113).
    """
    # Allowed transitions from the previous position's digit d.
    successors = (lambda d: range(d, 10)) if inc else (lambda d: range(d, -1, -1))
    # counts[d] = number of valid prefixes of the current length whose last
    # processed digit is d; a number cannot start with 0.
    counts = [0] + [1] * 9
    for _ in range(1, length):
        counts = [sum(counts[nxt] for nxt in successors(d)) for d in range(10)]
    return sum(counts)
print(sum(cnt(i, True) + cnt(i, False) - 9 for i in range(1, 101)))
| [
"[email protected]"
] | |
1856097d55982c651ccb71ab0b51f7fa59839318 | c7386a7a7aafabe9feb8368c42b607cff70dcfe7 | /01_GPLVM_Training/GPy/plotting/matplot_dep/plot_definitions.py | 0e3bc32d398084b4f35cc48526b06f520e9121e8 | [] | no_license | dechamoungsri/Prosody_modeling | 8f3d603af6c54786cb048186bab65cfcd5b441f1 | 7895a032dde1c2c34cf42b7c362ca2b61ada0f37 | refs/heads/master | 2021-08-31T02:32:42.813986 | 2017-12-20T06:42:36 | 2017-12-20T06:42:36 | 114,848,055 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,812 | py | #===============================================================================
# Copyright (c) 2015, Max Zwiessele
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of GPy.plotting.matplot_dep.plot_definitions nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
import numpy as np
from matplotlib import pyplot as plt
from ..abstract_plotting_library import AbstractPlottingLibrary
from .. import Tango
from . import defaults
from matplotlib.colors import LinearSegmentedColormap
from .controllers import ImshowController, ImAnnotateController
import itertools
from .util import legend_ontop
class MatplotlibPlots(AbstractPlottingLibrary):
    """Matplotlib backend for GPy's abstract plotting interface.

    Each method takes a matplotlib ``Axes`` as its canvas and returns the
    artists it created, so generic GPy plotting code never needs to import
    matplotlib directly.
    """
    def __init__(self):
        super(MatplotlibPlots, self).__init__()
        # Seed the backend defaults from the matplotlib-specific defaults module.
        self._defaults = defaults.__dict__
    def figure(self, rows=1, cols=1, gridspec_kwargs=None, tight_layout=True, **kwargs):
        """Create a figure carrying a rows x cols GridSpec for later subplots.

        ``gridspec_kwargs`` previously defaulted to a mutable ``{}``; a None
        sentinel avoids the shared-mutable-default pitfall.
        """
        fig = plt.figure(tight_layout=tight_layout, **kwargs)
        fig.rows = rows
        fig.cols = cols
        fig.gridspec = plt.GridSpec(rows, cols, **(gridspec_kwargs or {}))
        return fig
    def new_canvas(self, figure=None, row=1, col=1, projection='2d', xlabel=None, ylabel=None, zlabel=None, title=None, xlim=None, ylim=None, zlim=None, **kwargs):
        """Return ``(ax, kwargs)``: a fresh (possibly 3d) Axes plus unconsumed kwargs."""
        if projection == '3d':
            # Imported for its side effect of registering the '3d' projection.
            from mpl_toolkits.mplot3d import Axes3D
        elif projection == '2d':
            projection = None
        if 'ax' in kwargs:
            ax = kwargs.pop('ax')
        else:
            if figure is not None:
                fig = figure
            elif 'num' in kwargs and 'figsize' in kwargs:
                fig = self.figure(num=kwargs.pop('num'), figsize=kwargs.pop('figsize'))
            elif 'num' in kwargs:
                fig = self.figure(num=kwargs.pop('num'))
            elif 'figsize' in kwargs:
                fig = self.figure(figsize=kwargs.pop('figsize'))
            else:
                fig = self.figure()
            #if hasattr(fig, 'rows') and hasattr(fig, 'cols'):
            ax = fig.add_subplot(fig.gridspec[row-1, col-1], projection=projection)
        if xlim is not None: ax.set_xlim(xlim)
        if ylim is not None: ax.set_ylim(ylim)
        if xlabel is not None: ax.set_xlabel(xlabel)
        if ylabel is not None: ax.set_ylabel(ylabel)
        if title is not None: ax.set_title(title)
        if projection == '3d':
            if zlim is not None: ax.set_zlim(zlim)
            if zlabel is not None: ax.set_zlabel(zlabel)
        return ax, kwargs
    def add_to_canvas(self, ax, plots, legend=False, title=None, **kwargs):
        """Attach legend/title to ``ax`` and return it.

        ``legend`` may be True (plain legend) or an int >= 1 (number of
        columns for a legend placed on top of the axes).
        """
        #ax.autoscale_view()
        fontdict=dict(family='sans-serif', weight='light', size=9)
        if legend is True:
            ax.legend(*ax.get_legend_handles_labels())
        elif legend >= 1:
            #ax.legend(prop=fontdict)
            legend_ontop(ax, ncol=legend, fontdict=fontdict)
        if title is not None: ax.figure.suptitle(title)
        return ax
    def show_canvas(self, ax):
        """Force a redraw of the figure owning ``ax`` and return that figure."""
        ax.figure.canvas.draw()
        return ax.figure
    def scatter(self, ax, X, Y, Z=None, color=Tango.colorsHex['mediumBlue'], label=None, marker='o', **kwargs):
        """Scatter plot; passing Z switches to a 3d scatter."""
        if Z is not None:
            return ax.scatter(X, Y, c=color, zs=Z, label=label, marker=marker, **kwargs)
        return ax.scatter(X, Y, c=color, label=label, marker=marker, **kwargs)
    def plot(self, ax, X, Y, Z=None, color=None, label=None, **kwargs):
        """Line plot; passing Z switches to a 3d line."""
        if Z is not None:
            return ax.plot(X, Y, color=color, zs=Z, label=label, **kwargs)
        return ax.plot(X, Y, color=color, label=label, **kwargs)
    def plot_axis_lines(self, ax, X, color=Tango.colorsHex['darkRed'], label=None, **kwargs):
        """Mark input locations X as small triangles along the x axis (or the
        lower z plane for 2-column X on a 3d axes)."""
        from matplotlib import transforms
        from matplotlib.path import Path
        if 'marker' not in kwargs:
            # Small upward-pointing triangle marker drawn as an explicit Path.
            kwargs['marker'] = Path([[-.2,0.], [-.2,.5], [0.,1.], [.2,.5], [.2,0.], [-.2,0.]],
                                    [Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
        if 'transform' not in kwargs:
            if X.shape[1] == 1:
                # x in data coordinates, y in axes coordinates (pinned to the axis).
                kwargs['transform'] = transforms.blended_transform_factory(ax.transData, ax.transAxes)
        if X.shape[1] == 2:
            return ax.scatter(X[:,0], X[:,1], ax.get_zlim()[0], c=color, label=label, **kwargs)
        return ax.scatter(X, np.zeros_like(X), c=color, label=label, **kwargs)
    def barplot(self, ax, x, height, width=0.8, bottom=0, color=Tango.colorsHex['mediumBlue'], label=None, **kwargs):
        """Vertical bar plot with centered bars by default."""
        if 'align' not in kwargs:
            kwargs['align'] = 'center'
        # NOTE(review): ``left=`` was removed from Axes.bar in newer
        # matplotlib (renamed to ``x=``) -- confirm the pinned version.
        return ax.bar(left=x, height=height, width=width,
                      bottom=bottom, label=label, color=color,
                      **kwargs)
    def xerrorbar(self, ax, X, Y, error, color=Tango.colorsHex['darkRed'], label=None, **kwargs):
        """Horizontal error bars (no connecting line unless requested)."""
        if not('linestyle' in kwargs or 'ls' in kwargs):
            kwargs['ls'] = 'none'
        #if Z is not None:
        #    return ax.errorbar(X, Y, Z, xerr=error, ecolor=color, label=label, **kwargs)
        return ax.errorbar(X, Y, xerr=error, ecolor=color, label=label, **kwargs)
    def yerrorbar(self, ax, X, Y, error, color=Tango.colorsHex['darkRed'], label=None, **kwargs):
        """Vertical error bars (no connecting line unless requested)."""
        if not('linestyle' in kwargs or 'ls' in kwargs):
            kwargs['ls'] = 'none'
        #if Z is not None:
        #    return ax.errorbar(X, Y, Z, yerr=error, ecolor=color, label=label, **kwargs)
        return ax.errorbar(X, Y, yerr=error, ecolor=color, label=label, **kwargs)
    def imshow(self, ax, X, extent=None, label=None, vmin=None, vmax=None, **imshow_kwargs):
        """Image plot of the 2d array X with a lower-left origin by default."""
        if 'origin' not in imshow_kwargs:
            imshow_kwargs['origin'] = 'lower'
        #xmin, xmax, ymin, ymax = extent
        #xoffset, yoffset = (xmax - xmin) / (2. * X.shape[0]), (ymax - ymin) / (2. * X.shape[1])
        #xmin, xmax, ymin, ymax = extent = xmin-xoffset, xmax+xoffset, ymin-yoffset, ymax+yoffset
        return ax.imshow(X, label=label, extent=extent, vmin=vmin, vmax=vmax, **imshow_kwargs)
    def imshow_interact(self, ax, plot_function, extent, label=None, resolution=None, vmin=None, vmax=None, **imshow_kwargs):
        """Interactive imshow that re-evaluates ``plot_function`` on pan/zoom."""
        if imshow_kwargs is None: imshow_kwargs = {}
        if 'origin' not in imshow_kwargs:
            imshow_kwargs['origin'] = 'lower'
        return ImshowController(ax, plot_function, extent, resolution=resolution, vmin=vmin, vmax=vmax, **imshow_kwargs)
    def annotation_heatmap(self, ax, X, annotation, extent=None, label=None, imshow_kwargs=None, **annotation_kwargs):
        """Imshow of X with one centered text annotation per cell."""
        if imshow_kwargs is None: imshow_kwargs = {}
        if 'origin' not in imshow_kwargs:
            imshow_kwargs['origin'] = 'lower'
        if ('ha' not in annotation_kwargs) and ('horizontalalignment' not in annotation_kwargs):
            annotation_kwargs['ha'] = 'center'
        if ('va' not in annotation_kwargs) and ('verticalalignment' not in annotation_kwargs):
            annotation_kwargs['va'] = 'center'
        imshow = self.imshow(ax, X, extent, label, **imshow_kwargs)
        if extent is None:
            extent = (0, X.shape[0], 0, X.shape[1])
        xmin, xmax, ymin, ymax = extent
        # Half-cell offsets center each annotation within its grid cell.
        xoffset, yoffset = (xmax - xmin) / (2. * X.shape[0]), (ymax - ymin) / (2. * X.shape[1])
        xlin = np.linspace(xmin, xmax, X.shape[0], endpoint=False)
        ylin = np.linspace(ymin, ymax, X.shape[1], endpoint=False)
        annotations = []
        for [i, x], [j, y] in itertools.product(enumerate(xlin), enumerate(ylin)):
            annotations.append(ax.text(x+xoffset, y+yoffset, "{}".format(annotation[j, i]), **annotation_kwargs))
        return imshow, annotations
    def annotation_heatmap_interact(self, ax, plot_function, extent, label=None, resolution=15, imshow_kwargs=None, **annotation_kwargs):
        """Interactive annotated heatmap driven by ``plot_function``."""
        if imshow_kwargs is None: imshow_kwargs = {}
        if 'origin' not in imshow_kwargs:
            imshow_kwargs['origin'] = 'lower'
        return ImAnnotateController(ax, plot_function, extent, resolution=resolution, imshow_kwargs=imshow_kwargs or {}, **annotation_kwargs)
    def contour(self, ax, X, Y, C, levels=20, label=None, **kwargs):
        """Contour plot with ``levels`` evenly spaced levels across C's range."""
        return ax.contour(X, Y, C, levels=np.linspace(C.min(), C.max(), levels), label=label, **kwargs)
    def surface(self, ax, X, Y, Z, color=None, label=None, **kwargs):
        """3d surface plot of Z over the (X, Y) grid."""
        return ax.plot_surface(X, Y, Z, label=label, **kwargs)
    def fill_between(self, ax, X, lower, upper, color=Tango.colorsHex['mediumBlue'], label=None, **kwargs):
        """Filled band between the ``lower`` and ``upper`` curves."""
        return ax.fill_between(X, lower, upper, facecolor=color, label=label, **kwargs)
    def fill_gradient(self, canvas, X, percentiles, color=Tango.colorsHex['mediumBlue'], label=None, **kwargs):
        """Stack translucent bands between consecutive percentile curves,
        fading towards the outer percentiles to suggest a density gradient.
        """
        ax = canvas
        plots = []
        if 'edgecolors' not in kwargs:
            kwargs['edgecolors'] = 'none'
        if 'facecolors' in kwargs:
            color = kwargs.pop('facecolors')
        if 'array' in kwargs:
            array = kwargs.pop('array')
        else:
            # Opacity profile: strongest in the middle band, fading outwards.
            array = 1.-np.abs(np.linspace(-.97, .97, len(percentiles)-1))
        if 'alpha' in kwargs:
            alpha = kwargs.pop('alpha')
        else:
            alpha = .8
        if 'cmap' in kwargs:
            cmap = kwargs.pop('cmap')
        else:
            cmap = LinearSegmentedColormap.from_list('WhToColor', (color, color), N=array.size)
        cmap._init()
        cmap._lut[:-3, -1] = alpha*array
        kwargs['facecolors'] = [cmap(i) for i in np.linspace(0,1,cmap.N)]
        # pop where from kwargs
        where = kwargs.pop('where') if 'where' in kwargs else None
        # pop interpolate, which we actually do not do here!
        if 'interpolate' in kwargs: kwargs.pop('interpolate')
        def pairwise(iterable):
            "s -> (s0,s1), (s1,s2), (s2, s3), ..."
            from itertools import tee
            #try:
            #    from itertools import izip as zip
            #except ImportError:
            #    pass
            a, b = tee(iterable)
            next(b, None)
            return zip(a, b)
        polycol = []
        # Hoisted out of the loop below: the module only needs importing once.
        import matplotlib.mlab as mlab
        for y1, y2 in pairwise(percentiles):
            # Handle united data, such as dates
            ax._process_unit_info(xdata=X, ydata=y1)
            ax._process_unit_info(ydata=y2)
            # Convert the arrays so we can work with them
            from numpy import ma
            x = ma.masked_invalid(ax.convert_xunits(X))
            y1 = ma.masked_invalid(ax.convert_yunits(y1))
            y2 = ma.masked_invalid(ax.convert_yunits(y2))
            if y1.ndim == 0:
                y1 = np.ones_like(x) * y1
            if y2.ndim == 0:
                y2 = np.ones_like(x) * y2
            if where is None:
                # Builtin bool instead of np.bool: the alias was deprecated in
                # NumPy 1.20 and removed in 1.24 (AttributeError since then).
                where = np.ones(len(x), bool)
            else:
                where = np.asarray(where, bool)
            if not (x.shape == y1.shape == y2.shape == where.shape):
                raise ValueError("Argument dimensions are incompatible")
            from functools import reduce
            mask = reduce(ma.mask_or, [ma.getmask(a) for a in (x, y1, y2)])
            if mask is not ma.nomask:
                where &= ~mask
            polys = []
            for ind0, ind1 in mlab.contiguous_regions(where):
                xslice = x[ind0:ind1]
                y1slice = y1[ind0:ind1]
                y2slice = y2[ind0:ind1]
                if not len(xslice):
                    continue
                N = len(xslice)
                # Builtin float instead of the removed np.float alias.
                p = np.zeros((2 * N + 2, 2), float)
                # the purpose of the next two lines is for when y2 is a
                # scalar like 0 and we want the fill to go all the way
                # down to 0 even if none of the y1 sample points do
                start = xslice[0], y2slice[0]
                end = xslice[-1], y2slice[-1]
                p[0] = start
                p[N + 1] = end
                p[1:N + 1, 0] = xslice
                p[1:N + 1, 1] = y1slice
                p[N + 2:, 0] = xslice[::-1]
                p[N + 2:, 1] = y2slice[::-1]
                polys.append(p)
            polycol.extend(polys)
        from matplotlib.collections import PolyCollection
        if 'zorder' not in kwargs:
            kwargs['zorder'] = 0
        plots.append(PolyCollection(polycol, **kwargs))
        ax.add_collection(plots[-1], autolim=True)
        ax.autoscale_view()
        return plots
| [
"[email protected]"
] | |
acf05d3118136eda97208eb88dd7e8c8055cb3e6 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /Hs7YDjZALCEPRPD6Z_23.py | df3fe0713ed74b8a036c99152bfd2695f2129cfd | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py |
count_uppercase=lambda a: sum([y.isupper() for x in a for y in x])
| [
"[email protected]"
] | |
1852e9dffcb63b063f786faeffaec2ee72e25153 | 390d19c3159133d8c688396cb11b4ed3f8178d09 | /BaekJoon/단계별로 풀어보기/Stack & Queue/1874_스택 수열.py | a3b4cc9eb6adb72cb147d6257fb3a6768140f325 | [] | no_license | JJayeee/CodingPractice | adba64cbd1d030b13a877f0b2e5ccc1269cb2e11 | 60f8dce48c04850b9b265a9a31f49eb6d9fc13c8 | refs/heads/master | 2021-08-16T17:14:01.161390 | 2021-07-16T00:42:18 | 2021-07-16T00:42:18 | 226,757,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 324 | py | n = int(input())
# BOJ 1874 (stack sequence): n was read on the preceding line.  Decide
# whether `arr` can be produced by pushing 1..n in order and popping a
# stack, and print the '+'/'-' operation sequence if it can.
arr = [int(input()) for _ in range(n)]
cnt = 0                # index of the next sequence element to match
stack = []
path = []              # record of '+' (push) / '-' (pop) operations
for i in range(1, n+1):
    stack.append(i)    # always push the next number first
    path.append('+')
    while stack and stack[-1] == arr[cnt]:
        stack.pop()    # pop greedily while the stack top matches
        path.append('-')
        cnt += 1
if stack:
    print('NO')        # unmatched numbers remain -> sequence impossible
else:
    for p in path:
        print(p)
| [
"[email protected]"
] | |
b755ac44206a9c9dda1a389710cdf3a9fba8ec30 | 9cd180fc7594eb018c41f0bf0b54548741fd33ba | /sdk/python/pulumi_azure_nextgen/securityinsights/v20190101preview/get_watchlist.py | c72fb1c220e6a8ad7528ffa0f04b1c3a81368bad | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | MisinformedDNA/pulumi-azure-nextgen | c71971359450d03f13a53645171f621e200fe82d | f0022686b655c2b0744a9f47915aadaa183eed3b | refs/heads/master | 2022-12-17T22:27:37.916546 | 2020-09-28T16:03:59 | 2020-09-28T16:03:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,021 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
# Public API of this generated module.
__all__ = [
    'GetWatchlistResult',
    'AwaitableGetWatchlistResult',
    'get_watchlist',
]
@pulumi.output_type
class GetWatchlistResult:
    """
    Represents a Watchlist in Azure Security Insights.
    """
    def __init__(__self__, created_by=None, created_time_utc=None, default_duration=None, description=None, display_name=None, etag=None, labels=None, last_updated_time_utc=None, name=None, notes=None, provider=None, source=None, tenant_id=None, type=None, updated_by=None, watchlist_items=None, watchlist_type=None, workspace_id=None):
        """Type-check each raw invoke-result field and store it via pulumi.set
        so the @pulumi.output_type machinery can expose it through the
        matching @property below (generated code)."""
        if created_by and not isinstance(created_by, dict):
            raise TypeError("Expected argument 'created_by' to be a dict")
        pulumi.set(__self__, "created_by", created_by)
        if created_time_utc and not isinstance(created_time_utc, str):
            raise TypeError("Expected argument 'created_time_utc' to be a str")
        pulumi.set(__self__, "created_time_utc", created_time_utc)
        if default_duration and not isinstance(default_duration, str):
            raise TypeError("Expected argument 'default_duration' to be a str")
        pulumi.set(__self__, "default_duration", default_duration)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if labels and not isinstance(labels, list):
            raise TypeError("Expected argument 'labels' to be a list")
        pulumi.set(__self__, "labels", labels)
        if last_updated_time_utc and not isinstance(last_updated_time_utc, str):
            raise TypeError("Expected argument 'last_updated_time_utc' to be a str")
        pulumi.set(__self__, "last_updated_time_utc", last_updated_time_utc)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if notes and not isinstance(notes, str):
            raise TypeError("Expected argument 'notes' to be a str")
        pulumi.set(__self__, "notes", notes)
        if provider and not isinstance(provider, str):
            raise TypeError("Expected argument 'provider' to be a str")
        pulumi.set(__self__, "provider", provider)
        if source and not isinstance(source, str):
            raise TypeError("Expected argument 'source' to be a str")
        pulumi.set(__self__, "source", source)
        if tenant_id and not isinstance(tenant_id, str):
            raise TypeError("Expected argument 'tenant_id' to be a str")
        pulumi.set(__self__, "tenant_id", tenant_id)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if updated_by and not isinstance(updated_by, dict):
            raise TypeError("Expected argument 'updated_by' to be a dict")
        pulumi.set(__self__, "updated_by", updated_by)
        if watchlist_items and not isinstance(watchlist_items, list):
            raise TypeError("Expected argument 'watchlist_items' to be a list")
        pulumi.set(__self__, "watchlist_items", watchlist_items)
        if watchlist_type and not isinstance(watchlist_type, str):
            raise TypeError("Expected argument 'watchlist_type' to be a str")
        pulumi.set(__self__, "watchlist_type", watchlist_type)
        if workspace_id and not isinstance(workspace_id, str):
            raise TypeError("Expected argument 'workspace_id' to be a str")
        pulumi.set(__self__, "workspace_id", workspace_id)
    @property
    @pulumi.getter(name="createdBy")
    def created_by(self) -> Optional['outputs.UserInfoResponse']:
        """
        Describes a user that created the watchlist
        """
        return pulumi.get(self, "created_by")
    @property
    @pulumi.getter(name="createdTimeUtc")
    def created_time_utc(self) -> Optional[str]:
        """
        The time the watchlist was created
        """
        return pulumi.get(self, "created_time_utc")
    @property
    @pulumi.getter(name="defaultDuration")
    def default_duration(self) -> Optional[str]:
        """
        The default duration of a watchlist (in ISO 8601 duration format)
        """
        return pulumi.get(self, "default_duration")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        A description of the watchlist
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        The display name of the watchlist
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter
    def etag(self) -> Optional[str]:
        """
        Etag of the azure resource
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def labels(self) -> Optional[Sequence[str]]:
        """
        List of labels relevant to this watchlist
        """
        return pulumi.get(self, "labels")
    @property
    @pulumi.getter(name="lastUpdatedTimeUtc")
    def last_updated_time_utc(self) -> Optional[str]:
        """
        The last time the watchlist was updated
        """
        return pulumi.get(self, "last_updated_time_utc")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Azure resource name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def notes(self) -> Optional[str]:
        """
        The notes of the watchlist
        """
        return pulumi.get(self, "notes")
    @property
    @pulumi.getter
    def provider(self) -> str:
        """
        The provider of the watchlist
        """
        return pulumi.get(self, "provider")
    @property
    @pulumi.getter
    def source(self) -> str:
        """
        The source of the watchlist
        """
        return pulumi.get(self, "source")
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[str]:
        """
        The tenantId where the watchlist belongs to.
        """
        return pulumi.get(self, "tenant_id")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Azure resource type
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="updatedBy")
    def updated_by(self) -> Optional['outputs.UserInfoResponse']:
        """
        Describes a user that updated the watchlist
        """
        return pulumi.get(self, "updated_by")
    @property
    @pulumi.getter(name="watchlistItems")
    def watchlist_items(self) -> Optional[Sequence['outputs.WatchlistItemResponse']]:
        """
        List of watchlist items.
        """
        return pulumi.get(self, "watchlist_items")
    @property
    @pulumi.getter(name="watchlistType")
    def watchlist_type(self) -> Optional[str]:
        """
        The type of the watchlist
        """
        return pulumi.get(self, "watchlist_type")
    @property
    @pulumi.getter(name="workspaceId")
    def workspace_id(self) -> Optional[str]:
        """
        The workspaceId where the watchlist belongs to.
        """
        return pulumi.get(self, "workspace_id")
class AwaitableGetWatchlistResult(GetWatchlistResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        """Generated shim making the result awaitable for pulumi's async
        runtime; it resolves immediately (the ``if False: yield`` exists
        only to mark this function as a generator)."""
        if False:
            yield self
        return GetWatchlistResult(
            created_by=self.created_by,
            created_time_utc=self.created_time_utc,
            default_duration=self.default_duration,
            description=self.description,
            display_name=self.display_name,
            etag=self.etag,
            labels=self.labels,
            last_updated_time_utc=self.last_updated_time_utc,
            name=self.name,
            notes=self.notes,
            provider=self.provider,
            source=self.source,
            tenant_id=self.tenant_id,
            type=self.type,
            updated_by=self.updated_by,
            watchlist_items=self.watchlist_items,
            watchlist_type=self.watchlist_type,
            workspace_id=self.workspace_id)
def get_watchlist(operational_insights_resource_provider: Optional[str] = None,
                  resource_group_name: Optional[str] = None,
                  watchlist_alias: Optional[str] = None,
                  workspace_name: Optional[str] = None,
                  opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWatchlistResult:
    """
    Use this data source to access information about an existing resource.
    :param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
    :param str resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
    :param str watchlist_alias: Watchlist Alias
    :param str workspace_name: The name of the workspace.
    :return: An awaitable wrapper around the watchlist's fields.
    """
    # Marshal the arguments into the wire-format (camelCase) names expected
    # by the Azure NextGen provider, then perform a synchronous invoke.
    __args__ = dict()
    __args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
    __args__['resourceGroupName'] = resource_group_name
    __args__['watchlistAlias'] = watchlist_alias
    __args__['workspaceName'] = workspace_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:securityinsights/v20190101preview:getWatchlist', __args__, opts=opts, typ=GetWatchlistResult).value
    return AwaitableGetWatchlistResult(
        created_by=__ret__.created_by,
        created_time_utc=__ret__.created_time_utc,
        default_duration=__ret__.default_duration,
        description=__ret__.description,
        display_name=__ret__.display_name,
        etag=__ret__.etag,
        labels=__ret__.labels,
        last_updated_time_utc=__ret__.last_updated_time_utc,
        name=__ret__.name,
        notes=__ret__.notes,
        provider=__ret__.provider,
        source=__ret__.source,
        tenant_id=__ret__.tenant_id,
        type=__ret__.type,
        updated_by=__ret__.updated_by,
        watchlist_items=__ret__.watchlist_items,
        watchlist_type=__ret__.watchlist_type,
        workspace_id=__ret__.workspace_id)
| [
"[email protected]"
] | |
3f80baeaee8814fce5876d1fd05427e8e5961cfc | 281a10505f8044dbed73f11ed731bd0fbe23e0b5 | /expenseApp/forms.py | a27cb1ba630beb7e285f40eca794838affed173f | [
"Apache-2.0"
] | permissive | cs-fullstack-fall-2018/project3-django-jpark1914 | 7c6f57ab5f8055c11ac5b9d3c8bf0aa5057008d7 | 53bca13243d7e50263ec25b2fb8a299a8bbada1c | refs/heads/master | 2020-04-02T00:59:33.254360 | 2018-10-29T04:58:42 | 2018-10-29T04:58:42 | 153,831,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | from .models import AccountModel, UserSetup, TransactionModel
from django import forms
class AccountForm(forms.ModelForm):
    """Create/edit an account; the balance-tracking fields are managed by
    code, so they are rendered as hidden inputs."""
    class Meta:
        model = AccountModel
        fields = ['account_name','balance','savings','deposit', 'expense']
        # Consistency fix: 'balance' was declared with the HiddenInput CLASS
        # while the other fields used instances.  Django accepts both forms,
        # but mixing them is confusing -- instantiate all of them uniformly.
        widgets = {'balance': forms.HiddenInput(), 'savings': forms.HiddenInput(), 'deposit': forms.HiddenInput(), 'expense': forms.HiddenInput()}
class DepositForm(forms.ModelForm):
    """Form for recording a deposit; the paired expense field stays hidden."""
    class Meta:
        model = AccountModel
        fields = ['deposit','expense']
        widgets = {'expense': forms.HiddenInput()}
class ExpenseForm(forms.ModelForm):
    """Form exposing only the expense amount of an account."""
    class Meta:
        model = AccountModel
        fields = ['expense']
class UserForm(forms.ModelForm):
    """Sign-up form; renders the password field as a masked input."""
    # NOTE(review): UserSetup.password looks like a plain model field --
    # confirm the value is hashed before being saved.
    class Meta:
        model = UserSetup
        fields = ['name', 'email', 'password']
        widgets = {
            'password': forms.PasswordInput(),
        }
| [
"[email protected]"
] | |
e95f81c2edaab21bbe2f5f9e621eae62b13fdc86 | 9f1039075cc611198a988034429afed6ec6d7408 | /tensorflow-stubs/_api/v1/keras/datasets/cifar10/__init__.pyi | f00ca8b56bea50c2b28c37c450e8e377366f9b62 | [] | no_license | matangover/tensorflow-stubs | 9422fbb1cb3a3638958d621461291c315f9c6ec2 | 664bd995ef24f05ba2b3867d979d23ee845cb652 | refs/heads/master | 2020-05-23T12:03:40.996675 | 2019-05-15T06:21:43 | 2019-05-15T06:21:43 | 186,748,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | pyi | # Stubs for tensorflow._api.v1.keras.datasets.cifar10 (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from tensorflow.python.keras.datasets.cifar10 import load_data as load_data
| [
"[email protected]"
] | |
5f6125e1653f98b3cd435fbb27e389a8d9bf676d | c033b4bc0b80938192712a336a941f921b056106 | /PythonK/FirstPython/Actual/Chapter7/venv/Scripts/pip3.6-script.py | 8c4ea0adf094c7250d2fa2c3df53aa8e4c53ad4f | [] | no_license | BigBrou/Laboratory | 4e9a92cbccf2379c8fedb5abbb5dc896a477d018 | 2ec4ece91bf7d5774d7028c3ea620a36b279bfbb | refs/heads/master | 2020-03-24T16:48:25.017498 | 2018-12-27T09:04:13 | 2018-12-27T09:04:34 | 142,837,539 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | #!C:\Laboratory\PythonK\Actual\Chapter7\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.6'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
# Auto-generated setuptools console-script wrapper for the 'pip3.6' command.
if __name__ == '__main__':
    # Strip the '-script.py(w)'/'.exe' suffix so argv[0] matches the command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Resolve pip 10.0.1's 'pip3.6' console entry point, run it, and exit
    # with its return code.
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.6')()
    )
| [
"[email protected]"
] | |
615b0e3ce001a61de185b62a6465cb046a30fcc6 | 416baad7e83075c1c59f1fa920d9a18cd3351f16 | /tensor2tensor/models/__init__.py | 58badcb4e011ce3350c1cf88d2bd7a49cbdc8d59 | [
"Apache-2.0"
] | permissive | jmlingeman/tensor2tensor | aa759fc9101149284b0b6f2f7a03e9759b7214a9 | 9921c4a816aafb76964a960541045ce4d730b3c9 | refs/heads/master | 2021-04-29T01:52:38.283004 | 2018-04-23T20:04:12 | 2018-04-23T20:04:12 | 121,812,413 | 0 | 0 | null | 2018-02-16T23:39:11 | 2018-02-16T23:39:11 | null | UTF-8 | Python | false | false | 1,986 | py | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models defined in T2T. Imports here force registration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
# pylint: disable=unused-import
from tensor2tensor.layers import modalities
from tensor2tensor.models import basic
from tensor2tensor.models import bytenet
from tensor2tensor.models import lstm
from tensor2tensor.models import neural_gpu
from tensor2tensor.models import resnet
from tensor2tensor.models import revnet
from tensor2tensor.models import shake_shake
from tensor2tensor.models import slicenet
from tensor2tensor.models import transformer
from tensor2tensor.models import vanilla_gan
from tensor2tensor.models import xception
from tensor2tensor.models.research import aligned
from tensor2tensor.models.research import attention_lm
from tensor2tensor.models.research import attention_lm_moe
from tensor2tensor.models.research import cycle_gan
from tensor2tensor.models.research import gene_expression
from tensor2tensor.models.research import multimodel
from tensor2tensor.models.research import super_lm
from tensor2tensor.models.research import transformer_moe
from tensor2tensor.models.research import transformer_revnet
from tensor2tensor.models.research import transformer_sketch
from tensor2tensor.models.research import transformer_vae
# pylint: enable=unused-import
| [
"[email protected]"
] | |
06f2d36fbb85bae7a5b684e93a760e88ee7b328d | de392462a549be77e5b3372fbd9ea6d7556f0282 | /accounts/migrations/0198_auto_20210812_1748.py | 26547e06f8a5011da9233e88c29c998430ef3246 | [] | no_license | amutebe/AMMS_General | 2830770b276e995eca97e37f50a7c51f482b2405 | 57b9b85ea2bdd272b44c59f222da8202d3173382 | refs/heads/main | 2023-07-17T02:06:36.862081 | 2021-08-28T19:07:17 | 2021-08-28T19:07:17 | 400,064,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 729 | py | # Generated by Django 3.2.3 on 2021-08-12 14:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefine the char primary keys of the
    'car' and 'employees' models (new defaults, lengths and labels).

    NOTE(review): the defaults ('BCL12082021142', 'BCL11') look generated
    from the run date/sequence -- verify before reusing this migration.
    """
    dependencies = [
        ('accounts', '0197_auto_20210812_1740'),
    ]
    operations = [
        # Corrective-action number becomes the 'car' primary key.
        migrations.AlterField(
            model_name='car',
            name='car_number',
            field=models.CharField(default='BCL12082021142', max_length=200, primary_key=True, serialize=False, verbose_name='Corrective action no.:'),
        ),
        # Employee ID stays the 'employees' primary key with a new default.
        migrations.AlterField(
            model_name='employees',
            name='employeeID',
            field=models.CharField(default='BCL11', max_length=20, primary_key=True, serialize=False, verbose_name='Employee ID'),
        ),
    ]
| [
"[email protected]"
] | |
281090431d5371b25cb5f61faa42b5ded0fee6a8 | b2ba670818623f8ab18162382f7394baed97b7cb | /test-data/AndroidSlicer/Carnote/DD/32.py | 4201f082c91275a5dd9fd6d0c08a4ab34f2b21ff | [
"MIT"
] | permissive | hsumyatwin/ESDroid-artifact | 012c26c40537a79b255da033e7b36d78086b743a | bff082c4daeeed62ceda3d715c07643203a0b44b | refs/heads/main | 2023-04-11T19:17:33.711133 | 2022-09-30T13:40:23 | 2022-09-30T13:40:23 | 303,378,286 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,140 | py | #start monkey test seedNo 0
import os;
from subprocess import Popen
from subprocess import PIPE
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice, MonkeyImage
from com.android.monkeyrunner.MonkeyDevice import takeSnapshot
from com.android.monkeyrunner.easy import EasyMonkeyDevice
from com.android.monkeyrunner.easy import By
from com.android.chimpchat.hierarchyviewer import HierarchyViewer
from com.android.monkeyrunner import MonkeyView
import random
import sys
import subprocess
from sys import exit
from random import randint
# Connect to the first available device/emulator via monkeyrunner (Jython).
device = MonkeyRunner.waitForConnection()
package = 'com.spisoft.quicknote'
activity ='com.spisoft.quicknote.MainActivity'
# Component string in the 'package/activity' form expected by startActivity.
runComponent = package+'/'+activity
device.startActivity(component=runComponent)
MonkeyRunner.sleep(0.5)
MonkeyRunner.sleep(0.5)
# Replayed tap sequence (absolute x,y coordinates recorded from a monkey run);
# each tap is followed by a short pause so the UI can settle.
device.touch(982,153, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(699,932, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(923,1695, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(963,1730, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(62,124, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(467,678, 'DOWN_AND_UP')
| [
"[email protected]"
] | |
ff38640ad5a4f55a1f83c27af699d4597b525d3d | 70f41a06d733e680af3bb1f00d8ff33574f4f4bb | /src/fh_tools/language_test/base_test/bisect_demo/grades_demo.py | 3c04e262f62fafdf04b345d787e0ae2cae0fa7b6 | [
"MIT"
] | permissive | mmmaaaggg/RefUtils | 209f7136acc63c880e60974c347e19adc4c7ac2e | f127658e75b5c52b4db105a22176ee0931ceacae | refs/heads/master | 2021-06-11T16:06:06.245275 | 2021-03-10T05:32:14 | 2021-03-10T05:32:14 | 139,413,962 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | #! /usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author : MG
@Time : 19-8-9 上午10:33
@File : grades_demo.py
@contact : [email protected]
@desc : 通过二分发计算分数等级
"""
import bisect
def grade(score, breakpoints=(60, 70, 80, 90), grades='FDCBA'):
    """Map a numeric score to a letter grade via binary search.

    Args:
        score: numeric score to classify.
        breakpoints: ascending cut-offs between grade bands
            (tuple default instead of the original mutable list).
        grades: one letter per band; len(grades) == len(breakpoints) + 1.

    Returns:
        The letter for the band *score* falls into.  A score equal to a
        breakpoint lands in the higher band, because bisect.bisect is
        bisect_right.
    """
    # bisect returns how many breakpoints are <= score, which is exactly
    # the index of the matching grade letter.
    i = bisect.bisect(breakpoints, score)
    return grades[i]
print([grade(score) for score in [33, 99, 77, 70, 89, 90, 100]])
if __name__ == "__main__":
    pass
| [
"[email protected]"
] | |
2155f6826ed7b9607bfc77f9e46bc7b6daf29ed5 | 95d64b1dea3db73e85562aa2665c3a696370a679 | /code/information-gain.py | da46714604a93ed9d59f09d41c3b92d03c5e7812 | [] | no_license | Smolky/exist-2021 | 7481e36fb3f44263c1a2190890fc6ac894c4fac5 | 2d51a01a829cb9e9b44eca5b9eefb06cb62162c8 | refs/heads/main | 2023-05-30T14:21:56.913663 | 2021-06-16T08:10:36 | 2021-06-16T08:10:36 | 364,022,851 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,758 | py | """
Information Gain per class
This class calculates the Information Gain (Mutual Info) of a dataset
and uses it to select the most discriminatory features
@author José Antonio García-Díaz <[email protected]>
@author Rafael Valencia-Garcia <[email protected]>
"""
import os
import sys
import argparse
import pandas as pd
import numpy as np
import pickle
from pathlib import Path
from sklearn import preprocessing
from sklearn.feature_selection import mutual_info_classif
from sklearn.feature_selection import mutual_info_regression
from dlsdatasets.DatasetResolver import DatasetResolver
from utils.Parser import DefaultParser
from features.FeatureResolver import FeatureResolver
from sklearn.pipeline import Pipeline, FeatureUnion
from features.TokenizerTransformer import TokenizerTransformer
def main ():
    """Compute the Mutual Information (Information Gain) of the cached
    linguistic features (LF) against the training labels and print the most
    and least discriminatory features, globally and averaged per label.

    Raises:
        Exception: when the cached LF feature file is missing.
    """
    # @var parser DefaultParser
    parser = DefaultParser (description = 'Calculates the Information Gain (Mutual Info) per class and obtains the best LF')
    # @var args Get arguments
    args = parser.parse_args ()
    # @var dataset_resolver DatasetResolver
    dataset_resolver = DatasetResolver ()
    # @var dataset Dataset This is the custom dataset for evaluation purposes
    dataset = dataset_resolver.get (args.dataset, args.corpus, args.task, False)
    dataset.filename = dataset.get_working_dir (args.task, 'dataset.csv')
    # @var df Ensure if we already had the data processed
    df = dataset.get ()
    # @var task_type String
    task_type = dataset.get_task_type ()
    # @var df_train DataFrame Training split; rows without a label are dropped
    df_train = dataset.get_split (df, 'train')
    df_train = df_train[df_train['label'].notna()]
    # @var feature_resolver FeatureResolver
    feature_resolver = FeatureResolver (dataset)
    # @var feature_file String
    feature_file = feature_resolver.get_suggested_cache_file ('lf', task_type)
    # @var features_cache String The file where the features are stored
    features_cache = dataset.get_working_dir (args.task, feature_file)
    # The cached LF features are required; abort when missing.
    # (fix: removed the unreachable sys.exit () that followed the raise)
    if not Path (features_cache).is_file ():
        raise Exception ('features lf file are not avaiable')
    # @var transformer Transformer
    transformer = feature_resolver.get ('lf', cache_file = features_cache)
    # @var features_df DataFrame
    features_df = transformer.transform ([])
    # @var linguistic_features List
    linguistic_features = features_df.columns.to_list ()
    # Keep only the training features, aligned with the training row order
    features_df = features_df[features_df.index.isin (df_train.index)].reindex (df_train.index)
    # Attach label
    features_df = features_df.assign (label = df_train['label'])
    # @var unique_labels Series Bind to the label
    unique_labels = dataset.get_available_labels ()
    # @var X DataFrame Every feature column (everything except the label)
    X = features_df.loc[:, features_df.columns != 'label']
    # @var mi ndarray Mutual information per feature, shaped (n_features, 1)
    # (fix: the original `elif 'regression':` tested a truthy string literal,
    # so the regression branch ran for *any* non-classification task type;
    # compare against task_type explicitly instead)
    if 'classification' == task_type:
        mi = mutual_info_classif (X = X, y = df_train['label']).reshape (-1, 1)
    elif 'regression' == task_type:
        mi = mutual_info_regression (X = X, y = df_train['label']).reshape (-1, 1)
    # @var best_features_indexes DataFrame One MI coefficient per LF
    best_features_indexes = pd.DataFrame (mi,
        columns = ['Coefficient'],
        index = linguistic_features
    )
    if 'regression' == task_type:
        print ("by dataset")
        print ("----------")
        best_features_indexes.index = linguistic_features
        print ("top")
        print (best_features_indexes.sort_values (by = 'Coefficient', ascending = False).head (20).to_csv (float_format = '%.5f'))
        print ("worst")
        print (best_features_indexes.sort_values (by = 'Coefficient', ascending = True).head (10).to_csv (float_format = '%.5f'))
    if 'classification' == task_type:
        # @var average_features_per_label List Mean feature vector per label
        average_features_per_label = [features_df.loc[df_train.loc[features_df['label'] == label].index].mean ().to_frame ().T for label in unique_labels]
        # Merge features by label
        features_df_merged = pd \
            .concat (average_features_per_label) \
            .reset_index (drop = True) \
            .assign (label = unique_labels) \
            .set_index ('label') \
            .transpose ()
        # Attach coefficient to the features
        features_df_merged = features_df_merged.assign (Coefficient = best_features_indexes.values)
        print ("by dataset")
        print ("----------")
        best_features_indexes.index = features_df_merged.index
        print ("top")
        print (best_features_indexes.sort_values (by = 'Coefficient', ascending = False).head (10).to_csv (float_format = '%.5f'))
        print ("worst")
        print (best_features_indexes.sort_values (by = 'Coefficient', ascending = True).head (10).to_csv (float_format = '%.5f'))
        # Results merged by label
        print ("by label")
        print ("----------")
        print ("top")
        print (features_df_merged.sort_values (by = 'Coefficient', ascending = False).head (10)[unique_labels].to_csv (float_format = '%.5f'))
        print ("worst")
        print (features_df_merged.sort_values (by = 'Coefficient', ascending = True).head (10)[unique_labels].to_csv (float_format = '%.5f'))
if __name__ == "__main__":
    main ()
| [
"[email protected]"
] | |
9545a1c677720b2cc1d1a08ee3eaaa268a423759 | 390d19c3159133d8c688396cb11b4ed3f8178d09 | /BaekJoon/APS_2019/2669_직사각형 네개의 합집합의 면적.py | 1d2b01330c52119bf983190b61936d5a7dcf040a | [] | no_license | JJayeee/CodingPractice | adba64cbd1d030b13a877f0b2e5ccc1269cb2e11 | 60f8dce48c04850b9b265a9a31f49eb6d9fc13c8 | refs/heads/master | 2021-08-16T17:14:01.161390 | 2021-07-16T00:42:18 | 2021-07-16T00:42:18 | 226,757,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py |
# Union area of four axis-aligned rectangles on a 100x100 grid: collect every
# covered unit cell in a set and report how many distinct cells were covered.
covered = set()
for _ in range(4):
    x1, y1, x2, y2 = map(int, input().split())
    # Half-open ranges: a rectangle (x1, y1)-(x2, y2) covers cells
    # x1 <= x < x2 and y1 <= y < y2, exactly like the original nested loops.
    covered.update(
        (x, y)
        for x in range(x1, x2)
        for y in range(y1, y2)
    )
print(len(covered))
| [
"[email protected]"
] | |
173ca58a434c334f2487c1bf0d994d3395abcd30 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_11371.py | a3211135d7b87d8da37dd74f4338428a4d5ef5ce | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47 | py | # Using python modules over Paramiko (SSH)
RMI
| [
"[email protected]"
] | |
a865e9f10130569267073fa37a1314786a38c6bb | 8efd8bcd3945d88370f6203e92b0376ca6b41c87 | /problems1_100/79_ Word_Search.py | 5374317283275085258b340378ccd5eef61390f0 | [] | no_license | Provinm/leetcode_archive | 732ad1ef5dcdfdde6dd5a33522e86f7e24ae2db5 | 3e72dcaa579f4ae6f587898dd316fce8189b3d6a | refs/heads/master | 2021-09-21T08:03:31.427465 | 2018-08-22T15:58:30 | 2018-08-22T15:58:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,465 | py |
'''
Given a 2D board and a word, find if the word exists in the grid.
The word can be constructed from letters of sequentially adjacent cell, where "adjacent" cells are those horizontally or vertically neighboring. The same letter cell may not be used more than once.
For example,
Given board =
[
['A','B','C','E'],
['S','F','C','S'],
['A','D','E','E']
]
word = "ABCCED", -> returns true,
word = "SEE", -> returns true,
word = "ABCB", -> returns false.
'''
class Solution(object):
    """Word Search (LeetCode 79): find a word along adjacent board cells."""

    def exist(self, board, word):
        """Return True if *word* can be traced through horizontally or
        vertically adjacent cells of *board*, using each cell at most once.

        Fixes the original backtracking: the old code mutated a shared
        visited list with append() but then rebound the local name with
        `tem = tem[:deep]`, so sibling branches saw stale visited state and
        the depth counter drifted.  Here the visited set is added to before
        recursing and removed from on backtrack, keeping it consistent.

        :type board: List[List[str]]
        :type word: str
        :rtype: bool
        """
        if not word:
            return True
        if not board or not board[0]:
            return False
        for row in range(len(board)):
            for col in range(len(board[0])):
                if self._dfs(board, word, row, col, set()):
                    return True
        return False

    def _dfs(self, board, word, row, col, visited):
        """Try to match *word* starting at (row, col); *visited* holds the
        cells already used on the current path."""
        # Everything matched on the way down.
        if not word:
            return True
        # Out of bounds, cell reused, or wrong letter: dead end.
        if not (0 <= row < len(board) and 0 <= col < len(board[0])):
            return False
        if (row, col) in visited or board[row][col] != word[0]:
            return False
        visited.add((row, col))
        # Explore the four orthogonal neighbours with the rest of the word.
        for dr, dc in ((0, -1), (-1, 0), (0, 1), (1, 0)):
            if self._dfs(board, word[1:], row + dr, col + dc, visited):
                return True
        # Backtrack so sibling paths may reuse this cell.
        visited.discard((row, col))
        return False


s = Solution()
board = [["A","B","C","E"],
         ["S","F","E","S"],
         ["A","D","E","E"]]

word = "ABCESEEEFS"
print(s.exist(board, word)) | [
"[email protected]"
] | |
54a075ef2572940304283d2f526de481af678278 | 5154364983b0e44c4af2d41a59cfa8edc923283a | /python_Source/developmentP/deeplearining/pie_chart_01.py | 00eb701184787dad7373c13e41ea294c5459683e | [] | no_license | vasana12/python_python_git | 082f84df30e4b307d223e8970f87a7432a1d80fd | db87e112731fca1fe80fef29d8f180f19b9e7afc | refs/heads/master | 2020-03-30T23:22:30.197307 | 2018-11-24T05:05:51 | 2018-11-24T05:05:51 | 151,698,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | import matplotlib.pyplot as plt
from matplotlib import font_manager, rc
import matplotlib
# Point matplotlib at a Korean-capable Windows font so the hangul slice
# labels render instead of showing missing-glyph boxes.
font_location = "C:/Windows/fonts/malgun.ttf"
font_name = font_manager.FontProperties(fname=font_location).get_name()
matplotlib.rc('font', family=font_name)
# Slice labels (Korean) and their relative sizes; sizes sum to 95, matplotlib
# normalizes them to percentages via autopct below.
labels = '개구리', '돼지', '개', '통나무'
sizes = [15, 30, 40, 10]
colors = ['yellowgreen', 'gold', 'lightskyblue', 'lightcoral']
# Pull the second slice slightly out of the pie for emphasis.
explode = (0, 0.1, 0, 0)
plt.pie(sizes, explode = explode, labels=labels, colors=colors,
    autopct='%2.2f%%', shadow=False, startangle=90)
# Equal aspect ratio keeps the pie circular.
plt.axis('equal')
plt.show() | [
"[email protected]"
] | |
b8f5573ff344929c69dceabf3640aea61ec7232f | bd97064b5ed9f17b11bcd3ac9a1f2c8ea9ffaf82 | /restapi/routers/Utils.py | 8284d48e5f99c5cebcbd87f7d2ecb895771a1912 | [] | no_license | IndominusByte/bhaktirahayu-backend | a33eff6d0a74894934a6643ef3b81af283542ecf | 628d5d5cdbe145696835e32c47f77ca03dc72708 | refs/heads/main | 2023-08-13T16:25:21.241086 | 2021-09-18T18:04:25 | 2021-09-18T18:04:25 | 389,309,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | from fastapi import APIRouter, Depends
from fastapi_jwt_auth import AuthJWT
from schemas.utils.UtilSchema import UtilEncodingImageBase64
from libs.MagicImage import MagicImage
router = APIRouter()
@router.post('/encoding-image-base64',response_model=bytes)
async def encoding_image_base64(util_data: UtilEncodingImageBase64, authorize: AuthJWT = Depends()):
    """Return the base64 encoding of the image at ``util_data.path_file``.

    The request must carry a valid JWT; ``jwt_required()`` rejects the
    request otherwise before the image is touched.
    """
    authorize.jwt_required()
    return MagicImage.convert_image_as_base64(util_data.path_file)
| [
"[email protected]"
] | |
96f9cd408c789bb3d86cc01acb1306bd78afc4ad | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/SSD/.dev_scripts/batch_test.py | e5d863211eaf2182497d8ddfbc0f40ce48f93e99 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 8,393 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
some instructions
1. Fill the models that needs to be checked in the modelzoo_dict
2. Arange the structure of the directory as follows, the script will find the
corresponding config itself:
model_dir/model_family/checkpoints
e.g.: models/faster_rcnn/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth
models/faster_rcnn/faster_rcnn_r101_fpn_1x_coco_20200130-047c8118.pth
3. Excute the batch_test.sh
"""
import argparse
import json
import os
import subprocess
import mmcv
import torch
from mmcv import Config, get_logger
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import (get_dist_info, init_dist, load_checkpoint,
wrap_fp16_model)
from mmdet.apis import multi_gpu_test, single_gpu_test
from mmdet.datasets import (build_dataloader, build_dataset,
replace_ImageToTensor)
from mmdet.models import build_detector
# Expected benchmark metrics keyed by config path; measured results must
# match these within a 0.003 tolerance (checked in main()).
modelzoo_dict = {
    'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py': {
        'bbox': 0.374
    },
    'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py': {
        'bbox': 0.382,
        'segm': 0.347
    },
    'configs/rpn/rpn_r50_fpn_1x_coco.py': {
        'AR@1000': 0.582
    }
}
def parse_args():
    """Parse CLI options for the batch-inference correctness check.

    Returns:
        argparse.Namespace with model_dir, json_out, launcher and local_rank.
    """
    parser = argparse.ArgumentParser(
        description='The script used for checking the correctness \
            of batch inference')
    parser.add_argument('model_dir', help='directory of models')
    parser.add_argument(
        'json_out', help='the output json records test information like mAP')
    parser.add_argument(
        '--launcher',
        choices=['none', 'pytorch', 'slurm', 'mpi'],
        default='none',
        help='job launcher')
    parser.add_argument('--local_rank', type=int, default=0)
    args = parser.parse_args()
    # Distributed launchers read the process rank from the environment.
    if 'LOCAL_RANK' not in os.environ:
        os.environ['LOCAL_RANK'] = str(args.local_rank)
    return args
def check_finish(all_model_dict, result_file):
    """Return the first (alphabetically) config with no record yet.

    *result_file* is a JSON-lines log: one JSON object per line, each
    carrying a 'cfg' key.  When every config in *all_model_dict* already
    appears there, a literal 'finished' marker line is appended instead and
    None is returned.

    NOTE(review): once the 'finished' marker is written, re-reading the file
    with json.loads will fail on that line -- same as the original behaviour.

    Args:
        all_model_dict: mapping of config path -> checkpoint path.
        result_file: path of the JSON-lines record file.

    Returns:
        The next untested config path, or None when all are done.
    """
    # Collect the configs already recorded; a set gives O(1) membership tests
    # (the original kept a list and opened the file 'r+' though it only read).
    with open(result_file, 'r') as f:
        tested_cfgs = {json.loads(line)['cfg'] for line in f}
    for cfg in sorted(all_model_dict.keys()):
        if cfg not in tested_cfgs:
            return cfg
    # Every config covered: mark the whole run as finished.
    with open(result_file, 'a+') as f:
        f.write('finished\n')
def dump_dict(record_dict, json_out):
    """Append *record_dict* as one JSON object plus a newline to *json_out*
    (keeps the file in JSON-lines form)."""
    # dump result json dict
    with open(json_out, 'a+') as f:
        mmcv.dump(record_dict, f, file_format='json')
        f.write('\n')
def main():
    """Run batched inference for the next unchecked model and record it.

    Scans model_dir/<family>/<checkpoint> pairs, derives each matching config
    under configs/, picks the first config without a record in the JSON-lines
    output, evaluates it, and compares the metrics against modelzoo_dict
    within a 0.003 tolerance.  Any failure is logged, recorded with
    'terminate', and then the process is crashed on purpose (see the final
    subprocess call) so a distributed run does not hang.
    """
    args = parse_args()
    # touch the output json if not exist
    with open(args.json_out, 'a+'):
        pass
    # init distributed env first, since logger depends on the dist
    # info.
    if args.launcher == 'none':
        distributed = False
    else:
        distributed = True
        init_dist(args.launcher, backend='nccl')
    rank, world_size = get_dist_info()
    logger = get_logger('root')
    # read info of checkpoints and config
    result_dict = dict()
    for model_family_dir in os.listdir(args.model_dir):
        for model in os.listdir(
                os.path.join(args.model_dir, model_family_dir)):
            # cpt: rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth
            # cfg: rpn_r50_fpn_1x_coco.py
            # [:-18] strips the '_YYYYMMDD-hash' suffix shown above.
            cfg = model.split('.')[0][:-18] + '.py'
            cfg_path = os.path.join('configs', model_family_dir, cfg)
            assert os.path.isfile(
                cfg_path), f'{cfg_path} is not valid config path'
            cpt_path = os.path.join(args.model_dir, model_family_dir, model)
            result_dict[cfg_path] = cpt_path
            assert cfg_path in modelzoo_dict, f'please fill the ' \
                f'performance of cfg: {cfg_path}'
    # Next config without a record; every rank computes the same answer.
    cfg = check_finish(result_dict, args.json_out)
    cpt = result_dict[cfg]
    try:
        cfg_name = cfg
        logger.info(f'evaluate {cfg}')
        record = dict(cfg=cfg, cpt=cpt)
        cfg = Config.fromfile(cfg)
        # cfg.data.test.ann_file = 'data/val_0_10.json'
        # set cudnn_benchmark
        if cfg.get('cudnn_benchmark', False):
            torch.backends.cudnn.benchmark = True
        # Drop pretrained-weight URLs: weights come from the checkpoint.
        cfg.model.pretrained = None
        if cfg.model.get('neck'):
            if isinstance(cfg.model.neck, list):
                for neck_cfg in cfg.model.neck:
                    if neck_cfg.get('rfp_backbone'):
                        if neck_cfg.rfp_backbone.get('pretrained'):
                            neck_cfg.rfp_backbone.pretrained = None
            elif cfg.model.neck.get('rfp_backbone'):
                if cfg.model.neck.rfp_backbone.get('pretrained'):
                    cfg.model.neck.rfp_backbone.pretrained = None
        # in case the test dataset is concatenated
        if isinstance(cfg.data.test, dict):
            cfg.data.test.test_mode = True
        elif isinstance(cfg.data.test, list):
            for ds_cfg in cfg.data.test:
                ds_cfg.test_mode = True
        # build the dataloader
        samples_per_gpu = 2 # hack test with 2 image per gpu
        if samples_per_gpu > 1:
            # Replace 'ImageToTensor' to 'DefaultFormatBundle'
            cfg.data.test.pipeline = replace_ImageToTensor(
                cfg.data.test.pipeline)
        dataset = build_dataset(cfg.data.test)
        data_loader = build_dataloader(
            dataset,
            samples_per_gpu=samples_per_gpu,
            workers_per_gpu=cfg.data.workers_per_gpu,
            dist=distributed,
            shuffle=False)
        # build the model and load checkpoint
        model = build_detector(
            cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)
        fp16_cfg = cfg.get('fp16', None)
        if fp16_cfg is not None:
            wrap_fp16_model(model)
        checkpoint = load_checkpoint(model, cpt, map_location='cpu')
        # old versions did not save class info in checkpoints,
        # this walkaround is for backward compatibility
        if 'CLASSES' in checkpoint['meta']:
            model.CLASSES = checkpoint['meta']['CLASSES']
        else:
            model.CLASSES = dataset.CLASSES
        if not distributed:
            model = MMDataParallel(model, device_ids=[0])
            outputs = single_gpu_test(model, data_loader)
        else:
            model = MMDistributedDataParallel(
                model.cuda(),
                device_ids=[torch.cuda.current_device()],
                broadcast_buffers=False)
            outputs = multi_gpu_test(model, data_loader, 'tmp')
        # Only rank 0 evaluates and compares against the zoo reference.
        if rank == 0:
            ref_mAP_dict = modelzoo_dict[cfg_name]
            metrics = list(ref_mAP_dict.keys())
            metrics = [
                m if m != 'AR@1000' else 'proposal_fast' for m in metrics
            ]
            eval_results = dataset.evaluate(outputs, metrics)
            print(eval_results)
            for metric in metrics:
                if metric == 'proposal_fast':
                    ref_metric = modelzoo_dict[cfg_name]['AR@1000']
                    eval_metric = eval_results['AR@1000']
                else:
                    ref_metric = modelzoo_dict[cfg_name][metric]
                    eval_metric = eval_results[f'{metric}_mAP']
                if abs(ref_metric - eval_metric) > 0.003:
                    record['is_normal'] = False
        dump_dict(record, args.json_out)
        check_finish(result_dict, args.json_out)
    except Exception as e:
        logger.error(f'rank: {rank} test fail with error: {e}')
        record['terminate'] = True
        dump_dict(record, args.json_out)
        check_finish(result_dict, args.json_out)
        # hack there to throw some error to prevent hang out
        subprocess.call('xxx')
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
9166dc2e456f9adbf39f8f327bc6c3f432090aa9 | 976d399110f839ba98dc30e51004297385c56479 | /phone.py | cd062200df0418c8ebf51a5f6d08aaded568f901 | [] | no_license | EileenLL/Phone-App-Practice | 4f9bb0eda10e505c833b79d15e21b5e3525399f6 | 3b83fd7547a4248752f89255f530e19710b91033 | refs/heads/master | 2020-12-05T02:01:28.760728 | 2017-03-02T05:15:49 | 2017-03-02T05:15:49 | 83,637,368 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,924 | py |
class Phone(object):
    """A simple Phone class to keep track of contacts"""
    # NOTE: this class uses Python 2 print statements.
    def __init__(self, number, name, contacts=None):
        # Create a fresh dict per instance when no contacts are supplied,
        # avoiding the shared-mutable-default pitfall.
        self.number = number
        self.name = name
        if contacts:
            self.contacts = contacts
        else:
            self.contacts = {}
    # The __repr__ method gives the class a print format that is meaningful to
    # humans, in this case we chose first and last name
    def __repr__(self):
        return self.name
    def add_contact(self, first_name, last_name, number):
        """Creates new Contact instance and adds the instance to contacts"""
        entry = Contact(first_name, last_name, number)
        # Keyed by the lowercased "first last" string from _get_contact_key.
        self.contacts[self._get_contact_key(first_name, last_name)] = entry
        print self.contacts
        # See the types of each parameter from the function call in contact_ui.py
        pass
    def call(self, first_name, last_name):
        """Call a contact."""
        # Raises KeyError when the contact is unknown (same key computed twice).
        call_name = self._get_contact_key(first_name, last_name)
        contact = self.contacts[self._get_contact_key(first_name, last_name)]
        contact_number = contact.phone_number
        # look up number in dictionary through name key
        print "You are calling " + str(call_name) + " at " + str(contact_number)
        pass
    def text(self, first_name, message):
        """Send a contact a message."""
        # Unimplemented stub.
        pass
    def del_contact(self, first_name, last_name):
        """Remove a contact from phone"""
        # Raises KeyError when the contact is unknown.
        del self.contacts[self._get_contact_key(first_name, last_name)]
        pass
    def _get_contact_key(self, first_name, last_name):
        """This is a private method. It's meant to be used only from within
        this class. We notate private attributes and methods by prepending with
        an underscore.
        """
        return first_name.lower() + " " + last_name.lower()
# class definition for a Contact
class Contact(object):
    """Details of one person in the phone's address book."""

    def __init__(self, first_name, last_name, phone_number,
                 email="", twitter_handle=""):
        # Required identity plus optional online handles.
        self.first_name = first_name
        self.last_name = last_name
        self.phone_number = phone_number
        self.email = email
        self.twitter_handle = twitter_handle

    def __repr__(self):
        # Show contacts by their human-readable name.
        return "{} {}".format(self.first_name, self.last_name)

    def full_name(self):
        """Return 'First Last' for this contact."""
        return " ".join([self.first_name, self.last_name])
# some examples of how to use these two classes
# Make a Phone instace
# tommys_phone = Phone(5555678, "Tommy Tutone's Phone")
# Use the Phone class to add new contacts!
# tommys_phone.add_contact("Jenny", "From That Song", 8675309)
| [
"[email protected]"
] | |
870f4ef3cedddc663fb7b8f310d6b86d04b6de4f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03588/s377192087.py | 33be853cadb78a71d1fcb119905a836c2c06e43c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 160 | py | n = int(input())
# Track the pair whose first value 'a' is the largest seen on stdin; the
# answer printed below is that a plus its paired b.
max_a = 0
st_b = 0
for i in range(n):
    a,b = map(int,input().split())
    # Strict comparison: on ties the earlier pair is kept.
    if max_a < a:
        max_a = a
        st_b = b
print(max_a+st_b) | [
"[email protected]"
] | |
8b183bf27487b5db210287a08477ad86698afa14 | 7d328fa9c4b336f28fa357306aad5483afa2d429 | /BinTreeFromSortedArray.py | 2d3addba12667610b79141ff6049c7dda7f413fa | [] | no_license | ktyagi12/LeetCode | 30be050f1e2fcd16f73aa38143727857cc943536 | 64e68f854b327ea70dd1834de25e756d64957514 | refs/heads/master | 2021-07-01T21:24:26.765487 | 2021-05-09T11:42:50 | 2021-05-09T11:42:50 | 230,497,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 600 | py | #Problem available at: https://leetcode.com/problems/convert-sorted-array-to-binary-search-tree/submissions/
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    """Build a height-balanced BST from a sorted array (LeetCode 108)."""

    def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
        """Return the root of a height-balanced BST holding *nums*.

        Recurses on half-open index ranges instead of slicing, always
        taking the middle element as the subtree root so the two halves
        differ in size by at most one.
        """
        def build(lo, hi):
            # Empty range [lo, hi) yields no node.
            if lo >= hi:
                return None
            mid = (lo + hi) // 2
            node = TreeNode(nums[mid])
            node.left = build(lo, mid)
            node.right = build(mid + 1, hi)
            return node
        return build(0, len(nums))
| [
"[email protected]"
] | |
15225b8ed699b8710acd02ca79f4d765e1fdcdbf | 150af06564fbd615479d67385e39b491d55a2ac2 | /examples/aio.py | c590d077ca02df6b350e0b14348466c5b12f2d8d | [
"MIT"
] | permissive | colanconnon/graphql-ws | 3d340abe167a7202cca858fe86d829dd700dc99a | 3df53014dc60762007e2669d45135fb0f574e759 | refs/heads/master | 2021-05-07T17:36:38.551202 | 2017-10-25T21:16:27 | 2017-10-25T21:16:27 | 108,750,259 | 0 | 0 | null | 2017-10-29T16:21:07 | 2017-10-29T16:21:07 | null | UTF-8 | Python | false | false | 1,645 | py | from aiohttp import web, WSMsgType
from template import render_graphiql
from schema import schema
from graphql import format_error
import json
from graphql_ws import WebSocketSubscriptionServer
async def graphql_view(request):
    """Execute the GraphQL query from the JSON request body and return a
    {'data': ..., 'errors': [...]} payload as application/json."""
    payload = await request.json()
    response = await schema.execute(payload.get('query', ''), return_promise=True)
    data = {}
    if response.errors:
        # Serialize execution errors into the standard GraphQL error shape.
        data['errors'] = [format_error(e) for e in response.errors]
    if response.data:
        data['data'] = response.data
    jsondata = json.dumps(data,)
    return web.Response(text=jsondata, headers={'Content-Type': 'application/json'})
async def graphiql_view(request):
    """Serve the GraphiQL in-browser IDE page."""
    return web.Response(text=render_graphiql(), headers={'Content-Type': 'text/html'})
# One server instance shared by every websocket connection.
subscription_server = WebSocketSubscriptionServer(schema)
async def subscriptions(request):
    """Upgrade the request to a 'graphql-ws' websocket and hand the
    connection to the shared WebSocketSubscriptionServer."""
    ws = web.WebSocketResponse(protocols=('graphql-ws',))
    await ws.prepare(request)
    await subscription_server.handle(ws)
    # Earlier manual echo loop, superseded by subscription_server.handle:
    # async for msg in ws:
    #     if msg.type == WSMsgType.TEXT:
    #         if msg.data == 'close':
    #             await ws.close()
    #         else:
    #             await ws.send_str(msg.data + '/answer')
    #     elif msg.type == WSMsgType.ERROR:
    #         print('ws connection closed with exception %s' %
    #               ws.exception())
    #     print('websocket connection closed')
    return ws
# Wire the routes -- websocket subscriptions, the GraphiQL IDE, and the query
# endpoint (GET and POST share one handler) -- then start the dev server.
app = web.Application()
app.router.add_get('/subscriptions', subscriptions)
app.router.add_get('/graphiql', graphiql_view)
app.router.add_get('/graphql', graphql_view)
app.router.add_post('/graphql', graphql_view)
web.run_app(app, port=8000)
| [
"[email protected]"
] | |
51b9d85a67e999addd2899a420954e72eea8ab63 | 978248bf0f275ae688f194593aa32c267832b2b6 | /xlsxwriter/test/comparison/test_table14.py | f0690c66bde3644bc9256ba5ff345a5604768e7d | [
"BSD-2-Clause-Views"
] | permissive | satish1337/XlsxWriter | b0c216b91be1b74d6cac017a152023aa1d581de2 | 0ab9bdded4f750246c41a439f6a6cecaf9179030 | refs/heads/master | 2021-01-22T02:35:13.158752 | 2015-03-31T20:32:28 | 2015-03-31T20:32:28 | 33,300,989 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,856 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.maxDiff = None

        base_name = 'table14.xlsx'
        comparison_dir = 'xlsxwriter/test/comparison/'

        # Output written by the test vs. the reference file saved by Excel.
        self.got_filename = comparison_dir + '_test_' + base_name
        self.exp_filename = comparison_dir + 'xlsx_files/' + base_name

        self.ignore_files = []
        self.ignore_elements = {}

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file with tables."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()

        # Column formats exercising dxf (differential) format indices.
        num_red = workbook.add_format({'num_format': '0.00;[Red]0.00', 'dxf_index': 2})
        num_plain = workbook.add_format({'num_format': '0.00_ ;\-0.00\ ', 'dxf_index': 1})
        num_red_neg = workbook.add_format({'num_format': '0.00_ ;[Red]\-0.00\ ', 'dxf_index': 0})

        rows = [
            ['Foo', 1234, 2000, 4321],
            ['Bar', 1256, 4000, 4320],
            ['Baz', 2234, 3000, 4332],
            ['Bop', 1324, 1000, 4333],
        ]

        worksheet.set_column('C:F', 10.288)

        column_options = [
            {},
            {'format': num_red},
            {'format': num_plain},
            {'format': num_red_neg},
        ]
        worksheet.add_table('C2:F6', {'data': rows, 'columns': column_options})

        workbook.close()
        self.assertExcelEqual()
"[email protected]"
] | |
e5c4b6d4c1599915e9426a9c04b64e22883ba6cc | efac669c3351e2b4055d575638205199b9296680 | /pytorch_lightning/tuner/tuning.py | b1a38bd27688ca53c2f8926ab1afb36155bcdff2 | [
"Apache-2.0"
] | permissive | peteriz/pytorch-lightning | 5c90456f57b9cbe4688d71999c8a8240f799a7c6 | 49a4a36ad45b937dd0124ecfb08eb7400dbf3950 | refs/heads/master | 2022-03-15T19:08:00.991416 | 2022-03-08T18:10:18 | 2022-03-08T18:10:18 | 235,549,600 | 0 | 0 | Apache-2.0 | 2020-02-13T10:03:24 | 2020-01-22T10:28:16 | Python | UTF-8 | Python | false | false | 9,167 | py | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional, Union
import pytorch_lightning as pl
from pytorch_lightning.trainer.states import TrainerStatus
from pytorch_lightning.tuner.batch_size_scaling import scale_batch_size
from pytorch_lightning.tuner.lr_finder import _LRFinder, lr_find
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
class Tuner:
    """Tuner class to tune your model."""

    def __init__(self, trainer: "pl.Trainer") -> None:
        self.trainer = trainer

    def on_trainer_init(self, auto_lr_find: Union[str, bool], auto_scale_batch_size: Union[str, bool]) -> None:
        """Store the tuning flags on the trainer when it is constructed."""
        self.trainer.auto_lr_find = auto_lr_find
        self.trainer.auto_scale_batch_size = auto_scale_batch_size

    def _tune(
        self,
        model: "pl.LightningModule",
        scale_batch_size_kwargs: Optional[Dict[str, Any]] = None,
        lr_find_kwargs: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Optional[Union[int, _LRFinder]]]:
        """Run the enabled tuning procedures (batch-size scaling and/or LR finding).

        Returns a dict containing ``"scale_batch_size"`` and/or ``"lr_find"``
        entries, depending on which procedures are enabled on the trainer.
        """
        scale_batch_size_kwargs = scale_batch_size_kwargs or {}
        lr_find_kwargs = lr_find_kwargs or {}
        # return a dict instead of a tuple so BC is not broken if a new tuning procedure is added
        result = {}

        self.trainer.strategy.connect(model)

        # Tuning restarts fitting repeatedly, which is not supported with distributed strategies.
        is_tuning = self.trainer.auto_scale_batch_size or self.trainer.auto_lr_find
        if self.trainer._accelerator_connector.is_distributed and is_tuning:
            raise MisconfigurationException(
                "`trainer.tune()` is currently not supported with"
                f" `Trainer(strategy={self.trainer.strategy.strategy_name!r})`."
            )

        # Run auto batch size scaling
        if self.trainer.auto_scale_batch_size:
            # A string value (e.g. "binsearch") selects the search mode.
            if isinstance(self.trainer.auto_scale_batch_size, str):
                scale_batch_size_kwargs.setdefault("mode", self.trainer.auto_scale_batch_size)
            result["scale_batch_size"] = scale_batch_size(self.trainer, model, **scale_batch_size_kwargs)

        # Run learning rate finder:
        if self.trainer.auto_lr_find:
            lr_find_kwargs.setdefault("update_attr", True)
            result["lr_find"] = lr_find(self.trainer, model, **lr_find_kwargs)

        self.trainer.state.status = TrainerStatus.FINISHED

        return result

    def _run(self, *args: Any, **kwargs: Any) -> None:
        """`_run` wrapper to set the proper state during tuning, as this can be called multiple times."""
        self.trainer.state.status = TrainerStatus.RUNNING  # last `_run` call might have set it to `FINISHED`
        self.trainer.training = True
        self.trainer._run(*args, **kwargs)
        self.trainer.tuning = True

    def scale_batch_size(
        self,
        model: "pl.LightningModule",
        train_dataloaders: Optional[Union[TRAIN_DATALOADERS, "pl.LightningDataModule"]] = None,
        val_dataloaders: Optional[EVAL_DATALOADERS] = None,
        datamodule: Optional["pl.LightningDataModule"] = None,
        mode: str = "power",
        steps_per_trial: int = 3,
        init_val: int = 2,
        max_trials: int = 25,
        batch_arg_name: str = "batch_size",
    ) -> Optional[int]:
        """Iteratively try to find the largest batch size for a given model that does not give an out of memory
        (OOM) error.

        Args:
            model: Model to tune.

            train_dataloaders: A collection of :class:`torch.utils.data.DataLoader` or a
                :class:`~pytorch_lightning.core.datamodule.LightningDataModule` specifying training samples.
                In the case of multiple dataloaders, please see this :ref:`section <multiple-dataloaders>`.

            val_dataloaders: A :class:`torch.utils.data.DataLoader` or a sequence of them specifying validation samples.

            datamodule: An instance of :class:`~pytorch_lightning.core.datamodule.LightningDataModule`.

            mode: Search strategy to update the batch size:

                - ``'power'`` (default): Keep multiplying the batch size by 2, until we get an OOM error.
                - ``'binsearch'``: Initially keep multiplying by 2 and after encountering an OOM error
                    do a binary search between the last successful batch size and the batch size that failed.

            steps_per_trial: number of steps to run with a given batch size.
                Ideally 1 should be enough to test if a OOM error occurs,
                however in practise a few are needed

            init_val: initial batch size to start the search with

            max_trials: max number of increase in batch size done before
               algorithm is terminated

            batch_arg_name: name of the attribute that stores the batch size.
                It is expected that the user has provided a model or datamodule that has a hyperparameter
                with that name. We will look for this attribute name in the following places

                - ``model``
                - ``model.hparams``
                - ``trainer.datamodule`` (the datamodule passed to the tune method)
        """
        self.trainer.auto_scale_batch_size = True
        try:
            result = self.trainer.tune(
                model,
                train_dataloaders=train_dataloaders,
                val_dataloaders=val_dataloaders,
                datamodule=datamodule,
                scale_batch_size_kwargs={
                    "mode": mode,
                    "steps_per_trial": steps_per_trial,
                    "init_val": init_val,
                    "max_trials": max_trials,
                    "batch_arg_name": batch_arg_name,
                },
            )
        finally:
            # Restore the flag even when tuning raises, so a failed call does not
            # leave batch-size scaling permanently enabled on the trainer.
            self.trainer.auto_scale_batch_size = False
        return result["scale_batch_size"]

    def lr_find(
        self,
        model: "pl.LightningModule",
        train_dataloaders: Optional[Union[TRAIN_DATALOADERS, "pl.LightningDataModule"]] = None,
        val_dataloaders: Optional[EVAL_DATALOADERS] = None,
        datamodule: Optional["pl.LightningDataModule"] = None,
        min_lr: float = 1e-8,
        max_lr: float = 1,
        num_training: int = 100,
        mode: str = "exponential",
        early_stop_threshold: float = 4.0,
        update_attr: bool = False,
    ) -> Optional[_LRFinder]:
        """Enables the user to do a range test of good initial learning rates, to reduce the amount of guesswork in
        picking a good starting learning rate.

        Args:
            model: Model to tune.

            train_dataloaders: A collection of :class:`torch.utils.data.DataLoader` or a
                :class:`~pytorch_lightning.core.datamodule.LightningDataModule` specifying training samples.
                In the case of multiple dataloaders, please see this :ref:`section <multiple-dataloaders>`.

            val_dataloaders: A :class:`torch.utils.data.DataLoader` or a sequence of them specifying validation samples.

            datamodule: An instance of :class:`~pytorch_lightning.core.datamodule.LightningDataModule`.

            min_lr: minimum learning rate to investigate

            max_lr: maximum learning rate to investigate

            num_training: number of learning rates to test

            mode: Search strategy to update learning rate after each batch:

                - ``'exponential'`` (default): Will increase the learning rate exponentially.
                - ``'linear'``: Will increase the learning rate linearly.

            early_stop_threshold: threshold for stopping the search. If the
                loss at any point is larger than early_stop_threshold*best_loss
                then the search is stopped. To disable, set to None.

            update_attr: Whether to update the learning rate attribute or not.

        Raises:
            MisconfigurationException:
                If learning rate/lr in ``model`` or ``model.hparams`` isn't overridden when ``auto_lr_find=True``,
                or if you are using more than one optimizer.
        """
        self.trainer.auto_lr_find = True
        try:
            result = self.trainer.tune(
                model,
                train_dataloaders=train_dataloaders,
                val_dataloaders=val_dataloaders,
                datamodule=datamodule,
                lr_find_kwargs={
                    "min_lr": min_lr,
                    "max_lr": max_lr,
                    "num_training": num_training,
                    "mode": mode,
                    "early_stop_threshold": early_stop_threshold,
                    "update_attr": update_attr,
                },
            )
        finally:
            # Restore the flag even when tuning raises (see scale_batch_size).
            self.trainer.auto_lr_find = False
        return result["lr_find"]
| [
"[email protected]"
] | |
ef297538dbdda1ba03ef3bd3400677dee6aa2c18 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/A3COM-HUAWEI-DOT11-ROAM-MIB.py | d7ff4a0c1ceab21807297768f81b42c90780999e | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 15,440 | py | #
# PySNMP MIB module A3COM-HUAWEI-DOT11-ROAM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/A3COM-HUAWEI-DOT11-ROAM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 16:49:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
h3cDot11, = mibBuilder.importSymbols("A3COM-HUAWEI-DOT11-REF-MIB", "h3cDot11")
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
TimeTicks, IpAddress, Integer32, iso, Gauge32, Counter64, NotificationType, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, Bits, ObjectIdentity, Counter32, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "IpAddress", "Integer32", "iso", "Gauge32", "Counter64", "NotificationType", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "Bits", "ObjectIdentity", "Counter32", "Unsigned32")
DisplayString, MacAddress, RowStatus, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "MacAddress", "RowStatus", "TruthValue", "TextualConvention")
h3cDot11ROAM = ModuleIdentity((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10))
h3cDot11ROAM.setRevisions(('2010-08-04 18:00', '2009-05-07 20:00', '2008-07-23 12:00',))
if mibBuilder.loadTexts: h3cDot11ROAM.setLastUpdated('201008041800Z')
if mibBuilder.loadTexts: h3cDot11ROAM.setOrganization('Hangzhou H3C Technologies Co., Ltd.')
class H3cDot11RoamMobileTunnelType(TextualConvention, Integer32):
    # SNMP textual convention: address family of a mobility-group tunnel (1=IPv4, 2=IPv6).
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
    namedValues = NamedValues(("ipv4", 1), ("ipv6", 2))
class H3cDot11RoamAuthMode(TextualConvention, Integer32):
    # SNMP textual convention: authentication mode for inter-AC tunnels (1=none, 2=MD5).
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
    namedValues = NamedValues(("none", 1), ("md5", 2))
class H3cDot11RoamIACTPStatus(TextualConvention, Integer32):
    # SNMP textual convention: IACTP tunnel state machine status (init..run).
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))
    namedValues = NamedValues(("init", 1), ("idle", 2), ("joinRequestWait", 3), ("joinResponseWait", 4), ("joinConfirmWait", 5), ("joinError", 6), ("run", 7))
h3cDot11RoamCfgGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1))
h3cDot11RoamStatusGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2))
h3cDot11RoamStatisGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 3))
h3cDot11RoamStatis2Group = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 4))
h3cDot11MobGrpTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1), )
if mibBuilder.loadTexts: h3cDot11MobGrpTable.setStatus('current')
h3cDot11MobGrpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1), ).setIndexNames((0, "A3COM-HUAWEI-DOT11-ROAM-MIB", "h3cDot11MobGrpName"))
if mibBuilder.loadTexts: h3cDot11MobGrpEntry.setStatus('current')
h3cDot11MobGrpName = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 15)))
if mibBuilder.loadTexts: h3cDot11MobGrpName.setStatus('current')
h3cdot11MobGrpTunnelType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1, 2), H3cDot11RoamMobileTunnelType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cdot11MobGrpTunnelType.setStatus('current')
h3cDot11MobGrpSrcIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1, 3), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cDot11MobGrpSrcIPAddr.setStatus('current')
h3cDot11MobGrpAuthMode = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1, 4), H3cDot11RoamAuthMode().clone('none')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cDot11MobGrpAuthMode.setStatus('current')
h3cDot11MobGrpAuthKey = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cDot11MobGrpAuthKey.setStatus('current')
h3cDot11MobGrpEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1, 6), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cDot11MobGrpEnable.setStatus('current')
h3cDot11MobGrpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 1, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cDot11MobGrpRowStatus.setStatus('current')
h3cDot11MobGrpMemberTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 2), )
if mibBuilder.loadTexts: h3cDot11MobGrpMemberTable.setStatus('current')
h3cDot11MobGrpMemberEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 2, 1), ).setIndexNames((0, "A3COM-HUAWEI-DOT11-ROAM-MIB", "h3cDot11MobGrpName"), (0, "A3COM-HUAWEI-DOT11-ROAM-MIB", "h3cDot11MobGrpMemberIpAddr"))
if mibBuilder.loadTexts: h3cDot11MobGrpMemberEntry.setStatus('current')
h3cDot11MobGrpMemberIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 2, 1, 1), InetAddress())
if mibBuilder.loadTexts: h3cDot11MobGrpMemberIpAddr.setStatus('current')
h3cDot11MobGrpMemberStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 2, 1, 2), H3cDot11RoamIACTPStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11MobGrpMemberStatus.setStatus('current')
h3cDot11MobGrpMemberIf = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 2, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11MobGrpMemberIf.setStatus('current')
h3cDot11MobGrpMemberUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 2, 1, 4), Integer32()).setUnits('second').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11MobGrpMemberUpTime.setStatus('current')
h3cDot11MobGrpMemberRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 1, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cDot11MobGrpMemberRowStatus.setStatus('current')
h3cDot11RoamInInfoTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 1), )
if mibBuilder.loadTexts: h3cDot11RoamInInfoTable.setStatus('current')
h3cDot11RoamInInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 1, 1), ).setIndexNames((0, "A3COM-HUAWEI-DOT11-ROAM-MIB", "h3cDot11RoamClientMAC"))
if mibBuilder.loadTexts: h3cDot11RoamInInfoEntry.setStatus('current')
h3cDot11RoamClientMAC = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 1, 1, 1), MacAddress())
if mibBuilder.loadTexts: h3cDot11RoamClientMAC.setStatus('current')
h3cDot11RoamInClientBSSID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 1, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamInClientBSSID.setStatus('current')
h3cDot11RoamInClientVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamInClientVlanID.setStatus('current')
h3cDot11RoamInHomeACIPType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 1, 1, 4), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamInHomeACIPType.setStatus('current')
h3cDot11RoamInHomeACIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 1, 1, 5), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamInHomeACIPAddr.setStatus('current')
h3cDot11RoamOutInfoTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 2), )
if mibBuilder.loadTexts: h3cDot11RoamOutInfoTable.setStatus('current')
h3cDot11RoamOutInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 2, 1), ).setIndexNames((0, "A3COM-HUAWEI-DOT11-ROAM-MIB", "h3cDot11RoamClientMAC"))
if mibBuilder.loadTexts: h3cDot11RoamOutInfoEntry.setStatus('current')
h3cDot11RoamOutClientBSSID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 2, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamOutClientBSSID.setStatus('current')
h3cDot11RoamOutClientVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamOutClientVlanID.setStatus('current')
h3cDot11RoamOutForeignACIPType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 2, 1, 3), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamOutForeignACIPType.setStatus('current')
h3cDot11RoamOutForeignACIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 2, 1, 4), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamOutForeignACIPAddr.setStatus('current')
h3cDot11RoamOutClientUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 2, 1, 5), Integer32()).setUnits('second').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamOutClientUpTime.setStatus('current')
h3cDot11RoamTrackTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3), )
if mibBuilder.loadTexts: h3cDot11RoamTrackTable.setStatus('current')
h3cDot11RoamTrackEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3, 1), ).setIndexNames((0, "A3COM-HUAWEI-DOT11-ROAM-MIB", "h3cDot11RoamTrackIndex"))
if mibBuilder.loadTexts: h3cDot11RoamTrackEntry.setStatus('current')
h3cDot11RoamTrackIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: h3cDot11RoamTrackIndex.setStatus('current')
h3cDot11RoamTrackClientMAC = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamTrackClientMAC.setStatus('current')
h3cDot11RoamTrackBSSID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamTrackBSSID.setStatus('current')
h3cDot11RoamTrackUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3, 1, 4), Integer32()).setUnits('second').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamTrackUpTime.setStatus('current')
h3cDot11RoamTrackACIPType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3, 1, 5), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamTrackACIPType.setStatus('current')
h3cDot11RoamTrackACIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 2, 3, 1, 6), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11RoamTrackACIPAddr.setStatus('current')
h3cDot11IntraACRoamingSuccCnt = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 3, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11IntraACRoamingSuccCnt.setStatus('current')
h3cDot11InterACRoamingSuccCnt = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 3, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11InterACRoamingSuccCnt.setStatus('current')
h3cDot11InterACRoamOutSuccCnt = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 3, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11InterACRoamOutSuccCnt.setStatus('current')
h3cDot11IntraACRoamingSuccCnt2 = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 4, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11IntraACRoamingSuccCnt2.setStatus('current')
h3cDot11InterACRoamingSuccCnt2 = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 4, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11InterACRoamingSuccCnt2.setStatus('current')
h3cDot11InterACRoamOutSuccCnt2 = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 75, 10, 4, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cDot11InterACRoamOutSuccCnt2.setStatus('current')
mibBuilder.exportSymbols("A3COM-HUAWEI-DOT11-ROAM-MIB", h3cdot11MobGrpTunnelType=h3cdot11MobGrpTunnelType, h3cDot11MobGrpSrcIPAddr=h3cDot11MobGrpSrcIPAddr, h3cDot11RoamOutClientBSSID=h3cDot11RoamOutClientBSSID, h3cDot11RoamTrackEntry=h3cDot11RoamTrackEntry, h3cDot11InterACRoamOutSuccCnt=h3cDot11InterACRoamOutSuccCnt, h3cDot11MobGrpMemberIf=h3cDot11MobGrpMemberIf, h3cDot11RoamTrackClientMAC=h3cDot11RoamTrackClientMAC, h3cDot11MobGrpAuthKey=h3cDot11MobGrpAuthKey, h3cDot11RoamOutInfoTable=h3cDot11RoamOutInfoTable, h3cDot11RoamInInfoEntry=h3cDot11RoamInInfoEntry, h3cDot11InterACRoamingSuccCnt=h3cDot11InterACRoamingSuccCnt, PYSNMP_MODULE_ID=h3cDot11ROAM, h3cDot11RoamInClientVlanID=h3cDot11RoamInClientVlanID, h3cDot11MobGrpMemberEntry=h3cDot11MobGrpMemberEntry, H3cDot11RoamMobileTunnelType=H3cDot11RoamMobileTunnelType, h3cDot11MobGrpTable=h3cDot11MobGrpTable, H3cDot11RoamAuthMode=H3cDot11RoamAuthMode, h3cDot11MobGrpMemberStatus=h3cDot11MobGrpMemberStatus, h3cDot11MobGrpMemberUpTime=h3cDot11MobGrpMemberUpTime, h3cDot11RoamOutForeignACIPAddr=h3cDot11RoamOutForeignACIPAddr, H3cDot11RoamIACTPStatus=H3cDot11RoamIACTPStatus, h3cDot11RoamClientMAC=h3cDot11RoamClientMAC, h3cDot11RoamTrackTable=h3cDot11RoamTrackTable, h3cDot11ROAM=h3cDot11ROAM, h3cDot11IntraACRoamingSuccCnt=h3cDot11IntraACRoamingSuccCnt, h3cDot11IntraACRoamingSuccCnt2=h3cDot11IntraACRoamingSuccCnt2, h3cDot11RoamInHomeACIPAddr=h3cDot11RoamInHomeACIPAddr, h3cDot11InterACRoamOutSuccCnt2=h3cDot11InterACRoamOutSuccCnt2, h3cDot11RoamStatusGroup=h3cDot11RoamStatusGroup, h3cDot11InterACRoamingSuccCnt2=h3cDot11InterACRoamingSuccCnt2, h3cDot11RoamStatis2Group=h3cDot11RoamStatis2Group, h3cDot11RoamInClientBSSID=h3cDot11RoamInClientBSSID, h3cDot11RoamTrackBSSID=h3cDot11RoamTrackBSSID, h3cDot11RoamInInfoTable=h3cDot11RoamInInfoTable, h3cDot11RoamInHomeACIPType=h3cDot11RoamInHomeACIPType, h3cDot11RoamOutInfoEntry=h3cDot11RoamOutInfoEntry, h3cDot11MobGrpName=h3cDot11MobGrpName, h3cDot11RoamTrackIndex=h3cDot11RoamTrackIndex, 
h3cDot11RoamTrackACIPType=h3cDot11RoamTrackACIPType, h3cDot11MobGrpEntry=h3cDot11MobGrpEntry, h3cDot11RoamStatisGroup=h3cDot11RoamStatisGroup, h3cDot11MobGrpMemberTable=h3cDot11MobGrpMemberTable, h3cDot11MobGrpAuthMode=h3cDot11MobGrpAuthMode, h3cDot11MobGrpMemberRowStatus=h3cDot11MobGrpMemberRowStatus, h3cDot11RoamOutForeignACIPType=h3cDot11RoamOutForeignACIPType, h3cDot11RoamTrackUpTime=h3cDot11RoamTrackUpTime, h3cDot11MobGrpRowStatus=h3cDot11MobGrpRowStatus, h3cDot11RoamOutClientVlanID=h3cDot11RoamOutClientVlanID, h3cDot11MobGrpMemberIpAddr=h3cDot11MobGrpMemberIpAddr, h3cDot11RoamCfgGroup=h3cDot11RoamCfgGroup, h3cDot11RoamTrackACIPAddr=h3cDot11RoamTrackACIPAddr, h3cDot11MobGrpEnable=h3cDot11MobGrpEnable, h3cDot11RoamOutClientUpTime=h3cDot11RoamOutClientUpTime)
| [
"[email protected]"
] | |
0b61ccd08991ebb0902f43a83ba3074f2e60a203 | 18305efd1edeb68db69880e03411df37fc83b58b | /pdb_files3000rot/g7/1g7v/tractability_450/pymol_results_file.py | b3ca0aa99f8776269651041e072c2f991de4c442 | [] | no_license | Cradoux/hotspot_pipline | 22e604974c8e38c9ffa979092267a77c6e1dc458 | 88f7fab8611ebf67334474c6e9ea8fc5e52d27da | refs/heads/master | 2021-11-03T16:21:12.837229 | 2019-03-28T08:31:39 | 2019-03-28T08:31:39 | 170,106,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,214 | py |
from os.path import join
import tempfile
import zipfile
from pymol import cmd, finish_launching
from pymol.cgo import *
finish_launching()
dirpath = None
def cgo_arrow(atom1='pk1', atom2='pk2', radius=0.07, gap=0.0, hlength=-1, hradius=-1, color='blue red', name=''):
    """Build a CGO arrow object pointing from *atom1* to *atom2*.

    atom1/atom2 may be PyMOL atom selections, "[x, y, z]" strings, or
    coordinate triples.  *color* is either one colour name or two
    space-separated names (shaft colour then head colour).  *hlength* and
    *hradius* default (<0) to values derived from *radius*.  *name* is
    accepted for interface compatibility but unused here.
    """
    from chempy import cpv

    radius, gap = float(radius), float(gap)
    hlength, hradius = float(hlength), float(hradius)

    try:
        color1, color2 = color.split()
    except ValueError:
        # A single colour name: use it for both the shaft and the head.
        # (Was a bare `except:`, which also hid real programming errors.)
        color1 = color2 = color
    color1 = list(cmd.get_color_tuple(color1))
    color2 = list(cmd.get_color_tuple(color2))

    def get_coord(v):
        # Accept a ready-made coordinate triple, a "[x, y, z]" literal
        # string, or a PyMOL atom selection.
        if not isinstance(v, str):
            return v
        if v.startswith('['):
            return cmd.safe_list_eval(v)
        return cmd.get_atom_coords(v)

    xyz1 = get_coord(atom1)
    xyz2 = get_coord(atom2)
    normal = cpv.normalize(cpv.sub(xyz1, xyz2))

    if hlength < 0:
        hlength = radius * 3.0
    if hradius < 0:
        hradius = hlength * 0.6

    if gap:
        # Shorten both ends by *gap* along the arrow axis.
        diff = cpv.scale(normal, gap)
        xyz1 = cpv.sub(xyz1, diff)
        xyz2 = cpv.add(xyz2, diff)

    # xyz3 is where the cylindrical shaft ends and the cone head begins.
    xyz3 = cpv.add(cpv.scale(normal, hlength), xyz2)

    obj = [cgo.CYLINDER] + xyz1 + xyz3 + [radius] + color1 + color2 + [cgo.CONE] + xyz3 + xyz2 + [hradius, 0.0] + color2 + color2 + [1.0, 0.0]
    return obj
# Unpack the hotspot results archive into a scratch directory and load the protein.
dirpath = tempfile.mkdtemp()
zip_dir = 'out.zip'
with zipfile.ZipFile(zip_dir) as hs_zip:
    hs_zip.extractall(dirpath)

cmd.load(join(dirpath,"protein.pdb"), "protein")
cmd.show("cartoon", "protein")
# Load the per-threshold label files (names shown as hidden labels so they can
# be toggled per group below).  Paths are relative unless an archive was unpacked.
if dirpath:
    f = join(dirpath, "label_threshold_10.mol2")
else:
    f = "label_threshold_10.mol2"

cmd.load(f, 'label_threshold_10')
cmd.hide('everything', 'label_threshold_10')
cmd.label("label_threshold_10", "name")
cmd.set("label_font_id", 7)
cmd.set("label_size", -0.4)


if dirpath:
    f = join(dirpath, "label_threshold_14.mol2")
else:
    f = "label_threshold_14.mol2"

cmd.load(f, 'label_threshold_14')
cmd.hide('everything', 'label_threshold_14')
cmd.label("label_threshold_14", "name")
cmd.set("label_font_id", 7)
cmd.set("label_size", -0.4)


if dirpath:
    f = join(dirpath, "label_threshold_17.mol2")
else:
    f = "label_threshold_17.mol2"

cmd.load(f, 'label_threshold_17')
cmd.hide('everything', 'label_threshold_17')
cmd.label("label_threshold_17", "name")
cmd.set("label_font_id", 7)
cmd.set("label_size", -0.4)
# Render one semi-transparent isosurface per (probe grid, threshold) pair and
# collect them into per-threshold and per-hotspot PyMOL groups.
colour_dict = {'acceptor':'red', 'donor':'blue', 'apolar':'yellow', 'negative':'purple', 'positive':'cyan'}

threshold_list = [10, 14, 17]
gfiles = ['donor.grd', 'apolar.grd', 'acceptor.grd']
grids = ['donor', 'apolar', 'acceptor']
num = 0
surf_transparency = 0.2

if dirpath:
    gfiles = [join(dirpath, g) for g in gfiles]

for t in threshold_list:
    for i in range(len(grids)):
        try:
            # Best-effort: skip any grid that is missing or fails to contour.
            # NOTE(review): bare `except:` also hides unrelated errors — consider
            # narrowing to pymol.CmdException / FileNotFoundError.
            cmd.load(r'%s'%(gfiles[i]), '%s_%s'%(grids[i], str(num)))
            cmd.isosurface('surface_%s_%s_%s'%(grids[i], t, num), '%s_%s'%(grids[i], num), t)

            cmd.set('transparency', surf_transparency, 'surface_%s_%s_%s'%(grids[i], t, num))
            cmd.color(colour_dict['%s'%(grids[i])], 'surface_%s_%s_%s'%(grids[i], t, num))
            cmd.group('threshold_%s'%(t), members = 'surface_%s_%s_%s'%(grids[i],t, num))
            cmd.group('threshold_%s' % (t), members='label_threshold_%s' % (t))
        except:
            continue

    try:
        cmd.group('hotspot_%s' % (num), members='threshold_%s' % (t))
    except:
        continue

for g in grids:
    cmd.group('hotspot_%s' % (num), members='%s_%s' % (g,num))
cluster_dict = {"16.4940004349":[], "16.4940004349_arrows":[]}
cluster_dict["16.4940004349"] += [COLOR, 0.00, 0.00, 1.00] + [ALPHA, 0.6] + [SPHERE, float(6.0), float(103.5), float(82.5), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([6.0,103.5,82.5], [3.903,105.552,80.989], color="blue red", name="Arrows_16.4940004349_1")
cluster_dict["16.4940004349"] += [COLOR, 0.00, 0.00, 1.00] + [ALPHA, 0.6] + [SPHERE, float(9.5), float(108.0), float(80.5), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([9.5,108.0,80.5], [11.728,106.388,80.182], color="blue red", name="Arrows_16.4940004349_2")
cluster_dict["16.4940004349"] += [COLOR, 0.00, 0.00, 1.00] + [ALPHA, 0.6] + [SPHERE, float(9.5), float(105.0), float(79.0), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([9.5,105.0,79.0], [11.728,106.388,80.182], color="blue red", name="Arrows_16.4940004349_3")
cluster_dict["16.4940004349"] += [COLOR, 0.00, 0.00, 1.00] + [ALPHA, 0.6] + [SPHERE, float(9.5), float(105.5), float(77.0), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([9.5,105.5,77.0], [11.141,102.835,76.967], color="blue red", name="Arrows_16.4940004349_4")
cluster_dict["16.4940004349"] += [COLOR, 0.00, 0.00, 1.00] + [ALPHA, 0.6] + [SPHERE, float(11.0), float(110.5), float(81.0), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([11.0,110.5,81.0], [13.419,110.042,82.914], color="blue red", name="Arrows_16.4940004349_5")
cluster_dict["16.4940004349"] += [COLOR, 1.00, 1.000, 0.000] + [ALPHA, 0.6] + [SPHERE, float(7.42102675834), float(107.749665562), float(78.4210819103), float(1.0)]
cluster_dict["16.4940004349"] += [COLOR, 1.00, 0.00, 0.00] + [ALPHA, 0.6] + [SPHERE, float(5.5), float(113.5), float(80.0), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([5.5,113.5,80.0], [5.021,110.73,80.529], color="red blue", name="Arrows_16.4940004349_6")
cluster_dict["16.4940004349"] += [COLOR, 1.00, 0.00, 0.00] + [ALPHA, 0.6] + [SPHERE, float(8.5), float(115.0), float(78.0), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([8.5,115.0,78.0], [6.555,117.389,78.438], color="red blue", name="Arrows_16.4940004349_7")
cluster_dict["16.4940004349"] += [COLOR, 1.00, 0.00, 0.00] + [ALPHA, 0.6] + [SPHERE, float(10.5), float(109.0), float(79.5), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([10.5,109.0,79.5], [11.883,106.786,77.978], color="red blue", name="Arrows_16.4940004349_8")
cluster_dict["16.4940004349"] += [COLOR, 1.00, 0.00, 0.00] + [ALPHA, 0.6] + [SPHERE, float(11.5), float(113.0), float(78.0), float(1.0)]
cluster_dict["16.4940004349_arrows"] += cgo_arrow([11.5,113.0,78.0], [13.328,115.357,77.05], color="red blue", name="Arrows_16.4940004349_9")
# Turn the accumulated CGO primitives into named objects, group them into a
# pharmacophore, attach its label file, and apply the final scene styling.
cmd.load_cgo(cluster_dict["16.4940004349"], "Features_16.4940004349", 1)
cmd.load_cgo(cluster_dict["16.4940004349_arrows"], "Arrows_16.4940004349")
cmd.set("transparency", 0.2,"Features_16.4940004349")
cmd.group("Pharmacophore_16.4940004349", members="Features_16.4940004349")
cmd.group("Pharmacophore_16.4940004349", members="Arrows_16.4940004349")

if dirpath:
    f = join(dirpath, "label_threshold_16.4940004349.mol2")
else:
    f = "label_threshold_16.4940004349.mol2"

cmd.load(f, 'label_threshold_16.4940004349')
cmd.hide('everything', 'label_threshold_16.4940004349')
cmd.label("label_threshold_16.4940004349", "name")
cmd.set("label_font_id", 7)
cmd.set("label_size", -0.4)

cmd.group('Pharmacophore_16.4940004349', members= 'label_threshold_16.4940004349')

cmd.bg_color("white")
cmd.show("cartoon", "protein")
cmd.color("slate", "protein")
cmd.show("sticks", "organic")
cmd.hide("lines", "protein")
| [
"[email protected]"
] | |
3c1414d17c449561e276f13e399900b1c4bd8035 | 72a9d5019a6cc57849463fc315eeb0f70292eac8 | /Python-Programming/6- Numpy/Numpy_.py | 98ac37a1616122702019f51a69f73e320c98fe2f | [] | no_license | lydiawawa/Machine-Learning | 393ce0713d3fd765c8aa996a1efc9f1290b7ecf1 | 57389cfa03a3fc80dc30a18091629348f0e17a33 | refs/heads/master | 2020-03-24T07:53:53.466875 | 2018-07-22T23:01:42 | 2018-07-22T23:01:42 | 142,578,611 | 1 | 0 | null | 2018-07-27T13:08:47 | 2018-07-27T13:08:47 | null | UTF-8 | Python | false | false | 3,509 | py | # %%%%%%%%%%%%% Python %%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%% Authors %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Dr. Martin Hagan----->Email: [email protected]
# Dr. Amir Jafari------>Email: [email protected]
# %%%%%%%%%%%%% Date:
# V1 Jan - 04 - 2018
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%% Numpy Python %%%%%%%%%%%%%%%%%%%%%%%%%%%%
# =============================================================
import numpy as np
# ----------------------------------------------------------------------------------
#---------------------- creating numpy array----------------------------------------
x = np.array([1, 2, 3, 4])
y = np.linspace(-5, 1, 10)
z = np.arange(0, 10)
print(x)
print(y)
print(z)
type(x)
print(x.dtype)
# ----------------------------------------------------------------------------------
#---------------------- Step Size---------------------------------------------------
x1 = np.arange(0, 10, 2)
x2 = np.arange(0, 5, .5)
x3 = np.arange(0, 1, .1)
y1 = np.linspace(1, 5, 2)
List = list(x1)
print(List)
Min = np.amin(x1)
print(Min)
Max = np.amax(y1)
print(Max)
# ----------------------------------------------------------------------------------
#---------------------- Array Operands----------------------------------------------
a1 = np.array([1, 1, 1, 1]) + np.array([2, 2, 2, 2])
print(a1)
a2 = np.array([1, 1, 1, 1]) - np.array([2, 2, 2, 2])
print(a2)
a3 = np.array([1, 1, 1, 1]) * np.array([2, 2, 2, 2])
print(a3)
a4 = np.array([1, 1, 1, 1]) / np.array([2, 2, 2, 2])
print(a4)
a5 = np.array([True, True, False]) + np.array([True, False, False])
print(a5)
a6 = np.array([True, True, False]) * np.array([True, False, False])
print(a6)
# ----------------------------------------------------------------------------------
#---------------------- Mathematical Function---------------------------------------
print (abs(-2))
list1 = [-1, -2, -3]
s1 = []
for i in range(len(list1)):
s1.append(abs(list1[i]))
print(s1)
np.abs(-3)
np.abs([-2, -7, 1])
# ----------------------------------------------------------------------------------
#---------------------- Indexing----------------------------------------------------
a7 = np.arange(1, 5, .5)
print(len(a7))
second_element = a7[1]
print(second_element)
first_three_elements = a7[0:3]
print(first_three_elements)
# ----------------------------------------------------------------------------------
# --------------------------Masking-------------------------------------------------
print(a7)
bigger_than_3 = a7 > 3
print(bigger_than_3)
type(bigger_than_3)
len(bigger_than_3)
d2 = [i for i, v in enumerate(a7) if v > 3]
print(d2)
[i for i, v in enumerate(a7) if v > 3]
d3 = [v for i, v in enumerate(a7) if v > 26]
print(d3)
sum(bigger_than_3)
len(d2)
large_nums = a7[bigger_than_3]
len(a7[bigger_than_3])
print(large_nums)
large_nums = a7[a7 > 3]
print(large_nums)
# ----------------------------------------------------------------------------------
# --------------------------More----------------------------------------------------
a8 = np.logical_and(a7 > 1, a7 < 3)
print(a8)
a9 = a7[np.logical_and(a7 > 1, a7 < 3)]
print(a9)
a10 = np.logical_or(a7 < 3, a7 > 4)
print(a10)
a11= a7[np.logical_or(a7 < 22, a7 > 27)]
print(a11)
# ----------------------------------------------------------------------------------
# --------------------------Vectorizing Function-------------------------------------
def f(x):
return x ** 2 > 2
f_v = np.vectorize(f)
print(f_v([1,2,3]))
| [
"[email protected]"
] | |
fcff171d2095a1a02ec1b3033c6527903854024e | a844cba1a0cd54c650b640a7a5cbeabb8c2d15a5 | /modules/debugger/modules.py | 952d7b44e0a87252905c2dcc0c446df72cfd9ab7 | [
"MIT"
] | permissive | romain-tracktik/sublime_debugger | de5950d9f79fcfbe0407af4f89e15e91acb035aa | 6ff71182fee427cfc0254a9d47679d7a6d1424f9 | refs/heads/master | 2020-09-13T12:06:54.544461 | 2019-11-16T09:51:55 | 2019-11-16T09:51:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,109 | py | from ..typecheck import *
from ..import dap
from ..import core
from ..import ui
class Modules:
    """Collection of modules reported by the debug adapter for the current session."""

    def __init__(self):
        self.modules = []  # type: List[dap.Module]
        self.on_updated = core.Event()  # type: core.Event[None]

    def on_module_event(self, event: dap.ModuleEvent) -> None:
        # BUG FIX: previously all three branches compared against
        # ``dap.ModuleEvent.new``, which made the second and third branches
        # unreachable. The Debug Adapter Protocol defines exactly the three
        # reasons dispatched below ('new', 'changed', 'removed').
        if event.reason == dap.ModuleEvent.new:
            self.modules.append(event.module)
            self.on_updated()
            return

        if event.reason == dap.ModuleEvent.changed:
            # FIXME: NOT IMPLEMENTED
            return

        if event.reason == dap.ModuleEvent.removed:
            # FIXME: NOT IMPLEMENTED
            return

    def clear_session_date(self) -> None:
        # NOTE(review): name reads like a typo for ``clear_session_data`` but is
        # preserved because callers outside this file may depend on it.
        self.modules.clear()
        self.on_updated()
class ModulesView(ui.Block):
    """UI panel that renders the modules tracked by a ``Modules`` collection."""

    def __init__(self, modules: Modules):
        super().__init__()
        self.modules = modules

    def added(self, layout: ui.Layout):
        # Mark this view dirty (re-render) whenever the collection changes.
        self.on_updated_handle = self.modules.on_updated.add(self.dirty)

    def removed(self):
        self.on_updated_handle.dispose()

    def render(self) -> ui.Panel.Children:
        # One labelled row per tracked module.
        rows = [ui.block(ui.Label(module.name)) for module in self.modules.modules]
        return [ui.Table(items=rows)]
| [
"[email protected]"
] | |
4c61d56834868c5e80a82df074f0e9fbc4e1815a | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/semicolon.py | ed11103b36352b18bd6e69914773b3ce1e715926 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 63 | py | ii = [('KirbWPW2.py', 1), ('BachARE.py', 1), ('HogaGMM.py', 1)] | [
"[email protected]"
] | |
98e405fff7ad9fa147d9ed56eddd076e542a2578 | d5e7a3f489c2f4e95204906cd07e44ef812ddd24 | /Part/湮灭之瞳.py | bac547ea7ac3107ff582ee495d66640d0abf6897 | [] | no_license | VV4yne/DNFCalculating | ee57a1901421c7def6e81a29113dec69adde69c9 | 631992a653029d0c95d23abbdba162cd9ebfa4ee | refs/heads/master | 2022-10-04T13:54:52.668409 | 2020-06-09T09:13:24 | 2020-06-09T09:13:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,195 | py | from PublicReference.base import *
class 湮灭之瞳主动技能(技能):
    # Translated from the original Chinese notes:
    # Only three damage components are modelled per skill; the hit count of the
    # first component defaults to 1, the second and third default to 0 and must
    # be assigned manually.  To extend further, subclass inside the specific
    # job class and override 等效百分比 accordingly.
    # For fixed-damage skills, note that 基础/成长 must be the in-game panel
    # value divided by independent attack, multiplied by 100.
    基础 = 0.0        # base value of damage component 1
    成长 = 0.0        # per-level growth of damage component 1
    攻击次数 = 1.0    # hit count of component 1 (defaults to 1)
    基础2 = 0.0       # component 2 (hit count defaults to 0 -> inactive)
    成长2 = 0.0
    攻击次数2 = 0.0
    基础3 = 0.0       # component 3 (hit count defaults to 0 -> inactive)
    成长3 = 0.0
    攻击次数3 = 0.0
    CD = 0.0          # base cooldown in seconds
    # added by Will
    CD倍率 = 1.0      # cooldown multiplier (used by overrides, e.g. 技能18)
    TP成长 = 0.0      # per-TP-level damage growth
    TP上限 = 0        # TP level cap
    TP等级 = 0        # current TP level
    是否主动 = 1      # active skill flag
    是否有伤害 = 1    # deals damage flag
    元素之力蓄力数量 = 0
    恢复 = 1.0        # cooldown-recovery factor (divides CD)
    倍率 = 1.0        # external multiplier (e.g. charm bonus)
    被动倍率 = 1.0    # accumulated passive multiplier
    基础释放次数 = 0  # guaranteed casts added on top of the CD-based count
    演出时间 = 0      # animation time in seconds
    是否有护石 = 0    # has a charm (护石) variant
    关联技能 = ['无']
    关联技能2 = ['无']
    关联技能3 = ['无']
    关联技能4 = ['无']
    # added by Will
    冷却关联技能 = ['无']
    冷却关联技能2 = ['无']
    冷却关联技能3 = ['无']

    def 等效百分比(self, 武器类型):
        # Effective total percentage: sum of the three components, scaled by the
        # TP growth and the skill multiplier; truncated to int as in-game.
        if self.等级 == 0:
            return 0
        else:
            return int((self.攻击次数 * (self.基础 + self.成长 * self.等级) + self.攻击次数2 * (self.基础2 + self.成长2 * self.等级) + self.攻击次数3 * (
                self.基础3 + self.成长3 * self.等级)) * (1 + self.TP成长 * self.TP等级) * self.倍率)

    def 等效CD(self, 武器类型):
        # Effective cooldown: wand (魔杖) uses the base CD, staff (法杖) is 10% slower.
        if 武器类型 == '魔杖':
            return round(self.CD / self.恢复 * 1.0, 1)
        if 武器类型 == '法杖':
            return round(self.CD / self.恢复 * 1.1, 1)
class 湮灭之瞳被动技能(技能):
    # Passive/buff skill base class: no cast and no damage of its own; it only
    # contributes multipliers to the skills it is linked with.
    是否主动 = 0          # not an active skill
    是否有伤害 = 0        # deals no damage itself
    元素之力蓄力数量 = 0
    关联技能 = ['所有']   # by default a passive boosts every damaging skill
    # added by Will
    关联技能2 = ['无']
    关联技能3 = ['无']
    关联技能4 = ['无']
    冷却关联技能 = ['无']
    冷却关联技能2 = ['无']
    冷却关联技能3 = ['无']
class 湮灭之瞳技能0(湮灭之瞳被动技能):
名称 = '元素循环'
所在等级 = 30
等级上限 = 20
基础等级 = 10
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
return round(1.00 + 0.02 * self.等级, 5)
class 湮灭之瞳技能1(湮灭之瞳被动技能):
名称 = '元素之力'
所在等级 = 20
等级上限 = 11
基础等级 = 1
关联技能 = ['无']
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
return round(0.055+0.014*self.等级,2)
class 湮灭之瞳技能2(湮灭之瞳主动技能):
名称 = '元素环绕'
所在等级 = 25
等级上限 = 20
基础等级 = 10
是否有伤害 = 0
def 属强加成(self):
if self.等级 == 0:
return 0
else:
return (6 + self.等级 * 3)
class 湮灭之瞳技能3(湮灭之瞳被动技能):
名称 = '元素融合'
所在等级 = 15
等级上限 = 11
基础等级 = 1
def 加成倍率(self, 武器类型):
return 1.0
def 属强加成(self):
if self.等级 == 0:
return 0
else:
return (37 + self.等级 * 3)
class 湮灭之瞳技能4(湮灭之瞳被动技能):
名称 = '元素爆发'
所在等级 = 48
等级上限 = 40
基础等级 = 20
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
if self.等级 <= 16:
return round(1.015 + 0.015 * self.等级, 5)
else:
return round(1.255 + 0.020 * (self.等级 - 16), 5)
class 湮灭之瞳技能5(湮灭之瞳被动技能):
名称 = '黑瞳'
所在等级 = 75
等级上限 = 40
基础等级 = 11
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
return round(1.23 + 0.02 * self.等级, 5)
class 湮灭之瞳技能6(湮灭之瞳被动技能):
名称 = '卓越之力'
所在等级 = 95
等级上限 = 40
基础等级 = 4
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
return round(1.18 + 0.02 * self.等级, 5)
class 湮灭之瞳技能7(湮灭之瞳被动技能):
名称 = '超卓之心'
所在等级 = 95
等级上限 = 11
基础等级 = 1
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
return round(1.045 + 0.005 * self.等级, 5)
class 湮灭之瞳技能8(湮灭之瞳被动技能):
名称 = '觉醒之抉择'
所在等级 = 100
等级上限 = 40
基础等级 = 2
关联技能 = ['无']
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
return round(1.10 + 0.05 * self.等级, 5)
class 湮灭之瞳技能9(湮灭之瞳主动技能):
名称 = '元素炮'
所在等级 = 15
等级上限 = 11
基础等级 = 1
基础 = 490
成长 = 10
CD = 4.0
class 湮灭之瞳技能10(湮灭之瞳主动技能):
名称 = '属性变换'
所在等级 = 15
等级上限 = 60
基础等级 = 19
是否有伤害 = 1
是否主动 = 1
基础 = 195
成长 = 58.7
TP成长 = 0.08
TP上限 = 7
关联技能 = ['元素炮','魔球连射']
def 加成倍率(self, 武器类型):
if self.等级 == 0:
return 1.0
else:
return round((1.95 + 0.587 * self.等级 )* (1+0.08 * self.TP等级), 5)
class 湮灭之瞳技能11(湮灭之瞳主动技能):
名称 = '魔球连射'
所在等级 = 5
等级上限 = 11
基础等级 = 1
基础 = 108
成长 = 2
攻击次数 = 5
CD = 2.4
演出时间 = 1.5
class 湮灭之瞳技能12(湮灭之瞳主动技能):
名称 = '幻魔四重奏'
所在等级 = 50
等级上限 = 40
基础等级 = 12
基础 = 42510
成长 = 12850
CD = 145.0
class 湮灭之瞳技能13(湮灭之瞳主动技能):
名称 = '末日湮灭'
所在等级 = 85
等级上限 = 40
基础等级 = 5
基础 = 95595.6
成长 = 28856.4
CD = 180.0
class 湮灭之瞳技能14(湮灭之瞳主动技能):
名称 = '地炎'
所在等级 = 25
等级上限 = 60
基础等级 = 41
基础 = 1753.702
成长 = 198.297
CD = 4.0
TP成长 = 0.04
TP上限 = 7
演出时间 = 1.8
class 湮灭之瞳技能15(湮灭之瞳主动技能):
名称 = '冰晶坠'
所在等级 = 20
等级上限 = 60
基础等级 = 43
基础 = 2956.143
成长 = 333.857
CD = 6.4
TP成长 = 0.10
TP上限 = 7
演出时间 = 1.5
class 湮灭之瞳技能16(湮灭之瞳主动技能):
名称 = '雷光链'
所在等级 = 30
等级上限 = 60
基础等级 = 38
基础 = 3723.636
成长 = 420.364
CD = 9.6
TP成长 = 0.20
TP上限 = 7
演出时间 = 1.6
class 湮灭之瞳技能17(湮灭之瞳主动技能):
名称 = '暗域扩张'
所在等级 = 30
等级上限 = 60
基础等级 = 38
基础 = 5289.705
成长 = 597.295
CD = 12.0
TP成长 = 0.10
TP上限 = 7
演出时间 = 0.4
class 湮灭之瞳技能18(湮灭之瞳主动技能):
名称 = '冰晶之浴'
所在等级 = 35
等级上限 = 60
基础等级 = 36
基础 = 5459.317
成长 = 616.683
CD = 12.0
TP成长 = 0.0
TP上限 = 1
演出时间 = 4.5
def 等效CD(self, 武器类型):
if self.TP等级 == 0:
if 武器类型 == '魔杖':
return round (0.8 * self.CD * self.CD倍率 / self.恢复, 1)
if 武器类型 == '法杖':
return round (0.8 * 1.1 * self.CD * self.CD倍率 / self.恢复, 1)
else:
if 武器类型 == '魔杖':
return round (0.8 * (self.CD - 3.0) * self.CD倍率 / self.恢复, 1)
if 武器类型 == '法杖':
return round (0.8 * 1.1 * (self.CD - 3.0) * self.CD倍率 / self.恢复, 1)
class 湮灭之瞳技能19(湮灭之瞳主动技能):
名称 = '旋炎破'
所在等级 = 35
等级上限 = 60
基础等级 = 36
基础 = 6199.512
成长 = 700.488
CD = 16.0
TP成长 = 0.10
TP上限 = 7
是否有护石 = 1
演出时间 = 2.0
def 装备护石(self):
self.倍率 *= 1.22
class 湮灭之瞳技能20(湮灭之瞳主动技能):
名称 = '雷光屏障'
所在等级 = 40
等级上限 = 60
基础等级 = 33
基础 = 6881.948
成长 = 777.052
CD = 16.0
TP成长 = 0.10
TP上限 = 7
是否有护石 = 1
演出时间 = 1.2
def 装备护石(self):
self.倍率 *= 1.23
class 湮灭之瞳技能21(湮灭之瞳主动技能):
名称 = '黑暗禁域'
所在等级 = 40
等级上限 = 60
基础等级 = 33
基础 = 6500.105
成长 = 733.895
CD = 16.0
TP成长 = 0.10
TP上限 = 7
演出时间 = 4.0
class 湮灭之瞳技能22(湮灭之瞳主动技能):
名称 = '元素轰炸'
所在等级 = 45
等级上限 = 60
基础等级 = 31
基础 = 16196.139
成长 = 1833.861
CD = 32
TP成长 = 0.10
TP上限 = 7
是否有护石 = 1
演出时间 = 2.0
def 装备护石(self):
self.倍率 *= 1.23
class 湮灭之瞳技能23(湮灭之瞳主动技能):
名称 = '元素浓缩球'
所在等级 = 60
等级上限 = 40
基础等级 = 23
基础 = 14117.087
成长 = 1593.913
CD = 24
TP成长 = 0.10
TP上限 = 7
是否有护石 = 1
演出时间 = 1.0
def 装备护石(self):
self.倍率 *= 1.26
class 湮灭之瞳技能24(湮灭之瞳主动技能):
名称 = '元素幻灭'
所在等级 = 70
等级上限 = 40
基础等级 = 18
基础 = 22054.889
成长 = 2490.111
CD = 40.0
TP成长 = 0.10
TP上限 = 7
是否有护石 = 1
演出时间 = 1.2
def 装备护石(self):
self.倍率 *= 1.23
class 湮灭之瞳技能25(湮灭之瞳主动技能):
名称 = '元素禁域'
所在等级 = 75
等级上限 = 40
基础等级 = 16
基础 = 36737.1875
成长 = 4147.8125
CD = 32.0
演出时间 = 0.4
class 湮灭之瞳技能26(湮灭之瞳主动技能):
名称 = '聚能魔炮'
所在等级 = 80
等级上限 = 40
基础等级 = 13
基础 = 45659.769
成长 = 5155.231
CD = 36.0
演出时间 = 1.5
# Instantiate every sequentially numbered skill class
# (湮灭之瞳技能0, 湮灭之瞳技能1, ...) defined above into a list.
# The previous version probed the names with exec() inside a bare ``except``
# that silently swallowed *any* error raised while constructing a skill;
# looking the names up in globals() keeps the same stop-at-first-missing-name
# behaviour without hiding genuine construction bugs.
湮灭之瞳技能列表 = []
i = 0
while True:
    技能类 = globals().get('湮灭之瞳技能' + str(i))
    if 技能类 is None:
        break
    湮灭之瞳技能列表.append(技能类())
    i += 1
湮灭之瞳技能序号 = dict()
for i in range(len(湮灭之瞳技能列表)):
湮灭之瞳技能序号[湮灭之瞳技能列表[i].名称] = i
湮灭之瞳一觉序号 = 0
湮灭之瞳二觉序号 = 0
湮灭之瞳三觉序号 = 0
for i in 湮灭之瞳技能列表:
if i.所在等级 == 50:
湮灭之瞳一觉序号 = 湮灭之瞳技能序号[i.名称]
if i.所在等级 == 85:
湮灭之瞳二觉序号 = 湮灭之瞳技能序号[i.名称]
if i.所在等级 == 100:
湮灭之瞳三觉序号 = 湮灭之瞳技能序号[i.名称]
湮灭之瞳护石选项 = ['无']
for i in 湮灭之瞳技能列表:
if i.是否有伤害 == 1 and i.是否有护石 == 1:
湮灭之瞳护石选项.append(i.名称)
湮灭之瞳符文选项 = ['无']
for i in 湮灭之瞳技能列表:
if i.所在等级 >= 20 and i.所在等级 <= 80 and i.所在等级 != 50 and i.是否有伤害 == 1:
湮灭之瞳符文选项.append(i.名称)
class 湮灭之瞳角色属性(角色属性):
职业名称 = '湮灭之瞳'
武器选项 = ['魔杖', '法杖']
# '物理百分比','魔法百分比','物理固伤','魔法固伤'
伤害类型选择 = ['魔法百分比']
# 默认
伤害类型 = '魔法百分比'
防具类型 = '布甲'
防具精通属性 = ['智力']
主BUFF = 2.07
# 基础属性(含唤醒)
基础力量 = 774
基础智力 = 976
# 适用系统奶加成
力量 = 基础力量
智力 = 基础智力
# 人物基础 + 唤醒
物理攻击力 = 65.0
魔法攻击力 = 65.0
独立攻击力 = 1045.0
火属性强化 = 13
冰属性强化 = 13
光属性强化 = 13
暗属性强化 = 13
def __init__(self):
self.技能栏 = copy.deepcopy(湮灭之瞳技能列表)
self.技能序号 = copy.deepcopy(湮灭之瞳技能序号)
def 属性强化加成(self):
属性强化值 = 0
for i in self.技能栏:
if i.名称 != '元素环绕':
属性强化值 += 0
else:
属性强化值 += i.属强加成()
return (属性强化值)
def 伤害指数计算(self):
self.冰属性强化 += self.技能栏[self.技能序号['元素环绕']].属强加成()
self.光属性强化 += self.技能栏[self.技能序号['元素环绕']].属强加成()
self.火属性强化 += self.技能栏[self.技能序号['元素环绕']].属强加成()
self.暗属性强化 += self.技能栏[self.技能序号['元素环绕']].属强加成()
self.冰属性强化 += self.技能栏[self.技能序号['元素融合']].属强加成()
self.光属性强化 += self.技能栏[self.技能序号['元素融合']].属强加成()
self.火属性强化 += self.技能栏[self.技能序号['元素融合']].属强加成()
self.暗属性强化 += self.技能栏[self.技能序号['元素融合']].属强加成()
基准倍率 = 1.5 * self.主BUFF * (1 - 443215 / (443215 + 20000))
面板 = (self.面板智力()/250+1) * (self.魔法攻击力 + self.进图魔法攻击力) * (1 + self.百分比三攻)
属性倍率=1.05+0.0045*max(self.火属性强化,self.冰属性强化,self.光属性强化,self.暗属性强化)
增伤倍率=1+self.伤害增加
增伤倍率*=1+self.暴击伤害
增伤倍率*=1+self.最终伤害
增伤倍率*=self.技能攻击力
增伤倍率*=1+self.持续伤害*(1-0.1*self.持续伤害计算比例)
增伤倍率*=1+self.附加伤害+self.属性附加*属性倍率
self.伤害指数=面板*属性倍率*增伤倍率*基准倍率/100
def 被动倍率计算(self):
for i in self.技能栏:
if i.关联技能 != ['无']:
if i.关联技能 == ['所有']:
for j in self.技能栏:
if j.是否有伤害 == 1:
j.被动倍率 *= i.加成倍率(self.武器类型)
else :
for k in i.关联技能:
self.技能栏[self.技能序号[k]].被动倍率 *= i.加成倍率(self.武器类型)
# Will添加
if i.关联技能2 != ['无']:
if i.关联技能2 == ['所有']:
for j in self.技能栏:
if j.是否有伤害 == 1:
j.被动倍率 *= i.加成倍率2(self.武器类型)
else :
for k in i.关联技能2:
self.技能栏[self.技能序号[k]].被动倍率 *= i.加成倍率2(self.武器类型)
# Will添加
if i.关联技能3 != ['无']:
if i.关联技能3 == ['所有']:
for j in self.技能栏:
if j.是否有伤害 == 1:
j.被动倍率 *= i.加成倍率3(self.武器类型)
else :
for k in i.关联技能3:
self.技能栏[self.技能序号[k]].被动倍率 *= i.加成倍率3(self.武器类型)
def 伤害计算(self, x=0):
self.所有属性强化(self.进图属强)
# Will添加
self.CD倍率计算()
self.加算冷却计算()
self.被动倍率计算()
self.伤害指数计算()
技能释放次数 = []
技能单次伤害 = []
技能总伤害 = []
# 技能释放次数计算
for i in self.技能栏:
if i.是否有伤害 == 1:
if self.次数输入[self.技能序号[i.名称]] == '/CD':
技能释放次数.append(int((self.时间输入 - i.演出时间) / i.等效CD(self.武器类型) + 1 + i.基础释放次数))
else:
技能释放次数.append(int(self.次数输入[self.技能序号[i.名称]]) + i.基础释放次数)
else:
技能释放次数.append(0)
for i in self.技能栏:
if i.关联技能4 != ['无']:
for j in i.关联技能4:
i.元素之力蓄力数量 += 技能释放次数[self.技能序号[j]]
# 技能单次伤害计算
for i in self.技能栏:
if i.是否主动 == 1 and i.名称 != '元素炮' :
技能单次伤害.append(i.等效百分比(self.武器类型) * self.伤害指数 * i.被动倍率)
elif i.名称 == '元素炮':
技能单次伤害.append(i.等效百分比(self.武器类型) * self.伤害指数 * i.被动倍率*
self.技能栏[self.技能序号['元素循环']].加成倍率(self.武器类型)*
self.技能栏[self.技能序号['超卓之心']].加成倍率(self.武器类型)*
self.技能栏[self.技能序号['卓越之力']].加成倍率(self.武器类型)*
(1.0 + self.技能栏[self.技能序号['元素之力']].加成倍率(self.武器类型)*5))
else:
技能单次伤害.append(0)
# 单技能伤害合计
for i in self.技能栏:
if i.是否主动 == 1 and 技能释放次数[self.技能序号[i.名称]] != 0:
技能总伤害.append(技能单次伤害[self.技能序号[i.名称]] * 技能释放次数[self.技能序号[i.名称]] * (
1 + self.白兔子技能 * 0.20 + self.年宠技能 * 0.10 * self.宠物次数[self.技能序号[i.名称]] / 技能释放次数[
self.技能序号[i.名称]] + self.斗神之吼秘药 * 0.12))
else:
技能总伤害.append(0)
总伤害 = 0
for i in self.技能栏:
总伤害 += 技能总伤害[self.技能序号[i.名称]]
if x == 0:
return 总伤害
if x == 1:
详细数据 = []
for i in range(0, len(self.技能栏)):
详细数据.append(技能释放次数[i])
详细数据.append(技能总伤害[i])
if 技能释放次数[i] != 0:
详细数据.append(技能总伤害[i] / 技能释放次数[i])
else:
详细数据.append(0)
if 总伤害 != 0:
详细数据.append(技能总伤害[i] / 总伤害 * 100)
else:
详细数据.append(0)
return 详细数据
class 湮灭之瞳(角色窗口):
    # UI window wiring for the 湮灭之瞳 job.

    def 窗口属性输入(self):
        # Seed the window with fresh attribute objects and the module-level
        # awakening indices / option lists computed above.
        self.初始属性 = 湮灭之瞳角色属性()
        self.角色属性A = 湮灭之瞳角色属性()
        self.角色属性B = 湮灭之瞳角色属性()
        self.一觉序号 = 湮灭之瞳一觉序号
        self.二觉序号 = 湮灭之瞳二觉序号
        self.三觉序号 = 湮灭之瞳三觉序号
        # deepcopy so per-window edits never mutate the shared module lists
        self.护石选项 = copy.deepcopy(湮灭之瞳护石选项)
        self.符文选项 = copy.deepcopy(湮灭之瞳符文选项)
| [
"[email protected]"
] | |
551b428503874c903e41834e1c62952b6faaeea5 | 8baa6d8e35a17f331345d9f314cdb2787653d38a | /src/exojax/spec/exomol.py | b13173a276fb13d010cf9c32fe7d85bedb6157c2 | [
"MIT"
] | permissive | bmorris3/exojax | 2fb1dae486a1d4d7a91ee8e9fdd1c9e616fb1b3f | 67d1b6c868d69892d4bbf9e620ed05e432cfe61f | refs/heads/master | 2023-09-04T20:12:32.817699 | 2021-06-12T06:14:00 | 2021-06-12T06:14:00 | 379,588,979 | 0 | 0 | MIT | 2021-06-23T12:03:57 | 2021-06-23T12:03:57 | null | UTF-8 | Python | false | false | 1,356 | py | import numpy as np
def Sij0(A, g, nu_lines, elower, QTref):
    """Reference line strength S0 at Tref = 296 K.

    Note:
        Tref = 296 K.  (The original docstring also listed an ``Mmol``
        argument that this function never had; it has been removed.)

    Args:
        A: Einstein coefficient (s-1)
        g: the upper state statistical weight
        nu_lines: line center wavenumber (cm-1)
        elower: lower state energy (cm-1)
        QTref: partition function Q(Tref)

    Returns:
        S0: line strength at Tref (cm)
    """
    ccgs = 29979245800.0           # speed of light in cgs (cm/s)
    hcperk = 1.4387773538277202    # hc/kB in cgs (cm K)
    Tref = 296.0
    # Boltzmann population of the lower state and the stimulated-emission
    # correction (1 - exp(-hc*nu/kT)); expm1 keeps precision for small nu.
    boltzmann_lower = np.exp(-hcperk * elower / Tref)
    stimulated_emission = -np.expm1(-hcperk * nu_lines / Tref)
    denominator = 8.0 * np.pi * ccgs * nu_lines ** 2 * QTref
    return A * g * boltzmann_lower * stimulated_emission / denominator
def gamma_exomol(P, T, n_air, alpha_ref):
    """Pressure-broadening gamma factor (ExoMol convention).

    Args:
        P: pressure (bar)
        T: temperature (K)
        n_air: temperature exponent of the air-broadened halfwidth
        alpha_ref: broadening parameter at the reference temperature

    Returns:
        gamma: pressure broadening factor (cm-1)
    """
    reference_temperature = 296.0  # K
    temperature_scaling = (reference_temperature / T) ** n_air
    return alpha_ref * P * temperature_scaling
def gamma_natural(A):
    """Natural (radiative) broadening width.

    The prefactor is 1/(4 pi c) = 2.6544188e-12 in cm-1 s.

    Args:
        A: Einstein A-factor (1/s)

    Returns:
        gamma_natural: natural line width (cm-1)
    """
    inv_four_pi_c = 2.6544188e-12  # 1/(4 pi c) in cm-1 s
    return inv_four_pi_c * A
| [
"[email protected]"
] | |
ec9c417e630a5d1e8843d6a2e23386c2db157f4e | d136c10cdd556055717f8b4330066f56052688b5 | /setup.py | 49f3c1e51fc0701380fc53e434bee2a0568658c7 | [
"CC0-1.0"
] | permissive | biomodels/BIOMD0000000083 | 24cfbb23497e0dae2fa764e13bbdfef565bd2a51 | e8221a507f10df4490c4c6dd004368c9384f2de6 | refs/heads/master | 2021-01-01T05:34:54.036641 | 2014-10-16T05:22:13 | 2014-10-16T05:22:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | from setuptools import setup, find_packages
# Packaging metadata for the BioModels entry BIOMD0000000083; the model's
# SBML XML files and README ship as package data for every package found.
setup(name='BIOMD0000000083',
      version=20140916,  # date-stamped integer, not a PEP 440 version string
      description='BIOMD0000000083 from BioModels',
      url='http://www.ebi.ac.uk/biomodels-main/BIOMD0000000083',
      maintainer='Stanley Gu',
      maintainer_url='[email protected]',
      packages=find_packages(),
      package_data={'': ['*.xml', 'README.md']},
) | [
"[email protected]"
] | |
8817047c2c71be9cdae859f54be02ae569fe144c | 724ae861f52fedc9d57f39c30971fab0114dc34b | /cms_content/cms_app.py | 1f95945cb2b4dac8bdf406ce033d0d0148078a6c | [
"BSD-3-Clause"
] | permissive | mmlic/django-cms-content | 36faa82f5303ba151d1f5542d895bbf65aec753a | d6b214542ab618b6fa4645483fa5bf58e8212f86 | refs/heads/master | 2021-01-16T18:06:01.393986 | 2010-09-18T14:54:45 | 2010-09-18T14:54:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from cms_content.menu import CMSContentMenu
class CMSContentApp(CMSApp):
    """django CMS apphook that mounts the cms_content URLs and menu on a page."""

    # Label shown in the CMS page "Application" dropdown (lazily translated).
    name = _(u"CMS Content App")
    # URLconf modules attached to pages using this apphook.
    urls = ["cms_content.urls"]
    # Menus contributed to the CMS navigation tree.
    menus = [CMSContentMenu]

# Register the apphook so it becomes selectable in the CMS admin.
apphook_pool.register(CMSContentApp)
| [
"[email protected]"
] | |
871af0fab6d27935b3a9d6894e5b69448e205e49 | e7d4b6fcace1509d37359776d9f72020dad3da41 | /part010/ch08_basemap/sec2_draw/test_2_plot_x_x.py | 955139e8b3ec761b1aa180217c7fb55b866cb87f | [] | no_license | LinyunGH/book_python_gis | b422e350222c4ab5711efb4cc6101e229bd26f7b | 067d75e324c006e2098827ac16ba38d4894b8a21 | refs/heads/master | 2020-04-09T22:25:35.049625 | 2018-10-14T09:56:38 | 2018-10-14T09:56:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,862 | py | # -*- coding: utf-8 -*-
print('=' * 40)
print(__file__)
from helper.textool import get_tmp_file
################################################################################
from mpl_toolkits.basemap import Basemap
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
para = {'projection': 'merc',
'lat_0': 0, 'lon_0': 120,
'resolution': 'h', 'area_thresh': .1,
'llcrnrlon': 116, 'llcrnrlat': 36.6,
'urcrnrlon': 124, 'urcrnrlat': 40.2 }
my_map = Basemap(**para)
my_map.drawcoastlines(); my_map.drawmapboundary()
################################################################################
lon = 121.60001; lat = 38.91027
x, y = my_map(lon, lat)
my_map.plot(x, y, 'bo', markersize=12)
# plt.show()
plt.savefig(get_tmp_file(__file__, '1'), bbox_inches='tight', dpi=600)
plt.savefig(get_tmp_file(__file__, '1', file_ext='pdf'), bbox_inches='tight', dpi=600)
plt.clf()
################################################################################
my_map = Basemap(**para)
my_map.drawcoastlines(); my_map.drawmapboundary()
lons = [121.60001, 121.38617, 117.19723]
lats = [38.91027, 37.53042, 39.12473]
x, y = my_map(lons, lats)
################################################################################
my_map.plot(x, y, 'bo', markersize=10)
# plt.show()
plt.savefig(get_tmp_file(__file__, '2'), bbox_inches='tight', dpi=600)
plt.savefig(get_tmp_file(__file__, '2', file_ext='pdf'), bbox_inches='tight', dpi=600)
plt.clf()
################################################################################
my_map = Basemap(**para)
my_map.drawcoastlines(); my_map.drawmapboundary()
my_map.plot(x, y, marker=None,color='m')
# plt.show()
plt.savefig(get_tmp_file(__file__, '3'), bbox_inches='tight', dpi=600)
plt.savefig(get_tmp_file(__file__, '3', file_ext='pdf'), bbox_inches='tight', dpi=600)
plt.clf()
| [
"[email protected]"
] | |
662c71161cfd8d0510f97315e3f4b811738fdcf3 | c1c7214e1f9230f19d74bb9776dac40d820da892 | /examples/django/model一般/FilePathFieldの使い方の例/project/app/views.py | 862e10440074a4c00591ebc229e8546a7d8428c2 | [] | no_license | FujitaHirotaka/djangoruler3 | cb326c80d9413ebdeaa64802c5e5f5daadb00904 | 9a743fbc12a0efa73dbc90f93baddf7e8a4eb4f8 | refs/heads/master | 2020-04-01T13:32:28.078110 | 2018-12-13T00:39:56 | 2018-12-13T00:39:56 | 153,256,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 412 | py | from django.shortcuts import render
import os
from .forms import *
from pathlib import Path
import re
from ajax.views import z
# NOTE: this part is unrelated to the main example
########################
d=z()
########################
def index(request):
    """Render the demo page, exposing every FilePathField example form class."""
    context_forms = {
        "form": Form,
        "form2": Form2,
        "form3": Form3,
        "form4": Form4,
        "form5": Form5,
    }
    # ``d`` is the shared module-level context dict (populated by ajax.views.z).
    d.update(context_forms)
    return render(request, 'app/index.html', d)
| [
"[email protected]"
] | |
4e633c267ce51f581b210db672c0ed1041e02ffd | 37ff29a9a83eafbf0f54e2ce0bf2c0255b1663a1 | /build/husky_control/catkin_generated/generate_cached_setup.py | d7af4a5285b20e1200840d1b9135e75b800eadd2 | [] | no_license | wy7727/husky | f8d9c2a05487f66efbfb58e8fc1c141efc10e177 | 7925bc34ae316639aef88fc3e6a8d36aba12620b | refs/heads/master | 2020-04-09T12:09:41.420418 | 2019-12-01T09:24:24 | 2019-12-01T09:24:24 | 160,337,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,353 | py | # -*- coding: utf-8 -*-
# NOTE(review): this file lives under build/.../catkin_generated/ and appears
# to be auto-generated by catkin -- regenerate it rather than editing by hand.
from __future__ import print_function

import argparse
import os
import stat
import sys

# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/kinetic/share/catkin/cmake', 'catkinConfig.cmake.in')):
    sys.path.insert(0, os.path.join('/opt/ros/kinetic/share/catkin/cmake', '..', 'python'))
try:
    from catkin.environment_cache import generate_environment_script
except ImportError:
    # search for catkin package in all workspaces and prepend to path
    for workspace in "/home/ying/wy_ws/devel;/home/ying/px4/catkin_ws/devel;/opt/ros/kinetic".split(';'):
        python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
        if os.path.isdir(os.path.join(python_path, 'catkin')):
            sys.path.insert(0, python_path)
            break
    from catkin.environment_cache import generate_environment_script

# Render the cached environment lines for this package's private env.sh.
code = generate_environment_script('/home/ying/wy_ws/devel/.private/husky_control/env.sh')

output_filename = '/home/ying/wy_ws/build/husky_control/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
    # debug print left disabled by the generator
    #print('Generate script for cached setup "%s"' % output_filename)
    f.write('\n'.join(code))

# Mark the cached setup script as executable for its owner.
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
| [
"[email protected]"
] | |
24ff4cfbe9ebc07dc6bf91e7a7bdf56035b30726 | b5402b40b69244380bc0d3f85ff65483d0505181 | /mongodb/factory/execode.py | c462619c70d66714d3b5f4d8d1e6279769e79df5 | [
"MIT"
] | permissive | RxJellyBot/Jelly-Bot | ea7b6bd100431736732f9f4cc739858ec148e3e2 | c7da1e91783dce3a2b71b955b3a22b68db9056cf | refs/heads/master | 2023-08-29T20:41:01.813945 | 2021-10-20T05:27:21 | 2021-10-20T05:27:21 | 189,347,226 | 5 | 1 | MIT | 2020-09-05T00:50:41 | 2019-05-30T04:47:48 | Python | UTF-8 | Python | false | false | 7,976 | py | """Execode-related data controllers."""
from datetime import timedelta
from typing import Type, Optional, Tuple
from bson import ObjectId
from django.http import QueryDict # pylint: disable=wrong-import-order
from extutils.dt import now_utc_aware
from flags import Execode, ExecodeCompletionOutcome, ExecodeCollationFailedReason
from models import ExecodeEntryModel, Model
from models.exceptions import ModelConstructionError
from mongodb.utils import ExtendedCursor
from mongodb.exceptions import NoCompleteActionError, ExecodeCollationError
from mongodb.helper import ExecodeCompletor, ExecodeRequiredKeys
from mongodb.factory.results import (
EnqueueExecodeResult, CompleteExecodeResult, GetExecodeEntryResult,
OperationOutcome, GetOutcome, WriteOutcome
)
from JellyBot.systemconfig import Database
from ._base import BaseCollection
from .mixin import GenerateTokenMixin
__all__ = ("ExecodeManager",)
DB_NAME = "execode"
class _ExecodeManager(GenerateTokenMixin, BaseCollection):
    """Data controller for pending Execode actions stored in ``execode.main``.

    An Execode is a one-time hex token that defers the execution of a user
    action; entries expire automatically via the MongoDB TTL index created in
    :meth:`build_indexes`.
    """

    token_length = ExecodeEntryModel.EXECODE_LENGTH  # length of generated hex tokens
    token_key = ExecodeEntryModel.Execode.key

    database_name = DB_NAME
    collection_name = "main"
    model_class = ExecodeEntryModel

    def build_indexes(self):
        # Tokens must be unique; the timestamp index doubles as a TTL so stale
        # entries are purged by MongoDB itself.
        self.create_index(ExecodeEntryModel.Execode.key, name="Execode", unique=True)
        self.create_index(ExecodeEntryModel.Timestamp.key,
                          name="Timestamp (for TTL)", expireAfterSeconds=Database.ExecodeExpirySeconds)

    def enqueue_execode(self, root_uid: ObjectId, execode_type: Execode, data_cls: Type[Model] = None,
                        **data_kw_args) \
            -> EnqueueExecodeResult:
        """
        Enqueue an Execode action.

        :param root_uid: user to execute the enqueued Execode
        :param execode_type: type of the execode
        :param data_cls: model class of the additional data class
        :param data_kw_args: arguments to construct the model
        :return: enqueuing result
        """
        execode = self.generate_hex_token()
        now = now_utc_aware(for_mongo=True)

        # Keyword data without a model class to hold it is a caller error.
        if not data_cls and data_kw_args:
            return EnqueueExecodeResult(WriteOutcome.X_NO_MODEL_CLASS)

        if data_cls:
            try:
                data = data_cls(**data_kw_args).to_json()
            except ModelConstructionError as ex:
                return EnqueueExecodeResult(WriteOutcome.X_INVALID_MODEL, ex)
        else:
            data = {}

        if execode_type == Execode.UNKNOWN:
            return EnqueueExecodeResult(WriteOutcome.X_UNKNOWN_EXECODE_ACTION)

        model, outcome, ex = self.insert_one_data(
            CreatorOid=root_uid, Execode=execode, ActionType=execode_type, Timestamp=now, Data=data)

        # Report the expiry so the caller can show it to the user.
        return EnqueueExecodeResult(
            outcome, ex, model, execode, now + timedelta(seconds=Database.ExecodeExpirySeconds))

    def get_queued_execodes(self, root_uid: ObjectId) -> ExtendedCursor[ExecodeEntryModel]:
        """
        Get the queued Execodes of ``root_uid``.

        :param root_uid: user OID to get the queued Execodes
        :return: a cursor yielding queued Execodes of the user
        """
        filter_ = {ExecodeEntryModel.CreatorOid.key: root_uid}
        return ExtendedCursor(self.find(filter_), self.count_documents(filter_), parse_cls=ExecodeEntryModel)

    def get_execode_entry(self, execode: str, action: Optional[Execode] = None) -> GetExecodeEntryResult:
        """
        Get the entry of an Execode action.

        Limits the result to only return the Execode with the action type ``action`` if it is not ``None``.

        :param execode: code of the Execode
        :param action: action of the Execode
        :return: result of getting the Execode
        """
        cond = {ExecodeEntryModel.Execode.key: execode}

        # NOTE(review): truthiness check -- an ``action`` that evaluates falsy
        # would be treated as "no filter"; confirm Execode members are truthy.
        if action:
            cond[ExecodeEntryModel.ActionType.key] = action

        ret: ExecodeEntryModel = self.find_one_casted(cond)

        if not ret:
            # Distinguish "token exists but with another action type" from
            # "token does not exist at all".
            if self.count_documents({ExecodeEntryModel.Execode.key: execode}) > 0:
                return GetExecodeEntryResult(GetOutcome.X_EXECODE_TYPE_MISMATCH)

            return GetExecodeEntryResult(GetOutcome.X_NOT_FOUND_ABORTED_INSERT)

        return GetExecodeEntryResult(GetOutcome.O_CACHE_DB, model=ret)

    def remove_execode(self, execode: str):
        """
        Delete the Execode entry.

        :param execode: execode of the entry to be deleted
        """
        self.delete_one({ExecodeEntryModel.Execode.key: execode})

    def _attempt_complete(self, execode: str, tk_model: ExecodeEntryModel, execode_kwargs: QueryDict) \
            -> Tuple[OperationOutcome, Optional[ExecodeCompletionOutcome], Optional[Exception]]:
        # Run the completion action and fold every failure mode into an
        # (operation outcome, completion outcome, exception) triple.
        cmpl_outcome = ExecodeCompletionOutcome.X_NOT_EXECUTED
        ex = None

        try:
            cmpl_outcome = ExecodeCompletor.complete_execode(tk_model, execode_kwargs)

            if cmpl_outcome.is_success:
                outcome = OperationOutcome.O_COMPLETED
                # Only consume (delete) the token once the action succeeded.
                self.remove_execode(execode)
            else:
                outcome = OperationOutcome.X_COMPLETION_FAILED
        except NoCompleteActionError as e:
            outcome = OperationOutcome.X_NO_COMPLETE_ACTION
            ex = e
        except ExecodeCollationError as e:
            if e.err_code == ExecodeCollationFailedReason.MISSING_KEY:
                outcome = OperationOutcome.X_MISSING_ARGS
            else:
                outcome = OperationOutcome.X_COLLATION_ERROR

            ex = e
        except Exception as e:
            # Catch-all boundary: surface unexpected errors via the result
            # object instead of letting them propagate to the caller.
            outcome = OperationOutcome.X_COMPLETION_ERROR
            ex = e

        return outcome, cmpl_outcome, ex

    def complete_execode(self, execode: str, execode_kwargs: dict, action: Optional[Execode] = None) \
            -> CompleteExecodeResult:
        """
        Finalize the pending Execode.

        :param execode: execode of the action to be completed
        :param execode_kwargs: arguments may be needed to complete the Execode action
        :param action: type of the Execode action
        """
        ex = None
        tk_model: Optional[ExecodeEntryModel] = None

        # Force type to be dict because the type of `execode_kwargs` might be django QueryDict
        if isinstance(execode_kwargs, QueryDict):
            execode_kwargs = execode_kwargs.dict()

        if not execode:
            outcome = OperationOutcome.X_EXECODE_EMPTY
            return CompleteExecodeResult(outcome, None, None, set(), ExecodeCompletionOutcome.X_NOT_EXECUTED)

        # Not using self.find_one_casted for catching `ModelConstructionError`
        get_execode = self.get_execode_entry(execode, action)

        if get_execode.success:
            tk_model = get_execode.model

            # Check for missing keys
            if missing_keys := ExecodeRequiredKeys.get_required_keys(tk_model.action_type).difference(execode_kwargs):
                return CompleteExecodeResult(OperationOutcome.X_MISSING_ARGS, None, tk_model, missing_keys,
                                             ExecodeCompletionOutcome.X_MISSING_ARGS)

            try:
                outcome, cmpl_outcome, ex = self._attempt_complete(execode, tk_model, execode_kwargs)
            except ModelConstructionError as e:
                outcome = OperationOutcome.X_CONSTRUCTION_ERROR
                cmpl_outcome = ExecodeCompletionOutcome.X_MODEL_CONSTRUCTION
                ex = e
        else:
            # Map the lookup failure onto the corresponding operation outcome.
            cmpl_outcome = ExecodeCompletionOutcome.X_EXECODE_NOT_FOUND

            if get_execode.outcome == GetOutcome.X_NOT_FOUND_ABORTED_INSERT:
                outcome = OperationOutcome.X_EXECODE_NOT_FOUND
            elif get_execode.outcome == GetOutcome.X_EXECODE_TYPE_MISMATCH:
                outcome = OperationOutcome.X_EXECODE_TYPE_MISMATCH
            else:
                outcome = OperationOutcome.X_ERROR

        return CompleteExecodeResult(outcome, ex, tk_model, set(), cmpl_outcome)


# Module-level singleton used by the rest of the application.
ExecodeManager = _ExecodeManager()
| [
"[email protected]"
] | |
bc311155799542ad602305eb319bcfe862940578 | f37978530be6cf40bd7b4e5dbaf63f779114ff95 | /src/bioregistry/curation/add_descriptions_from_gs.py | 04f3759ae75bb8aaa0a3fc81e7d23e8b6bb18533 | [
"MIT",
"CC0-1.0",
"CC-PDDC",
"CC-BY-4.0"
] | permissive | biopragmatics/bioregistry | 03d983e96b65681352d0eddbe39902059d299e6d | a05af7e42f60109f01133e3072bb673423b74dd3 | refs/heads/main | 2023-08-30T21:02:44.854342 | 2023-08-30T01:10:16 | 2023-08-30T01:10:16 | 319,481,281 | 77 | 28 | MIT | 2023-09-12T08:21:24 | 2020-12-08T00:33:21 | Python | UTF-8 | Python | false | false | 940 | py | # -*- coding: utf-8 -*-
"""Add descriptions from a google curation sheet."""
import click
import pandas as pd
import bioregistry
URL = (
"https://docs.google.com/spreadsheets/d/e/2PACX-1vQVw4odnZF34f267p9WqdQOhi"
"Y9tewD-jbnATgpi5W9smbkemvbOcVZSdeboXknoWxDhPyvtcxUYiQO/pub?gid=1947246172&single=true&output=tsv"
)
@click.command()
def main():
"""Add descriptions from a google curation sheet."""
df = pd.read_csv(URL, sep="\t")
del df[df.columns[0]]
df = df[df.description.notna()]
df = df[df["prefix"].map(lambda p: bioregistry.get_description(p) is None)]
df = df[df["prefix"].map(lambda p: bioregistry.get_obofoundry_prefix(p) is None)]
click.echo(df.to_markdown())
r = dict(bioregistry.read_registry())
for prefix, description in df[["prefix", "description"]].values:
r[prefix].description = description
bioregistry.write_registry(r)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
e1e3e0a6195e8962484a4fa4111f09eb936c7802 | add74ecbd87c711f1e10898f87ffd31bb39cc5d6 | /xcp2k/classes/_each148.py | 382a04d946964f99cf0e1924c7cf471beca627c8 | [] | no_license | superstar54/xcp2k | 82071e29613ccf58fc14e684154bb9392d00458b | e8afae2ccb4b777ddd3731fe99f451b56d416a83 | refs/heads/master | 2021-11-11T21:17:30.292500 | 2021-11-06T06:31:20 | 2021-11-06T06:31:20 | 62,589,715 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,113 | py | from xcp2k.inputsection import InputSection
class _each148(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Just_energy = None
self.Powell_opt = None
self.Qs_scf = None
self.Xas_scf = None
self.Md = None
self.Pint = None
self.Metadynamics = None
self.Geo_opt = None
self.Rot_opt = None
self.Cell_opt = None
self.Band = None
self.Ep_lin_solver = None
self.Spline_find_coeffs = None
self.Replica_eval = None
self.Bsse = None
self.Shell_opt = None
self.Tddft_scf = None
self._name = "EACH"
self._keywords = {'Just_energy': 'JUST_ENERGY', 'Powell_opt': 'POWELL_OPT', 'Qs_scf': 'QS_SCF', 'Xas_scf': 'XAS_SCF', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt': 'GEO_OPT', 'Rot_opt': 'ROT_OPT', 'Cell_opt': 'CELL_OPT', 'Band': 'BAND', 'Ep_lin_solver': 'EP_LIN_SOLVER', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Replica_eval': 'REPLICA_EVAL', 'Bsse': 'BSSE', 'Shell_opt': 'SHELL_OPT', 'Tddft_scf': 'TDDFT_SCF'}
| [
"[email protected]"
] | |
f43afaa4f0016dfe69be9baf4880dfa31bc9e26b | 85e50dc8487701f0c15c72141681c849021d9826 | /news/scripts/indiatoday.py | 0dc1c52a98c0c909ed68bcbfa7521e59391e3c62 | [] | no_license | sadakchap/news-aggregator | 4c46a9b3ab18cf4c693dff2c71c9c7fc58f1ee1c | dbd7e031d783f9cc9cf9e652be8b70d73d53d9cb | refs/heads/master | 2022-12-09T12:34:43.929043 | 2021-06-02T01:46:30 | 2021-06-02T01:46:30 | 199,251,857 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 880 | py | from bs4 import BeautifulSoup
import requests
from news.models import NewsBox
requests.packages.urllib3.disable_warnings()
url = 'https://www.indiatoday.in/'
source = requests.get(url).text
soup = BeautifulSoup(source, "lxml")
news_box = soup.find('ul', class_='itg-listing')
# print(news_box.prettify())
def indiatoday():
for news_story in news_box.find_all('li')[:7]:
news_link = url + news_story.find('a').get('href')
img_src = None
news_title = news_story.find('a').text
if not NewsBox.objects.filter(news_link=news_link).exists():
news = NewsBox()
news.src_name = 'India Today'
news.src_link = url
news.title = news_title
news.news_link = news_link
news.img = img_src
news.save()
# print(news_link)
# print(news_title)
# print('*'*80)
| [
"[email protected]"
] | |
53f066b9b58f4908fa80f7b05ad0314541a78b2f | d83120a187421256f4a59e7dec582657b8b8bb88 | /starter/.history/backend/flaskr/__init___20210711040044.py | 96f30636e696400be6d39026aec94776b5b72129 | [] | no_license | saraalmuraytib/Trivia | 2dc382cf0c124d673cad699cb01563ca87389d38 | f88e21ac04681f4131c737f1674dcde32731071c | refs/heads/main | 2023-06-30T04:52:08.612654 | 2021-08-03T15:33:50 | 2021-08-03T15:33:50 | 384,743,716 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,468 | py | import os
from flask import Flask, request, abort, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
import random
from models import setup_db, Question, Category
QUESTIONS_PER_PAGE = 10
def paginate_questions(request, selection):
page = request.args.get('page', 1, type=int)
start = (page - 1) * QUESTIONS_PER_PAGE
end = start + QUESTIONS_PER_PAGE
questions = [question.format() for question in selection]
current_questions = questions[start:end]
return current_questions
def create_app(test_config=None):
# create and configure the app
app = Flask(__name__)
setup_db(app)
'''
@TODO 1: Set up CORS. Allow '*' for origins. Delete the sample route after completing the TODOs
'''
CORS(app)
#CORS(app, resources={'/': {'origins': '*'}})
'''
@TODO 2: Use the after_request decorator to set Access-Control-Allow
'''
@app.after_request
def after_request(response):
response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization,true')
response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
return response
'''
@TODO 3:
Create an endpoint to handle GET requests
for all available categories.
'''
@app.route('/categories')
def get_categories():
categories = Category.query.order_by(Category.type).all()
if len(categories) == 0:
abort(404)
return jsonify({
'success': True,
'categories': {category.id: category.type for category in categories}
})
'''
@TODO 4:
Create an endpoint to handle GET requests for questions,
including pagination (every 10 questions).
This endpoint should return a list of questions,
number of total questions, current category, categories.
TEST: At this point, when you start the application
you should see questions and categories generated,
ten questions per page and pagination at the bottom of the screen for three pages.
Clicking on the page numbers should update the questions.
'''
@app.route('/questions')
def get_questions():
selection = Question.query.order_by(Question.id).all()
current_questions = paginate_questions(request, selection)
categories = Category.query.order_by(Category.type).all()
if len(current_questions) == 0:
abort(404)
'''
This endpoint should return a list of questions,
number of total questions, current category, categories
'''
return jsonify({
'success': True,
'questions': current_questions,
'total_questions': len(selection),
'categories': {category.id: category.type for category in categories},
'current_category': None
})
'''
@TODO 5:
Create an endpoint to DELETE question using a question ID.
TEST: When you click the trash icon next to a question, the question will be removed.
This removal will persist in the database and when you refresh the page.
'''
@app.route('/questions/<int:question_id>', methods=['DELETE'])
def delete_question(question_id):
try:
question = Question.query.filter(
Question.id == question_id).one_or_none()
if question is None:
abort(404)
question.delete()
return jsonify({
'success': True,
'deleted': question_id
})
except:
abort(422)
'''
@TODO 6:
Create an endpoint to POST a new question,
which will require the question and answer text,
category, and difficulty score.
TEST: When you submit a question on the "Add" tab,
the form will clear and the question will appear at the end of the last page
of the questions list in the "List" tab.
'''
@app.route('/questions', methods=['POST'])
def create_question():
body = request.get_json()
new_question = body.get('question')
new_answer = body.get('answer')
new_category = body.get('category')
new_difficulty = body.get('difficulty')
try:
question = Question(question=new_question, answer=new_answer,
category=new_category,difficulty=new_difficulty)
question.insert()
return jsonify({
'success': True,
'created': question.id,
})
except:
abort(422)
'''
@TODO 7:
Create a POST endpoint to get questions based on a search term.
It should return any questions for whom the search term
is a substring of the question.
TEST: Search by any phrase. The questions list will update to include
only question that include that string within their question.
Try using the word "title" to start.
'''
'''
@TODO 8:
Create a GET endpoint to get questions based on category.
TEST: In the "List" tab / main screen, clicking on one of the
categories in the left column will cause only questions of that
category to be shown.
'''
'''
@TODO 9:
Create a POST endpoint to get questions to play the quiz.
This endpoint should take category and previous question parameters
and return a random questions within the given category,
if provided, and that is not one of the previous questions.
TEST: In the "Play" tab, after a user selects "All" or a category,
one question at a time is displayed, the user is allowed to answer
and shown whether they were correct or not.
'''
'''
@TODO 10:
Create error handlers for all expected errors
including 404 and 422.
'''
return app
| [
"[email protected]"
] | |
ef3d8382444a8442f31cd305561d3477ba1a01b4 | 4ff8676136167cdd81d7a983272102fff86360e8 | /python/258. 各位相加.py | 138d139307262136c271371b5e43d5a1c038538f | [] | no_license | geniuscynic/leetcode | 0ec256af2377d19fee22ce736462a7e95e3f4e67 | 379a8f27f8213951ee8be41bd56598036995d267 | refs/heads/master | 2023-07-19T07:22:20.001770 | 2021-09-07T14:50:40 | 2021-09-07T14:50:40 | 297,277,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,290 | py | import sys
from collections import defaultdict
from collections import Counter
from collections import deque
# Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def addDigits(self, num: int) -> int:
res = num
while res >= 10:
num = res
res = 0
while num != 0:
res += num % 10
num = num // 10
return res
def coverttoTree():
ls =deque([6,2,8,0,4,7,9,None,None,3,5])
temp = TreeNode(ls.popleft())
res = deque()
res.append(temp)
while ls:
left = ls.popleft()
right = ls.popleft()
node = res.popleft()
#print(node.val, left, right)
if left != None:
node.left = TreeNode(left)
res.append(node.left)
if right != None:
node.right = TreeNode(right)
res.append(node.right)
return temp
if __name__ == "__main__":
solution = Solution()
nums1 = 38
m = TreeNode(2)
nums2 = TreeNode(4)
n = 3
result = solution.addDigits(nums1)
#print(solution.ls)
print(result) | [
"[email protected]"
] | |
c4eed199172881acd25a5d986044c3aded598837 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/2D_20200722174654.py | d47af0911f58fcf03a29cbe21f4c5d543a6f60ef | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,036 | py | def array(n,m):
# where n is row size and m is column size
array = [[0 for x in range(n)] for x in range(m)]
print(array)
a = [[2, 4, 6, 8, 10], [3, 6, 9, 12, 15], [4, 8, 12, 16, 20]]
# where the first arguement reps the row and second arguement reps the column
print(a[0][3])
def hourGlass(arr):
# you have a 2d array
# get max hour glass
# var maxCount to keep record of the max count
# what do you know about an hourglass
# the indicies fall in a pattern where
# i and i+2 are not equal to 0 and i + 1 is equal to 0
maxCount = 1
totalCount = 0
count = 0
j = 3
if arr !=[]:
for i in range(len(arr)):
# remember j is looping through arr[i]
for j in range(len(arr[i])):
print(arr[i][j],arr[i])
else:
return 0
print(hourGlass([[1,1,1,0,0,0],[0,1,0,0,0,0],[1,1,1,0,0,0],[0,0,2,4,4,0],[0,0,0,2,0,0],[0,0,1,2,4,0]])) | [
"[email protected]"
] | |
74f652c82ed09864bfc29d35dfe58397eefec789 | 613d8e4af67407c8e95effb1759b9ffca5246cd3 | /oc_stats/common.py | c9028eedcbcc4e889573a69a6a85ae4c9a630def | [] | no_license | old-castle-fansubs/stats | c0286af1c66d82165a526b4a307c79235da6807a | dd814252918d1d5f08af90ec0d39b008249639a1 | refs/heads/master | 2021-07-21T15:14:22.622202 | 2021-07-18T15:02:35 | 2021-07-18T15:02:35 | 189,878,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,049 | py | import dataclasses
import typing as T
from datetime import date, datetime, timedelta
from pathlib import Path
PROJ_DIR = Path(__file__).parent
ROOT_DIR = PROJ_DIR.parent
DATA_DIR = ROOT_DIR / "data"
CACHE_DIR = DATA_DIR / "cache"
STATIC_DIR = PROJ_DIR / "static"
def json_default(obj: T.Any) -> T.Any:
if dataclasses.is_dataclass(obj):
return dataclasses.asdict(obj)
if isinstance(obj, (datetime, date)):
return obj.isoformat()
if isinstance(obj, timedelta):
return obj.total_seconds()
return None
def convert_to_diffs(
items: dict[date, T.Union[int, float]]
) -> dict[date, T.Union[int, float]]:
ret: dict[date, T.Union[int, float]] = {}
if not items:
return ret
prev_key = list(items.keys())[0]
prev_value = None
for key, value in sorted(items.items(), key=lambda kv: kv[0]):
if prev_value is not None:
if abs((key - prev_key).days) <= 1:
ret[key] = value - prev_value
prev_key = key
prev_value = value
return ret
| [
"[email protected]"
] | |
1afc24adfe7a79c184a08009fa6d8f3031d965d5 | b02a759c59a9d469f5fee00b3775703c5405e1b2 | /16.RecommenderSystems/test_evaluation_model.py | 0697d622f761ff08d2ef829abbb01ea9d92d7d8e | [] | no_license | asdlei99/MachineLearningCombat | a18672e11306d26cc59b9bb16ee90db06af24df5 | 57e8dafcef849eb407bc79a0b0724abc9931bd27 | refs/heads/master | 2021-09-15T12:50:24.531567 | 2018-06-02T01:30:27 | 2018-06-02T01:30:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,754 | py | import random
import math
def SplitData(data, M, k, seed):
test = []
train = []
random.seed(seed)
for user, item in data:
if random.randint(0, M) == k:
test.append([user, item])
else:
train.append([user, item])
return train, test
# 准确率
def Precision(train, test, N):
hit = 0
all = 0
for user in train.keys():
tu = test[user]
rank = GetRecommendation(user, N)
for item, pui in rank:
if item in tu:
hit += 1
all += N
return hit / (all * 1.0)
# 召回率
def Recall(train, test, N):
hit = 0
all = 0
for user in train.keys():
tu = test[user]
rank = GetRecommendation(user, N)
for item, pui in rank:
if item in tu:
hit += 1
all += len(tu)
return hit / (all * 1.0)
# 覆盖率
def Coverage(train, test, N):
recommend_items = set()
all_items = set()
for user in train.keys():
for item in train[user].keys():
all_items.add(item)
rank = GetRecommendation(user, N)
for item, pui in rank:
recommend_items.add(item)
return len(recommend_items) / (len(all_items) * 1.0)
# 新颖度
def Popularity(train, test, N):
item_popularity = dict()
for user, items in train.items():
for item in items.keys():
if item not in item_popularity:
item_popularity[item] = 0
item_popularity[item] += 1
ret = 0
n = 0
for user in train.keys():
rank = GetRecommendation(user, N)
for item, pui in rank:
ret += math.log(1 + item_popularity[item])
n += 1
ret /= n * 1.0
return ret
| [
"[email protected]"
] | |
924614ca77686fbed9c16d7c46a7bc9e6c37e0a0 | 6471dcdf07fc0bdcde9797914857d154d85e3c07 | /pie_app/bSerial.py | 8f5c20566aaf7756b96301008a6c2c9f609a675b | [] | no_license | cudmore/pie | e43ec3c4c95acb5a051a25a8d5549071908ed5c8 | b74b105bc101a8504453d20a066fcd764864731f | refs/heads/master | 2023-05-15T01:51:54.326274 | 2023-05-09T02:17:11 | 2023-05-09T02:17:11 | 139,335,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,915 | py | """
Author: Robert H Cudmore
Date: 20180808
"""
import time, threading, serial, queue
import logging
logger = logging.getLogger('flask.app')
#########################################################################
class mySerialThread(threading.Thread):
"""
background thread which monitors inSerialQueue and sends commands out serial.
"""
def __init__(self, inSerialQueue, outSerialQueue, errorSerialQueue, port, baud):
threading.Thread.__init__(self)
self._stop_event = threading.Event()
self.inSerialQueue = inSerialQueue
self.outSerialQueue = outSerialQueue
self.errorSerialQueue = errorSerialQueue
self.port = port #'/dev/ttyACM0'
self.baud = baud #115200
logger.debug('mySerialThread initializing, port:' + str(port) + ' baud:' + str(baud))
self.mySerial = None
try:
# there is no corresponding self.mySerial.close() ???
self.mySerial = serial.Serial(port, baud, timeout=0.25)
except (serial.SerialException) as e:
logger.error(str(e))
errorSerialQueue.put(str(e))
except:
logger.error('other exception in mySerialThread init')
raise
#else:
# errorSerialQueue.put('None')
def stop(self):
"""
call stop() then join() to ensure thread is done
"""
self._stop_event.set()
def run(self):
logger.debug('starting mySerialThread')
while not self._stop_event.is_set():
try:
# serialDict is {'type': command/dump, 'str': command/filePath}
serialDict = self.inSerialQueue.get(block=False, timeout=0)
except (queue.Empty) as e:
# there was nothing in the queue
pass
else:
# there was something in the queue
#logger.info('serialThread inSerialQueue: "' + str(serialCommand) + '"')
serialType = serialDict['type']
serialCommand = serialDict['str']
try:
if self.mySerial is not None:
if serialType == 'dump':
# dump a teensy/arduino trial to a file
fullSavePath = serialCommand
self.mySerial.write('d\n'.encode()) # write 'd\n'
#time.sleep(0.01)
resp = self.mySerial.readline().decode().strip()
with open(fullSavePath, 'w') as file:
while resp:
file.write(resp + '\n')
resp = self.mySerial.readline().decode().strip()
elif serialType == 'command':
# send a command to teensy and get one line response
if not serialCommand.endswith('\n'):
serialCommand += '\n'
self.mySerial.write(serialCommand.encode())
#time.sleep(0.01)
resp = self.mySerial.readline().decode().strip()
self.outSerialQueue.put(resp)
logger.info('serialThread outSerialQueue: "' + str(resp) + '"')
else:
logger.error('bad serial command type' + str(serialDict))
except (serial.SerialException) as e:
logger.error(str(e))
except:
logger.error('other exception in mySerialThread run')
raise
# make sure not to remove this
time.sleep(0.1)
| [
"[email protected]"
] | |
3802507496894d4653a42e21e7b67071fb3f637a | d5e8610ad12b6733e5b014abba5cf356cb658a44 | /network/client.py | cbcce48537f16c421516b99c718c8210d6b6b600 | [
"WTFPL"
] | permissive | EliasFarhan/GBJam | 494c4bfcff1e2246001d489c3e60a6e9ddb6ae62 | 156a003378b8db994d4bd1f9c3a12415ceac7c01 | refs/heads/master | 2016-09-05T11:41:50.329113 | 2014-08-10T20:14:31 | 2014-08-10T20:14:31 | 22,541,009 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,869 | py | import copy
import socket
from threading import Lock, Thread
from engine.const import log, CONST
from engine.vector import Vector2
players = {}
players_lock = Lock()
player_pos = Vector2()
player_anim_state = ""
player_anim_counter = 0
client_player_lock = Lock()
PORT = CONST.port
HOST = CONST.host
update_thread = None
self_id = 0
sock = None
def get_players():
global players,players_lock
players_lock.acquire()
tmp_players = copy.deepcopy(players)
players_lock.release()
return tmp_players
def get_player():
global player_pos, player_anim_counter, player_anim_state, client_player_lock
client_player_lock.acquire()
pos = copy.deepcopy(player_pos)
state = copy.deepcopy(player_anim_state)
frame = copy.deepcopy(player_anim_counter)
client_player_lock.release()
return pos, state, frame
def set_player(new_player):
global player_pos, player_anim_counter, player_anim_state, client_player_lock
from engine.init import engine
client_player_lock.acquire()
player_pos = new_player.pos + new_player.screen_relative_pos * engine.get_screen_size()
player_anim_state = new_player.anim.state
player_anim_counter = new_player.anim.anim_counter
client_player_lock.release()
def get_self_id():
global self_id
return self_id
def init():
global update_thread, self_id,sock
data = "ID_REQUEST;"
new_id_request = None
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((HOST, PORT))
sock.sendall(data)
new_id_request = sock.recv(1024)
except socket.error as e:
sock.close()
sock = None
log("Network init: "+str(e),1)
return
self_id = new_id_request.split(";")[1]
get_thread = Thread(target=client_get)
get_thread.daemon = True
get_thread.start()
set_thread = Thread(target=client_set)
set_thread.daemon = True
set_thread.start()
def client_get():
log("START UPDATE SERVER")
from engine.init import engine
udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while not engine.finish:
udp_sock.sendto("GET_REQUEST;%s"%self_id, (HOST, PORT+1))
get_player_data = udp_sock.recv(1024)
"""Position"""
parsed_data = get_player_data.split(';')
parsed_data[1] = parsed_data[1].split(',')
parsed_data[1] = Vector2(int(float(parsed_data[1][0])), int(float(parsed_data[1][1])))
"""Frame"""
parsed_data[3] = int(parsed_data[3])
"""update players position"""
players_lock.acquire()
players[parsed_data[0]] = parsed_data
players_lock.release()
def client_set():
log("START UPDATE SERVER")
from engine.init import engine
udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while not engine.finish:
(pos, state, frame) = get_player()
if state != "":
udp_sock.sendto("SET_REQUEST;"+str(self_id)+";"
+pos.get_string() +";"
+state+";"
+str(frame)+";"
, (HOST, PORT+2))
def set_request(pos, state, frame):
global sock
"""Change the position of the player on the server"""
"""Set correct pos, state, frame"""
try:
if not sock:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((HOST, PORT))
sock.sendall("SET_REQUEST;"+str(self_id)+";"+pos.get_string() +";"+state+";"+str(frame)+";")
sock.recv(1024)
except socket.error as e:
sock.close()
sock = None
log("Network set: "+str(e),1)
return
def get_players_request():
global sock
try:
if not sock:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((HOST, PORT))
sock.sendall("GET_REQUEST;")
get_request_nmb = sock.recv(1024)
#log(get_request_nmb)
try:
nmb = int(get_request_nmb.split(';')[1])
sock.sendall("%i;"%nmb)
length = 5
for i in range(nmb):
get_request_data = sock.recv(1024)
#log(get_request_data)
"""Position"""
parsed_data = get_request_data.split(';')
parsed_data[1] = parsed_data[1].split(',')
parsed_data[1] = Vector2(int(float(parsed_data[1][0])), int(float(parsed_data[1][1])))
"""Frame"""
parsed_data[3] = int(parsed_data[3])
"""update players position"""
players[parsed_data[0]] = parsed_data
sock.sendall("NEXT")
except IndexError:
pass
except socket.error as e:
sock.close()
sock = None
log("Network get: "+str(e),1)
return
| [
"[email protected]"
] | |
670e3279efdafed42ae531d9129bbbdd829e0b5a | b472c692ac9efc39e508d2709fe14e2b4e844fd7 | /Python/test.py | 818eb63debb2c34cbca6a32dc628f5e91c3de1f1 | [] | no_license | enningxie/Interview | 75ac734873282dc67503815019718a6e1b27c512 | becdd40463c01551f2c8a53abc9d2281979f2bc1 | refs/heads/master | 2020-03-18T08:13:00.049201 | 2018-12-25T01:02:45 | 2018-12-25T01:02:45 | 134,496,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py | from collections import defaultdict
if __name__ == '__main__':
d_dict = defaultdict()
nums1 = [1, 2, 2, 1]
nums2 = [2, 2]
for i, value in enumerate(nums1):
d_dict[i] = value
d_dict.pop()
| [
"[email protected]"
] | |
ef5cedf36af3d5382bcecb579ae28b374f22bd7d | 7bd5ca970fbbe4a3ed0c7dadcf43ba8681a737f3 | /atcoder/arc/arc006/c.py | cb6d240266da7927722159dc9328cf026bb5874f | [] | no_license | roiti46/Contest | c0c35478cd80f675965d10b1a371e44084f9b6ee | c4b850d76796c5388d2e0d2234f90dc8acfaadfa | refs/heads/master | 2021-01-17T13:23:30.551754 | 2017-12-10T13:06:42 | 2017-12-10T13:06:42 | 27,001,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | # -*- coding: utf-8 -*-
import sys,copy,math,heapq,itertools as it,fractions,re,bisect,collections as coll
N = int(raw_input())
a = []
for i in xrange(N):
w = int(raw_input())
for j in xrange(len(a)):
if a[j] >= w:
a[j] = w
break
else:
a.append(w)
a = sorted(a)
print len(a)
| [
"[email protected]"
] | |
784fa6abdaea42679c74b37c9104130a6c8ab49a | ee7e42417d9d1e76b0e84e44dc6eb037adc3ebad | /.history/manage_20190703133205.py | 97195c48fcefe2c35d0dc6fce4c318d0b570fa33 | [] | no_license | web3-qa/pets-api | 4632127ee84a299f207d95754f409fc1e4c0013d | ee4a04e7291740ac8eb6147c305b41d27d5be29c | refs/heads/master | 2023-05-12T09:09:47.509063 | 2019-07-18T15:07:13 | 2019-07-18T15:07:13 | 197,611,701 | 0 | 0 | null | 2023-05-01T19:42:17 | 2019-07-18T15:19:59 | Python | UTF-8 | Python | false | false | 429 | py | import os
import sys
from os import path
from xmlrpc.client import Server
from virtualenv import create_bootstrap_script
from application import create_bootstrap_script
from flask_script import Manager, Server
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),'..')))
app = create_app()
manager = Manager(app)
manager.add_command("runserver", Server
use_debugger = True,
use_reloader = True,
) | [
"[email protected]"
] | |
3e0f44d691a846473e3bd241080b059dfc70b086 | 8fd56e9b9dbc49c16b4a8afe1007f824183bb0ab | /Python_Stack/django/django_fundamentals/django_intro/dojo_and_ninjas/dojo_and_ninjas/settings.py | f0d9e05a39974011a73b5ec3195f1ab742aa2ea1 | [] | no_license | DiyarBarham/CodingDojo | b1cc7d7355f5fb139cb640168f78d6b7f91e372a | 0891e2c41ddbb9004eadfd2d54fe7f34d6d4ef58 | refs/heads/main | 2023-07-08T12:12:33.227932 | 2021-08-07T13:55:33 | 2021-08-07T13:55:33 | 363,878,740 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,293 | py | """
Django settings for dojo_and_ninjas project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-53wty_=t8u#0m8n^_@cd965s9q2u-cj=wrylu)y0ty#e9=@#z_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'dojo_and_ninjas_app',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'dojo_and_ninjas.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dojo_and_ninjas.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"[email protected]"
] | |
bff8c1e537389300427596e9c36f71fadaf71296 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/mirumee-django-messages/allPythonContent.py | 9918ddf59a580b2c2fb4f59768881fac6fa2cc3f | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,475 | py | __FILENAME__ = admin
from django import forms
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.contrib import admin
from django.contrib.auth.models import User, Group
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
else:
notification = None
from django_messages.models import Message
class MessageAdminForm(forms.ModelForm):
"""
Custom AdminForm to enable messages to groups and all users.
"""
recipient = forms.ModelChoiceField(
label=_('Recipient'), queryset=User.objects.all(), required=True)
group = forms.ChoiceField(label=_('group'), required=False,
help_text=_('Creates the message optionally for all users or a group of users.'))
def __init__(self, *args, **kwargs):
super(MessageAdminForm, self).__init__(*args, **kwargs)
self.fields['group'].choices = self._get_group_choices()
def _get_group_choices(self):
return [('', u'---------'), ('all', _('All users'))] + \
[(group.pk, group.name) for group in Group.objects.all()]
class Meta:
model = Message
class MessageAdmin(admin.ModelAdmin):
form = MessageAdminForm
fieldsets = (
(None, {
'fields': (
'sender',
('recipient', 'group'),
),
}),
(_('Message'), {
'fields': (
'parent_msg',
'subject', 'body',
),
'classes': ('monospace' ),
}),
(_('Date/time'), {
'fields': (
'sent_at', 'read_at', 'replied_at',
'deleted_at',
),
'classes': ('collapse', 'wide'),
}),
)
list_display = ('subject', 'sender', 'recipient', 'sent_at', 'read_at')
list_filter = ('sent_at', 'sender', 'recipient')
search_fields = ('subject', 'body')
def save_model(self, request, obj, form, change):
"""
Saves the message for the recipient and looks in the form instance
for other possible recipients. Prevents duplication by excludin the
original recipient from the list of optional recipients.
When changing an existing message and choosing optional recipients,
the message is effectively resent to those users.
"""
obj.save()
if notification:
# Getting the appropriate notice labels for the sender and recipients.
if obj.parent_msg is None:
recipients_label = 'messages_received'
else:
recipients_label = 'messages_reply_received'
if form.cleaned_data['group'] == 'all':
# send to all users
recipients = User.objects.exclude(pk=obj.recipient.pk)
else:
# send to a group of users
recipients = []
group = form.cleaned_data['group']
if group:
group = Group.objects.get(pk=group)
recipients.extend(
list(group.user_set.exclude(pk=obj.recipient.pk)))
# create messages for all found recipients
for user in recipients:
obj.pk = None
obj.recipient = user
obj.save()
if notification:
# Notification for the recipient.
notification.send([user], recipients_label, {'message' : obj,})
admin.site.register(Message, MessageAdmin)
########NEW FILE########
__FILENAME__ = context_processors
from django_messages.models import inbox_count_for
def inbox(request):
if request.user.is_authenticated():
return {'messages_inbox_count': inbox_count_for(request.user)}
else:
return {}
########NEW FILE########
__FILENAME__ = fields
"""
Based on http://www.djangosnippets.org/snippets/595/
by sopelkin
"""
from django import forms
from django.forms import widgets
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
class CommaSeparatedUserInput(widgets.Input):
    """Text input that displays a sequence of users as ``a, b, c``."""

    input_type = 'text'

    def render(self, name, value, attrs=None):
        # Normalise the value to a plain string before delegating to the
        # stock Input widget.
        if value is None:
            normalized = ''
        elif isinstance(value, (list, tuple)):
            usernames = [user.username for user in value]
            normalized = ', '.join(usernames)
        else:
            normalized = value
        return super(CommaSeparatedUserInput, self).render(name, normalized, attrs)
class CommaSeparatedUserField(forms.Field):
    """Form field that accepts a comma-separated list of usernames.

    ``clean`` resolves the names to ``User`` instances and optionally
    validates each one against a ``recipient_filter`` callable.
    """
    widget = CommaSeparatedUserInput

    def __init__(self, *args, **kwargs):
        # ``recipient_filter``: optional callable(user) -> bool; returning
        # False rejects that user as a recipient.
        recipient_filter = kwargs.pop('recipient_filter', None)
        self._recipient_filter = recipient_filter
        super(CommaSeparatedUserField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Return the list of ``User`` objects named in ``value``.

        Raises ``forms.ValidationError`` listing usernames that do not
        exist or were rejected by the recipient filter.
        """
        super(CommaSeparatedUserField, self).clean(value)
        if not value:
            return ''
        if isinstance(value, (list, tuple)):
            # Already resolved (e.g. programmatic initial data); pass through.
            return value
        names_set = set([name.strip() for name in value.split(',')])
        users = list(User.objects.filter(username__in=names_set))
        unknown_names = names_set ^ set([user.username for user in users])
        recipient_filter = self._recipient_filter
        invalid_users = []
        if recipient_filter is not None:
            # BUGFIX: the original removed entries from ``users`` while
            # iterating it, which skips the element following each removal.
            # Build a fresh list of accepted users instead.
            accepted = []
            for r in users:
                if recipient_filter(r) is False:
                    invalid_users.append(r.username)
                else:
                    accepted.append(r)
            users = accepted
        if unknown_names or invalid_users:
            raise forms.ValidationError(_(u"The following usernames are incorrect: %(users)s") % {'users': ', '.join(list(unknown_names)+invalid_users)})
        return users
########NEW FILE########
__FILENAME__ = forms
import datetime
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext_noop
from django.contrib.auth.models import User
import uuid
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
else:
notification = None
from django_messages.models import Message
from django_messages.fields import CommaSeparatedUserField
from django_messages.utils import format_quote
class MessageForm(forms.ModelForm):
    """
    Base message form: one ``Message`` row is saved per participant, all
    sharing the same ``thread`` id.  ``save`` returns the sender's copy plus
    the list of per-recipient clones.
    """
    recipients = CommaSeparatedUserField(label=_(u"Recipient"))
    subject = forms.CharField(label=_(u"Subject"))
    body = forms.CharField(label=_(u"Body"),
        widget=forms.Textarea(attrs={'rows': '12', 'cols':'55'}))

    class Meta:
        model = Message
        fields = ('recipients', 'subject', 'body',)

    def __init__(self, sender, *args, **kw):
        # ``recipient_filter`` is forwarded to the recipients field so it can
        # reject disallowed users during clean().
        recipient_filter = kw.pop('recipient_filter', None)
        self.sender = sender
        super(MessageForm, self).__init__(*args, **kw)
        if recipient_filter is not None:
            self.fields['recipients']._recipient_filter = recipient_filter

    def create_recipient_message(self, recipient, message):
        # Clone ``message`` into the recipient's mailbox (owner=recipient).
        return Message(
            owner = recipient,
            sender = self.sender,
            to = recipient.username,
            recipient = recipient,
            subject = message.subject,
            body = message.body,
            thread = message.thread,
            sent_at = message.sent_at,
        )

    def get_thread(self, message):
        # New conversations get a fresh uuid hex; replies override this.
        return message.thread or uuid.uuid4().hex

    def save(self, commit=True):
        """Save the sender's copy and one clone per recipient.

        Returns ``(instance, message_list)`` where ``instance`` is the
        sender's row and ``message_list`` are the unsaved-or-saved clones
        (depending on ``commit``).
        """
        recipients = self.cleaned_data['recipients']
        instance = super(MessageForm, self).save(commit=False)
        instance.sender = self.sender
        instance.owner = self.sender
        instance.recipient = recipients[0]
        instance.thread = self.get_thread(instance)
        # The sender's own copy is never "unread".
        instance.unread = False
        instance.sent_at = datetime.datetime.now()
        message_list = []
        # clone messages in recipients inboxes
        for r in recipients:
            if r == self.sender: # skip duplicates
                continue
            msg = self.create_recipient_message(r, instance)
            message_list.append(msg)
        instance.to = ','.join([r.username for r in recipients])
        if commit:
            instance.save()
            for msg in message_list:
                msg.save()
                if notification:
                    # Notify each recipient of the new message.
                    notification.send([msg.recipient],
                        "messages_received", {'message': msg,})
        return instance, message_list
class ComposeForm(MessageForm):
    """
    A simple default form for private messages.

    Inherits all behaviour from ``MessageForm``; only pins model/fields.
    """
    class Meta:
        model = Message
        fields = ('recipients', 'subject', 'body',)
class ReplyForm(MessageForm):
    """
    Reply form: pre-fills the recipient, a quoted body and a ``Re:`` subject
    from the parent message, keeps the reply in the parent's thread and links
    each clone to the matching parent copy in the recipient's mailbox.
    """
    class Meta:
        model = Message
        fields = ('recipients', 'subject', 'body',)

    def __init__(self, sender, message, *args, **kw):
        # ``message`` is the mailbox copy being replied to.
        self.parent_message = message
        initial = kw.pop('initial', {})
        initial['recipients'] = message.sender.username
        initial['body'] = self.quote_message(message)
        initial['subject'] = self.quote_subject(message.subject)
        kw['initial'] = initial
        super(ReplyForm, self).__init__(sender, *args, **kw)

    def quote_message(self, original_message):
        return format_quote(original_message.sender, original_message.body)

    def quote_subject(self, subject):
        return u'Re: %s' % subject

    def create_recipient_message(self, recipient, message):
        msg = super(ReplyForm, self).create_recipient_message(recipient, message)
        msg.replied_at = datetime.datetime.now()
        # find parent in recipient messages
        try:
            msg.parent_msg = Message.objects.get(
                owner=recipient,
                sender=message.recipient,
                recipient=message.sender,
                thread=message.thread)
        except (Message.DoesNotExist, Message.MultipleObjectsReturned):
            # message may be deleted
            pass
        return msg

    def get_thread(self, message):
        # Replies always stay in the parent's thread.
        return self.parent_message.thread

    def save(self, commit=True):
        instance, message_list = super(ReplyForm, self).save(commit=False)
        instance.replied_at = datetime.datetime.now()
        instance.parent_msg = self.parent_message
        if commit:
            instance.save()
            for msg in message_list:
                msg.save()
                if notification:
                    # Reply notifications carry the parent for context.
                    notification.send([msg.recipient],
                        "messages_reply_received", {
                        'message': msg,
                        'parent_msg': self.parent_message,
                        })
        return instance, message_list
########NEW FILE########
__FILENAME__ = management
from django.db.models import get_models, signals
from django.conf import settings
from django.utils.translation import ugettext_noop as _
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
    def create_notice_types(app, created_models, verbosity, **kwargs):
        # post_syncdb hook: registers the two notice types this app emits.
        # ``default=2`` presumably sets the default delivery level of
        # django-notification -- confirm against the installed version.
        notification.create_notice_type("messages_received", _("Message Received"), _("you have received a message"), default=2)
        notification.create_notice_type("messages_reply_received", _("Reply Received"), _("you have received a reply to a message"), default=2)
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes as notification app not found"
########NEW FILE########
__FILENAME__ = models
import datetime
from django.db import models
from django.conf import settings
from django.db.models import signals
from django.db.models.query import QuerySet
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
class MessageQueryset(QuerySet):
    """QuerySet with convenience filters shared by all message managers."""

    def unread(self):
        # Only messages the recipient has not read yet.
        return self.filter(unread=True)
class BaseMessageManager(models.Manager):
    """Common manager: yields ``MessageQueryset`` and bulk operations."""

    def get_query_set(self):
        return MessageQueryset(self.model)

    def trash(self, messages):
        """
        move messages to trash
        """
        # Bulk UPDATE: soft-deletes the rows instead of removing them.
        messages.update(deleted=True, deleted_at=datetime.datetime.now())

    def send(self, messages):
        """
        send messages
        """
        # Intentionally a no-op hook; views call this after saving clones.
        pass
class Inbox(BaseMessageManager):
    """Manager restricted to received messages that are not in the trash."""

    def get_query_set(self):
        base = super(Inbox, self).get_query_set()
        return base.filter(deleted=False)

    def for_user(self, user):
        """Messages received by ``user`` that are not marked as deleted."""
        return self.get_query_set().filter(owner=user, recipient=user)
class Outbox(BaseMessageManager):
    """Manager restricted to sent messages that are not in the trash."""

    def get_query_set(self):
        base = super(Outbox, self).get_query_set()
        return base.filter(deleted=False)

    def for_user(self, user):
        """Messages sent by ``user`` that are not marked as deleted."""
        return self.get_query_set().filter(owner=user, sender=user)
class Trash(BaseMessageManager):
    """Manager restricted to soft-deleted messages."""

    def get_query_set(self):
        base = super(Trash, self).get_query_set()
        return base.filter(deleted=True)

    def for_user(self, user):
        """Deleted messages owned by ``user``, whether sent or received."""
        return self.get_query_set().filter(owner=user)
class Message(models.Model):
    """
    A private message from user to user.

    Each row belongs to exactly one mailbox (``owner``); sending a message
    creates one row per participant, all sharing the same ``thread`` id.
    """
    # Mailbox owner: the user whose inbox/outbox/trash this row lives in.
    owner = models.ForeignKey(User, related_name='messages')
    to = models.CharField(max_length=255) # recipient usernames comma separated
    subject = models.CharField(_("Subject"), max_length=120)
    body = models.TextField(_("Body"))
    sender = models.ForeignKey(User, related_name='+', verbose_name=_("Sender"))
    recipient = models.ForeignKey(User, related_name='+', null=True, blank=True, verbose_name=_("Recipient"))
    # Thread id shared by all copies of one conversation (uuid hex string).
    thread = models.CharField(max_length=64, null=True, blank=True, db_index=True)
    parent_msg = models.ForeignKey('self', related_name='next_messages', null=True, blank=True, verbose_name=_("Parent message"))
    sent_at = models.DateTimeField(_("sent at"), null=True, blank=True)
    unread = models.BooleanField(default=True, db_index=True)
    read_at = models.DateTimeField(_("read at"), null=True, blank=True)
    replied_at = models.DateTimeField(_("replied at"), null=True, blank=True)
    # Soft-delete flags: rows stay in the table so the trash can undelete.
    deleted = models.BooleanField(default=False, db_index=True)
    deleted_at = models.DateTimeField(_("Sender deleted at"), null=True, blank=True)

    objects = BaseMessageManager()
    inbox = Inbox()
    outbox = Outbox()
    trash = Trash()

    def is_unread(self):
        """returns whether the recipient has read the message or not"""
        return bool(self.read_at is None)

    def undelete(self):
        # Caller is responsible for save().
        self.deleted = False
        self.deleted_at = None

    def mark_read(self):
        # Caller is responsible for save().
        self.unread = False
        self.read_at = datetime.datetime.now()

    def mark_unread(self):
        self.unread = True
        self.read_at = None

    def move_to_trash(self):
        self.deleted = True
        self.deleted_at = datetime.datetime.now()

    def replied(self):
        """returns whether the recipient has written a reply to this message"""
        return bool(self.replied_at is not None)

    def __unicode__(self):
        return self.subject

    def all_recipients(self):
        # ``to`` stores plain usernames, so re-resolve them to User objects.
        return User.objects.filter(username__in=self.to.split(','))

    @models.permalink
    def get_absolute_url(self):
        return ('messages_detail', None, {'message_id': self.pk})

    class Meta:
        ordering = ['-sent_at']
        verbose_name = _("Message")
        verbose_name_plural = _("Messages")
        db_table = 'messages_message'
def inbox_count_for(user):
    """
    Return the number of unread messages for ``user`` without marking
    any of them as seen.
    """
    unread_messages = Message.inbox.for_user(user).unread()
    return unread_messages.count()
# fallback for email notification if django-notification could not be found
if "notification" not in settings.INSTALLED_APPS:
from django_messages.utils import new_message_email
signals.post_save.connect(new_message_email, sender=Message)
########NEW FILE########
__FILENAME__ = signals
########NEW FILE########
__FILENAME__ = inbox
from django.template import Library, Node, TemplateSyntaxError
from django_messages.models import inbox_count_for
class InboxOutput(Node):
    """Template node rendering (or storing) the unread-message count."""

    def __init__(self, varname=None):
        # When ``varname`` is set the count is stored in the context
        # instead of being rendered inline.
        self.varname = varname

    def render(self, context):
        try:
            count = inbox_count_for(context['user'])
        except (KeyError, AttributeError):
            # No user in context (or anonymous): render nothing.
            count = ''
        if self.varname is None:
            return "%s" % (count)
        context[self.varname] = count
        return ""
def do_print_inbox_count(parser, token):
    """
    A templatetag to show the unread-count for a logged in user.

    Returns the number of unread messages in the user's inbox.
    Usage::

        {% load inbox %}
        {% inbox_count %}

        {# or assign the value to a variable: #}

        {% inbox_count as my_var %}
        {{ my_var }}
    """
    bits = token.contents.split()
    if len(bits) > 1:
        # Only the exact form ``inbox_count as varname`` is accepted.
        # FIX: use the call form of raise, valid on both Python 2 and 3
        # (the original ``raise E, "msg"`` statement form is Python-2-only).
        if len(bits) != 3:
            raise TemplateSyntaxError("inbox_count tag takes either no arguments or exactly two arguments")
        if bits[1] != 'as':
            raise TemplateSyntaxError("first argument to inbox_count tag must be 'as'")
        return InboxOutput(bits[2])
    else:
        return InboxOutput()
register = Library()
register.tag('inbox_count', do_print_inbox_count)
########NEW FILE########
__FILENAME__ = tests
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_messages.models import Message
class SendTestCase(TestCase):
    """Creating a message and checking both users' related managers."""

    def setUp(self):
        self.user1 = User.objects.create_user('user1', '[email protected]', '123456')
        self.user2 = User.objects.create_user('user2', '[email protected]', '123456')
        self.msg1 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text', body='Body Text')
        self.msg1.save()

    def testBasic(self):
        # NOTE(review): ``sent_messages``/``received_messages`` require the
        # Message model to declare those related_names on sender/recipient;
        # the model version bundled in this tree uses related_name='+' --
        # confirm which model version these tests target.
        self.assertEquals(self.msg1.sender, self.user1)
        self.assertEquals(self.msg1.recipient, self.user2)
        self.assertEquals(self.msg1.subject, 'Subject Text')
        self.assertEquals(self.msg1.body, 'Body Text')
        self.assertEquals(self.user1.sent_messages.count(), 1)
        self.assertEquals(self.user1.received_messages.count(), 0)
        self.assertEquals(self.user2.received_messages.count(), 1)
        self.assertEquals(self.user2.sent_messages.count(), 0)
class DeleteTestCase(TestCase):
    """Soft-deleting and undeleting messages via the per-side timestamps."""

    def setUp(self):
        self.user1 = User.objects.create_user('user3', '[email protected]', '123456')
        self.user2 = User.objects.create_user('user4', '[email protected]', '123456')
        self.msg1 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text 1', body='Body Text 1')
        self.msg2 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text 2', body='Body Text 2')
        # NOTE(review): ``sender_deleted_at``/``recipient_deleted_at`` and the
        # ``outbox_for``/``inbox_for`` manager methods below do not exist on
        # the Message model bundled in this tree (it uses deleted/deleted_at
        # and inbox/outbox managers) -- these tests target a different model
        # version; confirm before running.
        self.msg1.sender_deleted_at = datetime.datetime.now()
        self.msg2.recipient_deleted_at = datetime.datetime.now()
        self.msg1.save()
        self.msg2.save()

    def testBasic(self):
        self.assertEquals(Message.objects.outbox_for(self.user1).count(), 1)
        self.assertEquals(Message.objects.outbox_for(self.user1)[0].subject, 'Subject Text 2')
        self.assertEquals(Message.objects.inbox_for(self.user2).count(),1)
        self.assertEquals(Message.objects.inbox_for(self.user2)[0].subject, 'Subject Text 1')
        #undelete
        self.msg1.sender_deleted_at = None
        self.msg2.recipient_deleted_at = None
        self.msg1.save()
        self.msg2.save()
        self.assertEquals(Message.objects.outbox_for(self.user1).count(), 2)
        self.assertEquals(Message.objects.inbox_for(self.user2).count(),2)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to
from django_messages.views import *
# URLconf for django_messages: the bare prefix redirects to the inbox, and
# every other route is named so views and templates can reverse() it.
urlpatterns = patterns('',
    url(r'^$', redirect_to, {'url': 'inbox/'}),
    url(r'^inbox/$', inbox, name='messages_inbox'),
    url(r'^outbox/$', outbox, name='messages_outbox'),
    url(r'^compose/$', compose, name='messages_compose'),
    url(r'^compose/(?P<recipient>[\+\w]+)/$', compose, name='messages_compose_to'),
    url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
    url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
    url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
    url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
    url(r'^trash/$', trash, name='messages_trash'),
)
########NEW FILE########
__FILENAME__ = utils
# -*- coding:utf-8 -*-
import re
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.encoding import force_unicode
from django.utils.text import wrap
from django.utils.translation import ugettext_lazy as _
from django.template import Context, loader
from django.template.loader import render_to_string
# favour django-mailer but fall back to django.core.mail
if "mailer" in settings.INSTALLED_APPS:
from mailer import send_mail
else:
from django.core.mail import send_mail
def format_quote(sender, body):
    """
    Quote ``body`` for a reply: wrap it at 55 characters, prefix every line
    with ``> `` and prepend an attribution line for ``sender``.
    """
    wrapped = wrap(body, 55)
    quote = '\n'.join("> %s" % line for line in wrapped.split('\n'))
    return _(u"%(sender)s wrote:\n%(body)s") % {
        'sender': sender,
        'body': quote,
    }
def new_message_email(sender, instance, signal,
        subject_prefix=_(u'New Message: %(subject)s'),
        template_name="django_messages/new_message.html",
        default_protocol=None,
        *args, **kwargs):
    """
    This function sends an email and is called via Django's signal framework.

    Optional arguments:
        ``template_name``: the template to use
        ``subject_prefix``: prefix for the email subject.
        ``default_protocol``: default protocol in site URL passed to template
    """
    if default_protocol is None:
        default_protocol = getattr(settings, 'DEFAULT_HTTP_PROTOCOL', 'http')

    # Only act on freshly created messages, not updates.
    if kwargs.get('created'):
        try:
            current_domain = Site.objects.get_current().domain
            subject = subject_prefix % {'subject': instance.subject}
            message = render_to_string(template_name, {
                'site_url': '%s://%s' % (default_protocol, current_domain),
                'message': instance,
            })
            if instance.recipient.email != "":
                send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
                    [instance.recipient.email,])
        # FIX: the original used the Python-2-only ``except Exception, e``
        # syntax and never used the bound exception; this form is valid on
        # both Python 2 and 3.
        except Exception:
            # Notification e-mail is best effort; never break the save()
            # that triggered this signal.
            pass
########NEW FILE########
__FILENAME__ = views
# -*- coding:utf-8 -*-
import datetime
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
from django.core.urlresolvers import reverse
from django.conf import settings
from django.db import transaction
from django.views.generic.list_detail import object_list, object_detail
from django_messages.models import Message
from django_messages.forms import ComposeForm, ReplyForm
from django_messages.utils import format_quote
@login_required
def message_list(request, queryset, paginate_by=25,
        extra_context=None, template_name=None):
    """Shared list view: paginate ``queryset`` as ``message`` objects."""
    return object_list(
        request,
        queryset=queryset,
        paginate_by=paginate_by,
        extra_context=extra_context,
        template_name=template_name,
        template_object_name='message',
    )
@login_required
def inbox(request, template_name='django_messages/inbox.html', **kw):
    """
    Displays a list of received messages for the current user.
    """
    kw['template_name'] = template_name
    received = Message.inbox.for_user(request.user)
    return message_list(request, received, **kw)
@login_required
def outbox(request, template_name='django_messages/outbox.html', **kw):
    """
    Displays a list of sent messages for the current user.
    """
    kw['template_name'] = template_name
    sent = Message.outbox.for_user(request.user)
    return message_list(request, sent, **kw)
@login_required
def trash(request, template_name='django_messages/trash.html', **kw):
    """
    Displays a list of deleted messages.
    """
    kw['template_name'] = template_name
    deleted = Message.trash.for_user(request.user)
    return message_list(request, deleted, **kw)
@login_required
@transaction.commit_on_success
def compose(request, recipient=None, form_class=ComposeForm,
        template_name='django_messages/compose.html', success_url=None,
        recipient_filter=None, extra_context=None):
    """
    Displays and handles the ``form_class`` form to compose new messages.

    Required Arguments: None
    Optional Arguments:
        ``recipient``: username of a `django.contrib.auth` User, who should
                       receive the message, optionally multiple usernames
                       could be separated by a '+'
        ``form_class``: the form-class to use
        ``template_name``: the template to use
        ``success_url``: where to redirect after successful submission
        ``extra_context``: extra context dict
    """
    if request.method == "POST":
        form = form_class(request.user, data=request.POST,
            recipient_filter=recipient_filter)
        if form.is_valid():
            # ``save`` returns the sender's copy plus one clone per recipient.
            instance, message_list = form.save()
            Message.objects.send(message_list)
            messages.add_message(request, messages.SUCCESS, _(u"Message successfully sent."))
            # ``success_url`` wins, then the ``next`` query parameter,
            # then fall back to the inbox.
            return redirect(success_url or request.GET.get('next') or inbox)
    else:
        form = form_class(request.user, initial={'recipients': recipient})
    ctx = extra_context or {}
    ctx.update({
        'form': form,
    })
    return render_to_response(template_name, RequestContext(request, ctx))
@login_required
@transaction.commit_on_success
def reply(request, message_id, form_class=ReplyForm,
        template_name='django_messages/reply.html', success_url=None,
        recipient_filter=None, extra_context=None):
    """
    Prepares the ``form_class`` form for writing a reply to a given message
    (specified via ``message_id``).
    """
    # Only the owner of the mailbox copy may reply to it (404 otherwise).
    parent = get_object_or_404(Message, pk=message_id, owner=request.user)
    if request.method == "POST":
        form = form_class(request.user, parent, data=request.POST,
            recipient_filter=recipient_filter)
        if form.is_valid():
            instance, message_list = form.save()
            Message.objects.send(message_list)
            messages.add_message(request, messages.SUCCESS, _(u"Message successfully sent."))
            return redirect(success_url or inbox)
    else:
        # GET: form pre-filled with quoted body / Re: subject by ReplyForm.
        form = form_class(request.user, parent)
    ctx = extra_context or {}
    ctx.update({
        'form': form,
    })
    return render_to_response(template_name,
        RequestContext(request, ctx))
@login_required
@transaction.commit_on_success
def delete(request, message_id, success_url=None):
    """
    Marks a message as deleted by sender or recipient. The message is not
    really removed from the database, because two users must delete a message
    before it's safe to remove it completely.
    A cron-job should prune the database and remove old messages which are
    deleted by both users.
    As a side effect, this makes it easy to implement a trash with undelete.

    You can pass ?next=/foo/bar/ via the url to redirect the user to a different
    page (e.g. `/foo/bar/`) than ``success_url`` after deletion of the message.
    """
    # Only the owner of the mailbox copy may delete it (404 otherwise).
    message = get_object_or_404(Message, pk=message_id, owner=request.user)
    message.move_to_trash()
    message.save()
    messages.add_message(request, messages.SUCCESS, _(u"Message successfully deleted."))
    # ``next`` query parameter takes precedence over ``success_url``.
    return redirect(request.GET.get('next') or success_url or inbox)
@login_required
@transaction.commit_on_success
def undelete(request, message_id, success_url=None):
    """
    Recovers a message from trash.
    """
    message = get_object_or_404(Message, pk=message_id, owner=request.user)
    message.undelete()
    message.save()
    message_view = inbox # should be dependent on message box (inbox,outbox)
    messages.add_message(request, messages.SUCCESS,
        _(u"Message successfully recovered."))
    return redirect(request.GET.get('next') or success_url or message_view)
@login_required
def view(request, message_id, template_name='django_messages/view.html',
        extra_context=None):
    """
    Shows a single message. ``message_id`` argument is required.

    The user is only allowed to see the message, if he is either
    the sender or the recipient. If the user is not allowed a 404
    is raised.
    If the user is the recipient and the message is unread
    ``read_at`` is set to the current datetime.
    """
    message = get_object_or_404(Message, pk=message_id, owner=request.user)
    if message.is_unread():
        # First view by the owner: record the read timestamp.
        message.mark_read()
        message.save()
    ctx = extra_context or {}
    ctx.update({
        'message': message,
    })
    return render_to_response(template_name, RequestContext(request, ctx))
########NEW FILE########
__FILENAME__ = conf
# -*- coding: utf-8 -*-
#
# django-messages documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 26 10:27:49 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-messages'
copyright = u'2009, Arne Brodowski'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.4'
# The full version, including alpha/beta/rc tags.
release = '0.4.3pre'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
unused_docs = ['README',]
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
html_theme_path = ['.',]
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
# html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = './django-messages.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
html_copy_source = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-messagesdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'django-messages.tex', ur'django-messages Documentation',
ur'Arne Brodowski', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
########NEW FILE########
| [
"[email protected]"
] | |
8f629fd50ba1ec120f55b90d665fc66b65f77590 | 07a783c06bb4bb059e8c38589fe3f9bfc5a14b22 | /tests/builtins/test_iter.py | b06f6fbbe28a6ac4114a49d71dc32bf850c37594 | [
"BSD-3-Clause",
"MIT"
] | permissive | vishalsodani/batavia | a3c79b0342069fe6387eb3d7cc3ac3f4947d1842 | 690e5093da6653456381466e5fb9c153c295cb6b | refs/heads/master | 2021-01-22T08:10:07.777012 | 2016-09-03T15:14:52 | 2016-09-03T15:14:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class IterTests(TranspileTestCase):
    # Placeholder suite: no transpile-specific iter() scenarios yet.
    pass
class BuiltinIterFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    """Generated coverage of the builtin ``iter()`` across argument types."""

    functions = ["iter"]

    # Argument types whose iter() behaviour is not implemented yet in the
    # transpiler; presumably the base class skips or expects failure for
    # these -- confirm against BuiltinFunctionTestCase.
    not_implemented = [
        'test_bytearray',
        'test_bytes',
        'test_complex',
        'test_dict',
        'test_frozenset',
        'test_NotImplemented',
    ]
| [
"[email protected]"
] | |
dbfee9b9af6cb812be348a4573cc993fd8d52d08 | c7a6f8ed434c86b4cdae9c6144b9dd557e594f78 | /ECE364/.PyCharm40/system/python_stubs/348993582/gtk/_gtk/TreeSortable.py | 37cf02c4e1bb9f31abf693a5bf90d28a3cd5e6da | [] | no_license | ArbalestV/Purdue-Coursework | 75d979bbe72106975812b1d46b7d854e16e8e15e | ee7f86145edb41c17aefcd442fa42353a9e1b5d1 | refs/heads/master | 2020-08-29T05:27:52.342264 | 2018-04-03T17:59:01 | 2018-04-03T17:59:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,309 | py | # encoding: utf-8
# module gtk._gtk
# from /usr/lib64/python2.6/site-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.136
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
class TreeSortable(__gobject.GInterface):
    """Auto-generated IDE stub for gtk.TreeSortable; real signatures unknown,
    bodies are placeholders only."""
    @classmethod
    def do_has_default_sort_func(cls, *args, **kwargs): # real signature unknown
        pass

    @classmethod
    def do_set_sort_column_id(cls, *args, **kwargs): # real signature unknown
        pass

    @classmethod
    def do_sort_column_changed(cls, *args, **kwargs): # real signature unknown
        pass

    def get_sort_column_id(self, *args, **kwargs): # real signature unknown
        pass

    def has_default_sort_func(self, *args, **kwargs): # real signature unknown
        pass

    def set_default_sort_func(self, *args, **kwargs): # real signature unknown
        pass

    def set_sort_column_id(self, *args, **kwargs): # real signature unknown
        pass

    def set_sort_func(self, *args, **kwargs): # real signature unknown
        pass

    def sort_column_changed(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    __gtype__ = None # (!) real value is ''
| [
"[email protected]"
] | |
31a19fe8af08abd87b739ef20aafc4f44fe5f260 | 97c5fe6a54636de9b056719ea62ac1de4e76ebdc | /src/newsletter/views.py | 04f05109ae7071108c1fdf1cfd58ac97df81bd14 | [
"MIT"
] | permissive | EdwardBetts/matchmaker | 937ece7acbfd1fcb57ab59cd13b16c3cd67d54f3 | ec56d18c6af8ca904325deca3be56484d3415c70 | refs/heads/master | 2020-12-11T01:50:10.773983 | 2016-01-26T16:53:29 | 2016-01-26T16:53:29 | 56,478,725 | 0 | 0 | null | 2016-04-18T05:11:12 | 2016-04-18T05:11:12 | null | UTF-8 | Python | false | false | 3,301 | py | from django.conf import settings
from django.core.mail import send_mail
from django.shortcuts import render, get_object_or_404
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.edit import ModelFormMixin
from likes.models import Like
from matches.models import Match
from profiles.models import UserJob
from questions.mixins import PostQuestionMixin
from questions.models import UserAnswer, Question, LEVELS
from questions.forms import QuestionForm
from .forms import ContactForm, SignUpForm
from .models import SignUp
class homeTemplateView(PostQuestionMixin, ModelFormMixin, TemplateView):
    """Dashboard view: next unanswered question, matches, jobs and likes."""

    template_name = "home.html"
    form_class = QuestionForm

    def get_context_data(self, *args, **kwargs):
        # NOTE(review): no explicit return for anonymous users -- the view
        # presumably sits behind authentication; confirm, otherwise this
        # returns None and breaks template rendering.
        if self.request.user.is_authenticated():
            # A random question the user has not answered yet.
            self.object = Question.objects.get_unanswered_questions(self.request.user).order_by("?").first()
            positions = []
            locations = []
            employers = []
            context = super(homeTemplateView, self).get_context_data(*args, **kwargs)
            # NOTE(review): assumes get_match_all returns a (matches, users)
            # pair whose [:6] slice is still a 2-tuple -- confirm against the
            # Match manager.
            matches, users = Match.objects.get_match_all(self.request.user)[:6]
            jobs = UserJob.objects.filter(user__in=users).order_by("?")[:6]
            user_like = get_object_or_404(Like, user=self.request.user)
            context["answers"] = UserAnswer.objects.filter(
                user=self.request.user,
                question=self.object).first()
            if jobs:
                # Deduplicated position/location/employer lists for display.
                for job in jobs:
                    if job.position not in positions:
                        positions.append(job.position)
                    if job.location not in locations:
                        locations.append(job.location)
                    if job.employer not in employers:
                        employers.append(job.employer)
                context["positions"] = positions
                context["locations"] = locations
                context["employers"] = employers
            context["liked_users"] = user_like.liked_users.all()
            context["matches_list"] = matches
            context["question_object"] = self.object
            context["levels"] = LEVELS
            context["form"] = self.form_class
            return context
def contact(request):
    """Render and process the "Contact Us" form.

    On a valid POST, emails the submitted message to the site addresses and
    re-renders the form page (no redirect, so a refresh re-submits).
    """
    title = 'Contact Us'
    title_align_center = True
    # Bind POST data when present; an empty GET renders an unbound form.
    form = ContactForm(request.POST or None)
    if form.is_valid():
        form_email = form.cleaned_data.get("email")
        form_message = form.cleaned_data.get("message")
        form_full_name = form.cleaned_data.get("full_name")
        subject = 'Site contact form'
        from_email = settings.EMAIL_HOST_USER
        to_email = [from_email, '[email protected]']
        contact_message = "%s: %s via %s" % (
            form_full_name,
            form_message,
            form_email)
        # Placeholder HTML body — presumably meant to become a real template.
        some_html_message = """
        <h1>hello</h1>
        """
        # fail_silently=True swallows SMTP errors; the user gets no feedback
        # if delivery fails.
        send_mail(subject,
                  contact_message,
                  from_email,
                  to_email,
                  html_message=some_html_message,
                  fail_silently=True)
    context = {
        "form": form,
        "title": title,
        "title_align_center": title_align_center,
    }
    return render(request, "forms.html", context)
| [
"[email protected]"
] | |
f41ef0801111314ead7d07efb93cd9336de689ed | c63629e0e0477aeb6ff2e8751d00d9985500e7cd | /Mmani/__init__.py | de7539b6d20d2871550662c0a7e3757fabc33446 | [
"BSD-2-Clause"
] | permissive | Jerryzcn/Mmani | 1389016e68eeac05d0d735aa153320913467d2a4 | 732d3c7581fddb762390699216b724a1e0890508 | refs/heads/master | 2020-12-28T22:46:17.515828 | 2016-01-26T00:40:13 | 2016-01-26T00:40:13 | 49,289,748 | 0 | 0 | null | 2016-01-26T00:44:02 | 2016-01-08T18:41:59 | Python | UTF-8 | Python | false | false | 66 | py | """Mmani: Scalable Manifold Learning"""
__version__ = "0.1.dev0"
| [
"[email protected]"
] | |
84aaf4120a0a6a01012a2a5dcf06b0d75f9c3de5 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /_MY_ORGS/Web-Dev-Collaborative/blog-research/Data-Structures/1-Python/strings/contain_string.py | 67056fed67317b8f05ae54f52aee5108734c2c45 | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 651 | py | """
Implement strStr().
Return the index of the first occurrence of needle in haystack, or -1 if needle is not part of haystack.
Example 1:
Input: haystack = "hello", needle = "ll"
Output: 2
Example 2:
Input: haystack = "aaaaa", needle = "bba"
Output: -1
Reference: https://leetcode.com/problems/implement-strstr/description/
"""
def contain_string(haystack, needle):
    """Return the index of the first occurrence of *needle* in *haystack*.

    Mirrors the contract of ``str.find`` / LeetCode 28 ("Implement strStr()"):
    an empty needle matches at index 0; no occurrence returns -1.

    :param haystack: string to search in
    :param needle: substring to search for
    :return: 0-based index of the first match, or -1
    """
    n, m = len(haystack), len(needle)
    if m == 0:
        return 0
    if m > n:
        return -1
    # Only start positions that leave room for a full-length match need
    # checking; this removes the per-iteration "remaining length" test the
    # original performed inside the loop.
    for i in range(n - m + 1):
        if haystack[i:i + m] == needle:
            return i
    return -1
| [
"[email protected]"
] | |
675ae4611cdb22f5676993b3c7c77fdad3196c7b | e638e9fda0e672fa9a414515d0c05a24ab55ad38 | /FindPeakElement.py | 8ff51d79a507758cc2e9a7895bd046e1d669a131 | [] | no_license | zjuzpz/Algorithms | 8d1c7d50429aa5540eb817dc5495a20fc3f11125 | 2df1a58aa9474f2ecec2ee7c45ebf12466181391 | refs/heads/master | 2021-01-21T05:55:48.768728 | 2020-08-04T22:44:08 | 2020-08-04T22:44:08 | 44,586,024 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,221 | py | """
162. Find Peak Element
A peak element is an element that is greater than its neighbors.
Given an input array where num[i] ≠ num[i+1], find a peak element and return its index.
The array may contain multiple peaks, in that case return the index to any one of the peaks is fine.
You may imagine that num[-1] = num[n] = -∞.
For example, in array [1, 2, 3, 1], 3 is a peak element
and your function should return the index number 2.
"""
# O(logn)
# O(1)
class Solution(object):
    def findPeakElement(self, nums):
        """Return the index of any peak element of ``nums``.

        A peak is strictly greater than its neighbors; nums[-1] and nums[n]
        are conceptually -inf, and adjacent elements are never equal, so a
        peak always exists. Runs in O(log n) time, O(1) space.

        The original implementation special-cased ``mid == 0`` and duplicated
        the boundary checks; this is the standard invariant form: the range
        [lower, upper] always contains a peak, because we always step toward
        a strictly larger neighbor.

        :type nums: List[int]
        :rtype: int (or None for empty input, matching the original behavior)
        """
        if not nums:
            return None
        lower, upper = 0, len(nums) - 1
        while lower < upper:
            mid = (lower + upper) // 2
            if nums[mid] > nums[mid + 1]:
                # Descending to the right: a peak lies at mid or to its left.
                upper = mid
            else:
                # Ascending to the right: a peak lies strictly right of mid.
                lower = mid + 1
        return lower
if __name__ == "__main__":
print(Solution().findPeakElement([1,2,3,1]))
| [
"[email protected]"
] | |
84e04268feae1b1a5487fcbd2eaeda1fadbcb044 | 978248bf0f275ae688f194593aa32c267832b2b6 | /xlsxwriter/test/table/test_table04.py | b4fda1d6dc147b5f25e5321e06cd322bd6883955 | [
"BSD-2-Clause-Views"
] | permissive | satish1337/XlsxWriter | b0c216b91be1b74d6cac017a152023aa1d581de2 | 0ab9bdded4f750246c41a439f6a6cecaf9179030 | refs/heads/master | 2021-01-22T02:35:13.158752 | 2015-03-31T20:32:28 | 2015-03-31T20:32:28 | 33,300,989 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,867 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ..helperfunctions import _xml_to_list
from ...table import Table
from ...worksheet import Worksheet
from ...workbook import WorksheetMeta
from ...sharedstrings import SharedStringTable
class TestAssembleTable(unittest.TestCase):
    """
    Test assembling a complete Table file.
    """

    def test_assemble_xml_file(self):
        """Test writing a table"""
        self.maxDiff = None

        # Minimal worksheet wiring so add_table/_prepare_tables can run
        # without a full Workbook.
        worksheet = Worksheet()
        worksheet.worksheet_meta = WorksheetMeta()
        worksheet.str_table = SharedStringTable()

        # Table over C3:F13 with the autofilter disabled — the generated XML
        # therefore must not contain an <autoFilter> element.
        worksheet.add_table('C3:F13', {'autofilter': False})
        worksheet._prepare_tables(1)

        fh = StringIO()
        table = Table()
        table._set_filehandle(fh)

        table._set_properties(worksheet.tables[0])

        table._assemble_xml_file()

        # _xml_to_list normalizes the XML into a comparable token list.
        exp = _xml_to_list("""
                <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
                <table xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" id="1" name="Table1" displayName="Table1" ref="C3:F13" totalsRowShown="0">
                  <tableColumns count="4">
                    <tableColumn id="1" name="Column1"/>
                    <tableColumn id="2" name="Column2"/>
                    <tableColumn id="3" name="Column3"/>
                    <tableColumn id="4" name="Column4"/>
                  </tableColumns>
                  <tableStyleInfo name="TableStyleMedium9" showFirstColumn="0" showLastColumn="0" showRowStripes="1" showColumnStripes="0"/>
                </table>
                """)

        got = _xml_to_list(fh.getvalue())

        self.assertEqual(got, exp)
| [
"[email protected]"
] | |
b9da67d441c8a8ea04914a68e4e11e3566b32dde | 9ca55981d3245d87d45debce8e9825b60db43046 | /chemicals/thermal_conductivity.pyi | 75e0492a449dc884cb25dbc4ed2080c24b0a1a97 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | CalebBell/chemicals | c6b1ebd409c32e0e1053c4f97668a8ebcc92b969 | 37e32a7c7f819e0cb8e2a8784f8448f68b9a4215 | refs/heads/master | 2023-07-25T23:34:17.754310 | 2023-07-25T02:00:14 | 2023-07-25T02:00:14 | 264,697,738 | 137 | 33 | MIT | 2022-06-05T18:21:02 | 2020-05-17T15:27:11 | Python | UTF-8 | Python | false | false | 2,921 | pyi | # DO NOT EDIT - AUTOMATICALLY GENERATED BY tests/make_test_stubs.py!
from typing import List
from pandas.core.frame import DataFrame
from typing import (
List,
Optional,
Union,
)
def Bahadori_gas(T: float, MW: int) -> float: ...
def Bahadori_liquid(T: float, M: int) -> float: ...
def Chung(T: float, MW: float, Tc: float, omega: float, Cvm: float, mu: float) -> float: ...
def Chung_dense(
T: float,
MW: float,
Tc: float,
Vc: float,
omega: float,
Cvm: float,
Vm: float,
mu: float,
dipole: float,
association: float = ...
) -> float: ...
def DIPPR9B(
T: float,
MW: float,
Cvm: float,
mu: float,
Tc: Optional[float] = ...,
chemtype: Optional[str] = ...
) -> float: ...
def DIPPR9G(T: float, P: float, Tc: float, Pc: float, kl: float) -> float: ...
def DIPPR9H(ws: List[float], ks: List[float]) -> float: ...
def DIPPR9I(zs: List[float], Vms: List[float], ks: List[float]) -> float: ...
def Eli_Hanley(T: float, MW: float, Tc: float, Vc: float, Zc: float, omega: float, Cvm: float) -> float: ...
def Eli_Hanley_dense(
T: float,
MW: float,
Tc: float,
Vc: float,
Zc: float,
omega: float,
Cvm: float,
Vm: float
) -> float: ...
def Eucken(MW: float, Cvm: float, mu: float) -> float: ...
def Eucken_modified(MW: float, Cvm: float, mu: float) -> float: ...
def Filippov(ws: List[float], ks: List[float]) -> float: ...
def Gharagheizi_gas(T: float, MW: float, Tb: float, Pc: float, omega: float) -> float: ...
def Gharagheizi_liquid(T: int, M: int, Tb: int, Pc: float, omega: float) -> float: ...
def Lakshmi_Prasad(T: float, M: int) -> float: ...
def Lindsay_Bromley(
T: float,
ys: List[float],
ks: List[float],
mus: List[float],
Tbs: List[float],
MWs: List[float]
) -> float: ...
def Mersmann_Kind_thermal_conductivity_liquid(T: int, MW: float, Tc: float, Vc: float, na: int) -> float: ...
def Missenard(T: float, P: float, Tc: float, Pc: float, kl: float) -> float: ...
def Nicola(T: int, M: float, Tc: float, Pc: float, omega: float) -> float: ...
def Nicola_original(T: int, M: float, Tc: float, omega: float, Hfus: int) -> float: ...
def Sato_Riedel(T: int, M: int, Tb: int, Tc: int) -> float: ...
def Sheffy_Johnson(T: int, M: int, Tm: int) -> float: ...
def Stiel_Thodos_dense(T: float, MW: float, Tc: float, Pc: float, Vc: float, Zc: float, Vm: float, kg: float) -> float: ...
def Wassiljewa_Herning_Zipperer(
zs: List[float],
ks: List[float],
MWs: Union[List[int], List[float]],
MW_roots: Optional[List[float]] = ...
) -> float: ...
def __getattr__(name: str) -> DataFrame: ...
def _load_k_data() -> None: ...
def k_IAPWS(
T: float,
rho: float,
Cp: Optional[float] = ...,
Cv: Optional[float] = ...,
mu: Optional[float] = ...,
drho_dP: Optional[float] = ...
) -> float: ...
__all__: List[str] | [
"[email protected]"
] | |
03ac8c891da817f67d9b5c8e05d36778c398ff8e | 8015f1c62a2cb4efd21aa8938336913bf8117868 | /bamap/ba3483.pngMap.py | 11a26fda8cd8a7ca0878fb5776a770da9bee2f06 | [] | no_license | GamerNoTitle/Beepers-and-OLED | 675b5e3c179df0f0e27b42bf594c43860d03b9af | afe1340e5394ae96bda5f9022a8a66824368091e | refs/heads/master | 2020-04-20T00:09:47.122471 | 2019-04-29T04:59:35 | 2019-04-29T04:59:35 | 168,515,579 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 8,468 | py | ba3483.pngMap = [
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000011100000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000111100000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000111111111100000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000001111111111100000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000111111111111111000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000001001111111111111000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000001100111111111100000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000101111111111000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111110101000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000111111111111000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111100000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111111000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111111000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000111111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000001111111111111110000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111100000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111111000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111111010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111110000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111100000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111110000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000001100000000111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000001100000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000100010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
]
| [
"[email protected]"
] | |
47c2d02951db7604b35e5da5d690abc1744355f0 | 5cb8b2e8496a2e6d0cfa7f2bae26d43bce263632 | /example2.py | 820d5006d40bf4e7f280e7882c49bd00a38c06f4 | [] | no_license | Malak-Abdallah/Intro_to_python | 3745786e499c8d6a95c414d3e5d87d27e5332039 | 9dbd8fa6e52b3056ef5406ab1337291feefad8da | refs/heads/master | 2023-06-25T09:09:16.709645 | 2021-07-28T12:26:18 | 2021-07-28T12:26:18 | 383,780,163 | 0 | 1 | null | 2021-07-26T13:02:32 | 2021-07-07T11:48:29 | Python | UTF-8 | Python | false | false | 306 | py | if __name__ == '__main__':
x=list(map(int,input().split(" ")))
lists={}
for i in range(x[0]):
lists[i]=list(map(int,input().split(" ")))
lists[i].sort(reverse=True)
num=0
for i in range(x[0]):
num = num+ lists[i][0] ** 2
if num <x[1]:
print(num)
| [
"[email protected]"
] | |
efde0bd1158f2af6861f6ccd43d34cadf581acf1 | c0e7f3e1801bfd99c5fc86067ea552a8436c47dd | /junk_drawer/store.py | 91bbab4f943ccdcd3bb220b99dccdc8e8a27896d | [] | no_license | Opentrons/junk-drawer | 27a463053dbb45e56902f3d8286f57f931631f59 | 32ca381f754489b476e26fbf18001bbf98881ea9 | refs/heads/main | 2023-08-29T06:11:09.028658 | 2021-10-08T18:03:20 | 2021-10-08T18:03:20 | 298,059,402 | 0 | 0 | null | 2023-05-23T16:17:06 | 2020-09-23T18:26:53 | Python | UTF-8 | Python | false | false | 5,183 | py | """Store module for junk_drawer."""
from __future__ import annotations
from logging import getLogger
from typing import Optional
from .read_store import SCHEMA_VERSION_KEY, ReadStore, ModelT
from .filesystem import (
PathNotFoundError,
RemoveFileError,
FileEncodeError,
FileWriteError,
)
log = getLogger(__name__)
class Store(ReadStore[ModelT]):
    """A Store is used to create, read, update, and delete items in a collection.

    Extends ReadStore with write operations. Each item is serialized to JSON
    through the store's filesystem adapter; the schema version is embedded in
    every document on write so older files can be migrated on read
    (see `encode_json` / `parse_json`).
    """

    async def put(self, item: ModelT, key: Optional[str] = None) -> Optional[str]:
        """
        Put a single item to the store.

        Returns the key of the added item. If `ignore_errors` is set to `True`,
        `put` will return None if the item was unable to be added.
        """
        # Resolve the storage key (explicit `key` vs. one derived from the
        # item — precedence is decided by ReadStore._get_item_key).
        item_key = self._get_item_key(item, key)
        key_path = self._get_key_path(item_key)

        try:
            await self._filesystem.write_json(
                key_path, item, encode_json=self.encode_json
            )
            return item_key
        except (FileWriteError, FileEncodeError) as error:
            # Re-raises unless the store was configured to ignore errors.
            self._maybe_raise_file_error(error)

        return None

    def put_sync(self, item: ModelT, key: Optional[str] = None) -> Optional[str]:
        """
        Put a single item to the store.

        Synchronous version of :py:meth:`put`.
        """
        item_key = self._get_item_key(item, key)
        key_path = self._get_key_path(item_key)

        try:
            self._filesystem.sync.write_json(
                key_path, item, encode_json=self.encode_json
            )
            return item_key
        except (FileWriteError, FileEncodeError) as error:
            self._maybe_raise_file_error(error)

        return None

    async def ensure(self, default_item: ModelT, key: Optional[str] = None) -> ModelT:
        """
        Ensure an item exists in the store at the given key.

        If an item with `key` already exists, `ensure` will return the item. If
        no item with `key` exists, it will write `default_item` to the store
        before returning the item.

        This method is a shortcut for a `get` followed by a `put` if the `get`
        returns `None`.
        """
        item_key = self._get_item_key(default_item, key)
        result = await self.get(item_key)

        if result is None:
            # NOTE(review): not atomic — a concurrent writer between the get
            # and the put could be overwritten by default_item.
            await self.put(default_item, key)
            result = default_item

        return result

    def ensure_sync(self, default_item: ModelT, key: Optional[str] = None) -> ModelT:
        """
        Ensure an item exists in the store at the given key.

        Synchronous version of :py:meth:`ensure`.
        """
        item_key = self._get_item_key(default_item, key)
        result = self.get_sync(item_key)

        if result is None:
            self.put_sync(default_item, key)
            result = default_item

        return result

    async def delete(self, key: str) -> Optional[str]:
        """
        Delete a single item in the store.

        Returns the deleted key if the item was removed or None if no item was
        found at that key. If `ignore_errors` is set, delete will also return
        None if the item is unable to be removed.
        """
        key_path = self._get_key_path(key)

        try:
            await self._filesystem.remove(key_path)
            return key
        except (PathNotFoundError, RemoveFileError) as error:
            # A missing path is treated the same as a failed removal:
            # swallowed (returning None) unless the store raises file errors.
            self._maybe_raise_file_error(error)

        return None

    def delete_sync(self, key: str) -> Optional[str]:
        """
        Delete a single item in the store.

        Synchronous version of :py:meth:`delete`.
        """
        key_path = self._get_key_path(key)

        try:
            self._filesystem.sync.remove(key_path)
            return key
        except (PathNotFoundError, RemoveFileError) as error:
            self._maybe_raise_file_error(error)

        return None

    async def delete_store(self) -> None:
        """Delete the store and all its items."""
        return await self._filesystem.remove_dir(self._directory)

    def delete_store_sync(self) -> None:
        """
        Delete the store and all its items.

        Synchronous version of :py:meth:`delete_store`.
        """
        return self._filesystem.sync.remove_dir(self._directory)

    def encode_json(self, item: ModelT) -> str:
        """Encode a model instance into JSON."""
        obj = item.dict()
        # Stamp the current schema version (== number of known migrations) so
        # parse_json knows which migrations still need to run on read.
        obj[SCHEMA_VERSION_KEY] = len(self._migrations)

        # NOTE(mc, 2020-10-25): __json_encoder__ is an undocumented property
        # of BaseModel, but its usage here is to ensure Pydantic model config
        # related to serialization is properly used. This functionality is
        # covered by basic integration tests
        return item.__config__.json_dumps(obj, default=item.__json_encoder__)

    def parse_json(self, data: str) -> ModelT:
        """Decode a string into a model instance."""
        obj = self._schema.__config__.json_loads(data)
        # Missing version key means a pre-versioning document (version 0).
        schema_version = obj.pop(SCHEMA_VERSION_KEY, 0)

        # Apply every migration newer than the stored version, in order.
        for migrate in self._migrations[schema_version:]:
            obj = migrate(obj)

        return self._schema.parse_obj(obj)
| [
"[email protected]"
] | |
375dc7a9f08e87def6b9d83af33b3624c9f7ab69 | 56df6683865fd9319b389afd6dd4a922299da593 | /source/scripts/python/host/source/host.py.in | 0e4829aa2a05b70be869733b69853d892d4ff567 | [
"Python-2.0",
"GPL-2.0-or-later",
"MPL-1.1",
"NCSA",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"BSD-3-Clause",
"MPL-2.0",
"Ruby",
"BSD-2-Clause",
"MIT",
"Apache-2.0"
] | permissive | metacall/core | 4f36fe0b13924853aab6d0f053285b649398cc1d | 419ffb573b17501c91662f0f161032bb19ea1ab3 | refs/heads/develop | 2023-08-23T10:19:30.898387 | 2023-08-10T18:39:08 | 2023-08-10T18:39:08 | 163,221,062 | 1,391 | 167 | Apache-2.0 | 2023-09-13T23:49:43 | 2018-12-26T22:02:57 | C | UTF-8 | Python | false | false | 957 | in | #!/usr/bin/env python3
import sys

# Insert Python Port folder first in the system path list so `metacall`
# resolves to the build-tree port (the @...@ placeholder is substituted by
# the build system when this .in template is configured).
sys.path.insert(0, '@PROJECT_METACALL_PORTS_DIRECTORY@')

from metacall import metacall, metacall_load_from_memory

# NodeJS source loaded below. Its exported `b` calls back into Python's `c`
# through the MetaCall addon, exercising a Python -> Node -> Python round trip.
script = '''#!/usr/bin/env node
'use strict';

const path = require('path');

/* Load MetaCall addon */
const addon = (() => {
	try {
		/* This forces metacall port to be run always by metacall cli */
		return process._linkedBinding('node_loader_port_module');
	} catch (e) {
		console.error('MetaCall failed to load, probably you are importing this file from NodeJS directly.');
		console.error('You should use MetaCall CLI instead. Install it from: https://github.com/metacall/install');
		throw e;
	}
})();

function b() {
	return addon.metacall('c');
}

module.exports = {
	b
};
'''

# Register the NodeJS script with MetaCall so `b` becomes callable from Python.
metacall_load_from_memory('node', script)

def a():
    """Call the NodeJS function `b` (which in turn calls Python's `c`).

    Prints and returns the result of the cross-runtime call.
    """
    result = metacall('b')

    print('Result call from b:')
    print(result)

    return result

def c():
    """Callback invoked from NodeJS; returns a constant for verification."""
    return 3.0
| [
"[email protected]"
] | |
53d2fbfd9f3c99ec42a32fc5ee87f71345a8cd07 | 14e7058adf766352a0b90b66b7dcf887105a481c | /djangoappengine/settings_base.py | b62b7cbd0c0dfb09d8af41eeb26eecfd9cb6af34 | [
"BSD-2-Clause"
] | permissive | brunogamacatao/portalsaladeaula | 2b7f07f07c2518dd359f043483fbb27417f62aaf | 9429e485aa37ffea3208339a807032e9230a3c84 | refs/heads/master | 2020-12-29T01:42:18.594281 | 2012-06-22T12:24:44 | 2012-06-22T12:24:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py | # Initialize App Engine SDK if necessary
# Initialize the App Engine SDK if it is not already on the path.
try:
    from google.appengine.api import api_proxy_stub_map
except ImportError:
    from .boot import setup_env
    setup_env()

from djangoappengine.utils import on_production_server, have_appserver

# Debugging is enabled everywhere except the production server.
DEBUG = not on_production_server
TEMPLATE_DEBUG = DEBUG

ROOT_URLCONF = 'urls'

# App Engine datastore backend.
DATABASES = {
    'default': {
        'ENGINE': 'djangoappengine.db',
    },
}

# Email: asynchronous (task-queue based) delivery in production, direct
# delivery during local development.
if on_production_server:
    EMAIL_BACKEND = 'djangoappengine.mail.AsyncEmailBackend'
else:
    EMAIL_BACKEND = 'djangoappengine.mail.EmailBackend'

# File uploads go through the App Engine Blobstore.
PREPARE_UPLOAD_BACKEND = 'djangoappengine.storage.prepare_upload'
SERVE_FILE_BACKEND = 'djangoappengine.storage.serve_file'
DEFAULT_FILE_STORAGE = 'djangoappengine.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
    'djangoappengine.storage.BlobstoreFileUploadHandler',
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
)

# Memcache-backed caching; timeout=0 means "no expiration".
CACHE_BACKEND = 'memcached://?timeout=0'
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'

# Allow debug-toolbar-style introspection only for local requests.
if not on_production_server:
    INTERNAL_IPS = ('127.0.0.1',)
| [
"[email protected]"
] | |
080bcf39abb2b1192174b56c122775222dc094e5 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Games/Pygame/pygame_widgets/widgets/holder.py | 96b94e5e4579dd18844513a989799935872a599d | [] | no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:72853c292faeedcdb3d5218c2cef2738641f5b2b222c1b069415a537cc49181f
size 7770
| [
"[email protected]"
] | |
af1a0b1e3fbc5532f301616c7de79889ed3c1f13 | 338298474c517e28d9a214c3525b9709625fa438 | /YouWeesh/Controllers/RegisterController.py | e1c6418416713ae4a61fcdd2a707f2d628d8db50 | [] | no_license | vincehar/Backend | f5a8f0e264de2ba7ccadba3bce015f3a30e9c478 | fb143c6c70cb65018d0436bf5b891cb72620208d | refs/heads/master | 2023-07-06T10:37:50.057555 | 2023-06-28T13:04:36 | 2023-06-28T13:04:36 | 73,698,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,284 | py | from base64 import b64decode
from django.core.files.base import ContentFile
from django.http import Http404
from mongoengine.django.auth import User
from rest_framework.decorators import api_view, renderer_classes, permission_classes
from rest_framework.permissions import AllowAny
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from YouWeesh.Models.Address import Address
from YouWeesh.Models.Preferences import Preferences
from YouWeesh.Models.SocialNetworks import SocialNetworks
from YouWeesh.Models.Users import Users
from YouWeesh.Tools.app import App
@api_view(('POST',))
@permission_classes((AllowAny,))
@renderer_classes((JSONRenderer,))
def registeruser(request):
    """Create a new user account from POSTed registration data.

    Expects username/password/email/lastname/firstname/socialnetwork and a
    base64-encoded profile picture. Only native "Youweesh" accounts get a
    usable password; Facebook/Twitter accounts store the provided picture.
    Always responds with JSON ``true``.
    """
    username = request.POST['username']
    password = request.POST['password']
    email = request.POST['email'].lower();
    lastname = request.POST['lastname']
    firstname = request.POST['firstname']
    socialnetwork = request.POST['socialnetwork']
    pictureBase64 = request.POST['picture']
    # Hard-coded for now; the commented expression shows the intended source.
    home_town = 'Geneve'#request.POST['home_town']
    picturedata = b64decode(pictureBase64)
    socialnetworkObject = SocialNetworks.objects.get(label=socialnetwork)
    u=User.objects.create(username=username, email=email, first_name=firstname, last_name=lastname)
    # Social-login accounts never authenticate with a local password, so only
    # native accounts get one hashed and stored.
    if socialnetwork == 'Youweesh':
        u.set_password(password)
    u.save()
    preferences = Preferences()
    preferences.save()
    if home_town != "":
        addr = Address()
        addr.city = home_town
        # Geocode the city name into coordinates before saving.
        addr.getorUpdateCoordinates()
        addr.save()
        users = Users.objects.create(user=u, social_network=socialnetworkObject, address=addr, preferences=preferences)
    else:
        users = Users.objects.create(user=u, social_network=socialnetworkObject, preferences=preferences)
    # Social-network signups supply their avatar in the request payload.
    # NOTE(review): assumes `picture` is a mongoengine file-like field whose
    # replace() accepts a ContentFile — confirm against the Users model.
    if socialnetwork == 'Facebook' or socialnetwork == 'Twitter':
        users.picture.replace(ContentFile(picturedata))
    users.save()
    return Response(True)
@api_view(('POST',))
@permission_classes((AllowAny,))
@renderer_classes((JSONRenderer,))
def registerFCMToken(request):
    """Store the POSTed Firebase Cloud Messaging token on the current user.

    Responds with JSON ``true`` on success; raises Http404 when no user
    profile exists for the request.
    """
    try:
        connected_user = App.getCurrentUser(request)
        connected_user.update_fcm_token(request.POST['fcmToken'])
    # Bug fix: the original clause was `except connected_user.DoesNotExist` —
    # if App.getCurrentUser itself raised, `connected_user` was still unbound
    # and evaluating the except clause produced a NameError instead of the
    # intended 404. Catch the exception on the model class instead (Users is
    # imported at module level).
    except Users.DoesNotExist:
        raise Http404('Not logged')
    return Response(True)
"[email protected]"
] | |
1fa1bec403921087904bbafbee13cec85e2e510f | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/trapping_20200617172315.py | 650a589856ffb33697b3724367c5a84d8aea7bf7 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | def trap(arr):
# left = max(arr)
# copy = []
# for j in arr:
# copy.append(j)
# arr.remove(left)
# right = max(arr)
# total = 0
# print(copy)
# for i in range(len(copy)-1):
# total += min(left,right) - copy[i]
# print(min(left,right),"-",copy[i],"==",total)
# print (total)
res = 0
trap([0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1])
| [
"[email protected]"
] | |
bd4a9a56ca71e397b6a266f1919c1626b4d31214 | 5390d79dad71ad0d9ff9d0777435dcaf4aad16b3 | /chapter_07/pizza_topping3.py | d3a2378353388256cf56dcd63ea0eaf942f43e2c | [] | no_license | JasperMi/python_learning | 19770d79cce900d968cec76dac11e45a3df9c34c | 8111d0d12e4608484864dddb597522c6c60b54e8 | refs/heads/master | 2020-11-26T08:57:02.983869 | 2020-03-11T10:14:55 | 2020-03-11T10:14:55 | 218,935,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | prompt = "\nPlease input your pizza toppings:"
prompt += "\nEnter 'quit' to end the program. "
message = ""
while True:
pizza_topping = input(prompt)
if pizza_topping == 'quit':
break
else:
print("We'll add " + pizza_topping + ".")
| [
"[email protected]"
] | |
691cf31fb1d7e831b764492ca883e2c4ccfdeb40 | 9a8ff03d27e4822fa27f78fb6ba1dd419725cf90 | /home/urls.py | 1411cf4d18bcdf2783251f6cd45251f3234642e5 | [] | no_license | Pavlo-Olshansky/Social-lemon | 0f9f994fbbba78cd0e7defa1e7fcf60b6ed55165 | 3b7f0b9e8526f6c74d98ad38412151ea2678f808 | refs/heads/master | 2022-12-11T01:12:30.919023 | 2018-06-17T14:57:10 | 2018-06-17T14:57:10 | 104,945,732 | 0 | 0 | null | 2022-11-22T01:53:50 | 2017-09-26T22:48:52 | Python | UTF-8 | Python | false | false | 2,089 | py | from django.conf.urls import url, include
from . import views
from django.contrib.auth import views as auth_views
from .forms import CustomAuthForm
from django.contrib.auth.models import User
urlpatterns = [
# url(r'^$', views.HomePage.as_view(), name='home'),
# Register new user
url(r'^signup/', views.SignUp.as_view(), name='signup'),
# Login URL
url(r'^login/$', auth_views.login, {'template_name': 'registration/login.html', 'authentication_form': CustomAuthForm}, name='login'),
url(r'^$', auth_views.login,
{'template_name': 'home.html',
'authentication_form': CustomAuthForm,
'extra_context':
{'recommendations': views.recommendation_list }
}, name='home-login'),
# Logout URL
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout'),
# Reset password
url(r'^password_reset/$', auth_views.password_reset, {'post_reset_redirect': '/password_reset/done/'}, name='password_reset'),
url(r'^password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',auth_views.password_reset_confirm, {'post_reset_redirect': '/reset/done/'}, name='password_reset_confirm'),
url(r'^reset/done/$', auth_views.password_reset_complete, name='password_reset_complete'),
# Send an activation URL
url(r'^account_activation_sent/$', views.account_activation_sent, name='account_activation_sent'),
# Activation URL
url(r'^activate/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.activate, name='activate'),
# Profile URL's
url(r'^profile/$', views.ViewProfile.as_view(), name='view_profile'),
url(r'^profile/(?P<pk>\d+)/$', views.ViewProfile.as_view(), name='view_profile_with_pk'),
url(r'^profile/edit/$', views.edit_profile, name='edit_profile'),
url(r'^profile/password/$', views.ChangePassword.as_view(), name='change_password'),
]
| [
"[email protected]"
] | |
824a8a299cdea984c99f9a2b32fe5eb4b4918082 | d094ba0c8a9b1217fbf014aa79a283a49aabe88c | /env/lib/python3.6/site-packages/traits/tests/test_container_events.py | 7ccbe4fccef1ce095b0e9bfabb18e60297996524 | [
"Apache-2.0"
] | permissive | Raniac/NEURO-LEARN | d9274e0baadd97bb02da54bdfcf6ca091fc1c703 | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | refs/heads/master | 2022-12-25T23:46:54.922237 | 2020-09-06T03:15:14 | 2020-09-06T03:15:14 | 182,013,100 | 9 | 2 | Apache-2.0 | 2022-12-09T21:01:00 | 2019-04-18T03:57:00 | CSS | UTF-8 | Python | false | false | 5,193 | py | # ------------------------------------------------------------------------------
#
# Copyright (c) 2007, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# ------------------------------------------------------------------------------
"""
Tests for Dict items_changed events
"""
from __future__ import absolute_import, print_function
from traits.testing.unittest_tools import unittest
from traits.api import HasTraits, Dict
class MyClass(HasTraits):
""" A dummy HasTraits class with a Dict """
d = Dict({"a": "apple", "b": "banana", "c": "cherry", "d": "durian"})
def __init__(self, callback):
"The callback is called with the TraitDictEvent instance"
self.callback = callback
return
def _d_items_changed(self, event):
if self.callback:
self.callback(event)
return
class MyOtherClass(HasTraits):
""" A dummy HasTraits class with a Dict """
d = Dict({"a": "apple", "b": "banana", "c": "cherry", "d": "durian"})
class Callback:
"""
A stateful callback that gets initialized with the values to check for
"""
def __init__(self, obj, added={}, changed={}, removed={}):
self.obj = obj
self.added = added
self.changed = changed
self.removed = removed
self.called = False
return
def __call__(self, event):
if event.added != self.added:
print("\n\n******Error\nevent.added:", event.added)
else:
self.obj.assertEqual(event.added, self.added)
self.obj.assertEqual(event.changed, self.changed)
self.obj.assertEqual(event.removed, self.removed)
self.called = True
return
class DictEventTestCase(unittest.TestCase):
def test_setitem(self):
# overwriting an existing item
cb = Callback(self, changed={"c": "cherry"})
foo = MyClass(cb)
foo.d["c"] = "coconut"
self.assertTrue(cb.called)
# adding a new item
cb = Callback(self, added={"g": "guava"})
bar = MyClass(cb)
bar.d["g"] = "guava"
self.assertTrue(cb.called)
return
def test_delitem(self):
cb = Callback(self, removed={"b": "banana"})
foo = MyClass(cb)
del foo.d["b"]
self.assertTrue(cb.called)
return
def test_clear(self):
removed = MyClass(None).d.copy()
cb = Callback(self, removed=removed)
foo = MyClass(cb)
foo.d.clear()
self.assertTrue(cb.called)
return
def test_update(self):
update_dict = {"a": "artichoke", "f": "fig"}
cb = Callback(self, changed={"a": "apple"}, added={"f": "fig"})
foo = MyClass(cb)
foo.d.update(update_dict)
self.assertTrue(cb.called)
return
def test_setdefault(self):
# Test retrieving an existing value
cb = Callback(self)
foo = MyClass(cb)
self.assertEqual(foo.d.setdefault("a", "dummy"), "apple")
self.assertFalse(cb.called)
# Test adding a new value
cb = Callback(self, added={"f": "fig"})
bar = MyClass(cb)
self.assertTrue(bar.d.setdefault("f", "fig") == "fig")
self.assertTrue(cb.called)
return
def test_pop(self):
# Test popping a non-existent key
cb = Callback(self)
foo = MyClass(cb)
self.assertEqual(foo.d.pop("x", "dummy"), "dummy")
self.assertFalse(cb.called)
# Test popping a regular item
cb = Callback(self, removed={"c": "cherry"})
bar = MyClass(cb)
self.assertEqual(bar.d.pop("c"), "cherry")
self.assertTrue(cb.called)
return
def test_popitem(self):
foo = MyClass(None)
foo.d.clear()
foo.d["x"] = "xylophone"
cb = Callback(self, removed={"x": "xylophone"})
foo.callback = cb
self.assertEqual(foo.d.popitem(), ("x", "xylophone"))
self.assertTrue(cb.called)
return
def test_dynamic_listener(self):
foo = MyOtherClass()
# Test adding
func = Callback(self, added={"g": "guava"})
foo.on_trait_change(func.__call__, "d_items")
foo.d["g"] = "guava"
foo.on_trait_change(func.__call__, "d_items", remove=True)
self.assertTrue(func.called)
# Test removing
func2 = Callback(self, removed={"a": "apple"})
foo.on_trait_change(func2.__call__, "d_items")
del foo.d["a"]
foo.on_trait_change(func2.__call__, "d_items", remove=True)
self.assertTrue(func2.called)
# Test changing
func3 = Callback(self, changed={"b": "banana"})
foo.on_trait_change(func3.__call__, "d_items")
foo.d["b"] = "broccoli"
foo.on_trait_change(func3.__call__, "d_items", remove=True)
self.assertTrue(func3.called)
return
| [
"[email protected]"
] | |
0330944a234507649832eb94badabbf3a9353faf | 5a9194df7e40b1f9694576c88c536b24d22f548b | /tests/projects/test_delete_project.py | 96d89108aa6b51da09f660ffdc8b7e8fd51e6b38 | [] | no_license | jamesstidard/Talk-Zoho | 17230611e40e5c232dcd33bdbd5148ba20543810 | 3a918d72146dae1ed6bb8afee09dfe658a540048 | refs/heads/master | 2021-05-08T10:44:05.881154 | 2017-03-03T16:49:34 | 2017-03-03T16:49:34 | 119,862,940 | 0 | 0 | null | 2018-02-01T16:34:38 | 2018-02-01T16:34:38 | null | UTF-8 | Python | false | false | 383 | py | import pytest
from tests.projects.fixtures import * # noqa
@pytest.mark.gen_test
def test_cant_delete_user(projects, portal_id):
# Deleting user with wrong id always returns true (CRM API limitation)
# Pull projects down to lowest common denominator for unified interface.
success = yield projects.projects.delete('123456789', portal_id=portal_id)
assert success
| [
"[email protected]"
] | |
1963788b916b4fec844fe1d1523a7cfee98a0955 | bd109656f1ea18fe2eae9afffcc0074d75826bb9 | /setup.py | 55a44300afd9c9ab3918a9fb2a7ad146c8367a9b | [
"MIT"
] | permissive | ffreemt/baidu-tr-async-free | 4db5356e24e1ac818a6f641ccad7093113dd32ec | 3bf422e8d8406123479c5bcdb679af795db0ba8f | refs/heads/master | 2021-01-02T21:41:05.188476 | 2020-02-12T06:15:30 | 2020-02-12T06:15:30 | 239,812,578 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,407 | py | ''' setup '''
# pylint: disable=invalid-name
from pathlib import Path
import re
from setuptools import setup, find_packages
name = """baidu-tr-async-free"""
description = 'baidu translate for free with async and proxy support'
dir_name, *_ = find_packages()
# dir_name = 'bdtr_async'
curr_dir = Path(__file__).parent
_ = Path(f'{dir_name}/__init__.py').read_text(encoding='utf-8')
version, *_ = re.findall(r"__version__\W*=\W*'([^']+)'", _)
targz = 'v_' + version.replace('.', '') + '.tar.gz'
install_requires = ['httpx', 'loguru', 'google-sign']
README_rst = f'{curr_dir}/README.md'
long_description = (
open(README_rst, encoding='utf-8').read() if Path(README_rst).exists() else ''
)
setup(
name=name,
packages=find_packages(),
version=version,
description=description,
long_description=long_description,
long_description_content_type='text/markdown',
keywords=['machine translation', 'free', 'sign'],
author="mikeee",
url=fr'http://github.com/ffreemt/{name}',
download_url=fr'https://github.com/ffreemt/{name}/archive/' + targz,
install_requires=install_requires,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
],
license='MIT License',
)
| [
"[email protected]"
] | |
c8e6f159a7813608572c6285f8a0b42c0a56fd09 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_213/61.py | cfe6796cc3346eddb991963bab839f4b05e02f73 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | # link: https://code.google.com/codejam/contest/5314486/dashboard#s=1
import string
import time
testIndex=2
problemRoot="d:/prog/versenyek/googlejam"
problemDir="2017/round2"
problemName="B"
inputFiles= ["-example.in", "-small.in", "-large.in"]
outputFiles=["-example.out", "-small.out", "-large.out"]
time1=time.time()
fileName=string.strip(problemRoot)+"/"+problemDir+"/"+problemName+inputFiles[testIndex]
inputData=[map(int,line.split()) for line in open(fileName,'r') if line.strip()]
fileName=string.strip(problemRoot)+"/"+problemDir+"/"+problemName+outputFiles[testIndex]
fileToWrite=open(fileName,'wb')
time2=time.time()
lineIdx=1
for case in xrange(inputData[0][0]):
n,c,m=inputData[lineIdx]
tick=[0]*n
cost=[0]*c
lineIdx+=1
for i in xrange(m):
tick[inputData[lineIdx+i][0]-1]+=1
cost[inputData[lineIdx+i][1]-1]+=1
lineIdx+=m
rides=max(cost) # the most ticket at one user
fstk=0
for i in xrange(n):
fstk+=tick[i]
rides=max(rides,(fstk-1)/(i+1)+1)
pro=0
for i in xrange(n):
if tick[i]>rides:
pro+=tick[i]-rides
fileToWrite.write("Case #"+str(case+1)+": "+str(rides)+" "+str(pro)+"\n")
fileToWrite.close()
print 'Total time: ', time.time() - time1
print 'Solving time: ', time.time() - time2
| [
"[email protected]"
] | |
2dea5afe2da38332a9e1ae100dcd6b3750a2efc4 | c6374029bcba930ab37098e8e954067aeae252d8 | /mla/svm/kernerls.py | da289a183af1d605a4fb65c9a4a197ad9621ecda | [
"MIT"
] | permissive | bhumikadhingra/MLAlgorithms | ab6f20aa8a899ff265668155cb4083ec19535429 | 8f002d0804663854eaec41b4ead698caaaf11c69 | refs/heads/master | 2020-08-07T09:12:36.194453 | 2019-10-07T13:15:12 | 2019-10-07T13:15:12 | 213,387,107 | 1 | 0 | MIT | 2019-10-07T13:13:09 | 2019-10-07T13:13:09 | null | UTF-8 | Python | false | false | 721 | py | # coding:utf-8
import numpy as np
import scipy.spatial.distance as dist
class Linear(object):
def __call__(self, x, y):
return np.dot(x, y.T)
def __repr__(self):
return "Linear kernel"
class Poly(object):
def __init__(self, degree=2):
self.degree = degree
def __call__(self, x, y):
return np.dot(x, y.T) ** self.degree
def __repr__(self):
return "Poly kernel"
class RBF(object):
def __init__(self, gamma=0.1):
self.gamma = gamma
def __call__(self, x, y):
x = np.atleast_2d(x)
y = np.atleast_2d(y)
return np.exp(-self.gamma * dist.cdist(x, y) ** 2).flatten()
def __repr__(self):
return "RBF kernel"
| [
"[email protected]"
] | |
387523e464797ebfe8e34406b339dc22c29b74c0 | 69099b95bb1507b30e6be8d4ad1d39f923833e97 | /BIRL_optimal_demos.py | 737cf7525eced539ad6a481bc67d6068cbee618d | [
"MIT"
] | permissive | dsbrown1331/aima-based-irl | 80d8cc2eafd751bd84bdcda6ad5a9a44060947c6 | 553550030240ae886f4260ece59dd252adb1fc6e | refs/heads/master | 2021-01-17T23:12:03.038606 | 2016-09-27T15:30:27 | 2016-09-27T15:30:27 | 67,889,747 | 1 | 0 | null | 2016-09-10T19:16:56 | 2016-09-10T19:16:55 | null | UTF-8 | Python | false | false | 4,300 | py | import numpy as np
import mdp
from my_birl_batch import *
from my_birl import *
from halfspace_uncertainty import *
from mdp_feature_counts import *
from optimal_teaching import *
from activeLearning import chain_variance
import operator
for size in range(3,4):
print "^^^^^^", size, "^^^^^^^"
f = open('active_results/optimalTest' + str(size)+ '2.txt','w')
for iter in range(10):
print "-----", iter, "------"
#generate a random n by n world
grid_width = size
grid_height = size
rand_reward = []
for row in range(grid_height):
temp = []
for col in range(grid_width):
temp.append(np.random.randint(-10,0))
rand_reward.append(temp)
rand_reward[0][0] = 10
###for debugging
#rand_reward = [[10.00, -5.00, -5.00],
#[-1.00, -1.00, -1.00 ]]
###
terminals=[(0,grid_height-1)]
init = []
for row in range(grid_height):
for col in range(grid_width):
if row == grid_height-1 and col == 0:
continue
init.append((col,row))
print "init"
print init
expert_mdp = mdp.GridMDP(deepcopy(rand_reward), terminals, init)
expert_mdp.print_rewards()
expert_mdp.print_arrows()
#try Cakmak's Task 1 with just one start to see if it gets the same demo
#birlToy = DeterministicWeightGridMDP(
# features = ['f0', 'f1', 'f2'],
# weights = {'f0': 10, 'f1': -5, 'f2': -1, None: None},
# grid = [['f0', 'f1', 'f1'],
# ['f2', 'f2', 'f2']],
# terminals=[(0,1)],
# init = [(0,0),(1,0),(1,1),(2,0),(2,1)], gamma = 0.9)
features = []
count = 0
for row in range(grid_height):
for col in range(grid_width):
features.append('f' + str(count))
count += 1
#print "features"
#print features
weights = {}
count = 0
for row in range(grid_height):
for col in range(grid_width):
#print row,col
weights[features[count]] = rand_reward[row][col]
count += 1
weights[None] = None
print "weights"
print weights
grid = []
count = 0
for row in range(grid_height):
temp = []
for col in range(grid_width):
temp.append(features[count])
count += 1
grid.append(temp)
#print "grid"
#print grid
#select random init state
demo_init = init[np.random.randint(0,len(init))]
print "demo_init"
print demo_init
#generate random demo
demo = []
expert_policy = best_policy(expert_mdp, value_iteration(expert_mdp, 0.001))
demo.append(mdp.generate_demonstration(demo_init, expert_policy, expert_mdp))
print "demo"
print demo
rand_task = DeterministicWeightGridMDP(
features, weights, grid, terminals, init, gamma = 0.95)
#rand_task.print_rewards()
#rand_task.print_arrows()
cakmak_optimal = seeded_optimal_teaching(demo,rand_task, 100000,10)
#print("solution: ", cakmak_optimal)
score, cakmak_demo = cakmak_optimal
cakmak_init = cakmak_demo[0][0]
print "cakmak", cakmak_init
#compare to BIRL active learning reward variance approach
chain_length = 12000
chain_burn = 2000
birl = BIRL_BATCH(demo, expert_mdp.get_grid_size(), expert_mdp.terminals, expert_mdp.init,
step_size=1.0, birl_iteration = chain_length)
chain, mapMDP = birl.run_birl()
chain_var = chain_variance(chain, chain_burn)
#find highest variance that's not start of demo or terminal state
chain_var.pop(terminals[0])
sorted_var = sorted(chain_var.items(), key=operator.itemgetter(1))
sorted_var.reverse()
query_states = [state for state, var in sorted_var]
print query_states
indx = query_states.index(cakmak_init)
print indx
f.write(str(indx) + '\n') # python will convert \n to os.linesep
f.close()
| [
"[email protected]"
] | |
6e4e5e3c39abdfef03c473cadda68be2c7a10fa9 | 97072bdb023dd3916d0ced6aba1c98ec0893ee01 | /tests/test_user.py | 6c75c87d9a88a701e3f63fcc1efb7784b662cc2f | [
"MIT"
] | permissive | AnumAsif/my-blogs | ed814d0559a1d84e138a02b846d2a2b85aacfebd | 8dd6d8e9e84867582dad10265203d1219c00926c | refs/heads/master | 2020-04-23T01:51:07.902859 | 2019-02-19T12:18:42 | 2019-02-19T12:18:42 | 170,826,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | import unittest
from app.models import User
class TestUserModel(unittest.TestCase):
def setUp(self):
self.user = User(password="anum123")
def test_password_setter(self):
self.assertTrue(self.user.password_hash is not None)
def test_no_access_password(self):
with self.assertRaises(AttributeError):
self.user.password
def test_password_verification(self):
self.assertTrue(self.user.verify_password('anum123')) | [
"[email protected]"
] | |
88e0919771e070e602f208c9d6bbeae0dab97897 | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.5/django/contrib/databrowse/__init__.py | 00928c97e129815852bcfa60bb1f0d6611f9a0ce | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.5/django/contrib/databrowse/__init__.py | [
"[email protected]"
] | |
6416d21e330f6923f19a06e51308eeb8b9f4168b | f26d67e3e9f8b90e5d6243279a1c2ce87fa41d46 | /tests/api/test_prodstats.py | c06973ae4a39c46587d14ccdcf139af25afd3c4a | [
"MIT"
] | permissive | OCB-DS/prodstats | cf554e3abee651463e9f81606d4b633f464658a7 | 4ff5a6e0b0d6152af2d7e1f3844ede2d33ad4824 | refs/heads/master | 2022-11-25T15:30:06.988683 | 2020-08-02T16:08:05 | 2020-08-02T16:08:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,722 | py | # import logging
# import pandas as pd
# import pytest
# import starlette.status as codes
# from db.models import ProdStat as Model
# from tests.utils import rand_str
# logger = logging.getLogger(__name__)
# pytestmark = pytest.mark.asyncio
# @pytest.fixture(scope="session")
# def prodstat_records(json_fixture):
# yield json_fixture("prodstats.json")
# @pytest.fixture
# def prodstat_df(prodstat_records):
# yield pd.DataFrame(prodstat_records).set_index(["api10", "prod_date"])
# @pytest.fixture(autouse=True)
# async def seed_prodstats(bind, prodstat_records):
# await Model.bulk_insert(prodstat_records)
# class TestPlaceEndpoint:
# path: str = "/api/v1/prodstats"
# async def test_create_prodstat(self, client):
# prodstat_name = "test"
# response = await client.post(self.path, json=[{"name": prodstat_name}])
# assert response.status_code == codes.HTTP_202_ACCEPTED
# async def test_list_prodstats(self, client):
# expected_record_count = 25
# response = await client.get(self.path)
# assert response.status_code == codes.HTTP_200_OK
# data = response.json()
# assert len(data) == expected_record_count
# assert response.links["next"] is not None
# async def test_get_prodstat(self, client):
# id = 20
# response = await client.get(f"{self.path}/{id}")
# assert response.status_code == codes.HTTP_200_OK
# data = response.json()
# assert data["id"] == 20
# async def test_update_exising_prodstat(self, client):
# id = 10
# value = rand_str(length=8)
# response = await client.put(f"{self.path}/{id}", json={"state": value})
# assert response.status_code == codes.HTTP_200_OK
# data = response.json()
# assert data["id"] == id
# assert data["state"] == value
# async def test_update_prodstat_not_found(self, client):
# id = 99999
# value = rand_str(length=8)
# response = await client.put(f"{self.path}/{id}", json={"state": value})
# assert response.status_code == codes.HTTP_404_NOT_FOUND
# async def test_delete_existing_prodstat(self, client):
# id = 20
# response = await client.delete(f"{self.path}/{id}")
# assert response.status_code == codes.HTTP_200_OK
# data = response.json()
# assert data["id"] == id
# async def test_delete_prodstat_not_found(self, client):
# id = 99999
# response = await client.delete(f"{self.path}/{id}")
# assert response.status_code == codes.HTTP_404_NOT_FOUND
# data = response.json()
# assert data["detail"] == "prodstat not found"
| [
"[email protected]"
] | |
8e3b3c81c0c614f310d3cacfaea2b523e16773bf | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_344/ch46_2019_03_19_20_17_36_654772.py | 5d9a674902235b456d5687c756b4218f596434d0 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 212 | py | lista_palavras = []
palavra= input('Escreva uma palavra: ')
while palavra != 'fim':
lista_palvras.append(palavra)
palavra= input('Escreva outra palavra')
if palavra[0] = 'a':
print (palavra)
| [
"[email protected]"
] | |
4a091b6d0f3d4382e67c122acfa7fa4c68c9ce22 | 3cd5fe995670963e5e94918ba9f1796e9e7cb73f | /2.7/ipython/profile_default/ipython_config.py | a6a72a3e3e16cbe3fa84b7e6893e98aa0bbcbb58 | [] | no_license | GrahamDumpleton-abandoned/s2i-ipython-notebook | 1e0a7ff4c893b5b743bd250535cedf4b0ed0988f | 33246956d6ba32384f678d4803148ac964b0befe | refs/heads/master | 2021-05-31T04:53:03.679233 | 2016-04-02T09:28:01 | 2016-04-02T09:28:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,830 | py | # Configuration file for ipython.
#------------------------------------------------------------------------------
# Configurable configuration
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# InteractiveShellApp configuration
#------------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = traitlets.Undefined
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = traitlets.Undefined
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# Reraise exceptions encountered loading IPython extensions?
# c.InteractiveShellApp.reraise_ipython_extension_failures = False
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.InteractiveShellApp.exec_PYTHONSTARTUP = True
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.InteractiveShellApp.hide_initial_ns = True
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.InteractiveShellApp.gui = None
# lines of code to run at IPython startup.
# c.InteractiveShellApp.exec_lines = traitlets.Undefined
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = ''
#------------------------------------------------------------------------------
# SingletonConfigurable configuration
#------------------------------------------------------------------------------
# A configurable that only allows one instance.
#
# This class is for classes that should only have one instance of itself or
# *any* subclass. To create and retrieve such a class use the
# :meth:`SingletonConfigurable.instance` method.
#------------------------------------------------------------------------------
# Application configuration
#------------------------------------------------------------------------------
# This is an application.
# Set the log level by value or name.
# c.Application.log_level = 30
# The date format used by logging formatters for %(asctime)s
# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S'
# The Logging format template
# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s'
#------------------------------------------------------------------------------
# BaseIPythonApplication configuration
#------------------------------------------------------------------------------
# IPython: an enhanced interactive Python shell.
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.BaseIPythonApplication.ipython_dir = u''
# Whether to create profile dir if it doesn't exist
# c.BaseIPythonApplication.auto_create = False
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.BaseIPythonApplication.copy_config_files = False
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.BaseIPythonApplication.verbose_crash = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.BaseIPythonApplication.extra_config_file = u''
# The IPython profile to use.
# c.BaseIPythonApplication.profile = u'default'
# Whether to overwrite existing config files when copying
# c.BaseIPythonApplication.overwrite = False
#------------------------------------------------------------------------------
# TerminalIPythonApp configuration
#------------------------------------------------------------------------------
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.
# c.TerminalIPythonApp.force_interact = False
# Whether to display a banner upon starting IPython.
# c.TerminalIPythonApp.display_banner = True
#------------------------------------------------------------------------------
# InteractiveShell configuration
#------------------------------------------------------------------------------
# An enhanced, interactive shell for Python.
#
# c.InteractiveShell.ipython_dir = ''
# Set the color scheme (NoColor, Linux, or LightBG).
# c.InteractiveShell.colors = 'Linux'
#
# c.InteractiveShell.debug = False
# The name of the logfile to use.
# c.InteractiveShell.logfile = ''
#
# c.InteractiveShell.object_info_string_level = 0
#
# c.InteractiveShell.separate_out = ''
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.InteractiveShell.cache_size = 1000
#
# c.InteractiveShell.quiet = False
#
# c.InteractiveShell.xmode = 'Context'
# **Deprecated**
#
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). `deep_reload`
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.InteractiveShell.deep_reload = False
# Start logging to the default log file in overwrite mode. Use `logappend` to
# specify a log file to **append** logs to.
# c.InteractiveShell.logstart = False
# 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
# c.InteractiveShell.ast_node_interactivity = 'last_expr'
# If True, anything that would be passed to the pager will be displayed as
# regular output instead.
# c.InteractiveShell.display_page = False
#
# c.InteractiveShell.readline_remove_delims = '-/~'
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.InteractiveShell.color_info = True
# Enable magic commands to be called without the leading %.
# c.InteractiveShell.automagic = True
# The part of the banner to be printed before the profile
# c.InteractiveShell.banner1 = 'Python 2.7.10 (default, Nov 15 2015, 22:51:08) \nType "copyright", "credits" or "license" for more information.\n\nIPython 4.0.1 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
# The part of the banner to be printed after the profile
# c.InteractiveShell.banner2 = ''
# Save multi-line entries as one entry in readline history
# c.InteractiveShell.multiline_history = True
#
# c.InteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in_template
# c.InteractiveShell.prompt_in1 = 'In [\\#]: '
# Deprecated, use PromptManager.in2_template
# c.InteractiveShell.prompt_in2 = ' .\\D.: '
# Deprecated, use PromptManager.out_template
# c.InteractiveShell.prompt_out = 'Out[\\#]: '
# Deprecated, use PromptManager.justify
# c.InteractiveShell.prompts_pad_left = True
#
# c.InteractiveShell.separate_out2 = ''
# Don't call post-execute functions that have failed in the past.
# c.InteractiveShell.disable_failing_post_execute = False
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.InteractiveShell.ast_transformers = traitlets.Undefined
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.InteractiveShell.autocall = 0
# Show rewritten input, e.g. for autocall.
# c.InteractiveShell.show_rewritten_input = True
#
# c.InteractiveShell.history_length = 10000
# Autoindent IPython code entered interactively.
# c.InteractiveShell.autoindent = True
# The number of saved history entries to be loaded into the readline buffer at
# startup.
# c.InteractiveShell.history_load_length = 1000
#
# c.InteractiveShell.readline_parse_and_bind = traitlets.Undefined
#
# c.InteractiveShell.wildcards_case_sensitive = True
# Start logging to the given file in append mode. Use `logfile` to specify a log
# file to **overwrite** logs to.
# c.InteractiveShell.logappend = ''
# Automatically call the pdb debugger after every exception.
# c.InteractiveShell.pdb = False
#
# c.InteractiveShell.readline_use = True
#------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
#------------------------------------------------------------------------------
# Enable auto setting the terminal title.
# c.TerminalInteractiveShell.term_title = False
# Set the editor used by IPython (default to $EDITOR/vi/notepad).
# c.TerminalInteractiveShell.editor = 'vi'
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = False
# Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
# c.TerminalInteractiveShell.confirm_exit = True
# The shell program to be used for paging.
# c.TerminalInteractiveShell.pager = 'less'
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
#------------------------------------------------------------------------------
# PromptManager configuration
#------------------------------------------------------------------------------
# This is the primary interface for producing IPython's prompts.
# Continuation prompt.
# c.PromptManager.in2_template = ' .\\D.: '
# Output prompt. '\#' will be transformed to the prompt number
# c.PromptManager.out_template = 'Out[\\#]: '
# Input prompt. '\#' will be transformed to the prompt number
# c.PromptManager.in_template = 'In [\\#]: '
#
# c.PromptManager.color_scheme = 'Linux'
# If True (default), each prompt will be right-aligned with the preceding one.
# c.PromptManager.justify = True
#------------------------------------------------------------------------------
# HistoryAccessorBase configuration
#------------------------------------------------------------------------------
# An abstract class for History Accessors
#------------------------------------------------------------------------------
# HistoryAccessor configuration
#------------------------------------------------------------------------------
# Access the history database without adding to it.
#
# This is intended for use by standalone history tools. IPython shells use
# HistoryManager, below, which is a subclass of this.
# enable the SQLite history
#
# set enabled=False to disable the SQLite history, in which case there will be
# no stored history, no SQLite connection, and no background saving thread.
# This may be necessary in some threaded environments where IPython is embedded.
# c.HistoryAccessor.enabled = True
# Options for configuring the SQLite connection
#
# These options are passed as keyword args to sqlite3.connect when establishing
# database connections.
# c.HistoryAccessor.connection_options = traitlets.Undefined
# Path to file to use for SQLite history database.
#
# By default, IPython will put the history database in the IPython profile
# directory. If you would rather share one history among profiles, you can set
# this value in each, so that they are consistent.
#
# Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
# If you see IPython hanging, try setting this to something on a local disk,
# e.g::
#
# ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
# c.HistoryAccessor.hist_file = u''
#------------------------------------------------------------------------------
# HistoryManager configuration
#------------------------------------------------------------------------------
# A class to organize all history-related functionality in one place.
# Write to database every x commands (higher values save disk access & power).
# Values of 1 or less effectively disable caching.
# c.HistoryManager.db_cache_size = 0
# Should the history database include output? (default: no)
# c.HistoryManager.db_log_output = False
#------------------------------------------------------------------------------
# LoggingConfigurable configuration
#------------------------------------------------------------------------------
# A parent class for Configurables that log.
#
# Subclasses have a log trait, and the default behavior is to get the logger
# from the currently running Application.
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
#------------------------------------------------------------------------------
# BaseFormatter configuration
#------------------------------------------------------------------------------
# A base formatter class that is configurable.
#
# This formatter should usually be used as the base class of all formatters. It
# is a traited :class:`Configurable` class and includes an extensible API for
# users to determine how their objects are formatted. The following logic is
# used to find a function to format an given object.
#
# 1. The object is introspected to see if it has a method with the name
# :attr:`print_method`. If is does, that object is passed to that method
# for formatting.
# 2. If no print method is found, three internal dictionaries are consulted
# to find print method: :attr:`singleton_printers`, :attr:`type_printers`
# and :attr:`deferred_printers`.
#
# Users should use these dictionaries to register functions that will be used to
# compute the format data for their objects (if those objects don't have the
# special print methods). The easiest way of using these dictionaries is through
# the :meth:`for_type` and :meth:`for_type_by_name` methods.
#
# If no function/callable is found to compute the format data, ``None`` is
# returned and this format type is not used.
#
# c.BaseFormatter.singleton_printers = traitlets.Undefined
#
# c.BaseFormatter.type_printers = traitlets.Undefined
#
# c.BaseFormatter.deferred_printers = traitlets.Undefined
#
# c.BaseFormatter.enabled = True
#------------------------------------------------------------------------------
# PlainTextFormatter configuration
#------------------------------------------------------------------------------
# The default pretty-printer.
#
# This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
# If the object cannot be pretty printed, :func:`repr` is used. See the
# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
# printers. Here is a simple example::
#
# def dtype_pprinter(obj, p, cycle):
# if cycle:
# return p.text('dtype(...)')
# if hasattr(obj, 'fields'):
# if obj.fields is None:
# p.text(repr(obj))
# else:
# p.begin_group(7, 'dtype([')
# for i, field in enumerate(obj.descr):
# if i > 0:
# p.text(',')
# p.breakable()
# p.pretty(field)
# p.end_group(7, '])')
# Truncate large collections (lists, dicts, tuples, sets) to this size.
#
# Set to 0 to disable truncation.
# c.PlainTextFormatter.max_seq_length = 1000
#
# c.PlainTextFormatter.pprint = True
#
# c.PlainTextFormatter.max_width = 79
#
# c.PlainTextFormatter.verbose = False
#
# c.PlainTextFormatter.float_precision = ''
#
# c.PlainTextFormatter.newline = '\n'
#------------------------------------------------------------------------------
# Completer configuration
#------------------------------------------------------------------------------
# Activate greedy completion
#
# This will enable completion on elements of lists, results of function calls,
# etc., but can be unsafe because the code is actually evaluated on TAB.
# c.Completer.greedy = False
#------------------------------------------------------------------------------
# IPCompleter configuration
#------------------------------------------------------------------------------
# Extension of the completer class with IPython-specific features
# Instruct the completer to omit private method names
#
# Specifically, when completing on ``object.<tab>``.
#
# When 2 [default]: all names that start with '_' will be excluded.
#
# When 1: all 'magic' names (``__foo__``) will be excluded.
#
# When 0: nothing will be excluded.
# c.IPCompleter.omit__names = 2
# Whether to merge completion results into a single list
#
# If False, only the completion results from the first non-empty completer will
# be returned.
# c.IPCompleter.merge_completions = True
# Instruct the completer to use __all__ for the completion
#
# Specifically, when completing on ``object.<tab>``.
#
# When True: only those names in obj.__all__ will be included.
#
# When False [default]: the __all__ attribute is ignored
# c.IPCompleter.limit_to__all__ = False
#------------------------------------------------------------------------------
# Magics configuration
#------------------------------------------------------------------------------
# Base class for implementing magic functions.
#
# Shell functions which can be reached as %function_name. All magic functions
# should accept a string, which they can parse for their own needs. This can
# make some functions easier to type, eg `%cd ../` vs. `%cd("../")`
#
# Classes providing magic functions need to subclass this class, and they MUST:
#
# - Use the method decorators `@line_magic` and `@cell_magic` to decorate
# individual methods as magic functions, AND
#
# - Use the class decorator `@magics_class` to ensure that the magic
# methods are properly registered at the instance level upon instance
# initialization.
#
# See :mod:`magic_functions` for examples of actual implementation classes.
#------------------------------------------------------------------------------
# ScriptMagics configuration
#------------------------------------------------------------------------------
# Magics for talking to scripts
#
# This defines a base `%%script` cell magic for running a cell with a program in
# a subprocess, and registers a few top-level magics that call %%script with
# common interpreters.
# Extra script cell magics to define
#
# This generates simple wrappers of `%%script foo` as `%%foo`.
#
# If you want to add script magics that aren't on your path, specify them in
# script_paths
# c.ScriptMagics.script_magics = traitlets.Undefined
# Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
#
# Only necessary for items in script_magics where the default path will not find
# the right interpreter.
# c.ScriptMagics.script_paths = traitlets.Undefined
#------------------------------------------------------------------------------
# StoreMagics configuration
#------------------------------------------------------------------------------
# Lightweight persistence for python variables.
#
# Provides the %store magic.
# If True, any %store-d variables will be automatically restored when IPython
# starts.
# c.StoreMagics.autorestore = False
| [
"[email protected]"
] | |
d5c5c47e59e9a5bc56b001df5aa50bcd31f4ad79 | ab8117bc5b5040e5107fc59337fabc966cb062ba | /.history/twitter/engine_20200328094821.py | ee9b4bd9c70c405259d659015ba54699abc23b6b | [] | no_license | mirfarzam/DownloaderBro | 6019ab561c67a397135d0a1585d01d4c6f467df4 | 8e0a87dd1f768cfd22d24a7f8c223ce968e9ecb6 | refs/heads/master | 2022-04-16T15:31:38.551870 | 2020-04-15T17:36:26 | 2020-04-15T17:36:26 | 255,090,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,772 | py |
import tweepy
import datetime
import configparser
import time
# Read the Twitter credentials from the local config file and build an
# authenticated tweepy client; `api` is the handle the rest of the script uses.
config = configparser.ConfigParser()
config.read('credential.conf')

api_keys = config['API']
tokens = config['ACCESS']
consumer_key = api_keys["API_key"]
consumer_secret = api_keys["API_secret_key"]
# NOTE(review): "Access_token_secert" mirrors the key spelling expected in
# credential.conf -- do not "fix" the spelling without updating that file.
access_token = tokens["Access_token"]
access_token_secret = tokens["Access_token_secert"]

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
# api.verify_credentials()
def check_mentions(api, keywords, since_id):
    """Scan mentions newer than ``since_id`` and print the video variants of
    the tweet each reply points at.

    Parameters
    ----------
    api : tweepy.API
        Authenticated client used for the mentions timeline and status lookup.
    keywords : list[str]
        Currently unused; kept so existing callers keep working.
    since_id : int
        Only mentions with a tweet id greater than this are fetched.

    Returns
    -------
    int
        The highest tweet id seen, so the caller can resume from there.
    """
    new_since_id = since_id
    for tweet in tweepy.Cursor(api.mentions_timeline,
                               since_id=since_id).items():
        new_since_id = max(tweet.id, new_since_id)
        # Only replies matter: the media lives on the tweet being replied to.
        if tweet.in_reply_to_status_id is None:
            continue
        main = (api.statuses_lookup([tweet.in_reply_to_status_id], include_entities=True))[0]
        try:
            if 'media' in main.extended_entities:
                for video in main.extended_entities['media'][0]['video_info']['variants']:
                    try:
                        print(f"{video['bitrate']} and is {video['url']}")
                    except KeyError:
                        # Some variants may lack a 'bitrate'/'url' key.
                        print(f"Error in finding video in tweet id : {main.id}")
        except Exception:
            # The referenced tweet may have no extended_entities/video_info.
            print(f"Cannot get Tweet video and tweet id is : {main.id}")
    return new_since_id
# Start from tweet id 1 so the first poll scans the whole mentions timeline;
# afterwards only mentions newer than the last processed id are fetched.
since_id = 1
# Poll forever; check_mentions returns the highest tweet id it saw.
# NOTE(review): the ["help", "support"] keyword list is currently ignored
# by check_mentions.
while True:
    since_id = check_mentions(api, ["help", "support"], since_id)
time.sleep(5) | [
"[email protected]"
] | |
9855860eeee26a97c487f030f08eba7c367d287f | 4b4828d3c98d76d7bf38f90a015945acc408ddc5 | /PythonAI/Practice/DAY03/src/URL_Parsing_02.py | 0ef5fb5e164d8ee32d3ebfbbf7f7046114d8a105 | [] | no_license | Huh-jae-won/Study | cb5d32728e8dcded492e7edb054b500c91ec607c | e4dbc3fef69bb273b62b866fb5ef2a7250222f10 | refs/heads/main | 2023-06-20T13:06:26.691899 | 2021-07-11T07:43:41 | 2021-07-11T07:43:41 | 362,759,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | import sys
import urllib.request as req
import urllib.parse as parse
# Extract the region number from the command-line arguments.
if len(sys.argv) <= 1 :
    print("USAGE : download-forecast-argv <Region Number>")
    sys.exit()
regionNumber = sys.argv[1]
# URL-encode the region number as the `stnid` query parameter and build
# the final request URL for the KMA mid-term forecast RSS endpoint.
API = "http://www.kma.go.kr/weather/forecast/mid-term-rss3.jsp"
values = { 'stnid': regionNumber }
params = parse.urlencode(values)
url = API + "?" + params
print("url=", url) | [
"[email protected]"
] | |
66fb33b0030c894e919d60edb6cc528e910809b4 | 8cce0b5a4be09783016906a36192c52e9daa84aa | /equipment_engineering/meter_reader_4_pointer/main.py | d3851c2d25d932f0aa84ba3adc0c8e32b8fd3a3b | [
"MIT"
] | permissive | Castrol68/opencv-practice | fcc9495553d3a10fb045c396697391a5d2a06f36 | 83d76132d004ebbc96d99d34a0fd3fc37a044f9f | refs/heads/master | 2023-08-31T07:18:51.497902 | 2020-05-03T17:43:12 | 2020-05-03T17:43:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,288 | py | #!/usr/bin/env python3
# -*- coding=utf-8 -*-
import abc
from abc import ABC
import argparse
import os
class EquipmentRunner(ABC):
    """Chain-of-responsibility base class for the runtime mode handlers.

    Subclasses implement :meth:`run` and either handle the request or pass
    it on to the successor stored in :attr:`next`.
    """

    def __init__(self):
        # Next handler in the chain; set through the `next` property.
        self.__successor = None

    @property
    def next(self):
        """The successor handler; terminates the process when unset.

        Named ``next`` so that the ``handler.next = successor`` wiring used
        by the call sites actually goes through this property -- the
        original exposed it as ``mode``, which nothing ever used, so the
        plain-attribute assignments bypassed the accessor entirely.
        """
        if not self.__successor:
            # No successor configured: keep the original hard-exit behavior.
            exit(404)
        return self.__successor

    @next.setter
    def next(self, successor):
        self.__successor = successor

    @abc.abstractmethod
    def run(self, request): ...
class RunSettingMode(EquipmentRunner):
    """First chain link: interactive dial-calibration ("set") mode."""

    def run(self, request):
        """Run calibration when request["set"] is True, else delegate."""
        if request["set"] is not True:
            # Not our mode -- hand the request to the next link.
            self.next.run(request)
            return
        try:
            log("设置模式 ...")
            # Prompt for the dial's angle range, value range and unit, in order.
            angle_lo = input_number_check("表盘最小值对应的刻度")
            angle_hi = input_number_check("表盘最大值对应的刻度")
            value_lo = input_number_check("表盘最小值")
            value_hi = input_number_check("表盘最大值")
            unit = input("仪表单位: ")
            set_detector_argument(angle_lo, angle_hi, value_lo, value_hi, unit)
        except Exception as err:
            log(err, ERROR)
class RunDebugMode(EquipmentRunner):
    """Chain link: runs the debug workflow when request["debug"] is True."""

    def run(self, request):
        """Handle the request here or pass it to the successor."""
        if request["debug"] is not True:
            self.next.run(request)
            return
        try:
            log("调试模式 ...")
            start_with_debug()
        except Exception as err:
            log(err, ERROR)
class RunVisionMode(EquipmentRunner):
    """Chain tail: windowed ("vision") mode when request["windows"] is True."""

    def run(self, request):
        """Handle the request; as the last link, ignore anything else."""
        if request["windows"] is not True:
            # End of the chain: no successor to delegate to.
            return
        try:
            log("可视化模式 ...")
            start_with_vision()
        except Exception as err:
            log(err, ERROR)
class RunBackendMode(EquipmentRunner):
    """Chain link: headless ("backend") mode when request["backend"] is True."""

    def run(self, request):
        """Start backend mode here or pass the request along the chain."""
        if request["backend"] is not True:
            self.next.run(request)
            return
        try:
            log("后台模式 ...")
            start_with_backend()
        except Exception as err:
            log(err, ERROR)
def fork(request=None):
    """Wire up the mode-handler chain and dispatch a request through it.

    Parameters
    ----------
    request : dict | None
        Parsed CLI flags (``set``/``debug``/``backend``/``windows``). When
        omitted, falls back to the module-level ``args`` produced by the
        ``__main__`` guard, preserving the original no-argument call style.
    """
    if request is None:
        request = args  # global populated by the __main__ guard

    setting_mode = RunSettingMode()
    debug_mode = RunDebugMode()
    vision_mode = RunVisionMode()
    backend_mode = RunBackendMode()

    # Chain of responsibility: set -> debug -> backend -> vision (tail).
    setting_mode.next = debug_mode
    debug_mode.next = backend_mode
    backend_mode.next = vision_mode

    # NOTE(review): dead commented-out daemonization code (os.chdir/os.setsid/
    # os.umask wrapped in try/except OSError) removed; reintroduce via a
    # proper daemon library if backgrounding is ever needed.
    setting_mode.run(request)
if __name__ == "__main__":
    # reader_4_pointer is only imported when this file runs as a script; the
    # names become module globals, which the handler classes rely on at call
    # time.
    from reader_4_pointer import start_with_vision, start_with_debug, set_detector_argument, start_with_backend
    from reader_4_pointer import version, log, ERROR, input_number_check

    version()

    # NOTE(review): argparse's type=bool treats any non-empty string as True
    # (e.g. "-d False" still yields True); consider action="store_true".
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", type=bool, help=" debug模式", default=False)
    parser.add_argument("-s", "--set", type=bool, help="设置模式", default=False)
    parser.add_argument("-w", "--windows", type=bool, help="可视化模式", default=True)
    parser.add_argument("-b", "--backend", type=bool, help="后台模式", default=False)
    parser.add_argument("-p", "--path", help="日志存放位置")
    args = vars(parser.parse_args())

    fork()
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.