Dataset columns (type and observed range):

| Column | Type (observed range) |
|---|---|
| blob_id | string, length 40 |
| directory_id | string, length 40 |
| path | string, length 3 to 288 |
| content_id | string, length 40 |
| detected_licenses | list, length 0 to 112 |
| license_type | string, 2 classes |
| repo_name | string, length 5 to 115 |
| snapshot_id | string, length 40 |
| revision_id | string, length 40 |
| branch_name | string, 684 classes |
| visit_date | timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64, 4.92k to 681M, nullable |
| star_events_count | int64, 0 to 209k |
| fork_events_count | int64, 0 to 110k |
| gha_license_id | string, 22 classes |
| gha_event_created_at | timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable |
| gha_language | string, 147 classes |
| src_encoding | string, 25 classes |
| language | string, 1 class |
| is_vendor | bool |
| is_generated | bool |
| length_bytes | int64, 128 to 12.7k |
| extension | string, 142 classes |
| content | string, length 128 to 8.19k |
| authors | list, length 1 |
| author_id | string, length 1 to 132 |
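Each record below lists these fields in the column order above, separated by `|`, with the `content` field holding the raw source file. As a minimal, hypothetical sketch of how rows with this schema might be filtered once materialized into pandas (the file name `code_rows.parquet` is only a placeholder, not part of this dataset), one could write:

```python
import pandas as pd

# Placeholder export of the table above; substitute the real file.
df = pd.read_parquet("code_rows.parquet")

# Keep permissively licensed Python files that are neither vendored nor generated.
mask = (
    (df["license_type"] == "permissive")
    & (df["language"] == "Python")
    & ~df["is_vendor"]
    & ~df["is_generated"]
)
subset = df.loc[mask, ["repo_name", "path", "length_bytes", "star_events_count"]]

# Largest files from the most-starred repositories.
print(subset.sort_values(["star_events_count", "length_bytes"], ascending=False).head(10))
```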
13eda0de95f9467954a2064cc95a5abdd0b0ec64
|
9e831c0defd126445772cfcee38b57bfd8c893ca
|
/code/questions/221~230_/224.py
|
d90b7f67cdd7fc5614ac658666790161c4a04e2c
|
[] |
no_license
|
m358807551/Leetcode
|
66a61abef5dde72250d032b7ea06feb3f2931d54
|
be3f037f6e2057a8f2acf9e820bbbbc21d7aa1d2
|
refs/heads/main
| 2023-04-22T15:13:43.771145 | 2021-05-07T06:47:13 | 2021-05-07T06:47:13 | 321,204,181 | 4 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,174 |
py
|
"""
https://leetcode-cn.com/problems/basic-calculator
"""
import re
class Solution(object):
def calculate(self, s):
"""
:type s: str
:rtype: int
"""
s = re.sub('[^0-9]', lambda x: '!{}!'.format(x.group()), s)
s = [x for x in s.split('!') if x.strip()]
queue, stack = [], []
for x in s:
if x == '(':
stack.append(x)
elif x in '+-':
while stack and stack[-1] in '+-':
queue.append(stack.pop(-1))
stack.append(x)
elif x == ')':
while stack[-1] != '(':
queue.append(stack.pop(-1))
stack.pop(-1)
else:
queue.append(int(x))
while stack:
queue.append(stack.pop(-1))
stack = []
for x in queue:
if x == '+':
stack.append(stack.pop(-2) + stack.pop(-1))
elif x == '-':
stack.append(stack.pop(-2) - stack.pop(-1))
else:
stack.append(x)
return stack[0]
print(
Solution().calculate(
'(71)-(0)+(14)'
)
)
|
[
"[email protected]"
] | |
494853650bc48daabecbdd20ffd1824486452123
|
743d1918178e08d4557abed3a375c583130a0e06
|
/src/ToCPSC/getDailyCount.py
|
dc5994f9a0237db0f7b31ecd26d5406d7d555d78
|
[] |
no_license
|
aquablue1/dns_probe
|
2a027c04e0928ec818a82c5bf04f485a883cfcb3
|
edd4dff9bea04092ac76c17c6e77fab63f9f188f
|
refs/heads/master
| 2020-03-25T19:40:07.346354 | 2018-11-17T05:31:43 | 2018-11-17T05:31:43 | 144,094,014 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,405 |
py
|
"""
" Get the daily count of src of DNS sessions that sent to cpsc ns.
" By Zhengping on 2018-08-14
"""
from src.GeneralAnalysis.DailySrcCount import dailySrcCount
from src.GeneralAnalysis.DailyDstCount import dailyDstCount
from src.GeneralAnalysis.DailyQueryCount import dailyNameCount
from src.GeneralAnalysis.DailyTypeCount import dailyTypeCount
from src.GeneralAnalysis.DailySrcPortCount import dailySrcPortCount
def getDailySrcCount(date, foldername):
cpscSrcCounter = dailySrcCount(date, foldername)
cpscSrcCounter.getDailySrcCount()
def getDailyDstCount(date, foldername):
cpscDstCounter = dailyDstCount(date, foldername)
cpscDstCounter.getDailyDstCount()
def getDailyNameCount(date, foldername):
cpscNameCounter = dailyNameCount(date, foldername)
cpscNameCounter.getDailyNameCount()
def getDailyTypeCount(date, foldername):
cpscTypeCounter = dailyTypeCount(date, foldername)
cpscTypeCounter.getDailyTypeCount()
def getDailySrcPortCount(date, foldername):
cpscSrcPortCounter = dailySrcPortCount(date, foldername)
cpscSrcPortCounter.getDailySrcPortCount()
if __name__ == '__main__':
date = "2018-09-19"
foldername = "../../result/ToCPSC/"
getDailySrcCount(date, foldername)
getDailyDstCount(date, foldername)
getDailyNameCount(date, foldername)
getDailyTypeCount(date, foldername)
getDailySrcPortCount(date, foldername)
|
[
"[email protected]"
] | |
12dad381805512acbfb45f4df790488bcc3335bf
|
0869d7edac80e8aebe951682a2cc311a083eade3
|
/Python/tdw/object_data/composite_object/composite_object_dynamic.py
|
e9057a039e69ee0a014cfdd06af0f7a5dfabbdb8
|
[
"BSD-2-Clause"
] |
permissive
|
threedworld-mit/tdw
|
7d5b4453832647733ff91ad7a7ce7ec2320454c1
|
9df96fba455b327bb360d8dd5886d8754046c690
|
refs/heads/master
| 2023-09-01T11:45:28.132298 | 2023-08-31T16:13:30 | 2023-08-31T16:13:30 | 245,492,977 | 427 | 75 |
BSD-2-Clause
| 2023-09-14T17:36:12 | 2020-03-06T18:42:09 |
Python
|
UTF-8
|
Python
| false | false | 1,537 |
py
|
from typing import Dict
from tdw.object_data.composite_object.sub_object.light_dynamic import LightDynamic
from tdw.object_data.composite_object.sub_object.hinge_dynamic import HingeDynamic
class CompositeObjectDynamic:
"""
Dynamic data for a composite object and its sub-objects.
Note that not all sub-objects will be in this output data because some of them don't have specialized dynamic properties.
For example, non-machines have dynamic positions, velocities, etc. but these can be found in `Transforms` and `Rigidbodies` data, respectively.
"""
def __init__(self, object_id: int, hinges: Dict[int, HingeDynamic], lights: Dict[int, LightDynamic]):
"""
:param object_id: The ID of the root object.
:param hinges: A dictionary of [`HingeDynamic`](sub_object/hinge_dynamic.md) sub-objects, which includes all hinges, springs, and motors.
:param lights: A dictionary of [`LightDynamic`](sub_object/light_dynamic.md) sub-objects such as lamp lightbulbs.
"""
""":field
The ID of the root object.
"""
self.object_id = object_id
""":field
A dictionary of [`HingeDynamic`](sub_object/hinge_dynamic.md) sub-objects, which includes all hinges, springs, and motors.
"""
self.hinges: Dict[int, HingeDynamic] = hinges
""":field
A dictionary of [`LightDynamic`](sub_object/light_dynamic.md) sub-objects such as lamp lightbulbs.
"""
self.lights: Dict[int, LightDynamic] = lights
|
[
"[email protected]"
] | |
9d2e2e509a635d8d7698a89d4e4b939dbc77cb36
|
7591c267059486c943d68e713bd3ff338900d2c5
|
/settings.py
|
5a2d36ae53de4e39a3fb123b6c4885d77b2de18b
|
[] |
no_license
|
westinedu/quanenta
|
00fe419da1e34ddd9001ffeb9848639d5c58d265
|
a59c75458b6eff186637ab8e0e36b6f68a1a99c9
|
refs/heads/master
| 2021-01-10T20:35:25.196907 | 2012-06-07T06:01:33 | 2012-06-07T06:01:33 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 846 |
py
|
try:
from djangoappengine.settings_base import *
has_djangoappengine = True
except ImportError:
has_djangoappengine = False
DEBUG = True
TEMPLATE_DEBUG = DEBUG
import os
SECRET_KEY = '=r-$b*8hglm+858&9t043hlm6-&6-3d3vfc4((7yd0dbrakhvi'
INSTALLED_APPS = (
'djangotoolbox',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'core',
)
if has_djangoappengine:
INSTALLED_APPS = ('djangoappengine',) + INSTALLED_APPS
TEST_RUNNER = 'djangotoolbox.test.CapturingTestSuiteRunner'
ADMIN_MEDIA_PREFIX = '/media/admin/'
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), 'media')
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), 'templates'),)
LOGIN_URL = '/login/'
LOGOUT_URL = '/logout/'
LOGIN_REDIRECT_URL = '/'
ROOT_URLCONF = 'urls'
|
[
"[email protected]"
] | |
f4a65671dde5f682aab0a747b66e2ba2cdc09f88
|
c9d4d4c78703d009da11999e4e59b6a168a454a2
|
/examples/Machine Learning In Action/reducer.py
|
67e8edacbbd3347055d42c21781c0208b8451281
|
[
"MIT"
] |
permissive
|
AkiraKane/Python
|
23df49d7f7ae0f375e0b4ccfe4e1b6a077b1a52b
|
12e2dcb9a61e9ab0fc5706e4a902c48e6aeada30
|
refs/heads/master
| 2020-12-11T07:20:01.524438 | 2015-11-07T12:42:22 | 2015-11-07T12:42:22 | 47,440,128 | 1 | 0 | null | 2015-12-05T03:15:52 | 2015-12-05T03:15:51 | null |
UTF-8
|
Python
| false | false | 1,139 |
py
|
'''
-------------------------------------------------------------------------
Book: Machine Learning In Action
# Lesson: MapReduce - reducer
# Author: Kelly Chan
# Date: Feb 3 2014
-------------------------------------------------------------------------
'''
import sys
from numpy import mat, mean, power
def dataLoad(dataFile):
for line in dataFile:
yield line.rstrip()
# creating a list of lines from dataFile
data = dataLoad(sys.stdin)
# splitting data lines into separate items and storing them in a list of lists
mapperOut = [line.split('\t') for line in data]
# accumulating total number of samples, overall sum and overall sum squared
accumulateN = 0.0
accumulateSum = 0.0
accumulateSumSquared = 0.0
for instance in mapperOut:
thisN = float(instance[0])
accumulateN += thisN
accumulateSum += thisN * float(instance[1])
accumulateSumSquared += thisN * float(instance[2])
# calculating means
mean = accumulateSum / accumulateN
meanSq = accumulateSumSquared / accumulateN
# printing size, mean, mean squared
print "%d\t%f\t%f" % (accumulateN, mean, meanSq)
print >> sys.stderr, "report: still alive"
|
[
"[email protected]"
] | |
4e55efc281a0895900555be7d28d0cb370371a1e
|
21c098079d2724ffbd3f6cb01c7919c1f59f7875
|
/src/aioquic/about.py
|
82cd8f33d65dcbf2397d12c83c651cc899e91556
|
[
"BSD-3-Clause"
] |
permissive
|
MattyHsueh/aioquic
|
f9c54717b3acdb84bc8f963a5e8bd5f969ebeb4b
|
2163f2d0940edd2a91a3773fb7cb061031fe87fa
|
refs/heads/master
| 2022-09-01T12:33:23.987233 | 2020-05-29T05:57:37 | 2020-05-29T05:57:37 | 263,517,328 | 1 | 0 |
BSD-3-Clause
| 2020-05-13T03:39:45 | 2020-05-13T03:39:44 | null |
UTF-8
|
Python
| false | false | 227 |
py
|
__author__ = "Jeremy Lainé"
__email__ = "[email protected]"
__license__ = "BSD"
__summary__ = "An implementation of QUIC and HTTP/3"
__title__ = "aioquic"
__uri__ = "https://github.com/aiortc/aioquic"
__version__ = "0.8.7"
|
[
"[email protected]"
] | |
ad83730199dd2c78f435dea6eee07e6fd00b8033
|
c8adae98cd1c2614c1bacc59ecf52fb7e45ce481
|
/0x1F-pascal_triangle/0-pascal_triangle.py
|
92a551bac2bdfd2d51644f8f5a6ef858cc8ca0fc
|
[] |
no_license
|
OctopusHugz/holbertonschool-interview
|
a75f1a9fe72227e46db1005796cc98fa10f1fd2f
|
546f659ca128118438200ae1515096407bb438de
|
refs/heads/master
| 2023-07-15T07:34:45.713801 | 2021-08-25T20:29:50 | 2021-08-25T20:29:50 | 319,363,351 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 510 |
py
|
#!/usr/bin/python3
""" This module implements a pascal triangle algorithm """
def pascal_triangle(n):
""" Returns a list of lists of integers representing the Pascal's triangle
of n """
triangle = []
for num in range(n):
row = []
for val in range(num + 1):
if val == 0 or val == num:
row.append(1)
continue
row.append(triangle[num - 1][val - 1] + triangle[num - 1][val])
triangle.append(row)
return triangle
|
[
"[email protected]"
] | |
7da12be20b0cf7f6dfdaf4b6ed1e6a7b1fb4459c
|
62e58c051128baef9452e7e0eb0b5a83367add26
|
/x12/6030/195006030.py
|
daab00096a42ba5ffccaf8a09c37c82a9d3352cd
|
[] |
no_license
|
dougvanhorn/bots-grammars
|
2eb6c0a6b5231c14a6faf194b932aa614809076c
|
09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d
|
refs/heads/master
| 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null |
UTF-8
|
Python
| false | false | 2,476 |
py
|
from bots.botsconfig import *
from records006030 import recorddefs
syntax = {
'version': '00603',
'functionalgroup': 'LA',
}
structure = [
{ID: 'ST', MIN: 1, MAX: 1, LEVEL: [
{ID: 'BGN', MIN: 1, MAX: 1},
{ID: 'DTM', MIN: 0, MAX: 99999},
{ID: 'QTY', MIN: 0, MAX: 99999},
{ID: 'PWK', MIN: 0, MAX: 99999},
{ID: 'CRC', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'NTE', MIN: 0, MAX: 99999},
]},
{ID: 'AMT', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'MSG', MIN: 0, MAX: 99999},
]},
{ID: 'N1', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'N2', MIN: 0, MAX: 3},
{ID: 'N3', MIN: 0, MAX: 2},
{ID: 'N4', MIN: 0, MAX: 1},
{ID: 'PER', MIN: 0, MAX: 5},
{ID: 'QTY', MIN: 0, MAX: 99999},
{ID: 'MEA', MIN: 0, MAX: 99999},
{ID: 'NTE', MIN: 0, MAX: 99999},
{ID: 'REF', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 99999},
{ID: 'MSG', MIN: 0, MAX: 99999},
]},
{ID: 'CRC', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'REF', MIN: 0, MAX: 99999},
]},
{ID: 'LM', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'LQ', MIN: 1, MAX: 99999},
{ID: 'QTY', MIN: 0, MAX: 99999},
{ID: 'MSG', MIN: 0, MAX: 99999},
]},
]},
{ID: 'PO1', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'QTY', MIN: 0, MAX: 99999},
{ID: 'MEA', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'LIE', MIN: 0, MAX: 99999},
]},
{ID: 'REF', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'LIE', MIN: 0, MAX: 99999},
]},
{ID: 'LM', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'LQ', MIN: 1, MAX: 99999},
{ID: 'MEA', MIN: 0, MAX: 99999},
{ID: 'MSG', MIN: 0, MAX: 99999},
]},
{ID: 'N1', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'N2', MIN: 0, MAX: 3},
{ID: 'N3', MIN: 0, MAX: 2},
{ID: 'N4', MIN: 0, MAX: 1},
]},
{ID: 'CRC', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'REF', MIN: 0, MAX: 99999},
{ID: 'DTM', MIN: 0, MAX: 99999},
{ID: 'LM', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'LQ', MIN: 1, MAX: 99999},
]},
{ID: 'N1', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'N2', MIN: 0, MAX: 3},
{ID: 'N3', MIN: 0, MAX: 2},
{ID: 'N4', MIN: 0, MAX: 1},
]},
]},
]},
{ID: 'SE', MIN: 1, MAX: 1},
]}
]
|
[
"[email protected]"
] | |
788d714d928080162d37b7236fd6f219b53d2324
|
ad59072be6c46c98782d8c04df97023a1cc6161c
|
/DL12-10-transfer-add-category.py
|
356bc4124d6c2eb230a7631f4c8b09aa920c17f7
|
[] |
no_license
|
cyrilvincent/ML
|
67c6bda2016bc70168bd197fe58eabc8dc3bfb00
|
42d11fad9b8b6ea3aba3d4173cb3bbdf7bbd638f
|
refs/heads/master
| 2023-05-25T00:36:49.561860 | 2023-05-24T14:14:04 | 2023-05-24T14:14:04 | 191,420,219 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,240 |
py
|
import tensorflow.keras as keras
model = keras.models.load_model('data/dogsvscats/vgg16model-small.h5')
newModel = keras.models.Sequential()
for layer in model.layers[:-1]:
newModel.add(layer)
layer.trainable = False
newModel.add(keras.layers.Dense(3, name="dense3"))
newModel.add(keras.layers.Activation('softmax'))
newModel.summary()
newModel.compile(loss='categorical_crossentropy',
optimizer="rmsprop",
metrics=['accuracy'])
trainset = keras.preprocessing.image.ImageDataGenerator(rescale=1. / 255, validation_split=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True)
batchSize = 16
trainGenerator = trainset.flow_from_directory(
'data/dogsvscats/small/train',
target_size=(224, 224),
subset='training',
class_mode="categorical",
batch_size=batchSize)
validationGenerator = trainset.flow_from_directory(
'data/dogsvscats/small/train',
target_size=(224, 224),
class_mode="categorical",
subset = 'validation',
batch_size=batchSize)
newModel.fit(
trainGenerator,
epochs=30,
validation_data=validationGenerator,
)
newModel.save('data/dogsvscats/vgg16model-cows.h5')
|
[
"[email protected]"
] | |
72bbbcb0b7231deff3c7aea39bd3d33ec372d704
|
91d1a6968b90d9d461e9a2ece12b465486e3ccc2
|
/robomaker_write_f/world-export-job_create.py
|
9d9bc5bc86ab9a791055102b84c3526c3069944b
|
[] |
no_license
|
lxtxl/aws_cli
|
c31fc994c9a4296d6bac851e680d5adbf7e93481
|
aaf35df1b7509abf5601d3f09ff1fece482facda
|
refs/heads/master
| 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 800 |
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
"""
cancel-world-export-job : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/robomaker/cancel-world-export-job.html
describe-world-export-job : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/robomaker/describe-world-export-job.html
list-world-export-jobs : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/robomaker/list-world-export-jobs.html
"""
write_parameter("robomaker", "create-world-export-job")
|
[
"[email protected]"
] | |
b586baa8d46a591e777d5a5235059c44e5991d32
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2773/60662/287216.py
|
fd60f99029185c3b352f686730065a2f640c4b78
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,176 |
py
|
matrix = []
for i in range(0, 4):
s = input()
if 0 < i < 4:
temp = list(map(int, s.strip(' [],').split(',')))
matrix.append(temp)
a = len(matrix)
dic = {}
nums_max = 1
if a == 0:
nums_max = 0
else:
b = len(matrix[0])
for i in range(a):
for j in range(b):
dic[(i, j)] = matrix[i][j]
v = dic.keys()
nums1 = [[1 for i in range(b)] for j in range(a)]
dic = sorted(dic.items(), key=lambda x: x[1])
for k in dic:
i = k[0][0]
j = k[0][1]
if (i + 1, j) in v and matrix[i + 1][j] < matrix[i][j] and nums1[i][j] < nums1[i + 1][j] + 1:
nums1[i][j] = nums1[i + 1][j] + 1
if (i, j + 1) in v and matrix[i][j + 1] < matrix[i][j] and nums1[i][j] < nums1[i][j + 1] + 1:
nums1[i][j] = nums1[i][j + 1] + 1
if (i - 1, j) in v and matrix[i - 1][j] < matrix[i][j] and nums1[i][j] < nums1[i - 1][j] + 1:
nums1[i][j] = nums1[i - 1][j] + 1
if (i, j - 1) in v and matrix[i][j - 1] < matrix[i][j] and nums1[i][j] < nums1[i][j - 1] + 1:
nums1[i][j] = nums1[i][j - 1] + 1
nums_max = max(nums_max, nums1[i][j])
print(nums_max)
|
[
"[email protected]"
] | |
d149f7fc4838a57eb5d387bf9dd33399983d202b
|
e40a882c3717b3982db0fbc7ae42430746636ff0
|
/dvalib/yolo/test_yolo.py
|
e2274640d2ca3e97769741d4a88bb08caeb74ff6
|
[] |
no_license
|
longchuan1985/DeepVideoAnalytics
|
7dbe4bb9aab3ce15bc5bbcffcd3dbcea7157bea4
|
4264239ad6f9b23e450f90671c0120511c971678
|
refs/heads/master
| 2021-01-23T04:14:12.516312 | 2017-05-31T07:48:01 | 2017-05-31T07:48:01 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,046 |
py
|
#! /usr/bin/env python
"""Run a YOLO_v2 style detection model on test images."""
import argparse
import colorsys
import imghdr
import os
import random
import numpy as np
from keras import backend as K
from keras.models import load_model
from PIL import Image
from yad2k.models.keras_yolo import yolo_eval, yolo_head
def _main():
    args = {
        'model_path': 'model_data/yolo.h5',  # assumed default; the original dict omitted this key, which is read below
        'anchors_path': 'model_data/yolo_anchors.txt',
        'classes_path': 'model_data/coco_classes.txt',
        'test_path': 'images',
        'output_path': 'images/out',
        'score_threshold': 0.3,
        'iou_threshold': 0.5,
    }
model_path = os.path.expanduser(args['model_path'])
anchors_path = os.path.expanduser(args['anchors_path'])
classes_path = os.path.expanduser(args['classes_path'])
test_path = os.path.expanduser(args['test_path'])
output_path = os.path.expanduser(args['output_path'])
if not os.path.exists(output_path):
print('Creating output path {}'.format(output_path))
os.mkdir(output_path)
sess = K.get_session()
with open(classes_path) as f:
class_names = f.readlines()
class_names = [c.strip() for c in class_names]
with open(anchors_path) as f:
anchors = f.readline()
anchors = [float(x) for x in anchors.split(',')]
anchors = np.array(anchors).reshape(-1, 2)
yolo_model = load_model(model_path)
num_classes = len(class_names)
num_anchors = len(anchors)
# TODO: Assumes dim ordering is channel last
model_output_channels = yolo_model.layers[-1].output_shape[-1]
assert model_output_channels == num_anchors * (num_classes + 5), \
'Mismatch between model and given anchor and class sizes. ' \
'Specify matching anchors and classes with --anchors_path and ' \
'--classes_path flags.'
print('{} model, anchors, and classes loaded.'.format(model_path))
# Check if model is fully convolutional, assuming channel last order.
model_image_size = yolo_model.layers[0].input_shape[1:3]
is_fixed_size = model_image_size != (None, None)
hsv_tuples = [(x / len(class_names), 1., 1.)for x in range(len(class_names))]
colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
colors = list(map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),colors))
random.seed(10101) # Fixed seed for consistent colors across runs.
random.shuffle(colors) # Shuffle colors to decorrelate adjacent classes.
random.seed(None) # Reset seed to default.
yolo_outputs = yolo_head(yolo_model.output, anchors, len(class_names))
input_image_shape = K.placeholder(shape=(2, ))
boxes, scores, classes = yolo_eval(yolo_outputs,input_image_shape,score_threshold=args['score_threshold'],iou_threshold=args['iou_threshold'])
for image_file in os.listdir(test_path):
try:
image_type = imghdr.what(os.path.join(test_path, image_file))
if not image_type:
continue
except:
continue
image = Image.open(os.path.join(test_path, image_file))
if is_fixed_size: # TODO: When resizing we can use minibatch input.
resized_image = image.resize(
tuple(reversed(model_image_size)), Image.BICUBIC)
image_data = np.array(resized_image, dtype='float32')
else:
# Due to skip connection + max pooling in YOLO_v2, inputs must have
# width and height as multiples of 32.
new_image_size = (image.width - (image.width % 32),
image.height - (image.height % 32))
resized_image = image.resize(new_image_size, Image.BICUBIC)
image_data = np.array(resized_image, dtype='float32')
print(image_data.shape)
image_data /= 255.
image_data = np.expand_dims(image_data, 0) # Add batch dimension.
out_boxes, out_scores, out_classes = sess.run(
[boxes, scores, classes],
feed_dict={
yolo_model.input: image_data,
input_image_shape: [image.size[1], image.size[0]],
K.learning_phase(): 0
})
print('Found {} boxes for {}'.format(len(out_boxes), image_file))
thickness = (image.size[0] + image.size[1]) // 300
for i, c in reversed(list(enumerate(out_classes))):
predicted_class = class_names[c]
box = out_boxes[i]
score = out_scores[i]
label = '{} {:.2f}'.format(predicted_class, score)
top, left, bottom, right = box
top = max(0, np.floor(top + 0.5).astype('int32'))
left = max(0, np.floor(left + 0.5).astype('int32'))
bottom = min(image.size[1], np.floor(bottom + 0.5).astype('int32'))
right = min(image.size[0], np.floor(right + 0.5).astype('int32'))
print(label, (left, top), (right, bottom))
image.save(os.path.join(output_path, image_file), quality=90)
sess.close()
if __name__ == '__main__':
    _main()
|
[
"[email protected]"
] | |
880f93512f34e2e80dc747776e6ed3b406dd4715
|
0d14a4be28107b9487c16fde5865f661c34f3595
|
/examples/common_features/species_2.py
|
f1c19320ad83c6dcd28d3f54c607a253cde10f4a
|
[
"MIT"
] |
permissive
|
lamyj/sycomore
|
729780544e5ac3940e47493c205797556c7f81b8
|
d0335f1b8b26facb2a0581de6c19e6e999517599
|
refs/heads/master
| 2023-09-01T18:02:56.062085 | 2023-08-06T16:06:53 | 2023-08-06T16:06:53 | 199,385,133 | 22 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 445 |
py
|
import sycomore
from sycomore.units import *
species = sycomore.Species(1000*ms, 100*ms)
# Assign the diffusion coefficient as a scalar
species.D = 3*um**2/s
# The diffusion coefficient is stored on the diagonal of the tensor
print(species.D[0,0])
# Assign the diffusion coefficient as a tensor
species.D = [
[3*um**2/s, 0*um**2/s, 0*um**2/s],
[0*um**2/s, 2*um**2/s, 0*um**2/s],
[0*um**2/s, 0*um**2/s, 1*um**2/s]]
print(species.D)
|
[
"[email protected]"
] | |
be591fb5e2d1805a2ef27f18908ad61e4fb28266
|
6dfba71133c5b93cef5b944dcfb50d6eebceca26
|
/src/acsf_feat.py
|
604fa9e5091700a7964e38ab3b336f034d10358e
|
[] |
no_license
|
matsuken92/molecular
|
67b223be7be604cdf907dcd66b9948faf9119433
|
759a697070efaac681aff89f645ff2a6a79f0b78
|
refs/heads/master
| 2022-02-18T01:41:01.674199 | 2019-08-29T04:01:25 | 2019-08-29T04:01:25 | 190,421,154 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,377 |
py
|
# Basic libraries
import pandas as pd
import pandas.io.sql as psql
import numpy as np
import numpy.random as rd
import gc
import multiprocessing as mp
import os
import sys
import pickle
from collections import defaultdict
from glob import glob
import math
from datetime import datetime as dt
from pathlib import Path
import scipy.stats as st
import re
import shutil
from tqdm import tqdm_notebook as tqdm
import datetime
from dscribe.descriptors import ACSF
from dscribe.core.system import System
sys.path.append('..')
from lib.line_notif import send_message
from lib.utils import matrics_rotate
from lib.utils import reduce_mem_usage, current_time, unpickle, to_pickle
SYMBOL=['H', 'C', 'N', 'O', 'F']
ACSF_GENERATOR = ACSF(
species = SYMBOL,
rcut = 6.0,
g2_params=[[1, 1], [1, 2], [1, 3]],
g4_params=[[1, 1, 1], [1, 2, 1], [1, 1, -1], [1, 2, -1]],
)
def get_scsf(data):
ret_list = []
for molecule_name in data["mol_names"]:
df = gb_structure.get_group(molecule_name)
df = df.sort_values(['atom_index'], ascending=True)
a = df.atom.values.tolist()
xyz = df[['x','y','z']].values
atom = System(symbols=a, positions=xyz)
acsf = ACSF_GENERATOR.create(atom)
acsf_df = pd.DataFrame(acsf)
acsf_df.columns = [f"acsf_{c}" for c in range(acsf_df.shape[1])]
acsf_df = pd.concat([df[["molecule_name", "atom_index"]].reset_index(drop=True),
acsf_df.reset_index(drop=True)], axis=1)
ret_list.append(acsf_df)
return pd.concat(ret_list, axis=0)
print("loading structures")
structures = pd.read_csv("../input/structures.csv")
molecule_names = np.sort(structures.molecule_name.unique())
gb_structure = structures.groupby("molecule_name")
n_split = mp.cpu_count()
unit = np.ceil(len(molecule_names) / n_split).astype(int)
indexer = [[unit * (i), unit * (i + 1)] for i in range(n_split)]
split_mol_names = []
for idx in indexer:
split_mol_names.append(molecule_names[idx[0]:idx[1]])
mp_data = [{"mol_names": m} for m in split_mol_names]
print("start multiprocessing")
num_workers = mp.cpu_count()
with mp.Pool(num_workers) as executor:
features_chunk = executor.map(get_scsf, mp_data)
df = pd.concat(features_chunk)
to_pickle("../processed/v003/acsf_feat.pkl", df)
#df.to_csv("../processed/v003/acsf_feat.csv")
print("finished.")
|
[
"[email protected]"
] | |
198c683ce8f8d6109e25e666ec663c387887bcf4
|
7c66bba92b484e5fa6ee282ef39f2c26875ca775
|
/auto_login/weibo_auto_login.py
|
adbac7ea6274fa21e0b7a49a1bb7cc6022b031ae
|
[] |
no_license
|
KqSMea8/PythonTools
|
a5ac17182b2689a706180dc349d59c2484d3984c
|
7279570b82fecbf59b71aa6b58ef975e90c660df
|
refs/heads/master
| 2020-04-13T04:19:19.209243 | 2018-12-24T05:13:12 | 2018-12-24T05:13:12 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,876 |
py
|
#!/usr/bin/env python
# encoding: utf-8
"""
@version: 1.0
@author: yuxuecheng
@contact: [email protected]
@software: PyCharm Community Edition
@file: weibo_auto_login.py
@time: 26/10/2017 09:49
"""
import sys
import urllib
import urllib2
import cookielib
import base64
import re
import json
import rsa
import binascii
import logging
import time
import os
import traceback
# import requests
# from bs4 import BeautifulSoup
# Simulated login for Sina Weibo
class WeiboLogin(object):
def __init__(self):
        # Create an object that stores cookies
# self.cj = cookielib.CookieJar()
self.cj = cookielib.LWPCookieJar()
def enable_cookies(self):
        # Bind the cookie container to an HTTP cookie handler
cookie_support = urllib2.HTTPCookieProcessor(self.cj)
        # Create an opener with a handler for opening http URLs
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
        # Install the opener so that later urlopen() calls use it
urllib2.install_opener(opener)
@staticmethod
def get_server_data():
"""
        Pre-login request that returns servertime, nonce, pubkey and rsakv
:return:
"""
# url = 'http://login.sina.com.cn/sso/prelogin.php?entry=weibo&callback=sinaSSOController.preloginCallBack&su=ZW5nbGFuZHNldSU0MDE2My5jb20%3D&rsakt=mod&checkpin=1&client=ssologin.js(v1.4.18)&_=1442991685270'
prelogin_url_format = "https://login.sina.com.cn/sso/prelogin.php?entry=weibo&callback=sinaSSOController.preloginCallBack&su=&rsakt=mod&client=ssologin.js(v1.4.19)&_=%d"
cur_time = int((time.time() * 1000))
prelogin_url = prelogin_url_format % cur_time
data = urllib2.urlopen(prelogin_url).read()
try:
json_data = re.search(r'(\(.*\))', data).group(0)
data = json.loads(json_data[1:-1])
server_time = str(data['servertime'])
nonce = data['nonce']
pubkey = data['pubkey']
rsakv = data['rsakv']
return server_time, nonce, pubkey, rsakv
except:
logging.error('Get severtime error!')
return None
@staticmethod
def get_password(password, servertime, nonce, pubkey):
"""
        Get the RSA-encrypted password
:param password:
:param servertime:
:param nonce:
:param pubkey:
:return:
"""
rsa_publickey = int(pubkey, 16)
        key = rsa.PublicKey(rsa_publickey, 65537)  # build the RSA public key
        message = str(servertime) + '\t' + str(nonce) + '\n' + str(password)  # concatenate the plaintext, format taken from the site's JS encryption code
        password = rsa.encrypt(message, key)  # encrypt
        password = binascii.b2a_hex(password)  # convert the ciphertext to hex
return password
@staticmethod
def get_username(user_name):
"""
获取加密后的用户名
:param user_name:
:return:
"""
user_name = urllib.quote(user_name)
user_name = base64.encodestring(user_name)[:-1]
return user_name
@staticmethod
def get_form_data( user_name, password, servertime, nonce, pubkey, rsakv ):
"""
获取需要提交的表单数据
:param user_name:
:param password:
:param servertime:
:param nonce:
:param pubkey:
:param rsakv:
:return:
"""
user_name = WeiboLogin.get_username(user_name)
psw = WeiboLogin.get_password(password, servertime, nonce, pubkey)
form_data = {
'entry': 'weibo',
'gateway': '1',
'from': '',
'savestate': '7',
'useticket': '1',
'pagerefer': 'http://weibo.com/p/1005052679342531/home?from=page_100505&mod=TAB&pids=plc_main',
'vsnf': '1',
'su': user_name,
'service': 'miniblog',
'servertime': servertime,
'nonce': nonce,
'pwencode': 'rsa2',
'rsakv': rsakv,
'sp': psw,
'sr': '1366*768',
'encoding': 'UTF-8',
'prelt': '115',
'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
'returntype': 'META'
}
form_data = urllib.urlencode(form_data)
return form_data
    # Login method
def login(self, username, password):
self.enable_cookies()
url = 'https://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.19)'
servertime, nonce, pubkey, rsakv = WeiboLogin.get_server_data()
formData = WeiboLogin.get_form_data(username, password, servertime, nonce, pubkey, rsakv)
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0'}
req = urllib2.Request(
url=url,
data=formData,
headers=headers
)
result = urllib2.urlopen(req)
text = result.read()
logging.info("login data: %s" % text.decode("gb2312"))
        # Not done yet: the response embeds a redirect URL in a script; fetching it is what actually completes the login
try:
url_data = re.search(r'(\(.*\))', text).group(0)
login_url = url_data[2:-2]
logging.info("login_url: %s" % login_url)
login_req = urllib2.Request(
url=login_url,
headers=headers
)
            # Thanks to the earlier binding, cookie data is saved automatically
urllib2.urlopen(login_req)
logging.info("Login success!")
except urllib2.URLError as urle:
traceback.print_exc(urle)
logging.error('Login error! Error message: %s' % urle.message)
return -1
except Exception as e:
logging.error(e)
return -1
        # Fetch the home page and write it to a file
# url = 'http://weibo.com/u/2679342531/home?topnav=1&wvr=6'
url = 'http://www.weibo.com/linusyuno1/home?wvr=5&lf=reg'
request = urllib2.Request(url)
response = urllib2.urlopen(request)
logging.info(response.headers.dict)
text = response.read()
filename = os.getcwd() + os.path.sep + "weibo.html"
fp_raw = open(filename, "w+")
fp_raw.write(text)
fp_raw.close()
logging.info(text.decode("gbk"))
if __name__ == "__main__":
init_logging("weibo")
logging.info(u'新浪微博模拟登陆:')
# username = raw_input(u'用户名:')
# password = raw_input(u'密码:')
username = "[email protected]"
password = "yuxc870704"
weibologin = WeiboLogin()
weibologin.login(username, password)
filename = os.getcwd() + os.path.sep + 'cookie.txt'
weibologin.cj.save(filename)
|
[
"[email protected]"
] | |
847889e16e3bd3550569db29c5361a86553d6bf7
|
f4b60f5e49baf60976987946c20a8ebca4880602
|
/lib64/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/ident/contextelement.py
|
b380cc640393755a40695dbc4560e4f854377a06
|
[] |
no_license
|
cqbomb/qytang_aci
|
12e508d54d9f774b537c33563762e694783d6ba8
|
a7fab9d6cda7fadcc995672e55c0ef7e7187696e
|
refs/heads/master
| 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 |
Python
|
UTF-8
|
Python
| false | false | 4,121 |
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ContextElement(Mo):
"""
The identity context element.
"""
meta = ClassMeta("cobra.model.ident.ContextElement")
meta.moClassName = "identContextElement"
meta.rnFormat = "id-[%(eDn)s]"
meta.category = MoCategory.REGULAR
meta.label = "None"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.ident.Context")
meta.rnPrefixes = [
('id-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "eDn", "eDn", 347, PropCategory.REGULAR)
prop.label = "Element DN"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("eDn", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "sDn", "sDn", 348, PropCategory.REGULAR)
prop.label = "Segment DN"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("sDn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "eDn"))
getattr(meta.props, "eDn").needDelimiter = True
def __init__(self, parentMoOrDn, eDn, markDirty=True, **creationProps):
namingVals = [eDn]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
|
[
"[email protected]"
] | |
9a0e63c5b5b8525ef929e64c55a91bb636cdfab2
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/Cases/2938/.mooctest/answer.py
|
8566383381c158a21b1b6cefb037db2930fa8950
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 449 |
py
|
#include<bits/stdc++.h>  // header file
using namespace std;
string a[100];  // the array of strings to sort
stringstream ss;  // used to convert numbers to strings
int main(){
    for(int i=1;i<=100;i++){  // store the numbers 1 to 100 as strings
        ss<<i;
        ss>>a[i-1];
        ss.str("");  // clear the buffer
        ss.clear();  // reset the stream state
    }
    sort(a,a+100);  // sort lexicographically
    for(int i=0;i<100;i++)
        cout<<a[i]<<endl;  // output
    return 0;  // done
}
|
[
"[email protected]"
] | |
8885b77cdd0914bc461b0303e7c24a2db6ac1e80
|
2be8a9f06d4003d12c0a727fb83d284c31a53050
|
/HoudiniHotBox17.0/lib/Cd_Material.py
|
78316d6bcb22180bb4bd80f79268dcbe13118016
|
[] |
no_license
|
LiuLiangFx/SmileHotBOX
|
7551d9578b2defe612950cb8e3bffdb85024cede
|
8bd8eac69b3c2a9824b9aa4488ca77789bea8d85
|
refs/heads/master
| 2021-01-01T10:22:26.959731 | 2020-02-09T03:16:32 | 2020-02-09T03:16:32 | 239,236,801 | 0 | 0 | null | 2020-02-09T02:47:18 | 2020-02-09T02:47:18 | null |
UTF-8
|
Python
| false | false | 1,494 |
py
|
import hou
class Cd_Material:
def __init__(self):
self.pane=hou.ui.paneTabOfType(hou.paneTabType.NetworkEditor)
self.node= hou.selectedNodes()[0]
fl=open('material.txt', 'w')
fl.write(self.node.path())
fl.close()
def run(self):
if self.node.type().name() == "material" and self.node.parm("shop_materialpath1").eval() == "":
self.pane.cd("/shop")
elif self.node.type().name() == "material" and self.node.parm("shop_materialpath1").eval() != "":
try:
mNode = hou.node(self.node.parm("shop_materialpath1").eval())
mNode.allowEditingOfContents()
self.pane.cd(mNode.path())
except:
self.pane.cd("/shop")
if self.node.type().name() == "geo" and self.node.parm("shop_materialpath").eval() == "":
self.pane.cd("/shop")
elif self.node.type().name() == "geo" and self.node.parm("shop_materialpath").eval() != "":
try:
mNode = hou.node(self.node.parm("shop_materialpath").eval())
mNode.allowEditingOfContents()
self.pane.cd(mNode.path())
except:
self.pane.cd("/shop")
a= Cd_Material()
a.run()
|
[
"[email protected]"
] | |
82d781cf2b96438286ea7fd29e2c1490e21df986
|
6191bad7750404bc0bcaec43a8dea51b52980f04
|
/Seção_07/Collections/deque.py
|
b4b481fd5e7d3175e21b72433dcb6f37509d11ff
|
[] |
no_license
|
Lehcs-py/guppe
|
abfbab21c1b158b39251fa6234a4a98ce5f31c2a
|
2ff007bce88e065e6d3020971efd397ec7f7084b
|
refs/heads/main
| 2023-02-26T18:43:06.052699 | 2021-02-07T18:22:53 | 2021-02-07T18:22:53 | 330,180,078 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 182 |
py
|
from collections import deque
deq = deque('LEHCS')
print(deq)
deq.append('A')
print(deq)
deq.appendleft('D')
print(deq)
print(deq.pop())
print(deq)
print(deq.popleft())
print(deq)
|
[
"[email protected]"
] | |
438033b0dd31378c6fc09ace40f3c3bee1d9bafe
|
e5b4ed93d6666e195e96a265d3e7cfe4243a7300
|
/hunter/hunter.py
|
0914e6258d70bb3bcec304a85ffabec2451f20d3
|
[] |
no_license
|
Spider251/python
|
934f5b8b923c2b61186a6df8445957290e5c4c74
|
8b1931f862e1d5c29fed9af624bcac94c1d25755
|
refs/heads/master
| 2020-04-05T11:58:04.558098 | 2018-11-09T12:06:06 | 2018-11-09T12:06:06 | 156,852,553 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,098 |
py
|
'''
Implements the hunter role.
Required argument: 1. the dict of all players
'''
# person: attributes shared by every role
# survival: alive status of each player, e.g. {1:0, 2:1, ...}; the key is the player number, value 0 means dead and 1 means alive
class hunter:
def __init__(self,survival):
self.survival = survival
def fun(self):
for i in self.survival:
if i == 2:
if self.survival[i] == 1:
pass
elif self.survival[i] == 0:
print("猎人已经死亡")
self.say()
def say(self):
while True:
a = input("杀人Y/放弃N:")
if a == 'N':
print("结束")
elif a == 'Y':
print("请选择要带走的角色:",end="")
for i in self.survival:
if i != 2:
print(i,end=" ")
print()
a = input("杀死:")
print(a,"已死")
break
if __name__ == '__main__':
a = {1:0,2:0,3:1}
hunter = hunter(a)
hunter.fun()
|
[
"[email protected]"
] | |
1fc8cb941330ce78d411f3edb862796a171a89fc
|
e77b92df446f0afed18a923846944b5fd3596bf9
|
/Inflearn_algo/section7_dfs_bfs/pro1_maxScore_re.py
|
7feac790f2615b319b2bc54fde9f4010284f4fdd
|
[] |
no_license
|
sds1vrk/Algo_Study
|
e40ca8eb348d1fc6f88d883b26195b9ee6f35b2e
|
fbbc21bb06bb5dc08927b899ddc20e6cde9f0319
|
refs/heads/main
| 2023-06-27T05:49:15.351644 | 2021-08-01T12:43:06 | 2021-08-01T12:43:06 | 356,512,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 635 |
py
|
# Finding the maximum score (DFS)
# Think of each problem as: either solve it or skip it
import sys
sys.stdin=open("input.txt","r")
n,m=map(int,input().split())
ss=[]
tt=[]
for i in range(n):
a,b=map(int,input().split())
ss.append(a)
tt.append(b)
max_score=-1
def dfs(l,s,t):
global max_score
    # Pruning: if t exceeds m there is no point in going further
if t>m:
return
if l==n:
if s>max_score:
max_score=s
else :
        # Case 1: solve this problem
dfs(l+1,s+ss[l],t+tt[l])
        # Case 2: skip this problem
dfs(l+1,s,t)
dfs(0,0,0)
print(max_score)
|
[
"[email protected]"
] | |
3674de65b0e09eba8a92b497cf4a7530fb460826
|
d53bc632503254ca0d5099fe457c02c07212a131
|
/cookieproject1/cookieproject1/wsgi.py
|
0e0d958b3a4961808057c49586b4e5768c75d831
|
[] |
no_license
|
srikar1993/django
|
ba8428f6e1162cc40f2d034126e7baf29eb62edc
|
2199d5d94accc7bce5b3fac4a4b7b1444e39b35f
|
refs/heads/master
| 2023-07-14T21:10:52.654992 | 2021-08-26T06:37:04 | 2021-08-26T06:37:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 405 |
py
|
"""
WSGI config for cookieproject1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cookieproject1.settings')
application = get_wsgi_application()
|
[
"[email protected]"
] | |
eb6f175b0c5788c950623020ee524b875e28fc23
|
bafd37fdbaf76d5d7dabd9c07985969b3924f9c8
|
/example_client/example.py
|
9671da3b0660af7e0bb9fe806d6331467f1918ae
|
[
"Apache-2.0"
] |
permissive
|
k24dizzle/nagios_registration
|
2c1c95c7c871ee8ed31de46d555c812f2c0f41c8
|
be18dbadd2c08def81e795e4afe2fe2cf41775cf
|
refs/heads/master
| 2020-03-08T11:54:30.569982 | 2015-07-16T18:01:07 | 2015-07-16T18:01:07 | 128,111,583 | 1 | 0 | null | 2018-04-04T19:32:53 | 2018-04-04T19:32:52 | null |
UTF-8
|
Python
| false | false | 3,263 |
py
|
import oauth2
import json
###
#
# This script will create 2 hosts, and add them to a host group.
# It will then create a service, and assign that service to both hosts.
# It will then deploy a new nagios configuration file.
#
###
consumer_key = "OAUTH_KEY"
consumer_secret = "OAUTH_SECRET"
registration_server = "http://localhost:8000"
###
#
# You can create a consumer key and secret on the nagios_registration
# server with a django management command:
#
# python manage.py create_consumer
#
###
consumer = oauth2.Consumer(key=consumer_key, secret=consumer_secret)
client = oauth2.Client(consumer)
# Variables used by the actual requests below
hostname1 = "example app host"
address1 = "127.0.0.1"
hostname2 = "second app host"
address2 = "127.0.0.2"
groupname = "example_app_servers"
alias = "Example App Servers"
base_service = "24x7-active-service"
service_description = "Disk Usage"
check_command = "check_remote!disk_check.py!98!99"
# End of settings, now just making requests to the server
# Create the 2 hosts
client.request("%s/api/v1/host" % (registration_server),
method='POST',
body=json.dumps({"name": hostname1, "address": address1}),
headers={"Content-Type": "application/json"})
client.request("%s/api/v1/host" % (registration_server),
method='POST',
body=json.dumps({"name": hostname2, "address": address2}),
headers={"Content-Type": "application/json"})
# Create the hostgroup
client.request("%s/api/v1/hostgroup" % (registration_server),
method='POST',
body=json.dumps({"name": groupname, "alias": alias}),
headers={"Content-Type": "application/json"})
# Add the hosts to the hostgroup
client.request("%s/api/v1/hostgroup" % (registration_server),
method='PATCH',
body=json.dumps({"group": groupname, "host": hostname1}),
headers={"Content-Type": "application/json"})
client.request("%s/api/v1/hostgroup" % (registration_server),
method='PATCH',
body=json.dumps({"group": groupname, "host": hostname2}),
headers={"Content-Type": "application/json"})
# Create a service
client.request("%s/api/v1/service" % (registration_server),
method='POST',
body=json.dumps({"base_service": base_service,
"description": service_description,
"check_command": check_command}),
headers={"Content-Type": "application/json"})
# Add the service to the 2 hosts
client.request("%s/api/v1/service" % (registration_server),
method='PATCH',
body=json.dumps({"service": service_description,
"host": hostname1}),
headers={"Content-Type": "application/json"})
client.request("%s/api/v1/service" % (registration_server),
method='PATCH',
body=json.dumps({"service": service_description,
"host": hostname2}),
headers={"Content-Type": "application/json"})
# Deploy the changes
client.request("%s/api/v1/deploy" % (registration_server), method="POST")
print "Done!"
|
[
"[email protected]"
] | |
05546c27ea40660996b98f84d8a1a0f04a42c288
|
85bf9a13bf62c1f074894d134c23dd992ae8688c
|
/problems/p317/Solution.py
|
6d55e5a066806320f5503f718d38b8fa74f2166f
|
[] |
no_license
|
pololee/oj-leetcode
|
4cca3d309b2c9931d15d3cec4b07b5d9d22733ef
|
78a8b27ee108ba93aa7b659665976112f48fc2c2
|
refs/heads/master
| 2020-06-21T02:15:26.882273 | 2020-02-06T04:56:21 | 2020-02-06T04:56:21 | 197,320,113 | 0 | 0 | null | 2020-02-06T04:56:23 | 2019-07-17T05:20:02 |
Python
|
UTF-8
|
Python
| false | false | 2,336 |
py
|
import collections
import sys
class Solution:
DIRECTIONS = [(1, 0), (0, 1), (-1, 0), (0, -1)]
def shortestDistance(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
if not grid:
return 0
row_size = len(grid)
col_size = len(grid[0])
distance = [[0 for _ in range(col_size)]
for _ in range(row_size)]
reaches = [[0 for _ in range(col_size)]
for _ in range(row_size)]
num_of_buildings = 0
for i in range(row_size):
for j in range(col_size):
if grid[i][j] == 1:
num_of_buildings += 1
self.bfs(grid, distance, reaches, i, j)
shortest = sys.maxsize
for i in range(row_size):
for j in range(col_size):
if grid[i][j] == 0 and reaches[i][j] == num_of_buildings:
shortest = min(shortest, distance[i][j])
if shortest == sys.maxsize:
return -1
return shortest
def bfs(self, grid, distance, reaches, istart, jstart):
row_size = len(grid)
col_size = len(grid[0])
visited = [[False for _ in range(col_size)]
for _ in range(row_size)]
queue = collections.deque()
queue.append((istart, jstart))
visited[istart][jstart] = True
level = 0
while queue:
size = len(queue)
for _ in range(size):
row, col = queue.popleft()
if grid[row][col] == 0:
distance[row][col] += level
reaches[row][col] += 1
for drow, dcol in self.DIRECTIONS:
new_row = row + drow
new_col = col + dcol
if new_row >= 0 and new_row < row_size and new_col >= 0 and new_col < col_size and grid[new_row][new_col] == 0 and not visited[new_row][new_col]:
visited[new_row][new_col] = True
queue.append((new_row, new_col))
level += 1
def main():
test = [[1, 0, 2, 0, 1], [0, 0, 0, 0, 0], [0, 0, 1, 0, 0]]
sol = Solution()
print(sol.shortestDistance(test))
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
4120d60565a39b46cd5b6d64ed972b8c46931722
|
5a298ece5b17e6e993d50a855027f265e115e2bd
|
/utilities/filter_data.py
|
99687a7e234137d21978c275dd56b29a9d74c2f1
|
[] |
no_license
|
hvk3/IR_project
|
86b8a1176f6a8ed541f179f1c541eb139dde0295
|
ae6deea2276f0a76bfa23482fd1b7a4c1f039264
|
refs/heads/master
| 2021-10-16T17:33:11.258479 | 2019-02-12T08:45:51 | 2019-02-12T08:45:51 | 118,168,898 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 569 |
py
|
from pymongo import MongoClient
from langdetect import detect
from tqdm import tqdm
client = MongoClient()
db = client.youtube8m
ds_1 = db.iteration3
ds_2 = db.iteration4
ds_2.remove()
print("Before:", ds_1.find().count())
for record in tqdm(ds_1.find()):
title = record['metadata']['title']
description = record['metadata']['description']
# if len(description) > 0 and len(title) > 0:
# ds_2.insert_one(record)
try:
if detect(description) == 'en': #3: title, #4: description
ds_2.insert_one(record)
except:
continue
print("After:", ds_2.find().count())
|
[
"[email protected]"
] | |
87466cd291f6c19586b503ef7109c6a64acf8ca6
|
39157a854806af4db51b986adf5096bd342bacdb
|
/fuzznumpy/main.py
|
68c4295fe915c1b180f9319956c9abc08d8c52e3
|
[] |
no_license
|
xcainiao/fuzzing
|
b6b43550f7a5c05595a180d111d9ec03e4710293
|
5cadbe3e1bcc9090a68b1006cb5b6b76db990ae1
|
refs/heads/master
| 2020-03-30T01:51:59.811511 | 2018-09-27T14:25:05 | 2018-09-27T14:25:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 410 |
py
|
import fuzz
import numpy as np
from ctypes import CDLL
test = CDLL("c/test.so")
test.init()
fuzz.init()
while 1:
func = fuzz.generate()
# func = """import numpy\nnumpy.half(-1).choose(numpy.void(1), numpy.broadcast_arrays((1,)))"""
test.copybuff(func)
try:
exec(func, {"np":np})
except Exception as e:
# print e
continue
print func
fuzz.register(func)
|
[
"[email protected]"
] | |
d33d903e7de59d03eac8b1c9b2af624e056b3328
|
b8faf65ea23a2d8b119b9522a0aa182e9f51d8b1
|
/vmraid/website/doctype/social_link_settings/social_link_settings.py
|
35954b6ce718f192fa921627f23f3e2a83b1b277
|
[
"MIT"
] |
permissive
|
vmraid/vmraid
|
a52868c57b1999a8d648441eb9cd05815204345d
|
3c2e2a952003ba7ea2cf13673b9e79e127f4166e
|
refs/heads/main
| 2022-07-29T18:59:28.585133 | 2022-04-22T08:02:52 | 2022-04-22T08:02:52 | 372,473,120 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 215 |
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, VMRaid Technologies and contributors
# License: MIT. See LICENSE
# import vmraid
from vmraid.model.document import Document
class SocialLinkSettings(Document):
pass
|
[
"[email protected]"
] | |
f819a142bd8930f08e51e57ed6af15a211801e81
|
4bcae7ca3aed842d647d9112547522cffa805d51
|
/0674.最长连续递增序列.py
|
43854333b238384701a6a84adb3ed71f0d9e3655
|
[] |
no_license
|
SLKyrim/vscode-leetcode
|
fd5a163f801661db0dfae1d4fdfa07b79fdb82b6
|
65a271c05258f447d3e56755726f02179780eb8a
|
refs/heads/master
| 2021-07-03T03:15:28.883786 | 2021-02-23T06:19:18 | 2021-02-23T06:19:18 | 226,062,540 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,286 |
py
|
#
# @lc app=leetcode.cn id=674 lang=python3
#
# [674] Longest Continuous Increasing Subsequence
#
# https://leetcode-cn.com/problems/longest-continuous-increasing-subsequence/description/
#
# algorithms
# Easy (45.18%)
# Likes: 89
# Dislikes: 0
# Total Accepted: 30.7K
# Total Submissions: 68K
# Testcase Example: '[1,3,5,4,7]'
#
# Given an unsorted array of integers, find the longest continuous increasing subsequence and return its length.
#
# Example 1:
#
# Input: [1,3,5,4,7]
# Output: 3
# Explanation: The longest continuous increasing subsequence is [1,3,5], with length 3.
# Although [1,3,5,7] is also an increasing subsequence, it is not continuous, because 5 and 7 are separated by 4 in the original array.
#
# Example 2:
#
# Input: [2,2,2,2,2]
# Output: 1
# Explanation: The longest continuous increasing subsequence is [2], with length 1.
#
# Note: the length of the array will not exceed 10000.
#
#
# @lc code=start
from typing import List

class Solution:
def findLengthOfLCIS(self, nums: List[int]) -> int:
res = 0
n = len(nums)
if n == 0:
return 0
if n == 1:
return 1
cnt = 1
for i in range(1, n):
if nums[i] > nums[i-1]:
cnt += 1
else:
res = max(res, cnt)
cnt = 1
return max(res, cnt)
# @lc code=end
|
[
"[email protected]"
] | |
b26dfc2cc4ffb4aa822cac635d3e83c1522e9304
|
04b3a30ca30c3a9cc459b06fe1842a500dd5ab51
|
/addresss/views.py
|
06c24539f8cc82d589e80f97797e2431e41d5162
|
[] |
no_license
|
rahulsayon/Final-Ecommerce
|
17b7830e44ab86b7513f48d80fc1bb7f12c36516
|
ca0c860653ec1b80f0a3f012e338ecc2189019ac
|
refs/heads/master
| 2022-12-11T01:12:03.500783 | 2020-09-13T20:09:40 | 2020-09-13T20:09:40 | 295,228,975 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,454 |
py
|
from django.shortcuts import render
from .forms import AddressForm
from django.shortcuts import redirect
from billing.models import BillingProfile
from django.utils.http import is_safe_url
from addresss.models import Address
# Create your views here.
def checkout_address_create_view(request):
form = AddressForm(request.POST or None)
context = { "form" : form }
next_ = request.GET.get('next')
next_post = request.POST.get('next')
redirect_path = next_ or next_post or None
if form.is_valid():
print(form.cleaned_data)
instance = form.save(commit=False)
billing_profile , billing_profile_created = BillingProfile.objects.new_or_get(request)
if billing_profile is not None:
address_type = request.POST.get('address_type' , 'shipping')
print("billinf profile" , billing_profile)
instance.billing_profile = billing_profile
instance.address_type = request.POST.get('address_type' , 'shipping')
instance.save()
request.session[address_type + "_address_id"] = instance.id
print(address_type +"_address_id")
else:
print("error")
return redirect("cart:checkout")
if is_safe_url(redirect_path , request.get_host()):
return redirect(redirect_path)
else:
return redirect("cart:checkout")
return redirect("cart:checkout")
def checkout_address_reuse_view(request):
if request.user.is_authenticated:
context = {}
next_ = request.GET.get('next')
next_post = request.POST.get('next')
redirect_path = next_ or next_post or None
if request.method == "POST":
print(request.POST)
shipping_address = request.POST.get('shipping_address', None)
address_type = request.POST.get('address_type', 'shipping')
billing_profile, billing_profile_created = BillingProfile.objects.new_or_get(request)
if shipping_address is not None:
qs = Address.objects.filter(billing_profile=billing_profile, id=shipping_address)
if qs.exists():
request.session[address_type + "_address_id"] = shipping_address
if is_safe_url(redirect_path, request.get_host()):
return redirect(redirect_path)
return redirect("cart:checkout")
|
[
"[email protected]"
] | |
a75d04852aca116b804d4a5aa819b764cddff608
|
5d9636dcae2471d700da5583cfc0359644c7322d
|
/pugsley/auth/routes.py
|
78e16175c4ac581d44b4ba571f9a66b393c72966
|
[
"MIT"
] |
permissive
|
kfields/pugsley-lite
|
93a4c7c334fd9b4f3ab68acc565b1f29a4a31b99
|
9fdd4868895b38fb81855952f19bdf9cca1635b3
|
refs/heads/master
| 2023-01-24T18:29:15.338112 | 2019-08-11T20:33:30 | 2019-08-11T20:33:30 | 179,791,236 | 1 | 0 |
MIT
| 2023-01-09T22:22:33 | 2019-04-06T05:09:26 |
CSS
|
UTF-8
|
Python
| false | false | 4,521 |
py
|
from flask import render_template, redirect, url_for, flash, request, jsonify
from werkzeug.urls import url_parse
from flask_login import login_user, logout_user, current_user
from flask_babel import _
from pugsley import db
from pugsley.jwt import encode_auth_token
from pugsley.auth import bp
from pugsley.auth.forms import LoginForm, RegistrationForm, \
ResetPasswordRequestForm, ResetPasswordForm
from pugsley.models.users import User
from pugsley.auth.emails import send_password_reset_email
@bp.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for('main.index'))
form = LoginForm()
if form.validate_on_submit():
email = form.email.data
if '@' in email:
user = User.query.filter_by(email=form.email.data).first()
else:
user = User.query.filter_by(username=form.email.data).first()
if user is None or not user.check_password(form.password.data):
flash(_('Invalid email or password'))
return redirect(url_for('auth.login'))
login_user(user, remember=form.remember_me.data)
next_page = request.args.get('next')
if not next_page or url_parse(next_page).netloc != '':
next_page = url_for('main.index')
return redirect(next_page)
# return render_template('login.html', title=_('Log In'), form=form)
return render_template('layouts/auth-default.html',
content=render_template( 'pages/login.html', form=form ) )
@bp.route('/logout')
def logout():
logout_user()
return redirect(url_for('main.index'))
@bp.route('/register', methods=['GET', 'POST'])
def register():
if current_user.is_authenticated:
return redirect(url_for('main.index'))
form = RegistrationForm()
if form.validate_on_submit():
# user = User(first_name=form.first_name.data, last_name=form.last_name.data, username=form.username.data, email=form.email.data)
user = User(username=form.email.data, email=form.email.data)
user.set_password(form.password.data)
db.session.add(user)
db.session.commit()
flash(_('Congratulations, you are now a registered user!'))
return redirect(url_for('auth.login'))
# return render_template('register.html', title=_('Register'), form=form)
return render_template('layouts/auth-default.html',
content=render_template( 'pages/register.html', form=form ) )
@bp.route('/reset_password_request', methods=['GET', 'POST'])
def reset_password_request():
if current_user.is_authenticated:
return redirect(url_for('main.index'))
form = ResetPasswordRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
send_password_reset_email(user)
flash(
_('Check your email for the instructions to reset your password'))
return redirect(url_for('auth.login'))
return render_template('reset_password_request.html',
title=_('Reset Password'), form=form)
@bp.route('/reset_password/<token>', methods=['GET', 'POST'])
def reset_password(token):
if current_user.is_authenticated:
return redirect(url_for('main.index'))
user = User.verify_reset_password_token(token)
if not user:
return redirect(url_for('main.index'))
form = ResetPasswordForm()
if form.validate_on_submit():
user.set_password(form.password.data)
db.session.commit()
flash(_('Your password has been reset.'))
return redirect(url_for('auth.login'))
return render_template('reset_password.html', form=form)
@bp.route('/token', methods=['POST'])
def token():
if not request.is_json:
return jsonify({"msg": "Missing JSON in request"}), 400
username = request.json.get('username', None)
password = request.json.get('password', None)
if not username:
return jsonify({"msg": "Missing username parameter"}), 400
if not password:
return jsonify({"msg": "Missing password parameter"}), 400
user = User.query.filter_by(username=username).first()
if user is None or not user.check_password(password):
return jsonify({"msg": "Bad username or password"}), 401
# Identity can be any data that is json serializable
access_token = encode_auth_token(sub=username, id=user.id)
print(access_token)
return jsonify({"token": access_token.decode('utf-8')}), 200
|
[
"[email protected]"
] | |
d8982a501517e741145cac724e03b59326021d7d
|
09e5cfe06e437989a2ccf2aeecb9c73eb998a36c
|
/modules/dxtbx/command_line/print_header.py
|
0927cef0df1adb502a68d0f8709b4377dcad155a
|
[
"BSD-3-Clause"
] |
permissive
|
jorgediazjr/dials-dev20191018
|
b81b19653624cee39207b7cefb8dfcb2e99b79eb
|
77d66c719b5746f37af51ad593e2941ed6fbba17
|
refs/heads/master
| 2020-08-21T02:48:54.719532 | 2020-01-25T01:41:37 | 2020-01-25T01:41:37 | 216,089,955 | 0 | 1 |
BSD-3-Clause
| 2020-01-25T01:41:39 | 2019-10-18T19:03:17 |
Python
|
UTF-8
|
Python
| false | false | 1,633 |
py
|
from __future__ import absolute_import, division, print_function
import sys
from scitbx.array_family import flex
from dxtbx.format.FormatMultiImage import FormatMultiImage
from dxtbx.format.Registry import Registry
def print_header():
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
print("=== %s ===" % arg)
format_class = Registry.find(arg)
print("Using header reader: %s" % format_class.__name__)
i = format_class(arg)
beam = i.get_beam()
goniometer = i.get_goniometer()
detector = i.get_detector()
scan = i.get_scan()
if beam is None:
print("No beam model found")
else:
print(beam)
if detector is None:
print("No detector model found")
else:
print(detector)
if goniometer is None:
print("No goniometer model found")
else:
print(goniometer)
if scan is None:
print("No scan model found")
else:
print(scan)
if not issubclass(format_class, FormatMultiImage):
try:
raw_data = i.get_raw_data()
if not isinstance(raw_data, tuple):
raw_data = (raw_data,)
d = [p.as_1d() for p in raw_data]
print("Total Counts: %d" % sum([flex.sum(p.select(p >= 0)) for p in d]))
except AttributeError:
print("Could not read image data")
if __name__ == "__main__":
print_header()
|
[
"[email protected]"
] | |
1c930c629d264c1b02af2492b5b962be70f570d9
|
d6589ff7cf647af56938a9598f9e2e674c0ae6b5
|
/nlp-automl-20191111/setup.py
|
7541638ca97742a773686465bb0c04174993e7bc
|
[
"Apache-2.0"
] |
permissive
|
hazho/alibabacloud-python-sdk
|
55028a0605b1509941269867a043f8408fa8c296
|
cddd32154bb8c12e50772fec55429a9a97f3efd9
|
refs/heads/master
| 2023-07-01T17:51:57.893326 | 2021-08-02T08:55:22 | 2021-08-02T08:55:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,647 |
py
|
# -*- coding: utf-8 -*-
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import os
from setuptools import setup, find_packages
"""
setup module for alibabacloud_nlp-automl20191111.
Created on 30/12/2020
@author: Alibaba Cloud SDK
"""
PACKAGE = "alibabacloud_nlp_automl20191111"
NAME = "alibabacloud_nlp-automl20191111" or "alibabacloud-package"
DESCRIPTION = "Alibaba Cloud nlp-automl (20191111) SDK Library for Python"
AUTHOR = "Alibaba Cloud SDK"
AUTHOR_EMAIL = "[email protected]"
URL = "https://github.com/aliyun/alibabacloud-python-sdk"
VERSION = __import__(PACKAGE).__version__
REQUIRES = [
"alibabacloud_tea_util>=0.3.1, <1.0.0",
"alibabacloud_tea_openapi>=0.1.0, <1.0.0",
"alibabacloud_openapi_util>=0.0.3, <1.0.0",
"alibabacloud_endpoint_util>=0.0.3, <1.0.0"
]
LONG_DESCRIPTION = ''
if os.path.exists('./README.md'):
with open("README.md", encoding='utf-8') as fp:
LONG_DESCRIPTION = fp.read()
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
long_description_content_type='text/markdown',
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license="Apache License 2.0",
url=URL,
keywords=["alibabacloud","nlp","automl20191111"],
packages=find_packages(exclude=["tests*"]),
include_package_data=True,
platforms="any",
install_requires=REQUIRES,
python_requires=">=3.6",
classifiers=(
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
"Topic :: Software Development"
)
)
|
[
"[email protected]"
] | |
48692f6bb82436458dcda51926e85f92d86ed1ad
|
589b5eedb71d83c15d44fedf60c8075542324370
|
/project/stock_project/alpha_model/alpha_factor/ARAPIncomeTTM.py
|
605a216d31fef56f1a88b39a3a9a2b23dfa799dd
|
[] |
no_license
|
rlcjj/quant
|
4c2be8a8686679ceb675660cb37fad554230e0d4
|
c07e8f0f6e1580ae29c78c1998a53774a15a67e1
|
refs/heads/master
| 2020-03-31T07:15:48.111511 | 2018-08-27T05:29:00 | 2018-08-27T05:29:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,685 |
py
|
import pandas as pd
from quant.stock.stock import Stock
from quant.stock.date import Date
from quant.stock.stock_factor_operate import StockFactorOperate
def ARAPIncomeTTM(beg_date, end_date):
"""
    Factor description: (advance receipts + accounts payable) / total operating income, TTM.
    Uses the most recent financial report and is updated in real time.
    If either component is negative, the result is negative.
"""
# param
#################################################################################
factor_name = 'ARAPIncomeTTM'
ipo_num = 90
# read data
#################################################################################
income = Stock().get_factor_h5("OperatingIncome", None, "primary_mfc")
advance = Stock().get_factor_h5("AdvanceReceipts", None, "primary_mfc")
payable = Stock().get_factor_h5("AccountsPayable", None, "primary_mfc")
# data precessing
#################################################################################
[advance, payable, income] = Stock().make_same_index_columns([advance, payable, income])
add = advance.add(payable)
ratio = add.div(income)
ratio = StockFactorOperate().change_quarter_to_daily_with_report_date(ratio, beg_date, end_date)
res = ratio.T.dropna(how='all').T
# save data
#############################################################################
Stock().write_factor_h5(res, factor_name, "alpha_dfc")
return res
#############################################################################
if __name__ == '__main__':
from datetime import datetime
beg_date = '2004-01-01'
end_date = datetime.today()
data = ARAPIncomeTTM(beg_date, end_date)
print(data)
|
[
"[email protected]"
] | |
ba5fe81af0632687c14d963ae372ba1b8ee5503f
|
a8750439f200e4efc11715df797489f30e9828c6
|
/CodeForces/EC_46_2_C_1.py
|
8ee3122998960e839a22312f1db953f98a96581f
|
[] |
no_license
|
rajlath/rkl_codes
|
f657174305dc85c3fa07a6fff1c7c31cfe6e2f89
|
d4bcee3df2f501349feed7a26ef9828573aff873
|
refs/heads/master
| 2023-02-21T10:16:35.800612 | 2021-01-27T11:43:34 | 2021-01-27T11:43:34 | 110,989,354 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 310 |
py
|
n = int(input())
a = []
for i in range(n):
l, r = [int(x) for x in input().split()]
a.append([l, 1])
a.append([r+1, -1])
a = sorted(a)
ans = [0] * (n + 1)
idx = 0
for i in range(len(a) - 1):
idx += a[i][1]
ans[idx] += a[i+1][0] - a[i][0]
for i in range(1, n+1):
print(ans[i], end = " ")
|
[
"[email protected]"
] | |
9b05b73cd5f0370491f151c54c36a981422be0f9
|
16b567ed93c10287f7b9e90ddc819512aadbcaf5
|
/filters/stopwords_filter.py
|
c69ebfd10592f81b4efc0b75d78a5a7c9c1a54df
|
[] |
no_license
|
Rigel772/python-keyword-density
|
b3bdfb70e06e53264be7507e4111a923b40ea51a
|
c3a4469360de3d7c02dd9b8de2dc7eac45a3253a
|
refs/heads/master
| 2020-05-19T11:28:23.854324 | 2018-11-02T13:22:51 | 2018-11-02T13:22:51 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 647 |
py
|
#-*- coding: utf-8 -*-
import os.path
from .base_filter import BaseFilter
class StopwordsFilter(BaseFilter):
def __init__(self, country):
super(StopwordsFilter, self).__init__()
self.country = country
stopword_fname = '%s.txt' % self.country
folder_name = os.path.dirname(__file__)
self.fname = os.path.join(folder_name, 'stopwords', stopword_fname)
with open(self.fname, 'rb') as f:
self.stopwords = {l.strip().decode('utf8') for l in f if l}
def predicate(self, tok):
"""Returns True if tok not in stopwords else False"""
return tok not in self.stopwords
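# --- Added usage sketch (illustrative, not part of the original module) ---
# Because of the relative import of BaseFilter above, the class is used via
# the package, e.g. from filters.stopwords_filter import StopwordsFilter.
# The country code 'english' and the token list below are assumptions: the
# constructor expects a matching '<country>.txt' file under stopwords/.
#
#   sw = StopwordsFilter('english')
#   tokens = ['the', 'keyword', 'density', 'of', 'a', 'page']
#   kept = [t for t in tokens if sw.predicate(t)]
#   # tokens listed in english.txt (e.g. 'the', 'of', 'a') are dropped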
|
[
"[email protected]"
] | |
e638da845ef167b11b3122f120cd0e44acefa0de
|
e93c6e93f612bca7f192adf539b4f489ad114ab5
|
/m01_basics/l_07_nested_data.py
|
6739ccff73e049c4db47923a80292b1b7f11a800
|
[
"MIT"
] |
permissive
|
be1iever/python-52-weeks
|
8d57a10af9c0f5309ba21a9503a8fdf4bd82840c
|
185d8b3147c6bfb069d58e4933b74792081bf8f2
|
refs/heads/main
| 2023-08-19T08:21:45.330447 | 2021-09-21T15:00:28 | 2021-09-21T15:00:28 | 409,847,518 | 1 | 0 |
MIT
| 2021-09-24T05:51:14 | 2021-09-24T05:51:13 | null |
UTF-8
|
Python
| false | false | 2,506 |
py
|
from pprint import pprint
from random import choice
import copy
from util.create_utils import create_network
device = {
"name": "r3-L-n7",
"vendor": "cisco",
"model": "catalyst 2960",
"os": "ios",
"interfaces": [
]
}
print("\n\n----- device with no interfaces --------------------")
for key, value in device.items():
print(f"{key:>16s} : {value}")
interfaces = list()
for index in range(0, 8):
interface = {
"name": "g/0/0/" + str(index),
"speed": choice(["10", "100", "1000"])
}
interfaces.append(interface)
device["interfaces"] = interfaces
print("\n\n----- device with interfaces --------------------")
for key, value in device.items():
if key != "interfaces":
print(f"{key:>16s} : {value}")
else:
print(f"{key:>16s} :")
for interface in device["interfaces"]:
print(f"\t\t\t\t\t{interface}")
print()
print("\n\n----- device with interfaces using pprint--------------------")
pprint(device)
print("\n\n----- network with devices and interfaces --------------------")
network = create_network(num_devices=4, num_subnets=4)
pprint(network)
print("\n----- information about network --------------------")
print(f"-- number of subnets: {len(network['subnets'])}")
print(f"-- list of subnets: {network['subnets'].keys()}")
print(f"-- list of subnets w/o extraneous: {', '.join(network['subnets'])}")
print("\n----- network and devices nicely formatted --------------------")
for subnet_address, subnet in network["subnets"].items():
print(f"\n-- subnet: {subnet_address}")
for device in subnet["devices"]:
print(f" |-- device: {device['name']:8} {device['ip']:10} {device['vendor']:>10} : {device['os']}")
print("\n\n----- remember assignment vs shallow copy vs deep copy --------------------")
print(" modify 'network' only, and see if assign/copy/deepcopy versions reflect that change")
network_assign = network
network["subnets"]["10.0.1.0"]["devices"][0]["name"] = "different name assigned"
print(f" --- network == network_assign : {network==network_assign}")
network_copy = copy.copy(network)
network["subnets"]["10.0.1.0"]["devices"][0]["name"] = "another different name, copy this time"
print(f" --- network == network_copy : {network==network_copy}")
network_deepcopy = copy.deepcopy(network)
network["subnets"]["10.0.1.0"]["devices"][0]["name"] = "this time with deep copy"
print(f" --- network == network_deepcopy : {network==network_deepcopy}")
|
[
"[email protected]"
] | |
ce2469650940b0fa5dfceaad6a4836793f0f23b9
|
30fd01dbae99721069d936d5daa6a8050488a248
|
/hacker/FirefoxSQLite.py
|
7da8415a2e85749f5c5b4f1f6d446bc2933e030b
|
[] |
no_license
|
chenshuo666/mypython
|
6b334ad42b117c2750129028e82037643d99ab6a
|
3cfcf49f2d6cc3733d244cc7eb212a4dba6a439a
|
refs/heads/master
| 2020-03-10T04:04:35.530485 | 2018-04-17T04:02:16 | 2018-04-17T04:02:16 | 129,182,623 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,999 |
py
|
#!/usr/bin/python
# coding=utf-8
import re
import optparse
import os
import sqlite3
# Parse and print the contents of downloads.sqlite, listing the browser's download history
def printDownloads(downloadDB):
conn = sqlite3.connect(downloadDB)
c = conn.cursor()
c.execute('SELECT name, source, datetime(endTime/1000000, \'unixepoch\') FROM moz_downloads;')
print('\n[*] --- Files Downloaded --- ')
for row in c:
print('[+] File: ' + str(row[0]) + ' from source: ' + str(row[1]) + ' at: ' + str(row[2]))
# Parse and print the contents of cookies.sqlite, listing stored cookies
def printCookies(cookiesDB):
try:
conn = sqlite3.connect(cookiesDB)
c = conn.cursor()
c.execute('SELECT host, name, value FROM moz_cookies')
print('\n[*] -- Found Cookies --')
for row in c:
host = str(row[0])
name = str(row[1])
value = str(row[2])
print('[+] Host: ' + host + ', Cookie: ' + name + ', Value: ' + value)
except Exception as e:
if 'encrypted' in str(e):
print('\n[*] Error reading your cookies database.')
print('[*] Upgrade your Python-Sqlite3 Library')
# Parse and print the contents of places.sqlite, listing the browsing history
def printHistory(placesDB):
try:
conn = sqlite3.connect(placesDB)
c = conn.cursor()
c.execute("SELECT url, datetime(visit_date/1000000, 'unixepoch') FROM moz_places, moz_historyvisits WHERE visit_count > 0 AND moz_places.id==moz_historyvisits.place_id;")
print('\n[*] -- Found History --')
for row in c:
url = str(row[0])
date = str(row[1])
print('[+] ' + date + ' - Visited: ' + url)
except Exception as e:
if 'encrypted' in str(e):
print('\n[*] Error reading your places database.')
print('[*] Upgrade your Python-Sqlite3 Library')
exit(0)
# Parse and print the contents of places.sqlite, listing Baidu search queries
def printBaidu(placesDB):
conn = sqlite3.connect(placesDB)
c = conn.cursor()
c.execute( "SELECT url, datetime(visit_date/1000000, 'unixepoch') FROM moz_places, moz_historyvisits WHERE visit_count > 0 AND moz_places.id==moz_historyvisits.place_id;")
print('\n[*] -- Found Baidu --')
for row in c:
url = str(row[0])
date = str(row[1])
if 'baidu' in url.lower():
r = re.findall(r'wd=.*?\&', url)
if r:
search = r[0].split('&')[0]
search = search.replace('wd=', '').replace('+', ' ')
print('[+] ' + date + ' - Searched For: ' + search)
def main():
parser = optparse.OptionParser("[*]Usage: firefoxParse.py -p <firefox profile path> ")
    # e.g. C:\Users\<username>\AppData\Roaming\Mozilla\Firefox\Profiles\e28nsous.default - location of Firefox's SQLite files
parser.add_option('-p', dest='pathName', type='string', help='specify skype profile path')
(options, args) = parser.parse_args()
pathName = options.pathName
if pathName == None:
print(parser.usage)
exit(0)
elif os.path.isdir(pathName) == False:
print('[!] Path Does Not Exist: ' + pathName)
exit(0)
else:
downloadDB = os.path.join(pathName, 'downloads.sqlite')
if os.path.isfile(downloadDB):
printDownloads(downloadDB)
else:
print('[!] Downloads Db does not exist: ' + downloadDB)
cookiesDB = os.path.join(pathName, 'cookies.sqlite')
if os.path.isfile(cookiesDB):
printCookies(cookiesDB)
else:
print('[!] Cookies Db does not exist:' + cookiesDB)
placesDB = os.path.join(pathName, 'places.sqlite')
if os.path.isfile(placesDB):
printHistory(placesDB)
printBaidu(placesDB)
else:
print('[!] PlacesDb does not exist: ' + placesDB)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
7e39d9636d8d51231c8e255ea73707f11e4c337e
|
56ffce29f0d27f83206e11870d95982c38524aae
|
/apweb/site/view/session_test.py
|
a77c127ed42f7fbac3078f43a773ba651e4786d4
|
[] |
no_license
|
adamandpaul/apweb
|
cce365085e2ee58cfbc31544c5a7414e67ad56b4
|
b1bb81fa7d7b39f19e187462aa3447ff482b46af
|
refs/heads/master
| 2022-10-19T02:09:52.437906 | 2021-05-21T06:10:08 | 2021-05-21T06:10:08 | 201,398,036 | 0 | 3 | null | 2022-09-21T21:39:41 | 2019-08-09T05:41:06 |
Python
|
UTF-8
|
Python
| false | false | 691 |
py
|
# -*- coding:utf-8 -*-
from . import session
from unittest import TestCase
from unittest.mock import MagicMock
from unittest.mock import patch
class TestSessionView(TestCase):
def setUp(self):
self.request = MagicMock()
self.context = MagicMock()
self.view = session.SessionView(self.context, self.request)
@patch("apweb.site.view.session.UserView")
def test_user(self, UserView):
self.assertEqual(self.view.user, UserView.return_value.info_manage)
UserView.assert_called_with(self.request.user, self.request)
def test_info(self):
self.view.__dict__["user"] = "foo"
self.assertEqual(self.view.info["user"], "foo")
|
[
"[email protected]"
] | |
99bc5f810433c2c56027c7cadd2f629bb37f2406
|
7f33d68323240d66e610e5a89efc516915a11a96
|
/manage.py
|
cd6b58f7934e4cd956b6d3cad8298609c08f1d21
|
[
"Apache-2.0"
] |
permissive
|
dbca-wa/observations
|
100df2765ef0f6f62aaf45fc13fbb4af4395f519
|
48b2ad17afa9f0019524cb22a9a0bba74850b87f
|
refs/heads/master
| 2021-05-31T16:29:30.906717 | 2016-04-06T02:42:05 | 2016-04-06T02:42:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 297 |
py
|
#!/usr/bin/env python
import os
import sys
import confy
confy.read_environment_file()
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "incredibus.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
[
"[email protected]"
] | |
c538fb5cbdac74431e65498c5bb4964e8dcd47c5
|
63768dc92cde5515a96d774a32facb461a3bf6e9
|
/jacket/compute/cloud/vm_mode.py
|
7ca85fbd53edb92822a9d5b0385735b37fb28c03
|
[
"Apache-2.0"
] |
permissive
|
ljZM33nd/jacket
|
6fe9156f6f5789e5c24425afa7ce9237c302673d
|
d7ad3147fcb43131098c2a5210847634ff5fb325
|
refs/heads/master
| 2023-04-16T11:02:01.153751 | 2016-11-15T02:48:12 | 2016-11-15T02:48:12 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,212 |
py
|
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Possible vm modes for instances.
Compute instance vm modes represent the host/guest ABI used for the
virtual machine / container. Individual hypervisors may support
multiple different vm modes per host. Available vm modes for a hypervisor
driver may also vary according to the architecture it is running on.
The 'vm_mode' parameter can be set against an instance to
choose what sort of VM to boot.
"""
from jacket.compute import exception
HVM = "hvm" # Native ABI (aka fully virtualized)
XEN = "xen" # Xen 3.0 paravirtualized
UML = "uml" # User Mode Linux paravirtualized
EXE = "exe" # Executables in containers
ALL = [HVM, XEN, UML, EXE]
def get_from_instance(instance):
"""Get the vm mode for an instance
:param instance: instance object to query
:returns: canonicalized vm mode for the instance
"""
mode = instance.vm_mode
return canonicalize(mode)
def is_valid(name):
"""Check if a string is a valid vm mode
:param name: vm mode name to validate
:returns: True if @name is valid
"""
return name in ALL
def canonicalize(mode):
"""Canonicalize the vm mode
:param name: vm mode name to canonicalize
:returns: a canonical vm mode name
"""
if mode is None:
return None
mode = mode.lower()
# For compatibility with pre-Folsom deployments
if mode == "pv":
mode = XEN
if mode == "hv":
mode = HVM
if mode == "baremetal":
mode = HVM
if not is_valid(mode):
raise exception.InvalidVirtualMachineMode(vmmode=mode)
return mode
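if __name__ == "__main__":
    # Added usage sketch (assumes the jacket package is importable; run as a
    # module). Shows how legacy spellings are canonicalized and validated.
    for legacy in ("pv", "HV", "baremetal", "uml"):
        print(legacy, "->", canonicalize(legacy))  # xen, hvm, hvm, uml
    print(is_valid("uml"))  # True
    try:
        canonicalize("bogus")
    except exception.InvalidVirtualMachineMode as err:
        print("rejected:", err)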
|
[
"[email protected]"
] | |
8beeae688c7148ebe2715f0ca83ccfd8f6ce9996
|
6b9084d234c87d7597f97ec95808e13f599bf9a1
|
/data/tracking/sampler/SiamFC/_deprecated/sampler.py
|
db5571b4db36b29aa180d356235ddcd410d4e57c
|
[] |
no_license
|
LitingLin/ubiquitous-happiness
|
4b46234ce0cb29c4d27b00ec5a60d3eeb52c26fc
|
aae2d764e136ca4a36c054212b361dd7e8b22cba
|
refs/heads/main
| 2023-07-13T19:51:32.227633 | 2021-08-03T16:02:03 | 2021-08-03T16:02:03 | 316,664,903 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,797 |
py
|
import numpy as np
from Dataset.SOT.Storage.MemoryMapped.dataset import SingleObjectTrackingDataset_MemoryMapped
from Dataset.MOT.Storage.MemoryMapped.dataset import MultipleObjectTrackingDataset_MemoryMapped
from Dataset.DET.Storage.MemoryMapped.dataset import DetectionDataset_MemoryMapped
from data.tracking.sampler._sampler.sequence.SiamFC.DET import \
do_sampling_in_detection_dataset_image, get_one_random_sample_in_detection_dataset_image
from data.tracking.sampler._sampler.sequence.SiamFC.SOT import \
do_sampling_in_single_object_tracking_dataset_sequence, \
do_negative_sampling_in_single_object_tracking_dataset_sequence, \
get_one_random_sample_in_single_object_tracking_dataset_sequence
from data.tracking.sampler._sampler.sequence.SiamFC.MOT import \
do_sampling_in_multiple_object_tracking_dataset_sequence, \
do_negative_sampling_in_multiple_object_tracking_dataset_sequence, \
get_one_random_sample_in_multiple_object_tracking_dataset_sequence
from data.tracking.sampler.SiamFC.type import SiamesePairSamplingMethod
class SOTTrackingSiameseIterableDatasetSampler:
def __init__(self, datasets, negative_sample_ratio, enforce_fine_positive_sample, sampling_method: SiamesePairSamplingMethod, datasets_sampling_parameters=None, datasets_sampling_weight=None, data_processor=None):
self.datasets = datasets
self.dataset_lengths = [len(dataset) for dataset in datasets]
self.datasets_sampling_weight = datasets_sampling_weight
self.negative_sample_ratio = negative_sample_ratio
self.enforce_fine_positive_sample = enforce_fine_positive_sample
raise NotImplementedError
self.sampling_method = sampling_method
self.data_processor = data_processor
self.datasets_sampling_parameters = datasets_sampling_parameters
self.current_index_of_dataset = None
self.current_index_of_sequence = None
self.current_is_sampling_positive_sample = None
def move_next(self, rng_engine: np.random.Generator):
index_of_dataset = rng_engine.choice(np.arange(len(self.datasets)), p=self.datasets_sampling_weight)
if self.negative_sample_ratio == 0:
is_negative = False
else:
is_negative = rng_engine.random() < self.negative_sample_ratio
index_of_sequence = rng_engine.integers(0, self.dataset_lengths[index_of_dataset])
self.current_index_of_dataset = index_of_dataset
self.current_is_sampling_positive_sample = not is_negative
self.current_index_of_sequence = index_of_sequence
def _pick_random_object_as_negative_sample(self, rng_engine: np.random.Generator):
index_of_dataset = rng_engine.choice(np.arange(len(self.datasets)), p=self.datasets_sampling_weight)
dataset = self.datasets[index_of_dataset]
index_of_sequence = rng_engine.integers(0, len(dataset))
sequence = dataset[index_of_sequence]
if isinstance(dataset, DetectionDataset_MemoryMapped):
data = get_one_random_sample_in_detection_dataset_image(sequence, rng_engine)
elif isinstance(dataset, SingleObjectTrackingDataset_MemoryMapped):
data = get_one_random_sample_in_single_object_tracking_dataset_sequence(sequence, rng_engine)
elif isinstance(dataset, MultipleObjectTrackingDataset_MemoryMapped):
data = get_one_random_sample_in_multiple_object_tracking_dataset_sequence(sequence, rng_engine)
else:
raise NotImplementedError
return data
def do_sampling(self, rng_engine: np.random.Generator):
dataset = self.datasets[self.current_index_of_dataset]
sequence = dataset[self.current_index_of_sequence]
frame_range = 100
if self.datasets_sampling_parameters is not None:
sampling_parameter = self.datasets_sampling_parameters[self.current_index_of_dataset]
if 'frame_range' in sampling_parameter:
frame_range = sampling_parameter['frame_range']
if isinstance(dataset, (SingleObjectTrackingDataset_MemoryMapped, MultipleObjectTrackingDataset_MemoryMapped)):
if sequence.has_fps():
fps = sequence.get_fps()
frame_range = int(round(fps / 30 * frame_range))
if self.current_is_sampling_positive_sample:
if isinstance(dataset, DetectionDataset_MemoryMapped):
z_image, z_bbox = do_sampling_in_detection_dataset_image(sequence, rng_engine)
data = (z_image, z_bbox, z_image, z_bbox, True)
elif isinstance(dataset, (SingleObjectTrackingDataset_MemoryMapped, MultipleObjectTrackingDataset_MemoryMapped)):
if isinstance(dataset, SingleObjectTrackingDataset_MemoryMapped):
sampled_data, is_positive = do_sampling_in_single_object_tracking_dataset_sequence(sequence, frame_range, rng_engine)
else:
sampled_data, is_positive = do_sampling_in_multiple_object_tracking_dataset_sequence(sequence, frame_range, rng_engine)
if is_positive == 0:
data = (sampled_data[0][0], sampled_data[0][1], sampled_data[0][0], sampled_data[0][1], True)
else:
data = (sampled_data[0][0], sampled_data[0][1], sampled_data[1][0], sampled_data[1][1], is_positive == 1)
else:
raise NotImplementedError
else:
if isinstance(dataset, DetectionDataset_MemoryMapped):
z_image, z_bbox = do_sampling_in_detection_dataset_image(sequence, rng_engine)
x_image, x_bbox = self._pick_random_object_as_negative_sample(rng_engine)
data = (z_image, z_bbox, x_image, x_bbox, False)
elif isinstance(dataset, (SingleObjectTrackingDataset_MemoryMapped, MultipleObjectTrackingDataset_MemoryMapped)):
if isinstance(dataset, SingleObjectTrackingDataset_MemoryMapped):
sampled_data = do_negative_sampling_in_single_object_tracking_dataset_sequence(sequence, frame_range, rng_engine)
else:
sampled_data = do_negative_sampling_in_multiple_object_tracking_dataset_sequence(sequence, frame_range, rng_engine)
if len(sampled_data) == 1:
x_image, x_bbox = self._pick_random_object_as_negative_sample(rng_engine)
data = (sampled_data[0][0], sampled_data[0][1], x_image, x_bbox, False)
else:
data = (sampled_data[0][0], sampled_data[0][1], sampled_data[1][0], sampled_data[1][1], False)
else:
raise NotImplementedError
if self.data_processor is not None:
data = self.data_processor(*data)
return data
|
[
"[email protected]"
] | |
bc0c564fc708099ee3a1ee9245efc66093f51371
|
52cb25dca22292fce4d3907cc370098d7a57fcc2
|
/BAEKJOON/스택/1874_스택 수열.py
|
cd390374565cc30f00a17f883e2ac40791b3a1f1
|
[] |
no_license
|
shjang1013/Algorithm
|
c4fc4c52cbbd3b7ecf063c716f600d1dbfc40d1a
|
33f2caa6339afc6fc53ea872691145effbce0309
|
refs/heads/master
| 2022-09-16T12:02:53.146884 | 2022-08-31T16:29:04 | 2022-08-31T16:29:04 | 227,843,135 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 564 |
py
|
# By pushing the numbers 1 through n onto a stack in order and popping them, one particular sequence can be produced.
# [1,2,3,4,5,6,7,8] => [4,3,6,8,7,5,2,1]
import sys
N = int(input())
stack = []
op = []
count = 1
temp = True
for i in range(N):
n = int(sys.stdin.readline())
while count <= n:
stack.append(count)
op.append("+")
count += 1
if stack[-1] == n:
stack.pop()
op.append("-")
else:
temp = False
break
if temp == False:
print("NO")
else:
print('\n'.join(op))
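# Worked example (added for illustration), matching the sample in the header
# comment: for N = 8 and the target sequence 4 3 6 8 7 5 2 1 the loop pushes
# 1,2,3,4 (+ + + +), pops 4 and 3 (- -), pushes 5,6 (+ +), pops 6 (-),
# pushes 7,8 (+ +), then pops 8 7 5 2 1 (- - - - -); the program therefore
# prints + + + + - - + + - + + - - - - -, one operation per line.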
|
[
"[email protected]"
] | |
bc7d8ecba2ea3d08d1b0d03ab497311104f63738
|
ff93e108a358a40d71b426bb9615587dfcab4d03
|
/Python_Basic/9_Class/class_basics_1.py
|
626a48139eb7a1d686767ec3a31ac348d0fbd5a3
|
[] |
no_license
|
soumya9988/Python_Machine_Learning_Basics
|
074ff0e8e55fd925ca50e0f9b56dba76fc93d187
|
3711bc8e618123420985d01304e13051d9fb13e0
|
refs/heads/master
| 2020-03-31T14:31:49.217429 | 2019-11-16T21:55:54 | 2019-11-16T21:55:54 | 152,298,905 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,299 |
py
|
class Menu:
def __init__(self, name, items, start_time, end_time):
self.name = name
self.items = items
self.start_time = start_time
self.end_time = end_time
def __repr__(self):
return '{} menu available from {} to {}'.format(self.name,
self.start_time,
self.end_time)
def calculate_bill(self, purchased_items):
sum_of_items = 0
for item in purchased_items:
if item in self.items:
sum_of_items += self.items[item]
return sum_of_items
class Franchise:
def __init__(self, address, menus):
self.address = address
self.menus = menus
def __repr__(self):
return self.address
def available_menus(self, time):
available = []
for menu in self.menus:
if menu.start_time <= time and \
menu.end_time >= time:
available.append(menu.name)
return available
class Business:
def __init__(self, name, franchises):
self.name = name
self.franchises = franchises
items = {'pancakes': 7.50,
'waffles': 9.00,
'burger': 11.00,
'home fries': 4.50,
'coffee': 1.50,
'espresso': 3.00,
'tea': 1.00,
'mimosa': 10.50,
'orange juice': 3.50}
eb_items = {'salumeria plate': 8.00,
'salad and breadsticks (serves 2, no refills)': 14.00,
'pizza with quattro formaggi': 9.00,
'duck ragu': 17.50,
'mushroom ravioli (vegan)': 13.50,
'coffee': 1.50,
'espresso': 3.00,
}
d_items = {'crostini with eggplant caponata': 13.00,
'ceaser salad': 16.00,
'pizza with quattro formaggi': 11.00,
'duck ragu': 19.50,
'mushroom ravioli (vegan)': 13.50,
'coffee': 2.00,
'espresso': 3.00,
}
k_items = {'chicken nuggets': 6.50,
'fusilli with wild mushrooms': 12.00,
'apple juice': 3.00
}
brunch = Menu('brunch', items, 11.00, 16.00)
early_bird = Menu('early_bird', eb_items, 15.00, 18.00)
dinner = Menu('dinner', d_items, 17.00, 23.00)
kids = Menu('kids', k_items, 11.00, 21.00)
print(brunch)
print(early_bird)
print(dinner)
print(kids)
purchased = ['pancakes', 'home fries', 'coffee']
cost = brunch.calculate_bill(purchased)
print('Cost of brunch purchased: ', cost)
cost_eb = early_bird.calculate_bill(['mushroom ravioli (vegan)', 'salumeria plate'])
print('Cost of early bird purchased: ', cost_eb)
flagship_store = Franchise("1232 West End Road", [brunch, dinner, kids, early_bird])
new_installment = Franchise("12 East Mulberry Street", [brunch, dinner, kids, early_bird])
print('You can choose from the following menus at 12 pm: ', new_installment.available_menus(12.00))
print('You can choose from the following menus at 5 pm: ', new_installment.available_menus(17.00))
arepas_items = {'arepa pabellon': 7.00, 'pernil arepa': 8.50, 'guayanes arepa': 8.00, 'jamon arepa': 7.50}
# Franchise expects a list of Menu objects and Business expects a list of
# Franchise objects; the menu name and opening hours here are illustrative.
arepas_menu = Menu('arepas', arepas_items, 10.00, 20.00)
arepas_place = Franchise("189 Fitzgerald Avenue", [arepas_menu])
arepas_business = Business("Take a' Arepa", [arepas_place])
print(arepas_place)
|
[
"[email protected]"
] | |
9efe909c265f82499d2be6a904c8fd902fed2bcb
|
19236d9e966cf5bafbe5479d613a175211e1dd37
|
/cohesity_management_sdk/models/google_cloud_credentials.py
|
7e3886d7a27110ac94aca21b0b5ecde8f814ff97
|
[
"MIT"
] |
permissive
|
hemanshu-cohesity/management-sdk-python
|
236c44fbd9604809027f8ddd0ae6c36e4e727615
|
07c5adee58810979780679065250d82b4b2cdaab
|
refs/heads/master
| 2020-04-29T23:22:08.909550 | 2019-04-10T02:42:16 | 2019-04-10T02:42:16 | 176,474,523 | 0 | 0 |
NOASSERTION
| 2019-03-19T09:27:14 | 2019-03-19T09:27:12 | null |
UTF-8
|
Python
| false | false | 3,125 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2019 Cohesity Inc.
class GoogleCloudCredentials(object):
"""Implementation of the 'Google Cloud Credentials.' model.
Specifies the cloud credentials to connect to a Google service account.
Attributes:
client_email_address (string): Specifies the client email address used
to access Google Cloud Storage.
client_private_key (string): Specifies the private key used to access
Google Cloud Storage that is generated when the service account is
created.
project_id (string): Specifies the project id of an existing Google
Cloud project to store objects.
tier_type (TierType2Enum): Specifies the storage class of GCP.
GoogleTierType specifies the storage class for Google.
'kGoogleStandard' indicates a tier type of Google properties.
'kGoogleNearline' indicates a tier type of Google properties that
is not accessed frequently. 'kGoogleColdline' indicates a tier
type of Google properties that is rarely accessed.
'kGoogleRegional' indicates a tier type of Google properties that
stores frequently accessed data in the same region.
'kGoogleMultiRegional' indicates a tier type of Google properties
that is frequently accessed ("hot" objects) around the world.
"""
# Create a mapping from Model property names to API property names
_names = {
"client_email_address":'clientEmailAddress',
"client_private_key":'clientPrivateKey',
"project_id":'projectId',
"tier_type":'tierType'
}
def __init__(self,
client_email_address=None,
client_private_key=None,
project_id=None,
tier_type=None):
"""Constructor for the GoogleCloudCredentials class"""
# Initialize members of the class
self.client_email_address = client_email_address
self.client_private_key = client_private_key
self.project_id = project_id
self.tier_type = tier_type
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
client_email_address = dictionary.get('clientEmailAddress')
client_private_key = dictionary.get('clientPrivateKey')
project_id = dictionary.get('projectId')
tier_type = dictionary.get('tierType')
# Return an object of this model
return cls(client_email_address,
client_private_key,
project_id,
tier_type)
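if __name__ == '__main__':
    # Added round-trip sketch (illustrative values only, not part of the
    # Cohesity SDK): build the model from an API-style dictionary whose keys
    # follow the _names mapping above, then read the mapped attributes.
    sample = {
        'clientEmailAddress': 'service-account@example-project.iam.gserviceaccount.com',
        'clientPrivateKey': '-----BEGIN PRIVATE KEY-----...',
        'projectId': 'example-project',
        'tierType': 'kGoogleStandard',
    }
    creds = GoogleCloudCredentials.from_dictionary(sample)
    print(creds.client_email_address, creds.project_id, creds.tier_type)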
|
[
"[email protected]"
] | |
7932c0ccbe52f6dff8961451ec9518ed9b1d0ba0
|
c0d5b7f8e48a26c6ddc63c76c43ab5b397c00028
|
/piccolo/apps/user/piccolo_app.py
|
c01ee635f12d335e6a45650fda81dbfe9fab4925
|
[
"MIT"
] |
permissive
|
aminalaee/piccolo
|
f6c5e5e1c128568f7ccb9ad1dfb4746acedae262
|
af8d2d45294dcd84f4f9b6028752aa45b699ec15
|
refs/heads/master
| 2023-07-14T09:44:04.160116 | 2021-07-11T22:56:27 | 2021-07-11T22:56:27 | 386,398,401 | 0 | 0 |
MIT
| 2021-07-15T19:32:50 | 2021-07-15T19:08:17 | null |
UTF-8
|
Python
| false | false | 729 |
py
|
import os
from piccolo.conf.apps import AppConfig, Command
from .commands.change_password import change_password
from .commands.change_permissions import change_permissions
from .commands.create import create
from .tables import BaseUser
CURRENT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
APP_CONFIG = AppConfig(
app_name="user",
migrations_folder_path=os.path.join(
CURRENT_DIRECTORY, "piccolo_migrations"
),
table_classes=[BaseUser],
migration_dependencies=[],
commands=[
Command(callable=create, aliases=["new"]),
Command(callable=change_password, aliases=["password", "pass"]),
Command(callable=change_permissions, aliases=["perm", "perms"]),
],
)
|
[
"[email protected]"
] | |
db2cdc19635349844c5e850f4b577b0118d4ae0e
|
29a4c1e436bc90deaaf7711e468154597fc379b7
|
/modules/trigonometric/doc/fast_sind.py
|
46fee4678b0eeccbe7f58327e5d8b321d06b825f
|
[
"BSL-1.0"
] |
permissive
|
brycelelbach/nt2
|
31bdde2338ebcaa24bb76f542bd0778a620f8e7c
|
73d7e8dd390fa4c8d251c6451acdae65def70e0b
|
refs/heads/master
| 2021-01-17T12:41:35.021457 | 2011-04-03T17:37:15 | 2011-04-03T17:37:15 | 1,263,345 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,873 |
py
|
[ ## this file was manually modified by jt
{
'functor' : {
'arity' : '1',
'call_types' : [],
'ret_arity' : '0',
'rturn' : {
'default' : 'typename boost::result_of<nt2::meta::floating(T)>::type',
},
'simd_types' : ['real_convert_'],
'special' : ['trigonometric'],
'type_defs' : [],
'types' : ['real_', 'unsigned_int_', 'signed_int_'],
},
'info' : 'manually modified',
'unit' : {
'global_header' : {
'first_stamp' : 'created by jt the 11/02/2011',
'included' : ['#include <nt2/toolbox/trigonometric/include/constants.hpp>', '#include <nt2/include/functions/sind.hpp>'],
'notes' : [],
'stamp' : 'modified by jt the 11/02/2011',
},
'ranges' : {
'default' : [['T(-45)', 'T(45)']],
'unsigned_int_' : [['0', 'T(45)']],
},
'specific_values' : {
'default' : {
'nt2::Zero<T>()' : {'result' : 'nt2::Zero<r_t>()','ulp_thresh' : '0.5',},
'nt2::_45<T>()' : {'result' : 'nt2::Sqrt_2o_2<r_t>()','ulp_thresh' : '0.5',},
},
'real_' : {
'-nt2::_180<T>()' : {'result' : 'nt2::Nan<r_t>()','ulp_thresh' : '0.5',},
'-nt2::_45<T>()' : {'result' : '-nt2::Sqrt_2o_2<r_t>()','ulp_thresh' : '0.5',},
'-nt2::_90<T>()' : {'result' : 'nt2::Nan<r_t>()','ulp_thresh' : '0.5',},
'nt2::Inf<T>()' : {'result' : 'nt2::Nan<r_t>()','ulp_thresh' : '0.5',},
'nt2::Minf<T>()' : {'result' : 'nt2::Nan<r_t>()','ulp_thresh' : '0.5',},
'nt2::Nan<T>()' : {'result' : 'nt2::Nan<r_t>()','ulp_thresh' : '0.5',},
'nt2::Zero<T>()' : {'result' : 'nt2::Zero<r_t>()','ulp_thresh' : '0.5',},
'nt2::_180<T>()' : {'result' : 'nt2::Nan<r_t>()','ulp_thresh' : '0.5',},
'nt2::_45<T>()' : {'result' : 'nt2::Sqrt_2o_2<r_t>()','ulp_thresh' : '0.5',},
'nt2::_90<T>()' : {'result' : 'nt2::Nan<r_t>()','ulp_thresh' : '0.5',},
},
'signed_int_' : {
'-nt2::_45<T>()' : {'result' : '-nt2::Sqrt_2o_2<r_t>()','ulp_thresh' : '0.5',},
'nt2::Zero<T>()' : {'result' : 'nt2::Zero<r_t>()','ulp_thresh' : '0.5',},
'nt2::_45<T>()' : {'result' : 'nt2::Sqrt_2o_2<r_t>()','ulp_thresh' : '0.5',},
},
},
'verif_test' : {
'property_call' : {
'real_' : ['nt2::fast_sind(a0)'],
},
'property_value' : {
'real_' : ['nt2::sind(a0)'],
},
'ulp_thresh' : {
'real_' : ['1.0'],
},
},
},
},
]
|
[
"[email protected]"
] | |
a937f5d7fc87c0d7d50c3d34d25169594f08b310
|
3ef70fe63acaa665e2b163f30f1abd0a592231c1
|
/stackoverflow/venv/lib/python3.6/site-packages/twisted/protocols/haproxy/_v1parser.py
|
b17099f3cc388868573fb479110f29e78e8bce65
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
wistbean/learn_python3_spider
|
14914b63691ac032955ba1adc29ad64976d80e15
|
40861791ec4ed3bbd14b07875af25cc740f76920
|
refs/heads/master
| 2023-08-16T05:42:27.208302 | 2023-03-30T17:03:58 | 2023-03-30T17:03:58 | 179,152,420 | 14,403 | 3,556 |
MIT
| 2022-05-20T14:08:34 | 2019-04-02T20:19:54 |
Python
|
UTF-8
|
Python
| false | false | 4,326 |
py
|
# -*- test-case-name: twisted.protocols.haproxy.test.test_v1parser -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
IProxyParser implementation for version one of the PROXY protocol.
"""
from zope.interface import implementer
from twisted.internet import address
from ._exceptions import (
convertError, InvalidProxyHeader, InvalidNetworkProtocol,
MissingAddressData
)
from . import _info
from . import _interfaces
@implementer(_interfaces.IProxyParser)
class V1Parser(object):
"""
PROXY protocol version one header parser.
Version one of the PROXY protocol is a human readable format represented
by a single, newline delimited binary string that contains all of the
relevant source and destination data.
"""
PROXYSTR = b'PROXY'
UNKNOWN_PROTO = b'UNKNOWN'
TCP4_PROTO = b'TCP4'
TCP6_PROTO = b'TCP6'
ALLOWED_NET_PROTOS = (
TCP4_PROTO,
TCP6_PROTO,
UNKNOWN_PROTO,
)
NEWLINE = b'\r\n'
def __init__(self):
self.buffer = b''
def feed(self, data):
"""
Consume a chunk of data and attempt to parse it.
@param data: A bytestring.
@type data: L{bytes}
@return: A two-tuple containing, in order, a
L{_interfaces.IProxyInfo} and any bytes fed to the
parser that followed the end of the header. Both of these values
are None until a complete header is parsed.
@raises InvalidProxyHeader: If the bytes fed to the parser create an
invalid PROXY header.
"""
self.buffer += data
if len(self.buffer) > 107 and self.NEWLINE not in self.buffer:
raise InvalidProxyHeader()
lines = (self.buffer).split(self.NEWLINE, 1)
if not len(lines) > 1:
return (None, None)
self.buffer = b''
remaining = lines.pop()
header = lines.pop()
info = self.parse(header)
return (info, remaining)
@classmethod
def parse(cls, line):
"""
Parse a bytestring as a full PROXY protocol header line.
@param line: A bytestring that represents a valid HAProxy PROXY
protocol header line.
@type line: bytes
@return: A L{_interfaces.IProxyInfo} containing the parsed data.
@raises InvalidProxyHeader: If the bytestring does not represent a
valid PROXY header.
@raises InvalidNetworkProtocol: When no protocol can be parsed or is
not one of the allowed values.
@raises MissingAddressData: When the protocol is TCP* but the header
does not contain a complete set of addresses and ports.
"""
originalLine = line
proxyStr = None
networkProtocol = None
sourceAddr = None
sourcePort = None
destAddr = None
destPort = None
with convertError(ValueError, InvalidProxyHeader):
proxyStr, line = line.split(b' ', 1)
if proxyStr != cls.PROXYSTR:
raise InvalidProxyHeader()
with convertError(ValueError, InvalidNetworkProtocol):
networkProtocol, line = line.split(b' ', 1)
if networkProtocol not in cls.ALLOWED_NET_PROTOS:
raise InvalidNetworkProtocol()
if networkProtocol == cls.UNKNOWN_PROTO:
return _info.ProxyInfo(originalLine, None, None)
with convertError(ValueError, MissingAddressData):
sourceAddr, line = line.split(b' ', 1)
with convertError(ValueError, MissingAddressData):
destAddr, line = line.split(b' ', 1)
with convertError(ValueError, MissingAddressData):
sourcePort, line = line.split(b' ', 1)
with convertError(ValueError, MissingAddressData):
destPort = line.split(b' ')[0]
if networkProtocol == cls.TCP4_PROTO:
return _info.ProxyInfo(
originalLine,
address.IPv4Address('TCP', sourceAddr, int(sourcePort)),
address.IPv4Address('TCP', destAddr, int(destPort)),
)
return _info.ProxyInfo(
originalLine,
address.IPv6Address('TCP', sourceAddr, int(sourcePort)),
address.IPv6Address('TCP', destAddr, int(destPort)),
)
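if __name__ == '__main__':
    # Added usage sketch (run as a module, e.g.
    # python -m twisted.protocols.haproxy._v1parser, because of the relative
    # imports above). A version-one header is a single CRLF-terminated ASCII
    # line; feed() buffers chunks until b'\r\n' arrives and then returns the
    # parsed info plus any trailing bytes. The source/destination attribute
    # names below are taken from the IProxyInfo interface.
    parser = V1Parser()
    info, rest = parser.feed(
        b'PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\r\nGET / HTTP/1.1\r\n')
    print(info.source, info.destination, rest)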
|
[
"[email protected]"
] | |
60bd09afebd2a97319aa608a6ee44a7fd37b29a0
|
53bd30eee243a73bf19921739454a177a8bab127
|
/excapp/migrations/0002_datahistory.py
|
31da2844c89998a546d9f612985ee6253eeef234
|
[] |
no_license
|
kirigaikabuto/bck
|
61697fbe2edd7e4f5b866628a368693a05f6dad9
|
2b17f8c5d438248d73aaf9dbebd3d5dea827a42d
|
refs/heads/master
| 2021-02-04T01:20:06.470527 | 2021-01-04T15:24:13 | 2021-01-04T15:24:13 | 243,593,240 | 0 | 0 | null | 2020-06-06T01:30:15 | 2020-02-27T18:55:15 |
Python
|
UTF-8
|
Python
| false | false | 745 |
py
|
# Generated by Django 2.2.10 on 2020-11-24 18:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('excapp', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='DataHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('child', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='child', to='excapp.Data')),
('parent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parent', to='excapp.Data')),
],
),
]
|
[
"[email protected]"
] | |
f9b7ee977762ae5cc9961537a33f3dde790fefac
|
d24a6e0be809ae3af8bc8daa6dacfc1789d38a84
|
/ABC/ABC251-300/ABC261/B.py
|
82b39fd3459c8526f81da79d543af2d50a7983b2
|
[] |
no_license
|
k-harada/AtCoder
|
5d8004ce41c5fc6ad6ef90480ef847eaddeea179
|
02b0a6c92a05c6858b87cb22623ce877c1039f8f
|
refs/heads/master
| 2023-08-21T18:55:53.644331 | 2023-08-05T14:21:25 | 2023-08-05T14:21:25 | 184,904,794 | 9 | 0 | null | 2023-05-22T16:29:18 | 2019-05-04T14:24:18 |
Python
|
UTF-8
|
Python
| false | false | 547 |
py
|
def solve(n, a):
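    # a[i][j] holds the recorded result of player i against player j
    # ('W' win, 'L' loss, 'D' draw).  The table is consistent only if every
    # pair (i, j) is either a mutual 'D' or a complementary 'W'/'L'.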
for i in range(n - 1):
for j in range(i + 1, n):
ij = a[i][j]
ji = a[j][i]
if ij == ji == "D":
continue
elif ij == "W" and ji == "L":
continue
elif ij == "L" and ji == "W":
continue
else:
return "incorrect"
return "correct"
def main():
n = int(input())
a = [list(input()) for _ in range(n)]
res = solve(n, a)
print(res)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
b19e296e14053d689e9995c0c3db2c31aae5ef6f
|
1c3fb3c990bd07259c1701c709a28ec45cd0c748
|
/services/core-api/app/api/exports/response_models.py
|
487ed88be6d03eb97e804a09afb57dc551a1dd8e
|
[
"Apache-2.0"
] |
permissive
|
usingtechnology/mds
|
f973106232f73f773bb4bb57737094dd32b1bd3c
|
c9c542f729df21511ee46e184ea752bad0b7d10c
|
refs/heads/master
| 2022-04-13T07:56:59.060216 | 2020-03-21T22:43:05 | 2020-03-21T22:43:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,537 |
py
|
from app.extensions import api
from flask_restplus import fields
from app.api.mines.response_models import MINE_TENURE_TYPE_CODE_MODEL, MINE_COMMODITY_CODE_MODEL, MINE_DISTURBANCE_CODE_MODEL, MINE_STATUS_CODE_MODEL, MINE_REGION_OPTION, MINE_REPORT_DEFINITION_CATEGORIES, MINE_REPORT_DEFINITION_MODEL, MINE_REPORT_SUBMISSION_STATUS
from app.api.mines.permits.response_models import PERMIT_STATUS_CODE_MODEL
from app.api.compliance.response_models import COMPLIANCE_ARTICLE_MODEL
from app.api.incidents.response_models import MINE_INCIDENT_CATEGORY_MODEL, MINE_INCIDENT_DETERMINATION_TYPE_MODEL, MINE_INCIDENT_STATUS_CODE_MODEL, MINE_INCIDENT_DOCUMENT_TYPE_CODE_MODEL, MINE_INCIDENT_FOLLOWUP_INVESTIGATION_TYPE_MODEL
from app.api.parties.response_models import MINE_PARTY_APPT_TYPE_MODEL, SUB_DIVISION_CODE_MODEL
from app.api.variances.response_models import VARIANCE_APPLICATION_STATUS_CODE, VARIANCE_DOCUMENT_CATEGORY_CODE
from app.api.now_applications.response_models import NOW_APPLICATION_DOCUMENT_TYPE_MODEL, NOW_APPLICATION_REVIEW_TYPES, NOW_APPLICATION_TYPES, UNIT_TYPES, NOW_ACTIVITY_TYPES, NOW_APPLICATION_STATUS_CODES, UNDERGROUND_EXPLORATION_TYPES, NOW_APPLICATION_PERMIT_TYPES, NOW_APPLICATION_REVIEW_TYPES, APPLICATION_PROGRESS_STATUS_CODES
STATIC_CONTENT_MODEL = api.model(
'StaticContentModel', {
'mineDisturbanceOptions':
fields.List(fields.Nested(MINE_DISTURBANCE_CODE_MODEL), attribute='MineDisturbanceCode'),
'mineCommodityOptions':
fields.List(fields.Nested(MINE_COMMODITY_CODE_MODEL), attribute='MineCommodityCode'),
'mineStatusOptions':
fields.List(fields.Nested(MINE_STATUS_CODE_MODEL), attribute='MineStatusXref'),
'mineRegionOptions':
fields.List(fields.Nested(MINE_REGION_OPTION), attribute='MineRegionCode'),
'mineTenureTypes':
fields.List(fields.Nested(MINE_TENURE_TYPE_CODE_MODEL), attribute='MineTenureTypeCode'),
'permitStatusCodes':
fields.List(fields.Nested(PERMIT_STATUS_CODE_MODEL), attribute='PermitStatusCode'),
'incidentDocumentTypeOptions':
fields.List(
fields.Nested(MINE_INCIDENT_DOCUMENT_TYPE_CODE_MODEL),
attribute='MineIncidentDocumentTypeCode'),
'incidentFollowupActionOptions':
fields.List(
fields.Nested(MINE_INCIDENT_FOLLOWUP_INVESTIGATION_TYPE_MODEL),
attribute='MineIncidentFollowupInvestigationType'),
'incidentDeterminationOptions':
fields.List(
fields.Nested(MINE_INCIDENT_DETERMINATION_TYPE_MODEL),
attribute='MineIncidentDeterminationType'),
'incidentStatusCodeOptions':
fields.List(
fields.Nested(MINE_INCIDENT_STATUS_CODE_MODEL), attribute='MineIncidentStatusCode'),
'incidentCategoryCodeOptions':
fields.List(fields.Nested(MINE_INCIDENT_CATEGORY_MODEL), attribute='MineIncidentCategory'),
'provinceOptions':
fields.List(fields.Nested(SUB_DIVISION_CODE_MODEL), attribute='SubDivisionCode'),
'complianceCodes':
fields.List(fields.Nested(COMPLIANCE_ARTICLE_MODEL), attribute='ComplianceArticle'),
'varianceStatusOptions':
fields.List(
fields.Nested(VARIANCE_APPLICATION_STATUS_CODE),
attribute='VarianceApplicationStatusCode'),
'varianceDocumentCategoryOptions':
fields.List(
fields.Nested(VARIANCE_DOCUMENT_CATEGORY_CODE),
attribute='VarianceDocumentCategoryCode'),
'mineReportDefinitionOptions':
fields.List(fields.Nested(MINE_REPORT_DEFINITION_MODEL), attribute='MineReportDefinition'),
'mineReportStatusOptions':
fields.List(
fields.Nested(MINE_REPORT_SUBMISSION_STATUS),
attribute='MineReportSubmissionStatusCode'),
'mineReportCategoryOptions':
fields.List(
fields.Nested(MINE_REPORT_DEFINITION_CATEGORIES), attribute='MineReportCategory'),
'noticeOfWorkActivityTypeOptions':
fields.List(fields.Nested(NOW_ACTIVITY_TYPES), attribute='ActivityType'),
'noticeOfWorkUnitTypeOptions':
fields.List(fields.Nested(UNIT_TYPES), attribute='UnitType'),
'noticeOfWorkApplicationTypeOptions':
fields.List(fields.Nested(NOW_APPLICATION_TYPES), attribute='NOWApplicationType'),
'noticeOfWorkApplicationStatusOptions':
fields.List(fields.Nested(NOW_APPLICATION_STATUS_CODES), attribute='NOWApplicationStatus'),
'noticeOfWorkApplicationDocumentTypeOptions':
fields.List(
fields.Nested(NOW_APPLICATION_DOCUMENT_TYPE_MODEL),
attribute='NOWApplicationDocumentType'),
'noticeOfWorkUndergroundExplorationTypeOptions':
fields.List(
fields.Nested(UNDERGROUND_EXPLORATION_TYPES), attribute='UndergroundExplorationType'),
'noticeOfWorkApplicationProgressStatusCodeOptions':
fields.List(
fields.Nested(APPLICATION_PROGRESS_STATUS_CODES),
attribute='NOWApplicationProgressStatus'),
'noticeOfWorkApplicationPermitTypeOptions':
fields.List(
fields.Nested(NOW_APPLICATION_PERMIT_TYPES), attribute='NOWApplicationPermitType'),
'noticeOfWorkApplicationReviewOptions':
fields.List(
fields.Nested(NOW_APPLICATION_REVIEW_TYPES), attribute='NOWApplicationReviewType'),
'partyRelationshipTypes':
fields.List(
fields.Nested(MINE_PARTY_APPT_TYPE_MODEL), attribute='MinePartyAppointmentType')
})
|
[
"[email protected]"
] | |
3c46e5cd8e2c42fe17964f81fddb273c2e6424fc
|
debc9ddbb577ed68e907cfdb85e0f2c801fdc8af
|
/rx/linq/observable/onerrorresumenext.py
|
490579fe1b84b19f1004b2c54d19d968ec1a33e2
|
[
"Apache-2.0"
] |
permissive
|
pstiasny/RxPY
|
370eefc733de1241c1eac0dcdf8fa10780f71072
|
2bcf25ecbce1fbcd49d119bd73375572fbf9df5a
|
refs/heads/master
| 2021-01-09T20:01:18.664034 | 2014-07-27T22:19:39 | 2014-07-27T22:19:39 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,487 |
py
|
from six import add_metaclass
from rx.observable import Observable
from rx.anonymousobservable import AnonymousObservable
from rx.disposables import CompositeDisposable, SingleAssignmentDisposable, \
SerialDisposable
from rx.concurrency import immediate_scheduler
from rx.internal import ExtensionMethod
@add_metaclass(ExtensionMethod)
class ObservableOnErrorResumeNext(Observable):
def __init__(self, subscribe):
self.on_error_resume_next = self.__on_error_resume_next
def __on_error_resume_next(self, second):
"""Continues an observable sequence that is terminated normally or by
an exception with the next observable sequence.
Keyword arguments:
second -- Second observable sequence used to produce results after the first sequence terminates.
Returns an observable sequence that concatenates the first and second sequence, even if the first sequence terminates exceptionally.
"""
if not second:
raise Exception('Second observable is required')
return Observable.on_error_resume_next([self, second])
@classmethod
def on_error_resume_next(cls, *args):
"""Continues an observable sequence that is terminated normally or by
an exception with the next observable sequence.
1 - res = Observable.on_error_resume_next(xs, ys, zs)
2 - res = Observable.on_error_resume_next([xs, ys, zs])
Returns an observable sequence that concatenates the source sequences,
even if a sequence terminates exceptionally.
"""
if args and isinstance(args[0], list):
sources = args[0]
else:
sources = list(args)
def subscribe(observer):
subscription = SerialDisposable()
pos = [0]
def action(this, state=None):
if pos[0] < len(sources):
current = sources[pos[0]]
pos[0] += 1
d = SingleAssignmentDisposable()
subscription.disposable = d
d.disposable = current.subscribe(observer.on_next, lambda ex: this(), lambda: this())
else:
observer.on_completed()
cancelable = immediate_scheduler.schedule_recursive(action)
return CompositeDisposable(subscription, cancelable)
return AnonymousObservable(subscribe)
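# --- Added usage sketch (illustrative) ---
# The classmethod form chains any number of sequences: each source is
# subscribed when the previous one terminates, whether normally or with an
# error, and the error itself is swallowed rather than propagated.
#
#   Observable.on_error_resume_next(failing_source, fallback_source) \
#       .subscribe(print)
#
# `failing_source` and `fallback_source` are placeholder names for any two
# observables; values from both are delivered in order and the observer only
# sees a single final on_completed.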
|
[
"[email protected]"
] | |
b6ebbb47ce8ed3feb705ac92c37cae8fce6f828d
|
842184bc3c73bef3dd5c2ab523eb33f34b7809ea
|
/ledger_processor/test_ledger_processor.py
|
0f1549e715a92ae4a28e218d752a9cddb2608f8c
|
[] |
no_license
|
fawkesley/h-work-simulation
|
1cb51515fcb57d1f12c13178b049c4e7f8d1702d
|
3f150d773a73a2dc2646e7b9c102f298e26cb936
|
refs/heads/master
| 2021-05-29T22:12:30.148431 | 2015-06-12T16:53:32 | 2015-06-13T07:51:07 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,060 |
py
|
import datetime
from decimal import Decimal
from os.path import dirname, join as pjoin
from nose.tools import assert_equal, assert_raises
from .ledger_processor import LedgerProcessor
EXAMPLE_LEDGER_FILENAME = pjoin(dirname(__file__), 'example_ledger.csv')
TEST_CASES = [
('john', datetime.date(2015, 1, 16), Decimal('0.00')),
('mary', datetime.date(2015, 1, 16), Decimal('0.00')),
('supermarket', datetime.date(2015, 1, 16), Decimal('0.00')),
('insurance', datetime.date(2015, 1, 16), Decimal('0.00')),
('mary', datetime.date(2015, 1, 17), Decimal('125.00')),
('john', datetime.date(2015, 1, 17), Decimal('-125.00')),
('john', datetime.date(2015, 1, 18), Decimal('-145.00')),
('supermarket', datetime.date(2015, 1, 18), Decimal('20.00')),
('mary', datetime.date(2015, 1, 18), Decimal('25.00')),
('insurance', datetime.date(2015, 1, 18), Decimal('100.00')),
]
def test_get_balance():
for account, test_date, expected_balance in TEST_CASES:
yield _assert_balance_equal, account, test_date, expected_balance
def _assert_balance_equal(account, test_date, expected_balance):
with open(EXAMPLE_LEDGER_FILENAME, 'r') as f:
ledger = LedgerProcessor(f)
got_balance = ledger.get_balance(account, test_date)
assert_equal(expected_balance, got_balance)
def test_get_all_balances():
with open(EXAMPLE_LEDGER_FILENAME, 'r') as f:
ledger = LedgerProcessor(f)
final_balances = ledger.get_all_balances(datetime.date(2015, 1, 18))
expected_final_balances = {
'john': Decimal('-145.00'),
'mary': Decimal('25.00'),
'supermarket': Decimal('20.00'),
'insurance': Decimal('100.00'),
}
assert_equal(expected_final_balances, final_balances)
def test_ledger_cant_be_used_twice():
with open(EXAMPLE_LEDGER_FILENAME, 'r') as f:
ledger = LedgerProcessor(f)
def use_ledger():
ledger.get_all_balances(datetime.date(2015, 1, 18))
use_ledger()
assert_raises(RuntimeError, use_ledger)
|
[
"[email protected]"
] | |
4b8524cc460faabc41efc6e9ca0584712bb5bfd6
|
ab69c2e3e4ec895fc533a4d37768aab517f86722
|
/tests/structures/test_comparisons.py
|
995b3acc05d605970a8217e4e74851e623881818
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
pranavmodx/batavia
|
9cf7d7528cb88b16d5b33b64481281b60e84cbec
|
084d78eb553f21c787009e1141638e810fcc654f
|
refs/heads/master
| 2020-08-07T19:08:36.105839 | 2019-10-08T06:32:23 | 2019-10-08T06:32:23 | 213,560,529 | 1 | 0 |
NOASSERTION
| 2019-10-08T06:01:52 | 2019-10-08T06:01:50 | null |
UTF-8
|
Python
| false | false | 5,319 |
py
|
from ..utils import TranspileTestCase
class ComparisonTests(TranspileTestCase):
def test_is(self):
self.assertCodeExecution("""
x = 1
if x is 1:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 1
if x is 5:
print('Incorrect')
else:
print('Correct')
print('Done.')
""")
self.assertCodeExecution("""
x = None
if x is None:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 1
if x is None:
print('Incorrect')
else:
print('Correct')
print('Done.')
""")
def test_is_not(self):
self.assertCodeExecution("""
x = 1
if x is not 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 1
if x is not 1:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 1
if x is not None:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = None
if x is not None:
print('Incorrect')
else:
print('Correct')
print('Done.')
""")
def test_lt(self):
self.assertCodeExecution("""
x = 1
if x < 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 5
if x < 5:
print('Incorrect')
else:
print('Correct')
print('Done.')
""")
self.assertCodeExecution("""
x = 10
if x < 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
def test_le(self):
self.assertCodeExecution("""
x = 1
if x <= 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 5
if x <= 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 10
if x <= 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
def test_gt(self):
self.assertCodeExecution("""
x = 10
if x > 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 5
if x > 5:
print('Incorrect')
else:
print('Correct')
print('Done.')
""")
self.assertCodeExecution("""
x = 1
if x > 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
def test_ge(self):
self.assertCodeExecution("""
x = 10
if x >= 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 5
if x >= 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 1
if x >= 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
def test_eq(self):
self.assertCodeExecution("""
x = 10
if x == 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
self.assertCodeExecution("""
x = 5
if x == 5:
print('Correct')
else:
print('Incorrect')
print('Done.')
""")
    def test_ne(self):
        self.assertCodeExecution("""
            x = 5
            if x != 5:
                print('Incorrect')
            else:
                print('Correct')
            print('Done.')
            """)
        self.assertCodeExecution("""
            x = 10
            if x != 5:
                print('Correct')
            else:
                print('Incorrect')
            print('Done.')
            """)
|
[
"[email protected]"
] | |
4d73f1009f9545a495de388d2b5332138d8fc0d7
|
237162607427106ae9564670d47427a62356861f
|
/users/migrations/0040_auto_20190426_1040.py
|
477aac69c7a6db31f52e331f91b20015a89d3272
|
[] |
no_license
|
pitipund/basecore
|
8648c1f4fa37b6e6075fd710ca422fe159ba930e
|
a0c20cec1e17dd0eb6abcaaa7d2623e38b60318b
|
refs/heads/master
| 2020-09-13T20:16:02.622903 | 2019-11-20T09:07:15 | 2019-11-20T09:07:15 | 221,885,342 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 524 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2019-04-26 10:40
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('users', '0039_applicationdefaultrole'),
]
operations = [
migrations.AlterModelOptions(
name='applicationdefaultrole',
options={'ordering': ('id',), 'verbose_name': 'Application Default Role', 'verbose_name_plural': 'Application Default Roles'},
),
]
|
[
"[email protected]"
] | |
f8fd4511a108b8fa1fb60b90cb489e7232eb676d
|
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
|
/sdBs/AllRun/galex_j032139.63+472718.83/sdB_galex_j032139.63+472718.83_coadd.py
|
7700527b519c981826539b80b5486dc86e5c9e84
|
[] |
no_license
|
tboudreaux/SummerSTScICode
|
73b2e5839b10c0bf733808f4316d34be91c5a3bd
|
4dd1ffbb09e0a599257d21872f9d62b5420028b0
|
refs/heads/master
| 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 489 |
py
|
from gPhoton.gMap import gMap
def main():
gMap(band="NUV", skypos=[50.415125,47.455231], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_galex_j032139.63+472718.83/sdB_galex_j032139.63+472718.83_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_galex_j032139.63+472718.83/sdB_galex_j032139.63+472718.83_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
a7c12c0c81879fc2ae0d9f7d163beeef16b99619
|
4b70a23e74a332c54e70fe33c9b0fe79bb328d85
|
/WGB/tests.py
|
150266ac3a772eb5520f7750260a12777f21311c
|
[] |
no_license
|
tevawolf/wgb
|
3b095897cbdc9b71c4b233f6b755f65f2693d582
|
f30be8575b03f24bf797b305e34b7fda866fa0c0
|
refs/heads/master
| 2022-12-10T23:18:04.175394 | 2021-01-29T06:40:01 | 2021-01-29T06:40:01 | 159,421,804 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 400 |
py
|
from django.test import TestCase
from . import models
class UserAccountTests(TestCase):
def test_blank_icon(self):
account = models.UserAccount()
account.username = 'test'
account.password = 'test'
account.nickname = 'test'
account.save()
saved = models.UserAccount.objects.get(username='test')
self.assertEqual(saved.username, 'test')
|
[
"[email protected]"
] | |
700fa75fb3bd427c2ace99115edf7c741cc1a10c
|
9449368b4a4100f1ef6dd0f4a845faad6f1161a4
|
/models/Qaw_reactnet_18_bf.py
|
658a6b782cca02444f3726bafd5009b17e234335
|
[
"MIT"
] |
permissive
|
TrendingTechnology/BNN_NoBN
|
b6a770fb176a9881d22ccea20381084b4abc0bcc
|
d2777845d04449cabfcfc5ce72738e1e6287f633
|
refs/heads/main
| 2023-06-17T13:38:26.296326 | 2021-04-21T22:28:49 | 2021-04-21T22:28:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,526 |
py
|
'''
React-birealnet-18(modified from resnet)
BN setting: remove all BatchNorm layers
Conv setting: replace conv2d with ScaledstdConv2d (add alpha beta each blocks)
Binary setting: only activation are binarized
'''
import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
import torch.nn.functional as F
from layers import *
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return ScaledStdConv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
def conv1x1(in_planes, out_planes, stride=1):
"""1x1 convolution"""
return ScaledStdConv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
def binaryconv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return HardBinaryScaledStdConv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1)
def binaryconv1x1(in_planes, out_planes, stride=1):
"""1x1 convolution"""
return HardBinaryScaledStdConv2d(in_planes, out_planes, kernel_size=1, stride=stride, padding=0)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, alpha, beta, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.alpha = alpha
self.beta = beta
self.move0 = LearnableBias(inplanes)
self.binary_activation = BinaryActivation()
self.binary_conv = binaryconv3x3(inplanes, planes, stride=stride)
self.move1 = LearnableBias(planes)
self.prelu = nn.PReLU(planes)
self.move2 = LearnableBias(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
x_in = x*self.beta
out = self.move0(x_in)
out = self.binary_activation(out)
out = self.binary_conv(out)
if self.downsample is not None:
residual = self.downsample(x_in)
out = out*self.alpha + residual
out = self.move1(out)
out = self.prelu(out)
out = self.move2(out)
return out
class BiRealNet(nn.Module):
def __init__(self, block, layers, imagenet=True, alpha=0.2, num_classes=1000):
super(BiRealNet, self).__init__()
self.inplanes = 64
if imagenet:
self.conv1 = ScaledStdConv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
else:
self.conv1 = ScaledStdConv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
self.maxpool = nn.Identity()
expected_var = 1.0
self.layer1, expected_var = self._make_layer(block, 64, layers[0], alpha, expected_var)
self.layer2, expected_var = self._make_layer(block, 128, layers[1], alpha, expected_var, stride=2)
self.layer3, expected_var = self._make_layer(block, 256, layers[2], alpha, expected_var, stride=2)
self.layer4, expected_var = self._make_layer(block, 512, layers[3], alpha, expected_var, stride=2)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(512 * block.expansion, num_classes)
def _make_layer(self, block, planes, blocks, alpha, expected_var, stride=1):
beta = 1. / expected_var ** 0.5
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.AvgPool2d(kernel_size=2, stride=stride),
binaryconv1x1(self.inplanes, planes * block.expansion)
)
# Reset expected var at a transition block
expected_var = 1.0
layers = []
layers.append(block(self.inplanes, planes, alpha, beta, stride, downsample))
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
beta = 1. / expected_var ** 0.5
layers.append(block(self.inplanes, planes, alpha, beta))
expected_var += alpha ** 2
return nn.Sequential(*layers), expected_var
def forward(self, x):
x = self.conv1(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
def birealnet18(pretrained=False, **kwargs):
"""Constructs a BiRealNet-18 model. """
model = BiRealNet(BasicBlock, [4, 4, 4, 4], **kwargs)
return model
|
[
"[email protected]"
] | |
4d4dfa1fce2d0ec301b8527dca38e03ba0e4b365
|
e371a21cc31c0616da346e386fea411f39dd0f7b
|
/LAB04/02-CloudAlbum-Chalice/cloudalbum/chalicelib/config.py
|
525345eb14cc26298fa3b523b0b550141477e306
|
[
"MIT"
] |
permissive
|
aws-kr-tnc/moving-to-serverless-renew
|
c0152763de822cea64a862cd395f4f940d2e4e03
|
312248c689a19ea9b589025c82f880593fc70f82
|
refs/heads/master
| 2023-03-21T19:59:23.717295 | 2022-03-12T15:38:59 | 2022-03-12T15:38:59 | 199,081,822 | 6 | 4 |
MIT
| 2023-03-07T10:02:25 | 2019-07-26T21:26:02 |
Python
|
UTF-8
|
Python
| false | false | 1,530 |
py
|
"""
    cloudalbum/chalicelib/config.py
~~~~~~~~~~~~~~~~~~~~~~~
Configurations for application.
:description: CloudAlbum is a fully featured sample application for 'Moving to AWS serverless' training course
:copyright: © 2019 written by Dayoungle Jun, Sungshik Jou.
:license: MIT, see LICENSE for more details.
"""
import boto3
from chalice import CORSConfig
from aws_parameter_store import AwsParameterStore
def get_param_path(param_path):
"""
Retrieve all key:values in the Parameter Store.
:param param_path:
:return:
"""
region = boto3.session.Session().region_name
store = AwsParameterStore(region)
return store.get_parameters_dict(param_path)
# store configuration values for Cloudalbum
conf = get_param_path('/cloudalbum/')
def get_param(param_name):
"""
This function reads a secure parameter from AWS' SSM service.
The request must be passed a valid parameter name, as well as
temporary credentials which can be used to access the parameter.
The parameter's value is returned.
"""
# Create the SSM Client
ssm = boto3.client('ssm')
# Get the requested parameter
response = ssm.get_parameters(
Names=[param_name, ], WithDecryption=True
)
# Store the credentials in a variable
result = response['Parameters'][0]['Value']
return result
cors_config = CORSConfig(
allow_origin='*',
allow_headers=['*'],
max_age=600,
expose_headers=['X-Special-Header'],
allow_credentials=True
)
|
[
"[email protected]"
] | |
eba0648acc9316ce39061499fa08bb07bd36bf3e
|
55c250525bd7198ac905b1f2f86d16a44f73e03a
|
/Python/Scripts/pyinstaller/PyInstaller/hooks/hook-PyQt5.QtQuickWidgets.py
|
5bfdb0b29bff0c35d055c5b3a918351177aeea00
|
[] |
no_license
|
NateWeiler/Resources
|
213d18ba86f7cc9d845741b8571b9e2c2c6be916
|
bd4a8a82a3e83a381c97d19e5df42cbababfc66c
|
refs/heads/master
| 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null |
UTF-8
|
Python
| false | false | 129 |
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:46db77cbf463b412fb237dd8420a2e12c39b4b5c5fd0cc8d34382ca45cfc9ae0
size 1992
|
[
"[email protected]"
] | |
6301edb7062fa45ed01d04ba326e978ab1a9c163
|
6fcfb638fa725b6d21083ec54e3609fc1b287d9e
|
/python/n1nj4sec_pupy/pupy-master/pupy/modules/screenshot.py
|
1a3055e23702c2b625f5306a537f0e3d8a04c751
|
[] |
no_license
|
LiuFang816/SALSTM_py_data
|
6db258e51858aeff14af38898fef715b46980ac1
|
d494b3041069d377d6a7a9c296a14334f2fa5acc
|
refs/heads/master
| 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 |
Python
|
UTF-8
|
Python
| false | false | 3,976 |
py
|
# -*- coding: utf-8 -*-
# --------------------------------------------------------------
# Copyright (c) 2015, Nicolas VERDIER ([email protected]) All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE
# --------------------------------------------------------------
from pupylib.PupyModule import *
from os import path
import time
import datetime
import subprocess
__class_name__="screenshoter"
@config(cat="gather")
class screenshoter(PupyModule):
""" take a screenshot :) """
dependencies = ['mss', 'screenshot']
def init_argparse(self):
self.arg_parser = PupyArgumentParser(prog='screenshot', description=self.__doc__)
self.arg_parser.add_argument('-e', '--enum', action='store_true', help='enumerate screen')
self.arg_parser.add_argument('-s', '--screen', type=int, default=None, help='take a screenshot on a specific screen (default all screen on one screenshot)')
self.arg_parser.add_argument('-v', '--view', action='store_true', help='directly open the default image viewer on the screenshot for preview')
def run(self, args):
rscreenshot = self.client.conn.modules['screenshot']
if args.enum:
self.rawlog('{:>2} {:>9} {:>9}\n'.format('IDX', 'SIZE', 'LEFT'))
for i, screen in enumerate(rscreenshot.screens()):
if not (screen['width'] and screen['height']):
continue
self.rawlog('{:>2}: {:>9} {:>9}\n'.format(
i,
'{}x{}'.format(screen['width'], screen['height']),
'({}x{})'.format(screen['top'], screen['left'])))
return
screenshots, error = rscreenshot.screenshot(args.screen)
if not screenshots:
self.error(error)
else:
self.success('number of monitor detected: %s' % str(len(screenshots)))
for screenshot in screenshots:
filepath = path.join("data","screenshots","scr_"+self.client.short_name()+"_"+str(datetime.datetime.now()).replace(" ","_").replace(":","-")+".png")
with open(filepath, 'w') as out:
out.write(screenshot)
# sleep used to be sure the file name will be different between 2 differents screenshots
time.sleep(1)
self.success(filepath)
# if args.view:
# viewer = config.get('default_viewers', 'image_viewer')
# subprocess.Popen([viewer, output])
|
[
"[email protected]"
] | |
a8639e979db7d895673d5f6b9e4d845b351e3782
|
dac57de9c28700ebacc25331d5ff04dec129b74b
|
/MxOnline/users/adminx.py
|
59f3b2e3d6b9d7bc1ab58c529d848aaef9f1bd53
|
[] |
no_license
|
zmm064/Django-
|
08144522ef9afcc3d85c11faa848554282fc6fcd
|
1f8836ebb4902a738efc6c626ab10aa91fdde720
|
refs/heads/master
| 2021-08-09T03:00:01.049464 | 2017-11-12T01:52:34 | 2017-11-12T01:52:34 | 110,396,352 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 931 |
py
|
import xadmin
from xadmin import views
from .models import EmailVerifyRecord, Banner
class BaseSetting:
enable_themes = True
use_bootswatch = True
class GlobalSettings:
site_title = "慕学后台管理系统"
site_footer = "慕学在线网"
menu_style = "accordion"
class EmailVerifyRecordAdmin:
list_display = ['code', 'email', 'send_type', 'send_time']
list_filter = ['code', 'email', 'send_type', 'send_time']
search_fields = ['code', 'email', 'send_type']
class BannerAdmin:
list_display = ['title', 'image', 'url', 'index', 'add_time']
list_filter = ['title', 'image', 'url', 'index', 'add_time']
search_fields = ['title', 'image', 'url', 'index']
xadmin.site.register(EmailVerifyRecord, EmailVerifyRecordAdmin)
xadmin.site.register(Banner, BannerAdmin)
xadmin.site.register(views.BaseAdminView, BaseSetting)
xadmin.site.register(views.CommAdminView, GlobalSettings)
|
[
"[email protected]"
] | |
463d4a3035c7536df43458eb4be4d53450af98d3
|
5fee6afe91711fbb1ca87845f502776fbfab7851
|
/examples/pymanopt_autograd_demo.py
|
1761abe78a82061ff7149582fca5d90df8e0d786
|
[
"MIT"
] |
permissive
|
chenxofhit/pyprobml
|
f66ad4c1186f0ba22e520e14700ac0bd6fee400d
|
fe48d6111bd121e01cfbdefe3361a993fa14abe1
|
refs/heads/master
| 2021-01-24T09:39:29.828935 | 2016-09-17T03:34:59 | 2016-09-17T03:34:59 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 290 |
py
|
#https://github.com/pymanopt/pymanopt/blob/master/pymanopt/core/problem.py
import autograd.numpy as np
from pymanopt import Problem
def cost(theta):
return np.square(theta)
problem = Problem(manifold=None, cost=cost, verbosity=1)
print problem.cost(5)
print problem.egrad(5.0)
|
[
"[email protected]"
] | |
eaca63e5e424fa56715f10e05ddfbe09b2ff2f4c
|
44064ed79f173ddca96174913910c1610992b7cb
|
/Second_Processing_app/temboo/Library/RunKeeper/Weight/UpdateEntry.py
|
e7d943997a26bf0fc309b517c6fea8f1ba7349e6
|
[] |
no_license
|
dattasaurabh82/Final_thesis
|
440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5
|
8edaea62f5987db026adfffb6b52b59b119f6375
|
refs/heads/master
| 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,005 |
py
|
# -*- coding: utf-8 -*-
###############################################################################
#
# UpdateEntry
# Updates a weight entry in a user’s feed.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class UpdateEntry(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the UpdateEntry Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/RunKeeper/Weight/UpdateEntry')
def new_input_set(self):
return UpdateEntryInputSet()
def _make_result_set(self, result, path):
return UpdateEntryResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return UpdateEntryChoreographyExecution(session, exec_id, path)
class UpdateEntryInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the UpdateEntry
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Entry(self, value):
"""
Set the value of the Entry input for this Choreo. ((required, json) A JSON string containing the key/value pairs for the fields to be updated in the weight entry. See documentation for formatting examples.)
"""
InputSet._set_input(self, 'Entry', value)
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved after the final step in the OAuth2 process.)
"""
InputSet._set_input(self, 'AccessToken', value)
def set_EntryID(self, value):
"""
Set the value of the EntryID input for this Choreo. ((required, string) This can be the individual id of the weight entry, or you can pass the full uri for the entry as returned from the RetrieveEntries Choreo (i.e. /weight/24085455).)
"""
InputSet._set_input(self, 'EntryID', value)
class UpdateEntryResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the UpdateEntry Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from RunKeeper.)
"""
return self._output.get('Response', None)
class UpdateEntryChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return UpdateEntryResultSet(response, path)
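# --- Hypothetical usage sketch (not part of the generated Choreo file). It assumes
# the standard Temboo SDK pattern (TembooSession from temboo.core.session and the
# Choreography execute_with_results method); the account, app key, token, entry id
# and entry JSON below are placeholders only. ---
if __name__ == '__main__':
    from temboo.core.session import TembooSession
    session = TembooSession("ACCOUNT_NAME", "APP_KEY_NAME", "APP_KEY_VALUE")
    choreo = UpdateEntry(session)
    inputs = choreo.new_input_set()
    inputs.set_AccessToken("ACCESS_TOKEN")
    inputs.set_EntryID("/weight/24085455")
    inputs.set_Entry('{"weight": 80.5}')  # placeholder field/value
    results = choreo.execute_with_results(inputs)
    print(results.get_Response())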
|
[
"[email protected]"
] | |
6af51bf8cb1672b3a526dc92325dd61f00709985
|
63cbfedc2e6141ae12fc113a81e147e9b5769670
|
/Chapt 13/sample2.py
|
842aeb8880e4bceca85a07e275a5080323161ffd
|
[] |
no_license
|
DamoM73/Learn-to-program-in-Python
|
82d5fdfbb456186d63aa8ae244e87bf96955ff86
|
44b6b9ffa81735739180dc2055e2e803f4526c79
|
refs/heads/master
| 2020-04-23T06:51:58.591548 | 2019-04-27T09:16:14 | 2019-04-27T09:16:14 | 170,988,387 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,427 |
py
|
# Program name: Ch 13 Sample app 2 validate password aaaaa.py
# Program askss use to login, then checks password
# in this program password is "aaaaaa"
from tkinter import *
from tkinter import messagebox
def submit():
password = entry_password.get()
username = entry_username.get()
messageAlert = Label(root, width = 30)
messageAlert.grid(row = 3, column = 0, columnspan = 2, padx = 5, pady = 5)
if password != "aaaaaa":
messageAlert.config(text = "Password incorrect")
        entry_username.delete(0, END)
        entry_password.delete(0, END)
entry_username.focus_set()
else:
messageAlert.config(text = "Password accepted")
print("password accepted")
print("Username: ", username)
print("Password: ", password)
messagebox.showinfo(title = "Password Ok", message = "Press OK to continue")
root.destroy()
# display a message box with a hint for password
def hint():
messagebox.showinfo(title = "Password hint", message = "Hint: Try password aaaaaa")
# create main window
root = Tk()
root.geometry("250x180")
root.title("Login Screen")
root.resizable(False,False)
root.configure(background = "Light blue")
# place a frame round labels and user entries
frame_entry = Frame(root, bg = 'Light blue')
frame_entry.grid(row = 0, column = 0, columnspan = 2, padx = 10, pady = 10)
# place a frame around the buttons
frame_buttons = Frame(root, bg = "Light blue")
frame_buttons.grid(row = 2, column = 0, columnspan = 3, padx = 10 , pady = 10)
# place the labels and text entry fields
Label(frame_entry, text = "Enter username: ")\
.grid(row = 0, column = 0, padx = 5, pady = 5)
entry_username = Entry(frame_entry, width = 15, bg = "white")
entry_username.grid(row = 0, column = 1, padx = 5, pady = 5)
Label(frame_entry, text = "Enter password: ")\
.grid(row = 1, column = 0, padx = 10, pady = 10)
entry_password = Entry(frame_entry, width = 15, bg = "white", show = "*")
entry_password.grid(row = 1, column = 1, padx = 5, pady = 5)
# place the submit button
submit_button = Button(frame_buttons, text = "Submit", width = 8, command = submit)
submit_button.grid(row = 0, column = 0, padx = 5, pady = 5)
# place the Hint button
hint_button = Button(frame_buttons, text = "Hint", width = 15, command = hint)
hint_button.grid(row = 0, column = 1, padx = 5, pady = 5)
# run mainloop
root.mainloop()
print("carry on now...")
|
[
"[email protected]"
] | |
d02c4a0793ee279dabe9c0b95d2105dcd9706e63
|
7b3743f052da9a74808b7d2145418ce5c3e1a477
|
/v2/api.thewatcher.io/api/models/saviors.py
|
89626aa29873222a92953c0510d71808dfbb67f1
|
[
"MIT"
] |
permissive
|
quebecsti/kdm-manager
|
5547cbf8928d485c6449650dc77805877a67ee37
|
a5fcda27d04135429e43a21ac655e6f6acc7768e
|
refs/heads/master
| 2020-11-26T19:22:53.197651 | 2019-10-22T20:53:40 | 2019-10-22T20:53:40 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,129 |
py
|
#!/usr/bin/python2.7
from api.assets import saviors
from api import Models
import utils
class Assets(Models.AssetCollection):
def __init__(self, *args, **kwargs):
self.root_module = saviors
Models.AssetCollection.__init__(self, *args, **kwargs)
def get_asset_by_color(self, color=None):
""" This method will return an asset dictionary whose 'color' attrib
matches the value of the 'color' kwarg.
"""
if color is None:
msg = "get_asset_by_color() requires the 'color' kwarg!"
self.logger.exception(msg)
raise Exception(msg)
output = None
for d in self.get_dicts():
if d["color"] == color and output is None:
output = d
elif d["color"] == color and output is not None:
msg = "Multiple savior asset dicts have the color '%s'. Did you rememeber to filter?" % color
self.logger.exception(msg)
raise Exception(msg)
        if output is None:
            msg = "No asset dict found for color '%s'!" % color
            self.logger.warning(msg)
        return output
|
[
"[email protected]"
] | |
14940a0b39f1f7c4e8107e47cdc734cdf845df28
|
28bf7793cde66074ac6cbe2c76df92bd4803dab9
|
/answers/MridulMohanta/Day29/question1.py
|
bd0a470a4989c366aa27de5d8ad3952e877f35eb
|
[
"MIT"
] |
permissive
|
Codechef-SRM-NCR-Chapter/30-DaysOfCode-March-2021
|
2dee33e057ba22092795a6ecc6686a9d31607c9d
|
66c7d85025481074c93cfda7853b145c88a30da4
|
refs/heads/main
| 2023-05-29T10:33:31.795738 | 2021-06-10T14:57:30 | 2021-06-10T14:57:30 | 348,153,476 | 22 | 135 |
MIT
| 2021-06-10T14:57:31 | 2021-03-15T23:37:26 |
Java
|
UTF-8
|
Python
| false | false | 534 |
py
|
a=[]
b=[]
x=int(input("Enter length of the two variables"))
n=int(input("Enter test number"))
y=0
for i in range(0,x):
p=int(input("Enter element in a:"))
a.append(p)
q=int(input("Enter element in b:"))
b.append(q)
for i in range(x-1,-1,-1):
for j in range(i,-1,-1):
if ((a[i]+b[j])<=n):
print (a[i])
print (b[j])
temp=b[j]
b[j]=b[i]
b[i]=temp
y=y+1
break
print (b)
if ((x-1)<=y):
print ("YES")
else:
print("NO")
|
[
"[email protected]"
] | |
fdfaf5133245d102f34dbb38f190dc97481a6095
|
bdc0b8809d52933c10f8eb77442bd0b4453f28f9
|
/build/std_msgs/rosidl_generator_py/std_msgs/msg/_header.py
|
9b81821f0602af102a642cfc19c4bb22e9f5e525
|
[] |
no_license
|
ClaytonCalabrese/BuiltRos2Eloquent
|
967f688bbca746097016dbd34563716bd98379e3
|
76bca564bfd73ef73485e5c7c48274889032e408
|
refs/heads/master
| 2021-03-27T22:42:12.976367 | 2020-03-17T14:24:07 | 2020-03-17T14:24:07 | 247,810,969 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,074 |
py
|
# generated from rosidl_generator_py/resource/_idl.py.em
# with input from std_msgs:msg/Header.idl
# generated code does not contain a copyright notice
# Import statements for member types
import rosidl_parser.definition # noqa: E402, I100
class Metaclass_Header(type):
"""Metaclass of message 'Header'."""
_CREATE_ROS_MESSAGE = None
_CONVERT_FROM_PY = None
_CONVERT_TO_PY = None
_DESTROY_ROS_MESSAGE = None
_TYPE_SUPPORT = None
__constants = {
}
@classmethod
def __import_type_support__(cls):
try:
from rosidl_generator_py import import_type_support
module = import_type_support('std_msgs')
except ImportError:
import logging
import traceback
logger = logging.getLogger(
'std_msgs.msg.Header')
logger.debug(
'Failed to import needed modules for type support:\n' +
traceback.format_exc())
else:
cls._CREATE_ROS_MESSAGE = module.create_ros_message_msg__msg__header
cls._CONVERT_FROM_PY = module.convert_from_py_msg__msg__header
cls._CONVERT_TO_PY = module.convert_to_py_msg__msg__header
cls._TYPE_SUPPORT = module.type_support_msg__msg__header
cls._DESTROY_ROS_MESSAGE = module.destroy_ros_message_msg__msg__header
from builtin_interfaces.msg import Time
if Time.__class__._TYPE_SUPPORT is None:
Time.__class__.__import_type_support__()
@classmethod
def __prepare__(cls, name, bases, **kwargs):
# list constant names here so that they appear in the help text of
# the message class under "Data and other attributes defined here:"
# as well as populate each message instance
return {
}
class Header(metaclass=Metaclass_Header):
"""Message class 'Header'."""
__slots__ = [
'_stamp',
'_frame_id',
]
_fields_and_field_types = {
'stamp': 'builtin_interfaces/Time',
'frame_id': 'string',
}
SLOT_TYPES = (
rosidl_parser.definition.NamespacedType(['builtin_interfaces', 'msg'], 'Time'), # noqa: E501
rosidl_parser.definition.UnboundedString(), # noqa: E501
)
def __init__(self, **kwargs):
assert all('_' + key in self.__slots__ for key in kwargs.keys()), \
'Invalid arguments passed to constructor: %s' % \
', '.join(sorted(k for k in kwargs.keys() if '_' + k not in self.__slots__))
from builtin_interfaces.msg import Time
self.stamp = kwargs.get('stamp', Time())
self.frame_id = kwargs.get('frame_id', str())
def __repr__(self):
typename = self.__class__.__module__.split('.')
typename.pop()
typename.append(self.__class__.__name__)
args = []
for s, t in zip(self.__slots__, self.SLOT_TYPES):
field = getattr(self, s)
fieldstr = repr(field)
# We use Python array type for fields that can be directly stored
# in them, and "normal" sequences for everything else. If it is
# a type that we store in an array, strip off the 'array' portion.
if (
isinstance(t, rosidl_parser.definition.AbstractSequence) and
isinstance(t.value_type, rosidl_parser.definition.BasicType) and
t.value_type.typename in ['float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64']
):
if len(field) == 0:
fieldstr = '[]'
else:
assert fieldstr.startswith('array(')
prefix = "array('X', "
suffix = ')'
fieldstr = fieldstr[len(prefix):-len(suffix)]
args.append(s[1:] + '=' + fieldstr)
return '%s(%s)' % ('.'.join(typename), ', '.join(args))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
if self.stamp != other.stamp:
return False
if self.frame_id != other.frame_id:
return False
return True
@classmethod
def get_fields_and_field_types(cls):
from copy import copy
return copy(cls._fields_and_field_types)
@property
def stamp(self):
"""Message field 'stamp'."""
return self._stamp
@stamp.setter
def stamp(self, value):
if __debug__:
from builtin_interfaces.msg import Time
assert \
isinstance(value, Time), \
"The 'stamp' field must be a sub message of type 'Time'"
self._stamp = value
@property
def frame_id(self):
"""Message field 'frame_id'."""
return self._frame_id
@frame_id.setter
def frame_id(self, value):
if __debug__:
assert \
isinstance(value, str), \
"The 'frame_id' field must be of type 'str'"
self._frame_id = value
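# --- Illustrative usage sketch (not part of the generated message code). It
# requires a sourced ROS 2 environment so builtin_interfaces is importable;
# the stamp and frame_id values are arbitrary examples. ---
if __name__ == '__main__':
    from builtin_interfaces.msg import Time
    header = Header(stamp=Time(sec=1, nanosec=500000000), frame_id='base_link')
    print(header)
    print(Header.get_fields_and_field_types())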
|
[
"[email protected]"
] | |
0edb15c99b81287d2f5f4c1a226de09d6b692c6c
|
ce0a34a4a1f44cda31042e4294e6cef334392a37
|
/tests/test_gui_klgui.py
|
9c28eb7d7c5c47e2c9694da7f660414fd1c1df94
|
[
"GPL-3.0-only"
] |
permissive
|
PhonologicalCorpusTools/CorpusTools
|
ba6644f90a9790d3f61d923b3b5622eaeaa24caa
|
314bd30be24b1cb7ee0c252a6529bbfe964056ad
|
refs/heads/master
| 2022-09-29T20:36:12.148289 | 2022-09-16T01:57:47 | 2022-09-16T01:57:47 | 18,848,568 | 108 | 24 |
BSD-3-Clause
| 2021-05-07T23:58:03 | 2014-04-16T17:14:55 |
Python
|
UTF-8
|
Python
| false | false | 188 |
py
|
from corpustools.gui.klgui import *
def test_klgui(qtbot, specified_test_corpus, settings):
dialog = KLDialog(None, settings,specified_test_corpus, True)
qtbot.addWidget(dialog)
|
[
"[email protected]"
] | |
aec5cebc7c02dfa2d6a9bd26431eef3f3eb82c51
|
9870d2c6880fd3fa558c46e3bf160aae20c74157
|
/permuteUnique.py
|
f104cb1cf5024240cfeb1b15ac8dd83327f3196d
|
[] |
no_license
|
Yigang0622/LeetCode
|
e7f7f115c6e730c486296ef2f1a3dd1a3fdca526
|
c873cd1ee70a2bdb54571bdd50733db9f6475e9e
|
refs/heads/master
| 2023-03-03T14:32:25.498633 | 2021-02-15T13:59:00 | 2021-02-15T13:59:00 | 281,423,565 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 950 |
py
|
# LeetCode
# permuteUnique
# Created by Yigang Zhou on 2020/9/18.
# Copyright © 2020 Yigang Zhou. All rights reserved.
# 47. 全排列 II
# https://leetcode-cn.com/problems/permutations-ii/
from typing import List
class Solution:
def permuteUnique(self, nums: List[int]) -> List[List[int]]:
ans = []
visited = [0] * len(nums)
nums.sort()
self.dfs([], visited,0,nums,ans)
return ans
def dfs(self, current, visited, i, nums, ans):
if i == len(nums):
ans.append(current[:])
return
for j, each in enumerate(nums):
if visited[j] == 1 or (j > 0 and nums[j] == nums[j - 1] and visited[j - 1] == 0):
continue
visited[j] = 1
current.append(each)
self.dfs(current, visited, i+1, nums, ans)
visited[j] = 0
current.pop()
nums = [1,1,2]
r = Solution().permuteUnique(nums)
print(r)
|
[
"[email protected]"
] | |
b74676e45149ad9bbe55f3f25d2e2048b5786119
|
930c207e245c320b108e9699bbbb036260a36d6a
|
/BRICK-RDFAlchemy/generatedCode/brick/brickschema/org/schema/_1_0_2/Brick/AHU_Heating_Demand_Setpoint.py
|
ec0c93fb165063c910beab5029a9309ddd5da42c
|
[] |
no_license
|
InnovationSE/BRICK-Generated-By-OLGA
|
24d278f543471e1ce622f5f45d9e305790181fff
|
7874dfa450a8a2b6a6f9927c0f91f9c7d2abd4d2
|
refs/heads/master
| 2021-07-01T14:13:11.302860 | 2017-09-21T12:44:17 | 2017-09-21T12:44:17 | 104,251,784 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 427 |
py
|
from rdflib import Namespace, Graph, Literal, RDF, URIRef
from rdfalchemy.rdfSubject import rdfSubject
from rdfalchemy import rdfSingle, rdfMultiple, rdfList
from brick.brickschema.org.schema._1_0_2.Brick.Heating_Demand_Setpoint import Heating_Demand_Setpoint
class AHU_Heating_Demand_Setpoint(Heating_Demand_Setpoint):
rdf_type = Namespace('https://brickschema.org/schema/1.0.2/Brick#').AHU_Heating_Demand_Setpoint
|
[
"[email protected]"
] | |
570db6accc88fe50729a6579d12fd3b3c150c75c
|
65c3e7139829829dd1410228e17f85c285ab0706
|
/Aniyom Ebenezer/Phase 2/STRINGS/Day_29_Challenge_Solution/Question 8 Solution.py
|
d443f353ceb9d57814ceb49adf93228d5ddd05d5
|
[
"MIT"
] |
permissive
|
eaniyom/python-challenge-solutions
|
167e9d897d0a72f1e264ff2fed0e4cc5541b0164
|
21f91e06421afe06b472d391429ee2138c918c38
|
refs/heads/master
| 2022-11-24T02:57:39.920755 | 2020-08-05T09:23:04 | 2020-08-05T09:23:04 | 277,686,791 | 1 | 0 |
MIT
| 2020-07-07T01:31:00 | 2020-07-07T01:30:59 | null |
UTF-8
|
Python
| false | false | 308 |
py
|
"""
Write a Python program that takes a list of words and retuerns the length of the longest one.
"""
def longest_words(word_list):
word_len = []
for n in word_list:
word_len.append((len(n), n))
word_len.sort()
return word_len[-1][1]
print(longest_words(["PHP", "Python", "Backend"]))
|
[
"[email protected]"
] | |
e4b140bd4a3681c4aff2b85b0c7660c38588549f
|
9c0eebdeb427db1ea1ce33987947e22b2c897440
|
/map.py
|
1925fc08dc7034e1f11acc9e148e356e5ec8fb80
|
[] |
no_license
|
dkotenko/npuzz
|
6d52c2ca9d733c8d59450af65f89c8bbac938134
|
461a864659893ec8276fafe3e58f73d853d1e42c
|
refs/heads/main
| 2023-06-22T06:30:10.979771 | 2021-07-13T23:54:21 | 2021-07-13T23:54:21 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,075 |
py
|
from Printer import Printer
import sys
def parse_int(s):
n = 0
try:
n = int(s)
except ValueError:
s_value = s.strip() if s.strip() else '{empty value}'
Printer.print_error_exit(f"map error: string {s_value} is not an integer")
return n
def validate_map(b):
nums = [parse_int(s) for s in b.split("/")]
dict_count = {i: nums.count(i) for i in nums}
if max(dict_count.values()) > 1:
        [Printer.print_error(f'map error: duplicated number {key}') for key, val in dict_count.items() if val > 1]
sys.exit(1)
if list(filter(lambda x: x >= len(nums) or x < 0, nums)):
for n in nums:
            if n >= len(nums) or n < 0:
                Printer.print_error(f'map error: invalid number {n}: must be in range 0:{len(nums) - 1}')
sys.exit(1)
def parse_map(file_name):
try:
f = open(file_name)
except FileNotFoundError:
Printer.print_error_exit(f"there is no file {file_name}")
with open(file_name, "r") as file:
bb = ''
line = file.readline()
l_p = line.partition('#')[0]
while not l_p:
line = file.readline()
l_p = line.partition("#")[0]
size_matr = parse_int(l_p)
line = file.readline()
n_str = 1
while line:
line = line.partition('#')[0]
while not line:
line = file.readline()
line = line.partition("#")[0]
plus = '/'.join(line.split())
bb += '/'.join(line.split())
bb += '/' # где конец строки нечего заменять =(
line = file.readline()
if (len(plus.split('/'))) != size_matr:
Printer.print_error_exit(f"invalid map: invalid values number at row {n_str}")
exit(0)
n_str += 1
bb = bb[0: -1]
if (n_str - 1) != size_matr:
Printer.print_error_exit(f'invalid map: invalid rows number = {n_str - 1}')
return bb
|
[
"[email protected]"
] | |
d58c5d69ac4d4936a7aeabe6f33219107db46479
|
56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e
|
/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYBBHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467519/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_7/run_cfg.py
|
ecd7e088550ddaf95adcd0944c067d9074645308
|
[] |
no_license
|
rmanzoni/HTT
|
18e6b583f04c0a6ca10142d9da3dd4c850cddabc
|
a03b227073b2d4d8a2abe95367c014694588bf98
|
refs/heads/master
| 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,161 |
py
|
import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYBBHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467519/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_26_1_qK2.root',
'/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_27_1_vSH.root',
'/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_28_1_O6M.root')
)
|
[
"[email protected]"
] | |
ecc3e6b8d119081e510084e3005d631f9d895d53
|
23c4f6d8a2a6b97077628c2a012b2b402c816d91
|
/LeetCode算法题/0190_颠倒二进制位/颠倒二进制.py
|
a253597ca1dc577aa84d9985492621b0937a38bc
|
[] |
no_license
|
exueyuanAlgorithm/AlgorithmDemo
|
7ef6ff8104e8da5a81037795184115fb0ac8ca9a
|
d34d4b592d05e9e0e724d8834eaf9587a64c5034
|
refs/heads/master
| 2023-07-16T19:00:05.664780 | 2021-09-04T11:31:07 | 2021-09-04T11:31:07 | 277,327,574 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 405 |
py
|
class Solution:
    def reverseBits(self, n: int) -> int:
        result_num = 0
        # Reverse the low 31 bits first: each pass appends the current bit and
        # pre-shifts, leaving room for the next bit.
        for i in range(31):
            if n % 2 == 1:
                # precedence note: this evaluates as (result_num + 1) << 1
                result_num = result_num + 1 << 1
            else:
                result_num = result_num << 1
            n = n >> 1
        # The 32nd (original highest) bit becomes the lowest bit of the result.
        if n % 2 == 1:
            result_num += 1
        return result_num
solution = Solution()
print(solution.reverseBits(0b111))
|
[
"[email protected]"
] | |
10b5605b4bccd6d1f948a4c6810b3e573adb67ae
|
a961aa04d7c7d18fd2ac7da8a8016bacfabc6e1b
|
/elevennote/src/notes/migrations/0007_auto_20200509_1450.py
|
38a6a80a43cd9fce7abbf51b8a93bfb99cfc98ae
|
[] |
no_license
|
EgorovM/cs102
|
a4f6423f3e96064c68a9015118cd141a8a7eea14
|
0f72f9027dbcda510c67f815348a8ce58f76d857
|
refs/heads/master
| 2021-06-21T16:21:10.880523 | 2020-06-06T08:34:28 | 2020-06-06T08:34:28 | 214,231,423 | 0 | 1 | null | 2021-06-10T22:52:37 | 2019-10-10T16:24:08 |
JavaScript
|
UTF-8
|
Python
| false | false | 440 |
py
|
# Generated by Django 2.0.1 on 2020-05-09 14:50
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('notes', '0006_note_shared'),
]
operations = [
migrations.AlterField(
model_name='note',
name='shared',
field=models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL),
),
]
|
[
"[email protected]"
] | |
cde9c5c591a0868fda460d5f45c15e0897cb2d77
|
89c4a43a505df8fdf1f0d7386988c4896c2e631b
|
/google/ads/googleads/v6/services/services/gender_view_service/transports/base.py
|
c2715ba063f55d3fe0da66e820e30cd4ad4a3ba0
|
[
"Apache-2.0"
] |
permissive
|
hurricanelennane/google-ads-python
|
a0a1fed690776a8bb2e81f637eb7eae10fb4992f
|
310a488b6fdad9d5beea8fa4b166edce779a2511
|
refs/heads/master
| 2023-07-04T03:07:53.344466 | 2021-07-16T19:06:36 | 2021-07-16T19:06:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,582 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
from google import auth
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.ads.googleads.v6.resources.types import gender_view
from google.ads.googleads.v6.services.types import gender_view_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class GenderViewServiceTransport(metaclass=abc.ABCMeta):
"""Abstract transport class for GenderViewService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]): The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials is None:
credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
# Save the credentials.
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
self._prep_wrapped_messages(client_info)
def _prep_wrapped_messages(self, client_info):
# Precomputed wrapped methods
self._wrapped_methods = {
self.get_gender_view: gapic_v1.method.wrap_method(
self.get_gender_view,
default_timeout=None,
client_info=client_info,
),
}
@property
def get_gender_view(
self,
) -> typing.Callable[
[gender_view_service.GetGenderViewRequest], gender_view.GenderView
]:
raise NotImplementedError
__all__ = ("GenderViewServiceTransport",)
|
[
"[email protected]"
] | |
0a86e75c70dcb21815b1a3f7ca3483db5fd939cc
|
707c6a7f3b3213c8a996967ede905aeb18a8c6d9
|
/solutions/Insert-Interval.py
|
d680a3144665d7fbb6a2e681c4be95c980267521
|
[] |
no_license
|
Ziyilan/Pyleetcode
|
d35b9c2ae6c890dfd42804264b139bfddb8db563
|
81a9d98607b4ce554507d16763ee82f7dad49edd
|
refs/heads/master
| 2020-12-11T02:11:38.470153 | 2015-10-27T18:46:47 | 2015-10-27T18:46:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,486 |
py
|
"""
Author: Jing (https://github.com/gnijuohz)
Insert Interval: https://oj.leetcode.com/problems/insert-interval
Given a set of non-overlapping intervals, insert a new interval into the intervals (merge if necessary).
You may assume that the intervals were initially sorted according to their start times.
Example 1:
Given intervals [1,3],[6,9], insert and merge [2,5] in as [1,5],[6,9].
Example 2:
Given [1,2],[3,5],[6,7],[8,10],[12,16], insert and merge [4,9] in as [1,2],[3,10],[12,16].
This is because the new interval [4,9] overlaps with [3,5],[6,7],[8,10].
Tags
Array, Sort, Show Similar Problems, (H) Merge Intervals
"""
import operator
# Definition for an interval.
# class Interval:
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
class Solution:
# @param intervals, a list of Intervals
# @param newInterval, a Interval
# @return a list of Interval
def insert(self, intervals, newInterval):
intervals.append(newInterval)
return self.merge(intervals)
def merge(self, intervals):
if not intervals or len(intervals) == 1:
return intervals
intervals = sorted(intervals, key=operator.attrgetter('start'))
res = [intervals[0]]
for i in range(1, len(intervals)):
if intervals[i].start <= res[-1].end:
res[-1].end = max(res[-1].end, intervals[i].end)
else:
res.append(intervals[i])
return res
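# --- Minimal usage sketch, based on the commented-out Interval definition above;
# it reproduces Example 1 from the docstring. ---
class Interval:
    def __init__(self, s=0, e=0):
        self.start = s
        self.end = e
if __name__ == '__main__':
    intervals = [Interval(1, 3), Interval(6, 9)]
    merged = Solution().insert(intervals, Interval(2, 5))
    print([(i.start, i.end) for i in merged])  # expected: [(1, 5), (6, 9)]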
|
[
"[email protected]"
] | |
fe49918b93bc0175098d4277f76f2d191bfbce49
|
5a69cab2b5ed410f944b57f3ec586b9c624a735c
|
/lib/demo_test/multi_platform_demo
|
0cd943b292114efcecb53e660e9883bb98850ba5
|
[
"Apache-2.0"
] |
permissive
|
T3kton/architect
|
214a176dd5f9a9bc340d358d692e16a61f362ebe
|
3368a66c0c1836eca12dbc7af97f01d5ba13984a
|
refs/heads/master
| 2021-01-20T09:03:25.451300 | 2018-09-17T23:03:24 | 2018-09-17T23:03:24 | 90,217,916 | 0 | 2 |
Apache-2.0
| 2018-09-17T23:03:24 | 2017-05-04T03:29:18 |
Python
|
UTF-8
|
Python
| false | false | 2,211 |
#!/usr/bin/env python3
import os
import django
os.environ.setdefault( 'DJANGO_SETTINGS_MODULE', 'architect.settings' )
django.setup()
from datetime import datetime, timezone, timedelta
from architect.Contractor.models import Complex, BluePrint
from architect.Plan.models import Plan, PlanComplex, PlanBluePrint, PlanTimeSeries
from architect.TimeSeries.models import CostTS, AvailabilityTS, ReliabilityTS, RawTimeSeries
print( 'Giving Blueprints their names...')
for blueprint in BluePrint.objects.filter( name__isnull=True ):
blueprint.name = blueprint.contractor_id
blueprint.full_clean()
blueprint.save()
try:
plan = Plan.objects.get( name='demo' )
except Plan.DoesNotExist:
print( 'Creating the Plan...' )
plan = Plan( name='demo', description='demo', enabled=True )
plan.script = """
cut_off: 0
demo: weighted( *INDEX*, @count, ( 1 / *COST* ) )
#demo-web: above_inclusive( demo, cut_off )
#demo-ssh: below( demo, cut_off )
"""
plan.config_values = {}
plan.max_inflight = 10
plan.last_change = datetime.now( timezone.utc ) - timedelta( days=1 )
plan.can_build = True
plan.can_destroy = True
plan.full_clean()
plan.save()
ts = RawTimeSeries( metric='data.count' )
ts.full_clean()
ts.save()
pts = PlanTimeSeries( plan=plan, timeseries=ts, script_name='count' )
pts.full_clean()
pts.save()
print( 'setting up blueprint link...' )
blueprint = BluePrint.objects.get( name='demo-web' )
pb = PlanBluePrint( plan=plan, blueprint=blueprint )
pb.full_clean()
pb.save()
blueprint = BluePrint.objects.get( name='demo-ssh' )
pb = PlanBluePrint( plan=plan, blueprint=blueprint )
pb.full_clean()
pb.save()
print( 'Giving Complexes their tsnames, and setting up buckets...')
for complex in Complex.objects.filter( tsname__isnull=True ):
complex.tsname = complex.contractor_id
complex.full_clean()
complex.save()
costts = CostTS( complex=complex )
costts.save()
availts = AvailabilityTS( complex=complex )
availts.save()
reliabts = ReliabilityTS( complex=complex )
reliabts.save()
pc = PlanComplex( plan=plan, complex=complex )
pc.cost = costts
pc.availability = availts
pc.reliability = reliabts
pc.full_clean()
pc.save()
|
[
"[email protected]"
] | ||
f4a5841f5d31f26e0da2530d937bbf5ce64db363
|
ac1bbabc7c1b3149711c416dd8b5f5969a0dbd04
|
/Programming Fundamentals/objects_and_classes/class.py
|
7289e5275a0ea25652981f2f7b0b49c310acc71b
|
[] |
no_license
|
AssiaHristova/SoftUni-Software-Engineering
|
9e904221e50cad5b6c7953c81bc8b3b23c1e8d24
|
d4910098ed5aa19770d30a7d9cdf49f9aeaea165
|
refs/heads/main
| 2023-07-04T04:47:00.524677 | 2021-08-08T23:31:51 | 2021-08-08T23:31:51 | 324,847,727 | 1 | 0 | null | 2021-08-08T23:31:52 | 2020-12-27T20:58:01 |
Python
|
UTF-8
|
Python
| false | false | 701 |
py
|
class Class:
def __init__(self, name):
self.name = name
self.students = []
self.grades = []
__students_count = 22
def add_student(self, name, grade):
if len(self.students) < Class.__students_count:
self.students.append(name)
self.grades.append(grade)
def get_average_grade(self):
return sum(self.grades) / len(self.grades)
def __repr__(self):
return f"The students in {self.name}: {', '.join(self.students)}. Average grade: {Class.get_average_grade(self):.2f}"
a_class = Class("11B")
a_class.add_student("Peter", 4.80)
a_class.add_student("George", 6.00)
a_class.add_student("Amy", 3.50)
print(a_class)
|
[
"[email protected]"
] | |
dda48464dce73f3af0af909f3571d348d3d0d84e
|
f8dd8d046100f1223713e047074f30c7ce5a59cd
|
/testing/epilogue/decorators.py
|
35dbdffbffc9e1b88e69fb384d455179a4f387c3
|
[] |
no_license
|
dotslash227/98fitcortex
|
57aed99270799eff68fdff62db0b8c1d9aabd4a2
|
bd4002151e5def00c3dea1f5a1abfb06ba3e809a
|
refs/heads/master
| 2022-12-17T00:51:20.302948 | 2019-02-27T13:54:22 | 2019-02-27T13:54:22 | 197,362,824 | 0 | 0 | null | 2022-12-08T00:02:42 | 2019-07-17T09:55:14 |
HTML
|
UTF-8
|
Python
| false | false | 3,694 |
py
|
import functools
import datetime
from django.db import models
def last_days(days = 6):
today = datetime.datetime.today().date()
while days >= 0:
val = today - datetime.timedelta(days = days)
days -= 1
yield val
def last_weeks(weeks = 6):
today = datetime.datetime.today().date()
current_year , current_week , current_day = today.isocalendar()
start_week = current_week
year = current_year
if start_week >= 6:
while weeks >= 0:
yield (year ,current_week)
current_week -= 1
weeks -= 1
else:
while weeks >= 0:
yield (year , current_week)
current_week -= 1
current_week = abs(52+current_week)%52
if current_week == 0:
current_week = 52
year -= 1
weeks -= 1
def add_today(f):
@functools.wraps(f)
def wrapper(*args , **kwargs):
kwargs['today'] = datetime.datetime.today().date()
return f(*args , **kwargs)
return wrapper
def add_empty_day_in_week(defaults , days_range = 6):
def decorator(f):
@functools.wraps(f)
def wrapper(*args , **kwargs):
vals = f(*args , **kwargs)
days = set(vals.values_list("date" , flat = True))
data = []
for e in last_days(days = days_range):
if e not in days:
d = {
"date" : e,
**defaults,
}
data.append(d)
return data + list(vals)
return wrapper
return decorator
def add_empty_weeks(defaults , sort = lambda x : (x['year'],x['week'])):
def decorator(f):
@functools.wraps(f)
def wrapper(*args , **kwargs):
weeks , data = f(*args , **kwargs)
for y,w in last_weeks():
if (y,w) not in weeks:
d = {
"week" : w,
"year" : y,
**defaults
}
data.append(d)
return sorted(data , key = sort)
return wrapper
return decorator
def sorter(key , reverse = False):
def decorator(f):
@functools.wraps(f)
def wrapper(*args , **kwargs):
vals = f(*args , **kwargs)
return sorted(vals , key = key , reverse = reverse)
return wrapper
return decorator
def scale_field(field,goal):
def decorator(fn):
@functools.wraps(fn)
def wrapper(*args , **kwargs):
returned_value = fn(*args , **kwargs)
field_values = (e.get(field) for e in returned_value)
scaling_factor = 100/(max(goal ,max(field_values)))
for e in returned_value:
e['plotting_value'] = e.get(field , 0) * scaling_factor
return returned_value
return wrapper
return decorator
def weekly_average(field):
def decorator(f):
@functools.wraps(f)
def wrapper(*args , **kwargs):
vals = f(*args , **kwargs)
weeks = set(vals.values_list("week" , flat = True) )
data = []
curr_week = datetime.datetime.now().isocalendar()[1]
for e in range(curr_week - 6 , curr_week +1):
if e not in weeks:
data.append({
"week" : e,
"avg" : 0
})
continue
avg = vals.filter(
week = e
).aggregate(
avg = models.Sum(field)
)
d = {
"week" : e,
"avg" : avg['avg']
}
data.append(d)
return data
return wrapper
return decorator
def monthly_average(field):
def decorator(f):
@functools.wraps(f)
def wrapper(self):
vals = f(self)
months = set(vals.values_list("month" , flat = True) )
data = []
for e in months:
avg = vals.filter(
month = e
).aggregate(
avg = models.Avg(field)
)
d = {
"month" : e,
"avg" : avg['avg']
}
data.append(d)
return data
return wrapper
return decorator
def map_transform_queryset(iterable , *fields):
def decorator(f):
@functools.wraps(f)
def mapper(*args , **kwargs):
l = map(lambda x : functools.partial(x , *fields) , iterable)
val = f(*args , **kwargs)
d = {}
for e in l:
d.update(**e(val))
return d
return mapper
return decorator
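# --- Hypothetical usage sketch (not part of the original module): the generic
# `sorter` decorator wrapping a plain function that returns a list of dicts. ---
if __name__ == '__main__':
    @sorter(key=lambda row: row['week'], reverse=True)
    def fake_weekly_rows():
        return [{'week': 3, 'avg': 10}, {'week': 5, 'avg': 7}, {'week': 4, 'avg': 9}]
    print(fake_weekly_rows())  # rows come back ordered by week, newest first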
|
[
"[email protected]"
] | |
3ac83d2ac2af4145c059505d5214c148e2fa8ab9
|
f80ef3a3cf859b13e8af8433af549b6b1043bf6e
|
/pyobjc-framework-MailKit/PyObjCTest/test_memessagedecoder.py
|
9ebaeafd6990e8c6d9d71d48d0c727eca4fb01ad
|
[
"MIT"
] |
permissive
|
ronaldoussoren/pyobjc
|
29dc9ca0af838a56105a9ddd62fb38ec415f0b86
|
77b98382e52818690449111cd2e23cd469b53cf5
|
refs/heads/master
| 2023-09-01T05:15:21.814504 | 2023-06-13T20:00:17 | 2023-06-13T20:00:17 | 243,933,900 | 439 | 49 | null | 2023-06-25T02:49:07 | 2020-02-29T08:43:12 |
Python
|
UTF-8
|
Python
| false | false | 198 |
py
|
from PyObjCTools.TestSupport import TestCase
import MailKit # noqa: F401
class TestMEMessageDecoder(TestCase):
def test_protocols(self):
self.assertProtocolExists("MEMessageDecoder")
|
[
"[email protected]"
] | |
ee62c946bacf7cf765e57fe18224aea84ff72185
|
2fcf361eb89f1f01fe4d677d4772ddaba89b06ad
|
/hydrus/HydrusGlobals.py
|
06ab47dbf5399492ca0bfda15b4892944f211c47
|
[
"WTFPL"
] |
permissive
|
matjojo/hydrus
|
9f13f35e817bfe7e170ec7be22e18b64e393cb01
|
8f87206ea6ef242bc38235d7053bb33b5a785e68
|
refs/heads/master
| 2021-05-17T03:26:19.183503 | 2020-03-27T23:32:58 | 2020-03-27T23:32:58 | 250,597,596 | 0 | 0 |
NOASSERTION
| 2020-03-27T17:18:53 | 2020-03-27T17:18:52 | null |
UTF-8
|
Python
| false | false | 1,239 |
py
|
import threading
controller = None
client_controller = None
server_controller = None
test_controller = None
view_shutdown = False
model_shutdown = False
no_daemons = False
no_wal = False
no_db_temp_files = False
db_memory_journaling = False
db_synchronous_override = None
import_folders_running = False
export_folders_running = False
callto_report_mode = False
db_report_mode = False
db_profile_mode = False
file_report_mode = False
media_load_report_mode = False
gui_report_mode = False
shortcut_report_mode = False
subprocess_report_mode = False
subscription_report_mode = False
hover_window_report_mode = False
file_import_report_mode = False
phash_generation_report_mode = False
menu_profile_mode = False
network_report_mode = False
pubsub_report_mode = False
pubsub_profile_mode = False
ui_timer_profile_mode = False
daemon_report_mode = False
force_idle_mode = False
no_page_limit_mode = False
thumbnail_debug_mode = False
currently_uploading_pending = False
shutting_down_due_to_already_running = False
do_idle_shutdown_work = False
program_is_shutting_down = False
shutdown_complete = False
restart = False
emergency_exit = False
twisted_is_broke = False
dirty_object_lock = threading.Lock()
server_busy = threading.Lock()
|
[
"[email protected]"
] | |
f6ced2b4805a2ac25e3a6f5f5bc67b175ac0c922
|
69d3680f881833a0a4906ad708eac11401bc03c6
|
/python3/515. 在每个树行中找最大值.py
|
7f9663db2eb82e0576ad697414cd72b43c7432df
|
[] |
no_license
|
menghuu/YALeetcode
|
21df4b5ea6cb0a249263b0ce2df37e7580477ddd
|
1959a884bb1cc9f2f1acb1ba6f413498ea0d1aca
|
refs/heads/master
| 2023-08-18T03:55:41.470428 | 2021-09-11T12:39:02 | 2021-09-11T12:39:02 | 269,104,152 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 882 |
py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2020 m <[email protected]>
#
# Distributed under terms of the MIT license.
"""
"""
from typing import List
# Definition for a binary tree node.
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
def largestValues(self, root: TreeNode) -> List[int]:
if not root:
return []
ans = []
level = [root]
while level:
l = len(level)
m = float('-inf')
for i in range(l):
root = level[i]
m = max(root.val, m)
if root.left:
level.append(root.left)
if root.right:
level.append(root.right)
level = level[l:]
ans.append(m)
return ans
|
[
"[email protected]"
] | |
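A minimal local harness for the largestValues solution above, assuming the standard LeetCode scaffolding (the typing import and the TreeNode class are normally supplied by the judge, so they are recreated here and must precede the Solution class):
from typing import List          # normally injected by the LeetCode judge
class TreeNode:                  # standard LeetCode node, assumed
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
# with the scaffold above in place before the Solution class:
root = TreeNode(1)
root.left, root.right = TreeNode(3), TreeNode(2)
root.left.left, root.left.right = TreeNode(5), TreeNode(3)
root.right.right = TreeNode(9)
print(Solution().largestValues(root))   # [1, 3, 9]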
468645e9619fb25182bf7c27b275edf40ec84218
|
afa4ad9cefeb12f78fa7176d2c80d71cc5a76d1c
|
/clastic/tests/common.py
|
e1327a4596c8a3033fab40fdefe4c40417973191
|
[
"BSD-3-Clause"
] |
permissive
|
slaporte/clastic
|
0d88fdc56570de578efcd221d1a5182be661ac97
|
d7734040160ece0bf2dd6ef10770be838776056f
|
refs/heads/master
| 2021-01-16T22:36:30.852244 | 2013-09-15T01:43:11 | 2013-09-15T01:43:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,119 |
py
|
# -*- coding: utf-8 -*-
import clastic
from clastic import Middleware
def hello_world(name=None):
if name is None:
name = 'world'
return clastic.Response('Hello, %s!' % name)
def hello_world_str(name=None):
if name is None:
name = 'world'
return 'Hello, %s!' % name
def hello_world_html(name=None):
if name is None:
name = 'world'
return '<html><body><p>Hello, <b>%s</b>!</p></body></html>' % name
def hello_world_ctx(name=None):
if name is None:
name = 'world'
greeting = 'Hello, %s!' % name
return {'name': name,
'greeting': greeting}
def session_hello_world(session, name=None):
if name is None:
name = session.get('name') or 'world'
session['name'] = name
return 'Hello, %s!' % name
def complex_context(name=None, date=None):
from datetime import datetime
ret = hello_world_ctx(name)
if date is None:
date = datetime.utcnow()
ret['date'] = date
ret['example_middleware'] = RequestProvidesName
ret['a_lambda'] = lambda x: None
ret['true'] = True
ret['bool_vals'] = set([True, False])
ret['the_locals'] = locals()
ret['the_locals'].pop('ret')
return ret
class RequestProvidesName(Middleware):
provides = ('name',)
def __init__(self, default_name=None):
self.default_name = default_name
def request(self, next, request):
try:
ret = next(request.args.get('name', self.default_name))
except Exception as e:
print e
raise
return ret
class DummyMiddleware(Middleware):
def __init__(self, verbose=False):
self.verbose = verbose
def request(self, next, request):
name = '%s (%s)' % (self.__class__.__name__, id(self))
if self.verbose:
print name, '- handling', id(request)
try:
ret = next()
except Exception as e:
if self.verbose:
print name, '- uhoh:', repr(e)
raise
if self.verbose:
print name, '- hooray:', repr(ret)
return ret
|
[
"[email protected]"
] | |
57ee62c2b454803a41896a4bf9ceef507af16a53
|
fa06915cb1f1d49d636ee2137889cfd66c6e55af
|
/metodos_confinamentos/secante.py
|
18e79f85bf7e3f67cb40920e9a009f43320520b7
|
[] |
no_license
|
DarknessRdg/mat-computacional
|
7ed45dd333bec52b509128e6d106efaa4a205cea
|
30fd0dd144a10a91f3a11055d20ebdab72be3620
|
refs/heads/main
| 2023-04-03T10:27:35.510285 | 2021-04-16T04:30:38 | 2021-04-16T04:30:38 | 329,485,627 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 769 |
py
|
import math
from utils import trunc
def secante(x1, x2, funcao, tolerancia):
f_x1 = 0
loop = 0
while loop == 0 or abs(f_x1) > tolerancia:
loop += 1
f_x1 = funcao(x1)
f_x2 = funcao(x2)
x3 = x2 - ((f_x2 * (x1 - x2)) / (f_x1 - f_x2))
feedback = (
'loop = {} '
'x1 = {} '
'x2 = {} '
'f(x1) = {} '
'f(x2) = {} '
'x3 = {} '
'f(x3) {} '
)
print(feedback.format(
loop, *map(trunc, (x1, x2, f_x1, f_x2, x3, funcao(x3)))
))
x1 = x2
x2 = x3
return x1
if __name__ == '__main__':
print(secante(
x1=1,
x2=2,
funcao=f,
tolerancia=10 ** -3
))
|
[
"[email protected]"
] | |
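The __main__ block above calls secante with a function f that is never defined in this module. A minimal sketch of how it could be exercised, assuming it runs inside the module (so utils.trunc is importable) and using an arbitrary example target function:
def f(x):
    return x ** 3 - x - 2    # assumed example target; its root is near 1.5214
print(secante(x1=1, x2=2, funcao=f, tolerancia=10 ** -3))   # approaches the root ~1.52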
0e3558e47561e850419df0c5701c93bfd1818048
|
2772f804bae2bf1dad1c9fcab435c98696465c65
|
/二刷+题解/每日一题/minCameraCover.py
|
aba1966661634456e10dc88a4eea8520e49f8eee
|
[] |
no_license
|
1oser5/LeetCode
|
75e15a2f7a1a7de1251fe5f785ad06a58b4b8889
|
40726506802d2d60028fdce206696b1df2f63ece
|
refs/heads/master
| 2021-07-19T04:40:17.637575 | 2020-09-30T00:16:39 | 2020-09-30T00:16:39 | 211,662,258 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 987 |
py
|
# -*- encoding: utf-8 -*-
'''
@File : minCameraCover.py
@Time : 2020/09/22 08:52:25
@Author : Xia
@Version : 1.0
@Contact : [email protected]
@License : (C)Copyright 2019-2020, HB.Company
@Desc : None
'''
# here put the import lib
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
res = 0
def minCameraCover(self, root: TreeNode) -> int:
        def dfs(root):
            # states: 0 = not covered, 1 = covered without a camera, 2 = camera placed here
            if not root:
                return 1
            left, right = dfs(root.left), dfs(root.right)
            if left == 0 or right == 0:
                # an uncovered child forces a camera at this node
                self.res += 1
                return 2
            if left == 1 and right == 1:
                # both children covered, no camera below: this node stays uncovered
                return 0
            if (left + right) >= 3:
                # at least one child holds a camera, so this node is covered
                return 1
            return -1
if dfs(root) == 0:
self.res += 1
return self.res
if __name__ == '__main__':
pass
|
[
"[email protected]"
] | |
db6ba2fc2635e56052c35ca36a819d6348f32bd3
|
acd41dc7e684eb2e58b6bef2b3e86950b8064945
|
/res/packages/scripts/scripts/common/Lib/ctypes/macholib/dylib.py
|
55b791f15df2416f3ae4ab32269899edf999a3d8
|
[] |
no_license
|
webiumsk/WoT-0.9.18.0
|
e07acd08b33bfe7c73c910f5cb2a054a58a9beea
|
89979c1ad547f1a1bbb2189f5ee3b10685e9a216
|
refs/heads/master
| 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null |
WINDOWS-1250
|
Python
| false | false | 2,276 |
py
|
# 2017.05.04 15:31:20 Střední Evropa (letní čas)
# Embedded file name: scripts/common/Lib/ctypes/macholib/dylib.py
"""
Generic dylib path manipulation
"""
import re
__all__ = ['dylib_info']
DYLIB_RE = re.compile('(?x)\n(?P<location>^.*)(?:^|/)\n(?P<name>\n (?P<shortname>\\w+?)\n (?:\\.(?P<version>[^._]+))?\n (?:_(?P<suffix>[^._]+))?\n \\.dylib$\n)\n')
def dylib_info(filename):
"""
A dylib name can take one of the following four forms:
Location/Name.SomeVersion_Suffix.dylib
Location/Name.SomeVersion.dylib
Location/Name_Suffix.dylib
Location/Name.dylib
returns None if not found or a mapping equivalent to:
dict(
location='Location',
name='Name.SomeVersion_Suffix.dylib',
shortname='Name',
version='SomeVersion',
suffix='Suffix',
)
Note that SomeVersion and Suffix are optional and may be None
if not present.
"""
is_dylib = DYLIB_RE.match(filename)
if not is_dylib:
return None
else:
return is_dylib.groupdict()
def test_dylib_info():
def d(location = None, name = None, shortname = None, version = None, suffix = None):
return dict(location=location, name=name, shortname=shortname, version=version, suffix=suffix)
    assert dylib_info('completely/invalid') is None
    assert dylib_info('completely/invalide_debug') is None
    assert dylib_info('P/Foo.dylib') == d('P', 'Foo.dylib', 'Foo')
    assert dylib_info('P/Foo_debug.dylib') == d('P', 'Foo_debug.dylib', 'Foo', suffix='debug')
    assert dylib_info('P/Foo.A.dylib') == d('P', 'Foo.A.dylib', 'Foo', 'A')
    assert dylib_info('P/Foo_debug.A.dylib') == d('P', 'Foo_debug.A.dylib', 'Foo_debug', 'A')
    assert dylib_info('P/Foo.A_debug.dylib') == d('P', 'Foo.A_debug.dylib', 'Foo', 'A', 'debug')
return
if __name__ == '__main__':
test_dylib_info()
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\common\Lib\ctypes\macholib\dylib.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:31:20 Střední Evropa (letní čas)
|
[
"[email protected]"
] | |
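For reference, a typical call to dylib_info (the resulting mapping follows directly from the regular expression and the assertions above):
info = dylib_info('/usr/lib/libSystem.B.dylib')
# {'location': '/usr/lib', 'name': 'libSystem.B.dylib',
#  'shortname': 'libSystem', 'version': 'B', 'suffix': None}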
0eb91115050dd84862b4b5adc45e51414a098dc9
|
5faa3f139f30c0d290e327e04e3fd96d61e2aabb
|
/mininet-wifi/SIGCOMM-2016/hybridVirtualPhysical.py
|
a318efac432e79db502409c7800249359668848f
|
[] |
no_license
|
hongyunnchen/reproducible-research
|
c6dfc3cd3c186b27ab4cf25949470b48d769325a
|
ed3a7a01b84ebc9bea96c5b02e0c97705cc2f7c6
|
refs/heads/master
| 2021-05-07T08:24:09.586976 | 2017-10-31T13:08:05 | 2017-10-31T13:08:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,853 |
py
|
#!/usr/bin/python
"""Code created to be presented with the paper titled:
"Rich Experimentation through Hybrid Physical-Virtual Software-Defined Wireless Networking Emulation"
authors: Ramon dos Reis Fontes and Christian Esteve Rothenberg"""
"""Topology
(2)ap2(3)
/ \
(3) (2)
wlan1(2)phyap1 ap3(4)wlan0
(4) (3)
\ /
(3)ap4(2) """
from mininet.net import Mininet
from mininet.node import RemoteController, OVSKernelSwitch, UserAP, Controller
from mininet.link import TCLink
from mininet.cli import CLI
from mininet.node import Node
from mininet.log import setLogLevel
import os
import time
def topology():
"Create a network."
net = Mininet( controller=RemoteController, link=TCLink, accessPoint=UserAP )
staList = []
internetIface = 'eth0'
usbDongleIface = 'wlan11'
print "*** Creating nodes"
for n in range(10):
staList.append(n)
staList[n] = net.addStation( 'sta%s' % (n+1), wlans=2, mac='00:00:00:00:00:%s' % (n+1), ip='192.168.0.%s/24' % (n+1) )
phyap1 = net.addPhysicalBaseStation( 'phyap1', protocols='OpenFlow13', ssid='Sigcomm-2016-Mininet-WiFi', mode= 'g', channel= '1', position='50,115,0', phywlan=usbDongleIface )
ap2 = net.addAccessPoint( 'ap2', protocols='OpenFlow13', ssid='ap-ssid2', mode= 'g', channel= '11', position='100,175,0' )
ap3 = net.addAccessPoint( 'ap3', protocols='OpenFlow13', ssid='ap-ssid3', mode= 'g', channel= '6', position='150,115,0' )
ap4 = net.addAccessPoint( 'ap4', protocols='OpenFlow13', ssid='ap-ssid4', mode= 'g', channel= '11', position='100,55,0' )
c5 = net.addController( 'c5', controller=RemoteController, port=6653 )
sta11 = net.addStation( 'sta11', ip='10.0.0.111/8', position='60,100,0')
h12 = net.addHost( 'h12', ip='10.0.0.109/8')
root = net.addHost( 'root', ip='10.0.0.254/8', inNamespace=False )
print "*** Configuring wifi nodes"
net.configureWifiNodes()
print "*** Creating links"
for sta in staList:
net.addMesh(sta, ssid='meshNet')
"""uncomment to plot graph"""
net.plotGraph(max_x=240, max_y=240)
"""Routing"""
net.meshRouting('custom')
"""Seed"""
net.seed(20)
print "*** Associating and Creating links"
net.addLink(phyap1, ap2)
net.addLink(ap2, ap3)
net.addLink(sta11, ap2)
net.addLink(ap3, ap4)
net.addLink(ap4, phyap1)
net.addLink(root, ap3)
net.addLink(phyap1, h12)
print "*** Starting network"
net.build()
c5.start()
phyap1.start( [c5] )
ap2.start( [c5] )
ap3.start( [c5] )
ap4.start( [c5] )
time.sleep(2)
"""output=all,flood"""
ap3.cmd('dpctl unix:/tmp/ap3 meter-mod cmd=add,flags=1,meter=1 drop:rate=100')
ap3.cmd('dpctl unix:/tmp/ap3 flow-mod table=0,cmd=add in_port=4,eth_type=0x800,ip_dst=10.0.0.100,meter:1 apply:output=flood')
phyap1.cmd('dpctl unix:/tmp/phyap1 flow-mod table=0,cmd=add in_port=2,ip_dst=10.0.0.109,eth_type=0x800,ip_proto=6,tcp_dst=80 apply:set_field=tcp_dst:80,set_field=ip_dst:10.0.0.111,output=5')
phyap1.cmd('dpctl unix:/tmp/phyap1 flow-mod table=0,cmd=add in_port=1,eth_type=0x800,ip_proto=6,tcp_src=80 apply:set_field=ip_src:10.0.0.109,output=2')
fixNetworkManager( root, 'root-eth0' )
startNAT(root, internetIface)
sta11.cmd('ip route add default via 10.0.0.254')
sta11.cmd('pushd /home/fontes; python3 -m http.server 80 &')
ip = 201
for sta in staList:
sta.setIP('10.0.0.%s/8' % ip, intf="%s-wlan1" % sta)
sta.cmd('ip route add default via 10.0.0.254')
ip+=1
"*** Available models: RandomWalk, TruncatedLevyWalk, RandomDirection, RandomWayPoint, GaussMarkov, ReferencePoint, TimeVariantCommunity ***"
net.startMobility(startTime=0, model='RandomWalk', max_x=200, max_y=200, min_v=0.1, max_v=0.2)
print "*** Running CLI"
CLI( net )
print "*** Stopping network"
net.stop()
def startNAT( root, inetIntf, subnet='10.0/8', localIntf = None ):
"""Start NAT/forwarding between Mininet and external network
root: node to access iptables from
inetIntf: interface for internet access
subnet: Mininet subnet (default 10.0/8)"""
# Identify the interface connecting to the mininet network
if localIntf == None:
localIntf = root.defaultIntf()
# Flush any currently active rules
root.cmd( 'iptables -F' )
root.cmd( 'iptables -t nat -F' )
# Create default entries for unmatched traffic
root.cmd( 'iptables -P INPUT ACCEPT' )
root.cmd( 'iptables -P OUTPUT ACCEPT' )
root.cmd( 'iptables -P FORWARD DROP' )
# Configure NAT
root.cmd( 'iptables -I FORWARD -i', localIntf, '-d', subnet, '-j DROP' )
root.cmd( 'iptables -A FORWARD -i', localIntf, '-s', subnet, '-j ACCEPT' )
root.cmd( 'iptables -A FORWARD -i', inetIntf, '-d', subnet, '-j ACCEPT' )
root.cmd( 'iptables -t nat -A POSTROUTING -o ', inetIntf, '-j MASQUERADE' )
# Instruct the kernel to perform forwarding
root.cmd( 'sysctl net.ipv4.ip_forward=1' )
def fixNetworkManager( root, intf ):
"""Prevent network-manager from messing with our interface,
by specifying manual configuration in /etc/network/interfaces
root: a node in the root namespace (for running commands)
intf: interface name"""
cfile = '/etc/network/interfaces'
line = '\niface %s inet manual\n' % intf
config = open( cfile ).read()
if ( line ) not in config:
print '*** Adding', line.strip(), 'to', cfile
with open( cfile, 'a' ) as f:
f.write( line )
# Probably need to restart network-manager to be safe -
# hopefully this won't disconnect you
root.cmd( 'service network-manager restart' )
if __name__ == '__main__':
setLogLevel( 'info' )
topology()
|
[
"[email protected]"
] | |
21d3f37f0ebe4ec592d700a1d4acdf2080efe131
|
c77a40408bc40dc88c466c99ab0f3522e6897b6a
|
/Programming_basics/Exercise_6/PasswordGenerator.py
|
d16198682725a9db48d9d7f698aaecd4211c4375
|
[] |
no_license
|
vbukovska/SoftUni
|
3fe566d8e9959d390a61a4845381831929f7d6a3
|
9efd0101ae496290313a7d3b9773fd5111c5c9df
|
refs/heads/main
| 2023-03-09T17:47:20.642393 | 2020-12-12T22:14:27 | 2021-02-16T22:14:37 | 328,805,705 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 323 |
py
|
num = int(input())
let = int(input())
for i_1 in range(1, num+1):
for i_2 in range(1, num+1):
for i_3 in range(97, 97+let):
for i_4 in range(97, 97+let):
for i_5 in range(max(i_1, i_2)+1, num+1):
print(f'{str(i_1)+str(i_2)+chr(i_3)+chr(i_4)+str(i_5)}', end=' ')
|
[
"[email protected]"
] | |
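For example, feeding the script num = 2 and let = 2 on stdin yields the four passwords whose last digit exceeds both leading digits:
11aa2 11ab2 11ba2 11bb2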
ec3279a0d583a81c3f3babb1c9cf24cf74075378
|
2e4023d59718d87e1940b27ada9155a9a47a7668
|
/tests/serialization/serializers_test.py
|
78ee84a4e45f73535abd4bd8f1ecd15917121351
|
[
"Apache-2.0"
] |
permissive
|
olukas/hazelcast-python-client
|
c71038a22b73de894320d641dbf617509049c63d
|
63bcbaaef0bf755e4e94e8e536d19d964e02144a
|
refs/heads/master
| 2020-03-20T21:27:02.460282 | 2018-06-18T11:50:39 | 2018-06-19T12:02:37 | 137,741,377 | 0 | 0 | null | 2018-06-18T11:02:55 | 2018-06-18T11:02:55 | null |
UTF-8
|
Python
| false | false | 3,526 |
py
|
import binascii
from hzrc.ttypes import Lang
from hazelcast.config import SerializationConfig, INTEGER_TYPE
from hazelcast.serialization.data import Data
from hazelcast.serialization.serialization_const import CONSTANT_TYPE_DOUBLE
from hazelcast.serialization.service import SerializationServiceV1
from tests.base import SingleMemberTestCase
class SerializersTestCase(SingleMemberTestCase):
def setUp(self):
config = SerializationConfig()
config.default_integer_type = INTEGER_TYPE.BIG_INT
self.service = SerializationServiceV1(serialization_config=config)
def tearDown(self):
self.service.destroy()
def test_none_serializer(self):
none = None
data_n = self.service.to_data(none)
self.assertIsNone(data_n)
self.assertIsNone(self.service.to_object(Data()))
def test_boolean_serializer(self):
true = True
false = False
data_t = self.service.to_data(true)
data_f = self.service.to_data(false)
obj_t = self.service.to_object(data_t)
obj_f = self.service.to_object(data_f)
self.assertEqual(true, obj_t)
self.assertEqual(false, obj_f)
def test_char_type_serializer(self):
buff = bytearray(binascii.unhexlify("00000000fffffffb00e7"))
data = Data(buff)
obj = self.service.to_object(data)
self.assertEqual(unichr(0x00e7), obj)
def test_float(self):
buff = bytearray(binascii.unhexlify("00000000fffffff700000000"))
data = Data(buff)
obj = self.service.to_object(data)
self.assertEqual(0.0, obj)
def test_double(self):
double = 1.0
data = self.service.to_data(double)
obj = self.service.to_object(data)
self.assertEqual(data.get_type(), CONSTANT_TYPE_DOUBLE)
self.assertEqual(double, obj)
def test_datetime(self):
year = 2000
month = 11
day = 15
hour = 23
minute = 59
second = 49
script = """
from java.util import Date, Calendar
cal = Calendar.getInstance()
cal.set({}, ({}-1), {}, {}, {}, {})
result=instance_0.getSerializationService().toBytes(cal.getTime())
""".format(year, month, day, hour, minute, second)
response = self.rc.executeOnController(self.cluster.id, script, Lang.PYTHON)
data = Data(response.result)
val = self.service.to_object(data)
self.assertEqual(year, val.year)
self.assertEqual(month, val.month)
self.assertEqual(day, val.day)
self.assertEqual(hour, val.hour)
self.assertEqual(minute, val.minute)
self.assertEqual(second, val.second)
def test_big_int_small(self):
self._big_int_test(12)
def test_big_int_small_neg(self):
self._big_int_test(-13)
def test_big_int(self):
self._big_int_test(1234567890123456789012345678901234567890)
def test_big_int_neg(self):
self._big_int_test(-1234567890123456789012345678901234567890)
def _big_int_test(self, big_int):
script = """from java.math import BigInteger
result=instance_0.getSerializationService().toBytes(BigInteger("{}",10))""".format(big_int)
response = self.rc.executeOnController(self.cluster.id, script, Lang.PYTHON)
data = Data(response.result)
val = self.service.to_object(data)
data_local = self.service.to_data(big_int)
self.assertEqual(binascii.hexlify(data._buffer), binascii.hexlify(data_local._buffer))
self.assertEqual(big_int, val)
|
[
"[email protected]"
] | |
23033e06f849b85dadc20b94437ee03c24802976
|
c7f43c4cc0ee84a5fe246b67f51e30b8d726ebd5
|
/keras/keras09_mlp.py
|
44099b84aa0e9f80008f67c742b96110ca820afa
|
[] |
no_license
|
89Mansions/AI_STUDY
|
d9f8bdf206f14ba41845a082e731ea844d3d9007
|
d87c93355c949c462f96e85e8d0e186b0ce49c76
|
refs/heads/master
| 2023-07-21T19:11:23.539693 | 2021-08-30T08:18:59 | 2021-08-30T08:18:59 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 338 |
py
|
import numpy as np
# x = np.array([1,2,3,4,5,6,7,8,9,10]) # vector x made up of 10 scalars
x = np.array([[1,2,3,4,5,6,7,8,9,10],
              [1,2,3,4,5,6,7,8,9,10]]) # matrix made up of two vectors of 10 scalars each
y = np.array([1,2,3,4,5,6,7,8,9,10])
print(x.shape) # (10,) - means 10 scalars ----> (2, 10)
|
[
"[email protected]"
] | |
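A hedged continuation of the shape experiment above (not part of the original snippet): Keras expects samples as rows, so the (2, 10) matrix is usually transposed before being fed to an MLP.
x = np.transpose(x)   # or x.T
print(x.shape)        # (10, 2): 10 samples, 2 features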
e2bcbcc8eabdb541cdd13185af9f8b4f40943c05
|
79bf34ad2894c92a8ad887404225295595313958
|
/ex44d.py
|
3641234825b4c46314357fee5adaa74cce562d33
|
[
"MIT"
] |
permissive
|
sogada/python
|
98ac577a18d709a13ace2a56d27e675edeeb032b
|
4bdad72bc2143679be6d1f8722b83cc359753ca9
|
refs/heads/master
| 2020-04-21T00:12:44.872044 | 2015-10-29T20:18:02 | 2015-10-29T20:18:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 751 |
py
|
class Parent(object):
def override(self):
print "PARENT override()"
def implicit(self):
print "PARENT implicit()"
def altered(self):
print "PARENT altered()"
class Child(Parent):
def override(self):
print "CHILD override()"
def altered(self):
print "CHILD, BEFORE PARENT altered()"
super(Child, self).altered()
print "CHILD, AFTER PARENT altered()"
dad = Parent()
son = Child()
#Child inherits implicit
dad.implicit()
son.implicit()
#Child overrides the override() function from Parent
dad.override()
son.override()
#Child overrides the altered() method from Parent, then uses super
#to inherit and use the original function from Parent
dad.altered()
son.altered()
|
[
"[email protected]"
] | |
5bdd168eca6ca9a05b5765cb0375fb4bd7b45dc1
|
16f0171b1aecb8d104a208df4953884a9ab97b26
|
/googlenet_regression/get_regressions_batch.py
|
5412aac0db52a3a1cebf4611c8f5168f70565739
|
[] |
no_license
|
gombru/LearnFromWebData
|
97538dd91822a0e2a7d12084cde0d9dbf64f3c70
|
163447027c856004836abe40d9f653ec03da0702
|
refs/heads/master
| 2020-03-24T23:12:43.819864 | 2018-08-01T12:25:10 | 2018-08-01T12:25:10 | 143,123,717 | 13 | 7 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,341 |
py
|
import caffe
import numpy as np
from PIL import Image
import os
caffe.set_device(0)
caffe.set_mode_gpu()
test = np.loadtxt('../../../datasets/SocialMedia/word2vec_mean_gt/test_InstaCities1M.txt', dtype=str)
# test = np.loadtxt('../../../datasets/WebVision/info/test_filelist.txt', dtype=str)
#Model name
model = 'WebVision_Inception_frozen_word2vec_tfidfweighted_divbymax_iter_460000'
#Output file
output_file_dir = '../../../datasets/SocialMedia/regression_output/' + model
if not os.path.exists(output_file_dir):
os.makedirs(output_file_dir)
output_file_path = output_file_dir + '/test.txt'
output_file = open(output_file_path, "w")
# load net
net = caffe.Net('../googlenet_regression/prototxt/deploy.prototxt', '../../../datasets/WebVision/models/saved/'+ model + '.caffemodel', caffe.TEST)
size = 227
# Reshape net
batch_size = 250 #300
net.blobs['data'].reshape(batch_size, 3, size, size)
print 'Computing ...'
count = 0
i = 0
while i < len(test):
indices = []
if i % 100 == 0:
print i
# Fill batch
for x in range(0, batch_size):
if i > len(test) - 1: break
# load image
# filename = '../../../datasets/WebVision/test_images_256/' + test[i]
filename = '../../../datasets/SocialMedia/img_resized_1M/cities_instagram/' + test[i].split(',')[0] + '.jpg'
im = Image.open(filename)
im_o = im
im = im.resize((size, size), Image.ANTIALIAS)
indices.append(test[i])
# Turn grayscale images to 3 channels
        if im.mode != 'RGB':
im_gray = im
im = Image.new("RGB", im_gray.size)
im.paste(im_gray)
#switch to BGR and substract mean
in_ = np.array(im, dtype=np.float32)
in_ = in_[:,:,::-1]
in_ -= np.array((104, 117, 123))
in_ = in_.transpose((2,0,1))
net.blobs['data'].data[x,] = in_
i += 1
# run net and take scores
net.forward()
# Save results for each batch element
for x in range(0,len(indices)):
topic_probs = net.blobs['probs'].data[x]
topic_probs_str = ''
for t in topic_probs:
topic_probs_str = topic_probs_str + ',' + str(t)
output_file.write(indices[x].split(',')[0] + topic_probs_str + '\n')
output_file.close()
print "DONE"
print output_file_path
|
[
"[email protected]"
] | |
db6f9e619cc3eb6af96cb90589f32f741554459c
|
c78ce4f66cc964c230ad60fbf2ced6b4811eab89
|
/0x10-python-network_0/6-peak.py
|
ab8163dbefd2215b422669954178d075b0be06a2
|
[] |
no_license
|
jebichii/holbertonschool-higher_level_programming-1
|
89026557909851dd775ae355f036db89ebd9adb9
|
741953aa479af90e8eac6f1315415eff4a20224f
|
refs/heads/master
| 2023-03-15T14:58:27.062528 | 2020-06-11T07:21:23 | 2020-06-11T07:21:23 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 661 |
py
|
#!/usr/bin/python3
"""
Provides a function to find a peak element in an unsorted list of integers
"""
def find_peak(integers):
"""
Finds a peak element in an unsorted list of integers
"""
if not integers:
return None
if len(integers) == 1:
return integers[0]
if len(integers) == 2:
return integers[0] if integers[0] > integers[1] else integers[1]
midpoint = len(integers) // 2
if integers[midpoint] < integers[midpoint - 1]:
return find_peak(integers[:midpoint])
if integers[midpoint] < integers[midpoint + 1]:
return find_peak(integers[midpoint + 1:])
return integers[midpoint]
|
[
"[email protected]"
] | |
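A couple of illustrative calls (when several peaks exist, any one of them is a valid answer):
print(find_peak([1, 2, 4, 6, 3]))      # 6
print(find_peak([4, 2, 1, 2, 3, 1]))   # 3
print(find_peak([]))                   # None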
d3644245fbb6e118e01fef312221feff42ab5904
|
892c35f72f46f145c3f3860c1c29f1f4503ef9a6
|
/solid/management/commands/solid_utils.py
|
bf76df8227f11afddcb1cdf4ef3e92ed3ccaa1ab
|
[] |
no_license
|
pymmrd/tuangou
|
aaa2b857e352f75f2ba0aa024d2880a6adac21a8
|
8f6a35dde214e809cdd6cbfebd8d913bafd68fb2
|
refs/heads/master
| 2021-01-10T20:31:55.238764 | 2013-11-13T13:53:53 | 2013-11-13T13:53:53 | 7,911,285 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 432 |
py
|
import os
from django.conf import settings
def gen_dest_tmpl(html, tmpl, flag=None):
tmpl = tmpl.replace('dy_tags', 'tags')
sub_dir, filename = tmpl.rsplit('/', 1)
if flag:
filename = flag
tmpl_dir = os.path.join(settings.TEMPLATE_DIRS[0], sub_dir)
if not os.path.exists(tmpl_dir):
os.makedirs(tmpl_dir)
with open(os.path.join(tmpl_dir, filename), 'w') as f:
f.write(html)
|
[
"zg163@zg163-Lenovo-IdeaPad-Y470.(none)"
] |
zg163@zg163-Lenovo-IdeaPad-Y470.(none)
|
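A usage sketch, assuming Django settings are configured with at least one entry in TEMPLATE_DIRS; the path below is a made-up example, and the 'dy_tags' segment is rewritten to 'tags' before the file is written:
gen_dest_tmpl('<p>rendered html</p>', 'deals/dy_tags/detail.html')
# writes <TEMPLATE_DIRS[0]>/deals/tags/detail.html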
e3ae61193e0a2880e6eb878f379a07f656630931
|
a722faf9fb50c794555861bb4858c3ed8a7a25f3
|
/contest/atcoder/abc095/D/main.py
|
7f2f0044567a122b8832c3dbfb0972c08712b132
|
[] |
no_license
|
ar90n/lab
|
31e5d2c320de5618bc37572011596fee8923255d
|
6d035e12f743e9ba984e79bfe660967b9ca8716b
|
refs/heads/main
| 2023-07-25T17:29:57.960915 | 2023-07-22T12:08:18 | 2023-07-22T12:08:18 | 77,883,405 | 4 | 0 | null | 2023-07-17T08:45:14 | 2017-01-03T04:15:49 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 1,428 |
py
|
#!/usr/bin/env python3
import sys
from collections.abc import Iterable
from math import *
from itertools import *
from collections import *
from functools import *
from operator import *
try:
from math import gcd
except Exception:
from fractions import gcd
def solve(N: int, C: int, x: "List[int]", v: "List[int]"):
x = [0] + x
v = [0] + v
mx_r = [0]
for xx, acc in zip(x[1:], accumulate(v[1:], add)):
mx_r.append(max(acc - xx, mx_r[-1]))
mx_l = [0]
for xx, cal in zip(reversed(x), accumulate(reversed(v), add)):
mx_l.append(max(cal - (C - xx), mx_l[-1]))
mx_l.reverse()
ans = 0
for i in range(N+1):
ans = max(mx_r[i], mx_r[i] - x[i] + mx_l[i+1], ans)
if i != 0:
ans = max(mx_l[i], mx_l[i] - (C - x[i]) + mx_r[i-1], ans)
return ans
def main():
def iterate_tokens():
for line in sys.stdin:
for word in line.split():
yield word
tokens = iterate_tokens()
N = int(next(tokens)) # type: int
C = int(next(tokens)) # type: int
x = [int()] * (N) # type: "List[int]"
v = [int()] * (N) # type: "List[int]"
for i in range(N):
x[i] = int(next(tokens))
v[i] = int(next(tokens))
result = solve(N, C, x, v)
if isinstance(result, Iterable):
result = '\n'.join([str(v) for v in result])
print(result)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
5edbe851415c7f12fe01314ef03eec162a7e5354
|
1afa1b1929d1cd463cd9970174dd58ce2ca6eb1e
|
/configs/deeplabv3plus/deeplabv3plus_r101-d8_512x512_40k_voc12aug.py
|
9e3cd3501becb0dd284113d675963a2c474b247b
|
[
"Apache-2.0"
] |
permissive
|
CAU-HE/CMCDNet
|
2328594bf4b883384c691099c72e119b65909121
|
31e660f81f3b625916a4c4d60cd606dcc8717f81
|
refs/heads/main
| 2023-08-08T17:21:57.199728 | 2023-07-28T07:34:40 | 2023-07-28T07:34:40 | 589,927,845 | 12 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 140 |
py
|
_base_ = './deeplabv3plus_r50-d8_512x512_40k_voc12aug.py'
model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
|
[
"[email protected]"
] | |
f925b0bbc8dff7cade5763ea534cd301ea570730
|
d36471a481ff0ff71aa277d14928a48db9b6140b
|
/melons.py
|
1ecfacead0e3930d7b11d129e6adb944e4fa10f5
|
[] |
no_license
|
Quynhd07/melons-classes
|
ca0e47f694cc6337136ca2431f7a856e9135b3ea
|
f668d5fd97dd7c3a37bd26bbfe2310324fdd388c
|
refs/heads/master
| 2020-12-30T00:40:33.897567 | 2020-02-07T21:05:17 | 2020-02-07T21:05:17 | 238,799,803 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,309 |
py
|
"""Classes for melon orders."""
class AbstractMelonOrder():
"""An abstract base class that other Melon Orders inherit from."""
def __init__(self, species, qty):
"""Initialize melon order attributes."""
self.species = species
self.qty = qty
self.shipped = False
def get_total(self):
"""Calculate price, including tax."""
base_price = 5
# if species == christmas melons
if self.species == "Christmas melons":
# multiple base price by 1.5
base_price = base_price * 1.5
total = (1 + self.tax) * self.qty * base_price
return total
def mark_shipped(self):
"""Record the fact than an order has been shipped."""
self.shipped = True
class DomesticMelonOrder(AbstractMelonOrder):
"""A melon order within the USA."""
def __init__(self, species, qty):
"""Initialize melon order attributes."""
super().__init__(species, qty)
self.order_type = "domestic"
self.tax = 0.08
class InternationalMelonOrder(AbstractMelonOrder):
"""An international (non-US) melon order."""
def __init__(self, species, qty, country_code):
"""Initialize melon order attributes."""
        super().__init__(species, qty)
self.country_code = country_code
self.order_type = "international"
self.tax = .15
def get_country_code(self):
"""Return the country code."""
return self.country_code
def get_total(self):
total = super().get_total()
# base_price = 5
# if species == christmas melons
# if self.species == "Christmas melons":
# # multiple base price by 1.5
# base_price = base_price * 1.5
if self.qty < 10:
flat_fee = 3
total = total + flat_fee
return total
class GovernmentMelonOrder(AbstractMelonOrder):
def __init__(self, species, qty):
"""Initialize melon order attributes."""
super().__init__(species, qty)
self.passed_inspection = False
self.tax = 1
# create mark_inspection method
def mark_inspection(self, bool):
if bool == 'passed':
self.passed_inspection = True
return self.passed_inspection
|
[
"[email protected]"
] | |
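A short usage sketch of the order classes above; the species, quantity, and country code values are arbitrary and only illustrate the arithmetic:
order = InternationalMelonOrder("Christmas melons", 5, "AU")
print(order.get_total())    # (1 + 0.15) * 5 * 7.5 + 3 flat fee = 46.125
order.mark_shipped()
gov = GovernmentMelonOrder("crenshaw", 2)
print(gov.mark_inspection("passed"))   # True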
c0a323d6563dda7c8ac2b49d827352f6379ba03d
|
caed98915a93639e0a56b8296c16e96c7d9a15ab
|
/DP/stocks/Stock_II.py
|
5ec46fcd78856a48fdfcf6ebdb61c059e7384e15
|
[] |
no_license
|
PiyushChandra17/365-Days-Of-LeetCode
|
0647787ec7e8f1baf10b6bfc687bba06f635838c
|
7e9e9d146423ca2c5b1c6a3831f21dd85fa376d5
|
refs/heads/main
| 2023-02-13T10:41:36.110303 | 2021-01-17T11:58:51 | 2021-01-17T11:58:51 | 319,974,573 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 153 |
py
|
class Solution:
def maxProfit(self, prices: List[int]) -> int:
        return sum(max(prices[i + 1] - prices[i], 0) for i in range(len(prices) - 1))
|
[
"[email protected]"
] | |
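Run locally (outside the LeetCode judge, which normally supplies the typing import), the greedy one-liner above sums every positive day-to-day gain:
from typing import List   # must precede the class definition when run locally
print(Solution().maxProfit([7, 1, 5, 3, 6, 4]))   # 7  (buy at 1 sell at 5, buy at 3 sell at 6)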
5acd1660ba5455bc1084047dc66d3485dde5efb6
|
fb9c24e1e27c930881f54a0d609683983c726cec
|
/main/migrations/0032_auto_20210326_1139.py
|
9fac8da7459eaeeeeff9e17fe2d1e1408b18388e
|
[] |
no_license
|
Safintim/flower-shop
|
6ba28f3f82912bcedd8c7d1e259557cda729410e
|
92c0b7488b5370fc5512d6ce85f0e76a2a55bdbd
|
refs/heads/master
| 2023-04-08T18:48:43.866959 | 2021-04-14T10:18:06 | 2021-04-14T10:18:06 | 254,976,051 | 0 | 0 | null | 2020-06-06T08:55:36 | 2020-04-11T23:50:58 |
Python
|
UTF-8
|
Python
| false | false | 371 |
py
|
# Generated by Django 3.1.7 on 2021-03-26 11:39
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0031_auto_20210326_1118'),
]
operations = [
migrations.DeleteModel(
name='Callback',
),
migrations.DeleteModel(
name='Configuration',
),
]
|
[
"[email protected]"
] |