blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ef8433f6bae0df2f57342d5ef4f9efcd844ecde0
|
ddf1267a1a7cb01e70e3b12ad4a7bfaf291edb3e
|
/src/search/tasks.py
|
2c428bb843cb84de7aa107d3c9693be9e16496f7
|
[
"MIT"
] |
permissive
|
Garinmckayl/researchhub-backend
|
46a17513c2c9928e51db4b2ce5a5b62df453f066
|
cd135076d9a3b49a08456f7ca3bb18ff35a78b95
|
refs/heads/master
| 2023-06-17T04:37:23.041787 | 2021-05-18T01:26:46 | 2021-05-18T01:26:46 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,339 |
py
|
from oauth.utils import get_orcid_works, check_doi_in_works
from paper.models import Paper
from paper.utils import download_pdf
from researchhub.celery import app
from utils.orcid import orcid_api
from user.models import Author
from purchase.models import Wallet
VALID_LICENSES = []
@app.task
def download_pdf_by_license(item, paper_id):
    """Celery task: attach a PDF file to a Paper when `item` carries a valid license.

    `item` is a Crossref-style record expected to have 'license' and 'links'
    keys.  NOTE(review): VALID_LICENSES is empty at module level, so the
    download branch never runs until licenses are whitelisted — confirm.
    """
    try:
        licenses = item['license']
        for license in licenses:
            if license in VALID_LICENSES:
                pdf, filename = get_pdf_and_filename(item['links'])
                paper = Paper.objects.get(pk=paper_id)
                paper.file.save(filename, pdf)
                paper.save(update_fields=['file'])
                break
    except Exception:
        # Best-effort task: missing keys, lookup or download failures are
        # deliberately swallowed so the task never retries/errors.
        pass
def get_pdf_and_filename(links):
    """Download the first link with content-type application/pdf.

    Returns the (pdf, filename) pair from `download_pdf`, or (None, None)
    when no PDF link is present.
    """
    pdf_candidates = (entry for entry in links
                      if entry['content-type'] == 'application/pdf')
    for candidate in pdf_candidates:
        return download_pdf(candidate['URL'])
    return None, None
@app.task
def create_authors_from_crossref(crossref_authors, paper_id, paper_doi):
    """Celery task: create Author records for a paper from Crossref author data.

    For each Crossref author, prefer the record's explicit ORCID id when
    present; otherwise search ORCID by name and keep only candidates whose
    published works include `paper_doi`.
    """
    paper = None
    try:
        paper = Paper.objects.get(pk=paper_id)
    except Paper.DoesNotExist:
        # Authors can still be created without being linked to a paper.
        pass
    for crossref_author in crossref_authors:
        try:
            first_name = crossref_author['given']
            last_name = crossref_author['family']
        except KeyError:
            # NOTE(review): `break` abandons all remaining authors on the
            # first record missing a name; `continue` may have been the
            # intent — confirm before changing.
            break
        affiliation = None
        if len(crossref_author['affiliation']) > 0:
            FIRST = 0  # only the first listed affiliation is used
            affiliation = crossref_author['affiliation'][FIRST]['name']
        try:
            # ORCID values look like full URLs; the id is the last path part.
            orcid_id = crossref_author['ORCID'].split('/')[-1]
            get_or_create_orcid_author(orcid_id, first_name, last_name, paper)
        except KeyError:
            # No explicit ORCID id on the record; fall back to a name search.
            orcid_authors = search_orcid_author(
                first_name,
                last_name,
                affiliation
            )
            for orcid_author in orcid_authors:
                works = get_orcid_works(orcid_author)
                if (len(works) > 0) and check_doi_in_works(paper_doi, works):
                    create_orcid_author(orcid_author, paper)
def search_orcid_author(given_names, family_name, affiliation=None):
    """Return full ORCID records for authors matching the given name.

    Performs one name search, then one id lookup per candidate to fetch the
    full record.  `affiliation` is currently unused.  Any API or parsing
    failure is printed and the matches collected so far are returned.
    """
    matches = []
    try:
        author_name_results = orcid_api.search_by_name(
            given_names,
            family_name
        )
        authors = author_name_results.json()['result']
        if authors is not None:
            for author in authors:
                uid = author['orcid-identifier']['path']
                # Extra round-trip per candidate for the full author record.
                author_id_results = orcid_api.search_by_id(uid)
                matches.append(author_id_results.json())
    except Exception as e:
        print(e)
    return matches
def create_orcid_author(orcid_author, paper):
    """Create or fetch an Author from a full ORCID record and link it to `paper`."""
    person_name = orcid_author['person']['name']
    orcid_id = orcid_author['orcid-identifier']['path']
    get_or_create_orcid_author(
        orcid_id,
        person_name['given-names']['value'],
        person_name['family-name']['value'],
        paper,
    )
def get_or_create_orcid_author(orcid_id, first_name, last_name, paper):
    """Idempotently create an Author (and its Wallet) keyed by ORCID id,
    then link the author to `paper` when one is given.

    Bug fix: Django model managers are accessed as ``Model.objects``; the
    original ``Author.models.get_or_create`` / ``Wallet.models.get_or_create``
    raised AttributeError on every call.
    """
    author, created = Author.objects.get_or_create(
        orcid_id=orcid_id,
        defaults={
            'first_name': first_name,
            'last_name': last_name,
        }
    )
    # Every author gets a wallet; get_or_create keeps repeat calls safe.
    wallet, _ = Wallet.objects.get_or_create(
        author=author
    )
    if paper is not None:
        paper.authors.add(author)
|
[
"[email protected]"
] | |
3fa9322ab882012f8dd6fc64efa180bbd27ec444
|
f0856e60a095ce99ec3497b3f27567803056ac60
|
/keras/keras19~31[scaler, CNN(GAP,DNN)]/keras31_cifar100_3_Djsull.py
|
9150dcc697bf72ace67e7f4f1b9da8a5c55e6d9b
|
[] |
no_license
|
hjuju/TF_Study-HAN
|
dcbac17ce8b8885f5fb7d7f554230c2948fda9ac
|
c0faf98380e7f220868ddf83a9aaacaa4ebd2c2a
|
refs/heads/main
| 2023-09-04T09:13:33.212258 | 2021-10-27T08:00:49 | 2021-10-27T08:00:49 | 384,371,952 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,418 |
py
|
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler, StandardScaler, RobustScaler, QuantileTransformer,MaxAbsScaler, PowerTransformer, OneHotEncoder
from tensorflow.keras.datasets import cifar100
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Dense, Input, Conv2D, Flatten, MaxPool2D
from keras.utils import to_categorical
from tensorflow.keras.callbacks import EarlyStopping
from icecream import ic
import time
#1. Data preprocessing
(x_train, y_train), (x_test, y_test) = cifar100.load_data()
x_train = x_train.reshape(50000, 32 * 32 * 3)
x_test = x_test.reshape(10000, 32 * 32 * 3) # flatten images to 2-D (samples, features) so sklearn scalers can be applied
ic(x_train)
# # x_train = x_train/255.
# # x_test = x_test/255.
# y_train = to_categorical(y_train)
# y_test = to_categorical(y_test)
# scaler = StandardScaler()
# x_train = scaler.fit_transform(x_train) # x_train에서만 사용가능 x_train = scaler.fit(x_train), x_train = scaler.transform(x_train)를 한줄로
# x_test = scaler.transform(x_test)
# x_train = x_train.reshape(50000, 32, 32, 3)
# x_test = x_test.reshape(10000, 32 ,32, 3) # 스케일링 후 4차원으로 원위치
# # print(np.unique(y_train)) # [0 1 2 3 4 5 6 7 8 9]
# # one = OneHotEncoder() # shape를 2차원으로 잡아야함
# # y_train = y_train.reshape(-1,1) # 2차원으로 변경
# # y_test = y_test.reshape(-1,1)
# # one.fit(y_train)
# # y_train = one.transform(y_train).toarray() # (50000, 100)
# # y_test = one.transform(y_test).toarray() # (10000, 100)
# # to categorical -> 3,4,6,8 되어있어도 0,1,2가 자동생성(shape에 더 유연)
# # 3, 4, 5 ,6, 7 이면 그대로 3,4,5,6,7(shape가 2차원이어야함)
# #2. 모델링
# model = Sequential()
# model.add(Conv2D(filters=128, kernel_size=(2, 2), padding='valid', activation='relu', input_shape=(32, 32, 3)))
# model.add(Conv2D(128, (2, 2), padding='same', activation='relu'))
# model.add(MaxPool2D())
# model.add(Conv2D(128, (2, 2), padding='valid', activation='relu'))
# model.add(Conv2D(128, (2, 2), padding='same', activation='relu'))
# model.add(MaxPool2D())
# model.add(Conv2D(64, (2, 2), activation='relu'))
# model.add(Conv2D(64, (2, 2), padding='same', activation='relu')) # 큰사이즈 아닌 이상 4,4 까지 올라가지 않음
# model.add(MaxPool2D()) # 556개 / 나가는 데이터를 확인해서 레이의 노드 개수 구성
# model.add(Flatten())
# model.add(Dense(128, activation='relu'))
# model.add(Dense(128, activation='relu'))
# model.add(Dense(128, activation='relu'))
# model.add(Dense(100, activation='softmax'))
# #3. 컴파일, 훈련
# es = EarlyStopping(monitor='val_loss', patience=10, mode='auto', verbose=1)
# model.compile(loss='categorical_crossentropy', optimizer='adam',
# metrics=['acc'])
# start = time.time()
# hist = model.fit(x_train, y_train, epochs=100, batch_size=64,
# validation_split=0.25, callbacks=[es])
# 걸린시간 = round((time.time() - start) /60,1)
# #4. evaluating, prediction
# loss = model.evaluate(x_test, y_test, batch_size=128)
# print('loss = ', loss[0])
# print('accuracy = ', loss[1])
# ic(f'{걸린시간}분')
# import matplotlib.pyplot as plt
# plt.figure(figsize=(9,5))
# #1
# plt.subplot(2,1,1) # 그림을 2개그리는데 1행1렬
# plt.plot(hist.history['loss'], marker='.', c='red', label='loss')
# plt.plot(hist.history['val_loss'], marker='.', c='blue', label='val_loss')
# plt.grid()
# plt.title('loss')
# plt.ylabel('loss')
# plt.xlabel('epoch')
# plt.legend(loc='upper right')
# #2
# plt.subplot(2,1,2) # 그림을 2개그리는데 1행1렬
# plt.plot(hist.history['acc'])
# plt.plot(hist.history['val_acc'])
# plt.grid()
# plt.title('acc')
# plt.ylabel('acc')
# plt.xlabel('epoch')
# plt.legend(['acc', 'val_acc'])
# plt.show
# '''
# loss = 3.0406737327575684
# accuracy = 0.3928000032901764
# batch_size=64, validation_split=0.25
# loss = 5.080616474151611
# accuracy = 0.33799999952316284
# ic| f'{걸린시간}분': '3.5분'
# 모델수정 / patience=7,epochs=100, batch_size=64, validation_split=0.25
# loss = 2.777371406555176
# accuracy = 0.376800000667572
# '''
|
[
"[email protected]"
] | |
c97e9f32dd8b94b6bb3365179ef73965eccd8be5
|
bedae10cbaf676d8f309fa593028558d9a6e9c6b
|
/Algorithm/Easy/1000+/1206NextGreaterElementI.py
|
dfcffec1d8a09564dfb341ed4eb30870284fee73
|
[
"MIT"
] |
permissive
|
MartinYan623/Lint-Code
|
5800d61a54f87306c25ff2e3d535145312b42c66
|
57d2fa441d6496234615736e3f55d0b71aaa51dc
|
refs/heads/master
| 2021-06-06T13:51:19.587424 | 2021-04-21T12:23:19 | 2021-04-21T12:23:19 | 139,412,536 | 0 | 0 | null | 2020-08-08T10:28:52 | 2018-07-02T08:18:11 |
Python
|
UTF-8
|
Python
| false | false | 627 |
py
|
class Solution:
    """
    @param nums1: an array
    @param nums2: an array
    @return: find all the next greater numbers for nums1's elements in the corresponding places of nums2
    """
    def nextGreaterElement(self, nums1, nums2):
        """For each nums1[i], find the first element greater than it to the
        right of its occurrence in nums2; use -1 when none exists.

        nums1 is modified in place and returned (same contract as before).
        Improvement: a monotonic decreasing stack precomputes every nums2
        element's next-greater value in O(len(nums2)), replacing the original
        O(len(nums1) * len(nums2)) rescans.  Assumes nums2 values are unique
        (guaranteed by the problem statement; the original `.index` lookup
        relied on this too).
        """
        next_greater = {}
        pending = []  # stack of nums2 values still waiting for a greater one
        for value in nums2:
            # Any pending value smaller than `value` has found its answer.
            while pending and pending[-1] < value:
                next_greater[pending.pop()] = value
            pending.append(value)
        for i, value in enumerate(nums1):
            nums1[i] = next_greater.get(value, -1)
        return nums1
|
[
"[email protected]"
] | |
964de307289972354a1b551f7c32d12f000e98d4
|
95ec5d4d14516be1a1fdcc8bd1fb29279dfaff3c
|
/settings.py
|
513790faf5a80aaabdf246439d26eae875211e35
|
[] |
no_license
|
gitter-badger/dev4gov.org
|
16e25621a81552a6458cdd21cb96f17c7e222350
|
a03165e921d0e76ad4283c970b3e0540f7d53c75
|
refs/heads/master
| 2021-01-18T06:52:15.360799 | 2011-08-21T09:01:55 | 2011-08-21T09:01:55 | 41,620,266 | 0 | 0 | null | 2015-08-30T07:25:32 | 2015-08-30T07:25:32 | null |
UTF-8
|
Python
| false | false | 5,041 |
py
|
# Django settings for dev4gov_org project.
"""Settings module for the dev4gov_org Django project (Django ~1.3-era layout)."""

# NOTE(review): DEBUG must be False in production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', '[email protected]'),
)

MANAGERS = ADMINS

# Database engine/name/credentials are intentionally blank placeholders.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '',                      # Or path to database file if using sqlite3.
        'USER': '',                      # Not used with sqlite3.
        'PASSWORD': '',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                      # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): this SECRET_KEY is committed to source control — rotate it
# and load it from the environment for any real deployment.
SECRET_KEY = 'f4x@q6h+!nk6&=nf#ro5hh(p-%!ohxm_s70dyd7e@1@7@t)s3g'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'dev4gov_org.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
|
[
"[email protected]"
] | |
96dbfb206fea6616d529302a4dd2d8b79d04dcdb
|
4e8876d7b29cf9fb05849da77553b8a7e3783bdc
|
/src/plugins/processing/algs/gdal/contour.py
|
e25947294d997251bea2fcf065aa480f5e025270
|
[] |
no_license
|
hydrology-tep/hep-qgis-plugin-lite
|
48477f504b6fc1a9a9446c7c7f5666f4b2ccfee7
|
781cbaa1b3e9331de6741dd44a22322048ab176c
|
refs/heads/master
| 2021-03-27T17:01:18.284421 | 2018-06-27T12:09:58 | 2018-06-27T12:09:58 | 70,825,462 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,908 |
py
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
contour.py
---------------------
Date : September 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.core.parameters import ParameterRaster
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterString
from processing.core.outputs import OutputVector
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class contour(GdalAlgorithm):
    """Processing algorithm wrapping GDAL's ``gdal_contour`` command-line tool.

    Builds contour lines from an input raster at a fixed interval.
    NOTE: uses ``unicode``, so this targets Python 2 (QGIS 2.x API).
    """

    # Parameter / output identifiers used by the Processing framework.
    INPUT_RASTER = 'INPUT_RASTER'
    OUTPUT_VECTOR = 'OUTPUT_VECTOR'
    INTERVAL = 'INTERVAL'
    FIELD_NAME = 'FIELD_NAME'
    EXTRA = 'EXTRA'

    def getIcon(self):
        """Return the toolbox icon for this algorithm."""
        return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'contour.png'))

    def defineCharacteristics(self):
        """Declare the algorithm's name, group, parameters and output."""
        self.name, self.i18n_name = self.trAlgorithm('Contour')
        self.group, self.i18n_group = self.trAlgorithm('[GDAL] Extraction')

        self.addParameter(ParameterRaster(self.INPUT_RASTER,
                                          self.tr('Input layer'), False))
        self.addParameter(ParameterNumber(self.INTERVAL,
                                          self.tr('Interval between contour lines'), 0.0,
                                          99999999.999999, 10.0))
        self.addParameter(ParameterString(self.FIELD_NAME,
                                          self.tr('Attribute name (if not set, no elevation attribute is attached)'),
                                          'ELEV', optional=True))
        # Free-form extra flags passed verbatim to gdal_contour.
        self.addParameter(ParameterString(self.EXTRA,
                                          self.tr('Additional creation parameters'), '', optional=True))
        self.addOutput(OutputVector(self.OUTPUT_VECTOR,
                                    self.tr('Contours')))

    def getConsoleCommands(self):
        """Assemble the gdal_contour invocation from the parameter values.

        Returns a two-element list: the executable name and the joined,
        escaped argument string.
        """
        output = self.getOutputValue(self.OUTPUT_VECTOR)
        interval = unicode(self.getParameterValue(self.INTERVAL))
        fieldName = unicode(self.getParameterValue(self.FIELD_NAME))
        extra = self.getParameterValue(self.EXTRA)
        if extra is not None:
            extra = unicode(extra)

        arguments = []
        # Only attach an elevation attribute when a field name was given.
        if len(fieldName) > 0:
            arguments.append('-a')
            arguments.append(fieldName)
        arguments.append('-i')
        arguments.append(interval)

        # Choose the OGR driver from the output file extension.
        driver = GdalUtils.getVectorDriverFromFileName(output)
        arguments.append('-f')
        arguments.append(driver)

        if extra and len(extra) > 0:
            arguments.append(extra)

        arguments.append(self.getParameterValue(self.INPUT_RASTER))
        arguments.append(output)

        return ['gdal_contour', GdalUtils.escapeAndJoin(arguments)]
|
[
"[email protected]"
] | |
ab5d8fbd62d3448fb69cf6581a66121ca6459a25
|
459929ce79538ec69a6f8c32e608f4e484594d68
|
/venv/Lib/site-packages/virtualbox/__about__.py
|
600822f21eb32a6edbdfa087453d0b2e1ea10fc2
|
[
"Apache-2.0"
] |
permissive
|
yychai97/Kubernetes
|
ec2ef2a98a4588b7588a56b9d661d63222278d29
|
2955227ce81bc21f329729737b5c528b02492780
|
refs/heads/master
| 2023-07-02T18:36:41.382362 | 2021-08-13T04:20:27 | 2021-08-13T04:20:27 | 307,412,544 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 303 |
py
|
# Package metadata for the `virtualbox` distribution.
__title__ = "virtualbox"
__author__ = "Michael Dorman"
__author_email__ = "[email protected]"
__maintainer__ = "Seth Michael Larson"
__maintainer_email__ = "[email protected]"
__version__ = "2.1.1"
__license__ = "Apache-2.0"
__url__ = "https://github.com/sethmlarson/virtualbox-python"
|
[
"[email protected]"
] | |
4749bf6ccf6bd5a56d395c5462ac67cbfea6b435
|
7936ebf5b94c3d153fb55248b52db2eff724427c
|
/11/homework11/zhihu_top100.py
|
6fb64a0e8b94e74945b6a87d6f31271cd6307984
|
[
"MIT"
] |
permissive
|
xiaodongzi/pytohon_teach_material
|
f9e95f7b294a9e49d86d1a8e25cbef5efef3aaf7
|
13ed128a993637d0203f1f8c5419d781d7212883
|
refs/heads/master
| 2021-05-30T09:48:16.898483 | 2016-01-24T17:02:34 | 2016-01-24T17:02:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 977 |
py
|
# coding: utf-8
# Scrape the top answers of a Zhihu topic and print, for each question:
# title, absolute link, vote count and an answer summary.  Python 2 syntax.
import requests
from pyquery import PyQuery as pq

question_num = 1          # 1-based counter of questions printed so far
page_num = 1              # topic listing page being fetched
to_stop = False           # set once `scrap_questions_num` questions are printed
scrap_questions_num = 100 # stop after this many questions

while True:
    url = "http://www.zhihu.com/topic/19776749/top-answers?page=%d" % (page_num)
    res = requests.get(url)
    # print res.encoding
    for p in pq(res.content).find('.feed-main'):
        # print type(p)
        print question_num, '. ' ,pq(p).find('.question_link').text()
        relative_link = pq(p).find('.question_link').attr('href')
        absolute_link = 'http://www.zhihu.com' + relative_link
        print ' 链接 ', absolute_link
        print ' vote: ', pq(p).find('.zm-item-vote-count').text()
        print ' 回答摘要'
        # Summary text; the last 4 characters (a "show more" suffix) are dropped.
        print ' ', pq(p).find('.zh-summary').text()[:-4]
        print '-' * 60
        print
        if question_num == scrap_questions_num:
            to_stop = True
            break
        question_num += 1
    page_num += 1
    if to_stop ==True:
        break
|
[
"[email protected]"
] | |
0bf78b5a94b1e07dee662b8e341ee34aea435e03
|
54857571461a579bed30cee27871aaa5fe396bcc
|
/nltk-0.9.7/src/nltk/inference/inference.py
|
0b6d64c2a35e50e6cfaa2627aae6c30fe56517a5
|
[] |
no_license
|
ahmedBazaz/affective-text-classification
|
78375182e800b39e0e309e8b469e273c0d9590f0
|
719e9b26e60863c620662564fb9cfeafc004777f
|
refs/heads/master
| 2021-01-10T14:50:01.100274 | 2009-01-09T03:59:01 | 2009-01-09T03:59:01 | 48,296,612 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,176 |
py
|
# Natural Language Toolkit: Interface to Theorem Provers
#
# Author: Dan Garrette <[email protected]>
# Ewan Klein <[email protected]>
#
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
from nltk.sem import logic
import api
import tableau
import prover9
import mace
import resolution
"""
A wrapper module that calls theorem provers and model builders.
"""
def get_prover(goal=None, assumptions=None, prover_name=None):
    """
    Return a prover command object for C{goal} given C{assumptions}.

    @param goal: Input expression to prove
    @type goal: L{logic.Expression}
    @param assumptions: Input expressions to use as assumptions in the proof
    @type assumptions: L{list} of logic.Expression objects
    @param prover_name: case-insensitive prover name: 'tableau', 'Prover9'
        (the default when falsy) or 'resolution'; any other name raises
        an Exception.
    """
    if not prover_name:
        prover_name = 'Prover9'

    if prover_name.lower() == 'tableau':
        return api.BaseProverCommand(tableau.Tableau(), goal, assumptions)
    elif prover_name.lower() == 'prover9':
        return prover9.Prover9Command(goal, assumptions)
    elif prover_name.lower() == 'resolution':
        return resolution.ResolutionCommand(goal, assumptions)

    raise Exception('\'%s\' is not a valid prover name' % prover_name)
def get_model_builder(goal=None, assumptions=None, model_builder_name=None):
    """
    Return a model-builder command object for C{goal} given C{assumptions}.

    @param goal: Input expression to prove
    @type goal: L{logic.Expression}
    @param assumptions: Input expressions to use as assumptions in the proof
    @type assumptions: L{list} of logic.Expression objects
    @param model_builder_name: case-insensitive name; only 'Mace' (the
        default when falsy) is recognised.  NOTE(review): unlike get_prover,
        an unknown name silently returns None instead of raising — confirm
        this asymmetry is intended.
    """
    if not model_builder_name:
        model_builder_name = 'Mace'

    if model_builder_name.lower() == 'mace':
        return mace.MaceCommand(goal, assumptions)
def get_parallel_prover_builder(goal=None, assumptions=None,
                                prover_name='', model_builder_name=''):
    """Combine a prover and a model builder into one parallel command.

    Empty-string names fall through to each factory's default
    ('Prover9' and 'Mace' respectively).
    """
    prover = get_prover(prover_name=prover_name)
    model_builder = get_model_builder(model_builder_name=model_builder_name)
    return api.ParallelProverBuilderCommand(prover.get_prover(),
                                            model_builder.get_model_builder(),
                                            goal, assumptions)
def demo():
    """Exercise the provers and model builders on small examples (Python 2).

    First proves an equivalence of two existential formulas, then runs the
    Socrates syllogism through prover, model builder and the parallel command.
    """
    lp = logic.LogicParser()
    a = lp.parse(r'some x.(man(x) and walks(x))')
    b = lp.parse(r'some x.(walks(x) and man(x))')
    bicond = logic.IffExpression(a, b)
    print "Trying to prove:\n '%s <-> %s'" % (a, b)
    print 'tableau: %s' % get_prover(bicond, prover_name='tableau').prove()
    print 'Prover9: %s' % get_prover(bicond, prover_name='Prover9').prove()
    print '\n'

    lp = logic.LogicParser()
    a = lp.parse(r'all x.(man(x) -> mortal(x))')
    b = lp.parse(r'man(socrates)')
    c1 = lp.parse(r'mortal(socrates)')
    c2 = lp.parse(r'-mortal(socrates)')
    print get_prover(c1, [a,b], 'prover9').prove()
    print get_prover(c2, [a,b], 'prover9').prove()
    print get_model_builder(c1, [a,b], 'mace').build_model()
    print get_model_builder(c2, [a,b], 'mace').build_model()
    print get_parallel_prover_builder(c1, [a,b]).prove(True)
    print get_parallel_prover_builder(c1, [a,b]).build_model(True)
if __name__ == '__main__':
demo()
|
[
"tytung@6129d76e-ddfe-11dd-a37d-c9d1c40e0883"
] |
tytung@6129d76e-ddfe-11dd-a37d-c9d1c40e0883
|
f36f09b4e05bbc16b9f5367879c5ca25aebf7d66
|
bd55c7d73a95caed5f47b0031264ec05fd6ff60a
|
/apps/qa/migrations/0006_coupon_vendor_branch.py
|
35b01a8e9fea5a66447f1b448de6613892793c36
|
[] |
no_license
|
phonehtetpaing/ebdjango
|
3c8610e2d96318aff3b1db89480b2f298ad91b57
|
1b77d7662ec2bce9a6377690082a656c8e46608c
|
refs/heads/main
| 2023-06-26T13:14:55.319687 | 2021-07-21T06:04:58 | 2021-07-21T06:04:58 | 381,564,118 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 597 |
py
|
# Generated by Django 2.0.5 on 2019-03-12 08:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a nullable FK ``vendor_branch`` (-> core.VendorBranch) to qa.Coupon."""

    dependencies = [
        ('core', '0123_auto_20190312_1736'),
        ('qa', '0005_auto_20190312_1732'),
    ]

    operations = [
        migrations.AddField(
            model_name='coupon',
            name='vendor_branch',
            # null=True so existing coupon rows migrate without a default.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='coupon_vendor_branch', to='core.VendorBranch', verbose_name='vendor_branch'),
        ),
    ]
|
[
"[email protected]"
] | |
5c15afa29895acb8165f67f96d1744092f542d33
|
ed269e9a4d9d6bfbb833381b7aef65a23f391fe2
|
/比赛/5479. 千位分隔数.py
|
f95b565d2780cc9f0cda6a36ec21c68410b1d997
|
[] |
no_license
|
Comyn-Echo/leeCode
|
fcff0d4c4c10209a47bd7c3204e3f64565674c91
|
67e9daecb7ffd8f7bcb2f120ad892498b1219327
|
refs/heads/master
| 2023-04-28T17:35:52.963069 | 2021-05-19T01:52:16 | 2021-05-19T01:52:16 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 758 |
py
|
class Solution(object):
    def thousandSeparator(self, n):
        """Return the decimal representation of non-negative int ``n`` with
        '.' inserted between every group of three digits (e.g. 987654321 ->
        "987.654.321").

        :type n: int
        :rtype: str

        Replaces the original hand-rolled grouping loop — which duplicated
        its zero-padding logic in three places — with str.format's ','
        thousands grouping, then swaps the comma for the required '.'.
        """
        return '{:,}'.format(n).replace(',', '.')
# Quick manual check: a single-digit number needs no separators.
print(Solution.thousandSeparator(None, 7))
|
[
"[email protected]"
] | |
172a86f3c38e5011aa0bf1ac25cc91867d724c2f
|
9e5353ba6e50f77a40a765bd494d8bfb990c8922
|
/stream_backend/api/serializers.py
|
d5946df70464fd9e7b8cffcfad2c351823f30c86
|
[] |
no_license
|
admiralbolt/stream-stuff
|
d9e24f1d78ac142416525b9b42cc53ef0bc4712a
|
29cfa96f9e8d40c531362aced47ebacadccbe759
|
refs/heads/master
| 2023-08-05T00:02:17.812991 | 2021-09-23T05:47:16 | 2021-09-23T05:47:16 | 261,022,447 | 7 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,357 |
py
|
import json
from rest_framework import serializers
from api import models
class JsonSerializer(serializers.Field):
    """Custom DRF field for JSON data.

    Internally the value is stored as a JSON string; externally it is
    exposed as parsed JSON.  A falsy internal value (empty string / None)
    is represented as an empty list.
    """
    def to_representation(self, value):
        # Falsy internal values become [] rather than raising on json.loads.
        return json.loads(value) if value else []
    def to_internal_value(self, data):
        return json.dumps(data)
class CustomEmoteSerializer(serializers.ModelSerializer):
    """Serialize a CustomEmote model (all fields)."""
    class Meta:
        model = models.CustomEmote
        fields = "__all__"
class KeyValueSerializer(serializers.ModelSerializer):
    """Serialize a key value pair.

    In theory we could use a json serialized field here but I've found that just
    doing the translation by hand works better.
    """
    class Meta:
        model = models.KeyValue
        fields = "__all__"
class ScriptSerializer(serializers.ModelSerializer):
    """Serialize a Script model (all fields)."""
    class Meta:
        model = models.Script
        fields = "__all__"
class SoundSerializer(serializers.ModelSerializer):
    """Serialize a Sound model (all fields)."""
    class Meta:
        model = models.Sound
        fields = "__all__"
class TwitchClipSerializer(serializers.ModelSerializer):
    """Serialize a TwitchClip model (all fields)."""
    class Meta:
        model = models.TwitchClip
        fields = "__all__"
|
[
"[email protected]"
] | |
3794ad4e6c4c29f51277e6c3db63938934199c94
|
912b3b5321c7e26887af94cf2f97e4892c8c956a
|
/Day6/1_os模块.py
|
e6c1d55e65f6ce1f83dbdb50a2a2369a9e7f34ed
|
[] |
no_license
|
nmap1208/2016-python-oldboy
|
a3a614694aead518b86bcb75127e1ed2ef94604a
|
873820e30aeb834b6a95bae66259506955436097
|
refs/heads/master
| 2021-05-31T04:43:14.636250 | 2016-05-06T01:24:39 | 2016-05-06T01:24:46 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 293 |
py
|
# -*- coding:utf-8 -*-
# Demo of the os module: popen vs system, stat, and os.path predicates.
import os

a = os.popen('ls')       # popen returns a file-like object wrapping the command's stdout
print(type(a))
print(a.read())          # the captured output of `ls`

b = os.system('ls')      # system writes directly to stdout; returns the exit status
print(b)

c = os.stat('1_os模块.py')  # stat result for this very script
print(c)

print(os.path.isfile('.'))   # False: '.' is a directory
print(os.path.isdir('.'))    # True
print()
print(os.path.isfile('1_os模块.py'))  # True
print(os.path.isdir('1_os模块.py'))   # False
|
[
"[email protected]"
] | |
e01e2b05fabcddca2a5a6ff51953f8e148933344
|
34ddec647d6ad357c1527cf713eaeaee4eb575aa
|
/2020/24/part1.py
|
15437d944874572ab3349f6d824f88d3d20bf217
|
[
"Unlicense"
] |
permissive
|
cheshyre/advent-of-code
|
98327c564f6b401244778aaf9a16043000b4d85e
|
7ecb827745bd59e6ad249707bd976888006f935c
|
refs/heads/master
| 2022-12-21T15:53:38.789228 | 2022-12-20T20:07:28 | 2022-12-20T20:07:28 | 75,426,961 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 490 |
py
|
import os

import hex_grid

cur_dir = os.path.dirname(os.path.abspath(__file__))

# Presence map: a hex coordinate in the dict is a black tile; flipping a
# black tile deletes it (back to white).
tiles_count = {}

with open(f"{cur_dir}/input") as f:
    for line in f:
        instrs = hex_grid.parse_instructions(line)
        point = (0, 0)  # every path starts from the reference tile
        for i in instrs:
            point = hex_grid.apply_instruction_to_point(point, i)
        # Toggle the destination tile.
        if point in tiles_count:
            del tiles_count[point]
        else:
            tiles_count[point] = 1

print(f"There are {len(tiles_count)} black tiles.")
|
[
"[email protected]"
] | |
8ca1f76025a6c70f3e1501bb42a2497806635dcd
|
bb150497a05203a718fb3630941231be9e3b6a32
|
/framework/e2e/jit/test_TransformerDecoderLayer_base.py
|
a05bd02f5bfde173c39458efb57dde220590c836
|
[] |
no_license
|
PaddlePaddle/PaddleTest
|
4fb3dec677f0f13f7f1003fd30df748bf0b5940d
|
bd3790ce72a2a26611b5eda3901651b5a809348f
|
refs/heads/develop
| 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 |
Python
|
UTF-8
|
Python
| false | false | 668 |
py
|
#!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test jit cases
"""
import os
import sys
# Make the project root and its utils/ directory importable when pytest runs
# from a subdirectory.
sys.path.append(os.path.abspath(os.path.dirname(os.getcwd())))
sys.path.append(os.path.join(os.path.abspath(os.path.dirname(os.getcwd())), "utils"))
from utils.yaml_loader import YamlLoader
from jittrans import JitTrans

# Case definitions live in <project root>/yaml/nn.yml.
yaml_path = os.path.join(os.path.abspath(os.path.dirname(os.getcwd())), "yaml", "nn.yml")
yml = YamlLoader(yaml_path)


def test_TransformerDecoderLayer_base():
    """test TransformerDecoderLayer_base: run the jit translation check for this case."""
    jit_case = JitTrans(case=yml.get_case_info("TransformerDecoderLayer_base"))
    jit_case.jit_run()
|
[
"[email protected]"
] | |
9f96bd3e842b17ffff0232b9c3744b778aa03a07
|
971e0efcc68b8f7cfb1040c38008426f7bcf9d2e
|
/tests/artificial/transf_None/trend_MovingAverage/cycle_0/ar_/test_artificial_32_None_MovingAverage_0__20.py
|
4d34289521e410c56e4feef0b93dd14485083f72
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
antoinecarme/pyaf
|
a105d172c2e7544f8d580d75f28b751351dd83b6
|
b12db77cb3fa9292e774b2b33db8ce732647c35e
|
refs/heads/master
| 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 |
BSD-3-Clause
| 2023-03-08T21:45:40 | 2016-10-13T09:30:30 |
Python
|
UTF-8
|
Python
| false | false | 262 |
py
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
# Generate and test an artificial series: 32 daily points, MovingAverage
# trend, no cycle, no AR component, identity transform, zero noise,
# 20 exogenous variables.
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 0, transform = "None", sigma = 0.0, exog_count = 20, ar_order = 0);
|
[
"[email protected]"
] | |
00054f224feac895bdeb59caf0cd9aa1a4ec7ba7
|
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
|
/Gauss_v45r8/Gen/DecFiles/options/13102401.py
|
51ba130c83268a3466ef39a7a7bdf749d0a89dca
|
[] |
no_license
|
Sally27/backup_cmtuser_full
|
34782102ed23c6335c48650a6eaa901137355d00
|
8924bebb935b96d438ce85b384cfc132d9af90f6
|
refs/heads/master
| 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,746 |
py
|
# file /home/hep/ss4314/cmtuser/Gauss_v45r8/Gen/DecFiles/options/13102401.py generated: Fri, 27 Mar 2015 15:48:14
#
# Event Type: 13102401
#
# ASCII decay Descriptor: [B_s0 -> rho+ K-]cc
#
# Configure generation of B_s0 -> rho+ K- events: Pythia production with
# repeated hadronization of the signal, EvtGen decay, DaughtersInLHCb cut.
from Configurables import Generation
Generation().EventType = 13102401
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bs_rho+K-=DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
# Signal PID: B_s0 and its antiparticle.
Generation().SignalRepeatedHadronization.SignalPIDList = [ 531,-531 ]

# Ad-hoc particle gun code
# Alternative generation path: a particle gun sampling B_s0 momenta from a
# (pt, eta) spectrum histogram instead of full Pythia production.
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 531
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 531,-531 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_531.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 13102401
|
[
"[email protected]"
] | |
2a95a869e7d772ab128482d441931e4fa0c543aa
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/FGzWE8vNyxtTrw3Qg_9.py
|
5ad6edc044c49e09b4fc47b751fcc79350dfb72e
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 725 |
py
|
def get_nbrs(grid, r, c):
nbrs = [[r+dr, c+dc] for dr, dc in [[-1,0],[0,1],[1,0],[0,-1]]]
return [[nr, nc] for nr, nc in nbrs if 0<=nr<len(grid) and 0<=nc<len(grid[0]) and grid[nr][nc]==1]
def is_region(grid, r, c):
if grid[r][c] != 1: return False
# set all interconnected cells in region to 0
# using backtracking to cells with multiple neighbours
stack = []
while True:
grid[r][c] = 0
nbrs = get_nbrs(grid, r, c)
if not nbrs:
if not stack: break
r, c = stack.pop()
else:
if len(nbrs) > 1: stack.append([r, c])
r, c = nbrs[0]
return True
def num_regions(grid):
return sum(1 for r in range(len(grid)) for c in range(len(grid[0])) if is_region(grid, r, c))
|
[
"[email protected]"
] | |
1af7478a5ccc39c7e8958468814792161a1bd6df
|
70c3cf5f0c58b0074b33f653500604b5f4f7e198
|
/rm_scraping/scrape.py
|
4de5cfa3e045ca42c9b60e3faf2c82cac2d44c8e
|
[] |
no_license
|
colinmorris/wiki-controversial-titles
|
659a7264c7fe652b696e20414acbd74a4cb1b3f6
|
b089c08655527e10624ecd912a0058fd1f150778
|
refs/heads/master
| 2020-06-03T05:04:07.017575 | 2019-07-27T22:32:34 | 2019-07-27T22:32:34 | 191,450,697 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,391 |
py
|
import csv
import os
import mwclient
import argparse
import pandas as pd
from RM import RM
from constants import *
FLUSH_EVERY = 50
LIMIT = 0
NEXT_ID = 0
def scrape_rms_for_title(title, f_fail, debug=0):
global NEXT_ID
pg = wiki.pages[title]
section_ix = 1
while 1:
try:
section = pg.text(section=section_ix)
except KeyError:
break
if RM.section_is_rm(section):
try:
yield RM(section, title, debug=debug, id=NEXT_ID)
except Exception as e:
row = '{}\t{}\n'.format(title, section_ix)
f_fail.write(row)
print('Exception:', e)
else:
NEXT_ID += 1
section_ix += 1
def flush_rms(rms, rm_w, votes_w, pols_w):
rm_w.writerows(rm.row for rm in rms)
vote_rows = []
pol_rows = []
for rm in rms:
for vote in rm.votes:
vote['rm_id'] = rm.id
vote_rows.extend(rm.votes)
for user, counts in rm.user_to_policies.items():
for pol, n in counts.items():
row = dict(user=user, pol=pol, n=n, rm_id=rm.id)
pol_rows.append(row)
votes_w.writerows(vote_rows)
pols_w.writerows(pol_rows)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--clobber', action='store_true', help='Overwrite existing csv files')
parser.add_argument('-r', '--title-re',
help='Regex to add as an intitle filter to search query')
parser.add_argument('--invert-titlematch', action='store_true',
help='Invert the intitle filter')
args = parser.parse_args()
if args.clobber:
fresh = True
else:
try:
st = os.stat('rms.csv')
except FileNotFoundError:
fresh = True
else:
fresh = st.st_size == 0
extant_pages = set()
if not fresh:
df = pd.read_csv('rms.csv')
NEXT_ID = df['id'].max() + 1
print("Found existing files. Appending. Ids starting at {}".format(NEXT_ID))
extant_pages = set(df['talkpage'].values)
oflag = 'w' if fresh else 'a'
frm = open('rms.csv', oflag)
fvotes = open('votes.csv', oflag)
fpols = open('pols.csv', oflag)
out_rm = csv.DictWriter(frm, RM.COLS)
out_votes = csv.DictWriter(fvotes, RM.VOTE_COLS)
out_pols = csv.DictWriter(fpols, RM.POL_COLS)
writers = [out_rm, out_votes, out_pols]
if fresh:
for wr in writers:
wr.writeheader()
wiki = mwclient.Site(('https', 'en.wikipedia.org'))
query = 'insource:/"{}"/'.format(RMTOP)
if args.title_re:
query += ' {}intitle:/{}/'.format(
('-' if args.invert_titlematch else ''),
args.title_re
)
results = wiki.search(query, namespace=1)
rms = []
failures = []
f_fail = open('failures.tsv', oflag)
i_pg = 0
i_rm = 0
skipped = 0
for result in results:
# Don't rescrape pages we've already done.
if result['title'] in extant_pages:
skipped += 1
continue
for rm in scrape_rms_for_title(result['title'], f_fail):
rms.append(rm)
i_rm += 1
if len(rms) >= FLUSH_EVERY:
flush_rms(rms, out_rm, out_votes, out_pols)
rms = []
if LIMIT and i_rm >= LIMIT:
print("Reached limit. rms={}. Stopping".format(i_rm))
break
i_pg += 1
if i_pg % 100 == 0:
print("i_pg = {}; skipped = {}".format(i_pg, skipped))
if rms:
flush_rms(rms, out_rm, out_votes, out_pols)
for f in [frm, fvotes, fpols, f_fail]:
f.close()
print("Skipped {} pages".format(skipped))
|
[
"[email protected]"
] | |
d6cbdb0585782c2794ba7450f08232c03959e33d
|
eefb06b0d8c8c98c1e9cfc4c3852d5c453eb5429
|
/data/input/alan-hicks/django-dmarc/dmarc/views.py
|
9a3cac700e01af8df8f7ac8922d8369c5b52f135
|
[] |
no_license
|
bopopescu/pythonanalyzer
|
db839453bde13bf9157b76e54735f11c2262593a
|
8390a0139137574ab237b3ff5fe8ea61e8a0b76b
|
refs/heads/master
| 2022-11-22T02:13:52.949119 | 2019-05-07T18:42:52 | 2019-05-07T18:42:52 | 282,079,884 | 0 | 0 | null | 2020-07-23T23:46:09 | 2020-07-23T23:46:08 | null |
UTF-8
|
Python
| false | false | 1,280 |
py
|
#----------------------------------------------------------------------
# Copyright (c) 2015, Persistent Objects Ltd http://p-o.co.uk/
#
# License: BSD
#----------------------------------------------------------------------
"""
DMARC views
http://dmarc.org/resources/specification/
"""
from django.contrib.admin.views.decorators import staff_member_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render
from dmarc.models import Report
# Create your views here.
@staff_member_required
def dmarc_report(request):
report_list = Report.objects.select_related(
'reporter',
).prefetch_related(
'records__results'
).order_by('-date_begin', 'reporter__org_name').all()
paginator = Paginator(report_list, 2)
page = request.GET.get('page')
try:
reports = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
reports = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
reports = paginator.page(paginator.num_pages)
context = {
"reports": reports,
}
return render(request, 'dmarc/report.html', context)
|
[
"[email protected]"
] | |
0e58b2eb4476360bd160080cb9a03e7fcad7a6e2
|
490ffe1023a601760ae7288e86723f0c6e366bba
|
/kolla-docker/zun-ui/zun_ui/enabled/_2340_admin_container_providervms_panel.py
|
4812c633141e50e75d7f5283c994e5efb453fe51
|
[
"Apache-2.0"
] |
permissive
|
bopopescu/Cloud-User-Management
|
89696a5ea5d2f95191327fbeab6c3e400bbfb2b8
|
390988bf4915a276c7bf8d96b62c3051c17d9e6e
|
refs/heads/master
| 2022-11-19T10:09:36.662906 | 2018-11-07T20:28:31 | 2018-11-07T20:28:31 | 281,786,345 | 0 | 0 | null | 2020-07-22T21:26:07 | 2020-07-22T21:26:06 | null |
UTF-8
|
Python
| false | false | 964 |
py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# The slug of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'container.providervms'
# The slug of the panel group the PANEL is associated with.
PANEL_GROUP = 'container'
# The slug of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'admin'
# Python panel class of the PANEL to be added.
ADD_PANEL = 'zun_ui.content.container.providervms.panel.Providervms'
|
[
"[email protected]"
] | |
6133de21acc69badb689577af432bce59a5def07
|
14cef240063145bba81d7ac4bd25ed671585527c
|
/core/database/crud/bottify_user.py
|
4433dceb8bad52c68591d531e46bc649e45080ee
|
[] |
no_license
|
Kroonjay/Bottify
|
f139d3cf6753c36b85ec061888a88c9f82dfd715
|
c30c9cf924d19d053b0f678eb9d69143398ea83a
|
refs/heads/main
| 2023-07-30T02:10:08.878698 | 2021-09-29T16:30:35 | 2021-09-29T16:30:35 | 411,117,108 | 0 | 3 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,716 |
py
|
import logging
from databases import Database
from uuid import UUID
from core.security.password import get_password_hash
from core.models.user import BottifyUserInModel, BottifyUserModel
from core.database.tables.bottify_user import get_bottify_user_table
from core.database.helpers import build_model_from_row
user_table = get_bottify_user_table()
async def read_user_by_id(database: Database, user_id: int):
query = user_table.select().where(user_table.c.id == user_id).limit(1)
row = await database.fetch_one(query)
return build_model_from_row(row, BottifyUserModel)
async def read_user_by_guid(database: Database, guid_in: UUID):
if isinstance(guid_in, UUID):
user_guid = guid_in
elif isinstance(guid_in, str):
try:
user_guid = UUID(guid_in)
except ValueError as e:
logging.error(f"Read User by Guid:Failed to Parse UUID from String")
return None
else:
logging.error(
f"Read User by Guid:User GUID must be either UUID or String:Got: {type(guid_in)}"
)
return None
query = user_table.select().where(user_table.c.guid == user_guid).limit(1)
row = await database.fetch_one(query)
return build_model_from_row(row, BottifyUserModel)
async def read_user_by_username(database: Database, username: str):
if not isinstance(username, str):
logging.error(
f"Read User by Username:Username Must be type String:Got: {type(username)}"
)
query = user_table.select().where(user_table.c.username == username).limit(1)
row = await database.fetch_one(query)
return build_model_from_row(row, BottifyUserModel)
async def create_user(database: Database, user_in: BottifyUserInModel):
query = user_table.insert()
hashed_password = get_password_hash(user_in.password)
success = False
if not hashed_password:
logging.error(
f"Create User Error:Failed to Hash Password:User Data: {user_in.json()}"
)
return success
user_data = user_in.dict(exclude={"password"})
user_data.update({"hashed_password": hashed_password})
await database.execute(query, values=user_data)
success = True
return success
async def read_users(database: Database, limit: int):
if not isinstance(limit, int):
logging.error(
f"Read Users Error:Limit Param Must be an Integer:Got: {type(limit)}"
)
query = user_table.select().limit(limit)
users = []
async for row in database.iterate(query):
users.append(build_model_from_row(row, BottifyUserModel))
if not users:
logging.error(f"Read Users Error:Failed to Read Any Users")
return users
|
[
"[email protected]"
] | |
6ec3308ca74aee29ace51e8fb3b39a143120e86f
|
29eacf3b29753d65d8ec0ab4a60ea1f7ddecbd68
|
/tests/api_workflow/test_api_workflow_selection.py
|
73147fd03b6bbd08ffaf12ded248a6f812a0cb81
|
[
"MIT"
] |
permissive
|
lightly-ai/lightly
|
5b655fe283b7cc2ddf1d7f5bd098603fc1cce627
|
5650ee8d4057139acf8aa10c884d5d5cdc2ccb17
|
refs/heads/master
| 2023-08-17T11:08:00.135920 | 2023-08-16T12:43:02 | 2023-08-16T12:43:02 | 303,705,119 | 2,473 | 229 |
MIT
| 2023-09-14T14:47:16 | 2020-10-13T13:02:56 |
Python
|
UTF-8
|
Python
| false | false | 7,897 |
py
|
from typing import List
import pytest
from pytest_mock import MockerFixture
from lightly.active_learning.config.selection_config import SelectionConfig
from lightly.api import ApiWorkflowClient, api_workflow_selection
from lightly.openapi_generated.swagger_client.models import (
JobResultType,
JobState,
JobStatusData,
JobStatusDataResult,
SamplingCreateRequest,
SamplingMethod,
TagData,
)
from tests.api_workflow import utils
def _get_tags(dataset_id: str, tag_name: str = "just-a-tag") -> List[TagData]:
return [
TagData(
id=utils.generate_id(),
dataset_id=dataset_id,
prev_tag_id=None,
bit_mask_data="0x1",
name=tag_name,
tot_size=4,
created_at=1577836800,
changes=[],
)
]
def _get_sampling_create_request(tag_name: str = "new-tag") -> SamplingCreateRequest:
return SamplingCreateRequest(
new_tag_name=tag_name,
method=SamplingMethod.RANDOM,
config={},
)
def test_selection__tag_exists(mocker: MockerFixture) -> None:
tag_name = "some-tag"
mocker.patch.object(ApiWorkflowClient, "__init__", return_value=None)
mocker.patch.object(
ApiWorkflowClient,
"get_all_tags",
return_value=_get_tags(dataset_id=utils.generate_id(), tag_name=tag_name),
)
client = ApiWorkflowClient()
with pytest.raises(RuntimeError) as exception:
client.selection(selection_config=SelectionConfig(name=tag_name))
assert (
str(exception.value) == "There already exists a tag with tag_name some-tag"
)
def test_selection__no_tags(mocker: MockerFixture) -> None:
mocker.patch.object(ApiWorkflowClient, "__init__", return_value=None)
mocker.patch.object(ApiWorkflowClient, "get_all_tags", return_value=[])
client = ApiWorkflowClient()
with pytest.raises(RuntimeError) as exception:
client.selection(selection_config=SelectionConfig(name="some-tag"))
assert str(exception.value) == "There exists no initial-tag for this dataset."
def test_selection(mocker: MockerFixture) -> None:
tag_name = "some-tag"
dataset_id = utils.generate_id()
mocker.patch("time.sleep")
mocker.patch.object(ApiWorkflowClient, "__init__", return_value=None)
mocker.patch.object(
ApiWorkflowClient, "get_all_tags", return_value=_get_tags(dataset_id=dataset_id)
)
mocker.patch.object(
ApiWorkflowClient,
"_create_selection_create_request",
return_value=_get_sampling_create_request(),
)
mocked_selection_api = mocker.MagicMock()
mocked_sampling_response = mocker.MagicMock()
mocked_sampling_response.job_id = utils.generate_id()
mocked_selection_api.trigger_sampling_by_id.return_value = mocked_sampling_response
mocked_jobs_api = mocker.MagicMock()
mocked_get_job_status = mocker.MagicMock(
return_value=JobStatusData(
id=utils.generate_id(),
wait_time_till_next_poll=1,
created_at=0,
status=JobState.FINISHED,
result=JobStatusDataResult(type=JobResultType.SAMPLING, data="new-tag-id"),
)
)
mocked_jobs_api.get_job_status_by_id = mocked_get_job_status
mocked_tags_api = mocker.MagicMock()
client = ApiWorkflowClient()
client._selection_api = mocked_selection_api
client._jobs_api = mocked_jobs_api
client._tags_api = mocked_tags_api
client._dataset_id = dataset_id
client.embedding_id = "embedding-id"
client.selection(selection_config=SelectionConfig(name=tag_name))
mocked_get_job_status.assert_called_once()
mocked_tags_api.get_tag_by_tag_id.assert_called_once_with(
dataset_id=dataset_id, tag_id="new-tag-id"
)
def test_selection__job_failed(mocker: MockerFixture) -> None:
dataset_id = utils.generate_id()
job_id = "some-job-id"
mocker.patch("time.sleep")
mocker.patch.object(ApiWorkflowClient, "__init__", return_value=None)
mocker.patch.object(
ApiWorkflowClient, "get_all_tags", return_value=_get_tags(dataset_id=dataset_id)
)
mocker.patch.object(
ApiWorkflowClient,
"_create_selection_create_request",
return_value=_get_sampling_create_request(),
)
mocked_selection_api = mocker.MagicMock()
mocked_sampling_response = mocker.MagicMock()
mocked_sampling_response.job_id = job_id
mocked_selection_api.trigger_sampling_by_id.return_value = mocked_sampling_response
mocked_jobs_api = mocker.MagicMock()
mocked_get_job_status = mocker.MagicMock(
return_value=JobStatusData(
id=utils.generate_id(),
wait_time_till_next_poll=1,
created_at=0,
status=JobState.FAILED,
error="bad job",
)
)
mocked_jobs_api.get_job_status_by_id = mocked_get_job_status
client = ApiWorkflowClient()
client._selection_api = mocked_selection_api
client._jobs_api = mocked_jobs_api
client._dataset_id = dataset_id
client.embedding_id = "embedding-id"
with pytest.raises(RuntimeError) as exception:
client.selection(selection_config=SelectionConfig(name="some-tag"))
assert str(exception.value) == (
"Selection job with job_id some-job-id failed with error bad job"
)
def test_selection__too_many_errors(mocker: MockerFixture) -> None:
dataset_id = utils.generate_id()
job_id = "some-job-id"
mocker.patch("time.sleep")
mocked_print = mocker.patch("builtins.print")
mocker.patch.object(ApiWorkflowClient, "__init__", return_value=None)
mocker.patch.object(
ApiWorkflowClient, "get_all_tags", return_value=_get_tags(dataset_id=dataset_id)
)
mocker.patch.object(
ApiWorkflowClient,
"_create_selection_create_request",
return_value=_get_sampling_create_request(),
)
mocked_selection_api = mocker.MagicMock()
mocked_sampling_response = mocker.MagicMock()
mocked_sampling_response.job_id = job_id
mocked_selection_api.trigger_sampling_by_id.return_value = mocked_sampling_response
mocked_jobs_api = mocker.MagicMock()
mocked_get_job_status = mocker.MagicMock(
side_effect=[Exception("surprise!") for _ in range(20)]
)
mocked_jobs_api.get_job_status_by_id = mocked_get_job_status
client = ApiWorkflowClient()
client._selection_api = mocked_selection_api
client._jobs_api = mocked_jobs_api
client._dataset_id = dataset_id
client.embedding_id = "embedding-id"
with pytest.raises(Exception) as exception:
client.selection(selection_config=SelectionConfig(name="some-tag"))
assert str(exception.value) == "surprise!"
mocked_print.assert_called_once_with(
"Selection job with job_id some-job-id could not be started "
"because of error: surprise!"
)
def test_upload_scores(mocker: MockerFixture) -> None:
dataset_id = utils.generate_id()
tags = _get_tags(dataset_id=dataset_id, tag_name="initial-tag")
tag_id = tags[0].id
mocker.patch.object(ApiWorkflowClient, "__init__", return_value=None)
mocker.patch.object(
ApiWorkflowClient,
"get_all_tags",
return_value=tags,
)
mocker.patch.object(
api_workflow_selection, "_parse_active_learning_scores", return_value=[1]
)
mocked_api = mocker.MagicMock()
mocked_create_score = mocked_api.create_or_update_active_learning_score_by_tag_id
client = ApiWorkflowClient()
client._scores_api = mocked_api
client._dataset_id = dataset_id
mocked_create_score.reset_mock()
client.upload_scores(al_scores={"score_type": [1, 2, 3]}, query_tag_id=tag_id)
mocked_create_score.assert_called_once()
kwargs = mocked_create_score.call_args[1]
assert kwargs.get("tag_id") == tag_id
|
[
"[email protected]"
] | |
8fe975eac45d0cbc7088a107247e236f4fea121b
|
79a836022275b94b687325ae36980cafe6d66788
|
/setup.py
|
18eba6b9644a02f8b6a1d99711326daac0f2de62
|
[] |
no_license
|
reminder-bot/start
|
1194adede56c46b587e27b003c0c401ceb7b9056
|
33c613d5a9c168635ad221d864e25d27c726ae5a
|
refs/heads/master
| 2020-03-21T23:34:21.716780 | 2018-08-30T20:13:14 | 2018-08-30T20:13:14 | 139,195,178 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,035 |
py
|
import os
import configparser
try:
os.mkdir('../DATA')
except FileExistsError:
pass
files = ['todos']
contents = ['{}']
for fn, content in zip(files, contents):
if fn + '.json' in os.listdir('../DATA/'):
continue
f = open('../DATA/' + fn + '.json', 'w')
f.write(content)
f.close()
if 'config.ini' not in os.listdir('..'):
config = configparser.ConfigParser()
config['DEFAULT'] = {
'token' : 'token',
'dbl_token' : 'discordbotslist token',
'patreon_server' : 'serverid',
'patreon_enabled' : 'yes',
'strings_location' : './languages/'
}
config['WEB'] = {
'DISCORD_OAUTH_CLIENT_ID' : 'id',
'DISCORD_OAUTH_CLIENT_SECRET' : 'secretkey',
'SECRET' : 'secretkey'
}
config['MYSQL'] = {
'user' : 'username',
'passwd' : 'password',
'host' : 'localhost',
'database' : 'reminders'
'database_sfx' : 'soundfx'
}
with open('../config.ini', 'w') as f:
config.write(f)
|
[
"[email protected]"
] | |
d7420989469dab17d9f1146e6f856d16c343fb1e
|
054eefaa17157b32869ea986347b3e539d2bf06b
|
/big_o_coding/Green_06/midterm1.py
|
bbd16356c363c790e981dd3ec7c049cf0c48699b
|
[] |
no_license
|
baocogn/self-learning
|
f2cb2f45f05575b6d195fc3c407daf4edcfe7d0e
|
f50a3946966354c793cac6b28d09cb5dba2ec57a
|
refs/heads/master
| 2021-07-12T23:32:14.728163 | 2019-02-10T14:24:46 | 2019-02-10T14:24:46 | 143,170,276 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 296 |
py
|
n = int(input())
result = 1
if n == 1:
result = 1 * 15000
elif 2 <= n <= 5:
result = 1 * 15000 + (n - 1) * 13500
elif 6 <= n <= 11:
result = 1 * 15000 + 4 * 13500 + (n - 5) * 11000
elif n >= 12:
result = int((1 * 15000 + 4 * 13500 + (n - 5) * 11000) * (1 - 0.1))
print(result )
|
[
"[email protected]"
] | |
addce9e9601f6db6495755d3b9f0ef59ec7bae2b
|
ac6a1789722de5e37be54b39b964beef005d111d
|
/rest_registration/utils/validation.py
|
d23aac4f20409a39eba749fbb9ac6225315dc012
|
[
"MIT"
] |
permissive
|
sunbeamer/django-rest-registration
|
cd194ccf152c62802ca6f7d7a048764da8aadf8a
|
dd25b84d0151630659da4c2c17ed48d26238e006
|
refs/heads/master
| 2023-03-29T10:39:06.225559 | 2021-03-25T23:21:46 | 2021-03-25T23:21:46 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,971 |
py
|
import functools
from collections import OrderedDict
from collections.abc import Mapping
from typing import Any, Callable, Dict, Iterable, List
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError as DjangoValidationError
from django.utils.translation import gettext as _
from rest_framework.exceptions import ErrorDetail, ValidationError
from rest_framework.settings import api_settings
from rest_registration.utils.users import (
build_initial_user,
get_user_by_verification_id
)
Validator = Callable[[Any], None]
def wrap_validation_error_with_field(field_name: str):
def decorator(func: Validator):
@functools.wraps(func)
def wrapper(value: Any) -> None:
try:
func(value)
except ValidationError as exc:
raise ValidationError({field_name: exc.detail}) from None
return wrapper
return decorator
@wrap_validation_error_with_field('password_confirm')
def validate_user_password_confirm(user_data: Dict[str, Any]) -> None:
if user_data['password'] != user_data['password_confirm']:
raise ValidationError(ErrorDetail(
_("Passwords don't match"),
code='passwords-do-not-match'),
)
@wrap_validation_error_with_field('password')
def validate_user_password(user_data: Dict[str, Any]) -> None:
password = user_data['password']
user = build_initial_user(user_data)
return _validate_user_password(password, user)
@wrap_validation_error_with_field('password')
def validate_password_with_user_id(user_data: Dict[str, Any]) -> None:
password = user_data['password']
user_id = user_data['user_id']
user = get_user_by_verification_id(user_id, require_verified=False)
return _validate_user_password(password, user)
def _validate_user_password(password, user) -> None:
try:
validate_password(password, user=user)
except DjangoValidationError as exc:
raise ValidationError(list(exc.messages)) from None
def run_validators(validators: Iterable[Validator], value: Any) -> None:
fields_errors = OrderedDict() # type: Dict[str, Any]
non_field_errors = [] # type: List[Any]
for validator in validators:
try:
validator(value)
except ValidationError as exc:
if isinstance(exc.detail, Mapping):
for field_name, field_errors in exc.detail.items():
fields_errors.setdefault(field_name, []).extend(
field_errors)
elif isinstance(exc.detail, list):
non_field_errors.extend(exc.detail)
if fields_errors:
errors = {}
errors.update(fields_errors)
errors.setdefault(
api_settings.NON_FIELD_ERRORS_KEY, []).extend(non_field_errors)
raise ValidationError(errors)
if non_field_errors:
raise ValidationError(non_field_errors)
|
[
"[email protected]"
] | |
c8275c0263fa17dd5c699419bd33e02aa94828bc
|
384813261c9e8d9ee03e141ba7270c48592064e9
|
/new_project/fastsklearnfeature/interactiveAutoML/new_bench/multiobjective/metalearning/openml_data/private_models/randomforest/sam_node.py
|
8fbb83881a619090a269f8cb2979875d31f3c78e
|
[] |
no_license
|
pratyushagnihotri/DFS
|
b99d87c085e67888b81c19629c338dae92272a3b
|
3b60e574905e93c24a2b883cc251ecc286cb2263
|
refs/heads/master
| 2023-04-18T22:17:36.816581 | 2021-04-20T13:41:29 | 2021-04-20T13:41:29 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,128 |
py
|
''' A class defining the nodes in our Differentially Private Random Decision Forest '''
from collections import defaultdict
import random
import numpy as np
import math
from scipy import stats # for Exponential Mechanism
class node:
def __init__(self, parent_node, split_value_from_parent, splitting_attribute, tree_level, id, children,
svfp_numer=None):
self._parent_node = parent_node
self._split_value_from_parent = split_value_from_parent
self._svfp_numer = svfp_numer
self._splitting_attribute = splitting_attribute
# self._level = tree_level # comment out unless needed. saves memory.
# self._id = id # comment out unless needed. saves memory.
self._children = children
self._class_counts = defaultdict(int)
self._noisy_majority = None
self._empty = 0 # 1 if leaf and has no records
self._sensitivity = -1.0
def add_child(self, child_node):
self._children.append(child_node)
def increment_class_count(self, class_value):
self._class_counts[class_value] += 1
def set_noisy_majority(self, epsilon, class_values):
if not self._noisy_majority and not self._children: # to make sure this code is only run once per leaf
for val in class_values:
if val not in self._class_counts: self._class_counts[val] = 0
if max([v for k, v in self._class_counts.items()]) < 1:
self._empty = 1
self._noisy_majority = random.choice([k for k, v in self._class_counts.items()])
return 0 # we dont want to count purely random flips
else:
all_counts = sorted([v for k, v in self._class_counts.items()], reverse=True)
count_difference = all_counts[0] - all_counts[1]
self._sensitivity = math.exp(-1 * count_difference * epsilon)
self._sens_of_sens = 1.
self._noisy_sensitivity = 1.
self._noisy_majority = self.expo_mech(epsilon, self._sensitivity, self._class_counts)
if self._noisy_majority != int(
max(self._class_counts.keys(), key=(lambda key: self._class_counts[key]))):
# print('majority: '+str(self._noisy_majority)+' vs. max_count: '+str( max(self._class_counts.keys(), key=(lambda key: self._class_counts[key]))))
return 1 # we're summing the flipped majorities
else:
return 0
else:
return 0
def laplace(self, e, counts):
noisy_counts = {}
for label, count in counts.items():
noisy_counts[label] = max(0, int(count + np.random.laplace(scale=float(1. / e))))
return int(max(noisy_counts.keys(), key=(lambda key: noisy_counts[key])))
def expo_mech(self, e, s, counts):
''' For this implementation of the Exponetial Mechanism, we use a piecewise linear scoring function,
where the element with the maximum count has a score of 1, and all other elements have a score of 0. '''
weighted = []
max_count = max([v for k, v in counts.items()])
for label, count in counts.items():
''' if the score is non-monotonic, s needs to be multiplied by 2 '''
if count == max_count:
if s < 1.0e-10:
power = 50 # e^50 is already astronomical. sizes beyond that dont matter
else:
power = min(50, (e * 1) / (2 * s)) # score = 1
else:
power = 0 # score = 0
weighted.append([label, math.exp(power)])
sum = 0.
for label, count in weighted:
sum += count
for i in range(len(weighted)):
weighted[i][1] /= sum
customDist = stats.rv_discrete(name='customDist',
values=([lab for lab, cou in weighted], [cou for lab, cou in weighted]))
best = customDist.rvs()
# print("best_att examples = "+str(customDist.rvs(size=20)))
return int(best)
|
[
"[email protected]"
] | |
44ed7aab029125950b6f5f506929e89f4de0dcdf
|
6968c7f9d2b20b5296663829f99a27d184a59fc1
|
/autodisc/autodisc/gui/jupyter/imagelistwidget.py
|
b6152e78a221de869f622b5b6696836db4ed377e
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
flowersteam/automated_discovery_of_lenia_patterns
|
d42dff37323d51732571b33845c0562d844f498f
|
97cc7cde2120fa95225d1e470e00b8aa8c034e97
|
refs/heads/master
| 2020-06-29T07:08:58.404541 | 2020-05-14T07:37:10 | 2020-05-14T07:37:10 | 200,470,902 | 13 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,855 |
py
|
import autodisc as ad
import ipywidgets
import numpy as np
import IPython.display
class ImageListWidget(ipywidgets.VBox):
    """Paginated image-grid widget for Jupyter notebooks.

    Displays a list of PNG byte strings (optionally with per-image captions)
    inside a scrollable flex box, with a page dropdown and previous/next
    buttons for paging.
    """
    @staticmethod
    def get_default_gui_config():
        """Return the default widget configuration (layout + paging options)."""
        default_config = ad.Config()
        # Number of images rendered on a single page.
        default_config.elements_per_page = 100
        default_config.output_layout = ad.Config()
        # default_config.output_layout.border='3px solid black'
        default_config.box_layout = ad.Config()
        default_config.box_layout.overflow_y = 'scroll'
        default_config.box_layout.width = '100%'
        default_config.box_layout.height = '500px'
        default_config.box_layout.flex_flow = 'row wrap'
        default_config.box_layout.display = 'flex'
        # (sic: the key 'content_ouput' is misspelled but used consistently below)
        default_config.content_ouput = ad.Config()
        default_config.page_label = ad.Config()
        default_config.page_selection = ad.Config()
        default_config.page_selection.description = 'Page: '
        default_config.previous_page_button = ad.Config()
        default_config.previous_page_button.description = '<'
        default_config.previous_page_button.layout = ad.Config()
        default_config.previous_page_button.layout.width = '20px'
        default_config.next_page_button = ad.Config()
        default_config.next_page_button.description = '>'
        default_config.next_page_button.layout = ad.Config()
        default_config.next_page_button.layout.width = '20px'
        default_config.button_box = ad.Config()
        default_config.button_box.layout = ad.Config()
        default_config.button_box.layout.flex_flow = 'row'
        default_config.button_box.layout.display = 'flex'
        default_config.button_box.layout.align_items = 'center'
        # 'justify-content' is not a valid Python identifier, hence item access.
        default_config.button_box.layout['justify-content'] = 'flex-end'
        default_config.button_box.layout.width = '100%'
        default_config.image_items = ad.Config()
        default_config.image_items.layout = ad.Config()
        default_config.image_items.layout.height = '200px'
        default_config.image_items.layout.width = '200px'
        default_config.image_items.layout.border = '2px solid white'
        default_config.image_captions = ad.Config()
        return default_config
    def __init__(self, images=None, config=None, **kwargs):
        """Build the widget; ``images`` is an optional list of PNG byte strings."""
        self.config = ad.config.set_default_config(kwargs, config, ImageListWidget.get_default_gui_config())
        self.images = None
        self.main_box = None
        self.content_ouput_widget = ipywidgets.Output(**self.config.content_ouput)
        self.page_label_widget = ipywidgets.Label(**self.config.page_label, value='of 0')
        self.previous_page_button_widget = ipywidgets.Button(**self.config.previous_page_button)
        self.previous_page_button_widget.on_click(self.on_prev_page_button_click)
        self.page_selection_widget = ipywidgets.Dropdown(**self.config.page_selection)
        self.page_selection_widget.observe(self.on_page_selection_change)
        self.next_page_button_widget = ipywidgets.Button(**self.config.next_page_button)
        self.next_page_button_widget.on_click(self.on_next_page_button_click)
        # NOTE(review): this flag is set but never read in this class — it looks
        # intended to guard on_page_selection_change against programmatic
        # dropdown updates; confirm whether it can be removed or should be used.
        self.page_selection_widget_ignore_next_value_change = False
        self.button_box_widget = ipywidgets.Box(
            [self.page_selection_widget,
             self.page_label_widget,
             self.previous_page_button_widget,
             self.next_page_button_widget],
            **self.config.button_box
        )
        super().__init__([self.content_ouput_widget, self.button_box_widget], layout=self.config.output_layout)
        self.cur_page_idx = 0
        if images is not None:
            self.update(images)
    def update(self, images, captions=None):
        """Replace the displayed images (and optional captions); re-render page 0."""
        self.images = images
        self.captions = captions
        if self.images is not None and self.images:
            # update page selection widget
            n_pages = int(np.ceil(len(self.images) / self.config.elements_per_page))
            opts = [page_idx + 1 for page_idx in range(n_pages)]
            self.page_selection_widget.options = opts
            # update number of pages
            self.page_label_widget.value = 'of {}'.format(n_pages)
            self.update_page_items(0, force_update=True)
        else:
            # No images: reset the paging controls and clear the display.
            self.page_selection_widget.options = []
            self.page_label_widget.value = 'of 0'
            self.content_ouput_widget.clear_output()
    def update_page_items(self, page_idx, force_update=False):
        """Render page ``page_idx`` (0-based).

        Redraws only when the page actually changed or ``force_update`` is set.
        """
        if self.images is not None and self.images:
            n_pages = int(np.ceil(len(self.images) / self.config.elements_per_page))
            if n_pages == 0:
                self.content_ouput_widget.clear_output()
            elif page_idx >= 0 and page_idx < n_pages and (self.cur_page_idx != page_idx or force_update):
                items = []
                self.cur_page_idx = page_idx
                start_idx = self.config.elements_per_page * self.cur_page_idx
                end_idx = min(self.config.elements_per_page * (self.cur_page_idx + 1), len(self.images))
                for image_idx in range(start_idx, end_idx):
                    image = self.images[image_idx]
                    item_elems = []
                    if self.captions is not None:
                        # The caption list may be shorter than the image list.
                        if image_idx < len(self.captions):
                            caption_text = self.captions[image_idx]
                        else:
                            caption_text = ''
                        caption_widget = ipywidgets.Label(
                            value=caption_text,
                            **self.config.image_captions
                        )
                        item_elems.append(caption_widget)
                    img_widget = ipywidgets.Image(
                        value=image,
                        format='png',
                        **self.config.image_items
                    )
                    item_elems.append(img_widget)
                    items.append(ipywidgets.VBox(item_elems))
                self.main_box = ipywidgets.Box(items, layout=self.config.box_layout)
                self.content_ouput_widget.clear_output(wait=True)
                with self.content_ouput_widget:
                    IPython.display.display(self.main_box)
                # Keep the (1-based) dropdown in sync with the rendered page.
                self.page_selection_widget.value = page_idx + 1
            else:
                # NOTE(review): an out-of-range or already-current page clears the
                # display entirely (e.g. pressing '<' on the first page blanks the
                # view) — confirm this is intended rather than a silent no-op.
                self.content_ouput_widget.clear_output()
    def on_prev_page_button_click(self, button):
        """Handler for the '<' button."""
        self.update_page_items(self.cur_page_idx - 1)
    def on_next_page_button_click(self, button):
        """Handler for the '>' button."""
        self.update_page_items(self.cur_page_idx + 1)
    def on_page_selection_change(self, change):
        """Handler for the page dropdown; reacts to value changes only."""
        if change['type'] == 'change' and change['name'] == 'value':
            if self.page_selection_widget.value is not None:
                self.update_page_items(self.page_selection_widget.value - 1)
|
[
"[email protected]"
] | |
2ac952f31d08278c866ed2990a35fd7f970f3e15
|
fdf3aff5344271ef69ac7441c5dbca9cbf832cd1
|
/car_location/location/models/__init__.py
|
1219e9aa74d0d07e37129adcf33bba5812ee7ee2
|
[] |
no_license
|
lffsantos/DesafioPython
|
6069b3277780326611e34ae024f7506f3d56c5b4
|
fbc451b77c0310630fd95cbd23c339e194af88d1
|
refs/heads/master
| 2021-01-17T07:42:12.181187 | 2016-01-19T03:39:20 | 2016-01-19T03:39:20 | 49,730,610 | 0 | 0 | null | 2016-01-19T03:39:22 | 2016-01-15T16:25:30 |
JavaScript
|
UTF-8
|
Python
| false | false | 277 |
py
|
__author__ = 'lucas'
from car_location.location.models import categoriaveiculo
from car_location.location.models import veiculo
from car_location.location.models import cliente
from car_location.location.models import locacao
from car_location.location.models import devolucao
|
[
"[email protected]"
] | |
9b86763b34bce30afdb20d256f2e76972cc7a3ed
|
06919b9fd117fce042375fbd51d7de6bb9ae14fc
|
/py/tests/problems/hashtable/sparse_matrix.py
|
6ffec84f318bf38c68b1a11b7e3818d670628f49
|
[
"MIT"
] |
permissive
|
bmoretz/Daily-Coding-Problem
|
0caf2465579e81996869ee3d2c13c9ad5f87aa8f
|
f79e062e9f6e7b18b7e95c071fbe71ad104affcb
|
refs/heads/master
| 2022-12-07T15:41:06.498049 | 2021-11-18T19:45:19 | 2021-11-18T19:45:19 | 226,376,236 | 1 | 0 |
MIT
| 2022-11-22T09:20:23 | 2019-12-06T17:17:00 |
C++
|
UTF-8
|
Python
| false | false | 1,219 |
py
|
import unittest
from dcp.problems.hashtable.sparse_matrix import SparseMatrix
class Test_SparseMatrix(unittest.TestCase):
    """Exercise SparseMatrix.get over a matrix whose rows share a few hot columns."""
    def setUp(self):
        pass
    def test_case1(self):
        # Five identical rows of length 29 with 1s only at these columns.
        hot_cols = {5, 13, 19, 28}
        template = [1 if col in hot_cols else 0 for col in range(29)]
        mat = [list(template) for _ in range(5)]
        sm = SparseMatrix(mat, 100, 100)
        # Every cell must read back as non-zero exactly on the hot columns.
        for r, row in enumerate(mat):
            for c, _ in enumerate(row):
                if c in hot_cols:
                    assert sm.get(r, c) != 0
                else:
                    assert sm.get(r, c) == 0
|
[
"[email protected]"
] | |
4404d9fc262775f54d590079477f8a1ba5b93179
|
e65ae5bd9ae1c93e7117e630f7340bc73aa71212
|
/lib/gevent/greentest/test__semaphore.py
|
480ec0e930466916a152bfe75550bf85470a4e0e
|
[
"MIT"
] |
permissive
|
nadirhamid/oneline
|
e98ff1ed81da0536f9602ecdde2fb2a4fe80d256
|
833ebef0e26ae8e0cc452756381227746d830b23
|
refs/heads/master
| 2021-01-21T04:27:41.715047 | 2016-05-30T03:50:34 | 2016-05-30T03:50:34 | 23,320,578 | 1 | 2 |
NOASSERTION
| 2020-03-12T17:22:24 | 2014-08-25T16:29:36 |
Python
|
UTF-8
|
Python
| false | false | 640 |
py
|
import greentest
import gevent
from gevent.lock import Semaphore
class TestTimeoutAcquire(greentest.TestCase):
    # issue 39
    def test_acquire_returns_false_after_timeout(self):
        """A zero-valued semaphore must time out and report False (not None)."""
        sem = Semaphore(value=0)
        acquired = sem.acquire(timeout=0.01)
        assert acquired is False, repr(acquired)
    def test_release_twice(self):
        """Callbacks linked between releases fire in registration order."""
        sem = Semaphore()
        fired = []
        sem.rawlink(lambda _s: fired.append('a'))
        sem.release()
        sem.rawlink(lambda _s: fired.append('b'))
        sem.release()
        gevent.sleep(0.001)
        self.assertEqual(fired, ['a', 'b'])
if __name__ == '__main__':
    greentest.main()
|
[
"[email protected]"
] | |
58c7af9907e90657db990a4e460eb35ea902d102
|
f3693916a8b118bf139364604dac3f51235ed613
|
/functional/Components/Authorization_System/Authorization_System_generateToken_POST/test_TC_43372_Authorizationsystems_POST_Pastdate_For_Not_Before_Time.py
|
fbedd554265220c2b614fc0c146a20e9c5d9bc1c
|
[] |
no_license
|
muktabehera/QE
|
e7d62284889d8241d22506f6ee20547f1cfe6db1
|
3fedde591568e35f7b80c5bf6cd6732f8eeab4f8
|
refs/heads/master
| 2021-03-31T02:19:15.369562 | 2018-03-13T02:45:10 | 2018-03-13T02:45:10 | 124,984,177 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,941 |
py
|
# -*- coding: UTF-8 -*-
"""PFE Component Tests - Authorization_Systems.
* TC-43372 - Authorization_Systems POST:
Verify that user is unable to generate Token on providing time before expiration time for 'notBeforeTime' field using request POST /authorizationSystems/{id}/generateToken ".
Equivalent test CURL command:
curl -H "Host: <client_host>" -H "Authorization: Bearer <valid_token>"
-X POST -d @<JSON_data_file> -H "Content-Type: application/json"
"<PF_host>://<client_host>/authorizationSystems/<data_ID1_under_test>/generateToken"
Same, with test data:
curl -H "Host: <client_host>" -H "Authorization: Bearer <valid_token>"
-X POST -d @<JSON_data_file> -H "Content-Type: application/json"
"<PF_host>://<client_host>/authorizationSystems/ab.qumu.com/generateToken"
JSON data sent to PathFinder in this test:
{'audience': 'qed:a1',
'expirationTime': '2017-09-30T06:10:50.714Z',
'generatedToken': 'string',
'issueTime': '2016-01-29T06:10:50.714Z',
'macKey': '123456789012345678901234567890121',
'notBeforeTime': '2017-09-20T06:10:50.714Z',
'permissions': ['MANAGE_SYSTEM', 'MANAGE_CONFIGURATION'],
'qeda': {},
'qedp': {},
'subject': 'sub1',
'url': '',
'useCompactPermissions': True}
"""
import pytest
from qe_common import *
logger = init_logger()
@pytest.mark.draft  # remove this after the script has passed unit tests successfully
@pytest.mark.components
@pytest.allure.story('Authorization_Systems')
@pytest.allure.feature('POST')
class Test_PFE_Components(object):
    """PFE Authorization_Systems test cases."""
    @pytest.allure.link('https://jira.qumu.com/browse/TC-43372')
    @pytest.mark.Authorization_Systems
    @pytest.mark.POST
    def test_TC_43372_POST_Authorization_Systems_Pastdate_For_Not_Before_Time(self, context):
        """TC-43372 - Authorization_Systems-POST
        Verify that user is unable to generate Token on providing time before expiration time for 'notBeforeTime' field using request POST /authorizationSystems/{id}/generateToken "."""
        # Define a test step
        with pytest.allure.step("""Verify that user is unable to generate Token on providing time before expiration time for 'notBeforeTime' field using request POST /authorizationSystems/{id}/generateToken "."""):
            ### Positive test example
            # Test case configuration
            # NOTE(review): 'notBeforeTime' (2017-09-20) is earlier than
            # 'expirationTime' (2017-09-30) in this payload — confirm the data
            # actually matches the scenario described in the test title.
            tokenGenerationDetails = context.sc.TokenGenerationDetails(
                audience='qed:a1',
                expirationTime='2017-09-30T06:10:50.714Z',
                generatedToken='string',
                issueTime='2016-01-29T06:10:50.714Z',
                jwtId=None,
                macKey='123456789012345678901234567890121',
                notBeforeTime='2017-09-20T06:10:50.714Z',
                permissions=['MANAGE_SYSTEM', 'MANAGE_CONFIGURATION'],
                qeda={},
                qedp={},
                referrer=None,
                subject='sub1',
                url='',
                useCompactPermissions=True)
            # generateToken the Authorization_Systems.
            # The `check` call validates return code
            # and some of the swagger schema.
            # Most schema checks are disabled.
            # NOTE(review): id='generateToken' looks wrong — the module docstring
            # suggests an authorization-system ID (e.g. 'ab.qumu.com') belongs
            # here; verify against the API client.
            response = check(
                context.cl.Authorization_Systems.generateToken(
                    id='generateToken',
                    body=tokenGenerationDetails
                )
            )
            ### Can add tests here to validate the response content
        with pytest.allure.step("""Verify that user is unable to generate Token on providing time before expiration time for 'notBeforeTime' field using request POST /authorizationSystems/{id}/generateToken "."""):
            ### Negative test example
            # Test case configuration
            tokenGenerationDetails = context.sc.TokenGenerationDetails(
                audience='qed:a1',
                expirationTime='2017-09-30T06:10:50.714Z',
                generatedToken='string',
                issueTime='2016-01-29T06:10:50.714Z',
                jwtId=None,
                macKey='123456789012345678901234567890121',
                notBeforeTime='2017-09-20T06:10:50.714Z',
                permissions=['MANAGE_SYSTEM', 'MANAGE_CONFIGURATION'],
                qeda={},
                qedp={},
                referrer=None,
                subject='sub1',
                url='',
                useCompactPermissions=True)
            # prepare the request, so we can modify it
            request = context.cl.Authorization_Systems.generateToken(
                id='generateToken',
                body=tokenGenerationDetails
            )
            ### Invalid JSON Error injection example
            ### Errors that result in valid JSON can be configured above.
            ### Otherwise, uncomment the code below (request.future....)
            # Get the generated payload and corrupt the metric
            # request.future.request.data = request.future.request.data.replace(
            #     '"metric": 1,', '"metric":,'
            # )
            # generateToken the Authorization_Systems, and check we got the error we expect
            try:
                client, response = check(
                    request,
                    quiet=True, returnResponse=True
                )
            except (HTTPBadRequest, HTTPForbidden) as e:  # 400, 403 error
                get_error_message(e) | expect.any(
                    should.start_with('may not be empty'),
                    should.start_with('Invalid page parameter specified'),
                    should.contain('Invalid Authorization Token')
                )
            else:
                # Reaching here means the request unexpectedly succeeded.
                raise Exception(
                    "Expected error message, got {} status code instead.".format(
                        response.status_code))
|
[
"[email protected]"
] | |
de4031edd500d91f3c5f79daceda0b6ddd0c105d
|
53faa0ef3496997412eb5e697bc85eb09a28f8c9
|
/pipeline/0x02-databases/34-log_stats.py
|
cd026394d8134a1af7cdf365a1a6c146de8897f9
|
[] |
no_license
|
oran2527/holbertonschool-machine_learning
|
aaec2ffe762b959573f98a5f4e002272a5d643a3
|
8761eb876046ad3c0c3f85d98dbdca4007d93cd1
|
refs/heads/master
| 2023-08-14T00:37:31.163130 | 2021-09-20T13:34:33 | 2021-09-20T13:34:33 | 330,999,053 | 0 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 768 |
py
|
#!/usr/bin/env python3
""" stats about Nginx logs stored in MongoDB """
from pymongo import MongoClient
if __name__ == "__main__":
    # Print summary stats about Nginx logs stored in MongoDB.
    nginx = MongoClient('mongodb://127.0.0.1:27017').logs.nginx
    print("{} logs".format(nginx.count_documents({})))
    print("Methods:")
    # One line per HTTP verb, in this fixed order.
    for verb in ("GET", "POST", "PUT", "PATCH", "DELETE"):
        verb_count = nginx.count_documents({"method": verb})
        print("\tmethod {}: {}".format(verb, verb_count))
    status_checks = nginx.count_documents({"method": "GET", "path": "/status"})
    print("{} status check".format(status_checks))
|
[
"[email protected]"
] | |
cbe4d8dfdab89e21fe288bd6986ab78a30943da9
|
a1807bf5ca332fecc7e775c9bde25eeed318db9d
|
/disclosure_backend/tests/test_docgen.py
|
63a82b798a7aed4f2eaba4f1d41ba995ffbe972e
|
[] |
no_license
|
MrMaksimize/disclosure-backend
|
2c6a8936c08cd4c3ff328ee114a8050e410989cf
|
6d97305b4656bd630b9e12aef953daed51c84ed7
|
refs/heads/master
| 2020-12-26T00:46:07.104157 | 2016-01-06T17:04:38 | 2016-01-06T17:04:38 | 49,169,984 | 0 | 0 | null | 2016-02-12T14:05:13 | 2016-01-07T00:19:30 |
Python
|
UTF-8
|
Python
| false | false | 479 |
py
|
import os
from django.conf import settings
from django.core.management import call_command
from rest_framework.test import APITestCase
class DocGenerationTest(APITestCase):
    """Smoke-test the doc-generation management command."""
    def test_generate_docs(self):
        """ Test createcalaccessrawmodeldocs"""
        call_command('createcalaccessrawmodeldocs')
        # The command did not raise; confirm it produced the docs directory.
        expected_dir = os.path.join(settings.REPO_DIR, 'docs')
        self.assertTrue(os.path.exists(expected_dir))
|
[
"[email protected]"
] | |
a94dbdf4fc6e774943ac77d02fc7c1c4ab4a4eff
|
99767736ea5f34be4438ce689fc27454dffbf15c
|
/build/lib/sqlalchemy_nav/__init__.py
|
f5667ece8a87d320adf715b232e0f99d96ab7b47
|
[
"MIT"
] |
permissive
|
dsbowen/sqlalchemy-nav
|
4600ff85c99878d98167fee000d5b9cd6a0a90bc
|
d60b28fe74cdde65de68a140d0c2845d92fb9b0f
|
refs/heads/master
| 2020-08-02T09:35:27.233849 | 2020-06-10T16:50:22 | 2020-06-10T16:50:22 | 211,304,650 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 609 |
py
|
"""Mixins for SQLAlchemy-Nav
SQLAlchemy-Nav Mixins can be used to create dynamic navigation bar models
compatible with Bootstrap 4. Navigation bars can contain navigation
items, dropdown items, and custom html.
All models store their HTML in a `MutableSoup` attribute, `body`. This is
essentially a `BeautifulSoup` object which you can use to insert custom
HTML.
`Navitem`s are nested in `Navbar`s, and `Dropdownitem`s are nested in
`Navitem`s.
"""
from sqlalchemy_nav.navbar import NavbarMixin
from sqlalchemy_nav.navitem import NavitemMixin
from sqlalchemy_nav.dropdownitem import DropdownitemMixin
|
[
"[email protected]"
] | |
4239b59efd8be01546de57fd9436920f76c9aaf9
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_089/ch14_2020_03_09_13_31_11_006129.py
|
151ce51fd678c102515933e6bb62da962336d66d
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 136 |
py
|
def calcula_distancia_do_projetil(v, t, h):
    """Return the horizontal distance travelled by a projectile.

    v: launch speed (m/s); t: launch angle above the horizontal (radians,
    sin(t) must be non-zero); h: launch height above the ground (m).

    Uses the standard range formula for a launch height h:
        D = (v^2 / (2g)) * (1 + sqrt(1 + 2*g*h / (v^2 * sin(t)^2))) * sin(2t)

    Fixes vs. the original: 'math' was never imported (NameError on every
    call), v**2/2*9.8 multiplied by g instead of dividing by 2g, the square
    root was missing, and sin(t)^2 multiplied instead of dividing.
    """
    import math  # local import keeps this standalone snippet self-contained
    g = 9.8
    D = (v**2 / (2 * g)) * (1 + math.sqrt(1 + (2 * g * h) / (v**2 * math.sin(t)**2))) * math.sin(2 * t)
    return D
|
[
"[email protected]"
] | |
a3bd9f7287b261d6b7e3c747f1d10e15bca2a1c1
|
2855f26e603ec7bf5b18876b54b75ee4577bdf2c
|
/witdraw/forms.py
|
65f12090c349714d0754149c7cec48b2f49658bc
|
[] |
no_license
|
zkenstein/ppob_multipay_v2
|
e8ea789c395c6fa5b83ba56fbaf5ea08a2a77a14
|
85296f925acf3e94cc371637805d454581391f6e
|
refs/heads/master
| 2022-03-04T13:53:30.893380 | 2019-11-16T22:49:50 | 2019-11-16T22:49:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,150 |
py
|
from django import forms
from django.contrib.auth.models import User
from .models import Witdraw
MIN_COMMISION = 10000
class WitdrawForm(forms.ModelForm):
    """Form validating a commission-withdrawal request.

    Only 'agen' users (profile.user_type == 2) may be selected, and the
    request must meet the minimum amount and wallet-balance rules.
    """
    class Meta:
        model = Witdraw
        fields = [
            'create_by', 'amount'
        ]
    def __init__(self, *args, **kwargs):
        super(WitdrawForm, self).__init__(*args, **kwargs)
        # Restrict the selectable users to agen accounts (user_type == 2).
        self.fields['create_by'].queryset = User.objects.filter(
            profile__user_type=2
        )
    def clean_amount(self):
        """Reject withdrawals below the minimum commission amount."""
        amount = self.cleaned_data.get('amount')
        if amount < MIN_COMMISION:
            # Typo fix: message previously read 'Monimal withdraw 10.000'.
            raise forms.ValidationError('Minimal withdraw 10.000')
        return amount
    def clean_create_by(self):
        """Ensure the user is an agen with a phone number and enough commission."""
        usr = self.cleaned_data.get('create_by')
        if usr.profile.user_type != 2:
            raise forms.ValidationError('User is not an agen')
        if usr.profile.ponsel is None or usr.profile.ponsel == '':
            # Typo fix: message previously read 'Ponsel canot be empty'.
            raise forms.ValidationError('Ponsel cannot be empty')
        if usr.profile.wallet.commision < MIN_COMMISION:
            # Typo fix: message previously read 'Commision not enought to withdraw'.
            # (The model attribute 'commision' keeps its original spelling.)
            raise forms.ValidationError('Commission not enough to withdraw')
        return usr
|
[
"[email protected]"
] | |
ac33c346ad83106d32dfc516843c5835c52734e9
|
3ed70536d4d06b2ac43b64976ddc43a5d7025b31
|
/uri1091.py
|
4cb102241664ec3f00f7c77717e8df84b2c4c8f9
|
[] |
no_license
|
LuisHenrique01/Questoes_URI
|
7f1d397e3cd055349939184603eb86cb4bf43d65
|
35c8e77eb7cd9da96df4268b5d71f3ad87446c89
|
refs/heads/master
| 2020-07-22T08:12:12.700484 | 2020-04-12T17:39:29 | 2020-04-12T17:39:29 | 207,126,339 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 648 |
py
|
def main():
    """Classify points relative to a dividing point (URI 1091).

    Reads cases from stdin: a point count (0 terminates), the divider
    coordinates, then one point per line. Points sharing an axis with the
    divider print 'divisa'; otherwise the quadrant (NE/SE/NO/SO) is printed.
    """
    while True:
        casos = int(input())
        if casos == 0:
            break
        div_x, div_y = map(int, input().split())
        for _ in range(casos):
            x, y = map(int, input().split())
            if x == div_x or y == div_y:
                print('divisa')
            else:
                vertical = 'N' if y > div_y else 'S'
                horizontal = 'E' if x > div_x else 'O'
                print(vertical + horizontal)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
bbb33c2583e79f2ebfcf80477d93aa479721526b
|
648f742d6db2ea4e97b83c99b6fc49abd59e9667
|
/common/vault/oas/models/contracts_smart_contract.py
|
d5d7062b8e4de5bd91efe115deb981877b802760
|
[] |
no_license
|
jmiller-tm/replit
|
c56ce63718f6eb2d9b53bd09d3f7b3ef3496cb86
|
c8e6af3268c4ef8da66516154850919ea79055dc
|
refs/heads/main
| 2023-08-30T00:49:35.738089 | 2021-11-16T23:09:08 | 2021-11-16T23:09:08 | 428,809,777 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,856 |
py
|
# coding: utf-8
"""
vault/kernel/core_api/proto/v1/accounts/core_api_account_schedule_tags.proto
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: version not set
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ContractsSmartContract(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'code': 'str',
        'smart_contract_param_vals': 'dict(str, str)',
        'smart_contract_version_id': 'str'
    }
    attribute_map = {
        'code': 'code',
        'smart_contract_param_vals': 'smart_contract_param_vals',
        'smart_contract_version_id': 'smart_contract_version_id'
    }
    def __init__(self, code=None, smart_contract_param_vals=None, smart_contract_version_id=None):  # noqa: E501
        """ContractsSmartContract - a model defined in Swagger"""  # noqa: E501
        self._code = None
        self._smart_contract_param_vals = None
        self._smart_contract_version_id = None
        # Generated default; this model declares no swagger discriminator.
        self.discriminator = None
        if code is not None:
            self.code = code
        if smart_contract_param_vals is not None:
            self.smart_contract_param_vals = smart_contract_param_vals
        if smart_contract_version_id is not None:
            self.smart_contract_version_id = smart_contract_version_id
    @property
    def code(self):
        """Gets the code of this ContractsSmartContract.  # noqa: E501
        Source code of the Smart Contract that is to be simulated.  # noqa: E501
        :return: The code of this ContractsSmartContract.  # noqa: E501
        :rtype: str
        """
        return self._code
    @code.setter
    def code(self, code):
        """Sets the code of this ContractsSmartContract.
        Source code of the Smart Contract that is to be simulated.  # noqa: E501
        :param code: The code of this ContractsSmartContract.  # noqa: E501
        :type: str
        """
        self._code = code
    @property
    def smart_contract_param_vals(self):
        """Gets the smart_contract_param_vals of this ContractsSmartContract.  # noqa: E501
        Values for the Smart Contract parameters.  # noqa: E501
        :return: The smart_contract_param_vals of this ContractsSmartContract.  # noqa: E501
        :rtype: dict(str, str)
        """
        return self._smart_contract_param_vals
    @smart_contract_param_vals.setter
    def smart_contract_param_vals(self, smart_contract_param_vals):
        """Sets the smart_contract_param_vals of this ContractsSmartContract.
        Values for the Smart Contract parameters.  # noqa: E501
        :param smart_contract_param_vals: The smart_contract_param_vals of this ContractsSmartContract.  # noqa: E501
        :type: dict(str, str)
        """
        self._smart_contract_param_vals = smart_contract_param_vals
    @property
    def smart_contract_version_id(self):
        """Gets the smart_contract_version_id of this ContractsSmartContract.  # noqa: E501
        The ID that will be used as the Smart Contract ID in the simulation and can be referenced by the simulation instructions.  # noqa: E501
        :return: The smart_contract_version_id of this ContractsSmartContract.  # noqa: E501
        :rtype: str
        """
        return self._smart_contract_version_id
    @smart_contract_version_id.setter
    def smart_contract_version_id(self, smart_contract_version_id):
        """Sets the smart_contract_version_id of this ContractsSmartContract.
        The ID that will be used as the Smart Contract ID in the simulation and can be referenced by the simulation instructions.  # noqa: E501
        :param smart_contract_version_id: The smart_contract_version_id of this ContractsSmartContract.  # noqa: E501
        :type: str
        """
        self._smart_contract_version_id = smart_contract_version_id
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models/lists/dicts via their to_dict().
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Generated pattern: include plain dict entries if the model subclasses dict.
        if issubclass(ContractsSmartContract, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ContractsSmartContract):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"[email protected]"
] | |
0db6856e41bbe6b3773d8320f95dd2e5edbcc1d6
|
451f158c20fd425bc9d14c8e27e1a8f415423276
|
/novels_search/config/config.py
|
d38b0dcf8002cdb6327e83d0f826c02ffffffbc9
|
[
"Apache-2.0"
] |
permissive
|
TimeCharmer/novels-search
|
3767a77c237426a66f25287abae3c0a44528cf52
|
ab8152ff12d828dba0a8b52aa9c08675b21a1c5f
|
refs/heads/master
| 2021-01-19T14:21:28.438011 | 2017-04-12T09:37:48 | 2017-04-12T09:37:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,060 |
py
|
#!/usr/bin/env python
import logging
from aiocache import RedisCache
# Search engine
# Query endpoints for Baidu (mobile and desktop) and so.com.
URL_PHONE = 'https://m.baidu.com/s'
URL_PC = 'http://www.baidu.com/s'
BAIDU_RN = 15  # presumably Baidu's results-per-page ('rn') value — confirm at call site
SO_URL = "https://www.so.com/s"
# Desktop Chrome user agent sent with search requests.
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36'
# logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s: %(message)s')
LOGGER = logging.getLogger('novels_search')
# aiocache
# Redis connection settings; empty endpoint/password are deployment placeholders.
REDIS_DICT = dict(
    IS_CACHE=True,
    REDIS_ENDPOINT="",
    REDIS_PORT=6379,
    PASSWORD="",
    CACHE_DB=0,
    SESSION_DB=1,
    POOLSIZE=4,
)
# Module-level cache instance shared by the application (created at import time).
AIO_CACHE = RedisCache(endpoint=REDIS_DICT['REDIS_ENDPOINT'], port=REDIS_DICT['REDIS_PORT'], namespace="main")
# mongodb
# MongoDB credentials; empty values are placeholders to be configured.
MONGODB = dict(
    HOST="",
    PORT="",
    USERNAME='',
    PASSWORD='',
    DATABASE='owllook',
)
# website
WEBSITE = dict(
    IS_RUNNING=True,
    TOKEN=''
)
AUTH = {
    "Owllook-Api-Key": ""
}
# Hostnames the web app answers to.
HOST = ['owllook.net', 'www.owllook.net', '0.0.0.0:8000']
TIMEZONE = 'Asia/Shanghai'
|
[
"[email protected]"
] | |
a71c39e3394fc5cc6525d2128a4f4548fe0a677b
|
042bd40e554ac7fcd618c334ae98b4f43248a250
|
/interfaces/python/lib/ocean_dummy.py
|
41ebdac07d314bb378d87fb2fc951791b1c79acd
|
[
"Apache-2.0"
] |
permissive
|
kant/ocean-tensor-package
|
8a62df968335de2057ff095f0910e5ad5fcff8e1
|
fb3fcff8bba7f4ef6cd8b8d02f0e1be1258da02d
|
refs/heads/master
| 2020-03-29T04:01:22.064480 | 2018-09-19T19:17:19 | 2018-09-19T19:17:19 | 149,511,923 | 0 | 0 |
Apache-2.0
| 2018-09-19T21:03:14 | 2018-09-19T21:03:14 | null |
UTF-8
|
Python
| false | false | 925 |
py
|
# -------------------------------------------------------------------------
# Copyright 2018, IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
from pyOceanDummy_itf import *
from pyOceanDummy_cpu import *
# The GPU implementation is optional
try:
    from pyOceanDummy_gpu import *
except ImportError:
    # A missing module raises ImportError (ModuleNotFoundError), not the
    # ValueError the original caught — with ValueError the fallback never
    # triggered and importing this package failed on CPU-only builds.
    pass
|
[
"[email protected]"
] | |
8817c54b5350de86ca658ecf083530659a7b4852
|
ba0e07b34def26c37ee22b9dac1714867f001fa5
|
/unreleased/azure-mgmt-eventhub/azure/mgmt/eventhub/models/consumer_group_create_or_update_parameters.py
|
22ea6ee0888f32dfc1f858599060ca80abe0a49a
|
[
"MIT"
] |
permissive
|
CharaD7/azure-sdk-for-python
|
b11a08ac7d24a22a808a18203072b4c7bd264dfa
|
9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c
|
refs/heads/master
| 2023-05-12T12:34:26.172873 | 2016-10-26T21:35:20 | 2016-10-26T21:35:20 | 72,448,760 | 1 | 0 |
MIT
| 2023-05-04T17:15:01 | 2016-10-31T15:14:09 |
Python
|
UTF-8
|
Python
| false | false | 2,128 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConsumerGroupCreateOrUpdateParameters(Model):
"""Parameters supplied to the CreateOrUpdate Consumer Group operation.
:param location: Location of the resource.
:type location: str
:param type: ARM type of the namespace.
:type type: str
:param name: Name of the consumer group.
:type name: str
:param created_at: Exact time the message was created.
:type created_at: datetime
:param event_hub_path: The path of the event hub.
:type event_hub_path: str
:param updated_at: The exact time the message has been updated.
:type updated_at: datetime
:param user_metadata: The user metadata.
:type user_metadata: str
"""
_validation = {
'location': {'required': True},
}
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
'event_hub_path': {'key': 'properties.eventHubPath', 'type': 'str'},
'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'},
'user_metadata': {'key': 'properties.userMetadata', 'type': 'str'},
}
def __init__(self, location, type=None, name=None, created_at=None, event_hub_path=None, updated_at=None, user_metadata=None):
self.location = location
self.type = type
self.name = name
self.created_at = created_at
self.event_hub_path = event_hub_path
self.updated_at = updated_at
self.user_metadata = user_metadata
|
[
"[email protected]"
] | |
0db5b3deb80041a74fe00a76329d36249f0746ad
|
42dd79c614b775e6e8e782ea7ab332aef44251b9
|
/extra_apps/xadmin/views/website.py
|
02012eff0b7d66b2d7a36ed53d7a74ac75de61ae
|
[] |
no_license
|
Annihilater/imooc
|
114575638f251a0050a0240d5a25fc69ef07d9ea
|
547046cff32ce413b0a4e21714cb9ab9ce19bc49
|
refs/heads/master
| 2020-05-03T09:06:18.247371 | 2019-12-04T09:24:55 | 2019-12-04T09:24:55 | 178,545,115 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,949 |
py
|
from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.views.decorators.cache import never_cache
from django.contrib.auth.views import LoginView as login
from django.contrib.auth.views import LogoutView as logout
from django.http import HttpResponse
from .base import BaseAdminView, filter_hook
from .dashboard import Dashboard
from xadmin.forms import AdminAuthenticationForm
from xadmin.models import UserSettings
from xadmin.layout import FormHelper
class IndexView(Dashboard):
    """Admin landing page: the main dashboard."""
    # Displayed page title (translatable) and Font Awesome icon class.
    title = _("Main Dashboard")
    icon = "fa fa-dashboard"
    def get_page_id(self):
        """Return the stable identifier of this dashboard page ("home")."""
        return "home"
class UserSettingView(BaseAdminView):
    """Persist a single per-user UI setting posted by the admin frontend."""
    @never_cache
    def post(self, request):
        setting_key = request.POST["key"]
        setting_value = request.POST["value"]
        # Upsert the setting for the current user, then overwrite its value.
        setting, _created = UserSettings.objects.get_or_create(user=self.user, key=setting_key)
        setting.value = setting_value
        setting.save()
        return HttpResponse("")
class LoginView(BaseAdminView):
    """Render and process the admin login page."""
    title = _("Please Login")
    # Subclasses/plugins may override the form class and template.
    login_form = None
    login_template = None
    @filter_hook
    def update_params(self, defaults):
        # Extension point: plugins may tweak the login-view kwargs here.
        pass
    @never_cache
    def get(self, request, *args, **kwargs):
        """Build the login context and delegate to Django's LoginView."""
        context = self.get_context()
        # Crispy-forms helper: render fields only; media is included elsewhere.
        helper = FormHelper()
        helper.form_tag = False
        helper.include_media = False
        context.update(
            {
                "title": self.title,
                "helper": helper,
                "app_path": request.get_full_path(),
                REDIRECT_FIELD_NAME: request.get_full_path(),
            }
        )
        defaults = {
            "extra_context": context,
            # 'current_app': self.admin_site.name,
            "authentication_form": self.login_form or AdminAuthenticationForm,
            "template_name": self.login_template or "xadmin/views/login.html",
        }
        self.update_params(defaults)
        # return login(request, **defaults)
        # `login` is django.contrib.auth.views.LoginView (aliased in imports).
        return login.as_view(**defaults)(request)
    @never_cache
    def post(self, request, *args, **kwargs):
        # Form submission reuses the same code path as GET.
        return self.get(request)
class LogoutView(BaseAdminView):
    """Log the user out and render the logged-out page."""
    logout_template = None
    # Logging out must work even without site permission.
    need_site_permission = False
    @filter_hook
    def update_params(self, defaults):
        # Extension point: plugins may tweak the logout-view kwargs here.
        pass
    @never_cache
    def get(self, request, *args, **kwargs):
        """Build the logout context and delegate to Django's LogoutView."""
        context = self.get_context()
        defaults = {
            "extra_context": context,
            # 'current_app': self.admin_site.name,
            "template_name": self.logout_template or "xadmin/views/logged_out.html",
        }
        # NOTE(review): redundant with the `or` fallback above — when
        # logout_template is set, template_name already holds it.
        if self.logout_template is not None:
            defaults["template_name"] = self.logout_template
        self.update_params(defaults)
        # return logout(request, **defaults)
        # `logout` is django.contrib.auth.views.LogoutView (aliased in imports).
        return logout.as_view(**defaults)(request)
    @never_cache
    def post(self, request, *args, **kwargs):
        # Logout via POST reuses the same code path as GET.
        return self.get(request)
|
[
"[email protected]"
] | |
592f65c3845cec1b556e21772988fe41c2d61145
|
aca2258cf58e0d2c7e4939e73bcb82b6c135282c
|
/libs/Mailman/mailman/commands/tests/test_membership.py
|
6cf4802c6c8546a83b4d135e007b28482e0492be
|
[] |
no_license
|
masomel/py-import-analysis
|
cfe6749a1d7430b179559b9e0911b8c8df507be7
|
7edf8148e34b9f73ca6433ceb43a1770f4fa32c1
|
refs/heads/master
| 2021-03-16T10:00:24.205301 | 2019-08-01T20:32:34 | 2019-08-01T20:32:34 | 112,668,748 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,117 |
py
|
# Copyright (C) 2016-2017 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""Test the Leave command."""
import unittest
from mailman.app.lifecycle import create_list
from mailman.commands.eml_membership import Leave
from mailman.email.message import Message
from mailman.interfaces.mailinglist import SubscriptionPolicy
from mailman.interfaces.usermanager import IUserManager
from mailman.runners.command import Results
from mailman.testing.helpers import set_preferred
from mailman.testing.layers import ConfigLayer
from zope.component import getUtility
class TestLeave(unittest.TestCase):
    """Tests for the email `leave` (unsubscribe) command."""
    layer = ConfigLayer
    def setUp(self):
        # Fresh list and command instance for every test.
        self._mlist = create_list('[email protected]')
        self._command = Leave()
    def test_confirm_leave_not_a_member(self):
        self._mlist.unsubscription_policy = SubscriptionPolicy.confirm
        # Try to unsubscribe someone who is not a member.  Anne is a real
        # user, with a validated address, but she is not a member of the
        # mailing list.
        anne = getUtility(IUserManager).create_user('[email protected]')
        set_preferred(anne)
        # Initiate an unsubscription.
        msg = Message()
        msg['From'] = '[email protected]'
        results = Results()
        self._command.process(self._mlist, msg, {}, (), results)
        # The last results line must explain why the request was rejected.
        self.assertEqual(
            str(results).splitlines()[-1],
            'leave: [email protected] is not a member of [email protected]')
|
[
"[email protected]"
] | |
4b0e7d05d72b190fc3957af9c61e79e11a21b644
|
abccdbf9b0849b47960c3c352870793405debfed
|
/0x07-python-test_driven_development/4-print_square.py
|
fab6205b3d5afa34eec331a57f3ea50045fc96f1
|
[] |
no_license
|
hunterxx0/holbertonschool-higher_level_programming
|
88b1b0f31b536c6940f2e64a6924a06ba9cbf193
|
44064cf0722cd20d93f58b64ab185d2898770d73
|
refs/heads/master
| 2022-12-20T12:14:15.877147 | 2020-09-24T21:25:54 | 2020-09-24T21:25:54 | 259,276,369 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 451 |
py
|
#!/usr/bin/python3
"""
A square printing function: print_square()::

    >>> print_square(2)
    ##
    ##
"""


def print_square(size):
    """Print a square of side length ``size`` using the ``#`` character.

    Args:
        size (int): side length; must be a non-negative integer.

    Raises:
        TypeError: if ``size`` is not an integer.
        ValueError: if ``size`` is negative.
    """
    if type(size) is not int:
        raise TypeError('size must be an integer')
    if size < 0:
        raise ValueError('size must be >= 0')
    # One string multiplication per row instead of printing char by char.
    for _ in range(size):
        print("#" * size)
|
[
"[email protected]"
] | |
a8c4360159626be4980ee48d7a6491db264ceafc
|
162e2588156cb2c0039c926c5c442363d9f77b00
|
/tests/integration_tests/data_steward/analytics/cdr_ops/report_runner_test.py
|
c00229bcb0b01b9d9828c4aa35f5c20ef5eb9760
|
[
"MIT"
] |
permissive
|
nishanthpp93/curation
|
38be687240b52decc25ffb7b655f25e9faa40e47
|
ac9f38b2f4580ae806121dd929293159132c7d2a
|
refs/heads/develop
| 2022-08-08T20:33:53.125216 | 2021-12-03T21:38:48 | 2021-12-03T21:38:48 | 155,608,471 | 1 | 0 |
MIT
| 2020-10-09T01:14:39 | 2018-10-31T18:54:34 |
Python
|
UTF-8
|
Python
| false | false | 3,081 |
py
|
import os
import unittest
from tempfile import NamedTemporaryFile
from pathlib import PurePath
from bs4 import BeautifulSoup as bs
from analytics.cdr_ops.report_runner import IPYNB_SUFFIX, HTML_SUFFIX, main
TEST_NOTEBOOK = """
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.7.1
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# + tags=["parameters"]
project_id = ''
dataset_id = ''
table_name = ''
# -
print(
f'project_id={project_id}, dataset_id={dataset_id}, table_name={table_name}'
)
"""
class ReportRunnerTest(unittest.TestCase):
    # Integration test: renders a parameterized jupytext notebook to HTML
    # and checks the injected parameters appear in the rendered output.
    @classmethod
    def setUpClass(cls):
        print('**************************************************************')
        print(cls.__name__)
        print('**************************************************************')
    def setUp(self):
        # Write the jupytext "light" notebook source to a temp .py file.
        self.temp_notebook_py_file = NamedTemporaryFile('w',
                                                        suffix='.py',
                                                        delete=True)
        self.temp_notebook_py_file.write(TEST_NOTEBOOK.strip())
        self.temp_notebook_py_file.flush()
        self.notebook_py_path = self.temp_notebook_py_file.name
        # Derived artifact paths produced by the runner.
        self.notebook_ipynb_path = PurePath(
            self.notebook_py_path).with_suffix(IPYNB_SUFFIX)
        self.notebook_html_path = PurePath(
            self.notebook_py_path).with_suffix(HTML_SUFFIX)
        # Parameters papermill injects into the notebook's tagged cell.
        self.parameters = {
            'project_id': 'project_id',
            'dataset_id': 'dataset_id',
            'table_name': 'condition'
        }
    def tearDown(self):
        # This removes the python file automatically
        self.temp_notebook_py_file.close()
        # Remove the ipynb and html files
        os.remove(self.notebook_ipynb_path)
        os.remove(self.notebook_html_path)
    def test_main(self):
        # Running the notebook and saving to the HTML page
        main(self.notebook_py_path, self.parameters, self.notebook_py_path)
        # Testing the content of the HTML page
        with open(self.notebook_html_path, 'r') as f:
            soup = bs(f, parser="lxml", features="lxml")
            output_divs = soup.findAll('div', {"class": "jp-RenderedText"})
            output_div_count = len(output_divs)
            self.assertEqual(
                output_div_count, 1,
                f'Expected exactly 1 <div class="jp-RenderedText"> element, saw {output_div_count}'
            )
            output_pres = output_divs[0].findAll('pre')
            output_pres_count = len(output_pres)
            self.assertEqual(
                output_pres_count, 1,
                f'Expected exactly one <pre> element under <div class="jp-RenderedText">, saw {output_pres_count}'
            )
            # The printed cell output must echo the injected parameters.
            actual = output_pres[0].get_text().strip()
            expected = ', '.join(
                [f'{k}={v}' for k, v in self.parameters.items()])
            self.assertEqual(actual, expected)
|
[
"[email protected]"
] | |
5846bc204c7e1842e8b5ea77991c70bcba7181e3
|
8e24e8bba2dd476f9fe612226d24891ef81429b7
|
/geeksforgeeks/python/python_all/52_14.py
|
ac7dd979fdab752a1073fb88aae9f43db82f325a
|
[] |
no_license
|
qmnguyenw/python_py4e
|
fb56c6dc91c49149031a11ca52c9037dc80d5dcf
|
84f37412bd43a3b357a17df9ff8811eba16bba6e
|
refs/heads/master
| 2023-06-01T07:58:13.996965 | 2021-06-15T08:39:26 | 2021-06-15T08:39:26 | 349,059,725 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,219 |
py
|
Python – Test for Even values dictionary values lists
Given a dictionary with lists as values, map Boolean values depending upon all
values in List are Even or not.
> **Input** : {“Gfg” : [6, 8, 10], “is” : [8, 10, 12, 16], “Best” : [10, 16,
> 14, 6]}
> **Output** : {‘Gfg’: True, ‘is’: True, ‘Best’: True}
> **Explanation** : All lists have even numbers.
>
> **Input** : {“Gfg” : [6, 5, 10], “is” : [8, 10, 11, 16], “Best” : [10, 16,
> 14, 6]}
> **Output** : {‘Gfg’: False, ‘is’: False, ‘Best’: True}
> **Explanation** : Only “Best” has even numbers.
**Method #1 : Using loop**
This is the brute-force way to perform this task. We iterate over all the
values and check whether every value in each list is even; if so, we map the
key to True, otherwise to False.
## Python3
__
__
__
__
__
__
__
# Python3 code to demonstrate working of
# Test for Even values dictionary values lists
# Using loop
# initializing dictionary
test_dict = {"Gfg" : [6, 7, 3],
"is" : [8, 10, 12, 16],
"Best" : [10, 16, 14, 6]}
# printing original dictionary
print("The original dictionary is : " + str(test_dict))
res = dict()
for sub in test_dict:
flag = 1
# checking for even elements
for ele in test_dict[sub]:
if ele % 2 != 0:
flag = 0
break
# adding True if all Even elements
res[sub] = True if flag else False
# printing result
print("The computed dictionary : " + str(res))
---
__
__
**Output**
The original dictionary is : {'Gfg': [6, 7, 3], 'is': [8, 10, 12, 16], 'Best': [10, 16, 14, 6]}
The computed dictionary : {'Gfg': False, 'is': True, 'Best': True}
**Method #2 : Using all() + dictionary comprehension**
This is another way in which this task can be performed. Here, all() checks
every element for evenness, and a dictionary comprehension builds the result.
## Python3
__
__
__
__
__
__
__
# Python3 code to demonstrate working of
# Test for Even values dictionary values lists
# Using all() + dictionary comprehension
# initializing dictionary
test_dict = {"Gfg" : [6, 7, 3],
"is" : [8, 10, 12, 16],
"Best" : [10, 16, 14, 6]}
# printing original dictionary
print("The original dictionary is : " + str(test_dict))
# using all to check for all even elements
res = {sub : all(ele % 2 == 0 for ele in
test_dict[sub]) for sub in test_dict}
# printing result
print("The computed dictionary : " + str(res))
---
__
__
**Output**
The original dictionary is : {'Gfg': [6, 7, 3], 'is': [8, 10, 12, 16], 'Best': [10, 16, 14, 6]}
The computed dictionary : {'Gfg': False, 'is': True, 'Best': True}
Attention geek! Strengthen your foundations with the **Python Programming
Foundation** Course and learn the basics.
To begin with, your interview preparations Enhance your Data Structures
concepts with the **Python DS** Course.
My Personal Notes _arrow_drop_up_
Save
|
[
"[email protected]"
] | |
8eef75cab1181157c9944e567533b91f03ae8168
|
7f0c02b3eef636cc382484dd8015207c35cc83a8
|
/lib/python/treadmill/runtime/linux/image/_docker.py
|
fa24bbf1455842f1673f31d8a4867769d207bc30
|
[
"Apache-2.0"
] |
permissive
|
ceache/treadmill
|
4efa69482dafb990978bfdcb54b24c16ca5d1147
|
26a1f667fe272ff1762a558acfd66963494020ca
|
refs/heads/master
| 2021-01-12T12:44:13.474640 | 2019-08-20T23:22:37 | 2019-08-20T23:22:37 | 151,146,942 | 0 | 0 |
Apache-2.0
| 2018-10-01T19:31:51 | 2018-10-01T19:31:51 | null |
UTF-8
|
Python
| false | false | 4,405 |
py
|
"""Docker funtion in linux runtime
"""
import grp # pylint: disable=import-error
import io
import logging
import os
from treadmill import exc
from treadmill import fs
from treadmill import subproc
from treadmill import supervisor
from treadmill import utils
from treadmill import dockerutils
from treadmill.appcfg import abort as app_abort
from treadmill.fs import linux as fs_linux
from .. import _manifest
_LOGGER = logging.getLogger(__name__)
_CONTAINER_DOCKER_ENV_DIR = os.path.join('docker', 'env')
_CONTAINER_DOCKER_ETC_DIR = os.path.join('docker', 'etc')
_PASSWD_PATTERN = '{NAME}:x:{UID}:{GID}:{INFO}:{HOME}:{SHELL}'
_GROUP_PATTERN = '{NAME}:x:{GID}'
def _has_docker(app):
return hasattr(app, 'docker') and app.docker
def create_docker_environ_dir(container_dir, root_dir, app):
    """Creates environ dir for docker"""
    # No-op for apps without a docker section.
    if not _has_docker(app):
        return
    env_dir = os.path.join(container_dir, _CONTAINER_DOCKER_ENV_DIR)
    env = {}
    treadmill_bind_preload_so = os.path.basename(
        subproc.resolve('treadmill_bind_preload.so')
    )
    # Preload the bind shim only when the app actually uses ephemeral ports.
    if app.ephemeral_ports.tcp or app.ephemeral_ports.udp:
        env['LD_PRELOAD'] = os.path.join(
            _manifest.TREADMILL_BIND_PATH,
            '$LIB',
            treadmill_bind_preload_so
        )
    supervisor.create_environ_dir(env_dir, env)
    # Bind the environ directory in the container volume
    fs.mkdir_safe(os.path.join(root_dir, _CONTAINER_DOCKER_ENV_DIR))
    fs_linux.mount_bind(
        root_dir, os.path.join(os.sep, _CONTAINER_DOCKER_ENV_DIR),
        source=os.path.join(container_dir, _CONTAINER_DOCKER_ENV_DIR),
        recursive=False, read_only=True
    )
def prepare_docker_daemon_path(newroot_norm, app, data):
    """Mount a tmpfs over /etc/docker and populate the dockerd config dir.

    No-op for apps without a docker section.
    """
    if not _has_docker(app):
        return
    # /etc/docker as temp fs as dockerd create /etc/docker/key.json
    try:
        fs_linux.mount_tmpfs(newroot_norm, '/etc/docker')
    except FileNotFoundError as err:
        _LOGGER.error('Failed to mount docker tmpfs: %s', err)
        # this exception is caught by sproc run to generate abort event
        raise exc.ContainerSetupError(
            msg=str(err),
            reason=app_abort.AbortedReason.UNSUPPORTED,
        )
    # Setup the dockerd confdir
    dockerutils.prepare_docker_confdir(
        os.path.join(newroot_norm, 'etc', 'docker'),
        app,
        data
    )
def overlay_docker(container_dir, root_dir, app):
    """Mount etc/hosts for docker container
    """
    # FIXME: This path is mounted as RW because ro volume in treadmill
    # container can not be mounted in docker 'Error response from
    # daemon: chown /etc/hosts: read-only file system.'
    if not _has_docker(app):
        return
    overlay_dir = os.path.join(container_dir, 'overlay')
    # Bind the overlay's hosts file over the container's docker etc dir.
    fs_linux.mount_bind(
        root_dir, os.path.join(os.sep, _CONTAINER_DOCKER_ETC_DIR, 'hosts'),
        source=os.path.join(overlay_dir, 'etc/hosts'),
        recursive=False, read_only=False
    )
    # Minimal passwd/group files so dockerd can resolve the proid.
    _create_overlay_passwd(root_dir, app.proid)
    _create_overlay_group(root_dir, app.proid)
def _create_overlay_group(root_dir, proid):
    """Create an overlay /etc/group to mount into the container.

    The file contains only the root group and the proid's primary group.
    """
    path = os.path.join(root_dir, _CONTAINER_DOCKER_ETC_DIR, 'group')
    (_uid, gid) = utils.get_uid_gid(proid)
    with io.open(path, 'w') as f:
        root = _GROUP_PATTERN.format(
            NAME='root',
            GID=0
        )
        f.write('{}\n'.format(root))
        # Resolve the group name from the host's group database.
        group = _GROUP_PATTERN.format(
            NAME=grp.getgrgid(gid).gr_name,
            GID=gid
        )
        f.write('{}\n'.format(group))
def _create_overlay_passwd(root_dir, proid):
    """Create an overlay /etc/passwd to mount into the container.

    The file contains only root and the proid user (with a nologin shell).
    """
    path = os.path.join(root_dir, _CONTAINER_DOCKER_ETC_DIR, 'passwd')
    (uid, gid) = utils.get_uid_gid(proid)
    with io.open(path, 'w') as f:
        root = _PASSWD_PATTERN.format(
            NAME='root',
            UID=0,
            GID=0,
            INFO='root',
            HOME='/root',
            SHELL='/bin/sh'
        )
        f.write('{}\n'.format(root))
        user = _PASSWD_PATTERN.format(
            NAME=proid,
            UID=uid,
            GID=gid,
            INFO='',
            HOME='/',
            SHELL='/sbin/nologin'
        )
        f.write('{}\n'.format(user))
|
[
"[email protected]"
] | |
34698f6c132ed077c67d3a15f869d8d78bcefe61
|
3e3863e9eced23d646cd039a395b08ed6d1f3929
|
/training/medium/robbery-optimisation.py
|
ca3d8f40995da67cf5301d321958008d3857282b
|
[] |
no_license
|
Coni63/CG_repo
|
dd608bdbd2560598a72339d150ec003e6b688cac
|
d30e01dfe2a12e26c85799c82cf38e606ffdbc16
|
refs/heads/master
| 2020-06-25T04:03:54.260340 | 2019-10-20T16:16:40 | 2019-10-20T16:16:40 | 199,195,242 | 4 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 492 |
py
|
import sys
import math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
# House-robber DP: maximize total loot without robbing two adjacent houses.
n = int(input())
housevalue = [int(input()) for i in range(n)]
n = len(housevalue)
# pn: best total when house i IS robbed; qn: best total when house i is skipped.
pn = housevalue[0]
qn = 0
for i in range(1, n):
    pn1 = pn
    qn1 = qn
    # House i can only be robbed if house i-1 was skipped.
    pn = qn1 + housevalue[i]
    qn = max(pn1, qn1)
# Write an action using print
# To debug: print("Debug messages...", file=sys.stderr)
print(max(pn,qn))
|
[
"="
] |
=
|
fc9f01887c4a6b276e93e9c6fd48ae39dd9e98b0
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/data/p3BR/R2/benchmark/startQiskit_Class66.py
|
de1dd2f58ce2a9d58b5a7c4f8933d6310d93f36a
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,442 |
py
|
# qubit number=3
# total number=11
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
    """XOR two equal-length bit strings; the result is returned reversed."""
    bits = [str(int(a) ^ int(b)) for a, b in zip(s, t)]
    return ''.join(reversed(bits))
def bitwise_dot(s: str, t: str) -> str:
    """Inner product of two equal-length bit strings mod 2, as '0' or '1'."""
    total = sum(int(a) * int(b) for a, b in zip(s, t))
    return str(total % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build the oracle circuit O_f that flips the target for inputs with f(x) == '1'."""
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    # For each input where f is 1, add an n-controlled X on the target;
    # surrounding X gates make '0' bits of the pattern act as controls too.
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    # oracle.draw('mpl', filename=(kernel + '-oracle.png'))
    return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build the Bernstein-Vazirani circuit on n input qubits plus one ancilla."""
    # implement the Bernstein-Vazirani circuit
    zero = np.binary_repr(0, n)
    b = f(zero)
    # initial n + 1 bits
    input_qubit = QuantumRegister(n+1, "qc")
    classicals = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classicals)
    # inverse last one (can be omitted if using O_f^\pm)
    prog.x(input_qubit[n])
    # circuit begin
    # NOTE(review): the numbered gates below look auto-generated/mutated;
    # they are kept verbatim as they define this benchmark variant.
    prog.h(input_qubit[1]) # number=1
    prog.x(input_qubit[2]) # number=2
    prog.h(input_qubit[1]) # number=7
    prog.cz(input_qubit[2],input_qubit[1]) # number=8
    prog.h(input_qubit[1]) # number=9
    prog.cx(input_qubit[2],input_qubit[1]) # number=4
    prog.cx(input_qubit[2],input_qubit[1]) # number=10
    prog.z(input_qubit[2]) # number=3
    prog.y(input_qubit[2]) # number=5
    # apply H to get superposition
    for i in range(n):
        prog.h(input_qubit[i])
    prog.h(input_qubit[n])
    prog.barrier()
    # apply oracle O_f
    oracle = build_oracle(n, f)
    prog.append(
        oracle.to_gate(),
        [input_qubit[i] for i in range(n)] + [input_qubit[n]])
    # apply H back (QFT on Z_2^n)
    for i in range(n):
        prog.h(input_qubit[i])
    prog.barrier()
    # measure
    return prog
def get_statevector(prog: QuantumCircuit) -> Any:
    """Simulate `prog` and return {"|bits>": amplitude} for every basis state."""
    state_backend = Aer.get_backend('statevector_simulator')
    statevec = execute(prog, state_backend).result()
    quantum_state = statevec.get_statevector()
    # Number of qubits is log2 of the statevector length.
    qubits = round(log2(len(quantum_state)))
    quantum_state = {
        "|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
        for i in range(2 ** qubits)
    }
    return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
    """Run `prog` on the named backend; return counts, statevector,
    the most frequent measurement (bit-reversed) as `a`, and the given `b`.
    """
    # Q: which backend should we use?
    # get state vector
    quantum_state = get_statevector(prog)
    # get simulate results
    # provider = IBMQ.load_account()
    # backend = provider.get_backend(backend_str)
    # qobj = compile(prog, backend, shots)
    # job = backend.run(qobj)
    # job.result()
    backend = Aer.get_backend(backend_str)
    # transpile/schedule -> assemble -> backend.run
    results = execute(prog, backend, shots=shots).result()
    counts = results.get_counts()
    # Most common bitstring, reversed to match qubit ordering.
    a = Counter(counts).most_common(1)[0][0][::-1]
    return {
        "measurements": counts,
        # "state": statevec,
        "quantum_state": quantum_state,
        "a": a,
        "b": b
    }
def bernstein_test_1(rep: str):
    """011 . x + 1"""
    return bitwise_xor(bitwise_dot("011", rep), "1")
def bernstein_test_2(rep: str):
    """000 . x + 0"""
    return bitwise_xor(bitwise_dot("000", rep), "0")
def bernstein_test_3(rep: str):
    """111 . x + 1"""
    return bitwise_xor(bitwise_dot("111", rep), "1")
if __name__ == "__main__":
    n = 2
    a = "11"
    b = "1"
    # Oracle function: f(x) = (a . x) XOR b.
    f = lambda rep: \
        bitwise_xor(bitwise_dot(a, rep), b)
    prog = build_circuit(n, f)
    sample_shot =4000
    writefile = open("../data/startQiskit_Class66.csv", "w")
    # prog.draw('mpl', filename=(kernel + '.png'))
    backend = BasicAer.get_backend('statevector_simulator')
    # Transpile for the mock FakeYorktown device, then append extra gates.
    circuit1 = transpile(prog, FakeYorktown())
    circuit1.h(qubit=2)
    circuit1.x(qubit=3)
    info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
    # Dump counts, depth and circuit diagram to the CSV report.
    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
|
[
"[email protected]"
] | |
463cdc82d5cd7cd1180bc5eaf2219bb87377ff45
|
c9ad6ad969de505b3c8471c6f46dfd782a0fb498
|
/0x07-python-test_driven_development/2-matrix_divided.py
|
e9a56cfcebadaa9b6d3ac60f351c118538368d68
|
[] |
no_license
|
enterpreneur369/holbertonschool-higher_level_programming
|
002fd5a19b40c8b1db06b34c4344e307f24c17ac
|
dd7d3f14bf3bacb41e2116d732ced78998a4afcc
|
refs/heads/master
| 2022-06-20T00:57:27.736122 | 2020-05-06T14:26:10 | 2020-05-06T14:26:10 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,261 |
py
|
#!/usr/bin/python3
""" Module 2-matrix_mul
Module that contains the function matrix_mul
"""
def matrix_mul(m_a, m_b):
    """Multiply two matrices and return the product matrix.

    Fixes over the previous version: the result loop used a stale index and
    a running accumulator (producing wrong values), the inner-dimension
    compatibility check was missing, ``m_a[0]`` was indexed before the empty
    check (IndexError on ``[]``), and several m_b errors reused m_a messages.

    Args:
        m_a (list of lists of int/float): left operand, shape (n, m).
        m_b (list of lists of int/float): right operand, shape (m, p).

    Returns:
        list of lists: the (n, p) product matrix.

    Raises:
        TypeError: if an argument is not a list of lists of numbers,
            or its rows are ragged.
        ValueError: if an argument is empty or the shapes are
            incompatible for multiplication.
    """
    if not isinstance(m_a, list):
        raise TypeError("m_a must be a list")
    if not isinstance(m_b, list):
        raise TypeError("m_b must be a list")
    if any(not isinstance(row, list) for row in m_a):
        raise TypeError("m_a must be a list of lists")
    if any(not isinstance(row, list) for row in m_b):
        raise TypeError("m_b must be a list of lists")
    if m_a == [] or m_a == [[]]:
        raise ValueError("m_a can't be empty")
    if m_b == [] or m_b == [[]]:
        raise ValueError("m_b can't be empty")
    for row in m_a:
        if any(not isinstance(e, (int, float)) for e in row):
            raise TypeError("m_a should contain only integers or floats")
    for row in m_b:
        if any(not isinstance(e, (int, float)) for e in row):
            raise TypeError("m_b should contain only integers or floats")
    if any(len(row) != len(m_a[0]) for row in m_a):
        raise TypeError("each row of m_a must be of the same size")
    if any(len(row) != len(m_b[0]) for row in m_b):
        raise TypeError("each row of m_b must be of the same size")
    # Inner dimensions must agree: columns of m_a == rows of m_b.
    if len(m_a[0]) != len(m_b):
        raise ValueError("m_a and m_b can't be multiplied")
    # zip(*m_b) yields the columns of m_b; each result cell is a dot product.
    return [[sum(a * b for a, b in zip(row, col))
             for col in zip(*m_b)]
            for row in m_a]
|
[
"[email protected]"
] | |
b150f199a4268e8ab72d5c9a9ce49b2d6abe73d4
|
698cb8d24879fe75669af6f2667c3f88660a0a1e
|
/deepModel/s11b_ALS_CONCAT.py
|
29627c713b760a2dcfce82233dba73e25b24c24f
|
[] |
no_license
|
HuichuanLI/Recommand-Algorithme
|
c83c5d34d75eebd127e2aef7abc8b7152fc54f96
|
302e14a3f7e5d72ded73b72a538596b6dc1233ff
|
refs/heads/master
| 2023-05-11T03:01:30.940242 | 2023-04-30T08:03:19 | 2023-04-30T08:03:19 | 187,097,782 | 71 | 19 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,293 |
py
|
import numpy as np
from data_set import filepaths as fp
from torch.utils.data import DataLoader
from torch import nn
import torch.nn.functional as F
import torch
from sklearn.metrics import precision_score, recall_score, accuracy_score
from basic_sim import dataloader
class ALS_MLP(nn.Module):
    # Neural collaborative-filtering model: user/item embeddings whose
    # concatenation is fed through a small MLP to predict an interaction score.
    def __init__(self, n_users, n_items, dim):
        super(ALS_MLP, self).__init__()
        '''
        :param n_users: number of users
        :param n_items: number of items
        :param dim: embedding dimension
        '''
        # Randomly initialized user embedding table.
        self.users = nn.Embedding(n_users, dim, max_norm=1)
        # Randomly initialized item embedding table.
        self.items = nn.Embedding(n_items, dim, max_norm=1)
        # First layer input is dim * 2 because the user and item vectors
        # are concatenated, doubling the dimension.
        self.denseLayer1 = self.dense_layer(dim * 2, dim)
        self.denseLayer2 = self.dense_layer(dim, dim // 2)
        # Last layer outputs a single value; its sigmoid is the prediction.
        self.denseLayer3 = self.dense_layer(dim // 2, 1)
        self.sigmoid = nn.Sigmoid()
    def dense_layer(self, in_features, out_features):
        # Each MLP unit is a linear layer followed by a Tanh activation.
        return nn.Sequential(
            nn.Linear(in_features, out_features),
            nn.Tanh()
        )
    def forward(self, u, v, isTrain=True):
        '''
        :param u: user index ids, shape [batch_size]
        :param v: item index ids, shape [batch_size]
        :param isTrain: apply dropout when True (training mode)
        :return: sigmoid score per (user, item) pair, shape [batch_size]
        '''
        # [batch_size, dim]
        u = self.users(u)
        v = self.items(v)
        # [batch_size, dim*2]
        uv = torch.cat([u, v], dim=1)
        # [batch_size, dim]
        uv = self.denseLayer1(uv)
        # [batch_size, dim//2]
        uv = self.denseLayer2(uv)
        # Dropout at training time to reduce overfitting.
        if isTrain: uv = F.dropout(uv)
        # [batch_size,1]
        uv = self.denseLayer3(uv)
        # [batch_size]
        uv = torch.squeeze(uv)
        logit = self.sigmoid(uv)
        return logit
def doEva(net, d):
    # Evaluate `net` on dataset `d` (rows of [user, item, label]).
    # Returns (precision, recall, accuracy) at a 0.5 decision threshold.
    d = torch.LongTensor(d)
    u, i, r = d[:, 0], d[:, 1], d[:, 2]
    with torch.no_grad():
        out = net(u, i, False)
    # NOTE: the comprehension variable deliberately shadows `i` above.
    y_pred = np.array([1 if i >= 0.5 else 0 for i in out])
    y_true = r.detach().numpy()
    p = precision_score(y_true, y_pred)
    r = recall_score(y_true, y_pred)
    acc = accuracy_score(y_true, y_pred)
    return p, r, acc
def train(epochs=10, batchSize=1024, lr=0.001, dim=128, eva_per_epochs=1):
    '''
    :param epochs: number of training epochs
    :param batchSize: samples per mini-batch
    :param lr: learning rate
    :param dim: dimension of the user/item embedding vectors
    :param eva_per_epochs: evaluate once every this many epochs
    '''
    # Load the data.
    user_set, item_set, train_set, test_set = \
        dataloader.readRecData(fp.Ml_100K.RATING, test_ratio=0.1)
    # Initialize the ALS-MLP model.
    net = ALS_MLP(len(user_set), len(item_set), dim)
    # Optimizer (AdamW with weight decay as regularization).
    optimizer = torch.optim.AdamW(net.parameters(), lr=lr, weight_decay=0.2)
    # Binary cross-entropy loss on the sigmoid outputs.
    criterion = torch.nn.BCELoss()
    # Training loop.
    for e in range(epochs):
        all_lose = 0
        # Read the data batch by batch.
        for u, i, r in DataLoader(train_set, batch_size=batchSize, shuffle=True):
            optimizer.zero_grad()
            r = torch.FloatTensor(r.detach().numpy())
            result = net(u, i)
            loss = criterion(result, r)
            all_lose += loss
            loss.backward()
            optimizer.step()
        print('epoch {}, avg_loss = {:.4f}'.format(e, all_lose / (len(train_set) // batchSize)))
        # Periodically evaluate on both the train and test splits.
        if e % eva_per_epochs == 0:
            p, r, acc = doEva(net, train_set)
            print('train: Precision {:.4f} | Recall {:.4f} | accuracy {:.4f}'.format(p, r, acc))
            p, r, acc = doEva(net, test_set)
            print('test: Precision {:.4f} | Recall {:.4f} | accuracy {:.4f}'.format(p, r, acc))
if __name__ == '__main__':
train()
|
[
"[email protected]"
] | |
78b7438d65e518367530ce1ce4adeed283a97e9a
|
002ee33a04a6a74c10be79a2d667871de90fe728
|
/faq/views.py
|
4c8191dd9394c62569a71e75a3d988cd4a34e227
|
[] |
no_license
|
Code-Institute-Submissions/final-milestone-eCommerce
|
dc5866c61acd31bbf59ed31168e3e8110262a737
|
d1547f90dc26ca20be299b98966865ef88df0027
|
refs/heads/master
| 2022-11-26T00:27:32.014852 | 2020-08-07T14:44:55 | 2020-08-07T14:44:55 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 284 |
py
|
from django.shortcuts import render
from .models import FAQ
def show_faqs(request):
    """Render every stored FAQ entry on the faq.html page."""
    context = {'faqs': FAQ.objects.all()}
    return render(request, 'faq/faq.html', context)
|
[
"[email protected]"
] | |
0b026b7588cfd52cc92d6fd76b2985618ef2f533
|
60ca69e2a4c6b05e6df44007fd9e4a4ed4425f14
|
/beginner_contest/175/C.py
|
430190fbe6f342eff4aae54d28abe6bb704ad2fd
|
[
"MIT"
] |
permissive
|
FGtatsuro/myatcoder
|
12a9daafc88efbb60fc0cd8840e594500fc3ee55
|
25a3123be6a6311e7d1c25394987de3e35575ff4
|
refs/heads/master
| 2021-06-13T15:24:07.906742 | 2021-05-16T11:47:09 | 2021-05-16T11:47:09 | 195,441,531 | 0 | 0 |
MIT
| 2021-05-16T11:47:10 | 2019-07-05T16:47:58 |
Python
|
UTF-8
|
Python
| false | false | 331 |
py
|
import sys
input = sys.stdin.readline
sys.setrecursionlimit(10 ** 7)
# Start at coordinate x; make exactly k moves of fixed size d, each toward 0.
x, k, d = map(int, input().split())
# The problem is symmetric around 0, so work with the absolute value.
x = abs(x)
if x - (k * d) >= 0:
    # k moves cannot reach 0's neighborhood: just move straight toward it.
    print(x - (k * d))
    sys.exit(0)
else:
    # Moves remaining after first reaching the closest point on this side of 0.
    remain = k - (x // d)
    p_min = x % d
    n_min = abs(p_min -d)
    if remain % 2 == 0:
        # Remaining moves pair up (back and forth), ending at p_min.
        print(p_min)
    else:
        # Odd remainder leaves us one step across 0, at distance n_min.
        print(n_min)
|
[
"[email protected]"
] | |
84c2d1b9d4d9b14db0bd5e93aeac841a4e9ea9b0
|
ddd3b6663fbcc5b64fe9a96a3da87dd1460e1ab4
|
/src/routes/user.py
|
8908f3e57f79e7b83ad52961a8451953a51f62fe
|
[] |
no_license
|
ranihorev/scihive-backend
|
3d72e35829d97368a331bc85c362c7af29b63eb9
|
d246a8ed07b0fd793a1a9c3497c976cbd4957b3d
|
refs/heads/master
| 2022-06-17T17:32:35.834425 | 2021-04-02T14:40:07 | 2021-04-02T14:40:07 | 184,781,038 | 13 | 4 | null | 2022-05-25T03:51:56 | 2019-05-03T15:41:16 |
Python
|
UTF-8
|
Python
| false | false | 5,862 |
py
|
import os
from flask import Blueprint, jsonify
import logging
from flask_jwt_extended.view_decorators import jwt_optional
from flask_restful import Api, Resource, abort, reqparse, marshal_with, fields
from flask_jwt_extended import (create_access_token, jwt_required, jwt_refresh_token_required,
get_jwt_identity, get_raw_jwt, set_access_cookies, unset_access_cookies)
from google.oauth2 import id_token
from google.auth.transport import requests
from ..models import User, db, RevokedToken, Paper
from .user_utils import generate_hash, get_jwt_email, get_user_optional, verify_hash, get_user_by_email
from .notifications.index import deserialize_token
app = Blueprint('user', __name__)
api = Api(app)
logger = logging.getLogger(__name__)
parser = reqparse.RequestParser()
parser.add_argument('email', help='This field cannot be blank', required=True)
parser.add_argument('password', help='This field cannot be blank', required=True)
parser.add_argument('username', required=False)
# Based on https://github.com/oleg-agapov/flask-jwt-auth/
def make_error(status_code, message):
    """Build a JSON error response with the given HTTP status code.

    Bug fix: the previous version called ``jsonify()`` with no arguments,
    silently discarding ``message`` and returning an empty body; the
    message is now included in the JSON payload.
    """
    response = jsonify(message=message)
    response.status_code = status_code
    return response
class UserRegistration(Resource):
    """Disabled endpoint: password-based signup is no longer supported."""
    def post(self):
        abort(404, message='Password registration has been removed')
def get_user_profile(user: User):
    """Serialize a User into the camelCase profile dict returned to clients."""
    return {
        'username': user.username,
        'firstName': user.first_name,
        'lastName': user.last_name,
        'email': user.email,
        'provider': user.provider,
    }
class UserLogin(Resource):
    # Email/password login; rejects pending users and accounts that must
    # authenticate via Google instead.
    def post(self):
        data = parser.parse_args()
        current_user = get_user_by_email(data['email'])
        if not current_user:
            abort(401, message='User {} doesn\'t exist'.format(data['email']))
        elif current_user.pending:
            abort(403, message='User is pending. Please log in via Google')
        elif current_user.provider:
            abort(403, message='For security reasons, please log in via Google')
        if verify_hash(data['password'], current_user.password):
            access_token = create_access_token(identity=dict(email=data['email']))
            resp = jsonify(get_user_profile(current_user))
            # The JWT is delivered as a cookie rather than in the body.
            set_access_cookies(resp, access_token)
            return resp
        else:
            return abort(401, message="Wrong credentials")
class UserLogoutAccess(Resource):
    """Revoke the caller's JWT and clear the access cookie."""

    @jwt_required
    def post(self):
        jti = get_raw_jwt()['jti']
        try:
            # Blacklist the token id so it can no longer be used.
            db.session.add(RevokedToken(token=jti))
            db.session.commit()
        except Exception:
            # Narrowed from a bare `except:`; roll back so the session stays
            # usable and log the failure instead of swallowing it silently.
            db.session.rollback()
            logger.exception('Failed to revoke token')
            return {'message': 'Something went wrong'}, 500
        resp = jsonify({'message': 'Access token has been revoked'})
        unset_access_cookies(resp)
        return resp
class TokenRefresh(Resource):
    # Exchange a valid refresh token for a fresh access token.
    @jwt_refresh_token_required
    def post(self):
        current_user = get_jwt_identity()
        access_token = create_access_token(identity=current_user)
        return {'access_token': access_token}
class ValidateUser(Resource):
    # Return the current user's profile if a valid JWT cookie is present,
    # otherwise null; never fails for anonymous callers (jwt_optional).
    @jwt_optional
    def get(self):
        user = get_user_optional()
        if user:
            return get_user_profile(user)
        return None
class Unsubscribe(Resource):
    """Unsubscribes a user from notifications about a single paper.

    The ``token`` encodes the user's email and the paper id; it is produced
    elsewhere by ``serialize_token`` and delivered in email links.
    """

    @marshal_with({'title': fields.String})
    def post(self, token):
        try:
            email, paper_id = deserialize_token(token)
            user = get_user_by_email(email)
            # Verify paper exists
            paper = Paper.query.get_or_404(paper_id)
        except Exception:
            abort(404, message='invalid token')
        if user is None:
            # Fix: previously a missing user crashed below with an
            # AttributeError (HTTP 500); report it as a bad token instead.
            abort(404, message='invalid token')
        user.unsubscribed_papers.append(paper)
        db.session.commit()
        return paper
class GoogleLogin(Resource):
    """Login/registration via a Google ID token.

    Verifies the token, then either creates a new user, upgrades an existing
    password-less account to the Google provider, or simply logs the user in.
    """

    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument('token', help='This field cannot be blank', required=True, location='json')
        data = parser.parse_args()
        try:
            info = id_token.verify_oauth2_token(data['token'], requests.Request(), os.environ.get('GOOGLE_CLIENT_ID'))
        except ValueError:
            # Log via the module logger instead of print() so the failure is
            # visible in server logs.
            logger.exception('Google token verification failed')
            abort(403, message='invalid token')
        email = info['email']
        current_user_email = get_jwt_email()
        if current_user_email and current_user_email != email:
            # TODO: Allow linking non-matching email addresses
            abort(403, message='Your Google email address does not match your existing user')
        # create user if not missing
        user = User.query.filter_by(email=email).first()
        first_name: str = info.get('given_name')
        last_name: str = info.get('family_name')
        if not user:
            username = '_'.join(filter(None, [first_name, last_name])) or email.split('@')[0]
            # Bug fix: str.replace returns a new string; the original discarded
            # the result, leaving spaces in the stored username.
            username = username.replace(' ', '_')
            new_user = User(username=username,
                            email=email, password='', first_name=first_name, last_name=last_name, provider='Google')
            db.session.add(new_user)
            db.session.commit()
        elif not user.provider:
            # Upgrade a pending/local account to a Google-backed one.
            user.first_name = first_name
            user.last_name = last_name
            user.provider = 'Google'
            user.pending = False
            db.session.commit()
        access_token = create_access_token(
            identity={'email': email, 'provider': 'Google', 'first_name': first_name, 'last_name': last_name})
        resp = jsonify({'message': 'User was created/merged'})
        set_access_cookies(resp, access_token)
        return resp
# Route registration for this blueprint's REST resources.
api.add_resource(GoogleLogin, '/google_login')
api.add_resource(UserRegistration, '/register')
api.add_resource(UserLogin, '/login')
api.add_resource(UserLogoutAccess, '/logout')
api.add_resource(TokenRefresh, '/token/refresh')
api.add_resource(ValidateUser, '/validate')
api.add_resource(Unsubscribe, '/unsubscribe/<token>')
|
[
"[email protected]"
] | |
73797439d36e04dea271e61b61aa8620a1227750
|
f3b233e5053e28fa95c549017bd75a30456eb50c
|
/CDK2_input/L26/26-1S_wat_20Abox/set_1.py
|
36b3bd309b7e3c60deb656a873098ec88d7a6bb5
|
[] |
no_license
|
AnguseZhang/Input_TI
|
ddf2ed40ff1c0aa24eea3275b83d4d405b50b820
|
50ada0833890be9e261c967d00948f998313cb60
|
refs/heads/master
| 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 737 |
py
|
import os

# Root of this thermodynamic-integration run. Renamed from ``dir`` so the
# builtin ``dir`` is not shadowed.
base_dir = '/mnt/scratch/songlin3/run/CDK2/L26/wat_20Abox/ti_one-step/26_1S/'
filesdir = base_dir + 'files/'
temp_prodin = filesdir + 'temp_prod_1.in'
temp_pbs = filesdir + 'temp_1.pbs'

# Lambda windows for the TI calculation; each has its own working directory
# named after the formatted lambda value.
lambd = [0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738,
         0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]

for j in lambd:
    os.chdir("%6.5f" % (j))
    workdir = base_dir + "%6.5f" % (j) + '/'
    # Production input: copy the template and substitute the lambda value.
    prodin = workdir + "%6.5f_prod_1.in" % (j)
    os.system("cp %s %s" % (temp_prodin, prodin))
    os.system("sed -i 's/XXX/%6.5f/g' %s" % (j, prodin))
    # PBS job script: same copy-and-substitute treatment.
    pbs = workdir + "%6.5f_1.pbs" % (j)
    os.system("cp %s %s" % (temp_pbs, pbs))
    os.system("sed -i 's/XXX/%6.5f/g' %s" % (j, pbs))
    # Submit the job and return to the run root for the next window.
    os.system("qsub %s" % (pbs))
    os.chdir(base_dir)
|
[
"[email protected]"
] | |
49b4e0b91d57155a69ce4080265a0ee06dd8bf3c
|
159d4ae61f4ca91d94e29e769697ff46d11ae4a4
|
/venv/lib/python3.9/site-packages/webdriver_manager/archive.py
|
f827dc3151deda496a84de6fc9aa5809d377ab0e
|
[
"MIT"
] |
permissive
|
davidycliao/bisCrawler
|
729db002afe10ae405306b9eed45b782e68eace8
|
f42281f35b866b52e5860b6a062790ae8147a4a4
|
refs/heads/main
| 2023-05-24T00:41:50.224279 | 2023-01-22T23:17:51 | 2023-01-22T23:17:51 | 411,470,732 | 8 | 0 |
MIT
| 2023-02-09T16:28:24 | 2021-09-28T23:48:13 |
Python
|
UTF-8
|
Python
| false | false | 1,045 |
py
|
import tarfile
import zipfile
class Archive(object):
    """Wrapper around a downloaded driver archive (``.zip`` or ``.tar.gz``)."""

    def __init__(self, path: str):
        self.file_path = path

    def unpack(self, directory):
        """Extract the archive into *directory*.

        Returns the list of member names extracted, or ``None`` when the file
        extension is not recognised (preserves the original behaviour).
        """
        if self.file_path.endswith(".zip"):
            return self.__extract_zip(directory)
        elif self.file_path.endswith(".tar.gz"):
            return self.__extract_tar_file(directory)

    def __extract_zip(self, to_directory):
        # ``with`` ensures the zip handle is closed (the original leaked it);
        # namelist() remains available after close.
        with zipfile.ZipFile(self.file_path) as archive:
            try:
                archive.extractall(to_directory)
            except Exception as e:
                # "Text file busy" (errno 26) and "Permission denied"
                # (errno 13) occur when the driver binary is currently in
                # use; those are safe to ignore. Everything else propagates.
                # Robustness fix: guard the args indexing — the original
                # evaluated e.args[0]/e.args[1] unconditionally and itself
                # raised IndexError for exceptions with fewer than two args.
                ignorable = (
                    (e.args and e.args[0] in [26, 13]) or
                    (len(e.args) > 1 and
                     e.args[1] in ['Text file busy', 'Permission denied'])
                )
                if not ignorable:
                    raise e
            return archive.namelist()

    def __extract_tar_file(self, to_directory):
        # Try gzip first; fall back to bzip2 for mislabelled archives.
        try:
            tar = tarfile.open(self.file_path, mode="r:gz")
        except tarfile.ReadError:
            tar = tarfile.open(self.file_path, mode="r:bz2")
        members = tar.getmembers()
        tar.extractall(to_directory)
        tar.close()
        return [x.name for x in members]
|
[
"[email protected]"
] | |
3fc799fe13345e1eae8b48fa05b126090829b332
|
5a96112e11834d400a59b76caee33fd63831e273
|
/python3_API_framework_V2/TestCases/test_api_v2.py
|
60c0bfcfe591a2359e2ea5d8e3fd20024415a63f
|
[] |
no_license
|
zhaozongzhao/interface_test
|
d3f93c8220cb5fab5f063ce7e315e54b2f623ce6
|
f63f7e188639b34a8b80c9ce57591d9cabe3f4f8
|
refs/heads/master
| 2020-05-02T03:21:51.633352 | 2019-04-10T15:32:12 | 2019-04-10T15:32:12 | 177,726,854 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,456 |
py
|
import unittest
from Common.DoExcel import DoExcel
import os
from Common import myRequest
import ddt
from Common import dir_config
from Common import myLogger2
import logging
import re
# Instantiate the logger object (handled by the logging module instead).
#logger = MyLogger()
# Load every test case row from the workbook before the test class is built,
# because ddt needs the data at class-decoration time.
excelfile = dir_config.testcase_dir + "/api_info_1.xlsx"
de = DoExcel(excelfile)
all_case_datas = de.get_caseDatas_all()
print("所有的测试数据", all_case_datas)
# Values captured from earlier responses, used to correlate later requests.
global_vars = {}
@ddt.ddt
class Test_Api(unittest.TestCase):
    """Data-driven API test: one execution of test_api per Excel case row."""

    @classmethod
    def setUpClass(self):
        # Reset the workbook's initialisation data once before the run.
        de.update_init_data()
        de.save_excelFile(excelfile)

    @ddt.data(*all_case_datas)
    def test_api(self, case_data):
        global global_vars
        # Read each row of test data, send the HTTP request, and capture the
        # response.
        logging.info("==============开始执行一个接口测试用例,请求数据如下===============")
        logging.info("接口请求地址:%s" % case_data["url"])
        logging.info("接口请求类型:{0}".format(case_data["method"]))
        logging.info("接口请求数据为:{0}".format(case_data["request_data"]))
        # Dynamic substitution: check whether the request data references any
        # previously captured global variable and, if so, splice its value in.
        if len(global_vars) > 0 and case_data["request_data"] is not None:
            for key, value in global_vars.items():
                if case_data["request_data"].find(key) != -1:
                    case_data["request_data"] = case_data["request_data"].replace(key, value)
            logging.info("动态更新之后的请求数据为:\n{0}".format(case_data["request_data"]))
        res = myRequest.myRequest(case_data["url"], case_data["method"], case_data["request_data"])
        logging.info("本次接口请求的状态码为:%d" % res.status_code)
        logging.info("接口请求的返回数据为:")
        logging.info(res.text)
        # If the case declares a correlation expression ("name=regex"),
        # extract the first regex match from the response body and store it
        # under that name for later cases.
        if "related_exp" in case_data.keys():
            logging.info("需要从响应结果中提取数据:")
            #related_data = parse_response.get_relatedData_from_response(res.text,case_data["related_exp"])
            temp = case_data["related_exp"].split("=")
            res_id = re.findall(temp[1], res.text)
            # Captured dynamically; becomes a global variable.
            global_vars[temp[0]] = res_id[0]
        logging.info("接口请求的期望数据为:")
        logging.info(case_data["expected_data"])
        logging.info("期望结果与实际结果的比对方式为:")
        if int(case_data["compare_type"]) == 0:
            # compare_type 0: exact full-text match against the expectation.
            logging.info("全值匹配模式。")
            try:
                self.assertEqual(res.text, case_data["expected_data"])
                logging.info("结果比对成功,测试用例通过")
            except AssertionError:
                logging.exception("结果比对失败:")
                raise AssertionError
        else:
            # Otherwise the expectation is treated as a regular expression.
            logging.info("正则表达式匹配模式。")
            re_obj = re.match(case_data["expected_data"], res.text)
            self.assertIsNotNone(re_obj, "正则表达式匹配失败!")
        logging.info("========================结束一个接口测试用例==========================")
|
[
"[email protected]"
] | |
e1bbce8655b1596bb2a77c6db900e7a854d70cf5
|
2c16e24486ac92bbd37f5c6d0d00ec4ba4d48e56
|
/ex/ex1.py
|
0d5193b36e4107bb3f5edf45a87b64307424927a
|
[] |
no_license
|
alagram/lpthw
|
386b6cf7534e2f7dba2e5832d6975107f27ceb9b
|
656e7526006de80354917da881cbcbb3dbe8523a
|
refs/heads/master
| 2021-01-10T20:55:35.461722 | 2014-09-16T18:33:50 | 2014-09-16T18:33:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 228 |
py
|
# NOTE(review): Python 2 syntax — ``print`` is used as a statement below, so
# this file will not run under Python 3.
# print "Hello World!"
# print "Hello Again"
# print "I like tying this."
# print "This is fun."
# print "Yay! Printing."
# print "I'd much rather you 'not'."
# print 'I "said" do not tocuh this.'
print "I am still printing..."
|
[
"[email protected]"
] | |
46efd06e7181e3095d182fdcacca6baea3973712
|
8d375652e44b67d73102fee7abc1abaab4cb4329
|
/mcompiler/kernel/makeref.py
|
9dfdeb93aea606402be14eef4fbc0d4790b57a87
|
[
"MIT"
] |
permissive
|
paulscottrobson/old-m-versions
|
6d2061e36f2a5aaef388a4786406f876f0a06e0b
|
c2edb4200d32e066223ace4fd05837a485302645
|
refs/heads/master
| 2020-04-04T03:09:25.399283 | 2018-11-01T12:14:57 | 2018-11-01T12:14:57 | 155,709,691 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,608 |
py
|
# ***********************************************************************************************
# ***********************************************************************************************
#
# Name : makeref.py
# Purpose : make reference file.
# Author : Paul Robson ([email protected])
# Created : 17th September 2018
#
# ***********************************************************************************************
# ***********************************************************************************************
import re
# Map of decoded symbol name -> address, filled from the listing file.
references = {}
#
# Read in the listing file, and extract lines with label values on it.
# (first bit is snasm-only)
#
src = [x.strip().lower() for x in open("boot.img.vice").readlines()]
#
# For each line, see if it fits the <label> = $<address>
#
for l in src:
    if l.find(" _definition_") >= 0:
        #print(l)
        m = re.match("^al\s+c\:([0-9a-f]+)\s+_definition_([_0-9a-fmro]+)$",l)
        assert m is not None,l
        #
        # If so, extract name and address
        #
        name = m.group(2)
        address = int(m.group(1),16)
        #
        # If it is definition, get name, checking if it is a macro and
        # convert back to standard ASCII
        #
        isMacro = False
        if name[-6:] == "_macro":
            name = name[:-6]
            isMacro = True
        # Labels encode each original character as a hex byte separated by
        # underscores; decode them back to ASCII here.
        name = "".join([chr(int(x,16)) for x in name.split("_")])
        name = name.lower()
        if isMacro:
            # Macros are distinguished with a leading "&&".
            name = "&&"+name
        references[name.lower()] = address
#
# Write the file out.
#
# Sort symbols by address so the dictionary reads in memory order.
keys = [x for x in references]
keys.sort(key = lambda x:references[x])
ref = "\n".join(["{0}:=${1:06x}".format(x,references[x]) for x in keys])
h = open("boot.dict","w").write(ref+"\n")
|
[
"[email protected]"
] | |
0d2ea1c5f31a044d68ce7bb06f65aaa2ee8a1422
|
327981aeef801fec08305d70270deab6f08bc122
|
/13.tkinter与银行系统实战/thinker/18.Combobox下拉控件.py
|
0dc0692ce37fde2328c063bb484b27127a142176
|
[] |
no_license
|
AWangHe/Python-basis
|
2872db82187b169226271c509778c0798b151f50
|
2e3e9eb6da268f765c7ba04f1aefc644d50c0a29
|
refs/heads/master
| 2020-03-20T12:15:44.491323 | 2018-06-15T08:24:19 | 2018-06-15T08:24:19 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 612 |
py
|
# -*- coding: utf-8 -*-
import tkinter
from tkinter import ttk
# Create the main window.
win = tkinter.Tk()
# Set the window title (user-facing string kept as-is).
win.title("魔兽世界")
# Geometry: 400x400 pixels, offset 400 px from the left and 100 px from the top.
win.geometry("400x400+400+100")
# StringVar bound to the combobox's current value.
cv = tkinter.StringVar()
com = ttk.Combobox(win, textvariable = cv)
com.pack()
# Set the dropdown options.
com["value"] = ("济南", "青岛", "济宁")
# Default selection: the first entry.
com.current(0)
# Selection callback: prints the value via both the widget and the variable.
def func(event):
    print(com.get())
    print(cv.get())
com.bind("<<ComboboxSelected>>", func)
win.mainloop()
|
[
"[email protected]"
] | |
c6e6e1ef088631e80462884b26b6f3bdfea593fb
|
77311ad9622a7d8b88707d7cee3f44de7c8860cb
|
/res/scripts/client/messenger/proto/bw_chat2/find_criteria.py
|
ea24304c044114f780246fdbce4279659cbd77a3
|
[] |
no_license
|
webiumsk/WOT-0.9.14-CT
|
9b193191505a4560df4e872e022eebf59308057e
|
cfe0b03e511d02c36ce185f308eb48f13ecc05ca
|
refs/heads/master
| 2021-01-10T02:14:10.830715 | 2016-02-14T11:59:59 | 2016-02-14T11:59:59 | 51,606,676 | 0 | 0 | null | null | null | null |
WINDOWS-1250
|
Python
| false | false | 1,701 |
py
|
# 2016.02.14 12:42:53 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/messenger/proto/bw_chat2/find_criteria.py
from constants import PREBATTLE_TYPE
from messenger.ext import channel_num_gen
from messenger.m_constants import BATTLE_CHANNEL, PROTO_TYPE
from messenger.proto.interfaces import IEntityFindCriteria
class BWBattleChannelFindCriteria(IEntityFindCriteria):
    """Matches BW_CHAT2 channels whose client ID belongs to a known battle
    channel or to the squad prebattle channel."""

    def __init__(self):
        super(BWBattleChannelFindCriteria, self).__init__()
        self.__ids = []
        # Collect the client IDs of every known battle channel.
        for item in BATTLE_CHANNEL.ALL:
            clientID = channel_num_gen.getClientID4BattleChannel(item.name)
            if clientID:
                self.__ids.append(clientID)
        # The squad prebattle channel is included alongside battle channels.
        clientID = channel_num_gen.getClientID4Prebattle(PREBATTLE_TYPE.SQUAD)
        if clientID:
            self.__ids.append(clientID)

    def filter(self, channel):
        # Restrict to the BW_CHAT2 protocol and the IDs gathered above.
        return channel.getProtoType() is PROTO_TYPE.BW_CHAT2 and channel.getClientID() in self.__ids
class BWPrebattleChannelFindCriteria(IEntityFindCriteria):
    """Matches BW_CHAT2 channels that carry any prebattle type."""

    def filter(self, channel):
        # Guard clause on the protocol, then defer to the prebattle type
        # (truthiness preserved exactly as before).
        if channel.getProtoType() is not PROTO_TYPE.BW_CHAT2:
            return False
        return channel.getPrebattleType()
class BWChatTypeFindCriteria(IEntityFindCriteria):
    """Matches BW_CHAT2 channels whose proto data carries a given chat type."""

    def __init__(self, chatType):
        super(BWChatTypeFindCriteria, self).__init__()
        self.__chatType = chatType

    def filter(self, channel):
        # Guard clause on the protocol, then compare the chat type.
        if channel.getProtoType() is not PROTO_TYPE.BW_CHAT2:
            return False
        return channel.getProtoData().chatType == self.__chatType
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\messenger\proto\bw_chat2\find_criteria.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:42:53 Střední Evropa (běžný čas)
|
[
"[email protected]"
] | |
8edf548db029dd530fa8bddd6f142a6ecd491f48
|
3dfb4ee39555b30e6e0c6fcdbef371864e69f694
|
/google-cloud-sdk/lib/googlecloudsdk/api_lib/dns/transaction_util.py
|
e0444226d947ade648184a8d0f468d647f579eed
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
MD-Anderson-Bioinformatics/NG-CHM_Galaxy
|
41d1566d5e60416e13e023182ca4351304381a51
|
dcf4886d4ec06b13282143ef795c5f0ff20ffee3
|
refs/heads/master
| 2021-06-02T21:04:12.194964 | 2021-04-29T14:45:32 | 2021-04-29T14:45:32 | 130,249,632 | 0 | 1 | null | 2020-07-24T18:35:21 | 2018-04-19T17:25:33 |
Python
|
UTF-8
|
Python
| false | false | 5,070 |
py
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper methods for record-set transactions."""
import os
from dns import rdatatype
from googlecloudsdk.api_lib.dns import import_util
from googlecloudsdk.api_lib.dns import util
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import resource_printer
from googlecloudsdk.third_party.apis.dns.v1 import dns_v1_messages as messages
import yaml
# Default on-disk location of the pending record-set transaction.
DEFAULT_PATH = 'transaction.yaml'
class CorruptedTransactionFileError(core_exceptions.Error):
  """Raised when the transaction YAML cannot be parsed or lacks the
  required 'additions'/'deletions' keys."""

  def __init__(self):
    super(CorruptedTransactionFileError, self).__init__(
        'Corrupted transaction file.\n\n'
        'Please abort and start a new transaction.')
def WriteToYamlFile(yaml_file, change):
  """Writes the given change in yaml format to the given file.

  Args:
    yaml_file: file, File into which the change should be written.
    change: Change, Change to be written out.
  """
  # Delegates serialization to the resource printer's YAML implementation.
  printer = resource_printer.YamlPrinter(yaml_file)
  printer.AddRecord(change)
def _RecordSetsFromDictionaries(record_set_dictionaries):
  """Converts list of record-set dictionaries into list of ResourceRecordSets.

  Args:
    record_set_dictionaries: [{str:str}], list of record-sets as dictionaries.

  Returns:
    list of ResourceRecordSets equivalent to given list of yaml record-sets

  Raises:
    KeyError: if a dictionary is missing one of the required keys
      ('name', 'ttl', 'type', 'rrdatas').
  """
  record_sets = []
  for record_set_dict in record_set_dictionaries:
    record_set = messages.ResourceRecordSet()
    # Need to assign kind to default value for useful equals comparisons.
    record_set.kind = record_set.kind
    record_set.name = record_set_dict['name']
    record_set.ttl = record_set_dict['ttl']
    record_set.type = record_set_dict['type']
    record_set.rrdatas = record_set_dict['rrdatas']
    record_sets.append(record_set)
  return record_sets
def ChangeFromYamlFile(yaml_file):
  """Returns the change contained in the given yaml file.

  Args:
    yaml_file: file, A yaml file with change.

  Returns:
    Change, the change contained in the given yaml file.

  Raises:
    CorruptedTransactionFileError: if the record_set_dictionaries are invalid
  """
  try:
    # An empty file parses to None; treat it as an empty dict.
    change_dict = yaml.safe_load(yaml_file) or {}
  except yaml.error.YAMLError:
    raise CorruptedTransactionFileError()
  # Both keys must be present (they may be empty lists).
  if (change_dict.get('additions') is None or
      change_dict.get('deletions') is None):
    raise CorruptedTransactionFileError()
  change = messages.Change()
  change.additions = _RecordSetsFromDictionaries(change_dict['additions'])
  change.deletions = _RecordSetsFromDictionaries(change_dict['deletions'])
  return change
def CreateRecordSetFromArgs(args):
  """Creates and returns a record-set from the given args.

  Args:
    args: The arguments to use to create the record-set.

  Raises:
    ToolException: If given record-set type is not supported

  Returns:
    ResourceRecordSet, the record-set created from the given args.
  """
  rd_type = rdatatype.from_text(args.type)
  if rd_type not in import_util.RDATA_TRANSLATIONS:
    raise exceptions.ToolException(
        'unsupported record-set type [{0}]'.format(args.type))

  record_set = messages.ResourceRecordSet()
  # Need to assign kind to default value for useful equals comparisons.
  record_set.kind = record_set.kind
  record_set.name = util.AppendTrailingDot(args.name)
  record_set.ttl = args.ttl
  record_set.type = args.type
  record_set.rrdatas = args.data
  # TXT and SPF rdata strings must be quoted before being sent to the API.
  if rd_type is rdatatype.TXT or rd_type is rdatatype.SPF:
    record_set.rrdatas = [import_util.QuotedText(datum) for datum in args.data]
  return record_set
class TransactionFile(object):
  """Context for reading/writing from/to a transaction file."""

  def __init__(self, trans_file_path, mode='r'):
    # Fail fast with a user-friendly error when there is no transaction.
    if not os.path.isfile(trans_file_path):
      raise exceptions.ToolException(
          'transaction not found at [{0}]'.format(trans_file_path))

    self.__trans_file_path = trans_file_path

    try:
      self.__trans_file = open(trans_file_path, mode)
    except IOError as exp:
      msg = 'unable to open transaction [{0}] because [{1}]'
      msg = msg.format(trans_file_path, exp)
      raise exceptions.ToolException(msg)

  def __enter__(self):
    # Expose the raw file object inside ``with`` blocks.
    return self.__trans_file

  def __exit__(self, typ, value, traceback):
    self.__trans_file.close()
    # Translate I/O and YAML errors into a gcloud ToolException.
    # NOTE(review): ``typ is yaml.YAMLError`` matches that exact class only,
    # not its subclasses — confirm that is intended.
    if typ is IOError or typ is yaml.YAMLError:
      msg = 'unable to read/write transaction [{0}] because [{1}]'
      msg = msg.format(self.__trans_file_path, value)
      raise exceptions.ToolException(msg)
|
[
"[email protected]"
] | |
58a5ffe0456fe028034da211b3db8c3daf7f4530
|
7642f70954b73aca0d56f03b3e3577ee5648c752
|
/ppm/settings.py
|
5bfa923c2b43f7b3d5b3ae1d8a2264a866af4505
|
[] |
no_license
|
alviandk/ppm
|
8e5dfb2ca9a98b460c9b0a71be68b5310ed56d87
|
eea4d37904f86b4ec9cded6091b89d18244b85a9
|
refs/heads/master
| 2021-01-10T21:05:22.931101 | 2014-11-13T09:24:36 | 2014-11-13T09:24:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,100 |
py
|
"""
Django settings for ppm project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# Project root: two levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): hard-coded key committed to source control — load it from an
# environment variable before deploying.
SECRET_KEY = '_v)b*pi3yhflh(bvrrk+rq9*fm5=b+@yh03bdgb94h95+1=#w-'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'inventory',  # project app
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'ppm.urls'

WSGI_APPLICATION = 'ppm.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases

# NOTE(review): MySQL root with an empty password — acceptable only for
# local development.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'inventory',
        'USER' : 'root',
        'PASSWORD' : '',
        'HOST': '127.0.0.1',
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/

STATIC_URL = '/static/'
|
[
"[email protected]"
] | |
6e612a774a20e51feed223e0a74a18ebcf53f4a2
|
76fa4bc242502bcd9dfe1053c964318b94acc6d8
|
/matplotlib bar chart/df_barplot.py
|
fc8ef89725b545217214b8af713ce4b4e05eb56a
|
[] |
no_license
|
phani-1995/Week3-python_libraries
|
720156098ccab5301a58e39a4dd7af5a19a08008
|
1347b8dfd4980b37471a54ce991c967fdcb32e2b
|
refs/heads/master
| 2021-04-01T17:42:54.855954 | 2020-03-23T06:50:18 | 2020-03-23T06:50:18 | 248,204,612 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 302 |
py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# 5x5 demo matrix: rows are cities, columns are series a-e.
data=np.array([[2,4,6,8,10],[4,2,4,2,2],[8,3,7,6,4],[5,4,4,4,3],[6,6,8,6,2]])
dataFrame=pd.DataFrame(data,columns=['a','b','c','d','e'], index=["Delhi",'Mumbai','Hyderabad','Pune','Bengalur'])
# Grouped bar chart: one cluster per city, one bar per column.
dataFrame.plot(kind='bar')
plt.show()
|
[
"[email protected]"
] | |
d700bfe0470ed942dca42727b21481b2d69a4bcd
|
5e324af46c554b88b97ee26886b05c88457ff0f5
|
/franchises/models/franchise.py
|
8d73000fdaadc7d85bb373e0c6cadd7335661a11
|
[] |
no_license
|
doubleclickdetroit/dindintonight
|
1bda8851e49782d4dc16ca77d46e4b1f431c2b52
|
9769e1a96730b02511d25af8828b075dff5c35b5
|
refs/heads/master
| 2016-08-04T22:01:08.083566 | 2014-07-26T18:58:58 | 2014-07-26T18:58:58 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 927 |
py
|
from autoslug import AutoSlugField
from django.db import models
from django.db.models.signals import post_save
from core.models import BaseModel
class Franchise(BaseModel):
    """A franchise owned by exactly one user."""

    id = models.AutoField(primary_key=True)
    # One franchise per owner; reverse accessor is 'franchise_owners'.
    owner = models.OneToOneField('users.User', related_name='franchise_owners')
    # URL slug generated from ``name``; unique and indexed.
    slug = AutoSlugField(populate_from='name', unique=True, db_index=True)
    name = models.CharField(max_length=255)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    class Meta:
        app_label = 'franchises'
        db_table = 'franchises'
        verbose_name = 'Franchise'
        verbose_name_plural = 'Franchises'

    def __unicode__(self):
        # Display as the owner's full name (Python 2 / old-Django style).
        return '{0} {1}'.format(self.owner.first_name, self.owner.last_name)
def franchise_post_save_handler(sender, instance, **kwargs):
    # Placeholder post-save hook; intentionally a no-op for now.
    pass


post_save.connect(franchise_post_save_handler, sender=Franchise)
|
[
"[email protected]"
] | |
fc0b1a61451fe1c4b893d8ea586e3c6d8e04d357
|
7b2a3ea853dc44aea204f02abedaad6a2029f4ff
|
/inv_test.py
|
46e208002c5331c95094449e682798e59a78e53a
|
[] |
no_license
|
NoisyLeon/SW4Py
|
7d45503282dc988b5f886c039706bd79fdd6b339
|
7029f18eb526bcb46b4aa244da1e088ca57a56aa
|
refs/heads/master
| 2020-12-22T14:57:11.265397 | 2016-12-20T18:27:18 | 2016-12-20T18:27:18 | 56,792,735 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 227 |
py
|
# import obspy
#
# net=obspy.core.inventory.network.Network('SW4', ftanparams=[])
# inv=obspy.core.inventory.inventory.Inventory(networks=[net],source='CU')
# sta=obspy.core.inventory.ftanparam.Station('aa',13,132.4214,0.0)
|
[
"[email protected]"
] | |
9b52f8728284f014f32195d6f50595415bcec9bb
|
cf54adda6874a4256401e9e4eb28f353b28ae74b
|
/python-modules/python_call_django_view.py
|
f56832338684b861081db955189ae868d9eae874
|
[] |
no_license
|
oraant/study
|
c0ea4f1a7a8c3558c0eac4b4108bc681a54e8ebf
|
7bce20f2ea191d904b4e932c8d0abe1b70a54f7e
|
refs/heads/master
| 2020-09-23T02:08:07.279705 | 2016-11-21T06:30:26 | 2016-11-21T06:30:26 | 66,995,585 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 656 |
py
|
# coding:utf-8
# # tree /home/oraant/test/django_celery/|grep -v .pyc
# /home/oraant/test/django_celery/
# ├── django_celery
# │ ├── __init__.py
# │ ├── settings.py
# │ ├── urls.py
# │ ├── wsgi.py
# ├── manage.py
# └── myapp
# ├── admin.py
# ├── apps.py
# ├── __init__.py
# ├── migrations
# │ ├── __init__.py
# ├── models.py
# ├── tests.py
# └── views.py
#
# 3 directories, 25 files
import sys
# Make the Django project importable so its view can be called directly.
sys.path.append('/home/oraant/test/django_celery/')
from myapp.views import test_add
# NOTE(review): Python 2 print statement — this file will not run under
# Python 3 as-is.
print test_add(1, 2)
|
[
"[email protected]"
] | |
33a26a9eff1d85003c886ec1259d2874765ba03b
|
a2b6bc9bdd2bdbe5871edb613065dd2397175cb3
|
/中等/旋转图像.py
|
239a028395365d7e1f8543fcf746f87fc6437301
|
[] |
no_license
|
Asunqingwen/LeetCode
|
ed8d2043a31f86e9e256123439388d7d223269be
|
b7c59c826bcd17cb1333571eb9f13f5c2b89b4ee
|
refs/heads/master
| 2022-09-26T01:46:59.790316 | 2022-09-01T08:20:37 | 2022-09-01T08:20:37 | 95,668,066 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,224 |
py
|
'''
给定一个 n × n 的二维矩阵表示一个图像。
将图像顺时针旋转 90 度。
说明:
你必须在原地旋转图像,这意味着你需要直接修改输入的二维矩阵。请不要使用另一个矩阵来旋转图像。
示例 1:
给定 matrix =
[
[1,2,3],
[4,5,6],
[7,8,9]
],
原地旋转输入矩阵,使其变为:
[
[7,4,1],
[8,5,2],
[9,6,3]
]
示例 2:
给定 matrix =
[
[ 5, 1, 9,11],
[ 2, 4, 8,10],
[13, 3, 6, 7],
[15,14,12,16]
],
原地旋转输入矩阵,使其变为:
[
[15,13, 2, 5],
[14, 3, 4, 1],
[12, 6, 8, 9],
[16, 7,10,11]
]
'''
from typing import List
class Solution:
    def rotate(self, matrix: List[List[int]]) -> None:
        """
        Do not return anything, modify matrix in-place instead.
        """
        # Rotate 90 degrees clockwise in place: transpose across the main
        # diagonal, then reverse each row. (The original flipped the matrix
        # vertically and then transposed — the composition is identical.)
        size = len(matrix)
        for r in range(size):
            for c in range(r + 1, size):
                matrix[r][c], matrix[c][r] = matrix[c][r], matrix[r][c]
        for row in matrix:
            row.reverse()
if __name__ == '__main__':
    # Smoke test using the first example from the problem statement.
    matrix = [
        [1, 2, 3],
        [4, 5, 6],
        [7, 8, 9]
    ]
    sol = Solution()
    sol.rotate(matrix)
|
[
"[email protected]"
] | |
df4bc3c52cb2cc13ff6155431b8a111077115ef7
|
da6d44b06f631387739d04471920037e8541d6c0
|
/problems/014.py
|
8753c9f24c8c00abf2eddba5325e948652a085c7
|
[
"MIT"
] |
permissive
|
JoshKarpel/euler-python
|
f6d5d5551a0d77565c852e3eb1e89522675824ec
|
9c4a89cfe4b0114d84a82e2b2894c7b8af815e93
|
refs/heads/master
| 2021-09-01T09:07:46.378352 | 2017-12-26T05:39:35 | 2017-12-26T05:39:35 | 64,712,642 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 413 |
py
|
from problems import mymath, utils
@utils.memoize
def collatz_length(n):
    """Return the number of terms in the Collatz sequence starting at *n*.

    Fix: use floor division (``//``) for the even step so the memo keys stay
    integers — ``n / 2`` produces floats under Python 3 (same result, but
    float keys and float arithmetic throughout the recursion).
    """
    if n == 1:
        return 1
    if n % 2 == 0:
        return 1 + collatz_length(n // 2)
    else:
        return 1 + collatz_length((3 * n) + 1)
def solve():
    """Project Euler 14: starting number below one million that produces the
    longest Collatz chain."""
    collatz_lengths = {x: collatz_length(x) for x in range(1, 1000001)}
    return mymath.key_of_max_value(collatz_lengths)


if __name__ == '__main__':
    print(solve())
|
[
"[email protected]"
] | |
fc0dfd542cb1fc87198d882b23f32e2a923cb059
|
8822149855c27522b54b05f796e292c1c63dbdf6
|
/mnist.py
|
105022e5d79ea5317478d7612e35b04793373105
|
[] |
no_license
|
jaythaceo/TensorFlow-Tutorial
|
3c33844b473e67c63bfa9992c124e22ac2a394c3
|
b4eca4f3f25eeedd868ee2a0645eb617c1b3208a
|
refs/heads/master
| 2021-06-27T01:38:49.942255 | 2017-02-04T23:09:51 | 2017-02-04T23:09:51 | 59,586,904 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,675 |
py
|
# Copyright 2016 Jason "jaythaceo" Brooks. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Builds the MNIST network
Implements the inference/loss/training pattern for model building.
1. inference() - Builds the model as far as is required for running the network
forward to make predictions.
2. loss() - Added to the inference model the layers required to generate loss.
3. training() - Adds to the loss model the Ops required to generate and
apply gradiants.
This file is used by the various "fully_connected_*.py" files and not meant to
be run.
"""
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
# Mini-batch size for training and number of test samples scored per epoch.
batch_size = 128
test_size = 256


def init_weights(shape):
    # Weights drawn from a normal distribution with stddev 0.01.
    return tf.Variable(tf.random_normal(shape, stddev=0.01))
def model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden):
    """Three conv+max-pool blocks, a fully connected layer, and the output layer.

    Args:
        X: input image batch, shape (?, 28, 28, 1).
        w, w2, w3: convolution filter weights.
        w4: fully connected weights; w_o: output-layer weights.
        p_keep_conv, p_keep_hidden: dropout keep probabilities.

    Returns:
        Logits tensor (pre-softmax), shape (?, 10).
    """
    l1a = tf.nn.relu(tf.nn.conv2d(X, w,  # l1a shape=(?, 28, 28, 32)
                                  strides=[1, 1, 1, 1], padding='SAME'))
    l1 = tf.nn.max_pool(l1a, ksize=[1, 2, 2, 1],  # l1 shape=(?, 14, 14, 32)
                        strides=[1, 2, 2, 1], padding='SAME')
    l1 = tf.nn.dropout(l1, p_keep_conv)

    l2a = tf.nn.relu(tf.nn.conv2d(l1, w2,  # l2a shape=(?, 14, 14, 64)
                                  strides=[1, 1, 1, 1], padding='SAME'))
    l2 = tf.nn.max_pool(l2a, ksize=[1, 2, 2, 1],  # l2 shape=(?, 7, 7, 64)
                        strides=[1, 2, 2, 1], padding='SAME')
    l2 = tf.nn.dropout(l2, p_keep_conv)

    l3a = tf.nn.relu(tf.nn.conv2d(l2, w3,  # l3a shape=(?, 7, 7, 128)
                                  strides=[1, 1, 1, 1], padding='SAME'))
    l3 = tf.nn.max_pool(l3a, ksize=[1, 2, 2, 1],  # l3 shape=(?, 4, 4, 128)
                        strides=[1, 2, 2, 1], padding='SAME')
    # Flatten the conv features for the fully connected layer.
    l3 = tf.reshape(l3, [-1, w4.get_shape().as_list()[0]])  # reshape to (?, 2048)
    l3 = tf.nn.dropout(l3, p_keep_conv)

    l4 = tf.nn.relu(tf.matmul(l3, w4))
    l4 = tf.nn.dropout(l4, p_keep_hidden)

    pyx = tf.matmul(l4, w_o)
    return pyx
# Load MNIST with one-hot labels and reshape the flat images to NHWC tensors.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels
trX = trX.reshape(-1, 28, 28, 1)  # 28x28x1 input img
teX = teX.reshape(-1, 28, 28, 1)  # 28x28x1 input img

X = tf.placeholder("float", [None, 28, 28, 1])
Y = tf.placeholder("float", [None, 10])

w = init_weights([3, 3, 1, 32])       # 3x3x1 conv, 32 outputs
w2 = init_weights([3, 3, 32, 64])     # 3x3x32 conv, 64 outputs
w3 = init_weights([3, 3, 64, 128])    # 3x3x32 conv, 128 outputs
w4 = init_weights([128 * 4 * 4, 625]) # FC 128 * 4 * 4 inputs, 625 outputs
w_o = init_weights([625, 10])         # FC 625 inputs, 10 outputs (labels)

# Dropout keep probabilities are fed per run (train vs. evaluation values).
p_keep_conv = tf.placeholder("float")
p_keep_hidden = tf.placeholder("float")
py_x = model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden)

cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))
train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)
predict_op = tf.argmax(py_x, 1)

# Launch the graph in a session
with tf.Session() as sess:
    # you need to initialize all variables
    tf.global_variables_initializer().run()

    for i in range(100):
        # Slide [start, end) windows of batch_size over the training set.
        training_batch = zip(range(0, len(trX), batch_size),
                             range(batch_size, len(trX)+1, batch_size))
        for start, end in training_batch:
            sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end],
                                          p_keep_conv: 0.8, p_keep_hidden: 0.5})

        # Score a random test subset each epoch; dropout disabled (keep=1.0).
        test_indices = np.arange(len(teX))  # Get A Test Batch
        np.random.shuffle(test_indices)
        test_indices = test_indices[0:test_size]

        print(i, np.mean(np.argmax(teY[test_indices], axis=1) ==
                         sess.run(predict_op, feed_dict={X: teX[test_indices],
                                                         p_keep_conv: 1.0,
                                                         p_keep_hidden: 1.0})))
|
[
"[email protected]"
] | |
74fdcfd69840950e1b3e336b45fef12d98d7d355
|
91ff6fdf7b2ccc58869d6ad41842f230644952c1
|
/requirements/venky_task/String/7.py
|
4f1a8999bfbb7bfa6d11aac952ba9d77b5cfcd61
|
[] |
no_license
|
KONASANI-0143/Dev
|
dd4564f54117f54ccfa003d1fcec4220e6cbe1f9
|
23d31fbeddcd303a7dc90ac9cfbe2c762d61c61e
|
refs/heads/master
| 2023-08-14T15:59:59.012414 | 2021-10-13T14:54:49 | 2021-10-13T15:10:33 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 183 |
py
|
def venky(s):
    """Print the slice of *s* starting at the first "not", followed by a
    copy of *s* with every "not" replaced by "poor".

    Raises NameError when *s* contains no standalone "not" word, since the
    replacement string is only bound inside the loop.
    """
    start = s.find("not")
    for word in s.split():
        if word == "not":
            replaced = s.replace(word, "poor")
    print(s[start:] + str(replaced))
# Prompt for a sentence and run the demo on it.
n=input("enter a string :")
venky(n)
|
[
"[email protected]"
] | |
1f8c10416376d98fd9647224d5f6e4826a12517b
|
cd0cf1c75c715a67502ff7f164bb070da78956de
|
/calculation/migrations/0046_auto_20160310_0927.py
|
2a0fc34d174875b0400e1f1c7e5a69becb00158e
|
[] |
no_license
|
nustarnuclear/orient_linux
|
9792fb4319007708861d619dac081fa32206d3f6
|
95082ea56a0dfc248024f9bf54897a017985ccdf
|
refs/heads/master
| 2020-03-28T03:17:02.629719 | 2017-01-04T08:38:16 | 2017-01-04T08:38:16 | 43,117,046 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,065 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import calculation.models
class Migration(migrations.Migration):
    """Remove Server.status, add log/output FileFields to RobinTask, and
    give PreRobinTask.server a default and related_name."""
    dependencies = [
        ('calculation', '0045_server_queue'),
    ]
    operations = [
        # Server.status is no longer tracked on the model.
        migrations.RemoveField(
            model_name='server',
            name='status',
        ),
        # Both files share the same upload-path helper from calculation.models.
        migrations.AddField(
            model_name='robintask',
            name='log_file',
            field=models.FileField(upload_to=calculation.models.get_robintask_upload_path, blank=True, null=True),
        ),
        migrations.AddField(
            model_name='robintask',
            name='output_file',
            field=models.FileField(upload_to=calculation.models.get_robintask_upload_path, blank=True, null=True),
        ),
        # server_default supplies the fallback Server for existing rows.
        migrations.AlterField(
            model_name='prerobintask',
            name='server',
            field=models.ForeignKey(to='calculation.Server', default=calculation.models.server_default, related_name='pre_robin_inputs'),
        ),
    ]
|
[
"[email protected]"
] | |
5faf40bbcb2caaa7edd850c568952b71d9a6de70
|
05c22017cde07bb9fdff2c7f03f2602b1cd15323
|
/src/textual/widget.py
|
e43372e6a165b2ec0153e5c91c32e700bf39cf10
|
[
"MIT"
] |
permissive
|
ramiro/textual
|
00b0a7fc6fea95d327455c8328248cd926f3eaff
|
a6a912ab2713b0e1cb668224f7a38f31b1c9939c
|
refs/heads/main
| 2023-06-14T01:22:40.975706 | 2021-07-05T14:06:16 | 2021-07-05T14:06:16 | 383,201,815 | 0 | 0 |
MIT
| 2021-07-05T16:25:54 | 2021-07-05T16:25:53 | null |
UTF-8
|
Python
| false | false | 6,353 |
py
|
from __future__ import annotations
from logging import getLogger
from typing import (
Callable,
cast,
ClassVar,
Generic,
Iterable,
NewType,
TypeVar,
TYPE_CHECKING,
)
from rich.align import Align
from rich.console import Console, RenderableType
from rich.pretty import Pretty
from rich.panel import Panel
import rich.repr
from rich.segment import Segment
from rich.style import Style
from . import events
from ._animator import BoundAnimator
from ._context import active_app
from ._loop import loop_last
from ._line_cache import LineCache
from .message import Message
from .messages import UpdateMessage, LayoutMessage
from .message_pump import MessagePump
from .geometry import Point, Dimensions
from .reactive import Reactive
if TYPE_CHECKING:
from .app import App
from .view import View
# Distinct integer type used for Widget.id values.
WidgetID = NewType("WidgetID", int)
# Module-level logger; the "rich" logger name is used throughout this package.
log = getLogger("rich")
@rich.repr.auto
class Widget(MessagePump):
    """Base class for widgets: a message pump that can render itself and
    request repaint / layout from its parent on idle."""
    # Monotonic counter used to hand out unique widget ids.
    _id: ClassVar[int] = 0
    # Instances created per subclass; used for default names like "Button#2".
    _counts: ClassVar[dict[str, int]] = {}
    can_focus: bool = False
    def __init__(self, name: str | None = None) -> None:
        """Initialize the widget.

        Args:
            name: Optional widget name; defaults to "<ClassName>#<count>".
        """
        class_name = self.__class__.__name__
        Widget._counts.setdefault(class_name, 0)
        Widget._counts[class_name] += 1
        _count = self._counts[class_name]
        self.id: WidgetID = cast(WidgetID, Widget._id)
        Widget._id += 1
        self.name = name or f"{class_name}#{_count}"
        self.size = Dimensions(0, 0)
        self.size_changed = False
        self._repaint_required = False
        self._layout_required = False
        self._animate: BoundAnimator | None = None
        super().__init__()
    # Reactive descriptors: assigning to these schedules a repaint
    # (and a layout pass where layout=True).
    visible: Reactive[bool] = Reactive(True, layout=True)
    layout_size: Reactive[int | None] = Reactive(None)
    layout_fraction: Reactive[int] = Reactive(1)
    # NOTE(review): "minimim" looks like a typo for "minimum"; renaming the
    # attribute would break external users, so it is left unchanged.
    layout_minimim_size: Reactive[int] = Reactive(1)
    layout_offset_x: Reactive[float] = Reactive(0, layout=True)
    layout_offset_y: Reactive[float] = Reactive(0, layout=True)
    def __init_subclass__(cls, can_focus: bool = True) -> None:
        # Subclasses are focusable by default; opt out with
        # `class MyWidget(Widget, can_focus=False)`.
        super().__init_subclass__()
        cls.can_focus = can_focus
    def __rich_repr__(self) -> rich.repr.RichReprResult:
        yield "name", self.name
    def __rich__(self) -> RenderableType:
        # Allows a Widget to be printed directly by a rich Console.
        return self.render()
    @property
    def is_visual(self) -> bool:
        return True
    @property
    def app(self) -> "App":
        """Get the current app."""
        return active_app.get()
    @property
    def console(self) -> Console:
        """Get the current console."""
        return active_app.get().console
    @property
    def root_view(self) -> "View":
        """Return the top-most view."""
        return active_app.get().view
    @property
    def animate(self) -> BoundAnimator:
        # Lazily bind the app's animator to this widget on first use.
        if self._animate is None:
            self._animate = self.app.animator.bind(self)
        assert self._animate is not None
        return self._animate
    @property
    def layout_offset(self) -> tuple[int, int]:
        """Get the layout offset as a tuple."""
        return (round(self.layout_offset_x), round(self.layout_offset_y))
    def require_repaint(self) -> None:
        """Mark widget as requiring a repaint.
        Actual repaint is done by parent on idle.
        """
        self._repaint_required = True
        # The Null event wakes the message loop so on_idle runs promptly.
        self.post_message_no_wait(events.Null(self))
    def require_layout(self) -> None:
        """Mark widget as requiring a layout pass (processed on idle)."""
        self._layout_required = True
        self.post_message_no_wait(events.Null(self))
    def check_repaint(self) -> bool:
        return self._repaint_required
    def check_layout(self) -> bool:
        return self._layout_required
    def reset_check_repaint(self) -> None:
        self._repaint_required = False
    def reset_check_layout(self) -> None:
        self._layout_required = False
    def get_style_at(self, x: int, y: int) -> Style:
        """Return the style at widget-relative coordinates (x, y)."""
        offset_x, offset_y = self.root_view.get_offset(self)
        return self.root_view.get_style_at(x + offset_x, y + offset_y)
    async def forward_event(self, event: events.Event) -> None:
        await self.post_message(event)
    async def refresh(self) -> None:
        """Re-render the window and repaint it."""
        self.require_repaint()
        await self.repaint()
    async def repaint(self) -> None:
        """Instructs parent to repaint this widget."""
        await self.emit(UpdateMessage(self, self))
    async def update_layout(self) -> None:
        """Ask the parent to re-run layout for this widget."""
        await self.emit(LayoutMessage(self))
    def render(self) -> RenderableType:
        """Get renderable for widget.
        Returns:
            RenderableType: Any renderable
        """
        # Default rendering shows the widget's repr; subclasses override.
        return Panel(
            Align.center(Pretty(self), vertical="middle"), title=self.__class__.__name__
        )
    async def action(self, action: str, *params) -> None:
        await self.app.action(action, self)
    async def post_message(self, message: Message) -> bool:
        # Silently drop messages that are currently disabled for this widget.
        if not self.check_message_enabled(message):
            return True
        return await super().post_message(message)
    async def on_event(self, event: events.Event) -> None:
        # Track size changes from Resize events before normal dispatch.
        if isinstance(event, events.Resize):
            new_size = Dimensions(event.width, event.height)
            if self.size != new_size:
                self.size = new_size
                self.require_repaint()
        await super().on_event(event)
    async def on_idle(self, event: events.Idle) -> None:
        # A pending layout supersedes a pending repaint: both flags are
        # cleared and only one of update_layout/repaint is emitted.
        if self.check_layout():
            self.reset_check_repaint()
            self.reset_check_layout()
            await self.update_layout()
        elif self.check_repaint():
            self.reset_check_repaint()
            self.reset_check_layout()
            await self.repaint()
    async def focus(self) -> None:
        await self.app.set_focus(self)
    async def capture_mouse(self, capture: bool = True) -> None:
        """Capture (or release, when capture=False) mouse events for this widget."""
        await self.app.capture_mouse(self if capture else None)
    async def on_mouse_move(self, event: events.MouseMove) -> None:
        style_under_cursor = self.get_style_at(event.x, event.y)
        log.debug("%r", style_under_cursor)
    async def on_mouse_up(self, event: events.MouseUp) -> None:
        # Trigger an "@click" action if the style under the cursor defines one.
        style = self.get_style_at(event.x, event.y)
        if "@click" in style.meta:
            log.debug(style._link_id)
            await self.app.action(style.meta["@click"], default_namespace=self)
|
[
"[email protected]"
] | |
ad8df248427f7098d6463b39e0c10612baf026cc
|
807305b8aefbd7aac4f44c67deed06c059ca02d9
|
/src/stk/molecular/topology_graphs/polymer/linear/vertices.py
|
95e4ae66c4c80eab25400a4a05c7c5504fb3b81f
|
[
"MIT"
] |
permissive
|
supramolecular-toolkit/stk
|
c40103b4820c67d110cbddc7be30d9b58d85f7af
|
46f70cd000890ca7c2312cc0fdbab306565f1400
|
refs/heads/master
| 2022-11-27T18:22:25.187588 | 2022-11-16T13:23:11 | 2022-11-16T13:23:11 | 129,884,045 | 22 | 5 |
MIT
| 2019-08-19T18:16:41 | 2018-04-17T09:58:28 |
Python
|
UTF-8
|
Python
| false | false | 6,060 |
py
|
"""
Linear Polymer Vertices
=======================
"""
import logging
from ...topology_graph import Vertex
logger = logging.getLogger(__name__)
class LinearVertex(Vertex):
    """
    Represents a vertex in the middle of a linear polymer chain.
    """
    def __init__(self, id, position, flip):
        """
        Initialize a :class:`.LinearVertex` instance.
        Parameters
        ----------
        id : :class:`int`
            The id of the vertex.
        position : :class:`numpy.ndarray`
            The position of the vertex.
        flip : :class:`bool`
            If ``True`` any building block placed by the vertex will
            have its orientation along the chain flipped.
        """
        super().__init__(id, position)
        self._flip = flip
    def get_flip(self):
        """
        Return ``True`` if the vertex flips building blocks it places.
        Returns
        -------
        :class:`bool`
            ``True`` if the vertex flips building blocks it places.
        """
        return self._flip
    def clone(self):
        # Carry the flip flag over to the clone made by the base class.
        clone = super().clone()
        clone._flip = self._flip
        return clone
    def place_building_block(self, building_block, edges):
        """
        Center ``building_block`` on the vertex, rotate the vector between
        its two functional groups onto the chain (x) axis — reversed when
        ``self._flip`` is set — and return the new position matrix.
        """
        assert building_block.get_num_functional_groups() == 2, (
            f"{building_block} needs to have exactly 2 functional "
            "groups but has "
            f"{building_block.get_num_functional_groups()}."
        )
        building_block = building_block.with_centroid(
            position=self._position,
            atom_ids=building_block.get_placer_ids(),
        )
        fg1, fg2 = building_block.get_functional_groups()
        fg1_position = building_block.get_centroid(
            atom_ids=fg1.get_placer_ids(),
        )
        fg2_position = building_block.get_centroid(
            atom_ids=fg2.get_placer_ids(),
        )
        return building_block.with_rotation_between_vectors(
            start=fg2_position - fg1_position,
            target=[-1 if self._flip else 1, 0, 0],
            origin=self._position,
        ).get_position_matrix()
    def map_functional_groups_to_edges(self, building_block, edges):
        # Pair the left-most functional group with the left-most edge
        # (both ordered by x coordinate).
        fg1_id, fg2_id = self._sort_functional_groups(building_block)
        edge1_id, edge2_id = self._sort_edges(edges)
        return {
            fg1_id: edge1_id,
            fg2_id: edge2_id,
        }
    @staticmethod
    def _sort_functional_groups(building_block):
        # Return (0, 1) if fg1 lies left of fg2 on the x axis, else (1, 0).
        fg1, fg2 = building_block.get_functional_groups()
        x1, y1, z1 = building_block.get_centroid(
            atom_ids=fg1.get_placer_ids(),
        )
        x2, y2, z2 = building_block.get_centroid(
            atom_ids=fg2.get_placer_ids(),
        )
        return (0, 1) if x1 < x2 else (1, 0)
    @staticmethod
    def _sort_edges(edges):
        # Order the two edge ids by the edges' x coordinate.
        edge1, edge2 = edges
        x1, y1, z1 = edge1.get_position()
        x2, y2, z2 = edge2.get_position()
        if x1 < x2:
            return edge1.get_id(), edge2.get_id()
        else:
            return edge2.get_id(), edge1.get_id()
    def __str__(self):
        return (
            f"Vertex(id={self._id}, "
            f"position={self._position.tolist()}, "
            f"flip={self._flip})"
        )
class TerminalVertex(LinearVertex):
    """
    Represents a vertex at the end of a polymer chain.
    Do not instantiate this class directly, use :class:`.HeadVertex`
    or :class:`.TailVertex` instead.
    """
    def place_building_block(self, building_block, edges):
        # Multi-functional-group blocks with several placer atoms are
        # aligned exactly like mid-chain vertices.
        if (
            building_block.get_num_functional_groups() != 1
            and building_block.get_num_placers() > 1
        ):
            return super().place_building_block(building_block, edges)
        building_block = building_block.with_centroid(
            position=self._position,
            atom_ids=building_block.get_placer_ids(),
        )
        fg, *_ = building_block.get_functional_groups()
        fg_centroid = building_block.get_centroid(
            atom_ids=fg.get_placer_ids(),
        )
        core_centroid = building_block.get_centroid(
            atom_ids=building_block.get_core_atom_ids(),
        )
        # Point the core -> functional-group vector along the chain axis.
        return building_block.with_rotation_between_vectors(
            start=fg_centroid - core_centroid,
            # _cap_direction is defined by a subclass.
            target=[self._cap_direction, 0, 0],
            origin=self._position,
        ).get_position_matrix()
    def map_functional_groups_to_edges(self, building_block, edges):
        if building_block.get_num_functional_groups() == 2:
            # Only one of the two functional groups connects to the chain;
            # which one is picked depends on the cap direction.
            functional_groups = self._sort_functional_groups(
                building_block=building_block,
            )
            index = 1 if self._cap_direction == 1 else 0
            return {functional_groups[index]: edges[0].get_id()}
        elif building_block.get_num_functional_groups() == 1:
            return {0: edges[0].get_id()}
        else:
            raise ValueError(
                "The building block of a polymer "
                "must have 1 or 2 functional groups."
            )
class HeadVertex(TerminalVertex):
    """
    Represents a vertex at the head of a polymer chain.
    Single-functional-group caps placed here are oriented along +x
    (see :meth:`TerminalVertex.place_building_block`).
    """
    # The direction to use if the building block placed on the
    # vertex only has 1 FunctionalGroup.
    _cap_direction = 1
class TailVertex(TerminalVertex):
    """
    Represents a vertex at the tail of a polymer chain.
    Single-functional-group caps placed here are oriented along -x
    (see :meth:`TerminalVertex.place_building_block`).
    """
    # The direction to use if the building block placed on the
    # vertex only has 1 FunctionalGroup.
    _cap_direction = -1
class UnaligningVertex(LinearVertex):
    """
    Places a building block on the vertex without rotating it.
    Unlike :class:`.LinearVertex`, no alignment along the chain axis
    is performed; the block is only re-centered on the vertex.
    """
    def place_building_block(self, building_block, edges):
        # Only translate the block so its placer centroid sits on the vertex.
        centered = building_block.with_centroid(
            position=self._position,
            atom_ids=building_block.get_placer_ids(),
        )
        return centered.get_position_matrix()
    def map_functional_groups_to_edges(self, building_block, edges):
        # Functional group i simply maps to edge i, in the order given.
        mapping = {}
        for fg_id, edge in enumerate(edges):
            mapping[fg_id] = edge.get_id()
        return mapping
|
[
"[email protected]"
] | |
9afca773cc3e575a5e99270fc96821846e41becd
|
1eb7fa8b1745d4e51cefb4eceb44621862516aa6
|
/Company Interview/FB/BiggestKValuesInBST.py
|
fd6ddc0d98fe0ee5e2f7a5090dd8918d9e3db922
|
[] |
no_license
|
geniousisme/CodingInterview
|
bd93961d728f1fe266ad5edf91adc5d024e5ca48
|
a64bca9c07a7be8d4060c4b96e89d8d429a7f1a3
|
refs/heads/master
| 2021-01-10T11:15:31.305787 | 2017-03-06T00:03:13 | 2017-03-06T00:03:13 | 43,990,453 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,292 |
py
|
class TreeNode(object):
    """A binary-tree node: a value plus left/right child links."""
    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class Solution1(object):
    """Collects every value of a BST in descending order."""
    def biggestKthValues(self, root):
        """Return all node values via iterative reverse in-order traversal."""
        values = []
        pending = []
        node = root
        while node or pending:
            # Walk as far right as possible, stacking ancestors on the way.
            while node:
                pending.append(node)
                node = node.right
            node = pending.pop()
            values.append(node.val)
            node = node.left
        return values
class Solution(object):
    """Finds the k-th biggest value in a BST, iteratively."""
    def biggestKthValues(self, root, k):
        """Return the k-th largest node value (k is 1-based).
        Returns 0 when the tree has fewer than k nodes, matching the
        original behaviour of falling through the traversal.
        """
        seen = 0
        pending = []
        node = root
        while node or pending:
            # Descend right first so nodes pop in descending value order.
            while node:
                pending.append(node)
                node = node.right
            node = pending.pop()
            if seen == k - 1:
                return node.val
            seen += 1
            node = node.left
        return 0
if __name__ == "__main__":
    # Build the sample BST:
    #         9
    #       /   \
    #      5     13
    #     / \   /  \
    #    1   7 11   15
    s = Solution()
    t9 = TreeNode(9)
    t1 = TreeNode(1)
    t5 = TreeNode(5)
    t7 = TreeNode(7)
    t13 = TreeNode(13)
    t11 = TreeNode(11)
    t15 = TreeNode(15)
    t9.left = t5
    t9.right = t13
    t5.left = t1
    t5.right = t7
    t13.left = t11
    t13.right = t15
    # Python 2 print statement; prints the 3rd biggest value (11).
    print s.biggestKthValues(t9, 3)
|
[
"[email protected]"
] | |
5f52fdc03f0db7fb339060a70be115388bb1d11a
|
ed2d96ead522dd4dbd1dfdf4a6a776617f7dbcaf
|
/tutorial/settings.py
|
2ab243e3f117195473def28fa8017680ee721604
|
[] |
no_license
|
Alexmhack/django_rest_quickstart
|
ff83f435b09f6e279d17c87ea53ad5719276d1f9
|
b44be0cb8fd07d00ac8715934b1fe480e833e344
|
refs/heads/master
| 2020-04-01T06:45:04.591779 | 2018-10-14T12:22:00 | 2018-10-14T12:22:00 | 152,962,441 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,417 |
py
|
"""
Django settings for tutorial project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
from decouple import config
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# The key is loaded via python-decouple from the environment / .env file.
SECRET_KEY = config("PROJECT_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is hardcoded; load it from configuration for deploys.
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # django app
    'quickstart',
    # dependencies
    'rest_framework',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tutorial.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# REST FRAMEWORK
# Global DRF defaults: page-number pagination (10 items per page) and
# authentication required on every endpoint unless overridden per-view.
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
    'PAGE_SIZE': 10,
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    )
}
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
# NOTE(review): USE_TZ = False stores naive datetimes in local time.
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
|
[
"[email protected]"
] | |
70bc0f58d1a7260e8aa0c009c423467c33acd8a0
|
e6611443e946d1129985a95bc2dd2afc610f8292
|
/CMS/apps/task_status/migrations/0003_taskstatus_category.py
|
53b4aaa95708f0d4641fb077232356c169f2ceb3
|
[] |
no_license
|
Indus-Action/Campaign-Management-System
|
a761dd9bbc7967f8302bb3283230f87ccc2bd2a6
|
9c6f1193ff897b8cc53f2a1c3bca8d70a890e70f
|
refs/heads/master
| 2020-03-12T19:49:19.329764 | 2018-05-15T06:37:41 | 2018-05-15T06:37:41 | 130,792,314 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 694 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-09-28 06:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add TaskStatus.category, a nullable FK to TaskStatusCategory."""
    dependencies = [
        ('task_status_categories', '0001_initial'),
        ('task_status', '0002_taskstatus_desc'),
    ]
    operations = [
        migrations.AddField(
            model_name='taskstatus',
            name='category',
            # Deleting a category cascades to its task statuses.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_status', to='task_status_categories.TaskStatusCategory', null=True),
            preserve_default=False,
        ),
    ]
|
[
"[email protected]"
] | |
fcdc3c6425304d12927eedb5366284da5f8f22cc
|
67b04bf2bdfdfc8de4189a52fe431aa482c375ac
|
/example/app.py
|
531fdcb75688c64a06ec95e82a0c65b0fe75b7d9
|
[
"MIT"
] |
permissive
|
d0ugal/aioauth-client
|
2de6eeb25fd6582a34c8b144fff066d817b011db
|
6fce61642c974ede8d800e476a4a5661778a180d
|
refs/heads/develop
| 2023-04-10T03:59:04.766587 | 2020-01-22T19:46:26 | 2020-01-22T19:46:26 | 235,654,658 | 1 | 0 |
MIT
| 2023-04-04T01:21:32 | 2020-01-22T19:55:56 |
Python
|
UTF-8
|
Python
| false | false | 5,114 |
py
|
""" Aioauth-client example. """
import asyncio
from aiohttp import web
import html
from pprint import pformat
from aioauth_client import (
BitbucketClient,
FacebookClient,
GithubClient,
GoogleClient,
OAuth1Client,
TwitterClient,
YandexClient,
)
app = web.Application()
# Registry of OAuth providers: maps the provider slug used in the
# /oauth/{provider} URL to the aioauth client class and its constructor args.
# NOTE(review): the keys/secrets below are hardcoded example credentials;
# real deployments must load them from configuration, never from source.
clients = {
    'twitter': {
        'class': TwitterClient,
        'init': {
            'consumer_key': 'oUXo1M7q1rlsPXm4ER3dWnMt8',
            'consumer_secret': 'YWzEvXZJO9PI6f9w2FtwUJenMvy9SPLrHOvnNkVkc5LdYjKKup',
        },
    },
    'github': {
        'class': GithubClient,
        'init': {
            'client_id': 'b6281b6fe88fa4c313e6',
            'client_secret': '21ff23d9f1cad775daee6a38d230e1ee05b04f7c',
        },
    },
    'google': {
        'class': GoogleClient,
        'init': {
            'client_id': '150775235058-9fmas709maee5nn053knv1heov12sh4n.apps.googleusercontent.com',  # noqa
            'client_secret': 'df3JwpfRf8RIBz-9avNW8Gx7',
            'scope': 'email profile',
        },
    },
    'yandex': {
        'class': YandexClient,
        'init': {
            'client_id': 'e19388a76a824b3385f38beec67f98f1',
            'client_secret': '1d2e6fdcc23b45849def6a34b43ac2d8',
        },
    },
    'facebook': {
        'class': FacebookClient,
        'init': {
            'client_id': '384739235070641',
            'client_secret': '8e3374a4e1e91a2bd5b830a46208c15a',
            'scope': 'email'
        },
    },
    'bitbucket': {
        'class': BitbucketClient,
        'init': {
            'consumer_key': '4DKzbyW8JSbnkFyRS5',
            'consumer_secret': 'AvzZhtvRJhrEJMsGAMsPEuHTRWdMPX9z',
        },
    },
}
@asyncio.coroutine
def index(request):
    """Serve a static page linking to each configured login provider."""
    # NOTE(review): @asyncio.coroutine is deprecated (removed in Python
    # 3.11); kept here to match the rest of this example.
    return web.Response(text="""
        <ul>
            <li><a href="/oauth/bitbucket">Login with Bitbucket</a></li>
            <li><a href="/oauth/facebook">Login with Facebook</a></li>
            <li><a href="/oauth/github">Login with Github</a></li>
            <li><a href="/oauth/google">Login with Google</a></li>
            <li><a href="/oauth/twitter">Login with Twitter</a></li>
        </ul>
    """, content_type="text/html")
# Simple Github (OAuth2) example (not connected to app)
@asyncio.coroutine
def github(request):
    """Standalone Github OAuth2 flow: redirect, then exchange code for a token."""
    github = GithubClient(
        client_id='b6281b6fe88fa4c313e6',
        client_secret='21ff23d9f1cad775daee6a38d230e1ee05b04f7c',
    )
    if 'code' not in request.query:
        # First visit: send the user to Github's authorization page.
        return web.HTTPFound(github.get_authorize_url(scope='user:email'))
    # Get access token
    code = request.query['code']
    token, _ = yield from github.get_access_token(code)
    assert token
    # Get a resource `https://api.github.com/user`
    response = yield from github.request('GET', 'user')
    body = yield from response.read()
    return web.Response(body=body, content_type='application/json')
@asyncio.coroutine
def oauth(request):
    """Generic handler: redirect to the provider, then on callback exchange
    the grant for an access token and render the user's profile."""
    provider = request.match_info.get('provider')
    if provider not in clients:
        raise web.HTTPNotFound(reason='Unknown provider')
    # Create OAuth1/2 client
    Client = clients[provider]['class']
    params = clients[provider]['init']
    client = Client(**params)
    # OAuth1 names the return address "oauth_callback"; OAuth2 "redirect_uri".
    client.params['oauth_callback' if issubclass(Client, OAuth1Client) else 'redirect_uri'] = \
        'http://%s%s' % (request.host, request.path)
    # Check if is not redirect from provider
    if client.shared_key not in request.query:
        # For oauth1 we need more work
        if isinstance(client, OAuth1Client):
            token, secret, _ = yield from client.get_request_token()
            # Dirty save a token_secret
            # Dont do it in production
            request.app.secret = secret
            request.app.token = token
        # Redirect client to provider
        return web.HTTPFound(client.get_authorize_url(access_type='offline'))
    # For oauth1 we need more work
    if isinstance(client, OAuth1Client):
        client.oauth_token_secret = request.app.secret
        client.oauth_token = request.app.token
    _, meta = yield from client.get_access_token(request.query)
    user, info = yield from client.user_info()
    text = (
        "<a href='/'>back</a><br/><br/>"
        "<ul>"
        "<li>ID: {u.id}</li>"
        "<li>Username: {u.username}</li>"
        "<li>First, last name: {u.first_name}, {u.last_name}</li>"
        "<li>Gender: {u.gender}</li>"
        "<li>Email: {u.email}</li>"
        "<li>Link: {u.link}</li>"
        "<li>Picture: {u.picture}</li>"
        "<li>Country, city: {u.country}, {u.city}</li>"
        "</ul>"
    ).format(u=user)
    # html.escape keeps provider-supplied data from injecting markup.
    text += "<pre>%s</pre>" % html.escape(pformat(info))
    text += "<pre>%s</pre>" % html.escape(pformat(meta))
    return web.Response(text=text, content_type='text/html')
app.router.add_route('GET', '/', index)
app.router.add_route('GET', '/oauth/{provider}', oauth)
# Start a blocking aiohttp server on localhost:5000 (old-style asyncio setup).
loop = asyncio.get_event_loop()
f = loop.create_server(app.make_handler(), '127.0.0.1', 5000)
srv = loop.run_until_complete(f)
print('serving on', srv.sockets[0].getsockname())
try:
    loop.run_forever()
except KeyboardInterrupt:
    # Ctrl-C exits cleanly without a traceback.
    pass
# pylama:ignore=D
|
[
"[email protected]"
] | |
c422911d66d3c472a423daa9aae44836f52b2fba
|
7add1f8fc31b09bb79efd2b25cc15e23666c1d1d
|
/tfx/orchestration/portable/tfx_runner.py
|
c431cf3f36a66e468fcc48190bbaac4331fed4f7
|
[
"Apache-2.0"
] |
permissive
|
twitter-forks/tfx
|
b867e9fee9533029ca799c4a4c5d1c5430ba05fe
|
cb3561224c54a5dad4d5679165d5b3bafc8b451b
|
refs/heads/master
| 2021-11-19T18:45:09.157744 | 2021-10-19T00:02:34 | 2021-10-19T00:02:34 | 205,426,993 | 2 | 1 |
Apache-2.0
| 2021-10-18T21:03:50 | 2019-08-30T17:21:03 |
Python
|
UTF-8
|
Python
| false | false | 1,333 |
py
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition of TFX runner base class."""
import abc
from typing import Any, Optional, Union
from tfx.orchestration import pipeline as pipeline_py
from tfx.proto.orchestration import pipeline_pb2
class TfxRunner(metaclass=abc.ABCMeta):
  """Abstract base class that every TFX pipeline runner derives from.

  Concrete subclasses implement :meth:`run` for a particular execution
  platform.
  """

  @abc.abstractmethod
  def run(
      self, pipeline: Union[pipeline_pb2.Pipeline,
                            pipeline_py.Pipeline]) -> Optional[Any]:
    """Runs a TFX pipeline on a specific platform.

    Args:
      pipeline: A pipeline_pb2.Pipeline message or pipeline.Pipeline
        instance describing the pipeline to execute.

    Returns:
      An optional platform-specific object.
    """
|
[
"[email protected]"
] | |
1dc0356232f83b9f82596add14362a858c4e3774
|
1678abd4c1efb74993745b55bf5a5536c2205417
|
/forum/migrations/0010_auto_20200414_2322.py
|
59d3916c979c2e05ef688ccf22bdbfbb16dbbdc9
|
[] |
no_license
|
samosky123/Django-Forum
|
7b8868338d09d4a02b61717454adb2297cafc44e
|
c1ee3c9261479ebf8039c3a6fc9a3aba06d2c870
|
refs/heads/master
| 2023-07-29T09:45:19.810265 | 2020-11-13T16:05:42 | 2020-11-13T16:05:42 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 503 |
py
|
# Generated by Django 2.2 on 2020-04-14 17:22
from django.db import migrations
class Migration(migrations.Migration):
    """Rename Answer.downvote/upvote to the plural downvotes/upvotes."""
    dependencies = [
        ('forum', '0009_auto_20200414_2313'),
    ]
    operations = [
        migrations.RenameField(
            model_name='answer',
            old_name='downvote',
            new_name='downvotes',
        ),
        migrations.RenameField(
            model_name='answer',
            old_name='upvote',
            new_name='upvotes',
        ),
    ]
|
[
"[email protected]"
] | |
682e6e9bf096cd8bc9cecd1f5476499372f6c040
|
61dcd9b485bc5e6d07c4adf14f138eabaa9a23b5
|
/Own practice/2.2-2.10/2.8.py
|
a4281af24752c551736fa11d1a5726365e91a315
|
[] |
no_license
|
bong1915016/Introduction-to-Programming-Using-Python
|
d442d2252d13b731f6cd9c6356032e8b90aba9a1
|
f23e19963183aba83d96d9d8a9af5690771b62c2
|
refs/heads/master
| 2020-09-25T03:09:34.384693 | 2019-11-28T17:33:28 | 2019-11-28T17:33:28 | 225,904,132 | 1 | 0 | null | 2019-12-04T15:56:55 | 2019-12-04T15:56:54 | null |
UTF-8
|
Python
| false | false | 872 |
py
|
"""
程式設計練習題 2.2-2.10 2.8 計算能量.
請撰寫一程式,計算從起始溫度到最後溫度時熱水所需的能量。程式提示使用者數入多少公斤的水、起始溫度
及最後溫度。計算能量的公式如下:
Q = M * (finalTemperature - initialTemperature) * 4184
此處的M逝水的公斤數,溫度是攝氏溫度,而Q是以焦耳(joules)來衡量的能量。
以下是範例輸出的樣本:
```
Enter the amount of water in kilograms: 55.5
Enter the initial temperature: 3.5
Enter the final Temperature:10.5
The energy needed is 1625484.0
```
"""
M = eval(input("Enter the amount of water in kilograms:"))
initialTemperature = eval(input("Enter the initial temperature:"))
finalTemperature = eval(input("Enter the final Temperature:"))
Q = M * (finalTemperature - initialTemperature) * 4184
print("The energy needed is", Q)
|
[
"[email protected]"
] | |
4b4722dc364c71697c33815091831aec2badb373
|
0115cfe0ca89264d3e25616943c3437d24ac0497
|
/pyx/finance/finance.py
|
56f41561431419e6dbb05819a4c64021703e836c
|
[] |
no_license
|
shakfu/polylab
|
9024918681fe4807b4e5e2da4bba04453566bae1
|
9dce4d30120981e34bbbbc6f2caaff6e16a6cfbd
|
refs/heads/master
| 2023-08-18T05:41:01.786936 | 2023-07-30T22:36:52 | 2023-07-30T22:36:52 | 62,841,098 | 3 | 0 | null | 2022-04-21T22:25:43 | 2016-07-07T22:08:47 |
C
|
UTF-8
|
Python
| false | false | 4,309 |
py
|
#!/usr/bin/env python
'''
A set of functions for quick financial analysis of an investment
opportunity and a series of projected cashflows.
For further details and pros/cons of each function please refer
to the respective wikipedia page:
payback_period
http://en.wikipedia.org/wiki/Payback_period
net present value
http://en.wikipedia.org/wiki/Net_present_value
internal rate of return
http://en.wikipedia.org/wiki/Internal_rate_of_return
'''
import sys
def payback_of_investment(investment, cashflows):
    """The payback period refers to the length of time required
    for an investment to have its initial cost recovered.

    Raises Exception when the cashflows never recover the investment.

    >>> payback_of_investment(200.0, [60.0, 60.0, 70.0, 90.0])
    3.1111111111111112
    """
    if not cashflows or (sum(cashflows) < investment):
        raise Exception("insufficient cashflows")
    total, years, cumulative = 0.0, 0, []
    for cashflow in cashflows:
        total += cashflow
        if total < investment:
            years += 1
        cumulative.append(total)
    # Amount recovered before the payback year. When the very first
    # cashflow already covers the investment, years == 0 and nothing has
    # been recovered yet (the original indexed cumulative[-1] here, which
    # read the *last* cumulative value and produced wrong results, or a
    # ZeroDivisionError when the investment was recovered exactly).
    recovered = cumulative[years - 1] if years > 0 else 0.0
    fraction = (investment - recovered) / (cumulative[years] - recovered)
    return years + fraction
def payback(cashflows):
    """The payback period refers to the length of time required
    for an investment to have its initial cost recovered.
    (This version accepts a list of cashflows, where element 0 is the
    initial investment, usually negative.)

    >>> payback([-200.0, 60.0, 60.0, 70.0, 90.0])
    3.1111111111111112
    """
    initial, inflows = cashflows[0], cashflows[1:]
    return payback_of_investment(abs(initial), inflows)
def npv(rate, cashflows):
    """The total present value of a time series of cash flows.
    Each cashflow at period i is discounted by (1 + rate) ** i.

    >>> npv(0.1, [-100.0, 60.0, 60.0, 60.0])
    49.211119459053322
    """
    return sum(
        cashflow / (1 + rate) ** period
        for period, cashflow in enumerate(cashflows)
    )
def irr(cashflows, iterations=100):
    """The IRR or Internal Rate of Return is the annualized effective
    compounded return rate which can be earned on the invested
    capital, i.e., the yield on the investment.

    Found by fixed-point iteration: the rate is rescaled until the NPV
    of the cashflows (relative to the initial investment) vanishes.

    >>> irr([-100.0, 60.0, 60.0, 60.0])
    0.36309653947517645
    """
    investment = cashflows[0]
    rate = 1.0
    remaining = iterations
    while remaining > 0:
        rate *= 1 - npv(rate, cashflows) / investment
        remaining -= 1
    return rate
def investment_analysis(discount_rate, cashflows):
    """Provides summary investment analysis on a list of cashflows
    and a discount_rate.
    Assumes that the first element of the list (i.e. at period 0)
    is the initial investment with a negative float value.
    Prints payback period, IRR, NPV and an approve/reject verdict.
    """
    _npv = npv(discount_rate, cashflows)
    # Build a (year, cashflow) string table; header tuple comes first.
    ts = [('year', 'cashflow')] + [(str(x), str(y)) for (x,y) in zip(
        range(len(cashflows)), cashflows)]
    print "-" * 70
    # Python 2 print statements; the trailing comma keeps each row on one
    # line, and the padding aligns years over their cashflow values.
    for y,c in ts:
        print y + (len(c) - len(y) + 1)*' ',
    print
    for y,c in ts:
        print c + ' ',
    print
    print
    print "Discount Rate: %.1f%%" % (discount_rate * 100)
    print
    print "Payback: %.2f years" % payback(cashflows)
    print " IRR: %.2f%%" % (irr(cashflows) * 100)
    print " NPV: %s" % _npv
    print
    # A positive NPV at the given discount rate means the investment adds value.
    print "==> %s investment of %s" % (
        ("Approve" if _npv > 0 else "Do Not Approve"), str(-cashflows[0]))
    print "-" * 70
def main(inputs):
    """commandline entry point
    """
    # Usage text is the only user-facing documentation; printed verbatim
    # when required arguments are missing.
    usage = '''Provides analysis of an investment and a series of cashflows.
    usage: invest discount_rate [cashflow0, cashflow1, ..., cashflowN]
    where
    discount_rate is the rate used to discount future cashflows
    to their present values
    cashflow0 is the investment amount (always a negative value)
    cashflow1 .. cashflowN values can be positive (net inflows)
    or
    negative (net outflows)
    for example:
    invest 0.05 -10000 6000 6000 6000
    '''
    try:
        rate, cashflows = inputs[0], inputs[1:]
        investment_analysis(float(rate), [float(c) for c in cashflows])
    except IndexError:
        # No arguments supplied; fall through to the usage message.
        # NOTE(review): a ValueError from float() on malformed input is
        # NOT caught here and will propagate as a traceback.
        print usage
    # NOTE(review): exits unconditionally, even after a successful run.
    sys.exit()
# Runs immediately on import as well as execution; no __main__ guard.
main(sys.argv[1:])
|
[
"[email protected]"
] | |
ec63b954fd448cd482cec2bfb15b88afbea89cc4
|
c3ff891e0e23c5f9488508d30349259cc6b64b4d
|
/python练习/基础代码/Demo33.py
|
20ad574ee408810bd6658c854a8dd2e8ce4e4a44
|
[] |
no_license
|
JacksonMike/python_exercise
|
2af2b8913ec8aded8a17a98aaa0fc9c6ccd7ba53
|
7698f8ce260439abb3cbdf478586fa1888791a61
|
refs/heads/master
| 2020-07-14T18:16:39.265372 | 2019-08-30T11:56:29 | 2019-08-30T11:56:29 | 205,370,953 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 157 |
py
|
# Basic dict CRUD demo (comments translated from Chinese).
infor = {"name":"Jim"}
infor["age"] = 19  # add: assigning to a missing key inserts it
infor["QQ"] = 10086
infor["QQ"] = 10085  # update: assigning to an existing key overwrites it
del infor["QQ"]  # delete: removes the key (raises KeyError if absent)
print(infor.get("name"))  # lookup: .get() returns None instead of raising
a = {}
|
[
"[email protected]"
] | |
a2397630ed41926dd03f160daaf34fd7b95a8670
|
45ab4c22d918dc4390572f53c267cf60de0d68fb
|
/src/Analysis/Engine/Impl/Typeshed/third_party/2and3/werkzeug/_compat.pyi
|
74981331c7de5221f3fe7114e32b5f8d3c300296
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
sourcegraph/python-language-server
|
580a24fd15fe9d4abeb95e9333d61db1c11a2670
|
64eae156f14aa14642afcac0e7edaf5d7c6d1a1c
|
refs/heads/master
| 2023-04-09T21:17:07.555979 | 2018-12-06T23:25:05 | 2018-12-06T23:25:05 | 155,174,256 | 2 | 2 |
Apache-2.0
| 2018-10-29T08:06:49 | 2018-10-29T08:06:49 | null |
UTF-8
|
Python
| false | false | 1,280 |
pyi
|
# Type stubs for werkzeug._compat: Python 2/3 compatibility shims.
# Most names are typed as Any because their concrete definitions differ
# between interpreter versions.
import sys
from typing import Any
if sys.version_info < (3,):
    # NOTE(review): on Python 2 this binds the StringIO *module*; on
    # Python 3 the io.StringIO *class*. The two branches expose different
    # objects under the same name -- mirrors the runtime shim's shape.
    import StringIO as BytesIO
else:
    from io import StringIO as BytesIO
PY2 = ...  # type: Any
WIN = ...  # type: Any
unichr = ...  # type: Any
text_type = ...  # type: Any
string_types = ...  # type: Any
integer_types = ...  # type: Any
iterkeys = ...  # type: Any
itervalues = ...  # type: Any
iteritems = ...  # type: Any
iterlists = ...  # type: Any
iterlistvalues = ...  # type: Any
int_to_byte = ...  # type: Any
iter_bytes = ...  # type: Any
def fix_tuple_repr(obj): ...
def implements_iterator(cls): ...
def implements_to_string(cls): ...
def native_string_result(func): ...
def implements_bool(cls): ...
range_type = ...  # type: Any
NativeStringIO = ...  # type: Any
def make_literal_wrapper(reference): ...
def normalize_string_tuple(tup): ...
def try_coerce_native(s): ...
wsgi_get_bytes = ...  # type: Any
def wsgi_decoding_dance(s, charset='', errors=''): ...
def wsgi_encoding_dance(s, charset='', errors=''): ...
def to_bytes(x, charset=..., errors=''): ...
def to_native(x, charset=..., errors=''): ...
def reraise(tp, value, tb=None): ...
imap = ...  # type: Any
izip = ...  # type: Any
ifilter = ...  # type: Any
def to_unicode(x, charset=..., errors='', allow_none_charset=False): ...
|
[
"[email protected]"
] | |
d911b045663d565be92524dcbdeb0dee537c4ee8
|
a72106acf426859b49be66ec7a1d209d8ffb59d1
|
/importer/indico_importer/converter.py
|
f8b138c0a1ca528382992688f5347a4d08c1ba43
|
[
"MIT"
] |
permissive
|
indico/indico-plugins-attic
|
12502c891805e092b936c42a779fa9c667ee23d6
|
64a6bffe4dc7e30e2874dd4d6aac9908038910f1
|
refs/heads/master
| 2021-06-23T03:51:21.500524 | 2021-03-17T10:35:24 | 2021-03-17T10:35:24 | 201,440,329 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,001 |
py
|
# This file is part of the Indico plugins.
# Copyright (C) 2002 - 2020 CERN
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
# Sentinel destination key: merge the converted value into the result
# dict instead of storing it under a key.
APPEND = object()


class RecordConverter(object):
    """Translate dictionaries (or lists of them) into new dictionaries.

    Each subclass declares ``conversion``, a list of rules of the form::

        (source_key [, destination_key [, conversion_function [, converter]]])

    * ``source_key`` -- key read from the input dict; missing keys are
      silently skipped.
    * ``destination_key`` -- key written to the output dict; defaults to
      ``source_key``. The special value ``APPEND`` merges the converted
      value (a dict, or list of dicts) directly into the output dict.
    * ``conversion_function`` -- applied to the source value; defaults to
      :meth:`default_conversion_method` (identity).
    * ``converter`` -- optional nested :class:`RecordConverter` applied
      to the converted value.
    """

    conversion = []

    @staticmethod
    def default_conversion_method(attr):
        """Identity conversion used when a rule supplies no function."""
        return attr

    @classmethod
    def convert(cls, record):
        """Convert a dict or list of dicts into a *list* of converted dicts."""
        records = record if isinstance(record, list) else [record]
        return [cls._convert(item) for item in records]

    @classmethod
    def _convert_internal(cls, record):
        """Like :meth:`convert`, but a single dict stays un-wrapped.

        Used when this converter is nested inside another rule.
        """
        if isinstance(record, list):
            return [cls._convert(item) for item in record]
        return cls._convert(record)

    @classmethod
    def _convert(cls, record):
        """Translate one source dict according to ``cls.conversion``."""
        if not record:
            return {}
        result = {}
        for rule in cls.conversion:
            src_key = rule[0]
            dst_key = rule[1] if len(rule) >= 2 and rule[1] else src_key
            if len(rule) >= 3 and rule[2]:
                method = rule[2]
            else:
                method = cls.default_conversion_method
            nested = rule[3] if len(rule) >= 4 else None
            try:
                # Both the key lookup and the conversion may raise
                # KeyError; either way the rule is skipped.
                value = method(record[src_key])
            except KeyError:
                continue
            if nested:
                value = nested._convert_internal(value)
            if dst_key is APPEND:
                parts = value if isinstance(value, list) else [value]
                for part in parts:
                    result.update(part)
            else:
                result[dst_key] = value
        return result
|
[
"[email protected]"
] | |
62a430d7658748dc827ca7a1a71a21975277174b
|
2e70b3ce93762c5b66fba57f8b9cba37aacf0702
|
/new/account/migrations/0005_auto_20190528_1610.py
|
184bec8ce2a97ea9516a3f76e5495f0cfbb17c49
|
[] |
no_license
|
mahidul-islam/jamah
|
02be511fe119e8934ec7d5aa1eaa8e2b24fad246
|
c8ddf9a8094d33e8b1d6cb834eab3d9f18b1a9ea
|
refs/heads/master
| 2022-05-13T15:11:38.609550 | 2019-06-08T04:52:09 | 2019-06-08T04:52:09 | 184,331,276 | 2 | 0 | null | 2022-04-22T21:27:18 | 2019-04-30T21:04:06 |
Python
|
UTF-8
|
Python
| false | false | 541 |
py
|
# Generated by Django 2.2.1 on 2019-05-28 16:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.1: makes Transaction.comes_from a
    # nullable/blank FK to account.Account (cascade delete, reverse name
    # 'transaction_outs'). Do not hand-edit applied migrations.

    dependencies = [
        ('account', '0004_account_mother_account'),
    ]

    operations = [
        migrations.AlterField(
            model_name='transaction',
            name='comes_from',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='transaction_outs', to='account.Account'),
        ),
    ]
|
[
"[email protected]"
] | |
1110f7e0dacac9ef0b6b69c736d03aa57d46b364
|
006341ca12525aa0979d6101600e78c4bd9532ab
|
/CMS/Zope-3.2.1/Dependencies/zope.component-Zope-3.2.1/zope.component/bbb/utility.py
|
f626f6c3e1a4329b351d849e1924758ce526722a
|
[
"ZPL-2.1",
"Python-2.0",
"ICU",
"LicenseRef-scancode-public-domain",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"ZPL-2.0"
] |
permissive
|
germanfriday/code-examples-sandbox
|
d0f29e20a3eed1f8430d06441ac2d33bac5e4253
|
4c538584703754c956ca66392fdcecf0a0ca2314
|
refs/heads/main
| 2023-05-30T22:21:57.918503 | 2021-06-15T15:06:47 | 2021-06-15T15:06:47 | 377,200,448 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,507 |
py
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""utility service
$Id: utility.py 38178 2005-08-30 21:50:19Z mj $
"""
from zope.component.exceptions import Invalid, ComponentLookupError
from zope.component.interfaces import IUtilityService, IRegistry
from zope.component.service import GlobalService, IService, IServiceDefinition
from zope.component.site import UtilityRegistration
import zope.interface
class IGlobalUtilityService(IUtilityService, IRegistry):
    # zope.interface declaration: methods are specified without `self`.
    def provideUtility(providedInterface, component, name='', info=''):
        """Provide a utility
        A utility is a component that provides an interface.
        """
class UtilityService(object):
    """Provide IUtilityService

    Mixin that superimposes utility management on adapter registry
    implementation.  Unknown attribute access is delegated first to the
    wrapped site manager, then to its utilities registry.
    """

    def __init__(self, sitemanager=None):
        """Wrap *sitemanager*; default to a fresh GlobalSiteManager."""
        self.__parent__ = None
        if sitemanager is None:
            from zope.component.site import GlobalSiteManager
            sitemanager = GlobalSiteManager()
        self.sm = sitemanager

    def __getattr__(self, name):
        """Delegate to ``self.sm``, then ``self.sm.utilities``.

        Bug fix: the original used two-argument getattr(), which raises
        AttributeError itself on a missing attribute, so the utilities
        fallback (and the final raise) were unreachable -- and attributes
        whose value was None were wrongly treated as missing.  Using a
        sentinel default restores the intended fallback chain.
        """
        missing = object()
        attr = getattr(self.sm, name, missing)
        if attr is not missing:
            return attr
        attr = getattr(self.sm.utilities, name, missing)
        if attr is not missing:
            return attr
        raise AttributeError(name)
class GlobalUtilityService(UtilityService, GlobalService):
    zope.interface.implementsOnly(IGlobalUtilityService)

    def __init__(self, sitemanager=None):
        """Initialise with an explicit site manager, or the global one."""
        super(GlobalUtilityService, self).__init__(sitemanager)

    def provideUtility(self, providedInterface, component, name='', info=''):
        """Register *component* as a utility providing *providedInterface*."""
        self.sm.provideUtility(providedInterface, component, name, info)

    def registrations(self):
        """Yield utility registrations, skipping internal service entries."""
        internal = (IService, IServiceDefinition)
        for registration in self.sm.registrations():
            if not isinstance(registration, UtilityRegistration):
                continue
            if registration.provided in internal:
                continue
            yield registration
|
[
"[email protected]"
] | |
244ecb3d7cda2b212c28968b72151583aa73ab22
|
7fb87945b77d3adaedd8a155c981e97946734e41
|
/packstack/plugins/amqp_002.py
|
bc822100bc810d982c6734ba0f87cfae7797e907
|
[
"Apache-2.0"
] |
permissive
|
Tony910517/openstack
|
916b36368ea9f17958e4eb04bd1f9daf3aba9213
|
4c1380a03c37e7950dcf2bba794e75b7e4a8dfd0
|
refs/heads/master
| 2020-05-20T01:05:22.499224 | 2019-05-07T01:11:05 | 2019-05-07T01:11:05 | 185,292,662 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,981 |
py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Installs and configures AMQP
"""
from packstack.installer import basedefs
from packstack.installer import validators
from packstack.installer import processors
from packstack.installer import utils
from packstack.modules.common import filtered_hosts
from packstack.modules.documentation import update_params_usage
from packstack.modules.ospluginutils import appendManifestFile
from packstack.modules.ospluginutils import createFirewallResources
from packstack.modules.ospluginutils import getManifestTemplate
from packstack.modules.ospluginutils import generate_ssl_cert
# ------------- AMQP Packstack Plugin Initialization --------------
PLUGIN_NAME = "AMQP"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')
def initConfig(controller):
    """Register the AMQP configuration parameters with packstack.

    Three groups are added: the base AMQP options, SSL options (prompted
    only when CONFIG_AMQP_ENABLE_SSL is 'y') and authentication options
    (prompted only when CONFIG_AMQP_ENABLE_AUTH is 'y').
    """
    # --- base AMQP options -------------------------------------------
    params = [
        {"CMD_OPTION": "amqp-backend",
         "PROMPT": "Set the AMQP service backend",
         "OPTION_LIST": ["rabbitmq"],
         "VALIDATORS": [validators.validate_options],
         "DEFAULT_VALUE": "rabbitmq",
         "MASK_INPUT": False,
         "LOOSE_VALIDATION": False,
         "CONF_NAME": "CONFIG_AMQP_BACKEND",
         "USE_DEFAULT": False,
         "NEED_CONFIRM": False,
         "CONDITION": False,
         "DEPRECATES": ['CONFIG_AMQP_SERVER']},

        {"CMD_OPTION": "amqp-host",
         "PROMPT": "Enter the host for the AMQP service",
         "OPTION_LIST": [],
         "VALIDATORS": [validators.validate_ssh],
         "DEFAULT_VALUE": utils.get_localhost_ip(),
         "MASK_INPUT": False,
         "LOOSE_VALIDATION": True,
         "CONF_NAME": "CONFIG_AMQP_HOST",
         "USE_DEFAULT": False,
         "NEED_CONFIRM": False,
         "CONDITION": False},

        {"CMD_OPTION": "amqp-enable-ssl",
         "PROMPT": "Enable SSL for the AMQP service?",
         "OPTION_LIST": ["y", "n"],
         "VALIDATORS": [validators.validate_options],
         "DEFAULT_VALUE": "n",
         "MASK_INPUT": False,
         "LOOSE_VALIDATION": False,
         "CONF_NAME": "CONFIG_AMQP_ENABLE_SSL",
         "USE_DEFAULT": False,
         "NEED_CONFIRM": False,
         "CONDITION": False},

        {"CMD_OPTION": "amqp-enable-auth",
         "PROMPT": "Enable Authentication for the AMQP service?",
         "OPTION_LIST": ["y", "n"],
         "VALIDATORS": [validators.validate_options],
         "DEFAULT_VALUE": "n",
         "MASK_INPUT": False,
         "LOOSE_VALIDATION": False,
         "CONF_NAME": "CONFIG_AMQP_ENABLE_AUTH",
         "USE_DEFAULT": False,
         "NEED_CONFIRM": False,
         "CONDITION": False},
    ]
    update_params_usage(basedefs.PACKSTACK_DOC, params, sectioned=False)
    group = {"GROUP_NAME": "AMQP",
             "DESCRIPTION": "AMQP Config parameters",
             "PRE_CONDITION": False,
             "PRE_CONDITION_MATCH": True,
             "POST_CONDITION": False,
             "POST_CONDITION_MATCH": True}
    controller.addGroup(group, params)

    # --- SSL options (asked only when SSL is enabled) ----------------
    params = [
        {"CMD_OPTION": "amqp-nss-certdb-pw",
         "PROMPT": "Enter the password for NSS certificate database",
         "OPTION_LIST": [],
         "VALIDATORS": [validators.validate_not_empty],
         "DEFAULT_VALUE": "PW_PLACEHOLDER",
         "PROCESSORS": [processors.process_password],
         "MASK_INPUT": True,
         "LOOSE_VALIDATION": True,
         "CONF_NAME": "CONFIG_AMQP_NSS_CERTDB_PW",
         "USE_DEFAULT": False,
         "NEED_CONFIRM": True,
         "CONDITION": False},
    ]
    update_params_usage(basedefs.PACKSTACK_DOC, params, sectioned=False)
    group = {"GROUP_NAME": "AMQPSSL",
             "DESCRIPTION": "AMQP Config SSL parameters",
             "PRE_CONDITION": "CONFIG_AMQP_ENABLE_SSL",
             "PRE_CONDITION_MATCH": "y",
             "POST_CONDITION": False,
             "POST_CONDITION_MATCH": True}
    controller.addGroup(group, params)

    # --- authentication options (asked only when auth is enabled) ----
    params = [
        {"CMD_OPTION": "amqp-auth-user",
         "PROMPT": "Enter the user for amqp authentication",
         "OPTION_LIST": [],
         "VALIDATORS": [validators.validate_not_empty],
         "DEFAULT_VALUE": "amqp_user",
         "MASK_INPUT": False,
         "LOOSE_VALIDATION": True,
         "CONF_NAME": "CONFIG_AMQP_AUTH_USER",
         "USE_DEFAULT": False,
         "NEED_CONFIRM": False,
         "CONDITION": False},

        {"CMD_OPTION": "amqp-auth-password",
         "PROMPT": "Enter the password for user authentication",
         "OPTION_LIST": ["y", "n"],
         "VALIDATORS": [validators.validate_not_empty],
         "PROCESSORS": [processors.process_password],
         "DEFAULT_VALUE": "PW_PLACEHOLDER",
         "MASK_INPUT": True,
         "LOOSE_VALIDATION": True,
         "CONF_NAME": "CONFIG_AMQP_AUTH_PASSWORD",
         "USE_DEFAULT": False,
         "NEED_CONFIRM": True,
         "CONDITION": False},
    ]
    update_params_usage(basedefs.PACKSTACK_DOC, params, sectioned=False)
    group = {"GROUP_NAME": "AMQPAUTH",
             "DESCRIPTION": "AMQP Config Athentication parameters",
             "PRE_CONDITION": "CONFIG_AMQP_ENABLE_AUTH",
             "PRE_CONDITION_MATCH": "y",
             "POST_CONDITION": False,
             "POST_CONDITION_MATCH": True}
    controller.addGroup(group, params)
def initSequences(controller):
    """Register the AMQP installation sequence with the controller."""
    steps = [{'title': 'Adding AMQP manifest entries',
              'functions': [create_manifest]}]
    controller.addSequence("Installing AMQP", [], [], steps)
# ------------------------ step functions -------------------------
def create_manifest(config, messages):
    """Build the per-host AMQP puppet manifest and firewall rules.

    Mutates *config* in place: fills in SSL/auth defaults, client port,
    protocol and host URL before rendering the 'amqp' template.
    """
    # NOTE(review): 'server' is created but never used in this function.
    server = utils.ScriptRunner(config['CONFIG_AMQP_HOST'])
    if config['CONFIG_AMQP_ENABLE_SSL'] == 'y':
        # SSL path: clients connect on 5671 over TLS; generate a
        # host-specific certificate signed by the packstack CA.
        config['CONFIG_AMQP_SSL_ENABLED'] = True
        config['CONFIG_AMQP_PROTOCOL'] = 'ssl'
        config['CONFIG_AMQP_CLIENTS_PORT'] = "5671"
        amqp_host = config['CONFIG_AMQP_HOST']
        service = 'AMQP'
        ssl_key_file = '/etc/pki/tls/private/ssl_amqp.key'
        ssl_cert_file = '/etc/pki/tls/certs/ssl_amqp.crt'
        cacert = config['CONFIG_AMQP_SSL_CACERT_FILE'] = (
            config['CONFIG_SSL_CACERT']
        )
        generate_ssl_cert(config, amqp_host, service, ssl_key_file,
                          ssl_cert_file)
    else:
        # Set default values
        config['CONFIG_AMQP_CLIENTS_PORT'] = "5672"
        config['CONFIG_AMQP_SSL_ENABLED'] = False
        config['CONFIG_AMQP_PROTOCOL'] = 'tcp'

    if config['CONFIG_AMQP_ENABLE_AUTH'] == 'n':
        # Fall back to the broker's default guest/guest credentials.
        config['CONFIG_AMQP_AUTH_PASSWORD'] = 'guest'
        config['CONFIG_AMQP_AUTH_USER'] = 'guest'

    manifestfile = "%s_amqp.pp" % config['CONFIG_AMQP_HOST']
    manifestdata = getManifestTemplate('amqp')

    # IPv6 literals must be bracketed when embedded in URLs.
    if config['CONFIG_IP_VERSION'] == 'ipv6':
        config['CONFIG_AMQP_HOST_URL'] = "[%s]" % config['CONFIG_AMQP_HOST']
    else:
        config['CONFIG_AMQP_HOST_URL'] = config['CONFIG_AMQP_HOST']

    fw_details = dict()
    # All hosts should be able to talk to amqp
    for host in filtered_hosts(config, exclude=False):
        key = "amqp_%s" % host
        fw_details.setdefault(key, {})
        fw_details[key]['host'] = "%s" % host
        fw_details[key]['service_name'] = "amqp"
        fw_details[key]['chain'] = "INPUT"
        # Open both the plain (5672) and TLS (5671) ports regardless of
        # which one is active.
        fw_details[key]['ports'] = ['5671', '5672']
        fw_details[key]['proto'] = "tcp"
    config['FIREWALL_AMQP_RULES'] = fw_details

    manifestdata += createFirewallResources('FIREWALL_AMQP_RULES')
    appendManifestFile(manifestfile, manifestdata, 'pre')
|
[
"[email protected]"
] | |
e71f0a615ae491bb9857459804dafdee895970ae
|
fd5bc0e8a3ac2b7ba793287084f725a8cd10b5ef
|
/tests/bench/loadrelated.py
|
9942ed0b1e2b9a0ae4d8a8c7c923e95f0e30e58e
|
[
"BSD-3-Clause"
] |
permissive
|
moyaya/python-stdnet
|
404cb645b80c59b08ce4506480ce897c24032dcd
|
8d6c41ba1ddb8024e6bfab859f99bf96966d04cf
|
refs/heads/master
| 2021-01-24T01:00:18.203118 | 2012-01-13T18:23:20 | 2012-01-13T18:23:20 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 571 |
py
|
'''Benchmark load realted.'''
from stdnet import test, transaction
from stdnet.utils import populate, zip
from examples.data import FinanceTest, Instrument, Fund, Position
class LoadRelatedTest(FinanceTest):
    """Benchmark accessing related instruments with and without
    load_related() prefetching."""

    def setUp(self):
        self.data.makePositions()

    def testLoad(self):
        # Related instrument is fetched lazily on each access.
        for position in Position.objects.all():
            self.assertTrue(position.instrument.name)

    def testLoadRelated(self):
        # load_related() pulls instruments in with the initial query.
        query = Position.objects.all().load_related('instrument')
        for position in query:
            self.assertTrue(position.instrument.name)
|
[
"[email protected]"
] | |
a5ae930e8fe263663440b7fda29bd5a056e44d78
|
b589f3997e790c3760ab6ddce1dd1b7813cfab3a
|
/232.py
|
e2c5834920299064245bc4ccf2a5c4e5fe64f1ff
|
[] |
no_license
|
higsyuhing/leetcode_easy
|
56ceb2aab31f7c11671d311552aaf633aadd14a8
|
48d516fdbb086d697e2593a9ce1dbe6f40c3c701
|
refs/heads/master
| 2022-12-04T00:49:33.894066 | 2022-11-15T20:44:36 | 2022-11-15T20:44:36 | 135,224,120 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,159 |
py
|
class MyQueue(object):
    """FIFO queue built from two LIFO stacks (LeetCode 232).

    Fixes over the original: the Python 2 ``print`` statements are
    replaced with the print() function, and the eager full transfer on
    every push/pop direction change (O(n) per operation) is replaced by
    the standard amortized scheme -- elements move from the in-stack to
    the out-stack only when the out-stack is empty, so each element is
    moved at most once and all operations are amortized O(1).

    Legacy behavior preserved: pop()/peek() on an empty queue print
    "Error! " and return 0 instead of raising.
    """

    def __init__(self):
        """
        Initialize your data structure here.
        """
        self._in = []    # receives push()es; back of the queue on top
        self._out = []   # serves pop()/peek(); front of the queue on top

    def push(self, x):
        """
        Push element x to the back of queue.
        :type x: int
        :rtype: void
        """
        self._in.append(x)

    def _refill(self):
        # Move everything to the out-stack when it runs dry; reversing
        # the stack order turns LIFO into FIFO.
        if not self._out:
            while self._in:
                self._out.append(self._in.pop())

    def pop(self):
        """
        Removes the element from in front of queue and returns that element.
        :rtype: int
        """
        if self.empty():
            print("Error! ")
            return 0
        self._refill()
        return self._out.pop()

    def peek(self):
        """
        Get the front element.
        :rtype: int
        """
        if self.empty():
            print("Error! ")
            return 0
        self._refill()
        return self._out[-1]

    def empty(self):
        """
        Returns whether the queue is empty.
        :rtype: bool
        """
        return not self._in and not self._out
# Your MyQueue object will be instantiated and called as such:
# obj = MyQueue()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.peek()
# param_4 = obj.empty()
|
[
"[email protected]"
] | |
57cd82ee8cf61947cac176ab1e3935c3582c06d2
|
bc2a96e8b529b0c750f6bc1d0424300af9743904
|
/acapy_client/models/credential_definition_send_request.py
|
8d1879bd302faeaabbeac1ec17a5fbce0eddf4c4
|
[
"Apache-2.0"
] |
permissive
|
TimoGlastra/acapy-client
|
d091fd67c97a57f2b3462353459780281de51281
|
d92ef607ba2ff1152ec15429f2edb20976991424
|
refs/heads/main
| 2023-06-29T22:45:07.541728 | 2021-08-03T15:54:48 | 2021-08-03T15:54:48 | 396,015,854 | 1 | 0 |
Apache-2.0
| 2021-08-14T13:22:28 | 2021-08-14T13:22:27 | null |
UTF-8
|
Python
| false | false | 2,441 |
py
|
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..types import UNSET, Unset
T = TypeVar("T", bound="CredentialDefinitionSendRequest")
@attr.s(auto_attribs=True)
class CredentialDefinitionSendRequest:
    """Auto-generated API model: credential-definition send request.

    All fields are optional (UNSET when absent); unknown JSON keys are
    preserved in ``additional_properties`` and round-tripped by
    to_dict()/from_dict().
    """

    revocation_registry_size: Union[Unset, int] = UNSET
    schema_id: Union[Unset, str] = UNSET
    support_revocation: Union[Unset, bool] = UNSET
    tag: Union[Unset, str] = UNSET
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting UNSET fields."""
        revocation_registry_size = self.revocation_registry_size
        schema_id = self.schema_id
        support_revocation = self.support_revocation
        tag = self.tag

        field_dict: Dict[str, Any] = {}
        # Unknown keys first so known fields win on name collision.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if revocation_registry_size is not UNSET:
            field_dict["revocation_registry_size"] = revocation_registry_size
        if schema_id is not UNSET:
            field_dict["schema_id"] = schema_id
        if support_revocation is not UNSET:
            field_dict["support_revocation"] = support_revocation
        if tag is not UNSET:
            field_dict["tag"] = tag

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become
        additional_properties."""
        d = src_dict.copy()
        revocation_registry_size = d.pop("revocation_registry_size", UNSET)

        schema_id = d.pop("schema_id", UNSET)

        support_revocation = d.pop("support_revocation", UNSET)

        tag = d.pop("tag", UNSET)

        credential_definition_send_request = cls(
            revocation_registry_size=revocation_registry_size,
            schema_id=schema_id,
            support_revocation=support_revocation,
            tag=tag,
        )

        credential_definition_send_request.additional_properties = d
        return credential_definition_send_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the preserved unknown keys."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
[
"[email protected]"
] | |
0bf464fb6204343b71d383b81c94bf835f6e6d58
|
58c34c597e825634fb5833b22e178df4fe570d39
|
/lib/adapter/cheat_cheat.py
|
9844008c6be19c9654ed8c373292de2a9e5132c6
|
[
"MIT",
"CC-BY-SA-3.0"
] |
permissive
|
sullivant/cheat.sh
|
2eb731eb1d7c6b03d65b2dd5f9b6a325b167c005
|
e2e69b61a361751a145b977ca2f58ae4a50d756e
|
refs/heads/master
| 2020-05-30T09:36:58.834850 | 2019-05-31T19:47:53 | 2019-05-31T19:47:53 | 189,649,817 | 1 | 0 |
MIT
| 2019-05-31T19:45:23 | 2019-05-31T19:45:22 | null |
UTF-8
|
Python
| false | false | 549 |
py
|
"""
Adapter for https://github.com/cheat/cheat
Cheatsheets are located in `cheat/cheatsheets/`
Each cheat sheet is a separate file without extension
"""
# pylint: disable=relative-import,abstract-method
from .git_adapter import GitRepositoryAdapter
class Cheat(GitRepositoryAdapter):

    """
    cheat/cheat adapter

    Serves cheat sheets cloned from the upstream cheat/cheat repository;
    per the module docstring, sheets live in cheat/cheatsheets/ as
    extensionless files.
    """

    _adapter_name = "cheat"
    _output_format = "code"  # presumably rendered as plain code -- defined upstream
    _cache_needed = True     # repository clone is cached locally
    _repository_url = "https://github.com/cheat/cheat"
    _cheatsheet_files_prefix = "cheat/cheatsheets/"
    _cheatsheet_file_mask = "*"  # no extension filter: every file is a sheet
|
[
"[email protected]"
] | |
ade0b23d401c7a201eec94e034a7bb38e413996e
|
9abc2f4fbf1b31b5a56507437b4a8d9c3f3db7e6
|
/users/urls.py
|
98678c65848e0cd95f42b0434063e6edf15da19f
|
[] |
no_license
|
odbalogun/ticketr
|
e9fe8461d66dabe395f0e1af8fbecc67dbb16e97
|
94f24c82f407f861f1614a151feb3fdd62b283e5
|
refs/heads/master
| 2022-11-30T22:40:30.931160 | 2019-08-09T14:34:38 | 2019-08-09T14:34:38 | 188,833,600 | 0 | 0 | null | 2022-11-22T03:50:30 | 2019-05-27T11:50:07 |
Python
|
UTF-8
|
Python
| false | false | 595 |
py
|
from django.urls import path
# from .views import CustomLoginView, UserCreateView, UserListView, CustomLogoutView
from .views import CustomLoginView, CustomLogoutView, ProfileView
from django.contrib.auth.decorators import login_required

# URL routes for the users app; reversed as 'users:<name>'.
app_name = 'users'

urlpatterns = [
    # path('', UserListView.as_view(), name='list'),
    path('login/', CustomLoginView.as_view(), name='login'),
    path('logout/', CustomLogoutView.as_view(), name='logout'),
    # Profile requires an authenticated session.
    path('profile/', login_required(ProfileView.as_view()), name='profile')
    # path('create/', UserCreateView.as_view(), name='create'),
]
|
[
"[email protected]"
] | |
27282a88578e3530b456399cac4b11018cde5044
|
a8e2c66b3ebadfc17ee9aee197b3f466534cee16
|
/ytn11/wh/wh/items.py
|
0c48330df2942d96bbb37a839696a43850e30629
|
[] |
no_license
|
yintiannong/98kar
|
49b6db186a4543a7c50671df990bb491846c1a98
|
3863529f57e9d2d9bc1bdf8188916e25ad289db0
|
refs/heads/master
| 2022-01-07T05:49:31.566453 | 2019-05-22T07:04:45 | 2019-05-22T07:04:45 | 187,794,966 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 849 |
py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class WhItem(scrapy.Item):
    # define the fields for your item here like:
    # name = scrapy.Field()
    # One scraped job posting: company info, position details and
    # recruiter contact data.
    company = scrapy.Field()
    company_id = scrapy.Field()
    postion_id = scrapy.Field()      # NOTE(review): 'postion' spelling is part of the schema
    company_type = scrapy.Field()
    company_size = scrapy.Field()
    url = scrapy.Field()             # source listing URL
    postion = scrapy.Field()
    salary = scrapy.Field()
    education=scrapy.Field()
    address = scrapy.Field()
    exe = scrapy.Field()             # presumably required work experience -- TODO confirm
    job_type = scrapy.Field()
    update_time = scrapy.Field()
    data_from = scrapy.Field()       # which site the record was scraped from
    desc_job = scrapy.Field()
    salary2 = scrapy.Field()
    conpany_address = scrapy.Field() # NOTE(review): 'conpany' spelling is part of the schema
    phone_num = scrapy.Field()
    hr_name = scrapy.Field()
|
[
"[email protected]"
] | |
e8ae67da9e630730ae1df9ffca7fa2d4296f1d26
|
24dac117231c9ca39e09e1fd27db8de295a7fe45
|
/Trident/settings.py
|
2c943c27e79ee0020f4fe655aed1df9d616f3972
|
[] |
no_license
|
rohitrajput-42/Trident
|
784f23b9fa02d405d55715ded627c274a1c887f2
|
0d75ef954c5d6f88d3b4937e90ab9aace120bdb9
|
refs/heads/main
| 2023-06-13T06:10:19.172276 | 2021-07-10T16:25:22 | 2021-07-10T16:25:22 | 384,705,434 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,092 |
py
|
import os
from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the key is committed in source control here; load it from
# the environment before deploying.
SECRET_KEY = '9ew388dr7b@prao9gium)@&@r0ma0dze5%-1fg!1jiwe)@hcpg'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # project apps
    'home',
    'product',
    'accounts',
    # third-party form rendering
    'crispy_forms',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'Trident.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'Trident.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static')
]
STATIC_ROOT = os.path.join(BASE_DIR, 'assets')

MEDIA_URL = "image/download/"
# NOTE(review): MEDIA_ROOT is the project base directory itself, so
# uploads land next to the source tree -- confirm this is intended.
MEDIA_ROOT = BASE_DIR

# Post-login/logout redirects (URL names).
LOGIN_REDIRECT_URL = 'home'
LOGOUT_REDIRECT_URL = 'home'

CRISPY_TEMPLATE_PACK = 'bootstrap4'
|
[
"[email protected]"
] | |
311b64e499752b8f19be4d85c59a5b14455ada39
|
a1a57977131ea917a3f3094dae4a3d18846103c0
|
/unittests/pytests/problems/TestTimeStepUser.py
|
c53c39a3337525c4aa9aa5702ae8367062012113
|
[
"MIT"
] |
permissive
|
rwalkerlewis/pylith
|
cef02d5543e99a3e778a1c530967e6b5f1d5dcba
|
8d0170324d3fcdc5e6c4281759c680faa5dd8d38
|
refs/heads/master
| 2023-08-24T18:27:30.877550 | 2020-08-05T16:37:28 | 2020-08-05T16:37:28 | 154,047,591 | 0 | 0 |
MIT
| 2018-10-21T20:05:59 | 2018-10-21T20:05:59 | null |
UTF-8
|
Python
| false | false | 4,095 |
py
|
#!/usr/bin/env python
#
# ======================================================================
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University of Chicago
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2017 University of California, Davis
#
# See COPYING for license information.
#
# ======================================================================
#
## @file unittests/pytests/problems/TestTimeStepUser.py
## @brief Unit testing of TimeStepUser object.
import unittest
from pylith.problems.TimeStepUser import TimeStepUser
from pyre.units.time import second,year
stepsE = [2*1.0, 2*2.0, 2*3.0]
# ----------------------------------------------------------------------
class Integrator:
    """Minimal stand-in for an integrator exposing a fixed stable step."""

    def __init__(self, dt):
        self.dt = dt

    def stableTimeStep(self, mesh):
        """Return the preset stable time step; *mesh* is ignored."""
        return self.dt
# ----------------------------------------------------------------------
class TestTimeStepUser(unittest.TestCase):
"""
Unit testing of TimeStepUser object.
"""
  def setUp(self):
    """Create a fully initialized TimeStepUser reading
    data/timesteps.txt, with a 0.5-year nondimensional time scale.
    """
    from spatialdata.units.Nondimensional import Nondimensional
    normalizer = Nondimensional()
    normalizer._configure()
    # Time scale 0.5 yr: dimensional times from the data file are scaled
    # by 1/0.5 (hence the factor 2 in stepsE at module level).
    normalizer.setTimeScale(0.5*year)

    tstep = TimeStepUser()
    tstep._configure()
    tstep.filename = "data/timesteps.txt"
    tstep.preinitialize()
    tstep.initialize(normalizer)
    self.tstep = tstep
    return
def test_constructor(self):
"""
Test constructor.
"""
tstep = TimeStepUser()
tstep._configure()
return
def test_initialize(self):
"""
Test initialize().
"""
tstep = self.tstep
for stepE, step in zip(stepsE, tstep.steps):
self.assertEqual(stepE, step)
return
def test_numTimeSteps(self):
"""
Test numTimeSteps().
"""
tstep = self.tstep
self.assertEqual(1, tstep.numTimeSteps())
tstep.totalTimeN = 12.0 / 0.5 # nondimensionalize
self.assertEqual(6, tstep.numTimeSteps())
tstep.loopSteps = True
tstep.totalTimeN = 7.0 / 0.5 # nondimensionalize
self.assertEqual(5, tstep.numTimeSteps())
return
def test_timeStep(self):
"""
Test timeStep().
"""
tstep = self.tstep
step1 = 1.0 / 0.5 # nondimensionalize
step2 = 2.0 / 0.5 # nondimensionalize
step3 = 3.0 / 0.5 # nondimensionalize
integrators = [Integrator(40.0),
Integrator(80.0)]
from pylith.topology.Mesh import Mesh
mesh = Mesh()
self.assertEqual(step1, tstep.timeStep(mesh, integrators))
self.assertEqual(step2, tstep.timeStep(mesh, integrators))
self.assertEqual(step3, tstep.timeStep(mesh, integrators))
self.assertEqual(step3, tstep.timeStep(mesh, integrators))
self.assertEqual(step3, tstep.timeStep(mesh, integrators))
tstep.index = 0
tstep.loopSteps = True
self.assertEqual(step1, tstep.timeStep(mesh, integrators))
self.assertEqual(step2, tstep.timeStep(mesh, integrators))
self.assertEqual(step3, tstep.timeStep(mesh, integrators))
self.assertEqual(step1, tstep.timeStep(mesh, integrators))
self.assertEqual(step2, tstep.timeStep(mesh, integrators))
integrators = [Integrator(0.01),
Integrator(8.0)]
caught = False
try:
tstep.timeStep(mesh, integrators)
except RuntimeError:
caught = True
self.failUnless(caught)
return
def test_currentStep(self):
"""
Test currentStep().
"""
tstep = self.tstep
integrators = [Integrator(4.0),
Integrator(8.0)]
from pylith.topology.Mesh import Mesh
from pylith.mpi.Communicator import petsc_comm_world
mesh = Mesh()
#mesh.setComm(petsc_comm_world())
tstep.timeStep(mesh, integrators)
stepE = 1.0 / 0.5 # Nondimensionalize
self.assertEqual(stepE, tstep.currentStep())
return
def test_factory(self):
"""
Test factory method.
"""
from pylith.problems.TimeStepUser import time_step
ts = time_step()
return
# End of file
|
[
"[email protected]"
] | |
05aa5d78f1a77c1849dde9dff4856a79eddc89a7
|
c1c87cd334972c01935dbb72769064e5d0066ac8
|
/pickpack/robots/scratchpad.py
|
2d469bd1d6d84f59c8bd0ec2db7949dd53ec5962
|
[] |
no_license
|
defgsus/pypickpack
|
576e9471c9cc7cce60c1010d51b4ea85ec00ecfc
|
8a604ec1502c615bf24d77f09d564962c3d04930
|
refs/heads/master
| 2022-12-28T13:17:18.306748 | 2020-10-09T00:50:41 | 2020-10-09T00:50:41 | 269,505,707 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,253 |
py
|
import random
from .base import RobotBase
from .._2d import direction_int
from ..astar import astar_search
from ..log import log
from ..static_map import StaticMap
from ..items import Article, PickOrder
class RandomRobot(RobotBase):
def __init__(self, id):
super().__init__(id)
def process(self, world, time_delta):
if self.is_next_move_frame(world):
if random.randrange(10) == 0:
self.dir_x, self.dir_y = random.choice(((-1, 0), (1, 0), (0, -1), (0, 1)))
if not world.agent_move(self, self.direction):
self.dir_x, self.dir_y = random.choice(((-1, 0), (1, 0), (0, -1), (0, 1)))
class RobotFollowPlayer(RobotBase):
def __init__(self, id):
super().__init__(id)
def process(self, world, time_delta):
if self.is_next_move_frame(world):
way_to_player = astar_search(
self.position, world.player.position,
lambda pos: self.get_adjacent_nodes(world, pos, exclude_agents={self})
)
self.debug_way = None
if way_to_player:
next_pos = way_to_player[1]
dirx, diry = direction_int(self.position, next_pos)
if dirx or diry:
world.agent_move(self, (dirx, diry))
self.debug_way = way_to_player
class Robot(RobotBase):
def __init__(self, id):
super().__init__(id)
self.performance = 0
def copy(self):
c = super().copy()
c.performance = self.performance
return c
def on_has_put(self, item, position=None, other_agent=None):
from ..agents import Package
from ..items import Article
if isinstance(item, Article):
if isinstance(other_agent, Package):
self.performance += 1
def process(self, world, time_delta):
possible_actions = self.get_possible_actions(world)
evaluated_actions = self.evaluate_actions(world, possible_actions)
#possible_actions.sort(key=lambda action: action.get_estimated_cost(world, self))
#log(possible_actions)
#log(evaluated_actions)
if evaluated_actions:
log(evaluated_actions[0])
action = evaluated_actions[0]["action"]
#action = random.choice(possible_actions)
action.execute(world, self)
def get_possible_actions(self, world):
from ..actions import MoveTo, MoveBefore, PickDirection, PutDirection
from ..agents import Player, Package, Shelf, Computer
from ..items import Article, PickOrder
classes_to_approach = (Computer, PickOrder, Player, Robot, Package, Shelf, Article)
possible_actions = [
# MoveBefore(world.player.position),
PickDirection((-1, 0)),
PickDirection((1, 0)),
PickDirection((0, -1)),
PickDirection((0, 1)),
]
for item in self.items:
possible_actions += [
PutDirection((-1, 0), item.id),
PutDirection((1, 0), item.id),
PutDirection((0, -1), item.id),
PutDirection((0, 1), item.id),
]
for klass in classes_to_approach:
agent = world.get_closest_agent(self.position, klass, exclude_agents=[self])
if agent:
possible_actions.append(MoveBefore(agent.position))
return possible_actions
def evaluate_actions(self, world, actions):
ret_actions = []
for action in actions:
value = self._evaluate_action(world, action, depth=1)
if value is not None:
ret_actions.append({
"action": action,
"value": value,
})
ret_actions.sort(key=lambda a: -a["value"])
return ret_actions
def _evaluate_action(self, world, action, depth):
action = action.copy()
world_copy = world.copy()
self_copy = world_copy.agents.get_by_id(self.id)
action_passed = False
for i in range(100):
if not action.execute(world_copy, self_copy):
break
if action.is_finished(world_copy, self_copy):
action_passed = True
break
if not action_passed:
return
cur_value = self_copy.get_heuristic_value(world_copy)
if depth < 1:
return cur_value
best_action, best_value = None, None
new_actions = self_copy.get_possible_actions(world_copy)
for new_action in new_actions:
value = self._evaluate_action(world_copy, new_action, depth - 1)
if value is not None:
if best_value is None or value > best_value:
best_action, best_value = new_action, value
return max(best_value, cur_value) if best_value is not None else cur_value
def get_heuristic_value(self, world):
value = 0
value += min(0, self.max_items - len(self.items) * 4)
value += len(self.items_by_class(Article)) * 2
value += len(self.items_by_class(PickOrder)) * 3
value += self.performance * 5
return value
|
[
"[email protected]"
] | |
b0ebf56863454ffb4571867555552aad6d06569d
|
6527b66fd08d9e7f833973adf421faccd8b765f5
|
/yuancloud/addons/hw_proxy/controllers/main.py
|
1a934348be4f3f21a928d20583d78d39b10c4c17
|
[] |
no_license
|
cash2one/yuancloud
|
9a41933514e57167afb70cb5daba7f352673fb4d
|
5a4fd72991c846d5cb7c5082f6bdfef5b2bca572
|
refs/heads/master
| 2021-06-19T22:11:08.260079 | 2017-06-29T06:26:15 | 2017-06-29T06:26:15 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,759 |
py
|
# -*- coding: utf-8 -*-
import logging
import commands
import json
import os
import os.path
import yuancloud
import time
import random
import subprocess
import json
import werkzeug
import werkzeug.wrappers
_logger = logging.getLogger(__name__)
from yuancloud import http
from yuancloud.http import request
# Those are the builtin raspberry pi USB modules, they should
# not appear in the list of connected devices.
BANNED_DEVICES = set([
"0424:9514", # Standard Microsystem Corp. Builtin Ethernet module
"1d6b:0002", # Linux Foundation 2.0 root hub
"0424:ec00", # Standard Microsystem Corp. Other Builtin Ethernet module
])
# drivers modules must add to drivers an object with a get_status() method
# so that 'status' can return the status of all active drivers
drivers = {}
class Proxy(http.Controller):
def get_status(self):
statuses = {}
for driver in drivers:
statuses[driver] = drivers[driver].get_status()
return statuses
@http.route('/hw_proxy/hello', type='http', auth='none', cors='*')
def hello(self):
return "ping"
@http.route('/hw_proxy/handshake', type='json', auth='none', cors='*')
def handshake(self):
return True
@http.route('/hw_proxy/status', type='http', auth='none', cors='*')
def status_http(self):
resp = """
<!DOCTYPE HTML>
<html>
<head>
<title>YuanCloud's PosBox</title>
<style>
body {
width: 480px;
margin: 60px auto;
font-family: sans-serif;
text-align: justify;
color: #6B6B6B;
}
.device {
border-bottom: solid 1px rgb(216,216,216);
padding: 9px;
}
.device:nth-child(2n) {
background:rgb(240,240,240);
}
</style>
</head>
<body>
<h1>Hardware Status</h1>
<p>The list of enabled drivers and their status</p>
"""
statuses = self.get_status()
for driver in statuses:
status = statuses[driver]
if status['status'] == 'connecting':
color = 'black'
elif status['status'] == 'connected':
color = 'green'
else:
color = 'red'
resp += "<h3 style='color:"+color+";'>"+driver+' : '+status['status']+"</h3>\n"
resp += "<ul>\n"
for msg in status['messages']:
resp += '<li>'+msg+'</li>\n'
resp += "</ul>\n"
resp += """
<h2>Connected Devices</h2>
<p>The list of connected USB devices as seen by the posbox</p>
"""
devices = commands.getoutput("lsusb").split('\n')
count = 0
resp += "<div class='devices'>\n"
for device in devices:
device_name = device[device.find('ID')+2:]
device_id = device_name.split()[0]
if not (device_id in BANNED_DEVICES):
resp+= "<div class='device' data-device='"+device+"'>"+device_name+"</div>\n"
count += 1
if count == 0:
resp += "<div class='device'>No USB Device Found</div>"
resp += "</div>\n</body>\n</html>\n\n"
return request.make_response(resp,{
'Cache-Control': 'no-cache',
'Content-Type': 'text/html; charset=utf-8',
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET',
})
@http.route('/hw_proxy/status_json', type='json', auth='none', cors='*')
def status_json(self):
return self.get_status()
@http.route('/hw_proxy/scan_item_success', type='json', auth='none', cors='*')
def scan_item_success(self, ean):
"""
A product has been scanned with success
"""
print 'scan_item_success: ' + str(ean)
@http.route('/hw_proxy/scan_item_error_unrecognized', type='json', auth='none', cors='*')
def scan_item_error_unrecognized(self, ean):
"""
A product has been scanned without success
"""
print 'scan_item_error_unrecognized: ' + str(ean)
@http.route('/hw_proxy/help_needed', type='json', auth='none', cors='*')
def help_needed(self):
"""
The user wants an help (ex: light is on)
"""
print "help_needed"
@http.route('/hw_proxy/help_canceled', type='json', auth='none', cors='*')
def help_canceled(self):
"""
The user stops the help request
"""
print "help_canceled"
@http.route('/hw_proxy/payment_request', type='json', auth='none', cors='*')
def payment_request(self, price):
"""
The PoS will activate the method payment
"""
print "payment_request: price:"+str(price)
return 'ok'
@http.route('/hw_proxy/payment_status', type='json', auth='none', cors='*')
def payment_status(self):
print "payment_status"
return { 'status':'waiting' }
@http.route('/hw_proxy/payment_cancel', type='json', auth='none', cors='*')
def payment_cancel(self):
print "payment_cancel"
@http.route('/hw_proxy/transaction_start', type='json', auth='none', cors='*')
def transaction_start(self):
print 'transaction_start'
@http.route('/hw_proxy/transaction_end', type='json', auth='none', cors='*')
def transaction_end(self):
print 'transaction_end'
@http.route('/hw_proxy/cashier_mode_activated', type='json', auth='none', cors='*')
def cashier_mode_activated(self):
print 'cashier_mode_activated'
@http.route('/hw_proxy/cashier_mode_deactivated', type='json', auth='none', cors='*')
def cashier_mode_deactivated(self):
print 'cashier_mode_deactivated'
@http.route('/hw_proxy/open_cashbox', type='json', auth='none', cors='*')
def open_cashbox(self):
print 'open_cashbox'
@http.route('/hw_proxy/print_receipt', type='json', auth='none', cors='*')
def print_receipt(self, receipt):
print 'print_receipt' + str(receipt)
@http.route('/hw_proxy/is_scanner_connected', type='json', auth='none', cors='*')
def is_scanner_connected(self, receipt):
print 'is_scanner_connected?'
return False
@http.route('/hw_proxy/scanner', type='json', auth='none', cors='*')
def scanner(self, receipt):
print 'scanner'
time.sleep(10)
return ''
@http.route('/hw_proxy/log', type='json', auth='none', cors='*')
def log(self, arguments):
_logger.info(' '.join(str(v) for v in arguments))
@http.route('/hw_proxy/print_pdf_invoice', type='json', auth='none', cors='*')
def print_pdf_invoice(self, pdfinvoice):
print 'print_pdf_invoice' + str(pdfinvoice)
|
[
"[email protected]"
] | |
f7b3b2faa86e0a9a1ac895411d5a0ba761b172ea
|
9907134b0da8e5391c51b00c426c648eece7b4b9
|
/Unidad 2/pfijo.py
|
a7a60477a2df6797926949881bacf7f7f695a593
|
[] |
no_license
|
hectorrdz98/metodos-numericos
|
1fd21593c8c324f0e0e643cc08a8d930ea2e8cf3
|
dab8e9425f454be60a74d30c985a643bcb915ce6
|
refs/heads/master
| 2022-01-22T07:26:48.566615 | 2019-05-29T12:26:43 | 2019-05-29T12:26:43 | 167,975,465 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 588 |
py
|
import math
p0 = 3.8
n = 3
tol = 0.0001
def g(p):
return -4 + (4*p) - (0.5 * p * p)
flag = False
for i in range(n):
p = g(p0)
print('\nVamos en {}, con g(p0)={} y p0={}'.format(i+1,g(p0),p0))
print('El abs={}'.format(math.fabs(p-p0)))
if math.fabs(p-p0) <= tol:
print('\nEl valor de p0={} ya se encuentra dentro de la tol de {} con {} ite'.format(p0,tol,i+1))
flag = True
break
p0 = p
if not flag:
print('\nSe realizaron las {} iteraciones, pero no se llegó a la tol de {}'.format(n,tol))
print('Se llegó a p0={}'.format(p0))
|
[
"="
] |
=
|
32fe115b47214dd5d925bc1419747dfcf52e0871
|
150d9e4cee92be00251625b7f9ff231cc8306e9f
|
/NextGreaterElement.py
|
eba1f8d0ae08308ff8e272cffeec6304822d027f
|
[] |
no_license
|
JerinPaulS/Python-Programs
|
0d3724ce277794be597104d9e8f8becb67282cb0
|
d0778178d89d39a93ddb9b95ca18706554eb7655
|
refs/heads/master
| 2022-05-12T02:18:12.599648 | 2022-04-20T18:02:15 | 2022-04-20T18:02:15 | 216,547,245 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,114 |
py
|
'''
496. Next Greater Element I
The next greater element of some element x in an array is the first greater element that is to the right of x in the same array.
You are given two distinct 0-indexed integer arrays nums1 and nums2, where nums1 is a subset of nums2.
For each 0 <= i < nums1.length, find the index j such that nums1[i] == nums2[j] and determine the next greater element of nums2[j] in nums2. If there is no next greater element, then the answer for this query is -1.
Return an array ans of length nums1.length such that ans[i] is the next greater element as described above.
Example 1:
Input: nums1 = [4,1,2], nums2 = [1,3,4,2]
Output: [-1,3,-1]
Explanation: The next greater element for each value of nums1 is as follows:
- 4 is underlined in nums2 = [1,3,4,2]. There is no next greater element, so the answer is -1.
- 1 is underlined in nums2 = [1,3,4,2]. The next greater element is 3.
- 2 is underlined in nums2 = [1,3,4,2]. There is no next greater element, so the answer is -1.
Example 2:
Input: nums1 = [2,4], nums2 = [1,2,3,4]
Output: [3,-1]
Explanation: The next greater element for each value of nums1 is as follows:
- 2 is underlined in nums2 = [1,2,3,4]. The next greater element is 3.
- 4 is underlined in nums2 = [1,2,3,4]. There is no next greater element, so the answer is -1.
Constraints:
1 <= nums1.length <= nums2.length <= 1000
0 <= nums1[i], nums2[i] <= 104
All integers in nums1 and nums2 are unique.
All the integers of nums1 also appear in nums2.
Follow up: Could you find an O(nums1.length + nums2.length) solution?
'''
class Solution(object):
def nextGreaterElement(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
next_great = {}
stack = []
result = []
for val in nums2:
while len(stack) > 0 and stack[len(stack) - 1] < val:
next_great[stack.pop()] = val
stack.append(val)
print next_great
for val in nums1:
if next_great.has_key(val):
result.append(next_great[val])
else:
result.append(-1)
return result
obj = Solution()
print(obj.nextGreaterElement([4,1,2],[1,3,4,8,7,6,5,10,2]))
print(obj.nextGreaterElement([137,59,92,122,52,131,79,236,94,171,141,86,169,199,248,120,196,168,77,71,5,198,215,230,176,87,189,206,115,76,13,216,197,26,183,54,250,27,109,140,147,25,96,105,30,207,241,8,217,40,0,35,221,191,83,132,9,144,12,91,175,65,170,149,174,82,102,167,62,70,44,143,10,153,160,142,188,81,146,212,15,162,103,163,123,48,245,116,192,14,211,126,63,180,88,155,224,148,134,158,119,165,130,112,166,93,125,1,11,208,150,100,106,194,124,2,184,75,113,104,18,210,202,111,84,223,173,238,41,33,154,47,244,232,249,60,164,227,253,56,157,99,179,6,203,110,127,152,252,55,185,73,67,219,22,156,118,234,37,193,90,187,181,23,220,72,255,58,204,7,107,239,42,139,159,95,45,242,145,172,209,121,24,21,218,246,49,46,243,178,64,161,117,20,214,17,114,69,182,85,229,32,129,29,226,136,39,36,233,43,240,254,57,251,78,51,195,98,205,108,61,66,16,213,19,68,237,190,3,200,133,80,177,97,74,138,38,235,135,186,89,201,4,101,151,31,228,231,34,225,28,222,128,53,50,247],
[137,59,92,122,52,131,79,236,94,171,141,86,169,199,248,120,196,168,77,71,5,198,215,230,176,87,189,206,115,76,13,216,197,26,183,54,250,27,109,140,147,25,96,105,30,207,241,8,217,40,0,35,221,191,83,132,9,144,12,91,175,65,170,149,174,82,102,167,62,70,44,143,10,153,160,142,188,81,146,212,15,162,103,163,123,48,245,116,192,14,211,126,63,180,88,155,224,148,134,158,119,165,130,112,166,93,125,1,11,208,150,100,106,194,124,2,184,75,113,104,18,210,202,111,84,223,173,238,41,33,154,47,244,232,249,60,164,227,253,56,157,99,179,6,203,110,127,152,252,55,185,73,67,219,22,156,118,234,37,193,90,187,181,23,220,72,255,58,204,7,107,239,42,139,159,95,45,242,145,172,209,121,24,21,218,246,49,46,243,178,64,161,117,20,214,17,114,69,182,85,229,32,129,29,226,136,39,36,233,43,240,254,57,251,78,51,195,98,205,108,61,66,16,213,19,68,237,190,3,200,133,80,177,97,74,138,38,235,135,186,89,201,4,101,151,31,228,231,34,225,28,222,128,53,50,247]))
|
[
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.