Dataset schema (one row per source file):
  blob_id               string         (length 40)
  directory_id          string         (length 40)
  path                  string         (length 3 - 616)
  content_id            string         (length 40)
  detected_licenses     sequence       (length 0 - 112)
  license_type          string         (2 classes)
  repo_name             string         (length 5 - 115)
  snapshot_id           string         (length 40)
  revision_id           string         (length 40)
  branch_name           string         (777 classes)
  visit_date            timestamp[us]  (2015-08-06 10:31:46 - 2023-09-06 10:44:38)
  revision_date         timestamp[us]  (1970-01-01 02:38:32 - 2037-05-03 13:00:00)
  committer_date        timestamp[us]  (1970-01-01 02:38:32 - 2023-09-06 01:08:06)
  github_id             int64          (4.92k - 681M, nullable)
  star_events_count     int64          (0 - 209k)
  fork_events_count     int64          (0 - 110k)
  gha_license_id        string         (22 classes)
  gha_event_created_at  timestamp[us]  (2012-06-04 01:52:49 - 2023-09-14 21:59:50, nullable)
  gha_created_at        timestamp[us]  (2008-05-22 07:58:19 - 2023-08-21 12:35:19, nullable)
  gha_language          string         (149 classes)
  src_encoding          string         (26 classes)
  language              string         (1 class)
  is_vendor             bool           (2 classes)
  is_generated          bool           (2 classes)
  length_bytes          int64          (3 - 10.2M)
  extension             string         (188 classes)
  content               string         (length 3 - 10.2M)
  authors               sequence       (length 1)
  author_id             string         (length 1 - 132)
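A minimal sketch of consuming a dataset with this schema via the Hugging Face
`datasets` library; the repo id "user/dataset-name" is a hypothetical placeholder,
not the actual dataset identifier:

from datasets import load_dataset

# NOTE: "user/dataset-name" is a placeholder - substitute the real dataset id
ds = load_dataset("user/dataset-name", split="train", streaming=True)

# keep small, human-written files only, using the schema fields above
for row in ds:
    if row["length_bytes"] < 50_000 and not row["is_vendor"] and not row["is_generated"]:
        print(row["repo_name"], row["path"], row["license_type"])
        break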
2407745c75d7ca85e8683e02f858075f868b447a | 9b639327ffb1ee18e88904bc2e158d55205acc0b | /plot_multifreq_lic.py | cc2727d74c10908c58cfef293ca806223c30a714 | [] | no_license | guanyilun/gc_plots | 6aebfef07013a2d7feb3975a7b76cf3dfc5fbd22 | f4c1c68a3364fe77f949c6121c38448374314c9e | refs/heads/master | 2023-06-25T06:04:17.530867 | 2021-07-19T21:55:17 | 2021-07-19T21:55:17 | 339,585,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,746 | py | """This script produces an LIC (line integral convolution) plot based on a multifrequency temperature
plot.
"""
import argparse, os, os.path as op
import numpy as np
import matplotlib.pyplot as plt
from pixell import enmap, enplot, utils as u
from matplotlib import colors
from common import *
import lib
import plotstyle
# parser defined in common
parser.add_argument("-T", default=None)
parser.add_argument("-P", default=None)
parser.add_argument("--axis", action='store_true')
parser.add_argument("--figsize", default="(8,4)")
parser.add_argument("--texture", help='file to store texture', default='mf_texture.npy')
parser.add_argument('--force', action='store_true')
args = parser.parse_args()
if not op.exists(args.odir): os.makedirs(args.odir)
if args.figsize: figsize=eval(args.figsize)
else: figsize=None
# define box of interests
box = boxes[args.area]
# load a map for wcs only
imap = load_map(filedb['f090']['coadd'], box=box, fcode='f090')
# load two maps
tmap = np.load(args.T)
# start plotting
popts = {
'origin': 'lower',
}
# plots:
fig, ax = plt.subplots(1, 1, figsize=figsize, subplot_kw={'projection':imap.wcs})
if not args.axis:
ax.axis('off')
plt.tight_layout(h_pad=0.5)
else:
ax.tick_params(axis='x', colors='white', which='both', labelcolor='black')
ax.tick_params(axis='y', colors='white', which='both', labelcolor='black')
ax.set_aspect('equal')
for side in ['left','right','top','bottom']:
ax.spines[side].set_visible(True)
ax.spines[side].set_color('white')
plotstyle.setup_axis(ax, nticks=[10,5])
ax.set_ylabel("$b$")
ax.set_xlabel('$l$')
plt.tight_layout(h_pad=0.1)
# polarization angle plot
# reload imap to get the original resolution
# seed = enmap.rand_gauss(imap[0].shape, imap.wcs)
# seed = enmap.smooth_gauss(seed, 0.5*u.arcmin*u.fwhm)
seed = None
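# smooth the map to 5 arcmin before computing the polarized intensity P below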
imap = enmap.smooth_gauss(imap, 5*u.arcmin*u.fwhm)
P = np.sum(imap[1:]**2,axis=0)**0.5
if not op.exists(args.texture) or args.force:
theta = lib.Bangle(imap[1], imap[2], toIAU=True)
    # no need to add pi/2 to the angle for LIC
texture = lib.LIC_texture(theta, length=0.1, seed=seed, contrast=True)
np.save(args.texture, texture)
else:
texture = np.load(args.texture)
# boost contrast
curve = lambda x: 1/(1+np.exp(-(x-0.5)))
# texture = curve(texture) # option to adjust contrast of lic texture
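# alpha: scale texture opacity by the normalized polarized intensity (capped at 1)
# so the LIC pattern fades out where the polarization signal is weak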
alpha = np.min([np.ones_like(texture), 1.2*(P/P.max())**0.7],axis=0)
textures = np.stack([np.ones_like(texture)*alpha]*3+[0.6*texture], axis=2)
ax.imshow(tmap, **popts)
ax.imshow(textures, origin='lower')
# watermark
ax.text(0.84, 0.05, "ACT Collaboration", fontsize=10, color='gray', transform=ax.transAxes, alpha=0.8)
ofile = op.join(args.odir, args.oname)
print("Writing:", ofile)
plt.savefig(ofile, bbox_inches='tight')
| [
"[email protected]"
] | |
6a61f6445c74967d9422d6b4739e394a79e11568 | 6eae4a2f7326238820bca29f9a2d99fb054f91e4 | /src/blog/settings.py | 5348a0f815fe878e1bf8eb108d2fa5a1e15a2eb0 | [
"MIT"
] | permissive | jach58/api-blog | 88e69f80378be4ecfc110aa54363556eb3053c78 | 5e668289856669537f2d6dc7236ec4f1e566e765 | refs/heads/master | 2021-04-03T09:47:39.386947 | 2018-03-11T04:58:56 | 2018-03-11T04:58:56 | 124,724,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,473 | py | """
Django settings for blog project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# BASE_DIR = "/Users/jmitch/desktop/blog/src/"
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'sm@g)(fbwdh5wc*xe@j++m9rh^uza5se9a57c5ptwkg*b@ki0x'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third party
'crispy_forms',
'markdown_deux',
'pagedown',
'rest_framework',
# local apps
'comments',
'posts',
]
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
LOGIN_URL = "/login/"
ROOT_URLCONF = 'blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
#'/var/www/static/',
]
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn")
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media_cdn")
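# REST framework: JSON + browsable API renderers, JWT authentication, and every
# endpoint requires an authenticated user by default (see the curl example at the
# bottom of this file for obtaining a token).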
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
),
# 'DEFAULT_PARSER_CLASSES': (
# 'rest_framework.parsers.JSONParser',
# )
'DEFAULT_AUTHENTICATION_CLASSES':(
# 'rest_framework.authentication.SessionAuthentication',
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
# 'rest_framework.authentication.BasicAuthentication'
),
'DEFAULT_PERMISSION_CLASSES':(
'rest_framework.permissions.IsAuthenticated',
# 'rest_framework.permissions.IsAuthenticatedOrReadOnly',
)
}
"""
$ curl -X POST -d "username=square&password=square1234" http://localhost:8000/api/auth/token/
"""
| [
"[email protected]"
] | |
a219c8262e721f6e24772106f5656aa8aa9d534b | 8c46e86716715197038e7976c3adc80bc4141bb0 | /main/admin.py | f6ac995ba0e40da6bd34b832941f0087cc01e3b9 | [
"MIT"
] | permissive | alan-turing-institute/autistica-filemanagement-demo | 080495b3bfd147c9b0afae8b858d08e335c9f4c2 | a1c84233131f6b1f09ad60b24edd97ae06cf8d4d | refs/heads/master | 2022-12-10T14:23:10.544180 | 2019-09-23T12:49:35 | 2019-09-23T12:49:35 | 196,981,366 | 0 | 1 | MIT | 2022-04-22T21:56:36 | 2019-07-15T10:58:05 | Python | UTF-8 | Python | false | false | 113 | py | from django.contrib import admin
from main.models import PublicExperience
admin.site.register(PublicExperience)
| [
"[email protected]"
] | |
3d83d809827e773663de094cbba319678dc088f5 | a7db09b7681e8050e832da8198afde3cbff6da0e | /smap/tests.py | df277b08384afdcc2ed27fbb2b29a2d07740b53e | [
"MIT"
] | permissive | HakoCra/smap | 0146b0b2da7c9d06423d3d49dec22b4b4138576f | 88a6265cbb4777e6d1a032c4c792187debcaf41f | refs/heads/master | 2021-01-11T22:03:07.376411 | 2017-08-29T16:48:43 | 2017-08-29T16:48:43 | 78,906,682 | 0 | 1 | null | 2017-02-19T04:56:08 | 2017-01-14T02:54:02 | Python | UTF-8 | Python | false | false | 12,317 | py | import json
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from .models import Tag, Sumari
class Test(TestCase):
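    # helper: builds a Sumari row for each fixture dict and attaches its tags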
def create_data(self, data):
for sumari in data:
message = sumari["message"]
name = sumari["name"]
lat = sumari["lat"]
lng = sumari["lng"]
new_sumari = Sumari(name=name, message=message, lat=lat, lng=lng)
new_sumari.save()
for tagname in sumari["tags"]:
tag = Tag.get_or_create(tagname)
new_sumari.tags.add(tag)
new_sumari.save()
def test_search_sumaris_by_tag(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"sushi"
},
"name": "すしろー",
"message": "回転寿司うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"sushi"
},
"name": "すしろー",
"message": "うまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
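        # tags__name__in matches Sumaris tagged with ANY of the given names (OR
        # semantics), so ["ramen", "sushi"] returns the union of both tag sets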
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["meshi"])), 2)
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["ramen"])), 1)
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["sushi"])), 3)
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["ramen", "sushi"])), 4)
def test_search_with_tags_not_exist(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"sushi"
},
"name": "すしろー",
"message": "回転寿司うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"sushi"
},
"name": "すしろー",
"message": "うまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["USA"])), 0)
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["---", "0000"])), 0)
def test_post(self):
client = Client()
json_data = {
"name": "函館駅",
"position": {
"lat": 41.773809,
"lng": 140.726467
},
"message": "北海道最南端の駅(大嘘)",
"tags": [
"親の顔より見た光景",
"実家のような安心感",
"リスポーン地点"
]
}
response = client.post(reverse('sumari'), json.dumps(json_data),
content_type="application/json")
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["親の顔より見た光景"])), 1)
self.assertEqual(len(Sumari.objects.filter(tags__name__in=["親の顔"])), 0)
def test_to_json(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
sumari = Sumari.objects.filter(tags__name__in=["meshi"])[0]
obj = sumari.to_json()
self.assertSetEqual(set(obj["tags"]), {"meshi", "ramen"})
self.assertEqual(obj["name"], "山岡屋")
self.assertEqual(obj["message"], "山岡屋うまい")
self.assertEqual(obj["position"]["lat"], 41.773809)
self.assertEqual(obj["position"]["lng"], 140.726467)
def test_search_and_get_as_json(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
json_objs = Sumari.search_with_tags(tags=["meshi"], to_json=True)
self.assertSetEqual({sumari["name"] for sumari in json_objs}, {"山岡屋", "すしろー"})
self.assertEqual(len([sumari["id"] for sumari in json_objs]), 2)
def test_search_with_tags_not_exist_and_get_as_json(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
json_objs = Sumari.search_with_tags(tags=["tokyo"], to_json=True)
self.assertEqual(json_objs, [])
def test_get_sumaris_view(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
client = Client()
response = client.get('/sumari', {'tags': 'meshi,ramen'})
self.assertSetEqual({sumari["name"] for sumari in response.json()}, {"山岡屋", "すしろー"})
response = client.get('/sumari', {'tags': 'ramen'})
self.assertSetEqual({sumari["name"] for sumari in response.json()}, {"山岡屋"})
def test_get_all_tag_view(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
client = Client()
response = client.get('/tag')
self.assertSetEqual({sumari for sumari in response.json()}, {"meshi", "ramen", "sushi"})
def test_get_sumari_id(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
client = Client()
id = Sumari.search_with_tags(tags=["sushi"]).first().id
response = client.get('/sumari/{}'.format(id))
self.assertEqual(response.json()["name"], "すしろー")
def test_update_sumari(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
client = Client()
id = Sumari.search_with_tags(tags=["sushi"]).first().id
json_data = {
"name": "くらずし"
}
response = client.put('/sumari/{}'.format(id), json.dumps(json_data),
content_type="application/json")
name = Sumari.search_with_tags(tags=["sushi"]).first().name
self.assertEqual(name, "くらずし")
def test_json_include_good(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
json_obj = Sumari.search_with_tags(tags=["sushi"], to_json=True)[0]
self.assertEqual(type(json_obj["good"]), int)
def test_post_good(self):
data = [
{
"tags": {
"meshi",
"ramen"
},
"name": "山岡屋",
"message": "山岡屋うまい",
"lat": 41.773809,
"lng": 140.726467,
},
{
"tags": {
"meshi",
"sushi"
},
"name": "すしろー",
"message": "すしうまい",
"lat": 41.773809,
"lng": 140.726467,
}
]
self.create_data(data)
client = Client()
id = Sumari.search_with_tags(tags=["sushi"]).first().id
response = client.post('/sumari/{}/good'.format(id))
good = Sumari.search_with_tags(tags=["sushi"]).first().good
self.assertEqual(good, 1)
| [
"[email protected]"
] | |
83d484a3603a50c3b2228f355c9e23b816b93b7e | 66f62ac192d82e4bd538879b205a5ddf4e005895 | /relative_transformer/src/modules/ops.py | f289b67d247429a263cd3aa5a50946c6de0c05fe | [] | no_license | zqp111/NewMB-813 | a66772811321fd34329416abb2c058d85901f915 | b71cf3f22257b2c8059e3042a53876b5da7aed01 | refs/heads/main | 2023-03-17T18:50:10.487157 | 2021-03-01T12:28:06 | 2021-03-01T12:28:06 | 343,410,746 | 1 | 0 | null | 2021-03-01T12:35:41 | 2021-03-01T12:35:41 | null | UTF-8 | Python | false | false | 21,028 | py | # Copyright (c) Microsoft, Inc. 2020
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# Author: [email protected]
# Date: 01/15/2020
#
import math
from packaging import version
import torch
from .jit_tracing import traceable
# from configs.options import get_parser
class Config():
window_size = 8
opts = Config()
if version.Version(torch.__version__) >= version.Version('1.0.0'):
from torch import _softmax_backward_data as _softmax_backward_data
else:
from torch import softmax_backward_data as _softmax_backward_data
__all__ = ['StableDropout', 'MaskedLayerNorm', 'XSoftmax', 'LocalXSoftmax', 'LocalXSoftmaxLocalSpan',
'SoftLocalXSoftmax']
@traceable
class XSoftmax(torch.autograd.Function):
""" Masked Softmax which is optimized for saving memory
Args:
        input (:obj:`torch.tensor`): The input tensor to which softmax is applied.
        mask (:obj:`torch.IntTensor`): The mask matrix where 0 indicates that the element will be ignored in the softmax calculation.
        dim (int): The dimension along which softmax is applied.
Example::
import torch
from DeBERTa.deberta import XSoftmax
# Make a tensor
x = torch.randn([4,20,100])
# Create a mask
mask = (x>0).int()
        y = XSoftmax.apply(x, mask, -1)  # Function.apply takes positional args only
"""
@staticmethod
def forward(self, input, mask, dim):
"""
"""
# print("="*50)
# print("4. Xsoftmax:")
# print("input: ", input.shape)
# print("mask: ", mask.shape)
# print("dim: ", dim)
self.dim = dim
if version.Version(torch.__version__) >= version.Version('1.2.0a'):
rmask = ~(mask.bool())
else:
rmask = (1 - mask).byte() # This line is not supported by Onnx tracing.
output = input.masked_fill(rmask, float('-inf'))
output = torch.softmax(output, self.dim)
output.masked_fill_(rmask, 0)
self.save_for_backward(output)
return output
@staticmethod
def backward(self, grad_output):
"""
"""
output, = self.saved_tensors
inputGrad = _softmax_backward_data(grad_output, output, self.dim, output)
return inputGrad, None, None
@traceable
class LocalXSoftmax(torch.autograd.Function):
""" Masked Softmax which is optimized for saving memory
Args:
input (:obj:`torch.tensor`): The input tensor that will apply softmax.
mask (:obj:`torch.IntTensor`): The mask matrix where 0 indicate that element will be ignored in the softmax caculation.
dim (int): The dimenssion that will apply softmax.
Example::
import torch
from DeBERTa.deberta import XSoftmax
# Make a tensor
x = torch.randn([4,20,100])
# Create a mask
mask = (x>0).int()
        y = XSoftmax.apply(x, mask, -1)  # Function.apply takes positional args only
"""
@staticmethod
def forward(self, input, mask, dim):
"""
"""
# print("="*50)
# print("4. Xsoftmax:")
# print("input: ", input.shape)
# print("mask: ", mask.shape)
# print("dim: ", dim)
self.dim = dim
if version.Version(torch.__version__) >= version.Version('1.2.0a'):
rmask = ~(mask.bool())
else:
rmask = (1 - mask).byte() # This line is not supported by Onnx tracing.
output = input.masked_fill(rmask, float('-inf'))
WINDOW_SIZE = opts.window_size
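        # hard sparsification: keep the WINDOW_SIZE highest-scoring logits per
        # query and push everything below the k-th score to -1e9 before softmax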
topk_scores, topk_ids = output.topk(k=WINDOW_SIZE, dim=-1, largest=True) # [bs, head, q_len, k]
        threshold = topk_scores[:, :, :, -1].unsqueeze(-1).detach()  # [bs, head, q_len, 1]
        output = output.masked_fill(output < threshold, -1e9)
# print("output: ", output.shape, output[0, 0, 0, :])
output = torch.softmax(output, self.dim)
output.masked_fill_(rmask, 0)
self.save_for_backward(output)
return output
@staticmethod
def backward(self, grad_output):
"""
"""
output, = self.saved_tensors
inputGrad = _softmax_backward_data(grad_output, output, self.dim, output)
return inputGrad, None, None
def build_relative_position(query_size, key_size, device):
""" Build relative position according to the query and key
We assume the absolute position of query :math:`P_q` is range from (0, query_size) and the absolute position of key :math:`P_k` is range from (0, key_size),
The relative positions from query to key is
:math:`R_{q \\rightarrow k} = P_q - P_k`
Args:
query_size (int): the length of query
key_size (int): the length of key
Return:
:obj:`torch.LongTensor`: A tensor with shape [1, query_size, key_size]
"""
q_ids = torch.arange(query_size, dtype=torch.long, device=device)
k_ids = torch.arange(key_size, dtype=torch.long, device=device)
rel_pos_ids = q_ids[:, None] - k_ids.view(1, -1).repeat(query_size, 1)
rel_pos_ids = rel_pos_ids[:query_size, :]
rel_pos_ids = rel_pos_ids.unsqueeze(0)
return rel_pos_ids
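# Example (sketch): build_relative_position(3, 4, 'cpu') returns
#   tensor([[[ 0, -1, -2, -3],
#            [ 1,  0, -1, -2],
#            [ 2,  1,  0, -1]]])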
@traceable
class SoftLocalXSoftmax(torch.autograd.Function):
""" Masked Softmax which is optimized for saving memory
Args:
input (:obj:`torch.tensor`): The input tensor that will apply softmax.
mask (:obj:`torch.IntTensor`): The mask matrix where 0 indicate that element will be ignored in the softmax caculation.
dim (int): The dimenssion that will apply softmax.
Example::
import torch
from DeBERTa.deberta import XSoftmax
# Make a tensor
x = torch.randn([4,20,100])
# Create a mask
mask = (x>0).int()
        y = XSoftmax.apply(x, mask, -1)  # Function.apply takes positional args only
"""
@staticmethod
def forward(self, input, mask, dim):
"""
"""
# print("="*50)
# print("4. Xsoftmax:")
# print("input: ", input.shape)
# print("mask: ", mask.shape)
# print("dim: ", dim)
self.dim = dim
if version.Version(torch.__version__) >= version.Version('1.2.0a'):
rmask = ~(mask.bool())
else:
rmask = (1 - mask).byte() # This line is not supported by Onnx tracing.
# print("rmask: ", rmask.shape, rmask[0, 0, :20, :20])
output = input.masked_fill(rmask, float('-inf')) # [bs, heads, length, length]
# output = input.masked_fill(rmask, 1e-9) # [bs, heads, length, length]
# print("output-1: ", output[0, 0, :20, :20])
bs, heads, length, _ = output.size()
WINDOW_SIZE = opts.window_size
# TODO. soft select.
distribution = output.softmax(-1) # [bs, heads, length, length]
# print("distribution: ", distribution.shape, distribution[0, 0, :30, :30])
position = torch.arange(0, length).reshape(1, length).repeat(length, 1).unsqueeze_(0).unsqueeze_(1).to(
distribution.device) # [bs, heads, length, length]
position = position.float() * mask.float()
# print("position: ", position[0, 0, :20, :20])
mu = torch.sum(distribution * position, dim=-1, keepdim=True) # [bs, heads, length, 1]
# print("mu: ", mu[0, 0, :20, :20])
# local_position = torch.arange(0, 2 * WINDOW_SIZE).reshape(1, 2 * WINDOW_SIZE) + torch.arange(
# 0, length).reshape(length, 1)
# local_position = local_position.unsqueeze_(0).unsqueeze_(0).float().to(mu.device) # [1, 1, length, window_size]
local_position = torch.arange(0, length).reshape(1, length).repeat(length, 1).to(mu.device)
local_position = local_position.unsqueeze_(0).unsqueeze_(0).float() * mask.float() # [1, 1, length, length]
# local_size = torch.sum(mask.float(), dim=-1, keepdim=True).to(mu.device) # [bs, 1, length, 1]
# print("local_position: ", local_position[0, 0, :20, :20])
# print("local_size: ", local_size[0, 0, :20, :20])
# print("local_position - mu: ", (local_position - mu)[0, 0, :20, :20]) # [bs, 1, length, length]
local_position_mu = (local_position - mu) * mask.float()
# print("local_position - mu: ", local_position_mu[0, 0, :20, :20]) # [bs, 1, length, length]
        # note: the way the mean of (x - mu) is computed here is questionable
# skewness = (torch.sum(local_position_mu **3, dim=-1, keepdim=True) / local_size) / (
# (torch.sum(local_position_mu **2, dim=-1, keepdim=True)/ local_size) ** (3/2))
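        # probability-weighted third/fourth central moments of the key-position
        # distribution: skewness captures left/right asymmetry of the attention,
        # (excess) kurtosis captures how peaked it is around its mean position mu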
skewness = (torch.sum(local_position_mu **3 * distribution, dim=-1, keepdim=True)) / (
(torch.sum(local_position_mu **2 * distribution, dim=-1, keepdim=True)) ** (3/2))
# kurtosis = (torch.sum(local_position_mu ** 4, dim=-1, keepdim=True)/ local_size) / (
# (torch.sum(local_position_mu ** 2, dim=-1, keepdim=True) / local_size) ** 2) - 3
kurtosis = (torch.sum(local_position_mu ** 4 * distribution, dim=-1, keepdim=True)) / (
(torch.sum(local_position_mu ** 2 * distribution, dim=-1, keepdim=True)) ** 2) - 3
# print("skewness: ", skewness.shape, skewness[0, 0, :20])
# print("kurtosis: ", kurtosis.shape, kurtosis[0, 0, :20])
lambda_r = 1.0
lrr_right = lambda_r * torch.exp(skewness) * torch.exp(-kurtosis) * WINDOW_SIZE
lambda_l = 1.0
lrr_left = lambda_l * torch.exp(-skewness) * torch.exp(-kurtosis) * WINDOW_SIZE
# print("lrr_left: ", lrr_left[0, 0, :20], lrr_left.shape)
# print("lrr_right: ", lrr_right[0, 0, :20], lrr_right.shape)
# local_win = torch.arange(0, length).reshape(1, length).to(mu.device)
# local_win = local_win.unsqueeze_(0).unsqueeze_(-1).float() # [1, 1, length, 1]
local_win = mu
lrr_left = (local_win - lrr_left)
zero = torch.zeros_like(lrr_left).to(lrr_left.device)
lrr_left = torch.where(lrr_left > 0, lrr_left, zero)
lrr_right = (local_win + lrr_right)
max = (torch.ones_like(lrr_right) * (length-1)).to(lrr_right.device)
lrr_right = torch.where(lrr_right > (length-1), max, lrr_right)
# print("local_win: ", local_win[0, 0, :20], local_win.shape)
# print("lrr_left: ", lrr_left[0, 0, :20], lrr_left.shape)
# print("lrr_right: ", lrr_right[0, 0, :20], lrr_right.shape)
# new local_position
left_mask = (local_position - lrr_left).long()
# print("left_mask: ", left_mask[0, 0, :20, :20])
left_mask = (left_mask > 0).long()
# print("left_mask: ", left_mask[0, 0, :20, :20])
right_mask = (local_position - lrr_right).long()
right_mask = (right_mask < 0).long()
# print("right_mask: ", right_mask[0, 0, :20, :20])
final_mask = left_mask & right_mask
# print("final_mask: ", final_mask[0, 0, :20, :20])
# # exit()
if version.Version(torch.__version__) >= version.Version('1.2.0a'):
rfinal_mask = ~(final_mask.bool())
else:
rfinal_mask = (1 - final_mask).byte() # This line is not supported by Onnx tracing.
# print("final_mask: ", final_mask[0, 0, :20, :20])
output = output.masked_fill(rfinal_mask, -1e9)
# print("output-2: ", output[0, 0, :20, :20])
# TODO. Laplacian kernel
gamma = 1.0 / (2 * WINDOW_SIZE)
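        # Laplacian distance-decay kernel exp(-gamma*|i-j|): down-weights logits
        # of distant key positions before the final softmax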
relative_matrix = build_relative_position(length, length, output.device).unsqueeze_(0)
# print("relative_matrix: ", relative_matrix.shape, relative_matrix[0, 0, :20, :20])
lap_kernel = torch.exp(-gamma * torch.abs(relative_matrix).float())
# print("lap_kernel: ", lap_kernel.shape, lap_kernel[0, 0, :, :])
output = lap_kernel * output
# print("output-2: ", output[0, 0, :20, :20])
# exit()
# # TODO. hard select
# WINDOW_SIZE = opts.window_size
# topk_scores, topk_ids = output.topk(k=WINDOW_SIZE, dim=-1, largest=True) # [bs, head, q_len, k]
# threshod = topk_scores[:, :, :, -1].unsqueeze(-1).detach() # [bs, head, q_len, 1]
# output = output.masked_fill(output < threshod, -1e9)
# print("output: ", output.shape, output[0, 0, 0, :])
output = torch.softmax(output, self.dim)
# output.masked_fill_(rmask, 0)
output.masked_fill_(rmask & rfinal_mask, 0)
# print("output-3: ", output[0, 0, :20, :20])
# exit()
self.save_for_backward(output)
return output
@staticmethod
def backward(self, grad_output):
"""
"""
output, = self.saved_tensors
inputGrad = _softmax_backward_data(grad_output, output, self.dim, output)
return inputGrad, None, None
@traceable
class LocalXSoftmaxLocalSpan(torch.autograd.Function):
""" Masked Softmax which is optimized for saving memory
Args:
input (:obj:`torch.tensor`): The input tensor that will apply softmax.
mask (:obj:`torch.IntTensor`): The mask matrix where 0 indicate that element will be ignored in the softmax caculation.
dim (int): The dimenssion that will apply softmax.
Example::
import torch
from DeBERTa.deberta import XSoftmax
# Make a tensor
x = torch.randn([4,20,100])
# Create a mask
mask = (x>0).int()
        y = XSoftmax.apply(x, mask, -1)  # Function.apply takes positional args only
"""
@staticmethod
def forward(self, input, mask, dim, src_length):
"""
"""
# print("="*50)
# print("4. Xsoftmax:")
# print("input: ", input.shape)
# print("mask: ", mask.shape)
# print("dim: ", dim)
self.dim = dim
if version.Version(torch.__version__) >= version.Version('1.2.0a'):
rmask = ~(mask.bool())
else:
rmask = (1 - mask).byte() # This line is not supported by Onnx tracing.
# print(rmask[0, 0, :20, :20])
output = input.masked_fill(rmask, float('-inf'))
# print(output.shape, output[0, 0, -20:, -20:])
# TODO. Soft select. adaptively select the local-span, which is about 80% of the total.
WINDOW_SIZE = opts.window_size
distribution = output.softmax(-1) # [bs, heads, length, length]
span_local_mask = torch.ones_like(distribution).cuda()
bs, heads, length, _ = distribution.size()
for b in range(bs):
for h in range(heads):
cue_length = src_length[b]
for l in range(src_length[b]):
percentage = distribution[b, h, l, l] # current position
span_local_mask[b, h, l, l] = 0
cur = l
cur_left, cur_right = cur - 1, cur + 1
while percentage < 0.8:
# print("======= cur: ", cur, cur_left, cur_right, percentage)
if cur_left < max(0, l - WINDOW_SIZE) and cur_right <= min(l + WINDOW_SIZE, cue_length - 1):
percentage += distribution[b, h, l, cur_right]
span_local_mask[b, h, l, cur_right] = 0
cur_right += 1
elif cur_left >= max(0, l - WINDOW_SIZE) and cur_right > min(l + WINDOW_SIZE, cue_length - 1):
percentage += distribution[b, h, l, cur_left]
span_local_mask[b, h, l, cur_left] = 0
cur_left -= 1
elif (cur_left >= max(0, l - WINDOW_SIZE) and cur_right <= min(l + WINDOW_SIZE,
cue_length - 1)):
if distribution[b, h, l, cur_left] > distribution[b, h, l, cur_right]:
percentage += distribution[b, h, l, cur_left]
span_local_mask[b, h, l, cur_left] = 0
cur_left -= 1
else:
percentage += distribution[b, h, l, cur_right]
span_local_mask[b, h, l, cur_right] = 0
cur_right += 1
else:
raise ValueError("It is impossible!")
# print("span_local_mask: ", span_local_mask.shape, span_local_mask[0, 0, :20, :20])
output = output.masked_fill(span_local_mask.byte(), -1e9)
# print(output.shape, output[0, 0, -20:, -20:])
# exit()
# TODO, hard select using topk function
WINDOW_SIZE = opts.window_size
topk_scores, topk_ids = output.topk(k=WINDOW_SIZE, dim=-1, largest=True) # [bs, head, q_len, k]
        threshold = topk_scores[:, :, :, -1].unsqueeze(-1).detach()  # [bs, head, q_len, 1]
        output = output.masked_fill(output < threshold, -1e9)
output = torch.softmax(output, self.dim)
output.masked_fill_(rmask, 0)
self.save_for_backward(output)
return output
@staticmethod
def backward(self, grad_output):
"""
"""
output, = self.saved_tensors
inputGrad = _softmax_backward_data(grad_output, output, self.dim, output)
return inputGrad, None, None, None
class DropoutContext(object):
def __init__(self):
self.dropout = 0
self.mask = None
self.scale = 1
self.reuse_mask = True
def get_mask(input, local_context):
if not isinstance(local_context, DropoutContext):
dropout = local_context
mask = None
else:
dropout = local_context.dropout
dropout *= local_context.scale
mask = local_context.mask if local_context.reuse_mask else None
if dropout > 0 and mask is None:
if version.Version(torch.__version__) >= version.Version('1.2.0a'):
mask = (1 - torch.empty_like(input).bernoulli_(1 - dropout)).bool()
else:
mask = (1 - torch.empty_like(input).bernoulli_(1 - dropout)).byte()
if isinstance(local_context, DropoutContext):
if local_context.mask is None:
local_context.mask = mask
return mask, dropout
@traceable
class XDropout(torch.autograd.Function):
@staticmethod
def forward(ctx, input, local_ctx):
mask, dropout = get_mask(input, local_ctx)
ctx.scale = 1.0 / (1 - dropout)
if dropout > 0:
ctx.save_for_backward(mask)
return input.masked_fill(mask, 0) * ctx.scale
else:
return input
@staticmethod
def backward(ctx, grad_output):
if ctx.scale > 1:
mask, = ctx.saved_tensors
return grad_output.masked_fill(mask, 0) * ctx.scale, None
else:
return grad_output, None
class StableDropout(torch.nn.Module):
""" Optimized dropout module for stabilizing the training
Args:
        drop_prob (float): the dropout probability
"""
def __init__(self, drop_prob):
super().__init__()
self.drop_prob = drop_prob
self.count = 0
self.context_stack = None
def forward(self, x):
""" Call the module
Args:
x (:obj:`torch.tensor`): The input tensor to apply dropout
"""
if self.training and self.drop_prob > 0:
return XDropout.apply(x, self.get_context())
return x
def clear_context(self):
self.count = 0
self.context_stack = None
def init_context(self, reuse_mask=True, scale=1):
if self.context_stack is None:
self.context_stack = []
self.count = 0
for c in self.context_stack:
c.reuse_mask = reuse_mask
c.scale = scale
def get_context(self):
if self.context_stack is not None:
if self.count >= len(self.context_stack):
self.context_stack.append(DropoutContext())
ctx = self.context_stack[self.count]
ctx.dropout = self.drop_prob
self.count += 1
return ctx
else:
return self.drop_prob
def MaskedLayerNorm(layerNorm, input, mask=None):
""" Masked LayerNorm which will apply mask over the output of LayerNorm to avoid inaccurate updatings to the LayerNorm module.
Args:
layernorm (:obj:`~DeBERTa.deberta.BertLayerNorm`): LayerNorm module or function
input (:obj:`torch.tensor`): The input tensor
        mask (:obj:`torch.IntTensor`): The mask to apply on the output of LayerNorm where `0` indicates the output of that element will be ignored, i.e. set to `0`
Example::
# Create a tensor b x n x d
x = torch.randn([1,10,100])
m = torch.tensor([[1,1,1,0,0,0,0,0,0,0]], dtype=torch.int)
LayerNorm = DeBERTa.deberta.BertLayerNorm(100)
y = MaskedLayerNorm(LayerNorm, x, m)
"""
output = layerNorm(input).to(input)
if mask is None:
return output
if mask.dim() != input.dim():
if mask.dim() == 4:
mask = mask.squeeze(1).squeeze(1)
mask = mask.unsqueeze(2)
mask = mask.to(output.dtype)
return output * mask
| [
"[email protected]"
] | |
7aef8b494a915a43e4a8e942382d1bf445e7d293 | 4fa3cbe2fd0331c8b4e441ff7fca3cbf178f146e | /lectures/code/string_format.py | ff6b7d053dbd2b6110be39d21ffd24194203a4df | [
"MIT"
] | permissive | naskoch/python_course | ee909243b5be730694b0f8a1a9f9e8722235e625 | 84adfd3f8d48ca3ad5837f7acc59d2fa051e95d3 | refs/heads/master | 2020-04-03T09:52:45.858591 | 2016-09-26T16:44:20 | 2016-09-26T16:44:20 | 69,268,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | fl = 0.23
wo = 'Hello'
inte = 12
print "s: {} \t f: {:0.1f} \n i: {}".format(wo, fl, inte)
# s: Hello f: 0.2
# i: 12
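# note: this uses the Python 2 print statement; in Python 3, write
# print("s: {} \t f: {:0.1f} \n i: {}".format(wo, fl, inte))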
| [
"[email protected]"
] | |
7d3b0358b02431cb03dbd79b5b445f66d3ffe27c | 8e6e70f3c6e5aed96960a6de8983e72eafae1862 | /wheelcms_valve/tests/conftest.py | 5591a16354335efcbb43f8c77d1a730b58f56e24 | [] | no_license | wheelcms/wheelcms_valve | c7988c2fe89f85e978272ed319e7057553dd24e9 | 8ea19cb8eb0081857b120c0f9bf55c128ee5d471 | refs/heads/master | 2021-01-19T08:01:43.063297 | 2015-04-04T18:28:20 | 2015-04-04T18:28:20 | 9,416,780 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | from twotest.fixtures import client, django_client
from wheelcms_axle.tests.fixtures import root, localtyperegistry, localtemplateregistry
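# importing fixtures into conftest.py makes them available to every test module
# in this package without an explicit import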
| [
"[email protected]"
] | |
04ffa2e8f7455958544e91ecde11a70434d25d2a | e67bd40ae85e09f832773b825bed6c43a6ba1f54 | /pp_oscdriver.py | e21ffc62e00d94178eb3e07cedba1609ffe7df65 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-mit-taylor-variant"
] | permissive | KenT2/pipresents-beep | e00a516041edce05c09893c859096faeded29e90 | 0b1c008097bc1679966ba5fbf8f2ed150bb21fd3 | refs/heads/master | 2023-08-05T03:23:26.881544 | 2023-07-28T15:47:54 | 2023-07-28T15:47:54 | 140,758,841 | 56 | 18 | NOASSERTION | 2021-04-27T08:13:27 | 2018-07-12T19:57:02 | Python | UTF-8 | Python | false | false | 18,392 | py |
"""
added source name to loopback and info request
Heavily modified from the examples here, with thanks:
receiving OSC with pyOSC
https://trac.v2.nl/wiki/pyOSC
example by www.ixi-audio.net based on pyOSC documentation
this is a very basic example, for detailed info on pyOSC functionality check the OSC.py file
or run pydoc pyOSC.py. you can also get the docs by opening a python shell and doing
>>> import OSC
>>> help(OSC)
"""
import os
from pp_utils import Monitor
from pp_oscconfig import OSCConfig
from pp_timeofday import TimeOfDay
import threading
import configparser
import OSC_plus as OSC
import socket
class myOSCServer(OSC.OSCServer):
allow_reuse_address=True
print_tracebacks = True
class OSCDriver(object):
# executed by main program
def init(self,pp_profile,manager_unit,preferred_interface,my_ip,show_command_callback,input_event_callback,animate_callback):
self.pp_profile=pp_profile
self.show_command_callback=show_command_callback
self.input_event_callback=input_event_callback
self.animate_callback=animate_callback
self.mon=Monitor()
config_file=self.pp_profile + os.sep +'pp_io_config'+os.sep+ 'osc.cfg'
if not os.path.exists(config_file):
self.mon.err(self, 'OSC Configuration file not found: '+config_file)
            return 'error','OSC Configuration file not found: '+config_file
self.mon.log(self, 'OSC Configuration file found at: '+config_file)
self.osc_config=OSCConfig()
# reads config data
if self.osc_config.read(config_file) ==False:
return 'error','failed to read osc.cfg'
# unpack config data and initialise
if self.osc_config.this_unit_name =='':
return 'error','OSC Config - This Unit has no name'
if len(self.osc_config.this_unit_name.split())>1:
return 'error','OSC config - This Unit Name not a single word: '+self.osc_config.this_unit_name
self.this_unit_name=self.osc_config.this_unit_name
if self.osc_config.this_unit_ip=='':
self.this_unit_ip=my_ip
else:
self.this_unit_ip=self.osc_config.this_unit_ip
if self.osc_config.slave_enabled == 'yes':
if not self.osc_config.listen_port.isdigit():
                return 'error','OSC Config - Listen port is not a positive number: '+ self.osc_config.listen_port
self.listen_port= self.osc_config.listen_port
if self.osc_config.master_enabled == 'yes':
if not self.osc_config.reply_listen_port.isdigit():
                return 'error','OSC Config - Reply Listen port is not a positive number: '+ self.osc_config.reply_listen_port
self.reply_listen_port= self.osc_config.reply_listen_port
# prepare the list of slaves
status,message=self.parse_slaves()
if status=='error':
return status,message
self.prefix='/pipresents'
self.this_unit='/' + self.this_unit_name
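        # OSC address scheme: /pipresents/<unit-name>/core/<command> for show
        # control and /pipresents/<unit-name>/system/<command> for diagnostics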
self.input_server=None
self.input_reply_client=None
self.input_st=None
self.output_client=None
self.output_reply_server=None
self.output_reply_st=None
if self.osc_config.slave_enabled == 'yes' and self.osc_config.master_enabled == 'yes' and self.listen_port == self.reply_listen_port:
# The two listen ports are the same so use one server for input and output
#start the client that sends commands to the slaves
self.output_client=OSC.OSCClient()
self.mon.log(self, 'sending commands to slaves and replies to master on: '+self.reply_listen_port)
#start the input+output reply server
self.mon.log(self, 'listen to commands and replies from slave units using: ' + self.this_unit_ip+':'+self.reply_listen_port)
self.output_reply_server=myOSCServer((self.this_unit_ip,int(self.reply_listen_port)),self.output_client)
self.add_default_handler(self.output_reply_server)
self.add_input_handlers(self.output_reply_server)
self.add_output_reply_handlers(self.output_reply_server)
self.input_server=self.output_reply_server
else:
if self.osc_config.slave_enabled == 'yes':
# we want this to be a slave to something else
# start the client that sends replies to controlling unit
self.input_reply_client=OSC.OSCClient()
#start the input server
self.mon.log(self, 'listening to commands on: ' + self.this_unit_ip+':'+self.listen_port)
self.input_server=myOSCServer((self.this_unit_ip,int(self.listen_port)),self.input_reply_client)
self.add_default_handler(self.input_server)
self.add_input_handlers(self.input_server)
# print(self.pretty_list(self.input_server.getOSCAddressSpace(),'\n'))
if self.osc_config.master_enabled =='yes':
#we want to control other units
#start the client that sends commands to the slaves
self.output_client=OSC.OSCClient()
self.mon.log(self, 'sending commands to slaves on port: '+self.reply_listen_port)
#start the output reply server
self.mon.log(self, 'listen to replies from slave units using: ' + self.this_unit_ip+':'+self.reply_listen_port)
self.output_reply_server=myOSCServer((self.this_unit_ip,int(self.reply_listen_port)),self.output_client)
self.add_default_handler(self.output_reply_server)
self.add_output_reply_handlers(self.output_reply_server)
return 'normal','osc.cfg read'
def terminate(self):
if self.input_server != None:
self.input_server.close()
if self.output_reply_server != None:
self.output_reply_server.close()
self.mon.log(self, 'Waiting for Server threads to finish')
if self.input_st != None:
self.input_st.join() ##!!!
if self.output_reply_st != None:
self.output_reply_st.join() ##!!!
self.mon.log(self,'server threads closed')
if self.input_reply_client !=None:
self.input_reply_client.close()
if self.output_client !=None:
self.output_client.close()
def start_server(self):
# Start input Server
self.mon.log(self,'Starting input OSCServer')
if self.input_server != None:
self.input_st = threading.Thread( target = self.input_server.serve_forever )
self.input_st.start()
# Start output_reply server
self.mon.log(self,'Starting output reply OSCServer')
if self.output_reply_server != None:
self.output_reply_st = threading.Thread( target = self.output_reply_server.serve_forever )
self.output_reply_st.start()
def parse_slaves(self):
name_list=self.osc_config.slave_units_name.split()
ip_list=self.osc_config.slave_units_ip.split()
if len(name_list)==0:
return 'error','OSC Config - List of slaves name is empty'
if len(name_list) != len(ip_list):
return 'error','OSC Config - Lengths of list of slaves name and slaves IP is different'
self.slave_name_list=[]
self.slave_ip_list=[]
for i, name in enumerate(name_list):
self.slave_name_list.append(name)
self.slave_ip_list.append(ip_list[i])
return 'normal','slaves parsed'
def parse_osc_command(self,fields):
# send message to slave unit - INTERFACE WITH pipresents
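        # fields example (illustrative): ['pi1', 'open', 'myshow']
        #   -> sends /pipresents/pi1/core/open 'myshow' to the unit named pi1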
if len(fields) <2:
return 'error','too few fields in OSC command '+' '.join(fields)
to_unit_name=fields[0]
show_command=fields[1]
# print 'FIELDS ',fields
        # send an arbitrary OSC message
if show_command == 'send':
if len(fields)>2:
osc_address= fields[2]
arg_list=[]
if len(fields)>3:
arg_list=fields[3:]
else:
                return 'error','OSC - wrong number of fields in '+ ' '.join(fields)
elif show_command in ('open','close','openexclusive'):
if len(fields)==3:
osc_address=self.prefix+'/'+ to_unit_name + '/core/'+ show_command
arg_list= [fields[2]]
else:
return 'error','OSC - wrong number of fields in '+ ' '.join(fields)
        elif show_command =='monitor':
            if len(fields)==3 and fields[2] in ('on','off'):
                osc_address=self.prefix+'/'+ to_unit_name + '/core/'+ show_command
                arg_list=[fields[2]]
            else:
                # guard: previously this fell through with osc_address unbound
                return 'error','OSC - illegal state in '+ ' '.join(fields)
        elif show_command =='event':
            if len(fields)==3:
                osc_address=self.prefix+'/'+ to_unit_name + '/core/'+ show_command
                arg_list= [fields[2]]
            else:
                return 'error','OSC - wrong number of fields in '+ ' '.join(fields)
elif show_command == 'animate':
if len(fields)>2:
osc_address=self.prefix+'/'+ to_unit_name + '/core/'+ show_command
arg_list= fields[2:]
else:
                return 'error','OSC - wrong number of fields in '+ ' '.join(fields)
elif show_command in ('closeall','exitpipresents','shutdownnow','reboot'):
if len(fields)==2:
osc_address=self.prefix+'/'+ to_unit_name + '/core/'+ show_command
arg_list= []
else:
                return 'error','OSC - wrong number of fields in '+ ' '.join(fields)
elif show_command in ('loopback','server-info'):
if len(fields)==2:
osc_address=self.prefix+'/'+ to_unit_name + '/system/'+ show_command
arg_list= []
else:
                return 'error','OSC - wrong number of fields in '+ ' '.join(fields)
else:
            return 'error','OSC - unknown command in '+ ' '.join(fields)
ip=self.find_ip(to_unit_name,self.slave_name_list,self.slave_ip_list)
if ip=='':
return 'warn','OSC Unit Name not in the list of slaves: '+ to_unit_name
self.sendto(ip,osc_address,arg_list)
return 'normal','osc command sent'
def find_ip(self,name,name_list,ip_list):
i=0
for j in name_list:
if j == name:
break
i=i+1
if i==len(name_list):
return ''
else:
return ip_list[i]
def sendto(self,ip,osc_address,arg_list):
# print ip,osc_address,arg_list
if self.output_client is None:
self.mon.warn(self,'Master not enabled, ignoring OSC command')
return
msg = OSC.OSCMessage()
# print address
msg.setAddress(osc_address)
for arg in arg_list:
# print arg
msg.append(arg)
try:
self.output_client.sendto(msg,(ip,int(self.reply_listen_port)))
self.mon.log(self,'Sent OSC command: '+osc_address+' '+' '.join(arg_list) + ' to '+ ip +':'+self.reply_listen_port)
except Exception as e:
self.mon.warn(self,'error in client when sending OSC command: '+ str(e))
# **************************************
# Handlers for fallback
# **************************************
def add_default_handler(self,server):
server.addMsgHandler('default', self.no_match_handler)
def no_match_handler(self,addr, tags, stuff, source):
text= "No handler for message from %s" % OSC.getUrlStr(source)+'\n'
text+= " %s" % addr+ self.pretty_list(stuff,'')
self.mon.warn(self,text)
return None
# **************************************
# Handlers for Slave (input)
# **************************************
def add_input_handlers(self,server):
server.addMsgHandler(self.prefix + self.this_unit+"/system/server-info", self.server_info_handler)
server.addMsgHandler(self.prefix + self.this_unit+"/system/loopback", self.loopback_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/open', self.open_show_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/close', self.close_show_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/openexclusive', self.openexclusive_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/closeall', self.closeall_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/exitpipresents', self.exitpipresents_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/shutdownnow', self.shutdownnow_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/reboot', self.reboot_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/event', self.input_event_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/animate', self.animate_handler)
server.addMsgHandler(self.prefix+ self.this_unit+'/core/monitor', self.monitor_handler)
# reply to master unit with name of this unit and commands
def server_info_handler(self,addr, tags, stuff, source):
msg = OSC.OSCMessage(self.prefix+'/system/server-info-reply')
msg.append(self.this_unit_name)
msg.append(self.input_server.getOSCAddressSpace())
self.mon.log(self,'Sent Server Info reply to %s:' % OSC.getUrlStr(source))
return msg
# reply to master unit with a loopback message
def loopback_handler(self,addr, tags, stuff, source):
msg = OSC.OSCMessage(self.prefix+'/system/loopback-reply')
self.mon.log(self,'Sent loopback reply to %s:' % OSC.getUrlStr(source))
return msg
def open_show_handler(self,address, tags, args, source):
self.prepare_show_command_callback('open ',args,1)
def openexclusive_handler(self,address, tags, args, source):
self.prepare_show_command_callback('openexclusive ',args,1)
def close_show_handler(self,address, tags, args, source):
self.prepare_show_command_callback('close ', args,1)
def closeall_handler(self,address, tags, args, source):
self.prepare_show_command_callback('closeall',args,0)
def monitor_handler(self,address, tags, args, source):
self.prepare_show_command_callback('monitor ', args,1)
def exitpipresents_handler(self,address, tags, args, source):
self.prepare_show_command_callback('exitpipresents',args,0)
def reboot_handler(self,address, tags, args, source):
self.prepare_show_command_callback('reboot',args,0)
def shutdownnow_handler(self,address, tags, args, source):
self.prepare_show_command_callback('shutdownnow',args,0)
def prepare_show_command_callback(self,command,args,limit):
if len(args) == limit:
if limit !=0:
self.mon.sched(self,TimeOfDay.now,'Received from OSC: '+ command + ' ' +args[0])
self.show_command_callback(command+args[0])
else:
self.mon.sched(self,TimeOfDay.now,'Received from OSC: '+ command)
self.show_command_callback(command)
else:
            self.mon.warn(self,'OSC show command does not have '+str(limit)+' argument(s) - ignoring')
def input_event_handler(self,address, tags, args, source):
if len(args) == 1:
self.input_event_callback(args[0],'OSC')
else:
self.mon.warn(self,'OSC input event does not have 1 argument - ignoring')
def animate_handler(self,address, tags, args, source):
if len(args) !=0:
# delay symbol,param_type,param_values,req_time as a string
text='0 '
for arg in args:
text= text+ arg + ' '
text = text + '0'
# print(text)
self.animate_callback(text)
else:
self.mon.warn(self,'OSC output event has no arguments - ignoring')
# **************************************
# Handlers for Master- replies from slaves (output)
# **************************************
    # reply handlers do not have the destination unit in the address as they are always sent to the originator
def add_output_reply_handlers(self,server):
server.addMsgHandler(self.prefix+"/system/server-info-reply", self.server_info_reply_handler)
server.addMsgHandler(self.prefix+"/system/loopback-reply", self.loopback_reply_handler)
# print result of info request from slave unit
def server_info_reply_handler(self,addr, tags, stuff, source):
self.mon.log(self,'server info reply from slave '+OSC.getUrlStr(source)+ self.pretty_list(stuff,'\n'))
print('Received reply to Server-Info command from slave: ',OSC.getUrlStr(source), self.pretty_list(stuff,'\n'))
return None
#print result of info request from slave unit
def loopback_reply_handler(self,addr, tags, stuff, source):
self.mon.log(self,'server info reply from slave '+OSC.getUrlStr(source)+ self.pretty_list(stuff,'\n'))
print('Received reply to Loopback command from slave: ' + OSC.getUrlStr(source)+ ' '+ self.pretty_list(stuff,'\n'))
return None
def pretty_list(self,fields, separator):
text=' '
for field in fields:
text += str(field) + separator
return text+'\n'
if __name__ == '__main__':
def pretty_list(fields):
text=' '
for field in fields:
text += str(field) + ' '
return text
def show_command_callback(text):
        print('show control command: '+text)
def input_event_callback(text):
        print('input event: '+ text)
def output_event_callback(args):
        print('animate: ' + pretty_list(args))
    od = OSCDriver()
    # init() signature: (pp_profile, manager_unit, preferred_interface, my_ip,
    # show_command_callback, input_event_callback, animate_callback); the
    # manager_unit/interface/IP values below are illustrative placeholders
    od.init('/home/pi/pipresents', '', '', '127.0.0.1',
            show_command_callback, input_event_callback, output_event_callback)
| [
"[email protected]"
] | |
7a1a17f7d564853eb7d0e2b10e90489f14279b8a | 0c4b33d04cf7fb73b3752b03af89eeaf76b8a0d2 | /第14章-网络编程/1.py | 699170cf14d9fa720ccd1731412ecb2f52e1ece3 | [] | no_license | kingflyfly/python_study | 3b3ab427d23174b61b8f14c223059cfa9f303219 | 8a63a7c11b408bbc11a2b636517beaa424b37725 | refs/heads/master | 2020-06-11T01:39:52.655730 | 2020-03-24T16:09:39 | 2020-03-24T16:09:39 | 193,817,757 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | a,b = {"yanzhe1":"yanzhe","shanshan":"shanshan1"}.items()
print(type(a))  # <class 'tuple'>: dict.items() yields (key, value) pairs
if a == "yanzhe1":  # always False - a is the tuple ("yanzhe1", "yanzhe"), not a string
    print("ok")
else:
    print("fail")
print(a)
print(b) | [
"[email protected]"
] | |
ecb1c8c9b5ecc3253e822b5e31639049244ed39d | 0ad7476f82d662249d13527219c45916cc6fb063 | /bayesian_AB/client_server_practice/server_starter.py | 3687225b896a5015fb7e697403d26b5ee12a7524 | [] | no_license | duilee/ab_testing | 87d40edb0836cd78bf0e75f41947510c3f598316 | d4bb900da4cf84cd28e5fcb8d37a2b6481eb94d8 | refs/heads/master | 2023-07-19T04:46:32.052780 | 2023-07-06T11:30:12 | 2023-07-06T11:30:12 | 331,349,413 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,830 | py | # From the course: Bayesian Machine Learning in Python: A/B Testing
# https://deeplearningcourses.com/c/bayesian-machine-learning-in-python-ab-testing
# https://www.udemy.com/bayesian-machine-learning-in-python-ab-testing
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
import numpy as np
from flask import Flask, jsonify, request
from scipy.stats import beta
# create an app
app = Flask(__name__)
# define bandits
# there's no "pull arm" here
# since that's technically now the user/client
class Bandit:
def __init__(self, name):
self.name = name
self.clks = 0
self.views = 0
def sample(self):
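        # Thompson sampling: draw a CTR estimate from the Beta(1 + clicks,
        # 1 + views - clicks) posterior; the bandit with the higher draw is shown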
a = 1 + self.clks
b = 1 + self.views - self.clks
return np.random.beta(a, b)
def add_clicks(self):
self.clks += 1
def add_view(self):
self.views += 1
if self.views % 50 == 0:
print("%s: clks=%s, views=%s" % (self.name, self.clks, self.views))
# TODO - what else does the Bandit need to do?
# initialize bandits
banditA = Bandit('A')
banditB = Bandit('B')
@app.route('/get_ad')
def get_ad():
sampleA = banditA.sample()
sampleB = banditB.sample()
if sampleA > sampleB:
ad = 'A'
banditA.add_view()
return jsonify({'advertisement_id': ad})
else:
ad = 'B'
banditB.add_view()
return jsonify({'advertisement_id': ad})
@app.route('/click_ad', methods=['POST'])
def click_ad():
result = 'OK'
if request.form['advertisement_id'] == 'A':
# TODO
banditA.add_clicks()
elif request.form['advertisement_id'] == 'B':
# TODO
banditB.add_clicks()
else:
result = 'Invalid Input.'
# nothing to return really
return jsonify({'result': result})
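# Example interaction (illustrative), with the server running locally:
#   curl http://127.0.0.1:8888/get_ad
#   curl -X POST -d "advertisement_id=A" http://127.0.0.1:8888/click_ad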
if __name__ == '__main__':
app.run(host='127.0.0.1', port='8888') | [
"[email protected]"
] | |
ea3803063ed3dd780f0c88023d17df56d5909cf1 | 56be7f6b6a1243c532af9ea98310ccea165a1e66 | /day27/s21crm - 2 - 部门列表/s21crm/urls.py | c0b31e33ed739fdfa064389c6eec0fd0b734ef8e | [] | no_license | 214031230/Python21 | 55b0405ec4ad186b052cde7ebfb3f4bb636a3f30 | d7fc68d3d23345df5bfb09d4a84686c8b49a5ad7 | refs/heads/master | 2021-05-26T06:00:53.393577 | 2019-01-09T02:29:04 | 2019-01-09T02:29:04 | 127,778,172 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | """s21crm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from crm.views import depart
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^depart/list/', depart.depart_list, name='depart_list'),
url(r'^depart/add/', depart.depart_add, name='depart_add'),
]
| [
"[email protected]"
] | |
ef9d0219b3defe3ab0832191018e283c65de037e | c8baca8894b080e612ba2babf766698c737faef2 | /config.py | 54a15867adb3fa3cd41fadc5a6754fcb3d333e0c | [] | no_license | xiaobin1234/AutoTPshop | 8796a690c4d1f3ada3ca23fba65ad3995e54ad6a | 70b2caaa49b3ad4725df2295b75bbc846d67e64d | refs/heads/master | 2021-02-18T01:45:24.479029 | 2020-03-05T12:17:16 | 2020-03-05T12:17:16 | 245,145,850 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66 | py | import os
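# BASE_DIR: absolute path of the directory containing this config.py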
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
| [
"[email protected]"
] | |
83bd32474d5fdd7943df61a2f550898f0e745c32 | 5b9035dbfe0750e9933728f9631ad7a183dd3429 | /17/00/weakref.ref.callback.py | 1e2d84ae969b05e9575767bcd18a17d0192a8745 | [
"CC0-1.0"
] | permissive | pylangstudy/201709 | 271efbd4f337d912d0ca958a621eb2a040091528 | 53d868786d7327a83bfa7f4149549c6f9855a6c6 | refs/heads/master | 2021-01-21T12:16:21.950493 | 2017-09-30T00:02:34 | 2017-09-30T00:02:34 | 102,058,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | import weakref
class ExpensiveObject(object):
def __del__(self): print('Deleting ... %s' % self)
def callback(reference): print('callback(', reference, ')')
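# the callback fires once, when the referent is garbage-collected; after that
# the weak reference is "dead" and r() returns None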
obj = ExpensiveObject()
r = weakref.ref(obj, callback)
print('obj:', obj) # <__main__.ExpensiveObject object at 0xb710adac>
print('ref:', r) # <weakref at 0xb70cb414; to 'ExpensiveObject' at 0xb710adac>
print('r():', r()) # <__main__.ExpensiveObject object at 0xb710adac>
del obj
print('Deleted obj !')
print('r():', r()) #None
print('ref:', r) #<weakref at 0xb70cb414; dead>
try:
    print('obj:', obj)
except NameError as e:
    print('obj:', e) # NameError: name 'obj' is not defined
| [
"[email protected]"
] | |
447309d3a7c5cd11e434e9eebbca587f1745d7b5 | 04f5b7913f5802813ed5b9b894d9723a96893d29 | /xonsh2/prompt/__init__.py | a6524670600a5456eef8be8b014b0213eefb254b | [
"BSD-2-Clause"
] | permissive | anki-code/xonsh2 | b0e52f01119622b383b37a27658c3615507ef6e7 | bd96fcdce9319ab6b90c7d9ac47d2249b61144d0 | refs/heads/master | 2023-01-04T09:19:43.857637 | 2020-11-01T17:10:44 | 2020-11-01T17:10:44 | 309,102,477 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | # amalgamate exclude
import os as _os
if _os.getenv("XONSH_DEBUG", ""):
pass
else:
import sys as _sys
try:
from xonsh2.prompt import __amalgam__
cwd = __amalgam__
_sys.modules["xonsh2.prompt.cwd"] = __amalgam__
env = __amalgam__
_sys.modules["xonsh2.prompt.env"] = __amalgam__
gitstatus = __amalgam__
_sys.modules["xonsh2.prompt.gitstatus"] = __amalgam__
job = __amalgam__
_sys.modules["xonsh2.prompt.job"] = __amalgam__
times = __amalgam__
_sys.modules["xonsh2.prompt.times"] = __amalgam__
vc = __amalgam__
_sys.modules["xonsh2.prompt.vc"] = __amalgam__
base = __amalgam__
_sys.modules["xonsh2.prompt.base"] = __amalgam__
del __amalgam__
except ImportError:
pass
del _sys
del _os
# amalgamate end
| [
"a"
] | a |
938319c0ec9bf23932b135e6b736177504f56448 | 444a9480bce2035565332d4d4654244c0b5cd47b | /research/cv/LearningToSeeInTheDark/src/unet_parts.py | 98222051e28a37247b798b96c9df9180649f7d6c | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] | permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 3,576 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Unet Components"""
import mindspore.nn as nn
import mindspore.ops.operations as F
from mindspore.ops import Maximum
from mindspore.ops import DepthToSpace as dts
from mindspore.common.initializer import TruncatedNormal
from mindspore.common.initializer import XavierUniform
import mindspore as ms
ms.set_seed(1212)
class LRelu(nn.Cell):
""" activation function """
def __init__(self):
super(LRelu, self).__init__()
self.max = Maximum()
def construct(self, x):
""" construct of lrelu activation """
return self.max(x * 0.2, x)
class DoubleConv(nn.Cell):
"""conv2d for two times with lrelu activation"""
def __init__(self, in_channels, out_channels, mid_channels=None):
super(DoubleConv, self).__init__()
if not mid_channels:
mid_channels = out_channels
self.kernel_init = XavierUniform()
self.double_conv = nn.SequentialCell(
[nn.Conv2d(in_channels, mid_channels, kernel_size=3, stride=1, pad_mode="same",
weight_init=self.kernel_init), LRelu(),
nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=1, pad_mode="same",
weight_init=self.kernel_init), LRelu()])
def construct(self, x):
""" construct of double conv2d """
return self.double_conv(x)
class Down(nn.Cell):
"""Downscaling with maxpool then double conv"""
def __init__(self, in_channels, out_channels):
super(Down, self).__init__()
self.maxpool_conv = nn.SequentialCell(
[nn.MaxPool2d(kernel_size=2, stride=2, pad_mode="same"),
DoubleConv(in_channels, out_channels)]
)
def construct(self, x):
""" construct of down cell """
return self.maxpool_conv(x)
class Up(nn.Cell):
"""Upscaling then double conv"""
def __init__(self, in_channels, out_channels):
super(Up, self).__init__()
self.concat = F.Concat(axis=1)
self.kernel_init = TruncatedNormal(0.02)
self.conv = DoubleConv(in_channels, out_channels)
self.up = nn.Conv2dTranspose(in_channels, in_channels // 2, kernel_size=2, stride=2,
pad_mode='same', weight_init=self.kernel_init)
def construct(self, x1, x2):
""" construct of up cell """
x1 = self.up(x1)
x = self.concat((x1, x2))
return self.conv(x)
class OutConv(nn.Cell):
"""trans data into RGB channels"""
def __init__(self, in_channels, out_channels):
super(OutConv, self).__init__()
self.kernel_init = XavierUniform()
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1, pad_mode='same', weight_init=self.kernel_init)
self.DtS = dts(block_size=2)
def construct(self, x):
""" construct of last conv """
x = self.conv(x)
x = self.DtS(x)
return x
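# Minimal shape-check sketch for the cells above (assumptions: NCHW layout and
# illustrative channel sizes; this is not the full model's configuration).
if __name__ == "__main__":
    import numpy as np
    x = ms.Tensor(np.random.randn(1, 32, 64, 64).astype(np.float32))
    y = Down(32, 64)(x)        # (1, 64, 32, 32): maxpool then double conv
    z = Up(64, 32)(y, x)       # (1, 32, 64, 64): upsample, concat skip, conv
    out = OutConv(32, 12)(z)   # (1, 3, 128, 128): 1x1 conv then DepthToSpace
    print(y.shape, z.shape, out.shape)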
| [
"[email protected]"
] | |
4d5abe700c5d48838af29b5be52a1e7aa59eee03 | ba5c4c07d11e6f529ba7184ef3fe1ab0d61a19e6 | /examples/btn_start.py | 6b1c2aec64fb2f2ea08c9cf654e1fd702b12c056 | [] | no_license | slightlynybbled/tkmats | 920033adef46f41b67e99f3e7ba1b13a1c7ff4c5 | 73e03e519287b09436f547a532fbd33c1ce05cca | refs/heads/master | 2020-04-30T17:58:19.036996 | 2020-02-19T03:44:33 | 2020-02-19T03:44:33 | 176,996,416 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,287 | py | import logging
from random import random, choice
from time import sleep
import tkinter as tk
from mats import Test
from mats import TestSequence
from tkmats import TkMatsFrame
# The CommunicationTest class shows the minimum test structure that might
# reasonably be implemented. Only the `execute()` method is implemented.
class CommunicationTest(Test):
def __init__(self, loglevel=logging.INFO):
super().__init__(moniker='communications test',
pass_if=True,
loglevel=loglevel)
# overriding the execute method
def execute(self, is_passing):
        # a normal test would set `test_is_passing` based on real conditions; we
        # use a random value here simply for illustrative purposes
        passing = choice([True] * 3 + [False])
        # should return the result value, which is compared against `pass_if`
        return passing
# The PumpFlowTest implements the `setup' and `teardown` methods as well
# in order to demonstrate what that may look like
class PumpFlowTest(Test):
def __init__(self, loglevel=logging.INFO):
super().__init__(moniker='pump flow test',
min_value=5.6, max_value=6.4,
loglevel=loglevel)
def setup(self, is_passing):
# setting the speed of the pump might be something done in the setup,
# including the wait time to speed up the pump, which we will
# simulate with a 2s sleep
sleep(2.0)
def execute(self, is_passing):
# simulate long-running process, such as
# several flow measurement/averaging cycles
sleep(0.1)
flow = 5.5 + random()
        # should return the measured value, which is checked against min_value/max_value
return flow
def teardown(self, is_passing):
# again, simulating another long-running process...
sleep(0.1)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
# create the sequence of test objects
sequence = [CommunicationTest(), PumpFlowTest()]
ts = TestSequence(sequence=sequence, auto_run=False, loglevel=logging.DEBUG)
window = tk.Tk()
tkate_frame = TkMatsFrame(window, ts, vertical=True)
tkate_frame.grid()
window.mainloop()
| [
"[email protected]"
] | |
ab061fb9bc9e9a17133655c37bf8f7f9b529bc18 | 076dd40fcb9283a8e3d66dd3fa3745826b887378 | /kashgari/embeddings/__init__.py | 066291c4f5f8347689c72990be315e70c02a4db2 | [
"MIT"
] | permissive | gongqingyi-github/Kashgari | 54bb53bb618b9791433a61a7fd5e73f4951873f1 | efc9510ed53f5bb78183e66d96d57a55cc290a91 | refs/heads/master | 2020-04-18T16:51:22.609685 | 2019-01-26T02:26:54 | 2019-01-26T02:26:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py | # encoding: utf-8
"""
@author: BrikerMan
@contact: [email protected]
@blog: https://eliyar.biz
@version: 1.0
@license: Apache Licence
@file: __init__.py.py
@time: 2019-01-19 09:57
"""
from .embeddings import BERTEmbedding
from .embeddings import BaseEmbedding
from .embeddings import CustomEmbedding
from .embeddings import WordEmbeddings
from .embeddings import get_embedding_by_conf
if __name__ == "__main__":
print("Hello world")
| [
"[email protected]"
] | |
98b29e78f354f896f27d9785a107e5bae46cb53a | 8f70ad12af7eba07efa52eb29b8f99ed3900dbb9 | /AGTGA data/AGTGA/posifon/posifon 2/TestSuite/TestSuite/TestCase01.py | 038064c093137ade8a3097f9ca295bbb0ed13f50 | [] | no_license | Georgesarkis/AGTGARowData | 768952dc03dc342bcbe0902bf2fb1720853d0e14 | e1faa7dc820b051a73b0844eac545e597a97da16 | refs/heads/master | 2022-10-01T17:06:04.758751 | 2020-06-05T07:25:41 | 2020-06-05T07:25:41 | 267,772,437 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,717 | py | import time
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from TestSuite.TestSuiteHelper import ElementFinder
port = 'http://localhost:4723/wd/hub'
driver = webdriver.Remote(command_executor=port, desired_capabilities={'automationName' : 'UiAutomator2','deviceName': 'Moto G (5)','platformName': 'Android', 'app': 'C:/Users/ze0396/Desktop/AGTGA/APKS/posifon.apk' , 'autoGrantPermissions' : 'true', 'appWaitActivity' : '*.*','fullreset' : 'false','noReset' : 'true' } )
time.sleep(2)
time.sleep(2)
el = ElementFinder(driver, 312,633)
el.click()
el.send_keys('[email protected]')
time.sleep(2)
el = ElementFinder(driver, 312,810)
el.click()
el.send_keys('Sommar2018')
driver.back()
time.sleep(2)
el = ElementFinder(driver, 216,1479)
el.click()
time.sleep(2)
el = ElementFinder(driver, 777,1017)
el.click()
time.sleep(2)
el = ElementFinder(driver, 177,1339)
el.click()
time.sleep(2)
el = ElementFinder(driver, 731,283)
el.click()
time.sleep(2)
el = ElementFinder(driver, 405,920)
el.click()
time.sleep(2)
el = ElementFinder(driver, 554,115)
el.click()
time.sleep(2)
el = ElementFinder(driver, 39,1441)
el.click()
time.sleep(2)
el = ElementFinder(driver, 39,1581)
el.click()
time.sleep(2)
el = ElementFinder(driver, 0,72)
el.click()
time.sleep(2)
el = ElementFinder(driver, 48,660)
el.click()
time.sleep(2)
el = ElementFinder(driver, 0,72)
el.click()
time.sleep(2)
el = ElementFinder(driver, 0,72)
el.click()
time.sleep(2)
el = ElementFinder(driver, 969,312)
el.click()
time.sleep(2)
el = ElementFinder(driver, 969,940)
el.click()
driver.press_keycode(3)
driver.close_app()
driver.quit()
print('TestCase finished successfully') | [
"[email protected]"
] | |
d7a1221002f13976a612e4408bfb46073ae336ea | 6f170878756cc2ee76ae70ab220201aa5c742f34 | /tf_api/core/box_predictor.py | af0bf1ff054a19b03e0bdf918ec3d44e512dd8b5 | [
"CC-BY-4.0"
] | permissive | abhineet123/animal_detection | 293e4491d574e88aab73a2bd7bea0a05db7f3662 | be0dd60d2b56b267f329b7be71d7f037499f98bc | refs/heads/master | 2023-07-22T18:13:00.506591 | 2021-10-28T01:04:35 | 2021-10-28T01:04:35 | 217,391,182 | 31 | 8 | null | 2023-05-22T22:26:54 | 2019-10-24T20:37:32 | Python | UTF-8 | Python | false | false | 25,358 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Box predictor for object detectors.
Box predictors are classes that take a high level
image feature map as input and produce two predictions,
(1) a tensor encoding box locations, and
(2) a tensor encoding classes for each box.
These components are passed directly to loss functions
in our detection models.
These modules are separated from the main model since the same
few box predictor architectures are shared across many models.
"""
from abc import abstractmethod
import tensorflow as tf
from tf_api.utils import ops
from tf_api.utils import shape_utils
from tf_api.utils import static_shape
slim = tf.contrib.slim
BOX_ENCODINGS = 'box_encodings'
CLASS_PREDICTIONS_WITH_BACKGROUND = 'class_predictions_with_background'
MASK_PREDICTIONS = 'mask_predictions'
class BoxPredictor(object):
"""BoxPredictor."""
def __init__(self, is_training, num_classes):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
"""
self._is_training = is_training
self._num_classes = num_classes
@property
def num_classes(self):
return self._num_classes
def predict(self, image_features, num_predictions_per_location, scope,
**params):
"""Computes encoded object locations and corresponding confidences.
Takes a high level image feature map as input and produce two predictions,
(1) a tensor encoding box locations, and
(2) a tensor encoding class scores for each corresponding box.
In this interface, we only assume that two tensors are returned as output
and do not assume anything about their shapes.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
scope: Variable and Op scope name.
**params: Additional keyword arguments for specific implementations of
BoxPredictor.
Returns:
A dictionary containing at least the following tensors.
box_encodings: A float tensor of shape
[batch_size, num_anchors, q, code_size] representing the location of
the objects, where q is 1 or the number of classes.
class_predictions_with_background: A float tensor of shape
[batch_size, num_anchors, num_classes + 1] representing the class
predictions for the proposals.
"""
with tf.variable_scope(scope):
return self._predict(image_features, num_predictions_per_location,
**params)
# TODO: num_predictions_per_location could be moved to constructor.
# This is currently only used by ConvolutionalBoxPredictor.
@abstractmethod
def _predict(self, image_features, num_predictions_per_location, **params):
"""Implementations must override this method.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
**params: Additional keyword arguments for specific implementations of
BoxPredictor.
Returns:
A dictionary containing at least the following tensors.
box_encodings: A float tensor of shape
[batch_size, num_anchors, q, code_size] representing the location of
the objects, where q is 1 or the number of classes.
class_predictions_with_background: A float tensor of shape
[batch_size, num_anchors, num_classes + 1] representing the class
predictions for the proposals.
"""
pass
class RfcnBoxPredictor(BoxPredictor):
"""RFCN Box Predictor.
  Applies a position sensitive ROI pooling on position sensitive feature maps to
predict classes and refined locations. See https://arxiv.org/abs/1605.06409
for details.
This is used for the second stage of the RFCN meta architecture. Notice that
locations are *not* shared across classes, thus for each anchor, a separate
prediction is made for each class.
"""
def __init__(self,
is_training,
num_classes,
conv_hyperparams,
num_spatial_bins,
depth,
crop_size,
box_code_size):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
      conv_hyperparams: Slim arg_scope with hyperparameters for convolutional
layers.
num_spatial_bins: A list of two integers `[spatial_bins_y,
spatial_bins_x]`.
depth: Target depth to reduce the input feature maps to.
crop_size: A list of two integers `[crop_height, crop_width]`.
box_code_size: Size of encoding for each box.
"""
super(RfcnBoxPredictor, self).__init__(is_training, num_classes)
self._conv_hyperparams = conv_hyperparams
self._num_spatial_bins = num_spatial_bins
self._depth = depth
self._crop_size = crop_size
self._box_code_size = box_code_size
@property
def num_classes(self):
return self._num_classes
def _predict(self, image_features, num_predictions_per_location,
proposal_boxes):
"""Computes encoded object locations and corresponding confidences.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
Currently, this must be set to 1, or an error will be raised.
proposal_boxes: A float tensor of shape [batch_size, num_proposals,
box_code_size].
Returns:
box_encodings: A float tensor of shape
[batch_size, 1, num_classes, code_size] representing the
location of the objects.
class_predictions_with_background: A float tensor of shape
[batch_size, 1, num_classes + 1] representing the class
predictions for the proposals.
Raises:
ValueError: if num_predictions_per_location is not 1.
"""
if num_predictions_per_location != 1:
raise ValueError('Currently RfcnBoxPredictor only supports '
'predicting a single box per class per location.')
batch_size = tf.shape(proposal_boxes)[0]
num_boxes = tf.shape(proposal_boxes)[1]
def get_box_indices(proposals):
proposals_shape = proposals.get_shape().as_list()
if any(dim is None for dim in proposals_shape):
proposals_shape = tf.shape(proposals)
ones_mat = tf.ones(proposals_shape[:2], dtype=tf.int32)
multiplier = tf.expand_dims(
tf.range(start=0, limit=proposals_shape[0]), 1)
return tf.reshape(ones_mat * multiplier, [-1])
net = image_features
with slim.arg_scope(self._conv_hyperparams):
net = slim.conv2d(net, self._depth, [1, 1], scope='reduce_depth')
# Location predictions.
location_feature_map_depth = (self._num_spatial_bins[0] *
self._num_spatial_bins[1] *
self.num_classes *
self._box_code_size)
location_feature_map = slim.conv2d(net, location_feature_map_depth,
[1, 1], activation_fn=None,
scope='refined_locations')
box_encodings = ops.position_sensitive_crop_regions(
location_feature_map,
boxes=tf.reshape(proposal_boxes, [-1, self._box_code_size]),
box_ind=get_box_indices(proposal_boxes),
crop_size=self._crop_size,
num_spatial_bins=self._num_spatial_bins,
global_pool=True)
box_encodings = tf.squeeze(box_encodings, squeeze_dims=[1, 2])
box_encodings = tf.reshape(box_encodings,
[batch_size * num_boxes, 1, self.num_classes,
self._box_code_size])
# Class predictions.
total_classes = self.num_classes + 1 # Account for background class.
class_feature_map_depth = (self._num_spatial_bins[0] *
self._num_spatial_bins[1] *
total_classes)
class_feature_map = slim.conv2d(net, class_feature_map_depth, [1, 1],
activation_fn=None,
scope='class_predictions')
class_predictions_with_background = ops.position_sensitive_crop_regions(
class_feature_map,
boxes=tf.reshape(proposal_boxes, [-1, self._box_code_size]),
box_ind=get_box_indices(proposal_boxes),
crop_size=self._crop_size,
num_spatial_bins=self._num_spatial_bins,
global_pool=True)
class_predictions_with_background = tf.squeeze(
class_predictions_with_background, squeeze_dims=[1, 2])
class_predictions_with_background = tf.reshape(
class_predictions_with_background,
[batch_size * num_boxes, 1, total_classes])
return {BOX_ENCODINGS: box_encodings,
CLASS_PREDICTIONS_WITH_BACKGROUND:
class_predictions_with_background}
class MaskRCNNBoxPredictor(BoxPredictor):
"""Mask R-CNN Box Predictor.
See Mask R-CNN: He, K., Gkioxari, G., Dollar, P., & Girshick, R. (2017).
Mask R-CNN. arXiv preprint arXiv:1703.06870.
This is used for the second stage of the Mask R-CNN detector where proposals
cropped from an image are arranged along the batch dimension of the input
image_features tensor. Notice that locations are *not* shared across classes,
thus for each anchor, a separate prediction is made for each class.
In addition to predicting boxes and classes, optionally this class allows
predicting masks and/or keypoints inside detection boxes.
Currently this box predictor makes per-class predictions; that is, each
anchor makes a separate box prediction for each class.
"""
def __init__(self,
is_training,
num_classes,
fc_hyperparams,
use_dropout,
dropout_keep_prob,
box_code_size,
conv_hyperparams=None,
predict_instance_masks=False,
mask_height=14,
mask_width=14,
mask_prediction_conv_depth=256,
predict_keypoints=False):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
fc_hyperparams: Slim arg_scope with hyperparameters for fully
connected ops.
use_dropout: Option to use dropout or not. Note that a single dropout
op is applied here prior to both box and class predictions, which stands
in contrast to the ConvolutionalBoxPredictor below.
dropout_keep_prob: Keep probability for dropout.
This is only used if use_dropout is True.
box_code_size: Size of encoding for each box.
conv_hyperparams: Slim arg_scope with hyperparameters for convolution
ops.
predict_instance_masks: Whether to predict object masks inside detection
boxes.
mask_height: Desired output mask height. The default value is 14.
mask_width: Desired output mask width. The default value is 14.
mask_prediction_conv_depth: The depth for the first conv2d_transpose op
        applied to the image_features in the mask prediction branch.
      predict_keypoints: Whether to predict keypoints inside detection boxes.
Raises:
ValueError: If predict_instance_masks or predict_keypoints is true.
"""
super(MaskRCNNBoxPredictor, self).__init__(is_training, num_classes)
self._fc_hyperparams = fc_hyperparams
self._use_dropout = use_dropout
self._box_code_size = box_code_size
self._dropout_keep_prob = dropout_keep_prob
self._conv_hyperparams = conv_hyperparams
self._predict_instance_masks = predict_instance_masks
self._mask_height = mask_height
self._mask_width = mask_width
self._mask_prediction_conv_depth = mask_prediction_conv_depth
self._predict_keypoints = predict_keypoints
if self._predict_keypoints:
raise ValueError('Keypoint prediction is unimplemented.')
if ((self._predict_instance_masks or self._predict_keypoints) and
self._conv_hyperparams is None):
raise ValueError('`conv_hyperparams` must be provided when predicting '
'masks.')
@property
def num_classes(self):
return self._num_classes
def _predict(self, image_features, num_predictions_per_location):
"""Computes encoded object locations and corresponding confidences.
Flattens image_features and applies fully connected ops (with no
non-linearity) to predict box encodings and class predictions. In this
setting, anchors are not spatially arranged in any way and are assumed to
have been folded into the batch dimension. Thus we output 1 for the
anchors dimension.
Also optionally predicts instance masks.
The mask prediction head is based on the Mask RCNN paper with the following
modifications: We replace the deconvolution layer with a bilinear resize
and a convolution.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
Currently, this must be set to 1, or an error will be raised.
Returns:
A dictionary containing the following tensors.
box_encodings: A float tensor of shape
[batch_size, 1, num_classes, code_size] representing the
location of the objects.
class_predictions_with_background: A float tensor of shape
[batch_size, 1, num_classes + 1] representing the class
predictions for the proposals.
If predict_masks is True the dictionary also contains:
instance_masks: A float tensor of shape
[batch_size, 1, num_classes, image_height, image_width]
If predict_keypoints is True the dictionary also contains:
keypoints: [batch_size, 1, num_keypoints, 2]
Raises:
ValueError: if num_predictions_per_location is not 1.
"""
if num_predictions_per_location != 1:
raise ValueError('Currently FullyConnectedBoxPredictor only supports '
'predicting a single box per class per location.')
spatial_averaged_image_features = tf.reduce_mean(image_features, [1, 2],
keep_dims=True,
name='AvgPool')
flattened_image_features = slim.flatten(spatial_averaged_image_features)
if self._use_dropout:
flattened_image_features = slim.dropout(flattened_image_features,
keep_prob=self._dropout_keep_prob,
is_training=self._is_training)
with slim.arg_scope(self._fc_hyperparams):
box_encodings = slim.fully_connected(
flattened_image_features,
self._num_classes * self._box_code_size,
activation_fn=None,
scope='BoxEncodingPredictor')
class_predictions_with_background = slim.fully_connected(
flattened_image_features,
self._num_classes + 1,
activation_fn=None,
scope='ClassPredictor')
box_encodings = tf.reshape(
box_encodings, [-1, 1, self._num_classes, self._box_code_size])
class_predictions_with_background = tf.reshape(
class_predictions_with_background, [-1, 1, self._num_classes + 1])
predictions_dict = {
BOX_ENCODINGS: box_encodings,
CLASS_PREDICTIONS_WITH_BACKGROUND: class_predictions_with_background
}
if self._predict_instance_masks:
with slim.arg_scope(self._conv_hyperparams):
upsampled_features = tf.image.resize_bilinear(
image_features,
[self._mask_height, self._mask_width],
align_corners=True)
upsampled_features = slim.conv2d(
upsampled_features,
num_outputs=self._mask_prediction_conv_depth,
kernel_size=[2, 2])
mask_predictions = slim.conv2d(upsampled_features,
num_outputs=self.num_classes,
activation_fn=None,
kernel_size=[3, 3])
instance_masks = tf.expand_dims(tf.transpose(mask_predictions,
perm=[0, 3, 1, 2]),
axis=1,
name='MaskPredictor')
predictions_dict[MASK_PREDICTIONS] = instance_masks
return predictions_dict
class ConvolutionalBoxPredictor(BoxPredictor):
"""Convolutional Box Predictor.
Optionally add an intermediate 1x1 convolutional layer after features and
predict in parallel branches box_encodings and
class_predictions_with_background.
Currently this box predictor assumes that predictions are "shared" across
classes --- that is each anchor makes box predictions which do not depend
on class.
"""
def __init__(self,
is_training,
num_classes,
conv_hyperparams,
min_depth,
max_depth,
num_layers_before_predictor,
use_dropout,
dropout_keep_prob,
kernel_size,
box_code_size,
apply_sigmoid_to_scores=False,
class_prediction_bias_init=0.0):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
conv_hyperparams: Slim arg_scope with hyperparameters for convolution ops.
      min_depth: Minimum feature depth prior to predicting box encodings
and class predictions.
max_depth: Maximum feature depth prior to predicting box encodings
and class predictions. If max_depth is set to 0, no additional
feature map will be inserted before location and class predictions.
num_layers_before_predictor: Number of the additional conv layers before
the predictor.
use_dropout: Option to use dropout for class prediction or not.
dropout_keep_prob: Keep probability for dropout.
This is only used if use_dropout is True.
kernel_size: Size of final convolution kernel. If the
spatial resolution of the feature map is smaller than the kernel size,
then the kernel size is automatically set to be
min(feature_width, feature_height).
box_code_size: Size of encoding for each box.
apply_sigmoid_to_scores: if True, apply the sigmoid on the output
class_predictions.
class_prediction_bias_init: constant value to initialize bias of the last
conv2d layer before class prediction.
Raises:
ValueError: if min_depth > max_depth.
"""
super(ConvolutionalBoxPredictor, self).__init__(is_training, num_classes)
if min_depth > max_depth:
raise ValueError('min_depth should be less than or equal to max_depth')
self._conv_hyperparams = conv_hyperparams
self._min_depth = min_depth
self._max_depth = max_depth
self._num_layers_before_predictor = num_layers_before_predictor
self._use_dropout = use_dropout
self._kernel_size = kernel_size
self._box_code_size = box_code_size
self._dropout_keep_prob = dropout_keep_prob
self._apply_sigmoid_to_scores = apply_sigmoid_to_scores
self._class_prediction_bias_init = class_prediction_bias_init
def _predict(self, image_features, num_predictions_per_location):
"""Computes encoded object locations and corresponding confidences.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
Returns:
A dictionary containing the following tensors.
box_encodings: A float tensor of shape [batch_size, num_anchors, 1,
code_size] representing the location of the objects, where
num_anchors = feat_height * feat_width * num_predictions_per_location
class_predictions_with_background: A float tensor of shape
[batch_size, num_anchors, num_classes + 1] representing the class
predictions for the proposals.
"""
# Add a slot for the background class.
num_class_slots = self.num_classes + 1
net = image_features
with slim.arg_scope(self._conv_hyperparams), \
slim.arg_scope([slim.dropout], is_training=self._is_training):
# Add additional conv layers before the class predictor.
features_depth = static_shape.get_depth(image_features.get_shape())
depth = max(min(features_depth, self._max_depth), self._min_depth)
tf.logging.info('depth of additional conv before box predictor: {}'.
format(depth))
if depth > 0 and self._num_layers_before_predictor > 0:
for i in range(self._num_layers_before_predictor):
net = slim.conv2d(
net, depth, [1, 1], scope='Conv2d_%d_1x1_%d' % (i, depth))
with slim.arg_scope([slim.conv2d], activation_fn=None,
normalizer_fn=None, normalizer_params=None):
box_encodings = slim.conv2d(
net, num_predictions_per_location * self._box_code_size,
[self._kernel_size, self._kernel_size],
scope='BoxEncodingPredictor')
if self._use_dropout:
net = slim.dropout(net, keep_prob=self._dropout_keep_prob)
class_predictions_with_background = slim.conv2d(
net, num_predictions_per_location * num_class_slots,
[self._kernel_size, self._kernel_size], scope='ClassPredictor',
biases_initializer=tf.constant_initializer(
self._class_prediction_bias_init))
if self._apply_sigmoid_to_scores:
class_predictions_with_background = tf.sigmoid(
class_predictions_with_background)
combined_feature_map_shape = shape_utils.combined_static_and_dynamic_shape(
image_features)
box_encodings = tf.reshape(
box_encodings, tf.stack([combined_feature_map_shape[0],
combined_feature_map_shape[1] *
combined_feature_map_shape[2] *
num_predictions_per_location,
1, self._box_code_size]))
class_predictions_with_background = tf.reshape(
class_predictions_with_background,
tf.stack([combined_feature_map_shape[0],
combined_feature_map_shape[1] *
combined_feature_map_shape[2] *
num_predictions_per_location,
num_class_slots]))
return {BOX_ENCODINGS: box_encodings,
CLASS_PREDICTIONS_WITH_BACKGROUND:
class_predictions_with_background}
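# Minimal construction sketch (assumptions: TF 1.x with tf.contrib.slim
# available; the hyperparameter scope below is an illustrative stand-in for
# the arg_scope normally produced by a hyperparams builder, and all sizes are
# made up for the example).
def _example_conv_hyperparams():
  with slim.arg_scope([slim.conv2d],
                      activation_fn=tf.nn.relu6,
                      weights_regularizer=slim.l2_regularizer(4e-5)) as sc:
    return sc


def _example_convolutional_predict():
  predictor = ConvolutionalBoxPredictor(
      is_training=False, num_classes=2,
      conv_hyperparams=_example_conv_hyperparams(),
      min_depth=0, max_depth=0, num_layers_before_predictor=0,
      use_dropout=False, dropout_keep_prob=0.8,
      kernel_size=1, box_code_size=4)
  image_features = tf.placeholder(tf.float32, [1, 19, 19, 64])
  return predictor.predict(image_features,
                           num_predictions_per_location=3,
                           scope='BoxPredictor')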
| [
"[email protected]"
] | |
2b7e4631b4f29246d007524d058aaac3d67a8629 | 4c91879e3bb3ef24cd4d1d2c79eedecc7030c2e8 | /python/191_number_of_1_bits.py | 131d0670c4972e14a4560d586b35fe92f7a62424 | [
"MIT"
] | permissive | PepSalehi/leetcode-soln | a47d827c2973ad06d22d0b8b83f2fadfb5b283d1 | cbf2db0d81d5ef98f48c8d1df486559f89142bfd | refs/heads/master | 2020-09-15T14:14:11.422791 | 2018-04-19T15:27:38 | 2018-04-19T15:27:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | """
Write a function that takes an unsigned integer and returns the number of '1'
bits it has (also known as the Hamming weight).
For example, the 32-bit integer '11' has binary representation
00000000000000000000000000001011, so the function should return 3.
Credits:
Special thanks to @ts for adding this problem and creating all test cases.
"""
class Solution(object):
def hammingWeight(self, n):
"""
:type n: int
:rtype: int
"""
        sum1 = 0
        while n > 0:
            sum1 += n % 2
            n //= 2
        return sum1
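# Alternative using Kernighan's bit trick: n & (n - 1) clears the lowest set
# bit, so the loop runs once per 1-bit instead of once per bit position
# (illustrative sketch, not part of the original file):
#
#     class Solution(object):
#         def hammingWeight(self, n):
#             count = 0
#             while n:
#                 n &= n - 1
#                 count += 1
#             return count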
| [
"[email protected]"
] | |
4b25a66c3105bc9c257188f19dd63ec0e14d457a | af7050b659e48a979809a705066baf7cd1a84255 | /350_intersection-of-two-arrays-ii.py | 72c4b527f143261a9739c1272e905024c6f6b4b7 | [] | no_license | zyk930/leetcode | b9547cbbeaf0202c2bb3e1a22d30f1ecddd4244e | 27c9da844550080c41fae60906274347f9e62919 | refs/heads/master | 2020-04-10T15:13:45.886717 | 2019-06-04T01:37:10 | 2019-06-04T01:37:10 | 161,101,906 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,302 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/12/21 10:10
# @Author : zyk
'''
Given two arrays, write a function to compute their intersection.
Example 1:
Input: nums1 = [1,2,2,1], nums2 = [2,2]
Output: [2,2]
Example 2:
Input: nums1 = [4,9,5], nums2 = [9,4,9,8,4]
Output: [4,9]
Note:
Each element in the result should appear as many times as it occurs in both arrays.
The result can be returned in any order.
Follow up:
What if the given arrays are already sorted? How would you optimize your algorithm?
What if nums1's size is much smaller than nums2's? Which method is better?
What if elements of nums2 are stored on disk, memory is limited, and you cannot load all elements into memory at once?
'''
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
ans = []
nums1.sort()
nums2.sort()
i = j = 0
while i < len(nums1) and j < len(nums2):
if nums1[i] < nums2[j]:
i += 1
elif nums1[i] > nums2[j]:
j += 1
else:
ans.append(nums1[i])
i += 1
j += 1
        return ans
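# Hash-map alternative for the follow-ups above (count the smaller array, then
# stream the larger one); illustrative sketch, not part of the original file:
#
#     from collections import Counter
#
#     def intersect(nums1, nums2):
#         if len(nums1) > len(nums2):
#             nums1, nums2 = nums2, nums1
#         counts = Counter(nums1)
#         ans = []
#         for x in nums2:
#             if counts[x] > 0:
#                 counts[x] -= 1
#                 ans.append(x)
#         return ans
 | [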
"[email protected]"
] | |
0c8c29ae00b620614383c24a6b9db48d3c488117 | 27a8692c6bed25bd92868a519d95e9570ea204cd | /bot/handlers/stickers/remove.py | d477b5721bf292f79c74e4fc29fa806ac467e504 | [
"MIT"
] | permissive | metti61680/sticker-thief | f3d7fc49a3337eaead84d4d682fac98cd367fc5a | 3006b8367f8e09aab1e60428338021b95e5d0b13 | refs/heads/master | 2020-12-14T02:23:41.684888 | 2019-12-04T11:27:28 | 2019-12-04T11:27:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,203 | py | import logging
# noinspection PyPackageRequirements
from telegram.ext import (
CommandHandler,
MessageHandler,
ConversationHandler,
CallbackContext,
Filters
)
# noinspection PyPackageRequirements
from telegram import ChatAction, Update
from bot import stickersbot
from bot.strings import Strings
from bot.sticker import StickerFile
import bot.sticker.error as error
from ..fallback_commands import cancel_command
from ...utils import decorators
from ...utils import utils
logger = logging.getLogger(__name__)
WAITING_STICKERS = range(1)
@decorators.action(ChatAction.TYPING)
@decorators.restricted
@decorators.failwithmessage
def on_remove_command(update: Update, _):
logger.info('/remove')
update.message.reply_text(Strings.REMOVE_STICKER_SELECT_STICKER)
return WAITING_STICKERS
@decorators.action(ChatAction.TYPING)
@decorators.failwithmessage
def on_sticker_receive(update: Update, context: CallbackContext):
    logger.info('user sent the sticker to remove')
sticker = StickerFile(update.message.sticker)
pack_link = utils.name2link(update.message.sticker.set_name)
try:
sticker.remove_from_set(context.bot)
except error.PackInvalid:
update.message.reply_html(Strings.REMOVE_STICKER_FOREIGN_PACK.format(pack_link), quote=True)
except error.PackNotModified:
update.message.reply_html(Strings.REMOVE_STICKER_ALREADY_DELETED.format(pack_link), quote=True)
except error.UnknwonError as e:
update.message.reply_html(Strings.REMOVE_STICKER_GENERIC_ERROR.format(pack_link, e.message), quote=True)
else:
# success
update.message.reply_html(Strings.REMOVE_STICKER_SUCCESS.format(pack_link), quote=True)
finally:
# wait for other stickers
return WAITING_STICKERS
stickersbot.add_handler(ConversationHandler(
name='adding_stickers',
entry_points=[CommandHandler(['remove', 'rem', 'r'], on_remove_command)],
states={
WAITING_STICKERS: [MessageHandler(
Filters.sticker | Filters.document.category('image/png'),
on_sticker_receive
)]
},
fallbacks=[CommandHandler(['cancel', 'c', 'done', 'd'], cancel_command)]
))
| [
"[email protected]"
] | |
672321ebf82e31c14af3274cd5b61a7650715780 | baf3736092f9aecf79fc717b6d5efc19c5ac3ba9 | /ArticleSpider_splash/pipelines.py | 74a77623ba5d113d94d8f288a8a4a0d1124ad2ac | [] | no_license | tang1323/ArticleSpider_splash | 6215224cd5c36a243b950590faf7da2671c4014a | 992e50f93ba705ffbf3d8282f0c47cd3c9f638f2 | refs/heads/master | 2023-04-08T13:07:26.526856 | 2021-04-15T01:55:13 | 2021-04-15T01:55:13 | 358,095,518 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,016 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import codecs  # spares us tedious encoding-handling work
import json
from scrapy.pipelines.images import ImagesPipeline
from scrapy.exporters import JsonItemExporter
from twisted.enterprise import adbapi  # lets the MySQLdb operations run asynchronously
import MySQLdb
import MySQLdb.cursors
# from models.es_types import ArticleType
# from w3lib.html import remove_tags
class ArticlespiderPipeline(object):
def process_item(self, item, spider):
return item
# pipeline class that saves items to a json file
class JsonWithEncodingPipeline(object):# saves the data locally
    # custom json file export
    def __init__(self):
        self.file = codecs.open('article.json', 'w', encoding = "utf-8")
    def process_item(self, item, spider):# process_item must use exactly this name and signature
        lines = json.dumps(dict(item), ensure_ascii= False) + "\n"# convert the item to a dict and serialize it
        self.file.write(lines)# write the serialized line to the file
        return item
    def spider_closed(self, spider):# close the file; spider_closed must use exactly this name and signature
        self.file.close()
class MysqlPipeline(object):
    # writes to mysql synchronously; fine for small volumes, but for large volumes use the twisted framework instead -- MysqlTwistedPipline below is the standard way to do that
def __init__(self):
self.conn = MySQLdb.connect('localhost', 'tangming', '130796', 'article_spider', charset="utf8",use_unicode = True)
        self.cursor = self.conn.cursor()# database statements are executed through a cursor
    def process_item(self, item, spider):# builds and runs the mysql insert statement
insert_sql = """
insert into cnblogs_article(title, url, url_object_id, front_image_url, front_image_path, praise_nums, comment_nums, tags, content, create_date, fav_nums)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)ON DUPLICATE KEY UPDATE fav_nums = VALUES(fav_nums)
"""
params = list()
params.append(item.get("title", ""))
params.append(item.get("url", ""))
params.append(item.get("url_object_id", ""))
        front_image = ",".join(item.get("front_image_url", []))# this is the only field still stored as a list, so join it into a string here, right before it goes into the database
params.append(front_image)
params.append(item.get("front_image_path", ""))
params.append(item.get("praise_nums", 0))# 没有数据则为0
params.append(item.get("comment_nums", 0))# 没有数据则为0
params.append(item.get("tags", ""))
params.append(item.get("content", []))
params.append(item.get("create_date", "1970-07-01"))
params.append(item.get("fav_nums", 0))# 没有数据则为0
self.cursor.execute(insert_sql,tuple(params))# 同步执行,如果不执行完这步就不会执行完下一步
self.conn.commit()# 同步执行,如果不执行完这步就不会执行完下一步
# 那么就需要一种异步执行
return item
# this is the asynchronous database insert
class MysqlTwistedPipline(object):
    def __init__(self, dbpool):# receives the connection pool
self.dbpool = dbpool
@classmethod
def from_settings(cls, settings):
# from MySQLdb.cursors import DictCursor
dbparms = dict(
host = settings["MYSQL_HOST"],
db = settings["MYSQL_DBNAME"],
user = settings["MYSQL_USER"],
password = settings["MYSQL_PASSWORD"],
charset ='utf8',
cursorclass = MySQLdb.cursors.DictCursor,
use_unicode = True
)
        dbpool = adbapi.ConnectionPool("MySQLdb", **dbparms)# ConnectionPool is a connection pool -- the key piece here
return cls(dbpool)
    def process_item(self, item, spider):# builds and runs the mysql statement
        # use twisted to turn the mysql insert into an asynchronous operation
        query = self.dbpool.runInteraction(self.do_insert, item)   # dbpool is the connection-pool container
        query.addErrback(self.handle_error, item, spider)   # handle exceptions; handle_error is a method we define ourselves, and item/spider are passed along so the handler can report whatever we want
        return item
    def handle_error(self,failure, item, spider):   # failure is supplied by twisted
        # handle exceptions from the asynchronous insert
        print(failure)  # this line matters: it is the main debugging entry point whenever inserting scraped data into the database raises an exception
    def do_insert(self, cursor, item):# this cursor is passed in by adbapi itself
        # perform the actual insert
        # build a different sql statement per item type and insert it into mysql
# insert_sql = """
# insert into cnblogs_article(title, url, url_object_id, front_image_url, front_image_path, praise_nums, comment_nums, tags, content, create_date, fav_nums)
# VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)ON DUPLICATE KEY UPDATE create_date = VALUES(create_date)
# """
# params = list()
# params.append(item.get("title", ""))
# params.append(item.get("url", ""))
# params.append(item.get("url_object_id", ""))
# front_image = ",".join(item.get("front_image_url", []))
# params.append(front_image)
# params.append(item.get("front_image_path", ""))
# params.append(item.get("praise_nums", 0)) # 没有数据则为0
# params.append(item.get("comment_nums", 0)) # 没有数据则为0
# params.append(item.get("tags", ""))
# params.append(item.get("content", []))
# params.append(item.get("create_date", "1970-07-01"))
# params.append(item.get("fav_nums", 0)) # 没有数据则为0
# cursor.execute(insert_sql, tuple(params))
insert_sql, params = item.get_insert_sql()
cursor.execute(insert_sql, params)
class JsonExporterPipleline(object):
    # use scrapy's built-in json exporter to write items to a json file
def __init__(self):
self.file = codecs.open('articleexport.json', 'wb')
self.exporter = JsonItemExporter(self.file, encoding = "utf-8", ensure_ascii= False)
self.exporter.start_exporting()
def spider_closed(self, spider):
self.exporter.finish_exporting()
self.file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
# handles the cover image only
class ArticleImagePipeline(ImagesPipeline):
def item_completed(self, results, item, info):
        # skip the block below when the item has no cover image (zhihu items, for example, have none)
if "front_image_url" in item:
image_file_path = ""
for ok, value in results:
image_file_path = value["path"]
item["front_image_path"] = image_file_path
return item
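# Registration sketch for settings.py (pipeline order numbers are
# illustrative; enable only the pipelines this project actually uses):
#
#     ITEM_PIPELINES = {
#         'ArticleSpider_splash.pipelines.ArticleImagePipeline': 1,
#         'ArticleSpider_splash.pipelines.MysqlTwistedPipline': 2,
#     }
#     IMAGES_URLS_FIELD = 'front_image_url'
#     IMAGES_STORE = 'images'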
| [
"[email protected]"
] | |
d0db4dd53fc928a47070993d812e526112a25bb3 | 4a0f8c5c0e8324fa614da776f2a704b5c369ccbb | /Contact_maps/dealFile.py | 5816c559c07cbd917443fb8e7f1b4a6c81f4de53 | [] | no_license | magic2du/contact_matrix | 9f8ae868d71e7e5c8088bf22a9407ea3eb073be6 | 957e2ead76fabc0299e36c1435162edd574f4fd5 | refs/heads/master | 2021-01-18T21:15:07.341341 | 2015-09-16T02:14:53 | 2015-09-16T02:14:53 | 24,237,641 | 0 | 0 | null | 2015-09-10T19:58:24 | 2014-09-19T16:48:37 | null | UTF-8 | Python | false | false | 2,197 | py | import os
def readListFile(filename):#read file:filename lines into list
data_file = open(filename)
data = []
for line in data_file.readlines():
line = line.strip()
#print line
data.append(line)
data_file.close()
print "number of lines in %s:" %filename +str(len(data))
return data
def readGrepFile(filename):# read a grepped ddi log file into a list, e.g. SUCCESS_log_file
data_file = open(filename)
data = []
for line in data_file.readlines():
temp1 = line.split(" ")
line = temp1[3].split(",")
line=line[0]
line=line.strip()
data.append(line)
''' print(line)
line = line.split(" ")
t1 = line[1].split(":")
t2 = line[2].split(":")
tmp = [float(t1[1]), float(t2[1]), int(line[0])]
data.append(tmp)
print tmp'''
print "number of lines in %s:" %filename +str(len(data))
data_file.close()
return data
def writeListFile(filename,lst):# write lst into filename.
data_file = open(filename,'w')
for item in lst:
data_file.write("%s\n" % item)
print "number of lines wrote in %s:" %filename +str(len(lst))
data_file.close()
def dealLogFile(filename,listfile):
    # grep the log file (filename) into ERROR and SUCCESS files; collect the unfinished ddis from the todo-list file (listfile) and write them into NotFinished_log
sh='grep ERROR: '+filename+'>ERROR_'+filename
sh2='grep SUCCESS: '+filename+'>SUCCESS_'+filename
os.system(sh)
os.system(sh2)
List1=readListFile(listfile)
List2=readGrepFile('SUCCESS_'+filename)
List3=readGrepFile('ERROR_'+filename)
List4=list(set(List1)-set(List2)-set(List3))
writeListFile('NotFinished_'+filename,List4)
def grepLogFile(filename):
#grep log file(filename) to ERROR and SUCCESS file:
sh='grep ERROR: '+filename+'>ERROR_'+filename
sh2='grep SUCCESS: '+filename+'>SUCCESS_'+filename
os.system(sh)
print sh
os.system(sh2)
print sh2
def readDDIsFile(filename):# read a ddi file into a list, with the two domain names separated.
data_file = open(filename)
data = []
for line in data_file.readlines():
line = line.strip()
try:
[domain1,domain2]=line.split('_int_')
data.append([domain1,domain2])
except:
print line
data_file.close()
print "number of ddis in %s:" %filename +str(len(data))
return data
| [
"[email protected]"
] | |
9be4c0cfe1528de5ee29c120480cb7e74ee1c110 | b3ad6d480873ac91fc284efc6341568649898684 | /cohort/week6/cs1.py | b50661dfbe30716c9a6b8392c7f7626cd65dd870 | [
"MIT"
] | permissive | jamestiotio/DW2020 | e88af5f84f477e911b8414c02893da039fff9cf0 | 1639ccbcf77b64719bdc29bf2a373d19296fbd75 | refs/heads/master | 2022-10-30T12:48:39.147705 | 2022-08-14T12:16:35 | 2022-08-14T12:16:35 | 269,898,881 | 0 | 1 | MIT | 2021-11-04T16:30:20 | 2020-06-06T07:03:23 | Jupyter Notebook | UTF-8 | Python | false | false | 142 | py | def reverse(string):
new_string = ""
for char in range(1, len(string) + 1):
new_string += string[-char]
    return new_string
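# Equivalent one-liner using slice notation, which steps through the string
# backwards (illustrative alternative, not part of the original file):
#
#     def reverse(string):
#         return string[::-1]
 | [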
"[email protected]"
] | |
975de9975a9a39cbe4c0d7727322685bc5762de1 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03645/s702030481.py | 0676433bb304179255b0f58e28e7315e9f4ba85a | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | from sys import stdin
def input():
return stdin.readline().strip()
n, m = map(int, input().split())
edge = [[] for _ in range(n)]
for _ in range(m):
i, j = map(int, input().split())
i -= 1
j -= 1
edge[i].append(j)
edge[j].append(i)
for i in edge[0]:
if n - 1 in edge[i]:
print('POSSIBLE')
exit()
else:
    print('IMPOSSIBLE')
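# Set-based sketch that avoids the O(deg) list membership scan above
# (illustrative alternative; the graph is undirected, so `n - 1 in edge[i]`
# is equivalent to `i in set(edge[n - 1])`):
#
#     targets = set(edge[n - 1])
#     if any(i in targets for i in edge[0]):
#         print('POSSIBLE')
#     else:
#         print('IMPOSSIBLE')
 | [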
"[email protected]"
] | |
a89f7551c6e7110c925d5c588dffb97eee504470 | c4c159a21d2f1ea0d7dfaa965aeff01c8ef70dce | /flask/flaskenv/Lib/site-packages/tensorflow/_api/v1/strings/__init__.py | ce1ec660353ce6c9b3396ee91e02d9336d97ded8 | [] | no_license | AhsonAslam/webapi | 54cf7466aac4685da1105f9fb84c686e38f92121 | 1b2bfa4614e7afdc57c9210b0674506ea70b20b5 | refs/heads/master | 2020-07-27T06:05:36.057953 | 2019-09-17T06:35:33 | 2019-09-17T06:35:33 | 208,895,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:99d3d433550637101bcb96e96fa3ad768bd4fcf5d594eb911c360868a4a5bf1f
size 2083
| [
"github@cuba12345"
] | github@cuba12345 |
c2e523ab1ff7e56a1209026225b6a1f5d1049f9a | d62863d049c0206bfa744ca4c9e886030bfce1ab | /apps/sw_shop/sw_order/filters.py | 8fbf2cab68517611516c4be1eb8e79e99f52ae1a | [] | no_license | jurgeon018/box | 51738b99e640202936ed72357d3c67d2517e589b | 50b84a0afa73fab85a00eef54194f3c126d15397 | refs/heads/master | 2021-07-17T13:37:08.665292 | 2020-10-15T09:50:33 | 2020-10-15T09:50:33 | 232,013,297 | 0 | 1 | null | 2020-03-27T02:16:44 | 2020-01-06T03:01:34 | Python | UTF-8 | Python | false | false | 142 | py | from admin_auto_filters.filters import AutocompleteSelect
class TagsFilter(AutocompleteSelect):
title = 'тег'
field_name = 'tags'
| [
"[email protected]"
] | |
d1548533bbd4772134b1eb35f4625917f8311929 | 4c2c1775b6b319ae07155f46e70a6726ab0980c2 | /algo/algo_code/personal/cvr_space/model_train_exp/script/model_predict.py | d83956c20f098ea44510c425f8fc4512584682ff | [] | no_license | kiminh/util | 8e4b204849a57941120e37c9330772f03c8892d0 | 763a71031d9c0ef207b87dc03ebc55208a2dd5ad | refs/heads/master | 2022-06-09T06:09:13.221754 | 2020-04-27T04:23:00 | 2020-04-27T04:23:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | import sys
import math
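# Usage sketch (assumed formats, inferred from the parsing below): argv[1]
# holds "feature weight" pairs, one per line; argv[2] holds instances as
# "label feat1 feat2 ...". For each instance the script prints the logistic
# score sigmoid(sum of matched feature weights).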
model_dict = {}
for raw in open(sys.argv[1]):
    fields = raw.strip("\n\r").split(" ")
    if len(fields) < 2:
        continue
    model_dict[fields[0]] = float(fields[1])
for raw in open(sys.argv[2]):
s = 0
for sp in raw.strip().split()[1:]:
        w = model_dict.get(sp, 0)
        s += w
print 1.0 / (1.0 + math.exp(-s))
| [
"[email protected]"
] | |
6d72c628644e2d398d8db125a9bc5a7d8ef0a069 | 79baf4404e51bdc0f33038b3b16bea86ff09e82f | /azext_iot/sdk/deviceupdate/controlplane/operations/__init__.py | fcfbd4b1f52e9fbba906f0b37ee993a23e66a0b3 | [
"MIT"
] | permissive | Azure/azure-iot-cli-extension | 80b6cb29e907f7512c7361a85d6bfdea5ae2dd9e | bdbe65c3874ff632c2eba25c762e9ea8e9175b5f | refs/heads/dev | 2023-09-04T10:57:16.118442 | 2023-08-28T17:12:05 | 2023-08-28T17:12:05 | 103,456,760 | 95 | 80 | NOASSERTION | 2023-09-13T00:02:54 | 2017-09-13T22:04:36 | Python | UTF-8 | Python | false | false | 1,443 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._device_update_operations import DeviceUpdateOperationsMixin
from ._accounts_operations import AccountsOperations
from ._instances_operations import InstancesOperations
from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
from ._private_link_resources_operations import PrivateLinkResourcesOperations
from ._private_endpoint_connection_proxies_operations import PrivateEndpointConnectionProxiesOperations
from ._operations import Operations
from ._patch import __all__ as _patch_all
from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk
__all__ = [
'DeviceUpdateOperationsMixin',
'AccountsOperations',
'InstancesOperations',
'PrivateEndpointConnectionsOperations',
'PrivateLinkResourcesOperations',
'PrivateEndpointConnectionProxiesOperations',
'Operations',
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk() | [
"[email protected]"
] | |
3d556d8e0c6be1368b829f2f9e0d84e4eea75160 | db12b990924703cd74748d8585cd9c11fafa6746 | /h2o-py/h2o/schemas/metadata.py | 9582a1aaee3cc62f7c74ffa2f0fa2e14384a9fc0 | [
"Apache-2.0"
] | permissive | h2oai/h2o-3 | 919019a8f297eec676011a9cfd2cc2d97891ce14 | d817ab90c8c47f6787604a0b9639b66234158228 | refs/heads/master | 2023-08-17T18:50:17.732191 | 2023-08-17T16:44:42 | 2023-08-17T16:44:42 | 17,371,412 | 6,872 | 2,345 | Apache-2.0 | 2023-09-14T18:05:40 | 2014-03-03T16:08:07 | Jupyter Notebook | UTF-8 | Python | false | false | 2,142 | py | # -*- encoding: utf-8 -*-
#
# Copyright 2016 H2O.ai; Apache License Version 2.0 (see LICENSE for details)
#
# noinspection PyUnresolvedReferences
from h2o.utils.compatibility import * # NOQA
class H2OMetadataV3(object):
@classmethod
def make(cls, json_kv_pairs):
return cls(json_kv_pairs)
def __init__(self, json_kv_pairs):
self._schemas = next((v for k, v in json_kv_pairs if k == 'schemas'), []) or []
self._schema = self._schemas[0] if self._schemas else None
self._routes = next((v for k, v in json_kv_pairs if k == 'routes'), []) or []
@property
def name(self):
return self._schema.get('name') if self._schema else None
@property
def fields(self):
return [_Field(f) for f in self._schema.get('fields')] if self._schema else None
@property
def routes(self):
return [_Route(r) for r in self._routes]
def __repr__(self):
return repr({k: getattr(self, k) for k in dir(self) if not k.startswith('_')})
class _Field(object):
def __init__(self, j_field):
self._field = j_field
@property
def name(self):
return self._field.get('name')
@property
def is_schema(self):
return self._field.get('is_schema')
@property
def help(self):
return self._field.get('help')
def __repr__(self):
return repr({k: getattr(self, k) for k in dir(self) if not k.startswith('_')})
class _Route(object):
def __init__(self, j_route):
self._route = j_route
@property
def http_method(self):
return self._route.get('http_method')
@property
def url_pattern(self):
return self._route.get('url_pattern')
@property
def summary(self):
return self._route.get('summary')
@property
def input_schema(self):
return self._route.get('input_schema')
@property
def output_schema(self):
return self._route.get('output_schema')
def __repr__(self):
return repr({k: getattr(self, k) for k in dir(self) if not k.startswith('_')})
| [
"[email protected]"
] | |
734bf8211cf76d87497a9023f0a6036d2c89b55b | 6a33cb94d4af1d8a7329ddc6c9d42f870c35bb2f | /python/euler88.py | 438947a9585065bb73e52c6e4fe46d5db173dc3c | [] | no_license | vochong/project-euler | 836321cc8e7d2e7cdf22b3b136d44dcba74a8701 | 6a0c7103861ff825bf84800b6e2e62819a41e36d | refs/heads/master | 2020-04-29T10:41:48.487159 | 2018-09-19T00:13:34 | 2018-09-19T00:13:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | def prodsum(prod, sm, facs, curfac, mins):
    # prod and sm are the running product and sum of the chosen factors and
    # facs counts them; padding with (prod - sm) extra 1s makes the sum equal
    # the product, so prod is a product-sum number for a set of size k.
    k = prod - sm + facs
if k < 12000:
mins[k] = min(mins[k], prod)
for fac in range(curfac, 24000/prod):
prodsum(prod*fac, sm+fac, facs+1, fac, mins)
def euler88():
mins = [24000]*12000
prodsum(1, 1, 1, 2, mins)
return sum(set(mins[2:]))
if __name__ == "__main__":
print euler88()
| [
"[email protected]"
] | |
0de714e49f871d8ef0eaad488040bcbabbacca73 | f3cdb2bae2ca6cbd045941ae0c2f4052e52de622 | /p2p/dataset.py | 04e28cc0ef5d167401736ee3081182e3dbb724bd | [
"Apache-2.0"
] | permissive | IQTLabs/3-D_DataAugmentation | bab6aead07235cccb2056b0ce25179e5bb871a82 | 3eb7fe156906df46151de5c4472274a1ccdcfbed | refs/heads/master | 2023-01-21T15:57:39.173956 | 2020-12-03T20:23:18 | 2020-12-03T20:23:18 | 298,663,332 | 0 | 0 | Apache-2.0 | 2023-01-23T13:56:28 | 2020-09-25T19:32:59 | Jupyter Notebook | UTF-8 | Python | false | false | 2,028 | py | import numpy as np
from PIL import Image
from torch.utils.data import Dataset
from torchvision import transforms
__all__ = ['P2PDataset']
class P2PDataset(Dataset):
""" Pose to Pose dataset definition loads two frame/pose pairs
"""
def __init__(self, df=None, transform=None, data_path=''):
""" Dataset initialization
Parameters
----------
df : pd.DataFrame
Dataframe with datapoint metadata
transform : torchvision.transforms
            Frame preprocessing transforms. Not applied to poses
data_path : str
Global path to data directory
Returns
-------
"""
self.df = df
self.data_path = data_path
if transform is None:
self.transform = transforms.ToTensor()
else:
self.transform = transform
self.to_tensor = transforms.ToTensor()
def __getitem__(self, idx):
""" Returns dataset item
Parameters
----------
idx : int
Index for desired datapoint
Returns
-------
frames[0] : torch.tensor
First (input) frame
frames[1] : torch.tensor
Second (target) frame
pose : torch.tensor
Second (target) pose
"""
entry = self.df.iloc[idx]
dir_path = '{}/{}/{}'.format(self.data_path,
entry['name'], entry['snippet'])
index = np.random.choice([x for x in range(5)], 2, replace=False)
frames = [self.transform(Image.open(
'{}/frame_{}.jpg'.format(dir_path, x))) for x in index]
pose = self.to_tensor(Image.open(
'{}/pose_{}.jpg'.format(dir_path, index[-1])))
return frames[0], frames[1], pose
def __len__(self):
""" Lenght of dataset
Parameters
----------
Returns
-------
len : int
Len of dataframe/dataset
"""
return len(self.df)
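# Minimal usage sketch (assumptions: a dataframe with 'name' and 'snippet'
# columns, and data laid out as <data_path>/<name>/<snippet>/frame_i.jpg and
# pose_i.jpg, exactly as read by __getitem__ above).
if __name__ == "__main__":
    import pandas as pd
    from torch.utils.data import DataLoader

    df = pd.DataFrame({"name": ["personA"], "snippet": ["clip_000"]})
    dataset = P2PDataset(df=df, data_path="/path/to/data")
    loader = DataLoader(dataset, batch_size=1, shuffle=True)
    for src_frame, tgt_frame, tgt_pose in loader:
        print(src_frame.shape, tgt_frame.shape, tgt_pose.shape)
        break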
| [
"[email protected]"
] | |
c2615df1c73b8edf1406f870f6e2f819b8bd4f9f | 25d4c31d5ebe470118b14beb84f3cd1e53d99c15 | /01_Tutorials/Udemy Kurs Ehical Hacking/06_Praxisprojekt_Firefox_Daten_auslesen/48_Profilordner_Firefox_Alle_OS_Call_function.py | a59ecffd3590e860a7e464367aafb21873f9b8a1 | [] | no_license | daltdoerfer/Python_Templates-1 | ea4b59489feb7b7617e81b7c94d4375dbf25def3 | c2471cebeaf20bbfdfd3fd263d458e5a67ad8d1e | refs/heads/master | 2023-05-10T15:07:10.109280 | 2021-06-08T06:45:53 | 2021-06-08T06:45:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 846 | py | # Doku places.sqlite https://developer.mozilla.org/en-US/docs/Mozilla/Tech/Places/Database
# e.g. bookmarks are stored in the moz_bookmarks table
import os
import sqlite3  # database module used to read the SQLite file
import pandas as pd
from get_firefox_path import get_firefox_path
# Alternative: import get_firefox_path as gfp  # -> the call would then have to read gfp.get_firefox_path()
##############################################################################
# Function call
##############################################################################
path = get_firefox_path("places.sqlite")
#os.startfile('C:\\Users\\James/AppData/Roaming/Mozilla/Firefox/Profiles')  # open the profile folder for inspection
conn = sqlite3.connect(path)
print(conn)
# Output as a pandas DataFrame
df = pd.read_sql("SELECT * FROM moz_bookmarks", conn)
print(df)
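# Follow-up query sketch: per the Places schema linked above, moz_bookmarks.fk
# references moz_places.id, so bookmark titles can be resolved to URLs:
bookmarks = pd.read_sql(
    "SELECT b.title, p.url "
    "FROM moz_bookmarks b JOIN moz_places p ON b.fk = p.id", conn)
print(bookmarks)
conn.close()  # release the database file when done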
| [
"[email protected]"
] | |
bd5e4997bb0c3e02480b1d1c0c9fa0581edf3442 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/davison.py | c3e85a811b86444a5ada880723d58cfd45457d4f | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 238 | py | ii = [('WilbRLW4.py', 1), ('PettTHE.py', 3), ('ChalTPW2.py', 1), ('GellWPT.py', 2), ('ClarGE2.py', 2), ('GellWPT2.py', 1), ('FitzRNS4.py', 1), ('RoscTTI.py', 1), ('JacoWHI2.py', 2), ('JacoWHI.py', 1), ('ClarGE3.py', 1), ('RogeSIP.py', 2)] | [
"[email protected]"
] | |
4e10eac91084153cc536425f96e080401d2b33d9 | 6679fd1102802bf190294ef43c434b6047840dc2 | /openconfig_bindings/bgp/neighbors/neighbor/graceful_restart/config/__init__.py | 9dc0f15194dac761e4f0d1481680c9a59d024546 | [] | no_license | robshakir/pyangbind-openconfig-napalm | d49a26fc7e38bbdb0419c7ad1fbc590b8e4b633e | 907979dc14f1578f4bbfb1c1fb80a2facf03773c | refs/heads/master | 2023-06-13T17:17:27.612248 | 2016-05-10T16:46:58 | 2016-05-10T16:46:58 | 58,091,515 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,255 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp - based on the path /bgp/neighbors/neighbor/graceful-restart/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration parameters relating to graceful-restart
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__enabled','__restart_time','__stale_routes_time','__helper_only',)
_yang_name = 'config'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__stale_routes_time = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="stale-routes-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='decimal64', is_config=True)
self.__restart_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..4096']}), is_leaf=True, yang_name="restart-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='uint16', is_config=True)
self.__enabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)
self.__helper_only = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="helper-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'bgp', u'neighbors', u'neighbor', u'graceful-restart', u'config']
def _get_enabled(self):
"""
Getter method for enabled, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/enabled (boolean)
YANG Description: Enable or disable the graceful-restart capability.
"""
return self.__enabled
def _set_enabled(self, v, load=False):
"""
Setter method for enabled, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/enabled (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_enabled is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enabled() directly.
YANG Description: Enable or disable the graceful-restart capability.
"""
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enabled must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)""",
})
self.__enabled = t
if hasattr(self, '_set'):
self._set()
def _unset_enabled(self):
self.__enabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)
def _get_restart_time(self):
"""
Getter method for restart_time, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/restart_time (uint16)
YANG Description: Estimated time (in seconds) for the local BGP speaker to
restart a session. This value is advertised in the graceful
restart BGP capability. This is a 12-bit value, referred to
as Restart Time in RFC4724. Per RFC4724, the suggested
default value is <= the hold-time value.
"""
return self.__restart_time
def _set_restart_time(self, v, load=False):
"""
Setter method for restart_time, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/restart_time (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_restart_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_restart_time() directly.
YANG Description: Estimated time (in seconds) for the local BGP speaker to
restart a session. This value is advertised in the graceful
restart BGP capability. This is a 12-bit value, referred to
as Restart Time in RFC4724. Per RFC4724, the suggested
default value is <= the hold-time value.
"""
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..4096']}), is_leaf=True, yang_name="restart-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='uint16', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """restart_time must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..4096']}), is_leaf=True, yang_name="restart-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='uint16', is_config=True)""",
})
self.__restart_time = t
if hasattr(self, '_set'):
self._set()
def _unset_restart_time(self):
self.__restart_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..4096']}), is_leaf=True, yang_name="restart-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='uint16', is_config=True)
def _get_stale_routes_time(self):
"""
Getter method for stale_routes_time, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/stale_routes_time (decimal64)
YANG Description: An upper-bound on the time that stale routes will be
retained by a router after a session is restarted. If an
End-of-RIB (EOR) marker is received prior to this timer
expiring stale-routes will be flushed upon its receipt - if
no EOR is received, then when this timer expires stale paths
will be purged. This timer is referred to as the
Selection_Deferral_Timer in RFC4724
"""
return self.__stale_routes_time
def _set_stale_routes_time(self, v, load=False):
"""
Setter method for stale_routes_time, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/stale_routes_time (decimal64)
If this variable is read-only (config: false) in the
source YANG file, then _set_stale_routes_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_stale_routes_time() directly.
YANG Description: An upper-bound on the time that stale routes will be
retained by a router after a session is restarted. If an
End-of-RIB (EOR) marker is received prior to this timer
expiring stale-routes will be flushed upon its receipt - if
no EOR is received, then when this timer expires stale paths
will be purged. This timer is referred to as the
Selection_Deferral_Timer in RFC4724
"""
try:
t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="stale-routes-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='decimal64', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """stale_routes_time must be of a type compatible with decimal64""",
'defined-type': "decimal64",
'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="stale-routes-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='decimal64', is_config=True)""",
})
self.__stale_routes_time = t
if hasattr(self, '_set'):
self._set()
def _unset_stale_routes_time(self):
self.__stale_routes_time = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="stale-routes-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='decimal64', is_config=True)
def _get_helper_only(self):
"""
Getter method for helper_only, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/helper_only (boolean)
YANG Description: Enable graceful-restart in helper mode only. When this
leaf is set, the local system does not retain its own
forwarding state during a restart, but supports procedures
for the receiving speaker, as defined in RFC4724.
"""
return self.__helper_only
def _set_helper_only(self, v, load=False):
"""
Setter method for helper_only, mapped from YANG variable /bgp/neighbors/neighbor/graceful_restart/config/helper_only (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_helper_only is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_helper_only() directly.
YANG Description: Enable graceful-restart in helper mode only. When this
leaf is set, the local system does not retain its own
forwarding state during a restart, but supports procedures
for the receiving speaker, as defined in RFC4724.
"""
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="helper-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """helper_only must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="helper-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)""",
})
self.__helper_only = t
if hasattr(self, '_set'):
self._set()
def _unset_helper_only(self):
self.__helper_only = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="helper-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=True)
enabled = property(_get_enabled, _set_enabled)
restart_time = property(_get_restart_time, _set_restart_time)
stale_routes_time = property(_get_stale_routes_time, _set_stale_routes_time)
helper_only = property(_get_helper_only, _set_helper_only)
_pyangbind_elements = {'enabled': enabled, 'restart_time': restart_time, 'stale_routes_time': stale_routes_time, 'helper_only': helper_only, }
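# Usage sketch (not part of the generated bindings): the container can be
# instantiated standalone, and assignments are validated against the
# YANG-derived types, e.g. restart-time is range-checked to 0..4096.
#   cfg = config()
#   cfg.enabled = True
#   cfg.restart_time = 120     # accepted
#   cfg.restart_time = 5000    # raises ValueError (outside 0..4096)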
| [
"[email protected]"
] | |
4f442defc3e20cd08d0d0bd1c7cfecc372987eaf | f52997ac7e1b41f34018c3a0028ced8638072b2b | /src/feedback/migrations/0002_migrate_search_feedback.py | 99e69225ca6320d0a38a794a12ad8bb49a371897 | [
"MIT"
] | permissive | uktrade/digital-workspace-v2 | 49fae1fca819b625c6f6949fb5ce51b89fbcab96 | 7e328d0d55c9aa73be61f476823a743d96e792d0 | refs/heads/main | 2023-09-03T12:03:47.016608 | 2023-09-01T12:07:55 | 2023-09-01T12:07:55 | 232,302,840 | 6 | 0 | MIT | 2023-09-13T15:50:24 | 2020-01-07T10:41:18 | Python | UTF-8 | Python | false | false | 999 | py | # Generated by Django 4.1.10 on 2023-08-08 14:35
from django.db import migrations
def migrate_search_feedback(apps, schema_editor):
Feedback = apps.get_model("django_feedback_govuk", "Feedback")
SearchFeedbackV1 = apps.get_model("feedback", "SearchFeedbackV1")
for feedback in Feedback.objects.all():
search_feedback = SearchFeedbackV1.objects.create(
submitter=feedback.submitter,
satisfaction=feedback.satisfaction,
comment=feedback.comment,
)
# Update the base feedback model with the submitted_at field to override the auto_now_add
SearchFeedbackV1.objects.filter(pk=search_feedback.pk).update(
submitted_at=feedback.submitted_at,
)
feedback.delete()
class Migration(migrations.Migration):
dependencies = [
("feedback", "0001_initial"),
]
operations = [
migrations.RunPython(migrate_search_feedback, migrations.RunPython.noop)
]
| [
"[email protected]"
] | |
965bc7e325875ed234ebf3d269bc9a012f110885 | c987e888b0ccd9051e26335b3641cbd80aa14e2a | /tests/circular/template/test_context.py | 5db0e9c17d97674ab5b8c8d6f013bfc57113b782 | [
"MIT"
] | permissive | jonathanverner/circular | fa47eef5f2914da8540d0c0c50f3fe5d2d87d598 | e29bb9cc846566943febd8ba85104d796943819c | refs/heads/master | 2020-12-04T11:49:48.587539 | 2017-08-17T11:12:58 | 2017-08-17T11:12:58 | 66,577,154 | 7 | 2 | null | null | null | null | UTF-8 | Python | false | false | 882 | py | import asyncio
import pytest
from src.circular.template.context import Context
def test_extension():
base = Context()
base.a = 10
base.c = 30
child = Context(base=base)
# Child should have access to parent
assert child.a == 10
# The _get method should work for accessing parent
assert child._get('a') == 10
# Child should not be allowed to modify parent
child.a = 20
assert child.a == 20
assert base.a == 10
# Attributes should propagate recursively
second_child = Context(base=child)
assert second_child.c == 30
assert second_child.a == 20
def test_future(event_loop):
asyncio.set_event_loop(event_loop)
ctx = Context()
    fut = asyncio.ensure_future(asyncio.sleep(0.1, result=3))  # schedule the coroutine ("async" is a reserved word in modern Python)
ctx.test = fut
assert hasattr(ctx, 'test') is False
event_loop.run_until_complete(fut)
assert ctx.test == 3
| [
"[email protected]"
] | |
d9a7f7e7bc166bb6f1556dbcbf9c4e875c736b66 | e4dd3e5d76073b2ba2c8a06a713582a7b8fd6983 | /eveauth/models/role.py | 2bf7e073cbbce0bddcd8bc43d7af18ff587e8483 | [] | no_license | extraquoo/avrse-auth | 792f1f217c682dfdace1467d81f2225976078750 | 5c94ac5e61954e37cc52dda0e884977b01eeff2a | refs/heads/master | 2020-05-20T00:24:58.466016 | 2018-10-17T22:40:53 | 2018-10-17T22:40:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | from django.db import models
from .character import Character
class Role(models.Model):
    # on_delete is required on Django >= 2.0; CASCADE matches the old implicit default
    character = models.ForeignKey(Character, related_name="roles", on_delete=models.CASCADE)
name = models.CharField(max_length=128, db_index=True)
def __str__(self):
return "%s on %s" % (self.name, self.character.name) | [
"[email protected]"
] | |
7a5f2c55c36a06658250b34cb109fbd1e78770d3 | 46279163a543cd8820bdc38133404d79e787c5d2 | /caffe2/python/operator_test/instance_norm_test.py | fb4f3c935ba8e1014e403825550754eddd382d98 | [
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | erwincoumans/pytorch | 31738b65e7b998bfdc28d0e8afa7dadeeda81a08 | ae9f39eb580c4d92157236d64548b055f71cf14b | refs/heads/master | 2023-01-23T10:27:33.628897 | 2020-12-06T01:22:00 | 2020-12-06T01:23:40 | 318,930,000 | 5 | 1 | NOASSERTION | 2020-12-06T01:58:57 | 2020-12-06T01:58:56 | null | UTF-8 | Python | false | false | 9,926 | py |
import numpy as np
from hypothesis import given, assume, settings
import hypothesis.strategies as st
from caffe2.python import core, model_helper, brew, utils
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import unittest
import os
class TestInstanceNorm(serial.SerializedTestCase):
def _get_inputs(self, N, C, H, W, order):
input_data = np.random.rand(N, C, H, W).astype(np.float32)
if order == 'NHWC':
# Allocate in the same order as NCHW and transpose to make sure
# the inputs are identical on freshly-seeded calls.
input_data = utils.NCHW2NHWC(input_data)
elif order != "NCHW":
raise Exception('unknown order type ({})'.format(order))
scale_data = np.random.rand(C).astype(np.float32)
bias_data = np.random.rand(C).astype(np.float32)
return input_data, scale_data, bias_data
def _get_op(self, device_option, store_mean, store_inv_stdev, epsilon,
order, inplace=False):
outputs = ['output' if not inplace else "input"]
if store_mean or store_inv_stdev:
outputs += ['mean']
if store_inv_stdev:
outputs += ['inv_stdev']
op = core.CreateOperator(
'InstanceNorm',
['input', 'scale', 'bias'],
outputs,
order=order,
epsilon=epsilon,
device_option=device_option)
return op
def _feed_inputs(self, input_blobs, device_option):
names = ['input', 'scale', 'bias']
for name, blob in zip(names, input_blobs):
self.ws.create_blob(name).feed(blob, device_option=device_option)
@given(gc=hu.gcs['gc'],
dc=hu.gcs['dc'],
N=st.integers(1, 4),
C=st.integers(1, 4),
H=st.integers(2, 4),
W=st.integers(2, 4),
order=st.sampled_from(['NCHW', 'NHWC']),
epsilon=st.floats(1e-6, 1e-4),
store_mean=st.booleans(),
seed=st.integers(0, 1000),
store_inv_stdev=st.booleans())
@settings(deadline=1000)
def test_instance_norm_gradients(
self, gc, dc, N, C, H, W, order, store_mean, store_inv_stdev,
epsilon, seed):
np.random.seed(seed)
# force store_inv_stdev if store_mean to match existing forward pass
# implementation
store_inv_stdev |= store_mean
op = self._get_op(
device_option=gc,
store_mean=store_mean,
store_inv_stdev=store_inv_stdev,
epsilon=epsilon,
order=order)
input_data = np.arange(N * C * H * W).astype(np.float32)
np.random.shuffle(input_data)
if order == "NCHW":
input_data = input_data.reshape(N, C, H, W)
else:
input_data = input_data.reshape(N, H, W, C)
scale_data = np.random.randn(C).astype(np.float32)
bias_data = np.random.randn(C).astype(np.float32)
input_blobs = (input_data, scale_data, bias_data)
output_indices = [0]
# if store_inv_stdev is turned on, store_mean must also be forced on
if store_mean or store_inv_stdev:
output_indices += [1]
if store_inv_stdev:
output_indices += [2]
self.assertDeviceChecks(dc, op, input_blobs, output_indices)
# The gradient only flows from output #0 since the other two only
# store the temporary mean and inv_stdev buffers.
# Check dl/dinput
self.assertGradientChecks(gc, op, input_blobs, 0, [0])
# Check dl/dscale
self.assertGradientChecks(gc, op, input_blobs, 1, [0])
# Check dl/dbias
self.assertGradientChecks(gc, op, input_blobs, 2, [0])
@given(gc=hu.gcs['gc'],
dc=hu.gcs['dc'],
N=st.integers(2, 10),
C=st.integers(3, 10),
H=st.integers(5, 10),
W=st.integers(7, 10),
seed=st.integers(0, 1000),
epsilon=st.floats(1e-6, 1e-4),
store_mean=st.booleans(),
store_inv_stdev=st.booleans())
def test_instance_norm_layout(self, gc, dc, N, C, H, W, store_mean,
store_inv_stdev, epsilon, seed):
# force store_inv_stdev if store_mean to match existing forward pass
# implementation
store_inv_stdev |= store_mean
outputs = {}
for order in ('NCHW', 'NHWC'):
np.random.seed(seed)
input_blobs = self._get_inputs(N, C, H, W, order)
self._feed_inputs(input_blobs, device_option=gc)
op = self._get_op(
device_option=gc,
store_mean=store_mean,
store_inv_stdev=store_inv_stdev,
epsilon=epsilon,
order=order)
self.ws.run(op)
outputs[order] = self.ws.blobs['output'].fetch()
np.testing.assert_allclose(
outputs['NCHW'],
utils.NHWC2NCHW(outputs["NHWC"]),
atol=1e-4,
rtol=1e-4)
@serial.given(gc=hu.gcs['gc'],
dc=hu.gcs['dc'],
N=st.integers(2, 10),
C=st.integers(3, 10),
H=st.integers(5, 10),
W=st.integers(7, 10),
order=st.sampled_from(['NCHW', 'NHWC']),
epsilon=st.floats(1e-6, 1e-4),
store_mean=st.booleans(),
seed=st.integers(0, 1000),
store_inv_stdev=st.booleans(),
inplace=st.booleans())
def test_instance_norm_reference_check(
self, gc, dc, N, C, H, W, order, store_mean, store_inv_stdev,
epsilon, seed, inplace):
np.random.seed(seed)
# force store_inv_stdev if store_mean to match existing forward pass
# implementation
store_inv_stdev |= store_mean
if order != "NCHW":
assume(not inplace)
inputs = self._get_inputs(N, C, H, W, order)
op = self._get_op(
device_option=gc,
store_mean=store_mean,
store_inv_stdev=store_inv_stdev,
epsilon=epsilon,
order=order,
inplace=inplace)
def ref(input_blob, scale_blob, bias_blob):
if order == 'NHWC':
input_blob = utils.NHWC2NCHW(input_blob)
mean_blob = input_blob.reshape((N, C, -1)).mean(axis=2)
inv_stdev_blob = 1.0 / \
np.sqrt(input_blob.reshape((N, C, -1)).var(axis=2) + epsilon)
# _bc indicates blobs that are reshaped for broadcast
scale_bc = scale_blob[np.newaxis, :, np.newaxis, np.newaxis]
mean_bc = mean_blob[:, :, np.newaxis, np.newaxis]
inv_stdev_bc = inv_stdev_blob[:, :, np.newaxis, np.newaxis]
bias_bc = bias_blob[np.newaxis, :, np.newaxis, np.newaxis]
normalized_blob = scale_bc * (input_blob - mean_bc) * inv_stdev_bc \
+ bias_bc
if order == 'NHWC':
normalized_blob = utils.NCHW2NHWC(normalized_blob)
if not store_mean and not store_inv_stdev:
return normalized_blob,
elif not store_inv_stdev:
return normalized_blob, mean_blob
else:
return normalized_blob, mean_blob, inv_stdev_blob
self.assertReferenceChecks(gc, op, inputs, ref)
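    # The reference above restates instance normalization per sample n and
    # channel c: y = scale * (x - mean) / sqrt(var + epsilon) + bias, with
    # mean and var taken over the spatial dims of that (n, c) slice.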
@given(gc=hu.gcs['gc'],
dc=hu.gcs['dc'],
N=st.integers(2, 10),
C=st.integers(3, 10),
H=st.integers(5, 10),
W=st.integers(7, 10),
order=st.sampled_from(['NCHW', 'NHWC']),
epsilon=st.floats(1e-6, 1e-4),
store_mean=st.booleans(),
seed=st.integers(0, 1000),
store_inv_stdev=st.booleans())
def test_instance_norm_device_check(
self, gc, dc, N, C, H, W, order, store_mean, store_inv_stdev,
epsilon, seed):
np.random.seed(seed)
# force store_inv_stdev if store_mean to match existing forward pass
# implementation
store_inv_stdev |= store_mean
inputs = self._get_inputs(N, C, H, W, order)
op = self._get_op(
device_option=gc,
store_mean=store_mean,
store_inv_stdev=store_inv_stdev,
epsilon=epsilon,
order=order)
self.assertDeviceChecks(dc, op, inputs, [0])
@given(is_test=st.booleans(),
N=st.integers(2, 10),
C=st.integers(3, 10),
H=st.integers(5, 10),
W=st.integers(7, 10),
order=st.sampled_from(['NCHW', 'NHWC']),
epsilon=st.floats(1e-6, 1e-4),
seed=st.integers(0, 1000))
def test_instance_norm_model_helper(
self, N, C, H, W, order, epsilon, seed, is_test):
np.random.seed(seed)
model = model_helper.ModelHelper(name="test_model")
brew.instance_norm(
model,
'input',
'output',
C,
epsilon=epsilon,
order=order,
is_test=is_test)
input_blob = np.random.rand(N, C, H, W).astype(np.float32)
if order == 'NHWC':
input_blob = utils.NCHW2NHWC(input_blob)
self.ws.create_blob('input').feed(input_blob)
self.ws.create_net(model.param_init_net).run()
self.ws.create_net(model.net).run()
if is_test:
scale = self.ws.blobs['output_s'].fetch()
assert scale is not None
assert scale.shape == (C, )
bias = self.ws.blobs['output_b'].fetch()
assert bias is not None
assert bias.shape == (C, )
output_blob = self.ws.blobs['output'].fetch()
if order == 'NHWC':
output_blob = utils.NHWC2NCHW(output_blob)
assert output_blob.shape == (N, C, H, W)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
bdc3df8f4e675387177e4c1e95dbfe2ce8ad0c84 | a46b064486b703b5424a5e59fb6d567a0c08d480 | /scripts/pushmsg | fed86e723478dbd82c088dc2756750e5c891a332 | [
"MIT"
] | permissive | nick-youngblut/pushmsg | 1dd3ca23dbfa8277f92b7261c5eabeb6ea5bd3c6 | 389cd22476077198593bd4b4af3900fd1644da65 | refs/heads/master | 2022-07-23T14:52:46.886835 | 2020-05-23T19:21:22 | 2020-05-23T19:21:22 | 71,820,460 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 530 | #!/usr/bin/env python
import sys
import argparse
import pushmsg
parser = argparse.ArgumentParser(
description='Send messages with Pushbullet',
epilog='For all options, see the line magic help'
)
parser.add_argument('msg', help='Message to send via Pushbullet')
parser.add_argument('--version', action='version', version='0.0.1')
if __name__ == '__main__':
    args, _unknown = parser.parse_known_args()  # validate the known flags; the raw argv is forwarded below
    sys.argv[-1] = '"' + sys.argv[-1] + '"'  # quote the message so it survives the line-magic parsing
line = ' '.join(sys.argv[1:])
p = pushmsg.PushMsg()
p.pushmsg(line)
| [
"[email protected]"
] | ||
64811b756a4e41173c5e5898912f0448a3f966dc | 1a3e6ff7b86fa34e4ef88f3e0fe7e1472f7f6821 | /vortex-methods/vel_integration.py | 7700104a2b37c85b4b1ae011fe0449ca7db1b4e2 | [] | no_license | rbonvall/tesis | 0c901746c1b93aa300c928104455e23ef93bcf87 | a93a07965387fc5a944a39eb734cfc34d0c09404 | refs/heads/master | 2020-05-30T22:09:38.621467 | 2011-07-04T14:47:33 | 2011-07-04T14:47:33 | 213,789 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 912 | py | #!/usr/bin/env python
from numpy import zeros_like
import functools
import vm
def euler(x, y, circ, dt, squared_blob_size):
    # dt is unused here: forward Euler just samples the instantaneous velocity
    u, v = vm.eval_velocity(x, y, circ=circ,
squared_blob_size=squared_blob_size)
return u, v
def runge_kutta(x, y, circ, dt, squared_blob_size):
u, v = zeros_like(x), zeros_like(y)
eval_velocity = functools.partial(vm.eval_velocity, circ=circ,
squared_blob_size=squared_blob_size)
kx, ky = eval_velocity(x, y) # k1
u += kx/6
v += ky/6
dx, dy = kx * (dt/2), ky * (dt/2)
kx, ky = eval_velocity(x + dx, y + dy) # k2
u += kx/3
v += ky/3
dx, dy = kx * (dt/2), ky * (dt/2)
kx, ky = eval_velocity(x + dx, y + dy) # k3
u += kx/3
v += ky/3
dx, dy = kx * dt, ky * dt
kx, ky = eval_velocity(x + dx, y + dy) # k4
u += kx/6
v += ky/6
return u, v
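# Advection sketch: both integrators return a velocity estimate, so one step
# of particle transport is the same one-liner either way (names illustrative):
#   u, v = runge_kutta(x, y, circ, dt, squared_blob_size)
#   x, y = x + u * dt, y + v * dt
# For runge_kutta the returned (u, v) is already the RK4-weighted average
# slope, which is why the update keeps the simple Euler-like form.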
| [
"[email protected]"
] | |
821fd4f3941c3016fc767da9f978015d7ee9d854 | 416e303e3c64fbc3571f204c3b3b281b4ce642be | /examples/1.3/special_batch_compo/list_dataset.py | 4d37a894ac6da61939269cd7c3d88ef00cef73b7 | [
"Apache-2.0"
] | permissive | fastestimator-util/fastestimator-misc | 6cba0f25ee5e9ace30bef392adc8081777db510f | c46e901d84745f35b7b49bdbb7b7121d39759b3f | refs/heads/master | 2023-08-09T13:58:27.846807 | 2023-07-27T01:27:32 | 2023-07-27T01:27:32 | 208,510,459 | 8 | 9 | Apache-2.0 | 2023-07-27T01:27:41 | 2019-09-14T22:14:56 | Python | UTF-8 | Python | false | false | 1,455 | py | import pdb
from torch.utils.data import Dataset
import fastestimator as fe
from fastestimator.architecture.tensorflow import LeNet
from fastestimator.dataset.data import mnist
from fastestimator.op.numpyop.univariate import ExpandDims, Minmax
from fastestimator.op.tensorop.loss import CrossEntropy
from fastestimator.op.tensorop.model import ModelOp, UpdateOp
class NegativeImageSimulatedTube(Dataset):
def __init__(self, ds):
self.ds = ds
def __getitem__(self, idx):
        # create your 5 simulated images here; for simplicity, the same image is just copied 5 times
image = self.ds[idx]["x"]
label = self.ds[idx]["y"]
return [{"x": image, "y": label} for _ in range(5)]
def __len__(self):
return len(self.ds)
def get_estimator():
ds, _ = mnist.load_data()
ds = NegativeImageSimulatedTube(ds)
pipeline = fe.Pipeline(train_data=ds, ops=[ExpandDims(inputs="x", outputs="x"), Minmax(inputs="x", outputs="x")])
model = fe.build(model_fn=LeNet, optimizer_fn="adam")
network = fe.Network(ops=[
ModelOp(model=model, inputs="x", outputs="y_pred"),
CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
UpdateOp(model=model, loss_name="ce")
])
estimator = fe.Estimator(pipeline=pipeline, network=network, epochs=2)
return estimator
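# Entry-point sketch (not in the original example); fe.Estimator exposes
# fit() for training, so the file can be run directly:
if __name__ == "__main__":
    est = get_estimator()
    est.fit()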
| [
"[email protected]"
] | |
698bdeeb48ab9e92ecab93669f2d8d302bc0673a | abd7504f6562babf79fb4e86af7529b2cb40fb54 | /tests/pyre.pkg/descriptors/dataDescriptor_set.py | 7d7a5fd65cabe1f2c11b54febcf9ac6f7623788f | [] | no_license | aivazis/p2 | 266c1728554b3f7a89e72f09ba2d9e5ff8d4447d | fd9a82d7dafa815dd68f679eb2b4b1a6287d02ea | refs/heads/main | 2022-01-08T12:45:16.646028 | 2022-01-01T17:31:10 | 2022-01-01T17:31:10 | 225,452,981 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,247 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <[email protected]>
# (c) 1998-2022 all rights reserved
def test():
"""
Verify that the {__set__} method behaves as expected
"""
# get the descriptor class
from p2.descriptors.DataDescriptor import DataDescriptor as descriptor
# make a subclass
class trait(descriptor):
# that implements the protocol
def __get__(self, instance, cls):
            # if this is instance access
if instance is not None:
# retrieve the value from the {instance} inventory
return instance.inventory[self]
# otherwise, just return myself
return self
def __set__(self, instance, value):
# store {value} in the {instance} inventory
instance.inventory[self] = value
# all done
return self
# make a client
class Client:
raw = descriptor()
cooked = trait()
# metamethods
def __init__(self, **kwds):
# chain up
super().__init__(**kwds)
# initialize my inventory
self.inventory = {}
# all done
return
# instantiate
client = Client()
# first set the value of the base descriptor
try:
# this should raise an error
client.raw = 5
# so we shouldn't get here
assert False, "unreachable"
# trap the expected failure
except AttributeError as error:
# unpack the arguments
desc, instance = error.args
# verify that the instance is correct
assert instance is client
# access the functional descriptor before we ever set its value
try:
# the lookup is expected to fail
client.cooked
# so trap it
except KeyError as error:
        # get the key that caused the lookup to fail
key, *_ = error.args
# verify it's the trait we accessed
assert key == Client.cooked
# set the value
client.cooked = True
# and check
assert client.cooked is True
# all done
return
# main
if __name__ == "__main__":
# run the test
test()
# end of file
| [
"[email protected]"
] | |
57e3464578970c186bbf72034ecffedcb4f1e8e3 | 2b6ca87b32c18a1e48ffb64675abc97fda3bc6f6 | /src/onecontainer_api/routers/ai.py | c017be766571aee37b503baa3b212c8fb8a5d995 | [
"BSD-3-Clause"
] | permissive | gabrielbriones/oneContainer-API | 7882b86ff1c0b4fb4461de09deb96633e7fbed51 | ea81ed31b921711d38d352ee3a9e56bd4231bf43 | refs/heads/main | 2023-01-22T17:45:20.345662 | 2020-12-04T20:50:18 | 2020-12-04T20:50:18 | 318,630,019 | 0 | 0 | null | 2020-12-04T20:39:23 | 2020-12-04T20:39:23 | null | UTF-8 | Python | false | false | 2,008 | py | """AI vertical entrypoint."""
from fastapi import APIRouter, Depends, File, UploadFile
import databases
from onecontainer_api import models, schemas, errors
from onecontainer_api.routers import services, drivers
router = APIRouter()
@router.get("/ai/{service_id}/usage",
description="Get functions available for this service")
async def usage(service_id: str, sync: bool = False, ttl: int = 3600, db: databases.Database = Depends(models.get_db)):
service = await services.get_service(service_id, db)
if service.driver:
driver = await drivers.get_driver(service.driver)
return drivers.service_stack(driver, service, "get", "/usage", sync=sync, ttl=ttl)
raise errors.ServiceException(service.id, errors.NO_DRV_ERROR, "Service has no driver assigned")
@router.post("/ai/{service_id}/serve",
description="Load a model")
async def serve(service_id: str, model_meta: schemas.AIModelMeta, sync: bool = False, ttl: int = 3600, db: databases.Database = Depends(models.get_db)):
service = await services.get_service(service_id, db)
if service.driver:
driver = await drivers.get_driver(service.driver)
return drivers.service_stack(driver, service, "post", "/serve", data=model_meta.dict(), sync=sync, ttl=ttl)
raise errors.ServiceException(service.id, errors.NO_DRV_ERROR, "Service has no driver assigned")
@router.post("/ai/{service_id}/predict",
description="Execute an inference over an image")
async def predict(service_id: str, image_file: UploadFile = File(...), sync: bool = False, ttl: int = 3600, db: databases.Database = Depends(models.get_db)):
service = await services.get_service(service_id, db)
if service.driver:
driver = await drivers.get_driver(service.driver)
return drivers.service_stack(driver, service, "post", "/predict", data=image_file, sync=sync, ttl=ttl)
raise errors.ServiceException(service.id, errors.NO_DRV_ERROR, "Service has no driver assigned")
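# Request sketch (host, port and service id are illustrative): the predict
# route expects a multipart upload named "image_file", e.g.
#   curl -X POST -F "image_file=@cat.jpg" \
#        "http://localhost:5000/ai/1/predict?sync=true"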
| [
"[email protected]"
] | |
487a6cba227881b051690e18f580cda7c3918873 | e77cbe31ed7eb052571a41cd7d68d110d3ca20ad | /procurement_request/wizard/procurement_request_wizard.py | b6a19b8f2dcd61f67039a71e4d929959d90cfdde | [] | no_license | lopin123/falinwa_branch | 237fa59d934e0672d1c55b95e619a7f4c97eb3b4 | 051821ec3d2691338953a38a5aed64a60c35113e | refs/heads/master | 2021-01-02T22:34:36.390758 | 2015-04-08T07:58:27 | 2015-04-08T07:58:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,316 | py | # -*- coding: utf-8 -*-
from openerp.osv import fields, orm
import openerp.addons.decimal_precision as dp
class procurement_request_wizard(orm.TransientModel):
_name = "procurement.request.wizard"
_description = "Procurement Request Wizard"
_columns = {
'product_id' : fields.many2one('product.product','Product',required=True),
'product_qty' : fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
'date_order': fields.date('Order Date', required=True),
'partner_id' : fields.many2one('res.partner', 'Supplier', required=True),
'date_planned' : fields.date('Expected Date'),
}
def _get_supplier(self, cr, uid, context=None):
if context is None:
context = {}
product_obj = self.pool.get('product.product')
supplier_obj = self.pool.get('res.partner')
res = supplier_obj.search(cr, uid, [('name', '=', 'SUPPLIER TO BE DEFINED'),
('supplier', '=', True)],
limit=1)
if context.get('active_id',False):
product_id = product_obj.browse(cr, uid, context.get('active_id',False))
if product_id.seller_ids:
res = [product_id.seller_ids[0].name.id]
return res and res[0] or False
_defaults = {
'date_order': fields.date.context_today,
'product_qty': lambda *args: 1.0,
'partner_id': _get_supplier,
}
def make_procurement_request(self, cr, uid, ids, context=None):
if context is None:
context = {}
data_wizard = self.browse(cr, uid, ids, context)[0]
purchase_order_obj = self.pool.get('purchase.order')
warehouse_obj = self.pool.get('stock.warehouse')
warehouse_id = warehouse_obj.search(cr, uid, [], context=context)[0]
wh = warehouse_obj.browse(cr ,uid ,warehouse_id , context=context)
purchase_order_obj.create(cr, uid, {
'req_product_id' : data_wizard.product_id.id,
'req_product_description' : data_wizard.product_id.name,
'req_uom_id' : data_wizard.product_id.uom_po_id.id,
'req_product_qty' : data_wizard.product_qty,
'location_id' : wh.wh_input_stock_loc_id.id,
'date_order' : data_wizard.date_order+ ' 00:00:00',
'partner_id' : data_wizard.partner_id.id,
'pricelist_id' : data_wizard.partner_id.property_product_pricelist_purchase.id,
'origin' : 'Direct from Product',
'minimum_planned_date' : data_wizard.date_planned,
},context=context)
return {
'type': 'ir.actions.act_window',
'name': 'Procurement Request',
'res_model': 'purchase.order',
'view_mode': 'tree',
'view_type': 'form',
'view_id': self.pool.get('ir.model.data').get_object_reference(cr, uid, 'procurement_request', 'fal_procurement_request_tree')[1],
'target': 'current',
'nodestroy': False,
'domain': '[("state","=","procurement_request")]',
}
#end of procurement_request_wizard() | [
"[email protected]"
] | |
6cf1014ce60dbc8ae7505b3d9a75e4852b1a3698 | 8f1c3c76bf8514818b733ba29fe575d8a5243add | /eduerp_attendance/models/__init__.py | e27612b75f471c9162dd01c4529d7609eddb4edc | [
"Apache-2.0"
] | permissive | westlyou/eduerp | 27f1c7dcd0d2badf50cb6c69f5e761d7f0c6a898 | 968d79b5adc729bc81192604f1fc223517d38ccf | refs/heads/master | 2021-06-04T05:11:13.858246 | 2016-09-12T07:21:17 | 2016-09-12T07:21:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 350 | py | # -*- coding: utf-8 -*-
###############################################################################
#
###############################################################################
from . import attendance_line
from . import attendance_register
from . import attendance_sheet
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"[email protected]"
] | |
9814950516db9ba9bb772ad323f3ccd46b929a7a | b72c37e3ccda507b231649cddd5c7845c6c34ba1 | /PythonBasic/Day10/eval.py | 7f3920d20dfe006d406b4920d5251b7016469ca2 | [] | no_license | ljrdemail/AID1810 | 51c61c255b5c5efc1dc642b46691a614daedd85e | b417bd831bc1550ab953ce7ca23f54e34b8b2692 | refs/heads/master | 2020-04-24T09:45:14.781612 | 2019-02-21T11:26:49 | 2019-02-21T11:26:49 | 171,866,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | s = "1+2*3"
print(eval(s))  # evaluates the string as a Python expression -> 7
x = 100
y = 200
v2 = eval("x+y")  # names inside the string resolve against the current scope
print(v2)  # -> 300
| [
"root"
] | root |
f9a52da70f400cadaddf780a7db144d291e1f193 | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/iotcentral/_inputs.py | 037588016bbe804fa5963d69b4ae4f963d2bd3ef | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,024 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from ._enums import *
__all__ = [
'AppSkuInfoArgs',
]
@pulumi.input_type
class AppSkuInfoArgs:
def __init__(__self__, *,
name: pulumi.Input[Union[str, 'AppSku']]):
"""
Information about the SKU of the IoT Central application.
:param pulumi.Input[Union[str, 'AppSku']] name: The name of the SKU.
"""
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> pulumi.Input[Union[str, 'AppSku']]:
"""
The name of the SKU.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[Union[str, 'AppSku']]):
pulumi.set(self, "name", value)
| [
"[email protected]"
] | |
894b65e24be5bbe3bb8ae5de43cf5a4301381c52 | 7cc141beb2948f64e1b187862108b883f09bf71c | /NotasCorretagens/chromedriver.py | 66a8495368cc630687cbb1b08c9fedc457e3a6c0 | [
"MIT"
] | permissive | felipemaion/scraping_xpi | 5829a83d67da398cccd4d91c096108fe6f3cf0a7 | 522a5955c05a7da1e70055f7668f0e5e3593cf72 | refs/heads/master | 2020-05-02T15:39:59.604808 | 2019-04-01T07:58:05 | 2019-04-01T07:58:05 | 178,048,850 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,578 | py | from selenium import webdriver
from selenium.webdriver.support.ui import Select
class Scraper:
def __init__(self, *args, **kwargs):
self.chrome_path = r'/Users/maion/bin/chromedriver'
self.driver = webdriver.Chrome(executable_path=self.chrome_path)
self.driver.get("https://portal.xpi.com.br")
# return super().__init__(*args, **kwargs)
def minha_conta(self):
return self.driver.find_element_by_xpath("""//*[@id="yield-portal-header"]/header/section[2]/div/nav/ul/li[1]/span""").click()
def notas_corretagens(self):
return self.driver.find_element_by_xpath("""//*[@id="yield-portal-header"]/header/section[2]/div/nav/ul/li[1]/ul/li[2]/dl/dd[4]/a""").click()
def combo_box(self):
return Select(self.driver.find_element_by_xpath("""//*[@id="Data"]"""))
def define_tipo_relatorio(self):
return self.driver.find_element_by_xpath("""//*[@id="rdbXP"]""").click()
def gera_relatorio(self):
return self.driver.find_element_by_xpath("""//*[@id="stNotasCor"]/article/div/div/span[4]/button""").click()
def baixa_relatorio(self):
return self.driver.find_element_by_xpath("""//*[@id="icon"]""")
def patrimonio(self):
return self.driver.find_element_by_xpath("""/html/body/div[2]/section/div[3]/div[1]/div/div[4]/p[1]/span/span""").text
scraper = Scraper()
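# Navigation sketch mirroring the methods above (kept commented out because
# the portal's real flow may differ; the report period index is illustrative):
#   scraper.minha_conta()
#   scraper.notas_corretagens()
#   scraper.define_tipo_relatorio()
#   scraper.combo_box().select_by_index(0)
#   scraper.gera_relatorio()
#   scraper.baixa_relatorio().click()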
| [
"[email protected]"
] | |
e3a292441f1962e474bd358f71425ba3ac374a99 | 8f48d12b88048e424ebb0d72ca6dfab5cf12ae0f | /1500_1999/1910.py | 05f3524ffd5384049fc271225340944a7db37961 | [] | no_license | renjieliu/leetcode | e1caf13c18a8107ed9252588b339fb76bcb1b246 | 4668b64fcb9320b6c316d8608fc61911ce43b6c7 | refs/heads/master | 2023-03-18T18:16:06.187741 | 2023-03-14T20:31:59 | 2023-03-14T20:31:59 | 128,823,819 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 467 | py | class Solution:
def removeOccurrences(self, s: str, part: str) -> str:
output = ""
for c in s:
output += c
while len(output) >= len(part) and output[-len(part):] == part:
output = output[:-len(part)]
return output
# previous approach
# class Solution:
# def removeOccurrences(self, s: str, part: str) -> str:
# while part in s:
# s = s.replace(part, '')
# return s
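# Example: removeOccurrences("daabcbaabcbc", "abc") -> "dab". The stack-style
# build above removes each match as soon as it completes, instead of
# rescanning the whole string per pass like the replace-based version.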
| [
"[email protected]"
] | |
54902efb2e961b3c0e80bfdb0b26efaa72d60f79 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_313/ch54_2020_03_31_00_22_57_884915.py | d4a200b530199c65a717529b916d673f8331f920 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 182 | py | def calcula_fibonacci(n):
    # fill the first 100 Fibonacci numbers (1, 1, 2, 3, 5, ...) and return
    # the n-th one; a separate loop index keeps the parameter n intact
    fibonacci = [0]*100
    fibonacci[0] = 1
    fibonacci[1] = 1
    i = 2
    while i < 100:
        fibonacci[i] = fibonacci[i-1] + fibonacci[i-2]
        i = i + 1
    return fibonacci[n]
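# Example with this indexing (fibonacci[0] == fibonacci[1] == 1):
#   calcula_fibonacci(5) -> 8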
"[email protected]"
] | |
81f2858581aeddee8d0342e21d1ce3c9479530e8 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02262/s141204203.py | 4d4e0e7ed1cedd407f0385ff3cc2d2558a5c8cde | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | cnt = 0
m = 0
g = []
def insertion_sort(a, n, g):
# cnt+=1 のときにローカルスコープの外を見にいくようにするため
global cnt
for i in range(g, n):
v = a[i]
k = i - g
while k >= 0 and a[k] > v:
a[k+g] = a[k]
k -= g
cnt += 1
a[k+g] = v
def shell_sort(a, n):
global m, g
h = 1
while h <= n:
g.append(h)
h = h * 3 + 1
g.reverse()
m = len(g)
for i in range(m):
insertion_sort(a, n, g[i])
n = int(input())
a = [int(input()) for i in range(n)]
shell_sort(a, n)
print(m)
print(*g)
print(cnt)
for i in a:
print(i)
| [
"[email protected]"
] | |
537ea782435fb47f81c6431c0278bb50ccfe0c70 | d8afb2e678d9da745a114e8feffee930218716b4 | /backend/adv_test_3_23538/settings.py | ae68df3785b6f69fbc2d8f0ca426a63043c5a8f4 | [] | no_license | crowdbotics-apps/adv-test-3-23538 | fd03929a6a4d8e984c2daad895d0c20fd80b4df5 | e2af927fa78de374384d9d6b59a74207f4209bba | refs/heads/master | 2023-02-12T15:18:53.318287 | 2020-12-24T19:38:53 | 2020-12-24T19:38:53 | 324,224,353 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,035 | py | """
Django settings for adv_test_3_23538 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'modules',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'adv_test_3_23538.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'adv_test_3_23538.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
4016835924c889c2ef390dd8f82ba30088442636 | 0cd799684098c374ec6d0806410f2077814b2e9e | /advisor/migrations/0002_booking.py | 7e9a8232b8d404a10fdf566e1ecff43de219c083 | [
"MIT"
] | permissive | diyajaiswal11/User_Advisor | bc2e6828f899e08dcf3a1ef0fbe041a8013c43b4 | 332001e874add115b19cccd2fb0b6622321f32c2 | refs/heads/main | 2023-04-17T07:48:24.801798 | 2021-04-25T11:25:30 | 2021-04-25T11:25:30 | 361,345,324 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | # Generated by Django 3.2 on 2021-04-25 09:56
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('advisor', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Booking',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('booking_time', models.DateTimeField()),
('advisor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='advisor.advisor')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='booking', to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
23495c8203860fae627a8100b1b8e2e9ba365996 | e58b3b41505eea2848624c69282327b8531c5a9d | /apps/operations/adminx.py | 10c0907278f8e6753f7998be043e6b8374d71df9 | [] | no_license | GoYMS/mxonline | 9d4a9624921a389308896ddba9a817282e910532 | 3b9e27aaaf8f47a89083806cb8f885a5b8c31c36 | refs/heads/master | 2022-12-13T11:50:24.218015 | 2020-02-21T07:25:34 | 2020-02-21T07:25:34 | 242,054,819 | 1 | 0 | null | 2022-12-08T06:19:00 | 2020-02-21T04:34:42 | JavaScript | UTF-8 | Python | false | false | 1,608 | py | import xadmin
from apps.operations.models import UserAsk,CourseComments,UserCourse,UserFavorite,UserMessage,Banner
class BannerAdmin(object):
list_display = ['title', 'image', 'url', 'datetime','index']
search_fields = ['title', 'image', 'url','index']
list_filter = ['title', 'image', 'url', 'datetime','index']
class UserAskAdmin(object):
    # UserAsk stores consultation requests, not banner data; the columns below
    # assume the model's usual name/mobile/course_name fields
    list_display = ['name', 'mobile', 'course_name']
    search_fields = ['name', 'mobile', 'course_name']
    list_filter = ['name', 'mobile', 'course_name']
class UserCourseAdmin(object):
list_display = ['user', 'course', 'datetime']
search_fields = ['user', 'course']
list_filter = ['user', 'course', 'datetime']
class UserMessageAdmin(object):
list_display = ['user', 'message', 'has_read','datetime']
search_fields = ['user','message', 'has_read']
list_filter = ['user', 'message', 'has_read', 'datetime']
class CourseCommentsAdmin(object):
list_display = ['user', 'course','comments', 'datetime']
search_fields = ['user', 'course','comments']
list_filter = ['user', 'course', 'comments','datetime']
class UserFavoriteAdmin(object):
list_display = ['user', 'fav_id', 'fav_type', 'datetime']
search_fields = ['user', 'fav_id', 'fav_type']
list_filter = ['user', 'fav_id', 'fav_type', 'datetime']
xadmin.site.register(Banner,BannerAdmin)
xadmin.site.register(UserAsk,UserAskAdmin)
xadmin.site.register(UserCourse,UserCourseAdmin)
xadmin.site.register(UserMessage,UserMessageAdmin)
xadmin.site.register(CourseComments,CourseCommentsAdmin)
xadmin.site.register(UserFavorite,UserFavoriteAdmin) | [
"[email protected]"
] | |
3e439ae8454afc8a677657435ca9e6aa7819fae8 | c38b72d9003283979e5902735bb123643ba4fba9 | /backend/home/migrations/0002_load_initial_data.py | 58c76493163b4c675b73dd1999db358280227029 | [] | no_license | crowdbotics-apps/hugz-club-22211 | e486a1588d6a3e6f2e1ffa9466c3dcc46fe13176 | 3b5b344a49254f2d95d5780d1c8171d3d774133b | refs/heads/master | 2023-06-30T21:45:53.007449 | 2020-11-02T13:56:53 | 2020-11-02T13:56:53 | 309,385,160 | 0 | 1 | null | 2021-08-03T20:05:12 | 2020-11-02T13:53:57 | JavaScript | UTF-8 | Python | false | false | 1,290 | py | from django.db import migrations
def create_customtext(apps, schema_editor):
CustomText = apps.get_model("home", "CustomText")
customtext_title = "hugz club"
CustomText.objects.create(title=customtext_title)
def create_homepage(apps, schema_editor):
HomePage = apps.get_model("home", "HomePage")
homepage_body = """
<h1 class="display-4 text-center">hugz club</h1>
<p class="lead">
This is the sample application created and deployed from the Crowdbotics app.
You can view list of packages selected for this application below.
</p>"""
HomePage.objects.create(body=homepage_body)
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "hugz-club-22211.botics.co"
site_params = {
"name": "hugz club",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("home", "0001_initial"),
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_customtext),
migrations.RunPython(create_homepage),
migrations.RunPython(create_site),
]
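    # Note: these RunPython steps define no reverse function, so the
    # migration cannot be unapplied; passing migrations.RunPython.noop as a
    # second argument would make each step reversible.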
| [
"[email protected]"
] | |
83c98a7314dbb6e447542e4db0c9c74799724ab0 | 28e8ab381a8c1b4321cd83acff6aa33468166d6b | /python3.4Smartforest/lib/python3.4/site-packages/django/db/models/fields/related_lookups.py | de10ef835e1b5bc97d453c1cbe325ee794746ac2 | [
"MIT"
] | permissive | letouriste001/SmartForest_2.0 | 343e13bc085d753be2af43aecfb74a5fffaa5e3b | 109b78bf1e8c8404800f377ab969395ccbb617be | refs/heads/master | 2020-12-21T16:54:22.865824 | 2016-08-11T14:17:45 | 2016-08-11T14:17:45 | 59,734,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,241 | py | from django.db.models.lookups import (
Exact, GreaterThan, GreaterThanOrEqual, In, IsNull, LessThan,
LessThanOrEqual,
)
class MultiColSource(object):
contains_aggregate = False
def __init__(self, alias, targets, sources, field):
self.targets, self.sources, self.field, self.alias = targets, sources, field, alias
self.output_field = self.field
def __repr__(self):
return "{}({}, {})".format(
self.__class__.__name__, self.alias, self.field)
def relabeled_clone(self, relabels):
return self.__class__(relabels.get(self.alias, self.alias),
self.targets, self.sources, self.field)
def get_normalized_value(value, lhs):
from django.db.models import Model
if isinstance(value, Model):
value_list = []
sources = lhs.output_field.get_path_info()[-1].target_fields
for source in sources:
while not isinstance(value, source.model) and source.remote_field:
source = source.remote_field.model._meta.get_field(source.remote_field.field_name)
try:
value_list.append(getattr(value, source.attname))
except AttributeError:
# A case like Restaurant.objects.filter(place=restaurant_instance),
# where place is a OneToOneField and the primary key of Restaurant.
return (value.pk,)
return tuple(value_list)
if not isinstance(value, tuple):
return (value,)
return value
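# Illustrative behaviour (clarifying note, not from Django's source):
#   get_normalized_value(42, lhs)             -> (42,)
#   get_normalized_value(model_instance, lhs) -> tuple of the instance's
#                                                target-field attribute values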
class RelatedIn(In):
def get_prep_lookup(self):
if not isinstance(self.lhs, MultiColSource) and self.rhs_is_direct_value():
# If we get here, we are dealing with single-column relations.
self.rhs = [get_normalized_value(val, self.lhs)[0] for val in self.rhs]
# We need to run the related field's get_prep_lookup(). Consider case
# ForeignKey to IntegerField given value 'abc'. The ForeignKey itself
# doesn't have validation for non-integers, so we must run validation
# using the target field.
if hasattr(self.lhs.output_field, 'get_path_info'):
# Run the target field's get_prep_lookup. We can safely assume there is
# only one as we don't get to the direct value branch otherwise.
self.rhs = self.lhs.output_field.get_path_info()[-1].target_fields[-1].get_prep_lookup(
self.lookup_name, self.rhs)
return super(RelatedIn, self).get_prep_lookup()
def as_sql(self, compiler, connection):
if isinstance(self.lhs, MultiColSource):
# For multicolumn lookups we need to build a multicolumn where clause.
# This clause is either a SubqueryConstraint (for values that need to be compiled to
# SQL) or a OR-combined list of (col1 = val1 AND col2 = val2 AND ...) clauses.
from django.db.models.sql.where import WhereNode, SubqueryConstraint, AND, OR
root_constraint = WhereNode(connector=OR)
if self.rhs_is_direct_value():
values = [get_normalized_value(value, self.lhs) for value in self.rhs]
for value in values:
value_constraint = WhereNode()
for source, target, val in zip(self.lhs.sources, self.lhs.targets, value):
lookup_class = target.get_lookup('exact')
lookup = lookup_class(target.get_col(self.lhs.alias, source), val)
value_constraint.add(lookup, AND)
root_constraint.add(value_constraint, OR)
else:
root_constraint.add(
SubqueryConstraint(
self.lhs.alias, [target.column for target in self.lhs.targets],
[source.name for source in self.lhs.sources], self.rhs),
AND)
return root_constraint.as_sql(compiler, connection)
else:
return super(RelatedIn, self).as_sql(compiler, connection)
class RelatedLookupMixin(object):
def get_prep_lookup(self):
if not isinstance(self.lhs, MultiColSource) and self.rhs_is_direct_value():
# If we get here, we are dealing with single-column relations.
self.rhs = get_normalized_value(self.rhs, self.lhs)[0]
# We need to run the related field's get_prep_lookup(). Consider case
# ForeignKey to IntegerField given value 'abc'. The ForeignKey itself
# doesn't have validation for non-integers, so we must run validation
# using the target field.
if hasattr(self.lhs.output_field, 'get_path_info'):
# Get the target field. We can safely assume there is only one
# as we don't get to the direct value branch otherwise.
self.rhs = self.lhs.output_field.get_path_info()[-1].target_fields[-1].get_prep_lookup(
self.lookup_name, self.rhs)
return super(RelatedLookupMixin, self).get_prep_lookup()
def as_sql(self, compiler, connection):
if isinstance(self.lhs, MultiColSource):
assert self.rhs_is_direct_value()
self.rhs = get_normalized_value(self.rhs, self.lhs)
from django.db.models.sql.where import WhereNode, AND
root_constraint = WhereNode()
for target, source, val in zip(self.lhs.targets, self.lhs.sources, self.rhs):
lookup_class = target.get_lookup(self.lookup_name)
root_constraint.add(
lookup_class(target.get_col(self.lhs.alias, source), val), AND)
return root_constraint.as_sql(compiler, connection)
return super(RelatedLookupMixin, self).as_sql(compiler, connection)
class RelatedExact(RelatedLookupMixin, Exact):
pass
class RelatedLessThan(RelatedLookupMixin, LessThan):
pass
class RelatedGreaterThan(RelatedLookupMixin, GreaterThan):
pass
class RelatedGreaterThanOrEqual(RelatedLookupMixin, GreaterThanOrEqual):
pass
class RelatedLessThanOrEqual(RelatedLookupMixin, LessThanOrEqual):
pass
class RelatedIsNull(RelatedLookupMixin, IsNull):
pass
| [
"[email protected]"
] | |
657f17c89bb380087b63af9fc39c0adbc2c84463 | 63bf6161532eefa72aa3be8b01cde601b08507dc | /python-mapping-example/fhir_model_generator/tests/model/relatedperson_tests.py | 629062d82fc060e23224a32864793023ac5e11af | [
"Apache-2.0"
] | permissive | Healthedata1/mFHIR | 4ef370b87e03e973918e5683977d32fe262655bc | 1b4ea441cfa08b661416a3badedf7e90f2809163 | refs/heads/master | 2022-12-10T21:07:03.948406 | 2021-06-18T01:58:23 | 2021-06-18T01:58:23 | 129,964,251 | 9 | 5 | null | 2022-12-09T05:23:54 | 2018-04-17T20:57:15 | HTML | UTF-8 | Python | false | false | 10,491 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.1-9346c8cc45 on 2020-02-10.
# 2020, SMART Health IT.
import os
import io
import unittest
import json
from model import relatedperson
from model.fhirdate import FHIRDate
class RelatedPersonTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or \
os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'fhir-parser', 'downloads'))
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("RelatedPerson", js["resourceType"])
return relatedperson.RelatedPerson(js)
def testRelatedPerson1(self):
inst = self.instantiate_from("relatedperson-example-peter.json")
self.assertIsNotNone(inst, "Must have instantiated a RelatedPerson instance")
self.implRelatedPerson1(inst)
js = inst.as_json()
self.assertEqual("RelatedPerson", js["resourceType"])
inst2 = relatedperson.RelatedPerson(js)
self.implRelatedPerson1(inst2)
def implRelatedPerson1(self, inst):
self.assertEqual(inst.address[0].city, "PleasantVille")
self.assertEqual(inst.address[0].line[0], "534 Erewhon St")
self.assertEqual(inst.address[0].postalCode, "3999")
self.assertEqual(inst.address[0].state, "Vic")
self.assertEqual(inst.address[0].use, "home")
self.assertEqual(inst.gender, "male")
self.assertEqual(inst.id, "peter")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.name[0].family, "Chalmers")
self.assertEqual(inst.name[0].given[0], "Peter")
self.assertEqual(inst.name[0].given[1], "James")
self.assertEqual(inst.name[0].use, "official")
self.assertEqual(inst.period.start.date, FHIRDate("2012-03-11").date)
self.assertEqual(inst.period.start.as_json(), "2012-03-11")
self.assertEqual(inst.photo[0].contentType, "image/jpeg")
self.assertEqual(inst.photo[0].url, "Binary/f012")
self.assertEqual(inst.relationship[0].coding[0].code, "C")
self.assertEqual(inst.relationship[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/v2-0131")
self.assertEqual(inst.telecom[0].system, "phone")
self.assertEqual(inst.telecom[0].use, "work")
self.assertEqual(inst.telecom[0].value, "(03) 5555 6473")
self.assertEqual(inst.text.status, "generated")
def testRelatedPerson2(self):
inst = self.instantiate_from("relatedperson-example-f001-sarah.json")
self.assertIsNotNone(inst, "Must have instantiated a RelatedPerson instance")
self.implRelatedPerson2(inst)
js = inst.as_json()
self.assertEqual("RelatedPerson", js["resourceType"])
inst2 = relatedperson.RelatedPerson(js)
self.implRelatedPerson2(inst2)
def implRelatedPerson2(self, inst):
self.assertEqual(inst.gender, "female")
self.assertEqual(inst.id, "f001")
self.assertEqual(inst.identifier[0].system, "urn:oid:2.16.840.1.113883.2.4.6.3")
self.assertEqual(inst.identifier[0].type.text, "BSN")
self.assertEqual(inst.identifier[0].use, "official")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.name[0].family, "Abels")
self.assertEqual(inst.name[0].given[0], "Sarah")
self.assertEqual(inst.name[0].use, "usual")
self.assertEqual(inst.relationship[0].coding[0].code, "SIGOTHR")
self.assertEqual(inst.relationship[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/v3-RoleCode")
self.assertEqual(inst.telecom[0].system, "phone")
self.assertEqual(inst.telecom[0].use, "mobile")
self.assertEqual(inst.telecom[0].value, "0690383372")
self.assertEqual(inst.telecom[1].system, "email")
self.assertEqual(inst.telecom[1].use, "home")
self.assertEqual(inst.telecom[1].value, "[email protected]")
self.assertEqual(inst.text.status, "generated")
def testRelatedPerson3(self):
inst = self.instantiate_from("relatedperson-example-newborn-mom.json")
self.assertIsNotNone(inst, "Must have instantiated a RelatedPerson instance")
self.implRelatedPerson3(inst)
js = inst.as_json()
self.assertEqual("RelatedPerson", js["resourceType"])
inst2 = relatedperson.RelatedPerson(js)
self.implRelatedPerson3(inst2)
def implRelatedPerson3(self, inst):
self.assertTrue(inst.active)
self.assertEqual(inst.address[0].line[0], "2222 Home Street")
self.assertEqual(inst.address[0].use, "home")
self.assertEqual(inst.birthDate.date, FHIRDate("1973-05-31").date)
self.assertEqual(inst.birthDate.as_json(), "1973-05-31")
self.assertEqual(inst.gender, "female")
self.assertEqual(inst.id, "newborn-mom")
self.assertEqual(inst.identifier[0].system, "http://hl7.org/fhir/sid/us-ssn")
self.assertEqual(inst.identifier[0].type.coding[0].code, "SS")
self.assertEqual(inst.identifier[0].type.coding[0].system, "http://terminology.hl7.org/CodeSystem/v2-0203")
self.assertEqual(inst.identifier[0].value, "444222222")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.name[0].family, "Everywoman")
self.assertEqual(inst.name[0].given[0], "Eve")
self.assertEqual(inst.name[0].use, "official")
self.assertEqual(inst.relationship[0].coding[0].code, "NMTH")
self.assertEqual(inst.relationship[0].coding[0].display, "natural mother")
self.assertEqual(inst.relationship[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/v3-RoleCode")
self.assertEqual(inst.relationship[0].text, "Natural Mother")
self.assertEqual(inst.telecom[0].system, "phone")
self.assertEqual(inst.telecom[0].use, "work")
self.assertEqual(inst.telecom[0].value, "555-555-2003")
self.assertEqual(inst.text.status, "generated")
def testRelatedPerson4(self):
inst = self.instantiate_from("relatedperson-example.json")
self.assertIsNotNone(inst, "Must have instantiated a RelatedPerson instance")
self.implRelatedPerson4(inst)
js = inst.as_json()
self.assertEqual("RelatedPerson", js["resourceType"])
inst2 = relatedperson.RelatedPerson(js)
self.implRelatedPerson4(inst2)
def implRelatedPerson4(self, inst):
self.assertTrue(inst.active)
self.assertEqual(inst.address[0].city, "Paris")
self.assertEqual(inst.address[0].country, "FRA")
self.assertEqual(inst.address[0].line[0], "43, Place du Marché Sainte Catherine")
self.assertEqual(inst.address[0].postalCode, "75004")
self.assertEqual(inst.gender, "female")
self.assertEqual(inst.id, "benedicte")
self.assertEqual(inst.identifier[0].system, "urn:oid:1.2.250.1.61")
self.assertEqual(inst.identifier[0].type.text, "INSEE")
self.assertEqual(inst.identifier[0].use, "usual")
self.assertEqual(inst.identifier[0].value, "272117510400399")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.name[0].family, "du Marché")
self.assertEqual(inst.name[0].given[0], "Bénédicte")
self.assertEqual(inst.photo[0].contentType, "image/jpeg")
self.assertEqual(inst.photo[0].url, "Binary/f016")
self.assertEqual(inst.relationship[0].coding[0].code, "N")
self.assertEqual(inst.relationship[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/v2-0131")
self.assertEqual(inst.relationship[0].coding[1].code, "WIFE")
self.assertEqual(inst.relationship[0].coding[1].system, "http://terminology.hl7.org/CodeSystem/v3-RoleCode")
self.assertEqual(inst.telecom[0].system, "phone")
self.assertEqual(inst.telecom[0].value, "+33 (237) 998327")
self.assertEqual(inst.text.status, "generated")
def testRelatedPerson5(self):
inst = self.instantiate_from("relatedperson-example-f002-ariadne.json")
self.assertIsNotNone(inst, "Must have instantiated a RelatedPerson instance")
self.implRelatedPerson5(inst)
js = inst.as_json()
self.assertEqual("RelatedPerson", js["resourceType"])
inst2 = relatedperson.RelatedPerson(js)
self.implRelatedPerson5(inst2)
def implRelatedPerson5(self, inst):
self.assertEqual(inst.birthDate.date, FHIRDate("1963").date)
self.assertEqual(inst.birthDate.as_json(), "1963")
self.assertEqual(inst.gender, "female")
self.assertEqual(inst.id, "f002")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.name[0].text, "Ariadne Bor-Jansma")
self.assertEqual(inst.name[0].use, "usual")
self.assertEqual(inst.period.start.date, FHIRDate("1975").date)
self.assertEqual(inst.period.start.as_json(), "1975")
self.assertEqual(inst.photo[0].contentType, "image/jpeg")
self.assertEqual(inst.relationship[0].coding[0].code, "SIGOTHR")
self.assertEqual(inst.relationship[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/v3-RoleCode")
self.assertEqual(inst.telecom[0].system, "phone")
self.assertEqual(inst.telecom[0].use, "home")
self.assertEqual(inst.telecom[0].value, "+31201234567")
self.assertEqual(inst.text.status, "generated")
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
] | |
a8c9f7cf950293dcc4cd6b1a2029129783a07270 | 41bee87d712a9460ab2d79a7439cd9f98e861d63 | /TgwlDataCenter/TgwlDataCenter/main/views.py | a4ea7f9d3fa2fca724acbb0daa9c2a22beb5e1aa | [] | no_license | liulixiang1988/iis_flask_demo | e72e209281441521b2b952dbf3547a5dc6507ec1 | f1d77f363b64fc52859c83fbffb6da563df561c4 | refs/heads/master | 2021-01-10T08:12:20.164278 | 2015-11-27T01:59:45 | 2015-11-27T01:59:45 | 46,954,319 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 949 | py | # -*- coding:utf-8 -*-
from datetime import datetime
from flask import render_template
from TgwlDataCenter import db
from TgwlDataCenter.models import Fruit
from TgwlDataCenter.main import main
@main.route('/')
@main.route('/home')
def home():
"""Renders the home page."""
fruit = Fruit(fruit=u"苹果")
db.session.add(fruit)
db.session.commit()
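    # NOTE: inserting a Fruit row on every request is demo behaviour to
    # exercise the DB round trip; a production view would not write on GET.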
return render_template(
'index.html',
title='Home Page',
year=datetime.now().year,
)
@main.route('/contact')
def contact():
"""Renders the contact page."""
return render_template(
'contact.html',
title='Contact',
year=datetime.now().year,
message='Your contact page.'
)
@main.route('/about')
def about():
"""Renders the about page."""
return render_template(
'about.html',
title='About',
year=datetime.now().year,
message='Your application description page.'
)
| [
"[email protected]"
] | |
2c896f2e371789e95671bb85e89272c49208ec25 | 52f00638c8773b001da5a341e16abc05934457f8 | /rlpyt/agents/pg/mujoco.py | 0ac8ff5e3dad5ea0dc9f92de401c713934064014 | [
"MIT"
] | permissive | abagaria/rlpyt | 8e72dde5f3750c72da1fd8a97badf2c9691ea633 | 9d35217f2ecec60891753cf313d482d7887c16e1 | refs/heads/master | 2020-09-04T00:16:35.320779 | 2019-11-21T19:21:49 | 2019-11-21T19:21:49 | 219,614,849 | 0 | 1 | MIT | 2019-11-04T23:17:11 | 2019-11-04T23:17:10 | null | UTF-8 | Python | false | false | 1,049 | py |
from rlpyt.agents.pg.gaussian import (GaussianPgAgent,
RecurrentGaussianPgAgent, AlternatingRecurrentGaussianPgAgent)
from rlpyt.models.pg.mujoco_ff_model import MujocoFfModel
from rlpyt.models.pg.mujoco_lstm_model import MujocoLstmModel
class MujocoMixin:
def make_env_to_model_kwargs(self, env_spaces):
assert len(env_spaces.action.shape) == 1
return dict(observation_shape=env_spaces.observation.shape,
action_size=env_spaces.action.shape[0])
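    # e.g. (illustrative) for gym's HalfCheetah, whose observation space has
    # shape (17,) and action space shape (6,), this returns
    # dict(observation_shape=(17,), action_size=6).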
class MujocoFfAgent(MujocoMixin, GaussianPgAgent):
def __init__(self, ModelCls=MujocoFfModel, **kwargs):
super().__init__(ModelCls=ModelCls, **kwargs)
class MujocoLstmAgent(MujocoMixin, RecurrentGaussianPgAgent):
def __init__(self, ModelCls=MujocoLstmModel, **kwargs):
super().__init__(ModelCls=ModelCls, **kwargs)
class AlternatingMujocoLstmAgent(MujocoMixin,
AlternatingRecurrentGaussianPgAgent):
def __init__(self, ModelCls=MujocoLstmModel, **kwargs):
super().__init__(ModelCls=ModelCls, **kwargs)
| [
"[email protected]"
] | |
72dc80d819fcaab11a208c849483dcbb988c9a1f | 4a31308430d06cc3743e0dcc52501c0addd19008 | /nodeA/p2p/core_node_list.py | a6782b3f7f3b6d28e02543a5bfab7d3c94796956 | [] | no_license | hyo07/bc2odpt-dev | b22ad08139c311164cabce63b547dd076df52c08 | 2f11ae5b4dad410ba66179c7701b2e25ceeff371 | refs/heads/master | 2022-12-12T18:16:23.250608 | 2020-11-06T07:23:06 | 2020-11-06T07:23:06 | 247,711,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,002 | py | import threading
class CoreNodeList:
def __init__(self):
self.lock = threading.Lock()
self.list = set()
    def add(self, peer):
        """
        Add a core node to the list.
        param:
            peer : connection info (IP address and port number) of the node
                   to register as a core node
        """
        with self.lock:
            print('Adding peer: ', peer)
            self.list.add(peer)
            print('Current Core List: ', self.list)
    def remove(self, peer):
        """
        Remove a core node that is considered to have left the network.
        param:
            peer : connection info (IP address and port number) of the node
                   to remove
        """
        with self.lock:
            if peer in self.list:
                print('Removing peer: ', peer)
                self.list.remove(peer)
                print('Current Core list: ', self.list)
    def overwrite(self, new_list):
        """
        Use this to overwrite the whole list in one go, e.g. after checking
        the liveness of several peers in a single pass.
        """
        with self.lock:
            print('core node list will be overwritten')
            self.list = new_list
            print('Current Core list: ', self.list)
    def get_list(self):
        """
        Return the set of peers currently connected.
        """
        li = set(self.list)
        return li
    def get_length(self):
        return len(self.list)
    def get_c_node_info(self):
        """
        Return the peer at the top of the list.
        """
        return list(self.list)[0]
    def has_this_peer(self, peer):
        """
        Check whether the given peer is contained in the list.
        param:
            peer : tuple of IP address and port number
        return:
            True or False
        """
        return peer in self.list
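# Minimal usage sketch (illustrative only; peers are (ip, port) tuples):
#   cnl = CoreNodeList()
#   cnl.add(('192.168.0.1', 50082))
#   if cnl.has_this_peer(('192.168.0.1', 50082)):
#       cnl.remove(('192.168.0.1', 50082))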
| [
"[email protected]"
] | |
084e4cf81257ae12d846254bbff0055434fc7d89 | e97e727972149063b3a1e56b38961d0f2f30ed95 | /test/test_double_operation_resource.py | 31eb32a0b4acb0777dcf050bca89159001193ac1 | [] | no_license | knetikmedia/knetikcloud-python-client | f3a485f21c6f3e733a864194c9acf048943dece7 | 834a24415385c906732437970db105e1bc71bde4 | refs/heads/master | 2021-01-12T10:23:35.307479 | 2018-03-14T16:04:24 | 2018-03-14T16:04:24 | 76,418,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,073 | py | # coding: utf-8
"""
Knetik Platform API Documentation latest
This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.
OpenAPI spec version: latest
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import knetik_cloud
from knetik_cloud.rest import ApiException
from knetik_cloud.models.double_operation_resource import DoubleOperationResource
class TestDoubleOperationResource(unittest.TestCase):
""" DoubleOperationResource unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testDoubleOperationResource(self):
"""
Test DoubleOperationResource
"""
# FIXME: construct object with mandatory attributes with example values
#model = knetik_cloud.models.double_operation_resource.DoubleOperationResource()
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
c5e3cf8ccaee00baf6e761221f47c8375c551c50 | 35b6013c1943f37d1428afd2663c8aba0a02628d | /enterpriseknowledgegraph/search/lookup_sample_test.py | 14bb48921ebb2818291664e810db90c14a1145eb | [
"Apache-2.0"
] | permissive | GoogleCloudPlatform/python-docs-samples | d2a251805fbeab15d76ed995cf200727f63f887d | 44e819e713c3885e38c99c16dc73b7d7478acfe8 | refs/heads/main | 2023-08-28T12:52:01.712293 | 2023-08-28T11:18:28 | 2023-08-28T11:18:28 | 35,065,876 | 7,035 | 7,593 | Apache-2.0 | 2023-09-14T20:20:56 | 2015-05-04T23:26:13 | Jupyter Notebook | UTF-8 | Python | false | false | 1,013 | py | # Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import lookup_sample
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
location = "global"
ids = ["c-024dcv3mk"]
languages = ["en"]
def test_lookup(capsys):
lookup_sample.lookup_sample(
project_id=project_id,
location=location,
ids=ids,
languages=languages,
)
out, _ = capsys.readouterr()
assert "Name: Google" in out
assert "Types" in out
assert "Cloud MID" in out
| [
"[email protected]"
] | |
0675a7e82f00c13dde5bb2af921db8747c34bf25 | ca18d8953ab75d6921ff0ddf3c41459438b8cddd | /qwergram_bots/smtp/get_articles.py | ae50ac0ed376d7cff455c74a8cfa8640c7d8ea99 | [] | no_license | qwergram/automatic-happiness | f00eb64f32947d88b0ea827fc515501ca38deebb | af86e6a5ca5300a6327a5cf6ace7c001f17e028c | refs/heads/master | 2021-01-17T10:19:43.913698 | 2016-06-03T05:40:24 | 2016-06-03T05:40:24 | 58,597,301 | 0 | 1 | null | 2016-06-03T06:06:15 | 2016-05-12T01:39:52 | Python | UTF-8 | Python | false | false | 9,073 | py | """Read emails from $EMAIL_ADDR and post them to the api."""
import imaplib
import smtplib
import email
import datetime
import time
from smtp.email_template import EMAIL_CONTENTS
import requests  # used at module level by the http_* helpers below
from requests.auth import HTTPBasicAuth
HOUR = 0  # placeholder; set to the real number of seconds under __main__
def http_get(*args, **kwargs):
return requests.get(*args, **kwargs)
def http_post(*args, **kwargs):
return requests.post(*args, **kwargs)
def http_put(*args, **kwargs):
return requests.put(*args, **kwargs)
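# These thin wrappers around requests presumably exist so tests can
# monkeypatch HTTP traffic in one place (an assumption; behaviour is the same
# as calling requests directly).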
class Hydrogen(object):
"""
Hydrogen Bot:
A bot that reads my emails and saves the proper emails to the API endpoint.
"""
def __init__(self, email_addr, email_pass, email_imap, email_admin):
self.emails = []
self.in_inbox = False
self.opened_inbox = False
self.authenticated = False
self.connected = False
self.raw_emails = False
self.parsed = False
self.email_addr = email_addr
self.email_pass = email_pass
self.email_imap = email_imap
self.admin_email = email_admin
def connect(self):
self.connected = True
self.mail = imaplib.IMAP4_SSL(self.email_imap)
def authenticate(self):
if self.connected:
self.authenticated = True
self.mail.login(self.email_addr, self.email_pass)
else:
raise EnvironmentError("Connect to the server first (Hydrogen.connect)")
def checkout_inbox(self):
if self.authenticated:
self.in_inbox = True
self.mail.select('Inbox')
else:
raise EnvironmentError('Authenticate first (Hydrogen.authenticate)')
def get_emails(self):
if self.in_inbox:
(status, emails) = self.mail.search(None, 'ALL')
if emails[0]:
self.emails = emails[0].split(b' ')
else:
import sys; sys.exit(0)
self.opened_inbox = True
else:
raise EnvironmentError('Checkout the inbox first (Hydrogen.checkout_inbox)')
def read_emails(self):
if self.opened_inbox:
to_delete = self.emails[:]
for i, email_num in enumerate(self.emails):
(rv, data) = self.mail.fetch(email_num, '(RFC822)')
self.emails[i] = email.message_from_string(data[0][1].decode())
            for email_num in to_delete:
                self.checkout_inbox()
                # move each processed message to Trash; the original always
                # copied message b'1' here, ignoring the loop variable
                self.mail.copy(email_num, b'[Gmail]/Trash')
self.raw_emails = True
else:
raise EnvironmentError('Fetch the emails first (Hydrogen.get_emails)')
def parse_emails(self):
if self.raw_emails:
for i, message in enumerate(self.emails):
parsed_email = {
'subject': message['Subject'],
'time': message['Date'],
'from': message['from'],
'content': str(message.get_payload()[0]).split('\n\n', 1)[1].replace('\n', ' ')
}
self.emails[i] = parsed_email
self.parsed = True
else:
raise EnvironmentError('Read the emails first (Hydrogen.read_emails)')
def filter_emails(self):
if self.parsed:
emails = []
for message in self.emails:
if (
message['subject'].endswith('.article.txt') or
message['subject'].endswith('.draft.txt')
) and (
self.admin_email in message['from']
):
emails.append({
"title": message['subject'].replace('.article.txt', '').replace('.draft.txt', ''),
"content": message['content'].replace('\r', ''),
"draft": message['subject'].endswith('draft.txt'),
"original_idea": None,
})
self.filtered = True
self.emails = emails
else:
raise EnvironmentError('Parse the emails first (Hydrogen.parse_emails)')
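# Intended call order (sketch, as driven by main() below): connect() ->
# authenticate() -> checkout_inbox() -> get_emails() -> read_emails() ->
# parse_emails() -> filter_emails(); afterwards Hydrogen.emails holds dicts
# with title/content/draft/original_idea keys.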
class Lithium(object):
"""
Lithium Bot:
A bot that takes the articles HydrogenBot spits out and then
emails me in 24 hrs, asking for final approval before submitting them to
the API with draft: False.
"""
def __init__(
self, articles, local_endpoint, public_endpoint, admin_user,
email_admin, email_addr, email_pass, email_host, email_port,
admin_pass
):
self.articles = articles
self.email_queue = []
self.local_endpoint = local_endpoint
self.public_endpoint = public_endpoint
self.admin_user = admin_user
self.admin_name = admin_user.capitalize()
self.admin_pass = admin_pass
self.admin_email = email_admin
self.email_addr = email_addr
self.email_pass = email_pass
self.email_smtp = "{}:{}".format(email_host, email_port)
self.wait_period = 24
self.review_period = 12
def submit_articles(self):
for article in self.articles:
email_queue = False
if not article['draft']:
email_queue = article['draft'] = True
response = http_post(
self.local_endpoint + 'articles/',
data=article,
auth=HTTPBasicAuth(self.admin_user, self.admin_pass),
)
assert response.ok, response.json()
if email_queue:
self.email_queue.append(response.json())
    def send_emails(self):
        for email_content in self.email_queue:
            server = smtplib.SMTP(self.email_smtp)
            server.ehlo()
            server.starttls()
            server.login(self.email_addr, self.email_pass)
            server.sendmail(self.email_addr, self.admin_email, email_content)
            server.quit()  # close the SMTP connection after each message
def format_emails(self):
for i, article in enumerate(self.email_queue):
email_contents = EMAIL_CONTENTS.format(
from_addr=self.email_addr,
to_addr=self.admin_email,
article_number=article['url'].split('/')[-2],
admin=self.admin_name,
wait_period=self.wait_period,
title=article['title'],
review_period=self.review_period,
link=article['url'].replace(self.local_endpoint, self.public_endpoint),
content=article['content'],
)
self.email_queue[i] = email_contents
def publish_articles(self):
for email_ in self.email_queue:
subject_line = email_.split('\r\n')[2]
article_pk = subject_line.split('#', 1)[-1].strip()
target_endpoint = self.local_endpoint + 'articles/{}/'.format(article_pk)
response = http_get(target_endpoint).json()
date_created = response['date_created'].split('.')[0] # Ignore the seconds decimal places
date_created = datetime.datetime.strptime(date_created, '%Y-%m-%dT%H:%M:%S')
if datetime.datetime.now() > date_created + datetime.timedelta(hours=self.wait_period - 1):
response = http_put(
target_endpoint,
data={
"draft": False,
"content": response['content'],
"title": response['title'],
},
auth=HTTPBasicAuth(self.admin_user, self.admin_pass)
)
assert response.ok, response.json()
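# Typical cycle (sketch): submit_articles() posts everything as a draft and
# queues any article that arrived marked ready, format_emails() and
# send_emails() ask the admin for approval, and a later pass calls
# publish_articles() to flip draft=False once the wait_period has elapsed.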
def main():
while True:
Bot = Hydrogen(
email_addr=EMAIL_ADDR,
email_pass=EMAIL_PASS,
email_imap=EMAIL_IMAP,
email_admin=EMAIL_ADMIN,
)
Bot.connect()
Bot.authenticate()
Bot.checkout_inbox()
Bot.get_emails()
Bot.read_emails()
Bot.parse_emails()
Bot.filter_emails()
Bot2 = Lithium(
articles=Bot.emails,
local_endpoint=LOCAL_ENDPOINT,
public_endpoint=PUBLIC_ENDPOINT,
admin_user=ADMIN_USER,
email_admin=EMAIL_ADMIN,
email_addr=EMAIL_ADDR,
email_pass=EMAIL_PASS,
email_host=EMAIL_HOST,
email_port=EMAIL_PORT,
admin_pass=ADMIN_PASS
)
Bot2.submit_articles()
Bot2.format_emails()
Bot2.send_emails()
        # publish anything whose review window (wait_period - 1 = 23 hours)
        # has elapsed, then poll again in an hour
        Bot2.publish_articles()
        time.sleep(HOUR)
if __name__ == "__main__":
import os
    HOUR = 60 * 60  # one hour in seconds (was 60 * 60 * 60, i.e. 60 hours)
EMAIL_ADDR = os.environ['EMAIL_ADDR']
EMAIL_PASS = os.environ['EMAIL_PASS']
EMAIL_IMAP = os.environ['EMAIL_IMAP']
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_PORT = os.environ['EMAIL_PORT']
EMAIL_ADMIN = os.environ['EMAIL_ADMIN']
ADMIN_USER = os.environ['ADMIN_USER']
ADMIN_PASS = os.environ['ADMIN_PASS']
LOCAL_ENDPOINT = "http://127.0.0.1:8000/api/v1/"
PUBLIC_ENDPOINT = "http://{}/api/v1/".format(os.environ['SERVER_LOCATION'])
main()
| [
"[email protected]"
] | |
8cac4644eff66d8248b36ad0fe230a008a9a7619 | ae1870a6da50a292f86aa631acbea5b02bf7e057 | /mootdx/contrib/pytdx/trade/trade.py | ccb2911feeec4ef82a12d7e6e4fbb17821b5b43b | [
"MIT"
] | permissive | sxlxnyw/mootdx | 30ccbfd362836b78326997fdade1b072cac02b6e | dd3065f3189eacc0ba6efbd17f60e9848bbffcd4 | refs/heads/master | 2022-11-27T17:46:15.388082 | 2020-08-06T06:45:06 | 2020-08-06T06:45:06 | 288,402,383 | 2 | 0 | MIT | 2020-08-18T08:43:53 | 2020-08-18T08:43:52 | null | UTF-8 | Python | false | false | 8,507 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Aug 21 14:24:31 2017
The Trade module must be used together with TdxTradeServer...
@author: RainX
"""
import base64
import json
import urllib
import requests
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
try:
from pytdx.log import log
except ImportError:
    # fall back to a real logger: the original bound a bare lambda here,
    # which lacks the .debug() method used in call() below
    import logging
    log = logging.getLogger("pytdx.trade")
try:
import pandas as pd
except ImportError as e:
pass
class TdxTradeApiParams:
"""
    Query categories (for query_data / query_datas):
    0  funds / cash
    1  share holdings
    2  today's orders
    3  today's fills
    4  cancelable orders
    5  shareholder codes
    6  margin-loan (financing) balance
    7  stock-loan (securities lending) balance
    8  marginable securities
    9
    10
    11
    12 subscribable new shares (IPO) query
    13 new-share subscription quota query
    14 allotment number query
    15 winning lots query
"""
QUERY_CATEGORY_CASH = 0
QUERY_CATEGORY_STOCKS = 1
QUERY_CATEGORY_ORDER_OF_TODAY = 2
QUERY_CATEGORY_DEAL_OF_TODAY = 3
QUERY_CATEGORY_CANCELABLE_ORDER = 4
QUERY_CATEGORY_SHAREHOLDERS_CODE = 5
QUERY_CATEGORY_BALANCE_OF_MARGIN_LOAN = 6
QUERY_CATEGORY_BALANCE_OF_STOCK_LOAN = 7
QUERY_CATEGORY_OPERABLE_MARGIN_SOTCK = 8
QUERY_CATEGORY_NEW_STOCKS = 12
QUERY_CATEGORY_NEW_STOCKS_QUOTA = 13
QUERY_CATEGORY_NEW_STOCK_NUMBER = 14
QUERY_CATEGORY_NEW_STOCK_HIT = 15
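# Example (assumed usage, after a successful logon): fetch current share
# holdings via the category constant:
#   api.query_data(client_id, TdxTradeApiParams.QUERY_CATEGORY_STOCKS)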
class TdxTradeApi(object):
def __init__(self,
endpoint="http://127.0.0.1:10092/api",
encoding="utf-8",
enc_key=None,
enc_iv=None):
self._endpoint = endpoint
self._encoding = "utf-8"
if enc_key == None or enc_iv == None:
self._transport_enc = False
self._transport_enc_key = None
self._transport_enc_iv = None
self._cipher = None
else:
self._transport_enc = True
self._transport_enc_key = enc_key
self._transport_enc_iv = enc_iv
backend = default_backend()
self._cipher = Cipher(algorithms.AES(enc_key),
modes.CBC(enc_iv),
backend=backend)
self._session = requests.Session()
def call(self, func, params=None):
json_obj = {"func": func}
if params is not None:
json_obj["params"] = params
if self._transport_enc:
data_to_send = self.encrypt(json_obj)
response = self._session.post(self._endpoint, data=data_to_send)
else:
response = self._session.post(self._endpoint, json=json_obj)
response.encoding = self._encoding
text = response.text
if self._transport_enc:
decoded_text = self.decrypt(text)
log.debug(decoded_text)
return json.loads(decoded_text)
else:
return json.loads(text)
def encrypt(self, source_obj):
encrypter = self._cipher.encryptor()
source = json.dumps(source_obj)
source = source.encode(self._encoding)
need_to_padding = 16 - (len(source) % 16)
if need_to_padding > 0:
source = source + b'\x00' * need_to_padding
enc_data = encrypter.update(source) + encrypter.finalize()
b64_enc_data = base64.encodebytes(enc_data)
return urllib.parse.quote(b64_enc_data)
def decrypt(self, source):
decrypter = self._cipher.decryptor()
source = urllib.parse.unquote(source)
source = base64.decodebytes(source.encode("utf-8"))
data_bytes = decrypter.update(source) + decrypter.finalize()
return data_bytes.rstrip(b"\x00").decode(self._encoding)
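    # Round-trip sketch (requires a 16-byte key and IV; AES-CBC with the
    # zero-byte padding implemented above):
    #   api = TdxTradeApi(enc_key=b"0123456789abcdef", enc_iv=b"0123456789abcdef")
    #   api.decrypt(api.encrypt({"func": "ping"}))  # -> '{"func": "ping"}'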
def data_to_df(self, result):
if 'data' in result:
data = result['data']
return pd.DataFrame(data=data)
# ------ functions
def ping(self):
return self.call("ping", {})
def logon(self, ip, port, version, yyb_id, account_id, trade_account,
              jy_password, tx_password):
return self.call(
"logon", {
"ip": ip,
"port": port,
"version": version,
"yyb_id": yyb_id,
"account_no": account_id,
"trade_account": trade_account,
"jy_password": jy_passwrod,
"tx_password": tx_password
})
def logoff(self, client_id):
return self.call("logoff", {"client_id": client_id})
def query_data(self, client_id, category):
return self.call("query_data", {
"client_id": client_id,
"category": category
})
def send_order(self, client_id, category, price_type, gddm, zqdm, price,
quantity):
return self.call(
"send_order", {
'client_id': client_id,
'category': category,
'price_type': price_type,
'gddm': gddm,
'zqdm': zqdm,
'price': price,
'quantity': quantity
})
def cancel_order(self, client_id, exchange_id, hth):
return self.call("cancel_order", {
'client_id': client_id,
'exchange_id': exchange_id,
'hth': hth
})
def get_quote(self, client_id, code):
return self.call("get_quote", {
'client_id': client_id,
'code': code,
})
def repay(self, client_id, amount):
return self.call("repay", {'client_id': client_id, 'amount': amount})
def query_history_data(self, client_id, category, begin_date, end_date):
return self.call(
'query_history_data', {
'client_id': client_id,
'category': category,
'begin_date': begin_date,
'end_date': end_date
})
def query_datas(self, client_id, categories):
return self.call('query_datas', {
'client_id': client_id,
'categories': categories
})
def get_quotes(self, client_id, codes):
return self.call("get_quotes", {'client_id': client_id, 'zqdms': codes})
def send_orders(self, client_id, orders):
"""
        Send a batch of orders.
        :param client_id:
        :param orders:
            Format:
[
{
"category": xx,
"price_type" :xx,
"price": xx,
"gddm": xx,
"zqdm": xx,
"quantity": xx,
},
{
....
},
{
....
}
]
:return:
"""
return self.call("send_orders", {
'client_id': client_id,
"orders": orders
})
def cancel_orders(self, client_id, orders):
"""
        Cancel a batch of orders.
        :param client_id:
        :param orders:
            Format:
[
{
"exchange_id": xx,
"hth": xx
},
{
....
},
{
....
}
]
:return:
"""
return self.call("cancel_orders", {
'client_id': client_id,
"orders": orders
})
def get_active_clients(self):
return self.call(func="get_active_clients")
if __name__ == "__main__":
import os
# api = TdxTradeApi(endpoint="http://10.11.5.215:10092/api", enc_key=b"4f1cf3fec4c84c84", enc_iv=b"0c78abc083b011e7")
api = TdxTradeApi(endpoint="http://10.11.5.215:10092/api")
print("---Ping---")
result = api.ping()
print(result)
print("---登入---")
acc = os.getenv("TDX_ACCOUNT", "")
password = os.getenv("TDX_PASS", "")
result = api.logon("202.108.253.186", 7708, "8.23", 32, acc, acc, password,
"")
print(result)
if result["success"]:
client_id = result["data"]["client_id"]
for i in (0, 1, 2, 3, 4, 5, 6, 7, 8, 12, 13, 14, 15):
print("---查询信息 cate=%d--" % i)
print(api.data_to_df(api.query_data(client_id, i)))
print("---查询报价---")
print(api.data_to_df(api.get_quote(client_id, '600315')))
print("---批量查询报价---")
print(api.data_to_df(api.get_quotes(client_id, ['600315', '000001'])))
print("---批量查询信息")
print(api.data_to_df(api.query_datas(client_id, [0, 1, 2])))
print("---登出---")
print(api.logoff(client_id))
| [
"[email protected]"
] | |
d7c11080139ed0bff1800582fef010d1bedb96c7 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_201/1514.py | 0f27373d40ddcd82cb07423f15c79eea3f3c6422 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 736 | py | from math import floor, ceil
def get_result(num, k):
    # Recursively compute (max_side, min_side) of the free space around the
    # k-th person in a row of `num` empty stalls (a Code Jam 2017 "Bathroom
    # Stalls"-style divide and conquer).
    if num == k:
        return 0, 0
    if num == 2:
        return 1, 0
    if k == 1:
        # The first person takes the middle stall of the row.
        if num % 2 == 0:
            return (num / 2), (num / 2) - 1
        else:
            return (num - 1) / 2, (num - 1) / 2
    # Person 1 splits the remaining num - 1 stalls into two segments of
    # ceil((num - 1) / 2) and floor((num - 1) / 2) stalls...
    num_1 = ceil((num - 1) / 2)
    num_2 = floor((num - 1) / 2)
    # ...and by parity the k-th person lands in the larger (k even) or the
    # smaller (k odd) segment, as its ceil/floor((k - 1) / 2)-th occupant.
    if k % 2 == 0:
        return get_result(num_1, ceil((k - 1) / 2))
    else:
        return get_result(num_2, floor((k - 1) / 2))
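# Worked example: with n=6, k=2 the first person splits the remaining five
# stalls into segments of 3 and 2; person 2 takes the larger one, so
# get_result(6, 2) == get_result(3, 1) == (1, 1).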
def solve():
n, k = [int(s) for s in input().split(" ")]
return get_result(n, k)
t = int(input())
for i in range(1, t + 1):
result = solve()
print("Case #{}: {} {}".format(i, int(result[0]), int(result[1])))
| [
"[email protected]"
] | |
76d9587e6d2da6a2ae65a1af0c795772945e3433 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/batch/v20170901/outputs.py | 452a83712bf6f04f51188e3ad3d12c90641da9b1 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 90,752 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'ApplicationPackageReferenceResponse',
'ApplicationPackageResponse',
'AutoScaleRunErrorResponse',
'AutoScaleRunResponse',
'AutoScaleSettingsResponse',
'AutoStoragePropertiesResponse',
'AutoUserSpecificationResponse',
'CertificateReferenceResponse',
'CloudServiceConfigurationResponse',
'DataDiskResponse',
'DeleteCertificateErrorResponse',
'DeploymentConfigurationResponse',
'EnvironmentSettingResponse',
'FixedScaleSettingsResponse',
'ImageReferenceResponse',
'InboundNatPoolResponse',
'KeyVaultReferenceResponse',
'LinuxUserConfigurationResponse',
'MetadataItemResponse',
'NetworkConfigurationResponse',
'NetworkSecurityGroupRuleResponse',
'OSDiskResponse',
'PoolEndpointConfigurationResponse',
'ResizeErrorResponse',
'ResizeOperationStatusResponse',
'ResourceFileResponse',
'ScaleSettingsResponse',
'StartTaskResponse',
'TaskSchedulingPolicyResponse',
'UserAccountResponse',
'UserIdentityResponse',
'VirtualMachineConfigurationResponse',
'WindowsConfigurationResponse',
]
@pulumi.output_type
class ApplicationPackageReferenceResponse(dict):
def __init__(__self__, *,
id: str,
version: Optional[str] = None):
"""
:param str version: If this is omitted, and no default version is specified for this application, the request fails with the error code InvalidApplicationPackageReferences. If you are calling the REST API directly, the HTTP status code is 409.
"""
pulumi.set(__self__, "id", id)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def id(self) -> str:
return pulumi.get(self, "id")
@property
@pulumi.getter
def version(self) -> Optional[str]:
"""
If this is omitted, and no default version is specified for this application, the request fails with the error code InvalidApplicationPackageReferences. If you are calling the REST API directly, the HTTP status code is 409.
"""
return pulumi.get(self, "version")
@pulumi.output_type
class ApplicationPackageResponse(dict):
"""
An application package which represents a particular version of an application.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "lastActivationTime":
suggest = "last_activation_time"
elif key == "storageUrl":
suggest = "storage_url"
elif key == "storageUrlExpiry":
suggest = "storage_url_expiry"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ApplicationPackageResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ApplicationPackageResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ApplicationPackageResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
format: str,
id: str,
last_activation_time: str,
state: str,
storage_url: str,
storage_url_expiry: str,
version: str):
"""
An application package which represents a particular version of an application.
:param str format: The format of the application package, if the package is active.
:param str id: The ID of the application.
:param str last_activation_time: The time at which the package was last activated, if the package is active.
:param str state: The current state of the application package.
:param str storage_url: The URL for the application package in Azure Storage.
:param str storage_url_expiry: The UTC time at which the Azure Storage URL will expire.
:param str version: The version of the application package.
"""
pulumi.set(__self__, "format", format)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "last_activation_time", last_activation_time)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "storage_url", storage_url)
pulumi.set(__self__, "storage_url_expiry", storage_url_expiry)
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def format(self) -> str:
"""
The format of the application package, if the package is active.
"""
return pulumi.get(self, "format")
@property
@pulumi.getter
def id(self) -> str:
"""
The ID of the application.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lastActivationTime")
def last_activation_time(self) -> str:
"""
The time at which the package was last activated, if the package is active.
"""
return pulumi.get(self, "last_activation_time")
@property
@pulumi.getter
def state(self) -> str:
"""
The current state of the application package.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="storageUrl")
def storage_url(self) -> str:
"""
The URL for the application package in Azure Storage.
"""
return pulumi.get(self, "storage_url")
@property
@pulumi.getter(name="storageUrlExpiry")
def storage_url_expiry(self) -> str:
"""
The UTC time at which the Azure Storage URL will expire.
"""
return pulumi.get(self, "storage_url_expiry")
@property
@pulumi.getter
def version(self) -> str:
"""
The version of the application package.
"""
return pulumi.get(self, "version")
@pulumi.output_type
class AutoScaleRunErrorResponse(dict):
def __init__(__self__, *,
code: str,
message: str,
details: Optional[Sequence['outputs.AutoScaleRunErrorResponse']] = None):
"""
:param str code: An identifier for the error. Codes are invariant and are intended to be consumed programmatically.
:param str message: A message describing the error, intended to be suitable for display in a user interface.
"""
pulumi.set(__self__, "code", code)
pulumi.set(__self__, "message", message)
if details is not None:
pulumi.set(__self__, "details", details)
@property
@pulumi.getter
def code(self) -> str:
"""
An identifier for the error. Codes are invariant and are intended to be consumed programmatically.
"""
return pulumi.get(self, "code")
@property
@pulumi.getter
def message(self) -> str:
"""
A message describing the error, intended to be suitable for display in a user interface.
"""
return pulumi.get(self, "message")
@property
@pulumi.getter
def details(self) -> Optional[Sequence['outputs.AutoScaleRunErrorResponse']]:
return pulumi.get(self, "details")
@pulumi.output_type
class AutoScaleRunResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "evaluationTime":
suggest = "evaluation_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AutoScaleRunResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AutoScaleRunResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AutoScaleRunResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
evaluation_time: str,
error: Optional['outputs.AutoScaleRunErrorResponse'] = None,
results: Optional[str] = None):
"""
:param str results: Each variable value is returned in the form $variable=value, and variables are separated by semicolons.
"""
pulumi.set(__self__, "evaluation_time", evaluation_time)
if error is not None:
pulumi.set(__self__, "error", error)
if results is not None:
pulumi.set(__self__, "results", results)
@property
@pulumi.getter(name="evaluationTime")
def evaluation_time(self) -> str:
return pulumi.get(self, "evaluation_time")
@property
@pulumi.getter
def error(self) -> Optional['outputs.AutoScaleRunErrorResponse']:
return pulumi.get(self, "error")
@property
@pulumi.getter
def results(self) -> Optional[str]:
"""
Each variable value is returned in the form $variable=value, and variables are separated by semicolons.
"""
return pulumi.get(self, "results")
@pulumi.output_type
class AutoScaleSettingsResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "evaluationInterval":
suggest = "evaluation_interval"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AutoScaleSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AutoScaleSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AutoScaleSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
formula: str,
evaluation_interval: Optional[str] = None):
"""
:param str evaluation_interval: If omitted, the default value is 15 minutes (PT15M).
"""
pulumi.set(__self__, "formula", formula)
if evaluation_interval is not None:
pulumi.set(__self__, "evaluation_interval", evaluation_interval)
@property
@pulumi.getter
def formula(self) -> str:
return pulumi.get(self, "formula")
@property
@pulumi.getter(name="evaluationInterval")
def evaluation_interval(self) -> Optional[str]:
"""
If omitted, the default value is 15 minutes (PT15M).
"""
return pulumi.get(self, "evaluation_interval")
@pulumi.output_type
class AutoStoragePropertiesResponse(dict):
"""
Contains information about the auto-storage account associated with a Batch account.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "lastKeySync":
suggest = "last_key_sync"
elif key == "storageAccountId":
suggest = "storage_account_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AutoStoragePropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AutoStoragePropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AutoStoragePropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
last_key_sync: str,
storage_account_id: str):
"""
Contains information about the auto-storage account associated with a Batch account.
:param str last_key_sync: The UTC time at which storage keys were last synchronized with the Batch account.
:param str storage_account_id: The resource ID of the storage account to be used for auto-storage account.
"""
pulumi.set(__self__, "last_key_sync", last_key_sync)
pulumi.set(__self__, "storage_account_id", storage_account_id)
@property
@pulumi.getter(name="lastKeySync")
def last_key_sync(self) -> str:
"""
The UTC time at which storage keys were last synchronized with the Batch account.
"""
return pulumi.get(self, "last_key_sync")
@property
@pulumi.getter(name="storageAccountId")
def storage_account_id(self) -> str:
"""
The resource ID of the storage account to be used for auto-storage account.
"""
return pulumi.get(self, "storage_account_id")
@pulumi.output_type
class AutoUserSpecificationResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "elevationLevel":
suggest = "elevation_level"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AutoUserSpecificationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AutoUserSpecificationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AutoUserSpecificationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
elevation_level: Optional[str] = None,
scope: Optional[str] = None):
"""
:param str elevation_level: nonAdmin - The auto user is a standard user without elevated access. admin - The auto user is a user with elevated access and operates with full Administrator permissions. The default value is nonAdmin.
:param str scope: pool - specifies that the task runs as the common auto user account which is created on every node in a pool. task - specifies that the service should create a new user for the task. The default value is task.
"""
if elevation_level is not None:
pulumi.set(__self__, "elevation_level", elevation_level)
if scope is not None:
pulumi.set(__self__, "scope", scope)
@property
@pulumi.getter(name="elevationLevel")
def elevation_level(self) -> Optional[str]:
"""
nonAdmin - The auto user is a standard user without elevated access. admin - The auto user is a user with elevated access and operates with full Administrator permissions. The default value is nonAdmin.
"""
return pulumi.get(self, "elevation_level")
@property
@pulumi.getter
def scope(self) -> Optional[str]:
"""
pool - specifies that the task runs as the common auto user account which is created on every node in a pool. task - specifies that the service should create a new user for the task. The default value is task.
"""
return pulumi.get(self, "scope")
@pulumi.output_type
class CertificateReferenceResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "storeLocation":
suggest = "store_location"
elif key == "storeName":
suggest = "store_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CertificateReferenceResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CertificateReferenceResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CertificateReferenceResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
store_location: Optional[str] = None,
store_name: Optional[str] = None,
visibility: Optional[Sequence[str]] = None):
"""
:param str store_location: The default value is currentUser. This property is applicable only for pools configured with Windows nodes (that is, created with cloudServiceConfiguration, or with virtualMachineConfiguration using a Windows image reference). For Linux compute nodes, the certificates are stored in a directory inside the task working directory and an environment variable AZ_BATCH_CERTIFICATES_DIR is supplied to the task to query for this location. For certificates with visibility of 'remoteUser', a 'certs' directory is created in the user's home directory (e.g., /home/{user-name}/certs) and certificates are placed in that directory.
:param str store_name: This property is applicable only for pools configured with Windows nodes (that is, created with cloudServiceConfiguration, or with virtualMachineConfiguration using a Windows image reference). Common store names include: My, Root, CA, Trust, Disallowed, TrustedPeople, TrustedPublisher, AuthRoot, AddressBook, but any custom store name can also be used. The default value is My.
:param Sequence[str] visibility: Values are:
starttask - The user account under which the start task is run.
task - The accounts under which job tasks are run.
remoteuser - The accounts under which users remotely access the node.
You can specify more than one visibility in this collection. The default is all accounts.
"""
pulumi.set(__self__, "id", id)
if store_location is not None:
pulumi.set(__self__, "store_location", store_location)
if store_name is not None:
pulumi.set(__self__, "store_name", store_name)
if visibility is not None:
pulumi.set(__self__, "visibility", visibility)
@property
@pulumi.getter
def id(self) -> str:
return pulumi.get(self, "id")
@property
@pulumi.getter(name="storeLocation")
def store_location(self) -> Optional[str]:
"""
The default value is currentUser. This property is applicable only for pools configured with Windows nodes (that is, created with cloudServiceConfiguration, or with virtualMachineConfiguration using a Windows image reference). For Linux compute nodes, the certificates are stored in a directory inside the task working directory and an environment variable AZ_BATCH_CERTIFICATES_DIR is supplied to the task to query for this location. For certificates with visibility of 'remoteUser', a 'certs' directory is created in the user's home directory (e.g., /home/{user-name}/certs) and certificates are placed in that directory.
"""
return pulumi.get(self, "store_location")
@property
@pulumi.getter(name="storeName")
def store_name(self) -> Optional[str]:
"""
This property is applicable only for pools configured with Windows nodes (that is, created with cloudServiceConfiguration, or with virtualMachineConfiguration using a Windows image reference). Common store names include: My, Root, CA, Trust, Disallowed, TrustedPeople, TrustedPublisher, AuthRoot, AddressBook, but any custom store name can also be used. The default value is My.
"""
return pulumi.get(self, "store_name")
@property
@pulumi.getter
def visibility(self) -> Optional[Sequence[str]]:
"""
Values are:
starttask - The user account under which the start task is run.
task - The accounts under which job tasks are run.
remoteuser - The accounts under which users remotely access the node.
You can specify more than one visibility in this collection. The default is all accounts.
"""
return pulumi.get(self, "visibility")
@pulumi.output_type
class CloudServiceConfigurationResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "osFamily":
suggest = "os_family"
elif key == "currentOSVersion":
suggest = "current_os_version"
elif key == "targetOSVersion":
suggest = "target_os_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CloudServiceConfigurationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CloudServiceConfigurationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CloudServiceConfigurationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
os_family: str,
current_os_version: Optional[str] = None,
target_os_version: Optional[str] = None):
"""
:param str os_family: Possible values are: 2 - OS Family 2, equivalent to Windows Server 2008 R2 SP1. 3 - OS Family 3, equivalent to Windows Server 2012. 4 - OS Family 4, equivalent to Windows Server 2012 R2. 5 - OS Family 5, equivalent to Windows Server 2016. For more information, see Azure Guest OS Releases (https://azure.microsoft.com/documentation/articles/cloud-services-guestos-update-matrix/#releases).
:param str current_os_version: This may differ from targetOSVersion if the pool state is Upgrading. In this case some virtual machines may be on the targetOSVersion and some may be on the currentOSVersion during the upgrade process. Once all virtual machines have upgraded, currentOSVersion is updated to be the same as targetOSVersion.
:param str target_os_version: The default value is * which specifies the latest operating system version for the specified OS family.
"""
pulumi.set(__self__, "os_family", os_family)
if current_os_version is not None:
pulumi.set(__self__, "current_os_version", current_os_version)
if target_os_version is not None:
pulumi.set(__self__, "target_os_version", target_os_version)
@property
@pulumi.getter(name="osFamily")
def os_family(self) -> str:
"""
Possible values are: 2 - OS Family 2, equivalent to Windows Server 2008 R2 SP1. 3 - OS Family 3, equivalent to Windows Server 2012. 4 - OS Family 4, equivalent to Windows Server 2012 R2. 5 - OS Family 5, equivalent to Windows Server 2016. For more information, see Azure Guest OS Releases (https://azure.microsoft.com/documentation/articles/cloud-services-guestos-update-matrix/#releases).
"""
return pulumi.get(self, "os_family")
@property
@pulumi.getter(name="currentOSVersion")
def current_os_version(self) -> Optional[str]:
"""
This may differ from targetOSVersion if the pool state is Upgrading. In this case some virtual machines may be on the targetOSVersion and some may be on the currentOSVersion during the upgrade process. Once all virtual machines have upgraded, currentOSVersion is updated to be the same as targetOSVersion.
"""
return pulumi.get(self, "current_os_version")
@property
@pulumi.getter(name="targetOSVersion")
def target_os_version(self) -> Optional[str]:
"""
The default value is * which specifies the latest operating system version for the specified OS family.
"""
return pulumi.get(self, "target_os_version")
@pulumi.output_type
class DataDiskResponse(dict):
"""
Data Disk settings which will be used by the data disks associated to Compute Nodes in the pool.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "diskSizeGB":
suggest = "disk_size_gb"
elif key == "storageAccountType":
suggest = "storage_account_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DataDiskResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DataDiskResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DataDiskResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
disk_size_gb: int,
lun: int,
caching: Optional[str] = None,
storage_account_type: Optional[str] = None):
"""
Data Disk settings which will be used by the data disks associated to Compute Nodes in the pool.
:param int lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, each should have a distinct lun.
:param str caching: Values are:
none - The caching mode for the disk is not enabled.
readOnly - The caching mode for the disk is read only.
readWrite - The caching mode for the disk is read and write.
        The default value for caching is none. For information about the caching options, see https://blogs.msdn.microsoft.com/windowsazurestorage/2012/06/27/exploring-windows-azure-drives-disks-and-images/.
        The default value for caching is none. For information about the caching options, see https://blogs.msdn.microsoft.com/windowsazurestorage/2012/06/27/exploring-windows-azure-drives-disks-and-images/.
:param str storage_account_type: If omitted, the default is "Standard_LRS". Values are:
Standard_LRS - The data disk should use standard locally redundant storage.
Premium_LRS - The data disk should use premium locally redundant storage.
"""
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
pulumi.set(__self__, "lun", lun)
if caching is not None:
pulumi.set(__self__, "caching", caching)
if storage_account_type is not None:
pulumi.set(__self__, "storage_account_type", storage_account_type)
@property
@pulumi.getter(name="diskSizeGB")
def disk_size_gb(self) -> int:
return pulumi.get(self, "disk_size_gb")
@property
@pulumi.getter
def lun(self) -> int:
"""
The lun is used to uniquely identify each data disk. If attaching multiple disks, each should have a distinct lun.
"""
return pulumi.get(self, "lun")
@property
@pulumi.getter
def caching(self) -> Optional[str]:
"""
Values are:
none - The caching mode for the disk is not enabled.
readOnly - The caching mode for the disk is read only.
readWrite - The caching mode for the disk is read and write.
        The default value for caching is none. For information about the caching options, see https://blogs.msdn.microsoft.com/windowsazurestorage/2012/06/27/exploring-windows-azure-drives-disks-and-images/.
"""
return pulumi.get(self, "caching")
@property
@pulumi.getter(name="storageAccountType")
def storage_account_type(self) -> Optional[str]:
"""
If omitted, the default is "Standard_LRS". Values are:
Standard_LRS - The data disk should use standard locally redundant storage.
Premium_LRS - The data disk should use premium locally redundant storage.
"""
return pulumi.get(self, "storage_account_type")
@pulumi.output_type
class DeleteCertificateErrorResponse(dict):
"""
An error response from the Batch service.
"""
def __init__(__self__, *,
code: str,
message: str,
details: Optional[Sequence['outputs.DeleteCertificateErrorResponse']] = None,
target: Optional[str] = None):
"""
An error response from the Batch service.
:param str code: An identifier for the error. Codes are invariant and are intended to be consumed programmatically.
:param str message: A message describing the error, intended to be suitable for display in a user interface.
:param Sequence['DeleteCertificateErrorResponse'] details: A list of additional details about the error.
:param str target: The target of the particular error. For example, the name of the property in error.
"""
pulumi.set(__self__, "code", code)
pulumi.set(__self__, "message", message)
if details is not None:
pulumi.set(__self__, "details", details)
if target is not None:
pulumi.set(__self__, "target", target)
@property
@pulumi.getter
def code(self) -> str:
"""
An identifier for the error. Codes are invariant and are intended to be consumed programmatically.
"""
return pulumi.get(self, "code")
@property
@pulumi.getter
def message(self) -> str:
"""
A message describing the error, intended to be suitable for display in a user interface.
"""
return pulumi.get(self, "message")
@property
@pulumi.getter
def details(self) -> Optional[Sequence['outputs.DeleteCertificateErrorResponse']]:
"""
A list of additional details about the error.
"""
return pulumi.get(self, "details")
@property
@pulumi.getter
def target(self) -> Optional[str]:
"""
The target of the particular error. For example, the name of the property in error.
"""
return pulumi.get(self, "target")
@pulumi.output_type
class DeploymentConfigurationResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cloudServiceConfiguration":
suggest = "cloud_service_configuration"
elif key == "virtualMachineConfiguration":
suggest = "virtual_machine_configuration"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DeploymentConfigurationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DeploymentConfigurationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DeploymentConfigurationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cloud_service_configuration: Optional['outputs.CloudServiceConfigurationResponse'] = None,
virtual_machine_configuration: Optional['outputs.VirtualMachineConfigurationResponse'] = None):
"""
:param 'CloudServiceConfigurationResponse' cloud_service_configuration: This property and virtualMachineConfiguration are mutually exclusive and one of the properties must be specified. This property cannot be specified if the Batch account was created with its poolAllocationMode property set to 'UserSubscription'.
:param 'VirtualMachineConfigurationResponse' virtual_machine_configuration: This property and cloudServiceConfiguration are mutually exclusive and one of the properties must be specified.
"""
if cloud_service_configuration is not None:
pulumi.set(__self__, "cloud_service_configuration", cloud_service_configuration)
if virtual_machine_configuration is not None:
pulumi.set(__self__, "virtual_machine_configuration", virtual_machine_configuration)
@property
@pulumi.getter(name="cloudServiceConfiguration")
def cloud_service_configuration(self) -> Optional['outputs.CloudServiceConfigurationResponse']:
"""
This property and virtualMachineConfiguration are mutually exclusive and one of the properties must be specified. This property cannot be specified if the Batch account was created with its poolAllocationMode property set to 'UserSubscription'.
"""
return pulumi.get(self, "cloud_service_configuration")
@property
@pulumi.getter(name="virtualMachineConfiguration")
def virtual_machine_configuration(self) -> Optional['outputs.VirtualMachineConfigurationResponse']:
"""
This property and cloudServiceConfiguration are mutually exclusive and one of the properties must be specified.
"""
return pulumi.get(self, "virtual_machine_configuration")
@pulumi.output_type
class EnvironmentSettingResponse(dict):
def __init__(__self__, *,
name: str,
value: Optional[str] = None):
pulumi.set(__self__, "name", name)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> Optional[str]:
return pulumi.get(self, "value")
@pulumi.output_type
class FixedScaleSettingsResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nodeDeallocationOption":
suggest = "node_deallocation_option"
elif key == "resizeTimeout":
suggest = "resize_timeout"
elif key == "targetDedicatedNodes":
suggest = "target_dedicated_nodes"
elif key == "targetLowPriorityNodes":
suggest = "target_low_priority_nodes"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FixedScaleSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FixedScaleSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FixedScaleSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
node_deallocation_option: Optional[str] = None,
resize_timeout: Optional[str] = None,
target_dedicated_nodes: Optional[int] = None,
target_low_priority_nodes: Optional[int] = None):
"""
:param str node_deallocation_option: If omitted, the default value is Requeue.
:param str resize_timeout: The default value is 15 minutes. Timeout values use ISO 8601 format. For example, use PT10M for 10 minutes. The minimum value is 5 minutes. If you specify a value less than 5 minutes, the Batch service rejects the request with an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request).
        :param int target_dedicated_nodes: At least one of targetDedicatedNodes or targetLowPriorityNodes must be set.
        :param int target_low_priority_nodes: At least one of targetDedicatedNodes or targetLowPriorityNodes must be set.
"""
if node_deallocation_option is not None:
pulumi.set(__self__, "node_deallocation_option", node_deallocation_option)
if resize_timeout is not None:
pulumi.set(__self__, "resize_timeout", resize_timeout)
if target_dedicated_nodes is not None:
pulumi.set(__self__, "target_dedicated_nodes", target_dedicated_nodes)
if target_low_priority_nodes is not None:
pulumi.set(__self__, "target_low_priority_nodes", target_low_priority_nodes)
@property
@pulumi.getter(name="nodeDeallocationOption")
def node_deallocation_option(self) -> Optional[str]:
"""
If omitted, the default value is Requeue.
"""
return pulumi.get(self, "node_deallocation_option")
@property
@pulumi.getter(name="resizeTimeout")
def resize_timeout(self) -> Optional[str]:
"""
The default value is 15 minutes. Timeout values use ISO 8601 format. For example, use PT10M for 10 minutes. The minimum value is 5 minutes. If you specify a value less than 5 minutes, the Batch service rejects the request with an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request).
"""
return pulumi.get(self, "resize_timeout")
@property
@pulumi.getter(name="targetDedicatedNodes")
def target_dedicated_nodes(self) -> Optional[int]:
"""
        At least one of targetDedicatedNodes or targetLowPriorityNodes must be set.
"""
return pulumi.get(self, "target_dedicated_nodes")
@property
@pulumi.getter(name="targetLowPriorityNodes")
def target_low_priority_nodes(self) -> Optional[int]:
"""
        At least one of targetDedicatedNodes or targetLowPriorityNodes must be set.
"""
return pulumi.get(self, "target_low_priority_nodes")
@pulumi.output_type
class ImageReferenceResponse(dict):
def __init__(__self__, *,
id: Optional[str] = None,
offer: Optional[str] = None,
publisher: Optional[str] = None,
sku: Optional[str] = None,
version: Optional[str] = None):
"""
        :param str id: This property is mutually exclusive with other properties. The virtual machine image must be in the same region and subscription as the Azure Batch account. For information about the firewall settings for the Batch node agent to communicate with the Batch service, see https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration.
:param str offer: For example, UbuntuServer or WindowsServer.
:param str publisher: For example, Canonical or MicrosoftWindowsServer.
:param str sku: For example, 14.04.0-LTS or 2012-R2-Datacenter.
:param str version: A value of 'latest' can be specified to select the latest version of an image. If omitted, the default is 'latest'.
"""
if id is not None:
pulumi.set(__self__, "id", id)
if offer is not None:
pulumi.set(__self__, "offer", offer)
if publisher is not None:
pulumi.set(__self__, "publisher", publisher)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
        This property is mutually exclusive with other properties. The virtual machine image must be in the same region and subscription as the Azure Batch account. For information about the firewall settings for the Batch node agent to communicate with the Batch service, see https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def offer(self) -> Optional[str]:
"""
For example, UbuntuServer or WindowsServer.
"""
return pulumi.get(self, "offer")
@property
@pulumi.getter
def publisher(self) -> Optional[str]:
"""
For example, Canonical or MicrosoftWindowsServer.
"""
return pulumi.get(self, "publisher")
@property
@pulumi.getter
def sku(self) -> Optional[str]:
"""
For example, 14.04.0-LTS or 2012-R2-Datacenter.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def version(self) -> Optional[str]:
"""
A value of 'latest' can be specified to select the latest version of an image. If omitted, the default is 'latest'.
"""
return pulumi.get(self, "version")
@pulumi.output_type
class InboundNatPoolResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "backendPort":
suggest = "backend_port"
elif key == "frontendPortRangeEnd":
suggest = "frontend_port_range_end"
elif key == "frontendPortRangeStart":
suggest = "frontend_port_range_start"
elif key == "networkSecurityGroupRules":
suggest = "network_security_group_rules"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in InboundNatPoolResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
InboundNatPoolResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
InboundNatPoolResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
backend_port: int,
frontend_port_range_end: int,
frontend_port_range_start: int,
name: str,
protocol: str,
network_security_group_rules: Optional[Sequence['outputs.NetworkSecurityGroupRuleResponse']] = None):
"""
:param int backend_port: This must be unique within a Batch pool. Acceptable values are between 1 and 65535 except for 22, 3389, 29876 and 29877 as these are reserved. If any reserved values are provided the request fails with HTTP status code 400.
        :param int frontend_port_range_end: Acceptable values range between 1 and 65534 except ports from 50000 to 55000, which are reserved by the Batch service. All ranges within a pool must be distinct and cannot overlap. If any reserved or overlapping values are provided the request fails with HTTP status code 400.
        :param int frontend_port_range_start: Acceptable values range between 1 and 65534 except ports from 50000 to 55000, which are reserved by the Batch service. All ranges within a pool must be distinct and cannot overlap. If any reserved or overlapping values are provided the request fails with HTTP status code 400.
:param str name: The name must be unique within a Batch pool, can contain letters, numbers, underscores, periods, and hyphens. Names must start with a letter or number, must end with a letter, number, or underscore, and cannot exceed 77 characters. If any invalid values are provided the request fails with HTTP status code 400.
:param Sequence['NetworkSecurityGroupRuleResponse'] network_security_group_rules: The maximum number of rules that can be specified across all the endpoints on a Batch pool is 25. If no network security group rules are specified, a default rule will be created to allow inbound access to the specified backendPort. If the maximum number of network security group rules is exceeded the request fails with HTTP status code 400.
"""
pulumi.set(__self__, "backend_port", backend_port)
pulumi.set(__self__, "frontend_port_range_end", frontend_port_range_end)
pulumi.set(__self__, "frontend_port_range_start", frontend_port_range_start)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "protocol", protocol)
if network_security_group_rules is not None:
pulumi.set(__self__, "network_security_group_rules", network_security_group_rules)
@property
@pulumi.getter(name="backendPort")
def backend_port(self) -> int:
"""
This must be unique within a Batch pool. Acceptable values are between 1 and 65535 except for 22, 3389, 29876 and 29877 as these are reserved. If any reserved values are provided the request fails with HTTP status code 400.
"""
return pulumi.get(self, "backend_port")
@property
@pulumi.getter(name="frontendPortRangeEnd")
def frontend_port_range_end(self) -> int:
"""
        Acceptable values range between 1 and 65534 except ports from 50000 to 55000, which are reserved by the Batch service. All ranges within a pool must be distinct and cannot overlap. If any reserved or overlapping values are provided the request fails with HTTP status code 400.
"""
return pulumi.get(self, "frontend_port_range_end")
@property
@pulumi.getter(name="frontendPortRangeStart")
def frontend_port_range_start(self) -> int:
"""
        Acceptable values range between 1 and 65534 except ports from 50000 to 55000, which are reserved by the Batch service. All ranges within a pool must be distinct and cannot overlap. If any reserved or overlapping values are provided the request fails with HTTP status code 400.
"""
return pulumi.get(self, "frontend_port_range_start")
@property
@pulumi.getter
def name(self) -> str:
"""
The name must be unique within a Batch pool, can contain letters, numbers, underscores, periods, and hyphens. Names must start with a letter or number, must end with a letter, number, or underscore, and cannot exceed 77 characters. If any invalid values are provided the request fails with HTTP status code 400.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def protocol(self) -> str:
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="networkSecurityGroupRules")
def network_security_group_rules(self) -> Optional[Sequence['outputs.NetworkSecurityGroupRuleResponse']]:
"""
The maximum number of rules that can be specified across all the endpoints on a Batch pool is 25. If no network security group rules are specified, a default rule will be created to allow inbound access to the specified backendPort. If the maximum number of network security group rules is exceeded the request fails with HTTP status code 400.
"""
return pulumi.get(self, "network_security_group_rules")
@pulumi.output_type
class KeyVaultReferenceResponse(dict):
"""
Identifies the Azure key vault associated with a Batch account.
"""
def __init__(__self__, *,
id: str,
url: str):
"""
Identifies the Azure key vault associated with a Batch account.
:param str id: The resource ID of the Azure key vault associated with the Batch account.
:param str url: The URL of the Azure key vault associated with the Batch account.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "url", url)
@property
@pulumi.getter
def id(self) -> str:
"""
The resource ID of the Azure key vault associated with the Batch account.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def url(self) -> str:
"""
The URL of the Azure key vault associated with the Batch account.
"""
return pulumi.get(self, "url")
@pulumi.output_type
class LinuxUserConfigurationResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sshPrivateKey":
suggest = "ssh_private_key"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in LinuxUserConfigurationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
LinuxUserConfigurationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
LinuxUserConfigurationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
gid: Optional[int] = None,
ssh_private_key: Optional[str] = None,
uid: Optional[int] = None):
"""
        :param int gid: The uid and gid properties must be specified together or not at all. If not specified, the underlying operating system picks the gid.
:param str ssh_private_key: The private key must not be password protected. The private key is used to automatically configure asymmetric-key based authentication for SSH between nodes in a Linux pool when the pool's enableInterNodeCommunication property is true (it is ignored if enableInterNodeCommunication is false). It does this by placing the key pair into the user's .ssh directory. If not specified, password-less SSH is not configured between nodes (no modification of the user's .ssh directory is done).
        :param int uid: The uid and gid properties must be specified together or not at all. If not specified, the underlying operating system picks the uid.
"""
if gid is not None:
pulumi.set(__self__, "gid", gid)
if ssh_private_key is not None:
pulumi.set(__self__, "ssh_private_key", ssh_private_key)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter
def gid(self) -> Optional[int]:
"""
        The uid and gid properties must be specified together or not at all. If not specified, the underlying operating system picks the gid.
"""
return pulumi.get(self, "gid")
@property
@pulumi.getter(name="sshPrivateKey")
def ssh_private_key(self) -> Optional[str]:
"""
The private key must not be password protected. The private key is used to automatically configure asymmetric-key based authentication for SSH between nodes in a Linux pool when the pool's enableInterNodeCommunication property is true (it is ignored if enableInterNodeCommunication is false). It does this by placing the key pair into the user's .ssh directory. If not specified, password-less SSH is not configured between nodes (no modification of the user's .ssh directory is done).
"""
return pulumi.get(self, "ssh_private_key")
@property
@pulumi.getter
def uid(self) -> Optional[int]:
"""
        The uid and gid properties must be specified together or not at all. If not specified, the underlying operating system picks the uid.
"""
return pulumi.get(self, "uid")
@pulumi.output_type
class MetadataItemResponse(dict):
"""
The Batch service does not assign any meaning to this metadata; it is solely for the use of user code.
"""
def __init__(__self__, *,
name: str,
value: str):
"""
The Batch service does not assign any meaning to this metadata; it is solely for the use of user code.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> str:
return pulumi.get(self, "value")
@pulumi.output_type
class NetworkConfigurationResponse(dict):
"""
The network configuration for a pool.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endpointConfiguration":
suggest = "endpoint_configuration"
elif key == "subnetId":
suggest = "subnet_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NetworkConfigurationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NetworkConfigurationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NetworkConfigurationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
endpoint_configuration: Optional['outputs.PoolEndpointConfigurationResponse'] = None,
subnet_id: Optional[str] = None):
"""
The network configuration for a pool.
:param 'PoolEndpointConfigurationResponse' endpoint_configuration: Pool endpoint configuration is only supported on pools with the virtualMachineConfiguration property.
:param str subnet_id: The virtual network must be in the same region and subscription as the Azure Batch account. The specified subnet should have enough free IP addresses to accommodate the number of nodes in the pool. If the subnet doesn't have enough free IP addresses, the pool will partially allocate compute nodes, and a resize error will occur. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet. The specified subnet must allow communication from the Azure Batch service to be able to schedule tasks on the compute nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the compute nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the compute nodes to unusable. For pools created via virtualMachineConfiguration the Batch account must have poolAllocationMode userSubscription in order to use a VNet. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication. For pools created with a virtual machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. For pools created with a cloud service configuration, enable ports 10100, 20100, and 30100. Also enable outbound connections to Azure Storage on port 443. For more details see: https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration
"""
if endpoint_configuration is not None:
pulumi.set(__self__, "endpoint_configuration", endpoint_configuration)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
@property
@pulumi.getter(name="endpointConfiguration")
def endpoint_configuration(self) -> Optional['outputs.PoolEndpointConfigurationResponse']:
"""
Pool endpoint configuration is only supported on pools with the virtualMachineConfiguration property.
"""
return pulumi.get(self, "endpoint_configuration")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> Optional[str]:
"""
The virtual network must be in the same region and subscription as the Azure Batch account. The specified subnet should have enough free IP addresses to accommodate the number of nodes in the pool. If the subnet doesn't have enough free IP addresses, the pool will partially allocate compute nodes, and a resize error will occur. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet. The specified subnet must allow communication from the Azure Batch service to be able to schedule tasks on the compute nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the compute nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the compute nodes to unusable. For pools created via virtualMachineConfiguration the Batch account must have poolAllocationMode userSubscription in order to use a VNet. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication. For pools created with a virtual machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. For pools created with a cloud service configuration, enable ports 10100, 20100, and 30100. Also enable outbound connections to Azure Storage on port 443. For more details see: https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration
"""
return pulumi.get(self, "subnet_id")
@pulumi.output_type
class NetworkSecurityGroupRuleResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceAddressPrefix":
suggest = "source_address_prefix"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NetworkSecurityGroupRuleResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NetworkSecurityGroupRuleResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NetworkSecurityGroupRuleResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
access: str,
priority: int,
source_address_prefix: str):
"""
:param int priority: Priorities within a pool must be unique and are evaluated in order of priority. The lower the number the higher the priority. For example, rules could be specified with order numbers of 150, 250, and 350. The rule with the order number of 150 takes precedence over the rule that has an order of 250. Allowed priorities are 150 to 3500. If any reserved or duplicate values are provided the request fails with HTTP status code 400.
        :param str source_address_prefix: Valid values are a single IP address (e.g. 10.10.10.10), an IP subnet (e.g. 192.168.1.0/24), a default tag, or * (for all addresses). If any other values are provided the request fails with HTTP status code 400.
"""
pulumi.set(__self__, "access", access)
pulumi.set(__self__, "priority", priority)
pulumi.set(__self__, "source_address_prefix", source_address_prefix)
@property
@pulumi.getter
def access(self) -> str:
return pulumi.get(self, "access")
@property
@pulumi.getter
def priority(self) -> int:
"""
Priorities within a pool must be unique and are evaluated in order of priority. The lower the number the higher the priority. For example, rules could be specified with order numbers of 150, 250, and 350. The rule with the order number of 150 takes precedence over the rule that has an order of 250. Allowed priorities are 150 to 3500. If any reserved or duplicate values are provided the request fails with HTTP status code 400.
"""
return pulumi.get(self, "priority")
@property
@pulumi.getter(name="sourceAddressPrefix")
def source_address_prefix(self) -> str:
"""
        Valid values are a single IP address (e.g. 10.10.10.10), an IP subnet (e.g. 192.168.1.0/24), a default tag, or * (for all addresses). If any other values are provided the request fails with HTTP status code 400.
"""
return pulumi.get(self, "source_address_prefix")
@pulumi.output_type
class OSDiskResponse(dict):
def __init__(__self__, *,
caching: Optional[str] = None):
"""
:param str caching: Default value is none.
"""
if caching is not None:
pulumi.set(__self__, "caching", caching)
@property
@pulumi.getter
def caching(self) -> Optional[str]:
"""
Default value is none.
"""
return pulumi.get(self, "caching")
@pulumi.output_type
class PoolEndpointConfigurationResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "inboundNatPools":
suggest = "inbound_nat_pools"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PoolEndpointConfigurationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PoolEndpointConfigurationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PoolEndpointConfigurationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
inbound_nat_pools: Sequence['outputs.InboundNatPoolResponse']):
"""
:param Sequence['InboundNatPoolResponse'] inbound_nat_pools: The maximum number of inbound NAT pools per Batch pool is 5. If the maximum number of inbound NAT pools is exceeded the request fails with HTTP status code 400.
"""
pulumi.set(__self__, "inbound_nat_pools", inbound_nat_pools)
@property
@pulumi.getter(name="inboundNatPools")
def inbound_nat_pools(self) -> Sequence['outputs.InboundNatPoolResponse']:
"""
The maximum number of inbound NAT pools per Batch pool is 5. If the maximum number of inbound NAT pools is exceeded the request fails with HTTP status code 400.
"""
return pulumi.get(self, "inbound_nat_pools")
@pulumi.output_type
class ResizeErrorResponse(dict):
def __init__(__self__, *,
code: str,
message: str,
details: Optional[Sequence['outputs.ResizeErrorResponse']] = None):
"""
:param str code: An identifier for the error. Codes are invariant and are intended to be consumed programmatically.
:param str message: A message describing the error, intended to be suitable for display in a user interface.
"""
pulumi.set(__self__, "code", code)
pulumi.set(__self__, "message", message)
if details is not None:
pulumi.set(__self__, "details", details)
@property
@pulumi.getter
def code(self) -> str:
"""
An identifier for the error. Codes are invariant and are intended to be consumed programmatically.
"""
return pulumi.get(self, "code")
@property
@pulumi.getter
def message(self) -> str:
"""
A message describing the error, intended to be suitable for display in a user interface.
"""
return pulumi.get(self, "message")
@property
@pulumi.getter
def details(self) -> Optional[Sequence['outputs.ResizeErrorResponse']]:
return pulumi.get(self, "details")
@pulumi.output_type
class ResizeOperationStatusResponse(dict):
"""
Describes either the current operation (if the pool AllocationState is Resizing) or the previously completed operation (if the AllocationState is Steady).
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nodeDeallocationOption":
suggest = "node_deallocation_option"
elif key == "resizeTimeout":
suggest = "resize_timeout"
elif key == "startTime":
suggest = "start_time"
elif key == "targetDedicatedNodes":
suggest = "target_dedicated_nodes"
elif key == "targetLowPriorityNodes":
suggest = "target_low_priority_nodes"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ResizeOperationStatusResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ResizeOperationStatusResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ResizeOperationStatusResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
errors: Optional[Sequence['outputs.ResizeErrorResponse']] = None,
node_deallocation_option: Optional[str] = None,
resize_timeout: Optional[str] = None,
start_time: Optional[str] = None,
target_dedicated_nodes: Optional[int] = None,
target_low_priority_nodes: Optional[int] = None):
"""
Describes either the current operation (if the pool AllocationState is Resizing) or the previously completed operation (if the AllocationState is Steady).
:param Sequence['ResizeErrorResponse'] errors: This property is set only if an error occurred during the last pool resize, and only when the pool allocationState is Steady.
:param str node_deallocation_option: The default value is requeue.
:param str resize_timeout: The default value is 15 minutes. The minimum value is 5 minutes. If you specify a value less than 5 minutes, the Batch service returns an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request).
"""
if errors is not None:
pulumi.set(__self__, "errors", errors)
if node_deallocation_option is not None:
pulumi.set(__self__, "node_deallocation_option", node_deallocation_option)
if resize_timeout is not None:
pulumi.set(__self__, "resize_timeout", resize_timeout)
if start_time is not None:
pulumi.set(__self__, "start_time", start_time)
if target_dedicated_nodes is not None:
pulumi.set(__self__, "target_dedicated_nodes", target_dedicated_nodes)
if target_low_priority_nodes is not None:
pulumi.set(__self__, "target_low_priority_nodes", target_low_priority_nodes)
@property
@pulumi.getter
def errors(self) -> Optional[Sequence['outputs.ResizeErrorResponse']]:
"""
This property is set only if an error occurred during the last pool resize, and only when the pool allocationState is Steady.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter(name="nodeDeallocationOption")
def node_deallocation_option(self) -> Optional[str]:
"""
The default value is requeue.
"""
return pulumi.get(self, "node_deallocation_option")
@property
@pulumi.getter(name="resizeTimeout")
def resize_timeout(self) -> Optional[str]:
"""
The default value is 15 minutes. The minimum value is 5 minutes. If you specify a value less than 5 minutes, the Batch service returns an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request).
"""
return pulumi.get(self, "resize_timeout")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> Optional[str]:
return pulumi.get(self, "start_time")
@property
@pulumi.getter(name="targetDedicatedNodes")
def target_dedicated_nodes(self) -> Optional[int]:
return pulumi.get(self, "target_dedicated_nodes")
@property
@pulumi.getter(name="targetLowPriorityNodes")
def target_low_priority_nodes(self) -> Optional[int]:
return pulumi.get(self, "target_low_priority_nodes")
@pulumi.output_type
class ResourceFileResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "blobSource":
suggest = "blob_source"
elif key == "filePath":
suggest = "file_path"
elif key == "fileMode":
suggest = "file_mode"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ResourceFileResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ResourceFileResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ResourceFileResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
blob_source: str,
file_path: str,
file_mode: Optional[str] = None):
"""
:param str blob_source: This URL must be readable using anonymous access; that is, the Batch service does not present any credentials when downloading the blob. There are two ways to get such a URL for a blob in Azure storage: include a Shared Access Signature (SAS) granting read permissions on the blob, or set the ACL for the blob or its container to allow public access.
:param str file_mode: This property applies only to files being downloaded to Linux compute nodes. It will be ignored if it is specified for a resourceFile which will be downloaded to a Windows node. If this property is not specified for a Linux node, then a default value of 0770 is applied to the file.
"""
pulumi.set(__self__, "blob_source", blob_source)
pulumi.set(__self__, "file_path", file_path)
if file_mode is not None:
pulumi.set(__self__, "file_mode", file_mode)
@property
@pulumi.getter(name="blobSource")
def blob_source(self) -> str:
"""
This URL must be readable using anonymous access; that is, the Batch service does not present any credentials when downloading the blob. There are two ways to get such a URL for a blob in Azure storage: include a Shared Access Signature (SAS) granting read permissions on the blob, or set the ACL for the blob or its container to allow public access.
"""
return pulumi.get(self, "blob_source")
@property
@pulumi.getter(name="filePath")
def file_path(self) -> str:
return pulumi.get(self, "file_path")
@property
@pulumi.getter(name="fileMode")
def file_mode(self) -> Optional[str]:
"""
This property applies only to files being downloaded to Linux compute nodes. It will be ignored if it is specified for a resourceFile which will be downloaded to a Windows node. If this property is not specified for a Linux node, then a default value of 0770 is applied to the file.
"""
return pulumi.get(self, "file_mode")
@pulumi.output_type
class ScaleSettingsResponse(dict):
"""
Defines the desired size of the pool. This can either be 'fixedScale' where the requested targetDedicatedNodes is specified, or 'autoScale' which defines a formula which is periodically reevaluated. If this property is not specified, the pool will have a fixed scale with 0 targetDedicatedNodes.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "autoScale":
suggest = "auto_scale"
elif key == "fixedScale":
suggest = "fixed_scale"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ScaleSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ScaleSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ScaleSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
auto_scale: Optional['outputs.AutoScaleSettingsResponse'] = None,
fixed_scale: Optional['outputs.FixedScaleSettingsResponse'] = None):
"""
Defines the desired size of the pool. This can either be 'fixedScale' where the requested targetDedicatedNodes is specified, or 'autoScale' which defines a formula which is periodically reevaluated. If this property is not specified, the pool will have a fixed scale with 0 targetDedicatedNodes.
:param 'AutoScaleSettingsResponse' auto_scale: This property and fixedScale are mutually exclusive and one of the properties must be specified.
:param 'FixedScaleSettingsResponse' fixed_scale: This property and autoScale are mutually exclusive and one of the properties must be specified.
"""
if auto_scale is not None:
pulumi.set(__self__, "auto_scale", auto_scale)
if fixed_scale is not None:
pulumi.set(__self__, "fixed_scale", fixed_scale)
@property
@pulumi.getter(name="autoScale")
def auto_scale(self) -> Optional['outputs.AutoScaleSettingsResponse']:
"""
This property and fixedScale are mutually exclusive and one of the properties must be specified.
"""
return pulumi.get(self, "auto_scale")
@property
@pulumi.getter(name="fixedScale")
def fixed_scale(self) -> Optional['outputs.FixedScaleSettingsResponse']:
"""
This property and autoScale are mutually exclusive and one of the properties must be specified.
"""
return pulumi.get(self, "fixed_scale")
@pulumi.output_type
class StartTaskResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "commandLine":
suggest = "command_line"
elif key == "environmentSettings":
suggest = "environment_settings"
elif key == "maxTaskRetryCount":
suggest = "max_task_retry_count"
elif key == "resourceFiles":
suggest = "resource_files"
elif key == "userIdentity":
suggest = "user_identity"
elif key == "waitForSuccess":
suggest = "wait_for_success"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in StartTaskResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
StartTaskResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
StartTaskResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
command_line: Optional[str] = None,
environment_settings: Optional[Sequence['outputs.EnvironmentSettingResponse']] = None,
max_task_retry_count: Optional[int] = None,
resource_files: Optional[Sequence['outputs.ResourceFileResponse']] = None,
user_identity: Optional['outputs.UserIdentityResponse'] = None,
wait_for_success: Optional[bool] = None):
"""
:param str command_line: The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. Required if any other properties of the startTask are specified.
:param int max_task_retry_count: The Batch service retries a task if its exit code is nonzero. Note that this value specifically controls the number of retries. The Batch service will try the task once, and may then retry up to this limit. For example, if the maximum retry count is 3, Batch tries the task up to 4 times (one initial try and 3 retries). If the maximum retry count is 0, the Batch service does not retry the task. If the maximum retry count is -1, the Batch service retries the task without limit.
:param 'UserIdentityResponse' user_identity: If omitted, the task runs as a non-administrative user unique to the task.
:param bool wait_for_success: If true and the start task fails on a compute node, the Batch service retries the start task up to its maximum retry count (maxTaskRetryCount). If the task has still not completed successfully after all retries, then the Batch service marks the compute node unusable, and will not schedule tasks to it. This condition can be detected via the node state and scheduling error detail. If false, the Batch service will not wait for the start task to complete. In this case, other tasks can start executing on the compute node while the start task is still running; and even if the start task fails, new tasks will continue to be scheduled on the node. The default is false.
"""
if command_line is not None:
pulumi.set(__self__, "command_line", command_line)
if environment_settings is not None:
pulumi.set(__self__, "environment_settings", environment_settings)
if max_task_retry_count is not None:
pulumi.set(__self__, "max_task_retry_count", max_task_retry_count)
if resource_files is not None:
pulumi.set(__self__, "resource_files", resource_files)
if user_identity is not None:
pulumi.set(__self__, "user_identity", user_identity)
if wait_for_success is not None:
pulumi.set(__self__, "wait_for_success", wait_for_success)
@property
@pulumi.getter(name="commandLine")
def command_line(self) -> Optional[str]:
"""
The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. Required if any other properties of the startTask are specified.
"""
return pulumi.get(self, "command_line")
@property
@pulumi.getter(name="environmentSettings")
def environment_settings(self) -> Optional[Sequence['outputs.EnvironmentSettingResponse']]:
return pulumi.get(self, "environment_settings")
@property
@pulumi.getter(name="maxTaskRetryCount")
def max_task_retry_count(self) -> Optional[int]:
"""
The Batch service retries a task if its exit code is nonzero. Note that this value specifically controls the number of retries. The Batch service will try the task once, and may then retry up to this limit. For example, if the maximum retry count is 3, Batch tries the task up to 4 times (one initial try and 3 retries). If the maximum retry count is 0, the Batch service does not retry the task. If the maximum retry count is -1, the Batch service retries the task without limit.
"""
return pulumi.get(self, "max_task_retry_count")
@property
@pulumi.getter(name="resourceFiles")
def resource_files(self) -> Optional[Sequence['outputs.ResourceFileResponse']]:
return pulumi.get(self, "resource_files")
@property
@pulumi.getter(name="userIdentity")
def user_identity(self) -> Optional['outputs.UserIdentityResponse']:
"""
If omitted, the task runs as a non-administrative user unique to the task.
"""
return pulumi.get(self, "user_identity")
@property
@pulumi.getter(name="waitForSuccess")
def wait_for_success(self) -> Optional[bool]:
"""
If true and the start task fails on a compute node, the Batch service retries the start task up to its maximum retry count (maxTaskRetryCount). If the task has still not completed successfully after all retries, then the Batch service marks the compute node unusable, and will not schedule tasks to it. This condition can be detected via the node state and scheduling error detail. If false, the Batch service will not wait for the start task to complete. In this case, other tasks can start executing on the compute node while the start task is still running; and even if the start task fails, new tasks will continue to be scheduled on the node. The default is false.
"""
return pulumi.get(self, "wait_for_success")
@pulumi.output_type
class TaskSchedulingPolicyResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nodeFillType":
suggest = "node_fill_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in TaskSchedulingPolicyResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
TaskSchedulingPolicyResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
TaskSchedulingPolicyResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
node_fill_type: str):
pulumi.set(__self__, "node_fill_type", node_fill_type)
@property
@pulumi.getter(name="nodeFillType")
def node_fill_type(self) -> str:
return pulumi.get(self, "node_fill_type")
@pulumi.output_type
class UserAccountResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "elevationLevel":
suggest = "elevation_level"
elif key == "linuxUserConfiguration":
suggest = "linux_user_configuration"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in UserAccountResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
UserAccountResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
UserAccountResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
name: str,
password: str,
elevation_level: Optional[str] = None,
linux_user_configuration: Optional['outputs.LinuxUserConfigurationResponse'] = None):
"""
        :param str elevation_level: nonAdmin - The user is a standard user without elevated access. admin - The user is a user with elevated access and operates with full Administrator permissions. The default value is nonAdmin.
:param 'LinuxUserConfigurationResponse' linux_user_configuration: This property is ignored if specified on a Windows pool. If not specified, the user is created with the default options.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "password", password)
if elevation_level is not None:
pulumi.set(__self__, "elevation_level", elevation_level)
if linux_user_configuration is not None:
pulumi.set(__self__, "linux_user_configuration", linux_user_configuration)
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def password(self) -> str:
return pulumi.get(self, "password")
@property
@pulumi.getter(name="elevationLevel")
def elevation_level(self) -> Optional[str]:
"""
        nonAdmin - The user is a standard user without elevated access. admin - The user is a user with elevated access and operates with full Administrator permissions. The default value is nonAdmin.
"""
return pulumi.get(self, "elevation_level")
@property
@pulumi.getter(name="linuxUserConfiguration")
def linux_user_configuration(self) -> Optional['outputs.LinuxUserConfigurationResponse']:
"""
This property is ignored if specified on a Windows pool. If not specified, the user is created with the default options.
"""
return pulumi.get(self, "linux_user_configuration")
@pulumi.output_type
class UserIdentityResponse(dict):
"""
Specify either the userName or autoUser property, but not both.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "autoUser":
suggest = "auto_user"
elif key == "userName":
suggest = "user_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in UserIdentityResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
UserIdentityResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
UserIdentityResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
auto_user: Optional['outputs.AutoUserSpecificationResponse'] = None,
user_name: Optional[str] = None):
"""
Specify either the userName or autoUser property, but not both.
:param 'AutoUserSpecificationResponse' auto_user: The userName and autoUser properties are mutually exclusive; you must specify one but not both.
:param str user_name: The userName and autoUser properties are mutually exclusive; you must specify one but not both.
"""
if auto_user is not None:
pulumi.set(__self__, "auto_user", auto_user)
if user_name is not None:
pulumi.set(__self__, "user_name", user_name)
@property
@pulumi.getter(name="autoUser")
def auto_user(self) -> Optional['outputs.AutoUserSpecificationResponse']:
"""
The userName and autoUser properties are mutually exclusive; you must specify one but not both.
"""
return pulumi.get(self, "auto_user")
@property
@pulumi.getter(name="userName")
def user_name(self) -> Optional[str]:
"""
The userName and autoUser properties are mutually exclusive; you must specify one but not both.
"""
return pulumi.get(self, "user_name")
@pulumi.output_type
class VirtualMachineConfigurationResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "imageReference":
suggest = "image_reference"
elif key == "nodeAgentSkuId":
suggest = "node_agent_sku_id"
elif key == "dataDisks":
suggest = "data_disks"
elif key == "licenseType":
suggest = "license_type"
elif key == "osDisk":
suggest = "os_disk"
elif key == "windowsConfiguration":
suggest = "windows_configuration"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in VirtualMachineConfigurationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
VirtualMachineConfigurationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
VirtualMachineConfigurationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
image_reference: 'outputs.ImageReferenceResponse',
node_agent_sku_id: str,
data_disks: Optional[Sequence['outputs.DataDiskResponse']] = None,
license_type: Optional[str] = None,
os_disk: Optional['outputs.OSDiskResponse'] = None,
windows_configuration: Optional['outputs.WindowsConfigurationResponse'] = None):
"""
:param str node_agent_sku_id: The Batch node agent is a program that runs on each node in the pool, and provides the command-and-control interface between the node and the Batch service. There are different implementations of the node agent, known as SKUs, for different operating systems. You must specify a node agent SKU which matches the selected image reference. To get the list of supported node agent SKUs along with their list of verified image references, see the 'List supported node agent SKUs' operation.
:param Sequence['DataDiskResponse'] data_disks: This property must be specified if the compute nodes in the pool need to have empty data disks attached to them.
:param str license_type: This only applies to images that contain the Windows operating system, and should only be used when you hold valid on-premises licenses for the nodes which will be deployed. If omitted, no on-premises licensing discount is applied. Values are:
Windows_Server - The on-premises license is for Windows Server.
Windows_Client - The on-premises license is for Windows Client.
:param 'WindowsConfigurationResponse' windows_configuration: This property must not be specified if the imageReference specifies a Linux OS image.
"""
pulumi.set(__self__, "image_reference", image_reference)
pulumi.set(__self__, "node_agent_sku_id", node_agent_sku_id)
if data_disks is not None:
pulumi.set(__self__, "data_disks", data_disks)
if license_type is not None:
pulumi.set(__self__, "license_type", license_type)
if os_disk is not None:
pulumi.set(__self__, "os_disk", os_disk)
if windows_configuration is not None:
pulumi.set(__self__, "windows_configuration", windows_configuration)
@property
@pulumi.getter(name="imageReference")
def image_reference(self) -> 'outputs.ImageReferenceResponse':
return pulumi.get(self, "image_reference")
@property
@pulumi.getter(name="nodeAgentSkuId")
def node_agent_sku_id(self) -> str:
"""
The Batch node agent is a program that runs on each node in the pool, and provides the command-and-control interface between the node and the Batch service. There are different implementations of the node agent, known as SKUs, for different operating systems. You must specify a node agent SKU which matches the selected image reference. To get the list of supported node agent SKUs along with their list of verified image references, see the 'List supported node agent SKUs' operation.
"""
return pulumi.get(self, "node_agent_sku_id")
@property
@pulumi.getter(name="dataDisks")
def data_disks(self) -> Optional[Sequence['outputs.DataDiskResponse']]:
"""
This property must be specified if the compute nodes in the pool need to have empty data disks attached to them.
"""
return pulumi.get(self, "data_disks")
@property
@pulumi.getter(name="licenseType")
def license_type(self) -> Optional[str]:
"""
This only applies to images that contain the Windows operating system, and should only be used when you hold valid on-premises licenses for the nodes which will be deployed. If omitted, no on-premises licensing discount is applied. Values are:
Windows_Server - The on-premises license is for Windows Server.
Windows_Client - The on-premises license is for Windows Client.
"""
return pulumi.get(self, "license_type")
@property
@pulumi.getter(name="osDisk")
def os_disk(self) -> Optional['outputs.OSDiskResponse']:
return pulumi.get(self, "os_disk")
@property
@pulumi.getter(name="windowsConfiguration")
def windows_configuration(self) -> Optional['outputs.WindowsConfigurationResponse']:
"""
This property must not be specified if the imageReference specifies a Linux OS image.
"""
return pulumi.get(self, "windows_configuration")
@pulumi.output_type
class WindowsConfigurationResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableAutomaticUpdates":
suggest = "enable_automatic_updates"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in WindowsConfigurationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
WindowsConfigurationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
WindowsConfigurationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_automatic_updates: Optional[bool] = None):
"""
:param bool enable_automatic_updates: If omitted, the default value is true.
"""
if enable_automatic_updates is not None:
pulumi.set(__self__, "enable_automatic_updates", enable_automatic_updates)
@property
@pulumi.getter(name="enableAutomaticUpdates")
def enable_automatic_updates(self) -> Optional[bool]:
"""
If omitted, the default value is true.
"""
return pulumi.get(self, "enable_automatic_updates")
| [
"[email protected]"
] | |
615a7ce2809c9bdded6aa8d12c3fb35fe9b1ec63 | d094ba0c8a9b1217fbf014aa79a283a49aabe88c | /env/share/doc/networkx-2.2/examples/drawing/plot_four_grids.py | 9123bb27725a34a30be61f73d9402afefc496e05 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | Raniac/NEURO-LEARN | d9274e0baadd97bb02da54bdfcf6ca091fc1c703 | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | refs/heads/master | 2022-12-25T23:46:54.922237 | 2020-09-06T03:15:14 | 2020-09-06T03:15:14 | 182,013,100 | 9 | 2 | Apache-2.0 | 2022-12-09T21:01:00 | 2019-04-18T03:57:00 | CSS | UTF-8 | Python | false | false | 838 | py | #!/usr/bin/env python
"""
==========
Four Grids
==========
Draw a graph with matplotlib.
You must have matplotlib for this to work.
"""
# Author: Aric Hagberg ([email protected])
# Copyright (C) 2004-2018
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
import matplotlib.pyplot as plt
import networkx as nx
G = nx.grid_2d_graph(4, 4) # 4x4 grid
pos = nx.spring_layout(G, iterations=100)
plt.subplot(221)
nx.draw(G, pos, font_size=8)
plt.subplot(222)
nx.draw(G, pos, node_color='k', node_size=0, with_labels=False)
plt.subplot(223)
nx.draw(G, pos, node_color='g', node_size=250, with_labels=False, width=6)
plt.subplot(224)
H = G.to_directed()
nx.draw(H, pos, node_color='b', node_size=20, with_labels=False)
plt.show()
| [
"[email protected]"
] | |
2fcd265f4cfb612be33f39b9c2a640b0e8ebb430 | fd18ce27b66746f932a65488aad04494202e2e0d | /d11_spider/codes/Adv_Spider/Adv_Spider/spiders/baidu_request.py | 2700834d6cf9b738aef84a4995acbedad18f4c50 | [] | no_license | daofeng123/ClassCodes | 1acbd843836e550c9cebf67ef21dfca9f6b9fc87 | fbcd1f24d79b8bb56ad0669b07ad118064609612 | refs/heads/master | 2020-06-24T12:34:28.148197 | 2019-08-15T03:56:40 | 2019-08-15T03:56:40 | 198,963,469 | 3 | 0 | null | 2019-07-26T06:53:45 | 2019-07-26T06:53:44 | null | UTF-8 | Python | false | false | 1,268 | py | # -*- coding: utf-8 -*-
import json
import scrapy
import scrapy.http
class BaiduRequestSpider(scrapy.Spider):
name = 'baidu_request'
allowed_domains = ['fanyi.baidu.com']
start_urls = ['https://fanyi.baidu.com/sug']
def start_requests(self):
# Request
headers = {
'content-type': 'application/x-www-form-urlencoded',
}
        translator_1 = scrapy.Request(
url=self.start_urls[0],
callback=self.get_translate,
            method='POST',  # docs say this should be uppercase; in practice either case works
            headers=headers,  # sets the form content type
            dont_filter=True,  # same request URL twice; stop it being dropped as a duplicate
            body='kw=test'.encode())  # posting via body means the form content type must be set by hand
# FormRequest
form = {
'kw': 'test'
}
        translator_2 = scrapy.FormRequest(
url=self.start_urls[0],
callback=self.get_translate,
dont_filter=True,
            formdata=form)  # FormRequest builds the form body itself, no manual encoding needed
        return [translator_1, translator_2]
def get_translate(self, response):
print('--------------------')
result = json.loads(response.text)
print(result)
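# Usage sketch (not part of the original file; assumes a standard Scrapy
# project layout): from the project root, run
#   scrapy crawl baidu_request
# Both requests POST kw=test to the /sug endpoint and print the parsed JSON.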
| [
"[email protected]"
] | |
703e62e2db63fdf7b1f11b8d7276719c0d544cd9 | 3f9e960174cfc5c8bd6827ce5362124c467a3952 | /python/data_structure/api_fields_values_histogram.py | 09fed45101551344fdd765d4719736c3bf9bb234 | [] | no_license | monobinab/python | f3ec6d462d7149c007ac9e14e72132eae73b4acd | 265621b045969c819eb86fa7ba2a3bdfad34ecb6 | refs/heads/master | 2020-12-03T00:04:29.185880 | 2017-07-01T18:53:11 | 2017-07-01T20:06:16 | 95,982,002 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | #!/usr/bin/env python
# this script parses Storm log files and writes pipe-delimited output
import sys
from datetime import date, timedelta, datetime
#from dateutil import parser
counts = dict()
tmp_lst = list()
# input comes from STDIN (standard input)
for line in sys.stdin:
client = ""
try:
if line is not None and "api: response" in line:
fields = line.split("|")
client = fields[3].strip()
counts[client] = counts.get(client, 0) + 1
    except Exception:
        continue
#print('%s' % (counts))
for key, val in counts.items():
tmp_lst.append((key, val))
for k,v in tmp_lst:
outline = str(k) + "|" + str(v)
print('%s' % (outline))
#print('%s' % (outline))
#outline = counts
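# Illustrative run (the log line below is made up, but matches the parsing
# above: pipe-delimited, contains "api: response", client name in field 3):
#   input:  2017-06-30 12:00:01|worker-1|api: response ok|acme|200
#   output: acme|1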
| [
"[email protected]"
] | |
7bd38216ab778e3afaf00fe52d4e142572267d0c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03862/s008273879.py | 920a1b732a14199ec1f52a59849810aee0710dc2 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | n, x = map(int, input().split())
A = list(map(int, input().split()))
ans = 0
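# Greedy scan: for each adjacent pair holding more than x candies in total,
# eat exactly the excess, taking it from the right box first (it also
# belongs to the next pair, so eating there can never hurt).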
for i in range(n - 1):
if A[i] + A[i + 1] > x:
ans += A[i] + A[i + 1] - x
A[i + 1] = max(x - A[i], 0)
print(ans)
 | [
"[email protected]"
] | |
246f7e3ba7f6dd25ba307531bd7af5a603be58c7 | 802c002ecd90be6b489e0250ec4454a11c17ed1d | /src/homework/homework13/main_homework7.py | 301a2f393bbefdf4b2ea9b60e17fa62bb2949b97 | [
"MIT"
] | permissive | acc-cosc-1336/cosc-1336-spring-2018-EricScotty | 3a088285ae853a9ff5d747f2301f755c5e3749b3 | 80c0249a583dc178cfc7bb95b851d7f3240dc3e9 | refs/heads/master | 2021-09-14T05:54:02.132705 | 2018-05-08T23:27:04 | 2018-05-08T23:27:04 | 118,071,042 | 0 | 0 | MIT | 2018-04-23T02:51:05 | 2018-01-19T03:22:13 | Python | UTF-8 | Python | false | false | 969 | py | from src.homework.homework7 import get_p_distance_matrix, print_get_p_distance_matrix
'''
Write a main function to...
Read p_distance.dat file
From the file data, create a two-dimensional list like the following example:
[
['T','T','T','C','C','A','T','T','T','A'],
['G','A','T','T','C','A','T','T','T','C'],
['T','T','T','C','C','A','T','T','T','T'],
['G','T','T','C','C','A','T','T','T','A']
]
Pass the list to the get_p_distance_matrix function as an argument
Display the p distance matrix to screen
'''
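# A matching p_distance.dat would look like this (illustrative; spaces are
# stripped before the characters are split):
# T T T C C A T T T A
# G A T T C A T T T C
# T T T C C A T T T T
# G T T C C A T T T A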
def main():
matrix = []
    with open('p_distance.dat', 'r') as file_object:
        content = file_object.readlines()
for line in content[:]:
line = line.rstrip('\n').replace(" ", "")
matrix_line = []
for letter in line:
matrix_line.append(letter)
matrix.append(matrix_line)
results_mtrx = get_p_distance_matrix(matrix)
print_get_p_distance_matrix(results_mtrx)
main()
| [
"[email protected]"
] | |
1dbb1088e45851aaa83d57c2a6646e4158d3da5a | d746f9f262961fd4c65eb332d8325f7fdacf3757 | /dingureu/settings.py | f9c67e688925e9835faf2ffc1592a29418a4d0d6 | [] | no_license | Ganodab-Brothers/dingureu-django-backend | c69c84e48c64874bb283fec07dee2c203fca6b08 | 2aa37bbdd5c8b003b0e73854c2eca23a3deccb06 | refs/heads/master | 2023-04-20T21:10:26.686591 | 2021-04-28T00:33:39 | 2021-04-28T00:33:39 | 359,873,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,866 | py | """
Django settings for dingureu project.
Generated by 'django-admin startproject' using Django 3.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
from config import envs
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = envs.SECRET_KEY
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = envs.DEBUG == 'true'
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'drf_yasg',
'user',
'file',
'article',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'dingureu.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dingureu.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': envs.DB_HOST,
'PORT': envs.DB_PORT,
'USER': envs.DB_USER,
'PASSWORD': envs.DB_PASSWORD,
'NAME': envs.DB_NAME,
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME':
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / 'static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
AUTH_USER_MODEL = 'user.User'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES':
('rest_framework_simplejwt.authentication.JWTAuthentication', )
}
SWAGGER_SETTINGS = {
'SECURITY_DEFINITIONS': {
'Bearer': {
'type': 'apiKey',
'name': 'Authorization',
'in': 'header',
}
},
'USE_SESSION_AUTH': False,
}
 | [
"[email protected]"
] | |
3771a201b73d42edf636e5b428045ba1ac66f5c6 | 27e890f900bd4bfb2e66f4eab85bc381cf4d5d3f | /tests/unit/modules/network/fortios/test_fortios_wireless_controller_hotspot20_anqp_venue_name.py | bdf6ef323ce1aea195b6df34a3ba3bf890a406c3 | [] | no_license | coll-test/notstdlib.moveitallout | eb33a560070bbded5032385d0aea2f3cf60e690b | 0987f099b783c6cf977db9233e1c3d9efcbcb3c7 | refs/heads/master | 2020-12-19T22:28:33.369557 | 2020-01-23T18:51:26 | 2020-01-23T18:51:26 | 235,865,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,760 | py | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible_collections.notstdlib.moveitallout.plugins.modules import fortios_wireless_controller_hotspot20_anqp_venue_name
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.fortios_wireless_controller_hotspot20_anqp_venue_name.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_wireless_controller_hotspot20_anqp_venue_name_creation(mocker):
schema_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'wireless_controller_hotspot20_anqp_venue_name': {
'name': 'default_name_3',
},
'vdom': 'root'}
is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_venue_name.fortios_wireless_controller_hotspot20(input_data, fos_instance)
expected_data = {
'name': 'default_name_3',
}
set_method_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-venue-name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_wireless_controller_hotspot20_anqp_venue_name_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'wireless_controller_hotspot20_anqp_venue_name': {
'name': 'default_name_3',
},
'vdom': 'root'}
is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_venue_name.fortios_wireless_controller_hotspot20(input_data, fos_instance)
expected_data = {
'name': 'default_name_3',
}
set_method_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-venue-name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_wireless_controller_hotspot20_anqp_venue_name_removal(mocker):
schema_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'wireless_controller_hotspot20_anqp_venue_name': {
'name': 'default_name_3',
},
'vdom': 'root'}
is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_venue_name.fortios_wireless_controller_hotspot20(input_data, fos_instance)
delete_method_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-venue-name', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_wireless_controller_hotspot20_anqp_venue_name_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'wireless_controller_hotspot20_anqp_venue_name': {
'name': 'default_name_3',
},
'vdom': 'root'}
is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_venue_name.fortios_wireless_controller_hotspot20(input_data, fos_instance)
delete_method_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-venue-name', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_wireless_controller_hotspot20_anqp_venue_name_idempotent(mocker):
schema_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'wireless_controller_hotspot20_anqp_venue_name': {
'name': 'default_name_3',
},
'vdom': 'root'}
is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_venue_name.fortios_wireless_controller_hotspot20(input_data, fos_instance)
expected_data = {
'name': 'default_name_3',
}
set_method_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-venue-name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_wireless_controller_hotspot20_anqp_venue_name_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'wireless_controller_hotspot20_anqp_venue_name': {
'random_attribute_not_valid': 'tag',
'name': 'default_name_3',
},
'vdom': 'root'}
is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_venue_name.fortios_wireless_controller_hotspot20(input_data, fos_instance)
expected_data = {
'name': 'default_name_3',
}
set_method_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-venue-name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| [
"[email protected]"
] | |
651237ce3104a4d44722f7c75a12ee6769a94487 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03937/s470682175.py | 1480749348da6cfb1296018b442d2120f2ab5b1e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 539 | py | H,W=map(int,input().split())
field=[]
for i in range(H):
s=input()
a=[]
for j in range(W):
a.append(s[j])
field.append(a)
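# Erase the '#' at (0, 0), then keep stepping to an adjacent '#' to the
# right or below, erasing as we go.  The grid is judged Possible exactly
# when this single monotone path clears every '#'.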
def dfs(x,y):
field[x][y]='.'
dx=[0,1]
dy=[1,0]
for i in range(2):
nx=x+dx[i]
ny=y+dy[i]
if 0<=nx and nx<H and 0<=ny and ny<W and field[nx][ny]=="#":
dfs(nx,ny)
break
return
dfs(0,0)
for i in range(H):
for j in range(W):
if field[i][j]=='#':
print('Impossible')
exit()
print('Possible')
 | [
"[email protected]"
] | |
5677e9d43343d9fc12b254471edc7a2171109106 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_149/ch21_2019_04_02_16_06_50_086554.py | 3aa4e84d55d5d3fd7844bb18f9713fcc3c6ff6ca | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | valor=float(input('Digite o valor da conta: '))
print('Valor da conta com 10%: {0} R$'.format(valor+(valor*0.1)))
 | [
"[email protected]"
] | |
e58c18ae7deb5e3276db3668b31f0aac64272045 | 8ea7912401d3f609765738f38b44561e4b2dbd5f | /tests/__init__.py | efa5acf44b1b4a1e8786eee4fbe0aab916e05cd1 | [
"MIT"
] | permissive | zeroam/sshmng | 1b46099ef6d9b174e7debf64e35663c6998b01c9 | c3b705aeb6b086ec02664e74a31650ec2ffc41f6 | refs/heads/master | 2023-03-15T13:52:02.056589 | 2021-03-22T12:16:32 | 2021-03-22T12:16:32 | 343,767,017 | 1 | 1 | MIT | 2021-03-22T12:16:33 | 2021-03-02T12:29:04 | Python | UTF-8 | Python | false | false | 36 | py | """Unit test package for sshmng."""
| [
"[email protected]"
] | |
283e213ddbd425032b95f95fc7675f17753b61d8 | 3a6d382503e11753dd81b291145847a2eabb8ec6 | /util/lmk/lmk.py | e8bec87cf504fdbb8c9fd60491dd617c7d103d6a | [
"MIT"
] | permissive | QuXing9/phd | 7e6f107c20e0b3b1de2b25eb99e0b640a4a0bfcf | 58ba53b6d78515ed555e40527f6923e28941cc19 | refs/heads/master | 2022-02-27T03:29:05.126378 | 2019-10-22T02:46:57 | 2019-10-22T02:46:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,016 | py | #!/usr/bin/env python
"""let me know - Email output of command upon completion
Attributes
----------
__description__ : str
Package description.
DEFAULT_CFG : str
Default configuration.
DEFAULT_CFG_PATH : str
Default path to the configuration file.
E_CFG : int
Non-zero return code for configuration errors.
E_SMTP : int
Non-zero return code for fatal SMTP errors.
"""
from __future__ import print_function
import argparse
import cgi
import os
import smtplib
import socket
import string
import subprocess
import sys
from datetime import datetime
from email.mime.application import MIMEApplication
import humanize
# Python 2 and 3 have different email module layouts:
if sys.version_info >= (3, 0):
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
else:
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
DEFAULT_CFG_PATH = os.path.expanduser('~/.lmkrc')
__description__ = """\
{bin}: let me know. Patiently awaits the completion of the
specified command, and emails you with the output and result.
Examples
--------
Run a command using lmk to receive an email when it completes, containing its
output and return code:
$ lmk './experiments -n 100'
Alternatively, pipe the output of commands to lmk to receive an email when they
complete:
$ (./experiment1.sh; experiment2.py -n 100) 2>&1 | lmk -
Configuration
-------------
The file {cfg} contains the configuration settings. Modify
the smtp and message settings to suit.
Made with \033[1;31m♥\033[0;0m by Chris Cummins.
<https://github.com/ChrisCummins/phd>\
""".format(bin=sys.argv[0], cfg=DEFAULT_CFG_PATH)
DEFAULT_CFG = """\
; lkm config <https://github.com/ChrisCummins/phd>
; Configure smtp section to your outgoing mailbox.
; Shell variables are expanded in this file.
[smtp]
Host: smtp.gmail.com
Port: 587
Username: $LMK_USER
Password: $LMK_PWD
[exec]
Shell: /bin/bash
[messages]
From: $USER@$HOST
To: $MAILTO
"""
E_CFG = 2
E_SMTP = 3
class colors:
"""
Shell escape codes.
"""
reset = '\033[0;0m'
red = '\033[1;31m'
blue = '\033[1;34m'
cyan = '\033[1;36m'
green = '\033[0;32m'
bold = '\033[;1m'
reverse = '\033[;7m'
class ArgumentParser(argparse.ArgumentParser):
"""
Specialized argument parser, with --version flag.
"""
def __init__(self, *args, **kwargs):
"""
See python argparse.ArgumentParser.__init__().
"""
super(ArgumentParser, self).__init__(*args, **kwargs)
self.add_argument(
'--version',
action='store_true',
help='show version information and exit')
self.add_argument(
'--create-config', action='store_true',
help='create configuration file and exit')
def parse_args(self, args=sys.argv[1:], namespace=None):
"""
See python argparse.ArgumentParser.parse_args().
"""
# --version option overrides the normal argument parsing process.
if '--version' in args:
print('lmk master, made with {c.red}♥{c.reset} by '
'Chris Cummins <[email protected]>'.format(c=colors))
sys.exit(0)
if '--create-config' in args:
get_cfg_path()
sys.exit(0)
return super(ArgumentParser, self).parse_args(args, namespace)
def parse_args(args):
"""
Parse command line options.
Returns
-------
str
Command to execute.
"""
parser = ArgumentParser(
description=__description__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'-e', '--only-errors', action='store_true',
help='only notify if command fails')
parser.add_argument(
'command', metavar='<command>',
help='command to execute, or "-" to read from stdin')
return parser.parse_args(args)
def create_default_cfg(path):
"""
Create default configuration file.
Parameters
----------
path : str
Path of cfg file to create.
"""
with open(path, 'w') as outfile:
print(DEFAULT_CFG, end='', file=outfile)
os.chmod(path, 384) # 384 == 0o600
print(
'{c.bold}[lmk] created default configuration file {path}{c.reset}'
.format(c=colors, path=path),
file=sys.stderr)
def parse_str(str_, substitutions={}):
"""
Parse a string, escaping shell and special variables.
Rudimentary, crummy bash variable parser.
Parameters
----------
str_ : str
String to parse.
substitutions : Dict[str, lambda: str]
A dictionary of substitution functions.
"""
def expandvar():
if ''.join(varname) in substitutions:
var = substitutions[''.join(varname)]()
else:
var = os.environ.get(''.join(varname), '')
out.append(var)
BASH_VAR_CHARS = string.ascii_letters + string.digits + '_'
# parser state
out = []
varname = []
invar = False
escape = False
for c in str_:
if c == '\\':
if escape:
# '\\' -> '\'
out.append('\\')
escape = False
else:
escape = True
elif c == '$':
if escape:
# '\$' -> '$'
out.append('$')
escape = False
else:
if invar:
# '$foo$bar' -> $(foo) $(bar)
expandvar()
varname = []
invar = True
elif c == ' ':
escape = False
if invar:
# '$foo ' -> $(foo)' '
expandvar()
varname = []
invar = False
out.append(' ')
else:
if invar:
if c in BASH_VAR_CHARS:
varname.append(c)
else:
# '$foo@' -> $(foo)'@'
expandvar()
varname = []
invar = False
out.append(c)
else:
escape = False
out.append(c)
if invar:
expandvar()
return ''.join(out)
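# Illustrative behaviour (not part of the original module): with
# os.environ['USER'] == 'alice',
#   parse_str('$USER@$HOST', {'HOST': lambda: 'devbox'}) -> 'alice@devbox'
#   parse_str('\\$USER')                                 -> '$USER'
# i.e. substitutions win over the environment, and an escaped '$' is literal.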
def load_cfg(path=None):
"""
Parse configuration.
In case of error, kills process with status E_CFG.
Returns
-------
ConfigParser
Parsed configuration.
"""
def _verify(stmt, *msg, **kwargs):
sep = kwargs.get('sep', ' ')
if not stmt:
print(
'{c.bold}{c.red}[lmk] {msg}{c.reset}'.format(
c=colors, msg=sep.join(msg)), file=sys.stderr)
sys.exit(E_CFG)
if sys.version_info >= (3, 0):
from configparser import ConfigParser
else:
from ConfigParser import ConfigParser
if path is None:
path = get_cfg_path()
cfg = ConfigParser()
cfg.read(path)
_verify('smtp' in cfg, 'config file %s contains no [smtp] section' % path)
_verify('host' in cfg['smtp'], 'no host in %s:smtp' % path)
_verify('port' in cfg['smtp'], 'no port in %s:smtp' % path)
_verify('username' in cfg['smtp'], 'no username in %s:smtp' % path)
_verify('password' in cfg['smtp'], 'no password in %s:smtp' % path)
_verify('messages' in cfg,
'config file %s contains no [messages] section' % path)
_verify('from' in cfg['messages'], 'no from address in %s:messages' % path)
_verify('to' in cfg['messages'], 'no to address in %s:messages' % path)
parse = lambda x: parse_str(x, {'HOST': lambda: socket.gethostname()})
cfg['smtp']['host'] = parse(cfg['smtp']['host'])
cfg['smtp']['port'] = parse(cfg['smtp']['port'])
cfg['smtp']['username'] = parse(cfg['smtp']['username'])
cfg['smtp']['password'] = parse(cfg['smtp']['password'])
_verify(cfg['smtp']['host'], 'stmp host is empty. Check %s' % path)
_verify(cfg['smtp']['port'], 'stmp port is empty. Check %s' % path)
_verify(cfg['smtp']['username'], 'stmp username is empty. Check %s' % path)
_verify(cfg['smtp']['password'], 'stmp password is empty. Check %s' % path)
cfg['messages']['from'] = parse(cfg['messages']['from'])
cfg['messages']['to'] = parse(cfg['messages']['to'])
# note: 'subject' variables are parsed after command completion,
    # so we can substitute in outcomes.
if 'exec' not in cfg:
cfg.add_section('exec')
if 'shell' not in cfg['exec']:
cfg['exec']['shell'] = '/bin/sh'
# add runtime metadata
cfg.add_section('/run')
cfg['/run']['path'] = path
return cfg
def get_smtp_server(cfg):
"""
    Create a connection to an SMTP server.
    In case of an error, this function kills the process.
    Remember to close the connection with quit().
Parameters
----------
cfg : ConfigParser
Configuration.
Returns
-------
SMTP
SMTP Server.
"""
def _error(*msg, **kwargs):
sep = kwargs.get('sep', ' ')
print('{c.bold}{c.red}[lmk] {msg}{c.reset}'.format(
c=colors, msg=sep.join(msg)), file=sys.stderr)
sys.exit(E_SMTP)
try:
server = smtplib.SMTP(cfg['smtp']['host'], int(cfg['smtp']['port']))
server.starttls()
server.login(cfg['smtp']['username'], cfg['smtp']['password'])
return server
except smtplib.SMTPHeloError:
_error('connection to {host}:{port} failed'.format(
host=cfg['smtp']['host'], port=cfg['smtp']['port']))
except smtplib.SMTPAuthenticationError:
_error('smtp authentication failed. Check username and password in '
'%s' % cfg['/run']['path'])
except smtplib.SMTPServerDisconnected:
_error('{host}:{port} disconnected. Check smtp settings in {cfg_path}'
.format(host=cfg['smtp']['host'], port=cfg['smtp']['port'],
cfg_path=cfg['/run']['path']), file=sys.stderr)
except smtplib.SMTPException:
_error('unknown error from {host}:{port}'.format(
host=cfg['smtp']['host'], port=cfg['smtp']['port']))
def send_email_smtp(cfg, server, msg):
"""
Send an email.
Parameters
----------
server : SMTP
SMTP server.
msg : MIMEMultipart
Message to send.
Returns
-------
bool
True is send suceeded, else false.
"""
def _error(*msg, **kwargs):
sep = kwargs.get('sep', ' ')
print(
'{c.bold}{c.red}[lmk] {msg}{c.reset}'.format(c=colors,
msg=sep.join(msg)),
file=sys.stderr)
return False
recipient = msg['To'].strip()
if not recipient:
return _error('no recipient')
try:
server.sendmail(msg['From'], msg['To'], msg.as_string())
print(
'{c.bold}{c.cyan}[lmk] {recipient} notified{c.reset}'.format(
c=colors, recipient=recipient),
file=sys.stderr)
return True
except smtplib.SMTPHeloError:
return _error('connection to {host}:{port} failed'.format(
host=cfg['smtp']['host'], port=cfg['smtp']['port']))
except smtplib.SMTPDataError:
return _error('unknown error from {host}:{port}'.format(
host=cfg['smtp']['host'], port=cfg['smtp']['port']))
except smtplib.SMTPRecipientsRefused:
return _error('recipient {recipient} refused'.format(
recipient=recipient))
except smtplib.SMTPSenderRefused:
return _error('sender {from_} refused'.format(from_=msg['From']))
return False
def build_html_message_body(output, command=None, returncode=None,
date_started=None, date_ended=None,
runtime=None, snip_after=220, snip_to=200):
"""
Parameters
----------
command : str
The command which was run.
output : str
The output of the command.
    returncode : int (optional)
        The command's exit status, if known.
    date_started, date_ended : datetime (optional)
        When the command started and finished.
    runtime : Tuple(datetime, datetime) (optional)
        Unused.
    snip_after : int (optional)
        The maximum number of lines to permit before snipping the message.
    snip_to : int (optional)
        If the number of lines exceeds snip_after, snip the output to this
        many lines.
Returns
-------
Tuple(str, bool)
The HTML body string, and a boolean value signifying whether the
        output was truncated.
"""
if snip_to > snip_after:
raise ValueError("snip_to must be <= snip_after")
user = os.environ['USER']
host = socket.gethostname()
cwd = os.getcwd()
lmk = '<a href="github.com/ChrisCummins/phd">lmk</a>'
me = '<a href="http://chriscummins.cc">Chris Cummins</a>'
prompt_css = ";".join([
"font-family:'Courier New', monospace",
"font-weight:700",
"font-size:14px",
"padding-right:10px",
"color:#000",
"text-align:right",
])
command_css = ";".join([
"font-family:'Courier New', monospace",
"font-weight:700",
"font-size:14px",
"color:#000",
])
lineno_css = ";".join([
"font-family:'Courier New', monospace",
"font-size:14px",
"padding-right:10px",
"color:#666",
"text-align:right",
])
line_css = ";".join([
"font-family:'Courier New', monospace",
"font-size:14px",
"color:#000",
])
# metadata block
html = '<table>\n'
style = 'padding-right:15px;'
if date_started:
delta = humanize.naturaltime(datetime.now() - date_started)
html += (u' <tr><td style="{style}">Started</td>'
u'<td>{date_started} ({delta})</td></tr>\n'
.format(style=style, date_started=date_started, delta=delta))
if date_ended:
html += (u' <tr><td style="{style}">Completed</td>'
u'<td>{date_ended}</td></tr>\n'
.format(style=style, date_ended=date_ended))
if returncode is not None:
html += (u' <tr><td style="{style}">Return code</td>'
u'<td style="font-weight:700;">{returncode}</td></tr>\n'
.format(style=style, returncode=returncode))
html += (u' <tr><td style="{style}">Working directory</td>'
u'<td>{cwd}</td></tr>\n'
.format(style=style, cwd=cwd))
html += '</table>\n<hr style="margin-top:20px;"/>\n'
# output
html += '<table>\n'
# command line invocation
if command is not None:
command_html = cgi.escape(command)
html += u"""\
<tr style="line-height:1em;">
<td style="{prompt_css}">$</td>
<td style="{command_css}">{command_html}</td>
</tr>
""".format(prompt_css=prompt_css, command_css=command_css,
command_html=command_html)
# command output
lines = output.split('\n')
truncated = False
if len(lines) > snip_after:
truncated = True
# truncated report. First and last lines of output
line_nums = range(1, snip_to // 2 + 1)
for line, lineno in zip(lines[:snip_to // 2], line_nums):
line_html = cgi.escape(line)
html += u"""\
<tr style="line-height:1em;">
<td style="{lineno_css}">{lineno}</td>
<td style="{line_css}">{line_html}</td>
</tr>
""".format(lineno_css=lineno_css, lineno=lineno, line_css=line_css,
line_html=line_html)
    num_omitted = len(lines) - snip_to  # lines hidden by the truncation
html += "</table>"
html += "... ({num_omitted} lines snipped)".format(num_omitted=num_omitted)
html += "<table>\n"
line_nums = range(len(lines) - snip_to // 2 + 1, len(lines) + 1)
for line, lineno in zip(lines[-snip_to // 2:], line_nums):
line_html = cgi.escape(line)
html += u"""\
<tr style="line-height:1em;">
<td style="{lineno_css}">{lineno}</td>
<td style="{line_css}">{line_html}</td>
</tr>
""".format(lineno_css=lineno_css, lineno=lineno, line_css=line_css,
line_html=line_html)
else:
# full length report
for line, lineno in zip(lines, range(1, len(lines) + 1)):
try:
line = line.decode('utf-8')
except AttributeError: # str.decode() depends on Python version.
pass
line_html = cgi.escape(line)
html += u"""
<tr style="line-height:1em;">
<td style="{lineno_css}">{lineno}</td>
<td style="{line_css}">{line_html}</td>
</tr>
""".format(lineno_css=lineno_css, lineno=lineno, line_css=line_css,
line_html=line_html)
html += u'</table>\n'
# footer
html += u"""\
</table>
<hr style="margin-top:20px;"/>
<center style="color:#626262;">
{lmk} made with ♥ by {me}
</center>
""".format(lmk=lmk, me=me)
return html, truncated
def get_cfg_path():
"""
Get path to config file.
If config file not found, kills the process with E_CFG.
Returns
-------
str
Config path.
"""
cfg_path = os.path.expanduser(os.environ.get('LMK_CFG', DEFAULT_CFG_PATH))
if not os.path.exists(cfg_path) and cfg_path == DEFAULT_CFG_PATH:
create_default_cfg(cfg_path)
elif not os.path.exists(cfg_path):
print(
'{c.bold}{c.red}$LMK_CFG ({cfg_path}) not found{c.reset}'.format(
c=colors, cfg_path=cfg_path),
file=sys.stderr)
sys.exit(E_CFG)
return cfg_path
def check_connection(cfg=None):
if cfg is None:
cfg = load_cfg()
get_smtp_server(cfg).quit()
def build_message_subject(output, command=None, returncode=None, cfg=None,
date_started=None, date_ended=None):
"""
Build message subject line.
Returns
-------
str
Unicode message subject.
"""
user = os.environ['USER']
host = socket.gethostname()
if command is not None and returncode is not None:
happy_sad = u'🙈' if returncode else u'✔'
return u'{user}@{host} {happy_sad} $ {command}'.format(
user=user, host=host, happy_sad=happy_sad, command=command)
elif command is not None:
return u'{user}@{host} $ {command}'.format(
user=user, host=host, command=command)
elif date_started is not None:
delta = humanize.naturaltime(datetime.now() - date_started)
return u'{user}@{host} finished job started {delta}'.format(
user=user, host=host, delta=delta)
else:
return u'{user}@{host} finished job'.format(user=user, host=host)
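    # Illustrative subjects (user/host/command values are made up):
    #   failing `make test` run by alice on devbox -> 'alice@devbox 🙈 $ make test'
    #   the same command succeeding                -> 'alice@devbox ✔ $ make test'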
def let_me_know(output, command=None, returncode=None, cfg=None,
date_started=None, date_ended=None):
if cfg is None:
cfg = load_cfg()
subject = build_message_subject(
output=output, command=command, returncode=returncode,
date_started=date_started, date_ended=date_ended)
html, truncated = build_html_message_body(
output=output, command=command, returncode=returncode,
date_started=date_started, date_ended=date_ended)
if sys.version_info < (3, 0):
html = html.encode('utf-8')
msg = MIMEMultipart()
msg['From'] = cfg['messages']['from']
msg['Subject'] = subject
msg.attach(MIMEText(html, 'html'))
if truncated:
attachment = MIMEApplication(output, Name="output.txt")
attachment['Content-Disposition'] = 'attachment; filename="output.txt"'
msg.attach(attachment)
server = get_smtp_server(cfg)
for recipient in cfg['messages']['to'].split(','):
        del msg['To']  # Message.__setitem__ appends headers, so clear any previous value
        msg['To'] = recipient
send_email_smtp(cfg, server, msg)
server.quit()
def read_from_stdin():
cfg = load_cfg()
check_connection(cfg)
date_started = datetime.now()
out = []
for line in sys.stdin:
sys.stdout.write(line)
out.append(line)
date_ended = datetime.now()
output = ''.join(out).rstrip()
let_me_know(
output=output, cfg=cfg, date_started=date_started,
date_ended=date_ended)
def run_subprocess(command, only_errors=False):
cfg = load_cfg()
check_connection(cfg)
date_started = datetime.now()
out = []
process = subprocess.Popen(
command,
shell=True,
executable=cfg['exec']['shell'],
universal_newlines=True,
bufsize=1,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if sys.version_info >= (3, 0):
output_iter = process.stdout
else:
output_iter = iter(process.stdout.readline, b'')
with process.stdout:
for line in output_iter:
sys.stdout.write(line)
out.append(line)
process.wait()
date_ended = datetime.now()
output = ''.join(out).rstrip()
returncode = process.returncode
if returncode or not only_errors:
let_me_know(
output=output, command=command, returncode=returncode, cfg=cfg,
date_started=date_started, date_ended=date_ended)
return returncode
def main():
args = parse_args(sys.argv[1:])
try:
if args.command == '-':
# check that command line usage is correct
if args.only_errors:
print('{c.bold}{c.red}[lmk] --only-errors option cannot be '
'used with stdin{c.reset}'.format(c=colors))
sys.exit(1)
read_from_stdin()
else:
sys.exit(run_subprocess(args.command, only_errors=args.only_errors))
except KeyboardInterrupt:
print('{c.bold}{c.red}[lmk] aborted{c.reset}'.format(c=colors))
sys.exit(1)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
1d6b97e765f3a7f16caed9f39bd488474adf650e | b0dbd2e4dd83fe012cde29c8474bae5e33c23e2a | /harbor_client/model/robot_create_v1.py | 2f99d134e25272b617002f74eb94bb0b6fce1cd4 | [] | no_license | DevotionZhu/harbor-python-client-api | 0ba3999e5af126dbe97f0234c4a9601660a97dbb | f0cc6c453b488d5f456eff94000156182eb3a468 | refs/heads/master | 2023-04-17T22:06:06.024871 | 2021-04-11T22:20:28 | 2021-04-11T22:20:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,200 | py | """
Harbor API
These APIs provide services for manipulating Harbor project. # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from harbor_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from harbor_client.model.access import Access
globals()['Access'] = Access
class RobotCreateV1(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'name': (str,), # noqa: E501
'description': (str,), # noqa: E501
'expires_at': (int,), # noqa: E501
'access': ([Access],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'name': 'name', # noqa: E501
'description': 'description', # noqa: E501
'expires_at': 'expires_at', # noqa: E501
'access': 'access', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""RobotCreateV1 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
name (str): The name of robot account. [optional] # noqa: E501
description (str): The description of robot account. [optional] # noqa: E501
expires_at (int): The expiration time on or after which the JWT MUST NOT be accepted for processing.. [optional] # noqa: E501
access ([Access]): The permission of robot account. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
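# Illustrative construction (field values are made up; see the keyword
# arguments documented in __init__ above):
#   robot = RobotCreateV1(name="ci-robot", description="robot for CI pulls",
#                         expires_at=1700000000, access=[])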
| [
"[email protected]"
] | |
80acd454dc42c8718c69d967d6cb1d86e6ac4d21 | a9e81c87022fdde86d47a4ec1e74791da8aa0e30 | /tensorflow-learning/base/loss-function/03-sigmoid.py | 0f8dabd13b8e2db95a38496a8efb6bcc08f2cfd1 | [
"Apache-2.0"
] | permissive | ymli1997/deeplearning-notes | c5c6926431b7efc1c6823d85e3eb470f3c986494 | f2317d80cd998305814f988e5000241797205b63 | refs/heads/master | 2020-07-29T11:15:43.689307 | 2018-05-05T10:58:18 | 2018-05-05T10:58:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | # -*- coding:utf-8 -*-
import numpy as np
import tensorflow as tf
a = tf.constant([[1.0,2.0],[1.0,2.0],[1.0,2.0]])
res = tf.nn.sigmoid(a)
init_opt = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init_opt)
result = sess.run(res)
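    # Expected printout (element-wise sigmoid; sigmoid(1) ~ 0.7310586,
    # sigmoid(2) ~ 0.8807971), identical for each of the three rows.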
    print(result)
 | [
"[email protected]"
] | |
168f2b6ee6d80686195d7b2050a91cb4bf2b82ea | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Sklearn_scipy_numpy/source/scipy/sparse/linalg/_onenormest.py | e34652a68df67fc873eecf2ed42327786c624f1e | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 15,138 | py | """Sparse block 1-norm estimator.
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from scipy.sparse.linalg import aslinearoperator
__all__ = ['onenormest']
def onenormest(A, t=2, itmax=5, compute_v=False, compute_w=False):
"""
Compute a lower bound of the 1-norm of a sparse matrix.
Parameters
----------
A : ndarray or other linear operator
A linear operator that can be transposed and that can
produce matrix products.
t : int, optional
A positive parameter controlling the tradeoff between
accuracy versus time and memory usage.
Larger values take longer and use more memory
but give more accurate output.
itmax : int, optional
Use at most this many iterations.
compute_v : bool, optional
Request a norm-maximizing linear operator input vector if True.
compute_w : bool, optional
Request a norm-maximizing linear operator output vector if True.
Returns
-------
est : float
An underestimate of the 1-norm of the sparse matrix.
v : ndarray, optional
The vector such that ||Av||_1 == est*||v||_1.
It can be thought of as an input to the linear operator
that gives an output with particularly large norm.
w : ndarray, optional
The vector Av which has relatively large 1-norm.
It can be thought of as an output of the linear operator
that is relatively large in norm compared to the input.
Notes
-----
This is algorithm 2.4 of [1].
In [2] it is described as follows.
"This algorithm typically requires the evaluation of
about 4t matrix-vector products and almost invariably
produces a norm estimate (which is, in fact, a lower
bound on the norm) correct to within a factor 3."
.. versionadded:: 0.13.0
References
----------
.. [1] Nicholas J. Higham and Francoise Tisseur (2000),
"A Block Algorithm for Matrix 1-Norm Estimation,
with an Application to 1-Norm Pseudospectra."
SIAM J. Matrix Anal. Appl. Vol. 21, No. 4, pp. 1185-1201.
.. [2] Awad H. Al-Mohy and Nicholas J. Higham (2009),
"A new scaling and squaring algorithm for the matrix exponential."
SIAM J. Matrix Anal. Appl. Vol. 31, No. 3, pp. 970-989.
"""
# Check the input.
A = aslinearoperator(A)
if A.shape[0] != A.shape[1]:
raise ValueError('expected the operator to act like a square matrix')
# If the operator size is small compared to t,
# then it is easier to compute the exact norm.
# Otherwise estimate the norm.
n = A.shape[1]
if t >= n:
A_explicit = np.asarray(aslinearoperator(A).matmat(np.identity(n)))
if A_explicit.shape != (n, n):
raise Exception('internal error: ',
'unexpected shape ' + str(A_explicit.shape))
col_abs_sums = abs(A_explicit).sum(axis=0)
if col_abs_sums.shape != (n, ):
raise Exception('internal error: ',
'unexpected shape ' + str(col_abs_sums.shape))
argmax_j = np.argmax(col_abs_sums)
v = elementary_vector(n, argmax_j)
w = A_explicit[:, argmax_j]
est = col_abs_sums[argmax_j]
else:
est, v, w, nmults, nresamples = _onenormest_core(A, A.H, t, itmax)
# Report the norm estimate along with some certificates of the estimate.
if compute_v or compute_w:
result = (est,)
if compute_v:
result += (v,)
if compute_w:
result += (w,)
return result
else:
return est
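# Doctest-style sketch (not part of the original source).  With t >= n the
# dense exact path above is taken, so the result is deterministic:
#
# >>> import numpy as np
# >>> A = np.array([[1., 0., 0.], [5., 8., 2.], [0., -1., 0.]])
# >>> onenormest(A, t=3)
# 9.0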
def _blocked_elementwise(func):
"""
Decorator for an elementwise function, to apply it blockwise along
first dimension, to avoid excessive memory usage in temporaries.
"""
block_size = 2**20
def wrapper(x):
if x.shape[0] < block_size:
return func(x)
else:
y0 = func(x[:block_size])
y = np.zeros((x.shape[0],) + y0.shape[1:], dtype=y0.dtype)
y[:block_size] = y0
del y0
for j in range(block_size, x.shape[0], block_size):
y[j:j+block_size] = func(x[j:j+block_size])
return y
return wrapper
@_blocked_elementwise
def sign_round_up(X):
"""
This should do the right thing for both real and complex matrices.
From Higham and Tisseur:
"Everything in this section remains valid for complex matrices
provided that sign(A) is redefined as the matrix (aij / |aij|)
(and sign(0) = 1) transposes are replaced by conjugate transposes."
"""
Y = X.copy()
Y[Y == 0] = 1
Y /= np.abs(Y)
return Y
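# Behaviour sketch (not in the original source): zeros map to +1, everything
# else to its sign, e.g.
#   sign_round_up(np.array([[0., -2.], [3., 0.]]))
#   -> array([[ 1., -1.],
#             [ 1.,  1.]])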
@_blocked_elementwise
def _max_abs_axis1(X):
return np.max(np.abs(X), axis=1)
def _sum_abs_axis0(X):
block_size = 2**20
r = None
for j in range(0, X.shape[0], block_size):
y = np.sum(np.abs(X[j:j+block_size]), axis=0)
if r is None:
r = y
else:
r += y
return r
def elementary_vector(n, i):
v = np.zeros(n, dtype=float)
v[i] = 1
return v
def vectors_are_parallel(v, w):
# Columns are considered parallel when they are equal or negative.
# Entries are required to be in {-1, 1},
# which guarantees that the magnitudes of the vectors are identical.
if v.ndim != 1 or v.shape != w.shape:
raise ValueError('expected conformant vectors with entries in {-1,1}')
n = v.shape[0]
return np.dot(v, w) == n
def every_col_of_X_is_parallel_to_a_col_of_Y(X, Y):
for v in X.T:
if not any(vectors_are_parallel(v, w) for w in Y.T):
return False
return True
def column_needs_resampling(i, X, Y=None):
# column i of X needs resampling if either
# it is parallel to a previous column of X or
# it is parallel to a column of Y
n, t = X.shape
v = X[:, i]
if any(vectors_are_parallel(v, X[:, j]) for j in range(i)):
return True
if Y is not None:
if any(vectors_are_parallel(v, w) for w in Y.T):
return True
return False
def resample_column(i, X):
X[:, i] = np.random.randint(0, 2, size=X.shape[0])*2 - 1
def less_than_or_close(a, b):
return np.allclose(a, b) or (a < b)
def _algorithm_2_2(A, AT, t):
"""
This is Algorithm 2.2.
Parameters
----------
A : ndarray or other linear operator
A linear operator that can produce matrix products.
AT : ndarray or other linear operator
The transpose of A.
t : int, optional
A positive parameter controlling the tradeoff between
accuracy versus time and memory usage.
Returns
-------
g : sequence
A non-negative decreasing vector
such that g[j] is a lower bound for the 1-norm
of the column of A of jth largest 1-norm.
The first entry of this vector is therefore a lower bound
on the 1-norm of the linear operator A.
This sequence has length t.
ind : sequence
The ith entry of ind is the index of the column A whose 1-norm
is given by g[i].
This sequence of indices has length t, and its entries are
chosen from range(n), possibly with repetition,
where n is the order of the operator A.
Notes
-----
This algorithm is mainly for testing.
It uses the 'ind' array in a way that is similar to
its usage in algorithm 2.4. This algorithm 2.2 may be easier to test,
so it gives a chance of uncovering bugs related to indexing
which could have propagated less noticeably to algorithm 2.4.
"""
A_linear_operator = aslinearoperator(A)
AT_linear_operator = aslinearoperator(AT)
n = A_linear_operator.shape[0]
# Initialize the X block with columns of unit 1-norm.
X = np.ones((n, t))
if t > 1:
X[:, 1:] = np.random.randint(0, 2, size=(n, t-1))*2 - 1
X /= float(n)
# Iteratively improve the lower bounds.
# Track extra things, to assert invariants for debugging.
g_prev = None
h_prev = None
k = 1
ind = range(t)
while True:
Y = np.asarray(A_linear_operator.matmat(X))
g = _sum_abs_axis0(Y)
best_j = np.argmax(g)
g.sort()
g = g[::-1]
S = sign_round_up(Y)
Z = np.asarray(AT_linear_operator.matmat(S))
h = _max_abs_axis1(Z)
# If this algorithm runs for fewer than two iterations,
# then its return values do not have the properties indicated
# in the description of the algorithm.
# In particular, the entries of g are not 1-norms of any
# column of A until the second iteration.
# Therefore we will require the algorithm to run for at least
# two iterations, even though this requirement is not stated
# in the description of the algorithm.
if k >= 2:
if less_than_or_close(max(h), np.dot(Z[:, best_j], X[:, best_j])):
break
ind = np.argsort(h)[::-1][:t]
h = h[ind]
for j in range(t):
X[:, j] = elementary_vector(n, ind[j])
# Check invariant (2.2).
if k >= 2:
if not less_than_or_close(g_prev[0], h_prev[0]):
raise Exception('invariant (2.2) is violated')
if not less_than_or_close(h_prev[0], g[0]):
raise Exception('invariant (2.2) is violated')
# Check invariant (2.3).
if k >= 3:
for j in range(t):
if not less_than_or_close(g[j], g_prev[j]):
raise Exception('invariant (2.3) is violated')
# Update for the next iteration.
g_prev = g
h_prev = h
k += 1
# Return the lower bounds and the corresponding column indices.
return g, ind
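
# Sketch of driving Algorithm 2.2 directly (added for exposition; the
# docstring above notes it exists mainly for testing). A dense ndarray works
# because aslinearoperator accepts it, so A.T is a valid transpose operator.
def _demo_algorithm_2_2():
    rng = np.random.RandomState(0)
    A = rng.randn(50, 50)
    g, ind = _algorithm_2_2(A, A.T, t=3)
    # g[0] is a lower bound on the 1-norm of A (max absolute column sum).
    assert g[0] <= np.abs(A).sum(axis=0).max() + 1e-12
    return g, ind
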
def _onenormest_core(A, AT, t, itmax):
"""
Compute a lower bound of the 1-norm of a sparse matrix.
Parameters
----------
A : ndarray or other linear operator
A linear operator that can produce matrix products.
AT : ndarray or other linear operator
The transpose of A.
t : int, optional
A positive parameter controlling the tradeoff between
accuracy versus time and memory usage.
itmax : int, optional
Use at most this many iterations.
Returns
-------
est : float
An underestimate of the 1-norm of the sparse matrix.
v : ndarray, optional
The vector such that ||Av||_1 == est*||v||_1.
It can be thought of as an input to the linear operator
that gives an output with particularly large norm.
w : ndarray, optional
The vector Av which has relatively large 1-norm.
It can be thought of as an output of the linear operator
that is relatively large in norm compared to the input.
nmults : int, optional
The number of matrix products that were computed.
nresamples : int, optional
The number of times a parallel column was observed,
necessitating a re-randomization of the column.
Notes
-----
This is algorithm 2.4.
"""
# This function is a more or less direct translation
# of Algorithm 2.4 from the Higham and Tisseur (2000) paper.
A_linear_operator = aslinearoperator(A)
AT_linear_operator = aslinearoperator(AT)
if itmax < 2:
raise ValueError('at least two iterations are required')
if t < 1:
raise ValueError('at least one column is required')
n = A.shape[0]
if t >= n:
raise ValueError('t should be smaller than the order of A')
# Track the number of big*small matrix multiplications
# and the number of resamplings.
nmults = 0
nresamples = 0
# "We now explain our choice of starting matrix. We take the first
# column of X to be the vector of 1s [...] This has the advantage that
# for a matrix with nonnegative elements the algorithm converges
# with an exact estimate on the second iteration, and such matrices
# arise in applications [...]"
X = np.ones((n, t), dtype=float)
# "The remaining columns are chosen as rand{-1,1},
# with a check for and correction of parallel columns,
# exactly as for S in the body of the algorithm."
if t > 1:
for i in range(1, t):
# These are technically initial samples, not resamples,
# so the resampling count is not incremented.
resample_column(i, X)
for i in range(t):
while column_needs_resampling(i, X):
resample_column(i, X)
nresamples += 1
# "Choose starting matrix X with columns of unit 1-norm."
X /= float(n)
# "indices of used unit vectors e_j"
ind_hist = np.zeros(0, dtype=np.intp)
est_old = 0
S = np.zeros((n, t), dtype=float)
k = 1
ind = None
while True:
Y = np.asarray(A_linear_operator.matmat(X))
nmults += 1
mags = _sum_abs_axis0(Y)
est = np.max(mags)
best_j = np.argmax(mags)
if est > est_old or k == 2:
if k >= 2:
ind_best = ind[best_j]
w = Y[:, best_j]
# (1)
if k >= 2 and est <= est_old:
est = est_old
break
est_old = est
S_old = S
if k > itmax:
break
S = sign_round_up(Y)
del Y
# (2)
if every_col_of_X_is_parallel_to_a_col_of_Y(S, S_old):
break
if t > 1:
# "Ensure that no column of S is parallel to another column of S
# or to a column of S_old by replacing columns of S by rand{-1,1}."
for i in range(t):
while column_needs_resampling(i, S, S_old):
resample_column(i, S)
nresamples += 1
del S_old
# (3)
Z = np.asarray(AT_linear_operator.matmat(S))
nmults += 1
h = _max_abs_axis1(Z)
del Z
# (4)
if k >= 2 and max(h) == h[ind_best]:
break
# "Sort h so that h_first >= ... >= h_last
# and re-order ind correspondingly."
#
# Later on, we will need at most t+len(ind_hist) largest
# entries, so drop the rest
ind = np.argsort(h)[::-1][:t+len(ind_hist)].copy()
del h
if t > 1:
# (5)
# Break if the most promising t vectors have been visited already.
if np.in1d(ind[:t], ind_hist).all():
break
# Put the most promising unvisited vectors at the front of the list
# and put the visited vectors at the end of the list.
# Preserve the order of the indices induced by the ordering of h.
seen = np.in1d(ind, ind_hist)
ind = np.concatenate((ind[~seen], ind[seen]))
for j in range(t):
X[:, j] = elementary_vector(n, ind[j])
new_ind = ind[:t][~np.in1d(ind[:t], ind_hist)]
ind_hist = np.concatenate((ind_hist, new_ind))
k += 1
v = elementary_vector(n, ind_best)
return est, v, w, nmults, nresamples
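
# Sketch of the core routine's outputs (added for exposition): est is a lower
# bound on the 1-norm, v the chosen elementary input, w the corresponding
# large-norm output, plus the matmul/resampling counters described above.
def _demo_onenormest_core():
    rng = np.random.RandomState(1)
    A = rng.randn(60, 60)
    est, v, w, nmults, nresamples = _onenormest_core(A, A.T, t=2, itmax=5)
    assert est <= np.abs(A).sum(axis=0).max() + 1e-12
    return est, nmults, nresamples
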

# -----------------------------------------------------------------------------
# /examples/dataloader_test.py  (repo: jiniaoxu/bi-lstm-crf, Apache-2.0)
# -----------------------------------------------------------------------------
from dltokenizer.data_loader import DataLoader
if __name__ == '__main__':
data_loader = DataLoader("../data/src_dict.json", "../data/tgt_dict.json", batch_size=64)
generator = data_loader.generator("../data/2014")
    for _ in range(1):  # peek at a single batch from the generator
sent, chunk = next(generator)
assert len(sent) == len(chunk)
print(sent.shape)
print(chunk.shape)

# -----------------------------------------------------------------------------
# /actions/migrations/0001_initial.py  (repo: 007vict/bookmarksbyexample)
# -----------------------------------------------------------------------------
# Generated by Django 2.2rc1 on 2019-03-22 15:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Actions',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('verb', models.CharField(max_length=255)),
('target_id', models.PositiveIntegerField(blank=True, db_index=True, null=True)),
('created', models.DateTimeField(auto_now_add=True, db_index=True)),
('target_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='target_obj', to='contenttypes.ContentType')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='actions', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-created',),
},
),
]
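
# For reference, a sketch of the model this initial migration corresponds to,
# reconstructed from the operations above (the project's actual models.py may
# differ in minor details):
#
#     from django.conf import settings
#     from django.db import models
#
#     class Actions(models.Model):
#         user = models.ForeignKey(settings.AUTH_USER_MODEL,
#                                  related_name='actions',
#                                  on_delete=models.CASCADE)
#         verb = models.CharField(max_length=255)
#         target_ct = models.ForeignKey('contenttypes.ContentType',
#                                       blank=True, null=True,
#                                       related_name='target_obj',
#                                       on_delete=models.CASCADE)
#         target_id = models.PositiveIntegerField(blank=True, null=True,
#                                                 db_index=True)
#         created = models.DateTimeField(auto_now_add=True, db_index=True)
#
#         class Meta:
#             ordering = ('-created',)
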

# -----------------------------------------------------------------------------
# /dashboard/urls.py  (repo: ShahadatShuvo/Django_portfolio)
# -----------------------------------------------------------------------------
from django.urls import path
from django.contrib.auth.views import LogoutView
from .views import (
dashboard,
profile,
profile_edit,
messages,
messages_api,
projects,
projects_api,
LoginView,
EducationView,
)
app_name = 'dashboard'
urlpatterns = [
path('', dashboard, name='dashboard'),
path('profile/', profile, name='profile'),
path('profile/edit/', profile_edit, name='profile_edit'),
path('messages', messages, name='messages'),
path('messages/api/', messages_api, name='messages_api'),
path('projects', projects, name='projects'),
path('projects/api/', projects_api, name='projects_api'),
path('education/', EducationView.as_view(), name='education'),
# path('education/', EducationView.as_view(), name='one_education'),
path('login/', LoginView.as_view(), name='login'),
path('logout/', LogoutView.as_view(), name='logout'),
]

# -----------------------------------------------------------------------------
# /LinkedList/Leetcode 2. Add Two Numbers.py  (repo: sriharsha004/LeetCode, MIT)
# -----------------------------------------------------------------------------
# Definition for a singly-linked list, normally supplied by the LeetCode
# runner; included here so the snippet runs standalone.
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
dummy = ListNode(0)
cur = dummy
carry = 0
while l1 or l2:
sums = 0
if l1:
sums += l1.val
l1 = l1.next
if l2:
sums += l2.val
l2 = l2.next
sums += carry
cur.next = ListNode(sums%10)
cur = cur.next
carry = sums//10
if carry:
cur.next = ListNode(carry)
        return dummy.next
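
# Usage sketch (added for exposition): digits are stored in reverse order,
# so 342 + 465 = 807 becomes [2, 4, 3] + [5, 6, 4] -> [7, 0, 8]. The helper
# below is illustrative, not part of the original solution.
def _from_list(digits):
    dummy = ListNode(0)
    cur = dummy
    for d in digits:
        cur.next = ListNode(d)
        cur = cur.next
    return dummy.next

if __name__ == '__main__':
    node = Solution().addTwoNumbers(_from_list([2, 4, 3]), _from_list([5, 6, 4]))
    digits = []
    while node:
        digits.append(node.val)
        node = node.next
    print(digits)  # [7, 0, 8]
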
"[email protected]"
] | |
f57f7a2a1489715fa67b8cc3ca5912999c0b2b63 | 0f41b3564b08045f56b8ee4743ef8834b88a274e | /otree/management/commands/resetdb.py | b7de7fb73d101b0efdeaccd536b8a8f86fc9d5d5 | [
"MIT"
] | permissive | bjgoode/otree-core | b04911f00671ef6bbfeeb184359133f85ec221cb | ab6bbcbdb53cb1d74b205f04f16eb40ea099a45d | refs/heads/master | 2021-05-11T18:31:00.398740 | 2018-01-17T23:28:22 | 2018-01-17T23:28:22 | 117,827,105 | 1 | 0 | null | 2018-01-17T11:27:21 | 2018-01-17T11:27:21 | null | UTF-8 | Python | false | false | 6,623 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# IMPORTS
# =============================================================================
import logging
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.db import connections, transaction
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.autodetector import MigrationAutodetector
import six
from unittest import mock
# =============================================================================
# LOGGER
# =============================================================================
logger = logging.getLogger('otree')
def drop_tables_command(db_engine):
if 'sqlite3' in db_engine:
return 'DROP TABLE {table};'
if 'oracle' in db_engine:
return 'DROP TABLE "{table}" CASCADE CONSTRAINTS;'
if 'postgres' in db_engine:
return 'DROP TABLE "{table}" CASCADE;'
if 'mysql' in db_engine:
return (
'SET FOREIGN_KEY_CHECKS = 0;'
'DROP TABLE {table} CASCADE;'
'SET FOREIGN_KEY_CHECKS = 1;')
raise ValueError(
'resetdb command does not recognize DB engine "{}"'.format(db_engine))
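
# Illustration of how the templates above are expanded ('otree_session' is a
# made-up table name; real names come from _get_tables() below):
def _demo_drop_statement():
    stmt = drop_tables_command('django.db.backends.postgresql')
    return stmt.format(table='otree_session')  # DROP TABLE "otree_session" CASCADE;
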
class Command(BaseCommand):
help = (
"Resets your development database to a fresh state. "
"All data will be deleted.")
def add_arguments(self, parser):
ahelp = (
'Tells the resetdb command to NOT prompt the user for '
'input of any kind.')
parser.add_argument(
'--noinput', action='store_false', dest='interactive',
default=True, help=ahelp)
def _confirm(self):
self.stdout.write(
"This will delete and recreate your database. ")
answer = six.moves.input("Proceed? (y or n): ")
if answer:
return answer[0].lower() == 'y'
return False
def _drop_table_stmt(self, dbconf):
engine = dbconf["ENGINE"]
return drop_tables_command(engine)
def _get_tables(self, db):
tables = []
out = six.StringIO()
call_command('inspectdb', database=db, no_color=True, stdout=out)
for line in out.getvalue().splitlines():
line = line.strip()
if line.startswith("db_table = '"):
tablename = line.replace(
"db_table = '", "", 1).replace("'", "").strip()
tables.append(tablename)
return tuple(reversed(tables))
def _drop_tables(self, tables, db, dt_stmt):
with connections[db].cursor() as cursor:
for table in tables:
stmt = dt_stmt.format(table=table)
cursor.execute(stmt)
def handle(self, **options):
if options.pop("interactive") and not self._confirm():
self.stdout.write('Canceled.')
return
for db, dbconf in six.iteritems(settings.DATABASES):
db_engine = dbconf['ENGINE']
if 'postgresql' in db_engine.lower():
db_engine = 'PostgreSQL'
elif 'sqlite' in db_engine.lower():
db_engine = 'SQLite'
elif 'mysql' in db_engine.lower():
db_engine = 'MySQL'
logger.info("Database engine: {}".format(db_engine))
dt_stmt = self._drop_table_stmt(dbconf)
logger.info("Retrieving Existing Tables...")
tables = self._get_tables(db)
logger.info("Dropping Tables...")
# use a transaction to prevent the DB from getting in an erroneous
# state, which can result in a different error message when resetdb
# is run again, making the original error hard to trace.
with transaction.atomic(
using=connections[db].alias,
savepoint=connections[db].features.can_rollback_ddl
):
self._drop_tables(tables, db, dt_stmt)
logger.info("Creating Database '{}'...".format(db))
self.syncdb(db, options)
# second call to 'migrate', simply to
# fake migrations so that runserver doesn't complain
# about unapplied migrations
# note: In 1.9, will need to pass --run-syncdb flag
call_command(
'migrate', database=db, fake=True,
interactive=False, **options)
# mention the word 'columns' here, so people make the connection
# between columns and resetdb, so that when they get a 'no such column'
# error, they know how to fix it.
# (An alternative is to generically catch "no such column" errors,
# but I recall that this was difficult - because there were many
# code paths or exception classes. Could re-investigate.)
logger.info('Created new tables and columns.')
@mock.patch.object(
MigrationLoader, 'migrations_module',
return_value='migrations nonexistent hack')
@mock.patch.object(
MigrationAutodetector, 'changes', return_value=False)
def syncdb(self, db, options, *mocked_args):
'''
patch .migrations_module() to return a nonexistent module,
instead of app_name.migrations.
because this module is not found,
migration system will assume the app has no migrations,
and run syncdb instead.
Hack so that migrate can't find migrations files
this way, syncdb will be run instead of migrate.
This is preferable because
users who are used to running "otree resetdb"
may not know how to run 'otree makemigrations'.
This means their migration files will not be up to date,
ergo migrate will create tables with an outdated schema.
after the majority of oTree users have this new version
of resetdb, we can add a migrations/ folder to each app
in the sample games and the app template,
and deprecate resetdb
and instead use "otree makemigrations" and "otree migrate".
also, syncdb is faster than migrate, and there is no
advantage to migrate since it's being run on a newly
created DB anyway.
also patch MigrationAutodetector.changes() to suppress the warning
"Your models have changes that are not yet reflected in a migration..."
'''
call_command(
'migrate', database=db,
interactive=False, **options)

# -----------------------------------------------------------------------------
# /PythonExam/北京理工大学Python语言程序设计-Book/Chapter6/e10.3CalThreeKingdoms.py
# (repo: shunz/Python-100-Days_Practice)
# -----------------------------------------------------------------------------
"""Character appearance statistics for "Romance of the Three Kingdoms".

- Input
    - Find the full text of the novel online and save it as threekingdoms.txt.
- Processing
    1. Split the Chinese text into words.
        - Segment the text with jieba.
        - Exclude words that are not character names, using a set as the
          exclusion vocabulary.
        - Merge different names for the same person, e.g. 刘备 = 刘玄德 =
          玄德 = 刘皇叔 = 主公.
    2. Count each word.
        - Store each word in a variable.
        - Use a dict, counts = {}.
        - counts[word] = counts.get(word, 0) + 1  # a new word enters the
          dict with count 1; an existing word is incremented.
    3. Sort the counts from high to low.
        - A dict has no order, so convert it to a list and sort with sort()
          plus a lambda keyed on each word's count.
- Output
    - Print the top 10 most frequent words, formatted.
"""
import jieba
# Common segmenter outputs that are not character names (titles, connectives,
# place names), used to filter the counts.
excludes = {'却说','将军','二人','不可','荆州','不能','商议','如此','如何','军士','左右','军马','引兵','次日',\
            '大喜','天下','东吴','于是'}
txt = open('threekingdoms.txt', 'r').read().lower()
words = jieba.lcut(txt)
counts = {}
for w in words:
if w not in excludes and len(w) != 1:
if w in ['诸葛亮', '孔明曰']:
w = '孔明'
elif w in ['关公', '云长']:
w = '关羽'
elif w in ['玄德', '玄德曰']:
w = '刘备'
elif w in ['孟德', '丞相']:
w = '曹操'
counts[w] = counts.get(w, 0) + 1
items = list(counts.items())
items.sort(key=lambda x: x[1], reverse=True)
for i in range(10):  # top 10, as the module docstring describes
word, count = items[i]
print(f'{word:<10}{count:>5}')

# -----------------------------------------------------------------------------
# /saleor/graphql/translations/mutations/product_translate.py
# (repo: jonserna/saleor, BSD-3-Clause)
# -----------------------------------------------------------------------------
import graphene
from ....core.tracing import traced_atomic_transaction
from ....permission.enums import SitePermissions
from ....product import models as product_models
from ...channel import ChannelContext
from ...core import ResolveInfo
from ...core.enums import LanguageCodeEnum
from ...core.types import TranslationError
from ...plugins.dataloaders import get_plugin_manager_promise
from ...product.types import Product
from .utils import BaseTranslateMutation, TranslationInput
class ProductTranslate(BaseTranslateMutation):
class Arguments:
id = graphene.ID(
required=True,
description="Product ID or ProductTranslatableContent ID.",
)
language_code = graphene.Argument(
LanguageCodeEnum, required=True, description="Translation language code."
)
input = TranslationInput(required=True)
class Meta:
description = "Creates/updates translations for a product."
model = product_models.Product
object_type = Product
error_type_class = TranslationError
error_type_field = "translation_errors"
permissions = (SitePermissions.MANAGE_TRANSLATIONS,)
@classmethod
def perform_mutation( # type: ignore[override]
cls, _root, info: ResolveInfo, /, *, id, input, language_code
):
node_id = cls.clean_node_id(id)[0]
instance = cls.get_node_or_error(info, node_id, only_type=Product)
cls.validate_input(input)
manager = get_plugin_manager_promise(info.context).get()
with traced_atomic_transaction():
translation, created = instance.translations.update_or_create(
language_code=language_code, defaults=input
)
product = ChannelContext(node=instance, channel_slug=None)
if created:
cls.call_event(manager.translation_created, translation)
else:
cls.call_event(manager.translation_updated, translation)
return cls(**{cls._meta.return_field_name: product})
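
# Illustrative GraphQL call for this mutation (a sketch only: the field name
# and input fields are inferred from this class and saleor conventions, and
# the ID is a placeholder):
#
#     mutation {
#         productTranslate(id: "UHJvZHVjdDox", languageCode: EN,
#                          input: {name: "Translated name"}) {
#             translationErrors { field message }
#         }
#     }
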

# -----------------------------------------------------------------------------
# /src/pretix/plugins/statistics/__init__.py
# (repo: snadal/pretix, Apache-2.0/BSD-3-Clause)
# -----------------------------------------------------------------------------
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from pretix import __version__ as version
class StatisticsApp(AppConfig):
name = 'pretix.plugins.statistics'
verbose_name = _("Statistics")
class PretixPluginMeta:
name = _("Statistics")
author = _("the pretix team")
version = version
category = 'FEATURE'
description = _("This plugin shows you various statistics.")
def ready(self):
from . import signals # NOQA
default_app_config = 'pretix.plugins.statistics.StatisticsApp'

# -----------------------------------------------------------------------------
# /files/home/gcloud/google-cloud-sdk/lib/surface/container/builds/create.py
# (repo: vo0doO/com.termux, Apache-2.0)
# -----------------------------------------------------------------------------
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create build command."""
from googlecloudsdk.calliope import base
# Importing the beta version of this command to reduce repetition.
from surface.container.builds import submit
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Create(submit.Submit):
"""Create a build using the Google Container Builder service."""
| [
"[email protected]"
] | |
eacc4ec788812a02d38d19d23aea55d3f053a2dc | b3699724907850fd26cbce4509fec83a33b89760 | /python/ray/autoscaler/_private/event_summarizer.py | d877bf424459d75777a728931f35e79993c01ecb | [
"Apache-2.0",
"MIT"
] | permissive | BonsaiAI/ray | 5e2f26a81d865a795261d11f9182aca7f07c7b97 | 941d30f082fe879ea30618af14327c25b5a21a74 | refs/heads/master | 2023-06-12T05:15:29.370188 | 2021-05-06T07:03:53 | 2021-05-06T07:03:53 | 233,708,687 | 3 | 5 | Apache-2.0 | 2023-05-27T08:06:37 | 2020-01-13T22:41:47 | Python | UTF-8 | Python | false | false | 1,430 | py | from typing import Any, Callable, Dict, List
class EventSummarizer:
"""Utility that aggregates related log messages to reduce log spam."""
def __init__(self):
self.events_by_key: Dict[str, int] = {}
def add(self, template: str, *, quantity: Any,
aggregate: Callable[[Any, Any], Any]) -> None:
"""Add a log message, which will be combined by template.
Args:
template (str): Format string with one placeholder for quantity.
quantity (Any): Quantity to aggregate.
aggregate (func): Aggregation function used to combine the
quantities. The result is inserted into the template to
produce the final log message.
"""
# Enforce proper sentence structure.
if not template.endswith("."):
template += "."
if template in self.events_by_key:
self.events_by_key[template] = aggregate(
self.events_by_key[template], quantity)
else:
self.events_by_key[template] = quantity
def summary(self) -> List[str]:
"""Generate the aggregated log summary of all added events."""
out = []
for template, quantity in self.events_by_key.items():
out.append(template.format(quantity))
return out
def clear(self) -> None:
"""Clear the events added."""
self.events_by_key.clear()
| [
"[email protected]"
] | |
06314f3ca3db5a2d02cba00204b892ee97077b78 | aafba3346120db47cf87ba67dee21848576c337f | /beet/cmds/plotnft_funcs.py | 70271f1226437a44e86815952b6492edb4905af0 | [
"Apache-2.0"
] | permissive | beetseeds/beet-blockchain | 9f7fa9e221364bb865a8b9f60455afc82b4a022b | e5d93f1f9041c48dd0c38416d845c8675bf22738 | refs/heads/main | 2023-07-14T21:30:18.089664 | 2021-09-10T01:40:00 | 2021-09-10T01:40:00 | 401,708,903 | 5 | 3 | Apache-2.0 | 2021-09-05T09:26:51 | 2021-08-31T13:14:50 | Python | UTF-8 | Python | false | false | 16,252 | py | from collections import Counter
import aiohttp
import asyncio
import functools
import json
import time
from pprint import pprint
from typing import List, Dict, Optional, Callable
from beet.cmds.wallet_funcs import print_balance, wallet_coin_unit
from beet.pools.pool_wallet_info import PoolWalletInfo, PoolSingletonState
from beet.protocols.pool_protocol import POOL_PROTOCOL_VERSION
from beet.rpc.farmer_rpc_client import FarmerRpcClient
from beet.rpc.wallet_rpc_client import WalletRpcClient
from beet.types.blockchain_format.sized_bytes import bytes32
from beet.server.server import ssl_context_for_root
from beet.ssl.create_ssl import get_mozilla_ca_crt
from beet.util.bech32m import encode_puzzle_hash
from beet.util.byte_types import hexstr_to_bytes
from beet.util.config import load_config
from beet.util.default_root import DEFAULT_ROOT_PATH
from beet.util.ints import uint16, uint32
from beet.wallet.transaction_record import TransactionRecord
from beet.wallet.util.wallet_types import WalletType
async def create_pool_args(pool_url: str) -> Dict:
try:
async with aiohttp.ClientSession() as session:
async with session.get(f"{pool_url}/pool_info", ssl=ssl_context_for_root(get_mozilla_ca_crt())) as response:
if response.ok:
json_dict = json.loads(await response.text())
else:
raise ValueError(f"Response from {pool_url} not OK: {response.status}")
except Exception as e:
raise ValueError(f"Error connecting to pool {pool_url}: {e}")
if json_dict["relative_lock_height"] > 1000:
raise ValueError("Relative lock height too high for this pool, cannot join")
if json_dict["protocol_version"] != POOL_PROTOCOL_VERSION:
raise ValueError(f"Incorrect version: {json_dict['protocol_version']}, should be {POOL_PROTOCOL_VERSION}")
header_msg = f"\n---- Pool parameters fetched from {pool_url} ----"
print(header_msg)
pprint(json_dict)
print("-" * len(header_msg))
return json_dict
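
# Shape sketch of the /pool_info response consumed above, showing only the
# fields this module actually reads (real pools return additional fields,
# and the values here are placeholders):
#
#     {
#         "protocol_version": 1,
#         "relative_lock_height": 100,
#         "target_puzzle_hash": "<32-byte hex string>"
#     }
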
async def create(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
state = args["state"]
prompt = not args.get("yes", False)
# Could use initial_pool_state_from_dict to simplify
if state == "SELF_POOLING":
pool_url: Optional[str] = None
relative_lock_height = uint32(0)
target_puzzle_hash = None # wallet will fill this in
elif state == "FARMING_TO_POOL":
config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
enforce_https = config["full_node"]["selected_network"] == "mainnet"
pool_url = str(args["pool_url"])
if enforce_https and not pool_url.startswith("https://"):
print(f"Pool URLs must be HTTPS on mainnet {pool_url}. Aborting.")
return
json_dict = await create_pool_args(pool_url)
relative_lock_height = json_dict["relative_lock_height"]
target_puzzle_hash = hexstr_to_bytes(json_dict["target_puzzle_hash"])
else:
raise ValueError("Plot NFT must be created in SELF_POOLING or FARMING_TO_POOL state.")
pool_msg = f" and join pool: {pool_url}" if pool_url else ""
print(f"Will create a plot NFT{pool_msg}.")
if prompt:
user_input: str = input("Confirm [n]/y: ")
else:
user_input = "yes"
if user_input.lower() == "y" or user_input.lower() == "yes":
try:
tx_record: TransactionRecord = await wallet_client.create_new_pool_wallet(
target_puzzle_hash,
pool_url,
relative_lock_height,
"localhost:5000",
"new",
state,
)
start = time.time()
while time.time() - start < 10:
await asyncio.sleep(0.1)
tx = await wallet_client.get_transaction(str(1), tx_record.name)
if len(tx.sent_to) > 0:
print(f"Transaction submitted to nodes: {tx.sent_to}")
print(f"Do beet wallet get_transaction -f {fingerprint} -tx 0x{tx_record.name} to get status")
return None
except Exception as e:
print(f"Error creating plot NFT: {e}")
return
print("Aborting.")
async def pprint_pool_wallet_state(
wallet_client: WalletRpcClient,
wallet_id: int,
pool_wallet_info: PoolWalletInfo,
address_prefix: str,
pool_state_dict: Dict,
plot_counts: Counter,
):
if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL and pool_wallet_info.target is None:
expected_leave_height = pool_wallet_info.singleton_block_height + pool_wallet_info.current.relative_lock_height
print(f"Current state: INVALID_STATE. Please leave/join again after block height {expected_leave_height}")
else:
print(f"Current state: {PoolSingletonState(pool_wallet_info.current.state).name}")
print(f"Current state from block height: {pool_wallet_info.singleton_block_height}")
print(f"Launcher ID: {pool_wallet_info.launcher_id}")
print(
"Target address (not for plotting): "
f"{encode_puzzle_hash(pool_wallet_info.current.target_puzzle_hash, address_prefix)}"
)
print(f"Number of plots: {plot_counts[pool_wallet_info.p2_singleton_puzzle_hash]}")
print(f"Owner public key: {pool_wallet_info.current.owner_pubkey}")
print(
f"Pool contract address (use ONLY for plotting - do not send money to this address): "
f"{encode_puzzle_hash(pool_wallet_info.p2_singleton_puzzle_hash, address_prefix)}"
)
if pool_wallet_info.target is not None:
print(f"Target state: {PoolSingletonState(pool_wallet_info.target.state).name}")
print(f"Target pool URL: {pool_wallet_info.target.pool_url}")
if pool_wallet_info.current.state == PoolSingletonState.SELF_POOLING.value:
balances: Dict = await wallet_client.get_wallet_balance(str(wallet_id))
balance = balances["confirmed_wallet_balance"]
typ = WalletType(int(WalletType.POOLING_WALLET))
address_prefix, scale = wallet_coin_unit(typ, address_prefix)
print(f"Claimable balance: {print_balance(balance, scale, address_prefix)}")
if pool_wallet_info.current.state == PoolSingletonState.FARMING_TO_POOL:
print(f"Current pool URL: {pool_wallet_info.current.pool_url}")
if pool_wallet_info.launcher_id in pool_state_dict:
print(f"Current difficulty: {pool_state_dict[pool_wallet_info.launcher_id]['current_difficulty']}")
print(f"Points balance: {pool_state_dict[pool_wallet_info.launcher_id]['current_points']}")
num_points_found_24h = len(pool_state_dict[pool_wallet_info.launcher_id]["points_found_24h"])
if num_points_found_24h > 0:
num_points_ack_24h = len(pool_state_dict[pool_wallet_info.launcher_id]["points_acknowledged_24h"])
success_pct = num_points_ack_24h / num_points_found_24h
print(f"Percent Successful Points (24h): {success_pct:.2%}")
print(f"Relative lock height: {pool_wallet_info.current.relative_lock_height} blocks")
payout_instructions: str = pool_state_dict[pool_wallet_info.launcher_id]["pool_config"]["payout_instructions"]
try:
payout_address = encode_puzzle_hash(bytes32.fromhex(payout_instructions), address_prefix)
print(f"Payout instructions (pool will pay to this address): {payout_address}")
except Exception:
print(f"Payout instructions (pool will pay you with this): {payout_instructions}")
if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL:
expected_leave_height = pool_wallet_info.singleton_block_height + pool_wallet_info.current.relative_lock_height
if pool_wallet_info.target is not None:
print(f"Expected to leave after block height: {expected_leave_height}")
async def show(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
self_hostname = config["self_hostname"]
farmer_rpc_port = config["farmer"]["rpc_port"]
farmer_client = await FarmerRpcClient.create(self_hostname, uint16(farmer_rpc_port), DEFAULT_ROOT_PATH, config)
address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
summaries_response = await wallet_client.get_wallets()
wallet_id_passed_in = args.get("id", None)
plot_counts: Counter = Counter()
try:
pool_state_list: List = (await farmer_client.get_pool_state())["pool_state"]
harvesters = await farmer_client.get_harvesters()
for d in harvesters["harvesters"]:
for plot in d["plots"]:
if plot.get("pool_contract_puzzle_hash", None) is not None:
# Non pooled plots will have a None pool_contract_puzzle_hash
plot_counts[hexstr_to_bytes(plot["pool_contract_puzzle_hash"])] += 1
except Exception as e:
if isinstance(e, aiohttp.ClientConnectorError):
print(
f"Connection error. Check if farmer is running at {farmer_rpc_port}."
f" You can run the farmer by:\n beet start farmer-only"
)
else:
print(f"Exception from 'wallet' {e}")
farmer_client.close()
await farmer_client.await_closed()
return
pool_state_dict: Dict[bytes32, Dict] = {
hexstr_to_bytes(pool_state_item["pool_config"]["launcher_id"]): pool_state_item
for pool_state_item in pool_state_list
}
if wallet_id_passed_in is not None:
for summary in summaries_response:
typ = WalletType(int(summary["type"]))
if summary["id"] == wallet_id_passed_in and typ != WalletType.POOLING_WALLET:
print(f"Wallet with id: {wallet_id_passed_in} is not a pooling wallet. Please provide a different id.")
return
pool_wallet_info, _ = await wallet_client.pw_status(wallet_id_passed_in)
await pprint_pool_wallet_state(
wallet_client,
wallet_id_passed_in,
pool_wallet_info,
address_prefix,
pool_state_dict,
plot_counts,
)
else:
print(f"Wallet height: {await wallet_client.get_height_info()}")
print(f"Sync status: {'Synced' if (await wallet_client.get_synced()) else 'Not synced'}")
for summary in summaries_response:
wallet_id = summary["id"]
typ = WalletType(int(summary["type"]))
if typ == WalletType.POOLING_WALLET:
print(f"Wallet id {wallet_id}: ")
pool_wallet_info, _ = await wallet_client.pw_status(wallet_id)
await pprint_pool_wallet_state(
wallet_client,
wallet_id,
pool_wallet_info,
address_prefix,
pool_state_dict,
plot_counts,
)
print("")
farmer_client.close()
await farmer_client.await_closed()
async def get_login_link(launcher_id_str: str) -> None:
launcher_id: bytes32 = hexstr_to_bytes(launcher_id_str)
config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
self_hostname = config["self_hostname"]
farmer_rpc_port = config["farmer"]["rpc_port"]
farmer_client = await FarmerRpcClient.create(self_hostname, uint16(farmer_rpc_port), DEFAULT_ROOT_PATH, config)
try:
login_link: Optional[str] = await farmer_client.get_pool_login_link(launcher_id)
if login_link is None:
print("Was not able to get login link.")
else:
print(login_link)
except Exception as e:
if isinstance(e, aiohttp.ClientConnectorError):
print(
f"Connection error. Check if farmer is running at {farmer_rpc_port}."
f" You can run the farmer by:\n beet start farmer-only"
)
else:
print(f"Exception from 'farmer' {e}")
finally:
farmer_client.close()
await farmer_client.await_closed()
async def submit_tx_with_confirmation(
message: str, prompt: bool, func: Callable, wallet_client: WalletRpcClient, fingerprint: int, wallet_id: int
):
print(message)
if prompt:
user_input: str = input("Confirm [n]/y: ")
else:
user_input = "yes"
if user_input.lower() == "y" or user_input.lower() == "yes":
try:
tx_record: TransactionRecord = await func()
start = time.time()
while time.time() - start < 10:
await asyncio.sleep(0.1)
tx = await wallet_client.get_transaction(str(1), tx_record.name)
if len(tx.sent_to) > 0:
print(f"Transaction submitted to nodes: {tx.sent_to}")
print(f"Do beet wallet get_transaction -f {fingerprint} -tx 0x{tx_record.name} to get status")
return None
except Exception as e:
print(f"Error performing operation on Plot NFT -f {fingerprint} wallet id: {wallet_id}: {e}")
return
print("Aborting.")
async def join_pool(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
enforce_https = config["full_node"]["selected_network"] == "mainnet"
pool_url: str = args["pool_url"]
if enforce_https and not pool_url.startswith("https://"):
print(f"Pool URLs must be HTTPS on mainnet {pool_url}. Aborting.")
return
wallet_id = args.get("id", None)
prompt = not args.get("yes", False)
try:
async with aiohttp.ClientSession() as session:
async with session.get(f"{pool_url}/pool_info", ssl=ssl_context_for_root(get_mozilla_ca_crt())) as response:
if response.ok:
json_dict = json.loads(await response.text())
else:
print(f"Response not OK: {response.status}")
return
except Exception as e:
print(f"Error connecting to pool {pool_url}: {e}")
return
if json_dict["relative_lock_height"] > 1000:
print("Relative lock height too high for this pool, cannot join")
return
if json_dict["protocol_version"] != POOL_PROTOCOL_VERSION:
print(f"Incorrect version: {json_dict['protocol_version']}, should be {POOL_PROTOCOL_VERSION}")
return
pprint(json_dict)
msg = f"\nWill join pool: {pool_url} with Plot NFT {fingerprint}."
func = functools.partial(
wallet_client.pw_join_pool,
wallet_id,
hexstr_to_bytes(json_dict["target_puzzle_hash"]),
pool_url,
json_dict["relative_lock_height"],
)
await submit_tx_with_confirmation(msg, prompt, func, wallet_client, fingerprint, wallet_id)
async def self_pool(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
wallet_id = args.get("id", None)
prompt = not args.get("yes", False)
msg = f"Will start self-farming with Plot NFT on wallet id {wallet_id} fingerprint {fingerprint}."
func = functools.partial(wallet_client.pw_self_pool, wallet_id)
await submit_tx_with_confirmation(msg, prompt, func, wallet_client, fingerprint, wallet_id)
async def inspect_cmd(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
wallet_id = args.get("id", None)
pool_wallet_info, unconfirmed_transactions = await wallet_client.pw_status(wallet_id)
print(
{
"pool_wallet_info": pool_wallet_info,
"unconfirmed_transactions": [
{"sent_to": tx.sent_to, "transaction_id": tx.name.hex()} for tx in unconfirmed_transactions
],
}
)
async def claim_cmd(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
wallet_id = args.get("id", None)
msg = f"\nWill claim rewards for wallet ID: {wallet_id}."
func = functools.partial(
wallet_client.pw_absorb_rewards,
wallet_id,
)
await submit_tx_with_confirmation(msg, False, func, wallet_client, fingerprint, wallet_id)

# -----------------------------------------------------------------------------
# /app/__init__.py  (repo: joss13aws/damgteam, MIT)
# -----------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
Script Name: __init__.py
Author: Do Trinh/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
from __future__ import absolute_import
import os, subprocess
BASE = os.path.dirname(__file__).split(__name__)[0]
if __name__ == '__main__':
ROOT = BASE.split('app')[0]
else:
ROOT = (os.path.dirname(__file__).split(__name__)[0])
try:
    os.environ['ROOT']  # os.getenv() never raises KeyError, so index os.environ
except KeyError:
    subprocess.Popen('SetX {} %CD%'.format('ROOT'), shell=True).wait()
else:
    if os.getenv('ROOT') != ROOT:
        subprocess.Popen('SetX {} %CD%'.format('ROOT'), shell=True).wait()
# -------------------------------------------------------------------------------------------------------------
# Created by panda on 24/08/2018 - 1:28 AM
# © 2017 - 2018 DAMGteam. All rights reserved
"[email protected]"
] | |
44c47b384ed863cdf879918152b36a475360e439 | 3b84c4b7b16ccfd0154f8dcb75ddbbb6636373be | /google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/ssh_utils.py | 3202d910b3cb387c13be5ad292755430c5662abd | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | twistedpair/google-cloud-sdk | 37f04872cf1ab9c9ce5ec692d2201a93679827e3 | 1f9b424c40a87b46656fc9f5e2e9c81895c7e614 | refs/heads/master | 2023-08-18T18:42:59.622485 | 2023-08-15T00:00:00 | 2023-08-15T12:14:05 | 116,506,777 | 58 | 24 | null | 2022-02-14T22:01:53 | 2018-01-06T18:40:35 | Python | UTF-8 | Python | false | false | 46,404 | py | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for subcommands that need to SSH into virtual machine guests."""
import errno
import getpass
import logging
import os
import re
import subprocess
import enum
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import constants
from googlecloudsdk.api_lib.compute import gaia_utils
from googlecloudsdk.api_lib.compute import metadata_utils
from googlecloudsdk.api_lib.compute import path_simplifier
from googlecloudsdk.api_lib.compute import request_helper
from googlecloudsdk.api_lib.compute import time_utils
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.api_lib.compute.users import client as user_client
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import config
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.console import progress_tracker
from googlecloudsdk.core.util import files
from googlecloudsdk.core.util import platforms
# The maximum amount of time to wait for a newly-added SSH key to
# propagate before giving up.
_SSH_KEY_PROPAGATION_TIMEOUT_SEC = 60
# `ssh` exits with this exit code in the event of an SSH error (as opposed to a
# successful `ssh` execution where the *command* errored).
_SSH_ERROR_EXIT_CODE = 255
# Normally, all SSH output is simply returned to the user (or sent to
# /dev/null if user output is disabled). For testing, this value can be
# overridden with a file path.
SSH_OUTPUT_FILE = None
class SetProjectMetadataError(core_exceptions.Error):
pass
class SshLikeCmdFailed(core_exceptions.Error):
"""Raise for a failure when invoking ssh, scp, or similar."""
def __init__(self, cmd, message=None, return_code=None):
if not (message or return_code):
raise ValueError('One of message or return_code is required.')
self.cmd = cmd
message_text = '[{0}]'.format(message) if message else None
return_code_text = ('return code [{0}]'.format(return_code)
if return_code else None)
why_failed = ' and '.join(filter(None, [message_text, return_code_text]))
super(SshLikeCmdFailed, self).__init__(
'[{0}] exited with {1}. See '
'https://cloud.google.com/compute/docs/troubleshooting#ssherrors '
'for troubleshooting hints.'.format(self.cmd, why_failed),
exit_code=return_code)
def _IsValidSshUsername(user):
# All characters must be ASCII, and no spaces are allowed
# This may grant false positives, but will prevent backwards-incompatible
# behavior.
return all(ord(c) < 128 and c != ' ' for c in user)
# TODO(user): This function can be dropped 1Q2017.
def _IsPublicKeyCorrupt95Through97(key):
"""Returns True if the encoded public key has the release 95.0.0 corruption.
Windows corruption checks for release 95.0.0 through 97.0.0.
Corrupt Windows encoded keys have these properties:
type: 'ssh-rsa'
exponent: 65537
length: 256
next byte: bit 0x80 set
A valid key either has exponent != 65537 or:
type: 'ssh-rsa'
exponent: 65537
length: 257
next byte: 0
Args:
key: The base64 encoded public key.
Returns:
True if the encoded public key has the release 95.0.0 corruption.
"""
# The corruption only happened on Windows.
if not platforms.OperatingSystem.IsWindows():
return False
# All corrupt encodings have the same encoded prefix (up to the second to
# last byte of the modulus size).
prefix = 'AAAAB3NzaC1yc2EAAAADAQABAAAB'
if not key.startswith(prefix):
return False
# The next 3 base64 chars determine the next 2 encoded bytes.
modulus = key[len(prefix):len(prefix) + 3]
# The last byte of the size must be 01 and the first byte of the modulus must
# be 00, and that corresponds to one of two base64 encodings:
if modulus in ('AQC', 'AQD'):
return False
# Looks bad.
return True
class KeyFileStatus(enum.Enum):
PRESENT = 'OK'
ABSENT = 'NOT FOUND'
BROKEN = 'BROKEN'
class KeyFileKind(enum.Enum):
"""List of supported (by gcloud) key file kinds."""
PRIVATE = 'private'
PUBLIC = 'public'
PPK = 'PuTTY PPK'
class KeyFilesVerifier(object):
"""Checks if SSH key files are correct.
- Populates list of SSH key files (key pair, ppk key on Windows).
- Checks if files are present and (to basic extent) correct.
- Can remove broken key (if permitted by user).
- Provides status information.
"""
class KeyFileData(object):
def __init__(self, filename):
# We keep filename as file handle. Filesystem race is impossible to avoid
# in this design as we spawn a subprocess and pass in filename.
# TODO(b/33288605) fix it.
self.filename = filename
self.status = None
def __init__(self, private_key_file, public_key_file):
self.keys = {
KeyFileKind.PRIVATE: self.KeyFileData(private_key_file),
KeyFileKind.PUBLIC: self.KeyFileData(public_key_file)
}
if platforms.OperatingSystem.IsWindows():
self.keys[KeyFileKind.PPK] = self.KeyFileData(private_key_file + '.ppk')
def _StatusMessage(self):
"""Prepares human readable SSH key status information."""
messages = []
key_padding = 0
status_padding = 0
for kind in self.keys:
data = self.keys[kind]
key_padding = max(key_padding, len(kind.value))
status_padding = max(status_padding, len(data.status.value))
for kind in self.keys:
data = self.keys[kind]
messages.append('{} {} [{}]\n'.format(
(kind.value + ' key').ljust(key_padding + 4),
          ('(' + data.status.value + ')').ljust(status_padding + 2),
data.filename))
messages.sort()
return ''.join(messages)
def Validate(self):
"""Performs minimum key files validation.
Returns:
PRESENT if private and public meet minimum key file requirements.
ABSENT if there is no sign of public nor private key file.
BROKEN if there is some key, but it is broken or incomplete.
"""
def ValidateFile(kind):
status_or_line = self._WarnOrReadFirstKeyLine(self.keys[kind].filename,
kind.value)
if isinstance(status_or_line, KeyFileStatus):
return status_or_line
else: # returned line - present
self.keys[kind].first_line = status_or_line
return KeyFileStatus.PRESENT
for file_kind in self.keys:
self.keys[file_kind].status = ValidateFile(file_kind)
# The remaining checks are for the public key file.
# Must have at least 2 space separated fields.
if self.keys[KeyFileKind.PUBLIC].status is KeyFileStatus.PRESENT:
fields = self.keys[KeyFileKind.PUBLIC].first_line.split(' ')
if len(fields) < 2 or _IsPublicKeyCorrupt95Through97(fields[1]):
log.warn(
'The public SSH key file for Google Compute Engine is corrupt.')
self.keys[KeyFileKind.PUBLIC].status = KeyFileStatus.BROKEN
# Summary
collected_values = [x.status for x in self.keys.itervalues()]
if all(x == KeyFileStatus.ABSENT for x in collected_values):
return KeyFileStatus.ABSENT
elif all(x == KeyFileStatus.PRESENT for x in collected_values):
return KeyFileStatus.PRESENT
else:
return KeyFileStatus.BROKEN
# TODO(b/33193000) Change non-interactive behavior for 2.06.2017 Release cut
def RemoveKeyFilesIfPermittedOrFail(self, force_key_file_overwrite):
"""Removes all SSH key files if user permitted this behavior.
User can express intent through --(no)--force-key-file-overwrite flag or
prompt (only in interactive mode). Default behavior is to be
non-destructive.
Args:
force_key_file_overwrite: bool, value of the flag specified or not by user
"""
permissive = True # TODO(b/33193000) Flip this bool value
message = 'Your SSH key files are broken.\n' + self._StatusMessage()
if force_key_file_overwrite is False:
raise console_io.OperationCancelledError(message + 'Operation aborted.')
message += 'We are going to overwrite all above files.'
if force_key_file_overwrite:
# self.force_key_file_overwrite is True
log.warn(message)
else:
# self.force_key_file_overwrite is None
# Deprecated code path is triggered only when flags are not provided.
# Show deprecation warning only in that case.
# Show deprecation warning before prompt to increase chance user read
# this.
# TODO(b/33193000) Remove this deprecation warning
log.warn('Permissive behavior in non-interactive mode is DEPRECATED '
'and will be removed 1st Jun 2017.\n'
'Use --no-force-key-file-overwrite flag to opt-in for new '
'behavior now.\n'
'If You want to preserve old behavior, You can opt-out from '
'new behavior using --force-key-file-overwrite flag.')
try:
console_io.PromptContinue(message, default=False,
throw_if_unattended=permissive,
cancel_on_no=True)
except console_io.UnattendedPromptError:
# Used to workaround default in non-interactive prompt for old behavior
pass # TODO(b/33193000) Remove this - exception will not be raised
# Remove existing broken key files and prepare to regenerate them.
# User agreed.
for key_file in self.keys.viewvalues():
try:
os.remove(key_file.filename)
except OSError as e:
# May be due to the fact that key_file.filename points to a directory
if e.errno == errno.EISDIR:
raise
def _WarnOrReadFirstKeyLine(self, path, kind):
"""Returns the first line from the key file path.
A None return indicates an error and is always accompanied by a log.warn
message.
Args:
path: The path of the file to read from.
kind: The kind of key file, 'private' or 'public'.
Returns:
None (and prints a log.warn message) if the file does not exist, is not
readable, or is empty. Otherwise returns the first line utf8 decoded.
"""
try:
with open(path) as f:
# Decode to utf8 to handle any unicode characters. Key data is base64
# encoded so it cannot contain any unicode. Comments may contain
# unicode, but they are ignored in the key file analysis here, so
# replacing invalid chars with ? is OK.
line = f.readline().strip().decode('utf8', 'replace')
if line:
return line
msg = 'is empty'
status = KeyFileStatus.BROKEN
except IOError as e:
if e.errno == errno.ENOENT:
msg = 'does not exist'
status = KeyFileStatus.ABSENT
else:
msg = 'is not readable'
status = KeyFileStatus.BROKEN
log.warn('The %s SSH key file for Google Compute Engine %s.', kind, msg)
return status
def GetDefaultSshUsername(warn_on_account_user=False):
"""Returns the default username for ssh.
The default username is the local username, unless that username is invalid.
In that case, the default username is the username portion of the current
account.
Emits a warning if it's not using the local account username.
Args:
warn_on_account_user: bool, whether to warn if using the current account
instead of the local username.
Returns:
str, the default SSH username.
"""
user = getpass.getuser()
if not _IsValidSshUsername(user):
full_account = properties.VALUES.core.account.Get(required=True)
account_user = gaia_utils.MapGaiaEmailToDefaultAccountName(full_account)
if warn_on_account_user:
log.warn('Invalid characters in local username [{0}]. '
'Using username corresponding to active account: [{1}]'.format(
user, account_user))
user = account_user
return user
def UserHost(user, host):
"""Returns a string of the form user@host."""
if user:
return user + '@' + host
else:
return host
def GetExternalIPAddress(instance_resource, no_raise=False):
"""Returns the external IP address of the instance.
Args:
instance_resource: An instance resource object.
no_raise: A boolean flag indicating whether or not to return None instead of
raising.
Raises:
ToolException: If no external IP address is found for the instance_resource
and no_raise is False.
Returns:
A string IP or None is no_raise is True and no ip exists.
"""
if instance_resource.networkInterfaces:
access_configs = instance_resource.networkInterfaces[0].accessConfigs
if access_configs:
ip_address = access_configs[0].natIP
if ip_address:
return ip_address
elif not no_raise:
raise exceptions.ToolException(
'Instance [{0}] in zone [{1}] has not been allocated an external '
'IP address yet. Try rerunning this command later.'.format(
instance_resource.name,
path_simplifier.Name(instance_resource.zone)))
if no_raise:
return None
raise exceptions.ToolException(
'Instance [{0}] in zone [{1}] does not have an external IP address, '
'so you cannot SSH into it. To add an external IP address to the '
'instance, use [gcloud compute instances add-access-config].'
.format(instance_resource.name,
path_simplifier.Name(instance_resource.zone)))
def _RunExecutable(cmd_args, strict_error_checking=True,
ignore_ssh_errors=False):
"""Run the given command, handling errors appropriately.
Args:
cmd_args: list of str, the arguments (including executable path) to run
strict_error_checking: bool, whether a non-zero, non-255 exit code should be
considered a failure.
ignore_ssh_errors: bool, when true ignore all errors, including the 255
exit code.
Returns:
int, the return code of the command
Raises:
SshLikeCmdFailed: if the command failed (based on the command exit code and
the strict_error_checking flag)
"""
outfile = SSH_OUTPUT_FILE or os.devnull
with open(outfile, 'w') as output_file:
if log.IsUserOutputEnabled() and not SSH_OUTPUT_FILE:
stdout, stderr = None, None
else:
stdout, stderr = output_file, output_file
if (platforms.OperatingSystem.IsWindows() and
not cmd_args[0].endswith('winkeygen.exe')):
# TODO(user): b/25126583 will drop StrictHostKeyChecking=no and 'y'.
# PuTTY and friends always prompt on fingerprint mismatch. A 'y' response
# adds/updates the fingerprint registry entry and proceeds. The prompt
# will appear once for each new/changed host. Redirecting stdin is not a
# problem. Even interactive ssh is not a problem because a separate PuTTY
# term is used and it ignores the calling process stdin.
stdin = subprocess.PIPE
else:
stdin = None
try:
proc = subprocess.Popen(
cmd_args, stdin=stdin, stdout=stdout, stderr=stderr)
if stdin == subprocess.PIPE:
# Max one prompt per host and there can't be more hosts than args.
proc.communicate('y\n' * len(cmd_args))
returncode = proc.wait()
except OSError as e:
raise SshLikeCmdFailed(cmd_args[0], message=e.strerror)
if not ignore_ssh_errors:
if ((returncode and strict_error_checking) or
returncode == _SSH_ERROR_EXIT_CODE):
raise SshLikeCmdFailed(cmd_args[0], return_code=returncode)
return returncode
def _GetMetadataKey(iam_ssh_keys):
"""Get the metadata key name for the desired SSH key metadata.
There are four SSH key related metadata pairs:
* Per-project 'sshKeys': this grants SSH access to VMs project-wide.
* Per-instance 'sshKeys': this is used to grant access to an individual
instance. For historical reasons, it acts as an override to the
project-global value.
* Per-instance 'block-project-ssh-keys': this determines whether 'ssh-keys'
overrides or adds to the per-project 'sshKeys'
* Per-instance 'ssh-keys': this also grants access to an individual
instance, but acts in addition or as an override to the per-project
'sshKeys' depending on 'block-project-ssh-keys'
Args:
iam_ssh_keys: bool. If False, give the name of the original SSH metadata key
(that overrides the project-global SSH metadata key). If True, give the
name of the IAM SSH metadata key (that works in conjunction with the
project-global SSH key metadata).
Returns:
str, the corresponding metadata key name.
"""
if iam_ssh_keys:
metadata_key = constants.SSH_KEYS_INSTANCE_RESTRICTED_METADATA_KEY
else:
metadata_key = constants.SSH_KEYS_METADATA_KEY
return metadata_key
def _GetSSHKeysFromMetadata(metadata, iam_keys=False):
"""Returns the value of the "sshKeys" metadata as a list."""
if not metadata:
return []
for item in metadata.items:
if item.key == _GetMetadataKey(iam_keys):
return [key.strip() for key in item.value.split('\n') if key]
return []
def _PrepareSSHKeysValue(ssh_keys):
"""Returns a string appropriate for the metadata.
  Values are taken from the tail of the list until either all values are
  taken or constants.MAX_METADATA_VALUE_SIZE_IN_BYTES is reached, whichever
comes first. The selected values are then reversed. Only values at
the head of the list will be subject to removal.
Args:
ssh_keys: A list of keys. Each entry should be one key.
Returns:
A new-line-joined string of SSH keys.
"""
keys = []
bytes_consumed = 0
for key in reversed(ssh_keys):
num_bytes = len(key + '\n')
if bytes_consumed + num_bytes > constants.MAX_METADATA_VALUE_SIZE_IN_BYTES:
log.warn('The following SSH key will be removed from your project '
'because your sshKeys metadata value has reached its '
'maximum allowed size of {0} bytes: {1}'
.format(constants.MAX_METADATA_VALUE_SIZE_IN_BYTES, key))
else:
keys.append(key)
bytes_consumed += num_bytes
keys.reverse()
return '\n'.join(keys)
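# A rough illustration of the truncation rule above (key strings are made up):
# iteration starts at the tail (newest keys), so once the byte budget is spent
# it is the head (oldest) entries that get warned about and dropped.
#
#   keys = ['old-user:ssh-rsa AAA...', 'new-user:ssh-rsa BBB...']
#   value = _PrepareSSHKeysValue(keys)
#   # If both keys fit: value == 'old-user:ssh-rsa AAA...\nnew-user:ssh-rsa BBB...'
#   # If only the newer one fits: value == 'new-user:ssh-rsa BBB...'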
def _AddSSHKeyToMetadataMessage(message_classes, user, public_key, metadata,
iam_keys=False):
"""Adds the public key material to the metadata if it's not already there."""
entry = u'{user}:{public_key}'.format(
user=user, public_key=public_key)
ssh_keys = _GetSSHKeysFromMetadata(metadata, iam_keys=iam_keys)
log.debug('Current SSH keys in project: {0}'.format(ssh_keys))
if entry in ssh_keys:
return metadata
else:
ssh_keys.append(entry)
return metadata_utils.ConstructMetadataMessage(
message_classes=message_classes,
metadata={
_GetMetadataKey(iam_keys): _PrepareSSHKeysValue(ssh_keys)},
existing_metadata=metadata)
def ReadFile(file_path):
"""Returns the contents of the file or ''."""
try:
with open(file_path) as f:
return f.read()
except IOError as e:
if e.errno == errno.ENOENT:
return ''
else:
raise exceptions.ToolException('There was a problem reading [{0}]: {1}'
.format(file_path, e.message))
def UpdateKnownHostsFile(known_hosts_file, hostname, host_key,
overwrite_keys=False):
"""Update the known_hosts file entry for the given hostname.
  If there is no entry for the given hostname, it will be added. If there is
an entry already and overwrite_keys is False, nothing will be changed. If
there is an entry and overwrite_keys is True, the key will be updated if it
has changed.
Args:
known_hosts_file: str, The full path of the known_hosts file to update.
hostname: str, The hostname for the known_hosts entry.
host_key: str, The host key for the given hostname.
overwrite_keys: bool, If true, will overwrite the entry corresponding to
hostname with the new host_key if it already exists. If false and an
entry already exists for hostname, will ignore the new host_key value.
"""
known_hosts_contents = ReadFile(known_hosts_file)
key_list = known_hosts_contents.splitlines()
found_key_entry = None
new_key_entry = '{0} {1}'.format(hostname, host_key)
for key in key_list:
if key.startswith(hostname):
found_key_entry = key
break
if overwrite_keys and found_key_entry:
if found_key_entry != new_key_entry:
key_list.remove(found_key_entry)
found_key_entry = None
if not found_key_entry:
key_list.append(new_key_entry)
new_contents = '\n'.join(key_list) + '\n'
with files.OpenForWritingPrivate(known_hosts_file) as f:
f.write(new_contents)
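# A hedged example of the update semantics (path and keys are hypothetical):
#
#   UpdateKnownHostsFile('/tmp/known_hosts', 'compute.123', 'ssh-rsa AAA...')
#   # appends 'compute.123 ssh-rsa AAA...' when no entry starts with that host
#   UpdateKnownHostsFile('/tmp/known_hosts', 'compute.123', 'ssh-rsa BBB...',
#                        overwrite_keys=True)
#   # replaces the stale entry; with overwrite_keys=False it is left untouched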
def _SdkHelperBin():
"""Returns the SDK helper executable bin directory."""
return os.path.join(config.Paths().sdk_root, 'bin', 'sdk')
def _MetadataHasBlockProjectSshKeys(metadata):
"""Return true if the metadata has 'block-project-ssh-keys' set and 'true'."""
if not (metadata and metadata.items):
return False
matching_values = [item.value for item in metadata.items
if item.key == constants.SSH_KEYS_BLOCK_METADATA_KEY]
if not matching_values:
return False
return matching_values[0].lower() == 'true'
class BaseSSHCommand(base_classes.BaseCommand):
"""Base class for subcommands that need to connect to instances using SSH.
Subclasses can call EnsureSSHKeyIsInProject() to make sure that the
user's public SSH key is placed in the project metadata before
proceeding.
"""
@staticmethod
def Args(parser):
ssh_key_file = parser.add_argument(
'--ssh-key-file',
help='The path to the SSH key file.')
ssh_key_file.detailed_help = """\
The path to the SSH key file. By default, this is ``{0}''.
""".format(constants.DEFAULT_SSH_KEY_FILE)
force_key_file_overwrite = parser.add_argument(
'--force-key-file-overwrite',
action='store_true',
default=None,
help=('Enable/Disable force overwrite of the files associated with a '
'broken SSH key.')
)
force_key_file_overwrite.detailed_help = """\
If enabled gcloud will regenerate and overwrite the files associated
with a broken SSH key without asking for confirmation in both
interactive and non-interactive environment.
If disabled gcloud will not attempt to regenerate the files associated
with a broken SSH key and fail in both interactive and non-interactive
environment.
"""
# Last line empty to preserve spacing between last paragraph and calliope
# attachment "Use --no-force-key-file-overwrite to disable."
def GetProject(self, project):
"""Returns the project object.
Args:
project: str, the project we are requesting or None for value from
        properties
Returns:
The project object
"""
errors = []
objects = list(request_helper.MakeRequests(
requests=[(self.compute.projects,
'Get',
self.messages.ComputeProjectsGetRequest(
project=project or properties.VALUES.core.project.Get(
required=True),
))],
http=self.http,
batch_url=self.batch_url,
errors=errors,
custom_get_requests=None))
if errors:
utils.RaiseToolException(
errors,
error_message='Could not fetch project resource:')
return objects[0]
def _SetProjectMetadata(self, new_metadata):
"""Sets the project metadata to the new metadata."""
compute = self.compute
errors = []
list(request_helper.MakeRequests(
requests=[
(compute.projects,
'SetCommonInstanceMetadata',
self.messages.ComputeProjectsSetCommonInstanceMetadataRequest(
metadata=new_metadata,
project=properties.VALUES.core.project.Get(
required=True),
))],
http=self.http,
batch_url=self.batch_url,
errors=errors,
custom_get_requests=None))
if errors:
utils.RaiseException(
errors,
SetProjectMetadataError,
error_message='Could not add SSH key to project metadata:')
def SetProjectMetadata(self, new_metadata):
"""Sets the project metadata to the new metadata with progress tracker."""
with progress_tracker.ProgressTracker('Updating project ssh metadata'):
self._SetProjectMetadata(new_metadata)
def _SetInstanceMetadata(self, instance, new_metadata):
"""Sets the project metadata to the new metadata."""
compute = self.compute
errors = []
# API wants just the zone name, not the full URL
zone = instance.zone.split('/')[-1]
list(request_helper.MakeRequests(
requests=[
(compute.instances,
'SetMetadata',
self.messages.ComputeInstancesSetMetadataRequest(
instance=instance.name,
metadata=new_metadata,
project=properties.VALUES.core.project.Get(
required=True),
zone=zone
))],
http=self.http,
batch_url=self.batch_url,
errors=errors,
custom_get_requests=None))
if errors:
utils.RaiseToolException(
errors,
error_message='Could not add SSH key to instance metadata:')
def SetInstanceMetadata(self, instance, new_metadata):
"""Sets the instance metadata to the new metadata with progress tracker."""
with progress_tracker.ProgressTracker('Updating instance ssh metadata'):
self._SetInstanceMetadata(instance, new_metadata)
def EnsureSSHKeyIsInInstance(self, user, instance, iam_keys=False):
"""Ensures that the user's public SSH key is in the instance metadata.
Args:
user: str, the name of the user associated with the SSH key in the
metadata
instance: Instance, ensure the SSH key is in the metadata of this instance
iam_keys: bool. If False, write to the original SSH metadata key (that
overrides the project-global SSH metadata key). If true, write to the
new SSH metadata key (that works in union with the project-global SSH
key metadata).
Returns:
bool, True if the key was newly added, False if it was in the metadata
already
"""
# First, grab the public key from the user's computer. If the public key
# doesn't already exist, GetPublicKey() should create it.
public_key = self.GetPublicKey()
new_metadata = _AddSSHKeyToMetadataMessage(self.messages, user, public_key,
instance.metadata,
iam_keys=iam_keys)
if new_metadata != instance.metadata:
self.SetInstanceMetadata(instance, new_metadata)
return True
else:
return False
def EnsureSSHKeyIsInProject(self, user, project_name=None):
"""Ensures that the user's public SSH key is in the project metadata.
Args:
user: str, the name of the user associated with the SSH key in the
metadata
project_name: str, the project SSH key will be added to
Returns:
bool, True if the key was newly added, False if it was in the metadata
already
"""
# First, grab the public key from the user's computer. If the public key
# doesn't already exist, GetPublicKey() should create it.
public_key = self.GetPublicKey()
# Second, let's make sure the public key is in the project metadata.
project = self.GetProject(project_name)
existing_metadata = project.commonInstanceMetadata
new_metadata = _AddSSHKeyToMetadataMessage(
self.messages, user, public_key, existing_metadata)
if new_metadata != existing_metadata:
self.SetProjectMetadata(new_metadata)
return True
else:
return False
def _EnsureSSHKeyExistsForUser(self, fetcher, user):
"""Ensure the user's public SSH key is known by the Account Service."""
public_key = self.GetPublicKey()
should_upload = True
try:
user_info = fetcher.LookupUser(user)
except user_client.UserException:
owner_email = gaia_utils.GetAuthenticatedGaiaEmail(self.http)
fetcher.CreateUser(user, owner_email)
user_info = fetcher.LookupUser(user)
for remote_public_key in user_info.publicKeys:
if remote_public_key.key.rstrip() == public_key:
expiration_time = remote_public_key.expirationTimestamp
if expiration_time and time_utils.IsExpired(expiration_time):
# If a key is expired we remove and reupload
fetcher.RemovePublicKey(
user_info.name, remote_public_key.fingerprint)
else:
should_upload = False
break
if should_upload:
fetcher.UploadPublicKey(user, public_key)
return True
def GetPublicKey(self):
"""Generates an SSH key using ssh-keygen (if necessary) and returns it."""
public_ssh_key_file = self.ssh_key_file + '.pub'
key_files_summary = KeyFilesVerifier(self.ssh_key_file, public_ssh_key_file)
key_files_validity = key_files_summary.Validate()
if key_files_validity is KeyFileStatus.BROKEN:
key_files_summary.RemoveKeyFilesIfPermittedOrFail(
self.force_key_file_overwrite)
# Fallthrough
if key_files_validity is not KeyFileStatus.PRESENT:
if key_files_validity is KeyFileStatus.ABSENT:
# If key is broken, message is already displayed
log.warn('You do not have an SSH key for Google Compute Engine.')
log.warn('[%s] will be executed to generate a key.',
self.ssh_keygen_executable)
ssh_directory = os.path.dirname(public_ssh_key_file)
if not os.path.exists(ssh_directory):
if console_io.PromptContinue(
'This tool needs to create the directory [{0}] before being able '
'to generate SSH keys.'.format(ssh_directory)):
files.MakeDir(ssh_directory, 0700)
else:
raise exceptions.ToolException('SSH key generation aborted by user.')
keygen_args = [self.ssh_keygen_executable]
if platforms.OperatingSystem.IsWindows():
# No passphrase in the current implementation.
keygen_args.append(self.ssh_key_file)
else:
if properties.VALUES.core.disable_prompts.GetBool():
# Specify empty passphrase on command line
keygen_args.extend(['-P', ''])
keygen_args.extend([
'-t', 'rsa',
'-f', self.ssh_key_file,
])
_RunExecutable(keygen_args)
with open(public_ssh_key_file) as f:
# We get back a unicode list of keys for the remaining metadata, so
# convert to unicode. Assume UTF 8, but if we miss a character we can just
# replace it with a '?'. The only source of issues would be the hostnames,
# which are relatively inconsequential.
return f.readline().strip().decode('utf8', 'replace')
@property
def resource_type(self):
return 'instances'
def Run(self, args):
"""Subclasses must call this in their Run() before continuing."""
# Used in GetPublicKey
self.force_key_file_overwrite = args.force_key_file_overwrite
if platforms.OperatingSystem.IsWindows():
scp_command = 'pscp'
ssh_command = 'plink'
ssh_keygen_command = 'winkeygen'
ssh_term_command = 'putty'
# The ssh helper executables are installed in this dir only.
path = _SdkHelperBin()
self.ssh_term_executable = files.FindExecutableOnPath(
ssh_term_command, path=path)
else:
scp_command = 'scp'
ssh_command = 'ssh'
ssh_keygen_command = 'ssh-keygen'
ssh_term_command = None
path = None
self.ssh_term_executable = None
self.scp_executable = files.FindExecutableOnPath(scp_command, path=path)
self.ssh_executable = files.FindExecutableOnPath(ssh_command, path=path)
self.ssh_keygen_executable = files.FindExecutableOnPath(
ssh_keygen_command, path=path)
if (not self.scp_executable or
not self.ssh_executable or
not self.ssh_keygen_executable or
ssh_term_command and not self.ssh_term_executable):
raise exceptions.ToolException('Your platform does not support OpenSSH.')
self.ssh_key_file = os.path.realpath(os.path.expanduser(
args.ssh_key_file or constants.DEFAULT_SSH_KEY_FILE))
self.known_hosts_file = os.path.realpath(os.path.expanduser(
constants.GOOGLE_SSH_KNOWN_HOSTS_FILE))
class BaseSSHCLICommand(BaseSSHCommand):
"""Base class for subcommands that use ssh or scp."""
@staticmethod
def Args(parser):
BaseSSHCommand.Args(parser)
parser.add_argument(
'--dry-run',
action='store_true',
help=('If provided, prints the command that would be run to standard '
'out instead of executing it.'))
plain = parser.add_argument(
'--plain',
action='store_true',
help='Suppresses the automatic addition of ssh/scp flags.')
plain.detailed_help = """\
Suppresses the automatic addition of *ssh(1)*/*scp(1)* flags. This flag
is useful if you want to take care of authentication yourself or
use specific ssh/scp features.
"""
strict_host_key = parser.add_argument(
'--strict-host-key-checking',
choices=['yes', 'no', 'ask'],
help='Override the default behavior for ssh/scp StrictHostKeyChecking')
strict_host_key.detailed_help = """\
Override the default behavior of StrictHostKeyChecking. By default,
StrictHostKeyChecking is set to 'no' the first time you connect to an
instance and will be set to 'yes' for all subsequent connections. Use
this flag to specify a value for the connection.
"""
def GetDefaultFlags(self):
"""Returns a list of default commandline flags."""
return [
'-i', self.ssh_key_file,
'-o', 'UserKnownHostsFile={0}'.format(self.known_hosts_file),
'-o', 'IdentitiesOnly=yes', # ensure our SSH key trumps any ssh_agent
'-o', 'CheckHostIP=no'
]
def GetInstance(self, instance_ref):
"""Fetch an instance based on the given instance_ref."""
request = (self.compute.instances,
'Get',
self.messages.ComputeInstancesGetRequest(
instance=instance_ref.Name(),
project=instance_ref.project,
zone=instance_ref.zone))
errors = []
objects = list(request_helper.MakeRequests(
requests=[request],
http=self.http,
batch_url=self.batch_url,
errors=errors,
custom_get_requests=None))
if errors:
utils.RaiseToolException(
errors,
error_message='Could not fetch instance:')
return objects[0]
def WaitUntilSSHable(self, user, args, instance):
"""Blocks until SSHing to the given host succeeds."""
external_ip_address = GetExternalIPAddress(instance)
ssh_args_for_polling = [self.ssh_executable]
ssh_args_for_polling.extend(self.GetDefaultFlags())
ssh_args_for_polling.extend(self.GetHostKeyArgs(args, instance))
ssh_args_for_polling.append(UserHost(user, external_ip_address))
ssh_args_for_polling.append('true')
ssh_args_for_polling = self.LocalizeCommand(ssh_args_for_polling)
start_sec = time_utils.CurrentTimeSec()
while True:
logging.debug('polling instance for SSHability')
retval = subprocess.call(ssh_args_for_polling)
if retval == 0:
break
if (time_utils.CurrentTimeSec() - start_sec >
_SSH_KEY_PROPAGATION_TIMEOUT_SEC):
raise exceptions.ToolException(
'Could not SSH to the instance. It is possible that '
'your SSH key has not propagated to the instance yet. '
'Try running this command again. If you still cannot connect, '
'verify that the firewall and instance are set to accept '
'ssh traffic.')
time_utils.Sleep(5)
def _LocalizeWindowsCommand(self, cmd_args):
"""Translate cmd_args[1:] from ssh form to plink/putty form.
The translations are:
ssh form plink/putty form
======== ================
-i PRIVATE_KEY_FILE -i PRIVATE_KEY_FILE.ppk
-o ANYTHING <ignore>
-p PORT -P PORT
[USER]@HOST [USER]@HOST
-BOOLEAN_FLAG -BOOLEAN_FLAG
-FLAG WITH_VALUE -FLAG WITH_VALUE
POSITIONAL POSITIONAL
Args:
cmd_args: [str], The command line that will be executed.
Returns:
Returns translated_cmd_args, the localized command line.
"""
positionals = 0
cmd_args = list(cmd_args) # Get a mutable copy.
translated_args = [cmd_args.pop(0)]
while cmd_args: # Each iteration processes 1 or 2 args.
arg = cmd_args.pop(0)
if arg == '-i' and cmd_args:
# -i private_key_file -- use private_key_file.ppk -- if it doesn't exist
# then winkeygen will be called to generate it before attempting to
# connect.
translated_args.append(arg)
translated_args.append(cmd_args.pop(0) + '.ppk')
elif arg == '-o' and cmd_args:
# Ignore `-o anything'.
cmd_args.pop(0)
elif arg == '-p' and cmd_args:
# -p PORT => -P PORT
translated_args.append('-P')
translated_args.append(cmd_args.pop(0))
elif arg in ['-2', '-a', '-C', '-l', '-load', '-m', '-pw', '-R', '-T',
'-v', '-x'] and cmd_args:
# Pass through putty/plink flag with value.
translated_args.append(arg)
translated_args.append(cmd_args.pop(0))
elif arg.startswith('-'):
# Pass through putty/plink Boolean flags
translated_args.append(arg)
else:
positionals += 1
translated_args.append(arg)
# If there is only 1 positional then it must be [USER@]HOST and we should
# use self.ssh_term_executable to open an xterm window.
if positionals == 1 and translated_args[0] == self.ssh_executable:
translated_args[0] = self.ssh_term_executable
return translated_args
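  # A sketch of the translation table above (paths are hypothetical; assumes
  # cmd_args[0] is the configured plink executable). Note '-o' options vanish
  # and '-p' becomes '-P':
  #
  #   ['plink', '-i', 'key', '-o', 'CheckHostIP=no', '-p', '22', 'me@host']
  #   # becomes, roughly:
  #   ['putty', '-i', 'key.ppk', '-P', '22', 'me@host']
  #   # ('putty' is substituted because there is exactly one positional,
  #   # [USER@]HOST, so a separate interactive terminal window is opened.)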
def LocalizeCommand(self, cmd_args):
"""Translates an ssh/scp command line to match the local implementation.
Args:
cmd_args: [str], The command line that will be executed.
Returns:
Returns translated_cmd_args, the localized command line.
"""
if platforms.OperatingSystem.IsWindows():
return self._LocalizeWindowsCommand(cmd_args)
return cmd_args
def IsHostKeyAliasInKnownHosts(self, host_key_alias):
known_hosts = ReadFile(self.known_hosts_file)
if known_hosts:
return host_key_alias in known_hosts
else:
return False
def GetHostKeyArgs(self, args, instance):
"""Returns default values for HostKeyAlias and StrictHostKeyChecking.
Args:
args: argparse.Namespace, The calling command invocation args.
instance: Instance resource that ssh/scp is connecting to.
Returns:
list, list of arguments to add to the ssh command line.
"""
if args.plain or platforms.OperatingSystem.IsWindows():
return []
host_key_alias = 'compute.{0}'.format(instance.id)
if args.strict_host_key_checking:
strict_host_key_value = args.strict_host_key_checking
elif self.IsHostKeyAliasInKnownHosts(host_key_alias):
strict_host_key_value = 'yes'
else:
strict_host_key_value = 'no'
cmd_args = ['-o', 'HostKeyAlias={0}'.format(host_key_alias), '-o',
'StrictHostKeyChecking={0}'.format(strict_host_key_value)]
return cmd_args
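  # An illustrative result (instance id is made up; on Windows or with
  # --plain the method returns [] instead). For a first-time connection,
  # alias not yet in known hosts, no explicit --strict-host-key-checking:
  #
  #   ['-o', 'HostKeyAlias=compute.1234', '-o', 'StrictHostKeyChecking=no']
  #
  # On later connections the alias is found, so 'yes' is used instead.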
def ActuallyRun(self, args, cmd_args, user, instance, project,
strict_error_checking=True, use_account_service=False,
wait_for_sshable=True, ignore_ssh_errors=False):
"""Runs the scp/ssh command specified in cmd_args.
If the scp/ssh command exits non-zero, this command will exit with the same
exit code.
Args:
args: argparse.Namespace, The calling command invocation args.
cmd_args: [str], The argv for the command to execute.
user: str, The user name.
instance: Instance, the instance to connect to
project: str, the project instance is in
strict_error_checking: bool, whether to fail on a non-zero, non-255 exit
        code (alternative behavior is to return the exit code)
use_account_service: bool, when false upload ssh keys to project metadata.
wait_for_sshable: bool, when false skip the sshability check.
ignore_ssh_errors: bool, when true ignore all errors, including the 255
exit code.
Returns:
int, the exit code of the command that was run
"""
cmd_args = self.LocalizeCommand(cmd_args)
if args.dry_run:
log.out.Print(' '.join(cmd_args))
return
if args.plain:
keys_newly_added = []
elif use_account_service:
fetcher = user_client.UserResourceFetcher(
self.clouduseraccounts, self.project, self.http, self.batch_url)
keys_newly_added = self._EnsureSSHKeyExistsForUser(fetcher, user)
else:
# There are two kinds of metadata: project-wide metadata and per-instance
# metadata. There are four SSH-key related metadata keys:
#
# * project['sshKeys']: shared project-wide
# * instance['sshKeys']: legacy. Acts as an override to project['sshKeys']
# * instance['block-project-ssh-keys']: If true, instance['ssh-keys']
# overrides project['sshKeys']. Otherwise, keys from both metadata
# pairs are valid.
# * instance['ssh-keys']: Acts either in conjunction with or as an
# override to project['sshKeys'], depending on
# instance['block-project-ssh-keys']
#
# SSH-like commands work by copying a relevant SSH key to
# the appropriate metadata value. The VM grabs keys from the metadata as
# follows (pseudo-Python):
#
# def GetAllSshKeys(project, instance):
# if 'sshKeys' in instance.metadata:
# return (instance.metadata['sshKeys'] +
# instance.metadata['ssh-keys'])
# elif instance.metadata['block-project-ssh-keys'] == 'true':
# return instance.metadata['ssh-keys']
# else:
# return (instance.metadata['ssh-keys'] +
# project.metadata['sshKeys'])
#
if _GetSSHKeysFromMetadata(instance.metadata):
# If we add a key to project-wide metadata but the per-instance
# 'sshKeys' metadata exists, we won't be able to ssh in because the VM
# won't check the project-wide metadata. To avoid this, if the instance
# has per-instance SSH key metadata, we add the key there instead.
keys_newly_added = self.EnsureSSHKeyIsInInstance(user, instance)
elif _MetadataHasBlockProjectSshKeys(instance.metadata):
# If the instance 'ssh-keys' metadata overrides the project-wide
# 'sshKeys' metadata, we should put our key there.
keys_newly_added = self.EnsureSSHKeyIsInInstance(user, instance,
iam_keys=True)
else:
# Otherwise, try to add to the project-wide metadata. If we don't have
# permissions to do that, add to the instance 'ssh-keys' metadata.
try:
keys_newly_added = self.EnsureSSHKeyIsInProject(user, project)
except SetProjectMetadataError:
log.info('Could not set project metadata:', exc_info=True)
# If we can't write to the project metadata, it may be because of a
# permissions problem (we could inspect this exception object further
# to make sure, but because we only get a string back this would be
        # fragile). If that's the case, we want to try writing to the
# iam_keys metadata (we may have permissions to write to instance
# metadata). We prefer this to the per-instance override of the
# project metadata.
log.info('Attempting to set instance metadata.')
keys_newly_added = self.EnsureSSHKeyIsInInstance(user, instance,
iam_keys=True)
if keys_newly_added and wait_for_sshable:
self.WaitUntilSSHable(user, args, instance)
logging.debug('%s command: %s', cmd_args[0], ' '.join(cmd_args))
return _RunExecutable(cmd_args, strict_error_checking=strict_error_checking,
ignore_ssh_errors=ignore_ssh_errors)
# A remote path has three parts host[@user]:[path], where @user and path are
# optional.
# A host:
# - cannot start with '.'
# - cannot contain ':', '/', '\\', '@'
# A user:
# - cannot contain ':'.
# A path:
# - can be anything
_SSH_REMOTE_PATH_REGEX = r'[^.:/\\@][^:/\\@]*(@[^:]*)?:'
def IsScpLocalPath(path):
"""Checks if path is an scp local file path.
Args:
path: The path name to check.
Returns:
True if path is an scp local path, false if it is a remote path.
"""
# Paths that start with a drive are local. _SSH_REMOTE_PATH_REGEX could match
# path for some os implementations, so the drive test must be done before the
# pattern match.
if os.path.splitdrive(path)[0]:
return True
# Paths that match _SSH_REMOTE_PATH_REGEX are not local.
if re.match(_SSH_REMOTE_PATH_REGEX, path):
return False
# Otherwise the path is local.
return True
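# Hedged examples of the classification above (paths are illustrative):
#
#   IsScpLocalPath('./notes.txt')    # True: a leading '.' cannot start a host
#   IsScpLocalPath('host:notes.txt') # False: matches the remote-path regex
#   IsScpLocalPath(r'C:\notes.txt')  # True on Windows: the drive test wins
#                                    # before the regex is consulted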
| [
"[email protected]"
] | |
fa4720480d99598478d88893f74730d1b74f153b | bfd6ac084fcc08040b94d310e6a91d5d804141de | /PulseSequences2/SidebandOptimization.py | 5b3223a3788320f39d1161c1d19f3a5584fa78af | [] | no_license | jqwang17/HaeffnerLabLattice | 3b1cba747b8b62cada4467a4ea041119a7a68bfa | 03d5bedf64cf63efac457f90b189daada47ff535 | refs/heads/master | 2020-12-07T20:23:32.251900 | 2019-11-11T19:26:41 | 2019-11-11T19:26:41 | 232,792,450 | 1 | 0 | null | 2020-01-09T11:23:28 | 2020-01-09T11:23:27 | null | UTF-8 | Python | false | false | 2,590 | py | from common.devel.bum.sequences.pulse_sequence import pulse_sequence
from labrad.units import WithUnit as U
from treedict import TreeDict
class SidebandOptimization(pulse_sequence):
scannable_params = {'SidebandCooling.sideband_cooling_amplitude_854' : [(-30., -6., 3., 'dBm'), 'current'],
'SidebandCooling.stark_shift' : [(-50.0, 50.0, 2.5, 'kHz'), 'current']}
show_params= [
'SidebandCooling.line_selection',
'SidebandCooling.sideband_cooling_amplitude_729',
'SidebandCooling.sideband_cooling_amplitude_854',
'SidebandCooling.sideband_cooling_amplitude_866',
'SidebandCooling.selection_sideband',
'SidebandCooling.order',
'SidebandCooling.stark_shift',
'SidebandCooling.cooling_cycles',
'RabiFlopping.rabi_amplitude_729',
'RabiFlopping.duration',
'RabiFlopping.line_selection',
'RabiFlopping.selection_sideband',
'RabiFlopping.order',
]
def sequence(self):
from StatePreparation import StatePreparation
from subsequences.RabiExcitation import RabiExcitation
from subsequences.StateReadout import StateReadout
from subsequences.TurnOffAll import TurnOffAll
## calculate the scan params
rf = self.parameters.RabiFlopping
freq_729=self.calc_freq(rf.line_selection , rf.selection_sideband , rf.order)
self.end = U(10., 'us')
self.addSequence(TurnOffAll)
self.addSequence(StatePreparation)
self.addSequence(RabiExcitation,{'Excitation_729.rabi_excitation_frequency': freq_729,
'Excitation_729.rabi_excitation_amplitude': rf.rabi_amplitude_729,
'Excitation_729.rabi_excitation_duration': rf.duration })
self.addSequence(StateReadout)
@classmethod
def run_initial(cls,cxn, parameters_dict):
print "Switching the 866DP to auto mode"
cxn.pulser.switch_auto('866DP')
@classmethod
def run_in_loop(cls,cxn, parameters_dict, data, x):
#print "Running in loop Rabi_floping"
pass
@classmethod
def run_finally(cls,cxn, parameters_dict, data, x):
print "switching the 866 back to ON"
cxn.pulser.switch_manual('866DP', True)
| [
"[email protected]"
] | |
e8a8ecb58043ca609ec177e0f5e27fae7d5abada | fdbb74a95924e2677466614f6ab6e2bb13b2a95a | /third_party/python/Lib/http/client.py | 619f67f41b59fa45985383e152639c8e2ea7868d | [
"ISC",
"Python-2.0",
"GPL-1.0-or-later",
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-other-copyleft"
] | permissive | jart/cosmopolitan | fb11b5658939023977060a7c6c71a74093d9cb44 | 0d748ad58e1063dd1f8560f18a0c75293b9415b7 | refs/heads/master | 2023-09-06T09:17:29.303607 | 2023-09-02T03:49:13 | 2023-09-02T03:50:18 | 272,457,606 | 11,887 | 435 | ISC | 2023-09-14T17:47:58 | 2020-06-15T14:16:13 | C | UTF-8 | Python | false | false | 55,660 | py | r"""HTTP/1.1 client library
<intro stuff goes here>
<other stuff, too>
HTTPConnection goes through a number of "states", which define when a client
may legally make another request or fetch the response for a particular
request. This diagram details these state transitions:
(null)
│
│ HTTPConnection()
↓
Idle
│
│ putrequest()
↓
Request-started
│
│ ( putheader() )* endheaders()
↓
Request-sent
│└─────────────────────────────┐
│ │ getresponse() raises
│ response = getresponse() │ ConnectionError
↓ ↓
Unread-response Idle
[Response-headers-read]
│└────────────────────┐
│ │
│ response.read() │ putrequest()
↓ ↓
Idle Req-started-unread-response
┌───────┘│
│ │
response.read() │ │ ( putheader() )* endheaders()
↓ ↓
Request-started Req-sent-unread-response
│
│ response.read()
↓
Request-sent
This diagram presents the following rules:
-- a second request may not be started until {response-headers-read}
-- a response [object] cannot be retrieved until {request-sent}
-- there is no differentiation between an unread response body and a
partially read response body
Note: this enforcement is applied by the HTTPConnection class. The
HTTPResponse class does not enforce this state machine, which
implies sophisticated clients may accelerate the request/response
pipeline. Caution should be taken, though: accelerating the states
beyond the above pattern may imply knowledge of the server's
connection-close behavior for certain requests. For example, it
is impossible to tell whether the server will close the connection
UNTIL the response headers have been read; this means that further
requests cannot be placed into the pipeline until it is known that
the server will NOT be closing the connection.
Logical State __state __response
───────────── ─────── ──────────
Idle _CS_IDLE None
Request-started _CS_REQ_STARTED None
Request-sent _CS_REQ_SENT None
Unread-response _CS_IDLE <response_class>
Req-started-unread-response _CS_REQ_STARTED <response_class>
Req-sent-unread-response _CS_REQ_SENT <response_class>
"""
import email.parser
import email.message
import http
import io
import os
import re
import tls
import socket
import collections
from urllib.parse import urlsplit
from encodings import idna, iso8859_1
# HTTPMessage, parse_headers(), and the HTTP status code constants are
# intentionally omitted for simplicity
__all__ = ["HTTPResponse", "HTTPConnection", "HTTPSConnection",
"HTTPException", "NotConnected", "UnknownProtocol",
"UnknownTransferEncoding", "UnimplementedFileMode",
"IncompleteRead", "InvalidURL", "ImproperConnectionState",
"CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
"BadStatusLine", "LineTooLong", "RemoteDisconnected", "error",
"responses"]
HTTP_PORT = 80
HTTPS_PORT = 443
_UNKNOWN = 'UNKNOWN'
# connection states
_CS_IDLE = 'Idle'
_CS_REQ_STARTED = 'Request-started'
_CS_REQ_SENT = 'Request-sent'
# hack to maintain backwards compatibility
globals().update(http.HTTPStatus.__members__)
# another hack to maintain backwards compatibility
# Mapping status codes to official W3C names
responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()}
# maximal amount of data to read at one time in _safe_read
MAXAMOUNT = 1048576
# maximal line length when calling readline().
_MAXLINE = 65536
_MAXHEADERS = 100
# Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2)
#
# VCHAR = %x21-7E
# obs-text = %x80-FF
# header-field = field-name ":" OWS field-value OWS
# field-name = token
# field-value = *( field-content / obs-fold )
# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
# field-vchar = VCHAR / obs-text
#
# obs-fold = CRLF 1*( SP / HTAB )
# ; obsolete line folding
# ; see Section 3.2.4
# token = 1*tchar
#
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
# / DIGIT / ALPHA
# ; any VCHAR, except delimiters
#
# VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1
# the patterns for both name and value are more lenient than RFC
# definitions to allow for backwards compatibility
_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
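# Illustrative matches for the two patterns above (values are made up):
#
#   _is_legal_header_name(b'X-Custom-Header')        # truthy: a valid token
#   _is_legal_header_name(b'Bad:Name')                # None: ':' is forbidden
#   _is_illegal_header_value(b'ok value')             # None: nothing illegal
#   _is_illegal_header_value(b'evil\r\ninjected: 1')  # truthy: bare CR/LF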
# These characters are not allowed within HTTP URL paths.
# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
# Prevents CVE-2019-9740. Includes control characters such as \r\n.
# We don't restrict chars above \x7f as putrequest() limits us to ASCII.
_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
# Arguably only these _should_ allowed:
# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
# We are more lenient for assumed real world compatibility purposes.
# These characters are not allowed within HTTP method names
# to prevent http header injection.
_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
# We always set the Content-Length header for these methods because some
# servers will otherwise respond with a 411
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
def _encode(data, name='data'):
"""Call data.encode("latin-1") but show a better error message."""
try:
return data.encode("latin-1")
except UnicodeEncodeError as err:
raise UnicodeEncodeError(
err.encoding,
err.object,
err.start,
err.end,
"%s (%.20r) is not valid Latin-1. Use %s.encode('utf-8') "
"if you want to send it encoded in UTF-8." %
(name.title(), data[err.start:err.end], name)) from None
class HTTPMessage(email.message.Message):
# XXX The only usage of this method is in
# http.server.CGIHTTPRequestHandler. Maybe move the code there so
# that it doesn't need to be part of the public API. The API has
# never been defined so this could cause backwards compatibility
# issues.
def getallmatchingheaders(self, name):
"""Find all header lines matching a given header name.
Look through the list of headers and find all lines matching a given
header name (and their continuation lines). A list of the lines is
returned, without interpretation. If the header does not occur, an
empty list is returned. If the header occurs multiple times, all
occurrences are returned. Case is not important in the header name.
"""
name = name.lower() + ':'
n = len(name)
lst = []
hit = 0
for line in self.keys():
if line[:n].lower() == name:
hit = 1
elif not line[:1].isspace():
hit = 0
if hit:
lst.append(line)
return lst
def _read_headers(fp):
"""Reads potential header lines into a list from a file pointer.
Length of line is limited by _MAXLINE, and number of
headers is limited by _MAXHEADERS.
"""
headers = []
while True:
line = fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("header line")
headers.append(line)
if len(headers) > _MAXHEADERS:
raise HTTPException("got more than %d headers" % _MAXHEADERS)
if line in (b'\r\n', b'\n', b''):
break
return headers
def parse_headers(fp, _class=HTTPMessage):
"""Parses only RFC2822 headers from a file pointer.
email Parser wants to see strings rather than bytes.
But a TextIOWrapper around self.rfile would buffer too many bytes
from the stream, bytes which we later need to read as bytes.
So we read the correct bytes here, as bytes, for email Parser
to parse.
"""
headers = _read_headers(fp)
hstring = b''.join(headers).decode('iso-8859-1')
return email.parser.Parser(_class=_class).parsestr(hstring)
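# A minimal sketch of parse_headers on an in-memory stream (the header bytes
# are illustrative); the result is an email.message.Message-style mapping:
#
#   import io
#   fp = io.BytesIO(b'Host: example.com\r\nContent-Length: 0\r\n\r\n')
#   msg = parse_headers(fp)
#   msg['Host']   # 'example.com'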
class HTTPResponse(io.BufferedIOBase):
# See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.
# The bytes from the socket object are iso-8859-1 strings.
# See RFC 2616 sec 2.2 which notes an exception for MIME-encoded
# text following RFC 2047. The basic status line parsing only
# accepts iso-8859-1.
def __init__(self, sock, debuglevel=0, method=None, url=None):
# If the response includes a content-length header, we need to
# make sure that the client doesn't read more than the
# specified number of bytes. If it does, it will block until
# the server times out and closes the connection. This will
# happen if a self.fp.read() is done (without a size) whether
# self.fp is buffered or not. So, no self.fp.read() by
# clients unless they know what they are doing.
if type(sock) is tls.TLS:
self.fp = io.BufferedReader(socket.SocketIO(sock, "r"),
io.DEFAULT_BUFFER_SIZE)
else:
self.fp = sock.makefile("rb")
self.debuglevel = debuglevel
self._method = method
# The HTTPResponse object is returned via urllib. The clients
# of http and urllib expect different attributes for the
# headers. headers is used here and supports urllib. msg is
# provided as a backwards compatibility layer for http
# clients.
self.headers = self.msg = None
# from the Status-Line of the response
self.version = _UNKNOWN # HTTP-Version
self.status = _UNKNOWN # Status-Code
self.reason = _UNKNOWN # Reason-Phrase
self.chunked = _UNKNOWN # is "chunked" being used?
self.chunk_left = _UNKNOWN # bytes left to read in current chunk
self.length = _UNKNOWN # number of bytes left in response
self.will_close = _UNKNOWN # conn will close at end of response
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
raise RemoteDisconnected("Remote end closed connection without"
" response")
try:
version, status, reason = line.split(None, 2)
except ValueError:
try:
version, status = line.split(None, 1)
reason = ""
except ValueError:
# empty version will cause next test to fail.
version = ""
if not version.startswith("HTTP/"):
self._close_conn()
raise BadStatusLine(line)
# The status code is a three-digit number
try:
status = int(status)
if status < 100 or status > 999:
raise BadStatusLine(line)
except ValueError:
raise BadStatusLine(line)
return version, status, reason
def begin(self):
if self.headers is not None:
# we've already started reading the response
return
# read until we get a non-100 response
while True:
version, status, reason = self._read_status()
if status != CONTINUE:
break
# skip the header from the 100 response
skipped_headers = _read_headers(self.fp)
if self.debuglevel > 0:
print("headers:", skipped_headers)
del skipped_headers
self.code = self.status = status
self.reason = reason.strip()
if version in ("HTTP/1.0", "HTTP/0.9"):
# Some servers might still return "0.9", treat it as 1.0 anyway
self.version = 10
elif version.startswith("HTTP/1."):
self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
else:
raise UnknownProtocol(version)
self.headers = self.msg = parse_headers(self.fp)
if self.debuglevel > 0:
for hdr in self.headers:
print("header:", hdr + ":", self.headers.get(hdr))
# are we using the chunked-style of transfer encoding?
tr_enc = self.headers.get("transfer-encoding")
if tr_enc and tr_enc.lower() == "chunked":
self.chunked = True
self.chunk_left = None
else:
self.chunked = False
# will the connection close at the end of the response?
self.will_close = self._check_close()
# do we have a Content-Length?
# NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
self.length = None
length = self.headers.get("content-length")
if length and not self.chunked:
try:
self.length = int(length)
except ValueError:
self.length = None
else:
if self.length < 0: # ignore nonsensical negative lengths
self.length = None
else:
self.length = None
# does the body have a fixed length? (of zero)
if (status == NO_CONTENT or status == NOT_MODIFIED or
100 <= status < 200 or # 1xx codes
self._method == "HEAD"):
self.length = 0
# if the connection remains open, and we aren't using chunked, and
# a content-length was not provided, then assume that the connection
# WILL close.
if (not self.will_close and
not self.chunked and
self.length is None):
self.will_close = True
def _check_close(self):
conn = self.headers.get("connection")
if self.version == 11:
# An HTTP/1.1 proxy is assumed to stay open unless
# explicitly closed.
conn = self.headers.get("connection")
if conn and "close" in conn.lower():
return True
return False
# Some HTTP/1.0 implementations have support for persistent
# connections, using rules different than HTTP/1.1.
# For older HTTP, Keep-Alive indicates persistent connection.
if self.headers.get("keep-alive"):
return False
# At least Akamai returns a "Connection: Keep-Alive" header,
# which was supposed to be sent by the client.
if conn and "keep-alive" in conn.lower():
return False
# Proxy-Connection is a netscape hack.
pconn = self.headers.get("proxy-connection")
if pconn and "keep-alive" in pconn.lower():
return False
# otherwise, assume it will close
return True
def _close_conn(self):
fp = self.fp
self.fp = None
fp.close()
def close(self):
try:
super().close() # set "closed" flag
finally:
if self.fp:
self._close_conn()
# These implementations are for the benefit of io.BufferedReader.
# XXX This class should probably be revised to act more like
# the "raw stream" that BufferedReader expects.
def flush(self):
super().flush()
if self.fp:
self.fp.flush()
def readable(self):
"""Always returns True"""
return True
# End of "raw stream" methods
def isclosed(self):
"""True if the connection is closed."""
# NOTE: it is possible that we will not ever call self.close(). This
# case occurs when will_close is TRUE, length is None, and we
# read up to the last byte, but NOT past it.
#
# IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
# called, meaning self.isclosed() is meaningful.
return self.fp is None
def read(self, amt=None):
if self.fp is None:
return b""
if self._method == "HEAD":
self._close_conn()
return b""
if amt is not None:
# Amount is given, implement using readinto
b = bytearray(amt)
n = self.readinto(b)
return memoryview(b)[:n].tobytes()
else:
# Amount is not given (unbounded read) so we must check self.length
# and self.chunked
if self.chunked:
return self._readall_chunked()
if self.length is None:
s = self.fp.read()
else:
try:
s = self._safe_read(self.length)
except IncompleteRead:
self._close_conn()
raise
self.length = 0
self._close_conn() # we read everything
return s
def readinto(self, b):
"""Read up to len(b) bytes into bytearray b and return the number
of bytes read.
"""
if self.fp is None:
return 0
if self._method == "HEAD":
self._close_conn()
return 0
if self.chunked:
return self._readinto_chunked(b)
if self.length is not None:
if len(b) > self.length:
# clip the read to the "end of response"
b = memoryview(b)[0:self.length]
# we do not use _safe_read() here because this may be a .will_close
# connection, and the user is reading more bytes than will be provided
# (for example, reading in 1k chunks)
n = self.fp.readinto(b)
if not n and b:
# Ideally, we would raise IncompleteRead if the content-length
# wasn't satisfied, but it might break compatibility.
self._close_conn()
elif self.length is not None:
self.length -= n
if not self.length:
self._close_conn()
return n
def _read_next_chunk_size(self):
# Read the next chunk size from the file
line = self.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("chunk size")
i = line.find(b";")
if i >= 0:
line = line[:i] # strip chunk-extensions
try:
return int(line, 16)
except ValueError:
# close the connection as protocol synchronisation is
# probably lost
self._close_conn()
raise
def _read_and_discard_trailer(self):
# read and discard trailer up to the CRLF terminator
### note: we shouldn't have any trailers!
while True:
line = self.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("trailer line")
if not line:
# a vanishingly small number of sites EOF without
# sending the trailer
break
if line in (b'\r\n', b'\n', b''):
break
def _get_chunk_left(self):
# return self.chunk_left, reading a new chunk if necessary.
# chunk_left == 0: at the end of the current chunk, need to close it
# chunk_left == None: No current chunk, should read next.
# This function returns non-zero or None if the last chunk has
# been read.
chunk_left = self.chunk_left
if not chunk_left: # Can be 0 or None
if chunk_left is not None:
# We are at the end of chunk, discard chunk end
self._safe_read(2) # toss the CRLF at the end of the chunk
try:
chunk_left = self._read_next_chunk_size()
except ValueError:
raise IncompleteRead(b'')
if chunk_left == 0:
# last chunk: 1*("0") [ chunk-extension ] CRLF
self._read_and_discard_trailer()
# we read everything; close the "file"
self._close_conn()
chunk_left = None
self.chunk_left = chunk_left
return chunk_left
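    # For reference, a chunked body on the wire looks like the following
    # (sizes are hex); _get_chunk_left() walks this framing chunk by chunk:
    #
    #   b'5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n'
    #   # -> a 5-byte chunk, a 6-byte chunk, then the terminating 0 chunk
    #   # (optionally followed by trailers, which are read and discarded).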
def _readall_chunked(self):
assert self.chunked != _UNKNOWN
value = []
try:
while True:
chunk_left = self._get_chunk_left()
if chunk_left is None:
break
value.append(self._safe_read(chunk_left))
self.chunk_left = 0
return b''.join(value)
except IncompleteRead:
raise IncompleteRead(b''.join(value))
def _readinto_chunked(self, b):
assert self.chunked != _UNKNOWN
total_bytes = 0
mvb = memoryview(b)
try:
while True:
chunk_left = self._get_chunk_left()
if chunk_left is None:
return total_bytes
if len(mvb) <= chunk_left:
n = self._safe_readinto(mvb)
self.chunk_left = chunk_left - n
return total_bytes + n
temp_mvb = mvb[:chunk_left]
n = self._safe_readinto(temp_mvb)
mvb = mvb[n:]
total_bytes += n
self.chunk_left = 0
except IncompleteRead:
raise IncompleteRead(bytes(b[0:total_bytes]))
def _safe_read(self, amt):
"""Read the number of bytes requested, compensating for partial reads.
Normally, we have a blocking socket, but a read() can be interrupted
by a signal (resulting in a partial read).
Note that we cannot distinguish between EOF and an interrupt when zero
bytes have been read. IncompleteRead() will be raised in this
situation.
This function should be used when <amt> bytes "should" be present for
reading. If the bytes are truly not available (due to EOF), then the
IncompleteRead exception can be used to detect the problem.
"""
s = []
while amt > 0:
chunk = self.fp.read(min(amt, MAXAMOUNT))
if not chunk:
raise IncompleteRead(b''.join(s), amt)
s.append(chunk)
amt -= len(chunk)
return b"".join(s)
def _safe_readinto(self, b):
"""Same as _safe_read, but for reading into a buffer."""
total_bytes = 0
mvb = memoryview(b)
while total_bytes < len(b):
if MAXAMOUNT < len(mvb):
temp_mvb = mvb[0:MAXAMOUNT]
n = self.fp.readinto(temp_mvb)
else:
n = self.fp.readinto(mvb)
if not n:
raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
mvb = mvb[n:]
total_bytes += n
return total_bytes
def read1(self, n=-1):
"""Read with at most one underlying system call. If at least one
byte is buffered, return that instead.
"""
if self.fp is None or self._method == "HEAD":
return b""
if self.chunked:
return self._read1_chunked(n)
if self.length is not None and (n < 0 or n > self.length):
n = self.length
try:
result = self.fp.read1(n)
except ValueError:
if n >= 0:
raise
# some implementations, like BufferedReader, don't support -1
# Read an arbitrarily selected largeish chunk.
result = self.fp.read1(16*1024)
if not result and n:
self._close_conn()
elif self.length is not None:
self.length -= len(result)
return result
def peek(self, n=-1):
# Having this enables IOBase.readline() to read more than one
# byte at a time
if self.fp is None or self._method == "HEAD":
return b""
if self.chunked:
return self._peek_chunked(n)
return self.fp.peek(n)
def readline(self, limit=-1):
if self.fp is None or self._method == "HEAD":
return b""
if self.chunked:
# Fallback to IOBase readline which uses peek() and read()
return super().readline(limit)
if self.length is not None and (limit < 0 or limit > self.length):
limit = self.length
result = self.fp.readline(limit)
if not result and limit:
self._close_conn()
elif self.length is not None:
self.length -= len(result)
return result
def _read1_chunked(self, n):
# Strictly speaking, _get_chunk_left() may cause more than one read,
# but that is ok, since that is to satisfy the chunked protocol.
chunk_left = self._get_chunk_left()
if chunk_left is None or n == 0:
return b''
if not (0 <= n <= chunk_left):
n = chunk_left # if n is negative or larger than chunk_left
read = self.fp.read1(n)
self.chunk_left -= len(read)
if not read:
raise IncompleteRead(b"")
return read
def _peek_chunked(self, n):
# Strictly speaking, _get_chunk_left() may cause more than one read,
# but that is ok, since that is to satisfy the chunked protocol.
try:
chunk_left = self._get_chunk_left()
except IncompleteRead:
return b'' # peek doesn't worry about protocol
if chunk_left is None:
return b'' # eof
# peek is allowed to return more than requested. Just request the
# entire chunk, and truncate what we get.
return self.fp.peek(chunk_left)[:chunk_left]
def fileno(self):
return self.fp.fileno()
def getheader(self, name, default=None):
'''Returns the value of the header matching *name*.
If there are multiple matching headers, the values are
combined into a single string separated by commas and spaces.
If no matching header is found, returns *default* or None if
the *default* is not specified.
If the headers are unknown, raises http.client.ResponseNotReady.
'''
if self.headers is None:
raise ResponseNotReady()
headers = self.headers.get_all(name) or default
if isinstance(headers, str) or not hasattr(headers, '__iter__'):
return headers
else:
return ', '.join(headers)
def getheaders(self):
"""Return list of (header, value) tuples."""
if self.headers is None:
raise ResponseNotReady()
return list(self.headers.items())
# We override IOBase.__iter__ so that it doesn't check for closed-ness
def __iter__(self):
return self
# For compatibility with old-style urllib responses.
def info(self):
'''Returns an instance of the class mimetools.Message containing
meta-information associated with the URL.
When the method is HTTP, these headers are those returned by
the server at the head of the retrieved HTML page (including
Content-Length and Content-Type).
When the method is FTP, a Content-Length header will be
present if (as is now usual) the server passed back a file
length in response to the FTP retrieval request. A
Content-Type header will be present if the MIME type can be
guessed.
When the method is local-file, returned headers will include
a Date representing the file's last-modified time, a
Content-Length giving file size, and a Content-Type
containing a guess at the file's type. See also the
description of the mimetools module.
'''
return self.headers
def geturl(self):
'''Return the real URL of the page.
In some cases, the HTTP server redirects a client to another
URL. The urlopen() function handles this transparently, but in
some cases the caller needs to know which URL the client was
redirected to. The geturl() method can be used to get at this
redirected URL.
'''
return self.url
def getcode(self):
'''Return the HTTP status code that was sent with the response,
or None if the URL is not an HTTP URL.
'''
return self.status
class HTTPConnection:
_http_vsn = 11
_http_vsn_str = 'HTTP/1.1'
response_class = HTTPResponse
default_port = HTTP_PORT
auto_open = 1
debuglevel = 0
@staticmethod
def _is_textIO(stream):
"""Test whether a file-like object is a text or a binary stream.
"""
return isinstance(stream, io.TextIOBase)
@staticmethod
def _get_content_length(body, method):
"""Get the content-length based on the body.
If the body is None, we set Content-Length: 0 for methods that expect
a body (RFC 7230, Section 3.3.2). We also set the Content-Length for
any method if the body is a str or bytes-like object and not a file.
"""
if body is None:
# do an explicit check for not None here to distinguish
# between unset and set but empty
if method.upper() in _METHODS_EXPECTING_BODY:
return 0
else:
return None
if hasattr(body, 'read'):
# file-like object.
return None
try:
# does it implement the buffer protocol (bytes, bytearray, array)?
mv = memoryview(body)
return mv.nbytes
except TypeError:
pass
if isinstance(body, str):
return len(body)
return None
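    # Hedged examples of the rules above (the file path is illustrative):
    #
    #   HTTPConnection._get_content_length(None, 'POST')   # 0: body expected
    #   HTTPConnection._get_content_length(None, 'GET')    # None
    #   HTTPConnection._get_content_length(b'abc', 'PUT')  # 3: buffer protocol
    #   HTTPConnection._get_content_length(open('f', 'rb'), 'PUT')  # None: file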
def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
self.timeout = timeout
self.source_address = source_address
self.sock = None
self._buffer = []
self.__response = None
self.__state = _CS_IDLE
self._method = None
self._tunnel_host = None
self._tunnel_port = None
self._tunnel_headers = {}
(self.host, self.port) = self._get_hostport(host, port)
self._validate_host(self.host)
# This is stored as an instance variable to allow unit
# tests to replace it with a suitable mockup
self._create_connection = socket.create_connection
def set_tunnel(self, host, port=None, headers=None):
"""Set up host and port for HTTP CONNECT tunnelling.
In a connection that uses HTTP CONNECT tunneling, the host passed to the
constructor is used as a proxy server that relays all communication to
        the endpoint passed to `set_tunnel`. This is done by sending an HTTP
CONNECT request to the proxy server when the connection is established.
        This method must be called before the HTTP connection has been
established.
The headers argument should be a mapping of extra HTTP headers to send
with the CONNECT request.
"""
if self.sock:
raise RuntimeError("Can't set up tunnel for established connection")
self._tunnel_host, self._tunnel_port = self._get_hostport(host, port)
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def _get_hostport(self, host, port):
if port is None:
i = host.rfind(':')
j = host.rfind(']') # ipv6 addresses have [...]
if i > j:
try:
port = int(host[i+1:])
except ValueError:
if host[i+1:] == "": # http://foo.com:/ == http://foo.com/
port = self.default_port
else:
raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
host = host[:i]
else:
port = self.default_port
if host and host[0] == '[' and host[-1] == ']':
host = host[1:-1]
return (host, port)
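    # Illustrative results (default_port is 80 for a plain HTTPConnection):
    #
    #   conn._get_hostport('www.example.com:8080', None)  # ('www.example.com', 8080)
    #   conn._get_hostport('www.example.com', None)       # ('www.example.com', 80)
    #   conn._get_hostport('[::1]:8000', None)            # ('::1', 8000)
    #   conn._get_hostport('www.example.com:', None)      # ('www.example.com', 80)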
def set_debuglevel(self, level):
self.debuglevel = level
def _tunnel(self):
connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self._tunnel_host,
self._tunnel_port)
connect_bytes = connect_str.encode("ascii")
self.send(connect_bytes)
for header, value in self._tunnel_headers.items():
header_str = "%s: %s\r\n" % (header, value)
header_bytes = header_str.encode("latin-1")
self.send(header_bytes)
self.send(b'\r\n')
response = self.response_class(self.sock, method=self._method)
(version, code, message) = response._read_status()
if code != http.HTTPStatus.OK:
self.close()
raise OSError("Tunnel connection failed: %d %s" % (code,
message.strip()))
while True:
line = response.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("header line")
if not line:
# for sites which EOF without sending a trailer
break
if line in (b'\r\n', b'\n', b''):
break
if self.debuglevel > 0:
print('header:', line.decode())
def connect(self):
"""Connect to the host and port specified in __init__."""
self.sock = self._create_connection(
(self.host,self.port), self.timeout, self.source_address)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if self._tunnel_host:
self._tunnel()
def close(self):
"""Close the connection to the HTTP server."""
self.__state = _CS_IDLE
try:
sock = self.sock
if sock:
self.sock = None
try:
sock.close() # close it manually... there may be other refs
except OSError:
pass # TODO(jart): deal with https fd ownership
finally:
response = self.__response
if response:
self.__response = None
response.close()
def send(self, data):
"""Send `data' to the server.
``data`` can be a string object, a bytes object, an array object, a
file-like object that supports a .read() method, or an iterable object.
"""
if self.sock is None:
if self.auto_open:
self.connect()
else:
raise NotConnected()
if self.debuglevel > 0:
print("send:", repr(data))
blocksize = 8192
if hasattr(data, "read") :
if self.debuglevel > 0:
print("sendIng a read()able")
encode = self._is_textIO(data)
if encode and self.debuglevel > 0:
print("encoding file using iso-8859-1")
while 1:
datablock = data.read(blocksize)
if not datablock:
break
if encode:
datablock = datablock.encode("iso-8859-1")
self.sock.sendall(datablock)
return
try:
self.sock.sendall(data)
except TypeError:
if isinstance(data, collections.Iterable):
for d in data:
self.sock.sendall(d)
else:
raise TypeError("data should be a bytes-like object "
"or an iterable, got %r" % type(data))
def _output(self, s):
"""Add a line of output to the current request buffer.
Assumes that the line does *not* end with \\r\\n.
"""
self._buffer.append(s)
def _read_readable(self, readable):
blocksize = 8192
if self.debuglevel > 0:
print("sendIng a read()able")
encode = self._is_textIO(readable)
if encode and self.debuglevel > 0:
print("encoding file using iso-8859-1")
while True:
datablock = readable.read(blocksize)
if not datablock:
break
if encode:
datablock = datablock.encode("iso-8859-1")
yield datablock
def _send_output(self, message_body=None, encode_chunked=False):
"""Send the currently buffered request and clear the buffer.
Appends an extra \\r\\n to the buffer.
A message_body may be specified, to be appended to the request.
"""
self._buffer.extend((b"", b""))
msg = b"\r\n".join(self._buffer)
del self._buffer[:]
self.send(msg)
if message_body is not None:
# create a consistent interface to message_body
if hasattr(message_body, 'read'):
# Let file-like take precedence over byte-like. This
# is needed to allow the current position of mmap'ed
# files to be taken into account.
chunks = self._read_readable(message_body)
else:
try:
# this is solely to check to see if message_body
# implements the buffer API. it /would/ be easier
# to capture if PyObject_CheckBuffer was exposed
# to Python.
memoryview(message_body)
except TypeError:
try:
chunks = iter(message_body)
except TypeError:
raise TypeError("message_body should be a bytes-like "
"object or an iterable, got %r"
% type(message_body))
else:
# the object implements the buffer interface and
# can be passed directly into socket methods
chunks = (message_body,)
for chunk in chunks:
if not chunk:
if self.debuglevel > 0:
print('Zero length chunk ignored')
continue
if encode_chunked and self._http_vsn == 11:
# chunked encoding
chunk = f'{len(chunk):X}\r\n'.encode('ascii') + chunk \
+ b'\r\n'
self.send(chunk)
if encode_chunked and self._http_vsn == 11:
# end chunked transfer
self.send(b'0\r\n\r\n')
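# Wire-format sketch: for chunks b"hello" and b"world", the framing produced
# above is
#
#   5\r\nhello\r\n5\r\nworld\r\n0\r\n\r\n
#
# i.e. each chunk is prefixed with its hex length and the transfer ends with a
# zero-length chunk.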
def putrequest(self, method, url, skip_host=False,
skip_accept_encoding=False):
"""Send a request to the server.
`method' specifies an HTTP request method, e.g. 'GET'.
`url' specifies the object being requested, e.g. '/index.html'.
`skip_host' if True does not add automatically a 'Host:' header
`skip_accept_encoding' if True does not add automatically an
'Accept-Encoding:' header
"""
# if a prior response has been completed, then forget about it.
if self.__response and self.__response.isclosed():
self.__response = None
# in certain cases, we cannot issue another request on this connection.
# this occurs when:
# 1) we are in the process of sending a request. (_CS_REQ_STARTED)
# 2) a response to a previous request has signalled that it is going
# to close the connection upon completion.
# 3) the headers for the previous response have not been read, thus
# we cannot determine whether point (2) is true. (_CS_REQ_SENT)
#
# if there is no prior response, then we can request at will.
#
# if point (2) is true, then we will have passed the socket to the
# response (effectively meaning, "there is no prior response"), and
# will open a new one when a new request is made.
#
# Note: if a prior response exists, then we *can* start a new request.
# We are not allowed to begin fetching the response to this new
# request, however, until that prior response is complete.
#
if self.__state == _CS_IDLE:
self.__state = _CS_REQ_STARTED
else:
raise CannotSendRequest(self.__state)
self._validate_method(method)
# Save the method for use later in the response phase
self._method = method
url = url or '/'
self._validate_path(url)
request = '%s %s %s' % (method, url, self._http_vsn_str)
self._output(self._encode_request(request))
if self._http_vsn == 11:
# Issue some standard headers for better HTTP/1.1 compliance
if not skip_host:
# this header is issued *only* for HTTP/1.1
# connections. more specifically, this means it is
# only issued when the client uses the new
# HTTPConnection() class. backwards-compat clients
# will be using HTTP/1.0 and those clients may be
# issuing this header themselves. we should NOT issue
# it twice; some web servers (such as Apache) barf
# when they see two Host: headers
# If we need a non-standard port, include it in the
# header. If the request is going through a proxy,
# use the host of the actual URL, not the host of the
# proxy.
netloc = ''
if url.startswith('http'):
nil, netloc, nil, nil, nil = urlsplit(url)
if netloc:
try:
netloc_enc = netloc.encode("ascii")
except UnicodeEncodeError:
netloc_enc = netloc.encode("idna")
self.putheader('Host', netloc_enc)
else:
if self._tunnel_host:
host = self._tunnel_host
port = self._tunnel_port
else:
host = self.host
port = self.port
try:
host_enc = host.encode("ascii")
except UnicodeEncodeError:
host_enc = host.encode("idna")
# As per RFC 2732, an IPv6 address should be wrapped in []
# when used as a Host header
if host.find(':') >= 0:
host_enc = b'[' + host_enc + b']'
if port == self.default_port:
self.putheader('Host', host_enc)
else:
host_enc = host_enc.decode("ascii")
self.putheader('Host', "%s:%s" % (host_enc, port))
# note: we are assuming that clients will not attempt to set these
# headers since *this* library must deal with the
# consequences. this also means that when the supporting
# libraries are updated to recognize other forms, then this
# code should be changed (removed or updated).
# we only want a Content-Encoding of "identity" since we don't
# support encodings such as x-gzip or x-deflate.
if not skip_accept_encoding:
self.putheader('Accept-Encoding', 'identity')
# we can accept "chunked" Transfer-Encodings, but no others
# NOTE: no TE header implies *only* "chunked"
#self.putheader('TE', 'chunked')
# if TE is supplied in the header, then it must appear in a
# Connection header.
#self.putheader('Connection', 'TE')
else:
# For HTTP/1.0, the server will assume "not chunked"
pass
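# Usage sketch (hedged): the low-level interface that putrequest() begins.
# Host, path and body are illustrative.
#
#   conn = HTTPConnection("www.example.com")
#   conn.putrequest("POST", "/upload")       # _CS_IDLE -> _CS_REQ_STARTED
#   conn.putheader("Content-Length", "5")
#   conn.endheaders(b"hello")                # -> _CS_REQ_SENT
#   resp = conn.getresponse()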
def _encode_request(self, request):
# ASCII also helps prevent CVE-2019-9740.
return request.encode('ascii')
def _validate_method(self, method):
"""Validate a method name for putrequest."""
# prevent http header injection
match = _contains_disallowed_method_pchar_re.search(method)
if match:
raise ValueError(
f"method can't contain control characters. {method!r} "
f"(found at least {match.group()!r})")
def _validate_path(self, url):
"""Validate a url for putrequest."""
# Prevent CVE-2019-9740.
match = _contains_disallowed_url_pchar_re.search(url)
if match:
raise InvalidURL(f"URL can't contain control characters. {url!r} "
f"(found at least {match.group()!r})")
def _validate_host(self, host):
"""Validate a host so it doesn't contain control characters."""
# Prevent CVE-2019-18348.
match = _contains_disallowed_url_pchar_re.search(host)
if match:
raise InvalidURL(f"URL can't contain control characters. {host!r} "
f"(found at least {match.group()!r})")
def putheader(self, header, *values):
"""Send a request header line to the server.
For example: h.putheader('Accept', 'text/html')
"""
if self.__state != _CS_REQ_STARTED:
raise CannotSendHeader()
if hasattr(header, 'encode'):
header = header.encode('ascii')
if not _is_legal_header_name(header):
raise ValueError('Invalid header name %r' % (header,))
values = list(values)
for i, one_value in enumerate(values):
if hasattr(one_value, 'encode'):
values[i] = one_value.encode('latin-1')
elif isinstance(one_value, int):
values[i] = str(one_value).encode('ascii')
if _is_illegal_header_value(values[i]):
raise ValueError('Invalid header value %r' % (values[i],))
value = b'\r\n\t'.join(values)
header = header + b': ' + value
self._output(header)
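# Example: multiple header values are folded into a single line joined with
# CRLF + tab, so
#
#   conn.putheader("X-Trace", "a", "b")
#
# buffers b"X-Trace: a\r\n\tb".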
def endheaders(self, message_body=None, *, encode_chunked=False):
"""Indicate that the last header line has been sent to the server.
This method sends the request to the server. The optional message_body
argument can be used to pass a message body associated with the
request.
"""
if self.__state == _CS_REQ_STARTED:
self.__state = _CS_REQ_SENT
else:
raise CannotSendHeader()
self._send_output(message_body, encode_chunked=encode_chunked)
def request(self, method, url, body=None, headers={}, *,
encode_chunked=False):
"""Send a complete request to the server."""
self._send_request(method, url, body, headers, encode_chunked)
def _send_request(self, method, url, body, headers, encode_chunked):
# Honor explicitly requested Host: and Accept-Encoding: headers.
header_names = frozenset(k.lower() for k in headers)
skips = {}
if 'host' in header_names:
skips['skip_host'] = 1
if 'accept-encoding' in header_names:
skips['skip_accept_encoding'] = 1
self.putrequest(method, url, **skips)
# chunked encoding will happen if HTTP/1.1 is used and either
# the caller passes encode_chunked=True or the following
# conditions hold:
# 1. content-length has not been explicitly set
# 2. the body is a file or iterable, but not a str or bytes-like
# 3. Transfer-Encoding has NOT been explicitly set by the caller
if 'content-length' not in header_names:
# only chunk body if not explicitly set for backwards
# compatibility, assuming the client code is already handling the
# chunking
if 'transfer-encoding' not in header_names:
# if content-length cannot be automatically determined, fall
# back to chunked encoding
encode_chunked = False
content_length = self._get_content_length(body, method)
if content_length is None:
if body is not None:
if self.debuglevel > 0:
print('Unable to determine size of %r' % body)
encode_chunked = True
self.putheader('Transfer-Encoding', 'chunked')
else:
self.putheader('Content-Length', str(content_length))
else:
encode_chunked = False
for hdr, value in headers.items():
self.putheader(hdr, value)
if isinstance(body, str):
# RFC 2616 Section 3.7.1 says that text types have a
# default charset of iso-8859-1.
body = _encode(body, 'body')
self.endheaders(body, encode_chunked=encode_chunked)
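# Usage sketch (hedged): with no explicit Content-Length and a file-like body
# whose size cannot be determined, _send_request() above falls back to chunked
# encoding on HTTP/1.1. The file name is illustrative.
#
#   conn.request("PUT", "/data", body=open("data.bin", "rb"))
#   # -> "Transfer-Encoding: chunked", body streamed via _read_readable()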
def getresponse(self):
"""Get the response from the server.
If the HTTPConnection is in the correct state, returns an
instance of HTTPResponse or of whatever object is returned by
the response_class variable.
If a request has not been sent or if a previous response has
not been handled, ResponseNotReady is raised. If the HTTP
response indicates that the connection should be closed, then
it will be closed before the response is returned. When the
connection is closed, the underlying socket is closed.
"""
# if a prior response has been completed, then forget about it.
if self.__response and self.__response.isclosed():
self.__response = None
# if a prior response exists, then it must be completed (otherwise, we
# cannot read this response's header to determine the connection-close
# behavior)
#
# note: if a prior response existed, but was connection-close, then the
# socket and response were made independent of this HTTPConnection
# object since a new request requires that we open a whole new
# connection
#
# this means the prior response had one of two states:
# 1) will_close: this connection was reset and the prior socket and
# response operate independently
# 2) persistent: the response was retained and we await its
# isclosed() status to become true.
#
if self.__state != _CS_REQ_SENT or self.__response:
raise ResponseNotReady(self.__state)
if self.debuglevel > 0:
response = self.response_class(self.sock, self.debuglevel,
method=self._method)
else:
response = self.response_class(self.sock, method=self._method)
try:
try:
response.begin()
except ConnectionError:
self.close()
raise
assert response.will_close != _UNKNOWN
self.__state = _CS_IDLE
if response.will_close:
# this effectively passes the connection to the response
self.close()
else:
# remember this, so we can tell when it is complete
self.__response = response
return response
except:
response.close()
raise
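# Usage sketch: a complete round trip with the class as defined above.
#
#   conn = HTTPConnection("www.example.com", 80, timeout=10)
#   conn.request("GET", "/")
#   resp = conn.getresponse()
#   print(resp.status, resp.reason)
#   data = resp.read()    # the response must be consumed before the next request
#   conn.close()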
class HTTPSConnection(HTTPConnection):
"This class allows communication via SSL."
default_port = HTTPS_PORT
def __init__(self, host, port=None, key_file=None, cert_file=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None, *, context=None,
check_hostname=None):
super(HTTPSConnection, self).__init__(host, port, timeout,
source_address)
self._check_hostname = check_hostname
if context is not None:
raise ValueError('context parameter not supported yet')
if key_file is not None:
raise ValueError('key_file parameter not supported yet')
if cert_file is not None:
raise ValueError('cert_file parameter not supported yet')
def connect(self):
"Connect to a host on a given (SSL) port."
super().connect()
self.sock = tls.newclient(self.sock.fileno(), self.host, self.sock)
self.sock.handshake()
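# Usage sketch (hedged): HTTPS works like HTTPConnection; note that this
# variant wraps the socket with a `tls` module instead of the stdlib ssl
# module, which is why context/key_file/cert_file are rejected in __init__.
#
#   conn = HTTPSConnection("www.example.com", 443)
#   conn.request("GET", "/")
#   resp = conn.getresponse()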
class HTTPException(Exception):
# Subclasses that define an __init__ must call Exception.__init__
# or define self.args. Otherwise, str() will fail.
pass
class NotConnected(HTTPException):
pass
class InvalidURL(HTTPException):
pass
class UnknownProtocol(HTTPException):
def __init__(self, version):
self.args = version,
self.version = version
class UnknownTransferEncoding(HTTPException):
pass
class UnimplementedFileMode(HTTPException):
pass
class IncompleteRead(HTTPException):
def __init__(self, partial, expected=None):
self.args = partial,
self.partial = partial
self.expected = expected
def __repr__(self):
if self.expected is not None:
e = ', %i more expected' % self.expected
else:
e = ''
return '%s(%i bytes read%s)' % (self.__class__.__name__,
len(self.partial), e)
def __str__(self):
return repr(self)
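# Usage sketch: recovering the partial payload when a read is cut short.
#
#   try:
#       data = resp.read()
#   except IncompleteRead as e:
#       data = e.partial   # bytes received before the connection dropped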
class ImproperConnectionState(HTTPException):
pass
class CannotSendRequest(ImproperConnectionState):
pass
class CannotSendHeader(ImproperConnectionState):
pass
class ResponseNotReady(ImproperConnectionState):
pass
class BadStatusLine(HTTPException):
def __init__(self, line):
if not line:
line = repr(line)
self.args = line,
self.line = line
class LineTooLong(HTTPException):
def __init__(self, line_type):
HTTPException.__init__(self, "got more than %d bytes when reading %s"
% (_MAXLINE, line_type))
class RemoteDisconnected(ConnectionResetError, BadStatusLine):
def __init__(self, *pos, **kw):
BadStatusLine.__init__(self, "")
ConnectionResetError.__init__(self, *pos, **kw)
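# Usage sketch (hedged): RemoteDisconnected subclasses both
# ConnectionResetError and BadStatusLine, so either except clause catches it.
# A retry is only safe for idempotent requests.
#
#   try:
#       resp = conn.getresponse()
#   except RemoteDisconnected:
#       conn.close()               # server dropped an idle keep-alive connection
#       conn.request("GET", "/")   # reconnects via auto_open
#       resp = conn.getresponse()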
# for backwards compatibility
error = HTTPException
# object_oriented_programming/polymorphism/function_overloading.py (unnievarghese/originalpythonluminar)
def add():
    print('inside no arg add method')

def add(num):
    print('inside 1 arg add method')

def add(num1, num2):
    print('inside 2 arg add method')

# a def outside a class is a plain function
# function overloading is the same idea as method overloading, but without a class:
# the same function name defined with different numbers of arguments
print(add(1, 2))  # works: the 2-argument add was defined last, so it replaced
                  # the earlier definitions
print(add(1))     # raises TypeError: only the 2-argument add still exists,
                  # because Python keeps only the most recent definition of a name
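
# A sketch of how overloading-style dispatch is usually emulated in Python;
# add_flexible is an illustrative name, not part of the original exercise.
# One function with default arguments covers all three call shapes.
def add_flexible(num1=None, num2=None):
    if num1 is None:
        print('inside no arg add method')
    elif num2 is None:
        print('inside 1 arg add method')
    else:
        print('inside 2 arg add method')

add_flexible()       # no arg
add_flexible(1)      # 1 arg
add_flexible(1, 2)   # 2 args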
"[email protected]"
] | |
be86e46364d706fb41a1344fe6cdb49253da255f | 3f345ac4cad6dc931260ab40c40d0977ba51db77 | /src/__init__.py | 24b34714951bc77838edb3c7468a1acd0abc9a0b | [] | no_license | fran-jo/EngineME | f586831f55942320a0dc07dbf70a409c2fc475d5 | 69555183d5b8a3bc3c4a0c406da2a58b2f9fcb70 | refs/heads/master | 2020-03-23T21:03:39.305112 | 2019-02-09T22:03:09 | 2019-02-09T22:03:09 | 142,078,920 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | from eme.logiclayer.engine import enginedym
from eme.logiclayer.engine import engineomc
from eme.logiclayer.command import CommandOMC
# from engine.engineJModelica import EngineJM
"[email protected]"
] | |
96e4c704ecf4e1e33f84c8abce3e5307dfcb01bc | 33294c238bd5c6ad0cb69d7b6d6922a54b1f7d95 | /src/wrf/g_omega.py | f2f70ba8836d6204a9722f15fdb14897dc289f60 | [
"Apache-2.0"
] | permissive | NCAR/wrf-python | a3b81aa0de3c7dd8b20d390bd949e3f4e3100bed | 79dda8329dd814aaba44cddf62cd12db0f5e2e97 | refs/heads/develop | 2023-06-30T03:14:14.380762 | 2023-06-16T22:26:09 | 2023-06-16T22:26:09 | 59,517,733 | 384 | 155 | Apache-2.0 | 2023-06-23T19:43:18 | 2016-05-23T20:55:40 | Python | UTF-8 | Python | false | false | 3,272 | py | from __future__ import (absolute_import, division, print_function)
from .constants import Constants
from .destag import destagger
from .extension import _omega, _tk
from .util import extract_vars
from .metadecorators import copy_and_set_metadata
@copy_and_set_metadata(copy_varname="T", name="omega",
description="omega",
units="Pa s-1")
def get_omega(wrfin, timeidx=0, method="cat", squeeze=True, cache=None,
meta=True, _key=None):
"""Return Omega.
This function extracts the necessary variables from the NetCDF file
object in order to perform the calculation.
Args:
wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
iterable): WRF-ARW NetCDF
data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
or an iterable sequence of the aforementioned types.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. The default is 0.
method (:obj:`str`, optional): The aggregation method to use for
sequences. Must be either 'cat' or 'join'.
'cat' combines the data along the Time dimension.
'join' creates a new dimension for the file index.
The default is 'cat'.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
cache (:obj:`dict`, optional): A dictionary of (varname, ndarray)
that can be used to supply pre-extracted NetCDF variables to the
computational routines. It is primarily used for internal
purposes, but can also be used to improve performance by
eliminating the need to repeatedly extract the same variables
used in multiple diagnostics calculations, particularly when using
large sequences of files.
Default is None.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
_key (:obj:`int`, optional): A caching key. This is used for internal
purposes only. Default is None.
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: Omega.
If xarray is
enabled and the *meta* parameter is True, then the result will be a
:class:`xarray.DataArray` object. Otherwise, the result will be a
:class:`numpy.ndarray` object with no metadata.
"""
varnames = ("T", "P", "W", "PB", "QVAPOR")
ncvars = extract_vars(wrfin, timeidx, varnames, method, squeeze, cache,
meta=False, _key=_key)
t = ncvars["T"]
p = ncvars["P"]
w = ncvars["W"]
pb = ncvars["PB"]
qv = ncvars["QVAPOR"]
wa = destagger(w, -3)
full_t = t + Constants.T_BASE
full_p = p + pb
tk = _tk(full_p, full_t)
omega = _omega(qv, tk, wa, full_p)
return omega
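# Usage sketch (hedged): the file name below is illustrative; any WRF-ARW
# output readable by netCDF4 works.
#
#   from netCDF4 import Dataset
#   wrfin = Dataset("wrfout_d01_2010-06-13_21:00:00")
#   omega = get_omega(wrfin, timeidx=0)   # xarray.DataArray, units Pa s-1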