blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d02edfa3c34f2b0e8cdf7fcee43f0673d659cfa9 | 72d6b3ab3fc2c7014967a156de082d1c617cbf0f | /网优日常/制作手机型号数据库/制作用户手机型号数据库.py | 716e6b00a29239dcec0f36eca0ff7a9880dc6903 | [] | no_license | fengmingshan/python | 19a1732591ad061a8291c7c84e6f00200c106f38 | b35dbad091c9feb47d1f0edd82e568c066f3c6e9 | refs/heads/master | 2021-06-03T08:35:50.019745 | 2021-01-19T15:12:01 | 2021-01-19T15:12:01 | 117,310,092 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | # -*- coding: utf-8 -*-
# @Author: Administrator
# @Date: 2019-09-07 11:17:31
# @Last Modified by: Administrator
# @Last Modified time: 2019-09-07 16:10:16
import pandas as pd
import os
data_path = 'D:/2019年工作/2019年8月4G网络扩频方案/诺基亚大数据平台/'
user_files = ['qujing_rmk1_temp_20190903.csv','qujing_rmk1_temp_20190808.csv']
os.chdir(data_path)
df_uesr = pd.DataFrame()
for file in user_files:
reader = pd.read_csv(file, engine = 'python',encoding='utf-8',chunksize = 100000)
for df_tmp in reader:
df_uesr = df_uesr.append(df_tmp)
df_uesr = df_uesr[['rmk1','brand','product']]
df_uesr = df_uesr[(~df_uesr['rmk1'].isnull())&(~df_uesr['brand'].isnull())]
df_uesr.drop_duplicates(['rmk1','brand','product'],keep='first',inplace=True)
df_uesr.reset_index(inplace = True)
df_uesr['是否支持800M'] = ''
df_uesr['备注(芯片)'] = ''
df_uesr['rmk1'] = df_uesr['rmk1'].astype(int)
df_uesr.rename(columns = {'rmk1':'号码',
'brand':'厂家','product':'型号'},inplace=True)
with open('曲靖手机型号库.csv','w') as writer:
df_uesr.to_csv(writer,index=False) | [
"[email protected]"
] | |
10a0372584f4d22c91b7131dc54958785772dded | df7b40e95718ac0f6071a0ba571b42efc81cf6de | /configs/dmnet/dmnet_r50-d8_512x512_80k_ade20k.py | 74f6d6a85a06e96580a3c8d5843f660c85bca5ad | [
"Apache-2.0"
] | permissive | shinianzhihou/ChangeDetection | 87fa2c498248e6124aeefb8f0ee8154bda36deee | 354e71234bef38b6e142b6ba02f23db958582844 | refs/heads/master | 2023-01-23T20:42:31.017006 | 2023-01-09T11:37:24 | 2023-01-09T11:37:24 | 218,001,748 | 162 | 29 | Apache-2.0 | 2022-11-03T04:11:00 | 2019-10-28T08:41:54 | Python | UTF-8 | Python | false | false | 250 | py | _base_ = [
'../_base_/models/dmnet_r50-d8.py', '../_base_/datasets/ade20k.py',
'../_base_/default_runtime.py', '../_base_/schedules/schedule_80k.py'
]
model = dict(
decode_head=dict(num_classes=150), auxiliary_head=dict(num_classes=150))
| [
"[email protected]"
] | |
326899684aa667bc3ef82d30a16914eb4ded3f0d | 89b3f158659080efab8854b9f086ee62f06abc7d | /example.py | f95f9fc4b847a9429432bcbc8a1aaba5dd1d3706 | [] | no_license | rgerkin/pystan-sklearn | 8ff3d7ee8450fe58b2d6a2e5ae3076daa8d16477 | 5c5cffe5389abb58fa85d0a47bd4760128b19d8a | refs/heads/master | 2020-04-05T23:40:37.547640 | 2017-08-06T22:27:25 | 2017-08-06T22:27:25 | 29,157,427 | 37 | 4 | null | null | null | null | UTF-8 | Python | false | false | 3,655 | py | import numpy as np
from scipy.stats import norm
from sklearn.model_selection import ShuffleSplit,GridSearchCV
from pystan_sklearn import StanEstimator
#############################################################
# All of this from the eight schools example.
schools_code = """
data {
int<lower=0> J; // number of schools
real y[J]; // estimated treatment effects
real<lower=0> sigma[J]; // s.e. of effect estimates
}
parameters {
real mu;
real<lower=0> tau;
real eta[J];
}
transformed parameters {
real theta[J];
for (j in 1:J)
theta[j] = mu + tau * eta[j];
}
model {
eta ~ normal(0, 1);
y ~ normal(theta, sigma);
}
"""
schools_dat = {'J': 8,
'y': [28, 8, -3, 7, -1, 1, 18, 12],
'sigma': [15, 10, 16, 11, 9, 11, 10, 18]}
#############################################################
# First we have to make an estimator specific to our model.
# For now, I don't have a good way of automatically implementing this
# in a general way based on the model code.
class EightSchoolsEstimator(StanEstimator):
# Implement a make_data method for the estimator.
# This tells the sklearn estimator what things to pass along
# as data to the Stan model.
# This is trivial here but can be more complex for larger models.
def make_data(self,search_data=None):
data = schools_dat
if search_data:
data.update({key:value[0] for key,value in search_data.items()})
return data
# Implement a predict_ method for the estimator.
# This tells the sklearn estimator how to make a prediction for one sample.
# This is based on the prediction for the mean theta above.
def predict_(self,X,j):
theta_j = self.mu + self.tau * self.eta[j];
return (theta_j,self.sigma[j])
# Implement a score_ method for the estimator.
# This tells the sklearn estimator how to score one observed sample against
# the prediction from the model.
# It is based on the fitted values of theta and sigma.
def score_(self,prediction,y):
likelihoods = np.zeros(len(y))
for j,(theta_j,sigma_j) in enumerate(prediction):
likelihoods[j] = norm.pdf(y[j],theta_j,sigma_j)
return np.log(likelihoods).sum()
# Initialize StanEstimator instance.
estimator = EightSchoolsEstimator()
# Compile the model code.
estimator.set_model(schools_code)
# Search over these parameter values.
search_data = {'mu':[0.3,1.0,3.0]}
# Create a data dictionary for use with the estimator.
# Note that this 'data' means different things in sklearn and Stan.
data = estimator.make_data(search_data=search_data)
# Set the data (set estimator attributes).
estimator.set_data(data)
# Set the y data.
# Use the observed effect from the Stan code here (e.g. "y").
y = data['y']
# Set the X data, i.e. the covariates.
# In this example there is no X data so we just use an array of ones.
X = np.ones((len(y),1))
#vstack((data['subject_ids'],data['test_ids'])).transpose()
# Fraction of data held out for testing.
test_size = 2.0/len(y)
# A cross-validation class from sklearn.
# Use the sample size variable from the Stan code here (e.g. "J").
cv = ShuffleSplit(n_splits=10, test_size=test_size)
# A grid search class over parameters from sklearn.
grid = GridSearchCV(estimator, search_data, cv=cv)
# Fit the model over the parameter grid.
grid.fit(X,y)
# Print the parameter values with the best scores (best predictive accuracy).
print(grid.best_params_)
| [
"[email protected]"
] | |
a0cbf85988f3c5372ea17c52a6e4c13dae5c4882 | c0af53ffc1a030ffe228f5ab8e2833fdcf2fadee | /Decompiler/p/Cassius/Cassius.py | 41198748999469e4ab291d4dc13966fa1391e041 | [
"MIT"
] | permissive | GeofrontTeam/EDDecompiler | ab7f2f35e36c5197cee70bb38b0ab6afa7dd297b | 5017ec026ff7f96d1b22094e5fcd69821f176f04 | refs/heads/master | 2023-05-11T04:43:56.682639 | 2021-05-31T23:27:48 | 2021-05-31T23:27:48 | 306,936,019 | 6 | 1 | MIT | 2021-05-24T22:12:34 | 2020-10-24T17:29:04 | Python | UTF-8 | Python | false | false | 70 | py | from ActionHelper import *
from Voice import *
from ChrFile import * | [
"[email protected]"
] | |
f16542003c0cf3db0d3dbe1c97a817a5d5a4ab12 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/anagram/4cea4799f8d04747920f4fa0133e14c2.py | 3ac7214609467216359f63db380f0fee5c7bb09c | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 256 | py | def is_anagram_of(word, cwrd, swrd):
caps = word.upper()
return cwrd != caps and swrd == sorted(caps)
def detect_anagrams(word, words):
cwrd = word.upper()
swrd = sorted(cwrd)
return list(filter(lambda s: is_anagram_of(s, cwrd, swrd), words))
| [
"[email protected]"
] | |
1dcb23d7909b6dd031ad32006240c79be5c1aff9 | d66aa4c77f65bb837e07626c696b6dc886c7b1c1 | /base/Chapter-1/Chapter-1-31.py | db95b375d7349f4744a25dd67cc200ccb1f9bb5d | [] | no_license | silianpan/Excel_to_Python | 2a789aec0eb38d3178be6dd44205792624d0d4c4 | 1c5890988c99b2939c4d98bb6a881e15d6c3ad7d | refs/heads/master | 2021-07-09T00:25:54.665343 | 2021-05-04T11:25:18 | 2021-05-04T11:25:18 | 242,090,461 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/5/2 上午9:30
# @Author : silianpan
# @Site :
# @File : py
# @Software: PyCharm
print('{:.2f}'.format(3.1415926)) #返回3.14。
print('{:.2%}'.format(0.1415926)) #返回14.16%。 | [
"[email protected]"
] | |
03f6b2457e35e2c95cd510820a75fb3a05ad86a4 | 0c1d6b8dff8bedfffa8703015949b6ca6cc83f86 | /lib/worklists/operator/CT/v4.0/business/VDSL_4+2/SIP_Enable/script.py | fbbdc60c98a8c24ab613f572d5dbb73c2483dd5e | [] | no_license | samwei8/TR069 | 6b87252bd53f23c37186c9433ce4d79507b8c7dd | 7f6b8d598359c6049a4e6cb1eb1db0899bce7f5c | refs/heads/master | 2021-06-21T11:07:47.345271 | 2017-08-08T07:14:55 | 2017-08-08T07:14:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,583 | py | #coding:utf-8
# -----------------------------rpc --------------------------
import os
import sys
#debug
DEBUG_UNIT = False
if (DEBUG_UNIT):
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1)
parent3 = os.path.dirname(parent2)
parent4 = os.path.dirname(parent3) # tr069v3\lib
parent5 = os.path.dirname(parent4) # tr069v3\
sys.path.insert(0, parent4)
sys.path.insert(0, os.path.join(parent4, 'common'))
sys.path.insert(0, os.path.join(parent4, 'worklist'))
sys.path.insert(0, os.path.join(parent4, 'usercmd'))
sys.path.insert(0, os.path.join(parent5, 'vendor'))
from TR069.lib.common.event import *
from TR069.lib.common.error import *
from time import sleep
import TR069.lib.common.logs.log as log
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1) # dir is system
try:
i = sys.path.index(parent2)
if (i !=0):
# stratege= boost priviledge
sys.path.pop(i)
sys.path.insert(0, parent2)
except Exception,e:
sys.path.insert(0, parent2)
import _Common
reload(_Common)
from _Common import *
import _VOIP
reload(_VOIP)
from _VOIP import VOIP
def test_script(obj):
"""
"""
sn = obj.sn # 取得SN号
DeviceType = "VDSL" # 绑定tr069模板类型.只支持ADSL\LAN\EPON三种
AccessMode = 'DHCP' # WAN接入模式,可选PPPoE_Bridge,PPPoE,DHCP,Static
rollbacklist = [] # 存储工单失败时需回退删除的实例.目前缺省是不开启回退
# 初始化日志
obj.dict_ret.update(str_result=u"开始执行工单:%s........\n" %
os.path.basename(os.path.dirname(__file__)))
# data传参
ProxyServer = obj.dict_data.get("ProxyServer")[0]
ProxyServerPort = obj.dict_data.get("ProxyServerPort")[0]
RegistrarServer = obj.dict_data.get("RegistrarServer")[0]
RegistrarServerPort = obj.dict_data.get("RegistrarServerPort")[0]
OutboundProxy = obj.dict_data.get("OutboundProxy")[0]
OutboundProxyPort = obj.dict_data.get("OutboundProxyPort")[0]
X_CT_COM_Standby_ProxyServer = obj.dict_data.get("X_CT_COM_Standby_ProxyServer")[0]
X_CT_COM_Standby_ProxyServerPort = obj.dict_data.get("X_CT_COM_Standby_ProxyServerPort")[0]
X_CT_COM_Standby_RegistrarServer = obj.dict_data.get("X_CT_COM_Standby_RegistrarServer")[0]
X_CT_COM_Standby_RegistrarServerPort = obj.dict_data.get("X_CT_COM_Standby_RegistrarServerPort")[0]
X_CT_COM_Standby_OutboundProxy = obj.dict_data.get("X_CT_COM_Standby_OutboundProxy")[0]
X_CT_COM_Standby_OutboundProxyPort = obj.dict_data.get("X_CT_COM_Standby_OutboundProxyPort")[0]
AuthUserName1 = obj.dict_data.get("AuthUserName1")[0]
AuthPassword1 = obj.dict_data.get("AuthPassword1")[0]
AuthUserName2 = obj.dict_data.get("AuthUserName2")[0]
AuthPassword2 = obj.dict_data.get("AuthPassword2")[0]
PVC_OR_VLAN = obj.dict_data.get("PVC_OR_VLAN")[0] # ADSL上行只关心PVC值,LAN和EPON上行则关心VLAN值
X_CT_COM_ServiceList = obj.dict_data.get("X_CT_COM_ServiceList")[0]
WANEnable_Switch = obj.dict_data.get("WANEnable_Switch")[0]
# "InternetGatewayDevice.Services.VoiceService.1."
dict_voiceservice = {"VoiceProfile.1.SIP.ProxyServer":[1, ProxyServer],
"VoiceProfile.1.SIP.ProxyServerPort":[1, ProxyServerPort],
"VoiceProfile.1.SIP.ProxyServerTransport":[0, "Null"],
"VoiceProfile.1.SIP.RegistrarServer":[1, RegistrarServer],
"VoiceProfile.1.SIP.RegistrarServerPort":[1, RegistrarServerPort],
"VoiceProfile.1.SIP.RegistrarServerTransport":[0, "Null"],
"VoiceProfile.1.SIP.OutboundProxy":[1, OutboundProxy],
"VoiceProfile.1.SIP.OutboundProxyPort":[1, OutboundProxyPort],
"VoiceProfile.1.SIP.X_CT-COM_Standby-ProxyServer":[1, X_CT_COM_Standby_ProxyServer],
"VoiceProfile.1.SIP.X_CT-COM_Standby-ProxyServerPort":[1, X_CT_COM_Standby_ProxyServerPort],
"VoiceProfile.1.SIP.X_CT-COM_Standby-ProxyServerTransport":[0, "Null"],
"VoiceProfile.1.SIP.X_CT-COM_Standby-RegistrarServer":[1, X_CT_COM_Standby_RegistrarServer],
"VoiceProfile.1.SIP.X_CT-COM_Standby-RegistrarServerPort":[1, X_CT_COM_Standby_RegistrarServerPort],
"VoiceProfile.1.SIP.X_CT-COM_Standby-RegistrarServerTransport":[0, "Null"],
"VoiceProfile.1.SIP.X_CT-COM_Standby-OutboundProxy":[1, X_CT_COM_Standby_OutboundProxy],
"VoiceProfile.1.SIP.X_CT-COM_Standby-OutboundProxyPort":[1, X_CT_COM_Standby_OutboundProxyPort],
"VoiceProfile.1.SIP.UserAgentDomain":[0, "Null"],
"VoiceProfile.1.SIP.UserAgentPort":[0, "Null"],
"VoiceProfile.1.SIP.UserAgentTransport":[0, "Null"],
"VoiceProfile.1.SIP.VLANIDMark":[0, "Null"],
"VoiceProfile.1.SIP.X_CT-COM_802-1pMark":[0, "Null"],
"VoiceProfile.1.SIP.DSCPMark":[0, "Null"],
"VoiceProfile.1.SIP.X_CT-COM_HeartbeatSwitch":[0, "Null"],
"VoiceProfile.1.SIP.X_CT-COM_HeartbeatCycle":[0, "Null"],
"VoiceProfile.1.SIP.X_CT-COM_HeartbeatCount":[0, "Null"],
"VoiceProfile.1.SIP.X_CT-COM_SessionUpdateTimer":[0, "Null"],
"VoiceProfile.1.SIP.RegisterRetryInterval":[0, "Null"],
"VoiceProfile.1.SIP.RegisterExpires":[0, "Null"],
"VoiceProfile.1.SIP.ImplicitRegistrationEnable":[0, "Null"],
"VoiceProfile.1.Line.1.SIP.AuthUserName":[1, AuthUserName1],
"VoiceProfile.1.Line.1.SIP.AuthPassword":[1, AuthPassword1],
"VoiceProfile.1.Line.2.SIP.AuthUserName":[1, AuthUserName2],
"VoiceProfile.1.Line.2.SIP.AuthPassword":[1, AuthPassword2],
"VoiceProfile.1.Line.1.Enable":[1, "Enabled"],
"VoiceProfile.1.Line.2.Enable":[1, "Enabled"]}
# 对X_CT_COM_LanInterface重新解析,兼容GUI或RF传参数LAN1,lan1格式
#ret, X_CT_COM_LanInterface = ParseLANName(X_CT_COM_LanInterface)
#if ret == ERR_FAIL:
# info = u'输入的X_CT_COM_LanInterface参数错误'
# obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
# return ret_res
# WANDSLLinkConfig节点参数
if PVC_OR_VLAN == "":
PVC_OR_VLAN_flag = 0
else:
PVC_OR_VLAN_flag = 1
dict_wanlinkconfig = {'Enable':[1, '1'],
'X_CT-COM_Mode':[PVC_OR_VLAN_flag, '2'],
'X_CT-COM_VLAN':[PVC_OR_VLAN_flag, PVC_OR_VLAN]}
# WANPPPConnection节点参数
# 注意:X_CT-COM_IPMode节点有些V4版本没有做,所以不能使能为1.实际贝曼工单也是没有下发的
dict_wanpppconnection = {}
# WANIPConnection节点参数
dict_wanipconnection = {'Enable':[1, '1'],
'ConnectionType':[1, 'IP_Routed'],
'Name':[0, 'Null'],
'NATEnabled':[0, 'Null'],
'AddressingType':[1, 'DHCP'],
'ExternalIPAddress':[0, '10.10.10.10'],
'SubnetMask':[0, '255.255.255.0'],
'DefaultGateway':[0, '10.10.10.1'],
'DNSEnabled':[0, 'Null'],
'DNSServers':[0, '10.10.10.2'],
'X_CT-COM_LanInterface':[0, "Null"],
'X_CT-COM_ServiceList':[1, X_CT_COM_ServiceList]}
# 执行VOIP开通工单
ret, ret_data = VOIP(obj, sn, WANEnable_Switch, DeviceType,
AccessMode, PVC_OR_VLAN,
dict_voiceservice,
dict_wanlinkconfig,
dict_wanpppconnection, dict_wanipconnection,
rollbacklist=rollbacklist)
# 将工单脚本执行结果返回到OBJ的结果中
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
# 如果执行失败,统一调用回退机制(缺省是关闭的)
if ret == ERR_FAIL:
ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
info = u"工单:%s执行结束\n" % os.path.basename(os.path.dirname(__file__))
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
return ret
if __name__ == '__main__':
log_dir = g_prj_dir
log.start(name="nwf", directory=log_dir, level="DebugWarn")
log.set_file_id(testcase_name="tr069")
obj = MsgWorklistExecute(id_="1")
obj.sn = "2013012901"
dict_data = {"ProxyServer":("172.24.55.67","1"),
"ProxyServerPort":("5060","2"),
"RegistrarServer":("172.24.55.67","3"),
"RegistrarServerPort":("5060","4"),
"OutboundProxy":("0.0.0.0","5"),
"OutboundProxyPort":("5060","6"),
"X_CT_COM_Standby_ProxyServer":("172.24.55.67","7"),
"X_CT_COM_Standby_ProxyServerPort":("5060","8"),
"X_CT_COM_Standby_RegistrarServer":("172.24.55.67","9"),
"X_CT_COM_Standby_RegistrarServerPort":("5060","10"),
"X_CT_COM_Standby_OutboundProxy":("0.0.0.0","11"),
"X_CT_COM_Standby_OutboundProxyPort":("5060","12"),
"AuthUserName1":("55511021","13"),
"AuthPassword1":("55511021","14"),
"AuthUserName2":("55511022","15"),
"AuthPassword2":("55511022","16"),
"PVC_OR_VLAN":("63", "17"),
"X_CT_COM_ServiceList":("VOIP", "18"),
"WANEnable_Switch":("1", "19")}
obj.dict_data = dict_data
try:
ret = test_script(obj)
if ret == ERR_SUCCESS:
print u"测试成功"
else:
print u"测试失败"
print "****************************************"
print obj.dict_ret["str_result"]
except Exception, e:
print u"测试异常" | [
"[email protected]"
] | |
3f7e43c1e6b0b4badb465fe876338ee60accd0a7 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /detective_write_1/invitation_reject.py | ce13843a11cb17b879320e1411eaf61fd08d6b0a | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,174 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_one_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/detective/reject-invitation.html
if __name__ == '__main__':
"""
accept-invitation : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/detective/accept-invitation.html
list-invitations : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/detective/list-invitations.html
"""
parameter_display_string = """
# graph-arn : The ARN of the behavior graph to reject the invitation to.
The member accountâs current member status in the behavior graph must be INVITED .
"""
add_option_dict = {}
#######################################################################
# parameter display string
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_one_parameter("detective", "reject-invitation", "graph-arn", add_option_dict)
| [
"[email protected]"
] | |
fa38e08a52ba666bef919145d71861e1e85a3d49 | aaf64078ce90bb552651602ee09690b7da94d368 | /valeria_crowdbotics_31/urls.py | 7a022b0cde20aca314541b13432bc1e4711999ef | [] | no_license | crowdbotics-users/valeria-crowdbotics-31 | b625fa9ff0e451ed7ed99ed59d69b4ac8054a92f | 4db0c8c0b0feef7f084d1bcbc16585b1e6a03786 | refs/heads/master | 2021-04-15T03:47:17.140916 | 2018-03-22T20:55:00 | 2018-03-22T20:55:00 | 126,393,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 867 | py | """valeria_crowdbotics_31 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url('', include('home.urls')),
url('', include('home.urls')),
url(r'^admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
75720c382037205156ee15f807fa63fac4eddf1f | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/baiduads/keyword/model/delete_word_response_wrapper.py | 8ff0fce2fe3e1fece7f14185ec5036e07d8ec9bf | [
"Apache-2.0"
] | permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 11,623 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from baiduads.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from baiduads.exceptions import ApiAttributeError
def lazy_import():
from baiduads.common.model.api_response_header import ApiResponseHeader
from baiduads.keyword.model.delete_word_response_wrapper_body import DeleteWordResponseWrapperBody
globals()['ApiResponseHeader'] = ApiResponseHeader
globals()['DeleteWordResponseWrapperBody'] = DeleteWordResponseWrapperBody
class DeleteWordResponseWrapper(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'header': (ApiResponseHeader,), # noqa: E501
'body': (DeleteWordResponseWrapperBody,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'header': 'header', # noqa: E501
'body': 'body', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""DeleteWordResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (DeleteWordResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""DeleteWordResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (DeleteWordResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| [
"[email protected]"
] | |
7ad240c7bf533482774d94f1fb2d74b20b186759 | 543286f4fdefe79bd149ff6e103a2ea5049f2cf4 | /Exercicios&cursos/Minhas_coisas/ex09.py | 9639c0ea8b41077893e4d94489c1272c84db99b9 | [] | no_license | antonioleitebr1968/Estudos-e-Projetos-Python | fdb0d332cc4f12634b75984bf019ecb314193cc6 | 9c9b20f1c6eabb086b60e3ba1b58132552a84ea6 | refs/heads/master | 2022-04-01T20:03:12.906373 | 2020-02-13T16:20:51 | 2020-02-13T16:20:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | from time import sleep
a = []
def lista(a = []):
for c in range(0, 5):
a.append(int(input(f'{c + 1}º Digite um número: ')))
print(f'lista de números digitados: ', end=' ')
for n in a:
print(f'{n}', end='|')
lista()
sleep(2)
print('\nProximo...')
sleep(2)
lista()
| [
"[email protected]"
] | |
9c796ae92d466001acc3155f24e74c72685703fe | 3cfd135a00bbe03a354ec1e516bca9e224655f46 | /sdk/python/test/test_v1beta1_when_expression.py | 28d7521035c39c7023c8a43f8155bb0ad73c848e | [
"Apache-2.0"
] | permissive | FogDong/experimental | c7fc3a38aeaf9e2dbc9b06390a1c2e764ddda291 | 971004ba2ccfbceec4b677ee745fed7fd9ac6635 | refs/heads/main | 2023-04-23T01:49:20.602679 | 2021-05-05T12:36:52 | 2021-05-05T14:11:43 | 367,241,487 | 1 | 0 | Apache-2.0 | 2021-05-14T03:44:34 | 2021-05-14T03:44:34 | null | UTF-8 | Python | false | false | 2,226 | py | # Copyright 2020 The Tekton Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Tekton
Tekton Pipeline # noqa: E501
The version of the OpenAPI document: v0.17.2
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import tekton_pipeline
from tekton_pipeline.models.v1beta1_when_expression import V1beta1WhenExpression # noqa: E501
from tekton_pipeline.rest import ApiException
class TestV1beta1WhenExpression(unittest.TestCase):
"""V1beta1WhenExpression unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test V1beta1WhenExpression
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = tekton_pipeline.models.v1beta1_when_expression.V1beta1WhenExpression() # noqa: E501
if include_optional :
return V1beta1WhenExpression(
input = '0',
operator = '0',
values = [
'0'
],
)
else :
return V1beta1WhenExpression(
input = '0',
operator = '0',
values = [
'0'
],
)
def testV1beta1WhenExpression(self):
"""Test V1beta1WhenExpression"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
e6660d23b39c1d5d5b9e794617440f216d3f4f34 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_regretted.py | 89fb7fbc999a393637811f0bbe73809c6f184826 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
#calss header
class _REGRETTED():
def __init__(self,):
self.name = "REGRETTED"
self.definitions = regret
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['regret']
| [
"[email protected]"
] | |
3a7a2810d740f825a10f529564b84a662467cf35 | ba1776805c3e4305bb868e1ee6929e4b07751090 | /backend/task/migrations/0001_initial.py | c0b2e884d2b1049f30e47ffb276505a3382fc690 | [] | no_license | crowdbotics-apps/seton-20675 | c30edcfae478317a598955d5963c776b28e108b9 | 1318ffdd08cad0bd83b7699e6c6ee73b2737af1d | refs/heads/master | 2022-12-26T11:54:47.261026 | 2020-09-25T15:40:15 | 2020-09-25T15:40:15 | 298,612,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,121 | py | # Generated by Django 2.2.16 on 2020-09-25 15:39
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('task_profile', '0001_initial'),
('location', '0001_initial'),
('task_category', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('details', models.TextField()),
('frequency', models.CharField(max_length=7)),
('size', models.CharField(max_length=6)),
('is_confirmed', models.BooleanField()),
('status', models.CharField(max_length=10)),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('timestamp_confirmed', models.DateTimeField(blank=True, null=True)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_category', to='task_category.Category')),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_customer', to='task_profile.CustomerProfile')),
('location', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='task_location', to='location.TaskLocation')),
('subcategory', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_subcategory', to='task_category.Subcategory')),
('tasker', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_tasker', to='task_profile.TaskerProfile')),
],
),
migrations.CreateModel(
name='TaskTransaction',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.CharField(max_length=10)),
('timestamp_completed', models.DateTimeField(blank=True, null=True)),
('date', models.DateField(blank=True, null=True)),
('timestamp_started', models.DateTimeField(blank=True, null=True)),
('task', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tasktransaction_task', to='task.Task')),
],
),
migrations.CreateModel(
name='Rating',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.FloatField()),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('review', models.TextField(blank=True, null=True)),
('customer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='rating_customer', to='task_profile.CustomerProfile')),
('tasker', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rating_tasker', to='task_profile.TaskerProfile')),
],
),
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('message', models.TextField()),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='message_customer', to='task_profile.CustomerProfile')),
('task', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='message_task', to='task.Task')),
('tasker', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='message_tasker', to='task_profile.TaskerProfile')),
],
),
]
| [
"[email protected]"
] | |
20d321261341e05d7ce4783500097fb8094b3994 | 905f8aa9c460615e2360b3406bdae1f5f6e10632 | /Python-Study-Week3/96.py | 73766925d9b8a08f73fa2d43f38c873bba99c4f9 | [] | no_license | puze8681/2020-Python-Study | a7c9f89310ae29d5b2aa1a1da6dd4524ca78b8fc | a79bf1802a2fdbfc0c797979cef7e5530515ac55 | refs/heads/master | 2022-11-23T09:53:15.017457 | 2020-08-02T07:49:31 | 2020-08-02T07:49:31 | 278,815,225 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py | def checkPassword(password):
isLength = False
isUpper = False
isLower = False
isNumber = False
if len(password) > 7:
isLength = True
for c in password:
if c.isupper():
isUpper = True
elif c.islower():
isLower = True
elif c.isdigit():
isNumber = True
print(isLength and isUpper and isLower and isNumber)
checkPassword(input('비밀번호를 입력해주세요. '))
| [
"[email protected]"
] | |
8c5b17c8fb9f1d518e1dfa9747ce3fc857e3fe86 | 14f4d045750f7cf45252838d625b2a761d5dee38 | /argo/argo/models/io_k8s_api_core_v1_limit_range_spec.py | a18f58d2bf450455420374445993aff438273f94 | [] | no_license | nfillot/argo_client | cf8d7413d728edb4623de403e03d119fe3699ee9 | c8cf80842f9eebbf4569f3d67b9d8eff4ba405fa | refs/heads/master | 2020-07-11T13:06:35.518331 | 2019-08-26T20:54:07 | 2019-08-26T20:54:07 | 204,546,868 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,644 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.14.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from argo.models.io_k8s_api_core_v1_limit_range_item import IoK8sApiCoreV1LimitRangeItem # noqa: F401,E501
class IoK8sApiCoreV1LimitRangeSpec(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'limits': 'list[IoK8sApiCoreV1LimitRangeItem]'
}
attribute_map = {
'limits': 'limits'
}
def __init__(self, limits=None): # noqa: E501
"""IoK8sApiCoreV1LimitRangeSpec - a model defined in Swagger""" # noqa: E501
self._limits = None
self.discriminator = None
self.limits = limits
@property
def limits(self):
"""Gets the limits of this IoK8sApiCoreV1LimitRangeSpec. # noqa: E501
Limits is the list of LimitRangeItem objects that are enforced. # noqa: E501
:return: The limits of this IoK8sApiCoreV1LimitRangeSpec. # noqa: E501
:rtype: list[IoK8sApiCoreV1LimitRangeItem]
"""
return self._limits
@limits.setter
def limits(self, limits):
"""Sets the limits of this IoK8sApiCoreV1LimitRangeSpec.
Limits is the list of LimitRangeItem objects that are enforced. # noqa: E501
:param limits: The limits of this IoK8sApiCoreV1LimitRangeSpec. # noqa: E501
:type: list[IoK8sApiCoreV1LimitRangeItem]
"""
if limits is None:
raise ValueError("Invalid value for `limits`, must not be `None`") # noqa: E501
self._limits = limits
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(IoK8sApiCoreV1LimitRangeSpec, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IoK8sApiCoreV1LimitRangeSpec):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
b8c96de649bce16d8648e375b09f40d49f46c951 | 7889f7f0532db6a7f81e6f8630e399c90438b2b9 | /1.2.1/mpl_examples/api/watermark_image.py | 3d437193a05aab48891107cebb75a25db44afd54 | [] | no_license | matplotlib/matplotlib.github.com | ef5d23a5bf77cb5af675f1a8273d641e410b2560 | 2a60d39490941a524e5385670d488c86083a032c | refs/heads/main | 2023-08-16T18:46:58.934777 | 2023-08-10T05:07:57 | 2023-08-10T05:08:30 | 1,385,150 | 25 | 59 | null | 2023-08-30T15:59:50 | 2011-02-19T03:27:35 | null | UTF-8 | Python | false | false | 543 | py | """
Use a PNG file as a watermark
"""
from __future__ import print_function
import numpy as np
import matplotlib
import matplotlib.cbook as cbook
import matplotlib.image as image
import matplotlib.pyplot as plt
datafile = cbook.get_sample_data('logo2.png', asfileobj=False)
print ('loading %s' % datafile)
im = image.imread(datafile)
im[:,:,-1] = 0.5 # set the alpha channel
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(np.random.rand(20), '-o', ms=20, lw=2, alpha=0.7, mfc='orange')
ax.grid()
fig.figimage(im, 10, 10)
plt.show()
| [
"[email protected]"
] | |
dc3265b6110502d07abce19e2e5c99cb1a0e6024 | d700b9ad1e0b7225871b65ce0dafb27fb408c4bc | /students/k3343/laboratory_works/Shupak_Valentina/laboratory_work_1/hotels_app/apps.py | f3c0bc898a2bee6a8b35f38f5da1c2ccedcbc72b | [
"MIT"
] | permissive | TonikX/ITMO_ICT_WebProgramming_2020 | a8c573ed467fdf99327777fb3f3bfeee5714667b | ba566c1b3ab04585665c69860b713741906935a0 | refs/heads/master | 2023-01-11T22:10:17.003838 | 2020-10-22T11:22:03 | 2020-10-22T11:22:03 | 248,549,610 | 10 | 71 | MIT | 2023-01-28T14:04:21 | 2020-03-19T16:18:55 | Python | UTF-8 | Python | false | false | 99 | py | from django.apps import AppConfig
class HotelsAppConfig(AppConfig):
name = 'hotels_app'
| [
"[email protected]"
] | |
f54560be25ee1a1316421a7fb7246bcae0c7b928 | c46754b9600a12df4f9d7a6320dfc19aa96b1e1d | /src/transformers/models/funnel/convert_funnel_original_tf_checkpoint_to_pytorch.py | 848101f083582bafa26e58c87aaa612502f3f79c | [
"Apache-2.0"
] | permissive | huggingface/transformers | ccd52a0d7c59e5f13205f32fd96f55743ebc8814 | 4fa0aff21ee083d0197a898cdf17ff476fae2ac3 | refs/heads/main | 2023-09-05T19:47:38.981127 | 2023-09-05T19:21:33 | 2023-09-05T19:21:33 | 155,220,641 | 102,193 | 22,284 | Apache-2.0 | 2023-09-14T20:44:49 | 2018-10-29T13:56:00 | Python | UTF-8 | Python | false | false | 2,335 | py | # coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert Funnel checkpoint."""
import argparse
import torch
from transformers import FunnelBaseModel, FunnelConfig, FunnelModel, load_tf_weights_in_funnel
from transformers.utils import logging
logging.set_verbosity_info()
def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, config_file, pytorch_dump_path, base_model):
# Initialise PyTorch model
config = FunnelConfig.from_json_file(config_file)
print(f"Building PyTorch model from configuration: {config}")
model = FunnelBaseModel(config) if base_model else FunnelModel(config)
# Load weights from tf checkpoint
load_tf_weights_in_funnel(model, config, tf_checkpoint_path)
# Save pytorch-model
print(f"Save PyTorch model to {pytorch_dump_path}")
torch.save(model.state_dict(), pytorch_dump_path)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
# Required parameters
parser.add_argument(
"--tf_checkpoint_path", default=None, type=str, required=True, help="Path to the TensorFlow checkpoint path."
)
parser.add_argument(
"--config_file",
default=None,
type=str,
required=True,
help="The config json file corresponding to the pre-trained model. \nThis specifies the model architecture.",
)
parser.add_argument(
"--pytorch_dump_path", default=None, type=str, required=True, help="Path to the output PyTorch model."
)
parser.add_argument(
"--base_model", action="store_true", help="Whether you want just the base model (no decoder) or not."
)
args = parser.parse_args()
convert_tf_checkpoint_to_pytorch(
args.tf_checkpoint_path, args.config_file, args.pytorch_dump_path, args.base_model
)
| [
"[email protected]"
] | |
56b19f05304bab70188096e3ac2eb03470123e18 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/159/61693/submittedfiles/testes.py | 5cfde5c4eb116f499ea4d22d2e9e042e3654972b | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 899 | py | # -*- coding: utf-8 -*-
def cortel1(a):
for i in range (0,a.shape[0],1):
for j in range (0,a.shape[1],1):
if a[i,j]==1:
return i
def cortel2(a):
for i in range (0,a.shape[0],1):
for j in range (0,a.shape[1],1):
if a[i,j]==1:
l2=i
return l2
def cortec1(a):
for j in range (0,a.shape[1],1):
for i in range (0,a.shape[0],1):
if a[i,j]==1:
return j
def cortec2(a):
for j in range (0,a.shape[1],1):
for i in range (0,a.shape[0],1):
if a[i,j]==1:
c2=j
return c2
n=int(input('N de linhas:'))
m=int(input('N de colunas:'))
a=np.zeros((n,m))
for i in range (0,a.shape[0],1):
for j in range (0,a.shape[1],1):
a[i,j]=int(input('Valor:'))
l1=cortel1(a)
l2=cortel2(a)
c1=cortec1(a)
c2=cortec2(a)
print([l1:l2+1,c1,c2+1]) | [
"[email protected]"
] | |
3b98c1f8f6ac6c56e19c3aa095b0d314a653e4ae | 5ac61540ee978a088457257c81d1a297ebc8002f | /app/conf/development/settings.py | 2fce1d09fa31723900fe386680a2b2fde32d26a6 | [] | no_license | yuis-ice/django-qa | 4a18732bcad8af04b442a134856b26f0b4bdd833 | c680c647d788a0bf55b535c8a89ada16e6edba4d | refs/heads/main | 2023-03-24T19:18:23.777044 | 2021-03-22T12:00:23 | 2021-03-22T12:00:23 | 350,327,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,284 | py | import os
import warnings
from django.utils.translation import ugettext_lazy as _
from os.path import dirname
warnings.simplefilter('error', DeprecationWarning)
BASE_DIR = dirname(dirname(dirname(dirname(os.path.abspath(__file__)))))
CONTENT_DIR = os.path.join(BASE_DIR, 'content')
SECRET_KEY = 'NhfTvayqggTBPswCXXhWaN69HuglgZIkM'
DEBUG = True
ALLOWED_HOSTS = [
'localhost',
'0.0.0.0',
'127.0.0.1',
'.example.com',
'.ngrok.io'
]
SITE_ID = 1
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
# Vendor apps
'bootstrap4',
# Application apps
'main',
'accounts',
'django_extensions',
"qaapp.apps.QaappConfig",
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(CONTENT_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = os.path.join(CONTENT_DIR, 'tmp/emails')
# EMAIL_HOST_USER = '[email protected]'
# DEFAULT_FROM_EMAIL = '[email protected]'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_USE_TLS = True
EMAIL_PORT = 587
# EMAIL_HOST_PASSWORD = "Qy..."
# EMAIL_HOST_PASSWORD = os.environ("EMAIL_HOST_PASSWORD")
# print os.environ.get('HOME')
EMAIL_HOST_PASSWORD = os.environ["EMAIL_HOST_PASSWORD"]
EMAIL_HOST_USER = '[email protected]'
DEFAULT_FROM_EMAIL = '[email protected]'
# STRIPE_ENDPOINT_SECRET = os.environ("STRIPE_ENDPOINT_SECRET")
print(
# os.environ["USERNAME"]
# os.environ["EMAIL_HOST_PASSWORD"]
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
ENABLE_USER_ACTIVATION = True
DISABLE_USERNAME = False
LOGIN_VIA_EMAIL = True
LOGIN_VIA_EMAIL_OR_USERNAME = False
LOGIN_REDIRECT_URL = 'index'
LOGIN_URL = 'accounts:log_in'
USE_REMEMBER_ME = True
RESTORE_PASSWORD_VIA_EMAIL_OR_USERNAME = False
ENABLE_ACTIVATION_AFTER_EMAIL_CHANGE = True
SIGN_UP_FIELDS = ['username', 'first_name', 'last_name', 'email', 'password1', 'password2']
if DISABLE_USERNAME:
SIGN_UP_FIELDS = ['first_name', 'last_name', 'email', 'password1', 'password2']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
USE_I18N = True
USE_L10N = True
LANGUAGE_CODE = 'en'
LANGUAGES = [
('en', _('English')),
('ru', _('Russian')),
('zh-Hans', _('Simplified Chinese')),
]
TIME_ZONE = 'UTC'
USE_TZ = True
STATIC_ROOT = os.path.join(CONTENT_DIR, 'static')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(CONTENT_DIR, 'media')
MEDIA_URL = '/media/'
STATICFILES_DIRS = [
os.path.join(CONTENT_DIR, 'assets'),
]
LOCALE_PATHS = [
os.path.join(CONTENT_DIR, 'locale')
]
| [
"[email protected]"
] | |
11b7a83487e395526625f14abb6b3b0ae324ccfb | 6a856fd7e8714de86d96bba85bc48cd8828fa319 | /calendar_caldav/__openerp__.py | 1092f60de3c2ad086942fa3ddb56c930e05a3e0a | [] | no_license | gfcapalbo/odoo-calendar | 5ad35bfaa649e094c7159fa6fbce36ab5d6a4105 | dd7d1972f62db60f8d8ed620e2137838dd746720 | refs/heads/master | 2022-04-23T01:59:48.000726 | 2019-09-30T13:48:49 | 2019-09-30T13:48:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,693 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution, third party addon
# Copyright (C) 2004-2016 Vertel AB (<http://vertel.se>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Calendar ics-urls',
'version': '0.1',
'category': 'Tools',
'summary': 'Subscription on calendar.ics-urls',
'licence': 'AGPL-3',
'description': """
Adds and updates calendar objects according to an ics-url
""",
'author': 'Vertel AB',
'website': 'http://www.vertel.se',
'depends': ['calendar',],
'external_dependencies': {
'python': ['icalendar', 'urllib2'],
},
'data': [ 'res_partner_view.xml',
#'security/ir.model.access.csv',
'res_partner_data.xml'
],
'application': False,
'installable': True,
'demo': ['calendar_ics_demo.xml',],
}
# vim:expandtab:smartindent:tabstop=4s:softtabstop=4:shiftwidth=4:
| [
"[email protected]"
] | |
87dc142a9d447e24e68ae05514e8a4185a6b313f | 596c229c82d6c4a3edab0bc6f95175767019e431 | /xtk.py | 50d4b1305e2b000e1a0baf04247b51165893e2f0 | [] | no_license | Carl4/xtk-ipython | 412209f14b370cb8a56aea39b86706d022a3cfc9 | 9d00c018c6de1a50899d178f272a877406801a86 | refs/heads/master | 2020-12-24T14:18:25.062025 | 2012-08-09T20:21:13 | 2012-08-09T20:21:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,973 | py | """Display classes for the XTK JavaScript library.
The XTK JavaScript library uses WebGL to render 3D visualizations. It can
generate those visualizations based a range of standard 3D data files types,
including .vtk and .stl. This module makes it possible to render these
visualizations in the IPython Notebook.
A simple example would be::
from IPython.lib.xtkdisplay import Mesh
Mesh('http://x.babymri.org/?skull.vtk', opacity=0.5, magicmode=True)
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
from IPython.core.display import Javascript
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
code = """
container.show();
var id = 'xtkwidget_' + utils.uuid();
var xtkdiv = $('<div/>').attr('id',id);
xtkdiv.css('background-color','%s').width(%i).height(%i);
element.append(xtkdiv);
var r = new X.renderer3D();
r.container = id;
r.init();
var m = new X.mesh();
m.file = "%s";
m.magicmode = %s;
m.opacity = %f;
r.add(m);
r.render();
"""
class Mesh(object):
"""Display an XTK mesh object using a URL."""
def __init__(self, url, width=400, height=300, magicmode=False, opacity=1.0, bgcolor='#000'):
"""Create an XTK mesh from a URL.
Parameters
==========
url : str
The URL to the data files to render. This can be an absolute URL or one that is
relative to the notebook server ('files/mymesh.vtk').
width : int
The width in pixels of the XTK widget.
height : int
The height in pixels of the XTK widget.
magicmode : bool
Enable magicmode, which colors points based on their positions.
opacity : float
The mesh's opacity in the range 0.0 to 1.0.
bgcolor : str
The XTK widget's background color.
"""
self.url = url
self.width = width
self.height = height
self.magicmode = 'true' if magicmode else 'false'
self.opacity = opacity
self.bgcolor = bgcolor
def _repr_javascript_(self):
js = code % (self.bgcolor, self.width, self.height, self.url, self.magicmode, self.opacity)
#js = Javascript(js, lib='http://get.goXTK.com/xtk_edge.js')
js = Javascript(js, lib='files/xtk_edge.js')
return js._repr_javascript_()
| [
"[email protected]"
] | |
30a86403610010e51fb0891fbfe913a8af4df42f | 1a166165ab8287d01cbb377a13efdb5eff5dfef0 | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_03_01/aio/operations/_ddos_custom_policies_operations.py | ffd282337030804cb9773e5f5645b3c7d4ff09fd | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | manoj0806/azure-sdk-for-python | 7a14b202ff80f528abd068bf50334e91001a9686 | aab999792db1132232b2f297c76800590a901142 | refs/heads/master | 2023-04-19T16:11:31.984930 | 2021-04-29T23:19:49 | 2021-04-29T23:19:49 | 363,025,016 | 1 | 0 | MIT | 2021-04-30T04:23:35 | 2021-04-30T04:23:35 | null | UTF-8 | Python | false | false | 20,334 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DdosCustomPoliciesOperations:
"""DdosCustomPoliciesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified DDoS custom policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
ddos_custom_policy_name=ddos_custom_policy_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def get(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
**kwargs
) -> "_models.DdosCustomPolicy":
"""Gets information about the specified DDoS custom policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DdosCustomPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
parameters: "_models.DdosCustomPolicy",
**kwargs
) -> "_models.DdosCustomPolicy":
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DdosCustomPolicy')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
parameters: "_models.DdosCustomPolicy",
**kwargs
) -> AsyncLROPoller["_models.DdosCustomPolicy"]:
"""Creates or updates a DDoS custom policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:param parameters: Parameters supplied to the create or update operation.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DdosCustomPolicy or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
ddos_custom_policy_name=ddos_custom_policy_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def update_tags(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> "_models.DdosCustomPolicy":
"""Update a DDoS custom policy tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:param parameters: Parameters supplied to update DDoS custom policy resource tags.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DdosCustomPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
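# A rough usage sketch (not part of the generated module; client wiring varies
# by SDK version, so the imports and names below are illustrative assumptions):
#
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.mgmt.network.aio import NetworkManagementClient
#
#     async def show_policy(subscription_id: str) -> None:
#         async with NetworkManagementClient(
#             DefaultAzureCredential(), subscription_id
#         ) as client:
#             policy = await client.ddos_custom_policies.get(
#                 "my-resource-group", "my-ddos-policy"
#             )
#             print(policy.name)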
| [
"[email protected]"
] | |
7ec4d7758529a0aba4601c41c3d5cac1f7fa8ea6 | ae8e083cc3f7cf50449633c1d18cd3ffd184ba78 | /peloton/lib/pelotonApi.py | 31ffe58ca6dc6166b9aa843ace930e1de2a9c751 | [] | no_license | jrodens/Halogen_Reporting | 8577e0817555b4c48d9cf27bdd080599def37275 | 48ce2891c96cb6aacb50b3f28a4f63834a0147ac | refs/heads/master | 2021-06-19T07:33:50.376807 | 2021-06-17T18:30:27 | 2021-06-17T18:30:27 | 221,496,157 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,383 | py | import requests, json
import logging
import sys
from . import util
class PelotonApi:
"""Main Peloton Api Class"""
def __init__(self, user_email, user_password):
self.logger = logging.getLogger('peloton-to-garmin.PelotonApi')
assert user_email is not None and user_email != "", "Please specify your Peloton login email."
assert user_password is not None and user_password != "", "Please specify your Peloton login password."
self.http_base = "https://api.pelotoncycle.com/api/"
self.session = requests.Session()
auth_endpoint = "https://api.pelotoncycle.com/auth/login"
payload = {
'username_or_email': user_email,
'password': user_password
}
response = self.session.post(auth_endpoint, json=payload, verify=True)
parsed_response = util.parse_response(response)
util.handle_error(response)
self.user_id = parsed_response['user_id']
self.session_id = parsed_response['session_id']
def getAuthCookie(self):
cookies = dict(peloton_session_id=self.session_id)
return cookies
def getXWorkouts(self, numWorkouts):
"""
Gets the latest x workouts from Peloton.
"""
query = "user/" + self.user_id + "/workouts?joins=ride&limit="+ str(numWorkouts) +"&page=0&sort_by=-created"
url = util.full_url(self.http_base, query)
workouts = util.getResponse(self.session, url, {}, self.getAuthCookie())
data = workouts["data"]
self.logger.debug("getXWorkouts: {}".format(data))
return data
def getLatestWorkout(self):
"""
Gets the latest workout from Peloton.
"""
query = "user/" + self.user_id + "/workouts?joins=ride&limit=1&page=0&sort_by=-created"
url = util.full_url(self.http_base, query)
workouts = util.getResponse(self.session, url, {}, self.getAuthCookie())
data = workouts["data"][0]
self.logger.debug("getLatestWorkout: {}".format(data))
return data
def getWorkoutById(self, workoutId):
"""
Gets workout from Peloton by id.
"""
query = "workout/" + workoutId + "?joins=ride,ride.instructor,user"
url = util.full_url(self.http_base, query)
data = util.getResponse(self.session, url, {}, self.getAuthCookie())
self.logger.debug("getWorkoutById: {}".format(data))
return data
def getWorkoutSamplesById(self, workoutId):
"""
Gets workout samples from Peloton by id.
"""
query = "workout/" + workoutId + "/performance_graph?every_n=1"
url = util.full_url(self.http_base, query)
data = util.getResponse(self.session, url, {}, self.getAuthCookie())
self.logger.debug("getWorkoutSamplesById: {}".format(data))
return data
def getWorkoutSummaryById(self, workoutId):
"""
Gets workout summary from Peloton by id.
"""
query = "workout/" + workoutId + "/summary"
url = util.full_url(self.http_base, query)
data = util.getResponse(self.session, url, {}, self.getAuthCookie())
self.logger.debug("getWorkoutSummaryById: {}".format(data))
return data
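# A minimal usage sketch (the credentials below are placeholders, and the
# assumption that a workout payload exposes an "id" field follows the
# workout queries above):
#
#     api = PelotonApi("user@example.com", "hunter2")
#     latest = api.getLatestWorkout()
#     summary = api.getWorkoutSummaryById(latest["id"])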
| [
"[email protected]"
] | |
d06273f05e1c19cf6ad1276aeab10b4dfc6df013 | 51ff94526b5f96211ff2480fd9516facbf9ba48f | /decoupled_dj/settings/production.py | 09a7f53228673cf937a78e0273f259f68bfd7276 | [
"MIT"
] | permissive | leohakim/decoupled_django | 59eecec59b1ac050748d3c0cd994d63ee50359b2 | 1271ab5ab796211b9dea4bad21e6ad0bf026ff07 | refs/heads/main | 2023-07-25T04:04:49.821417 | 2021-09-12T17:53:39 | 2021-09-12T17:53:39 | 392,573,892 | 0 | 0 | MIT | 2021-09-12T17:53:40 | 2021-08-04T06:21:17 | JavaScript | UTF-8 | Python | false | false | 482 | py | from .base import * # noqa
import os
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = env.list("ALLOWED_HOSTS")
STATIC_ROOT = env("STATIC_ROOT")
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
CORS_ALLOWED_ORIGINS = env.list(
"CORS_ALLOWED_ORIGINS",
default=[]
)
# Disable Browsable DRF API
REST_FRAMEWORK = {
**REST_FRAMEWORK,
"DEFAULT_RENDERER_CLASSES": ["rest_framework.renderers.JSONRenderer"]
}
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
) | [
"[email protected]"
] | |
a8a8a56438186a798ca35121e14dbbdb0e69eee3 | 3bcc247a2bc1e0720f0344c96f17aa50d4bcdf2d | /第二阶段笔记/pythonweb/项目/day02/client.py | dc47accf85daea800f2b3a1201207ff8f61e0d79 | [] | no_license | qianpeng-shen/Study_notes | 6f77f21a53266476c3c81c9cf4762b2efbf821fa | 28fb9a1434899efc2d817ae47e94c31e40723d9c | refs/heads/master | 2021-08-16T19:12:57.926127 | 2021-07-06T03:22:05 | 2021-07-06T03:22:05 | 181,856,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,969 | py | from socket import *
import sys
# Client: prints the text menus, sends requests, receives replies and displays them
class TftpClient(object):
    def __init__(self, s):
        self.s = s
    def do_login(self, data):
        # Send the chosen command, then loop until the server accepts the login
        self.s.send(data.encode())
        while True:
            a = input("Please enter a username: ")
            self.s.send(a.encode())
            b = input("Please enter a password: ")
            self.s.send(b.encode())
            ab = self.s.recv(1024).decode()
            if ab == "Y":
                self.do_chuli()
                return
            else:
                continue
    def do_signup(self, data):
        # Account creation mirrors the login flow
        self.s.send(data.encode())
        while True:
            a = input("Please enter a username: ")
            self.s.send(a.encode())
            b = input("Please enter a password: ")
            self.s.send(b.encode())
            ab = self.s.recv(1024).decode()
            if ab == "Y":
                self.do_chuli()
                return
            else:
                continue
    def do_chuli(self):
        # Post-login menu ("chuli" means "processing")
        print("=====please choose=====")
        print("=====query=====")
        print("====register====")
        a = input("Please enter your choice: ")
        self.s.send(a.encode())
        if a == "query":
            self.do_query()
        elif a == "register":
            self.do_register()
    def do_query(self):
        pass
    def do_register(self):
        pass
def main():
    if len(sys.argv) < 3:
        sys.exit("Wrong input format, please try again")
    HOST = sys.argv[1]
    PORT = int(sys.argv[2])
    ADDR = (HOST, PORT)
    s = socket()
    BUFFERSIZE = 1024
    s.connect(ADDR)
    tftp = TftpClient(s)
    while True:
        print("======choose an option======")
        print("=========login==========")
        print("========register========")
        print("==========quit==========")
        print("====================")
        data = input("Please enter a command>>>")
        if data == "login":
            tftp.do_login(data)
        elif data == "register":
            tftp.do_signup(data)
        elif data == "quit":
            break
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
9d4c3e38e13d47bacccd79351bcb1bc247d1ca48 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_185/ch27_2019_04_03_19_33_50_998384.py | 6646db477c839919b1968fb60cf16c03efa53827 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | número_cigarros = int(input("Quantos cigarros você fuma por dia? "))
anos_fumando = int(input("Quantos anos você fuma? "))
tempo_perdido = número_cigarros * 10
print(tempo_perdido)
| [
"[email protected]"
] | |
826877b4ba4da3d0add67615d4aebf46c9a6e11b | 2ad52a65c45051f26fe26631a31f80279522ddb7 | /src/PointCloudOps/src/scripts/pc_subs.py | 598cb5d99c493c75f85d9be61667544260470e12 | [] | no_license | aryamansriram/Movel_Nav | a64c32528b7ce0a5a19127ba3a9379dca0201356 | 0e5e64232a01771999d34694f3bf6840f0c1e3ee | refs/heads/master | 2023-01-03T20:35:22.041816 | 2020-10-21T13:37:11 | 2020-10-21T13:37:11 | 305,279,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,721 | py | #!/usr/bin/env python
import rospy
from sensor_msgs.msg import PointCloud2
from sensor_msgs import point_cloud2
from visualization_msgs.msg import Marker
from geometry_msgs.msg import Point
from std_msgs.msg import ColorRGBA,String
import numpy as np
import random
def cluster(arr,iterations,n_clusters=2):
"""
Finds clusters for a given n-d array
:param arr: numpy array, consisting of depth from point cloud
:param iterations: int, number of iterations to run the clustering algorithm for
    :param n_clusters: int, number of clusters, 2 by default (currently unused;
        the implementation always builds exactly two clusters)
    :return: list of two lists which are the clusters of points
"""
# Initialize random clusters
c1 = random.randint(0,len(arr)-1)
c2 = random.randint(0,len(arr)-1)
# Initialize centroids of those random clusters
cent_1 = arr[c1]
cent_2 = arr[c2]
# For a given number of iterations build the given clusters
for i in range(iterations):
clus_1 = []
clus_2 = []
for ii,xyz in enumerate(arr):
d1 = np.sum((xyz-cent_1)**2)
d2 = np.sum((xyz - cent_2)**2)
            if d1 <= d2:
clus_1.append(xyz)
else:
clus_2.append(xyz)
cent_1 = sum(clus_1)/len(clus_1)
cent_2 = sum(clus_2)/len(clus_2)
return [clus_1,clus_2]
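# Offline sketch (synthetic data, made up for illustration; the ROS node below
# feeds real depth points instead):
#
#     pts = np.vstack([np.random.randn(50, 3), np.random.randn(50, 3) + 5.0])
#     clus_1, clus_2 = cluster(pts, iterations=5)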
def get_dataset(gen):
"""
    :param gen: generator yielding each point cloud point's x, y and z co-ordinates
:return: numpy array of x,y and z points
"""
# Initialize x,y and z lists
x = []
y = []
z = []
# Iterate through generator to add points to the list
for p in gen:
x.append(p[0])
y.append(p[1])
z.append(p[2])
# Reshape arrays to get them ready for concatenation
x = np.array(x).reshape(-1,1)
y = np.array(y).reshape(-1,1)
z = np.array(z).reshape(-1,1)
# Build combined array consisting of all arrays together
dset = np.concatenate([x,y,z],axis=1)
return dset
class PC_Marker:
def __init__(self):
rospy.Subscriber("/camera/depth/points", PointCloud2, self.callback)
def callback(self,data):
gen = point_cloud2.read_points(data, field_names=("x", "y", "z"), skip_nans=True)
rospy.loginfo("Framing dataset..")
frame = get_dataset(gen)
rospy.loginfo("Clustering...")
clusters = cluster(frame,iterations=3)
print("Cluster 1 length: ",len(clusters[0]))
print("Cluster 2 length: ",len(clusters[1]))
def listener():
rospy.init_node("pc_listener",anonymous=True)
PC_Marker()
rospy.loginfo("Waiting.....")
rospy.sleep(5)
rospy.spin()
if __name__=="__main__":
listener()
| [
"[email protected]"
] | |
3cb10ad3a6af182a5ff9913827a1e36d8dd95b2c | ac879dd916f2d5282e4cf092325791b90b4d32d4 | /recipes/forms.py | fe825561b043b263a3d8ce77ce6e1aee671d77d8 | [] | no_license | GBrachetta/recipes | 2d04ff223663983d2e30ffb8300cc0e0d9a7955e | 018cc34292f5f4c3441c99cc9d8ab4de0ce8cae7 | refs/heads/master | 2023-01-21T13:14:54.572067 | 2020-12-01T23:06:26 | 2020-12-01T23:06:26 | 314,708,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 822 | py | from django import forms
# from .widgets import CustomClearableFileInput
from .models import Recipe, Category
from .widgets import CustomClearableFileInput
class RecipeForm(forms.ModelForm):
"""Recipe form"""
class Meta:
model = Recipe
fields = (
"category",
"name",
"description",
"instructions",
"difficulty",
"price",
"time",
"image",
"tags",
)
widgets = {"tags": forms.TextInput(attrs={"data-role": "tagsinput"})}
image = forms.ImageField(
label="Image", required=False, widget=CustomClearableFileInput
)
thumbnail = image.hidden_widget
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = ("name",)
| [
"[email protected]"
] | |
4e9493f5fb751d5f49549eadfda4c732a0177b21 | 043baf7f2cd8e40150bbd4c178879a5dd340348d | /children/forms.py | be8a8841d2651142291d5cacaeba886565a02057 | [] | no_license | tjguk/ironcage | 1d6d70445b1da9642e1c70c72832c2738f9a942e | 914b8e60819be7b449ecc77933df13f8b100adb0 | refs/heads/master | 2021-05-06T10:20:35.486184 | 2017-11-20T16:34:17 | 2017-11-20T16:34:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | from django import forms
from .models import Order
class OrderForm(forms.ModelForm):
class Meta:
model = Order
fields = [
'adult_name',
'adult_email_addr',
'adult_phone_number',
'accessibility_reqs',
'dietary_reqs',
]
class TicketForm(forms.Form):
name = forms.CharField()
date_of_birth = forms.DateField(
required=False,
widget=forms.DateInput(attrs={
'data-provide': 'datepicker',
'data-date-format': 'yyyy-mm-dd',
}),
)
TicketFormSet = forms.formset_factory(
TicketForm,
min_num=1,
extra=0,
can_delete=True
)
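# Minimal usage sketch (hypothetical view wiring; `request` comes from a
# Django view and is not defined in this module):
#
#     order_form = OrderForm(request.POST)
#     tickets = TicketFormSet(request.POST)
#     if order_form.is_valid() and tickets.is_valid():
#         order = order_form.save()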
| [
"[email protected]"
] | |
025c1ab960687bd69b7672012af72be3bb1037f3 | 75f2841320a9528e8e5a7e9e196bd101c85dcbf3 | /backend/__init__.py | f5b54a0315f64d168ac296853dba8af06ce299d3 | [] | no_license | therealrahulsahu/com_scrap | d422a540bc370df8e5f0f6c216d1fbaa00c556fd | a31e0a821e46c3ab7429eec62d7777dab84c2f48 | refs/heads/master | 2020-07-16T02:03:02.790758 | 2019-09-01T15:44:29 | 2019-09-01T15:44:29 | 205,696,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35 | py | from .my_code import run as my_code | [
"[email protected]"
] | |
cba5da974162d165da60ce37bf03defd5dc4ac89 | 1b95c01768f769522b7b7f855a0399fa20a11912 | /yatpd/train/simple_train.py | 5def073456d8e3ee625ac3fe74c6bc4dac970688 | [
"MIT"
] | permissive | eriche2016/yatpd | 36b350abcae0900063370aed13cd82d16236be22 | d5e103b61a745484872c0168ddd9c7c9e9cea391 | refs/heads/master | 2021-01-17T12:06:43.152458 | 2015-06-13T08:16:26 | 2015-06-13T08:16:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,083 | py | # -*- coding: utf-8 -*-
import cv2
import numpy as np
from ..utils import timer
from ..utils import img_trans
from ..utils import hog2hognmf
from sklearn import svm
from sklearn.ensemble import AdaBoostClassifier
@timer
def simple_train(img_data_list, channel_type, feature_type, classifier):
''' Use HOG/HOG-NMF to train a model.
    NOTE: all images should be the same size.
    Parameters
    ----------
    img_data_list: list of tuple
        Img_data_list is a list that consists of tuples like (img_data, flag).
        For flag, 1 stands for positive data, -1 stands for negative data.
channel_type: str
Gray | LUV | Gabor | DoG
feature_type: str
HOG | HOG-NMF
classifier: str
AdaBoost | SVM
'''
img_size = img_data_list[0][0].shape
img_feature_list = []
img_flag_list = []
for img_data in img_data_list:
channel_list = img_trans(img_data[0], channel_type)
img_feature = np.array([], dtype=np.float32)
hog = cv2.HOGDescriptor()
for channel in channel_list:
if feature_type == 'HOG' or feature_type == 'HOG-NMF':
hog_feature = hog.compute(channel)
if feature_type == 'HOG':
img_feature = np.append(img_feature, hog_feature[:, 0])
else:
img_feature = np.append(img_feature,
hog2hognmf(hog_feature[:, 0]))
img_feature_list.append(img_feature)
img_flag_list.append(img_data[1])
img_flag_list = np.array(img_flag_list, dtype=np.int32)
img_feature_list = np.array(img_feature_list, dtype=np.float32)
if classifier == 'AdaBoost':
boost_model = AdaBoostClassifier(n_estimators=800)
boost_model.fit(img_feature_list, img_flag_list)
return boost_model, img_size
elif classifier == 'SVM':
svm_model = svm.SVC(kernel='rbf')
svm_model.fit(img_feature_list, img_flag_list)
return svm_model, img_size
else:
raise Exception('Classifier doesn\'t support %s' % classifier)
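# Minimal usage sketch (synthetic, made-up data; real callers pass equally
# sized image crops with +1/-1 flags):
#
#     import numpy as np
#     pos = (np.zeros((128, 64), dtype=np.uint8), 1)
#     neg = (np.ones((128, 64), dtype=np.uint8), -1)
#     model, img_size = simple_train([pos, neg], 'Gray', 'HOG', 'SVM')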
| [
"[email protected]"
] | |
93c03d96e326c2691b8bb363e4d89b20b4c7e926 | 5442daf4ce09928d6bdd728064ed94be28af96b8 | /tests/test_crispy_utils.py | 7e09f38bd0269ad22bd134a29dde05c3e812654c | [
"MIT"
] | permissive | moshthepitt/django-vega-admin | dc00ffd070207aaed400d3deb9bb503b611f6ac6 | 865774e51b3a2c2df81fec1f212acc3bdcea9eaa | refs/heads/master | 2021-07-10T10:49:48.413006 | 2020-06-26T11:29:01 | 2020-06-26T11:29:01 | 157,973,263 | 4 | 0 | MIT | 2020-06-26T11:27:00 | 2018-11-17T10:20:04 | Python | UTF-8 | Python | false | false | 1,626 | py | """module for crispy_utils tests."""
from django.conf import settings
from django.template import Context, Template
from django.test import TestCase, override_settings
from vega_admin.crispy_utils import get_form_actions, get_form_helper_class, get_layout
from tests.artist_app.forms import PlainArtistForm
@override_settings(
ROOT_URLCONF="tests.artist_app.urls", VEGA_ACTION_COLUMN_NAME="Actions"
)
class TestCrispyUtils(TestCase):
"""Test class for crispy utils."""
def test_get_form_actions_no_cancel(self):
"""Test get_form_actions with no cancel."""
form_helper = get_form_helper_class()
layout = get_layout(["name"])
form_actions = get_form_actions(cancel_url=None, button_div_css_class="xxx")
layout.append(form_actions)
form_helper.layout = layout
template = Template(
"""
{% load crispy_forms_tags %}
{% crispy form form_helper %}
"""
)
context = Context({"form": PlainArtistForm(), "form_helper": form_helper})
html = template.render(context)
expected_html = """
<div class="col-md-12">
<div class="xxx">
<input
type="submit"
name="submit"
value="Submit"
class="btn btn-primary btn-block vega-submit"
id="submit-id-submit"
/>
</div>
</div>
"""
assert "vega-cancel" not in html
assert settings.VEGA_CANCEL_TEXT not in html
self.assertInHTML(expected_html, html)
| [
"[email protected]"
] | |
6402bd53a3fdf59432d1860e917426ed25cc5995 | 773b22ef1658bbc165d910a65a40327eb4c52c4a | /api/repository/clean.py | 29a10a979a2c562880d1d6223f7bccb9d368057c | [] | no_license | AneresArsenal/feedback-main | e6d4cd98150366f353f42aa09b8f5d4751def6cb | 88bd2ba62d1c1fd6bff659832d216fca5db50740 | refs/heads/master | 2022-12-24T08:13:43.544028 | 2020-09-24T20:37:44 | 2020-09-24T20:37:44 | 300,140,625 | 0 | 0 | null | 2020-10-01T04:21:27 | 2020-10-01T04:21:26 | null | UTF-8 | Python | false | false | 372 | py | from sqlalchemy_api_handler import logger
from utils.db import db
def clean():
logger.info('clean all the database...')
for table in reversed(db.metadata.sorted_tables):
print("Clearing table {table_name}...".format(table_name=table))
db.session.execute(table.delete())
db.session.commit()
logger.info('clean all the database...Done.')
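# Usage sketch (assumes an active application/database session context):
#
#     clean()  # empties every table, dependents first thanks to reversed()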
| [
"[email protected]"
] | |
a91fb57384638740d71ac032b7f29a78e739f4cf | 8d02b867eaa5d7aedb80ae31cec5dfe7b0201d1f | /Ch_03 - Computing with Numbers/slope_finder.py | a46f0535cd980bd113bc1558a5c79d5585028c4a | [] | no_license | capncrockett/beedle_book | df17f632990edf4dfae82ccedb5f8d2d07385c00 | d65315ddff20fb0ef666c610dbe4634dff0a621a | refs/heads/main | 2023-07-23T08:33:17.275029 | 2021-09-01T02:47:08 | 2021-09-01T02:47:08 | 401,894,762 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 914 | py | # slope_finder
# A program to find the slope of two points on a graph.
#
# def main():
# print("This program calculates the slope of a line.")
#
# x1, y1 = float(input("Please enter the 1st set of coordinates "
# "separated by a comma: "))
# x2, y2 = float(input("Please enter the 2nd set of coordinates "
# "separated by a comma: "))
# slope = y2 - y1 / x2 - x1
# print("The slope of your line is", slope)
#
#
# main()
#
def main():
print("This program calculates the slope of a line.")
x1, y1 = eval(input("Please enter the 1st set of coordinates "
"separated by a comma: "))
x2, y2 = eval(input("Please enter the 2nd set of coordinates "
"separated by a comma: "))
    slope = (y2 - y1) / (x2 - x1)
print("The slope of your line is", slope)
main()
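# Worked example: entering "1, 2" and then "3, 6" gives
# slope = (6 - 2) / (3 - 1) = 2.0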
| [
"[email protected]"
] | |
994f5184627dab5f3fb69e3131a94a718cb9e007 | eda28f8b12170fd1feabfa619bf3abfb5ac925ab | /app/account/views.py | 8e38e06573228a4c46ac5e6a1915fe150da6454b | [
"MIT"
] | permissive | h1-the-swan/science_history_institute_chp_app | eb00afdbb3ecfc746afed2e9116a64571919bb41 | 0e99dec17403dfcaa2e7fbcd0374c39a773445b1 | refs/heads/master | 2022-12-14T11:34:36.354560 | 2019-04-24T15:26:03 | 2019-04-24T15:26:03 | 204,818,577 | 0 | 0 | MIT | 2022-12-08T02:48:55 | 2019-08-28T01:04:40 | Python | UTF-8 | Python | false | false | 13,494 | py | import os
from flask import (
Blueprint,
flash,
redirect,
render_template,
request,
url_for,
current_app,
abort,
)
from flask_login import (
current_user,
login_required,
login_user,
logout_user,
)
from flask_rq import get_queue
from app import db
from app.account.forms import (
ChangeEmailForm,
ChangePasswordForm,
CreatePasswordForm,
LoginForm,
RegistrationForm,
RequestResetPasswordForm,
ResetPasswordForm,
UpdateProfileForm,
)
# from app.email import send_email
from app.models import User
from requests.exceptions import HTTPError
# from app.hypothesis import HypothesisClient
# hypothesis_service = os.environ.get('HYPOTHESIS_SERVICE', 'http://localhost:5000')
# hyp_client = HypothesisClient(authority=os.environ['HYPOTHESIS_AUTHORITY'],
# client_id=os.environ['HYPOTHESIS_CLIENT_ID'],
# client_secret=os.environ['HYPOTHESIS_CLIENT_SECRET'],
# jwt_client_id=os.environ['HYPOTHESIS_JWT_CLIENT_ID'],
# jwt_client_secret=os.environ['HYPOTHESIS_JWT_CLIENT_SECRET'],
# service=hypothesis_service)
account = Blueprint('account', __name__)
@account.route('/login', methods=['GET', 'POST'])
def login():
"""Log in an existing user."""
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first() \
or User.query.filter_by(username=form.email.data).first()
if user is not None and user.password_hash is not None and \
user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
flash('You are now logged in. Welcome back!', 'success')
return redirect(request.args.get('next') or url_for('main.index'))
else:
flash('Invalid email or password.', 'form-error')
return render_template('account/login.html', form=form)
@account.route('/register', methods=['GET', 'POST'])
def register():
"""Register a new user, and send them a confirmation email."""
form = RegistrationForm()
if form.validate_on_submit():
user = User(
# first_name=form.first_name.data,
# last_name=form.last_name.data,
username=form.username.data,
full_name=form.full_name.data,
email=form.email.data,
password=form.password.data)
# try:
# hyp_client.create_account(user.email, email=user.email,
# display_name=user.email)
# except HTTPError as ex:
# # FIXME: Make the service respond with an appropriate status code and
# # machine-readable error if the user account already exists
# email_err = 'user with email address {} already exists'.format(user.email)
# username_err = 'user with username {} already exists'.format(user.email)
# content = ex.response.content
# if email_err not in content and username_err not in content:
# raise ex
# username = "{}_{}".format(user.first_name.lower(), user.last_name.lower())
current_app.hypothesis_client.create_account(user.username, email=user.email,
display_name=user.full_name)
db.session.add(user)
db.session.commit()
# token = user.generate_confirmation_token()
# confirm_link = url_for('account.confirm', token=token, _external=True)
# get_queue().enqueue(
# send_email,
# recipient=user.email,
# subject='Confirm Your Account',
# template='account/email/confirm',
# user=user,
# confirm_link=confirm_link)
# flash('A confirmation link has been sent to {}.'.format(user.email),
# 'warning')
# Bypass email confirmation
user.force_confirm_account()
return redirect(url_for('main.index'))
# return redirect(url_for('account.manage'))
return render_template('account/register.html', form=form)
@account.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.', 'info')
return redirect(url_for('main.index'))
@account.route('/manage', methods=['GET', 'POST'])
@account.route('/manage/info', methods=['GET', 'POST'])
@login_required
def manage():
"""Display a user's account information."""
form = UpdateProfileForm()
    # Pre-populate only on GET so that submitted values are not overwritten
    if request.method == 'GET':
        if current_user.bio:
            form.bio.data = current_user.bio
        if current_user.website:
            form.website.data = current_user.website
if form.validate_on_submit():
current_user.bio = form.bio.data
current_user.website = form.website.data
db.session.add(current_user)
db.session.commit()
return render_template('account/manage.html', user=current_user, form=form)
@account.route('/reset-password', methods=['GET', 'POST'])
def reset_password_request():
"""Respond to existing user's request to reset their password."""
if not current_user.is_anonymous:
return redirect(url_for('main.index'))
form = RequestResetPasswordForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
token = user.generate_password_reset_token()
reset_link = url_for(
'account.reset_password', token=token, _external=True)
# get_queue().enqueue(
# send_email,
# recipient=user.email,
# subject='Reset Your Password',
# template='account/email/reset_password',
# user=user,
# reset_link=reset_link,
# next=request.args.get('next'))
flash('A password reset link has been sent to {}.'.format(
form.email.data), 'warning')
return redirect(url_for('account.login'))
return render_template('account/reset_password.html', form=form)
# @account.route('/reset-password/<token>', methods=['GET', 'POST'])
# def reset_password(token):
# """Reset an existing user's password."""
# if not current_user.is_anonymous:
# return redirect(url_for('main.index'))
# form = ResetPasswordForm()
# if form.validate_on_submit():
# user = User.query.filter_by(email=form.email.data).first()
# if user is None:
# flash('Invalid email address.', 'form-error')
# return redirect(url_for('main.index'))
# if user.reset_password(token, form.new_password.data):
# flash('Your password has been updated.', 'form-success')
# return redirect(url_for('account.login'))
# else:
# flash('The password reset link is invalid or has expired.',
# 'form-error')
# return redirect(url_for('main.index'))
# return render_template('account/reset_password.html', form=form)
#
#
@account.route('/manage/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
flash('This feature is currently unavailable', 'error')
return redirect(url_for('account.manage'))
# """Change an existing user's password."""
# form = ChangePasswordForm()
# if form.validate_on_submit():
# if current_user.verify_password(form.old_password.data):
# current_user.password = form.new_password.data
# db.session.add(current_user)
# db.session.commit()
# flash('Your password has been updated.', 'form-success')
# return redirect(url_for('main.index'))
# else:
# flash('Original password is invalid.', 'form-error')
# return render_template('account/manage.html', form=form)
#
#
@account.route('/manage/change-email', methods=['GET', 'POST'])
@login_required
def change_email_request():
flash('This feature is currently unavailable', 'error')
return redirect(url_for('account.manage'))
# """Respond to existing user's request to change their email."""
# form = ChangeEmailForm()
# if form.validate_on_submit():
# if current_user.verify_password(form.password.data):
# new_email = form.email.data
# token = current_user.generate_email_change_token(new_email)
# change_email_link = url_for(
# 'account.change_email', token=token, _external=True)
# get_queue().enqueue(
# send_email,
# recipient=new_email,
# subject='Confirm Your New Email',
# template='account/email/change_email',
# # current_user is a LocalProxy, we want the underlying user
# # object
# user=current_user._get_current_object(),
# change_email_link=change_email_link)
# flash('A confirmation link has been sent to {}.'.format(new_email),
# 'warning')
# return redirect(url_for('main.index'))
# else:
# flash('Invalid email or password.', 'form-error')
# return render_template('account/manage.html', form=form)
#
#
# @account.route('/manage/change-email/<token>', methods=['GET', 'POST'])
# @login_required
# def change_email(token):
# """Change existing user's email with provided token."""
# if current_user.change_email(token):
# flash('Your email address has been updated.', 'success')
# else:
# flash('The confirmation link is invalid or has expired.', 'error')
# return redirect(url_for('main.index'))
@account.route('/confirm-account')
@login_required
def confirm_request():
"""Respond to new user's request to confirm their account."""
token = current_user.generate_confirmation_token()
confirm_link = url_for('account.confirm', token=token, _external=True)
# get_queue().enqueue(
# send_email,
# recipient=current_user.email,
# subject='Confirm Your Account',
# template='account/email/confirm',
# # current_user is a LocalProxy, we want the underlying user object
# user=current_user._get_current_object(),
# confirm_link=confirm_link)
flash('A new confirmation link has been sent to {}.'.format(
current_user.email), 'warning')
return redirect(url_for('main.index'))
@account.route('/confirm-account/<token>')
@login_required
def confirm(token):
"""Confirm new user's account with provided token."""
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm_account(token):
flash('Your account has been confirmed.', 'success')
else:
flash('The confirmation link is invalid or has expired.', 'error')
return redirect(url_for('main.index'))
# @account.route(
# '/join-from-invite/<int:user_id>/<token>', methods=['GET', 'POST'])
# def join_from_invite(user_id, token):
# """
# Confirm new user's account with provided token and prompt them to set
# a password.
# """
# if current_user is not None and current_user.is_authenticated:
# flash('You are already logged in.', 'error')
# return redirect(url_for('main.index'))
#
# new_user = User.query.get(user_id)
# if new_user is None:
# return redirect(404)
#
# if new_user.password_hash is not None:
# flash('You have already joined.', 'error')
# return redirect(url_for('main.index'))
#
# if new_user.confirm_account(token):
# form = CreatePasswordForm()
# if form.validate_on_submit():
# new_user.password = form.password.data
# db.session.add(new_user)
# db.session.commit()
# flash('Your password has been set. After you log in, you can '
# 'go to the "Your Account" page to review your account '
# 'information and settings.', 'success')
# return redirect(url_for('account.login'))
# return render_template('account/join_invite.html', form=form)
# else:
# flash('The confirmation link is invalid or has expired. Another '
# 'invite email with a new link has been sent to you.', 'error')
# token = new_user.generate_confirmation_token()
# invite_link = url_for(
# 'account.join_from_invite',
# user_id=user_id,
# token=token,
# _external=True)
# get_queue().enqueue(
# send_email,
# recipient=new_user.email,
# subject='You Are Invited To Join',
# template='account/email/invite',
# user=new_user,
# invite_link=invite_link)
# return redirect(url_for('main.index'))
#
@account.before_app_request
def before_request():
"""Force user to confirm email before accessing login-required routes."""
    if current_user.is_authenticated \
            and not current_user.confirmed \
            and request.endpoint \
            and request.endpoint[:8] != 'account.' \
            and request.endpoint != 'static':
return redirect(url_for('account.unconfirmed'))
@account.route('/unconfirmed')
def unconfirmed():
"""Catch users with unconfirmed emails."""
if current_user.is_anonymous or current_user.confirmed:
return redirect(url_for('main.index'))
return render_template('account/unconfirmed.html')
| [
"[email protected]"
] | |
88fdcdea81eeb89cf861cd9e458a1b569a781c33 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_205/ch10_2020_02_29_12_38_51_302898.py | bc49ee2926284436bdc7d9ea050112036b7c8e26 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | def volume_da_pizza (z,a):
y = z*z*a*3
return y
print (volume_da_pizza (3,2)) | [
"[email protected]"
] | |
f158728483dad3598e7d227f6daed5a12266b1c5 | 494a0ba52d3204cb0082f01ae58cfdfc74895ba2 | /thisIsCodingTest/Greedy/4.don'tMakeMoney.py | cfd68e9c3d4b5509207a1dfd383cc5ae7aa7e36f | [] | no_license | mhee4321/python_algorithm | 52331721c49399af35ffc863dd1d9b8e39cea26a | 96dd78390ba735dd754930affb3b72bebbbe5104 | refs/heads/master | 2023-04-26T09:27:40.760958 | 2021-05-16T12:12:39 | 2021-05-16T12:12:39 | 327,462,537 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | n = int(input())
data = list(map(int, input().split()))
data.sort()
answer = 1
for x in data:
if answer < x:
break
answer += x
print(answer)
# 1 1 2 3 9
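# Worked trace for the sample above (sorted: 1 1 2 3 9): answer starts at 1;
# 1 <= 1 -> 2, 1 <= 2 -> 3, 2 <= 3 -> 5, 3 <= 5 -> 8, then 9 > 8 stops the
# loop, so 8 is the smallest amount that cannot be made from these coins.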
| [
"[email protected]"
] | |
520dd38b8e9b3ae3f346b6acbaa3d29266973d10 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/bob/3e15831531564bb59d8c8e12ee82db64.py | 295e31e58c33b9db852cf9e133b741699b77845e | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 254 | py | def hey(what):
what = what.strip()
    if what.isupper():
return "Whoa, chill out!"
elif len(what) == 0:
return "Fine. Be that way!"
elif what.endswith('?'):
return "Sure."
else:
return "Whatever."
| [
"[email protected]"
] | |
827745662ba46429c9ffe39f0fc6c46e5945dd07 | 050e3bfbbc7aba577f3120588233ee908668e37d | /settings.py | 398abb5e23fe7d3dc9a2bc83d9e46033ee1925f4 | [
"MIT"
] | permissive | datamade/represent-boundaries | 9457411d29751e0f373bd62e88cb1d4d09932375 | 13a789b6d1f4f3a3b076f90c33a12254b8e21433 | refs/heads/master | 2020-04-08T20:59:24.970558 | 2015-09-21T20:08:14 | 2015-09-21T20:08:14 | 42,890,604 | 1 | 0 | null | 2015-09-21T20:02:31 | 2015-09-21T20:02:30 | null | UTF-8 | Python | false | false | 419 | py | """
Run `django-admin.py syncdb --settings settings --noinput` before testing.
"""
SECRET_KEY = 'x'
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'travis_ci_test',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.gis',
'boundaries',
)
MIDDLEWARE_CLASSES = ()
| [
"[email protected]"
] | |
311c3d454e7edc077485e30a033d615ed1fcdb37 | b5f05426d811303c0bc2d37a7ebff67cc369f536 | /python/ltp/data/dataset/mixed.py | 215a149e9e17a5be8c1b01ea716abf05b3e02d37 | [] | no_license | chenwangwww/paddlehub | 54a310c2b627868aa22e6172497d60ddd2291d24 | 8583a705af6f82512ea5473f3d8961a798852913 | refs/heads/master | 2023-03-13T10:17:55.589558 | 2021-03-01T02:35:43 | 2021-03-01T02:35:43 | 293,667,091 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 876 | py | from typing import List, Dict
from . import Dataset
class MixedDataset(Dataset, alias='mixed'):
def __init__(self, path: List[Dict], file, fields, text='text'):
datasets = {}
if isinstance(file, str):
file = [file] * len(path)
for dataset, file in zip(path, file):
init = {}
name = dataset['name']
for key, value in dataset.items():
if key != 'name' and key != 'path':
init[key] = value
datasets[name] = Dataset.from_params(init, path=dataset['path'], file=file, fields=fields)
examples = []
for name, dataset in datasets.items():
for example in dataset.examples:
setattr(example, text, (name, *getattr(example, text)))
examples.append(example)
super().__init__(examples, fields)
| [
"[email protected]"
] | |
06e79651a9a40c27557771921e288b416f12f990 | 72d8b5139f0ed9ce273e44004032457e367d4336 | /capchat_solver/venv/local/lib/python2.7/site-packages/ipywidgets/widgets/widget_box.py | 8814201e077182ddae3d115a4a98dee9d84bd4f1 | [
"MIT"
] | permissive | bngabonziza/CaptchaGAN | 98d7bc8c555f18c8ad13bd8eb340ad0b3c5fb900 | 228e93a5f6a7cee240f82c60950de121d64451c2 | refs/heads/main | 2023-02-02T18:49:14.299220 | 2020-12-22T07:07:56 | 2020-12-22T07:07:56 | 369,036,980 | 1 | 0 | MIT | 2021-05-20T00:39:38 | 2021-05-20T00:39:38 | null | UTF-8 | Python | false | false | 1,984 | py | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Box class.
Represents a container that can be used to group other widgets.
"""
from .widget import register, widget_serialization
from .domwidget import DOMWidget
from .widget_core import CoreWidget
from .widget_layout import Layout
from traitlets import Unicode, Tuple, Int, CaselessStrEnum, Instance, default
from warnings import warn
@register('Jupyter.Box')
class Box(DOMWidget, CoreWidget):
"""Displays multiple widgets in a group."""
_model_module = Unicode('jupyter-js-widgets').tag(sync=True)
_view_module = Unicode('jupyter-js-widgets').tag(sync=True)
_model_name = Unicode('BoxModel').tag(sync=True)
_view_name = Unicode('BoxView').tag(sync=True)
# Child widgets in the container.
# Using a tuple here to force reassignment to update the list.
# When a proper notifying-list trait exists, that is what should be used here.
children = Tuple().tag(sync=True, **widget_serialization)
box_style = CaselessStrEnum(
values=['success', 'info', 'warning', 'danger', ''], default_value='',
help="""Use a predefined styling for the box.""").tag(sync=True)
def __init__(self, children = (), **kwargs):
kwargs['children'] = children
super(Box, self).__init__(**kwargs)
self.on_displayed(Box._fire_children_displayed)
def _fire_children_displayed(self):
for child in self.children:
child._handle_displayed()
@register('Jupyter.VBox')
class VBox(Box):
"""Displays multiple widgets vertically using the flexible box model."""
_model_name = Unicode('VBoxModel').tag(sync=True)
_view_name = Unicode('VBoxView').tag(sync=True)
@register('Jupyter.HBox')
class HBox(Box):
"""Displays multiple widgets horizontally using the flexible box model."""
_model_name = Unicode('HBoxModel').tag(sync=True)
_view_name = Unicode('HBoxView').tag(sync=True)
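# Minimal usage sketch (assumes a Jupyter notebook session; IntSlider is just
# an example child widget from this same package):
#
#     from ipywidgets import IntSlider
#     box = HBox(children=[IntSlider(), IntSlider()])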
| [
"[email protected]"
] | |
1b4e022dff8eea45419057adda0defc9e6f6d3cc | 792588db469538c1f93efd2c26f0819d6e96aff5 | /comma-delimited-handlers/handlers_new/belgium.py | e423b3eca75d9ab4f34984e1bc300a20be4cde1b | [] | no_license | fillingthemoon/go-cart-misc | 855696a3895ba5754a11ed0407069b6e35ee27e0 | 714513acfe2f5ad08442603443213c97f6bdae22 | refs/heads/main | 2023-08-21T23:08:48.686377 | 2021-10-05T11:08:42 | 2021-10-05T11:08:42 | 373,039,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 986 | py | import settings
import handlers.base_handler
import csv
class CartogramHandler(handlers.base_handler.BaseCartogramHandler):
def get_name(self):
return "Belgium"
def get_gen_file(self):
return "{}/bel_processedmap.json".format(settings.CARTOGRAM_DATA_DIR)
def validate_values(self, values):
if len(values) != 3:
return False
for v in values:
if type(v) != float:
return False
return True
def gen_area_data(self, values):
return """cartogram_id,Region Data,Region Name,Inset
1,{},Bruxelles,L
2,{},Vlaanderen,R
3,{},Wallonie,R""".format(*values)
def expect_geojson_output(self):
return True
def csv_to_area_string_and_colors(self, csvfile):
return self.order_by_example(csv.reader(csvfile), "Region", 0, 1, 2, 3, ["Bruxelles","Vlaanderen","Wallonie"], [0.0 for i in range(0,3)], {"Bruxelles":"1","Vlaanderen":"2","Wallonie":"3"})
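# Tiny illustration (values made up): CartogramHandler().gen_area_data(
# [1.0, 6.5, 3.6]) fills the three placeholders in order, producing one CSV
# row each for Bruxelles, Vlaanderen and Wallonie.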
| [
"[email protected]"
] | |
57cf999e0573e95f9f9800365682268c35ba82f4 | 89c4a43a505df8fdf1f0d7386988c4896c2e631b | /google/ads/googleads/v6/services/services/mobile_device_constant_service/transports/__init__.py | 8e1632f7594fa991dfbe9b8918509dc8f2cceec9 | [
"Apache-2.0"
] | permissive | hurricanelennane/google-ads-python | a0a1fed690776a8bb2e81f637eb7eae10fb4992f | 310a488b6fdad9d5beea8fa4b166edce779a2511 | refs/heads/master | 2023-07-04T03:07:53.344466 | 2021-07-16T19:06:36 | 2021-07-16T19:06:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,107 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import MobileDeviceConstantServiceTransport
from .grpc import MobileDeviceConstantServiceGrpcTransport
# Compile a registry of transports.
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[MobileDeviceConstantServiceTransport]]
_transport_registry["grpc"] = MobileDeviceConstantServiceGrpcTransport
__all__ = (
"MobileDeviceConstantServiceTransport",
"MobileDeviceConstantServiceGrpcTransport",
)
| [
"[email protected]"
] | |
cb421fd6828fa29ddee8d01e8968e522c0f4ba44 | 95040d09957e612ed0701c93aec91988aa901ef3 | /mail/migrations/0008_partnerschool.py | 8ef59d173fdd351b06f88a26abd1cf042c1ee9d4 | [] | permissive | mitodl/micromasters | 12160b1bd3654e58c4b35df11688cec486166a71 | d6564caca0b7bbfd31e67a751564107fd17d6eb0 | refs/heads/master | 2023-06-27T22:31:29.388574 | 2023-06-12T18:37:46 | 2023-06-12T18:37:46 | 52,919,185 | 35 | 21 | BSD-3-Clause | 2023-09-13T18:17:10 | 2016-03-01T23:53:17 | Python | UTF-8 | Python | false | false | 572 | py | # Generated by Django 2.1.2 on 2019-03-13 20:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mail', '0007_sentautomaticemail_status'),
]
operations = [
migrations.CreateModel(
name='PartnerSchool',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('email', models.TextField()),
],
),
]
| [
"[email protected]"
] | |
f0e5e410b33f8dbb4607a62866bf9bcd9a8bf356 | 4809471274d6e136ac66d1998de5acb185d1164e | /pypureclient/flasharray/FA_2_4/models/pod.py | 4791431224b817961e737fe308abc1524a73957d | [
"BSD-2-Clause"
] | permissive | astrojuanlu/py-pure-client | 053fef697ad03b37ba7ae21a0bbb466abf978827 | 6fa605079950765c316eb21c3924e8329d5e3e8a | refs/heads/master | 2023-06-05T20:23:36.946023 | 2021-06-28T23:44:24 | 2021-06-28T23:44:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,133 | py | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_4 import models
class Pod(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'str',
'name': 'str',
'arrays': 'list[PodArrayStatus]',
'destroyed': 'bool',
'failover_preferences': 'list[Reference]',
'footprint': 'int',
'mediator': 'str',
'mediator_version': 'str',
'source': 'FixedReference',
'space': 'PodSpace',
'time_remaining': 'int',
'requested_promotion_state': 'str',
'promotion_status': 'str',
'link_source_count': 'int',
'link_target_count': 'int',
'array_count': 'int'
}
attribute_map = {
'id': 'id',
'name': 'name',
'arrays': 'arrays',
'destroyed': 'destroyed',
'failover_preferences': 'failover_preferences',
'footprint': 'footprint',
'mediator': 'mediator',
'mediator_version': 'mediator_version',
'source': 'source',
'space': 'space',
'time_remaining': 'time_remaining',
'requested_promotion_state': 'requested_promotion_state',
'promotion_status': 'promotion_status',
'link_source_count': 'link_source_count',
'link_target_count': 'link_target_count',
'array_count': 'array_count'
}
required_args = {
}
def __init__(
self,
id=None, # type: str
name=None, # type: str
arrays=None, # type: List[models.PodArrayStatus]
destroyed=None, # type: bool
failover_preferences=None, # type: List[models.Reference]
footprint=None, # type: int
mediator=None, # type: str
mediator_version=None, # type: str
source=None, # type: models.FixedReference
space=None, # type: models.PodSpace
time_remaining=None, # type: int
requested_promotion_state=None, # type: str
promotion_status=None, # type: str
link_source_count=None, # type: int
link_target_count=None, # type: int
array_count=None, # type: int
):
"""
Keyword args:
id (str): A globally unique, system-generated ID. The ID cannot be modified and cannot refer to another resource.
name (str): A user-specified name. The name must be locally unique and can be changed.
arrays (list[PodArrayStatus]): A list of arrays over which the pod is stretched. If there are two or more arrays in the stretched pod, all data in the pod is synchronously replicated between all of the arrays within the pod.
destroyed (bool): Returns a value of `true` if the pod has been destroyed and is pending eradication. The `time_remaining` value displays the amount of time left until the destroyed pod is permanently eradicated. Before the `time_remaining` period has elapsed, the destroyed pod can be recovered by setting `destroyed=false`. Once the `time_remaining` period has elapsed, the pod is permanently eradicated and can no longer be recovered.
failover_preferences (list[Reference]): Determines which array within a stretched pod should be given priority to stay online should the arrays ever lose contact with each other. The current array and any peer arrays that are connected to the current array for synchronous replication can be added to a pod for failover preference. By default, `failover_preferences=null`, meaning no arrays have been configured for failover preference.
footprint (int): The maximum amount of physical space the pod would take up on any array, ignoring any data shared outside the pod. Measured in bytes. The footprint metric is mostly used for capacity planning.
mediator (str): The URL of the mediator for the pod. By default, the Pure1 Cloud Mediator (`purestorage`) serves as the mediator.
mediator_version (str): The mediator version.
source (FixedReference): The source pod from where data is cloned to create the new pod.
space (PodSpace)
time_remaining (int): The amount of time left until the destroyed pod is permanently eradicated. Measured in milliseconds. Before the `time_remaining` period has elapsed, the destroyed pod can be recovered by setting `destroyed=false`.
requested_promotion_state (str): Valid values are `promoted` and `demoted`. Patch `requested_promotion_state` to `demoted` to demote the pod so that it can be used as a link target for continuous replication between pods. Demoted pods do not accept write requests, and a destroyed version of the pod with `undo-demote` appended to the pod name is created on the array with the state of the pod when it was in the promoted state. Patch `requested_promotion_state` to `promoted` to start the process of promoting the pod. The `promotion_status` indicates when the pod has been successfully promoted. Promoted pods stop incorporating replicated data from the source pod and start accepting write requests. The replication process does not stop as the source pod continues replicating data to the pod. The space consumed by the unique replicated data is tracked by the `space.journal` field of the pod.
promotion_status (str): Current promotion status of a pod. Valid values are `promoted`, `demoted`, and `promoting`. The `promoted` status indicates that the pod has been promoted. The pod takes writes from hosts instead of incorporating replicated data. This is the default mode for a pod when it is created. The `demoted` status indicates that the pod has been demoted. The pod does not accept write requests and is ready to be used as a link target. The `promoting` status indicates that the pod is in an intermediate status between `demoted` and `promoted` while the promotion process is taking place.
link_source_count (int): Number of source pods that link to the pod.
link_target_count (int): Number of target pods that link to the pod.
array_count (int): Number of arrays to which this pod connects.
"""
if id is not None:
self.id = id
if name is not None:
self.name = name
if arrays is not None:
self.arrays = arrays
if destroyed is not None:
self.destroyed = destroyed
if failover_preferences is not None:
self.failover_preferences = failover_preferences
if footprint is not None:
self.footprint = footprint
if mediator is not None:
self.mediator = mediator
if mediator_version is not None:
self.mediator_version = mediator_version
if source is not None:
self.source = source
if space is not None:
self.space = space
if time_remaining is not None:
self.time_remaining = time_remaining
if requested_promotion_state is not None:
self.requested_promotion_state = requested_promotion_state
if promotion_status is not None:
self.promotion_status = promotion_status
if link_source_count is not None:
self.link_source_count = link_source_count
if link_target_count is not None:
self.link_target_count = link_target_count
if array_count is not None:
self.array_count = array_count
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `Pod`".format(key))
if key == "footprint" and value is not None:
if value < 0:
raise ValueError("Invalid value for `footprint`, must be a value greater than or equal to `0`")
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Pod, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Pod):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
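# Minimal usage sketch (all field values are made up for illustration):
#
#     pod = Pod(name='pod01', mediator='purestorage', array_count=2)
#     pod.footprint = 0          # guarded by __setattr__: negative values raise ValueError
#     print(pod.to_dict())       # only the attributes that were set are serialized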
| [
"[email protected]"
] | |
047c8570dbd7a9979adb3f4a999019c4595d2716 | bd6a48ed22f777272459a62fe596c95be23c45ea | /actions/general_conversations.py | ffe5616d009354a866a431e6a7744f704f7dbaf1 | [
"MIT"
] | permissive | bhaveshAn/Lucy | 75527c7c5a3202737e2b9aad2cc41de2ac3b2c01 | 9ea97184c725a10a041af64cad0ef4b533be42ad | refs/heads/nextgen | 2022-12-15T01:40:05.980712 | 2018-10-09T10:36:38 | 2018-10-09T10:50:39 | 105,390,713 | 3 | 0 | MIT | 2022-12-08T02:56:54 | 2017-09-30T18:17:46 | JavaScript | UTF-8 | Python | false | false | 1,254 | py | import random
def who_are_you(text):
    messages = ['I am Lucy, your own personal assistant.',
                "Lucy, didn't I tell you before?",
'You asked that so many times! I am Lucy']
return (random.choice(messages))
def toss_coin(text):
outcomes = ['heads', 'tails']
return ('I just flipped a coin. It shows ' + random.choice(outcomes))
def how_am_i(text):
replies = [
        'You are the coolest person I have ever seen!',
'My knees go weak when I see you.',
'You look like the kindest person that I have met.'
]
return (random.choice(replies))
def who_am_i(text):
return ('You are a brilliant person. I love you!')
def where_born(text):
return ('I was created by a person named Bhaavesh, in India')
def how_are_you(text):
return ('I am fine, thank you.')
def are_you_up(text):
    return ('For you, always.')
def love_you(text):
replies = [
'I love you too.',
'You are looking for love in the wrong place.'
]
return (random.choice(replies))
def marry_me(text):
return ('I have been receiving a lot of marriage proposals recently.')
def undefined(text):
    return ("I don't know what that means!")
| [
"[email protected]"
] | |
511b75d2a573ad687481453bb4f2e78747a62ab4 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-koomessage/huaweicloudsdkkoomessage/v1/model/show_template_video_thumbnail_request.py | b60ed229081c6590c57c4bb23205bdd2219792c3 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 3,295 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowTemplateVideoThumbnailRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'aim_resource_id': 'str'
}
attribute_map = {
'aim_resource_id': 'aim_resource_id'
}
def __init__(self, aim_resource_id=None):
"""ShowTemplateVideoThumbnailRequest
The model defined in huaweicloud sdk
        :param aim_resource_id: Target resource ID
:type aim_resource_id: str
"""
self._aim_resource_id = None
self.discriminator = None
self.aim_resource_id = aim_resource_id
@property
def aim_resource_id(self):
"""Gets the aim_resource_id of this ShowTemplateVideoThumbnailRequest.
        Target resource ID
:return: The aim_resource_id of this ShowTemplateVideoThumbnailRequest.
:rtype: str
"""
return self._aim_resource_id
@aim_resource_id.setter
def aim_resource_id(self, aim_resource_id):
"""Sets the aim_resource_id of this ShowTemplateVideoThumbnailRequest.
        Target resource ID
:param aim_resource_id: The aim_resource_id of this ShowTemplateVideoThumbnailRequest.
:type aim_resource_id: str
"""
self._aim_resource_id = aim_resource_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShowTemplateVideoThumbnailRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
f808c2dec7038684227a969c77ec85da2d810821 | cb6461bfae8b0935b7885697dad0df60670da457 | /pychron/core/ui/stage_component_editor.py | a0a0cf8b879ef9140e2e2faa9f9fd8a2ec920165 | [
"Apache-2.0"
] | permissive | USGSMenloPychron/pychron | 00e11910511ca053e8b18a13314da334c362695a | 172993793f25a82ad986e20e53e979324936876d | refs/heads/develop | 2021-01-12T14:09:18.983658 | 2018-02-06T14:25:05 | 2018-02-06T14:25:05 | 69,751,244 | 0 | 0 | null | 2016-10-01T16:59:46 | 2016-10-01T16:59:46 | null | UTF-8 | Python | false | false | 1,231 | py | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from pychron.core.ui.factory import toolkit_factory
# ============= standard library imports ========================
# ============= local library imports ==========================
LaserComponentEditor = toolkit_factory('stage_component_editor', 'LaserComponentEditor')
VideoComponentEditor = toolkit_factory('video_component_editor', 'VideoComponentEditor')
# ============= EOF =============================================
| [
"[email protected]"
] | |
d8efbfaa14a1bf5828f27bf5e561c462979192ae | fdec477002fb0c5f013faf369d2a1e782172a1d6 | /COVID19/venv/bin/chardetect | 525781e65b86d8847e9998f1a63f6ab58ca7061d | [] | no_license | aimiranarzhigitova/API_projects | 19fb416479e5a76dab760f38621e643e2db609cb | 8256cc1bc8dc939453c61a39215e89dbd96fecb1 | refs/heads/master | 2023-05-16T08:52:51.209458 | 2021-06-06T09:44:53 | 2021-06-06T09:44:53 | 374,322,074 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | #!/home/azat/djangoProject/COVID19/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
acfc0da063ac7021901b3a177fe6594803dca92a | 9a9e739dcc559476ba796510182374ad460f2f8b | /PA2/PA2 ANSWERS 2015/10/Asitha/pa2-10-2015.py | 0ca5112ad2669202668f7987b7e00f616fa5b28a | [] | no_license | Divisekara/Python-Codes-First-sem | 542e8c0d4a62b0f66c598ff68a5c1c37c20e484d | e4ca28f07ecf96181af3c528d74377ab02d83353 | refs/heads/master | 2022-11-28T01:12:51.283260 | 2020-08-01T08:55:53 | 2020-08-01T08:55:53 | 284,220,850 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 991 | py | def getText():
try:
fo=open("FileIn.txt","r")
L=fo.read().split()
fo.close()
except IOError:
print "File not found"
pass
else:
if len(L)<1000:
for i in L:
if len(i)<20 and i.isalpha()==True:
continue
else:
print "Invalid nput"
break
else:
return L
else:
print "Invalid input."
def showResult(L):
answer=[]
for i in L:
word=list(i.lower())
sorted_word=sorted(word)
if word==sorted_word:
answer.append(i)
display=" ".join(answer)
print display
return display
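# For instance, showResult(["act", "cat", "dog"]) keeps only "act", since it is
# the only word whose letters already appear in alphabetical order.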
def saveFile(s):
try:
fc=open("result.txt","w")
fc.write(s)
fc.close()
except IOError:
print "File error."
pass
try:
saveFile(showResult(getText()))
except:
print "Something went wrong"
pass
| [
"[email protected]"
] | |
0c668919b3e9661f7ddb288bd2e56acec04ce962 | d6168650746aca20909e7b5452a768151271eb52 | /cmdb/utils.py | 5c4d6abd5c1ef0965b28e111b0518942ec2c718e | [] | no_license | xiaozhiqi2016/cmdb_project | 296f2346aa97c89be2e0344fe6f4fc265b62f6a4 | 0b960908a21b9f4f1f5b53c8d05fc4694fea229c | refs/heads/master | 2021-01-12T16:15:59.068570 | 2016-10-26T04:10:30 | 2016-10-26T04:10:30 | 71,965,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,182 | py | #_*_coding:utf-8_*_
import time,hashlib,json
from cmdb import models
from django.shortcuts import render,HttpResponse
from cmdb_project import settings
from django.core.exceptions import ObjectDoesNotExist
def json_date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.strftime("%Y-%m-%d")
def json_datetime_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.strftime("%Y-%m-%d %H:%M:%S")
def gen_token(username,timestamp,token):
token_format = "%s\n%s\n%s" %(username,timestamp,token)
obj = hashlib.md5()
obj.update(token_format.encode())
#print '--->token format:[%s]'% token_format
return obj.hexdigest()[10:17]
def token_required(func):
def wrapper(*args,**kwargs):
response = {"errors":[]}
get_args = args[0].GET
username = get_args.get("user")
token_md5_from_client = get_args.get("token")
timestamp = get_args.get("timestamp")
if not username or not timestamp or not token_md5_from_client:
response['errors'].append({"auth_failed":"This api requires token authentication!"})
return HttpResponse(json.dumps(response))
try:
user_obj = models.UserProfile.objects.get(email=username)
token_md5_from_server = gen_token(username,timestamp,user_obj.token)
if token_md5_from_client != token_md5_from_server:
response['errors'].append({"auth_failed":"Invalid username or token_id"})
else:
if abs(time.time() - int(timestamp)) > settings.TOKEN_TIMEOUT:# default timeout 120
response['errors'].append({"auth_failed":"The token is expired!"})
else:
pass #print "\033[31;1mPass authentication\033[0m"
print("\033[41;1m;%s ---client:%s\033[0m" %(time.time(),timestamp), time.time() - int(timestamp))
except ObjectDoesNotExist as e:
response['errors'].append({"auth_failed":"Invalid username or token_id"})
if response['errors']:
return HttpResponse(json.dumps(response))
else:
return func(*args,**kwargs)
return wrapper
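# Sketch of the matching client side (host and path are hypothetical): the
# caller builds the same md5("user\ntimestamp\nsecret") digest slice and sends
# it together with the plain timestamp:
#
#     import time, requests
#     ts = int(time.time())
#     digest = gen_token('[email protected]', ts, secret_token)
#     requests.get('http://cmdb.example.com/api/asset/',
#                  params={'user': '[email protected]', 'timestamp': ts, 'token': digest})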
| [
"[email protected]"
] | |
60385688c1813b80672e133d1f7208c42b0af423 | 3aab36e615166507e8970aaeac223a4526abdef1 | /crawl_cazipcode/db.py | 64efb2bb4cfcedbf0f5f44bcea36819ed8e745ea | [] | no_license | MacHu-GWU/crawl_cazipcode-project | b783f5e2f5e72d0feb3c98e07951d3bbd38ef772 | 5417a3fba147249d26dd1891845742198797e162 | refs/heads/master | 2020-12-02T06:24:20.216975 | 2017-07-13T19:14:48 | 2017-07-13T19:14:48 | 96,827,670 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pymongo
client = pymongo.MongoClient()
db = client.get_database("cazipcode")
c_province = db.get_collection("province")
c_city = db.get_collection("city")
c_postalcode = db.get_collection("postalcode")
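# Usage sketch (document fields are illustrative): crawler modules import these
# handles and call the regular pymongo API, e.g.
#     c_postalcode.insert_one({"postalcode": "K1A 0B1", "city": "Ottawa"})
#     c_postalcode.find_one({"postalcode": "K1A 0B1"})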
| [
"[email protected]"
] | |
b4c06335dc66816ec8e9a7a7159927f1d0f7588c | bed77414283c5b51b0263103ec5055fa70e7ee3a | /scripts/ldifdiff | a047b48af9eb030e16c7521a2d43b6813fb32531 | [
"Apache-2.0"
] | permissive | UniversitaDellaCalabria/IdM | a24535156d8ee1f416aec0c0844fbc3e39e08280 | 0c80bc1a192e8f3075c941ca2d89773bca25e892 | refs/heads/master | 2020-12-20T09:09:12.320470 | 2020-07-23T03:43:48 | 2020-07-23T03:43:48 | 236,024,963 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,889 | #! /bin/env python3
import base64
flag_binary=0
class EOFException(Exception):
pass
def read_line(file):
while True:
line = file.readline()
if not line:
raise EOFException
if line[0]!='#':
return line.rstrip('\n')
def parse_line(line):
if line[0]==' ':
return (None, line[1:])
    pos = line.find(':')
    if pos > 0:  # str.find returns -1 (truthy) when no colon is present, so test explicitly
        if flag_binary and line[pos+1] == ':':
            # b64decode yields bytes under Python 3; decode (UTF-8 assumed) before concatenating
            return (line[:pos], ": "+base64.standard_b64decode(line[pos+2:]).decode('utf-8'))
        else:
            return (line[:pos], line[pos:])
    else:
        raise ValueError
def read_dn(file):
line_dn = read_line(file)
if not line_dn:
return read_dn(file)
    dn = parse_line(line_dn)
d = list()
try:
while True:
line = read_line(file)
if not line: break
pair = parse_line(line)
if pair[0]:
d.append(pair)
else:
kv = d[-1]
d[-1] = (kv[0], kv[1]+pair[1])
except EOFException:
pass
d.sort()
return (dn[1], d)
def read_ldif(file):
d = dict()
try:
while True:
entry = read_dn(file)
d[entry[0]] = entry[1]
except EOFException:
pass
return d
def count_key(kv):
d = dict()
for k,v in kv:
if k in d:
d[k] += 1
else:
d[k] = 1
return d
def makeldif_modify(dn, kv1, kv2):
keycount1 = count_key(kv1)
keycount2 = count_key(kv2)
s = 'changetype: modify\n'
i, j = 0, 0
def _add(kv):
return 'add: %s\n%s%s\n-\n'%(kv[0], kv[0], kv[1])
def _del(kv):
return 'delete: %s\n%s%s\n-\n'%(kv[0], kv[0], kv[1])
    def _replace(kv1, kv2):
        return 'replace: %s\n%s%s\n-\n'%(kv1[0], kv2[0], kv2[1])
while i<len(kv1) or j<len(kv2):
if i==len(kv1):
s += _add(kv2[j])
j += 1
elif j==len(kv2):
s += _del(kv1[i])
i += 1
elif kv1[i] == kv2[j]:
i, j = i+1, j+1
elif kv1[i][0] < kv2[j][0]:
s += _del(kv1[i])
i += 1
elif kv2[j][0] < kv1[i][0]:
s += _add(kv2[j])
j += 1
# now kv1[i][0] = kv2[j][0]
elif keycount1[kv1[i][0]]==1 and keycount2[kv2[j][0]]==1:
s += _replace(kv1[i], kv2[j])
i, j = i+1, j+1
elif kv1[i] < kv2[j]:
s += _del(kv1[i])
i += 1
elif kv2[j] < kv1[i]:
s += _add(kv2[j])
j += 1
if s:
return '\ndn%s\n%s\n'%(dn, s)
else:
return ''
def makeldif_delete(dn, kv):
s = 'changetype: delete\n'
return '\ndn%s\n%s\n'%(dn, s)
def makeldif_add(dn, kv):
s = 'changetype: add\n'
for x in kv:
s += '%s%s\n'%x
return '\ndn%s\n%s\n'%(dn, s)
def compare(ldif1, ldif2, file):
seen = set()
for dn, kv in ldif2.items():
if dn in ldif1:
kv1 = ldif1[dn]
if kv1 != kv:
s = makeldif_modify(dn, ldif1[dn], kv)
file.write(s)
else:
s = makeldif_add(dn, kv)
file.write(s)
seen.add(dn)
for dn, kv in ldif1.items():
if not (dn in seen):
s = makeldif_delete(dn, kv)
file.write(s)
if __name__=='__main__':
import sys
i=1; n=0
while i < len(sys.argv):
ai = sys.argv[i]
i += 1
if ai=='-b':
flag_binary = 1
elif n==0:
f1 = ai
n += 1
elif n==1:
f2 = ai
n += 1
else:
sys.stderr.write('error: too many arguments: "%s"\n'%ai)
sys.exit(1)
ldif1 = read_ldif(open(f1))
ldif2 = read_ldif(open(f2))
compare(ldif1, ldif2, sys.stdout)
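# Quick self-check sketch (io.StringIO stands in for the two input files; the
# entries are toy data): this prints the LDIF needed to turn the first tree
# into the second, here a single 'replace: mail' modify record.
#
#     import io
#     old = io.StringIO("dn: cn=a\ncn: a\nmail: [email protected]\n")
#     new = io.StringIO("dn: cn=a\ncn: a\nmail: [email protected]\n")
#     compare(read_ldif(old), read_ldif(new), sys.stdout)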
| [
"[email protected]"
] | ||
6e55c2b9d8515500c7c6d10587cb588cbc524b0a | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /120_design_patterns/019_observer/examples/3-Observer Pattern/Observer/Observer/observer/__init__.py | 2aeb77cfe3a025472706654c9e1ec39a168df7a2 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 89 | py | from observer.observer_abc import AbsObserver
from observer.subject_abc import AbsSubject | [
"[email protected]"
] | |
f1d469dcacbfece7c0cd49d613c40d0e35f4714c | 06bc4f76ba6099277d408ddc16edd95fabcd95d0 | /ext/sam/ext/call_py_fort/src/callpy.py | 7135234d212f60abffac2534e8544dc702db0f6c | [
"Apache-2.0",
"MIT"
] | permissive | kbren/uwnet | 31d4267b23012cda61646b94aa8a9e283017f83b | aac01e243c19686b10c214b1c56b0bb7b7e06a07 | refs/heads/master | 2020-04-06T20:31:25.849132 | 2018-11-14T22:02:52 | 2018-11-14T22:05:01 | 157,771,236 | 0 | 0 | MIT | 2018-11-15T20:52:46 | 2018-11-15T20:52:45 | null | UTF-8 | Python | false | false | 2,176 | py | import importlib
import numpy as np
import logging
logging.basicConfig(level=logging.INFO)
# Global state array
STATE = {}
# Create the dictionary mapping ctypes to np dtypes.
ctype2dtype = {}
# Integer types
for prefix in ('int', 'uint'):
for log_bytes in range(4):
ctype = '%s%d_t' % (prefix, 8 * (2**log_bytes))
dtype = '%s%d' % (prefix[0], 2**log_bytes)
# print( ctype )
# print( dtype )
ctype2dtype[ctype] = np.dtype(dtype)
# Floating point types
ctype2dtype['float'] = np.dtype('f4')
ctype2dtype['double'] = np.dtype('f8')
def asarray(ffi, ptr, shape, **kwargs):
length = np.prod(shape)
# Get the canonical C type of the elements of ptr as a string.
T = ffi.getctype(ffi.typeof(ptr).item)
# print( T )
# print( ffi.sizeof( T ) )
if T not in ctype2dtype:
raise RuntimeError("Cannot create an array for element type: %s" % T)
a = np.frombuffer(ffi.buffer(ptr, length * ffi.sizeof(T)), ctype2dtype[T])\
.reshape(shape, **kwargs)
return a
def set_state(args, ffi=None):
tag, t, nx, ny, nz = args
shape = (nz[0], ny[0], nx[0])
tag = ffi.string(tag).decode('UTF-8')
arr = asarray(ffi, t, shape).copy()
STATE[tag] = arr
def set_state_1d(args, ffi=None):
tag, t, n = args
tag = ffi.string(tag).decode('UTF-8')
arr = asarray(ffi, t, (n[0], )).copy()
STATE[tag] = arr
def set_state_scalar(args, ffi=None):
tag, t = args
tag = ffi.string(tag).decode('UTF-8')
STATE[tag] = t[0]
def set_state_char(args, ffi=None):
tag, chr = [ffi.string(x).decode('UTF-8') for x in args]
STATE[tag] = chr
def get_state(args, ffi=None):
tag, t, n = args
tag = ffi.string(tag).decode('UTF-8')
arr = asarray(ffi, t, (n[0], ))
src = STATE.get(tag, np.zeros(n[0]))
arr[:] = src.ravel()
def call_function(module_name, function_name):
"""Call a python function by name"""
# import the python module
mod = importlib.import_module(module_name)
# the function we want to call
fun = getattr(mod, function_name)
# call the function
# this function can edit STATE inplace
fun(STATE)
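# Sketch of a target module this dispatcher could call (module and function
# names are hypothetical). Fortran pushes fields via the set_state_* hooks,
# then invokes call_function('physics', 'add_one'):
#
#     # physics.py
#     def add_one(state):
#         state['a'] = state['a'] + 1.0   # mutate STATE; get_state() copies it back out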
| [
"[email protected]"
] | |
4bee1decea943573516cb9ba3b0b3128f26467f2 | a8d86cad3f3cc6a977012d007d724bbaf02542f7 | /catalog/dump_test_cases_failed.py | 2d81f9721782f9ea6edef284a93314991bc81f52 | [] | no_license | bopopescu/bigrobot | f8d971183119a1d59f21eb2fc08bbec9ee1d522b | 24dad9fb0044df5a473ce4244932431b03b75695 | refs/heads/master | 2022-11-20T04:55:58.470402 | 2015-03-31T18:14:39 | 2015-03-31T18:14:39 | 282,015,194 | 0 | 0 | null | 2020-07-23T17:29:53 | 2020-07-23T17:29:52 | null | UTF-8 | Python | false | false | 3,262 | py | #!/usr/bin/env python
import os
import sys
import argparse
import pymongo
# Determine BigRobot path(s) based on this executable (which resides in
# the bin/ directory.
bigrobot_path = os.path.dirname(__file__) + '/..'
exscript_path = bigrobot_path + '/vendors/exscript/src'
sys.path.insert(0, bigrobot_path)
sys.path.insert(1, exscript_path)
import autobot.helpers as helpers
from catalog_modules.test_catalog import TestCatalog
def prog_args():
descr = """\
Given the release (RELEASE_NAME env) and build (BUILD_NAME env), print a list
of failed test cases.
Examples:
% RELEASE_NAME="ironhorse" BUILD_NAME="bvs master bcf-2.0.0 fcs" ./dump_test_cases_failed.py
% ./dump_test_cases_failed.py --release ironhorse --build "bvs master aggregated 2014 wk40"
"""
parser = argparse.ArgumentParser(prog='dump_test_cases_failed',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=descr)
parser.add_argument('--build',
help=("Jenkins build string,"
" e.g., 'bvs master #2007'"))
parser.add_argument('--release',
help=("Product release, e.g., 'ironhorse', 'ironhorse-plus', 'jackfrost', etc."))
parser.add_argument('--show-tags', action='store_true', default=False,
help=("Show test case tags"))
_args = parser.parse_args()
# _args.build <=> env BUILD_NAME
if not _args.build and 'BUILD_NAME' in os.environ:
_args.build = os.environ['BUILD_NAME']
elif not _args.build:
helpers.error_exit("Must specify --build option or set environment"
" variable BUILD_NAME")
else:
os.environ['BUILD_NAME'] = _args.build
# _args.release <=> env RELEASE_NAME
if not _args.release and 'RELEASE_NAME' in os.environ:
_args.release = os.environ['RELEASE_NAME']
elif not _args.release:
helpers.error_exit("Must specify --release option or set environment"
" variable RELEASE_NAME")
else:
os.environ['RELEASE_NAME'] = _args.release
_args.release = _args.release.lower()
return _args
def print_failed_tests(args):
db = TestCatalog()
ts_author_dict = db.test_suite_author_mapping(args.build)
query = {"build_name": args.build,
"tags": {"$all": [args.release]},
"status": "FAIL",
}
tc_archive_collection = db.test_cases_archive_collection().find(query).sort(
[("product_suite", pymongo.ASCENDING),
("name", pymongo.ASCENDING)])
total_tc = tc_archive_collection.count()
i = 0
for tc in tc_archive_collection:
i += 1
string = ("TC-%03d: %12s %-55s %s"
% (i,
ts_author_dict[tc["product_suite"]],
tc["product_suite"],
tc["name"]))
if args.show_tags:
string = ("%s %s" % (string, helpers.utf8(tc["tags"])))
print string
print "\nTotal test cases failed: %s" % total_tc
if __name__ == '__main__':
print_failed_tests(prog_args())
| [
"[email protected]"
] | |
e3cb86cd88d107fdef1fb913bb5bc101bdb38507 | e880e70effc786f6fa1c473b90639763c3aa8a7e | /app/__init__.py | ed98052a39997df195431f740706490608770c4e | [
"MIT"
] | permissive | pborne/DCPU-16 | 7595492d7ceafb941e62b16cc5efe4de1d49d8ce | db6f7aa15cdee8f3e285c11ee5dd7d286fe46551 | refs/heads/master | 2022-03-23T13:49:32.144944 | 2013-04-20T18:34:46 | 2013-04-20T18:34:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | from emulator import (
Emulator,
)
from view import (
Frame,
)
| [
"[email protected]"
] | |
777e70d291140c0a3f72e34ff3cc9fd482780f3b | 68cbd4ff1eb57f531809ccc2c0ac63e090cb8e17 | /workspaceclient/tests/common/test_resource.py | 397682fc72354f4fd2ca24b012d2c6dea4bb0598 | [
"Apache-2.0"
] | permissive | I201821180/osc_workspace | f89c0665be22cca89dac70bb537f0e3cdd636334 | ced0c58f724aa04137132da0116e866f320978ec | refs/heads/master | 2020-06-07T05:28:41.035721 | 2017-04-17T09:51:14 | 2017-04-17T09:51:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,705 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import six
from mock import mock
from workspaceclient.common import resource
from workspaceclient.tests import base
from workspaceclient.tests import fakes
class SampleResource(resource.Resource):
pass
class TestResource(base.BaseTestCase):
def test_resource_repr(self):
r = SampleResource(None, dict(foo='bar', baz='spam'))
self.assertEqual('<SampleResource baz=spam, foo=bar>', repr(r))
def test_init_with_attribute_info(self):
r = SampleResource(None, dict(foo='bar', baz='spam'))
self.assertTrue(hasattr(r, 'foo'))
self.assertEqual('bar', r.foo)
self.assertTrue(hasattr(r, 'baz'))
self.assertEqual('spam', r.baz)
def test_resource_lazy_getattr(self):
fake_manager = mock.Mock()
return_resource = SampleResource(None, dict(uuid=mock.sentinel.fake_id,
foo='bar',
name='fake_name'))
fake_manager.get.return_value = return_resource
r = SampleResource(fake_manager,
dict(uuid=mock.sentinel.fake_id, foo='bar'))
self.assertTrue(hasattr(r, 'foo'))
self.assertEqual('bar', r.foo)
self.assertFalse(r.has_attached())
# Trigger load
self.assertEqual('fake_name', r.name)
fake_manager.get.assert_called_once_with(mock.sentinel.fake_id)
self.assertTrue(r.has_attached())
# Missing stuff still fails after a second get
self.assertRaises(AttributeError, getattr, r, 'abc')
def test_eq(self):
# Two resources of the same type with the same id: not equal
r1 = SampleResource(None, {'id': 1, 'name': 'hi'})
r2 = SampleResource(None, {'id': 1, 'name': 'hello'})
self.assertNotEqual(r1, r2)
# Two resources of different types: never equal
r1 = SampleResource(None, {'id': 1})
r2 = fakes.FakeResource(None, {'id': 1})
self.assertNotEqual(r1, r2)
# Two resources with no ID: equal if their info is equal
r1 = SampleResource(None, {'name': 'joe', 'age': 12})
r2 = SampleResource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_resource_object_with_request_id(self):
resp_obj = fakes.create_response()
r = SampleResource(None, {'name': '1'}, resp=resp_obj)
self.assertEqual(fakes.FAKE_REQUEST_ID, r.request_id)
def test_resource_object_with_compute_request_id(self):
resp_obj = fakes.create_response_with_compute_header()
r = SampleResource(None, {'name': '1'}, resp=resp_obj)
self.assertEqual(fakes.FAKE_REQUEST_ID, r.request_id)
class ListWithMetaTest(base.BaseTestCase):
def test_list_with_meta(self):
resp = fakes.create_response()
obj = resource.ListWithMeta([], resp)
self.assertEqual([], obj)
# Check request_ids attribute is added to obj
self.assertTrue(hasattr(obj, 'request_id'))
self.assertEqual(fakes.FAKE_REQUEST_ID, obj.request_id)
class DictWithMetaTest(base.BaseTestCase):
def test_dict_with_meta(self):
resp = fakes.create_response()
obj = resource.DictWithMeta({}, resp)
self.assertEqual({}, obj)
# Check request_id attribute is added to obj
self.assertTrue(hasattr(obj, 'request_id'))
self.assertEqual(fakes.FAKE_REQUEST_ID, obj.request_id)
class TupleWithMetaTest(base.BaseTestCase):
def test_tuple_with_meta(self):
resp = fakes.create_response()
expected_tuple = (1, 2)
obj = resource.TupleWithMeta(expected_tuple, resp)
self.assertEqual(expected_tuple, obj)
# Check request_id attribute is added to obj
self.assertTrue(hasattr(obj, 'request_id'))
self.assertEqual(fakes.FAKE_REQUEST_ID, obj.request_id)
class StrWithMetaTest(base.BaseTestCase):
def test_str_with_meta(self):
resp = fakes.create_response()
obj = resource.StrWithMeta('test-str', resp)
self.assertEqual('test-str', obj)
# Check request_id attribute is added to obj
self.assertTrue(hasattr(obj, 'request_id'))
self.assertEqual(fakes.FAKE_REQUEST_ID, obj.request_id)
class BytesWithMetaTest(base.BaseTestCase):
def test_bytes_with_meta(self):
resp = fakes.create_response()
obj = resource.BytesWithMeta(b'test-bytes', resp)
self.assertEqual(b'test-bytes', obj)
# Check request_id attribute is added to obj
self.assertTrue(hasattr(obj, 'request_id'))
self.assertEqual(fakes.FAKE_REQUEST_ID, obj.request_id)
if six.PY2:
class UnicodeWithMetaTest(base.BaseTestCase):
def test_unicode_with_meta(self):
resp = fakes.create_response()
obj = resource.UnicodeWithMeta(u'test-unicode', resp)
self.assertEqual(u'test-unicode', obj)
# Check request_id attribute is added to obj
self.assertTrue(hasattr(obj, 'request_id'))
self.assertEqual(fakes.FAKE_REQUEST_ID, obj.request_id)
| [
"[email protected]"
] | |
52013e904db26d2c534e570b6278d0e7b32e6bae | 09301c71638abf45230192e62503f79a52e0bd80 | /addons_ned/ned_kcs/stock_contract_allocation.py | 36c3b6ec942a17b17e7881af16d5ccce438e5ed3 | [] | no_license | westlyou/NEDCOFFEE | 24ef8c46f74a129059622f126401366497ba72a6 | 4079ab7312428c0eb12015e543605eac0bd3976f | refs/heads/master | 2020-05-27T06:01:15.188827 | 2017-11-14T15:35:22 | 2017-11-14T15:35:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,042 | py | from openerp import api, fields, models, _
import openerp.addons.decimal_precision as dp
from openerp.osv import expression
from datetime import datetime, timedelta
from openerp.tools import float_is_zero, float_compare, float_round, DEFAULT_SERVER_DATETIME_FORMAT
from openerp.tools.misc import formatLang
from openerp.exceptions import UserError, ValidationError
import time
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
DATE_FORMAT = "%Y-%m-%d"
#
class StockContractAllocation(models.Model):
_name = "stock.contract.allocation"
allocation_date = fields.Datetime(string='Date')
stack_no = fields.Many2one('stock.stack', string='Stack')
shipping_id = fields.Many2one('shipping.instruction', string='SI no.')
allocating_quantity = fields.Float(string='Allocating quantity')
@api.multi
def load_stack_info(self):
if self.id:
self.allocating_quantity = self.stack_no.remaining_qty
# self.product_id = self.shipping_id.product_id
# self.write(val)
return True | [
"[email protected]"
] | |
dc82df34593b15296aec83294aabc1d96ea8634e | 2e9e994e17456ed06970dccb55c18dc0cad34756 | /atcoder/abc/114/A.py | 58af0ab22a112562c6977959978003572bb5a9ea | [] | no_license | ksomemo/Competitive-programming | a74e86b5e790c6e68e9642ea9e5332440cb264fc | 2a12f7de520d9010aea1cd9d61b56df4a3555435 | refs/heads/master | 2020-12-02T06:46:13.666936 | 2019-05-27T04:08:01 | 2019-05-27T04:08:01 | 96,894,691 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | def main():
X = int(input())
if X in (3, 5, 7):
print("YES")
else:
print("NO")
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
6d9ee7671285af8a0f8f4f60aba636a3f9d844d7 | 8d589a41f7bf2ad7a0445bb0405970bb15bb29c4 | /testing/tests/001-main/001-empty/003-criticctl/001-adduser-deluser.py | 4133b77269381bfc38b90d6d1ee39aa3f4604e58 | [
"MIT",
"Apache-2.0"
] | permissive | ryfow/critic | 19d86a8033bdb92bca4787f1fc3396c85300913c | 1803667f73ff736a606a27bb0eb3e527ae1367e9 | refs/heads/master | 2021-01-17T11:40:37.224389 | 2013-06-28T11:11:07 | 2013-07-02T21:01:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,730 | py | # Scenario: Try to add a user 'alice' (already exists).
try:
instance.execute(
["sudo", "criticctl", "adduser",
"--name", "alice",
"--email", "[email protected]",
"--fullname", "'Alice von Testing'",
"--password", "testing"])
except testing.virtualbox.GuestCommandError as error:
if "alice: user exists" not in error.output.splitlines():
logger.error("criticctl failed with unexpected error message:\n%s"
% error.output)
else:
logger.error("incorrect criticctl usage did not fail")
# Scenario: Try to delete the user 'nosuchuser' (no such user).
try:
instance.execute(
["sudo", "criticctl", "deluser",
"--name", "nosuchuser"])
except testing.virtualbox.GuestCommandError as error:
if "nosuchuser: no such user" not in error.output.splitlines():
logger.error("criticctl failed with unexpected error message:\n%s"
% error.output)
else:
logger.error("incorrect criticctl usage did not fail")
# Scenario: Add a user 'extra' and then delete the user again.
try:
instance.execute(
["sudo", "criticctl", "adduser",
"--name", "extra",
"--email", "[email protected]",
"--fullname", "'Extra von Testing'",
"--password", "testing"])
except testing.virtualbox.GuestCommandError as error:
logger.error("correct criticctl usage failed:\n%s"
% error.output)
else:
try:
instance.execute(
["sudo", "criticctl", "deluser",
"--name", "extra"])
except testing.virtualbox.GuestCommandError as error:
logger.error("correct criticctl usage failed:\n%s"
% error.output)
| [
"[email protected]"
] | |
2a75d352afc15ce79df646ef5a6f22801eb2e8d7 | 51cd18da63555c7dc6086cf33c70318443920826 | /authentication/models.py | 54c10dc0a492bcd9d33dd5a2e83db4c68211da13 | [] | no_license | PHONGLEX/todosite | 613861305cc586a568d909e1a56e8b3788c7e0e2 | d0ab48e0efc4fd67b45b779d2ea9a70537b80def | refs/heads/main | 2023-04-30T10:13:36.674114 | 2021-05-23T01:43:01 | 2021-05-23T01:43:01 | 369,522,681 | 0 | 0 | null | 2021-05-23T01:47:46 | 2021-05-21T12:07:16 | Python | UTF-8 | Python | false | false | 219 | py | from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
is_email_verified = models.BooleanField(default=False)
def __str__(self):
return self.email
| [
"[email protected]"
] | |
986ae51a0ebd37cde47abb1c78ce6cd3c11d6e02 | 95efc2300bd2936eb9b4ca8f9cda55764047f094 | /django1/src/bookmark/views.py | d67cd83360a7837595c2524384a53e3602e4f67f | [] | no_license | gittaek/jeong | d207d6e41398803475aff82a49bea01e21a86901 | 20808cbb97daff79a4c0b4a017106519f99d919f | refs/heads/master | 2020-04-21T23:11:17.202531 | 2019-02-10T03:20:57 | 2019-02-10T03:20:57 | 169,938,169 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,272 | py | from django.shortcuts import render
from .models import Bookmark
def index(request):
return render(request, 'bookmark/index.html', {'a':"hello", 'b':[1,2,3,4,5]})
def booklist(request):
    # ModelClass.objects.get(): fetches the single object of this model class
    # in the database that satisfies a given condition
    # ModelClass.objects.all(): fetches every object of this model class stored in the database
    # ModelClass.objects.filter(): fetches all objects satisfying a condition, as a list-like queryset
    # ModelClass.objects.exclude(): fetches all objects NOT satisfying a condition, as a list-like queryset
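    # e.g. (illustrative lookups): Bookmark.objects.filter(id__gt=2) returns every
    # bookmark whose id exceeds 2, and Bookmark.objects.exclude(id__gt=2) the rest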
    books = Bookmark.objects.all()
    return render(request, 'bookmark/booklist.html', {'objs':books})
def getbook(request, bookid):
    # When fetching a single object, look it up by the unique id stored per object
    # we need to know which id value the request asked for
    # => add a parameter to the view function (or handle data passed in via a <form>)
obj = Bookmark.objects.get(id=bookid)
print(obj)
return render(request, 'bookmark/getbook.html', {'book':obj}) | [
"user@DESKTOP-37GULAI"
] | user@DESKTOP-37GULAI |
d01340261c9c5db96d89a3616408a441baab8061 | 929a816fc299959d0f8eb0dd51d064be2abd6b78 | /LintCode/ladder 12 BFS/1179 · Friend Circles/solution.py | 43ac183b2ae8a89b8fcf334d3270182119a84b20 | [
"MIT"
] | permissive | vincent507cpu/Comprehensive-Algorithm-Solution | 27940da7bc0343921930a2eafbd649da93a5395d | 04e01e49622457f09af2e1133954f043c0c92cb9 | refs/heads/master | 2023-07-20T07:12:15.590313 | 2021-08-23T23:42:17 | 2021-08-23T23:42:17 | 258,644,691 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,040 | py | class Solution:
"""
@param M: a matrix
@return: the total number of friend circles among all the students
"""
def findCircleNum(self, M):
# Write your code here
        n = len(M) # number of people
        res = 0 # the answer
        visited = [False for _ in range(n)] # whether each person has been visited
        for i in range(n): # for every person: if not visited yet, run a BFS starting from them
            if not visited[i]:
                res += 1
                visited[i] = True
                queue = [i] # mark the start point and push it onto the queue
                while queue:
                    now = queue.pop(0) # pop the front of the queue (a collections.deque would make this O(1))
                    for j in range(n): # scan for friends of the dequeued person
                        if M[now][j] == 1 and not visited[j]: # a new (unvisited) friend: mark visited and enqueue
visited[j] = True
queue.append(j)
return res | [
"[email protected]"
] | |
da30a2b76a19fba4fa60611e04a3e380945acad6 | 0ddd01f6ae2d6ee489cd0f6e6a39c32b6e605b26 | /tests/little_test.py | 91d9d6d977964830220c3f176f75fa2d7d54afe5 | [] | no_license | dhernandd/supervind | ecc965495904c79cee9c19b58265a7642b7c457b | 30f9fad846fe30a8c8bb69b813ad824fa214aad7 | refs/heads/master | 2021-05-03T09:11:24.430166 | 2018-10-04T06:37:26 | 2018-10-04T06:37:26 | 120,571,177 | 2 | 5 | null | 2018-03-17T20:29:16 | 2018-02-07T06:08:39 | Python | UTF-8 | Python | false | false | 3,179 | py | import tensorflow as tf
import numpy as np
import sys
sys.path.append('../code')
from LatEvModels import *
# import seaborn as sns
# import matplotlib.pyplot as plt
#
# data = np.random.randn(20,20)
# sns.heatmap(data)
# plt.show()
# import os
# os.system('say "There is a beer in your fridge."')
# x = tf.placeholder(dtype=tf.float32, shape=[3], name='x')
# y = tf.get_variable('y', initializer=2.0)
# z = y*x
# with tf.Session().as_default():
# sess = tf.get_default_session()
# sess.run(tf.global_variables_initializer())
# print(z.eval(feed_dict={'x:0' : [1.0,2.0,3.0]}, session=sess))
# y = 3*x**2
# for yi, xi in zip(tf.unstack(y), tf.unstack(x)):
# print(tf.gradients(yi, x))
#
# examples = tf.split(batch)
# weight_copies = [tf.identity(weights) for x in examples]
# output = tf.stack(f(x, w) in zip(examples, weight_copies))
# cost = cost_function(output)
# per_example_gradients = tf.gradients(cost, weight_copies)
# grads = tf.stack([tf.gradients(yi, xi) for yi, xi in zip(tf.unstack(y), tf.unstack(x))])
# print(grads)
# lambda_grads = lambda _, YX : tf.gradients(YX[0], YX[1])
# elem_grads = tf.scan(lambda_grads, elems=[y[1:], x[1:]],
# initializer=[tf.gradients(y[0:1], x[0:1])])
# g = tf.gradients(y, x)
# with tf.Session() as sess:
# sess.run(tf.global_variables_initializer())
# G = sess.run(g, feed_dict={'x:0' : [3.0, 4.0]})
# print(G)
# a = np.array([1.0,2.0,3.0])
# b = np.array([1.0,2.0,3.0])
#
# A = tf.get_variable('A', initializer=a)
# B = tf.get_variable('B', initializer=b)
#
# with tf.Graph().as_default() as g1:
# with tf.variable_scope('foo', reuse=tf.AUTO_REUSE):
# x = tf.placeholder(dtype=tf.float64, shape=[2], name='x')
# c = tf.get_variable('c', initializer=tf.cast(1.0, tf.float64))
# y = tf.identity(2*x, 'y')
#
# z = tf.identity(3*x*c, 'z')
# g1_def = g1.as_graph_def()
# z1, = tf.import_graph_def(g1_def, input_map={'foo/x:0' : y}, return_elements=["foo/z:0"],
# name='z1')
# init_op = tf.global_variables_initializer()
# print(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='foo'))
#
#
# with tf.Session(graph=g1) as sess:
# sess.run(init_op)
# print(sess.run(z, feed_dict={'foo/x:0' : np.array([1.0, 2.0])}))
# print(sess.run(tf.report_uninitialized_variables()))
# z1 = sess.run(z1, feed_dict={'foo/x:0' : np.array([1.0, 2.0])})
# y = sess.run(y, feed_dict={'x:0' : np.array([1.0, 2.0])})
# print('y:', y)
# print(z1)
# print(A, B)
# print(tf.constant(0.0))
# aux_fn = lambda _, seqs : seqs[0] + seqs[1]
# C = tf.scan(fn=aux_fn, elems=[A, B], initializer=tf.cast(tf.constant(0.0), tf.float64))
#
# elems = np.array([1, 2, 3, 4, 5, 6])
# initializer = np.array(0)
# sum_one = tf.scan(lambda _, x: x[0] - x[1], (elems + 1, elems), initializer)
#
# sess = tf.Session()
# sess.run(tf.global_variables_initializer())
# print(sess.run(C))
# print(sess.run(sum_one))
#
# if __name__ == '__main__':
# t = Test1()
# t.test_simple()
# t.test_simple()
# tf.test.main()
| [
"[email protected]"
] | |
0e57403d465768ce696293f6d04c4118ca31f2dd | 4135192648199d87a6b38c116db651594e64fa35 | /tests/test_configuration.py | d990a1e6609ebef2a65863d6a36fc1a9599898d4 | [
"Apache-2.0"
] | permissive | yuzhougit/taurus | 307458e97ef7ae9707ee02117545d721c419e720 | 50a3fa5043141d96cc63408a42f3cf0e37cbe0a3 | refs/heads/master | 2020-12-11T05:24:07.241315 | 2015-06-24T01:59:25 | 2015-06-24T01:59:25 | 37,914,679 | 0 | 0 | null | 2015-06-23T11:21:18 | 2015-06-23T11:21:18 | null | UTF-8 | Python | false | false | 2,753 | py | import logging
import six
import tempfile
from bzt.engine import Configuration
from bzt.utils import BetterDict
from tests import BZTestCase, __dir__
class TestConfiguration(BZTestCase):
def test_load(self):
obj = Configuration()
configs = [
__dir__() + "/../bzt/10-base.json",
__dir__() + "/json/jmx.json",
__dir__() + "/json/concurrency.json"
]
obj.load(configs)
logging.debug("config:\n%s", obj)
fname = tempfile.mkstemp()[1]
obj.dump(fname, Configuration.JSON)
with open(fname) as fh:
logging.debug("JSON:\n%s", fh.read())
fname = tempfile.mkstemp()[1]
obj.dump(fname, Configuration.YAML)
with open(fname) as fh:
logging.debug("YAML:\n%s", fh.read())
fname = tempfile.mkstemp()[1]
obj.dump(fname, Configuration.INI)
with open(fname) as fh:
logging.debug("INI:\n%s", fh.read())
def test_merge(self):
obj = Configuration()
configs = [
__dir__() + "/yaml/test.yml",
__dir__() + "/json/merge1.json",
__dir__() + "/json/merge2.json",
]
obj.load(configs)
fname = tempfile.mkstemp()[1]
obj.dump(fname, Configuration.JSON)
with open(fname) as fh:
logging.debug("JSON:\n%s", fh.read())
jmeter = obj['modules']['jmeter']
classval = jmeter['class']
self.assertEquals("bzt.modules.jmeter.JMeterExecutor", classval)
self.assertEquals("value", obj['key'])
self.assertEquals(6, len(obj["list-append"]))
self.assertEquals(2, len(obj["list-replace"]))
self.assertEquals(2, len(obj["list-replace-notexistent"]))
self.assertIsInstance(obj["list-complex"][1][0], BetterDict)
self.assertIsInstance(obj["list-complex"][1][0], BetterDict)
self.assertIsInstance(obj["list-complex"][1][0], BetterDict)
self.assertFalse("properties" in jmeter)
fname = tempfile.mkstemp()[1]
obj.dump(fname, Configuration.JSON)
checker = Configuration()
checker.load([fname])
token = checker["list-complex"][1][0]['token']
self.assertNotEquals('test', token)
token_orig = obj["list-complex"][1][0]['token']
self.assertEquals('test', token_orig)
def test_save(self):
obj = Configuration()
obj.merge({
"str": "text",
"uc": six.u("ucstring")
})
fname = tempfile.mkstemp()[1]
obj.dump(fname, Configuration.YAML)
with open(fname) as fh:
written = fh.read()
logging.debug("YAML:\n%s", written)
self.assertNotIn("unicode", written) | [
"[email protected]"
] | |
fc00b8b49d92cf6ab6245bb4265635dd83e1aaa6 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_virtual_network_gateways_operations.py | b7e1f861fd13d9648803caaeff3b4f8b4e1f01b5 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 130,315 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewaysOperations:
"""VirtualNetworkGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VirtualNetworkGateway",
**kwargs
) -> "_models.VirtualNetworkGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VirtualNetworkGateway",
**kwargs
) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
"""Creates or updates a virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to create or update virtual network gateway operation.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VirtualNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
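    # --- Usage sketch (illustrative only, not part of the generated client). ---
    # Assumes the multi-API aio client and azure-identity; resource names and the
    # gateway parameters object are placeholders.
    #
    #     from azure.identity.aio import DefaultAzureCredential
    #     from azure.mgmt.network.v2020_06_01.aio import NetworkManagementClient
    #
    #     async def create_gateway(gateway_params):
    #         async with NetworkManagementClient(
    #                 DefaultAzureCredential(), "<subscription-id>") as client:
    #             # begin_create_or_update returns an AsyncLROPoller; result()
    #             # waits for the long-running PUT to reach a terminal state.
    #             poller = await client.virtual_network_gateways.begin_create_or_update(
    #                 "my-rg", "my-gateway", gateway_params)
    #             return await poller.result()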
async def get(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> "_models.VirtualNetworkGateway":
"""Gets the specified virtual network gateway by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_06_01.models.VirtualNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
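    # Usage sketch (illustrative): a plain GET is a single awaitable call, no
    # poller involved. Reusing the `client` from the sketch above:
    #
    #     gateway = await client.virtual_network_gateways.get("my-rg", "my-gateway")
    #     print(gateway.name, gateway.provisioning_state)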
async def _delete_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
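    # Usage sketch (illustrative), with the `client` from the earlier sketch.
    # The poller for a delete resolves to None once the DELETE completes:
    #
    #     poller = await client.virtual_network_gateways.begin_delete("my-rg", "my-gateway")
    #     await poller.result()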
async def _update_tags_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> Optional["_models.VirtualNetworkGateway"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
"""Updates a virtual network gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to update virtual network gateway tags.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
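    # Usage sketch (illustrative): tags are updated via a PATCH carrying a
    # TagsObject body. `client` as in the earlier sketch:
    #
    #     tags = _models.TagsObject(tags={"env": "test"})
    #     poller = await client.virtual_network_gateways.begin_update_tags(
    #         "my-rg", "my-gateway", tags)
    #     updated = await poller.result()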
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.VirtualNetworkGatewayListResult"]:
"""Gets all virtual network gateways by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either VirtualNetworkGatewayListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_06_01.models.VirtualNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways'} # type: ignore
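    # Usage sketch (illustrative): list() is not awaited directly -- it returns
    # an AsyncItemPaged that is consumed with `async for`, which transparently
    # follows next_link across pages:
    #
    #     async for gw in client.virtual_network_gateways.list("my-rg"):
    #         print(gw.name)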
def list_connections(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> AsyncIterable["_models.VirtualNetworkGatewayListConnectionsResult"]:
"""Gets all the connections in a virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either VirtualNetworkGatewayListConnectionsResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_06_01.models.VirtualNetworkGatewayListConnectionsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListConnectionsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_connections.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListConnectionsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/connections'} # type: ignore
async def _reset_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
gateway_vip: Optional[str] = None,
**kwargs
) -> Optional["_models.VirtualNetworkGateway"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if gateway_vip is not None:
query_parameters['gatewayVip'] = self._serialize.query("gateway_vip", gateway_vip, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
async def begin_reset(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
gateway_vip: Optional[str] = None,
**kwargs
) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
"""Resets the primary of the virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
        :param gateway_vip: Virtual network gateway VIP address supplied to the reset of the
         active-active feature-enabled gateway.
:type gateway_vip: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
gateway_vip=gateway_vip,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
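    # Usage sketch (illustrative), `client` as above. For an active-active
    # gateway, gateway_vip (a placeholder address here) selects which instance
    # to reset; omit it for an active-standby gateway:
    #
    #     poller = await client.virtual_network_gateways.begin_reset(
    #         "my-rg", "my-gateway", gateway_vip="203.0.113.5")
    #     gateway = await poller.result()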
async def _reset_vpn_client_shared_key_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._reset_vpn_client_shared_key_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_reset_vpn_client_shared_key_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/resetvpnclientsharedkey'} # type: ignore
async def begin_reset_vpn_client_shared_key(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Resets the VPN client shared key of the virtual network gateway in the specified resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_vpn_client_shared_key_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_vpn_client_shared_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/resetvpnclientsharedkey'} # type: ignore
async def _generatevpnclientpackage_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnClientParameters",
**kwargs
) -> Optional[str]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generatevpnclientpackage_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevpnclientpackage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
async def begin_generatevpnclientpackage(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnClientParameters",
**kwargs
) -> AsyncLROPoller[str]:
"""Generates VPN client package for P2S client of the virtual network gateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._generatevpnclientpackage_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
async def _generate_vpn_profile_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnClientParameters",
**kwargs
) -> Optional[str]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generate_vpn_profile_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generate_vpn_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
async def begin_generate_vpn_profile(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnClientParameters",
**kwargs
) -> AsyncLROPoller[str]:
"""Generates VPN profile for P2S client of the virtual network gateway in the specified resource
        group. Used for IKEv2 and RADIUS-based authentication.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._generate_vpn_profile_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generate_vpn_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
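    # Usage sketch (illustrative). The VpnClientParameters field shown is an
    # assumption for a certificate-based P2S setup; the poller resolves to a
    # SAS URL (str) for the generated profile package:
    #
    #     params = _models.VpnClientParameters(authentication_method="EAPTLS")
    #     poller = await client.virtual_network_gateways.begin_generate_vpn_profile(
    #         "my-rg", "my-gateway", params)
    #     package_url = await poller.result()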
async def _get_vpn_profile_package_url_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> Optional[str]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._get_vpn_profile_package_url_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpn_profile_package_url_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
async def begin_get_vpn_profile_package_url(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> AsyncLROPoller[str]:
"""Gets pre-generated VPN profile for P2S client of the virtual network gateway in the specified
resource group. The profile needs to be generated first using generateVpnProfile.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_vpn_profile_package_url_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpn_profile_package_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
async def _get_bgp_peer_status_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: Optional[str] = None,
**kwargs
) -> Optional["_models.BgpPeerStatusListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BgpPeerStatusListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._get_bgp_peer_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if peer is not None:
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_bgp_peer_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
async def begin_get_bgp_peer_status(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: Optional[str] = None,
**kwargs
) -> AsyncLROPoller["_models.BgpPeerStatusListResult"]:
"""The GetBgpPeerStatus operation retrieves the status of all BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer to retrieve the status of.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either BgpPeerStatusListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.BgpPeerStatusListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpPeerStatusListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_bgp_peer_status_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_bgp_peer_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
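    # Usage sketch (illustrative), `client` as above. Without `peer`, the status
    # of all BGP peers is returned; attribute names follow _models.BgpPeerStatus:
    #
    #     poller = await client.virtual_network_gateways.begin_get_bgp_peer_status(
    #         "my-rg", "my-gateway")
    #     status = await poller.result()
    #     for peer in status.value or []:
    #         print(peer.neighbor, peer.state)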
async def supported_vpn_devices(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> str:
"""Gets a xml format representation for supported vpn devices.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.supported_vpn_devices.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
supported_vpn_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/supportedvpndevices'} # type: ignore
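    # Usage sketch (illustrative names): this operation is a plain awaitable,
    # no poller involved; the result is the device list as an XML string.
    #
    #     xml_devices = await client.virtual_network_gateways.supported_vpn_devices(
    #         resource_group_name="my-rg", virtual_network_gateway_name="my-gw")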
async def _get_learned_routes_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> Optional["_models.GatewayRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._get_learned_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_learned_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
async def begin_get_learned_routes(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> AsyncLROPoller["_models.GatewayRouteListResult"]:
"""This operation retrieves a list of routes the virtual network gateway has learned, including
routes learned from BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_learned_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_learned_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
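    # Usage sketch (illustrative names): poll for the learned routes and walk
    # the GatewayRouteListResult; ``value`` is assumed to hold GatewayRoute
    # items. ``begin_get_advertised_routes`` below follows the same pattern but
    # additionally requires the ``peer`` IP address.
    #
    #     poller = await client.virtual_network_gateways.begin_get_learned_routes(
    #         resource_group_name="my-rg", virtual_network_gateway_name="my-gw")
    #     routes = await poller.result()
    #     for route in routes.value or []:
    #         print(route.network, route.next_hop, route.origin)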
async def _get_advertised_routes_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: str,
**kwargs
) -> Optional["_models.GatewayRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._get_advertised_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_advertised_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
async def begin_get_advertised_routes(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: str,
**kwargs
) -> AsyncLROPoller["_models.GatewayRouteListResult"]:
"""This operation retrieves a list of routes the virtual network gateway is advertising to the
specified peer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_advertised_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_advertised_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
async def _set_vpnclient_ipsec_parameters_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
vpnclient_ipsec_params: "_models.VpnClientIPsecParameters",
**kwargs
) -> Optional["_models.VpnClientIPsecParameters"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnClientIPsecParameters"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._set_vpnclient_ipsec_parameters_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpnclient_ipsec_params, 'VpnClientIPsecParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_vpnclient_ipsec_parameters_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/setvpnclientipsecparameters'} # type: ignore
async def begin_set_vpnclient_ipsec_parameters(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
vpnclient_ipsec_params: "_models.VpnClientIPsecParameters",
**kwargs
) -> AsyncLROPoller["_models.VpnClientIPsecParameters"]:
"""The Set VpnclientIpsecParameters operation sets the vpnclient ipsec policy for P2S client of
virtual network gateway in the specified resource group through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param vpnclient_ipsec_params: Parameters supplied to the Begin Set vpnclient ipsec parameters
of Virtual Network Gateway P2S client operation through Network resource provider.
:type vpnclient_ipsec_params: ~azure.mgmt.network.v2020_06_01.models.VpnClientIPsecParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnClientIPsecParameters or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VpnClientIPsecParameters]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._set_vpnclient_ipsec_parameters_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
vpnclient_ipsec_params=vpnclient_ipsec_params,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_vpnclient_ipsec_parameters.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/setvpnclientipsecparameters'} # type: ignore
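    # Usage sketch: building the VpnClientIPsecParameters model (all field
    # values are illustrative) and applying it; the matching
    # ``begin_get_vpnclient_ipsec_parameters`` below retrieves the current
    # policy with the same polling pattern.
    #
    #     from azure.mgmt.network.v2020_06_01 import models
    #     ipsec_params = models.VpnClientIPsecParameters(
    #         sa_life_time_seconds=86472,
    #         sa_data_size_kilobytes=429497,
    #         ipsec_encryption="AES256",
    #         ipsec_integrity="SHA256",
    #         ike_encryption="AES256",
    #         ike_integrity="SHA384",
    #         dh_group="DHGroup24",
    #         pfs_group="PFS24",
    #     )
    #     poller = await client.virtual_network_gateways.begin_set_vpnclient_ipsec_parameters(
    #         "my-rg", "my-gw", ipsec_params)
    #     applied = await poller.result()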
async def _get_vpnclient_ipsec_parameters_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> "_models.VpnClientIPsecParameters":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._get_vpnclient_ipsec_parameters_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpnclient_ipsec_parameters_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnclientipsecparameters'} # type: ignore
async def begin_get_vpnclient_ipsec_parameters(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> AsyncLROPoller["_models.VpnClientIPsecParameters"]:
"""The Get VpnclientIpsecParameters operation retrieves information about the vpnclient ipsec
policy for P2S client of virtual network gateway in the specified resource group through
Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The virtual network gateway name.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnClientIPsecParameters or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VpnClientIPsecParameters]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_vpnclient_ipsec_parameters_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpnclient_ipsec_parameters.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnclientipsecparameters'} # type: ignore
async def vpn_device_configuration_script(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
parameters: "_models.VpnDeviceScriptParameters",
**kwargs
) -> str:
"""Gets a xml format representation for vpn device configuration script.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection for which the configuration script is generated.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the generate vpn device script operation.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VpnDeviceScriptParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.vpn_device_configuration_script.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnDeviceScriptParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
vpn_device_configuration_script.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/vpndeviceconfigurationscript'} # type: ignore
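    # Usage sketch: the vendor/device/firmware values must match an entry
    # returned by ``supported_vpn_devices``; the ones below are illustrative
    # (``models`` imported as in the sketch above).
    #
    #     params = models.VpnDeviceScriptParameters(
    #         vendor="Cisco", device_family="ISR", firmware_version="15.x")
    #     script = await client.virtual_network_gateways.vpn_device_configuration_script(
    #         "my-rg", "my-connection", params)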
async def _start_packet_capture_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: Optional["_models.VpnPacketCaptureStartParameters"] = None,
**kwargs
) -> Optional[str]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._start_packet_capture_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
if parameters is not None:
body_content = self._serialize.body(parameters, 'VpnPacketCaptureStartParameters')
else:
body_content = None
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_start_packet_capture_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/startPacketCapture'} # type: ignore
async def begin_start_packet_capture(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: Optional["_models.VpnPacketCaptureStartParameters"] = None,
**kwargs
) -> AsyncLROPoller[str]:
"""Starts packet capture on virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Virtual network gateway packet capture parameters supplied to start packet
capture on gateway.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VpnPacketCaptureStartParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._start_packet_capture_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start_packet_capture.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/startPacketCapture'} # type: ignore
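    # Usage sketch: ``parameters`` is optional; ``filter_data`` is assumed to
    # carry a JSON-encoded capture filter (illustrative value below).
    #
    #     start = models.VpnPacketCaptureStartParameters(
    #         filter_data='{"TracingFlags": 11}')
    #     poller = await client.virtual_network_gateways.begin_start_packet_capture(
    #         "my-rg", "my-gw", parameters=start)
    #     capture_ref = await poller.result()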
async def _stop_packet_capture_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnPacketCaptureStopParameters",
**kwargs
) -> Optional[str]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._stop_packet_capture_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnPacketCaptureStopParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_stop_packet_capture_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/stopPacketCapture'} # type: ignore
async def begin_stop_packet_capture(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnPacketCaptureStopParameters",
**kwargs
) -> AsyncLROPoller[str]:
"""Stops packet capture on virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Virtual network gateway packet capture parameters supplied to stop packet
capture on gateway.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VpnPacketCaptureStopParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._stop_packet_capture_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop_packet_capture.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/stopPacketCapture'} # type: ignore
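    # Usage sketch: stopping a capture requires a writable SAS URL to which the
    # capture file is delivered (illustrative URL).
    #
    #     stop = models.VpnPacketCaptureStopParameters(
    #         sas_url="https://myacct.blob.core.windows.net/captures?sv=...")
    #     poller = await client.virtual_network_gateways.begin_stop_packet_capture(
    #         "my-rg", "my-gw", parameters=stop)
    #     await poller.result()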
async def _get_vpnclient_connection_health_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> Optional["_models.VpnClientConnectionHealthDetailListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnClientConnectionHealthDetailListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._get_vpnclient_connection_health_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnClientConnectionHealthDetailListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpnclient_connection_health_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getVpnClientConnectionHealth'} # type: ignore
async def begin_get_vpnclient_connection_health(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs
) -> AsyncLROPoller["_models.VpnClientConnectionHealthDetailListResult"]:
"""Get VPN client connection health detail per P2S client connection of the virtual network
gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnClientConnectionHealthDetailListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VpnClientConnectionHealthDetailListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientConnectionHealthDetailListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_vpnclient_connection_health_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnClientConnectionHealthDetailListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpnclient_connection_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getVpnClientConnectionHealth'} # type: ignore
async def _disconnect_virtual_network_gateway_vpn_connections_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
request: "_models.P2SVpnConnectionRequest",
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._disconnect_virtual_network_gateway_vpn_connections_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(request, 'P2SVpnConnectionRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_disconnect_virtual_network_gateway_vpn_connections_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/disconnectVirtualNetworkGatewayVpnConnections'} # type: ignore
async def begin_disconnect_virtual_network_gateway_vpn_connections(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
request: "_models.P2SVpnConnectionRequest",
**kwargs
) -> AsyncLROPoller[None]:
"""Disconnect vpn connections of virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
        :param request: The parameters supplied to disconnect vpn connections.
:type request: ~azure.mgmt.network.v2020_06_01.models.P2SVpnConnectionRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._disconnect_virtual_network_gateway_vpn_connections_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
request=request,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_disconnect_virtual_network_gateway_vpn_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/disconnectVirtualNetworkGatewayVpnConnections'} # type: ignore
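    # Usage sketch: disconnecting selected P2S connections by id;
    # ``vpn_connection_ids`` is assumed to be the field of
    # P2SVpnConnectionRequest and the id below is illustrative.
    #
    #     req = models.P2SVpnConnectionRequest(vpn_connection_ids=["IKEv2_7a9b3c"])
    #     poller = await client.virtual_network_gateways.begin_disconnect_virtual_network_gateway_vpn_connections(
    #         "my-rg", "my-gw", req)
    #     await poller.result()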
| [
"[email protected]"
] | |
70b88d4e504d3f8be48e6ad99df74771f43ee3c9 | 0e99d2efff685a66869d5a7cd4a68de8955f498c | /leetcode3_tree/tree/test.py | a88e4c84a18be34bef545cd3ce70d81c97cabaf1 | [] | no_license | supercp3/code_leetcode | f303109c70ccdd0baa711cf606d402158b212525 | 1dc6260e229a012111ec4d5e60071c2458ce5002 | refs/heads/master | 2020-03-26T11:33:28.741405 | 2018-10-15T02:18:24 | 2018-10-15T02:18:24 | 144,848,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | list1=[2,4,3,5,1]
res=sorted(list1)
print(res) | [
"[email protected]"
] | |
a7c5f24fa36d7e83692dcda87fd00da4ff1ddd23 | 23f4584630c4b13fa54ebca8b1345a020605db10 | /test/dataset/test_query.py | 23d2fe8fc6109dd8c89e84054a96f503c2523d66 | [
"MIT"
] | permissive | chenxofhit/singlet | 2aebe1d2a3700d498b1772301611dcebd0ddb966 | c9264cf3451f816f9a256d4aa0b32c8b674638ae | refs/heads/master | 2020-06-04T08:45:47.050305 | 2019-05-01T20:46:02 | 2019-05-01T20:46:02 | 191,950,542 | 2 | 0 | null | 2019-06-14T13:45:50 | 2019-06-14T13:45:50 | null | UTF-8 | Python | false | false | 2,198 | py | #!/usr/bin/env python
# vim: fdm=indent
'''
author: Fabio Zanini
date: 07/08/17
content: Test Dataset class.
'''
import pytest
@pytest.fixture(scope="module")
def ds():
from singlet.dataset import Dataset
return Dataset(
samplesheet='example_sheet_tsv',
counts_table='example_table_tsv',
featuresheet='example_sheet_tsv')
def test_average_samples(ds):
print('Average samples')
ds_avg = ds.average(axis='samples', column='experiment')
assert(tuple(ds_avg.samplenames) == ('exp1', 'test_pipeline'))
print('Done!')
def test_average_features(ds):
print('Average features')
ds_avg = ds.average(axis='features', column='annotation')
assert(tuple(ds_avg.featurenames) == ('gene', 'other', 'spikein'))
print('Done!')
def test_query_samples_meta(ds):
ds_tmp = ds.query_samples_by_metadata(
'experiment == "test_pipeline"',
inplace=False)
assert(tuple(ds_tmp.samplenames) == ('test_pipeline',))
def test_query_samples_name(ds):
ds_tmp = ds.query_samples_by_name(
ds.samplenames[1:3],
inplace=False)
assert(tuple(ds_tmp.samplenames) == tuple(ds.samplenames[1:3]))
def test_query_sample_counts_onegene(ds):
print('Query sample by counts in one gene')
ds_tmp = ds.query_samples_by_counts('KRIT1 > 100', inplace=False)
assert(tuple(ds_tmp.samplenames) == ('third_sample',))
print('Done!')
def test_query_sample_total_counts(ds):
print('Query sample by total counts')
ds_tmp = ds.query_samples_by_counts('total < 3000000', inplace=False)
assert(tuple(ds_tmp.samplenames) == ('second_sample',))
print('Done!')
def test_query_mapped_counts(ds):
print('Query sample by mapped counts')
ds_tmp = ds.query_samples_by_counts('mapped < 1000000', inplace=False)
assert(tuple(ds_tmp.samplenames) == ('second_sample',))
print('Done!')
def test_query_features_counts(ds):
print('Query features by counts')
ds_tmp = ds.query_features_by_counts(
'first_sample > 1000000',
inplace=False)
assert(tuple(ds_tmp.featurenames) == ('__alignment_not_unique',))
print('Done!')
| [
"[email protected]"
] | |
bdad55440d4677d72cd568533e733a8f3f5ef0ab | bb983b38f9be7b6fd4ab1a651484db37c1aeff39 | /1030/python_complex.py | a40445ad017c432fa77d44b282863851697f01cc | [] | no_license | nakanishi-akitaka/python2018_backup | c214df78372cca993d69f8001010ec2f6dcaf1be | 45766d3c3777de2a91b3e2cf50c6bfedca8627da | refs/heads/master | 2023-02-18T08:04:28.625532 | 2022-06-07T01:02:53 | 2022-06-07T01:02:53 | 201,399,236 | 5 | 30 | null | 2023-02-10T21:06:51 | 2019-08-09T05:48:22 | Jupyter Notebook | UTF-8 | Python | false | false | 2,085 | py | # -*- coding: utf-8 -*-
"""
https://note.nkmk.me/python-complex/
Created on Tue Oct 30 12:04:10 2018
@author: Akitaka
"""
c = 3 + 4j
print(c)
print(type(c))
# (3+4j)
# <class 'complex'>
# c = 3 + j
# NameError: name 'j' is not defined
c = 3 + 1j
print(c)
# (3+1j)
c = 3j
print(c)
# 3j
c = 3 + 0j
print(c)
# (3+0j)
c = 1.2e3 + 3j
print(c)
# (1200+3j)
c = complex(3, 4)
print(c)
print(type(c))
# (3+4j)
# <class 'complex'>
#%%
c = 3 + 4j
print(c.real)
print(type(c.real))
# 3.0
# <class 'float'>
print(c.imag)
print(type(c.imag))
# 4.0
# <class 'float'>
# c.real = 5.5
# AttributeError: readonly attribute
#%%
c = 3 + 4j
print(c.conjugate())
# (3-4j)
#%%
c = 3 + 4j
print(abs(c))
# 5.0
c = 1 + 1j
print(abs(c))
# 1.4142135623730951
#%%
import cmath
import math
c = 1 + 1j
print(math.atan2(c.imag, c.real))
# 0.7853981633974483
print(cmath.phase(c))
# 0.7853981633974483
print(cmath.phase(c) == math.atan2(c.imag, c.real))
# True
print(math.degrees(cmath.phase(c)))
# 45.0
#%%
c = 1 + 1j
print(cmath.polar(c))
print(type(cmath.polar(c)))
# (1.4142135623730951, 0.7853981633974483)
# <class 'tuple'>
print(cmath.polar(c)[0] == abs(c))
# True
print(cmath.polar(c)[1] == cmath.phase(c))
# True
print(cmath.rect(1, 1))
# (0.5403023058681398+0.8414709848078965j)
print(cmath.rect(1, 0))
# (1+0j)
print(cmath.rect(cmath.polar(c)[0], cmath.polar(c)[1]))
# (1.0000000000000002+1j)
r = 2
ph = math.pi
print(cmath.rect(r, ph).real == r * math.cos(ph))
# True
print(cmath.rect(r, ph).imag == r * math.sin(ph))
# True
#%%
c1 = 3 + 4j
c2 = 2 - 1j
print(c1 + c2)
# (5+3j)
print(c1 - c2)
# (1+5j)
print(c1 * c2)
# (10+5j)
print(c1 / c2)
# (0.4+2.2j)
print(c1 ** 3)
# (-117+44j)
print((-3 + 4j) ** 0.5)
# (1.0000000000000002+2j)
print((-1) ** 0.5)
# (6.123233995736766e-17+1j)
print(cmath.sqrt(-3 + 4j))
# (1+2j)
print(cmath.sqrt(-1))
# 1j
print(c1 + 3)
# (6+4j)
print(c1 * 0.5)
# (1.5+2j)
| [
"[email protected]"
] | |
c3875ec4422c869b5e828ce009f3eed671c34811 | 30d360f965253167c99f9b4cd41001491aed08af | /Platinum_code/iolib.py | d0c23dd5894713465efb8d4bdd4e61b483d6f033 | [] | no_license | petervanya/PhDcode | d2d9f7170f201d6175fec9c3d4094617a5427fb5 | 891e6812a2699025d26b901c95d0c46a706b0c96 | refs/heads/master | 2020-05-22T06:43:47.293134 | 2018-01-29T12:59:42 | 2018-01-29T12:59:42 | 64,495,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,162 | py | #!/usr/bin/env python
"""
Collection of often used functions for
* input/output
* rotating, translating and printing data
NOW MOSTLY OBSOLETE, ONLY *_table FUNCTIONS USED
[email protected], 11/05/15
"""
import numpy as np
from numpy.matlib import repmat
from math import sin, cos
def save_xyz(coords, atom_names, filename):
"""save xyz coords into file"""
f = open(filename,"w")
M, N = coords.shape
for i in range(M):
line = str(atom_names[i])+"\t"
for j in range(N):
line += "%.6f" % coords[i, j] + "\t"
line += "\n"
f.write(line)
f.close()
print "Coords saved to",filename
def print_xyz(coords, atom_names):
M, N = coords.shape
for i in range(M):
line = atom_names[i] + "\t"
for j in range(N):
line += "%.6f" % coords[i, j] + "\t"
print line
def read_xyz(filepath):
f = open(filepath,"r").readlines()
A = np.array([line.split() for line in f])
names = A[:,0]
data = A[:,1:].astype(float)
return names, data
def save_table(A, filepath, header=False, latex=False):
"""save table A into file, possibly with latex formatting"""
f = open(filepath,"w")
M,N = A.shape
if header:
f.write(header)
for i in range(M):
line = ""
for j in range(N):
line += str(A[i, j]) + "\t"
if latex:
line = " & ".join(line.split())
line += " \\\\"
line += "\n"
f.write(line)
f.close()
print "Table saved to",filepath
def print_table(A, header=""):
if header:
print header
M,N = A.shape
for i in range(M):
line=""
for j in range(N):
line += str(A[i, j]) + "\t"
print line
def read_table(filepath):
"""read summary tables
TODO: rewrite in pandas DataFrame"""
f = open(filepath,"r").readlines()
A = np.array([line.rstrip("\n").split("\t") for line in f])
return A
def get_path(Pt_dir, cluster, spin, eta=0, ext=""):
"""get full file path with eta and spin"""
if eta != 0:
path = Pt_dir + "/Pt_Water" + "/Pt" + cluster + "/Eta_" + str(eta) + "/S_" + str(spin) + "/Pt.out"
else:
path = Pt_dir + "/Pt_SP" + "/Pt" + cluster + "/S_" + str(spin) + "/Pt.out"
if ext:
path += "." + ext
return path
def shift(coords, s):
"""shift coordinates by a given vector s"""
N = len(coords)
return coords + repmat(s,N,1)
def rotate_theta(coords, theta):
"""rotate atoms by an angle theta (in radians)"""
N = coords.shape[0]
Rtheta = np.array([[cos(theta),0,-sin(theta)],
[0, 1, 0 ],
[sin(theta),0, cos(theta)]])
for i in range(N):
coords[i,:] = np.dot(Rtheta, coords[i,:])
return coords
def rotate_phi(coords, phi):
"""rotate atoms by angle phi (in radians)"""
N = coords.shape[0]
Rphi = np.array([[cos(phi),-sin(phi),0],
[sin(phi), cos(phi),0],
[0, 0, 1]])
for i in range(N):
coords[i,:] = np.dot(Rphi, coords[i,:])
return coords
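# Minimal usage sketch (illustrative; the file names and the shift vector are
# hypothetical, the rotation follows the matrices defined above):
#     from math import pi
#     names, xyz = read_xyz("cluster.xyz")
#     xyz = shift(xyz, np.array([0.0, 0.0, 1.0]))
#     xyz = rotate_phi(xyz, pi / 2)   # rotation about z: (1, 0, 0) -> (0, 1, 0)
#     save_xyz(xyz, names, "cluster_rot.xyz")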
# ---- file: /10818_최소최대.py (repo: 3974kjh/baekjoon) ----
T = int(input())
N = list(map(int, input().split()))
print(min(N), max(N))
# ---- file: /git-pull-request (repo: sumansai14/git-pull-request) ----
#! /usr/bin/python
import sys
import os
import getpass
import pycurl
from io import BytesIO
import simplejson
from pygit2 import Repository
if __name__ == '__main__':
try:
repository = Repository(os.getcwd())
except KeyError:
raise SystemExit("This is not a valid git repository")
response_buffer = BytesIO()
remotes = repository.remotes
remotes_names = [remote.name for remote in remotes]
remote_upstream = sys.argv[1].split('/')[0]
remote_upstream_base = sys.argv[1].split('/')[1]
remote_origin = sys.argv[2].split('/')[0]
remote_origin_branch = sys.argv[2].split('/')[1]
for remote in remotes:
if remote.name == remote_upstream:
remote_upstream_instance = remote
else:
remote_origin_instance = remote
connection = pycurl.Curl()
connection.setopt(pycurl.URL, 'https://api.github.com/authorizations')
if remote_upstream in remotes_names and remote_origin in remotes_names:
username = raw_input('Username: ')
password = getpass.getpass()
connection.setopt(connection.USERPWD, '%s:%s' % (username, password))
connection.setopt(connection.POST, 1)
connection.setopt(connection.POSTFIELDS, '{"scopes": ["repo"]}')
connection.setopt(connection.WRITEFUNCTION, response_buffer.write)
connection.perform()
connection.close()
        connection = pycurl.Curl()
        json_response = simplejson.loads(response_buffer.getvalue())
        token = json_response['token']
        response_buffer = BytesIO()  # fresh buffer so the PR response is not appended to the auth response
upstream_owner = remote_upstream_instance.url.split('/')[3]
repos = remote_upstream_instance.url.split('/')[4].split('.')[0]
post_json = {}
post_json["title"] = repository.head.get_object().message
post_json["body"] = ""
post_json["head"] = remote_origin_instance.url.split('/')[3] + ':' + remote_origin_branch
post_json["base"] = remote_upstream_base
data = simplejson.dumps(post_json)
connection.setopt(connection.POST, 1)
connection.setopt(connection.WRITEFUNCTION, response_buffer.write)
connection.setopt(connection.POSTFIELDS, data)
connection.setopt(connection.HTTPHEADER, [str('Authorization: token ' + token)])
url = 'https://api.github.com/repos/' + upstream_owner + '/' + repos + '/pulls'
connection.setopt(pycurl.URL, str(url))
connection.perform()
response = response_buffer.getvalue()
print response
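# Usage sketch, inferred from the argv parsing above (remote and branch names
# are hypothetical):
#     git-pull-request upstream/master origin/my-feature
# i.e. argv[1] is "<upstream-remote>/<base-branch>" and argv[2] is
# "<origin-remote>/<head-branch>".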
# ---- file: third_party/blink/renderer/build/scripts/minimize_css.py (repo: Samsung/Castanets) ----
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import functools
import os.path
import re
import sys
import in_generator
class CSSMinimizer(object):
INITIAL = 0
MAYBE_COMMENT_START = 1
INSIDE_COMMENT = 2
MAYBE_COMMENT_END = 3
INSIDE_SINGLE_QUOTE = 4
INSIDE_SINGLE_QUOTE_ESCAPE = 5
INSIDE_DOUBLE_QUOTE = 6
INSIDE_DOUBLE_QUOTE_ESCAPE = 7
def __init__(self):
self._output = ''
self._codeblock = ''
def flush_codeblock(self):
stripped = re.sub(r"\s+", ' ', self._codeblock)
stripped = re.sub(r";?\s*(?P<op>[{};])\s*", r'\g<op>', stripped)
self._output += stripped
self._codeblock = ''
def parse(self, content):
state = self.INITIAL
for char in content:
if state == self.INITIAL:
if char == '/':
state = self.MAYBE_COMMENT_START
elif char == "'":
self.flush_codeblock()
self._output += char
state = self.INSIDE_SINGLE_QUOTE
elif char == '"':
self.flush_codeblock()
self._output += char
state = self.INSIDE_DOUBLE_QUOTE
else:
self._codeblock += char
elif state == self.MAYBE_COMMENT_START:
if char == '*':
self.flush_codeblock()
state = self.INSIDE_COMMENT
else:
self._codeblock += '/' + char
state = self.INITIAL
elif state == self.INSIDE_COMMENT:
if char == '*':
state = self.MAYBE_COMMENT_END
else:
pass
elif state == self.MAYBE_COMMENT_END:
if char == '/':
state = self.INITIAL
else:
state = self.INSIDE_COMMENT
elif state == self.INSIDE_SINGLE_QUOTE:
if char == '\\':
self._output += char
state = self.INSIDE_SINGLE_QUOTE_ESCAPE
elif char == "'":
self._output += char
state = self.INITIAL
else:
self._output += char
elif state == self.INSIDE_SINGLE_QUOTE_ESCAPE:
self._output += char
state = self.INSIDE_SINGLE_QUOTE
elif state == self.INSIDE_DOUBLE_QUOTE:
if char == '\\':
self._output += char
state = self.INSIDE_DOUBLE_QUOTE_ESCAPE
elif char == '"':
self._output += char
state = self.INITIAL
else:
self._output += char
elif state == self.INSIDE_DOUBLE_QUOTE_ESCAPE:
self._output += char
state = self.INSIDE_DOUBLE_QUOTE
self.flush_codeblock()
self._output = self._output.strip()
return self._output
@classmethod
def minimize_css(cls, content):
minimizer = CSSMinimizer()
return minimizer.parse(content)
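    # Illustrative behaviour of the state machine above (output checked by hand
    # against the regexes in flush_codeblock):
    #     CSSMinimizer.minimize_css("p {\n  margin: 0;\n}")
    #     -> 'p{margin: 0}'
    # Comments are dropped, runs of whitespace collapse to one space, and
    # whitespace/semicolons around '{', '}' and ';' are stripped.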
class CSSMinimizerWriter(in_generator.GenericWriter):
def __init__(self, in_file_paths):
super(CSSMinimizerWriter, self).__init__(in_file_paths)
self._outputs = {}
for in_file_path in in_file_paths:
out_path = os.path.basename(in_file_path)
self._outputs[out_path] = functools.partial(self.generate_implementation, in_file_path)
def generate_implementation(self, in_file_path):
content = ''
with open(os.path.abspath(in_file_path)) as in_file:
content = in_file.read()
return CSSMinimizer.minimize_css(content)
if __name__ == '__main__':
in_generator.Maker(CSSMinimizerWriter).main(sys.argv)
# ---- file: utils/pairwise_data_helpers.py (repo: bikong2/Question-Difficulty-Prediction) ----
# -*- coding:utf-8 -*-
__author__ = 'Randolph'
import os
import gensim
import logging
import json
import numpy as np
from collections import OrderedDict
from gensim.models import KeyedVectors
from tflearn.data_utils import pad_sequences
TEXT_DIR = '../data/content.txt'
METADATA_DIR = '../data/metadata.tsv'
def logger_fn(name, input_file, level=logging.INFO):
tf_logger = logging.getLogger(name)
tf_logger.setLevel(level)
log_dir = os.path.dirname(input_file)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
fh = logging.FileHandler(input_file, mode='w')
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
tf_logger.addHandler(fh)
return tf_logger
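    # Usage sketch (illustrative; the logger name and log path are hypothetical):
    #     logger = logger_fn('training', 'logs/training.log')
    #     logger.info('Training started...')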
def cal_doa(front_labels, behind_labels, front_scores, behind_scores):
"""
    Compute the DOA (Degree of Agreement) value.
Args:
front_labels: The front true labels
behind_labels: The behind true labels
front_scores: The front predicted scores provided by network
behind_scores: The behind predicted scores provided by network
Returns:
doa: The DOA value
"""
def sig(front_value, behind_value):
if front_value > behind_value:
return 1
else:
return 0
doa, molecule, denominator = 0.0, 0.0, 0.0
data_size = len(front_scores)
for index in range(data_size):
predicted_bool = sig(front_scores[index][0], behind_scores[index][0])
true_bool = sig(front_labels[index], behind_labels[index])
molecule += predicted_bool and true_bool
denominator += true_bool
    doa = molecule / denominator  # NOTE: raises ZeroDivisionError if no true pair has front > behind
return doa
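    # Worked toy example (illustrative): with true difficulties
    # front_labels=[2, 1] and behind_labels=[1, 2], only the first pair is
    # truly ordered front > behind; scores front_scores=[[0.9], [0.8]] and
    # behind_scores=[[0.5], [0.6]] rank that pair correctly, so
    # cal_doa(...) == 1.0 (1 correctly ordered pair / 1 truly ordered pair).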
def create_prediction_file(output_file, all_id, all_labels, all_predict_scores):
"""
Create the prediction file.
Args:
        output_file: The output file path for the predicted results
        all_id: The data record id info provided by class Data
        all_labels: All original labels
        all_predict_scores: All predicted scores produced by the network
Raises:
IOError: If the prediction file is not a <.json> file
"""
if not output_file.endswith('.json'):
raise IOError("✘ The prediction file is not a json file."
"Please make sure the prediction data is a json file.")
with open(output_file, 'w') as fout:
data_size = len(all_predict_scores)
for i in range(data_size):
labels = [float(i) for i in all_labels[i]]
predict_scores = [round(float(i), 4) for i in all_predict_scores[i]]
data_record = OrderedDict([
('id', all_id[i]),
('labels', labels),
('predict_scores', predict_scores)
])
fout.write(json.dumps(data_record, ensure_ascii=False) + '\n')
def create_metadata_file(embedding_size, output_file=METADATA_DIR):
"""
    Create the metadata file based on the corpus file (used for the embedding visualization later).
Args:
embedding_size: The embedding size
output_file: The metadata file (default: 'metadata.tsv')
Raises:
IOError: If word2vec model file doesn't exist
"""
word2vec_file = '../data/word2vec_' + str(embedding_size) + '.txt'
if not os.path.isfile(word2vec_file):
raise IOError("✘ The word2vec file doesn't exist.")
model = KeyedVectors.load_word2vec_format(open(word2vec_file, 'r'), binary=False, unicode_errors='replace')
word2idx = dict([(k, v.index) for k, v in model.wv.vocab.items()])
word2idx_sorted = [(k, word2idx[k]) for k in sorted(word2idx, key=word2idx.get, reverse=False)]
with open(output_file, 'w+') as fout:
for word in word2idx_sorted:
if word[0] is None:
print("Empty Line, should replaced by any thing else, or will cause a bug of tensorboard")
fout.write('<Empty Line>' + '\n')
else:
fout.write(word[0] + '\n')
def load_word2vec_matrix(embedding_size):
"""
Return the word2vec model matrix.
Args:
embedding_size: The embedding size
Returns:
The word2vec model matrix
Raises:
IOError: If word2vec model file doesn't exist
"""
word2vec_file = '../data/word2vec_' + str(embedding_size) + '.txt'
if not os.path.isfile(word2vec_file):
raise IOError("✘ The word2vec file doesn't exist. ")
model = KeyedVectors.load_word2vec_format(open(word2vec_file, 'r'), binary=False, unicode_errors='replace')
vocab_size = len(model.wv.vocab.items())
vocab = dict([(k, v.index) for k, v in model.wv.vocab.items()])
vector = np.zeros([vocab_size, embedding_size])
for key, value in vocab.items():
if key is not None:
vector[value] = model[key]
return vocab_size, vector
def data_word2vec(input_file, word2vec_model):
"""
Create the research data tokenindex based on the word2vec model file.
Return the class Data(includes the data tokenindex and data labels).
Args:
input_file: The research data
word2vec_model: The word2vec model file
Returns:
The class Data(includes the data tokenindex and data labels)
Raises:
IOError: If the input file is not the .json file
"""
vocab = dict([(k, v.index) for (k, v) in word2vec_model.wv.vocab.items()])
def _token_to_index(content):
result = []
for item in content:
word2id = vocab.get(item)
if word2id is None:
word2id = 0
result.append(word2id)
return result
if not input_file.endswith('.json'):
raise IOError("✘ The research data is not a json file. "
"Please preprocess the research data into the json file.")
with open(input_file) as fin:
id_list = [[], []]
content_index_list = [[], []]
question_index_list = [[], []]
option_index_list = [[], []]
labels_list = [[], []]
total_line = 0
for eachline in fin:
data = json.loads(eachline)
id_list[0].append(data['front_id'])
id_list[1].append(data['behind_id'])
content_index_list[0].append(_token_to_index(data['front_content']))
content_index_list[1].append(_token_to_index(data['behind_content']))
question_index_list[0].append(_token_to_index(data['front_question']))
question_index_list[1].append(_token_to_index(data['behind_question']))
option_index_list[0].append(_token_to_index(data['front_option']))
option_index_list[1].append(_token_to_index(data['behind_option']))
labels_list[0].append(data['front_diff'])
labels_list[1].append(data['behind_diff'])
total_line += 1
class _Data:
def __init__(self):
pass
@property
def number(self):
return total_line
@property
def id(self):
return id_list
@property
def content_index(self):
return content_index_list
@property
def question_index(self):
return question_index_list
@property
def option_index(self):
return option_index_list
@property
def labels(self):
return labels_list
return _Data()
def load_data_and_labels(data_file, embedding_size, data_aug_flag):
"""
    Load research data from files, split the data into words and generate labels.
    Return split sentences, labels and the max sentence length of the research data.
    Args:
        data_file: The research data
        embedding_size: The embedding size
        data_aug_flag: The data augmentation flag (accepted for API compatibility; currently unused in this function)
Returns:
The class Data
Raises:
IOError: If word2vec model file doesn't exist
"""
word2vec_file = '../data/word2vec_' + str(embedding_size) + '.txt'
# Load word2vec model file
if not os.path.isfile(word2vec_file):
raise IOError("✘ The word2vec file doesn't exist. ")
model = KeyedVectors.load_word2vec_format(open(word2vec_file, 'r'), binary=False, unicode_errors='replace')
# Load data from files and split by words
data = data_word2vec(input_file=data_file, word2vec_model=model)
# plot_seq_len(data_file, data)
return data
def pad_data(data, pad_seq_len):
"""
Padding each sentence of research data according to the max sentence length.
Return the padded data and data labels.
Args:
data: The research data
pad_seq_len: The max sentence length of [content, question, option] text
Returns:
pad_content: The padded data
pad_question: The padded data
pad_option: The padded data
labels: The data labels
"""
pad_seq_len_list = list(map(int, pad_seq_len.split(',')))
pad_content_list, pad_question_list, pad_option_list, labels_list = [], [], [], []
for i in data.content_index:
pad_content_list.append(pad_sequences(i, maxlen=pad_seq_len_list[0], value=0.))
for j in data.question_index:
pad_question_list.append(pad_sequences(j, maxlen=pad_seq_len_list[1], value=0.))
for k in data.option_index:
pad_option_list.append(pad_sequences(k, maxlen=pad_seq_len_list[2], value=0.))
for l in data.labels:
labels_list.append([[float(label)] for label in l])
return pad_content_list, pad_question_list, pad_option_list, labels_list
def batch_iter(data_tuples, batch_size, num_epochs, shuffle=True):
"""
    Because this function contains `yield`, it is not an ordinary function but a generator.
    Behavior: the data is processed in num_epochs passes (epochs); within each epoch, if
    shuffle=True the data is reshuffled, then yielded batch by batch, each of size
    batch_size, for a total of int(len(data)/batch_size)+1 batches per epoch.
Args:
data_tuples: The data tuples
batch_size: The size of the data batch
num_epochs: The number of epochs
shuffle: Shuffle or not (default: True)
Returns:
A batch iterator for data set
"""
(x_content, x_question, x_option, y) = data_tuples
x_content_front, x_content_behind = x_content[0], x_content[1]
x_question_front, x_question_behind = x_question[0], x_question[1]
x_option_front, x_option_behind = x_option[0], x_option[1]
y_front, y_behind = y[0], y[1]
data = list(zip(x_content_front, x_content_behind,
x_question_front, x_question_behind,
x_option_front, x_option_behind,
y_front, y_behind))
data = np.array(data)
data_size = len(data)
num_batches_per_epoch = int((data_size - 1) / batch_size) + 1
for epoch in range(num_epochs):
# Shuffle the data at each epoch
if shuffle:
shuffle_indices = np.random.permutation(np.arange(data_size))
shuffled_data = data[shuffle_indices]
else:
shuffled_data = data
for batch_num in range(num_batches_per_epoch):
start_index = batch_num * batch_size
end_index = min((batch_num + 1) * batch_size, data_size)
yield shuffled_data[start_index:end_index]
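# Usage sketch (illustrative; assumes `train_data` came from
# load_data_and_labels and the pad lengths '350,150,10' are hypothetical):
#     x_content, x_question, x_option, y = pad_data(train_data, pad_seq_len='350,150,10')
#     batches = batch_iter((x_content, x_question, x_option, y), batch_size=32, num_epochs=5)
#     for batch in batches:
#         x_cf, x_cb, x_qf, x_qb, x_of, x_ob, y_f, y_b = zip(*batch)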
# ---- file: src/2562-count-ways-to-build-good-strings/count-ways-to-build-good-strings.py (repo: HLNN/leetcode) ----
# Given the integers zero, one, low, and high, we can construct a string by starting with an empty string, and then at each step perform either of the following:
#
#   - Append the character '0' `zero` times.
#   - Append the character '1' `one` times.
#
# This can be performed any number of times.
#
# A good string is a string constructed by the above process having a length between low and high (inclusive).
#
# Return the number of different good strings that can be constructed satisfying these properties. Since the answer can be large, return it modulo 10^9 + 7.
#
# Example 1:
#   Input: low = 3, high = 3, zero = 1, one = 1
#   Output: 8
#   Explanation: One possible valid good string is "011".
#   It can be constructed as follows: "" -> "0" -> "01" -> "011".
#   All binary strings from "000" to "111" are good strings in this example.
#
# Example 2:
#   Input: low = 2, high = 3, zero = 1, one = 2
#   Output: 5
#   Explanation: The good strings are "00", "11", "000", "110", and "011".
#
# Constraints:
#   1 <= low <= high <= 10^5
#   1 <= zero, one <= low
class Solution:
def countGoodStrings(self, low: int, high: int, zero: int, one: int) -> int:
        # dp[i] = number of distinct good strings of length exactly i
        dp = [0] * (high + 1)
        dp[0] = 1  # one way to build the empty string
        for i in range(1, high + 1):
            res = 0
            if i - zero >= 0:
                res += dp[i - zero]  # last appended block was '0' * zero
            if i - one >= 0:
                res += dp[i - one]  # last appended block was '1' * one
            dp[i] = res
        # good strings have length low..high; O(high) time and space overall
        return sum(dp[low:high+1]) % 1000000007
# ---- file: /app/http/middleware/AuthenticationMiddleware.py (repo: josephmancuso/gbaleague-masonite2) ----
""" Authentication Middleware """
from masonite.request import Request
class AuthenticationMiddleware(object):
""" Middleware To Check If The User Is Logged In """
def __init__(self, request: Request):
""" Inject Any Dependencies From The Service Container """
self.request = request
def before(self):
""" Run This Middleware Before The Route Executes """
if not self.request.user():
self.request.redirect_to('login')
def after(self):
""" Run This Middleware After The Route Executes """
pass
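    # Registration sketch (illustrative): in Masonite 2 this class is typically
    # added to ROUTE_MIDDLEWARE in config/middleware.py, e.g.
    #     ROUTE_MIDDLEWARE = {'auth': AuthenticationMiddleware}
    # and attached to routes with .middleware('auth').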
# ---- file: /debug/mydict.py (repo: ahuer2435/python_study) ----
# -*- coding: utf-8 -*-
class Dict(dict):
def __init__(self, **kw):
        super(Dict,self).__init__(**kw)  # delegate to the built-in dict initializer
def __getattr__(self,key):
try:
return self[key]
except KeyError:
raise AttributeError(r"'Dict' object has no attribute '%s'" % key)
def __setattr__(self,key,value):
self[key] = value
# In effect this extends the base dict with __getattr__/__setattr__ so that keys can also be read and written as attributes.
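# Usage sketch (illustrative):
#     d = Dict(a=1)
#     d.a        # -> 1   (attribute access falls through to the dict)
#     d.b = 2
#     d['b']     # -> 2   (attribute assignment stores a key)
#     d.missing  # -> AttributeError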
"[email protected]"
] | |
a251069222a43562b476d29d369c468287564ab7 | 60df913818933b6aabd8007405b675d958f5078b | /subscription_join/forms.py | c3a84f41095cb16420dc19aba818ae5447946c9f | [] | no_license | ProsenjitKumar/prosenjit-das | b8d0e9081790d07673753b774550975bca9ad423 | b08eb8e91939c234563f1f6a9d7870de2c519599 | refs/heads/master | 2022-11-28T07:04:01.615997 | 2019-06-02T20:23:21 | 2019-06-02T20:23:21 | 189,855,835 | 1 | 0 | null | 2022-11-22T03:14:52 | 2019-06-02T14:22:48 | HTML | UTF-8 | Python | false | false | 1,687 | py | from crispy_forms.helper import FormHelper
from django import forms
from django.core.mail import send_mail
from crispy_forms.layout import Layout, Submit, Row, Column
from .models import NewsLetterUser, NewsLetter
class ContactForm(forms.Form):
subject = forms.CharField(max_length=100)
message = forms.CharField(widget=forms.Textarea)
email = forms.EmailField()
cc_myself = forms.BooleanField(required=False)
def send_mail(self):
subject = self.cleaned_data['subject']
message = self.cleaned_data['message']
# subject = 'From Prosnjit Localhost'
# message = '%s %s' % (comment, name)
emailFrom = self.cleaned_data['email']
cc_myself = self.cleaned_data['cc_myself']
emailTo = ['[email protected]']
send_mail(subject, message, emailFrom, emailTo, fail_silently=False,)
class CrispyContactAddressForm(ContactForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Row(
Column('email', css_class='form-group col-md-6 mb-0'),
Column('subject', css_class='form-group col-md-6 mb-0'),
css_class='form-row'
),
Row(
Column('message', css_class='form-group col-md-6 mb-0'),
),
'cc_myself',
Submit('submit', 'Send')
)
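    # Template usage sketch (illustrative; assumes django-crispy-forms is
    # installed and configured):
    #     {% load crispy_forms_tags %}
    #     {% crispy form %}
    # The Layout above then renders the email and subject fields side by side
    # in one Bootstrap row.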
class NewsLetterUserSignUpForm(forms.Form):
email = forms.EmailField()
    def clean_data(self):
        # NOTE: Django only auto-runs clean_<fieldname>() hooks; rename to clean_email() for this to run.
        emailFrom = self.cleaned_data['email']
        return emailFrom
# ---- file: /sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations_async/_path_operations_async.py (repo: yunhaoling/azure-sdk-for-python) ----
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import map_error
from ... import models
class PathOperations:
"""PathOperations async operations.
    You should not instantiate this class directly; instead, create a Client instance that will create it for you and attach it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar comp: . Constant value: "expiry".
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
self.comp = "expiry"
async def create(self, resource=None, continuation=None, mode=None, rename_source=None, source_lease_id=None, properties=None, permissions=None, umask=None, request_id=None, timeout=None, path_http_headers=None, lease_access_conditions=None, modified_access_conditions=None, source_modified_access_conditions=None, *, cls=None, **kwargs):
"""Create File | Create Directory | Rename File | Rename Directory.
Create or rename a file or directory. By default, the destination is
overwritten and if the destination already exists and has a lease the
lease is broken. This operation supports conditional HTTP requests.
For more information, see [Specifying Conditional Headers for Blob
Service
Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
To fail if the destination already exists, use a conditional request
with If-None-Match: "*".
:param resource: Required only for Create File and Create Directory.
The value must be "file" or "directory". Possible values include:
'directory', 'file'
:type resource: str or
~azure.storage.filedatalake.models.PathResourceType
:param continuation: Optional. When deleting a directory, the number
of paths that are deleted with each invocation is limited. If the
number of paths to be deleted exceeds this limit, a continuation token
is returned in this response header. When a continuation token is
returned in the response, it must be specified in a subsequent
invocation of the delete operation to continue deleting the directory.
:type continuation: str
:param mode: Optional. Valid only when namespace is enabled. This
parameter determines the behavior of the rename operation. The value
must be "legacy" or "posix", and the default value will be "posix".
Possible values include: 'legacy', 'posix'
:type mode: str or ~azure.storage.filedatalake.models.PathRenameMode
:param rename_source: An optional file or directory to be renamed.
The value must have the following format: "/{filesystem}/{path}". If
"x-ms-properties" is specified, the properties will overwrite the
existing properties; otherwise, the existing properties will be
preserved. This value must be a URL percent-encoded string. Note that
the string may only contain ASCII characters in the ISO-8859-1
character set.
:type rename_source: str
:param source_lease_id: A lease ID for the source path. If specified,
the source path must have an active lease and the lease ID must match.
:type source_lease_id: str
:param properties: Optional. User-defined properties to be stored with
the filesystem, in the format of a comma-separated list of name and
value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded
string. Note that the string may only contain ASCII characters in the
ISO-8859-1 character set. If the filesystem exists, any properties
not included in the list will be removed. All properties are removed
if the header is omitted. To merge new and existing properties, first
get all existing properties and the current E-Tag, then make a
conditional request with the E-Tag and include values for all
properties.
:type properties: str
:param permissions: Optional and only valid if Hierarchical Namespace
is enabled for the account. Sets POSIX access permissions for the file
owner, the file owning group, and others. Each class may be granted
read, write, or execute permission. The sticky bit is also supported.
Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are
supported.
:type permissions: str
:param umask: Optional and only valid if Hierarchical Namespace is
enabled for the account. When creating a file or directory and the
parent folder does not have a default ACL, the umask restricts the
permissions of the file or directory to be created. The resulting
permission is given by p bitwise and not u, where p is the permission
and u is the umask. For example, if p is 0777 and u is 0057, then the
resulting permission is 0720. The default permission is 0777 for a
directory and 0666 for a file. The default umask is 0027. The umask
must be specified in 4-digit octal notation (e.g. 0766).
:type umask: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param path_http_headers: Additional parameters for the operation
:type path_http_headers:
~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param source_modified_access_conditions: Additional parameters for
the operation
:type source_modified_access_conditions:
~azure.storage.filedatalake.models.SourceModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
cache_control = None
if path_http_headers is not None:
cache_control = path_http_headers.cache_control
content_encoding = None
if path_http_headers is not None:
content_encoding = path_http_headers.content_encoding
content_language = None
if path_http_headers is not None:
content_language = path_http_headers.content_language
content_disposition = None
if path_http_headers is not None:
content_disposition = path_http_headers.content_disposition
content_type = None
if path_http_headers is not None:
content_type = path_http_headers.content_type
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
source_if_match = None
if source_modified_access_conditions is not None:
source_if_match = source_modified_access_conditions.source_if_match
source_if_none_match = None
if source_modified_access_conditions is not None:
source_if_none_match = source_modified_access_conditions.source_if_none_match
source_if_modified_since = None
if source_modified_access_conditions is not None:
source_if_modified_since = source_modified_access_conditions.source_if_modified_since
source_if_unmodified_since = None
if source_modified_access_conditions is not None:
source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
# Construct URL
url = self.create.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if resource is not None:
query_parameters['resource'] = self._serialize.query("resource", resource, 'PathResourceType')
if continuation is not None:
query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
if mode is not None:
query_parameters['mode'] = self._serialize.query("mode", mode, 'PathRenameMode')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
if rename_source is not None:
header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str')
if source_lease_id is not None:
header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str')
if properties is not None:
header_parameters['x-ms-properties'] = self._serialize.header("properties", properties, 'str')
if permissions is not None:
header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
if umask is not None:
header_parameters['x-ms-umask'] = self._serialize.header("umask", umask, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if cache_control is not None:
header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", cache_control, 'str')
if content_encoding is not None:
header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", content_encoding, 'str')
if content_language is not None:
header_parameters['x-ms-content-language'] = self._serialize.header("content_language", content_language, 'str')
if content_disposition is not None:
header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", content_disposition, 'str')
if content_type is not None:
header_parameters['x-ms-content-type'] = self._serialize.header("content_type", content_type, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if source_if_match is not None:
header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", source_if_match, 'str')
if source_if_none_match is not None:
header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", source_if_none_match, 'str')
if source_if_modified_since is not None:
header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", source_if_modified_since, 'rfc-1123')
if source_if_unmodified_since is not None:
header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'x-ms-continuation': self._deserialize('str', response.headers.get('x-ms-continuation')),
'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
create.metadata = {'url': '/{filesystem}/{path}'}
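    # Illustrative call (client wiring is assumed, not shown): inside an async
    # context one might create a file with, e.g.:
    #     await path_operations.create(resource='file', timeout=30)
    # where `path_operations` is an instance of this class attached to the
    # generated client.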
async def update(self, action, mode, body, max_records=None, continuation=None, force_flag=None, position=None, retain_uncommitted_data=None, close=None, content_length=None, properties=None, owner=None, group=None, permissions=None, acl=None, request_id=None, timeout=None, path_http_headers=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""Append Data | Flush Data | Set Properties | Set Access Control.
Uploads data to be appended to a file, flushes (writes) previously
uploaded data to a file, sets properties for a file or directory, or
sets access control for a file or directory. Data can only be appended
to a file. This operation supports conditional HTTP requests. For more
information, see [Specifying Conditional Headers for Blob Service
Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
:param action: The action must be "append" to upload data to be
appended to a file, "flush" to flush previously uploaded data to a
file, "setProperties" to set the properties of a file or directory,
"setAccessControl" to set the owner, group, permissions, or access
control list for a file or directory, or "setAccessControlRecursive"
to set the access control list for a directory recursively. Note that
Hierarchical Namespace must be enabled for the account in order to use
access control. Also note that the Access Control List (ACL) includes
permissions for the owner, owning group, and others, so the
x-ms-permissions and x-ms-acl request headers are mutually exclusive.
Possible values include: 'append', 'flush', 'setProperties',
'setAccessControl', 'setAccessControlRecursive'
:type action: str or
~azure.storage.filedatalake.models.PathUpdateAction
:param mode: Mode "set" sets POSIX access control rights on files and
directories, "modify" modifies one or more POSIX access control rights
that pre-exist on files and directories, "remove" removes one or more
POSIX access control rights that were present earlier on files and
directories. Possible values include: 'set', 'modify', 'remove'
:type mode: str or
~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode
:param body: Initial data
:type body: Generator
:param max_records: Optional. Valid for "SetAccessControlRecursive"
operation. It specifies the maximum number of files or directories on
which the acl change will be applied. If omitted or greater than
2,000, the request will process up to 2,000 items
:type max_records: int
:param continuation: Optional. The number of paths processed with each
invocation is limited. If the number of paths to be processed exceeds
this limit, a continuation token is returned in the response header
x-ms-continuation. When a continuation token is returned in the
response, it must be percent-encoded and specified in a subsequent
         invocation of the setAccessControlRecursive operation.
:type continuation: str
:param force_flag: Optional. Valid for "SetAccessControlRecursive"
operation. If set to false, the operation will terminate quickly on
encountering user errors (4XX). If true, the operation will ignore
user errors and proceed with the operation on other sub-entities of
the directory. Continuation token will only be returned when forceFlag
is true in case of user errors. If not set the default value is false
for this.
:type force_flag: bool
:param position: This parameter allows the caller to upload data in
parallel and control the order in which it is appended to the file.
It is required when uploading data to be appended to the file and when
flushing previously uploaded data to the file. The value must be the
position where the data is to be appended. Uploaded data is not
immediately flushed, or written, to the file. To flush, the
previously uploaded data must be contiguous, the position parameter
must be specified and equal to the length of the file after all data
has been written, and there must not be a request entity body included
with the request.
:type position: long
:param retain_uncommitted_data: Valid only for flush operations. If
"true", uncommitted data is retained after the flush operation
completes; otherwise, the uncommitted data is deleted after the flush
operation. The default is false. Data at offsets less than the
specified position are written to the file when flush succeeds, but
this optional parameter allows data after the flush position to be
retained for a future flush operation.
:type retain_uncommitted_data: bool
:param close: Azure Storage Events allow applications to receive
notifications when files change. When Azure Storage Events are
enabled, a file changed event is raised. This event has a property
indicating whether this is the final change to distinguish the
difference between an intermediate flush to a file stream and the
final close of a file stream. The close query parameter is valid only
when the action is "flush" and change notifications are enabled. If
the value of close is "true" and the flush operation completes
successfully, the service raises a file change notification with a
property indicating that this is the final update (the file stream has
been closed). If "false" a change notification is raised indicating
the file has changed. The default is false. This query parameter is
set to true by the Hadoop ABFS driver to indicate that the file stream
has been closed."
:type close: bool
:param content_length: Required for "Append Data" and "Flush Data".
Must be 0 for "Flush Data". Must be the length of the request content
in bytes for "Append Data".
:type content_length: long
:param properties: Optional. User-defined properties to be stored with
the filesystem, in the format of a comma-separated list of name and
value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded
string. Note that the string may only contain ASCII characters in the
ISO-8859-1 character set. If the filesystem exists, any properties
not included in the list will be removed. All properties are removed
if the header is omitted. To merge new and existing properties, first
get all existing properties and the current E-Tag, then make a
conditional request with the E-Tag and include values for all
properties.
:type properties: str
:param owner: Optional. The owner of the blob or directory.
:type owner: str
:param group: Optional. The owning group of the blob or directory.
:type group: str
:param permissions: Optional and only valid if Hierarchical Namespace
is enabled for the account. Sets POSIX access permissions for the file
owner, the file owning group, and others. Each class may be granted
read, write, or execute permission. The sticky bit is also supported.
Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are
supported.
:type permissions: str
:param acl: Sets POSIX access control rights on files and directories.
The value is a comma-separated list of access control entries. Each
access control entry (ACE) consists of a scope, a type, a user or
group identifier, and permissions in the format
"[scope:][type]:[id]:[permissions]".
:type acl: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param path_http_headers: Additional parameters for the operation
:type path_http_headers:
~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: SetAccessControlRecursiveResponse or the result of
cls(response)
:rtype:
~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
content_md5 = None
if path_http_headers is not None:
content_md5 = path_http_headers.content_md5
cache_control = None
if path_http_headers is not None:
cache_control = path_http_headers.cache_control
content_type = None
if path_http_headers is not None:
content_type = path_http_headers.content_type
content_disposition = None
if path_http_headers is not None:
content_disposition = path_http_headers.content_disposition
content_encoding = None
if path_http_headers is not None:
content_encoding = path_http_headers.content_encoding
content_language = None
if path_http_headers is not None:
content_language = path_http_headers.content_language
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
# Construct URL
url = self.update.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['action'] = self._serialize.query("action", action, 'PathUpdateAction')
if max_records is not None:
query_parameters['maxRecords'] = self._serialize.query("max_records", max_records, 'int', minimum=1)
if continuation is not None:
query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
query_parameters['mode'] = self._serialize.query("mode", mode, 'PathSetAccessControlRecursiveMode')
if force_flag is not None:
query_parameters['forceFlag'] = self._serialize.query("force_flag", force_flag, 'bool')
if position is not None:
query_parameters['position'] = self._serialize.query("position", position, 'long')
if retain_uncommitted_data is not None:
query_parameters['retainUncommittedData'] = self._serialize.query("retain_uncommitted_data", retain_uncommitted_data, 'bool')
if close is not None:
query_parameters['close'] = self._serialize.query("close", close, 'bool')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/octet-stream'
if content_length is not None:
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
if properties is not None:
header_parameters['x-ms-properties'] = self._serialize.header("properties", properties, 'str')
if owner is not None:
header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str')
if group is not None:
header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str')
if permissions is not None:
header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
if acl is not None:
header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if content_md5 is not None:
header_parameters['x-ms-content-md5'] = self._serialize.header("content_md5", content_md5, 'bytearray')
if cache_control is not None:
header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", cache_control, 'str')
if content_type is not None:
header_parameters['x-ms-content-type'] = self._serialize.header("content_type", content_type, 'str')
if content_disposition is not None:
header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", content_disposition, 'str')
if content_encoding is not None:
header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", content_encoding, 'str')
if content_language is not None:
header_parameters['x-ms-content-language'] = self._serialize.header("content_language", content_language, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct body
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, stream_content=body)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SetAccessControlRecursiveResponse', response)
header_dict = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'Accept-Ranges': self._deserialize('str', response.headers.get('Accept-Ranges')),
'Cache-Control': self._deserialize('str', response.headers.get('Cache-Control')),
'Content-Disposition': self._deserialize('str', response.headers.get('Content-Disposition')),
'Content-Encoding': self._deserialize('str', response.headers.get('Content-Encoding')),
'Content-Language': self._deserialize('str', response.headers.get('Content-Language')),
'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
'Content-Range': self._deserialize('str', response.headers.get('Content-Range')),
'Content-Type': self._deserialize('str', response.headers.get('Content-Type')),
'Content-MD5': self._deserialize('str', response.headers.get('Content-MD5')),
'x-ms-properties': self._deserialize('str', response.headers.get('x-ms-properties')),
'x-ms-continuation': self._deserialize('str', response.headers.get('x-ms-continuation')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
update.metadata = {'url': '/{filesystem}/{path}'}
async def lease(self, x_ms_lease_action, x_ms_lease_duration=None, x_ms_lease_break_period=None, proposed_lease_id=None, request_id=None, timeout=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""Lease Path.
Create and manage a lease to restrict write and delete access to the
path. This operation supports conditional HTTP requests. For more
information, see [Specifying Conditional Headers for Blob Service
Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
:param x_ms_lease_action: There are five lease actions: "acquire",
"break", "change", "renew", and "release". Use "acquire" and specify
the "x-ms-proposed-lease-id" and "x-ms-lease-duration" to acquire a
new lease. Use "break" to break an existing lease. When a lease is
broken, the lease break period is allowed to elapse, during which time
no lease operation except break and release can be performed on the
file. When a lease is successfully broken, the response indicates the
interval in seconds until a new lease can be acquired. Use "change"
and specify the current lease ID in "x-ms-lease-id" and the new lease
ID in "x-ms-proposed-lease-id" to change the lease ID of an active
lease. Use "renew" and specify the "x-ms-lease-id" to renew an
existing lease. Use "release" and specify the "x-ms-lease-id" to
release a lease. Possible values include: 'acquire', 'break',
'change', 'renew', 'release'
:type x_ms_lease_action: str or
~azure.storage.filedatalake.models.PathLeaseAction
:param x_ms_lease_duration: The lease duration is required to acquire
a lease, and specifies the duration of the lease in seconds. The
lease duration must be between 15 and 60 seconds or -1 for infinite
lease.
:type x_ms_lease_duration: int
:param x_ms_lease_break_period: The lease break period duration is
optional to break a lease, and specifies the break period of the
lease in seconds. The lease break duration must be between 0 and 60
seconds.
:type x_ms_lease_break_period: int
:param proposed_lease_id: Proposed lease ID, in a GUID string format.
The Blob service returns 400 (Invalid request) if the proposed lease
ID is not in the correct format. See Guid Constructor (String) for a
list of valid GUID string formats.
:type proposed_lease_id: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
# Construct URL
url = self.lease.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
header_parameters['x-ms-lease-action'] = self._serialize.header("x_ms_lease_action", x_ms_lease_action, 'PathLeaseAction')
if x_ms_lease_duration is not None:
header_parameters['x-ms-lease-duration'] = self._serialize.header("x_ms_lease_duration", x_ms_lease_duration, 'int')
if x_ms_lease_break_period is not None:
header_parameters['x-ms-lease-break-period'] = self._serialize.header("x_ms_lease_break_period", x_ms_lease_break_period, 'int')
if proposed_lease_id is not None:
header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'x-ms-lease-id': self._deserialize('str', response.headers.get('x-ms-lease-id')),
'x-ms-lease-time': self._deserialize('str', response.headers.get('x-ms-lease-time')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
lease.metadata = {'url': '/{filesystem}/{path}'}
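# Usage sketch (illustrative, not part of the generated client): acquiring
# and later releasing a lease. `path_ops` stands for an assumed, already
# initialized instance of this operations class.
#
#     import uuid
#     from azure.storage.filedatalake.models import LeaseAccessConditions
#
#     proposed = str(uuid.uuid4())
#     await path_ops.lease("acquire", x_ms_lease_duration=15,
#                          proposed_lease_id=proposed)
#     ...  # writes/deletes guarded by the lease go here
#     await path_ops.lease("release",
#                          lease_access_conditions=LeaseAccessConditions(
#                              lease_id=proposed))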
async def read(self, range=None, x_ms_range_get_content_md5=None, request_id=None, timeout=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""Read File.
Read the contents of a file. For read operations, range requests are
supported. This operation supports conditional HTTP requests. For more
information, see [Specifying Conditional Headers for Blob Service
Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
:param range: The HTTP Range request header specifies one or more byte
ranges of the resource to be retrieved.
:type range: str
:param x_ms_range_get_content_md5: Optional. When this header is set
to "true" and specified together with the Range header, the service
returns the MD5 hash for the range, as long as the range is less than
or equal to 4 MB in size. If this header is specified without the Range
header, the service returns status code 400 (Bad Request). If this
header is set to true when the range exceeds 4 MB in size, the service
returns status code 400 (Bad Request).
:type x_ms_range_get_content_md5: bool
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: The streamed response body, or the result of cls(response)
:rtype: Generator
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
# Construct URL
url = self.read.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if range is not None:
header_parameters['Range'] = self._serialize.header("range", range, 'str')
if x_ms_range_get_content_md5 is not None:
header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("x_ms_range_get_content_md5", x_ms_range_get_content_md5, 'bool')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 206]:
await response.load_body()
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = response.stream_download(self._client._pipeline)
header_dict = {
'Accept-Ranges': self._deserialize('str', response.headers.get('Accept-Ranges')),
'Cache-Control': self._deserialize('str', response.headers.get('Cache-Control')),
'Content-Disposition': self._deserialize('str', response.headers.get('Content-Disposition')),
'Content-Encoding': self._deserialize('str', response.headers.get('Content-Encoding')),
'Content-Language': self._deserialize('str', response.headers.get('Content-Language')),
'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
'Content-Range': self._deserialize('str', response.headers.get('Content-Range')),
'Content-Type': self._deserialize('str', response.headers.get('Content-Type')),
'Content-MD5': self._deserialize('str', response.headers.get('Content-MD5')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'x-ms-resource-type': self._deserialize('str', response.headers.get('x-ms-resource-type')),
'x-ms-properties': self._deserialize('str', response.headers.get('x-ms-properties')),
'x-ms-lease-duration': self._deserialize('str', response.headers.get('x-ms-lease-duration')),
'x-ms-lease-state': self._deserialize('str', response.headers.get('x-ms-lease-state')),
'x-ms-lease-status': self._deserialize('str', response.headers.get('x-ms-lease-status')),
'x-ms-content-md5': self._deserialize('str', response.headers.get('x-ms-content-md5')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if response.status_code == 206:
deserialized = response.stream_download(self._client._pipeline)
header_dict = {
'Accept-Ranges': self._deserialize('str', response.headers.get('Accept-Ranges')),
'Cache-Control': self._deserialize('str', response.headers.get('Cache-Control')),
'Content-Disposition': self._deserialize('str', response.headers.get('Content-Disposition')),
'Content-Encoding': self._deserialize('str', response.headers.get('Content-Encoding')),
'Content-Language': self._deserialize('str', response.headers.get('Content-Language')),
'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
'Content-Range': self._deserialize('str', response.headers.get('Content-Range')),
'Content-Type': self._deserialize('str', response.headers.get('Content-Type')),
'Content-MD5': self._deserialize('str', response.headers.get('Content-MD5')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'x-ms-resource-type': self._deserialize('str', response.headers.get('x-ms-resource-type')),
'x-ms-properties': self._deserialize('str', response.headers.get('x-ms-properties')),
'x-ms-lease-duration': self._deserialize('str', response.headers.get('x-ms-lease-duration')),
'x-ms-lease-state': self._deserialize('str', response.headers.get('x-ms-lease-state')),
'x-ms-lease-status': self._deserialize('str', response.headers.get('x-ms-lease-status')),
'x-ms-content-md5': self._deserialize('str', response.headers.get('x-ms-content-md5')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
read.metadata = {'url': '/{filesystem}/{path}'}
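# Usage sketch (illustrative): reading the first kilobyte of a file and
# asking the service for the MD5 of that range. `path_ops` is an assumed,
# initialized instance of this operations class; the returned value is the
# download stream produced above.
#
#     stream = await path_ops.read(range="bytes=0-1023",
#                                  x_ms_range_get_content_md5=True)
#     data = b"".join([chunk async for chunk in stream])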
async def get_properties(self, action=None, upn=None, request_id=None, timeout=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""Get Properties | Get Status | Get Access Control List.
Get Properties returns all system and user defined properties for a
path. Get Status returns all system defined properties for a path. Get
Access Control List returns the access control list for a path. This
operation supports conditional HTTP requests. For more information,
see [Specifying Conditional Headers for Blob Service
Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
:param action: Optional. If the value is "getStatus" only the system
defined properties for the path are returned. If the value is
"getAccessControl" the access control list is returned in the response
headers (Hierarchical Namespace must be enabled for the account),
otherwise the properties are returned. Possible values include:
'getAccessControl', 'getStatus'
:type action: str or
~azure.storage.filedatalake.models.PathGetPropertiesAction
:param upn: Optional. Valid only when Hierarchical Namespace is
enabled for the account. If "true", the user identity values returned
in the x-ms-owner, x-ms-group, and x-ms-acl response headers will be
transformed from Azure Active Directory Object IDs to User Principal
Names. If "false", the values will be returned as Azure Active
Directory Object IDs. The default value is false. Note that group and
application Object IDs are not translated because they do not have
unique friendly names.
:type upn: bool
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
# Construct URL
url = self.get_properties.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if action is not None:
query_parameters['action'] = self._serialize.query("action", action, 'PathGetPropertiesAction')
if upn is not None:
query_parameters['upn'] = self._serialize.query("upn", upn, 'bool')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.head(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'Accept-Ranges': self._deserialize('str', response.headers.get('Accept-Ranges')),
'Cache-Control': self._deserialize('str', response.headers.get('Cache-Control')),
'Content-Disposition': self._deserialize('str', response.headers.get('Content-Disposition')),
'Content-Encoding': self._deserialize('str', response.headers.get('Content-Encoding')),
'Content-Language': self._deserialize('str', response.headers.get('Content-Language')),
'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
'Content-Range': self._deserialize('str', response.headers.get('Content-Range')),
'Content-Type': self._deserialize('str', response.headers.get('Content-Type')),
'Content-MD5': self._deserialize('str', response.headers.get('Content-MD5')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'x-ms-resource-type': self._deserialize('str', response.headers.get('x-ms-resource-type')),
'x-ms-properties': self._deserialize('str', response.headers.get('x-ms-properties')),
'x-ms-owner': self._deserialize('str', response.headers.get('x-ms-owner')),
'x-ms-group': self._deserialize('str', response.headers.get('x-ms-group')),
'x-ms-permissions': self._deserialize('str', response.headers.get('x-ms-permissions')),
'x-ms-acl': self._deserialize('str', response.headers.get('x-ms-acl')),
'x-ms-lease-duration': self._deserialize('str', response.headers.get('x-ms-lease-duration')),
'x-ms-lease-state': self._deserialize('str', response.headers.get('x-ms-lease-state')),
'x-ms-lease-status': self._deserialize('str', response.headers.get('x-ms-lease-status')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
get_properties.metadata = {'url': '/{filesystem}/{path}'}
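# Usage sketch (illustrative): fetching the access control list, which is
# returned only in response headers, so a `cls` callback is used to surface
# them. All names below are assumptions for the example.
#
#     def keep_headers(response, deserialized, headers):
#         return headers
#
#     headers = await path_ops.get_properties(action="getAccessControl",
#                                             upn=True, cls=keep_headers)
#     acl = headers.get("x-ms-acl")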
async def delete(self, recursive=None, continuation=None, request_id=None, timeout=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""Delete File | Delete Directory.
Delete the file or directory. This operation supports conditional HTTP
requests. For more information, see [Specifying Conditional Headers
for Blob Service
Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
:param recursive: Required and valid only when the resource is a
directory. If "true", all paths beneath the directory will be deleted.
If "false" and the directory is non-empty, an error occurs.
:type recursive: bool
:param continuation: Optional. When deleting a directory, the number
of paths that are deleted with each invocation is limited. If the
number of paths to be deleted exceeds this limit, a continuation token
is returned in this response header. When a continuation token is
returned in the response, it must be specified in a subsequent
invocation of the delete operation to continue deleting the directory.
:type continuation: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if recursive is not None:
query_parameters['recursive'] = self._serialize.query("recursive", recursive, 'bool')
if continuation is not None:
query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'x-ms-continuation': self._deserialize('str', response.headers.get('x-ms-continuation')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
delete.metadata = {'url': '/{filesystem}/{path}'}
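# Usage sketch (illustrative): deleting a large directory recursively,
# following the x-ms-continuation token documented above. `path_ops` is an
# assumed, initialized instance of this operations class.
#
#     def keep_headers(response, deserialized, headers):
#         return headers
#
#     token = None
#     while True:
#         headers = await path_ops.delete(recursive=True, continuation=token,
#                                         cls=keep_headers)
#         token = headers.get("x-ms-continuation")
#         if not token:
#             break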
async def set_access_control(self, timeout=None, owner=None, group=None, permissions=None, acl=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""Set the owner, group, permissions, or access control list for a path.
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param owner: Optional. The owner of the blob or directory.
:type owner: str
:param group: Optional. The owning group of the blob or directory.
:type group: str
:param permissions: Optional and only valid if Hierarchical Namespace
is enabled for the account. Sets POSIX access permissions for the file
owner, the file owning group, and others. Each class may be granted
read, write, or execute permission. The sticky bit is also supported.
Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are
supported.
:type permissions: str
:param acl: Sets POSIX access control rights on files and directories.
The value is a comma-separated list of access control entries. Each
access control entry (ACE) consists of a scope, a type, a user or
group identifier, and permissions in the format
"[scope:][type]:[id]:[permissions]".
:type acl: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
action = "setAccessControl"
# Construct URL
url = self.set_access_control.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['action'] = self._serialize.query("action", action, 'str')
# Construct headers
header_parameters = {}
if owner is not None:
header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str')
if group is not None:
header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str')
if permissions is not None:
header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
if acl is not None:
header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
}
return cls(response, None, response_headers)
set_access_control.metadata = {'url': '/{filesystem}/{path}'}
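# Usage sketch (illustrative): either POSIX permissions or an explicit ACL
# string (in the "[scope:][type]:[id]:[permissions]" format documented
# above) can be applied. `path_ops` is an assumed instance.
#
#     await path_ops.set_access_control(permissions="rwxr-x---")
#     await path_ops.set_access_control(
#         acl="user::rwx,group::r-x,other::---")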
async def set_access_control_recursive(self, mode, timeout=None, continuation=None, force_flag=None, max_records=None, acl=None, request_id=None, *, cls=None, **kwargs):
"""Set the access control list for a path and subpaths.
:param mode: Mode "set" sets POSIX access control rights on files and
directories, "modify" modifies one or more POSIX access control rights
that pre-exist on files and directories, "remove" removes one or more
POSIX access control rights that were present earlier on files and
directories. Possible values include: 'set', 'modify', 'remove'
:type mode: str or
~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param continuation: Optional. When applying the access control change
to a directory, the number of paths processed with each invocation is
limited. If the number of paths to be processed exceeds this limit, a
continuation token is returned in the x-ms-continuation response
header. When a continuation token is returned, it must be specified in
a subsequent invocation of this operation to continue applying the
change to the directory.
:type continuation: str
:param force_flag: Optional. Valid for "SetAccessControlRecursive"
operation. If set to false, the operation will terminate quickly on
encountering user errors (4XX). If true, the operation will ignore
user errors and proceed with the operation on other sub-entities of
the directory. Continuation token will only be returned when forceFlag
is true in case of user errors. If not set, the default value is false.
:type force_flag: bool
:param max_records: Optional. It specifies the maximum number of files
or directories on which the acl change will be applied. If omitted or
greater than 2,000, the request will process up to 2,000 items.
:type max_records: int
:param acl: Sets POSIX access control rights on files and directories.
The value is a comma-separated list of access control entries. Each
access control entry (ACE) consists of a scope, a type, a user or
group identifier, and permissions in the format
"[scope:][type]:[id]:[permissions]".
:type acl: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: SetAccessControlRecursiveResponse or the result of
cls(response)
:rtype:
~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
action = "setAccessControlRecursive"
# Construct URL
url = self.set_access_control_recursive.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
if continuation is not None:
query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
query_parameters['mode'] = self._serialize.query("mode", mode, 'PathSetAccessControlRecursiveMode')
if force_flag is not None:
query_parameters['forceFlag'] = self._serialize.query("force_flag", force_flag, 'bool')
if max_records is not None:
query_parameters['maxRecords'] = self._serialize.query("max_records", max_records, 'int', minimum=1)
query_parameters['action'] = self._serialize.query("action", action, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if acl is not None:
header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SetAccessControlRecursiveResponse', response)
header_dict = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-continuation': self._deserialize('str', response.headers.get('x-ms-continuation')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
set_access_control_recursive.metadata = {'url': '/{filesystem}/{path}'}
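# Usage sketch (illustrative): applying an ACL across a directory tree,
# looping on the continuation token. The `cls` callback exposes both the
# per-batch result and the response headers; all names are assumptions.
#
#     def keep_both(response, deserialized, headers):
#         return deserialized, headers
#
#     token = None
#     while True:
#         batch, headers = await path_ops.set_access_control_recursive(
#             mode="set", acl="user::rwx,group::r-x,other::---",
#             continuation=token, force_flag=True, cls=keep_both)
#         token = headers.get("x-ms-continuation")
#         if not token:
#             break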
async def flush_data(self, timeout=None, position=None, retain_uncommitted_data=None, close=None, content_length=None, request_id=None, path_http_headers=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""Set the owner, group, permissions, or access control list for a path.
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param position: This parameter allows the caller to upload data in
parallel and control the order in which it is appended to the file.
It is required when uploading data to be appended to the file and when
flushing previously uploaded data to the file. The value must be the
position where the data is to be appended. Uploaded data is not
immediately flushed, or written, to the file. To flush, the
previously uploaded data must be contiguous, the position parameter
must be specified and equal to the length of the file after all data
has been written, and there must not be a request entity body included
with the request.
:type position: long
:param retain_uncommitted_data: Valid only for flush operations. If
"true", uncommitted data is retained after the flush operation
completes; otherwise, the uncommitted data is deleted after the flush
operation. The default is false. Data at offsets less than the
specified position are written to the file when flush succeeds, but
this optional parameter allows data after the flush position to be
retained for a future flush operation.
:type retain_uncommitted_data: bool
:param close: Azure Storage Events allow applications to receive
notifications when files change. When Azure Storage Events are
enabled, a file changed event is raised. This event has a property
indicating whether this is the final change to distinguish the
difference between an intermediate flush to a file stream and the
final close of a file stream. The close query parameter is valid only
when the action is "flush" and change notifications are enabled. If
the value of close is "true" and the flush operation completes
successfully, the service raises a file change notification with a
property indicating that this is the final update (the file stream has
been closed). If "false" a change notification is raised indicating
the file has changed. The default is false. This query parameter is
set to true by the Hadoop ABFS driver to indicate that the file stream
has been closed."
:type close: bool
:param content_length: Required for "Append Data" and "Flush Data".
Must be 0 for "Flush Data". Must be the length of the request content
in bytes for "Append Data".
:type content_length: long
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param path_http_headers: Additional parameters for the operation
:type path_http_headers:
~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.filedatalake.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
content_md5 = None
if path_http_headers is not None:
content_md5 = path_http_headers.content_md5
cache_control = None
if path_http_headers is not None:
cache_control = path_http_headers.cache_control
content_type = None
if path_http_headers is not None:
content_type = path_http_headers.content_type
content_disposition = None
if path_http_headers is not None:
content_disposition = path_http_headers.content_disposition
content_encoding = None
if path_http_headers is not None:
content_encoding = path_http_headers.content_encoding
content_language = None
if path_http_headers is not None:
content_language = path_http_headers.content_language
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
action = "flush"
# Construct URL
url = self.flush_data.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
if position is not None:
query_parameters['position'] = self._serialize.query("position", position, 'long')
if retain_uncommitted_data is not None:
query_parameters['retainUncommittedData'] = self._serialize.query("retain_uncommitted_data", retain_uncommitted_data, 'bool')
if close is not None:
query_parameters['close'] = self._serialize.query("close", close, 'bool')
query_parameters['action'] = self._serialize.query("action", action, 'str')
# Construct headers
header_parameters = {}
if content_length is not None:
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if content_md5 is not None:
header_parameters['x-ms-content-md5'] = self._serialize.header("content_md5", content_md5, 'bytearray')
if cache_control is not None:
header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", cache_control, 'str')
if content_type is not None:
header_parameters['x-ms-content-type'] = self._serialize.header("content_type", content_type, 'str')
if content_disposition is not None:
header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", content_disposition, 'str')
if content_encoding is not None:
header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", content_encoding, 'str')
if content_language is not None:
header_parameters['x-ms-content-language'] = self._serialize.header("content_language", content_language, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
}
return cls(response, None, response_headers)
flush_data.metadata = {'url': '/{filesystem}/{path}'}
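# Usage sketch (illustrative): committing previously appended data. Per the
# parameter notes above, `position` must equal the total number of bytes
# written so far and `content_length` must be 0 for a flush.
#
#     await path_ops.flush_data(position=total_bytes_written,
#                               content_length=0, close=True)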
async def append_data(self, body, position=None, timeout=None, content_length=None, transactional_content_crc64=None, request_id=None, path_http_headers=None, lease_access_conditions=None, *, cls=None, **kwargs):
"""Append data to the file.
:param body: The data to append to the file.
:type body: Generator
:param position: This parameter allows the caller to upload data in
parallel and control the order in which it is appended to the file.
It is required when uploading data to be appended to the file and when
flushing previously uploaded data to the file. The value must be the
position where the data is to be appended. Uploaded data is not
immediately flushed, or written, to the file. To flush, the
previously uploaded data must be contiguous, the position parameter
must be specified and equal to the length of the file after all data
has been written, and there must not be a request entity body included
with the request.
:type position: long
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param content_length: Required for "Append Data" and "Flush Data".
Must be 0 for "Flush Data". Must be the length of the request content
in bytes for "Append Data".
:type content_length: long
:param transactional_content_crc64: Specify the transactional crc64
for the body, to be validated by the service.
:type transactional_content_crc64: bytearray
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param path_http_headers: Additional parameters for the operation
:type path_http_headers:
~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.filedatalake.models.LeaseAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
transactional_content_hash = None
if path_http_headers is not None:
transactional_content_hash = path_http_headers.transactional_content_hash
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
action = "append"
# Construct URL
url = self.append_data.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if position is not None:
query_parameters['position'] = self._serialize.query("position", position, 'long')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['action'] = self._serialize.query("action", action, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if content_length is not None:
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
if transactional_content_crc64 is not None:
header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if transactional_content_hash is not None:
header_parameters['Content-MD5'] = self._serialize.header("transactional_content_hash", transactional_content_hash, 'bytearray')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
# Construct and send request; the body is passed as streamed content
request = self._client.patch(url, query_parameters, header_parameters, stream_content=body)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Content-MD5': self._deserialize('bytearray', response.headers.get('Content-MD5')),
'x-ms-content-crc64': self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')),
'x-ms-request-server-encrypted': self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')),
}
return cls(response, None, response_headers)
append_data.metadata = {'url': '/{filesystem}/{path}'}
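# Usage sketch (illustrative): appending two chunks at their byte offsets,
# then flushing at the final length (see flush_data above). Raw bytes are
# shown for brevity; the documented body type is a generator/stream.
#
#     chunk1, chunk2 = b"hello ", b"world"
#     await path_ops.append_data(chunk1, position=0,
#                                content_length=len(chunk1))
#     await path_ops.append_data(chunk2, position=len(chunk1),
#                                content_length=len(chunk2))
#     await path_ops.flush_data(position=len(chunk1) + len(chunk2),
#                               content_length=0)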
async def set_expiry(self, expiry_options, timeout=None, request_id=None, expires_on=None, *, cls=None, **kwargs):
"""Sets the time a blob will expire and be deleted.
:param expiry_options: Required. Indicates mode of the expiry time.
Possible values include: 'NeverExpire', 'RelativeToCreation',
'RelativeToNow', 'Absolute'
:type expiry_options: str or
~azure.storage.filedatalake.models.PathExpiryOptions
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param expires_on: The time at which the blob should expire.
:type expires_on: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.filedatalake.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.set_expiry.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("self.comp", self.comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str')
if expires_on is not None:
header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
set_expiry.metadata = {'url': '/{filesystem}/{path}'}
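# Usage sketch (illustrative): expiring a file 30 seconds from now. For the
# relative expiry options the service expects the offset in milliseconds,
# passed as a string -- treat that unit as an assumption to verify.
#
#     await path_ops.set_expiry("RelativeToNow", expires_on=str(30 * 1000))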

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b on 2019-01-17.
# 2019, SMART Health IT.
import os
import pytest
import io
import unittest
import json
from .fixtures import force_bytes
from .. import observation
from ..fhirdate import FHIRDate
@pytest.mark.usefixtures("base_settings")
class ObservationTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("Observation", js["resourceType"])
return observation.Observation(js)
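# The tests below all follow the same round-trip pattern: build a model from
# a JSON fixture, assert on its fields, re-serialize with as_json(), rebuild
# a second instance from that output, and assert again. A minimal sketch
# (fixture name is illustrative):
#
#     with io.open("observation-example.json", encoding="utf-8") as handle:
#         inst = observation.Observation(json.load(handle))
#     inst2 = observation.Observation(inst.as_json())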
def testObservation1(self):
inst = self.instantiate_from("observation-example-bmi-using-related.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation1(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation1(inst2)
def implObservation1(self, inst):
self.assertEqual(force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"))
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("39156-5"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Body mass index (BMI) [Ratio]"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("BMI"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("bmi-using-related"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("kg/m2"))
self.assertEqual(force_bytes(inst.valueQuantity.system), force_bytes("http://unitsofmeasure.org"))
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("kg/m2"))
self.assertEqual(inst.valueQuantity.value, 16.2)
def testObservation2(self):
inst = self.instantiate_from("observation-example-vp-oyster.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation2(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation2(inst2)
def implObservation2(self, inst):
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("41857-4"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Vibrio parahaemolyticus DNA [Presence] in Unspecified specimen by Probe and target amplification method"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.coding[1].code), force_bytes("VP MULTI PCR"))
self.assertEqual(force_bytes(inst.code.coding[1].display), force_bytes("VP MULTI PCR"))
self.assertEqual(force_bytes(inst.code.coding[1].system), force_bytes("http://sonomacounty.ca.gov/Health/Public-Health/Laboratory/test-codes"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("OYSTER TESTING"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("2017-10-12").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "2017-10-12")
self.assertEqual(force_bytes(inst.id), force_bytes("vp-oyster"))
self.assertEqual(force_bytes(inst.identifier[0].system), force_bytes("http://sonomacounty.ca.gov/Health/Public-Health/Laboratory"))
self.assertEqual(force_bytes(inst.identifier[0].use), force_bytes("official"))
self.assertEqual(force_bytes(inst.identifier[0].value), force_bytes("20171014-1234"))
self.assertEqual(force_bytes(inst.interpretation[0].coding[0].code), force_bytes("A"))
self.assertEqual(force_bytes(inst.interpretation[0].coding[0].display), force_bytes("Abnormal"))
self.assertEqual(force_bytes(inst.interpretation[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation"))
self.assertEqual(inst.issued.date, FHIRDate("2017-10-14T23:11:24Z").date)
self.assertEqual(inst.issued.as_json(), "2017-10-14T23:11:24Z")
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.referenceRange[0].text), force_bytes("NEGATIVE"))
self.assertEqual(force_bytes(inst.status), force_bytes("preliminary"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].code), force_bytes("10828004"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].display), force_bytes("Positive (qualifier value)"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[1].code), force_bytes("POSITIVE"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[1].display), force_bytes("POSITIVE"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[1].system), force_bytes("http://sonomacounty.ca.gov/Health/Public-Health/Laboratory/result-codes"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.text), force_bytes("POSITIVE"))
def testObservation3(self):
inst = self.instantiate_from("observation-example-date-lastmp.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation3(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation3(inst2)
def implObservation3(self, inst):
self.assertEqual(force_bytes(inst.category[0].coding[0].code), force_bytes("survey"))
self.assertEqual(force_bytes(inst.category[0].coding[0].display), force_bytes("Survey"))
self.assertEqual(force_bytes(inst.category[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"))
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("AOE"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("8665-2"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Date last menstrual period"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("Date last menstrual period"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("2016-01-24").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "2016-01-24")
self.assertEqual(force_bytes(inst.id), force_bytes("date-lastmp"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(inst.valueDateTime.date, FHIRDate("2016-12-30").date)
self.assertEqual(inst.valueDateTime.as_json(), "2016-12-30")
def testObservation4(self):
inst = self.instantiate_from("observation-example-body-temperature.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation4(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation4(inst2)
def implObservation4(self, inst):
self.assertEqual(force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"))
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("8310-5"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Body temperature"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("Body temperature"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("body-temperature"))
self.assertEqual(force_bytes(inst.meta.profile[0]), force_bytes("http://hl7.org/fhir/StructureDefinition/vitalsigns"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("Cel"))
self.assertEqual(force_bytes(inst.valueQuantity.system), force_bytes("http://unitsofmeasure.org"))
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("C"))
self.assertEqual(inst.valueQuantity.value, 36.5)
def testObservation5(self):
inst = self.instantiate_from("observation-example-phenotype.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation5(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation5(inst2)
def implObservation5(self, inst):
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("79716-7"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("CYP2C9 gene product metabolic activity interpretation"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.extension[0].url), force_bytes("http://hl7.org/fhir/StructureDefinition/observation-geneticsGene"))
self.assertEqual(force_bytes(inst.extension[0].valueCodeableConcept.coding[0].code), force_bytes("2623"))
self.assertEqual(force_bytes(inst.extension[0].valueCodeableConcept.coding[0].display), force_bytes("CYP2C9"))
self.assertEqual(force_bytes(inst.extension[0].valueCodeableConcept.coding[0].system), force_bytes("http://www.genenames.org"))
self.assertEqual(force_bytes(inst.id), force_bytes("example-phenotype"))
self.assertEqual(inst.issued.date, FHIRDate("2013-04-03T15:30:10+01:00").date)
self.assertEqual(inst.issued.as_json(), "2013-04-03T15:30:10+01:00")
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].code), force_bytes("LA25391-6"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].display), force_bytes("Normal metabolizer"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].system), force_bytes("http://loinc.org"))
def testObservation6(self):
inst = self.instantiate_from("observation-example-2minute-apgar-score.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation6(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation6(inst2)
def implObservation6(self, inst):
self.assertEqual(force_bytes(inst.category[0].coding[0].code), force_bytes("survey"))
self.assertEqual(force_bytes(inst.category[0].coding[0].display), force_bytes("Survey"))
self.assertEqual(force_bytes(inst.category[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"))
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Survey"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("9273-4"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("2 minute Apgar Score"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("2 minute Apgar Score"))
self.assertEqual(force_bytes(inst.component[0].code.coding[0].code), force_bytes("249227004"))
self.assertEqual(force_bytes(inst.component[0].code.coding[0].display), force_bytes("Apgar color score"))
self.assertEqual(force_bytes(inst.component[0].code.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.component[0].code.text), force_bytes("Apgar color score"))
self.assertEqual(force_bytes(inst.component[0].valueCodeableConcept.coding[0].code), force_bytes("LA6723-6"))
self.assertEqual(force_bytes(inst.component[0].valueCodeableConcept.coding[0].display), force_bytes("Good color in body with bluish hands or feet"))
self.assertEqual(force_bytes(inst.component[0].valueCodeableConcept.coding[0].extension[0].url), force_bytes("http://hl7.org/fhir/StructureDefinition/ordinalValue"))
self.assertEqual(inst.component[0].valueCodeableConcept.coding[0].extension[0].valueDecimal, 1)
self.assertEqual(force_bytes(inst.component[0].valueCodeableConcept.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.component[0].valueCodeableConcept.coding[1].code), force_bytes("1"))
self.assertEqual(force_bytes(inst.component[0].valueCodeableConcept.coding[1].system), force_bytes("http://acme.ped/apgarcolor"))
self.assertEqual(force_bytes(inst.component[0].valueCodeableConcept.text), force_bytes("1. Good color in body with bluish hands or feet"))
self.assertEqual(force_bytes(inst.component[1].code.coding[0].code), force_bytes("249223000"))
self.assertEqual(force_bytes(inst.component[1].code.coding[0].display), force_bytes("Apgar heart rate score"))
self.assertEqual(force_bytes(inst.component[1].code.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.component[1].code.text), force_bytes("Apgar respiratory effort score"))
self.assertEqual(force_bytes(inst.component[1].valueCodeableConcept.coding[0].code), force_bytes("LA6720-2"))
self.assertEqual(force_bytes(inst.component[1].valueCodeableConcept.coding[0].display), force_bytes("Fewer than 100 beats per minute"))
self.assertEqual(force_bytes(inst.component[1].valueCodeableConcept.coding[0].extension[0].url), force_bytes("http://hl7.org/fhir/StructureDefinition/ordinalValue"))
self.assertEqual(inst.component[1].valueCodeableConcept.coding[0].extension[0].valueDecimal, 1)
self.assertEqual(force_bytes(inst.component[1].valueCodeableConcept.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.component[1].valueCodeableConcept.coding[1].code), force_bytes("1"))
self.assertEqual(force_bytes(inst.component[1].valueCodeableConcept.coding[1].system), force_bytes("http://acme.ped/apgarheartrate"))
self.assertEqual(force_bytes(inst.component[1].valueCodeableConcept.text), force_bytes("1. Fewer than 100 beats per minute"))
self.assertEqual(force_bytes(inst.component[2].code.coding[0].code), force_bytes("249226008"))
self.assertEqual(force_bytes(inst.component[2].code.coding[0].display), force_bytes("Apgar response to stimulus score"))
self.assertEqual(force_bytes(inst.component[2].code.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.component[2].code.text), force_bytes("Apgar response to stimulus score"))
self.assertEqual(force_bytes(inst.component[2].valueCodeableConcept.coding[0].code), force_bytes("LA6721-0"))
self.assertEqual(force_bytes(inst.component[2].valueCodeableConcept.coding[0].display), force_bytes("Grimace during suctioning"))
self.assertEqual(force_bytes(inst.component[2].valueCodeableConcept.coding[0].extension[0].url), force_bytes("http://hl7.org/fhir/StructureDefinition/ordinalValue"))
self.assertEqual(inst.component[2].valueCodeableConcept.coding[0].extension[0].valueDecimal, 1)
self.assertEqual(force_bytes(inst.component[2].valueCodeableConcept.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.component[2].valueCodeableConcept.coding[1].code), force_bytes("1"))
self.assertEqual(force_bytes(inst.component[2].valueCodeableConcept.coding[1].system), force_bytes("http://acme.ped/apgarreflexirritability"))
self.assertEqual(force_bytes(inst.component[2].valueCodeableConcept.text), force_bytes("1. Grimace during suctioning"))
self.assertEqual(force_bytes(inst.component[3].code.coding[0].code), force_bytes("249225007"))
self.assertEqual(force_bytes(inst.component[3].code.coding[0].display), force_bytes("Apgar muscle tone score"))
self.assertEqual(force_bytes(inst.component[3].code.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.component[3].code.text), force_bytes("Apgar muscle tone score"))
self.assertEqual(force_bytes(inst.component[3].valueCodeableConcept.coding[0].code), force_bytes("LA6714-5"))
self.assertEqual(force_bytes(inst.component[3].valueCodeableConcept.coding[0].display), force_bytes("Some flexion of arms and legs"))
self.assertEqual(force_bytes(inst.component[3].valueCodeableConcept.coding[0].extension[0].url), force_bytes("http://hl7.org/fhir/StructureDefinition/ordinalValue"))
self.assertEqual(inst.component[3].valueCodeableConcept.coding[0].extension[0].valueDecimal, 1)
self.assertEqual(force_bytes(inst.component[3].valueCodeableConcept.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.component[3].valueCodeableConcept.coding[1].code), force_bytes("1"))
self.assertEqual(force_bytes(inst.component[3].valueCodeableConcept.coding[1].system), force_bytes("http://acme.ped/apgarmuscletone"))
self.assertEqual(force_bytes(inst.component[3].valueCodeableConcept.text), force_bytes("1. Some flexion of arms and legs"))
self.assertEqual(force_bytes(inst.component[4].code.coding[0].code), force_bytes("249224006"))
self.assertEqual(force_bytes(inst.component[4].code.coding[0].display), force_bytes("Apgar respiratory effort score"))
self.assertEqual(force_bytes(inst.component[4].code.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.component[4].code.text), force_bytes("Apgar respiratory effort score"))
self.assertEqual(force_bytes(inst.component[4].valueCodeableConcept.coding[0].code), force_bytes("LA6726-9"))
self.assertEqual(force_bytes(inst.component[4].valueCodeableConcept.coding[0].display), force_bytes("Weak cry; may sound like whimpering, slow or irregular breathing"))
self.assertEqual(force_bytes(inst.component[4].valueCodeableConcept.coding[0].extension[0].url), force_bytes("http://hl7.org/fhir/StructureDefinition/ordinalValue"))
self.assertEqual(inst.component[4].valueCodeableConcept.coding[0].extension[0].valueDecimal, 1)
self.assertEqual(force_bytes(inst.component[4].valueCodeableConcept.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.component[4].valueCodeableConcept.coding[1].code), force_bytes("1"))
self.assertEqual(force_bytes(inst.component[4].valueCodeableConcept.coding[1].system), force_bytes("http://acme.ped/apgarrespiratoryeffort"))
self.assertEqual(force_bytes(inst.component[4].valueCodeableConcept.text), force_bytes("1. Weak cry; may sound like whimpering, slow or irregular breathing"))
self.assertEqual(force_bytes(inst.contained[0].id), force_bytes("newborn"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("2016-05-18T22:33:22Z").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "2016-05-18T22:33:22Z")
self.assertEqual(force_bytes(inst.id), force_bytes("2minute-apgar-score"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("{score}"))
self.assertEqual(force_bytes(inst.valueQuantity.system), force_bytes("http://unitsofmeasure.org"))
self.assertEqual(inst.valueQuantity.value, 5)
def testObservation7(self):
inst = self.instantiate_from("observation-example-f202-temperature.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation7(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation7(inst2)
def implObservation7(self, inst):
self.assertEqual(force_bytes(inst.bodySite.coding[0].code), force_bytes("74262004"))
self.assertEqual(force_bytes(inst.bodySite.coding[0].display), force_bytes("Oral cavity"))
self.assertEqual(force_bytes(inst.bodySite.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("BT"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Body temperature"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://acme.lab"))
self.assertEqual(force_bytes(inst.code.coding[1].code), force_bytes("8310-5"))
self.assertEqual(force_bytes(inst.code.coding[1].display), force_bytes("Body temperature"))
self.assertEqual(force_bytes(inst.code.coding[1].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.coding[2].code), force_bytes("8331-1"))
self.assertEqual(force_bytes(inst.code.coding[2].display), force_bytes("Oral temperature"))
self.assertEqual(force_bytes(inst.code.coding[2].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.coding[3].code), force_bytes("56342008"))
self.assertEqual(force_bytes(inst.code.coding[3].display), force_bytes("Temperature taking"))
self.assertEqual(force_bytes(inst.code.coding[3].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("Temperature"))
self.assertEqual(force_bytes(inst.id), force_bytes("f202"))
self.assertEqual(force_bytes(inst.interpretation[0].coding[0].code), force_bytes("H"))
self.assertEqual(force_bytes(inst.interpretation[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation"))
self.assertEqual(inst.issued.date, FHIRDate("2013-04-04T13:27:00+01:00").date)
self.assertEqual(inst.issued.as_json(), "2013-04-04T13:27:00+01:00")
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.method.coding[0].code), force_bytes("89003005"))
self.assertEqual(force_bytes(inst.method.coding[0].display), force_bytes("Oral temperature taking"))
self.assertEqual(force_bytes(inst.method.coding[0].system), force_bytes("http://snomed.info/sct"))
self.assertEqual(force_bytes(inst.referenceRange[0].high.unit), force_bytes("degrees C"))
self.assertEqual(inst.referenceRange[0].high.value, 38.2)
self.assertEqual(force_bytes(inst.status), force_bytes("entered-in-error"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("Cel"))
self.assertEqual(force_bytes(inst.valueQuantity.system), force_bytes("http://unitsofmeasure.org"))
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("degrees C"))
self.assertEqual(inst.valueQuantity.value, 39)
def testObservation8(self):
inst = self.instantiate_from("observation-example-haplotype1.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation8(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation8(inst2)
def implObservation8(self, inst):
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("55233-1"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Genetic analysis master panel-- This is the parent OBR for the panel holding all of the associated observations that can be reported with a molecular genetics analysis result."))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.extension[0].url), force_bytes("http://hl7.org/fhir/StructureDefinition/observation-geneticsGene"))
self.assertEqual(force_bytes(inst.extension[0].valueCodeableConcept.coding[0].code), force_bytes("2625"))
self.assertEqual(force_bytes(inst.extension[0].valueCodeableConcept.coding[0].display), force_bytes("CYP2D6"))
self.assertEqual(force_bytes(inst.extension[0].valueCodeableConcept.coding[0].system), force_bytes("http://www.genenames.org"))
self.assertEqual(force_bytes(inst.id), force_bytes("example-haplotype1"))
self.assertEqual(inst.issued.date, FHIRDate("2013-04-03T15:30:10+01:00").date)
self.assertEqual(inst.issued.as_json(), "2013-04-03T15:30:10+01:00")
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("unknown"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].code), force_bytes("PA165971587"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].display), force_bytes("*35B"))
self.assertEqual(force_bytes(inst.valueCodeableConcept.coding[0].system), force_bytes("http://pharmakb.org"))
def testObservation9(self):
inst = self.instantiate_from("observation-example-vitals-panel.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation9(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation9(inst2)
def implObservation9(self, inst):
self.assertEqual(force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"))
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("85353-1"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Vital signs, weight, height, head circumference, oxygen saturation and BMI panel"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("Vital signs Panel"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("vitals-panel"))
self.assertEqual(force_bytes(inst.meta.profile[0]), force_bytes("http://hl7.org/fhir/StructureDefinition/vitalsigns"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
def testObservation10(self):
inst = self.instantiate_from("observation-example-mbp.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation10(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation10(inst2)
def implObservation10(self, inst):
self.assertEqual(force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.category[0].coding[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"))
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("8478-0"))
self.assertEqual(force_bytes(inst.code.coding[0].display), force_bytes("Mean blood pressure"))
self.assertEqual(force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org"))
self.assertEqual(force_bytes(inst.code.text), force_bytes("Mean blood pressure"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("mbp"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(force_bytes(inst.meta.tag[0].display), force_bytes("test health data"))
self.assertEqual(force_bytes(inst.meta.tag[0].system), force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"))
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("mm[Hg]"))
self.assertEqual(force_bytes(inst.valueQuantity.system), force_bytes("http://unitsofmeasure.org"))
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("mm[Hg]"))
self.assertEqual(inst.valueQuantity.value, 80)
| [
"[email protected]"
] | |
b448d0f8bc5967e987a3b64d13f17fcca3feb1bd | b8e9dd6fd8f8b691cba5a3af2388467bcf6c90bb | /samples/openapi3/client/3_0_3_unit_test/python-experimental/unit_test_api/model/maxlength_validation.pyi | 783b51cc9870e0680e3a4fa5efa97a10f27c64c3 | [
"Apache-2.0"
] | permissive | FallenRiteMonk/openapi-generator | f8b98940219eecf14dc76dced4b0fbd394522aa3 | b6576d11733ecad6fa4a0a616e1a06d502a771b7 | refs/heads/master | 2023-03-16T05:23:36.501909 | 2022-09-02T01:46:56 | 2022-09-02T01:46:56 | 164,609,299 | 0 | 0 | Apache-2.0 | 2019-01-08T09:08:56 | 2019-01-08T09:08:56 | null | UTF-8 | Python | false | false | 1,846 | pyi | # coding: utf-8
"""
openapi 3.0.3 sample spec
sample spec for testing openapi functionality, built from json schema tests for draft6 # noqa: E501
The version of the OpenAPI document: 0.0.1
Generated by: https://openapi-generator.tech
"""
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from unit_test_api import schemas # noqa: F401
class MaxlengthValidation(
schemas.AnyTypeSchema,
):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
class MetaOapg:
additional_properties = schemas.AnyTypeSchema
def __getitem__(self, name: typing.Union[str, ]) -> MetaOapg.additional_properties:
# dict_instance[name] accessor
if not hasattr(self.MetaOapg, 'properties') or name not in self.MetaOapg.properties.__annotations__:
return super().__getitem__(name)
try:
return super().__getitem__(name)
except KeyError:
return schemas.unset
def __new__(
cls,
*args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes, ],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Union[MetaOapg.additional_properties, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes, ],
) -> 'MaxlengthValidation':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
| [
"[email protected]"
] | |
73061d115f4cab440666da9fd0537873a719f7b1 | eed51de6f1f27979187d4be62a989f74d77a54aa | /py_flask2/run.py | da759db885d22dc60dd35b854fca129a036030cb | [] | no_license | yjw0216/Samsung-MultiCampus-python-edu | 2635c0074de6a94975482b57b88f38d98b487784 | d8178606305fd9aec54e1b0f9df63a0f012c728c | refs/heads/master | 2020-03-22T05:12:23.215529 | 2018-08-17T02:48:07 | 2018-08-17T02:48:07 | 139,549,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,904 | py | from flask import Flask , request , url_for , render_template , redirect , jsonify
from model.d1_8 import searchSql , selectTeamName , updateTeamInfo ## 내가만든 모듈(DB처리부분) import !!
app = Flask(__name__)
# 페이지 구성
# 홈페이지의 주소(URL)를 ~/ 로 정의하자
@app.route('/')
def home():
    return render_template('index.html',title='Home')
@app.route('/join')
def join():
    return render_template('test.1.html', title='Sign Up')
@app.route('/search' , methods=['POST'])
def search():
keyword = request.form['keyword']
# DB로 검색어를 보내서 쿼리 수행후 결과를 받아온다.
# tmp={'name':'맨유','keyword':keyword}
tmp = searchSql( keyword )
if tmp ==None:
tmp=[] ## json에서 None은 받아들이지 못하므로 비어있는 리스트로 대체한다.
# print(tmp)
# jsonify() : 파이썬 객체를 json문자열로 처리
return jsonify(tmp)
# View team details
@app.route('/info/<teamName>')
def info(teamName):
q = request.args.get('q')
print( 'q=%s' % q )
row = selectTeamName(teamName)
    # If q is None this is a plain info view; if q is 'update' it is the edit view.
    return render_template('info.html' , team = row , flag= q) ## expose the row to the template under the key `team`
# Update team info
@app.route('/updateTeam', methods = ['POST'])
def updateTeam():
    ## Get the total match count and the team name from the submitted data
total = request.form['total']
name = request.form['name']
    ## Run the update query
result = updateTeamInfo(total,name)
    # return 'Deokbae %s' % request.form['name']
    if result:
        return render_template('alert2.html' , msg='Update succeeded XD' , url='/info/'+name)
    else:
        return render_template('alert2.html', msg=' Update failed :( ')
if __name__ == '__main__':
app.run(debug=True) | [
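# To try it locally: `python run.py` starts Flask's dev server, which
# defaults to http://127.0.0.1:5000/.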
"[email protected]"
] | |
6e11396cffb0e94913e3102fbb95ce31d9f0fe01 | bc60fea66d910fd7ff88e6c0476be0eeaccffe7e | /init_db.py | 7cf4902323126ede1ef112539d37a8ab4ce4f13b | [] | no_license | pahaz/django-2016-steps | cd2c0321660f8ae52778e7eaaa2fb3942d535f1c | 3daa9404dcbc8a2430d8d139ea8f5a0b763b8e3b | refs/heads/master | 2023-08-26T19:47:36.918975 | 2016-05-27T11:17:42 | 2016-05-27T11:17:42 | 58,202,366 | 0 | 2 | null | 2016-05-27T10:01:28 | 2016-05-06T11:21:49 | Python | UTF-8 | Python | false | false | 255 | py | import os
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "_project_.settings")
django.setup()
# -----------------------------------
from django.contrib.auth.models import User
User.objects.create_superuser('qwer', '[email protected]', 'qwer')
| [
"[email protected]"
] | |
3dcbae388d9f29fb43bb3ae85c4db82dc2cac795 | f2befaae3840bafd181cc712108e3b64caf2696f | /app/portal/horizon/openstack_dashboard/test/integration_tests/tests/test_floatingips.py | e13a5caeadcc5089f2265ba1b68db6ff925ac46c | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | F5Networks/f5-adcaas-openstack | 17d5c408d421dcfe542002e1f850b2d9f29f1663 | 02bd8a606215c0fa08b926bac1b092b5e8b278df | refs/heads/master | 2023-08-28T12:09:54.972191 | 2022-08-12T02:03:43 | 2022-08-12T02:03:43 | 164,592,273 | 4 | 23 | Apache-2.0 | 2022-08-12T02:03:44 | 2019-01-08T07:40:35 | Python | UTF-8 | Python | false | false | 4,543 | py | # Copyright 2015 Hewlett-Packard Development Company, L.P
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.integration_tests import helpers
from openstack_dashboard.test.integration_tests.regions import messages
class TestFloatingip(helpers.TestCase):
"""Checks that the user is able to allocate/release floatingip."""
def test_floatingip(self):
floatingip_page = \
self.home_pg.go_to_compute_accessandsecurity_floatingipspage()
floating_ip = floatingip_page.allocate_floatingip()
self.assertTrue(
floatingip_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
floatingip_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(floatingip_page.is_floatingip_present(floating_ip))
floatingip_page.release_floatingip(floating_ip)
self.assertTrue(
floatingip_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
floatingip_page.find_message_and_dismiss(messages.ERROR))
self.assertFalse(floatingip_page.is_floatingip_present(floating_ip))
class TestFloatingipAssociateDisassociate(helpers.TestCase):
"""Checks that the user is able to Associate/Disassociate floatingip."""
def test_floatingip_associate_disassociate(self):
instance_name = helpers.gen_random_resource_name('instance',
timestamp=False)
instances_page = self.home_pg.go_to_compute_instancespage()
instances_page.create_instance(instance_name)
self.assertTrue(
instances_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
instances_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(instances_page.is_instance_active(instance_name))
instance_ipv4 = instances_page.get_fixed_ipv4(instance_name)
instance_info = "{} {}".format(instance_name, instance_ipv4)
floatingip_page = \
self.home_pg.go_to_compute_accessandsecurity_floatingipspage()
floating_ip = floatingip_page.allocate_floatingip()
self.assertTrue(
floatingip_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
floatingip_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(floatingip_page.is_floatingip_present(floating_ip))
self.assertEqual('-', floatingip_page.get_fixed_ip(floating_ip))
floatingip_page.associate_floatingip(floating_ip, instance_name,
instance_ipv4)
self.assertTrue(
floatingip_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
floatingip_page.find_message_and_dismiss(messages.ERROR))
self.assertEqual(instance_info,
floatingip_page.get_fixed_ip(floating_ip))
floatingip_page.disassociate_floatingip(floating_ip)
self.assertTrue(
floatingip_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
floatingip_page.find_message_and_dismiss(messages.ERROR))
self.assertEqual('-', floatingip_page.get_fixed_ip(floating_ip))
floatingip_page.release_floatingip(floating_ip)
self.assertTrue(
floatingip_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
floatingip_page.find_message_and_dismiss(messages.ERROR))
self.assertFalse(floatingip_page.is_floatingip_present(floating_ip))
instances_page = self.home_pg.go_to_compute_instancespage()
instances_page.delete_instance(instance_name)
self.assertTrue(
instances_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
instances_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(instances_page.is_instance_deleted(instance_name))
| [
"[email protected]"
] | |
2561ca0bc23319ecfde72cc5deb53b2ef46300fd | 242e68a7c15e6ced652734d1d0e3e88e1074bb39 | /climetlab/sphinxext/command_output.py | 37361004bc1756958119ea83b8845b710aaaf1bb | [
"Apache-2.0"
] | permissive | mchantry/climetlab | e6edf596882560ad0b23572b24ac9e5cd9325891 | 8d655b4ac121a69e7244efe109c04d5e110cdf9e | refs/heads/main | 2023-07-22T01:16:52.859802 | 2021-07-22T09:24:00 | 2021-07-22T09:24:00 | 379,984,648 | 0 | 0 | Apache-2.0 | 2021-06-24T16:16:38 | 2021-06-24T16:16:38 | null | UTF-8 | Python | false | false | 1,667 | py | # (C) Copyright 2020 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
import os
import subprocess
import traceback
from shlex import split
from docutils import statemachine
from docutils.parsers.rst import Directive
# Examples at https://github.com/docutils-mirror/docutils
class CommandOutput(Directive):
has_content = True
def run(self):
self.assert_has_content()
here = os.getcwd()
try:
# Get current file
current_rst_file = self.state_machine.input_lines.source(
self.lineno - self.state_machine.input_offset - 1
)
os.chdir(os.path.dirname(current_rst_file))
cmd = [x for x in self.content if x != ""][0]
out = subprocess.check_output(split(cmd)).decode("utf-8")
# Parse output
rst_lines = statemachine.string2lines(out)
# Insert in place
self.state_machine.insert_input(rst_lines, current_rst_file)
except Exception:
# rst_lines = statemachine.string2lines(str(e))
rst_lines = statemachine.string2lines(traceback.format_exc())
self.state_machine.insert_input(rst_lines, current_rst_file)
finally:
os.chdir(here)
return []
def setup(app):
app.add_directive("command-output", CommandOutput)
| [
"[email protected]"
] | |
336afe5cc9ce742780553565f8b39a033fad439e | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/rna-transcription/d4b259d6199f474994ab158d3f6b6d29.py | ec2b433100cc6ed26f816b7f9b0dcb97506c5e4a | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 179 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def to_rna(dna):
rna = ''
table = {'G':'C','C':'G','T':'A','A':'U'}
for i in range(len(dna)):
rna += table[dna[i]]
return rna
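# Minimal self-check (assumed usage; the exercism test-suite runs separately):
if __name__ == '__main__':
    assert to_rna('GCTA') == 'CGAU'
    assert to_rna('ACGTGGTCTTAA') == 'UGCACCAGAAUU'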
| [
"[email protected]"
] | |
1d28d5489d4aeaaa822a1ab68c4f19421aaeb767 | 9bf522a1716339fe928e83c9b416eeebaa1421af | /aiida_lammps/calculations/lammps/__init__.py | e2f59c730f40dc1d2f760165016020424d44ef6b | [
"MIT"
] | permissive | zaidurrehman/aiida-lammps | 132ccf6f6bc2b8e2a81fa3f852a76c8bd3bdcedd | e00d5501778c918b4333747398d4ae4df46fd3eb | refs/heads/master | 2020-03-22T16:07:52.265272 | 2018-05-15T08:26:39 | 2018-05-15T08:26:39 | 140,304,946 | 0 | 0 | MIT | 2018-07-09T15:31:32 | 2018-07-09T15:25:24 | Python | UTF-8 | Python | false | false | 10,755 | py | from aiida.orm.calculation.job import JobCalculation
from aiida.common.exceptions import InputValidationError
from aiida.common.datastructures import CalcInfo, CodeInfo
from aiida.common.utils import classproperty
from aiida.orm import DataFactory
StructureData = DataFactory('structure')
ParameterData = DataFactory('parameter')
from aiida_lammps.calculations.lammps.potentials import LammpsPotential
import numpy as np
def get_supercell(structure, supercell_shape):
import itertools
symbols = np.array([site.kind_name for site in structure.sites])
positions = np.array([site.position for site in structure.sites])
cell = np.array(structure.cell)
supercell_shape = np.array(supercell_shape.dict.shape)
supercell_array = np.dot(cell, np.diag(supercell_shape))
supercell = StructureData(cell=supercell_array)
for k in range(positions.shape[0]):
for r in itertools.product(*[range(i) for i in supercell_shape[::-1]]):
position = positions[k, :] + np.dot(np.array(r[::-1]), cell)
symbol = symbols[k]
supercell.append_atom(position=position, symbols=symbol)
return supercell
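# get_supercell above tiles the unit cell on an integer grid: each atom is
# copied once per lattice translation n1*a1 + n2*a2 + n3*a3, with
# 0 <= n_k < supercell_shape[k].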
def get_FORCE_CONSTANTS_txt(force_constants):
force_constants = force_constants.get_array('force_constants')
fc_shape = force_constants.shape
fc_txt = "%4d\n" % (fc_shape[0])
for i in range(fc_shape[0]):
for j in range(fc_shape[1]):
fc_txt += "%4d%4d\n" % (i+1, j+1)
for vec in force_constants[i][j]:
fc_txt +=("%22.15f"*3 + "\n") % tuple(vec)
return fc_txt
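# get_FORCE_CONSTANTS_txt emits the plain-text FORCE_CONSTANTS layout used
# by phonopy: an atom-count header, then for each atom pair an "i j" line
# followed by its 3x3 force-constant matrix.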
def structure_to_poscar(structure):
atom_type_unique = np.unique([site.kind_name for site in structure.sites], return_index=True)[1]
labels = np.diff(np.append(atom_type_unique, [len(structure.sites)]))
poscar = ' '.join(np.unique([site.kind_name for site in structure.sites]))
poscar += '\n1.0\n'
cell = structure.cell
for row in cell:
poscar += '{0: 22.16f} {1: 22.16f} {2: 22.16f}\n'.format(*row)
poscar += ' '.join(np.unique([site.kind_name for site in structure.sites]))+'\n'
poscar += ' '.join(np.array(labels, dtype=str))+'\n'
poscar += 'Cartesian\n'
for site in structure.sites:
poscar += '{0: 22.16f} {1: 22.16f} {2: 22.16f}\n'.format(*site.position)
return poscar
def parameters_to_input_file(parameters_object):
parameters = parameters_object.get_dict()
input_file = ('STRUCTURE FILE POSCAR\nPOSCAR\n\n')
input_file += ('FORCE CONSTANTS\nFORCE_CONSTANTS\n\n')
input_file += ('PRIMITIVE MATRIX\n')
input_file += ('{} {} {} \n').format(*np.array(parameters['primitive'])[0])
input_file += ('{} {} {} \n').format(*np.array(parameters['primitive'])[1])
input_file += ('{} {} {} \n').format(*np.array(parameters['primitive'])[2])
input_file += ('\n')
input_file += ('SUPERCELL MATRIX PHONOPY\n')
input_file += ('{} {} {} \n').format(*np.array(parameters['supercell'])[0])
input_file += ('{} {} {} \n').format(*np.array(parameters['supercell'])[1])
input_file += ('{} {} {} \n').format(*np.array(parameters['supercell'])[2])
input_file += ('\n')
return input_file
def generate_LAMMPS_structure(structure):
import numpy as np
types = [site.kind_name for site in structure.sites]
type_index_unique = np.unique(types, return_index=True)[1]
count_index_unique = np.diff(np.append(type_index_unique, [len(types)]))
atom_index = []
for i, index in enumerate(count_index_unique):
atom_index += [i for j in range(index)]
masses = [site.mass for site in structure.kinds]
positions = [site.position for site in structure.sites]
number_of_atoms = len(positions)
lammps_data_file = 'Generated using dynaphopy\n\n'
lammps_data_file += '{0} atoms\n\n'.format(number_of_atoms)
lammps_data_file += '{0} atom types\n\n'.format(len(masses))
cell = np.array(structure.cell)
a = np.linalg.norm(cell[0])
b = np.linalg.norm(cell[1])
c = np.linalg.norm(cell[2])
alpha = np.arccos(np.dot(cell[1], cell[2])/(c*b))
gamma = np.arccos(np.dot(cell[1], cell[0])/(a*b))
beta = np.arccos(np.dot(cell[2], cell[0])/(a*c))
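    # Map the general cell onto LAMMPS' restricted triclinic box: vector a
    # lies along x, b stays in the xy-plane, and xy/xz/yz below are the
    # resulting tilt factors.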
xhi = a
xy = b * np.cos(gamma)
xz = c * np.cos(beta)
yhi = np.sqrt(pow(b,2)- pow(xy,2))
yz = (b*c*np.cos(alpha)-xy * xz)/yhi
zhi = np.sqrt(pow(c,2)-pow(xz,2)-pow(yz,2))
xhi = xhi + max(0,0, xy, xz, xy+xz)
yhi = yhi + max(0,0, yz)
lammps_data_file += '\n{0:20.10f} {1:20.10f} xlo xhi\n'.format(0, xhi)
lammps_data_file += '{0:20.10f} {1:20.10f} ylo yhi\n'.format(0, yhi)
lammps_data_file += '{0:20.10f} {1:20.10f} zlo zhi\n'.format(0, zhi)
lammps_data_file += '{0:20.10f} {1:20.10f} {2:20.10f} xy xz yz\n\n'.format(xy, xz, yz)
lammps_data_file += 'Masses\n\n'
for i, mass in enumerate(masses):
lammps_data_file += '{0} {1:20.10f} \n'.format(i+1, mass)
lammps_data_file += '\nAtoms\n\n'
for i, row in enumerate(positions):
lammps_data_file += '{0} {1} {2:20.10f} {3:20.10f} {4:20.10f}\n'.format(i+1, atom_index[i]+1, row[0],row[1],row[2])
return lammps_data_file
def generate_LAMMPS_potential(pair_style):
    potential_file = '# Potential file generated by aiida plugin (please check citation in the original file)\n'
    for key, value in pair_style.dict.data.items():  # .items() works on both Python 2 and 3
potential_file += '{} {}\n'.format(key, value)
return potential_file
class BaseLammpsCalculation(JobCalculation):
"""
A basic plugin for calculating force constants using Lammps.
Requirement: the node should be able to import phonopy
"""
_INPUT_FILE_NAME = 'input.in'
_INPUT_POTENTIAL = 'potential.pot'
_INPUT_STRUCTURE = 'input.data'
_retrieve_list = []
_retrieve_temporary_list = []
_cmdline_params = ['-in', _INPUT_FILE_NAME]
_stdout_name = None
def _init_internal_params(self):
super(BaseLammpsCalculation, self)._init_internal_params()
@classproperty
def _baseclass_use_methods(cls):
"""
Common methods for LAMMPS.
"""
retdict = {
"potential": {
'valid_types': ParameterData,
'additional_parameter': None,
'linkname': 'potential',
'docstring': ("Use a node that specifies the lammps potential "
"for the namelists"),
},
"structure": {
'valid_types': StructureData,
'additional_parameter': None,
'linkname': 'structure',
'docstring': "Use a node for the structure",
},
"parameters": {
'valid_types': ParameterData,
'additional_parameter': None,
'linkname': 'parameters',
'docstring': "Use a node for the lammps input parameters",
},
}
return retdict
def _create_additional_files(self, tempfolder, inputs_params):
pass
def _prepare_for_submission(self, tempfolder, inputdict):
"""
This is the routine to be called when you want to create
the input files and related stuff with a plugin.
:param tempfolder: a aiida.common.folders.Folder subclass where
the plugin should put all its files.
:param inputdict: a dictionary with the input nodes, as they would
be returned by get_inputdata_dict (without the Code!)
"""
self._parameters_data = inputdict.pop(self.get_linkname('parameters'), None)
try:
potential_data = inputdict.pop(self.get_linkname('potential'))
except KeyError:
raise InputValidationError("No potential specified for this "
"calculation")
if not isinstance(potential_data, ParameterData):
raise InputValidationError("potential is not of type "
"ParameterData")
try:
self._structure = inputdict.pop(self.get_linkname('structure'))
except KeyError:
raise InputValidationError("no structure is specified for this calculation")
try:
code = inputdict.pop(self.get_linkname('code'))
except KeyError:
raise InputValidationError("no code is specified for this calculation")
##############################
# END OF INITIAL INPUT CHECK #
##############################
# =================== prepare the python input files =====================
potential_object = LammpsPotential(potential_data, self._structure, potential_filename=self._INPUT_POTENTIAL)
structure_txt = generate_LAMMPS_structure(self._structure)
input_txt = self._generate_input_function(self._parameters_data,
potential_object,
structure_file=self._INPUT_STRUCTURE,
trajectory_file=self._OUTPUT_TRAJECTORY_FILE_NAME)
potential_txt = potential_object.get_potential_file()
# =========================== dump to file =============================
input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)
with open(input_filename, 'w') as infile:
infile.write(input_txt)
structure_filename = tempfolder.get_abs_path(self._INPUT_STRUCTURE)
with open(structure_filename, 'w') as infile:
infile.write(structure_txt)
potential_filename = tempfolder.get_abs_path(self._INPUT_POTENTIAL)
with open(potential_filename, 'w') as infile:
infile.write(potential_txt)
self._create_additional_files(tempfolder, inputdict)
# ============================ calcinfo ================================
local_copy_list = []
remote_copy_list = []
# additional_retrieve_list = settings_dict.pop("ADDITIONAL_RETRIEVE_LIST",[])
calcinfo = CalcInfo()
calcinfo.uuid = self.uuid
# Empty command line by default
calcinfo.local_copy_list = local_copy_list
calcinfo.remote_copy_list = remote_copy_list
# Retrieve files
calcinfo.retrieve_list = self._retrieve_list
calcinfo.retrieve_temporary_list = self._retrieve_temporary_list
codeinfo = CodeInfo()
codeinfo.cmdline_params = self._cmdline_params
codeinfo.code_uuid = code.uuid
codeinfo.withmpi = False # Set lammps openmpi environment properly
calcinfo.codes_info = [codeinfo]
codeinfo.stdout_name = self._stdout_name
return calcinfo
| [
"[email protected]"
] | |
7b81eea3972f0d10ecfade6f13ea7d131e0a1565 | 2602266d33073790ac3f3b1ad3c3038922f6d279 | /RL/QL/lx_01.py | 94fa0e4000401eec9b2d6e45f3996b2c00b105f1 | [] | no_license | cy-2224/AI_first_contact | 7f6582a8f4c24acb270858e6cd08874580632bf8 | d536b5faa283e7e5f093716a6006fabae943f2e0 | refs/heads/master | 2022-03-19T20:29:31.487710 | 2019-11-18T06:00:05 | 2019-11-18T06:00:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,993 | py | import numpy as np
import pandas as pd
import time
np.random.seed(2) # reproducible
N_STATES = 10 # the length of the 1 dimensional world
ACTIONS = ['left', 'right'] # available actions
EPSILON = 0.9 # greedy police
ALPHA = 0.1 # learning rate
GAMMA = 0.9 # discount factor
MAX_EPISODES = 24 # maximum episodes
FRESH_TIME = 0.1 # fresh time for one move
def build_q_table(n_states, actions):
table = pd.DataFrame(
np.zeros((n_states, len(actions))), # q_table initial values
columns=actions, # actions's name
)
# print(table) # show table
return table
def choose_action(state, q_table):
# This is how to choose an action
state_actions = q_table.iloc[state, :]
    if (np.random.uniform() > EPSILON) or ((state_actions == 0).all()):  # act non-greedy, or this state's actions are all still unexplored
        action_name = np.random.choice(ACTIONS)
    else:   # act greedy
        action_name = state_actions.idxmax()  # idxmax() returns the action label, not its integer position
return action_name
def get_env_feedback(S, A):
# This is how agent will interact with the environment
if A == 'right': # move right
if S == N_STATES - 2: # terminate
S_ = 'terminal'
R = 1
else:
S_ = S + 1
R = 0
else: # move left
R = 0
if S == 0:
S_ = S # reach the wall
else:
S_ = S - 1
return S_, R
def update_env(S, episode, step_counter):
# This is how environment be updated
env_list = ['-']*(N_STATES-1) + ['T'] # '---------T' our environment
if S == 'terminal':
interaction = 'Episode %s: total_steps = %s' % (episode+1, step_counter)
print('\r{}'.format(interaction), end='')
time.sleep(2)
print('\r ', end='')
else:
env_list[S] = 'o'
interaction = ''.join(env_list)
print('\r{}'.format(interaction), end='')
time.sleep(FRESH_TIME)
def rl():
# main part of RL loop
q_table = build_q_table(N_STATES, ACTIONS)
for episode in range(MAX_EPISODES):
step_counter = 0
S = 0
is_terminated = False
update_env(S, episode, step_counter)
while not is_terminated:
A = choose_action(S, q_table)
S_, R = get_env_feedback(S, A) # take action & get next state and reward
            q_predict = q_table.loc[S, A]  # .ix was removed from pandas; .loc does label-based lookup
if S_ != 'terminal':
q_target = R + GAMMA * q_table.iloc[S_, :].max() # next state is not terminal
else:
q_target = R # next state is terminal
is_terminated = True # terminate this episode
            q_table.loc[S, A] += ALPHA * (q_target - q_predict)  # update
S = S_ # move to next state
update_env(S, episode, step_counter+1)
step_counter += 1
return q_table
if __name__ == "__main__":
q_table = rl()
print('\r\nQ-table:\n')
print(q_table) | [
"[email protected]"
] | |
25b15ed23eab61a3811e0a0702bc8a0d35e62589 | ca299cec2cd84d8b7c2571fa2fdf7161e66b8fe7 | /private_server/guard/CELL-29/Q-42/29-42.py | 35f00c836668156e969549d904a2e36a1661dd6e | [] | no_license | benmechen/CodeSet | ca57d4a065ac4fc737749f65cb5aa1011d446a88 | f5a4bf627a9a8efc76a65ae58db63a973fedffb7 | refs/heads/master | 2021-07-16T14:23:36.355491 | 2019-12-02T13:58:27 | 2019-12-02T13:58:27 | 225,385,245 | 1 | 0 | null | 2021-06-22T15:37:57 | 2019-12-02T13:47:09 | JavaScript | UTF-8 | Python | false | false | 250 | py | animals = ["aardvark", "badger", "duck", "emu", "fennec fox"]
# Use index() to find "duck"
duck_index = animals.index("duck")
# Your code here!
# insert() mutates the list in place and returns None, so there is no
# useful value to assign; after this call "cobra" sits at index 2.
animals.insert(2, "cobra")
# Observe what prints after the insert operation
print(animals) | [
"[email protected]"
] | |
f3a2ac6e803fc07512c8936659276e8b2ddcc899 | d973df1e60d17b2c4ac6430ec29cce7fd139c3ec | /99-Miscel/AnatomyOfMatplotlib-master/examples/vector_example.py | 05c3d530e5b59c169f0954224e4c1c10dbc082fd | [
"MIT",
"CC-BY-3.0"
] | permissive | dushyantkhosla/viz4ds | 49153414f10bfa0b577fa5e076ef6c697298146a | 05a004a390d180d87be2d09873c3f7283c2a2e27 | refs/heads/master | 2022-06-28T14:21:47.116921 | 2019-10-22T10:34:16 | 2019-10-22T10:34:16 | 133,490,301 | 0 | 0 | MIT | 2022-06-21T21:30:16 | 2018-05-15T09:08:30 | Jupyter Notebook | UTF-8 | Python | false | false | 1,294 | py | import matplotlib.pyplot as plt
import numpy as np
import example_utils
# Generate data
n = 256
x = np.linspace(-3, 3, n)
y = np.linspace(-3, 3, n)
xi, yi = np.meshgrid(x, y)
z = (1 - xi / 2 + xi**5 + yi**3) * np.exp(-xi**2 - yi**2)
dy, dx = np.gradient(z)
mag = np.hypot(dx, dy)
fig, axes = example_utils.setup_axes()
# Use ax.arrow to plot a single arrow on the axes.
axes[0].arrow(0, 0, -0.5, 0.5, width=0.005, color='black')
axes[0].axis([-1, 1, -1, 1])
example_utils.label(axes[0], 'arrow(x, y, dx, dy)')
# Plot a regularly-sampled vector field with ax.quiver
ds = np.s_[::16, ::16] # Downsample our array a bit...
axes[1].quiver(xi[ds], yi[ds], dx[ds], dy[ds], z[ds], cmap='gist_earth',
width=0.01, scale=0.25, pivot='middle')
axes[1].axis('tight')
example_utils.label(axes[1], 'quiver(x, y, dx, dy)')
# Use ax.streamplot to show flowlines through our vector field
# We'll get fancy and vary their width and color
lw = 2 * (mag - mag.min()) / mag.ptp() + 0.2
axes[2].streamplot(xi, yi, dx, dy, color=z, density=1.5, linewidth=lw,
cmap='gist_earth')
example_utils.label(axes[2], 'streamplot(x, y, dx, dy)')
example_utils.title(fig, '"arrow/quiver/streamplot": Vector fields', y=0.96)
fig.savefig('vector_example.png', facecolor='none')
plt.show()
| [
"[email protected]"
] | |
51f411fbfe7199fd08bd422157fa4731afd3e426 | 54db01a385495a4cd0c4ce1cf5ff5d7ba82d8cc5 | /course/migrations/0006_subject_credit_hours.py | ac6f580286364aacf611e11c0ddc01d5ed060c59 | [] | no_license | danish703/csapp | eb653988df31570fb3747bb7a0b48df0e16e089e | c268017189f6a05fd77663444fb33eca94e77d6d | refs/heads/master | 2022-10-09T09:21:03.225250 | 2020-06-05T14:16:06 | 2020-06-05T14:16:06 | 256,696,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | # Generated by Django 3.0.2 on 2020-04-17 04:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('course', '0005_auto_20200417_1014'),
]
operations = [
migrations.AddField(
model_name='subject',
name='credit_hours',
            field=models.CharField(default='3', max_length=3, verbose_name='Credit Hours'),
),
]
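# apply this migration with: python manage.py migrate course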
| [
"[email protected]"
] | |
7d2acc3aa2f7970438fcc584fb553426c3b2505e | 1ad4b4f46e9e3cafdf8ccb17eb7703905847fda2 | /collections/list/assignment1.py | 256616778869842786dbaa83b078910d29bf50c2 | [] | no_license | febacc103/febacc103 | 09711cd1d9e4c06bdb1631a72d86fe34e3edd13d | d5ebf3534a9ec2f3634f89c894816b22a7fbaa80 | refs/heads/master | 2023-03-29T22:25:48.073291 | 2021-04-20T08:31:44 | 2021-04-20T08:31:44 | 359,677,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | lst=[3,4,8]
total1 = sum(lst)  # compute the total once instead of on every iteration
lst1 = [total1 - i for i in lst]  # each entry is the sum of all the other elements
print(lst1)

lst2 = [5, 10, 20]
total2 = sum(lst2)
lst3 = [total2 - i for i in lst2]
print(lst3)
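# Expected output: [12, 11, 7] and [30, 25, 15] -- each entry is the list total minus the element itself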
"[email protected]"
] | |
d906992ff23a0051e0875019d50eb5c2e2a23b94 | c1582da0f3c1d762f6c78e613dfced5176bbfc83 | /Algorithms/p217_Contains_Duplicate/p217_Contains_Duplicate.py | 7759f53d150c0a5f9e369cd6eb03d4e79ab0f625 | [] | no_license | lbingbing/leetcode | 08a90a4c018210a1f0182b5ef2ab55942d57da48 | f6019c6a04f6923e4ec3bb156c9ad80e6545c127 | refs/heads/master | 2020-05-21T16:30:06.582401 | 2016-12-15T06:44:49 | 2016-12-15T06:44:49 | 65,279,977 | 0 | 0 | null | 2016-08-27T04:19:27 | 2016-08-09T09:02:55 | Python | UTF-8 | Python | false | false | 178 | py | class Solution(object):
def containsDuplicate(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
        return len(set(nums)) < len(nums)
| [
"[email protected]"
] | |
9f2bb5b89c2bf4b4fc694b91d1551cbe40a13be6 | db80edb9be895c4ebcb9acac96eff92b7fda2bd3 | /src/scripts/dataset_experiment_2019_1_3_(LNL)_train_script.py | fdf61ca86044647bec3707c7457949e6de9f1db1 | [] | no_license | YhHoo/AE-signal-model | 9950182425377364d83a8a86b72ed181b789a599 | 8ba384397a88ea8316deee3173503fccb9e485af | refs/heads/master | 2021-06-26T03:43:21.482586 | 2019-05-16T10:53:25 | 2019-05-16T10:53:25 | 131,477,937 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,510 | py | # this is for bash to know the path of the src
# Iterative task
import sys
sys.path.append('C:/Users/YH/PycharmProjects/AE-signal-model')
import time
import tensorflow as tf
import argparse
from src.experiment_dataset.dataset_experiment_2019_1_3 import AcousticEmissionDataSet
from src.model_bank.dataset_2018_7_13_lcp_recognition_model import *
from src.utils.helpers import *
# ------------------------------------------------------------------------------------------------------------ ARG PARSE
parser = argparse.ArgumentParser(description='Training hyperparameters for the LNL binary model.')
parser.add_argument('--model', default='1', type=str, help='Model name used for saved weights and results')
parser.add_argument('--kernel_size', default=[1], type=int, nargs='+', help='Conv kernel sizes')
parser.add_argument('--fc_size', default=[1], type=int, nargs='+', help='Fully connected layer sizes')
parser.add_argument('--epoch', default=100, type=int, help='Number of training epochs')
args = parser.parse_args()
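# Example invocation (hypothetical values):
#   python <this_script>.py --model LNL_2 --kernel_size 3 5 --fc_size 120 60 --epoch 150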
MODEL_SAVE_FILENAME = args.model
RESULT_SAVE_FILENAME = 'C:/Users/YH/PycharmProjects/AE-signal-model/result/{}_result.txt'.format(MODEL_SAVE_FILENAME)
EPOCH = args.epoch
KERNEL_SIZE = args.kernel_size
FC_SIZE = args.fc_size
print('Result saving filename: ', RESULT_SAVE_FILENAME)
print('Conv Kernel size: ', KERNEL_SIZE)
print('FC neuron size: ', FC_SIZE)
# ----------------------------------------------------------------------------------------------------------- GPU CONFIG
# instruct GPU to allocate only sufficient memory for this script
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
# ------------------------------------------------------------------------------------------------------------ DATA PREP
ae_data = AcousticEmissionDataSet(drive='G')
train_x, train_y, test_x, test_y = ae_data.random_leak_noleak_include_unseen(train_split=0.8)
train_x_reshape = train_x.reshape((train_x.shape[0], train_x.shape[1], 1))
test_x_reshape = test_x.reshape((test_x.shape[0], test_x.shape[1], 1))
train_y_cat = to_categorical(train_y, num_classes=2)
test_y_cat = to_categorical(test_y, num_classes=2)
# ------------------------------------------------------------------------------------------------------- MODEL TRAINING
lcp_model = LNL_binary_model_2(kernel_size=KERNEL_SIZE, fc_size=FC_SIZE)
lcp_model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
# checkpoint callback that saves the best weights during training
logger = ModelLogger(model=lcp_model, model_name=MODEL_SAVE_FILENAME)
save_weight_checkpoint = logger.save_best_weight_cheakpoint(monitor='val_loss', period=5)
# start training
total_epoch = EPOCH
time_train_start = time.time()
history = lcp_model.fit(x=train_x_reshape,
y=train_y_cat,
validation_data=(test_x_reshape, test_y_cat),
callbacks=[save_weight_checkpoint],
epochs=total_epoch,
batch_size=200,
shuffle=True,
verbose=2)
time_train = time.time() - time_train_start
logger.save_architecture(save_readable=True)
# ------------------------------------------------------------------------------------------------------- LEARNING CURVE
# name for fig suptitle and filename
lr_name = '{}_LrCurve'.format(MODEL_SAVE_FILENAME)
fig_lr = plt.figure(figsize=(10, 7))
fig_lr.subplots_adjust(left=0.08, bottom=0.07, right=0.96, top=0.89)
fig_lr.suptitle(lr_name)
ax_lr = fig_lr.add_subplot(1, 1, 1)
ax_lr.plot(history.history['loss'], label='train_loss')
ax_lr.plot(history.history['val_loss'], label='val_loss')
ax_lr.plot(history.history['acc'], label='train_acc')
ax_lr.plot(history.history['val_acc'], label='val_acc')
ax_lr.legend()
fig_lr_save_filename = direct_to_dir(where='result') + '{}.png'.format(lr_name)
fig_lr.savefig(fig_lr_save_filename)
# evaluate ------------------------------------------------------------------------------------------ EVALUATE REPORTING
# number of trainable parameters in the model
trainable_count = int(np.sum([K.count_params(p) for p in set(lcp_model.trainable_weights)]))
# find highest val acc and lowest loss
best_val_acc_index = np.argmax(history.history['val_acc'])
best_val_loss_index = np.argmin(history.history['val_loss'])
# load the best model checkpoint saved during training
lcp_best_model = load_model(model_name=MODEL_SAVE_FILENAME)
# test with val data
time_predict_start = time.time()
prediction = lcp_best_model.predict(test_x_reshape)
time_predict = time.time() - time_predict_start
prediction_argmax = np.argmax(prediction, axis=1)
actual_argmax = np.argmax(test_y_cat, axis=1)
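# argmax over the 2-way one-hot outputs recovers class ids (0 = NoLeak, 1 = Leak, per the labels below)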
# plot predictions against actual labels for the validation set
evaluate_name = '{}_Evaluate'.format(MODEL_SAVE_FILENAME)
fig_evaluate = plt.figure(figsize=(10, 7))
fig_evaluate.subplots_adjust(left=0.08, bottom=0.07, right=0.96, top=0.89)
fig_evaluate.suptitle(evaluate_name)
ax_evaluate = fig_evaluate.add_subplot(1, 1, 1)
ax_evaluate.plot(actual_argmax, color='r', label='Actual')
ax_evaluate.plot(prediction_argmax, color='b', label='Prediction', linestyle='None', marker='x')
ax_evaluate.legend()
fig_lr_save_filename = direct_to_dir(where='result') + '{}.png'.format(evaluate_name)
fig_evaluate.savefig(fig_lr_save_filename)
print('\n---------- EVALUATION RESULT SCRIPT LNL 1 -----------')
print('**Param in tuning --> [pool:(3, 2, 2), split=0.8, val_included_test]')
print('Model Trainable params: {}'.format(trainable_count))
print('Best Validation Accuracy: {:.4f} at Epoch {}/{}'.format(history.history['val_acc'][best_val_acc_index],
best_val_acc_index,
total_epoch))
print('Lowest Validation Loss: {:.4f} at Epoch {}/{}'.format(history.history['val_loss'][best_val_loss_index],
best_val_loss_index,
total_epoch))
print('Time taken to execute 1 sample: {}s'.format(time_predict / len(test_x_reshape)))
print('Time taken to complete {} epoch: {:.4f}s'.format(total_epoch, time_train))
rpf_result = logger.save_recall_precision_f1(y_pred=prediction_argmax, y_true=actual_argmax, all_class_label=[0, 1])
print('\nDist and Labels')
print('[NoLeak] -> class_0')
print('[Leak] -> class_1')
# saving the printed result again
with open(RESULT_SAVE_FILENAME, 'w') as f:
f.write('\n---------- EVALUATION RESULT SCRIPT LNL 1 -----------')
f.write('\nModel Conv Kernels Size: {}, FC Size: {}'.format(KERNEL_SIZE, FC_SIZE))
f.write('\nModel Trainable params: {}'.format(trainable_count))
f.write('\nBest Validation Accuracy: {:.4f} at Epoch {}/{}'.format(history.history['val_acc'][best_val_acc_index],
best_val_acc_index,
total_epoch))
f.write('\nLowest Validation Loss: {:.4f} at Epoch {}/{}'.format(history.history['val_loss'][best_val_loss_index],
best_val_loss_index,
total_epoch))
f.write('\nTime taken to execute 1 sample: {}s'.format(time_predict / len(test_x_reshape)))
f.write('\nTime taken to complete {} epoch: {:.4f}s'.format(total_epoch, time_train))
for i in rpf_result:
f.write('\n' + i)
f.write('\n\nDist and Labels')
f.write('\n[NoLeak] -> class_0')
f.write('\n[Leak] -> class_1') | [
"[email protected]"
] | |
b7feb3b10a95e4d1b47da7d8a58e5a880d93255f | 9e73fe186ec08859ded85d692cffac1945424410 | /tests/test_spline.py | d262b0d67ee2b0cf63f9e17c095bd2b1eb1da140 | [] | no_license | TheTripleV/robotpy-wpimath | f2071c8922d609c36caec4484d9ed3ccffe942b2 | a35d92505a4af85be560866e985320f28768ddc8 | refs/heads/main | 2023-02-19T23:59:21.955481 | 2021-01-19T07:12:35 | 2021-01-19T11:25:19 | 331,135,028 | 0 | 0 | null | 2021-01-19T23:14:55 | 2021-01-19T23:14:55 | null | UTF-8 | Python | false | false | 50 | py | import wpimath.spline
def test_todo():
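    # placeholder test: the module-level import of wpimath.spline is the real check here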
pass
| [
"[email protected]"
] | |
8699e2d9f2857916e726b82f33a9737a08f29c55 | da9c4a9a92d49d2fb2983a54e0f64c2a1ce8aa19 | /symphony/cli/pyinventory/graphql/fragment/service_type.py | 721c139d30f17b42911093ca706dd702b1e3e012 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | rohan-prasad/magma | 347c370347724488215a0783504788eac41d8ec7 | 2c1f36d2fd04eae90366cc8b314eaab656d7f8ad | refs/heads/master | 2022-10-14T14:08:14.067593 | 2020-06-11T23:52:03 | 2020-06-11T23:54:27 | 271,671,835 | 0 | 0 | NOASSERTION | 2020-06-12T00:20:23 | 2020-06-12T00:17:39 | null | UTF-8 | Python | false | false | 1,427 | py | #!/usr/bin/env python3
# @generated AUTOGENERATED file. Do not Change!
from dataclasses import dataclass
from datetime import datetime
from gql.gql.datetime_utils import DATETIME_FIELD
from gql.gql.graphql_client import GraphqlClient
from gql.gql.client import OperationException
from gql.gql.reporter import FailedOperationException
from functools import partial
from numbers import Number
from typing import Any, Callable, List, Mapping, Optional
from time import perf_counter
from dataclasses_json import DataClassJsonMixin
from ..fragment.property_type import PropertyTypeFragment, QUERY as PropertyTypeFragmentQuery
from ..fragment.service_endpoint_definition import ServiceEndpointDefinitionFragment, QUERY as ServiceEndpointDefinitionFragmentQuery
QUERY: List[str] = PropertyTypeFragmentQuery + ServiceEndpointDefinitionFragmentQuery + ["""
fragment ServiceTypeFragment on ServiceType {
id
name
hasCustomer
propertyTypes {
...PropertyTypeFragment
}
endpointDefinitions {
...ServiceEndpointDefinitionFragment
}
}
"""]
@dataclass
class ServiceTypeFragment(DataClassJsonMixin):
@dataclass
class PropertyType(PropertyTypeFragment):
pass
@dataclass
class ServiceEndpointDefinition(ServiceEndpointDefinitionFragment):
pass
id: str
name: str
hasCustomer: bool
propertyTypes: List[PropertyType]
endpointDefinitions: List[ServiceEndpointDefinition]
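# Usage sketch: DataClassJsonMixin supplies from_json()/to_json(), so a response
# payload matching this fragment can be deserialized with (payload_str assumed):
#   service_type = ServiceTypeFragment.from_json(payload_str)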
| [
"[email protected]"
] | |
024f42d62675e06d3eb3019186c76208aeeb1a10 | 13ce655f82b93fb4089b29e62a8e33dd7ff05493 | /src/wai/json/object/property/proxies/__init__.py | 02e3fe800f49bb03e939e66e0758c8f5b174e04d | [
"MIT"
] | permissive | waikato-datamining/wai-json | 603b90b13155114bbfb60b40f45100248c03d710 | cb013fb16e7c1b8d91e040a387a143d29d4ced96 | refs/heads/master | 2021-01-07T15:06:22.957223 | 2020-03-17T23:59:14 | 2020-03-17T23:59:14 | 241,736,670 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 283 | py | """
Package for proxy objects. Proxies are objects which act like other
objects, but can be converted to and from JSON, and subscribe to a
schema which is enforced during programmatic use as well as conversion.
"""
from ._ArrayProxy import ArrayProxy
from ._MapProxy import MapProxy
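# Usage sketch (hypothetical -- the real constructor signatures live in ._ArrayProxy
# and ._MapProxy and may differ):
#   tags = ArrayProxy(...)    # list-like object whose items are schema-validated
#   tags.append("valid")      # violations raise during programmatic use
#   raw = tags.to_raw_json()  # conversion back to JSON re-checks the schema (method name assumed)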
| [
"[email protected]"
] |