blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses sequencelengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors sequencelengths 1 1 | author_id stringlengths 1 132 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b91175d32f3bb9b7fd3dd0808d4f5901e03ee082 | c8a1fdf8795c39b21ecc1f5b31370073418a3cbb | /tests/test_dataset.py | 0e3997dce7789cc9aa4e35c957c7d0774945e2c9 | [] | no_license | h4nyu/random-char-image | 6b59935994ed70b3923074cf1c5baca7539aa18f | 7e56368ce17429eed580fd54767c109a20f8e06d | refs/heads/master | 2022-11-08T02:24:01.991103 | 2020-06-24T14:59:00 | 2020-06-24T14:59:00 | 269,483,534 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,965 | py | import typing as t
from PIL import ImageDraw
from glob import glob
from random_char_image import TextRepo, BackgroundRepo, RandomImage
def test_image() -> None:
br = BackgroundRepo().with_file("templates/shiyoukyokashinnseisho09.jpg")
text = TextRepo().with_file("texts/hvt.txt")
ri = (
RandomImage()
.with_config(fontsize=24, line_space=20, char_space=10, direction="column")
.with_background(br.get())
.with_text(text)
)
for i, p in enumerate(glob("fonts/*.ttf")):
if i % 2 == 0:
ri.with_label_font(p, label=0, is_random=True)
else:
ri.with_label_font(p, label=1, is_random=False)
img, boxes, labels, _ = ri.get()
draw = ImageDraw.Draw(img)
w, h = img.size
for box, label in zip(boxes, labels):
x0, y0, x1, y1 = box
assert x1 < w
assert y1 < h
assert x0 < w
assert y0 < h
if label == 0:
draw.rectangle(box, outline="red")
else:
draw.rectangle(box, outline="blue")
img.save("/store/test.png")
def test_image_without_background() -> None:
text = TextRepo().with_file("texts/hvt.txt")
ri = (
RandomImage()
.with_config(fontsize=24, line_space=20, char_space=10, direction="column")
.with_text(text)
)
for i, p in enumerate(glob("fonts/*.ttf")):
if i % 2 == 0:
ri.with_label_font(p, label=0, is_random=True)
else:
ri.with_label_font(p, label=1, is_random=False)
img, boxes, labels, _ = ri.get()
draw = ImageDraw.Draw(img)
w, h = img.size
for box, label in zip(boxes, labels):
x0, y0, x1, y1 = box
assert x1 < w
assert y1 < h
assert x0 < w
assert y0 < h
if label == 0:
draw.rectangle(box, outline="red")
else:
draw.rectangle(box, outline="blue")
img.save("/store/without-background.png")
| [
"[email protected]"
] | |
a19e75646c36ed5108be9cad9273be168194691c | e51886646479dad5b99beb33561fad2f57db86c0 | /crapc/helper.py | 877fdbd3b08ef297b214afd2d88757bcd6eb6b5d | [
"Apache-2.0"
] | permissive | iffy/crapc | 97332a15241ce1eed0bf6fb52b8331c43e7f7a96 | 08d52f008318a5b13ccd1f0afd4dbe4b4fceebcc | refs/heads/master | 2020-05-16T23:10:37.440129 | 2015-03-04T23:52:02 | 2015-03-04T23:52:02 | 15,877,333 | 0 | 0 | null | 2015-01-09T18:13:23 | 2014-01-13T18:27:31 | Python | UTF-8 | Python | false | false | 2,438 | py |
__all__ = ['RPCFromObject', 'PythonInterface', 'RPCFromClass']
import inspect
from zope.interface import implements
from crapc.error import MethodNotFound
from crapc.interface import ISystem
from crapc._request import Request
class _LazyWrappingRPCSystem(object):
"""
Create an L{ISystem} from the public methods of an object that are looked
up lazily.
"""
implements(ISystem)
def __init__(self, original):
self.original = original
def runProcedure(self, request):
if request.method.startswith('_'):
raise MethodNotFound(request.method)
try:
func = getattr(self.original, request.method)
if inspect.ismethod(func) or inspect.isfunction(func):
return func(*request.args(), **request.kwargs())
else:
raise MethodNotFound(request.method)
except AttributeError:
raise MethodNotFound(request.method)
def RPCFromObject(obj):
"""
Create an L{ISystem} from the public methods on this object.
@return: An L{ISystem}-implementing instance.
"""
return _LazyWrappingRPCSystem(obj)
def RPCFromClass(cls):
"""
Wrap an existing class to make a new class, that, when instantiated is
an L{ISystem} for an instance of the wrapped class.
You will be able to get at the instance by accessing the C{original}
attribute.
By default, all public methods are turned into RPC-available methods.
"""
methods = inspect.getmembers(cls)
class _RPC(object):
implements(ISystem)
def __init__(self, *args, **kwargs):
self.original = cls(*args, **kwargs)
def runProcedure(self, request):
try:
func = self._functions[request.method]
except KeyError:
raise MethodNotFound(request.full_method)
return func(self.original, *request.args(), **request.kwargs())
_functions = {}
for name, func in methods:
if name.startswith('_'):
continue
_functions[name] = func
return _RPC
class PythonInterface(object):
"""
An in-memory interface to an L{ISystem}.
Just read (all 2 lines of) the source.
"""
def __init__(self, rpc):
self.rpc = rpc
def call(self, method, *args, **kwargs):
return self.rpc.runProcedure(Request(method, args or kwargs))
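# A minimal usage sketch tying the helpers above together (illustrative only;
# `Calculator` and its `add` method are hypothetical and not part of this module):
#
#   class Calculator(object):
#       def add(self, a, b):
#           return a + b
#
#   rpc = RPCFromClass(Calculator)()   # wrap the class, then instantiate it
#   iface = PythonInterface(rpc)
#   iface.call('add', 2, 3)            # -> 5, dispatched to rpc.original.add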
| [
"[email protected]"
] | |
75d70fbb98b545fcc479cedcb71d0c0c1a66c7f3 | 3199331cede4a22b782f945c6a71150a10c61afc | /20210520PythonAdvanced/04-generator/gen03.py | 8e90543f12be76de586348e1b666f99624657b1a | [] | no_license | AuroraBoreas/language-review | 6957a3cde2ef1b6b996716addaee077e70351de8 | 2cb0c491db7d179c283dba205b4d124a8b9a52a3 | refs/heads/main | 2023-08-19T23:14:24.981111 | 2021-10-11T12:01:47 | 2021-10-11T12:01:47 | 343,345,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | "#Python is a protocol orientated lang; every top-level function or syntax has a corresponding dunder method implemented;"
import time
def compute(n:int)->int:
for i in range(n):
yield i
time.sleep(.5)
if __name__ == "__main__":
for i in compute(10):
print(i)
| [
"[email protected]"
] | |
4ae75bfe73911236396761c0f469cef3b96d3de8 | bc438e54c94f6ab8a178102b0913bb1957142589 | /0x09-python-everything_is_object/100-magic_string.py | e8ceede40fe306ebe5a1a60a2cf035a687f11668 | [] | no_license | paisap/holbertonschool-higher_level_programming | 75d5ac7abea9c94dcdb96c60e2b3bd3293346d4a | 1fde8e02d484c6cfe6b14847fd5d96c3fbf2cff3 | refs/heads/master | 2020-09-28T23:34:29.494192 | 2020-05-14T19:41:08 | 2020-05-14T19:41:08 | 226,892,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | #!/usr/bin/python3
def magic_string(mylist=[]):
mylist.append("Holberton")
return ', '.join(mylist)
| [
"[email protected]"
] | |
d77d4d53827e97725d74639c8df62462eeca897d | b56cc9f6a5eadf81cb832c5074f78fba33afad42 | /notes/filterByAsciiRows.py | 31c59dc028882c43e156f1f752e83ee79265da5b | [] | no_license | NicholasPiano/puzzle | 450c953555a836e0d387de61c67bddfd1634540a | be0d201449863694817f8c18a9e38606224abecf | refs/heads/master | 2020-03-29T13:00:36.291681 | 2015-06-09T20:08:41 | 2015-06-09T20:08:41 | 29,688,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,519 | py | #!/opt/local/bin/python2.7
print '\nrunning...\n'
from xlrd import *
from xlsxwriter.workbook import Workbook
source = 'Nigeria practice file.xlsx'
# http://stackoverflow.com/questions/196345/how-to-check-if-a-string-in-python-is-in-ascii
def is_ascii(s):
return all(ord(c) < 128 for c in s)
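# Illustrative behaviour of the helper above (examples assumed, not from the
# original script): is_ascii('Lagos') -> True, while a value containing a
# non-ASCII character such as u'Abúja' -> False.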
### STEP 1: ANALYSE WORKBOOK
workbook = open_workbook(filename=source,encoding_override="cp1252")
print 'file {} loaded.'.format(source)
sheet1 = workbook.sheet_by_index(0)
colNames = {}
for col in range(0, sheet1.ncols):
value = sheet1.cell(0,col)
name = str(value.value)
colNames[name] = col
colNames[col] = name # this is a fun trick
asciiRows = []
nonAsciiRows = []
# note that we don't check the title row.
for i in range(1,sheet1.nrows):
#for i in range(2):
row = sheet1.row(i)
success = True # is all the text on this row valid ASCII?
for j in range(sheet1.ncols):
cell = sheet1.cell(i,j)
# type 1 means it contains text. see the xlrd docs.
if cell.ctype == 1:
if not is_ascii(cell.value): success = False
if success == True:
asciiRows.append(i) # store the row number
else:
nonAsciiRows.append(i) # store the row number
print 'asciiRows:', len(asciiRows)
print 'nonAsciiRows:', len(nonAsciiRows)
### STEP 2: OUTPUT
output1 = 'Nigeria_locations_ascii_rows.xlsx'
output2 = 'Nigeria_locations_NON_ascii_rows.xlsx'
print 'writing ascii rows to {}'.format(output1)
wb1 = Workbook(output1)
sh1 = wb1.add_worksheet()
# write first row containing column names
row = 0
for i in range(sheet1.ncols):
name = colNames[i]
sh1.write(row, i, name)
# write all other rows
newRowNumber = 1 # we aren't leaving gaps / empty rows
for rowNumber in asciiRows:
# write each cell of the row
for colNumber in range(sheet1.ncols):
cell = sheet1.cell(rowNumber, colNumber)
sh1.write(newRowNumber, colNumber, cell.value)
newRowNumber += 1
print 'writing NON ascii rows to {}'.format(output2)
wb2 = Workbook(output2)
sh2 = wb2.add_worksheet()
# write first row containing column names
row = 0
for colNumber in range(sheet1.ncols):
name = colNames[colNumber]
sh2.write(row, colNumber, name)
# write all other rows
newRowNumber = 1 # we aren't leaving gaps / empty rows
for rowNumber in nonAsciiRows:
# write each cell of the row
for colNumber in range(sheet1.ncols):
cell = sheet1.cell(rowNumber, colNumber)
sh2.write(newRowNumber, colNumber, cell.value)
newRowNumber += 1
wb1.close()
wb2.close()
print '\ndone.\n'
| [
"[email protected]"
] | |
99b185b33b49e397501b5227108ba3f56760b675 | e48702862be879ceebcab8fb35eb74481f0b52bf | /apps/groups/migrations/0007_auto_20150907_1438.py | 9978a6e76866729512cf25ba008bb135bfffccee | [] | no_license | od-5/enjoy-africa | af79e976a790a6b0af5d39e527f34e829cccb8f4 | bb05d44dd4ac6557ca9394f81f80465ed5174b60 | refs/heads/master | 2021-01-10T22:29:22.967261 | 2015-12-28T16:35:16 | 2015-12-28T16:35:16 | 42,873,984 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 582 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('groups', '0006_auto_20150907_1325'),
]
operations = [
migrations.AlterModelOptions(
name='groups',
options={'ordering': ['-travel_start'], 'verbose_name': '\u0413\u0440\u0443\u043f\u043f\u043e\u0432\u044b\u0435 \u0442\u0443\u0440\u044b', 'verbose_name_plural': '\u0413\u0440\u0443\u043f\u043f\u043e\u0432\u044b\u0435 \u0442\u0443\u0440\u044b'},
),
]
| [
"[email protected]"
] | |
71b330b488e52804270b90f7039a827c4e222d72 | f09dc121f213f2881df3572288b7ee5b39246d73 | /aliyun-python-sdk-arms/aliyunsdkarms/request/v20190808/CreateRetcodeAppRequest.py | 5a9b285fe82df3bb2625f17670d00e02cf073d03 | [
"Apache-2.0"
] | permissive | hetw/aliyun-openapi-python-sdk | 2f31378ad6be0896fb8090423f607e9c7d3ae774 | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | refs/heads/master | 2023-01-19T22:42:36.214770 | 2020-12-04T10:55:14 | 2020-12-04T10:55:14 | 318,689,093 | 1 | 0 | NOASSERTION | 2020-12-05T03:03:03 | 2020-12-05T03:03:03 | null | UTF-8 | Python | false | false | 1,665 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkarms.endpoint import endpoint_data
class CreateRetcodeAppRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'ARMS', '2019-08-08', 'CreateRetcodeApp','arms')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_RetcodeAppName(self):
return self.get_query_params().get('RetcodeAppName')
def set_RetcodeAppName(self,RetcodeAppName):
self.add_query_param('RetcodeAppName',RetcodeAppName)
def get_RetcodeAppType(self):
return self.get_query_params().get('RetcodeAppType')
def set_RetcodeAppType(self,RetcodeAppType):
self.add_query_param('RetcodeAppType',RetcodeAppType) | [
"[email protected]"
] | |
ee170c27c9ebbfb8661950b51a290f617950767f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02694/s151928991.py | b58c3973aefeb1e64577a96d949fdf774070af65 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | X=int(input())
a=100
cnt=0
while a<X:
a=a+a//100
cnt+=1
print(cnt) | [
"[email protected]"
] | |
cdf41cbdd7ffb5aaf836ad13a9d2bcc91d0d6c4f | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_266/ch27_2019_03_08_12_05_37_480317.py | 88848104c47121d291e574f9ff740516855d64eb | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | cigarros = int(input('quantos cigarros fuma por dia: '))
anos = int(input('quantos anos fuma: '))
dias = anos*365
min = dias*10*cigarros
y = (min)/1440
print('voce perdeu {0} dias da sua vida'.format(y)) | [
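# (added note) The arithmetic above assumes 10 minutes of life lost per cigarette:
# dias = anos*365 converts years to days, min = dias*10*cigarros is the total in
# minutes, and dividing by 1440 (minutes per day) expresses the loss in days.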
"[email protected]"
] | |
cd22c23d0977b35c147f379d8bbf37ddec01fdcd | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnedda.py | 3fc5576135aefb89dde92bfe2e79089fdf22aba3 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 83 | py | ii = [('MarrFDI.py', 1), ('CoolWHM2.py', 1), ('CoolWHM.py', 1), ('CoolWHM3.py', 2)] | [
"[email protected]"
] | |
8211a06d9970d197e58ca99d1b537932a4f60b36 | 1c16b9812768abc9305a82aaa0a76a16668571cb | /0x16-api_advanced/0-subs.py | 6abde0d2cce700370f6cba0d160c0b4ecde8116a | [] | no_license | emmanavarro/holberton-system_engineering-devops | 73bbf5dcfe3661b68e1c9946bcb3fd6a03213a23 | bdf4b3e179518f9deb79856213e1c665cfde3d9e | refs/heads/master | 2020-12-23T01:48:07.725058 | 2020-10-07T23:18:18 | 2020-10-07T23:18:18 | 236,994,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | #!/usr/bin/python3
"""
Uses the reddit API to print the number of subscribers of a subreddit
"""
import requests
def number_of_subscribers(subreddit):
"""Get the numbers of subscribers by subreddit given"""
url_rsubs = "https://api.reddit.com/r/{}/about".format(subreddit)
headers = {'User-Agent': 'Python3'}
response = requests.get(url_rsubs, headers=headers,
allow_redirects=False)
if str(response) != "<Response [200]>":
return 0
r_json = response.json()
subs_count = r_json.get('data').get('subscribers')
return subs_count
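# Hypothetical usage (requires network access; the subreddit name is only an
# example): number_of_subscribers('programming') returns the current subscriber
# count, while a non-existent or redirected subreddit returns 0.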
| [
"[email protected]"
] | |
c70783d8cfd93612801582b5e2235579428603b7 | e9538b7ad6d0ce0ccfbb8e10c458f9e0b73926f6 | /tests/unit/modules/network/fortios/test_fortios_log_gui_display.py | 644e279cf41382231c315d93b3a1273a7d584043 | [] | no_license | ansible-collection-migration/misc.not_a_real_collection | b3ef8090c59de9ac30aca083c746ec3595d7f5f5 | 7ab1af924a3db4ada2f714b09bb392614344cb1e | refs/heads/master | 2020-12-18T13:48:51.849567 | 2020-01-22T17:39:18 | 2020-01-22T17:39:18 | 235,400,821 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,511 | py | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible_collections.misc.not_a_real_collection.plugins.modules import fortios_log_gui_display
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.fortios_log_gui_display.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_log_gui_display_creation(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_log_gui_display_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_log_gui_display_idempotent(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_log_gui_display_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'random_attribute_not_valid': 'tag',
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| [
"[email protected]"
] | |
b630a9f6c367e1fb16b0412c86faca800f21e951 | 9ab9d9a3883471763edbceea59a0e83170581b5f | /eggs/Paste-1.7.5.1-py2.7.egg/paste/exceptions/reporter.py | 95e31ba9ce0bd654f856a5f9ea8da4e0714090a5 | [
"CC-BY-2.5",
"AFL-2.1",
"AFL-3.0",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | asmmhossain/phyG | 24dc211dad5b3e89c87ff384e841f2e98bbd52db | 023f505b705ab953f502cbc55e90612047867583 | refs/heads/master | 2022-11-21T12:43:46.172725 | 2014-02-14T12:33:08 | 2014-02-14T12:33:08 | 13,800,552 | 0 | 1 | NOASSERTION | 2020-07-25T21:05:41 | 2013-10-23T11:04:25 | Python | UTF-8 | Python | false | false | 4,574 | py | # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
import smtplib
import time
try:
from socket import sslerror
except ImportError:
sslerror = None
from paste.exceptions import formatter
class Reporter(object):
def __init__(self, **conf):
for name, value in conf.items():
if not hasattr(self, name):
raise TypeError(
"The keyword argument %s was not expected"
% name)
setattr(self, name, value)
self.check_params()
def check_params(self):
pass
def format_date(self, exc_data):
return time.strftime('%c', exc_data.date)
def format_html(self, exc_data, **kw):
return formatter.format_html(exc_data, **kw)
def format_text(self, exc_data, **kw):
return formatter.format_text(exc_data, **kw)
class EmailReporter(Reporter):
to_addresses = None
from_address = None
smtp_server = 'localhost'
smtp_username = None
smtp_password = None
smtp_use_tls = False
subject_prefix = ''
def report(self, exc_data):
msg = self.assemble_email(exc_data)
server = smtplib.SMTP(self.smtp_server)
if self.smtp_use_tls:
server.ehlo()
server.starttls()
server.ehlo()
if self.smtp_username and self.smtp_password:
server.login(self.smtp_username, self.smtp_password)
server.sendmail(self.from_address,
self.to_addresses, msg.as_string())
try:
server.quit()
except sslerror:
# sslerror is raised in tls connections on closing sometimes
pass
def check_params(self):
if not self.to_addresses:
raise ValueError("You must set to_addresses")
if not self.from_address:
raise ValueError("You must set from_address")
if isinstance(self.to_addresses, (str, unicode)):
self.to_addresses = [self.to_addresses]
def assemble_email(self, exc_data):
short_html_version = self.format_html(
exc_data, show_hidden_frames=False)
long_html_version = self.format_html(
exc_data, show_hidden_frames=True)
text_version = self.format_text(
exc_data, show_hidden_frames=False)
msg = MIMEMultipart()
msg.set_type('multipart/alternative')
msg.preamble = msg.epilogue = ''
text_msg = MIMEText(text_version)
text_msg.set_type('text/plain')
text_msg.set_param('charset', 'ASCII')
msg.attach(text_msg)
html_msg = MIMEText(short_html_version)
html_msg.set_type('text/html')
# @@: Correct character set?
html_msg.set_param('charset', 'UTF-8')
html_long = MIMEText(long_html_version)
html_long.set_type('text/html')
html_long.set_param('charset', 'UTF-8')
msg.attach(html_msg)
msg.attach(html_long)
subject = '%s: %s' % (exc_data.exception_type,
formatter.truncate(str(exc_data.exception_value)))
msg['Subject'] = self.subject_prefix + subject
msg['From'] = self.from_address
msg['To'] = ', '.join(self.to_addresses)
return msg
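# A hypothetical configuration sketch (addresses and server are placeholders,
# and exc_data is assumed to be an already-collected exception-data object):
#
#   reporter = EmailReporter(to_addresses=['[email protected]'],
#                            from_address='[email protected]',
#                            smtp_server='smtp.example.com')
#   reporter.report(exc_data)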
class LogReporter(Reporter):
filename = None
show_hidden_frames = True
def check_params(self):
assert self.filename is not None, (
"You must give a filename")
def report(self, exc_data):
text = self.format_text(
exc_data, show_hidden_frames=self.show_hidden_frames)
f = open(self.filename, 'a')
try:
f.write(text + '\n' + '-'*60 + '\n')
finally:
f.close()
class FileReporter(Reporter):
file = None
show_hidden_frames = True
def check_params(self):
assert self.file is not None, (
"You must give a file object")
def report(self, exc_data):
text = self.format_text(
exc_data, show_hidden_frames=self.show_hidden_frames)
self.file.write(text + '\n' + '-'*60 + '\n')
class WSGIAppReporter(Reporter):
def __init__(self, exc_data):
self.exc_data = exc_data
def __call__(self, environ, start_response):
start_response('500 Server Error', [('Content-type', 'text/html')])
return [formatter.format_html(self.exc_data)]
| [
"[email protected]"
] | |
3c31d6df0e8e6f26f2bc2df7c1841a03cf228944 | d9f7123433fe473cfa2fd5c3438251f83ffb326c | /apps/login/migrations/0001_initial.py | 66a865cda502ac38bb6d10342d0d79a968d271ba | [] | no_license | mazurbeam/friends | 6c2d201220db52bc85eb1869fd6685eee372e920 | 1dc2432ad371113c0979158053c821a449ebbc6c | refs/heads/master | 2021-01-01T18:27:12.875643 | 2017-07-25T20:46:08 | 2017-07-25T20:46:08 | 98,345,240 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 736 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-07-25 17:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('f_name', models.CharField(max_length=400)),
('l_name', models.CharField(max_length=400)),
('email', models.CharField(max_length=400)),
('password', models.CharField(max_length=400)),
],
),
]
| [
"[email protected]"
] | |
0bcb6ed75c2ebe3f34acff106b6a3a6d6ad9de9d | 7eb606a7957e5500f163c93dc4b19418cf9cf335 | /ludwig/datasets/archives.py | 6b2aa057cc3df86a1494f0c6e365d8f5f814191f | [
"Apache-2.0",
"MIT"
] | permissive | ludwig-ai/ludwig | 024f74da86567a57ec8e30efcb4600f0c52333a1 | e1d023e41606c9b76b35e1d231c2f13368a30eca | refs/heads/master | 2023-09-03T08:07:32.978301 | 2023-09-01T19:39:32 | 2023-09-01T19:39:32 | 163,346,054 | 2,567 | 285 | Apache-2.0 | 2023-09-14T20:34:52 | 2018-12-27T23:58:12 | Python | UTF-8 | Python | false | false | 5,833 | py | #! /usr/bin/env python
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import gzip
import logging
import os
import shutil
import tarfile
from enum import Enum
from typing import List, Optional
from zipfile import ZipFile
from ludwig.utils.fs_utils import upload_output_directory
logger = logging.getLogger(__name__)
class ArchiveType(str, Enum):
"""The type of file archive."""
UNKNOWN = "unknown"
ZIP = "zip"
GZIP = "gz"
TAR = "tar"
TAR_ZIP = "tar.z"
TAR_BZ2 = "tar.bz2"
TAR_GZ = "tar.gz"
def infer_archive_type(archive_path):
"""Try to infer archive type from file extension."""
# Get the path extension including multiple extensions, ex. ".tar.gz"
extension = ".".join(["", *os.path.basename(archive_path).split(".")[1:]])
extension = extension.lower()
if extension.endswith(".tar.z") or extension.endswith(".tar.zip"):
return ArchiveType.TAR_ZIP
elif extension.endswith(".tar.bz2") or extension.endswith(".tbz2"):
return ArchiveType.TAR_BZ2
elif extension.endswith(".tar.gz") or extension.endswith(".tgz"):
return ArchiveType.TAR_GZ
elif extension.endswith(".tar"):
return ArchiveType.TAR
elif extension.endswith(".zip") or extension.endswith(".zipx"):
return ArchiveType.ZIP
elif extension.endswith(".gz") or extension.endswith(".gzip"):
return ArchiveType.GZIP
else:
return ArchiveType.UNKNOWN
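# Examples of the mapping implemented above (illustrative file names):
#   infer_archive_type("data.tar.gz")  -> ArchiveType.TAR_GZ
#   infer_archive_type("images.zipx")  -> ArchiveType.ZIP
#   infer_archive_type("notes.txt")    -> ArchiveType.UNKNOWN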
def is_archive(path):
"""Does this path a supported archive type."""
return infer_archive_type(path) != ArchiveType.UNKNOWN
def list_archive(archive_path, archive_type: Optional[ArchiveType] = None) -> List[str]:
"""Return list of files extracted in an archive (without extracting them)."""
if archive_type is None:
archive_type = infer_archive_type(archive_path)
if archive_type == ArchiveType.UNKNOWN:
logger.error(
f"Could not infer type of archive {archive_path}. May be an unsupported archive type."
"Specify archive_type in the dataset config if this file has an unknown file extension."
)
return []
if archive_type == ArchiveType.ZIP:
with ZipFile(archive_path) as zfile:
return zfile.namelist()
elif archive_type == ArchiveType.GZIP:
return [".".join(archive_path.split(".")[:-1])] # Path minus the .gz extension
elif archive_type in {ArchiveType.TAR, ArchiveType.TAR_ZIP, ArchiveType.TAR_BZ2, ArchiveType.TAR_GZ}:
with tarfile.open(archive_path) as tar_file:
return tar_file.getnames()
else:
logger.error(f"Unsupported archive: {archive_path}")
return []
def extract_archive(archive_path: str, archive_type: Optional[ArchiveType] = None) -> List[str]:
"""Extracts files from archive (into the same directory), returns a list of extracted files.
Args:
archive_path - The full path to the archive.
Returns A list of the files extracted.
"""
if archive_type is None:
archive_type = infer_archive_type(archive_path)
if archive_type == ArchiveType.UNKNOWN:
logger.error(
f"Could not infer type of archive {archive_path}. May be an unsupported archive type."
"Specify archive_type in the dataset config if this file has an unknown file extension."
)
return []
archive_directory = os.path.dirname(archive_path)
directory_contents_before = os.listdir(archive_directory)
with upload_output_directory(archive_directory) as (tmpdir, _):
if archive_type == ArchiveType.ZIP:
with ZipFile(archive_path) as zfile:
zfile.extractall(tmpdir)
elif archive_type == ArchiveType.GZIP:
gzip_content_file = ".".join(archive_path.split(".")[:-1]) # Path minus the .gz extension
with gzip.open(archive_path) as gzfile:
with open(os.path.join(tmpdir, gzip_content_file), "wb") as output:
shutil.copyfileobj(gzfile, output)
elif archive_type in {ArchiveType.TAR, ArchiveType.TAR_ZIP, ArchiveType.TAR_BZ2, ArchiveType.TAR_GZ}:
with tarfile.open(archive_path) as tar_file:
def is_within_directory(directory, target):
abs_directory = os.path.abspath(directory)
abs_target = os.path.abspath(target)
prefix = os.path.commonprefix([abs_directory, abs_target])
return prefix == abs_directory
def safe_extract(tar, path=".", members=None, *, numeric_owner=False):
for member in tar.getmembers():
member_path = os.path.join(path, member.name)
if not is_within_directory(path, member_path):
raise Exception("Attempted Path Traversal in Tar File")
tar.extractall(path, members, numeric_owner=numeric_owner)
safe_extract(tar_file, path=tmpdir)
else:
logger.error(f"Unsupported archive: {archive_path}")
directory_contents_after = set(os.listdir(archive_directory))
return directory_contents_after.difference(directory_contents_before)
| [
"[email protected]"
] | |
c9d7d43fca7a9180a32c04151abb9723f931ab20 | dbe5973d69df9c5a5f3b06b7451a0de7086ebda4 | /myapps/error_handlers.py | dc9bf7e6a63c1a820ff2ebef03ed0cecbfccf5fc | [] | no_license | phares/mall | 29e7c0fdf3222a05161de36c8252167ab59df7be | d3f0093828c892ce46d55afaa245e5780555cc68 | refs/heads/master | 2021-01-22T23:53:27.535609 | 2017-04-30T09:17:53 | 2017-04-30T09:17:53 | 85,676,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | from django.shortcuts import render
def _403(request):
return render(request, '403.html', {})
def _404(request):
return render(request, '404.html', {})
def _500(request):
return render(request, '500.html', {})
| [
"[email protected]"
] | |
8ce8de29e51709461abbc005a1bb995b1642d349 | 4f001a046035884748af1d3504fb4ba61788f6e8 | /viedeos_manager-New_code_with_resolve_issoe/imratedme/urls.py | 489abc93252e15ded22ec19775d2733e4e7f1a30 | [] | no_license | sidkushwah123/video_manager | eb19686dcb87612dea9c2e56b6a4de3defbe123f | eb39f72c574548bd35ebc05ae05806d602995e93 | refs/heads/main | 2023-07-29T10:09:09.862337 | 2021-09-11T15:56:00 | 2021-09-11T15:56:00 | 405,418,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,786 | py | """imratedme URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.contrib import admin
from django.urls import path,include
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('summernote/', include('django_summernote.urls')),
path('', include('django.contrib.auth.urls')),
path('', include(('home.urls','home'),namespace='home')),
path('account/', include(('account.urls','account'),namespace='account')),
path('video/', include(('videos.urls','videos'),namespace='videos')),
path('profiles/', include(('profiles.urls','profiles'),namespace='profiles')),
path('dashboard/', include(('my_videos.urls','my_videos'),namespace='my_videos')),
path('videos-detail/', include(('videos_detail.urls','videos_detail'),namespace='videos_detail')),
path('favourite-videos/', include(('favourite_videos.urls','favourite_videos'),namespace='favourite_videos')),
path('search/', include(('search.urls','search'),namespace='search')),
path('subscription/', include(('subscription.urls','subscription'),namespace='subscription')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"[email protected]"
] | |
da927004a68396766225719a622fcf949077d8ff | 2c81769c05ffb32c0da0e5d1a1bb91750e37afc6 | /google/area120/tables_v1alpha1/services/tables_service/async_client.py | 57e5ddc22ccc55d0e76bced6c1220f0716f54ee5 | [
"Apache-2.0"
] | permissive | FavoriteProjects/python-area120-tables | cc43e3864bbd07395f6a0b8c928517ce183ad7dc | 3fdf054851d51e1c23dbed04bc0202e1a86d34a8 | refs/heads/master | 2022-12-26T18:41:10.690438 | 2020-09-22T22:15:14 | 2020-09-22T22:15:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,967 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.area120.tables_v1alpha1.services.tables_service import pagers
from google.area120.tables_v1alpha1.types import tables
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
from .transports.base import TablesServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import TablesServiceGrpcAsyncIOTransport
from .client import TablesServiceClient
class TablesServiceAsyncClient:
"""The Tables Service provides an API for reading and updating tables.
It defines the following resource model:
- The API has a collection of
[Table][google.area120.tables.v1alpha1.Table] resources, named
``tables/*``
- Each Table has a collection of
[Row][google.area120.tables.v1alpha1.Row] resources, named
``tables/*/rows/*``
"""
_client: TablesServiceClient
DEFAULT_ENDPOINT = TablesServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = TablesServiceClient.DEFAULT_MTLS_ENDPOINT
row_path = staticmethod(TablesServiceClient.row_path)
from_service_account_file = TablesServiceClient.from_service_account_file
from_service_account_json = from_service_account_file
get_transport_class = functools.partial(
type(TablesServiceClient).get_transport_class, type(TablesServiceClient)
)
def __init__(
self,
*,
credentials: credentials.Credentials = None,
transport: Union[str, TablesServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the tables service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.TablesServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint, this is the default value for
the environment variable) and "auto" (auto switch to the default
mTLS endpoint if client SSL credentials is present). However,
the ``api_endpoint`` property takes precedence if provided.
(2) The ``client_cert_source`` property is used to provide client
SSL credentials for mutual TLS transport. If not provided, the
default SSL credentials will be used if present.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = TablesServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def get_table(
self,
request: tables.GetTableRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> tables.Table:
r"""Gets a table. Returns NOT_FOUND if the table does not exist.
Args:
request (:class:`~.tables.GetTableRequest`):
The request object. Request message for
TablesService.GetTable.
name (:class:`str`):
Required. The name of the table to
retrieve. Format: tables/{table}
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.tables.Table:
A single table.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = tables.GetTableRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_table,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def list_tables(
self,
request: tables.ListTablesRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListTablesAsyncPager:
r"""Lists tables for the user.
Args:
request (:class:`~.tables.ListTablesRequest`):
The request object. Request message for
TablesService.ListTables.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.pagers.ListTablesAsyncPager:
Response message for
TablesService.ListTables.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
request = tables.ListTablesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_tables,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListTablesAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_row(
self,
request: tables.GetRowRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> tables.Row:
r"""Gets a row. Returns NOT_FOUND if the row does not exist in the
table.
Args:
request (:class:`~.tables.GetRowRequest`):
The request object. Request message for
TablesService.GetRow.
name (:class:`str`):
Required. The name of the row to
retrieve. Format:
tables/{table}/rows/{row}
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.tables.Row:
A single row in a table.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = tables.GetRowRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_row,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def list_rows(
self,
request: tables.ListRowsRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListRowsAsyncPager:
r"""Lists rows in a table. Returns NOT_FOUND if the table does not
exist.
Args:
request (:class:`~.tables.ListRowsRequest`):
The request object. Request message for
TablesService.ListRows.
parent (:class:`str`):
Required. The parent table.
Format: tables/{table}
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.pagers.ListRowsAsyncPager:
Response message for
TablesService.ListRows.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([parent]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = tables.ListRowsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_rows,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListRowsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def create_row(
self,
request: tables.CreateRowRequest = None,
*,
parent: str = None,
row: tables.Row = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> tables.Row:
r"""Creates a row.
Args:
request (:class:`~.tables.CreateRowRequest`):
The request object. Request message for
TablesService.CreateRow.
parent (:class:`str`):
Required. The parent table where this
row will be created. Format:
tables/{table}
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
row (:class:`~.tables.Row`):
Required. The row to create.
This corresponds to the ``row`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.tables.Row:
A single row in a table.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([parent, row]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = tables.CreateRowRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if row is not None:
request.row = row
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_row,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def batch_create_rows(
self,
request: tables.BatchCreateRowsRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> tables.BatchCreateRowsResponse:
r"""Creates multiple rows.
Args:
request (:class:`~.tables.BatchCreateRowsRequest`):
The request object. Request message for
TablesService.BatchCreateRows.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.tables.BatchCreateRowsResponse:
Response message for
TablesService.BatchCreateRows.
"""
# Create or coerce a protobuf request object.
request = tables.BatchCreateRowsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_create_rows,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def update_row(
self,
request: tables.UpdateRowRequest = None,
*,
row: tables.Row = None,
update_mask: field_mask.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> tables.Row:
r"""Updates a row.
Args:
request (:class:`~.tables.UpdateRowRequest`):
The request object. Request message for
TablesService.UpdateRow.
row (:class:`~.tables.Row`):
Required. The row to update.
This corresponds to the ``row`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`~.field_mask.FieldMask`):
The list of fields to update.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.tables.Row:
A single row in a table.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([row, update_mask]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = tables.UpdateRowRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if row is not None:
request.row = row
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_row,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("row.name", request.row.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def batch_update_rows(
self,
request: tables.BatchUpdateRowsRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> tables.BatchUpdateRowsResponse:
r"""Updates multiple rows.
Args:
request (:class:`~.tables.BatchUpdateRowsRequest`):
The request object. Request message for
TablesService.BatchUpdateRows.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.tables.BatchUpdateRowsResponse:
Response message for
TablesService.BatchUpdateRows.
"""
# Create or coerce a protobuf request object.
request = tables.BatchUpdateRowsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_update_rows,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def delete_row(
self,
request: tables.DeleteRowRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes a row.
Args:
request (:class:`~.tables.DeleteRowRequest`):
The request object. Request message for
TablesService.DeleteRow
name (:class:`str`):
Required. The name of the row to
delete. Format:
tables/{table}/rows/{row}
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = tables.DeleteRowRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_row,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
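    # Usage sketch (illustrative only): deleting a row by its resource name,
    # which follows the tables/{table}/rows/{row} format described above.
    #
    #   await client.delete_row(name="tables/<table>/rows/<row>")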
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-area120-tables",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("TablesServiceAsyncClient",)
| [
"[email protected]"
] | |
615dc11cdddc204b7d4d84d3e3e6ffd4e4ab1827 | 25ea9de5a0b2a66ad69dc1cdff0869117c3d32bd | /usuarios/migrations/0002_auto_20210318_1714.py | 28b645d0db5cf0dc537655ece2c487aa5453804f | [] | no_license | arm98sub/djangoDeploy | 070b634979ae96935228c6f67d91305109be6625 | b1420b68100b357993e9ee062fd008b6f68accf0 | refs/heads/master | 2023-05-21T20:41:52.975252 | 2021-06-01T23:25:53 | 2021-06-01T23:25:53 | 372,984,449 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | # Generated by Django 3.1.6 on 2021-03-18 17:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('usuarios', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='usuario',
name='foto',
field=models.ImageField(blank=True, null=True, upload_to='perfiles', verbose_name='Foto de perfil'),
),
]
| [
"[email protected]"
] | |
8587e5c358d7da81e23a5053acacf1b5abfabd9d | f8ffa8ff257266df3de9d20d95b291e393f88434 | /Python from scratch/Zadania/zadania domowe/nr1_08-09-2018/zad_dom_nr1.py | 0b44e7ed08182fb13edec812cb43f0deaad16759 | [] | no_license | janiszewskibartlomiej/Python_Code_Me_Gda | c0583c068ef08b6130398ddf93c3a3d1a843b487 | 7568de2a9acf80bab1429bb55bafd89daad9b729 | refs/heads/master | 2020-03-30T05:06:26.757033 | 2020-03-02T08:53:28 | 2020-03-02T08:53:28 | 150,781,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 969 | py | liczba_str = input('Wpisz liczbę całkowią: ')
liczba = int(liczba_str)
liczba_podzielna_przez_2 = liczba % 2
liczba_podzielna_przez_3 = liczba % 3
liczba_podzielna_przez_4 = liczba % 4
liczba_podzielna_przez_5 = liczba % 5
liczba_podzielna_przez_6 = liczba % 6
liczba_podzielna_przez_9 = liczba % 9
liczba_podzielna_przez_10 = liczba % 10
if liczba_podzielna_przez_2 == 0:
print('Liczba ', liczba,' jest podzielna przez 2')
if liczba_podzielna_przez_3 == 0:
print('Liczba ', liczba,' jest podzielna przez 3')
if liczba_podzielna_przez_4 == 0:
print('Liczba ', liczba,' jest podzielna przez 4')
if liczba_podzielna_przez_5 == 0:
print('Liczba ', liczba,' jest podzielna przez 5')
if liczba_podzielna_przez_6 == 0:
print('Liczba ', liczba,' jest podzielna przez 6')
if liczba_podzielna_przez_9 == 0:
print('Liczba ', liczba,' jest podzielna przez 9')
if liczba_podzielna_przez_10 == 0:
print('Liczba ', liczba,' jest podzielna przez 10')
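# A more compact equivalent of the repeated checks above (illustrative sketch,
# not part of the original exercise): loop over the divisors instead.
#
#   for dzielnik in (2, 3, 4, 5, 6, 9, 10):
#       if liczba % dzielnik == 0:
#           print('Liczba ', liczba, ' jest podzielna przez', dzielnik)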
| [
"[email protected]"
] | |
24483d9352ce4f2d62458534a9923202d3ae25bf | d274e22b1cc5d546855fe46b089b13cfe2f4047c | /random/solutions/q08_ThirdMaximumNumber.py | 93d327b69fb06ea864b1db2d3724352a9fe9459b | [] | no_license | varunkumar032/lockdown-leetcode | ca6b7a8133033110680dd226c897dd8a1482682b | 15a72a53be9005eca816f018cb1b244f2aa4cdfb | refs/heads/master | 2023-06-30T08:31:54.323747 | 2021-07-12T11:29:59 | 2021-07-12T11:29:59 | 260,616,280 | 0 | 0 | null | 2021-05-06T10:24:48 | 2020-05-02T04:52:37 | Python | UTF-8 | Python | false | false | 1,108 | py | # Given integer array nums, return the third maximum number in this array. If the third maximum does not exist, return the maximum number.
# Example 1:
# Input: nums = [3,2,1]
# Output: 1
# Explanation: The third maximum is 1.
# Example 2:
# Input: nums = [1,2]
# Output: 2
# Explanation: The third maximum does not exist, so the maximum (2) is returned instead.
# Example 3:
# Input: nums = [2,2,3,1]
# Output: 1
# Explanation: Note that the third maximum here means the third maximum distinct number.
# Both numbers with value 2 are considered as the second maximum.
def thirdMax(nums):
firstMaxNum = secondMaxNum = thirdMaxNum = None
for num in nums:
if num == firstMaxNum or num == secondMaxNum or num == thirdMaxNum:
continue
if firstMaxNum is None or num > firstMaxNum:
thirdMaxNum = secondMaxNum
secondMaxNum = firstMaxNum
firstMaxNum = num
elif secondMaxNum is None or num > secondMaxNum:
thirdMaxNum = secondMaxNum
secondMaxNum = num
elif thirdMaxNum is None or num > thirdMaxNum:
thirdMaxNum = num
return thirdMaxNum if thirdMaxNum is not None else firstMaxNum
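# Illustrative check against the examples quoted above (added for clarity,
# not part of the original solution):
if __name__ == "__main__":
    assert thirdMax([3, 2, 1]) == 1
    assert thirdMax([1, 2]) == 2        # no third maximum, so the maximum is returned
    assert thirdMax([2, 2, 3, 1]) == 1  # duplicate values count only once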
| [
"[email protected]"
] | |
b67e033f8ce190876bde29161ecb2bec08ec8442 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/KCB_YCHF/KCB_YCHF_MM/SHOffer/YCHF_KCBYCHF_SHBP_288.py | e62dd95589968fc2cb8e097b41b5f82371b6c0f5 | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,484 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test//xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test//service")
from ServiceConfig import *
from ARmainservice import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test//mysql")
from CaseParmInsertMysql import *
from SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test//utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
from env_restart import *
class YCHF_KCBYCHF_SHBP_288(xtp_test_case):
def setUp(self):
#sql_transfer = SqlData_Transfer()
#sql_transfer.transfer_fund_asset('YCHF_KCBYCHF_SHBP_288')
#clear_data_and_restart_all()
#Api.trade.Logout()
#Api.trade.Login()
pass
#
def test_YCHF_KCBYCHF_SHBP_288(self):
title = '重启数据库服务(沪A限价部撤卖出)'
        # Define the expected values for this test case
        # Expected status: initial, not filled, partially filled, fully filled, partial-cancel reported, partially cancelled, reported pending cancel, cancelled, rejected order, cancel rejected, internal cancel
        # xtp_ID and cancel_xtpID default to 0 and do not need to be changed
case_goal = {
'期望状态': '部撤',
'errorID': 0,
'errorMSG': queryOrderErrorMsg(0),
'是否生成报单': '是',
'是否是撤废': '否',
# '是否是新股申购': '',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameters ------------------------------------------
        # Parameters: security code, market, security type, security status, trading status, side (B=buy, S=sell), expected status, Api
stkparm = QueryStkPriceQty('688000', '1', '4', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'报单测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
print(stkparm['错误原因'])
self.assertEqual(rs['报单测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':3,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'price': stkparm['随机中间价'],
'quantity': 300,
'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
rs = serviceTest(Api, case_goal, wt_reqs)
logger.warning('执行结果为' + str(rs['报单测试结果']) + ','
+ str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
        ## Restore the available funds
#sql_transfer = SqlData_Transfer()
#sql_transfer.transfer_fund_asset('YW_KCB_BAK_000')
#oms_restart()
self.assertEqual(rs['报单测试结果'], True) # 211
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
5df8bb3efc81b4b1dde446829495f73005a13f29 | fa3e527114cd5799dddb0a25067da4923eae354e | /DataPrepare/FastSim/GAN/BES/Dedx/pid_check/plot_pid_eff.py | 10e13a0c58d2d4fb1bfb00937d19f712808da57c | [] | no_license | wenxingfang/FastSim_ML | e64c6b56ce2afd703d1ddda0ada2de6f65fde049 | d2f1abbb2f6879313d5f4f137b64c4d8bf10fe83 | refs/heads/master | 2022-11-28T01:35:39.727895 | 2020-08-03T15:47:37 | 2020-08-03T15:47:37 | 284,734,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,800 | py | #!/usr/bin/env python
"""
Plot pid efficiency
"""
__author__ = "Maoqiang JING <[email protected]>"
__copyright__ = "Copyright (c) Maoqiang JING"
__created__ = "[2020-03-27 Fri 10:53]"
import numpy as np
import sys, os
import logging
import ROOT
import math
import gc
from ROOT import TChain,TH1F,TH2F,TCanvas,gStyle, TFile, TGraphAsymmErrors, TLegend
from params import pid_eff, data_path
from array import array
logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s- %(message)s')
gStyle.SetOptTitle(0)
gStyle.SetOptTitle(0)
gStyle.SetCanvasDefH(900)
gStyle.SetCanvasDefW(450)
gStyle.SetNdivisions(505,"xy")
gStyle.SetStatFontSize(0.05)
gStyle.SetPaintTextFormat("4.2f")
def usage():
sys.stdout.write('''
NAME
plot_pid_eff.py
SYNOPSIS
./plot_pid_eff.py [var] [particle] [ptrk_min] [ptrk_max]
AUTHOR
Maoqiang JING <[email protected]>
DATE
March 2020
\n''')
def set_canvas_style(mbc):
mbc.SetFillColor(0)
mbc.SetLeftMargin(0.15)
mbc.SetRightMargin(0.15)
mbc.SetTopMargin(0.1)
mbc.SetBottomMargin(0.15)
def make_weight_map(f1, f2, ch, tag, mom_min, mom_max, costheta_min, costheta_max):
theta_min = -1
theta_max = 1
theta_bin = 20
p_min = 0
p_max = 2.5
p_bin = 25
f1_h2 = TH2F('%s_data' %tag, '', p_bin, p_min, p_max, theta_bin, theta_min, theta_max)
f2_h2 = TH2F('%s_mc' %tag, '', p_bin, p_min, p_max, theta_bin, theta_min, theta_max)
w_map = TH2F('%s_weight'%tag, '', p_bin, p_min, p_max, theta_bin, theta_min, theta_max)
chain1 = TChain('n103')
chain1.Add(f1)
for i in range(chain1.GetEntries()):
chain1.GetEntry(i)
ptrk = getattr(chain1, 'ptrk')
charge = getattr(chain1, 'charge')
costheta = getattr(chain1, 'costheta')
prob = getattr(chain1, 'prob')
dEdx_hit = getattr(chain1, 'dEdx_hit')
if charge != ch:continue
if ptrk < mom_min or ptrk > mom_max :continue
if costheta < costheta_min or costheta > costheta_max :continue
        if len(prob) != 5:continue ## for proton data, sometimes it has zero prob
######################## for proton background remove #
'''
mean_dedx = 0
for idedx in range(len(dEdx_hit)):
mean_dedx += dEdx_hit[idedx]
mean_dedx = mean_dedx/len(dEdx_hit) if len(dEdx_hit) != 0 else 0
if ptrk < 0.6 and mean_dedx < 600: continue
'''
#######################################################
f1_h2.Fill(ptrk, costheta)
chain2 = TChain('n103')
chain2.Add(f2)
for i in range(chain2.GetEntries()):
chain2.GetEntry(i)
ptrk = getattr(chain2, 'ptrk')
charge = getattr(chain2, 'charge')
costheta = getattr(chain2, 'costheta')
prob = getattr(chain2, 'prob')
if charge != ch:continue
if ptrk < mom_min or ptrk > mom_max :continue
if costheta < costheta_min or costheta > costheta_max :continue
if len(prob) != 5:continue
f2_h2.Fill(ptrk, costheta)
w_map.Divide(f1_h2, f2_h2)
mbc =TCanvas("H2_%s"%tag,"",1500,500)
set_canvas_style(mbc)
mbc.Divide(3, 1)
mbc.cd(1)
f1_h2.Draw("COLZ")
mbc.cd(2)
f2_h2.Draw("COLZ")
mbc.cd(3)
w_map.Draw("COLZ")
str_c={1:'+',-1:'-'}
print('ch=',ch)
print('str=',str_c[ch])
mbc.SaveAs('%s/data_mc_%s_%s%s.png'%(plot_path, tag, particle, str(str_c[ch])) )
return w_map
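# How the returned weight map is consumed downstream (sketch of the lookup done
# in process() below): each MC entry is re-weighted by the data/MC ratio of its
# (ptrk, costheta) bin.
#
#   binx = w_map.GetXaxis().FindBin(ptrk)
#   biny = w_map.GetYaxis().FindBin(costheta)
#   weight = w_map.GetBinContent(binx, biny)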
def process(particle, fname, cg, tag, wmap, mom_min, mom_max, costheta_min, costheta_max):
xaxis = wmap.GetXaxis()
yaxis = wmap.GetYaxis()
p_N_bin = xaxis.GetNbins()
p_bin_max = xaxis.GetXmax()
p_bin_min = xaxis.GetXmin()
costheta_N_bin = yaxis.GetNbins()
costheta_bin_max = yaxis.GetXmax()
costheta_bin_min = yaxis.GetXmin()
h_p_all = TH1F('%s_h_p_all' %tag, '', p_N_bin, p_bin_min, p_bin_max)
h_p_pass = TH1F('%s_h_p_pass' %tag, '', p_N_bin, p_bin_min, p_bin_max)
h_costheta_all = TH1F('%s_h_costheta_all' %tag, '', costheta_N_bin, costheta_bin_min, costheta_bin_max)
h_costheta_pass = TH1F('%s_h_costheta_pass'%tag, '', costheta_N_bin, costheta_bin_min, costheta_bin_max)
h_theta_all = TH1F('%s_h_theta_all' %tag, '', 18, 0, 180)
h_theta_pass = TH1F('%s_h_theta_pass'%tag, '', 18, 0, 180)
h_p_all.Sumw2()
h_p_pass.Sumw2()
h_costheta_all.Sumw2()
h_costheta_pass.Sumw2()
h_theta_all.Sumw2()
h_theta_pass.Sumw2()
###################################3
h_ndedx = TH1F('%s_ndedx' %tag , '', 60, 0, 60)
h_usedhit = TH1F('%s_usedhit' %tag , '', 60, 0, 60)
h_dedx = TH1F('%s_dedx' %tag , '', 300 , 0, 3000)
h_dEdx_meas = TH1F('%s_dEdx_meas' %tag , '', 300 , 0, 900 )
h_dedx_p = TH2F('%s_dedx_p' %tag , '', 1000, 0, 3000, 250, 0, 2.5)
h_dedx_theta = TH2F('%s_dedx_theta' %tag , '', 1000, 0, 3000, 180, 0, 180)
p_bin = np.arange(0,3,0.5)
theta_bin = np.arange(0,180,10)
h_p_list = []
h_dEdx_meas_p_list = []
h_theta_list = []
for i in range(len(p_bin)-1):
h_p_list .append(TH1F('%s__p_%.3f_%.3f__dedx'%(tag, p_bin[i], p_bin[i+1]) , '', 300, 0, 3000))
h_dEdx_meas_p_list.append(TH1F('%s__dEdx_meas_p_%.3f_%.3f__dedx'%(tag, p_bin[i], p_bin[i+1]) , '', 300, 0, 900))
for i in range(len(theta_bin)-1):
h_theta_list.append(TH1F('%s__theta_%.3f_%.3f__dedx'%(tag, theta_bin[i], theta_bin[i+1]) , '', 300, 0, 3000))
###################################3
h_prob_e = TH1F('%s_prob_e' %tag , '', 50, 0, 1)
h_prob_mu = TH1F('%s_prob_mu' %tag , '', 50, 0, 1)
h_prob_p = TH1F('%s_prob_p' %tag , '', 50, 0, 1)
h_prob_k = TH1F('%s_prob_k' %tag , '', 50, 0, 1)
h_prob_pi = TH1F('%s_prob_pi' %tag , '', 50, 0, 1)
h_chi_e = TH1F('%s_chi_e' %tag , '', 50, -5, 20)
h_chi_mu = TH1F('%s_chi_mu' %tag , '', 50, -5, 20)
h_chi_p = TH1F('%s_chi_p' %tag , '', 50, -5, 20)
h_chi_k = TH1F('%s_chi_k' %tag , '', 50, -5, 20)
h_chi_pi = TH1F('%s_chi_pi' %tag , '', 50, -5, 20)
###################################3
h_hit_dedx = {}
theta_min = -1
theta_max = 1.1
theta_step = 0.1
p_min = 0.3
p_max = 2.1
p_step = 0.2
p_list = np.arange(p_min, p_max, p_step)
theta_list = np.arange(theta_min, theta_max, theta_step)
for i in range(p_list.shape[0]-1):
for j in range(theta_list.shape[0]-1):
str_key = "%s_%s__%s_%s"%( str(p_list[i]), str(p_list[i+1]), str(theta_list[j]), str(theta_list[j+1]) )
h_hit_dedx[str_key] = TH1F('%s_%s_hit_dedx'%(tag, str_key) , '', 200, 0, 2000)
h2_dedx_mean = TH2F('%s_dedx_mean'%tag, '', len(theta_list)-1, theta_list, len(p_list)-1, p_list)
h2_dedx_std = TH2F('%s_dedx_std' %tag, '', len(theta_list)-1, theta_list, len(p_list)-1, p_list)
h2_dedx_mean_std = [h2_dedx_mean, h2_dedx_std]
###################################3
chain = TChain('n103')
chain.Add(fname)
for i in range(chain.GetEntries()):
chain.GetEntry(i)
ptrk = getattr(chain, 'ptrk')
charge = getattr(chain, 'charge')
costheta = getattr(chain, 'costheta')
prob = getattr(chain, 'prob')
chi = getattr(chain, 'chi' )
dEdx_meas= getattr(chain, 'dEdx_meas')
usedhit = getattr(chain, 'usedhit')
ndedxhit = getattr(chain, 'ndedxhit')
dEdx_hit = getattr(chain, 'dEdx_hit')
tmp_theta = math.acos(costheta)*180/math.pi
if charge != cg:continue
if len(prob) != 5:continue
if ptrk < mom_min or ptrk > mom_max :continue
if costheta < costheta_min or costheta > costheta_max :continue
'''
if tag == 'data':
######################## for proton background remove #
mean_dedx = 0
for idedx in range(len(dEdx_hit)):
mean_dedx += dEdx_hit[idedx]
mean_dedx = mean_dedx/len(dEdx_hit) if len(dEdx_hit) != 0 else 0
if ptrk < 0.6 and mean_dedx < 600: continue
#######################################################
'''
we = 1
if tag != 'data':
binx = xaxis.FindBin(ptrk)
biny = yaxis.FindBin(costheta)
we= wmap.GetBinContent(binx, biny)
h_p_all .Fill(ptrk ,we)
h_costheta_all.Fill(costheta,we)
h_theta_all.Fill(math.acos(costheta)*180/math.pi,we)
'''
#if (prob[0]>prob[1]) and (prob[0]>prob[2]) and (prob[0]>prob[3]):
#if (prob[0]>prob[1]) and (prob[0]>prob[4]) and (prob[0]>prob[3]):
#if (prob[0]>prob[2]) and (prob[0]>prob[4]) and (prob[0]>prob[3]):
if (prob[0]>prob[1]) and (prob[0]>prob[2]) and (prob[0]>prob[4]):
h_p_all .Fill(ptrk ,we)
h_costheta_all.Fill(costheta,we)
h_theta_all.Fill(math.acos(costheta)*180/math.pi,we)
'''
pass_cut = False
if particle == 'e':
pass_cut = (prob[0]>prob[1]) and (prob[0]>prob[2]) and (prob[0]>prob[3]) and (prob[0]>prob[4])
elif particle == 'mu':
pass_cut = (prob[1]>prob[0]) and (prob[1]>prob[2]) and (prob[1]>prob[3]) and (prob[1]>prob[4])
elif particle == 'pi':
pass_cut = (prob[2]>prob[3]) and (prob[2]>prob[4])
elif particle == 'K':
pass_cut = (prob[3]>prob[2]) and (prob[3]>prob[4])
elif particle == 'p':
pass_cut = (prob[4]>prob[2]) and (prob[4]>prob[3])
        else:
            print('wrong particle name:', particle)
            sys.exit()
if pass_cut:
h_p_pass .Fill(ptrk ,we)
h_costheta_pass.Fill(costheta,we)
h_theta_pass.Fill(math.acos(costheta)*180/math.pi,we)
########################################3
h_ndedx.Fill(ndedxhit, we)
h_usedhit.Fill(usedhit, we)
h_dEdx_meas.Fill(dEdx_meas, we)
for n in range(len(dEdx_hit)):
h_dedx .Fill(dEdx_hit[n], we)
h_dedx_p .Fill(dEdx_hit[n], ptrk, we)
h_dedx_theta.Fill(dEdx_hit[n], math.acos(costheta)*180/math.pi, we)
for ip in range(len(p_bin)-1):
if ptrk > p_bin[ip] and ptrk < p_bin[ip+1]:
for n in range(len(dEdx_hit)):
h_p_list[ip].Fill(dEdx_hit[n], we)
break
'''
for it in range(len(theta_bin)-1):
if tmp_theta > theta_bin[it] and tmp_theta < theta_bin[it+1]:
h_theta_list[it].Fill(dEdx_hit[n], we)
break
'''
'''
for ip in range(len(p_bin)-1):
if ptrk > p_bin[ip] and ptrk < p_bin[ip+1]:
h_dEdx_meas_p_list[ip].Fill(dEdx_meas, we)
break
'''
for ip in h_hit_dedx:
tmp_0 = ip.split('__')[0]
tmp_1 = ip.split('__')[1]
p_min = float( tmp_0.split('_')[0] )
p_max = float( tmp_0.split('_')[1] )
theta_min = float( tmp_1.split('_')[0] )
theta_max = float( tmp_1.split('_')[1] )
if p_min < ptrk and ptrk < p_max and theta_min < costheta and costheta < theta_max:
for n in range(len(dEdx_hit)):
h_hit_dedx[ip].Fill(dEdx_hit[n], we)
break
########################################3
h_prob_e .Fill(prob[0], we)
h_prob_mu.Fill(prob[1], we)
h_prob_pi.Fill(prob[2], we)
h_prob_k .Fill(prob[3], we)
h_prob_p .Fill(prob[4], we)
h_chi_e .Fill(chi [0], we)
h_chi_mu .Fill(chi [1], we)
h_chi_pi .Fill(chi [2], we)
h_chi_k .Fill(chi [3], we)
h_chi_p .Fill(chi [4], we)
h_prob_list = [h_prob_e, h_prob_mu, h_prob_pi, h_prob_k, h_prob_p]
h_chi_list = [h_chi_e , h_chi_mu , h_chi_pi , h_chi_k , h_chi_p ]
p_eff = TGraphAsymmErrors()
p_eff.Divide(h_p_pass, h_p_all,"cl=0.683 b(1,1) mode")
costheta_eff = TGraphAsymmErrors()
costheta_eff.Divide(h_costheta_pass, h_costheta_all,"cl=0.683 b(1,1) mode")
theta_eff = TGraphAsymmErrors()
theta_eff.Divide(h_theta_pass, h_theta_all,"cl=0.683 b(1,1) mode")
#return (h_p_pass, h_p_all, p_eff, h_costheta_pass, h_costheta_all, costheta_eff, h_theta_pass, h_theta_all, theta_eff)
return (h_p_pass, h_p_all, p_eff, h_costheta_pass, h_costheta_all, costheta_eff, h_theta_pass, h_theta_all, theta_eff, [h_dedx, h_dedx_p, h_dedx_theta], h_p_list, h_theta_list, h_ndedx, h_usedhit, h_dEdx_meas, h_prob_list, h_chi_list, h_dEdx_meas_p_list, h_hit_dedx, h2_dedx_mean_std)
def do_plot_h3(Type, h1, h2, h3, out_name, title, plot_label):
canvas=TCanvas("%s"%(out_name),"",800,800)
canvas.cd()
canvas.SetTopMargin(0.13)
canvas.SetBottomMargin(0.12)
canvas.SetLeftMargin(0.15)
canvas.SetRightMargin(0.15)
if Type == 2:
h1.GetYaxis().SetTitle(title['Y'])
h1.GetXaxis().SetTitle(title['X'])
h1.SetStats(ROOT.kFALSE)
h1.Draw("COLZ")
canvas.SaveAs("%s/data_%s.png"%(plot_path,out_name))
h2.GetYaxis().SetTitle(title['Y'])
h2.GetXaxis().SetTitle(title['X'])
h2.SetStats(ROOT.kFALSE)
h2.Draw("COLZ")
canvas.SaveAs("%s/mc_off_%s.png"%(plot_path,out_name))
h3.GetYaxis().SetTitle(title['Y'])
h3.GetXaxis().SetTitle(title['X'])
h3.SetStats(ROOT.kFALSE)
h3.Draw("COLZ")
canvas.SaveAs("%s/mc_NN_%s.png"%(plot_path,out_name))
return 0
elif Type == 22:
canvas=TCanvas("%s"%(out_name),"",1600,800)
canvas.cd()
canvas.SetTopMargin(0.13)
canvas.SetBottomMargin(0.12)
canvas.SetLeftMargin(0.15)
canvas.SetRightMargin(0.15)
h1.GetYaxis().SetTitle(title['Y'])
h1.GetXaxis().SetTitle(title['X'])
h1.SetStats(ROOT.kFALSE)
h1.Draw("COLZ TEXTE")
canvas.SaveAs("%s/%s.png"%(plot_path,out_name))
return 0
elif Type == 33:
canvas=TCanvas("%s"%(out_name),"",1600,800)
canvas.cd()
canvas.SetTopMargin(0.13)
canvas.SetBottomMargin(0.12)
canvas.SetLeftMargin(0.15)
canvas.SetRightMargin(0.15)
h1.GetYaxis().SetTitle(title['Y'])
h1.GetXaxis().SetTitle(title['X'])
h1.SetStats(ROOT.kFALSE)
h1.Draw("COLZ")
canvas.SaveAs("%s/data_%s.png"%(plot_path,out_name))
h2.GetYaxis().SetTitle(title['Y'])
h2.GetXaxis().SetTitle(title['X'])
h2.SetStats(ROOT.kFALSE)
h2.Draw("COLZ")
canvas.SaveAs("%s/mc_off_%s.png"%(plot_path,out_name))
h3.GetYaxis().SetTitle(title['Y'])
h3.GetXaxis().SetTitle(title['X'])
h3.SetStats(ROOT.kFALSE)
h3.Draw("COLZ")
canvas.SaveAs("%s/mc_NN_%s.png"%(plot_path,out_name))
return 0
# h1.Scale(1/h1.GetSumOfWeights())
# h2.Scale(1/h2.GetSumOfWeights())
x_min=h1.GetXaxis().GetXmin()
x_max=h1.GetXaxis().GetXmax()
y_min=0
y_max=1.5
if Type == 1 or Type == 3:
y_max=1.5*h1.GetBinContent(h1.GetMaximumBin())
if 'eff_p' in out_name:
#y_min=0.9
y_min=0.84
elif 'eff_phi' in out_name:
y_min=0.9
elif 'eff_costheta' in out_name:
y_min=0.9
elif 'E_mean' in out_name:
y_min=-0.04
y_max= 0.01
elif 'E_std' in out_name:
y_min= 0.01
y_max= 0.08
elif 'Phi_mean' in out_name:
y_min= 0.0
y_max= 0.008
elif 'Phi_std' in out_name:
y_min= 0.01
y_max= 0.04
elif 'Theta_mean' in out_name:
y_min= -0.01
y_max= 0.015
elif 'Theta_std' in out_name:
y_min= 0.006
y_max= 0.02
if "logx" in out_name:
canvas.SetLogx()
if "logy" in out_name:
canvas.SetLogy()
y_min = 1e-3
dummy = ROOT.TH2D("dummy","",1,x_min,x_max,1,y_min,y_max)
dummy.SetStats(ROOT.kFALSE)
dummy.GetYaxis().SetTitle(title['Y'])
dummy.GetXaxis().SetTitle(title['X'])
dummy.GetYaxis().SetTitleSize(0.04)
dummy.GetXaxis().SetTitleSize(0.04)
dummy.GetYaxis().SetLabelSize(0.04)
dummy.GetXaxis().SetLabelSize(0.04)
dummy.GetYaxis().SetTitleOffset(1.8)
dummy.GetXaxis().SetTitleOffset(1.2)
dummy.GetXaxis().SetMoreLogLabels()
dummy.GetXaxis().SetTitleFont(42)
dummy.GetXaxis().SetLabelFont(42)
dummy.GetYaxis().SetTitleFont(42)
dummy.GetYaxis().SetLabelFont(42)
h1 .SetLineWidth(2)
h2 .SetLineWidth(2)
h3 .SetLineWidth(2)
h3 .SetLineColor(ROOT.kRed)
h2 .SetLineColor(ROOT.kBlue)
h1 .SetLineColor(ROOT.kBlack)
h3 .SetMarkerColor(ROOT.kRed)
h2 .SetMarkerColor(ROOT.kBlue)
h1 .SetMarkerColor(ROOT.kBlack)
#h1 .SetMarkerStyle(20)
#h2 .SetMarkerStyle(21)
h1 .SetMarkerStyle(24)
h2 .SetMarkerStyle(24)
h3 .SetMarkerStyle(24)
hit_dedx_mean_std = []
if Type == 1:
dummy.Draw()
h1.Draw("same:pe")
h2.Draw("same:pe")
h3.Draw("same:pe")
dummy.Draw("AXISSAME")
elif Type == 3:
dummy.Draw()
h1.Draw("same:pe")
h2.Draw("same:pe")
h3.Draw("same:pe")
dummy.Draw("AXISSAME")
color_list = [2,3,4]
hist_list = [h1, h2, h3]
for hist in hist_list:
if hist.GetSumOfWeights() < 100:
hit_dedx_mean_std.append(0)
hit_dedx_mean_std.append(0)
hit_dedx_mean_std.append(0)
hit_dedx_mean_std.append(0)
continue
mean = hist.GetMean()
rms = hist.GetRMS()
lower = mean - 2*rms
higher = mean + 2*rms
f1 = ROOT.TF1("f1", "gaus", lower, higher)
f1.SetParameters(600.0, 200.0)
result = hist.Fit('f1','RLS0')
status = result.Status()
if status == 0:
print(result)
par0 = result.Parameter(0)
err0 = result.ParError(0)
par1 = result.Parameter(1)
err1 = result.ParError(1)
par2 = result.Parameter(2)
err2 = result.ParError (2)
######### do it again ######
mean = par1
rms = par2
lower = mean - 1.5*rms
higher = mean + 1*rms
f2 = ROOT.TF1("f2", "gaus", lower, higher)
f2.SetParameters(par1, par2)
#f1.SetLineColor(color_list[hist_list.index(hist)])
f2.SetLineColor(hist.GetLineColor())
result = hist.Fit('f2','RLS')
print('fit 1 for %s with entry %f mean=%f, rms=%f, lower=%f, higher=%f'%(hist.GetName(), hist.GetSumOfWeights(), hist.GetMean(), hist.GetRMS(), lower, higher))
status = result.Status()
if status == 0:
par0 = result.Parameter(0)
err0 = result.ParError(0)
par1 = result.Parameter(1)
err1 = result.ParError(1)
par2 = result.Parameter(2)
err2 = result.ParError (2)
hit_dedx_mean_std.append(par1)
hit_dedx_mean_std.append(err1)
hit_dedx_mean_std.append(par2)
hit_dedx_mean_std.append(err2)
#print('%s:mean=%f,mean err=%f,sigma=%f, sigma err=%f, status=%d'%(hist.GetName(), par1, err1, par2, err2, status))
else:
print('failed fit 1 for %s with entry %f, lower=%f, higher=%f'%(hist.GetName(), hist.GetSumOfWeights(), lower, higher))
else:
print('failed fit 0 for %s with entry %f'%(hist.GetName(), hist.GetSumOfWeights()))
else:
dummy.Draw("AXIS")
h1.Draw("pe")
h2.Draw("pe")
h3.Draw("pe")
#x_l = 0.6
x_l = 0.2
x_dl = 0.25
y_h = 0.85
y_dh = 0.15
if 'h_costheta_all' in out_name or 'h_costheta_pass' in out_name:
x_l = 0.55
y_h = 0.85
elif 'h_theta_all' in out_name or 'h_theta_pass' in out_name:
x_l = 0.55
y_h = 0.85
elif '_chi_' in out_name:
x_l = 0.55
y_h = 0.85
legend = TLegend(x_l,y_h-y_dh,x_l+x_dl,y_h)
legend.AddEntry(h1 ,'data','lep')
legend.AddEntry(h2 ,"mc official",'lep')
legend.AddEntry(h3 ,"mc NN",'lep')
legend.SetBorderSize(0)
#legend.SetLineColor(1)
#legend.SetLineStyle(1)
#legend.SetLineWidth(1)
#legend.SetFillColor(19)
legend.SetFillStyle(0)
legend.SetTextSize(0.05)
legend.SetTextFont(42)
legend.Draw()
label = ROOT.TLatex(0.5 , 0.9, plot_label)
label.SetTextAlign(32)
label.SetTextSize(0.035)
label.SetNDC(ROOT.kTRUE)
label.Draw()
label1 = None
label2 = None
label3 = None
if Type == 3:
label1 = ROOT.TLatex(0.8 , 0.94, "data:mean=%.1f#pm%.1f,#sigma=%.1f#pm%.1f"%(hit_dedx_mean_std[0], hit_dedx_mean_std[1], hit_dedx_mean_std[2], hit_dedx_mean_std[3]))
label1.SetTextAlign(32)
label1.SetTextSize(0.035)
label1.SetNDC(ROOT.kTRUE)
label1.Draw()
label2 = ROOT.TLatex(0.8 , 0.91, "mc official:mean=%.1f#pm%.1f,#sigma=%.1f#pm%.1f"%(hit_dedx_mean_std[4], hit_dedx_mean_std[5], hit_dedx_mean_std[6], hit_dedx_mean_std[7]))
label2.SetTextAlign(32)
label2.SetTextSize(0.035)
label2.SetNDC(ROOT.kTRUE)
label2.Draw()
label3 = ROOT.TLatex(0.8 , 0.88, "mc NN:mean=%.1f#pm%.1f,#sigma=%.1f#pm%.1f"%(hit_dedx_mean_std[8], hit_dedx_mean_std[9], hit_dedx_mean_std[10], hit_dedx_mean_std[11]))
label3.SetTextAlign(32)
label3.SetTextSize(0.035)
label3.SetNDC(ROOT.kTRUE)
label3.Draw()
canvas.SaveAs("%s/%s.png"%(plot_path,out_name))
del canvas
gc.collect()
return hit_dedx_mean_std
def ana(particle, cg, mom_min, mom_max, costheta_min, costheta_max):
out_tag = "_%s_%s_%s_%s_%s_%s"%(str(particle), str(cg), str(mom_min), str(mom_max), str(costheta_min), str(costheta_max))
f_data, f_off, f_NN = data_path(particle)
off_map = make_weight_map(f_data, f_off, cg, 'off', mom_min, mom_max, costheta_min, costheta_max)
NN_map = make_weight_map(f_data, f_NN , cg, 'NN' , mom_min, mom_max, costheta_min, costheta_max)
h_p_pass_data, h_p_all_data, p_eff_data, h_costheta_pass_data, h_costheta_all_data, costheta_eff_data, h_theta_pass_data, h_theta_all_data, theta_eff_data, hlist0_data, hlist1_data, hlist2_data, hndedx_data,h_usedhit_data, h_dEdx_meas_data, h_prob_list_data, h_chi_list_data, h_dEdx_meas_list_data, h2_hit_dedx_dict_data, h2_dedx_mean_std_data = process(particle, f_data, cg, 'data',off_map, mom_min, mom_max, costheta_min, costheta_max)
h_p_pass_off , h_p_all_off , p_eff_off , h_costheta_pass_off , h_costheta_all_off , costheta_eff_off , h_theta_pass_off , h_theta_all_off , theta_eff_off , hlist0_off , hlist1_off , hlist2_off , hndedx_off ,h_usedhit_off , h_dEdx_meas_off , h_prob_list_off, h_chi_list_off , h_dEdx_meas_list_off , h2_hit_dedx_dict_off , h2_dedx_mean_std_off = process(particle, f_off , cg, 'off' ,off_map, mom_min, mom_max, costheta_min, costheta_max)
h_p_pass_NN , h_p_all_NN , p_eff_NN , h_costheta_pass_NN , h_costheta_all_NN , costheta_eff_NN , h_theta_pass_NN , h_theta_all_NN , theta_eff_NN , hlist0_NN , hlist1_NN , hlist2_NN , hndedx_NN ,h_usedhit_NN , h_dEdx_meas_NN , h_prob_list_NN, h_chi_list_NN , h_dEdx_meas_list_NN , h2_hit_dedx_dict_NN , h2_dedx_mean_std_NN = process(particle, f_NN , cg, 'NN' ,NN_map, mom_min, mom_max, costheta_min, costheta_max)
do_plot_h3(Type=1, h1=h_p_pass_data, h2=h_p_pass_off, h3=h_p_pass_NN , out_name="h_p_pass_%s" %out_tag, title={'X':'P (GeV)','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_p_all_data , h2=h_p_all_off , h3=h_p_all_NN , out_name="h_p_all_%s" %out_tag, title={'X':'P (GeV)','Y':'Entries'}, plot_label='')
do_plot_h3(Type=0, h1=p_eff_data , h2=p_eff_off , h3=p_eff_NN , out_name="h_p_eff_%s" %out_tag, title={'X':'P (GeV)','Y':'pid eff'}, plot_label='')
do_plot_h3(Type=1, h1=h_costheta_pass_data, h2=h_costheta_pass_off, h3=h_costheta_pass_NN, out_name="h_costheta_pass_%s"%out_tag, title={'X':'cos#theta','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_costheta_all_data , h2=h_costheta_all_off , h3=h_costheta_all_NN , out_name="h_costheta_all_%s" %out_tag, title={'X':'cos#theta','Y':'Entries'}, plot_label='')
do_plot_h3(Type=0, h1=costheta_eff_data , h2=costheta_eff_off , h3=costheta_eff_NN , out_name="h_costheta_eff_%s" %out_tag, title={'X':'cos#theta','Y':'pid eff'}, plot_label='')
do_plot_h3(Type=1, h1=h_theta_pass_data, h2=h_theta_pass_off, h3=h_theta_pass_NN , out_name="h_theta_pass_%s" %out_tag, title={'X':'#theta','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_theta_all_data , h2=h_theta_all_off , h3=h_theta_all_NN , out_name="h_theta_all_%s" %out_tag, title={'X':'#theta','Y':'Entries'}, plot_label='')
do_plot_h3(Type=0, h1=theta_eff_data , h2=theta_eff_off , h3=theta_eff_NN , out_name="h_theta_eff_%s" %out_tag, title={'X':'#theta','Y':'pid eff'}, plot_label='')
do_plot_h3(Type=1, h1=hndedx_data , h2=hndedx_off , h3=hndedx_NN , out_name="h_N_dedx_%s" %out_tag, title={'X':'N dedx','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_usedhit_data, h2=h_usedhit_off, h3=h_usedhit_NN , out_name="h_N_usedhit_%s" %out_tag, title={'X':'N usedhit','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_dEdx_meas_data, h2=h_dEdx_meas_off, h3=h_dEdx_meas_NN , out_name="h_dEdx_meas_%s" %out_tag, title={'X':'dEdx_meas','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=hlist0_data[0], h2=hlist0_off[0], h3=hlist0_NN[0] , out_name="h_hit_dedx_%s" %out_tag, title={'X':'Dedx','Y':'Entries'}, plot_label='')
do_plot_h3(Type=2, h1=hlist0_data[1], h2=hlist0_off[1], h3=hlist0_NN[1] , out_name="h_hit_dedx_p_%s" %out_tag, title={'X':'Dedx','Y':'ptrk (GeV)'}, plot_label='')
do_plot_h3(Type=2, h1=hlist0_data[2], h2=hlist0_off[2], h3=hlist0_NN[2] , out_name="h_hit_dedx_theta_%s"%out_tag, title={'X':'Dedx','Y':'#theta trk (degree)'}, plot_label='')
for i in range(len(hlist1_data)):
h_p_data = hlist1_data[i]
h_p_off = hlist1_off [i]
h_p_NN = hlist1_NN [i]
tmp_str = h_p_data.GetName().split('__')[1]
do_plot_h3(Type=1, h1=h_p_data, h2=h_p_off, h3=h_p_NN, out_name="h_hit_dedx_%s_%s"%(tmp_str,out_tag) , title={'X':'Dedx','Y':'Entries'}, plot_label='')
for i in range(len(hlist2_data)):
h_p_data = hlist2_data[i]
h_p_off = hlist2_off [i]
h_p_NN = hlist2_NN [i]
tmp_str = h_p_data.GetName().split('__')[1]
do_plot_h3(Type=1, h1=h_p_data, h2=h_p_off, h3=h_p_NN, out_name="h_hit_dedx_%s_%s"%(tmp_str,out_tag) , title={'X':'Dedx','Y':'Entries'}, plot_label='')
for i in range(len(h_dEdx_meas_list_data)):
h_p_data = h_dEdx_meas_list_data[i]
h_p_off = h_dEdx_meas_list_off [i]
h_p_NN = h_dEdx_meas_list_NN [i]
tmp_str = h_p_data.GetName().split('__')[1]
do_plot_h3(Type=1, h1=h_p_data, h2=h_p_off, h3=h_p_NN, out_name="h_%s_%s"%(tmp_str,out_tag) , title={'X':'Dedx','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_prob_list_data[0], h2=h_prob_list_off[0], h3=h_prob_list_NN[0] , out_name="h_prob_e_%s" %out_tag, title={'X':'prob e','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_prob_list_data[1], h2=h_prob_list_off[1], h3=h_prob_list_NN[1] , out_name="h_prob_mu_%s" %out_tag, title={'X':'prob #mu','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_prob_list_data[2], h2=h_prob_list_off[2], h3=h_prob_list_NN[2] , out_name="h_prob_pi_%s" %out_tag, title={'X':'prob #pi','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_prob_list_data[3], h2=h_prob_list_off[3], h3=h_prob_list_NN[3] , out_name="h_prob_k_%s" %out_tag, title={'X':'prob k','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_prob_list_data[4], h2=h_prob_list_off[4], h3=h_prob_list_NN[4] , out_name="h_prob_p_%s" %out_tag, title={'X':'prob p','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_chi_list_data[0], h2=h_chi_list_off[0], h3=h_chi_list_NN[0] , out_name="h_chi_e_%s" %out_tag, title={'X':'#chi e','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_chi_list_data[1], h2=h_chi_list_off[1], h3=h_chi_list_NN[1] , out_name="h_chi_mu_%s" %out_tag, title={'X':'#chi #mu','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_chi_list_data[2], h2=h_chi_list_off[2], h3=h_chi_list_NN[2] , out_name="h_chi_pi_%s" %out_tag, title={'X':'#chi #pi','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_chi_list_data[3], h2=h_chi_list_off[3], h3=h_chi_list_NN[3] , out_name="h_chi_k_%s" %out_tag, title={'X':'#chi k','Y':'Entries'}, plot_label='')
do_plot_h3(Type=1, h1=h_chi_list_data[4], h2=h_chi_list_off[4], h3=h_chi_list_NN[4] , out_name="h_chi_p_%s" %out_tag, title={'X':'#chi p','Y':'Entries'}, plot_label='')
costheta_axis = h2_dedx_mean_std_data[0].GetXaxis()
mom_axis = h2_dedx_mean_std_data[0].GetYaxis()
h2_dedx_mean_rel1 = h2_dedx_mean_std_data[0].Clone('h2_dedx_mean_NN_data')
h2_dedx_std_rel1 = h2_dedx_mean_std_data[1].Clone('h2_dedx_std_NN_data')
h2_dedx_mean_rel2 = h2_dedx_mean_std_data[0].Clone('h2_dedx_mean_off_data')
h2_dedx_std_rel2 = h2_dedx_mean_std_data[1].Clone('h2_dedx_std_off_data')
h2_dedx_mean_rel1.Scale(0)
h2_dedx_std_rel1 .Scale(0)
h2_dedx_mean_rel2.Scale(0)
h2_dedx_std_rel2 .Scale(0)
for ih in h2_hit_dedx_dict_data:
para_list = do_plot_h3(Type=3, h1=h2_hit_dedx_dict_data[ih], h2=h2_hit_dedx_dict_off[ih], h3=h2_hit_dedx_dict_NN[ih], out_name="h_%s_hit_dedx"%ih, title={'X':'hit Dedx','Y':'Entries'}, plot_label='')
tmp_p_min = float( (ih.split('__')[0]).split('_')[0] )
tmp_p_max = float( (ih.split('__')[0]).split('_')[1] )
tmp_costheta_min = float( (ih.split('__')[1]).split('_')[0] )
tmp_costheta_max = float( (ih.split('__')[1]).split('_')[1] )
tmp_binx = costheta_axis.FindBin(0.5*tmp_costheta_min+0.5*tmp_costheta_max)
tmp_biny = mom_axis.FindBin(0.5*tmp_p_min+0.5*tmp_p_max)
h2_dedx_mean_std_data[0].SetBinContent(tmp_binx, tmp_biny, para_list[0])
h2_dedx_mean_std_data[0].SetBinError (tmp_binx, tmp_biny, para_list[1])
h2_dedx_mean_std_data[1].SetBinContent(tmp_binx, tmp_biny, para_list[2])
h2_dedx_mean_std_data[1].SetBinError (tmp_binx, tmp_biny, para_list[3])
h2_dedx_mean_std_off [0].SetBinContent(tmp_binx, tmp_biny, para_list[4])
h2_dedx_mean_std_off [0].SetBinError (tmp_binx, tmp_biny, para_list[5])
h2_dedx_mean_std_off [1].SetBinContent(tmp_binx, tmp_biny, para_list[6])
h2_dedx_mean_std_off [1].SetBinError (tmp_binx, tmp_biny, para_list[7])
h2_dedx_mean_std_NN [0].SetBinContent(tmp_binx, tmp_biny, para_list[8])
h2_dedx_mean_std_NN [0].SetBinError (tmp_binx, tmp_biny, para_list[9])
h2_dedx_mean_std_NN [1].SetBinContent(tmp_binx, tmp_biny, para_list[10])
h2_dedx_mean_std_NN [1].SetBinError (tmp_binx, tmp_biny, para_list[11])
mean_rel1 = abs(para_list[8] - para_list[0]) / para_list[0] if para_list[0] != 0 else 0
mean_rel1_err = math.sqrt(para_list[0]*para_list[0]*para_list[9]*para_list[9] + para_list[8]*para_list[8]*para_list[1]*para_list[1])/(para_list[0]*para_list[0]) if para_list[0] != 0 else 0
std_rel1 = abs(para_list[10] - para_list[2]) / para_list[2] if para_list[2] != 0 else 0
std_rel1_err = math.sqrt(para_list[2]*para_list[2]*para_list[11]*para_list[11] + para_list[10]*para_list[10]*para_list[3]*para_list[3])/(para_list[2]*para_list[2]) if para_list[2] != 0 else 0
mean_rel2 = abs(para_list[4] - para_list[0]) / para_list[0] if para_list[0] != 0 else 0
mean_rel2_err = math.sqrt(para_list[0]*para_list[0]*para_list[5]*para_list[5] + para_list[4]*para_list[4]*para_list[1]*para_list[1])/(para_list[0]*para_list[0]) if para_list[0] != 0 else 0
std_rel2 = abs(para_list[6] - para_list[2]) / para_list[2] if para_list[2] != 0 else 0
std_rel2_err = math.sqrt(para_list[2]*para_list[2]*para_list[7]*para_list[7] + para_list[6]*para_list[6]*para_list[3]*para_list[3])/(para_list[2]*para_list[2]) if para_list[2] != 0 else 0
h2_dedx_mean_rel1.SetBinContent(tmp_binx, tmp_biny, mean_rel1)
h2_dedx_mean_rel1.SetBinError (tmp_binx, tmp_biny, mean_rel1_err)
h2_dedx_std_rel1 .SetBinContent(tmp_binx, tmp_biny, std_rel1)
h2_dedx_std_rel1 .SetBinError (tmp_binx, tmp_biny, std_rel1_err)
h2_dedx_mean_rel2.SetBinContent(tmp_binx, tmp_biny, mean_rel2)
h2_dedx_mean_rel2.SetBinError (tmp_binx, tmp_biny, mean_rel2_err)
h2_dedx_std_rel2 .SetBinContent(tmp_binx, tmp_biny, std_rel2)
h2_dedx_std_rel2 .SetBinError (tmp_binx, tmp_biny, std_rel2_err)
do_plot_h3(Type=33 , h1=h2_dedx_mean_std_data[0], h2=h2_dedx_mean_std_off[0], h3=h2_dedx_mean_std_NN[0] , out_name="h_hit_dedx_mean", title={'X':'cos#theta','Y':'ptrk (GeV)'}, plot_label='')
do_plot_h3(Type=33 , h1=h2_dedx_mean_std_data[1], h2=h2_dedx_mean_std_off[1], h3=h2_dedx_mean_std_NN[1] , out_name="h_hit_dedx_std" , title={'X':'cos#theta','Y':'ptrk (GeV)'}, plot_label='')
do_plot_h3(Type=22, h1=h2_dedx_mean_rel1 , h2=h2_dedx_mean_rel1 , h3=h2_dedx_mean_rel1 , out_name="h_hit_dedx_mean_rel_NN_data", title={'X':'cos#theta','Y':'ptrk (GeV)'}, plot_label='')
do_plot_h3(Type=22, h1=h2_dedx_std_rel1 , h2=h2_dedx_std_rel1 , h3=h2_dedx_std_rel1 , out_name="h_hit_dedx_std_rel_NN_data" , title={'X':'cos#theta','Y':'ptrk (GeV)'}, plot_label='')
do_plot_h3(Type=22, h1=h2_dedx_mean_rel2 , h2=h2_dedx_mean_rel2 , h3=h2_dedx_mean_rel2 , out_name="h_hit_dedx_mean_rel_off_data", title={'X':'cos#theta','Y':'ptrk (GeV)'}, plot_label='')
do_plot_h3(Type=22, h1=h2_dedx_std_rel2 , h2=h2_dedx_std_rel2 , h3=h2_dedx_std_rel2 , out_name="h_hit_dedx_std_rel_off_data" , title={'X':'cos#theta','Y':'ptrk (GeV)'}, plot_label='')
def plot(var, particle, ptrk_min, ptrk_max):
try:
chain = TChain('n103')
chain.Add(data_path(particle))
except:
logging.error(data_path(particle) + ' is invalid!')
sys.exit()
if var == 'costheta' or var == 'phi':
cut_ptrkp = '('+ptrk_min+'<ptrk<'+ptrk_max+')&&'
cut_ptrkm = '('+ptrk_min+'<ptrk<'+ptrk_max+')&&'
elif var == 'ptrk':
cut_ptrkp = ''
cut_ptrkm = ''
mbc = TCanvas('mbc', 'mbc')
set_canvas_style(mbc)
mbc.Divide(2, 3)
xmin, xmax, xbins = pid_eff(var)
h_plus_nume = TH1F('h_plus_nume', ' ', xbins, xmin, xmax)
h_plus_deno = TH1F('h_plus_deno', ' ', xbins, xmin, xmax)
h_minus_nume = TH1F('h_minus_nume', ' ', xbins, xmin, xmax)
h_minus_deno = TH1F('h_minus_deno', ' ', xbins, xmin, xmax)
if particle == 'e':
cut_prob = '(prob[0]>prob[1]&&prob[0]>prob[2]&&prob[0]>prob[3]&&prob[0]>prob[4])'
if particle == 'mu':
cut_prob = '(prob[1]>prob[0]&&prob[1]>prob[2]&&prob[1]>prob[3]&&prob[1]>prob[4])'
if particle == 'pi':
cut_prob = '(prob[2]>prob[3]&&prob[2]>prob[4])'
if particle == 'K':
cut_prob = '(prob[3]>prob[2]&&prob[3]>prob[4])'
if particle == 'p':
cut_prob = '(prob[4]>prob[2]&&prob[4]>prob[3])'
mbc.cd(1)
chain.Draw(var + '>>h_plus_nume', cut_ptrkp + '(charge==1)&&' + cut_prob)
mbc.cd(2)
chain.Draw(var + '>>h_plus_deno', cut_ptrkp + '(charge==1)')
mbc.cd(3)
chain.Draw(var + '>>h_minus_nume', cut_ptrkm + '(charge==-1)&&' + cut_prob)
mbc.cd(4)
chain.Draw(var + '>>h_minus_deno', cut_ptrkm + '(charge==-1)')
mbc.cd(5)
h_plus_nume.Sumw2()
h_plus_deno.Sumw2()
h_plus_pid_eff = TH1F('h_plus_eff', ' ', xbins, xmin, xmax)
h_plus_pid_eff.Divide(h_plus_nume, h_plus_deno)
if particle == 'e':
frame1 = TH2F('frame1', 'Eff of e^{+} for data', xbins, xmin, xmax, 22, 0, 1.1)
if particle == 'mu':
frame1 = TH2F('frame1', 'Eff of #mu^{+} for data', xbins, xmin, xmax, 22, 0, 1.1)
if particle == 'pi':
frame1 = TH2F('frame1', 'Eff of #pi^{+} for data', xbins, xmin, xmax, 22, 0, 1.1)
elif particle == 'K':
frame1 = TH2F('frame1', 'Eff of K^{+} for data', xbins, xmin, xmax, 22, 0, 1.1)
elif particle == 'p':
frame1 = TH2F('frame1', 'Eff of p for data', xbins, xmin, xmax, 22, 0, 1.1)
frame1.Draw()
h_plus_pid_eff.SetLineColor(2)
h_plus_pid_eff.Draw('same')
mbc.cd(6)
h_minus_nume.Sumw2()
h_minus_deno.Sumw2()
h_minus_pid_eff = TH1F('h_minus_eff', ' ', xbins, xmin, xmax)
h_minus_pid_eff.Divide(h_minus_nume, h_minus_deno)
if particle == 'e':
frame2 = TH2F('frame2', 'Eff of e^{-} for data', xbins, xmin, xmax, 22, 0, 1.1)
if particle == 'mu':
frame2 = TH2F('frame2', 'Eff of #mu^{-} for data', xbins, xmin, xmax, 22, 0, 1.1)
if particle == 'pi':
frame2 = TH2F('frame2', 'Eff of #pi^{-} for data', xbins, xmin, xmax, 22, 0, 1.1)
elif particle == 'K':
frame2 = TH2F('frame2', 'Eff of K^{-} for data', xbins, xmin, xmax, 22, 0, 1.1)
elif particle == 'p':
frame2 = TH2F('frame2', 'Eff of #bar{p} for data', xbins, xmin, xmax, 22, 0, 1.1)
frame2.Draw()
h_minus_pid_eff.SetLineColor(2)
h_minus_pid_eff.Draw('same')
if not os.path.exists('./files/'):
os.makedirs('./files/')
out_root = TFile('./files/dedx_pideff_' + var + '_' + particle + '_data.root', 'RECREATE')
h_plus_pid_eff.Write()
h_minus_pid_eff.Write()
out_root.Write()
if not os.path.exists('./figs/'):
os.makedirs('./figs/')
mbc.SaveAs('./figs/pid_eff_' + var + '_' + particle + '.pdf')
def readTxt(path, out_name, title):
#h_dedx = TH1F('h_dedx' , ' ', 200, -10, 10)
#h_dedx_off = TH1F('h_dedx_off', ' ', 200, -10, 10)
h_dedx = TH1F('h_dedx' , ' ', 1000, -500, 3500)
h_dedx_off = TH1F('h_dedx_off', ' ', 1000, -500, 3500)
h_dedx_col = TH1F('h_dedx_col', ' ', 1000, -500, 3500)
h_dedx_ins = TH1F('h_dedx_ins', ' ', 1000, -500, 3500)
#h_ins_pdg = TH1F('h_ins_pdg', ' ', 500, 0, 500)
h_ins_pdg = TH1F('h_ins_pdg', ' ', 20, 0, 20)
#h_dedx = TH1F('h_dedx' , ' ', 1000, 0, 3000)
#h_dedx_off = TH1F('h_dedx_off', ' ', 1000, 0, 3000)
files = os.listdir(path)
for i in files:
if 'nohup.out' != i : continue
#if 'nohup.out' not in i or 'swp'in i: continue
#if 'nohup_NN.out' not in i : continue
#if '0.sh.out.' not in i: continue
#if '.out.' not in i: continue
print('%s/%s'%(path,i))
f_in = open('%s/%s'%(path,i),'r')
lines = f_in.readlines()
f_in.close()
for line in lines:
if "costheta" in line and 'ndedxhit' not in line:
str0 = line.split(',')
mom = float(str0[0].replace('mom=',''))
costheta = float(str0[1].replace('costheta=',''))
dedx = float(str0[2].replace('pred=',''))
#dedx = float(str0[2].replace('edep=',''))
dedx_off = float(str0[3].replace('off=',''))
pid = float(str0[4].replace('pdg=',''))
#if pid != 211: continue
#if mom <= 0: continue
#if mom > 0.5: continue
#if dedx > 0: continue
h_dedx .Fill(dedx)
h_dedx_off.Fill(dedx_off)
elif "energy deposit" in line:
if 'MeV' in line:
str0 = line.split('MeV')[0]
str1 = str0.split('energy deposit:')[-1]
h_dedx_col .Fill(float(str1))
elif 'GeV' in line:
str0 = line.split('GeV')[0]
str1 = str0.split('energy deposit:')[-1]
h_dedx_col .Fill(float(str1)*1000)
else:
print(line)
elif "insert:" in line :
str0 = line.split(',')
str1 = str0[0].replace('insert:edep=','')
str2 = str0[1].replace('pdg=','')
h_dedx_ins .Fill(float(str1))
h_ins_pdg .Fill(float(str2))
canvas=TCanvas("%s"%(out_name),"",800,800)
canvas.cd()
canvas.SetTopMargin(0.13)
canvas.SetBottomMargin(0.12)
canvas.SetLeftMargin(0.15)
canvas.SetRightMargin(0.15)
x_min=h_dedx.GetXaxis().GetXmin()
x_max=h_dedx.GetXaxis().GetXmax()
y_min=0
y_max=1.5*h_dedx.GetBinContent(h_dedx.GetMaximumBin())
y_max=12000
dummy = ROOT.TH2D("dummy","",1,x_min,x_max,1,y_min,y_max)
dummy.SetStats(ROOT.kFALSE)
dummy.GetYaxis().SetTitle(title['Y'])
dummy.GetXaxis().SetTitle(title['X'])
dummy.GetYaxis().SetTitleSize(0.04)
dummy.GetXaxis().SetTitleSize(0.04)
dummy.GetYaxis().SetLabelSize(0.04)
dummy.GetXaxis().SetLabelSize(0.04)
dummy.GetYaxis().SetTitleOffset(1.8)
dummy.GetXaxis().SetTitleOffset(1.2)
dummy.GetXaxis().SetMoreLogLabels()
dummy.GetXaxis().SetTitleFont(42)
dummy.GetXaxis().SetLabelFont(42)
dummy.GetYaxis().SetTitleFont(42)
dummy.GetYaxis().SetLabelFont(42)
h_dedx.SetLineWidth(2)
h_dedx.SetLineColor(ROOT.kBlack)
h_dedx.SetMarkerColor(ROOT.kBlack)
h_dedx.SetMarkerStyle(20)
h_dedx_off.SetLineWidth(2)
h_dedx_off.SetLineColor (ROOT.kRed)
h_dedx_off.SetMarkerColor(ROOT.kRed)
h_dedx_off.SetMarkerStyle(21)
h_dedx_col.SetLineWidth(2)
h_dedx_col.SetLineColor (ROOT.kBlue)
h_dedx_col.SetMarkerColor(ROOT.kBlue)
h_dedx_col.SetMarkerStyle(22)
h_dedx_ins.SetLineWidth(2)
h_dedx_ins.SetLineColor (ROOT.kGreen)
h_dedx_ins.SetMarkerColor(ROOT.kGreen)
h_dedx_ins.SetMarkerStyle(23)
dummy.Draw()
h_dedx.Draw("same:pe")
h_dedx_off.Draw("same:pe")
h_dedx_col.Draw("same:pe")
h_dedx_ins.Draw("same:pe")
dummy.Draw("AXISSAME")
x_l = 0.2
x_dl = 0.25
y_h = 0.85
y_dh = 0.15
legend = TLegend(x_l,y_h-y_dh,x_l+x_dl,y_h)
legend.AddEntry(h_dedx ,'NN' ,'lep')
legend.AddEntry(h_dedx_off ,"official",'lep')
legend.AddEntry(h_dedx_col ,"NN col",'lep')
legend.SetBorderSize(0)
legend.SetFillStyle(0)
legend.SetTextSize(0.05)
legend.SetTextFont(42)
legend.Draw()
canvas.SaveAs("%s/%s.png"%(plot_path,out_name))
h_ins_pdg.Draw()
canvas.SaveAs("%s/%s_pdg.png"%(plot_path,out_name))
del canvas
gc.collect()
def simpleAna(f1, f2, out_name, title):
h_dedx = TH1F('h_dedx' , ' ', 1000, -500, 3500)
h_dedx_off = TH1F('h_dedx_off', ' ', 1000, -500, 3500)
#h_dedx = TH1F('h_dedx' , ' ', 1000, 0, 3000)
#h_dedx_off = TH1F('h_dedx_off', ' ', 1000, 0, 3000)
chain1 = TChain('n103')
chain1.Add(f1)
for i in range(chain1.GetEntries()):
chain1.GetEntry(i)
ptrk = getattr(chain1, 'ptrk')
charge = getattr(chain1, 'charge')
costheta = getattr(chain1, 'costheta')
prob = getattr(chain1, 'prob')
dEdx_meas= getattr(chain1, 'dEdx_meas')
ndedxhit = getattr(chain1, 'ndedxhit')
dEdx_hit = getattr(chain1, 'dEdx_hit')
tmp_theta = math.acos(costheta)*180/math.pi
if ptrk > 0.5:continue
#if charge != cg:continue
#if len(prob) != 5:continue
for j in range(len(dEdx_hit)):
h_dedx.Fill(dEdx_hit[j])
chain2 = TChain('n103')
chain2.Add(f2)
for i in range(chain2.GetEntries()):
chain2.GetEntry(i)
ptrk = getattr(chain2, 'ptrk')
charge = getattr(chain2, 'charge')
costheta = getattr(chain2, 'costheta')
prob = getattr(chain2, 'prob')
dEdx_meas= getattr(chain2, 'dEdx_meas')
ndedxhit = getattr(chain2, 'ndedxhit')
dEdx_hit = getattr(chain2, 'dEdx_hit')
tmp_theta = math.acos(costheta)*180/math.pi
if ptrk > 0.5:continue
#if charge != cg:continue
#if len(prob) != 5:continue
for j in range(len(dEdx_hit)):
h_dedx_off.Fill(dEdx_hit[j])
canvas=TCanvas("%s"%(out_name),"",800,800)
canvas.cd()
canvas.SetTopMargin(0.13)
canvas.SetBottomMargin(0.12)
canvas.SetLeftMargin(0.15)
canvas.SetRightMargin(0.15)
x_min=h_dedx.GetXaxis().GetXmin()
x_max=h_dedx.GetXaxis().GetXmax()
y_min=0
y_max=1.5*h_dedx.GetBinContent(h_dedx.GetMaximumBin())
dummy = ROOT.TH2D("dummy","",1,x_min,x_max,1,y_min,y_max)
dummy.SetStats(ROOT.kFALSE)
dummy.GetYaxis().SetTitle(title['Y'])
dummy.GetXaxis().SetTitle(title['X'])
dummy.GetYaxis().SetTitleSize(0.04)
dummy.GetXaxis().SetTitleSize(0.04)
dummy.GetYaxis().SetLabelSize(0.04)
dummy.GetXaxis().SetLabelSize(0.04)
dummy.GetYaxis().SetTitleOffset(1.8)
dummy.GetXaxis().SetTitleOffset(1.2)
dummy.GetXaxis().SetMoreLogLabels()
dummy.GetXaxis().SetTitleFont(42)
dummy.GetXaxis().SetLabelFont(42)
dummy.GetYaxis().SetTitleFont(42)
dummy.GetYaxis().SetLabelFont(42)
h_dedx.SetLineWidth(2)
h_dedx.SetLineColor(ROOT.kBlack)
h_dedx.SetMarkerColor(ROOT.kBlack)
h_dedx.SetMarkerStyle(20)
h_dedx_off.SetLineWidth(2)
h_dedx_off.SetLineColor (ROOT.kRed)
h_dedx_off.SetMarkerColor(ROOT.kRed)
h_dedx_off.SetMarkerStyle(21)
dummy.Draw()
h_dedx.Draw("same:pe")
h_dedx_off.Draw("same:pe")
dummy.Draw("AXISSAME")
x_l = 0.2
x_dl = 0.25
y_h = 0.85
y_dh = 0.15
legend = TLegend(x_l,y_h-y_dh,x_l+x_dl,y_h)
legend.AddEntry(h_dedx ,'NN' ,'lep')
legend.AddEntry(h_dedx_off ,"official",'lep')
legend.SetBorderSize(0)
legend.SetFillStyle(0)
legend.SetTextSize(0.05)
legend.SetTextFont(42)
legend.Draw()
canvas.SaveAs("%s/%s.png"%(plot_path,out_name))
del canvas
gc.collect()
'''
if __name__ == '__main__':
args = sys.argv[1:]
if len(args)<4:
usage()
sys.exit()
var = args[0]
particle = args[1]
ptrk_min = args[2]
ptrk_max = args[3]
plot(var, particle, ptrk_min, ptrk_max)
'''
if __name__ == '__main__':
args = sys.argv[1:]
plot_path = './pid_plots/'
particle = args[0]
cg = args[1]
mom_min = float(args[2])
mom_max = float(args[3])
costheta_min = float(args[4])
costheta_max = float(args[5])
particle = particle.strip(' ')
cg = int(cg)
print('particle=',particle,',charge=',cg,',mom_min=',mom_min,',mom_max=',mom_max,',costheta_min=',costheta_min,',costheta_max=',costheta_max)
#readTxt(path='/junofs/users/wxfang/FastSim/bes3/workarea/TestRelease/TestRelease-00-00-86/DedxTest/sim/job_NN/k+/', out_name='sim_k+', title={'X':'dedx', 'Y':'Entries'})
#simpleAna(f1='/junofs/users/wxfang/FastSim/bes3/workarea/Mdc/DedxTrack/output_NN/Single_kp_NN_0.root', f2='/junofs/users/wxfang/FastSim/bes3/workarea/Mdc/DedxTrack/output_off/Single_k+_off_0.root', out_name='rec_k+', title={'X':'dedx', 'Y':'Entries'})
ana(particle, cg, mom_min, mom_max, costheta_min, costheta_max)
print('done')
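# Example invocation (hypothetical values) matching the positional arguments
# parsed above: particle, charge, mom_min, mom_max, costheta_min, costheta_max
#   python plot_pid_eff.py p 1 0.3 2.0 -0.8 0.8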
| [
"[email protected]"
] | |
bca600bfc1ded616304d1f4bf3ce2e519ef39d6f | 482c670a1885d4058909d817b1b20904eedb09c6 | /python_api_auto/request-demo/post_request.py | f7244d74286e0358f74171f174b146c51ddf2988 | [] | no_license | qiaoxu1123/python-auto | 13bddeafd3832dc9c28f39ab9fa2343d23fd3dd0 | 15bfc1aaedbbdf7a00f03cd3552ed8e7b30eabdc | refs/heads/master | 2020-04-02T16:10:15.862150 | 2018-11-04T07:27:51 | 2018-11-04T07:27:51 | 154,601,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,525 | py | import requests
import unittest
class Test_login(unittest.TestCase):
url = 'https://passport.womai.com/login/login.do'
headers = {'Accept': 'application/json, text/javascript, */*',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Connection': 'keep-alive',
'Content-Length': '177',
'Content-Type': 'application/x-www-form-urlencoded',
'Host': 'passport.womai.com',
               'Origin': 'https://passport.womai.com',
               'Referer': 'https://passport.womai.com/redirect/redirect.do?mid=0&returnUrl=http%3',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.26 Safari/537.36 Core/1.63.6756.400 QQBrowser/10.3.2473.400',
'X-Requested-With': 'XMLHttpRequest'}
payload = {'serverPath': 'http://www.womai.com/',
'loginId': 'qiaoxu1123',
'password': 'haotest2018',
'validateCode': '',
'tempcode': '',
'mid': '0',
'returnUrl': 'http://www.womai.com/index-31000-0.htm'}
def test_login(self):
response = requests.post(self.url, headers=self.headers, data=self.payload)
json_data = response.json()
print(json_data)
        # Assertion: compare the actual result with the expected result
self.assertEqual('2', json_data['msg'])
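        # A slightly more defensive variant of the request above (sketch only):
        # check the HTTP status code before decoding the JSON body.
        #
        #   response = requests.post(self.url, headers=self.headers, data=self.payload)
        #   self.assertEqual(200, response.status_code)
        #   json_data = response.json()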
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
] | |
db8d9e2cf111fa8e3a2526c2bfef7a8f4dc10a02 | 3c8f9251b0bf6e9247a07fe81ccd8df75d982580 | /MeClass5/question2c.py | 43bc69baeb6fc65b861db9cd6f8c5a87693f04d7 | [
"Apache-2.0"
] | permissive | carrascodc/pyneta | 2f70c25261264d930767533593908e584e36b23e | 857c0279d6c567d8855fedde2883603d7c70b50b | refs/heads/master | 2020-04-18T10:59:51.614601 | 2020-02-28T01:43:18 | 2020-02-28T01:43:18 | 167,485,054 | 0 | 0 | Apache-2.0 | 2019-01-26T22:34:10 | 2019-01-25T04:31:02 | Python | UTF-8 | Python | false | false | 3,205 | py | from jinja2 import FileSystemLoader, StrictUndefined
from jinja2.environment import Environment
from netmiko import ConnectHandler
from mydevices import nxos1, nxos2
from pprint import pprint
import textfsm
import time
import re
from colorama import Fore, Back, Style
env = Environment(undefined=StrictUndefined)
env.loader = FileSystemLoader("./templates/")
template_file = "question2.j2"
interface = "1"
nxos1_vars = {
"device_name": "nxos1",
"local_as": 22,
"interface": interface,
"ip_address": "10.1.100.1",
"netmask": "24"
}
nxos2_vars = {
"device_name": "nxos2",
"local_as": 22,
"interface": interface,
"ip_address": "10.1.100.2",
"netmask": "24"
}
nxos1_vars["peer_ip"] = nxos2_vars["ip_address"]
nxos2_vars["peer_ip"] = nxos1_vars["ip_address"]
# Add Jinja2 vars to be included in the Netmiko device dictionary
nxos1["j2_vars"] = nxos1_vars
nxos2["j2_vars"] = nxos2_vars
template = env.get_template(template_file)
def config():
for device in [nxos1,nxos2]:
### Pop the device dict 'j2_vars' to 'device_var',
### leaving 'device' with just the netmiko parameters
device_var = device.pop('j2_vars')
cfg = template.render(**device_var)
Node = {
"host": device['host'],
"username": device['username'],
"password": device['password'],
"device_type": device['device_type']
}
net_connect = ConnectHandler(**Node)
print(f"Updating {device['host']} ".center(80, "#"))
output = net_connect.send_config_set(cfg)
print('Completed' + '\n')
def verify():
for device in [nxos1,nxos2]:
Node = {
"host": device['host'],
"username": device['username'],
"password": device['password'],
"device_type": device['device_type']
}
net_connect = ConnectHandler(**Node)
raw_text_data = net_connect.send_command('show ip bgp sum')
net_connect.disconnect()
textfsm_file = "templates/question3.template"
textfsm_template = open(textfsm_file)
# with open("show_ip_bgp_sum.txt") as f:
# raw_text_data = f.read()
# The argument 'template' is a file handle and 'raw_text_data' is a string.
re_table = textfsm.TextFSM(textfsm_template)
bgp_status = re_table.ParseText(raw_text_data)[0][0]
bgp_state = re_table.ParseText(raw_text_data)[0][1]
textfsm_template.close()
### Regular expressions to match the bgp variables above
regex_status = re.compile(r'[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}')
regex_state = re.compile(r'\d+')
if regex_status.match(bgp_status) and regex_state.match(bgp_state):
''' These two conditions are to match
            - Whether or not there is a time counter
- Whether or not the bgp state is a number, and NOT a building bgp state
'''
print(f"BGP has been established on: {device['host']}")
else:
print(f"The current BGP State of {device['host']} is: {bgp_state}. Please review")
def run():
config()
time.sleep(15)
verify()
if __name__ == "__main__":
run()
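# For reference, a hypothetical "templates/question2.j2" consistent with the
# variables rendered above (interface, ip_address, netmask, local_as, peer_ip);
# the actual template in the repository may differ:
#
#   interface Ethernet1/{{ interface }}
#     no switchport
#     ip address {{ ip_address }}/{{ netmask }}
#     no shutdown
#   router bgp {{ local_as }}
#     neighbor {{ peer_ip }} remote-as {{ local_as }}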
| [
"[email protected]"
] | |
0fcff59e8e9a70815c8cbe655d66190841f3eb45 | a82418f3d62b944a27b6e9000829af54b7575893 | /psets_gensim_v1/2018/cfgs_update_noFilter/cfg_hiddenValleyGridPack_darkphoton_m_2_ctau_500_xiO_2p5.py | b520654fef0394c8e15ac2b1b4833d95d6de20db | [] | no_license | mcitron/hiddenValleyGeneration | abb347a30319ce5f230e0e1248a4259bf4cc4b1b | 5d165be91ae082fdba790506bfb11a026d602787 | refs/heads/master | 2023-04-08T13:34:56.835752 | 2021-04-28T17:14:46 | 2021-04-28T17:17:14 | 362,550,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,197 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: Configuration/GenProduction/python/cfgs_update_noFilter/hiddenValleyGridPack_darkphoton_m_2_ctau_500_xiO_2p5.py --python_filename cfg_hiddenValleyGridPack_darkphoton_m_2_ctau_500_xiO_2p5.py --eventcontent RAWSIM --customise Configuration/DataProcessing/Utils.addMonitoring --datatier GEN-SIM --fileout file:output.root --conditions 102X_upgrade2018_realistic_v11 --beamspot Realistic25ns13TeVEarly2018Collision --customise_commands process.RandomNumberGeneratorService.externalLHEProducer.initialSeed=int(0) --step LHE,GEN,SIM --geometry DB:Extended --era Run2_2018 --no_exec --mc -n 10
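# (A configuration like this is normally executed inside a CMSSW environment,
#  e.g. cmsRun cfg_hiddenValleyGridPack_darkphoton_m_2_ctau_500_xiO_2p5.py)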
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('SIM',eras.Run2_2018)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.GeometrySimDB_cff')
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic25ns13TeVEarly2018Collision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(10)
)
# Input source
process.source = cms.Source("EmptySource")
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('Configuration/GenProduction/python/cfgs_update_noFilter/hiddenValleyGridPack_darkphoton_m_2_ctau_500_xiO_2p5.py nevts:10'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.RAWSIMoutput = cms.OutputModule("PoolOutputModule",
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('generation_step')
),
compressionAlgorithm = cms.untracked.string('LZMA'),
compressionLevel = cms.untracked.int32(1),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('GEN-SIM'),
filterName = cms.untracked.string('')
),
eventAutoFlushCompressedSize = cms.untracked.int32(20971520),
fileName = cms.untracked.string('file:output.root'),
outputCommands = process.RAWSIMEventContent.outputCommands,
splitLevel = cms.untracked.int32(0)
)
# Additional output definition
# Other statements
process.XMLFromDBSource.label = cms.string("Extended")
process.genstepfilter.triggerConditions=cms.vstring("generation_step")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '102X_upgrade2018_realistic_v11', '')
process.generator = cms.EDFilter("Pythia8HadronizerFilter",
PythiaParameters = cms.PSet(
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CP5Settings',
'pythia8PSweightsSettings',
'pythia8PowhegEmissionVetoSettings',
'processParameters'
),
processParameters = cms.vstring(
'POWHEG:nFinal = 1',
'ParticleDecays:limitTau0= off',
'HiggsSM:gg2H = on',
'25:m0 =125',
'25:addChannel = 1 1.0 102 4900101 -4900101',
'25:0:onMode=0',
'25:1:onMode=0',
'25:2:onMode=0',
'25:3:onMode=0',
'25:4:onMode=0',
'25:5:onMode=0',
'25:6:onMode=0',
'25:7:onMode=0',
'25:8:onMode=0',
'25:9:onMode=0',
'25:10:onMode=0',
'25:11:onMode=0',
'25:12:onMode=0',
'25:13:onMode=0',
'HiddenValley:Ngauge = 3',
'HiddenValley:nFlav = 1',
'HiddenValley:fragment = on',
'HiddenValley:FSR = on',
'HiddenValley:alphaOrder = 1',
'HiddenValley:Lambda = 2.0',
'HiddenValley:pTminFSR = 2.2',
'HiddenValley:spinFv=0',
'4900101:m0 = 0.8',
'4900111:m0 = 2.0',
'4900113:m0 = 5.0',
'999999:all = GeneralResonance void 1 0 0 0.8 0.001 0. 0. 0.',
'4900113:addChannel = 1 1.0 91 4900111 4900111',
'4900111:addChannel = 1 1.0 91 999999 999999',
'999999:addChannel = 1 0.083 91 1 -1',
'999999:addChannel = 1 0.333 91 2 -2',
'999999:addChannel = 1 0.083 91 3 -3',
'999999:addChannel = 1 0.25 91 11 -11',
'999999:addChannel = 1 0.25 91 13 -13',
'999999:tau0 = 5000'
),
pythia8CP5Settings = cms.vstring(
'Tune:pp 14',
'Tune:ee 7',
'MultipartonInteractions:ecmPow=0.03344',
'PDF:pSet=20',
'MultipartonInteractions:bProfile=2',
'MultipartonInteractions:pT0Ref=1.41',
'MultipartonInteractions:coreRadius=0.7634',
'MultipartonInteractions:coreFraction=0.63',
'ColourReconnection:range=5.176',
'SigmaTotal:zeroAXB=off',
'SpaceShower:alphaSorder=2',
'SpaceShower:alphaSvalue=0.118',
'SigmaProcess:alphaSvalue=0.118',
'SigmaProcess:alphaSorder=2',
'MultipartonInteractions:alphaSvalue=0.118',
'MultipartonInteractions:alphaSorder=2',
'TimeShower:alphaSorder=2',
'TimeShower:alphaSvalue=0.118'
),
pythia8CommonSettings = cms.vstring(
'Tune:preferLHAPDF = 2',
'Main:timesAllowErrors = 10000',
'Check:epTolErr = 0.01',
'Beams:setProductionScalesFromLHEF = off',
'SLHA:keepSM = on',
'SLHA:minMassSM = 1000.',
'ParticleDecays:limitTau0 = on',
'ParticleDecays:tau0Max = 10',
'ParticleDecays:allowPhotonRadiation = on'
),
pythia8PSweightsSettings = cms.vstring(
'UncertaintyBands:doVariations = on',
'UncertaintyBands:List = {isrRedHi isr:muRfac=0.707,fsrRedHi fsr:muRfac=0.707,isrRedLo isr:muRfac=1.414,fsrRedLo fsr:muRfac=1.414,isrDefHi isr:muRfac=0.5,fsrDefHi fsr:muRfac=0.5,isrDefLo isr:muRfac=2.0,fsrDefLo fsr:muRfac=2.0,isrConHi isr:muRfac=0.25,fsrConHi fsr:muRfac=0.25,isrConLo isr:muRfac=4.0,fsrConLo fsr:muRfac=4.0,fsr_G2GG_muR_dn fsr:G2GG:muRfac=0.5,fsr_G2GG_muR_up fsr:G2GG:muRfac=2.0,fsr_G2QQ_muR_dn fsr:G2QQ:muRfac=0.5,fsr_G2QQ_muR_up fsr:G2QQ:muRfac=2.0,fsr_Q2QG_muR_dn fsr:Q2QG:muRfac=0.5,fsr_Q2QG_muR_up fsr:Q2QG:muRfac=2.0,fsr_X2XG_muR_dn fsr:X2XG:muRfac=0.5,fsr_X2XG_muR_up fsr:X2XG:muRfac=2.0,fsr_G2GG_cNS_dn fsr:G2GG:cNS=-2.0,fsr_G2GG_cNS_up fsr:G2GG:cNS=2.0,fsr_G2QQ_cNS_dn fsr:G2QQ:cNS=-2.0,fsr_G2QQ_cNS_up fsr:G2QQ:cNS=2.0,fsr_Q2QG_cNS_dn fsr:Q2QG:cNS=-2.0,fsr_Q2QG_cNS_up fsr:Q2QG:cNS=2.0,fsr_X2XG_cNS_dn fsr:X2XG:cNS=-2.0,fsr_X2XG_cNS_up fsr:X2XG:cNS=2.0,isr_G2GG_muR_dn isr:G2GG:muRfac=0.5,isr_G2GG_muR_up isr:G2GG:muRfac=2.0,isr_G2QQ_muR_dn isr:G2QQ:muRfac=0.5,isr_G2QQ_muR_up isr:G2QQ:muRfac=2.0,isr_Q2QG_muR_dn isr:Q2QG:muRfac=0.5,isr_Q2QG_muR_up isr:Q2QG:muRfac=2.0,isr_X2XG_muR_dn isr:X2XG:muRfac=0.5,isr_X2XG_muR_up isr:X2XG:muRfac=2.0,isr_G2GG_cNS_dn isr:G2GG:cNS=-2.0,isr_G2GG_cNS_up isr:G2GG:cNS=2.0,isr_G2QQ_cNS_dn isr:G2QQ:cNS=-2.0,isr_G2QQ_cNS_up isr:G2QQ:cNS=2.0,isr_Q2QG_cNS_dn isr:Q2QG:cNS=-2.0,isr_Q2QG_cNS_up isr:Q2QG:cNS=2.0,isr_X2XG_cNS_dn isr:X2XG:cNS=-2.0,isr_X2XG_cNS_up isr:X2XG:cNS=2.0}',
'UncertaintyBands:nFlavQ = 4',
'UncertaintyBands:MPIshowers = on',
'UncertaintyBands:overSampleFSR = 10.0',
'UncertaintyBands:overSampleISR = 10.0',
'UncertaintyBands:FSRpTmin2Fac = 20',
'UncertaintyBands:ISRpTmin2Fac = 1'
),
pythia8PowhegEmissionVetoSettings = cms.vstring(
'POWHEG:veto = 1',
'POWHEG:pTdef = 1',
'POWHEG:emitted = 0',
'POWHEG:pTemt = 0',
'POWHEG:pThard = 0',
'POWHEG:vetoCount = 100',
'SpaceShower:pTmaxMatch = 2',
'TimeShower:pTmaxMatch = 2'
)
),
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(1),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(10),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1)
)
process.externalLHEProducer = cms.EDProducer("ExternalLHEProducer",
args = cms.vstring('/cvmfs/cms.cern.ch/phys_generator/gridpacks/2017/13TeV/powheg/V2/gg_H_quark-mass-effects_NNPDF31_13TeV_M125/v1/gg_H_quark-mass-effects_NNPDF31_13TeV_M125_slc6_amd64_gcc630_CMSSW_9_3_0.tgz'),
nEvents = cms.untracked.uint32(10),
numberOfParameters = cms.uint32(1),
outputFile = cms.string('cmsgrid_final.lhe'),
scriptName = cms.FileInPath('GeneratorInterface/LHEInterface/data/run_generic_tarball_cvmfs.sh')
)
# Path and EndPath definitions
process.lhe_step = cms.Path(process.externalLHEProducer)
process.generation_step = cms.Path(process.pgen)
process.simulation_step = cms.Path(process.psim)
process.genfiltersummary_step = cms.EndPath(process.genFilterSummary)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.RAWSIMoutput_step = cms.EndPath(process.RAWSIMoutput)
# Schedule definition
process.schedule = cms.Schedule(process.lhe_step,process.generation_step,process.genfiltersummary_step,process.simulation_step,process.endjob_step,process.RAWSIMoutput_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)
# filter all path with the production filter sequence
for path in process.paths:
if path in ['lhe_step']: continue
getattr(process,path)._seq = process.generator * getattr(process,path)._seq
# customisation of the process.
# Automatic addition of the customisation function from Configuration.DataProcessing.Utils
from Configuration.DataProcessing.Utils import addMonitoring
#call to customisation function addMonitoring imported from Configuration.DataProcessing.Utils
process = addMonitoring(process)
# End of customisation functions
# Customisation from command line
process.RandomNumberGeneratorService.externalLHEProducer.initialSeed=int(0)
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion
| [
"[email protected]"
] | |
215f4513eb1b55e128ba2887b9bbd6c50c1398a5 | 0cd81f8a964de3712881fe48a75208490d644cf0 | /entry.py | b7da5013b3a8c99aca7afaf4ed186298cde46d87 | [] | no_license | ALLYOURSR/cvml | 1bf79b0d57da7b225cb0b98d5ee20889fe73204a | 0dfaa5a21e6d86731e5e034ceb9f209079bac912 | refs/heads/master | 2021-05-18T09:59:33.208411 | 2020-04-13T01:23:36 | 2020-04-13T01:23:36 | 251,200,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | from config import ServerConfig
from question_generator import QuestionGenerator
from server import Server
c = ServerConfig()
qg = QuestionGenerator(c.QuestionFilepath)
qg.PrintAll()
s = Server(c, qg)
s.Run() | [
"x"
] | x |
332e7cb3d78bc410afc3bccae5fcd93f5a4e5e59 | 0cc4eb3cb54f8394c127ace62d3108fdb5230c85 | /.spack-env/view/lib/python3.7/test/test_cmath.py | c3ee3140b71cac235830ae6efa65e1fb68f59869 | [] | no_license | jacobmerson/spack-develop-env | 5b2d76f58c0b64ae97c64f77a3c4d33a770c71c8 | 5fca20ca343b1a76f05fc635c87f94ed25417d94 | refs/heads/master | 2022-07-04T02:22:50.264727 | 2020-05-06T05:13:50 | 2020-05-06T05:13:50 | 261,657,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | /lore/mersoj/spack/spack/opt/spack/linux-rhel7-x86_64/gcc-7.3.0/python-3.7.7-oihhthdoxtgh4krvzpputn5ozwcnq2by/lib/python3.7/test/test_cmath.py | [
"[email protected]"
] | |
36d995b522dd8a070c1fe6af8ffc791077fabbd4 | 70f5f279e051360310f95be895320d8fa6cd8d93 | /extraPackages/matplotlib-3.0.2/examples/mplot3d/surface3d_radial.py | 521f6195330d266d655b1e63e19f76cfd132b2a3 | [
"BSD-3-Clause"
] | permissive | spacetime314/python3_ios | 4b16ab3e81c31213b3db1e1eb00230621b0a7dc8 | e149f1bc2e50046c8810f83dae7739a8dea939ee | refs/heads/master | 2020-05-09T20:39:14.980041 | 2019-04-08T15:07:53 | 2019-04-08T15:07:53 | 181,415,024 | 2 | 0 | BSD-3-Clause | 2019-04-15T05:00:14 | 2019-04-15T05:00:12 | null | UTF-8 | Python | false | false | 1,075 | py | '''
=================================
3D surface with polar coordinates
=================================
Demonstrates plotting a surface defined in polar coordinates.
Uses the reversed version of the YlGnBu color map.
Also demonstrates writing axis labels with latex math mode.
Example contributed by Armin Moser.
'''
# This import registers the 3D projection, but is otherwise unused.
from mpl_toolkits.mplot3d import Axes3D # noqa: F401 unused import
import matplotlib.pyplot as plt
import numpy as np
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Create the mesh in polar coordinates and compute corresponding Z.
r = np.linspace(0, 1.25, 50)
p = np.linspace(0, 2*np.pi, 50)
R, P = np.meshgrid(r, p)
Z = ((R**2 - 1)**2)
# Express the mesh in the cartesian system.
X, Y = R*np.cos(P), R*np.sin(P)
# Plot the surface.
ax.plot_surface(X, Y, Z, cmap=plt.cm.YlGnBu_r)
# Tweak the limits and add latex math labels.
ax.set_zlim(0, 1)
ax.set_xlabel(r'$\phi_\mathrm{real}$')
ax.set_ylabel(r'$\phi_\mathrm{im}$')
ax.set_zlabel(r'$V(\phi)$')
plt.show()
| [
"[email protected]"
] | |
2a39ee5245c8e52dc5d1316e6515469db668b9c7 | dd6ea0bfbaf6ed6eaab919398f6b2c21bc03c0c6 | /setup.py | 6f214ef68e95a0657105c4ac598fcf4abc45bf9c | [
"BSD-2-Clause"
] | permissive | oliverzgy/wechat-python-sdk | 32f0b3622f30072d74e5affd268742e01876c6fe | eafd27521feea92215aae8b0ed0887ee40703dc9 | refs/heads/master | 2021-01-20T23:40:38.744050 | 2015-02-06T15:04:59 | 2015-02-06T15:04:59 | 30,707,434 | 0 | 1 | null | 2015-02-12T14:59:58 | 2015-02-12T14:59:58 | null | UTF-8 | Python | false | false | 591 | py | # -*- coding: utf-8 -*-
# !/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='wechat-sdk',
version='0.5.2',
keywords=('wechat', 'sdk', 'wechat sdk'),
description=u'微信公众平台Python开发包',
long_description=open("README.rst").read(),
license='BSD License',
url='https://github.com/doraemonext/wechat-python-sdk',
author='doraemonext',
author_email='[email protected]',
packages=find_packages(),
include_package_data=True,
platforms='any',
install_requires=open("requirements.txt").readlines(),
) | [
"[email protected]"
] | |
433c74652da8834ac5064d09ce7f8a68abd9384a | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /backup_write_3/backup-vault-notification_put.py | 6ea0433dce6df72d7c2c3e9b44bc794bccbba370 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,601 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_three_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/backup/put-backup-vault-notifications.html
if __name__ == '__main__':
"""
delete-backup-vault-notifications : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/backup/delete-backup-vault-notifications.html
get-backup-vault-notifications : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/backup/get-backup-vault-notifications.html
"""
parameter_display_string = """
# backup-vault-name : The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
    # sns-topic-arn : The Amazon Resource Name (ARN) that specifies the topic for a backup vault's events; for example, arn:aws:sns:us-west-2:111122223333:MyVaultTopic.
# backup-vault-events : An array of events that indicate the status of jobs to back up resources to the backup vault.
(string)
"""
add_option_dict = {}
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_three_parameter("backup", "put-backup-vault-notifications", "backup-vault-name", "sns-topic-arn", "backup-vault-events", add_option_dict)
| [
"[email protected]"
] | |
889ff2dd9c9418259b6ee467ef8771eb521c7d26 | 34de2b3ef4a2478fc6a03ea3b5990dd267d20d2d | /Python/science/integration/myintegration.py | 8d3710164567f2f0d1c663cd3b2fab77d9da7592 | [
"MIT"
] | permissive | bhishanpdl/Programming | d4310f86e1d9ac35483191526710caa25b5f138e | 9654c253c598405a22cc96dfa1497406c0bd0990 | refs/heads/master | 2020-03-26T06:19:01.588451 | 2019-08-21T18:09:59 | 2019-08-21T18:09:59 | 69,140,073 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | #!/usr/bin/env python3
# integrate ax from 0 to 3 with a = 5
# using scipy.integrate.quad
# integrand = 5x
# limit = 0 to 3
# ans = 5x**2/2 = 2.5 x**2
from scipy import integrate
import numpy as np
a = 5
# lambda method
fun = lambda a,x: a*x
# function method
def fun(a,x):
return a*x
y = integrate.quad(fun, 0, 3, args=(a))
print(y)
y2 = 2.5 * (3**2 - 0**2)
print(y2)
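
# Cross-check sketch (not part of the original script): the same integral
# evaluated with the trapezoidal rule on a fine grid should match quad.
xs = np.linspace(0, 3, 1001)
print(np.trapz(a * xs, xs))  # ~22.5 for a linear integrand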
| [
"[email protected]"
] | |
8eb80fa95c3be888ed3cf7bfe84303f5dc888efd | dee468400b97faa9926a8f80be9d400fab2c6d85 | /tests/flatpages_tests/no_slash_urls.py | 112e6d16384b35566483dc930b852f67b3e7b3c6 | [
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive",
"Python-2.0"
] | permissive | claudep/django | 0a8eec4039ddb57fc3c31ae03b313fccdeb6a063 | f1a808a5025b63715d1034af2b96a6a5241d29e9 | refs/heads/master | 2023-09-01T05:08:41.544950 | 2020-04-15T11:11:13 | 2020-04-15T16:31:30 | 4,217,165 | 3 | 2 | BSD-3-Clause | 2023-08-27T16:40:58 | 2012-05-03T18:20:44 | Python | UTF-8 | Python | false | false | 121 | py | from django.urls import include, path
urlpatterns = [
path('flatpage', include('django.contrib.flatpages.urls')),
]
| [
"[email protected]"
] | |
636f25285e5b1ceca841e1881768541bb14c88ca | b57b0a14df5c6841f04cccb7b02ad04afbca18f8 | /etcd/datadog_checks/etcd/__about__.py | c75a87ea64bdb4e76aadd942c28a4a1a906e0f36 | [
"AFL-3.0",
"BSD-3-Clause-Modification",
"LGPL-3.0-only",
"Unlicense",
"LGPL-2.1-only",
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | zeroc0d3/integrations-core | d9c99803c049668b7f9f9c796d338e343d3d46ee | 634d567f3c38d32aabb3f4c16b50bcfa8a4ae0fb | refs/heads/master | 2021-09-28T18:37:00.650406 | 2021-09-13T11:59:45 | 2021-09-13T11:59:45 | 199,758,958 | 0 | 0 | BSD-3-Clause | 2019-07-31T02:01:25 | 2019-07-31T02:01:24 | null | UTF-8 | Python | false | false | 138 | py | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
__version__ = '2.7.1'
| [
"[email protected]"
] | |
3c47c84faa7edc6e871e1909b808a873271d09c8 | 20a3cc1106fa86fc2d45cd1728cc87d5db97e1f7 | /jiayq/utils/timer.py | 3607ee673ddbb8d716631baa9d2b02da0848a641 | [] | no_license | sarahboufelja54/galatea | f5664f0b3117629b2c5bbe078a1bd52bb5e359e6 | 002a9f2905868be25b71770190fb2d5eda11c861 | refs/heads/master | 2020-12-04T13:45:07.697189 | 2018-12-12T16:27:09 | 2018-12-12T16:27:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,898 | py | from time import time
from jiayq.utils import mpiutils
def hms(t,template='{}h {}m {:.2f}s'):
'''
format the time value to "xxh xxm xxs"
'''
# not implemented
hour = int(t / 3600.0)
t = t % 3600.0
minute = int (t / 60)
t = t % 60
return template.format(hour,minute,t)
class Timer:
'''
class Timer implements some sugar functions that works like a stopwatch.
Timer.reset() resets the watch
Timer.lap() returns the time elapsed since the last lap() call
Timer.total() returns the total time elapsed since the last reset
'''
def __init__(self):
# t is the total time
# l is the lap time
self.t = time()
self.l = time()
def reset(self):
self.t = time()
self.l = time()
def lap(self):
diff = time() - self.l
self.l = time()
return diff
def total(self):
return time() - self.t
class LoopReporter:
'''
    class LoopReporter implements some sugar functions that report
    the stats of a loop that Yangqing usually needs.
'''
def __init__(self, step = 100, header = '', rootOnly = False):
self.timer = Timer()
self.header = header
self.step = step
self.rootOnly = rootOnly
def reset(self):
self.timer.reset()
def report(self,processed,total):
if processed % self.step != 0:
return
elapsed = self.timer.total()
if processed == 0:
eta = 0.0
else:
eta = elapsed * (total - processed) / processed
if self.rootOnly:
mpiutils.rootprint('{} {}/{}, elapsed {}, eta {}.'.format(self.header, processed, total, hms(elapsed), hms(eta)))
else:
mpiutils.nodeprint('{} {}/{}, elapsed {}, eta {}.'.format(self.header, processed, total, hms(elapsed), hms(eta)))
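

# Illustrative usage sketch (not part of the original module). It assumes the
# surrounding package (including ``jiayq.utils.mpiutils``) is importable; the
# loop below is a hypothetical workload used only to exercise the helpers.
if __name__ == '__main__':
    watch = Timer()
    reporter = LoopReporter(step=2, header='demo', rootOnly=True)
    total = 10
    for i in range(total):
        reporter.report(i, total)   # prints progress, elapsed time and ETA
    print('lap: {}, total: {}'.format(hms(watch.lap()), hms(watch.total())))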
| [
"[email protected]"
] | |
094a8e5ce8cf0d9bb819ce0a32723b0f2da0c8a8 | d652c5cd50abc59163288f67aabf511edf2ffc16 | /{{cookiecutter.package_name}}/sandbox/settings/demo.py | 3d99764e0583de7cbdf4e92da06122dc22131747 | [
"MIT"
] | permissive | sveetch/cookiecutter-sveetch-djangoapp | 2f883958a665a84423f9dcc0bbd794a67d91fb0e | 6770a00e5ed67702f61543c0495bc55dcebdc76a | refs/heads/master | 2023-04-03T18:05:59.380348 | 2023-03-17T16:26:15 | 2023-03-17T16:26:15 | 297,186,173 | 3 | 1 | null | 2020-10-12T00:52:41 | 2020-09-21T00:04:59 | null | UTF-8 | Python | false | false | 462 | py | """
Django settings for demonstration
Intended to be used with ``make run``.
"""
from sandbox.settings.base import * # noqa: F403
DEBUG = True
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # noqa: F405
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": VAR_PATH / "db" / "db.sqlite3", # noqa: F405
}
}
# Import local settings if any
try:
from .local import * # noqa: F401,F403
except ImportError:
pass
| [
"[email protected]"
] | |
d679a89de0c76f528dd84256305a04ee5ca93ed2 | 267298206e19567d2399cd32f7d4ac264f470760 | /sorbet/feedmanager/migrations/0007_chg_field_item_title.py | aec0e9a9cd82eb62705c3e8b0e8c653aec319cef | [
"BSD-3-Clause",
"CC-BY-3.0",
"BSD-2-Clause"
] | permissive | kklimonda/sorbet | e340b4d5749ddb06e313f6b17f968b4391072cf8 | d7d0d04fbd6ba16700a7549cfe1d240ca51693af | refs/heads/master | 2021-01-18T06:30:10.063506 | 2012-07-15T06:20:16 | 2012-07-15T06:20:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,256 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Item.title'
db.alter_column('feedmanager_item', 'title', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Changing field 'Item.title'
db.alter_column('feedmanager_item', 'title', self.gf('django.db.models.fields.CharField')(max_length=70))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'feedmanager.feed': {
'Meta': {'object_name': 'Feed'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'})
},
'feedmanager.item': {
'Meta': {'object_name': 'Item'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feedmanager.Feed']"}),
'guid': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.TextField', [], {}),
'pubdate': ('django.db.models.fields.DateTimeField', [], {}),
'title': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['feedmanager']
| [
"[email protected]"
] | |
55080a68f4c3a651706f71cc3c3ecebb5102ac65 | 4bba68477b57ccbd9e8c3bf1622fc3fb4c07034a | /texar/modules/pretrained/pretrained_gpt2.py | 35f3aa3bb99251e8df4b6bed440d78b69bf89803 | [
"Apache-2.0"
] | permissive | TPNguyen/texar-pytorch | 9ed56e2983ee1e055c5eb9c14856f967906e96f5 | d212ab01e427da00cd4210344cc18e0d88a16e50 | refs/heads/master | 2020-06-25T06:56:59.791929 | 2019-07-28T01:00:33 | 2019-07-28T01:00:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,405 | py | # Copyright 2019 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utils of GPT2 Modules.
"""
import json
import os
import sys
from abc import ABC
from typing import Any, Dict
import torch
from torch import nn
from texar.modules.pretrained.pretrained_base import PretrainedMixin
__all__ = [
"PretrainedGPT2Mixin",
]
_GPT2_PATH = "https://storage.googleapis.com/gpt-2/models/"
_CHECKPOINT_FILES = [
"checkpoint", "encoder.json", "hparams.json", "vocab.bpe",
"model.ckpt.data-00000-of-00001", "model.ckpt.index", "model.ckpt.meta"]
class PretrainedGPT2Mixin(PretrainedMixin, ABC):
r"""A mixin class to support loading pre-trained checkpoints for modules
that implement the GPT2 model.
The GPT2 model was proposed in
`Language Models are Unsupervised Multitask Learners`_
by `Radford et al.` from OpenAI. It is a unidirectional Transformer model
pre-trained using the vanilla language modeling objective on a large corpus.
.. _`Language Models are Unsupervised Multitask Learners`:
https://openai.com/blog/better-language-models/
"""
_MODEL_NAME = "GPT2"
_MODEL2URL = {
'117M': [_GPT2_PATH + f"117M/{file}" for file in _CHECKPOINT_FILES],
'345M': [_GPT2_PATH + f"345M/{file}" for file in _CHECKPOINT_FILES],
}
@classmethod
def _transform_config(cls, cache_dir: str) -> Dict[str, Any]:
info = list(os.walk(cache_dir))
root, _, files = info[0]
config_path = None
for file in files:
if file.endswith('hparams.json'):
config_path = os.path.join(root, file)
if config_path is None:
raise ValueError(f"Cannot find the config file in {cache_dir}")
with open(config_path) as f:
config_gpt = json.loads(f.read())
hidden_dim = config_gpt["n_embd"]
configs = {
"vocab_size": config_gpt["n_vocab"],
"context_size": config_gpt["n_ctx"],
"embedding_size": config_gpt["n_embd"], "embed": {
"dim": hidden_dim,
},
"position_size": config_gpt["n_ctx"],
"position_embed": {
"dim": hidden_dim
}
}
configs.update({
"dim": hidden_dim,
"num_blocks": config_gpt["n_layer"],
"use_gpt_config": True,
"embedding_dropout": 0,
"residual_dropout": 0,
"multihead_attention": {
"use_bias": True,
"num_units": hidden_dim,
"num_heads": config_gpt["n_head"],
"output_dim": hidden_dim,
},
"initializer": {
"type": "variance_scaling_initializer",
"kwargs": {
"factor": 1.0,
"mode": "FAN_AVG",
"uniform": True,
},
},
"poswise_feedforward": {
"layers": [
{
"type": "Linear",
"kwargs": {
"in_features": hidden_dim,
"out_features": hidden_dim * 4,
"bias": True,
}
},
{
"type": "GPTGELU",
"kwargs": {}
},
{
"type": "Linear",
"kwargs": {
"in_features": hidden_dim * 4,
"out_features": hidden_dim,
"bias": True,
}
}
],
"name": "ffn",
},
})
return configs
def _init_from_checkpoint(self, cache_dir: str,
load_output_layer: bool = True, **kwargs):
r"""Initialize model parameters from weights stored in the pre-trained
checkpoint.
Args:
cache_dir (str): Path to the cache directory.
load_output_layer (bool): If `False`, will not load weights of the
output layer. Set this argument to `False` when loading weights
into a GPT2 encoder. Defaults to `True`.
"""
try:
import numpy as np
import tensorflow as tf
except ImportError:
print("Loading TensorFlow models in PyTorch requires installing "
"TensorFlow. Please see https://www.tensorflow.org/install/ "
"for installation instructions.")
raise
global_tensor_map = {
"model/wte": "_embedding",
"model/wpe": "_embedding",
"model/ln_f/b": "final_layer_norm.bias",
"model/ln_f/g": "final_layer_norm.weight",
}
layer_tensor_map = {
"ln_1/b": "self_attn_layer_norm.{}.bias",
"ln_1/g": "self_attn_layer_norm.{}.weight",
"ln_2/b": "poswise_layer_norm.{}.bias",
"ln_2/g": "poswise_layer_norm.{}.weight",
"mlp/c_fc/b": "poswise_networks.{}._layers.0.bias",
"mlp/c_proj/b": "poswise_networks.{}._layers.2.bias",
"attn/c_proj/b": "self_attns.{}.O_dense.bias",
}
layer_transpose_map = {
"mlp/c_fc/w": "poswise_networks.{}._layers.0.weight",
"mlp/c_proj/w": "poswise_networks.{}._layers.2.weight",
"attn/c_proj/w": "self_attns.{}.O_dense.weight",
}
tf_path = os.path.abspath(os.path.join(cache_dir, 'model.ckpt'))
# Load weights from TF model
init_vars = tf.train.list_variables(tf_path)
names = []
arrays = []
for name, _ in init_vars:
array = tf.train.load_variable(tf_path, name)
names.append(name)
arrays.append(array.squeeze())
tensor_names = []
for name, _ in self.word_embedder.named_parameters():
tensor_names.append(name)
for name, _ in self.position_embedder.named_parameters():
tensor_names.append(name)
for name, _ in self.named_parameters():
tensor_names.append(name)
idx = 0
for name, array in zip(names, arrays):
processing = (idx + 1.0) / len(names)
idx += 1
sys.stdout.write(f"\rLoading checkpoint: {processing:.1%}")
sys.stdout.flush()
if name in global_tensor_map:
v_name = global_tensor_map[name]
if name == "model/wte":
if load_output_layer:
pointer = self.word_embedder.embedding
assert pointer.shape == array.shape
pointer.data = torch.from_numpy(array)
output_pointer = name_to_variable(
self, "_output_layer.weight")
if not isinstance(output_pointer, nn.Identity):
assert output_pointer.shape == array.shape
output_pointer.data = torch.from_numpy(array)
elif name == "model/wpe":
pointer = self.position_embedder.embedding
assert pointer.shape == array.shape
pointer.data = torch.from_numpy(array)
else:
pointer = self._name_to_variable(v_name)
assert pointer.shape == array.shape
pointer.data = torch.from_numpy(array)
else:
name_tmp = name.split("/")
layer_no = name_tmp[1][1:]
name = "/".join(name_tmp[2:])
if name in layer_tensor_map:
v_name = layer_tensor_map[name].format(layer_no)
pointer = self._name_to_variable(v_name)
assert pointer.shape == array.shape
pointer.data = torch.from_numpy(array)
elif name in layer_transpose_map:
v_name = layer_transpose_map[name].format(layer_no)
pointer = self._name_to_variable(v_name)
array_t = np.transpose(array)
assert pointer.shape == array_t.shape
pointer.data = torch.from_numpy(array_t)
elif name == "attn/c_attn/w":
index_d = array.shape[-1] // 3
Q_w = np.transpose(array[:, :index_d])
K_w = np.transpose(array[:, index_d: 2 * index_d])
V_w = np.transpose(array[:, 2 * index_d:])
q_weight = self._name_to_variable(
f"self_attns.{layer_no}.Q_dense.weight")
k_weight = self._name_to_variable(
f"self_attns.{layer_no}.K_dense.weight")
v_weight = self._name_to_variable(
f"self_attns.{layer_no}.V_dense.weight")
assert q_weight.shape == Q_w.shape
assert k_weight.shape == K_w.shape
assert v_weight.shape == V_w.shape
q_weight.data = torch.from_numpy(Q_w)
k_weight.data = torch.from_numpy(K_w)
v_weight.data = torch.from_numpy(V_w)
elif name == "attn/c_attn/b":
d = array.shape[0]
Q_b = array[: d // 3]
K_b = array[d // 3: 2 * d // 3]
V_b = array[2 * d // 3:]
q_bias = self._name_to_variable(
f"self_attns.{layer_no}.Q_dense.bias")
k_bias = self._name_to_variable(
f"self_attns.{layer_no}.K_dense.bias")
v_bias = self._name_to_variable(
f"self_attns.{layer_no}.V_dense.bias")
assert q_bias.shape == Q_b.shape
assert k_bias.shape == K_b.shape
assert v_bias.shape == V_b.shape
q_bias.data = torch.from_numpy(Q_b)
k_bias.data = torch.from_numpy(K_b)
v_bias.data = torch.from_numpy(V_b)
else:
print("Name error", name)
raise Exception
def name_to_variable(model: nn.Module, name: str) -> nn.Module:
r"""Find the corresponding variable given the specified name.
"""
pointer = model
name = name.split(".")
for m_name in name:
if m_name.isdigit():
num = int(m_name)
pointer = pointer[num] # type: ignore
else:
if not isinstance(pointer, nn.Identity):
pointer = getattr(pointer, m_name)
return pointer
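

# Illustrative sketch (not part of the original module): how ``name_to_variable``
# resolves a dotted parameter path. The tiny ``nn.Sequential`` below is a
# hypothetical stand-in for a real GPT2 module.
if __name__ == "__main__":
    _demo = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
    _weight = name_to_variable(_demo, "2.weight")  # digits index into the container
    print(_weight.shape)  # expected: torch.Size([2, 8])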
| [
"[email protected]"
] | |
14b2ba3ed8ddd6c1f75e8bf51a7f5fba38312c1d | 283f9fc69e0b00051a79232fc2cbaabdab589c18 | /flashsale/pay/migrations/0004_create_brand_and_brand_product_add_amount_flow_outer_id.py | 3b6952a54f9dc87a3d6b483977e5c893ab1de7a5 | [] | no_license | nidepuzi/ndpuzsys | 572b67a84fcd6c4fa4d49d3bdb0eb826e7791e62 | be58dc8f1f0630d3a04e551911f66d9091bedc45 | refs/heads/master | 2023-01-06T22:52:49.861479 | 2019-07-09T11:00:06 | 2019-07-09T11:00:06 | 188,955,119 | 1 | 0 | null | 2022-12-26T20:15:24 | 2019-05-28T04:41:35 | Python | UTF-8 | Python | false | false | 4,309 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('pay', '0003_auto_20160425_1212'),
]
operations = [
migrations.CreateModel(
name='BrandEntry',
fields=[
('created', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65e5\u671f', db_index=True)),
('modified', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65e5\u671f', db_index=True)),
('id', models.AutoField(serialize=False, primary_key=True)),
('brand_name', models.CharField(db_index=True, max_length=32, verbose_name='\u54c1\u724c\u540d\u79f0', blank=True)),
('brand_desc', models.TextField(max_length=512, verbose_name='\u54c1\u724c\u6d3b\u52a8\u63cf\u8ff0', blank=True)),
('brand_pic', models.CharField(max_length=256, verbose_name='\u54c1\u724c\u56fe\u7247', blank=True)),
('brand_post', models.CharField(max_length=256, verbose_name='\u54c1\u724c\u6d77\u62a5', blank=True)),
('brand_applink', models.CharField(max_length=256, verbose_name='\u54c1\u724cAPP\u534f\u8bae\u94fe\u63a5', blank=True)),
('start_time', models.DateTimeField(db_index=True, null=True, verbose_name='\u5f00\u59cb\u65f6\u95f4', blank=True)),
('end_time', models.DateTimeField(null=True, verbose_name='\u7ed3\u675f\u65f6\u95f4', blank=True)),
('order_val', models.IntegerField(default=0, verbose_name='\u6392\u5e8f\u503c')),
('is_active', models.BooleanField(default=True, verbose_name='\u4e0a\u7ebf')),
],
options={
'db_table': 'flashsale_brand_entry',
'verbose_name': '\u7279\u5356/\u54c1\u724c\u63a8\u5e7f\u5165\u53e3',
'verbose_name_plural': '\u7279\u5356/\u54c1\u724c\u63a8\u5e7f\u5165\u53e3',
},
),
migrations.CreateModel(
name='BrandProduct',
fields=[
('created', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65e5\u671f', db_index=True)),
('modified', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65e5\u671f', db_index=True)),
('id', models.AutoField(serialize=False, primary_key=True)),
('brand_name', models.CharField(db_index=True, max_length=32, verbose_name='\u54c1\u724c\u540d\u79f0', blank=True)),
('product_id', models.BigIntegerField(default=0, verbose_name='\u5546\u54c1id', db_index=True)),
('product_name', models.CharField(max_length=64, verbose_name='\u5546\u54c1\u540d\u79f0', blank=True)),
('product_img', models.CharField(max_length=256, verbose_name='\u5546\u54c1\u56fe\u7247', blank=True)),
('start_time', models.DateTimeField(db_index=True, null=True, verbose_name='\u5f00\u59cb\u65f6\u95f4', blank=True)),
('end_time', models.DateTimeField(null=True, verbose_name='\u7ed3\u675f\u65f6\u95f4', blank=True)),
('brand', models.ForeignKey(related_name='brand_products', verbose_name='\u54c1\u724c\u7f16\u53f7id', to='pay.BrandEntry')),
],
options={
'db_table': 'flashsale_brand_product',
'verbose_name': '\u7279\u5356/\u54c1\u724c\u5546\u54c1',
'verbose_name_plural': '\u7279\u5356/\u54c1\u724c\u5546\u54c1',
},
),
migrations.AddField(
model_name='salerefund',
name='amount_flow',
field=jsonfield.fields.JSONField(default=b'{"desc":""}', max_length=512, verbose_name='\u9000\u6b3e\u53bb\u5411', blank=True),
),
migrations.AlterField(
model_name='saleorder',
name='outer_id',
field=models.CharField(max_length=32, verbose_name='\u5546\u54c1\u5916\u90e8\u7f16\u7801', blank=True),
),
migrations.AlterField(
model_name='saleorder',
name='outer_sku_id',
field=models.CharField(max_length=32, verbose_name='\u89c4\u683c\u5916\u90e8\u7f16\u7801', blank=True),
),
]
| [
"[email protected]"
] | |
46e9f7c42f8a3f5978431277c2b11a34a1bd58df | 1ffc17893d9e15fd939628bbc41c3d2633713ebd | /tests/test_utils/reference_implementation_helper.py | e703d38310611aa42b79f1811d54ed8bc4f70b02 | [
"Apache-2.0"
] | permissive | xadupre/sklearn-onnx | 646e8a158cdded725064964494f0f8a760630aa8 | b05e4864cedbf4f2a9e6c003781d1db8b53264ac | refs/heads/master | 2023-09-01T15:58:38.112315 | 2022-12-21T01:59:45 | 2022-12-21T01:59:45 | 382,323,831 | 0 | 2 | Apache-2.0 | 2023-01-04T13:41:33 | 2021-07-02T11:22:00 | Python | UTF-8 | Python | false | false | 4,801 | py | # SPDX-License-Identifier: Apache-2.0
import numpy as np
def ErfInv(x):
sgn = -1. if x < 0 else 1.
x = (1. - x) * (1 + x)
log = np.log(x)
v = 2. / (3.14159 * 0.147) + 0.5 * log
v2 = 1. / 0.147 * log
v3 = -v + np.sqrt(v * v - v2)
x = sgn * np.sqrt(v3)
return x
def ComputeLogistic(val):
v = 1. / (1. + np.exp(-np.abs(val)))
return (1. - v) if val < 0 else v
def ComputeProbit(val):
return 1.41421356 * ErfInv(val * 2 - 1)
def ComputeSoftmax(values):
v_max = values.max()
values[:] = np.exp(values - v_max)
this_sum = values.sum()
values /= this_sum
return values
def ComputeSoftmaxZero(values):
v_max = values.max()
exp_neg_v_max = np.exp(-v_max)
s = 0
for i in range(len(values)):
v = values[i]
if v > 0.0000001 or v < -0.0000001:
values[i] = np.exp(v - v_max)
s += values[i]
else:
values[i] *= exp_neg_v_max
    # normalize by the accumulated sum only after the loop has seen every entry
    values[:] /= s
    return values
def sigmoid_probability(score, proba, probb):
# ref: https://github.com/arnaudsj/libsvm/blob/
# eaaefac5ebd32d0e07902e1ae740e038eaaf0826/svm.cpp#L1818
val = score * proba + probb
return 1 - ComputeLogistic(val)
def multiclass_probability(k, R):
max_iter = max(100, k)
Q = np.empty((k, k), dtype=R.dtype)
Qp = np.empty((k, ), dtype=R.dtype)
P = np.empty((k, ), dtype=R.dtype)
eps = 0.005 / k
for t in range(0, k):
P[t] = 1.0 / k
Q[t, t] = 0
for j in range(t):
Q[t, t] += R[j, t] * R[j, t]
Q[t, j] = Q[j, t]
for j in range(t + 1, k):
Q[t, t] += R[j, t] * R[j, t]
Q[t, j] = -R[j, t] * R[t, j]
for it in range(max_iter):
# stopping condition, recalculate QP,pQP for numerical accuracy
pQp = 0
for t in range(0, k):
Qp[t] = 0
for j in range(k):
Qp[t] += Q[t, j] * P[j]
pQp += P[t] * Qp[t]
max_error = 0
for t in range(0, k):
error = np.abs(Qp[t] - pQp)
if error > max_error:
max_error = error
if max_error < eps:
break
for t in range(k):
diff = (-Qp[t] + pQp) / Q[t, t]
P[t] += diff
pQp = ((pQp + diff * (diff * Q[t, t] + 2 * Qp[t])) /
(1 + diff) ** 2)
for j in range(k):
Qp[j] = (Qp[j] + diff * Q[t, j]) / (1 + diff)
P[j] /= (1 + diff)
return P
def write_scores(n_classes, scores, post_transform, add_second_class):
if n_classes >= 2:
if post_transform == "PROBIT":
res = []
for score in scores:
res.append(ComputeProbit(score))
return np.array(res, dtype=scores.dtype)
if post_transform == "LOGISTIC":
res = []
for score in scores:
res.append(ComputeLogistic(score))
return np.array(res, dtype=scores.dtype)
if post_transform == "SOFTMAX":
return ComputeSoftmax(scores)
if post_transform == "SOFTMAX_ZERO":
return ComputeSoftmaxZero(scores)
return scores
if n_classes == 1:
if post_transform == "PROBIT":
return np.array([ComputeProbit(scores[0])], dtype=scores.dtype)
        if add_second_class == 0:
            return np.array([1 - scores[0], scores[0]], dtype=scores.dtype)
        elif add_second_class == 1:
            return np.array([1 - scores[0], scores[0]], dtype=scores.dtype)
elif add_second_class in (2, 3):
if post_transform == "LOGISTIC":
return np.array([ComputeLogistic(-scores[0]),
ComputeLogistic(scores[0])],
dtype=scores.dtype)
return np.array([-scores[0], scores[0]], dtype=scores.dtype)
return np.array([scores[0]], dtype=scores.dtype)
raise NotImplementedError(f"n_classes={n_classes} not supported.")
def set_score_svm(max_weight, maxclass, n, post_transform,
has_proba, weights_are_all_positive_,
classlabels, posclass, negclass):
write_additional_scores = -1
if len(classlabels) == 2:
write_additional_scores = 2 if post_transform == "NONE" else 0
if not has_proba:
if weights_are_all_positive_ and max_weight >= 0.5:
return classlabels[1], write_additional_scores
if max_weight > 0 and not weights_are_all_positive_:
return classlabels[1], write_additional_scores
return classlabels[maxclass], write_additional_scores
if max_weight > 0:
return posclass, write_additional_scores
return negclass, write_additional_scores
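

# Illustrative sanity check (not part of the original helpers): the softmax
# variants should return probabilities that sum to one, and ComputeLogistic
# should agree with the plain sigmoid 1 / (1 + exp(-x)).
if __name__ == "__main__":
    scores = np.array([0.5, 1.5, -0.25], dtype=np.float32)
    print(ComputeSoftmax(scores.copy()), ComputeSoftmax(scores.copy()).sum())
    print(ComputeSoftmaxZero(scores.copy()))
    print(ComputeLogistic(0.3), 1.0 / (1.0 + np.exp(-0.3)))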
| [
"[email protected]"
] | |
c9186add6d0c25f076889a28cd4e487475439e09 | f3bd271bf00325881fb5b2533b9ef7f7448a75ec | /xcp2k/classes/_com1.py | c9f4959959fa47cab33563e9c613b6405be5674a | [] | no_license | obaica/xcp2k | 7f99fc9d494859e16b9b0ea8e217b0493f4b2f59 | 6e15c2c95658f545102595dc1783f5e03a9e6916 | refs/heads/master | 2020-07-15T17:27:43.378835 | 2019-02-11T16:32:24 | 2019-02-11T16:32:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | from xcp2k.inputsection import InputSection
from _each74 import _each74
class _com1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Section_parameters = None
self.Add_last = None
self.Common_iteration_levels = None
self.Filename = None
self.Log_print_key = None
self.EACH = _each74()
self._name = "COM"
self._keywords = {'Common_iteration_levels': 'COMMON_ITERATION_LEVELS', 'Log_print_key': 'LOG_PRINT_KEY', 'Add_last': 'ADD_LAST', 'Filename': 'FILENAME'}
self._subsections = {'EACH': 'EACH'}
self._attributes = ['Section_parameters']
| [
"[email protected]"
] | |
1c850d20f46c8bacea3e2001b3b00d03e4f2e38e | 5d1c43bb4881039f198eedcee2ceb101b406e0a0 | /Django/myvenv/Scripts/rst2man.py | 52894792e98bd74e7fc0f83fd5849d6aff9320c2 | [] | no_license | MunSeoHee/Likelion_Gachon_2020 | 46155b1686a245a59c5664f7726ac754b7079e4b | e0e48845fdb0e4aa2365e7c47e29880a27f0f261 | refs/heads/master | 2021-04-10T09:51:06.618980 | 2020-12-07T10:06:43 | 2020-12-07T10:06:43 | 248,927,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | #!c:\users\munseohee\desktop\likelion_gachon_2020\django\myvenv\scripts\python.exe
# Author:
# Contact: [email protected]
# Copyright: This module has been placed in the public domain.
"""
man.py
======
This module provides a simple command line interface that uses the
man page writer to output from ReStructuredText source.
"""
import locale
try:
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
from docutils.writers import manpage
description = ("Generates plain unix manual documents. " + default_description)
publish_cmdline(writer=manpage.Writer(), description=description)
| [
"[email protected]"
] | |
3e499444b616bca4d9fddf21ce013e69466b7aaf | 462e53caefc202f1e48f7a3891b27dad6d4032f1 | /src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/_cosmos_db_management_client.py | 3545f0218265ae5196150878aee2d9b2a28f3050 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | bragi92/azure-cli-extensions | a910f14a0613f5611b08ed34ce8d67c1ad05013e | e9aebbcbd3df15fd874a32babc40ae1a0ba23c1f | refs/heads/k8s-extension/public | 2023-08-04T13:22:05.747918 | 2023-07-28T15:45:27 | 2023-07-28T15:45:27 | 205,455,084 | 0 | 0 | MIT | 2019-08-30T20:50:25 | 2019-08-30T20:50:25 | null | UTF-8 | Python | false | false | 18,163 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import CosmosDBManagementClientConfiguration
from .operations import (
CassandraClustersOperations,
CassandraDataCentersOperations,
CassandraResourcesOperations,
CollectionOperations,
CollectionPartitionOperations,
CollectionPartitionRegionOperations,
CollectionRegionOperations,
DataTransferJobsOperations,
DatabaseAccountRegionOperations,
DatabaseAccountsOperations,
DatabaseOperations,
GraphResourcesOperations,
GremlinResourcesOperations,
LocationsOperations,
MongoClustersOperations,
MongoDBResourcesOperations,
NotebookWorkspacesOperations,
Operations,
PartitionKeyRangeIdOperations,
PartitionKeyRangeIdRegionOperations,
PercentileOperations,
PercentileSourceTargetOperations,
PercentileTargetOperations,
PrivateEndpointConnectionsOperations,
PrivateLinkResourcesOperations,
RestorableDatabaseAccountsOperations,
RestorableGremlinDatabasesOperations,
RestorableGremlinGraphsOperations,
RestorableGremlinResourcesOperations,
RestorableMongodbCollectionsOperations,
RestorableMongodbDatabasesOperations,
RestorableMongodbResourcesOperations,
RestorableSqlContainersOperations,
RestorableSqlDatabasesOperations,
RestorableSqlResourcesOperations,
RestorableTableResourcesOperations,
RestorableTablesOperations,
ServiceOperations,
SqlResourcesOperations,
TableResourcesOperations,
)
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class CosmosDBManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
"""Azure Cosmos DB Database Service Resource Provider REST API.
:ivar database_accounts: DatabaseAccountsOperations operations
:vartype database_accounts: azure.mgmt.cosmosdb.aio.operations.DatabaseAccountsOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.cosmosdb.aio.operations.Operations
:ivar database: DatabaseOperations operations
:vartype database: azure.mgmt.cosmosdb.aio.operations.DatabaseOperations
:ivar collection: CollectionOperations operations
:vartype collection: azure.mgmt.cosmosdb.aio.operations.CollectionOperations
:ivar collection_region: CollectionRegionOperations operations
:vartype collection_region: azure.mgmt.cosmosdb.aio.operations.CollectionRegionOperations
:ivar database_account_region: DatabaseAccountRegionOperations operations
:vartype database_account_region:
azure.mgmt.cosmosdb.aio.operations.DatabaseAccountRegionOperations
:ivar percentile_source_target: PercentileSourceTargetOperations operations
:vartype percentile_source_target:
azure.mgmt.cosmosdb.aio.operations.PercentileSourceTargetOperations
:ivar percentile_target: PercentileTargetOperations operations
:vartype percentile_target: azure.mgmt.cosmosdb.aio.operations.PercentileTargetOperations
:ivar percentile: PercentileOperations operations
:vartype percentile: azure.mgmt.cosmosdb.aio.operations.PercentileOperations
:ivar collection_partition_region: CollectionPartitionRegionOperations operations
:vartype collection_partition_region:
azure.mgmt.cosmosdb.aio.operations.CollectionPartitionRegionOperations
:ivar collection_partition: CollectionPartitionOperations operations
:vartype collection_partition: azure.mgmt.cosmosdb.aio.operations.CollectionPartitionOperations
:ivar partition_key_range_id: PartitionKeyRangeIdOperations operations
:vartype partition_key_range_id:
azure.mgmt.cosmosdb.aio.operations.PartitionKeyRangeIdOperations
:ivar partition_key_range_id_region: PartitionKeyRangeIdRegionOperations operations
:vartype partition_key_range_id_region:
azure.mgmt.cosmosdb.aio.operations.PartitionKeyRangeIdRegionOperations
:ivar graph_resources: GraphResourcesOperations operations
:vartype graph_resources: azure.mgmt.cosmosdb.aio.operations.GraphResourcesOperations
:ivar sql_resources: SqlResourcesOperations operations
:vartype sql_resources: azure.mgmt.cosmosdb.aio.operations.SqlResourcesOperations
:ivar mongo_db_resources: MongoDBResourcesOperations operations
:vartype mongo_db_resources: azure.mgmt.cosmosdb.aio.operations.MongoDBResourcesOperations
:ivar table_resources: TableResourcesOperations operations
:vartype table_resources: azure.mgmt.cosmosdb.aio.operations.TableResourcesOperations
:ivar cassandra_resources: CassandraResourcesOperations operations
:vartype cassandra_resources: azure.mgmt.cosmosdb.aio.operations.CassandraResourcesOperations
:ivar gremlin_resources: GremlinResourcesOperations operations
:vartype gremlin_resources: azure.mgmt.cosmosdb.aio.operations.GremlinResourcesOperations
:ivar locations: LocationsOperations operations
:vartype locations: azure.mgmt.cosmosdb.aio.operations.LocationsOperations
:ivar data_transfer_jobs: DataTransferJobsOperations operations
:vartype data_transfer_jobs: azure.mgmt.cosmosdb.aio.operations.DataTransferJobsOperations
:ivar cassandra_clusters: CassandraClustersOperations operations
:vartype cassandra_clusters: azure.mgmt.cosmosdb.aio.operations.CassandraClustersOperations
:ivar cassandra_data_centers: CassandraDataCentersOperations operations
:vartype cassandra_data_centers:
azure.mgmt.cosmosdb.aio.operations.CassandraDataCentersOperations
:ivar mongo_clusters: MongoClustersOperations operations
:vartype mongo_clusters: azure.mgmt.cosmosdb.aio.operations.MongoClustersOperations
:ivar notebook_workspaces: NotebookWorkspacesOperations operations
:vartype notebook_workspaces: azure.mgmt.cosmosdb.aio.operations.NotebookWorkspacesOperations
:ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
:vartype private_endpoint_connections:
azure.mgmt.cosmosdb.aio.operations.PrivateEndpointConnectionsOperations
:ivar private_link_resources: PrivateLinkResourcesOperations operations
:vartype private_link_resources:
azure.mgmt.cosmosdb.aio.operations.PrivateLinkResourcesOperations
:ivar restorable_database_accounts: RestorableDatabaseAccountsOperations operations
:vartype restorable_database_accounts:
azure.mgmt.cosmosdb.aio.operations.RestorableDatabaseAccountsOperations
:ivar restorable_sql_databases: RestorableSqlDatabasesOperations operations
:vartype restorable_sql_databases:
azure.mgmt.cosmosdb.aio.operations.RestorableSqlDatabasesOperations
:ivar restorable_sql_containers: RestorableSqlContainersOperations operations
:vartype restorable_sql_containers:
azure.mgmt.cosmosdb.aio.operations.RestorableSqlContainersOperations
:ivar restorable_sql_resources: RestorableSqlResourcesOperations operations
:vartype restorable_sql_resources:
azure.mgmt.cosmosdb.aio.operations.RestorableSqlResourcesOperations
:ivar restorable_mongodb_databases: RestorableMongodbDatabasesOperations operations
:vartype restorable_mongodb_databases:
azure.mgmt.cosmosdb.aio.operations.RestorableMongodbDatabasesOperations
:ivar restorable_mongodb_collections: RestorableMongodbCollectionsOperations operations
:vartype restorable_mongodb_collections:
azure.mgmt.cosmosdb.aio.operations.RestorableMongodbCollectionsOperations
:ivar restorable_mongodb_resources: RestorableMongodbResourcesOperations operations
:vartype restorable_mongodb_resources:
azure.mgmt.cosmosdb.aio.operations.RestorableMongodbResourcesOperations
:ivar restorable_gremlin_databases: RestorableGremlinDatabasesOperations operations
:vartype restorable_gremlin_databases:
azure.mgmt.cosmosdb.aio.operations.RestorableGremlinDatabasesOperations
:ivar restorable_gremlin_graphs: RestorableGremlinGraphsOperations operations
:vartype restorable_gremlin_graphs:
azure.mgmt.cosmosdb.aio.operations.RestorableGremlinGraphsOperations
:ivar restorable_gremlin_resources: RestorableGremlinResourcesOperations operations
:vartype restorable_gremlin_resources:
azure.mgmt.cosmosdb.aio.operations.RestorableGremlinResourcesOperations
:ivar restorable_tables: RestorableTablesOperations operations
:vartype restorable_tables: azure.mgmt.cosmosdb.aio.operations.RestorableTablesOperations
:ivar restorable_table_resources: RestorableTableResourcesOperations operations
:vartype restorable_table_resources:
azure.mgmt.cosmosdb.aio.operations.RestorableTableResourcesOperations
:ivar service: ServiceOperations operations
:vartype service: azure.mgmt.cosmosdb.aio.operations.ServiceOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2023-03-01-preview". Note that overriding
this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = CosmosDBManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.database_accounts = DatabaseAccountsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.database = DatabaseOperations(self._client, self._config, self._serialize, self._deserialize)
self.collection = CollectionOperations(self._client, self._config, self._serialize, self._deserialize)
self.collection_region = CollectionRegionOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.database_account_region = DatabaseAccountRegionOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.percentile_source_target = PercentileSourceTargetOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.percentile_target = PercentileTargetOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.percentile = PercentileOperations(self._client, self._config, self._serialize, self._deserialize)
self.collection_partition_region = CollectionPartitionRegionOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.collection_partition = CollectionPartitionOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.partition_key_range_id = PartitionKeyRangeIdOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.partition_key_range_id_region = PartitionKeyRangeIdRegionOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.graph_resources = GraphResourcesOperations(self._client, self._config, self._serialize, self._deserialize)
self.sql_resources = SqlResourcesOperations(self._client, self._config, self._serialize, self._deserialize)
self.mongo_db_resources = MongoDBResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.table_resources = TableResourcesOperations(self._client, self._config, self._serialize, self._deserialize)
self.cassandra_resources = CassandraResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.gremlin_resources = GremlinResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.locations = LocationsOperations(self._client, self._config, self._serialize, self._deserialize)
self.data_transfer_jobs = DataTransferJobsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.cassandra_clusters = CassandraClustersOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.cassandra_data_centers = CassandraDataCentersOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.mongo_clusters = MongoClustersOperations(self._client, self._config, self._serialize, self._deserialize)
self.notebook_workspaces = NotebookWorkspacesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.private_link_resources = PrivateLinkResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_database_accounts = RestorableDatabaseAccountsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_sql_databases = RestorableSqlDatabasesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_sql_containers = RestorableSqlContainersOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_sql_resources = RestorableSqlResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_mongodb_databases = RestorableMongodbDatabasesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_mongodb_collections = RestorableMongodbCollectionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_mongodb_resources = RestorableMongodbResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_gremlin_databases = RestorableGremlinDatabasesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_gremlin_graphs = RestorableGremlinGraphsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_gremlin_resources = RestorableGremlinResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_tables = RestorableTablesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restorable_table_resources = RestorableTableResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "CosmosDBManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details: Any) -> None:
await self._client.__aexit__(*exc_details)
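# Example usage (an illustrative sketch, not part of the generated client): the
# async client is normally used as a context manager; the subscription ID below
# is a placeholder.
#
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
#
#   async def list_account_names():
#       async with CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#           async for account in client.database_accounts.list():
#               print(account.name)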
| [
"[email protected]"
] | |
8f06941c60c03810aab9bda1e07ab6e226fdb88d | 74d17bf3d5485ffa7fe795b633f461277b1a0e40 | /mall/utils/token_jwt.py | 9f6bdc1c94a1663630df923d329b870e42591943 | [] | no_license | py-king/ready | 6ddc7e8b830742bcdeb696791ec49ad9d30f2296 | e352aa335b07f6852f4c99e7fbd030f2eb8f16da | refs/heads/master | 2022-12-09T13:31:38.199448 | 2019-01-24T02:13:35 | 2019-01-24T02:13:35 | 167,284,151 | 2 | 0 | null | 2022-12-08T02:29:02 | 2019-01-24T01:52:28 | HTML | UTF-8 | Python | false | false | 359 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# author:caozy time:19-1-10
from rest_framework_jwt.settings import api_settings
def token_jwt(user):
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
payload = jwt_payload_handler(user)
token = jwt_encode_handler(payload)
return token
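# Example usage (illustrative only; the view context and credentials are hypothetical):
# given an authenticated Django user, token_jwt returns the signed JWT that
# rest_framework_jwt would issue for that user.
#
#   from django.contrib.auth import authenticate
#
#   user = authenticate(username="alice", password="secret")
#   if user is not None:
#       token = token_jwt(user)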
| [
"[email protected]"
] | |
77565a9d8c3f31e8f3d58e03818152374b4ed0b0 | ef821468b081ef2a0b81bf08596a2c81e1c1ef1a | /PythonWebBasics_Django/Django_Web_Basics/manage.py | 9e82bfc16fd0851d60bb007f7e74d85d2250139e | [] | no_license | Ivaylo-Atanasov93/The-Learning-Process | 71db22cd79f6d961b9852f140f4285ef7820dd80 | 354844e2c686335345f6a54b3af86b78541ed3f3 | refs/heads/master | 2023-03-30T20:59:34.304207 | 2021-03-29T15:23:05 | 2021-03-29T15:23:05 | 294,181,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 673 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Django_Web_Basics.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
ee216ef4dc793f6328f24c7d46838a6ff173f7bf | 0b80791593076c266c682226a001a06c4b02fcf8 | /backend/mobile_build_a_dev_16819/urls.py | eea5aa0e16aece39f5771d77abe0ac6f55e3d09d | [] | no_license | crowdbotics-apps/mobile-build-a-dev-16819 | 1b697601504f5a42cefc30fd6d764312869a210a | 80d6870f71371273eddddb7416d5672623d4510e | refs/heads/master | 2023-02-07T10:56:17.932363 | 2020-12-16T14:17:06 | 2020-12-16T14:17:06 | 322,003,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,959 | py | """mobile_build_a_dev_16819 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "mobile build A"
admin.site.site_title = "mobile build A Admin Portal"
admin.site.index_title = "mobile build A Admin"
# swagger
api_info = openapi.Info(
title="mobile build A API",
default_version="v1",
description="API documentation for mobile build A App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
| [
"[email protected]"
] | |
3e395ec76d78e4c609fdde7e8103b501babf72a7 | 2fba0a631bb70aaae6dc89bff09f13e728934605 | /privacy/migrations/0038_auto_20200602_1034.py | db62c8fff5c90f19cdb26e9b6407b2889ebb2669 | [] | no_license | murengera/eshoping-api | 4c5bcbeb7ac3ef12858e08f8a88d4f7b710b5c64 | 90acb0f8db519a38a1bd0976bd1f704f6d02f2dd | refs/heads/master | 2022-12-25T10:19:39.431427 | 2020-09-26T12:35:38 | 2020-09-26T12:35:38 | 286,399,741 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | # Generated by Django 3.0 on 2020-06-02 08:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('privacy', '0037_auto_20200602_1031'),
]
operations = [
migrations.AlterField(
model_name='privacypoliciesandtermsofuse',
name='language',
field=models.CharField(choices=[('rwandese', 'rwandese'), ('english', 'english')], max_length=30),
),
]
| [
"[email protected]"
] | |
8796bd002abd785667c8defcd90ef90f7d1870ed | b5402b40b69244380bc0d3f85ff65483d0505181 | /bot/event.py | b4768733a97c8525da2b9963c7e42ad67f84ca50 | [
"MIT"
] | permissive | RxJellyBot/Jelly-Bot | ea7b6bd100431736732f9f4cc739858ec148e3e2 | c7da1e91783dce3a2b71b955b3a22b68db9056cf | refs/heads/master | 2023-08-29T20:41:01.813945 | 2021-10-20T05:27:21 | 2021-10-20T05:27:21 | 189,347,226 | 5 | 1 | MIT | 2020-09-05T00:50:41 | 2019-05-30T04:47:48 | Python | UTF-8 | Python | false | false | 1,298 | py | """
Events for the bot to trigger after certain conditions are met.
Methods prefixed with ``signal_`` should be called when the corresponding event occurs.
Methods prefixed with ``on_`` will be executed once all of the specified events have occurred.
"""
from django.conf import settings
from JellyBot.systemconfig import System
from bot.user import perform_existence_check
from bot.system import record_boot_dt
from extutils.ddns import activate_ddns_update
from msghandle import HandlingFunctionBox
__all__ = ["signal_discord_ready", "signal_django_ready"]
_ready = {
"Discord": False,
"Django": False
}
def signal_django_ready():
    """Signal that the Django application is ready."""
    _ready["Django"] = True
    _check_all_ready()
def signal_discord_ready():
    """Signal that the Discord bot is ready."""
    _ready["Discord"] = True
    _check_all_ready()
def _check_all_ready():
if all(_ready.values()):
on_system_fully_ready()
def on_system_fully_ready():
"""Code to execute when the system is fully prepared (Discord bot and Django application ready)."""
HandlingFunctionBox.load()
record_boot_dt()
if settings.PRODUCTION:
perform_existence_check(set_name_to_cache=True)
activate_ddns_update(System.DDNSUpdateIntervalSeconds)
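# Illustrative wiring only -- a minimal sketch, not the project's actual hook-up;
# the class and client names below are hypothetical. The Django side calls
# signal_django_ready() once apps are loaded, the Discord side calls
# signal_discord_ready() once the bot connects, and on_system_fully_ready()
# then runs exactly once.
#
#   class BotConfig(AppConfig):          # Django side
#       def ready(self):
#           from bot.event import signal_django_ready
#           signal_django_ready()
#
#   @client.event                        # Discord side (discord.py-style client)
#   async def on_ready():
#       from bot.event import signal_discord_ready
#       signal_discord_ready()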
| [
"[email protected]"
] | |
e1c6b71f6753abc16634e2ceac2ea85cb7d57daf | 5754e080348df0fc7f1fffe46c6909edf681ee79 | /branch/dome2-12.py | 463148dadbe21d3912ba1bb5f43a3507a8d00409 | [] | no_license | biao111/learn_python | 57e373d62979096b94b9ea96664dec3b7d88dfce | 609a6a9634a37ecd1c59fa639fcca1eaa6472bfd | refs/heads/master | 2023-01-05T23:15:39.670105 | 2020-11-08T05:23:57 | 2020-11-08T05:23:57 | 310,991,572 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | #定义变量year,并接收“请输入正确的年份:”
year = int(input("Please enter a valid year: "))
# Leap year test: (1) divisible by 4 but not by 100, or (2) divisible by 400.
if (year % 4 == 0 and year % 100 != 0) or year % 400 == 0:
    print("{0} is a leap year".format(year))
else:
    print("{0} is not a leap year".format(year)) | [
"[email protected]"
] | |
332fdd820f0bc7ff950fcaeb827614a42af1283e | 6e932aa6ec9424ae0238c559112fdd0214c52be6 | /ffawp/ch03/6_excel_value_match_pattern.py | d1e792c640076832617e29f290e915655527f22c | [] | no_license | LingChenBill/python_first_introduce | d1c780dcd3653ef4cda39cc4a0c631a99071f088 | 32ff4a16fe10505fcb49e4762fc573f5f1c62167 | refs/heads/master | 2020-07-29T13:03:15.447728 | 2020-06-09T13:39:07 | 2020-06-09T13:39:07 | 209,813,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,922 | py | # Date:2020/5/24
# Author:Lingchen
# Mark: use basic Python to filter out the Excel rows whose customer name matches a specific pattern (names starting with an uppercase J)
# python 6_excel_value_match_pattern.py data/sales_2013.xlsx data/output/6_output.xlsx
import sys
import re
from datetime import date
from xlrd import open_workbook, xldate_as_tuple
from xlwt import Workbook
input_file = sys.argv[1]
output_file = sys.argv[2]
output_workbook = Workbook()
output_worksheet = output_workbook.add_sheet('jan_2013_output')
# The pattern to match
# The r prefix marks the quoted pattern as a raw string; the regular expression is ^J.*
pattern = re.compile(r'(?P<my_pattern>^J.*)')
# Index of the customer name column
customer_name_index = 1
with open_workbook(input_file) as workbook:
worksheet = workbook.sheet_by_name('january_2013')
data = []
# Handle the header row
header = worksheet.row_values(0)
data.append(header)
for row_index in range(1, worksheet.nrows):
row_list = []
# Keep only rows whose customer name matches the regex
if pattern.search(worksheet.cell_value(row_index, customer_name_index)):
for column_index in range(worksheet.ncols):
cell_value = worksheet.cell_value(row_index, column_index)
cell_type = worksheet.cell_type(row_index, column_index)
# Handle the purchase date column (cell type 3 is a date)
if cell_type == 3:
date_cell = xldate_as_tuple(cell_value, workbook.datemode)
# Format the date
date_cell = date(*date_cell[0:3]).strftime('%m/%d/%Y')
row_list.append(date_cell)
else:
row_list.append(cell_value)
if row_list:
data.append(row_list)
for list_index, output_list in enumerate(data):
for element_index, element in enumerate(output_list):
output_worksheet.write(list_index, element_index, element)
output_workbook.save(output_file)
| [
"[email protected]"
] | |
a8ac8c461732d4ded453bead82fc9acec29dbbba | ac5e52a3fc52dde58d208746cddabef2e378119e | /schedRUN/batchExps.py | 7db0515b331adb3b60252aaf47cb1feafd1af7df | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,656 | py | #!/usr/bin/env python
import expconfig as cfg
import model.SystemResourceGenerator as generator
import schedulability.schedulabilityRUN as mySched
# Plot axes: x = utilLimit, y = resDistr; each row written is "utilLimit resDistr success"
def saveFile(fileName, Data, resN, reqN, resW):
out_file = open(fileName,"w")
out_file.write("# utilLimit, resDistr, success\n")
for k1 in cfg.UL:
for k2 in cfg.RD:
out_file.write(str(k1)+" "+str(k2)+" "+str(Data[k1][k2][resN][reqN][resW])+"\n")
out_file.write("\n")
out_file.close()
def main():
#(self, periodDistr, utilDistr, resDistr, resWeight, resNumber, utilLimit, cpuLimit)
schedResultRUN = {}
surplusUtilRUN = {}
for utilLimit in cfg.UL:
schedResultRUN[utilLimit] = {}
surplusUtilRUN[utilLimit] = {}
for resDistr in cfg.RD:
schedResultRUN[utilLimit][resDistr] = {}
surplusUtilRUN[utilLimit][resDistr] = {}
for resNumb in cfg.RN:
schedResultRUN[utilLimit][resDistr][resNumb] = {}
surplusUtilRUN[utilLimit][resDistr][resNumb] = {}
for reqNumb in cfg.QN :
schedResultRUN[utilLimit][resDistr][resNumb][reqNumb] = {}
surplusUtilRUN[utilLimit][resDistr][resNumb][reqNumb] = {}
for resWeight in cfg.RW:
taskSetGenerator = generator.SystemResourcesGenerator(
cfg.NAMED_PERIODS['uni-moderate'],
cfg.NAMED_UTILIZATIONS['uni-medium'],
resDistr, resWeight, resNumb, reqNumb, utilLimit, cfg.cpuLimit)
averageSurplusRUN = []
counterRUN = 0
for i in range(0, cfg.NumExps):
taskSet = taskSetGenerator.generateTaskSetLinear()
initialUtil = sum([float(x.cost)/float(x.period) for x in taskSet])
mySchedRUN = mySched.SchedulabilityTestRUN(range(0, resNumb), taskSet)
if mySchedRUN.isSchedulable(cfg.cpuLimit) :
counterRUN += 1
averageSurplusRUN.append(100.0*(mySchedRUN.getFinalUtilization() - initialUtil)/initialUtil)
schedResultRUN[utilLimit][resDistr][resNumb][reqNumb][resWeight] = float(counterRUN)/float(cfg.NumExps)
surplusUtilRUN[utilLimit][resDistr][resNumb][reqNumb][resWeight] = sum(averageSurplusRUN)/float(max(len(averageSurplusRUN), 1))
for resN in cfg.RN:
for reqN in cfg.QN:
for resW in cfg.RW:
saveFile("/home/ricardo/litmus/experiment-scripts/output/RUNsched:"+str(resN)+":"+str(reqN)+":"+str(resW), schedResultRUN, resN, reqN, resW)
saveFile("/home/ricardo/litmus/experiment-scripts/output/RUNsurpl:"+str(resN)+":"+str(reqN)+":"+str(resW), surplusUtilRUN, resN, reqN, resW)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
dad141c8211da3eff2f75cc84917eb3fd1c34fe3 | f4434c85e3814b6347f8f8099c081ed4af5678a5 | /sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_12_01/operations/_proximity_placement_groups_operations.py | 3596a3d4ff636882792fbbb9493826f69ebc878f | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | yunhaoling/azure-sdk-for-python | 5da12a174a37672ac6ed8e3c1f863cb77010a506 | c4eb0ca1aadb76ad892114230473034830116362 | refs/heads/master | 2022-06-11T01:17:39.636461 | 2020-12-08T17:42:08 | 2020-12-08T17:42:08 | 177,675,796 | 1 | 0 | MIT | 2020-03-31T20:35:17 | 2019-03-25T22:43:40 | Python | UTF-8 | Python | false | false | 22,039 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ProximityPlacementGroupsOperations(object):
"""ProximityPlacementGroupsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def create_or_update(
self,
resource_group_name, # type: str
proximity_placement_group_name, # type: str
parameters, # type: "_models.ProximityPlacementGroup"
**kwargs # type: Any
):
# type: (...) -> "_models.ProximityPlacementGroup"
"""Create or update a proximity placement group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param proximity_placement_group_name: The name of the proximity placement group.
:type proximity_placement_group_name: str
:param parameters: Parameters supplied to the Create Proximity Placement Group operation.
:type parameters: ~azure.mgmt.compute.v2019_12_01.models.ProximityPlacementGroup
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ProximityPlacementGroup, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2019_12_01.models.ProximityPlacementGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProximityPlacementGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'proximityPlacementGroupName': self._serialize.url("proximity_placement_group_name", proximity_placement_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ProximityPlacementGroup')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ProximityPlacementGroup', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ProximityPlacementGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/proximityPlacementGroups/{proximityPlacementGroupName}'} # type: ignore
def update(
self,
resource_group_name, # type: str
proximity_placement_group_name, # type: str
parameters, # type: "_models.UpdateResource"
**kwargs # type: Any
):
# type: (...) -> "_models.ProximityPlacementGroup"
"""Update a proximity placement group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param proximity_placement_group_name: The name of the proximity placement group.
:type proximity_placement_group_name: str
:param parameters: Parameters supplied to the Update Proximity Placement Group operation.
:type parameters: ~azure.mgmt.compute.v2019_12_01.models.UpdateResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ProximityPlacementGroup, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2019_12_01.models.ProximityPlacementGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProximityPlacementGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'proximityPlacementGroupName': self._serialize.url("proximity_placement_group_name", proximity_placement_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'UpdateResource')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProximityPlacementGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/proximityPlacementGroups/{proximityPlacementGroupName}'} # type: ignore
def delete(
self,
resource_group_name, # type: str
proximity_placement_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Delete a proximity placement group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param proximity_placement_group_name: The name of the proximity placement group.
:type proximity_placement_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'proximityPlacementGroupName': self._serialize.url("proximity_placement_group_name", proximity_placement_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/proximityPlacementGroups/{proximityPlacementGroupName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
proximity_placement_group_name, # type: str
include_colocation_status=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.ProximityPlacementGroup"
"""Retrieves information about a proximity placement group .
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param proximity_placement_group_name: The name of the proximity placement group.
:type proximity_placement_group_name: str
:param include_colocation_status: includeColocationStatus=true enables fetching the colocation
status of all the resources in the proximity placement group.
:type include_colocation_status: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ProximityPlacementGroup, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2019_12_01.models.ProximityPlacementGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProximityPlacementGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'proximityPlacementGroupName': self._serialize.url("proximity_placement_group_name", proximity_placement_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if include_colocation_status is not None:
query_parameters['includeColocationStatus'] = self._serialize.query("include_colocation_status", include_colocation_status, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProximityPlacementGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/proximityPlacementGroups/{proximityPlacementGroupName}'} # type: ignore
def list_by_subscription(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ProximityPlacementGroupListResult"]
"""Lists all proximity placement groups in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProximityPlacementGroupListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_12_01.models.ProximityPlacementGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProximityPlacementGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ProximityPlacementGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/proximityPlacementGroups'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ProximityPlacementGroupListResult"]
"""Lists all proximity placement groups in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProximityPlacementGroupListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_12_01.models.ProximityPlacementGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProximityPlacementGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ProximityPlacementGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/proximityPlacementGroups'} # type: ignore
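# Example usage (an illustrative sketch, not part of the generated module): these
# operation classes are reached through a management client rather than being
# instantiated directly; the subscription ID is a placeholder.
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.compute import ComputeManagementClient
#
#   client = ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>")
#   for group in client.proximity_placement_groups.list_by_subscription():
#       print(group.name, group.location)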
| [
"[email protected]"
] | |
45a2dc394fecfadccc2ed49b79f85b17928b0fb6 | aa369073fab4f8e13ac27a714fe0d975a5a4a9ed | /mathematics/math/math_isinf.py | 38f0c57b3c57471a56b8a47acecc32dda6634bd6 | [] | no_license | ramsayleung/python3-module-of-week | 4076599a8b1d8aa5794de5d73e2083555abe9f0c | 54266c7e62025c3816a6987191c40f3bc0fdd97c | refs/heads/master | 2021-06-18T09:07:30.256614 | 2017-06-25T10:14:54 | 2017-06-25T10:14:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | import math
print('{:^3} {:6} {:6} {:6}'.format('e', 'x', 'x**2', 'isinf'))
print('{:-^3} {:-^6} {:-^6} {:-^6}'.format('', '', '', ''))
for e in range(0, 201, 20):
x = 10.0**e
y = x * x
print('{:3d} {:<6g} {:<6g} {!s:6}'.format(e, x, y, math.isinf(y)))
| [
"[email protected]"
] | |
934d2b157de713c6d466541d5d13b2e7e9e822df | 9aa488e813b6986d463b35a394a997727a2b26e2 | /NimGame.py | 710f59aac2f9cd39102407a9ac5574b1c545f0ad | [] | no_license | s781825175/learnpython | 5fb2304e3183acb0faa068470b416620115c07dc | 458c84693d8a27e13dab5cb0dc649f894dbba621 | refs/heads/master | 2020-12-30T10:49:13.531546 | 2017-12-14T10:01:32 | 2017-12-14T10:01:32 | 98,855,316 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | class Solution(object):
def canWinNim(self, n):
"""
:type n: int
:rtype: bool
"""
return bool(n%4)
a=Solution()
n=4
b=a.canWinNim(n)
print(b)
| [
"[email protected]"
] | |
35391ce5c447621ce5679aed9db59627ffd82563 | a4681043cb56a9ab45be32a62fa9700b391f087f | /16-Hypothesis_Testing_with_SciPy/P-Values.py | f449cb0cb34998455568dd581dbc9c0f9e642373 | [] | no_license | MarceloDL-A/Python | b16b221ae4355b6323092d069bf83d1d142b9975 | c091446ae0089f03ffbdc47b3a6901f4fa2a25fb | refs/heads/main | 2023-01-01T02:29:31.591861 | 2020-10-27T19:04:11 | 2020-10-27T19:04:11 | 301,565,957 | 0 | 0 | null | 2020-10-27T19:04:12 | 2020-10-05T23:41:30 | Python | UTF-8 | Python | false | false | 1,894 | py | """
A p-value of 0.05 means that if the null hypothesis is true, there is a 5% chance that an observed sample statistic could have occurred due to random sampling error. For example, in comparing two sample means, a p-value of 0.05 indicates there is a 5% chance that the observed difference in sample means occurred by random chance, even though the population means are equal.
Before conducting a hypothesis test, we determine the necessary threshold we would need before concluding that the results are significant. A higher threshold is more likely to give a false positive so if we want to be very sure that the result is not due to just chance, we will select a very small threshold.
It is important that we choose the significance level before we perform our statistical hypothesis tests to yield a p-value. If we wait until after we see the results, we might pick our threshold such that we get the result we want to see. For instance, if we're trying to publish our results, we might set a significance level that makes our results seem statistically significant. Choosing our significance level in advance helps keep us honest.
Generally, we want a p-value of less than 0.05, meaning that there is less than a 5% chance that our results are due to random chance.
"""
"""
Fill in the body of the given function reject_null_hypothesis to return True if the p-value is small enough to reject the null hypothesis (i.e., it's less than 0.05), and return False otherwise.
"""
def reject_null_hypothesis(p_value):
"""
Returns the truthiness of whether the null hypothesis can be rejected
Takes a p-value as its input and assumes p <= 0.05 is significant
"""
if p_value < 0.05:
return True
else:
return False
hypothesis_tests = [0.1, 0.009, 0.051, 0.012, 0.37, 0.6, 0.11, 0.025, 0.0499, 0.0001]
for p_value in hypothesis_tests:
reject_null_hypothesis(p_value)
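# Illustrative only (not part of the original exercise): printing the decision
# alongside each p-value makes the loop's effect visible. True means the null
# hypothesis is rejected at the 0.05 threshold, which holds for five of the ten
# values above (0.009, 0.012, 0.025, 0.0499 and 0.0001).
for p_value in hypothesis_tests:
    print(p_value, reject_null_hypothesis(p_value))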
| [
"[email protected]"
] | |
4e9e704c291b63d48880b728ff72d7853655dd19 | 0381663735f6187eaba2a080972c696fef6a122c | /tests/test_transforms.py | aa34fda2fe829d05a840aac3a1b185df62f77444 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ankitshah009/CovidPrognosis | 3cfe8740511ff6672d3d27b547d83ba83eb6eb48 | febabc43a56a1c55e5237513f85f592f578910ea | refs/heads/master | 2023-02-28T08:14:17.475669 | 2021-02-08T07:50:53 | 2021-02-08T07:50:53 | 330,040,422 | 0 | 0 | MIT | 2021-02-08T07:50:54 | 2021-01-15T22:57:31 | null | UTF-8 | Python | false | false | 5,321 | py | """
Copyright (c) Facebook, Inc. and its affiliates.
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
"""
import covidprognosis.data.transforms as cpt
import numpy as np
import pytest
import torch
import torchvision.transforms as tvt
from scipy.ndimage import gaussian_filter
from .conftest import create_input
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_compose(shape):
sample = create_input(shape)
transform = cpt.Compose(
[tvt.RandomHorizontalFlip(), tvt.ToTensor(), cpt.RandomGaussianBlur()]
)
sample = transform(sample)
assert sample["image"] is not None
@pytest.mark.parametrize("shape, label_idx", [[[32, 32, 3], 0], [[45, 16, 3], 5]])
def test_nan_to_int(shape, label_idx):
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.NanToInt(5)])
sample["labels"][label_idx] = np.nan
sample = transform(sample)
assert sample["labels"][label_idx] == 5
@pytest.mark.parametrize(
"shape, label_idx, start_label, end_label",
[[[32, 32, 3], 2, -1, 0], [[45, 16, 3], 10, 1, 0]],
)
def test_remap_label(shape, label_idx, start_label, end_label):
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.RemapLabel(start_label, end_label)])
sample["labels"][label_idx] = start_label
sample = transform(sample)
assert sample["labels"][label_idx] == end_label
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_histnorm(shape):
"""Test this to guard against an implementation change."""
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.HistogramNormalize()])
image = np.transpose(
torch.tensor(np.array(sample["image"]), dtype=torch.float).numpy(), (2, 0, 1)
)
# get image histogram
image_histogram, bins = np.histogram(
image.flatten(), transform.transforms[1].number_bins, density=True
)
cdf = image_histogram.cumsum() # cumulative distribution function
cdf = 255 * cdf / cdf[-1] # normalize
# use linear interpolation of cdf to find new pixel values
image_equalized = np.interp(image.flatten(), bins[:-1], cdf)
image_equalized.reshape(image.shape)
image = torch.tensor(image_equalized.reshape(image.shape)).to(torch.float)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_rand_gauss_blur(shape):
"""Test this to guard against an implementation change."""
seed = 123
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.RandomGaussianBlur(p=1)])
# run the custom blur
np.random.seed(seed)
image = tvt.functional.to_tensor(sample["image"]) * 1
sigma = np.random.uniform(
transform.transforms[1].sigma_range[0], transform.transforms[1].sigma_range[1]
)
image = torch.tensor(gaussian_filter(image.numpy(), sigma), dtype=image.dtype,)
# transform blur
transform = cpt.Compose(
[tvt.ToTensor(), cpt.RandomGaussianBlur(p=1, sigma_range=(sigma, sigma))]
)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
# retest for 0 probability
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.RandomGaussianBlur(p=-0.1)])
# run the custom blur
image = tvt.functional.to_tensor(sample["image"]) * 1
# transform blur
sample = transform(sample)
assert torch.allclose(sample["image"], image)
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_add_noise(shape):
"""Test this to guard against an implementation change."""
seed = 456
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.AddGaussianNoise(p=1)])
# run the custom noise
np.random.seed(seed)
image = tvt.functional.to_tensor(sample["image"]) * 1
np.random.uniform()
snr_level = np.random.uniform(
low=transform.transforms[1].snr_range[0],
high=transform.transforms[1].snr_range[1],
)
signal_level = np.mean(image.numpy())
image = image + (signal_level / snr_level) * torch.tensor(
np.random.normal(size=tuple(image.shape)), dtype=image.dtype,
)
# transform blur
np.random.seed(seed)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
# retest for 0 probability
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.AddGaussianNoise(p=-0.1)])
# run the custom blur
image = tvt.functional.to_tensor(sample["image"]) * 1
# transform blur
sample = transform(sample)
assert torch.allclose(sample["image"], image)
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_tensor_to_rgb(shape):
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.TensorToRGB()])
image = tvt.functional.to_tensor(sample["image"]) * 1
expands = list()
for i in range(image.ndim):
if i == 0:
expands.append(3)
else:
expands.append(-1)
image = image.expand(*expands)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
| [
"[email protected]"
] | |
ad3872fe9215e5b4563df49a87e54b6899c3d9a4 | 316b8375a7ef8095f09973d13f5a49bc7fbe7580 | /leetcode/1319.py | f921bc8cfc4b6c03132ff116954b9e8c0afdc9b9 | [] | no_license | zhaolijian/suanfa | 9a8d23fbca01d994f7eef24631783c4b7ed25683 | 4f3b25f360f30c0e604ba4dc4d5774ccb5f25b32 | refs/heads/master | 2023-06-08T17:12:41.522937 | 2021-06-27T08:13:16 | 2021-06-27T08:13:16 | 313,269,459 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,443 | py | # 用以太网线缆将 n 台计算机连接成一个网络,计算机的编号从 0 到 n-1。线缆用 connections 表示,其中 connections[i] = [a, b] 连接了计算机 a 和 b。
# Any computer in the network can reach any other computer in the same network, directly or indirectly.
# Given the initial wiring connections, you may unplug a cable between any two directly connected computers and use it to connect a pair of computers that are not directly connected.
# Compute and return the minimum number of such moves needed to make all computers connected. If it is impossible, return -1.
# If the number of cables < n-1, return -1.
# Otherwise there are enough cables, and the answer is (number of connected components - 1).
class Solution:
def makeConnected(self, n: int, connections) -> int:
def find(node):
if parent[node] != node:
parent[node] = find(parent[node])
return parent[node]
def union(node1, node2):
nonlocal res
root_1, root_2 = find(node1), find(node2)
# Different roots mean the two nodes are not yet in the same connected component.
if root_1 != root_2:
parent[find(node2)] = find(node1)
res -= 1
if len(connections) < n - 1:
return -1
parent = [i for i in range(n)]
# Initialize the number of connected components to n (each node starts on its own).
res = n
for first, second in connections:
union(first, second)
return res - 1
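# Illustrative check (not part of the submitted solution), using LeetCode example 1:
# three cables connect {0, 1, 2}, so moving one of them to computer 3 connects everything.
if __name__ == "__main__":
    print(Solution().makeConnected(4, [[0, 1], [0, 2], [1, 2]]))  # expected output: 1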
| [
"[email protected]"
] | |
1f48b579b46935fd936d8a0e11a65d6de57091ac | 6c492996b452423ff3c02ae2bda35c806b5e2beb | /ALDS1_3_C.py | 8c03c6d04d4818cf18920a385da7b33829d1b0aa | [] | no_license | TakuroKato/AOJ | 4764820aa0fc523d1f2719d968ab9a30069cdef7 | cdcf173eca3079c89041967121f746b200d39ea7 | refs/heads/master | 2021-05-09T17:34:24.953074 | 2018-01-27T07:09:04 | 2018-01-27T07:09:04 | 119,141,600 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | # -*- coding:utf-8 -*-
n = int(input())
arr = []
for i in range(n):
c = input()
try:
tmp = c.split()
com = str(tmp[0])
num = int(tmp[1])
except:
com = str(c)
num = -1
if num != -1:
if com == 'insert':
arr.insert(0,num)
if com == 'delete':
ind = arr.index(num)
arr.pop(ind)
else:
if com == 'deleteFirst':
arr.pop(0)
if com == 'deleteLast':
arr.pop(-1)
if len(arr) != 1:
for i in range(len(arr)-1):
print(arr[i], end = ' ')
print(arr[-1])
else:
print(arr[0])
| [
"[email protected]"
] | |
9ba9418423e0772feb3012850b6a1961edec013b | 181af10fcf40b824fe92d3b8f72fd15d6d1490c2 | /Contests/201-300/week 239/1851. Minimum Interval to Include Each Query/Minimum Interval to Include Each Query.py | 11dc19b8a31558f91781f8fb8511f15ad9f7d71f | [] | no_license | wangyendt/LeetCode | 402c59a0b7b7f5b3a672231ea5dad8056ade36af | 4a3ba15284c45b2d8bf38306c8c8526ae174615c | refs/heads/master | 2023-08-10T06:27:54.995152 | 2023-08-10T02:22:27 | 2023-08-10T02:22:27 | 176,651,399 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 797 | py | #!/usr/bin/env python
# -*- coding:utf-8 _*-
"""
@author: wangye(Wayne)
@license: Apache Licence
@file: Minimum Interval to Include Each Query.py
@time: 2021/05/02
@contact: [email protected]
@site:
@software: PyCharm
# code is far away from bugs.
"""
from typing import *
import heapq
class Solution:
def minInterval(self, A: List[List[int]], queries: List[int]) -> List[int]:
A = sorted(A)[::-1]
h = []
res = {}
for q in sorted(queries):
while A and A[-1][0] <= q:
i, j = A.pop()
if j >= q:
heapq.heappush(h, [j - i + 1, j])
while h and h[0][1] < q:
heapq.heappop(h)
res[q] = h[0][0] if h else -1
return [res[q] for q in queries]
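# Illustrative check (not part of the submitted solution): queries are answered in
# ascending order while a min-heap keyed by interval size holds the intervals that
# have started; entries whose right endpoint falls before the query are discarded.
if __name__ == '__main__':
    print(Solution().minInterval([[1, 4], [2, 4], [3, 6], [4, 4]], [2, 3, 4, 5]))  # expected: [3, 3, 1, 4]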
| [
"[email protected]"
] | |
842deaed315b48e3b07793d16beefeae9e057192 | aa458694658fd621e9f8fb5bc131a796b541a6b1 | /experiments/stripe82/tractor_render.py | 876960eb8b759f7310ed727a8573ae13f30681df | [] | no_license | PinkDiamond1/DESI-MCMC | 7e6681e605b039d3584ea93aa8ecc35aa3e51344 | 54436db88e97eb0ffa571bff84d1cae2c4b44887 | refs/heads/master | 2023-03-17T15:21:51.431764 | 2016-04-27T13:16:38 | 2016-04-27T13:16:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,589 | py | import os
import logging
import numpy as np
import pylab as plt
import fitsio
import math
from astrometry.util.file import *
from astrometry.util.plotutils import ArcsinhNormalize
from astrometry.util.starutil_numpy import *
from astrometry.sdss.fields import *
from astrometry.sdss import DR9
from tractor import *
from tractor import sdss as st
def tractor_render_patch(run, camcol, field, roi, radec, celeste_src=None, bandname='r'):
""" ROI = (x0, x1, y0, y1), zero indexed pixel coordinates """
# set up tractor
if not os.path.exists('sdss_data'):
os.makedirs('sdss_data')
sdss = DR9(basedir='sdss_data')
getim = st.get_tractor_image_dr9
getsrc = st.get_tractor_sources_dr9
#imkw = dict(zrange=[-3,100], sdss=sdss)
# get images
tim,tinf = getim(run, camcol, field, bandname, curl=True, roi=roi, sdss=sdss, psf='kl-pix') #, **imkw)
tim.zr = tinf['zr']
# get corresponding sources
#sources = getsrc(run, camcol, field, bandname, bands=[bandname], curl=True, roi=roi)
sources = getsrc(run, camcol, field, radecrad=(radec[0], radec[1], .001))
if radec is not None and len(sources) > 1:
locs = np.array([ [t.getPosition().ra, t.getPosition().dec] for t in sources])
dists = np.sum((locs - radec)**2, axis=1)
ts = sources[dists.argmin()]
else:
ts = sources[0]
print "tractor source: ", ts
# set the first source to have the same parameters as dustin's
if celeste_src is not None:
print "setting source parameters"
ts_pos = ts.getPosition()
ts_pos.ra, ts_pos.dec = celeste_src.params.u
from CelestePy.util.data import nanomaggies2mags
band_mag = nanomaggies2mags(celeste_src.params.flux_dict[bandname])
ts_bright = ts.getBrightness()
ts_bright.setMag(bandname, band_mag)
import tractor.galaxy as tg
if type(ts) == tg.ExpGalaxy or type(ts) == tg.DevGalaxy:
print "setting exp or dev galaxy params (not composite)"
ts.getShape().re = celeste_src.params.sigma
ts.getShape().ab = celeste_src.params.rho
ts.getShape().phi = celeste_src.params.phi
elif type(ts) == tg.CompositeGalaxy:
ts.shapeExp.re = celeste_src.params.sigma
ts.shapeExp.ab = celeste_src.params.rho
ts.shapeExp.phi = celeste_src.params.phi
ts.shapeDev.re = celeste_src.params.sigma
ts.shapeDev.ab = celeste_src.params.rho
ts.shapeDev.phi = celeste_src.params.phi
# create a tractor object, get a model image
print "tractor source plotted:", ts
tractor = Tractor(images = [tim], catalog=[ts])
mod = tractor.getModelImage(tim, srcs=[ts])
chi = tractor.getChiImage(imgi=0, srcs=[ts])
return mod, tim
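# Example call (an illustrative sketch): the run/camcol/field and ROI values come
# from the demo-mode settings in the commented-out main() below; supply the RA/Dec
# (in degrees) of the source of interest, and expect the SDSS frames to be
# downloaded into ./sdss_data on first use.
#
#   mod, tim = tractor_render_patch(1752, 3, 164, roi=(100, 600, 100, 600),
#                                   radec=(ra, dec), bandname='r')
#   plt.imshow(mod, origin='lower', interpolation='nearest')
#   plt.savefig('tractor-model.png')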
#def save(idstr, tractor, nlscale=1., debug=False, plotAll=False, imgi=0,
# chilo=-10., chihi=10., roi=None):
# #print "Index: ", imgi
# mod = tractor.getModelImage(imgi)
# chi = tractor.getChiImage(imgi=imgi)
# synthfn = 'synth-%s.fits' % idstr
# print 'Writing synthetic image to', synthfn
# fitsio.write(synthfn, mod, clobber=True)
#
# pfn = 'tractor-%s.pickle' % idstr
# print 'Saving state to', pfn
# pickle_to_file(tractor, pfn)
#
# plt.clf()
# plt.hist(chi.ravel(), range=(-10,10), bins=100)
# plt.savefig('chi2.png')
#
# timg = tractor.getImage(imgi)
# data = timg.getImage()
# print 'Mod type:', mod.dtype
# print 'Chi type:', chi.dtype
# print 'Data type:', data.dtype
# zr = timg.zr
# print 'zr', zr
# # Set up nonlinear mapping based on the statistics of the data image.
# #sigma = np.median(timg.getInvError())
# #print 'sigma', sigma
# ima = dict(interpolation='nearest', origin='lower')
# if nlscale == 0.:
# ima.update(vmin=zr[0], vmax=zr[1])
# else:
# print data.shape
# q1,q2,q3 = np.percentile(data.ravel(), [25, 50, 75])
# print 'Data quartiles:', q1, q2, q3
# ima.update(norm = ArcsinhNormalize(mean=q2, std=(1./nlscale) * (q3-q1)/2.,
# vmin=zr[0], vmax=zr[1]))
#
# if roi is not None:
# ima.update(extent=roi)
#
# imchi = ima.copy()
# if nlscale == 0.:
# imchi.update(vmin=chilo, vmax=chihi, norm=None)
# else:
# imchi.update(norm = ArcsinhNormalize(mean=0., std=1./nlscale, vmin=chilo, vmax=chihi))
#
# imdiff = ima.copy()
# dzr = (zr[1] - zr[0])/2.
# if nlscale == 0.:
# imdiff.update(vmin=-dzr, vmax=+dzr, norm=None)
# else:
# imdiff.update(norm = ArcsinhNormalize(mean=0., std=1./nlscale, vmin=-dzr, vmax=dzr))
#
# if debug:
# sources = tractor.getCatalog()
# wcs = timg.getWcs()
# allobjx = []
# allobjy = []
# allobjc = []
# pointx = []
# pointy = []
# xplotx = []
# xploty = []
#
# for obj in sources:
# if (isinstance(obj,PointSource)):
# xt,yt = wcs.positionToPixel(obj.getPosition(), obj)
# pointx.append(xt)
# pointy.append(yt)
# continue
# print type(obj)
# shapes = []
# attrType = []
# if (isinstance(obj,st.CompositeGalaxy)):
# for attr in 'shapeExp', 'shapeDev':
# shapes.append(getattr(obj, attr))
# attrType.append(attr)
# else:
# shapes.append(getattr(obj,'shape'))
# attrType.append(' ')
# x0,y0 = wcs.positionToPixel(obj.getPosition(), obj)
#
# cd = timg.getWcs().cdAtPixel(x0,y0)
# print "CD",cd
# for i,shape in enumerate(shapes):
# xplotx.append(x0)
# xploty.append(y0)
# T=np.linalg.inv(shape.getTensor(cd))
# print "Inverted tensor:",T
# print obj.getPosition()
# print i
#
# x,y = [],[]
# for theta in np.linspace(0,2*np.pi,100):
# ux = np.cos(theta)
# uy = np.sin(theta)
# dx,dy = np.dot(T,np.array([ux,uy]))
# x.append(x0+dx)
# y.append(y0+dy)
# allobjx.append(x)
# allobjy.append(y)
# if (attrType[i] == 'shapeExp'):
# allobjc.append('b')
# elif attrType[i] == 'shapeDev':
# allobjc.append('g')
# else:
# allobjc.append('r')
#
# def savepng(pre, img, title=None, **kwargs):
# fn = '%s-%s.png' % (pre, idstr)
# print 'Saving', fn
# plt.clf()
# plt.imshow(img, **kwargs)
# ax = plt.axis()
# if debug:
# print len(xplotx),len(allobjx)
# for i,(objx,objy,objc) in enumerate(zip(allobjx,allobjy,allobjc)):
# plt.plot(objx,objy,'-',c=objc)
# tempx = []
# tempx.append(xplotx[i])
# tempx.append(objx[0])
# tempy = []
# tempy.append(xploty[i])
# tempy.append(objy[0])
# plt.plot(tempx,tempy,'-',c='purple')
# plt.plot(pointx,pointy,'y.')
# plt.plot(xplotx,xploty,'xg')
# plt.axis(ax)
# if title is not None:
# plt.title(title)
# plt.colorbar()
# plt.gray()
# plt.savefig(fn)
#
# savepng('data', data, title='Data ' + timg.name, **ima)
# savepng('model', mod, title='Model ' + timg.name, **ima)
# savepng('diff', data - mod, title='Data - Model, ' + timg.name, **imdiff)
# savepng('chi', chi, title='Chi ' + timg.name, **imchi)
# if plotAll:
# debug = False
# for i,src in enumerate(tractor.getCatalog()):
# savepng('data-s%i'%(i+1),data - sky, title='Data '+timg.name,**ima)
# modelimg = tractor.getModelImage(timg, srcs=[src])
# savepng('model-s%i'%(i+1), modelimg - sky, title='Model-s%i'%(i+1),**ima)
# savepng('diff-s%i'%(i+1), data - modelimg, title='Model-s%i'%(i+1),**imdiff)
# savepng('chi-s%i'%(i+1),tractor.getChiImage(imgi,srcs=[src]),title='Chi',**imchi)
#
#def main():
# from optparse import OptionParser
# import sys
#
# # Otherwise plotting code can raise floating-point errors
# np.seterr(under='print')
#
# tune = []
# def store_value (option, opt, value, parser):
# if opt == '--ntune':
# tune.append(('n',value))
# elif opt == '--itune':
# tune.append(('i',value))
#
# parser = OptionParser(usage=('%prog [options] [-r <run> -c <camcol> -f <field> -b <band>]'))
# parser.add_option('-r', '--run', dest='run', type='int')
# parser.add_option('-c', '--camcol', dest='camcol', type='int')
# parser.add_option('-f', '--field', dest='field', type='int')
# parser.add_option('-b', '--band', dest='band', help='SDSS Band (u, g, r, i, z)')
# parser.add_option('--dg', action='store_true', default=False,
# help='Use double-Gaussian PSF model (default is Gaussian mixture approximation of pixelized model')
# parser.add_option('-R', '--radec', dest='radec', nargs=2, type=str, help='RA,Dec center: float degrees or hh:mm:ss +-dd:mm:ss')
# parser.add_option('-s', '--size', dest='pixsize', type=int, help='Pixel size when using RA,Dec center option')
# parser.add_option('--drfields', dest='drfields', help='FITS table of SDSS fields: default dr%ifields.fits, etc')
# parser.add_option('--curl', dest='curl', action='store_true', default=False, help='Use "curl", not "wget", to download files')
# parser.add_option('--ntune', action='callback', callback=store_value, type=int, help='Improve synthetic image by locally optimizing likelihood for nsteps iterations')
# parser.add_option('--itune', action='callback', callback=store_value, type=int, nargs=2, help='Optimizes each source individually')
# parser.add_option('--roi', dest='roi', type=int, nargs=4, help='Select an x0,x1,y0,y1 subset of the image')
# parser.add_option('--prefix', dest='prefix', help='Set output filename prefix; default is the SDSS RRRRRR-BC-FFFF string (run, band, camcol, field)')
# parser.add_option('-v', '--verbose', dest='verbose', action='count', default=0,
# help='Make more verbose')
# parser.add_option('-d','--debug',dest='debug',action='store_true',default=False,help="Trigger debug images")
# parser.add_option('--plotAll',dest='plotAll',action='store_true',default=False,help="Makes a plot for each source")
# parser.add_option('--no-arcsinh', dest='noarcsinh', action='store_true', help='Do not arcsinh-stretch plots')
# parser.add_option('--lbfgsb', dest='lbfgsb', action='store_true', default=False,
# help='Use L-BFGS-B optimization method')
# parser.add_option('--scale', dest='scale', type=int, help='Scale images down by this factor')
# parser.add_option('--unzip', dest='unzip', help='Save unzipped frame files in this directory')
# opt,args = parser.parse_args()
#
# if opt.verbose == 0:
# lvl = logging.INFO
# else:
# lvl = logging.DEBUG
# logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout)
#
# run = opt.run
# field = opt.field
# camcol = opt.camcol
# bands = []
#
# if run is None and field is None and camcol is None and opt.band is None:
# # Demo mode...
# ## FIXME -- find a nice field here
# run = 1752
# camcol = 3
# field = 164
# opt.band = 'r'
# opt.curl = True
# tune.extend([('i',[1,1]), ('n',1)])
# opt.roi = [100,600,100,600]
# print
# print 'Demo mode: grabbing Run/Camcol/Field/Band %i/%i/%i/%s' % (run, camcol, field, opt.band)
# print 'Using SDSS DR9 data'
# print
#
# if opt.band is None:
# parser.print_help()
# print
# print 'Must supply band (-b [u | g |r |i | z])'
# print
# sys.exit(-1)
# for char in opt.band:
# bands.append(char)
# if not char in ['u','g','r','i','z']:
# parser.print_help()
# print
# print 'Must supply band (u/g/r/i/z)'
# print
# sys.exit(-1)
# rerun = 0
# usercf = (run is not None and field is not None and camcol is not None)
# userd = (opt.radec is not None and opt.pixsize is not None)
# if not (usercf or userd) or len(bands)==0:
# parser.print_help()
# print 'Must supply (--run, --camcol, --field) or (--radec, --size), and --band'
# sys.exit(-1)
# bandname = bands[0] #initial position and shape
# prefix = opt.prefix
# if userd:
# ra = opt.radec[0]
# dec = opt.radec[1]
# size = opt.pixsize
# if prefix is None:
# if usercf:
# prefix = '%06i-%i-%04i' % (run,camcol, field)
# else:
# prefix = '%s-%s' % (ra, dec)
#
# if userd:
# # convert h:m:s to deg
# try:
# ra = float(ra)
# except:
# ra = ra.replace(':',' ')
# ra = hmsstring2ra(ra)
# try:
# dec = float(dec)
# except:
# dec = dec.replace(':',' ')
# dec = dmsstring2dec(dec)
#
# sdss = DR9()
# getim = st.get_tractor_image_dr9
# getsrc = st.get_tractor_sources_dr9
# imkw = dict(zrange=[-3,100], sdss=sdss)
# if opt.unzip:
# sdss.saveUnzippedFiles(opt.unzip)
#
# tims = []
# for bandname in bands:
# if userd:
# if not usercf:
# tfn = opt.drfields
# if tfn is None:
# tfn = 'dr%ifields.fits' % drnum
# rcfs = radec_to_sdss_rcf(ra, dec, radius=math.hypot(radius,13./2.), tablefn=tfn)
# if len(rcfs) == 0:
# print 'RA,Dec (%.3f,%.3f): found no overlapping SDSS fields in file %s' % (ra, dec, tfn)
# sys.exit(-1)
# if len(rcfs) > 1:
# print 'Found %i Run,Camcol,Fields overlapping RA,Dec (%.3f,%.3f): taking the first one' % (len(rcfs), ra, dec)
# rcfs = rcfs[0]
# run,camcol,field = rcfs[:3]
# imkw.update(roiradecsize = (ra, dec, opt.pixsize))
# if opt.dg:
# imkw.update(psf='dg')
#
# tim,tinf = getim(run, camcol, field, bandname, curl=opt.curl,
# roi=opt.roi, **imkw)
# tim.zr = tinf['zr']
# if userd:
# opt.roi = tinf['roi']
#
# tims.append(tim)
#
# if opt.scale:
# print 'Scaling images by', opt.scale
# tims = [st.scale_sdss_image(tim, opt.scale) for tim in tims]
#
# sources = getsrc(run, camcol, field, bandname, bands=bands, curl=opt.curl, roi=opt.roi)
# tractor = Tractor(tims, sources)
#
# sa = dict(debug=opt.debug, plotAll=opt.plotAll, roi=opt.roi)
# if opt.noarcsinh:
# sa.update(nlscale=0)
#
# for j,band in enumerate(bands):
# save('initial-%s-' % (band) + prefix, tractor, imgi=j, **sa)
#
# lnp0 = tractor.getLogProb()
# print 'Initial lnprob:', lnp0
#
# for im in tractor.images:
# im.freezeAllParams()
# im.thawParam('sky')
#
# for count, each in enumerate(tune):
# if each[0]=='n':
# tractor.catalog.thawAllParams()
# tractor.images.thawParamsRecursive('sky')
# for i in range(each[1]):
# if opt.lbfgsb:
# tractor.optimize_lbfgsb()
# else:
# tractor.optimize()
# for j,band in enumerate(bands):
# save('tune-%d-%d-%s-' % (count+1, i+1,band) + prefix, tractor, imgi=j, **sa)
# lnp1 = tractor.getLogProb()
# print 'After optimization: lnprob', lnp1
# print ' delta-lnprob:', lnp1 - lnp0
#
# elif each[0]=='i':
# tractor.images.freezeParamsRecursive('sky')
# for i in range(each[1][0]):
# for j,src in enumerate(tractor.getCatalog()):
# tractor.catalog.freezeAllBut(src)
#
# if i < 6:
# print 'Freezing position'
# src.freezeParam('pos')
#
# print 'Optimizing:'
# for nm in tractor.getParamNames():
# print nm
#
# for step in range(each[1][1]):
# if opt.lbfgsb:
# tractor.optimize_lbfgsb(plotfn='synt-opt-i%i-s%04i.png' % (i,j))
# else:
# tractor.optimize()
#
# src.unfreezeParam('pos')
#
# for j,band in enumerate(bands):
# save('tune-%d-%d-%s-' % (count+1,i+1,band) + prefix, tractor, imgi=j, **sa)
# tractor.clearCache()
#
# lnp1 = tractor.getLogProb()
# print 'After optimization: lnprob', lnp1
# print ' delta-lnprob:', lnp1 - lnp0
#
# makeflipbook(opt, prefix,tune,len(tractor.getCatalog()),bands)
# print
# print 'Created flip-book flip-%s.pdf' % prefix
#
#
#
#def makeflipbook(opt, prefix,tune,numSrcs,bands):
# # Create a tex flip-book of the plots
# tex = r'''
# \documentclass[compress]{beamer}
# \usepackage{helvet}
# \newcommand{\plot}[1]{\includegraphics[width=0.5\textwidth]{#1}}
# \begin{document}
# '''
# if len(tune) != 0:
# tex += r'''\part{Tuning steps}\frame{\partpage}''' + '\n'
# page = r'''
# \begin{frame}{%s}
# \plot{data-%s}
# \plot{model-%s} \\
# \plot{diff-%s}
# \plot{chi-%s} \\
# \end{frame}'''
# for j,band in enumerate(bands):
# tex += page % (('Initial model, Band: %s' % (band),) + ('initial-%s-' % (band) + prefix,)*4)
# if opt.plotAll:
# for i in range(numSrcs):
# for j,band in enumerate(bands):
# tex += page % (('Source: %i, Band: %s' % (i+1,band),)+ ('s%d-initial-%s-' % (i+1,band) + prefix,)*4)
# for count,step in enumerate(tune):
# if step[0] == 'n':
# for i in range (step[1]):
# for k,band in enumerate(bands):
# tex += page % (('Tuning set %i, Tuning step %i, Band: %s' % (count+1,i+1,band),) +
# ('tune-%d-%d-%s-' % (count+1,i+1,band) + prefix,)*4)
# if opt.plotAll:
# for j in range(numSrcs):
# for k,band in enumerate(bands):
# tex += page % (('Source: %i, Band: %s' % (j+1,band),)+ ('s%d-tune-%d-%d-%s-' % (j+1,count+1,i+1,band) + prefix,)*4)
# elif step[0] == 'i':
# for i in range(step[1][0]):
# for band in bands:
# tex += page % (('Tuning set %i, Individual tuning step %i, Band: %s' % (count+1,i+1,band),) + ('tune-%d-%d-%s-' % (count+1,i+1,band) + prefix,)*4)
# if opt.plotAll:
# for j in range(numSrcs):
# for band in bands:
# tex += page % (('Source: %i, Band: %s' % (j+1,band),) + ('s%d-tune-%d-%d-%s-' % (j+1,count+1,i+1,band) + prefix,)*4)
# if len(tune) != 0:
# last = tune[len(tune)-1]
# lastSet = len(tune) - 1
# if last[0] == 'n':
# final = last[1]
# elif last[0] == 'i':
# final = last[1][0]
# lBand = bands[0]
# # Finish with a 'blink'
# tex += r'''\part{Before-n-after}\frame{\partpage}''' + '\n'
# tex += (r'''
# \begin{frame}{Data}
# \plot{data-%s}
# \plot{data-%s} \\
# \plot{diff-%s}
# \plot{diff-%s}
# \end{frame}
# \begin{frame}{Before (left); After (right)}
# \plot{model-%s}
# \plot{model-%s} \\
# \plot{diff-%s}
# \plot{diff-%s}
# \end{frame}
# ''' % (('initial-%s-' % (lBand) + prefix,)*2 +
# ('initial-%s-' %(lBand) +prefix, 'tune-%d-%d-%s-' % (lastSet+1,final,lBand)+ prefix)*3))
# tex += r'\end{document}' + '\n'
# fn = 'flip-' + prefix + '.tex'
# print 'Writing', fn
# open(fn, 'wb').write(tex)
# os.system("pdflatex '%s'" % fn)
#
#if __name__ == '__main__':
# import cProfile
# import sys
# from datetime import tzinfo, timedelta, datetime
# cProfile.run('main()', 'prof-%s.dat' % (datetime.now().isoformat()))
# sys.exit(0)
# #main()
| [
"[email protected]"
] | |
ff2271359db1616124e1268faddb92c674bae44a | 7b47c686684e145ad06f2096c4be9fcf4dba4c68 | /regress_nn.py | e40313ea874bca697c0ccb86cfd15631c9e9e903 | [] | no_license | evanthebouncy/learn_torch | 3e5f52fb9dc7d8dbcf6fe5f2f3dcaf252c523512 | d612375e1b0f6b8dee667e25644d03d297e3da65 | refs/heads/master | 2020-04-09T19:00:59.485452 | 2018-03-07T19:01:37 | 2018-03-07T19:01:37 | 124,240,753 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,959 | py | import torch
from torch.autograd import Variable
import numpy as np
# generate the data
A = np.array([[1.0, 2.0],[3.0, 4.0]])
B = np.array([[4.0, 3.0],[2.0, 1.0]])
def to_torch(x):
x = Variable(torch.from_numpy(x)).type(torch.cuda.FloatTensor)
return x
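# gen_xy samples x uniformly from [0, 1)^2 and maps it through A when its
# components sum to more than 1, otherwise through B, so the regression target
# is a piecewise-linear function of the input.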
def gen_xy():
x = np.random.rand(2)
y = np.matmul(A,x) if np.sum(x) > 1.0 else np.matmul(B,x)
return x, y
def gen_xy_batch():
xs, ys = [], []
for i in range(30):
x,y = gen_xy()
xs.append(x)
ys.append(y)
return np.array(xs), np.array(ys)
print (gen_xy())
n_hidden = 200
model = torch.nn.Sequential(
torch.nn.Linear(2, n_hidden),
torch.nn.ReLU(),
torch.nn.Linear(n_hidden, n_hidden),
torch.nn.ReLU(),
torch.nn.Linear(n_hidden, 2),
).cuda()
loss_fn = torch.nn.MSELoss(size_average=False)
learning_rate = 1e-3
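# Training loop: gradients are applied by hand with plain SGD rather than a
# torch.optim optimizer. Note that size_average=False above makes the loss a
# sum over the batch instead of a mean.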
for t in range(5000):
x, y = gen_xy_batch()
x = to_torch(x)
y = to_torch(y)
y_pred = model(x)
# Compute and print loss. We pass Variables containing the predicted and true
# values of y, and the loss function returns a Variable containing the loss.
loss = loss_fn(y_pred, y)
print(t, loss.data[0])
# Zero the gradients before running the backward pass.
model.zero_grad()
# Backward pass: compute gradient of the loss with respect to all the learnable
# parameters of the model. Internally, the parameters of each Module are stored
# in Variables with requires_grad=True, so this call will compute gradients for
# all learnable parameters in the model.
loss.backward()
# Update the weights using gradient descent. Each parameter is a Variable, so
# we can access its data and gradients like we did before.
for param in model.parameters():
param.data -= learning_rate * param.grad.data
for i in range(100):
print ("========================")
x, y = gen_xy()
print (x)
print ("prediction ")
print (model(to_torch(x)))
print ("truth")
print (y)
| [
"[email protected]"
] | |
07c3f2b17542741fdba879d28e6400445fafd77b | 5dd190725aaaeb7287d935b3c99c20480b208816 | /slim/deployment/model_deploy_test.py | af033d9b9ce6deb1fd51ed74cd65a6fa3e9f852a | [
"MIT"
] | permissive | DemonDamon/mask-detection-based-on-tf2odapi | 32d947164fb54395b9e45368c0d4bcf3a6ea1c28 | 192ae544169c1230c21141c033800aa1bd94e9b6 | refs/heads/main | 2023-05-13T05:05:44.534885 | 2021-06-08T05:56:09 | 2021-06-08T05:56:09 | 369,463,131 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 24,854 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for model_deploy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
import tf_slim as slim
from deployment import model_deploy
class DeploymentConfigTest(tf.test.TestCase):
def testDefaults(self):
deploy_config = model_deploy.DeploymentConfig()
self.assertEqual(slim.get_variables(), [])
self.assertEqual(deploy_config.caching_device(), None)
self.assertDeviceEqual(deploy_config.clone_device(0), 'GPU:0')
self.assertEqual(deploy_config.clone_scope(0), '')
self.assertDeviceEqual(deploy_config.optimizer_device(), 'CPU:0')
self.assertDeviceEqual(deploy_config.inputs_device(), 'CPU:0')
self.assertDeviceEqual(deploy_config.variables_device(), 'CPU:0')
def testCPUonly(self):
deploy_config = model_deploy.DeploymentConfig(clone_on_cpu=True)
self.assertEqual(deploy_config.caching_device(), None)
self.assertDeviceEqual(deploy_config.clone_device(0), 'CPU:0')
self.assertEqual(deploy_config.clone_scope(0), '')
self.assertDeviceEqual(deploy_config.optimizer_device(), 'CPU:0')
self.assertDeviceEqual(deploy_config.inputs_device(), 'CPU:0')
self.assertDeviceEqual(deploy_config.variables_device(), 'CPU:0')
def testMultiGPU(self):
deploy_config = model_deploy.DeploymentConfig(num_clones=2)
self.assertEqual(deploy_config.caching_device(), None)
self.assertDeviceEqual(deploy_config.clone_device(0), 'GPU:0')
self.assertDeviceEqual(deploy_config.clone_device(1), 'GPU:1')
self.assertEqual(deploy_config.clone_scope(0), 'clone_0')
self.assertEqual(deploy_config.clone_scope(1), 'clone_1')
self.assertDeviceEqual(deploy_config.optimizer_device(), 'CPU:0')
self.assertDeviceEqual(deploy_config.inputs_device(), 'CPU:0')
self.assertDeviceEqual(deploy_config.variables_device(), 'CPU:0')
def testPS(self):
deploy_config = model_deploy.DeploymentConfig(num_clones=1, num_ps_tasks=1)
self.assertDeviceEqual(deploy_config.clone_device(0),
'/job:worker/device:GPU:0')
self.assertEqual(deploy_config.clone_scope(0), '')
self.assertDeviceEqual(deploy_config.optimizer_device(),
'/job:worker/device:CPU:0')
self.assertDeviceEqual(deploy_config.inputs_device(),
'/job:worker/device:CPU:0')
with tf.device(deploy_config.variables_device()):
a = tf.Variable(0)
b = tf.Variable(0)
c = tf.no_op()
d = slim.variable('a', [],
caching_device=deploy_config.caching_device())
self.assertDeviceEqual(a.device, '/job:ps/task:0/device:CPU:0')
self.assertDeviceEqual(a.device, a.value().device)
self.assertDeviceEqual(b.device, '/job:ps/task:0/device:CPU:0')
self.assertDeviceEqual(b.device, b.value().device)
self.assertDeviceEqual(c.device, '')
self.assertDeviceEqual(d.device, '/job:ps/task:0/device:CPU:0')
self.assertDeviceEqual(d.value().device, '')
def testMultiGPUPS(self):
deploy_config = model_deploy.DeploymentConfig(num_clones=2, num_ps_tasks=1)
self.assertEqual(deploy_config.caching_device()(tf.no_op()), '')
self.assertDeviceEqual(deploy_config.clone_device(0),
'/job:worker/device:GPU:0')
self.assertDeviceEqual(deploy_config.clone_device(1),
'/job:worker/device:GPU:1')
self.assertEqual(deploy_config.clone_scope(0), 'clone_0')
self.assertEqual(deploy_config.clone_scope(1), 'clone_1')
self.assertDeviceEqual(deploy_config.optimizer_device(),
'/job:worker/device:CPU:0')
self.assertDeviceEqual(deploy_config.inputs_device(),
'/job:worker/device:CPU:0')
def testReplicasPS(self):
deploy_config = model_deploy.DeploymentConfig(num_replicas=2,
num_ps_tasks=2)
self.assertDeviceEqual(deploy_config.clone_device(0),
'/job:worker/device:GPU:0')
self.assertEqual(deploy_config.clone_scope(0), '')
self.assertDeviceEqual(deploy_config.optimizer_device(),
'/job:worker/device:CPU:0')
self.assertDeviceEqual(deploy_config.inputs_device(),
'/job:worker/device:CPU:0')
def testReplicasMultiGPUPS(self):
deploy_config = model_deploy.DeploymentConfig(num_replicas=2,
num_clones=2,
num_ps_tasks=2)
self.assertDeviceEqual(deploy_config.clone_device(0),
'/job:worker/device:GPU:0')
self.assertDeviceEqual(deploy_config.clone_device(1),
'/job:worker/device:GPU:1')
self.assertEqual(deploy_config.clone_scope(0), 'clone_0')
self.assertEqual(deploy_config.clone_scope(1), 'clone_1')
self.assertDeviceEqual(deploy_config.optimizer_device(),
'/job:worker/device:CPU:0')
self.assertDeviceEqual(deploy_config.inputs_device(),
'/job:worker/device:CPU:0')
def testVariablesPS(self):
deploy_config = model_deploy.DeploymentConfig(num_ps_tasks=2)
with tf.device(deploy_config.variables_device()):
a = tf.Variable(0)
b = tf.Variable(0)
c = tf.no_op()
d = slim.variable('a', [],
caching_device=deploy_config.caching_device())
self.assertDeviceEqual(a.device, '/job:ps/task:0/device:CPU:0')
self.assertDeviceEqual(a.device, a.value().device)
self.assertDeviceEqual(b.device, '/job:ps/task:1/device:CPU:0')
self.assertDeviceEqual(b.device, b.value().device)
self.assertDeviceEqual(c.device, '')
self.assertDeviceEqual(d.device, '/job:ps/task:0/device:CPU:0')
self.assertDeviceEqual(d.value().device, '')
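# Minimal model_fn helpers used as the clone function in the tests below: each
# builds a tiny classifier on the given inputs and registers a log loss.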
def LogisticClassifier(inputs, labels, scope=None, reuse=None):
with tf.variable_scope(scope, 'LogisticClassifier', [inputs, labels],
reuse=reuse):
predictions = slim.fully_connected(inputs, 1, activation_fn=tf.sigmoid,
scope='fully_connected')
slim.losses.log_loss(predictions, labels)
return predictions
def BatchNormClassifier(inputs, labels, scope=None, reuse=None):
with tf.variable_scope(scope, 'BatchNormClassifier', [inputs, labels],
reuse=reuse):
inputs = slim.batch_norm(inputs, decay=0.1, fused=True)
predictions = slim.fully_connected(inputs, 1,
activation_fn=tf.sigmoid,
scope='fully_connected')
slim.losses.log_loss(predictions, labels)
return predictions
class CreatecloneTest(tf.test.TestCase):
def setUp(self):
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
self._logdir = self.get_temp_dir()
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def testCreateLogisticClassifier(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = LogisticClassifier
clone_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=1)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
clone = clones[0]
self.assertEqual(len(slim.get_variables()), 2)
for v in slim.get_variables():
self.assertDeviceEqual(v.device, 'CPU:0')
self.assertDeviceEqual(v.value().device, 'CPU:0')
self.assertEqual(clone.outputs.op.name,
'LogisticClassifier/fully_connected/Sigmoid')
self.assertEqual(clone.scope, '')
self.assertDeviceEqual(clone.device, 'GPU:0')
self.assertEqual(len(slim.losses.get_losses()), 1)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(update_ops, [])
def testCreateSingleclone(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
clone_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=1)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
clone = clones[0]
self.assertEqual(len(slim.get_variables()), 5)
for v in slim.get_variables():
self.assertDeviceEqual(v.device, 'CPU:0')
self.assertDeviceEqual(v.value().device, 'CPU:0')
self.assertEqual(clone.outputs.op.name,
'BatchNormClassifier/fully_connected/Sigmoid')
self.assertEqual(clone.scope, '')
self.assertDeviceEqual(clone.device, 'GPU:0')
self.assertEqual(len(slim.losses.get_losses()), 1)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(len(update_ops), 2)
def testCreateMulticlone(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
clone_args = (tf_inputs, tf_labels)
num_clones = 4
deploy_config = model_deploy.DeploymentConfig(num_clones=num_clones)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
self.assertEqual(len(slim.get_variables()), 5)
for v in slim.get_variables():
self.assertDeviceEqual(v.device, 'CPU:0')
self.assertDeviceEqual(v.value().device, 'CPU:0')
self.assertEqual(len(clones), num_clones)
for i, clone in enumerate(clones):
self.assertEqual(
clone.outputs.op.name,
'clone_%d/BatchNormClassifier/fully_connected/Sigmoid' % i)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, clone.scope)
self.assertEqual(len(update_ops), 2)
self.assertEqual(clone.scope, 'clone_%d/' % i)
self.assertDeviceEqual(clone.device, 'GPU:%d' % i)
def testCreateOnecloneWithPS(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
clone_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=1,
num_ps_tasks=1)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
self.assertEqual(len(clones), 1)
clone = clones[0]
self.assertEqual(clone.outputs.op.name,
'BatchNormClassifier/fully_connected/Sigmoid')
self.assertDeviceEqual(clone.device, '/job:worker/device:GPU:0')
self.assertEqual(clone.scope, '')
self.assertEqual(len(slim.get_variables()), 5)
for v in slim.get_variables():
self.assertDeviceEqual(v.device, '/job:ps/task:0/CPU:0')
self.assertDeviceEqual(v.device, v.value().device)
def testCreateMulticloneWithPS(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
clone_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=2,
num_ps_tasks=2)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
self.assertEqual(len(slim.get_variables()), 5)
for i, v in enumerate(slim.get_variables()):
t = i % 2
self.assertDeviceEqual(v.device, '/job:ps/task:%d/device:CPU:0' % t)
self.assertDeviceEqual(v.device, v.value().device)
self.assertEqual(len(clones), 2)
for i, clone in enumerate(clones):
self.assertEqual(
clone.outputs.op.name,
'clone_%d/BatchNormClassifier/fully_connected/Sigmoid' % i)
self.assertEqual(clone.scope, 'clone_%d/' % i)
self.assertDeviceEqual(clone.device, '/job:worker/device:GPU:%d' % i)
class OptimizeclonesTest(tf.test.TestCase):
def setUp(self):
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
self._logdir = self.get_temp_dir()
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def testCreateLogisticClassifier(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = LogisticClassifier
clone_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=1)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
self.assertEqual(len(slim.get_variables()), 2)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(update_ops, [])
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
total_loss, grads_and_vars = model_deploy.optimize_clones(clones,
optimizer)
self.assertEqual(len(grads_and_vars), len(tf.trainable_variables()))
self.assertEqual(total_loss.op.name, 'total_loss')
for g, v in grads_and_vars:
self.assertDeviceEqual(g.device, 'GPU:0')
self.assertDeviceEqual(v.device, 'CPU:0')
def testCreateSingleclone(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
clone_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=1)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
self.assertEqual(len(slim.get_variables()), 5)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(len(update_ops), 2)
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
total_loss, grads_and_vars = model_deploy.optimize_clones(clones,
optimizer)
self.assertEqual(len(grads_and_vars), len(tf.trainable_variables()))
self.assertEqual(total_loss.op.name, 'total_loss')
for g, v in grads_and_vars:
self.assertDeviceEqual(g.device, 'GPU:0')
self.assertDeviceEqual(v.device, 'CPU:0')
def testCreateMulticlone(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
clone_args = (tf_inputs, tf_labels)
num_clones = 4
deploy_config = model_deploy.DeploymentConfig(num_clones=num_clones)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, clone_args)
self.assertEqual(len(slim.get_variables()), 5)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(len(update_ops), num_clones * 2)
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
total_loss, grads_and_vars = model_deploy.optimize_clones(clones,
optimizer)
self.assertEqual(len(grads_and_vars), len(tf.trainable_variables()))
self.assertEqual(total_loss.op.name, 'total_loss')
for g, v in grads_and_vars:
self.assertDeviceEqual(g.device, '')
self.assertDeviceEqual(v.device, 'CPU:0')
def testCreateMulticloneCPU(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
model_args = (tf_inputs, tf_labels)
num_clones = 4
deploy_config = model_deploy.DeploymentConfig(num_clones=num_clones,
clone_on_cpu=True)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, model_args)
self.assertEqual(len(slim.get_variables()), 5)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(len(update_ops), num_clones * 2)
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
total_loss, grads_and_vars = model_deploy.optimize_clones(clones,
optimizer)
self.assertEqual(len(grads_and_vars), len(tf.trainable_variables()))
self.assertEqual(total_loss.op.name, 'total_loss')
for g, v in grads_and_vars:
self.assertDeviceEqual(g.device, '')
self.assertDeviceEqual(v.device, 'CPU:0')
def testCreateOnecloneWithPS(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
model_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=1,
num_ps_tasks=1)
self.assertEqual(slim.get_variables(), [])
clones = model_deploy.create_clones(deploy_config, model_fn, model_args)
self.assertEqual(len(slim.get_variables()), 5)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(len(update_ops), 2)
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
total_loss, grads_and_vars = model_deploy.optimize_clones(clones,
optimizer)
self.assertEqual(len(grads_and_vars), len(tf.trainable_variables()))
self.assertEqual(total_loss.op.name, 'total_loss')
for g, v in grads_and_vars:
self.assertDeviceEqual(g.device, '/job:worker/device:GPU:0')
self.assertDeviceEqual(v.device, '/job:ps/task:0/CPU:0')
class DeployTest(tf.test.TestCase):
def setUp(self):
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
self._logdir = self.get_temp_dir()
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def _addBesselsCorrection(self, sample_size, expected_var):
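    # Bessel's correction: scale the population variance by n / (n - 1) to match
    # the unbiased sample variance tracked by the batch-norm moving statistics.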
correction_factor = sample_size / (sample_size - 1)
expected_var *= correction_factor
return expected_var
def testLocalTrainOp(self):
g = tf.Graph()
with g.as_default():
tf.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
model_fn = BatchNormClassifier
model_args = (tf_inputs, tf_labels)
deploy_config = model_deploy.DeploymentConfig(num_clones=2,
clone_on_cpu=True)
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
self.assertEqual(slim.get_variables(), [])
model = model_deploy.deploy(deploy_config, model_fn, model_args,
optimizer=optimizer)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
self.assertEqual(len(update_ops), 4)
self.assertEqual(len(model.clones), 2)
self.assertEqual(model.total_loss.op.name, 'total_loss')
self.assertEqual(model.summary_op.op.name, 'summary_op/summary_op')
self.assertEqual(model.train_op.op.name, 'train_op')
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
moving_mean = slim.get_variables_by_name('moving_mean')[0]
moving_variance = slim.get_variables_by_name('moving_variance')[0]
initial_loss = sess.run(model.total_loss)
initial_mean, initial_variance = sess.run([moving_mean,
moving_variance])
self.assertAllClose(initial_mean, [0.0, 0.0, 0.0, 0.0])
self.assertAllClose(initial_variance, [1.0, 1.0, 1.0, 1.0])
for _ in range(10):
sess.run(model.train_op)
final_loss = sess.run(model.total_loss)
self.assertLess(final_loss, initial_loss / 5.0)
final_mean, final_variance = sess.run([moving_mean,
moving_variance])
expected_mean = np.array([0.125, 0.25, 0.375, 0.25])
expected_var = np.array([0.109375, 0.1875, 0.234375, 0.1875])
expected_var = self._addBesselsCorrection(16, expected_var)
self.assertAllClose(final_mean, expected_mean)
self.assertAllClose(final_variance, expected_var)
def testNoSummariesOnGPU(self):
with tf.Graph().as_default():
deploy_config = model_deploy.DeploymentConfig(num_clones=2)
# clone function creates a fully_connected layer with a regularizer loss.
def ModelFn():
inputs = tf.constant(1.0, shape=(10, 20), dtype=tf.float32)
reg = slim.l2_regularizer(0.001)
slim.fully_connected(inputs, 30, weights_regularizer=reg)
model = model_deploy.deploy(
deploy_config, ModelFn,
optimizer=tf.train.GradientDescentOptimizer(1.0))
# The model summary op should have a few summary inputs and all of them
# should be on the CPU.
self.assertTrue(model.summary_op.op.inputs)
for inp in model.summary_op.op.inputs:
self.assertEqual('/device:CPU:0', inp.device)
def testNoSummariesOnGPUForEvals(self):
with tf.Graph().as_default():
deploy_config = model_deploy.DeploymentConfig(num_clones=2)
# clone function creates a fully_connected layer with a regularizer loss.
def ModelFn():
inputs = tf.constant(1.0, shape=(10, 20), dtype=tf.float32)
reg = slim.l2_regularizer(0.001)
slim.fully_connected(inputs, 30, weights_regularizer=reg)
# No optimizer here, it's an eval.
model = model_deploy.deploy(deploy_config, ModelFn)
# The model summary op should have a few summary inputs and all of them
# should be on the CPU.
self.assertTrue(model.summary_op.op.inputs)
for inp in model.summary_op.op.inputs:
self.assertEqual('/device:CPU:0', inp.device)
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
2befe10f9db67c105252acf3bd768d479655c6b7 | 886400ec768a04900761a2487ef473daf5acdd6c | /recipes/nysopwdd_providers/build.py | b6ae36c3ba33b39e314f412f973558856b6b8ab2 | [] | no_license | NYCPlanning/db-data-recipes | b058ae9abcee8dc916ee9f36e13c57aad53af0dc | 29ea8e1dc0a4d6dc0dd1704c68389e73f318227a | refs/heads/master | 2020-04-30T18:37:10.044641 | 2019-11-08T17:52:57 | 2019-11-08T17:52:57 | 177,013,624 | 2 | 0 | null | 2019-08-02T16:14:20 | 2019-03-21T19:47:58 | Python | UTF-8 | Python | false | false | 582 | py | from dataflows import *
from lib import joined_lower, create_base_path, dump_to_s3
def ETL():
table_name = 'nysopwdd_providers'
url = 'https://data.ny.gov/api/views/ieqx-cqyk/rows.csv?accessType=DOWNLOAD'
base_path = create_base_path(__file__)
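    # dataflows pipeline: load the CSV with every column as a string, normalise
    # the header names with joined_lower (a project helper), rename the resource
    # path to <table_name>.csv, and dump the result to S3 under base_path.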
Flow(
load(url, name=table_name, format='csv', force_strings=True),
joined_lower(resources=table_name),
update_resource(resources=table_name, path=table_name+'.csv'),
dump_to_s3(resources=table_name, params=dict(base_path=base_path))
).process()
if __name__ == '__main__':
ETL() | [
"[email protected]"
] | |
6a52daae1178628c2ebb5e4f1b022cb05d9e4e8f | 60c18eefd903957622a8bd9dc2b7c8522d13552b | /app/jobs/pay.py | e85ffd8937a57dc6377f72492f10e4b47bb4a1b1 | [] | no_license | 15051882416/food_shop | f2868ac7ca63e9e8e36564f979c0c9585e5a22f0 | 0033580a08da6e7f043153e5d3dd382333a9eac2 | refs/heads/master | 2022-03-03T01:45:25.648296 | 2019-02-21T03:25:58 | 2019-02-21T03:25:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 699 | py | from datetime import datetime, timedelta
from flask_script import Manager
from app.libs.enums import OrderStatus
from app.models import Order
from app.service.order import OrderService
from food_shop import app
pay = Manager()
@pay.command
def pay_deadline():
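    # Cancel orders that are still unpaid 30 minutes after they were created.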
now = datetime.now()
    date_30mins_ago = (now - timedelta(minutes=30)).timestamp()
orders = Order.query.filter_by(order_status=OrderStatus.UNPAID.value).filter(
Order.create_time <= date_30mins_ago
).all()
if not orders:
        app.logger.info('No order data')
return
for item in orders:
OrderService.cancel_order(item)
    app.logger.info('Closed orders left unpaid for more than 30 minutes')
| [
"[email protected]"
] | |
5c39684cc433176e05bd518b1786f2fcb92b87d7 | c9198b0524f07648804d4dd556865840ccbc0195 | /main.py | ae1faa23dccd98e0bc7a82e73d9e48040861b4f3 | [] | no_license | p9s/spider_main | 64e83d5831c2b9095ae7bb6f8707f0576091e4bb | b63b7663c6b70f3d7d49201edea806eab778db37 | refs/heads/master | 2021-07-23T04:34:50.586697 | 2017-11-02T02:57:51 | 2017-11-02T02:57:51 | 109,651,350 | 1 | 0 | null | 2017-11-06T05:24:39 | 2017-11-06T05:24:39 | null | UTF-8 | Python | false | false | 158 | py | import time
import os
os.system('python resetnew.py')
os.system('python word_key.py')
os.system('python set_AZCM.py')
os.system('python word_count_key.py')
| [
"[email protected]"
] | |
3ced7480cec0ff578a70ba52ceb9a6776529471d | e71fa62123b2b8f7c1a22acb1babeb6631a4549b | /xlsxwriter/test/workbook/test_check_images.py | 557841a96a86143abdc923cc767ad74e85ab9dd2 | [
"BSD-2-Clause"
] | permissive | timgates42/XlsxWriter | 40480b6b834f28c4a7b6fc490657e558b0a466e5 | 7ad2541c5f12b70be471b447ab709c451618ab59 | refs/heads/main | 2023-03-16T14:31:08.915121 | 2022-07-13T23:43:45 | 2022-07-13T23:43:45 | 242,121,381 | 0 | 0 | NOASSERTION | 2020-02-21T11:14:55 | 2020-02-21T11:14:55 | null | UTF-8 | Python | false | false | 1,358 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, [email protected]
#
import unittest
from io import StringIO
from ...workbook import Workbook
from ...exceptions import UndefinedImageSize
from ...exceptions import UnsupportedImageFormat
class TestInsertImage(unittest.TestCase):
"""
Test exceptions with insert_image().
"""
def test_undefined_image_size(self):
"""Test adding an image with no height/width data."""
fh = StringIO()
workbook = Workbook()
workbook._set_filehandle(fh)
worksheet = workbook.add_worksheet()
worksheet.insert_image('B13', 'xlsxwriter/test/comparison/images/nosize.png')
self.assertRaises(UndefinedImageSize, workbook._prepare_drawings)
workbook.fileclosed = True
def test_unsupported_image(self):
"""Test adding an unsupported image type."""
fh = StringIO()
workbook = Workbook()
workbook._set_filehandle(fh)
worksheet = workbook.add_worksheet()
worksheet.insert_image('B13', 'xlsxwriter/test/comparison/images/unsupported.txt')
self.assertRaises(UnsupportedImageFormat, workbook._prepare_drawings)
workbook.fileclosed = True
| [
"[email protected]"
] | |
054fa4e0d4cc7c36581cb80e72ee24794728aa75 | c781392896fd2498670bdb2eef9c6f6c43cea6bf | /feder/questionaries/models.py | b89eff9cb28b37e37fc10124f863c21c6d6662f9 | [
"MIT"
] | permissive | fossabot/feder | f35b7485bfe76f9a85e6434ddd97a8c3a4f53ade | c73ebb119e9e620f367e6cf59334e6e9cb13b592 | refs/heads/master | 2021-07-05T04:27:24.801358 | 2017-09-29T23:17:18 | 2017-09-29T23:17:18 | 105,324,724 | 0 | 0 | null | 2017-09-29T23:17:17 | 2017-09-29T23:17:17 | null | UTF-8 | Python | false | false | 2,123 | py | from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from model_utils.models import TimeStampedModel
from feder.monitorings.models import Monitoring
from .utils import get_modulators
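# Bare gettext call: registers the string with the translation catalogue so
# makemessages picks it up; the return value is intentionally unused.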
_('Questionaries index')
LOCK_HELP = _("Prevent of edit question to protect against destruction the data set")
@python_2_unicode_compatible
class Questionary(TimeStampedModel):
title = models.CharField(max_length=250, verbose_name=_("Title"))
monitoring = models.ForeignKey(Monitoring, verbose_name=_("Monitoring"))
lock = models.BooleanField(default=False, verbose_name=_("Lock of edition"),
help_text=LOCK_HELP)
def get_absolute_url(self):
return reverse('questionaries:details', kwargs={'pk': self.pk})
def __str__(self):
return self.title
class Meta:
ordering = ['created', ]
verbose_name = _("Questionary")
verbose_name_plural = _("Questionaries")
@python_2_unicode_compatible
class Question(models.Model):
questionary = models.ForeignKey(Questionary, verbose_name=_("Questionary"))
position = models.SmallIntegerField(default=0, verbose_name=_("Position"))
genre = models.CharField(max_length=25, verbose_name=_("Genre"))
definition = JSONField(verbose_name=_("Technical definition"))
def get_absolute_url(self):
return reverse('questionaries:question_update', kwargs={'pk': self.pk})
@property
def is_configured(self):
return bool(self.definition)
@property
def modulator(self):
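        # Look up the modulator class registered for this question's genre and
        # return a fresh instance of it.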
return get_modulators()[self.genre]()
def __str__(self):
if not self.is_configured:
return _("Undefined question - {description}").format(
description=self.modulator.description)
return self.modulator.get_label_text(self.definition)
class Meta:
ordering = ['position', ]
verbose_name = _("Question")
verbose_name_plural = _("Questions")
| [
"[email protected]"
] | |
4626e411c019e6382bd13482f3b826e081d71712 | a697a38b37c4cf0a9d6c3439faf7a04d3d8c7766 | /tests/test_spyd/test_utils/test_rate_limiter.py | 5cf89c82abb4f7d3001b9c5cadf6764dc8fca1d4 | [
"Zlib"
] | permissive | fdChasm/spyd | e22ea50c7dbcd9901edcb7e989a455b6db40ec1e | 38e070d10290c2da1e9e5c2226aace871e4dcc59 | refs/heads/master | 2021-01-10T20:01:25.684294 | 2014-03-19T03:47:38 | 2014-03-19T03:47:38 | 13,235,339 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 925 | py | import unittest
from twisted.internet import task
from spyd.utils.rate_limiter import RateLimiter
class TestRateLimiter(unittest.TestCase):
def setUp(self):
self.clock = task.Clock()
RateLimiter.clock = self.clock
self.rate_limiter = RateLimiter(5)
def test_check_drop_first_second(self):
self.assertFalse(any(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.assertTrue(all(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
def test_check_drop_two_seconds(self):
self.assertFalse(any(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.assertTrue(all(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.clock.advance(1)
self.assertFalse(any(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.assertTrue(all(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
| [
"[email protected]"
] | |
7844f97bf551ed550c53a8663218ff44d86b27c2 | 3f64e138f14e3555d3750327961fa8bdf7ef3894 | /tests/test_utils.py | 98c5d830e14c8029c42a174ba56eed26ac6d8889 | [] | no_license | potykion/repka | 1f2cb76fac6e55d1b397bc3376c0c2734937603e | 4af753fd7ca85df34a2d56846abfee209f199ea1 | refs/heads/master | 2021-06-24T08:51:45.699627 | 2021-01-16T20:26:39 | 2021-01-16T20:26:39 | 194,866,011 | 15 | 6 | null | 2021-01-16T19:42:13 | 2019-07-02T13:13:38 | Python | UTF-8 | Python | false | false | 1,376 | py | import datetime as dt
import pytest
from aiopg.sa import SAConnection
from pydantic import BaseModel
from repka.utils import model_to_primitive, create_async_db_connection
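# model_to_primitive turns a pydantic model into a plain dict; the tests below
# cover datetime serialisation, id/field exclusion and keeping Python primitives.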
class MyModel(BaseModel):
id: int
title: str
created: dt.datetime
@pytest.fixture()
def model() -> MyModel:
return MyModel(id=1, title="model", created=dt.datetime(2020, 1, 4))
def test_model_to_primitive(model: MyModel) -> None:
dict_ = model_to_primitive(model)
assert dict_ == {"id": model.id, "title": model.title, "created": '2020-01-04T00:00:00'}
def test_model_to_primitive_with_python_primitives(model: MyModel) -> None:
dict_ = model_to_primitive(model, keep_python_primitives=True)
assert dict_ == {"id": model.id, "title": model.title, "created": model.created}
def test_model_to_primitive_excludes_id(model: MyModel) -> None:
dict_ = model_to_primitive(model, without_id=True)
assert "id" not in dict_
def test_model_to_primitive_excludes_fields_from_list(model: MyModel) -> None:
dict_ = model_to_primitive(model, exclude=["title", "created"])
assert "title" not in dict_ and "created" not in dict_
@pytest.mark.asyncio
async def test_create_async_db_connection(db_url: str) -> None:
async with create_async_db_connection(db_url) as connection:
conn: SAConnection = connection
assert conn.connection.status
| [
"[email protected]"
] | |
1d66426f9e4724302c9516cccd3afd40f81c426b | e56214188faae8ebfb36a463e34fc8324935b3c2 | /test/test_ls_service_profile_all_of.py | 852d1df889f3cbf9ce05ab54ad0afc4887b566f5 | [
"Apache-2.0"
] | permissive | CiscoUcs/intersight-python | 866d6c63e0cb8c33440771efd93541d679bb1ecc | a92fccb1c8df4332ba1f05a0e784efbb4f2efdc4 | refs/heads/master | 2021-11-07T12:54:41.888973 | 2021-10-25T16:15:50 | 2021-10-25T16:15:50 | 115,440,875 | 25 | 18 | Apache-2.0 | 2020-03-02T16:19:49 | 2017-12-26T17:14:03 | Python | UTF-8 | Python | false | false | 1,943 | py | # coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.ls_service_profile_all_of import LsServiceProfileAllOf # noqa: E501
from intersight.rest import ApiException
class TestLsServiceProfileAllOf(unittest.TestCase):
"""LsServiceProfileAllOf unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testLsServiceProfileAllOf(self):
"""Test LsServiceProfileAllOf"""
# FIXME: construct object with mandatory attributes with example values
# model = intersight.models.ls_service_profile_all_of.LsServiceProfileAllOf() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
4701c32f3cea059e7d98f62136b9bdb9e14346a0 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/programa-stic_barf-project/barf-project-master/examples/scripts/arm/check_constraint1.py | 0575fae11d2ef320c4090b71d86c5307d021bd56 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 2,988 | py | #! /usr/bin/env python
import os
import sys
from barf import BARF
from barf.arch import ARCH_ARM_MODE_ARM
if __name__ == "__main__":
#
# Open file
#
try:
filename = os.path.abspath("../../bin/arm/constraint1")
barf = BARF(filename)
except Exception as err:
print err
print "[-] Error opening file : %s" % filename
sys.exit(1)
#
# Check constraint
#
# 00008390 <main>:
# 8390: e52db004 push {fp} ; (str fp, [sp, #-4]!)
# 8394: e28db000 add fp, sp, #0
# 8398: e24dd014 sub sp, sp, #20
# 839c: e51b2008 ldr r2, [fp, #-8]
# 83a0: e51b300c ldr r3, [fp, #-12]
# 83a4: e0823003 add r3, r2, r3
# 83a8: e2833005 add r3, r3, #5
# 83ac: e50b3010 str r3, [fp, #-16]
# 83b0: e51b3010 ldr r3, [fp, #-16]
# 83b4: e1a00003 mov r0, r3
# 83b8: e28bd000 add sp, fp, #0
# 83bc: e8bd0800 ldmfd sp!, {fp}
# 83c0: e12fff1e bx lr
start_addr = 0x8390
end_addr = 0x83bc
# Add instructions to analyze
print("[+] Adding instructions to the analyzer...")
for addr, asm_instr, reil_instrs in barf.translate(ea_start=start_addr, ea_end=end_addr, arch_mode=ARCH_ARM_MODE_ARM):
print("0x{0:08x} : {1}".format(addr, asm_instr))
for reil_instr in reil_instrs:
print("{0:14}{1}".format("", reil_instr))
barf.code_analyzer.add_instruction(reil_instr)
# Get smt expressions and set pre and post conditions
print("[+] Adding pre and post conditions to the analyzer...")
# Get smt expression for eax and ebp registers
fp = barf.code_analyzer.get_register_expr("fp")
# Get smt expressions for memory locations (each one of 4 bytes)
a = barf.code_analyzer.get_memory_expr(fp - 0x08, 4)
b = barf.code_analyzer.get_memory_expr(fp - 0x0c, 4)
c = barf.code_analyzer.get_memory_expr(fp - 0x10, 4)
# Set range for variable a and b
barf.code_analyzer.set_preconditions([a >= 2, a <= 100])
barf.code_analyzer.set_preconditions([b >= 2, b <= 100])
# Set desired value for the result
barf.code_analyzer.set_postconditions([c >= 26, c <= 28])
# Check satisfiability
print("[+] Check for satisfiability...")
if barf.code_analyzer.check() == 'sat':
print(" SAT! :: Possible assigments : ")
# Get concrete value for expressions
a_val = barf.code_analyzer.get_expr_value(a)
b_val = barf.code_analyzer.get_expr_value(b)
c_val = barf.code_analyzer.get_expr_value(c)
# Print values
print(" a : 0x{0:08x} ({0})".format(a_val))
print(" b : 0x{0:08x} ({0})".format(b_val))
print(" c : 0x{0:08x} ({0})".format(c_val))
assert(a_val + b_val + 5 == c_val)
else:
print(" UNSAT!")
| [
"[email protected]"
] | |
3fe0429d361d570d27e0ba30b97de0e16a82479a | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/quota/azure-mgmt-quota/azure/mgmt/quota/aio/_quota_mgmt_client.py | 48fc3c78a5d268c271a5b23817271d60686e4814 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 4,808 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import QuotaMgmtClientConfiguration
from .operations import QuotaOperationOperations, QuotaOperations, QuotaRequestStatusOperations, UsagesOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class QuotaMgmtClient: # pylint: disable=client-accepts-api-version-keyword
"""Microsoft Azure Quota Resource Provider.
:ivar usages: UsagesOperations operations
:vartype usages: azure.mgmt.quota.aio.operations.UsagesOperations
:ivar quota: QuotaOperations operations
:vartype quota: azure.mgmt.quota.aio.operations.QuotaOperations
:ivar quota_request_status: QuotaRequestStatusOperations operations
:vartype quota_request_status: azure.mgmt.quota.aio.operations.QuotaRequestStatusOperations
:ivar quota_operation: QuotaOperationOperations operations
:vartype quota_operation: azure.mgmt.quota.aio.operations.QuotaOperationOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2023-02-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self, credential: "AsyncTokenCredential", base_url: str = "https://management.azure.com", **kwargs: Any
) -> None:
self._config = QuotaMgmtClientConfiguration(credential=credential, **kwargs)
self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize)
self.quota = QuotaOperations(self._client, self._config, self._serialize, self._deserialize)
self.quota_request_status = QuotaRequestStatusOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.quota_operation = QuotaOperationOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "QuotaMgmtClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details: Any) -> None:
await self._client.__aexit__(*exc_details)
| [
"[email protected]"
] | |
ad8c7c1713fe5226ae24dda311c28e08b57f3ef5 | 09a2d0231caf5231875270ca85dba3bf201d83c7 | /linotak/images/models.py | 3ca3d0604eab73f4b3fb4635ce44532e92768c83 | [] | no_license | pdc/linotak | 898c2a014a2f2beed25127efc4b69db637c1a537 | 0075ea457f764cbb67acecb584e927bf58d2e7a8 | refs/heads/develop | 2023-03-09T19:39:59.013308 | 2022-12-19T19:58:49 | 2022-12-19T19:58:49 | 148,982,795 | 0 | 0 | null | 2023-02-15T20:20:01 | 2018-09-16T09:54:31 | Python | UTF-8 | Python | false | false | 23,561 | py | """Modles for images.
Images have two main classes:
Image: the source data for an image.
In RDF terms, this is the resource.
It is augmented with hints for representing it as a thumbnail:
crop_{left,top,width,height} and focus_[xy]
Representation: information about where we stored a
cropped, scaled version of an image
"""
from base64 import b64decode, urlsafe_b64encode
from django.core.files import File
from django.core.files.base import ContentFile
from django.core.validators import RegexValidator, MaxValueValidator, MinValueValidator
from django.db import models, transaction
from django.db.models import F
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from hashlib import md5
import io
import logging
import re
import requests
import subprocess
from xml.etree import ElementTree
from .signals import wants_data, wants_representation
from .size_spec import SizeSpec
logger = logging.getLogger(__name__)
MAX_LENGTH = 4000
class CannotSniff(Exception):
"""Raised if cannot sniff. Args are returncode & message from ImageMagick."""
def __str__(self):
returncode, message = self.args
return "ImageMagick identify returned status %d: %r" % (returncode, message)
class Image(models.Model):
"""The source data for an image displayed in a note."""
class NotSniffable(CannotSniff):
"""Raised if cannot sniff. Args are file_name and returncode & message from ImageMagick."""
def __str__(self):
file_name, returncode, message = self.args
return (
"Could not identify %s: ImageMagick identify returned status %d: %r"
% (file_name, returncode, message)
)
MAX_DATA = 10 * 1024 * 1024
MIN_WIDTH = MIN_HEIGHT = 80
data_url = models.URLField(
_("data URL"),
max_length=MAX_LENGTH,
unique=True,
)
media_type = models.CharField(
_("media-type"),
max_length=MAX_LENGTH,
validators=[
RegexValidator(r"^(image|application)/[\w.+-]+(;\s*\w+=.*)?$"),
],
null=True,
blank=True,
help_text=_("MIME media-type for this image source, such as image/jpeg"),
)
description = models.TextField(
_("description"),
max_length=4000,
blank=True,
help_text=_(
"Description of the image that might be used in a longdesc attribute "
"or as the alt text for Mastodon et al."
),
)
cached_data = models.FileField(
_("cached data"),
upload_to="cached-images",
null=True,
blank=True,
help_text=_("A copy of the image data"),
)
width = models.PositiveIntegerField(
_("width"),
null=True,
blank=True,
)
height = models.PositiveIntegerField(
_("height"),
null=True,
blank=True,
)
crop_left = models.FloatField(
_("crop left"),
default=0.0,
validators=[MinValueValidator(0.0), MaxValueValidator(1.0)],
help_text=_("Range 0.0 to 1.0. Fraction of the way from the left edge"),
)
crop_top = models.FloatField(
_("crop top"),
default=0.0,
validators=[MinValueValidator(0.0), MaxValueValidator(1.0)],
help_text=_("Range 0.0 to 1.0. Fraction of the way down from the top"),
)
crop_width = models.FloatField(
_("crop width"),
default=1.0,
validators=[MinValueValidator(0.0), MaxValueValidator(1.0)],
help_text=_("Range 0.0 to 1.0. Fraction of the way from left toward right"),
)
crop_height = models.FloatField(
_("crop height"),
default=1.0,
validators=[MinValueValidator(0.0), MaxValueValidator(1.0)],
help_text=_("Range 0.0 to 1.0. Fraction of the way from top toward bottom"),
)
focus_x = models.FloatField(
_("focus x"),
default=0.5,
validators=[MinValueValidator(0.0), MaxValueValidator(1.0)],
help_text=_("Range 0.0 to 1.0. Fraction of the way from the left edge"),
)
focus_y = models.FloatField(
_("focus y"),
default=0.5,
validators=[MinValueValidator(0.0), MaxValueValidator(1.0)],
help_text=_("Range 0.0 to 1.0. Fraction of the way down from the top"),
)
placeholder = models.CharField(
_("placeholder"),
max_length=255,
null=True,
blank=True,
help_text=_("CSS colour for a blank rect shown while awaiting image proper"),
)
etag = models.BinaryField(
_("etag"),
max_length=16,
null=True,
blank=True,
editable=False,
help_text=_("Hash of the image data when retrieved."),
)
retrieved = models.DateTimeField(
_("retrieved"),
null=True,
blank=True,
)
created = models.DateTimeField(_("created"), default=timezone.now)
modified = models.DateTimeField(_("modified"), auto_now=True)
class Meta:
verbose_name = _("image")
verbose_name_plural = _("images")
def __str__(self):
last_part = self.data_url.rsplit("/", 1)[-1]
if len(last_part) > 60:
return "%s…%s" % (last_part[:30], last_part[-20:])
return last_part
NOT_IN_JSON = {
"data_url",
"cached_data",
"media_type",
"retrieved",
"etag",
}
def to_json(self):
"""JSON summary for use in editor page."""
result = {
json_name(f.name): f.value_from_object(self)
for f in Image._meta.get_fields()
if f.name not in self.NOT_IN_JSON and not hasattr(f, "related_name")
}
return result
def crop_unique(self):
"""Return a value that changes if crop rect is changed."""
return self.crop_left, self.crop_top, self.crop_width, self.crop_height
def focus_unique(self):
"""Return a value that changes if focus point is changed."""
return self.focus_x, self.focus_y
def retrieve_data_task(self):
"""Celery signature to arrange for async download of this image."""
from . import tasks
return tasks.retrieve_image_data.s(
self.pk,
if_not_retrieved_since=(
self.retrieved.timestamp() if self.retrieved else None
),
)
def queue_retrieve_data(self):
"""Arrange for async download of this image."""
# Queue after transaction committed to avoid a race with the Celery queue.
transaction.on_commit(self.retrieve_data_task().delay)
@transaction.atomic
def retrieve_data(self, if_not_retrieved_since=None, save=False):
"""Download the image data if available."""
if self.retrieved and (
not if_not_retrieved_since or if_not_retrieved_since < self.retrieved
):
return
self.retrieved = timezone.now()
        self.save()  # This will be rolled back in case of error but setting it now avoids some race conditions.
if self.data_url.startswith("data:"):
media_type, rest = self.data_url.split(";", 1)
if rest.startswith("base64,"):
data = b64decode(rest[7:])
self.etag = md5(data).digest()
file = ContentFile(data)
else:
r = requests.get(
self.data_url, headers={"User-Agent": "Linotak/0.1 (thumbnailer)"}
)
media_type = r.headers["Content-Type"]
buf = io.BytesIO()
total_size = 0
hasher = md5()
for chunk in r.iter_content(chunk_size=10_240):
total_size += len(chunk)
buf.write(chunk)
hasher.update(chunk)
self.etag = hasher.digest()
data = buf.getvalue()
file = File(buf)
if media_type:
self.media_type = media_type
try:
# Sniff to get media_type for the file name.
self._sniff(input=data, media_type=media_type)
file_name = file_name_from_etag(self.etag, self.media_type)
except Image.NotSniffable as e:
file_name = file_name_from_etag(self.etag, None)
logger.warning(e)
self.cached_data.save(file_name, file, save=save)
def sniff(self, save=False):
"""Presuming already has image data, guess width, height, and media_type."""
with self.cached_data.open() as f:
if not self.etag:
# Happens if cached data is set outside of retrieve_data.
hasher = md5()
chunk = f.read(10_240)
while chunk:
hasher.update(chunk)
chunk = f.read(10_240)
self.etag = hasher.digest()
f.seek(0)
self._sniff(stdin=f.file)
if save:
self.save()
def _sniff(self, media_type=None, **kwargs):
"""Given a file-like object, guess width, height, and media_type.
Arguments --
kwargs -- how to get the input. Either stdin=REALFILE or input=BYTES
"""
try:
self.media_type, self.width, self.height, self.placeholder = _sniff(
media_type=media_type, **kwargs
)
except CannotSniff as e:
rc, msg = e.args
raise Image.NotSniffable(file_name_from_etag(self.etag, None), rc, msg)
def delete_if_small(self):
"""If this image is small, delete it."""
if (
self.width
and self.width < self.MIN_WIDTH
and self.height
and self.height < self.MIN_HEIGHT
):
for r in self.representations.all():
r.delete()
self.delete()
def create_square_representation(self, size):
"""Create a representation (probably cropped) of this image."""
return self.create_representation(SizeSpec.of_square(size))
@transaction.atomic
def create_representation(self, spec):
"""Create a representation of this image.
Arguments --
spec -- SizeSpec instance that specifies how to scale and crop
"""
if not self.width or not self.height:
# Image size not known, so probably not actually an image.
return None
has_crop = (
self.crop_left > 0.0
or self.crop_top > 0.0
or self.crop_width < 1.0
or self.crop_height < 1.0
)
if has_crop:
crop_width = self.crop_width * self.width
crop_height = self.crop_height * self.height
else:
crop_width, crop_height = self.width, self.height
scaled, crop = spec.scale_and_crop_to_match(crop_width, crop_height)
final_width, final_height = crop or scaled
candidates = self.representations.filter(
width=final_width, height=final_height
)[:1]
if candidates:
# Already done!
return candidates[0]
if not has_crop and final_width == self.width and final_height == self.height:
# Want the original size of the image. Just copy the data directly.
rep = self.representations.create(
media_type=self.media_type,
width=self.width,
height=self.height,
is_cropped=bool(crop),
etag=self.etag,
)
with self.cached_data.open() as f:
rep.content.save(file_name_from_etag(rep.etag, rep.media_type), f)
            return rep
cmd = ["convert", "-", "-resize", "%dx%d>" % scaled, "-"]
if has_crop:
x_c = int(round(self.width * self.crop_left))
y_c = int(round(self.height * self.crop_top))
w_c = int(round(self.width * self.crop_width))
h_c = int(round(self.height * self.crop_height))
cmd[2:2] = ["-crop", f"{w_c}x{h_c}+{x_c}+{y_c}", "+repage"]
if crop:
w_s, h_s = scaled
w_c, h_c = crop
x, y = round(self.focus_x * (w_s - w_c)), round(self.focus_y * (h_s - h_c))
cmd[-1:-1] = ["-extent", "%dx%d+%d+%d" % (w_c, h_c, x, y)]
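        # Illustrative summary of the list manipulation above (not part of the
        # original code): with both crop and extent applied the assembled
        # command has the shape
        #   convert - -crop WxH+X+Y +repage -resize WxH> -extent WxH+X+Y -
        # reading the original image from stdin and writing the result to stdout.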
with self.cached_data.open() as f:
output = subprocess.run(
cmd, check=True, stdin=f.file, stdout=subprocess.PIPE
).stdout
etag = md5(output).digest()
output_file = ContentFile(output)
rep = self.representations.create(
media_type=self.media_type,
width=final_width,
height=final_height,
is_cropped=bool(crop),
etag=etag,
)
rep.content.save(file_name_from_etag(rep.etag, rep.media_type), output_file)
return rep
def representation_task(self, spec):
"""Celery signature to arrange for representation to be created.
Do not try to do this in the same transaction as creates the image,
as this causes a race condition.
"""
from . import tasks
return tasks.create_image_representation.si(self.pk, spec.unparse())
def queue_representation(self, spec):
"""Arrange for representation satisfying this spec to be created.
Do not try to do this in the same transaction as creates the image,
as this causes a race condition.
"""
self.representation_task(spec).delay()
@transaction.atomic
def find_representation(self, spec):
"""Return the best match for an area of this size.
If there is no exact match, fires signal.
"""
if self.width and self.height:
final_width, final_height = spec.best_match(self.width, self.height)
results = list(
self.representations.filter(
width__lte=final_width, height__lte=final_height
).order_by((F("width") * F("height")).desc())[:1]
)
result = results[0] if results else None
else:
result = None
if not result or result.width != final_width or result.height != final_height:
wants_representation.send(self.__class__, instance=self, spec=spec)
return result
def find_square_representation(self, size):
"""Return the best match for a square area of this size.
If there is no exact match, fires signal.
"""
return self.find_representation(SizeSpec.of_square(size))
def wants_size(self):
"""Indicates size is wanted and not available."""
if self.width and self.height:
return
wants_data.send(self.__class__, instance=self)
def json_name(x):
parts = x.split("_")
return parts[0] + "".join(x.title() for x in parts[1:])
GEOMETRY_RE = re.compile(r"(\d+)x(\d+)\+0\+0")
def _sniff(media_type=None, **kwargs):
"""Given a file-like object, guess media_type, width, and height.
Arguments --
kwargs -- how to get the input. Either stdin=REALFILE or input=BYTES
Returns --
MEDIA_TYPE, WIDTH, HEIGHT, PLACEHOLDER
Raises --
CannotSniff when cannot sniff
"""
if media_type and "svg" in media_type:
media_type_1, width, height, placeholder = _sniff_svg(**kwargs)
if media_type_1:
return media_type_1, width, height, placeholder
cmd = ["identify", "-colorspace", "lab", "-verbose", "-"]
result = subprocess.run(
cmd, check=False, stderr=subprocess.PIPE, stdout=subprocess.PIPE, **kwargs
)
if result.returncode:
# Command failed.
raise CannotSniff(result.returncode, result.stderr or result.stdout)
output = result.stdout
media_type, geometry, l_bit, a_bit, b_bit = _comb_imagemagick_verbose(
(
("Mime type",),
("Geometry",),
("Channel statistics", "Channel 0", "mean"),
("Channel statistics", "Channel 1", "mean"),
("Channel statistics", "Channel 2", "mean"),
),
output,
)
if geometry and (m := GEOMETRY_RE.match(geometry)):
width, height = int(m[1]), int(m[2])
else:
width, height = None, None
lab = l_bit is not None and _lab_from_imagemagick_verbose_bits(
(l_bit, a_bit, b_bit)
)
placeholder = "#%02X%02X%02X" % sRGB_from_Lab(lab) if lab else None
return media_type, width, height, placeholder
_length_pattern = re.compile(r"^\s*(\d+|\d*\.\d+)\s*([a-z]{2,3})?\s*$")
_px_per_unit = {
"px": 1.0,
"pt": 96.0 / 72.0,
"cm": 96.0 / 2.54,
"mm": 96.0 / 25.4,
"em": 16.0, # Assume my usual 16px type size
"ex": 16.0 / 2, # Half an em
"rem": 16.0, # Ditto
}
def px_from_length(length):
m = _length_pattern.match(length)
if m:
if m[2]:
return round(float(m[1]) * _px_per_unit[m[2]])
return round(float(m[1]))
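# Illustrative conversions using the unit table above (not part of the original
# module): px_from_length("24pt") -> 32, px_from_length("2em") -> 32,
# px_from_length("1.5cm") -> 57, and a bare px_from_length("100") -> 100.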
def _sniff_svg(input=None, stdin=None):
"""Given file data that might well be SVG, return type and dimensions.
Arguments --
input (bytes instance) -- the image data
stdin (file-like object) -- contains the image data
Exactly one of the above should be specified.
Returns --
MEDIA_TYPE, WIDTH, HEIGHT, PLACEHOLDER
"""
    root = ElementTree.parse(stdin).getroot() if stdin else ElementTree.fromstring(input)
width, height, view_box = root.get("width"), root.get("height"), root.get("viewBox")
if width and height:
width, height = px_from_length(width), px_from_length(height)
elif view_box:
width, height = view_box.split()[-2:]
width, height = round(float(width)), round(float(height))
else:
width, height = None, None
return (
"image/svg+xml"
if root.tag in ["{http://www.w3.org/2000/svg}svg", "svg"]
else None,
width,
height,
"#AAA",
)
_media_types_by_imagemagick = {
b"SVG": "image/svg+xml",
}
def media_type_from_imagemagick_type(type):
"""Given the type of image as reported by ImageMagick, return MIME type."""
return (
_media_types_by_imagemagick.get(type)
or "image/%s" % type.decode("UTF-8").lower()
)
_suffixes_by_media = {
"image/svg+xml": ".svg",
}
def suffix_from_media_type(media_type):
"""Given an image MIME type, return file-name suffix."""
if not media_type:
return ".data"
media_type = media_type.split(";", 1)[0]
return _suffixes_by_media.get(media_type) or "." + media_type.split("/", 1)[1]
def file_name_from_etag(etag, media_type):
return urlsafe_b64encode(etag).decode("ascii").rstrip("=") + suffix_from_media_type(
media_type
)
def _comb_imagemagick_verbose(specs, data):
"""Pull data items out of the output of `identify -colorspace lab -verbose`.
Arguments --
specs -- list of path, where a path is a list of headings
        data -- raw data as written by ImageMagick `identify -verbose`
Returns --
Sequence of values extracted from stream
(value None means that value not found).
"""
stream = io.TextIOWrapper(io.BytesIO(data), encoding="UTF-8", errors="replace")
found = {tuple(p): None for p in specs}
path = []
indents = []
indent = -1
for line in stream:
key, *rest = line.split(":", 1)
len1 = len(key)
key = key.lstrip()
new_indent = len1 - len(key)
while new_indent < indent:
path = path[:-1]
indent = indents.pop(-1)
if new_indent == indent:
path[-1] = key
else:
path.append(key)
indents.append(indent)
indent = new_indent
p = tuple(path[1:])
if p in found:
found[p] = rest[0].strip()
return tuple(found[tuple(p)] for p in specs)
# Picking a representative colour from an image to use as a placeholder.
#
# For now I am using the mean of the pixels, using the L*a*b* colourspace
# since its perceptually linear mapping from numbers to colours means the
# average colour should be closer to the average as perceived by eye.
#
# Reluctant to get into learning enough NumPy to write this directly, I am
# instead using ImageMagick’s colourspace transformation and statistics in the
# output of `identify -verbose`. I need to comb the verbose output for the
# values I want, and then convert from the transformed L*a*b* ImageMagick
# writes (the values are mapped from (0..255, -128..127, -128..127) to (0..1,
# 0..1, 0..1) by scaling and adding 0.5) back to sRGB.
COORD_RE = re.compile(r"\d*(?:\.\d+)? \((0|1|0\.\d+)\)")
def _lab_from_imagemagick_verbose_bits(bits):
"""Extract L*a*b* coordinates from the format output by `identify -verbose`."""
scaled_l, scaled_a, scaled_b = tuple(
float(m[1]) for bit in bits if bit and (m := COORD_RE.match(bit))
)
return scaled_l * 100.0, scaled_a * 255.0 - 128.0, scaled_b * 255.0 - 128.0
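# Illustrative example (hypothetical values, not from a real image): bits such as
# ("143.1 (0.561)", "132.6 (0.52)", "119.9 (0.47)") unscale to approximately
# (56.1, 4.6, -8.2) in L*a*b* terms.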
def sRGB_from_Lab(lab):
"""Convert from CIE L*a*b* colour to 8-bit sRGB.
Argument --
lab -- a tuple of floats (L*, a*, b*) in range 0..100, -128..127, -128..127
Returns --
rgb -- a tuple of integers (r, g, b) in range 0..255
"""
L_star, a_star, b_star = lab
# Convert L*a*b* to XYZ
delta = 6 / 29
# D65 white point, scaled so XYZ are in range 0..1:
X_n, Y_n, Z_n = (0.950489, 1.0, 1.088840)
def f_minus_1(t):
return t ** 3 if t > delta else 3 * delta * delta * (t - 4 / 29)
q = (L_star + 16) / 116
X = X_n * f_minus_1(q + a_star / 500)
Y = Y_n * f_minus_1(q)
Z = Z_n * f_minus_1(q - b_star / 200)
# Convert XYZ to RGB linear in 0..1 scale
R_L = 3.2406255 * X - 1.537208 * Y - 0.4986286 * Z
G_L = -0.9689307 * X + 1.8757561 * Y + 0.0415175 * Z
B_L = 0.0557101 * X - 0.2040211 * Y + 1.0569959 * Z
return tuple(
round(
255 * (323 * u / 25 if u <= 0.0031308 else (211 * u ** (5 / 12) - 11) / 200)
)
for u in (R_L, G_L, B_L)
)
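# Quick sanity check of the conversion above (illustrative only, not part of the
# original module): neutral greys have a* = b* = 0, so
#   sRGB_from_Lab((100.0, 0.0, 0.0)) -> (255, 255, 255)   # white point
#   sRGB_from_Lab((50.0, 0.0, 0.0))  -> (119, 119, 119)   # mid grey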
class Representation(models.Model):
"""A representation of an image at a given size."""
image = models.ForeignKey(
Image,
models.CASCADE,
verbose_name="representation",
related_name="representations",
related_query_name="representation",
)
content = models.FileField(
_("content"),
upload_to="i",
null=True,
blank=True,
help_text=_("Content of the image representation."),
)
media_type = models.CharField(
_("media-type"),
max_length=MAX_LENGTH,
validators=[
RegexValidator(r"^(image|application)/\w+(;\s*\w+=.*)?$"),
],
)
width = models.PositiveIntegerField(_("width"))
height = models.PositiveIntegerField(_("height"))
is_cropped = models.BooleanField(_("is cropped"))
etag = models.BinaryField(
_("etag"),
max_length=16,
editable=False,
help_text=_("Hash of the image data when generated."),
)
created = models.DateTimeField(_("created"), default=timezone.now)
modified = models.DateTimeField(_("modified"), auto_now=True)
class Meta:
verbose_name = "representation"
verbose_name_plural = "representations"
unique_together = [
("image", "width", "is_cropped"),
]
def __str__(self):
return "%s (%dx%d)" % (self.image, self.width, self.height)
| [
"[email protected]"
] | |
904911564f504e2a80881c6d84483ecf5f71f3ff | ccae8d40438c4f569463fd422d12fff7baaeba16 | /Utils/Timer.py | 9047838faf2e19200dac23dc3375fe42f8e63e88 | [] | no_license | Occy88/console_game | 2fab618812091d61a40951d15813bd7f603392a6 | 4a70e9188c2061db35712aa4dd0d91b913ee9842 | refs/heads/master | 2022-12-03T20:21:09.494594 | 2021-01-01T14:00:07 | 2021-01-01T14:00:07 | 240,998,812 | 0 | 0 | null | 2022-11-22T05:19:24 | 2020-02-17T01:52:04 | Python | UTF-8 | Python | false | false | 881 | py | import time
class Timer:
def __init__(self):
self.timer_start = 0
self.timer_end = 0
self.elapsed = 0
self.running = False
self.prev_lap = 0
def poll(self):
if self.running:
return time.time() - self.timer_start
else:
return self.elapsed
def lap(self):
if not self.running:
to_return = self.timer_end - self.prev_lap
self.prev_lap = self.timer_end
else:
to_return = time.time() - self.prev_lap
self.prev_lap = time.time()
return to_return
def stop(self):
self.timer_end = time.time()
self.elapsed = self.timer_end - self.timer_start
self.running = False
def start(self):
self.timer_start = time.time()
self.prev_lap = self.timer_start
self.running = True
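# Illustrative usage of the Timer class (not part of the original module):
# two laps are measured, then the total elapsed time between start() and stop().
if __name__ == "__main__":
    t = Timer()
    t.start()
    time.sleep(0.2)
    print("first lap:", t.lap())       # roughly 0.2s since start()
    time.sleep(0.1)
    t.stop()
    print("second lap:", t.lap())      # roughly 0.1s, measured up to stop()
    print("total elapsed:", t.poll())  # roughly 0.3s between start() and stop()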
| [
"[email protected]"
] | |
e345860053111c2bfa4a40e308126db75975ad68 | 8928c4745515ffecfc581da36df47b0789fb463f | /Chapter_5/downloader.py | ab075c69084a83f6cf614df2e43a8f8bc16ed2ff | [] | no_license | iluxonchik/webscraping-with-python-book | 72da36ba8fae016ccc20d44753ec4c46bc933dee | ffc5a1459778649d081c62812c8d3edbb2f120a9 | refs/heads/master | 2021-01-10T10:19:12.443341 | 2016-01-21T21:50:11 | 2016-01-21T21:50:11 | 48,058,040 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,261 | py | import os
from urllib.request import urlretrieve
from urllib.request import urlopen
from bs4 import BeautifulSoup
downloadDirectory = "downloaded"
baseUrl = "http://pythonscraping.com"
def getAbsoluteURL(baseUrl, source):
if source.startswith("http://www."):
url = "http://" + source[11:]
elif source.startswith("http://"):
url = source
elif source.startswith("www."):
url = "http://" + source[4:]
else:
url = baseUrl + "/" + source
if baseUrl not in url:
return None
return url
def getDownloadPath(baseUrl, absoluteUrl, downloadDirectory):
path = absoluteUrl.replace("www.", "")
path = path.replace(baseUrl, "")
    path = downloadDirectory + path
directory = os.path.dirname(path)
if not os.path.exists(directory):
os.makedirs(directory)
return removeGet(path)
def removeGet(fileName):
"""
Removes any characters after "?" in string
"""
pos = fileName.find("?")
if pos != -1:
return fileName[:pos]
return fileName
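# Illustrative behaviour of the helpers above (hypothetical file URL, not taken
# from the real page):
#   getAbsoluteURL(baseUrl, "img/logo.jpg?itok=abc")
#     -> "http://pythonscraping.com/img/logo.jpg?itok=abc"
#   getDownloadPath(baseUrl, "http://pythonscraping.com/img/logo.jpg?itok=abc", "downloaded")
#     -> "downloaded/img/logo.jpg"  (intermediate directories are created as a side effect)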
html = urlopen(baseUrl)
bsObj = BeautifulSoup(html, "html.parser")
downloadList = bsObj.findAll(src=True)
for download in downloadList:
fileUrl = getAbsoluteURL(baseUrl, download["src"])
if fileUrl is not None:
print(fileUrl)
urlretrieve(fileUrl, getDownloadPath(baseUrl, fileUrl, downloadDirectory))
| [
"[email protected]"
] | |
fc2ab2260587bdda8ade496a114f769bb62fa695 | 06671e14ae54f887be05a64c632712537d38add6 | /integration_distributed_training/server/sanity_check_redis.py | be2d662f50ca2b88be7b8e88384c2d38df63a90f | [] | no_license | Jessilee/ImportanceSamplingSGD | cf74a220a55b468b72fed0538b3a6740f532fcb2 | 0831b9b1833726391a20594d2b2f64f80e1b8fe2 | refs/heads/master | 2021-01-24T10:12:48.285641 | 2016-02-05T19:25:34 | 2016-02-05T19:25:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,488 | py |
import redis
import numpy as np
import time
import progressbar
import signal
import sys
from redis_server_wrapper import EphemeralRedisServer
def start_redis_server(server_port=None):
server_scratch_path = "."
if server_port is None:
server_port = np.random.randint(low=1025, high=65535)
#server_password = "".join(["%d" % np.random.randint(low=0, high=10) for _ in range(10)])
server_password = None
rserv = EphemeralRedisServer( scratch_path=server_scratch_path,
port=server_port, password=server_password)
rserv.start()
time.sleep(5)
rsconn = rserv.get_client()
print "pinging master server : %s" % (rsconn.ping(),)
import socket
hostname = socket.gethostname()
D_server_desc = {'hostname' : hostname, 'port' : server_port, 'password' : server_password}
return (rserv, rsconn, D_server_desc)
def test_cycle_queue(rsconn, N=20):
queue_name = "L_queue"
for n in range(N):
#value = n
value = (n * np.ones(100000, dtype=np.int8)).tostring()
rsconn.rpush(queue_name, value)
Nread = rsconn.llen(queue_name)
print "(N, Nread) is (%d, %d)." % (N, Nread)
for _ in range(1000):
for n in range(N):
e = rsconn.lpop(queue_name)
rsconn.rpush(queue_name, e)
L = []
while 0 < rsconn.llen(queue_name):
e = rsconn.lpop(queue_name)
L.append(e)
print [np.fromstring(e, dtype=np.int8)[0] for e in L]
def test_timestamp_hashmap(rsconn):
#def get_next_timestamp():
# get_next_timestamp.counter += 1.0
# return get_next_timestamp.counter
#get_next_timestamp.counter = 0.0
def get_next_timestamp():
return time.time()
N = 100
hashmap_name = "H_timestamps"
D_ref = {}
for n in range(N):
#value = n
value = (n * np.ones(100000, dtype=np.int8)).tostring()
timestamp_str = str(get_next_timestamp())
rsconn.hset(hashmap_name, value, timestamp_str)
D_ref[value] = timestamp_str
Niter = 1000
widgets = ['Parsing lines: ', progressbar.Percentage(),
' ', progressbar.Bar(marker=progressbar.RotatingMarker()),
' ', progressbar.ETA()]
pbar = progressbar.ProgressBar(widgets=widgets, maxval=Niter-1).start()
previous_timestamp = time.time()
for niter in range(Niter):
for (k, local_recorded_timestamp_str) in D_ref.items():
current_timestamp = get_next_timestamp()
database_recorded_timestamp_str = rsconn.hget(hashmap_name, k)
database_recorded_timestamp = float(database_recorded_timestamp_str)
local_recorded_timestamp = float(local_recorded_timestamp_str)
assert local_recorded_timestamp <= current_timestamp, (local_recorded_timestamp, current_timestamp)
assert database_recorded_timestamp <= current_timestamp, (database_recorded_timestamp, current_timestamp)
current_timestamp_str = str(current_timestamp)
D_ref[k] = current_timestamp_str
rsconn.hset(hashmap_name, k, current_timestamp_str)
pbar.update(niter)
def run():
(rserv, rsconn, _) = start_redis_server()
def signal_handler(signal, frame):
rserv.stop()
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
#test_cycle_queue(rsconn)
test_timestamp_hashmap(rsconn)
if __name__ == "__main__":
run()
| [
"[email protected]"
] | |
77e0749e86338275012284cf329a197dfdf60053 | 2ec5a02fe7e23d6a07ed653c240aa47ca8c58323 | /ephemeral/visualise_embeddings.py | c2d1779917c5a8cc3966fc87a785f0f0944ce944 | [
"BSD-3-Clause"
] | permissive | gob1thaasan/fMRI_Cubical_Persistence | aecfe5721e1dd0445356f3820f6c8c46a4e5bf4e | 5a4773333a33f86c17bbd0604cc48a361fc9a35f | refs/heads/master | 2022-12-27T18:05:36.885999 | 2020-10-22T16:49:01 | 2020-10-22T16:49:01 | 445,372,065 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,467 | py | #!/usr/bin/env python3
#
# Visualises persistence-based embeddings in the form of trajectories
# in a space of a given dimension.
import argparse
import json
import os
import scprep
import sys
import matplotlib.collections
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d.art3d import Line3DCollection
import numpy as np
import pandas as pd
from sklearn.decomposition import PCA
from sklearn.manifold import LocallyLinearEmbedding
from sklearn.manifold import MDS
from sklearn.manifold import TSNE
from sklearn.metrics import pairwise_distances
from sklearn.preprocessing import StandardScaler
from phate import PHATE
from m_phate import M_PHATE
from tqdm import tqdm
def embed(
encoder,
subject,
data,
suffix,
prefix=None,
metric=None,
rolling=None,
refit=True
):
"""Embed data of a given subject.
Performs the embedding for a given subject and stores the resulting
plot in an output directory.
Parameters
----------
encoder : `TransformerMixin`
Transformer class for performing the embedding or encoding of
the data.
subject : str
Name of the subject to embed
data : list of `np.array`
List of high-dimensional vectors corresponding to the feature
vectors at a given time step.
suffix : str
Suffix to use for storing embeddings
prefix : str, or `None` (optional)
Prefix to use for naming each individual embedding. Can be used
to distinguish between different sorts of outputs.
metric : str, callable, or `None` (optional)
If set, specifies the distance metric to use for the embedding
process. Needs to be a metric recognised by `scikit-learn`, or
a callable function.
rolling : int or `None`
If set, performs smoothing of the data set prior to calculating
distances.
refit : bool
If set, refits the encoder to the current data set. Else, the
encoder is used as-is.
"""
X = np.array([row for row in data])
if rolling is not None:
df = pd.DataFrame(X)
df = df.rolling(rolling, axis=0, min_periods=1).mean()
X = df.to_numpy()
# scaler = StandardScaler()
# X = scaler.fit_transform(X)
# TODO: decide whether this will be useful or not
# X -= np.mean(X, axis=0)
if metric is not None:
X = pairwise_distances(X, metric=metric)
# TODO: check whether the classifier supports this
encoder.set_params(dissimilarity='precomputed')
try:
if refit:
X = encoder.fit_transform(X)
else:
X = encoder.transform(X)
except ValueError:
# Nothing to do but to move on...
return
colours = np.linspace(0, 1, len(X))
if args.dimension == 3:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
else:
fig, ax = plt.subplots()
fig.set_size_inches(5, 5)
ax.set_title(subject)
min_x = X[:, 0].min()
max_x = X[:, 0].max()
min_y = X[:, 1].min()
max_y = X[:, 1].max()
if args.dimension == 3:
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=colours)
min_z = X[:, 2].min()
max_z = X[:, 2].max()
ax.set_zlim(min_z, max_z)
elif args.dimension == 2:
ax.scatter(X[:, 0], X[:, 1], c=colours, cmap='Spectral')
ax.set_xlim(min_x, max_x)
ax.set_ylim(min_y, max_y)
    # TODO: adjust this path correctly
path = f'../../figures/persistence_images_embeddings/{suffix}'
# Create output directories for storing *all* subjects in. This
# depends on the input file.
os.makedirs(path, exist_ok=True)
plt.tight_layout()
name = ''
if prefix is not None:
name += f'{prefix}'
name += f'_{args.dimension}D'
if rolling is not None:
name += f'_r{rolling}'
else:
name += f'_r0'
# TODO: this cannot handle callable arguments yet, but at least some
# simple defaults.
if type(metric) is str:
name += f'_{metric}'
name += f'_{subject}'
if args.global_embedding:
name += '_global'
plt.savefig(
os.path.join(path, f'{name}.png'),
bbox_inches='tight'
)
# Save the raw data as well
df = pd.DataFrame(X)
df.index.name = 'time'
if args.dimension == 3:
df.columns = ['x', 'y', 'z']
elif args.dimension == 2:
df.columns = ['x', 'y']
df.to_csv(
os.path.join(path, f'{name}.csv'),
float_format='%.04f',
index=True
)
# FIXME: make density calculation configurable
if args.dimension == 2 and False:
ax.clear()
ax.set_title(subject)
ax.set_xlim(min_x, max_x)
ax.set_ylim(min_y, max_y)
ax.hist2d(X[:, 0], X[:, 1], bins=20, cmap='viridis')
plt.tight_layout()
plt.savefig(
os.path.join(path, f'{name}_density.png'),
bbox_inches='tight'
)
plt.close(fig)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'INPUT',
help='Input file (JSON)',
type=str
)
parser.add_argument(
'-d', '--dimension',
help='Dimension of visualisation',
type=int,
default=2,
)
parser.add_argument(
'-e', '--encoder',
help='Specifies encoding/embedding method',
type=str,
default='pca'
)
parser.add_argument(
'-m', '--metric',
help='Specifies metric for calculating embedding',
type=str,
)
parser.add_argument(
'-g', '--global-embedding',
action='store_true',
help='If set, calculates *global* embeddings'
)
parser.add_argument(
'-t', '--trajectories',
action='store_true',
help='If set, shows trajectory visualisations'
)
parser.add_argument(
'-r', '--rolling',
type=int,
default=None,
help='If set, performs rolling average calculation'
)
args = parser.parse_args()
with open(args.INPUT) as f:
data = json.load(f)
basename = os.path.basename(args.INPUT)
basename = os.path.splitext(basename)[0]
if args.encoder == 'pca':
encoder = PCA(
n_components=args.dimension,
random_state=42
)
elif args.encoder == 'mds':
encoder = MDS(
n_components=args.dimension,
random_state=42,
)
elif args.encoder == 'tsne':
encoder = TSNE(
n_components=args.dimension,
random_state=42,
)
elif args.encoder == 'lle':
encoder = LocallyLinearEmbedding(
n_components=args.dimension,
random_state=42,
)
elif args.encoder == 'phate':
encoder = PHATE(
n_components=args.dimension,
mds_solver='smacof',
random_state=42,
)
elif args.encoder == 'm-phate':
encoder = M_PHATE(
n_components=args.dimension,
random_state=42,
n_landmark=1000,
mds_solver='sgd',
n_jobs=-1,
)
# Filter subjects; this could be solved smarter...
subjects = data.keys()
subjects = [subject for subject in subjects if len(subject) == 3]
if args.global_embedding:
if args.dimension == 3:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
else:
fig, ax = plt.subplots()
# Fit the estimator on *all* subjects first, then attempt to
# embed them individually.
#
# M-PHATE gets special treatment because it is the only method
# capable of handling tensors correctly.
if args.encoder == 'm-phate':
X = np.concatenate(
[np.array([data[subject]]) for subject in subjects]
)
X = np.moveaxis(X, 1, 0)
# Create verbose variables for this encoder; this will not
# be used for other embeddings.
n_time_steps = X.shape[0]
n_samples = X.shape[1]
n_features = X.shape[2]
else:
X = np.concatenate(
[np.array(data[subject]) for subject in subjects]
)
X = encoder.fit_transform(X)
# Create array full of subject indices. We can use this to
# colour-code subjects afterwards.
indices = np.concatenate([
np.full(fill_value=int(subject),
shape=np.array(data[subject]).shape[0])
for subject in subjects
])
for subject in subjects:
indices_per_subject = np.argwhere(indices == int(subject))
X_per_subject = X[indices_per_subject[:, 0]]
if args.dimension == 2:
X_per_subject = X_per_subject.reshape(-1, 1, 2)
elif args.dimension == 3:
X_per_subject = X_per_subject.reshape(-1, 1, 3)
segments = np.concatenate(
[X_per_subject[:-1], X_per_subject[1:]], axis=1
)
if args.dimension == 2:
instance = matplotlib.collections.LineCollection
elif args.dimension == 3:
instance = Line3DCollection
lc = instance(
segments,
color='k',
zorder=0,
linewidths=1.0,
)
if args.dimension == 2:
ax.add_collection(lc)
elif args.dimension == 3:
                # TODO: until the issue with the line collection has
                # been fixed, do *not* show them for 3D plots.
pass
if encoder == 'm-phate':
indices = np.repeat(np.arange(n_time_steps), n_samples)
if args.dimension == 2:
scatter = ax.scatter(
X[:, 0], X[:, 1], c=indices, cmap='Spectral',
zorder=10,
s=10.0,
)
elif args.dimension == 3:
scatter = ax.scatter(
X[:, 0], X[:, 1], X[:, 2], c=indices, cmap='Spectral',
zorder=10,
s=10.0,
)
plt.colorbar(scatter)
path = f'../../figures/persistence_images_embeddings/{basename}'
plt.tight_layout()
plt.savefig(
os.path.join(path, f'{args.encoder}_global.png'),
bbox_inches='tight'
)
plt.close(fig)
if args.dimension == 2:
fig, ax = plt.subplots()
_, _, _, hist = ax.hist2d(
X[:, 0], X[:, 1],
bins=30,
cmap='viridis',
)
fig.colorbar(hist)
plt.tight_layout()
plt.savefig(
os.path.join(path, f'{args.encoder}_density.png'),
bbox_inches='tight'
)
plt.close(fig)
refit = False
else:
refit = True
# M-PHATE only supports global embeddings
if args.encoder == 'm-phate':
sys.exit(0)
for subject in tqdm(subjects, desc='Subject'):
embed(
encoder,
subject,
data[subject],
basename,
prefix=args.encoder,
metric=args.metric,
rolling=args.rolling,
refit=refit,
)
| [
"[email protected]"
] | |
d516c0765a53652ce4b81c21c84245df87e8baf7 | 3419067388879d8a6542df01cb0278ae90b021a2 | /面向对象02/04-__del__方法.py | 4f57a186c20066a4ae3cea8e7927fc016c4ee51b | [] | no_license | oweson/python-river-master | faa31c5248e297a92054cc302e213e2b37fb8bd5 | cf9e99e611311b712465eb11dec4bb8f712929b2 | refs/heads/master | 2021-06-21T15:47:01.755957 | 2019-10-02T00:08:05 | 2019-10-02T00:08:05 | 205,607,518 | 0 | 0 | null | 2021-06-10T21:55:20 | 2019-08-31T23:39:55 | Python | UTF-8 | Python | false | false | 526 | py | class Dog:
def __init__(self):
print("英雄出生啦!")
def __del__(self):
print("-----英雄over------")
dog1 = Dog()
dog2 = dog1
pig = dog2
del pig
del dog1  # __del__ is not called, because another variable still refers to this object, i.e. the reference count is not 0
del dog2  # __del__ is called now, because no variable refers to the object any more
print("====================")
# If some objects still exist when the program ends, the Python interpreter automatically calls their __del__ method to do the cleanup work
| [
"[email protected]"
] | |
9d9ead965bb272ef26c91d607ee1edf97b67eb46 | c4bed653be5a24b9cdd8816a05bba9faf2314798 | /everpad/pad/editor/content.py | 36a00d2cffd73c79974998f05f97ae08875d6992 | [] | no_license | hechaoyuyu/everpad | 842188ec6328aa10fc8f28f6ade807143dcdbc4d | 0d0732c2187808f622235caa1f771e0c7643c3f2 | refs/heads/master | 2021-01-16T18:38:22.005686 | 2013-01-15T14:38:19 | 2013-01-15T14:38:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,363 | py | from PySide.QtGui import (
QIcon, QPixmap, QAction, QFileDialog,
QShortcut, QKeySequence, QInputDialog,
QPrintPreviewDialog, QPrinter, QDropEvent,
QDragEnterEvent, QDragMoveEvent, QApplication,
QDesktopServices, QApplication,
)
from PySide.QtCore import (
Slot, Qt, QPoint, QObject, Signal, QUrl,
QFileInfo,
)
from PySide.QtWebKit import QWebPage, QWebSettings
from everpad.pad.editor.actions import ImagePrefs, TableWidget
from everpad.pad.tools import file_icon_path
from everpad.tools import sanitize, clean, html_unescape
from everpad.const import DEFAULT_FONT, DEFAULT_FONT_SIZE
from BeautifulSoup import BeautifulSoup
from functools import partial
import webbrowser
import magic
import os
import json
import re
import cgi
class Page(QWebPage):
def __init__(self, edit):
QWebPage.__init__(self)
self.current = None
self.edit = edit
self.active_image = None
self.active_link = None
self.active_table = None
settings = self.settings()
family = self.edit.app.settings.value(
'note-font-family', DEFAULT_FONT,
)
size = int(self.edit.app.settings.value(
'note-font-size', DEFAULT_FONT_SIZE,
))
settings.setFontFamily(
QWebSettings.StandardFont, family,
)
settings.setFontSize(
QWebSettings.DefaultFontSize, size,
)
settings.setFontSize(
QWebSettings.DefaultFixedFontSize, size,
)
# This allows JavaScript to call back to Slots, connect to Signals
# and access/modify Qt props
self.mainFrame().addToJavaScriptWindowObject("qpage", self)
@Slot()
def show_findbar(self):
self.edit.parent.findbar.show()
@Slot()
def find_next(self):
self.edit.parent.findbar.find_next()
@Slot()
def find_previous(self):
self.edit.parent.findbar.find_previous()
@Slot(str)
def set_current_focus(self, focused):
self.current = focused
@Slot()
def page_changed(self):
self.edit.page_changed()
@Slot(str, int, int)
def set_active_image_info(self, image, width, height):
self.active_image = image
self.active_width = width
self.active_height = height
@Slot(str)
def set_active_link(self, url):
self.active_link = url
@Slot(str)
def set_active_table(self, id):
self.active_table = id
def javaScriptConsoleMessage(self, message, lineNumber, sourceID):
print message
def acceptNavigationRequest(self, frame, request, type):
modifiers = QApplication.keyboardModifiers()
if modifiers == Qt.ControlModifier and type == QWebPage.NavigationTypeLinkClicked:
QDesktopServices.openUrl(request.url())
return False
def event(self, e):
if isinstance(e, QDragEnterEvent):
data = e.mimeData()
if data.hasUrls():
e.accept()
else:
e.ignore()
return True
elif isinstance(e, QDragMoveEvent):
pass
elif isinstance(e, QDropEvent):
self.edit.insert_files(e.mimeData().urls(), e.pos())
return True
return super(Page, self).event(e)
class ContentEdit(QObject):
_editor_path = os.path.join(
os.path.dirname(__file__), 'editor.html',
)
if not os.path.exists(_editor_path):
_editor_path = '/usr/share/everpad/editor.html'
_html = open(_editor_path).read()
copy_available = Signal(bool)
def __init__(self, parent, widget, on_change):
QObject.__init__(self)
self.parent = parent
self.app = QApplication.instance()
self.widget = widget
self.page = Page(self)
self._on_change = on_change
self._title = None
self._content = None
self._hovered_url = None
self.widget.setContextMenuPolicy(Qt.CustomContextMenu)
self.widget.customContextMenuRequested.connect(self.context_menu)
self._init_actions()
self._init_shortcuts()
self._last_table_num = 0
def _init_actions(self):
self.check_action = QAction(self.tr('Insert Checkbox'), self)
self.check_action.triggered.connect(self._insert_check)
self.link_action = QAction(self.tr('Insert Link'), self)
self.link_action.triggered.connect(self._insert_link)
self.table_action = QAction(self.tr('Insert Table'), self)
self.table_action.triggered.connect(self._insert_table)
self.image_action = QAction(self.tr('Insert Image'), self)
self.image_action.triggered.connect(self._insert_image)
self.change_link = QAction(self.tr('Change link'), self)
self.change_link.triggered.connect(
Slot()(partial(self._change_link, self.page.active_link))
)
self.remove_link = QAction(self.tr('Remove link'), self)
self.remove_link.triggered.connect(self._remove_link)
def _init_shortcuts(self):
for key, action in (
('Ctrl+b', QWebPage.ToggleBold),
('Ctrl+i', QWebPage.ToggleItalic),
('Ctrl+u', QWebPage.ToggleUnderline),
('Ctrl+Shift+b', QWebPage.InsertUnorderedList),
('Ctrl+Shift+o', QWebPage.InsertOrderedList),
('Ctrl+Shift+v', QWebPage.PasteAndMatchStyle),
('Ctrl+k', self.link_action),
('Ctrl+Shift+k', self.change_link),
('Ctrl+l', QWebPage.AlignLeft),
('Ctrl+r', QWebPage.AlignRight),
('Ctrl+e', QWebPage.AlignCenter),
('Ctrl+j', QWebPage.AlignJustified),
('Ctrl+t', QWebPage.ToggleStrikethrough),
('Ctrl+Space', QWebPage.RemoveFormat),
('Ctrl+Shift+c', self.check_action),
):
QShortcut(
QKeySequence(self.app.tr(key)),
self.widget,
).activated.connect(
Slot()(partial(self._action_for_key, action)),
)
@property
def title(self):
"""Cache title and return"""
soup = BeautifulSoup(self.page.mainFrame().toHtml())
self._title = soup.find(id='title').text
return clean(html_unescape(self._title))
@title.setter
def title(self, val):
"""Set title"""
self._title = val
self.apply()
@property
def content(self):
"""Cache content and return"""
soup = BeautifulSoup(self.page.mainFrame().toHtml())
for todo in soup.findAll('input', {'type': 'checkbox'}):
todo.name = 'en-todo'
if todo.get('checked') == 'false':
del todo['checked']
del todo['type']
for media in soup.findAll('img'):
if media.get('class') == 'tab':
media.replaceWith(' ' * 5)
if media.get('hash'):
media.name = 'en-media'
del media['src']
del media['title']
        # remove table ids before saving
for table in soup.findAll('table'):
del table['id']
self._content = sanitize(
soup=soup.find(id='content'),
).replace(' ', u'\xa0\xa0').replace(u'\xa0 ', u'\xa0\xa0')
return self._content
@content.setter
def content(self, val):
"""Set content"""
soup = BeautifulSoup(val)
for todo in soup.findAll('en-todo'):
todo.name = 'input'
todo['type'] = 'checkbox'
if todo.get('checked') == 'false':
del todo['checked']
self.changed_by_default = True
for media in soup.findAll('en-media'):
if media.get('hash'): # evernote android app error
media.name = 'img'
res = self.parent.resource_edit.get_by_hash(media['hash']) # shit!
if res:
if media['type'].find('image') == 0:
media['src'] = 'file://%s' % res.file_path
else:
media['src'] = file_icon_path
media['title'] = res.file_name
res.in_content = True
else:
media['src'] = ''
media['title'] = ''
else:
media.hidden = True
        # set table ids for identifying tables on hover
for num, table in enumerate(soup.findAll('table')):
table['id'] = 'table_%d' % num
self._last_table_num = num
self._content = re.sub(
r'( | ){5}', '<img class="tab" />',
unicode(soup).replace(u'\xa0', ' '),
) # shit!
self.apply()
def apply(self):
"""Apply title and content when filled"""
if None not in (self._title, self._content):
self.page.mainFrame().setHtml(self._html % {
'title': cgi.escape(self._title),
'content': self._content,
})
self.widget.setPage(self.page)
self.page.selectionChanged.connect(self.selection_changed)
self.page.setLinkDelegationPolicy(QWebPage.DelegateAllLinks)
self.page.linkClicked.connect(self.link_clicked)
self.page.linkHovered.connect(self.link_hovered)
self.page.contentsChanged.connect(self.page_changed)
@Slot()
def selection_changed(self):
self.copy_available.emit(
self.page.current == 'body'
)
@Slot(QUrl)
def link_clicked(self, url):
webbrowser.open(url.toString())
@Slot(QPoint)
def context_menu(self, pos, image_hash=None):
"""Show custom context menu"""
menu = self.page.createStandardContextMenu()
menu.clear()
menu.addAction(self.page.action(QWebPage.Cut))
menu.addAction(self.page.action(QWebPage.Copy))
menu.addAction(self.page.action(QWebPage.Paste))
paste_wo = self.page.action(QWebPage.PasteAndMatchStyle)
paste_wo.setText(self.tr('Paste as Plain Text'))
menu.addAction(paste_wo)
if self._hovered_url:
menu.addAction(self.page.action(QWebPage.CopyLinkToClipboard))
menu.addAction(self.change_link)
menu.addAction(self.remove_link)
self.page.active_link = None
if self.page.active_image:
res = self.parent.resource_edit.get_by_hash(self.page.active_image)
self.page.active_image = None
menu.addAction(
self.tr('Image Preferences'),
Slot()(partial(self._show_image_dialog, res)),
)
if self.page.active_table:
menu.addAction(
self.tr('Change table'),
Slot()(partial(self._update_table, self.page.active_table)),
)
self.page.active_table = None
menu.addSeparator()
menu.addAction(self.page.action(QWebPage.RemoveFormat))
menu.addAction(self.page.action(QWebPage.SelectAll))
menu.exec_(self.widget.mapToGlobal(pos))
@Slot(unicode, unicode, unicode)
def link_hovered(self, link, title, text):
self._hovered_url = link
@Slot()
def page_changed(self):
self._on_change()
def _action_with_icon(self, action_type, icon_names, is_action=False):
if is_action:
action = action_type
else:
action = self.page.action(action_type)
for icon_name in icon_names:
if QIcon.hasThemeIcon(icon_name):
action.setIcon(QIcon.fromTheme(icon_name))
break
self.copy_available.connect(action.setEnabled)
return action
def _action_for_key(self, action):
if self.page.current == 'body':
if isinstance(action, QWebPage.WebAction):
action = self.page.action(action)
action.trigger()
@Slot()
def _insert_link(self):
url, ok = QInputDialog.getText(self.parent,
self.tr('Everpad / Insert link'),
self.tr('Press link address'),
)
if ok and url:
self.page.mainFrame().evaluateJavaScript(
'insertLink(%s);' % json.dumps(url),
)
self.page_changed()
def _change_link(self, url):
url, ok = QInputDialog.getText(self.parent,
self.tr('Everpad / Change link'),
self.tr('Press new link address'),
text=url,
)
if ok and url:
self.page.mainFrame().evaluateJavaScript(
'changeLink(%s);' % json.dumps(url),
)
self.page_changed()
@Slot()
def _remove_link(self):
self.page.mainFrame().evaluateJavaScript(
'removeLink();',
)
self.page_changed()
@Slot()
def _insert_table(self):
dialog = TableWidget(self.parent)
self._last_table_num += 1
if dialog.exec_():
self.page.mainFrame().evaluateJavaScript(
'insertTable(%s, %s, "%s", "table_%d");' % (
dialog.ui.rows.text(),
dialog.ui.columns.text(),
dialog.get_width(),
self._last_table_num,
)
)
def _update_table(self, id):
rows = self.page.mainFrame().evaluateJavaScript(
'getTableRows("%s")' % id,
)
cells = self.page.mainFrame().evaluateJavaScript(
'getTableCells("%s")' % id,
)
dialog = TableWidget(self.parent, rows, cells)
self._last_table_num += 1
if dialog.exec_():
self.page.mainFrame().evaluateJavaScript(
'updateTable(%s, %s, "%s", "%s");' % (
dialog.ui.rows.text(),
dialog.ui.columns.text(),
dialog.get_width(),
id,
)
)
@Slot()
def _insert_check(self):
self.page.mainFrame().evaluateJavaScript(
'insertCheck();',
)
self.page_changed()
@Slot()
def _insert_image(self):
name = QFileDialog.getOpenFileName(
filter=self.tr("Image Files (*.png *.jpg *.bmp *.gif)"),
)[0]
if name:
self._insert_image_from_path(name)
def _insert_image_from_path(self, path):
res = self.parent.resource_edit.add_attach(path)
self.paste_res(res)
def get_format_actions(self):
actions = [
(QWebPage.ToggleBold,
['format-text-bold', 'everpad-text-bold']),
(QWebPage.ToggleItalic,
['format-text-italic', 'everpad-text-italic']),
(QWebPage.ToggleUnderline,
['format-text-underline', 'everpad-text-underline']),
(QWebPage.ToggleStrikethrough,
['format-text-strikethrough', 'everpad-text-strikethrough']),
(QWebPage.AlignCenter,
['format-justify-center', 'everpad-justify-center']),
(QWebPage.AlignJustified,
['format-justify-fill', 'everpad-justify-fill']),
(QWebPage.AlignLeft,
['format-justify-left', 'everpad-justify-left']),
(QWebPage.AlignRight,
['format-justify-right', 'everpad-justify-right']),
]
if self._enable_text_direction_support():
actions += [
(QWebPage.SetTextDirectionLeftToRight,
['format-text-direction-ltr', 'everpad-text-direction-ltr']),
(QWebPage.SetTextDirectionRightToLeft,
['format-text-direction-rtl', 'everpad-text-direction-rtl']),
]
actions += [
(QWebPage.InsertUnorderedList,
['format-list-unordered', 'everpad-list-unordered']),
(QWebPage.InsertOrderedList,
['format-list-ordered', 'everpad-list-ordered']),
# Don't include 'checkbox' since it looks bad in some default themes
(self.check_action, ['everpad-checkbox'], True),
(self.table_action, ['insert-table', 'everpad-insert-table'], True),
(self.link_action, ['insert-link'], True),
(self.image_action, ['insert-image'], True),
]
return map(lambda action: self._action_with_icon(*action), actions)
def paste_res(self, res):
if res.mime.find('image') == 0:
preview = 'file://%s' % res.file_path
else:
preview = file_icon_path
self.page.mainFrame().evaluateJavaScript(
'insertRes("%s", "%s", "%s");' % (
preview, res.hash, res.mime,
),
)
self.page_changed()
def _show_image_dialog(self, res):
res.w = int(self.page.active_width)
res.h = int(self.page.active_height)
dialog = ImagePrefs(res, self.parent)
if dialog.exec_():
w, h = dialog.get_size()
self.page.mainFrame().evaluateJavaScript(
'changeRes("%s", %d, %d);' % (
res.hash, w, h,
),
)
self.page_changed()
def in_content(self, res):
return self.page.mainFrame().evaluateJavaScript(
'resExist("%s");' % (
res.hash,
),
)
def _enable_text_direction_support(self):
def is_rtl_language(language_code):
if not language_code:
return False
rtl_languages = ['ar', 'fa', 'he', 'ur']
for lang in rtl_languages:
if language_code.startswith(lang):
return True
return False
import locale
default_language = locale.getdefaultlocale()[0]
if is_rtl_language(default_language):
return True
# If the default language is not a RTL language, go through the preferred list
# of languages to see if one of them is RTL. Unfortunately, this is platform-specific
# logic, and I couldn't find a portable way of doing it.
preferred_languages = os.getenv('LANGUAGE', '').split(':')
for language in preferred_languages:
if is_rtl_language(language):
return True
return False
def print_(self):
"""Print note with preview"""
printer = QPrinter()
dialog = QPrintPreviewDialog(printer)
dialog.paintRequested.connect(self.page.view().print_)
dialog.exec_()
def email_note(self):
body = self.page.mainFrame().toPlainText()[
len(self.title):
].strip()
url = QUrl("mailto:")
url.addQueryItem("subject", self.title)
url.addQueryItem("body", body)
QDesktopServices.openUrl(url)
def insert_files(self, urls, pos):
"""Not only images"""
image_extensions = ['.png', '.jpg', '.bmp', '.gif']
for url in urls:
if url.scheme() == 'file':
path = url.path()
ext = os.path.splitext(path)[1]
if os.path.exists(path) and ext in image_extensions:
self._insert_image_from_path(path)
else:
self.parent.resource_edit.add_attach(path)
| [
"[email protected]"
] | |
d6451583c9261fe045e59096f4413197ec245229 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03816/s835463454.py | bb046e7283c6af48d8494de5f20feb193506bf36 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | N = int(input())
A = list(map(int, input().split()))
s = set(A)
if len(s)%2 == 0:
ans = len(s)-1
else:
ans = len(s)
print(ans) | [
"[email protected]"
] | |
d18c48e1c9e31fcd43c523c8aec7b75312b8f08f | 98d832289b7437247ce03ea54ad3cb7b95451159 | /rapid7vmconsole/models/disk_free.py | fc0bdc97d984c0fa933b54ae148bfe5d434c05a4 | [
"MIT"
] | permissive | rmehilli-r7/vm-console-client-python | 7f02f13345dce4f4d4d85e18da7146daeefbceb9 | 069041c1c7b53c6b3d8bfdd81b974141bfca3c0c | refs/heads/master | 2020-03-23T11:20:33.364442 | 2018-08-10T20:06:37 | 2018-08-10T20:06:37 | 141,498,444 | 0 | 0 | MIT | 2018-08-08T19:58:45 | 2018-07-18T23:00:41 | Python | UTF-8 | Python | false | false | 51,784 | py | # coding: utf-8
"""
InsightVM API
# Overview This guide documents the InsightVM Application Programming Interface (API) Version 3. This API supports the Representation State Transfer (REST) design pattern. Unless noted otherwise this API accepts and produces the `application/json` media type. This API uses Hypermedia as the Engine of Application State (HATEOAS) and is hypermedia friendly. All API connections must be made to the security console using HTTPS. ## Versioning Versioning is specified in the URL and the base path of this API is: `https://<host>:<port>/api/3/`. ## Specification An <a target=\"_blank\" href=\"https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md\">OpenAPI v2</a> specification (also known as Swagger 2) of this API is available. Tools such as <a target=\"_blank\" href=\"https://github.com/swagger-api/swagger-codegen\">swagger-codegen</a> can be used to generate an API client in the language of your choosing using this specification document. <p class=\"openapi\">Download the specification: <a class=\"openapi-button\" target=\"_blank\" download=\"\" href=\"/api/3/json\"> Download </a></p> ## Authentication Authorization to the API uses HTTP Basic Authorization (see <a target=\"_blank\" href=\"https://www.ietf.org/rfc/rfc2617.txt\">RFC 2617</a> for more information). Requests must supply authorization credentials in the `Authorization` header using a Base64 encoded hash of `\"username:password\"`. <!-- ReDoc-Inject: <security-definitions> --> ### 2FA This API supports two-factor authentication (2FA) by supplying an authentication token in addition to the Basic Authorization. The token is specified using the `Token` request header. To leverage two-factor authentication, this must be enabled on the console and be configured for the account accessing the API. ## Resources ### Naming Resource names represent nouns and identify the entity being manipulated or accessed. All collection resources are pluralized to indicate to the client they are interacting with a collection of multiple resources of the same type. Singular resource names are used when there exists only one resource available to interact with. The following naming conventions are used by this API: | Type | Case | | --------------------------------------------- | ------------------------ | | Resource names | `lower_snake_case` | | Header, body, and query parameters parameters | `camelCase` | | JSON fields and property names | `camelCase` | #### Collections A collection resource is a parent resource for instance resources, but can itself be retrieved and operated on independently. Collection resources use a pluralized resource name. The resource path for collection resources follow the convention: ``` /api/3/{resource_name} ``` #### Instances An instance resource is a \"leaf\" level resource that may be retrieved, optionally nested within a collection resource. Instance resources are usually retrievable with opaque identifiers. The resource path for instance resources follows the convention: ``` /api/3/{resource_name}/{instance_id}... ``` ## Verbs The following HTTP operations are supported throughout this API. The general usage of the operation and both its failure and success status codes are outlined below. | Verb | Usage | Success | Failure | | --------- | ------------------------------------------------------------------------------------- | ----------- | -------------------------------------------------------------- | | `GET` | Used to retrieve a resource by identifier, or a collection of resources by type. 
| `200` | `400`, `401`, `402`, `404`, `405`, `408`, `410`, `415`, `500` | | `POST` | Creates a resource with an application-specified identifier. | `201` | `400`, `401`, `404`, `405`, `408`, `413`, `415`, `500` | | `POST` | Performs a request to queue an asynchronous job. | `202` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Creates a resource with a client-specified identifier. | `200` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Performs a full update of a resource with a specified identifier. | `201` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `DELETE` | Deletes a resource by identifier or an entire collection of resources. | `204` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `OPTIONS` | Requests what operations are available on a resource. | `200` | `401`, `404`, `405`, `408`, `500` | ### Common Operations #### OPTIONS All resources respond to the `OPTIONS` request, which allows discoverability of available operations that are supported. The `OPTIONS` response returns the acceptable HTTP operations on that resource within the `Allow` header. The response is always a `200 OK` status. ### Collection Resources Collection resources can support the `GET`, `POST`, `PUT`, and `DELETE` operations. #### GET The `GET` operation invoked on a collection resource indicates a request to retrieve all, or some, of the entities contained within the collection. This also includes the optional capability to filter or search resources during the request. The response from a collection listing is a paginated document. See [hypermedia links](#section/Overview/Paging) for more information. #### POST The `POST` is a non-idempotent operation that allows for the creation of a new resource when the resource identifier is not provided by the system during the creation operation (i.e. the Security Console generates the identifier). The content of the `POST` request is sent in the request body. The response to a successful `POST` request should be a `201 CREATED` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. The `POST` to a collection resource can also be used to interact with asynchronous resources. In this situation, instead of a `201 CREATED` response, the `202 ACCEPTED` response indicates that processing of the request is not fully complete but has been accepted for future processing. This request will respond similarly with a `Location` header with link to the job-oriented asynchronous resource that was created and/or queued. #### PUT The `PUT` is an idempotent operation that either performs a create with user-supplied identity, or a full replace or update of a resource by a known identifier. The response to a `PUT` operation to create an entity is a `201 Created` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. `PUT` on a collection resource replaces all values in the collection. The typical response to a `PUT` operation that updates an entity is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. #### DELETE The `DELETE` is an idempotent operation that physically deletes a resource, or removes an association between resources. The typical response to a `DELETE` operation is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. 
### Instance Resources Instance resources can support the `GET`, `PUT`, `POST`, `PATCH` and `DELETE` operations. #### GET Retrieves the details of a specific resource by its identifier. The details retrieved can be controlled through property selection and property views. The content of the resource is returned within the body of the response in the acceptable media type. #### PUT Allows for and idempotent \"full update\" (complete replacement) on a specific resource. If the resource does not exist, it will be created; if it does exist, it is completely overwritten. Any omitted properties in the request are assumed to be undefined/null. For \"partial updates\" use `POST` or `PATCH` instead. The content of the `PUT` request is sent in the request body. The identifier of the resource is specified within the URL (not the request body). The response to a successful `PUT` request is a `201 CREATED` to represent the created status, with a valid `Location` header field set to the URI that can be used to access to the newly created (or fully replaced) resource. #### POST Performs a non-idempotent creation of a new resource. The `POST` of an instance resource most commonly occurs with the use of nested resources (e.g. searching on a parent collection resource). The response to a `POST` of an instance resource is typically a `200 OK` if the resource is non-persistent, and a `201 CREATED` if there is a resource created/persisted as a result of the operation. This varies by endpoint. #### PATCH The `PATCH` operation is used to perform a partial update of a resource. `PATCH` is a non-idempotent operation that enforces an atomic mutation of a resource. Only the properties specified in the request are to be overwritten on the resource it is applied to. If a property is missing, it is assumed to not have changed. #### DELETE Permanently removes the individual resource from the system. If the resource is an association between resources, only the association is removed, not the resources themselves. A successful deletion of the resource should return `204 NO CONTENT` with no response body. This operation is not fully idempotent, as follow-up requests to delete a non-existent resource should return a `404 NOT FOUND`. ## Requests Unless otherwise indicated, the default request body media type is `application/json`. ### Headers Commonly used request headers include: | Header | Example | Purpose | | ------------------ | --------------------------------------------- | ---------------------------------------------------------------------------------------------- | | `Accept` | `application/json` | Defines what acceptable content types are allowed by the client. For all types, use `*/*`. | | `Accept-Encoding` | `deflate, gzip` | Allows for the encoding to be specified (such as gzip). | | `Accept-Language` | `en-US` | Indicates to the server the client's locale (defaults `en-US`). | | `Authorization ` | `Basic Base64(\"username:password\")` | Basic authentication | | `Token ` | `123456` | Two-factor authentication token (if enabled) | ### Dates & Times Dates and/or times are specified as strings in the ISO 8601 format(s). 
The following formats are supported as input: | Value | Format | Notes | | --------------------------- | ------------------------------------------------------ | ----------------------------------------------------- | | Date | YYYY-MM-DD | Defaults to 12 am UTC (if used for a date & time | | Date & time only | YYYY-MM-DD'T'hh:mm:ss[.nnn] | Defaults to UTC | | Date & time in UTC | YYYY-MM-DD'T'hh:mm:ss[.nnn]Z | | | Date & time w/ offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm | | | Date & time w/ zone-offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm[<zone-id>] | | ### Timezones Timezones are specified in the regional zone format, such as `\"America/Los_Angeles\"`, `\"Asia/Tokyo\"`, or `\"GMT\"`. ### Paging Pagination is supported on certain collection resources using a combination of two query parameters, `page` and `size`. As these are control parameters, they are prefixed with the underscore character. The page parameter dictates the zero-based index of the page to retrieve, and the `size` indicates the size of the page. For example, `/resources?page=2&size=10` will return page 3, with 10 records per page, giving results 21-30. The maximum page size for a request is 500. ### Sorting Sorting is supported on paginated resources with the `sort` query parameter(s). The sort query parameter(s) supports identifying a single or multi-property sort with a single or multi-direction output. The format of the parameter is: ``` sort=property[,ASC|DESC]... ``` Therefore, the request `/resources?sort=name,title,DESC` would return the results sorted by the name and title descending, in that order. The sort directions are either ascending `ASC` or descending `DESC`. With single-order sorting, all properties are sorted in the same direction. To sort the results with varying orders by property, multiple sort parameters are passed. For example, the request `/resources?sort=name,ASC&sort=title,DESC` would sort by name ascending and title descending, in that order. ## Responses The following response statuses may be returned by this API. | Status | Meaning | Usage | | ------ | ------------------------ |------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `200` | OK | The operation performed without error according to the specification of the request, and no more specific 2xx code is suitable. | | `201` | Created | A create request has been fulfilled and a resource has been created. The resource is available as the URI specified in the response, including the `Location` header. | | `202` | Accepted | An asynchronous task has been accepted, but not guaranteed, to be processed in the future. | | `400` | Bad Request | The request was invalid or cannot be otherwise served. The request is not likely to succeed in the future without modifications. | | `401` | Unauthorized | The user is unauthorized to perform the operation requested, or does not maintain permissions to perform the operation on the resource specified. | | `403` | Forbidden | The resource exists to which the user has access, but the operating requested is not permitted. | | `404` | Not Found | The resource specified could not be located, does not exist, or an unauthenticated client does not have permissions to a resource. | | `405` | Method Not Allowed | The operations may not be performed on the specific resource. Allowed operations are returned and may be performed on the resource. 
| | `408` | Request Timeout | The client has failed to complete a request in a timely manner and the request has been discarded. | | `413` | Request Entity Too Large | The request being provided is too large for the server to accept processing. | | `415` | Unsupported Media Type | The media type is not supported for the requested resource. | | `500` | Internal Server Error | An internal and unexpected error has occurred on the server at no fault of the client. | ### Security The response statuses 401, 403 and 404 need special consideration for security purposes. As necessary, error statuses and messages may be obscured to strengthen security and prevent information exposure. The following is a guideline for privileged resource response statuses: | Use Case | Access | Resource | Permission | Status | | ------------------------------------------------------------------ | ------------------ |------------------- | ------------ | ------------ | | Unauthenticated access to an unauthenticated resource. | Unauthenticated | Unauthenticated | Yes | `20x` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Authenticated | No | `401` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Non-existent | No | `401` | | Authenticated access to a unauthenticated resource. | Authenticated | Unauthenticated | Yes | `20x` | | Authenticated access to an authenticated, unprivileged resource. | Authenticated | Authenticated | No | `404` | | Authenticated access to an authenticated, privileged resource. | Authenticated | Authenticated | Yes | `20x` | | Authenticated access to an authenticated, non-existent resource | Authenticated | Non-existent | Yes | `404` | ### Headers Commonly used response headers include: | Header | Example | Purpose | | -------------------------- | --------------------------------- | --------------------------------------------------------------- | | `Allow` | `OPTIONS, GET` | Defines the allowable HTTP operations on a resource. | | `Cache-Control` | `no-store, must-revalidate` | Disables caching of resources (as they are all dynamic). | | `Content-Encoding` | `gzip` | The encoding of the response body (if any). | | `Location` | | Refers to the URI of the resource created by a request. | | `Transfer-Encoding` | `chunked` | Specified the encoding used to transform response. | | `Retry-After` | 5000 | Indicates the time to wait before retrying a request. | | `X-Content-Type-Options` | `nosniff` | Disables MIME type sniffing. | | `X-XSS-Protection` | `1; mode=block` | Enables XSS filter protection. | | `X-Frame-Options` | `SAMEORIGIN` | Prevents rendering in a frame from a different origin. | | `X-UA-Compatible` | `IE=edge,chrome=1` | Specifies the browser mode to render in. | ### Format When `application/json` is returned in the response body it is always pretty-printed (indented, human readable output). Additionally, gzip compression/encoding is supported on all responses. #### Dates & Times Dates or times are returned as strings in the ISO 8601 'extended' format. When a date and time is returned (instant) the value is converted to UTC. For example: | Value | Format | Example | | --------------- | ------------------------------ | --------------------- | | Date | `YYYY-MM-DD` | 2017-12-03 | | Date & Time | `YYYY-MM-DD'T'hh:mm:ss[.nnn]Z` | 2017-12-03T10:15:30Z | #### Content In some resources a Content data type is used. This allows for multiple formats of representation to be returned within resource, specifically `\"html\"` and `\"text\"`. 
The `\"text\"` property returns a flattened representation suitable for output in textual displays. The `\"html\"` property returns an HTML fragment suitable for display within an HTML element. Note, the HTML returned is not a valid stand-alone HTML document. #### Paging The response to a paginated request follows the format: ```json { resources\": [ ... ], \"page\": { \"number\" : ..., \"size\" : ..., \"totalResources\" : ..., \"totalPages\" : ... }, \"links\": [ \"first\" : { \"href\" : \"...\" }, \"prev\" : { \"href\" : \"...\" }, \"self\" : { \"href\" : \"...\" }, \"next\" : { \"href\" : \"...\" }, \"last\" : { \"href\" : \"...\" } ] } ``` The `resources` property is an array of the resources being retrieved from the endpoint, each which should contain at minimum a \"self\" relation hypermedia link. The `page` property outlines the details of the current page and total possible pages. The object for the page includes the following properties: - number - The page number (zero-based) of the page returned. - size - The size of the pages, which is less than or equal to the maximum page size. - totalResources - The total amount of resources available across all pages. - totalPages - The total amount of pages. The last property of the paged response is the `links` array, which contains all available hypermedia links. For paginated responses, the \"self\", \"next\", \"previous\", \"first\", and \"last\" links are returned. The \"self\" link must always be returned and should contain a link to allow the client to replicate the original request against the collection resource in an identical manner to that in which it was invoked. The \"next\" and \"previous\" links are present if either or both there exists a previous or next page, respectively. The \"next\" and \"previous\" links have hrefs that allow \"natural movement\" to the next page, that is all parameters required to move the next page are provided in the link. The \"first\" and \"last\" links provide references to the first and last pages respectively. Requests outside the boundaries of the pageable will result in a `404 NOT FOUND`. Paginated requests do not provide a \"stateful cursor\" to the client, nor does it need to provide a read consistent view. Records in adjacent pages may change while pagination is being traversed, and the total number of pages and resources may change between requests within the same filtered/queries resource collection. #### Property Views The \"depth\" of the response of a resource can be configured using a \"view\". All endpoints supports two views that can tune the extent of the information returned in the resource. The supported views are `summary` and `details` (the default). View are specified using a query parameter, in this format: ```bash /<resource>?view={viewName} ``` #### Error Any error responses can provide a response body with a message to the client indicating more information (if applicable) to aid debugging of the error. All 40x and 50x responses will return an error response in the body. The format of the response is as follows: ```json { \"status\": <statusCode>, \"message\": <message>, \"links\" : [ { \"rel\" : \"...\", \"href\" : \"...\" } ] } ``` The `status` property is the same as the HTTP status returned in the response, to ease client parsing. The message property is a localized message in the request client's locale (if applicable) that articulates the nature of the error. The last property is the `links` property. 
This may contain additional [hypermedia links](#section/Overview/Authentication) to troubleshoot. #### Search Criteria <a section=\"section/Responses/SearchCriteria\"></a> Multiple resources make use of search criteria to match assets. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The operator is a type and property-specific operating performed on the filtered property. The valid values for fields and operators are outlined in the table below. Every filter also defines one or more values that are supplied to the operator. The valid values vary by operator and are outlined below. ##### Fields The following table outlines the search criteria fields and the available operators: | Field | Operators | | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | | `alternate-address-type` | `in` | | `container-image` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is like` ` not like` | | `container-status` | `is` ` is not` | | `containers` | `are` | | `criticality-tag` | `is` ` is not` ` is greater than` ` is less than` ` is applied` ` is not applied` | | `custom-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `cve` | `is` ` is not` ` contains` ` does not contain` | | `cvss-access-complexity` | `is` ` is not` | | `cvss-authentication-required` | `is` ` is not` | | `cvss-access-vector` | `is` ` is not` | | `cvss-availability-impact` | `is` ` is not` | | `cvss-confidentiality-impact` | `is` ` is not` | | `cvss-integrity-impact` | `is` ` is not` | | `cvss-v3-confidentiality-impact` | `is` ` is not` | | `cvss-v3-integrity-impact` | `is` ` is not` | | `cvss-v3-availability-impact` | `is` ` is not` | | `cvss-v3-attack-vector` | `is` ` is not` | | `cvss-v3-attack-complexity` | `is` ` is not` | | `cvss-v3-user-interaction` | `is` ` is not` | | `cvss-v3-privileges-required` | `is` ` is not` | | `host-name` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is empty` ` is not empty` ` is like` ` not like` | | `host-type` | `in` ` not in` | | `ip-address` | `is` ` is not` ` in range` ` not in range` ` is like` ` not like` | | `ip-address-type` | `in` ` not in` | | `last-scan-date` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `location-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `mobile-device-last-sync-time` | `is-within-the-last` ` is earlier than` | | `open-ports` | `is` ` is not` ` in range` | | `operating-system` | `contains` ` does not contain` ` is empty` ` is not empty` | | `owner-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `pci-compliance` | `is` | | `risk-score` | `is` ` is not` ` in range` ` greater than` ` less than` | | `service-name` | `contains` ` does not contain` | | `site-id` | `in` ` not in` | | `software` | `contains` ` does not contain` | | `vAsset-cluster` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | 
`vAsset-datacenter` | `is` ` is not` | | `vAsset-host-name` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | `vAsset-power-state` | `in` ` not in` | | `vAsset-resource-pool-path` | `contains` ` does not contain` | | `vulnerability-assessed` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `vulnerability-category` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` | | `vulnerability-cvss-v3-score` | `is` ` is not` | | `vulnerability-cvss-score` | `is` ` is not` ` in range` ` is greater than` ` is less than` | | `vulnerability-exposures` | `includes` ` does not include` | | `vulnerability-title` | `contains` ` does not contain` ` is` ` is not` ` starts with` ` ends with` | | `vulnerability-validated-status` | `are` | ##### Enumerated Properties The following fields have enumerated values: | Field | Acceptable Values | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------- | | `alternate-address-type` | 0=IPv4, 1=IPv6 | | `containers` | 0=present, 1=not present | | `container-status` | `created` `running` `paused` `restarting` `exited` `dead` `unknown` | | `cvss-access-complexity` | <ul><li><code>L</code> = Low</li><li><code>M</code> = Medium</li><li><code>H</code> = High</li></ul> | | `cvss-integrity-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-confidentiality-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-availability-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-access-vector` | <ul><li><code>L</code> = Local</li><li><code>A</code> = Adjacent</li><li><code>N</code> = Network</li></ul> | | `cvss-authentication-required` | <ul><li><code>N</code> = None</li><li><code>S</code> = Single</li><li><code>M</code> = Multiple</li></ul> | | `cvss-v3-confidentiality-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-integrity-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-availability-impact` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-attack-vector` | <ul><li><code>N</code> = Network</li><li><code>A</code> = Adjacent</li><li><code>L</code> = Local</li><li><code>P</code> = Physical</li></ul> | | `cvss-v3-attack-complexity` | <ul><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-user-interaction` | <ul><li><code>N</code> = None</li><li><code>R</code> = Required</li></ul> | | `cvss-v3-privileges-required` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `host-type` | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | `ip-address-type` | 0=IPv4, 1=IPv6 | | `pci-compliance` | 0=fail, 1=pass | | `vulnerability-validated-status` | 0=present, 1=not present | ##### Operator Properties <a section=\"section/Responses/SearchCriteria/OperatorProperties\"></a> The following table outlines which properties are required for each operator and the appropriate data type(s): | Operator | `value` | `lower` | `upper` | | 
----------------------|-----------------------|-----------------------|-----------------------| | `are` | `string` | | | | `contains` | `string` | | | | `does-not-contain` | `string` | | | | `ends with` | `string` | | | | `in` | `Array[ string ]` | | | | `in-range` | | `numeric` | `numeric` | | `includes` | `Array[ string ]` | | | | `is` | `string` | | | | `is-applied` | | | | | `is-between` | | `numeric` | `numeric` | | `is-earlier-than` | `numeric` | | | | `is-empty` | | | | | `is-greater-than` | `numeric` | | | | `is-on-or-after` | `string` (yyyy-MM-dd) | | | | `is-on-or-before` | `string` (yyyy-MM-dd) | | | | `is-not` | `string` | | | | `is-not-applied` | | | | | `is-not-empty` | | | | | `is-within-the-last` | `numeric` | | | | `less-than` | `string` | | | | `like` | `string` | | | | `not-contains` | `string` | | | | `not-in` | `Array[ string ]` | | | | `not-in-range` | | `numeric` | `numeric` | | `not-like` | `string` | | | | `starts-with` | `string` | | | #### Discovery Connection Search Criteria <a section=\"section/Responses/DiscoverySearchCriteria\"></a> Dynamic sites make use of search criteria to match assets from a discovery connection. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The list of supported fields vary depending on the type of discovery connection configured for the dynamic site (e.g vSphere, ActiveSync, etc.). The operator is a type and property-specific operating performed on the filtered property. The valid values for fields outlined in the tables below and are grouped by the type of connection. Every filter also defines one or more values that are supplied to the operator. See <a href=\"#section/Responses/SearchCriteria/OperatorProperties\">Search Criteria Operator Properties</a> for more information on the valid values for each operator. ##### Fields (ActiveSync) This section documents search criteria information for ActiveSync discovery connections. The discovery connections must be one of the following types: `\"activesync-ldap\"`, `\"activesync-office365\"`, or `\"activesync-powershell\"`. The following table outlines the search criteria fields and the available operators for ActiveSync connections: | Field | Operators | | --------------------------------- | ------------------------------------------------------------- | | `last-sync-time` | `is-within-the-last` ` is-earlier-than` | | `operating-system` | `contains` ` does-not-contain` | | `user` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (AWS) This section documents search criteria information for AWS discovery connections. The discovery connections must be the type `\"aws\"`. 
The following table outlines the search criteria fields and the available operators for AWS connections: | Field | Operators | | ----------------------- | ------------------------------------------------------------- | | `availability-zone` | `contains` ` does-not-contain` | | `guest-os-family` | `contains` ` does-not-contain` | | `instance-id` | `contains` ` does-not-contain` | | `instance-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `instance-state` | `in` ` not-in` | | `instance-type` | `in` ` not-in` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `region` | `in` ` not-in` | | `vpc-id` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (DHCP) This section documents search criteria information for DHCP discovery connections. The discovery connections must be the type `\"dhcp\"`. The following table outlines the search criteria fields and the available operators for DHCP connections: | Field | Operators | | --------------- | ------------------------------------------------------------- | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `mac-address` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (Sonar) This section documents search criteria information for Sonar discovery connections. The discovery connections must be the type `\"sonar\"`. The following table outlines the search criteria fields and the available operators for Sonar connections: | Field | Operators | | ------------------- | -------------------- | | `search-domain` | `contains` ` is` | | `ip-address` | `in-range` ` is` | | `sonar-scan-date` | `is-within-the-last` | ##### Fields (vSphere) This section documents search criteria information for vSphere discovery connections. The discovery connections must be the type `\"vsphere\"`. The following table outlines the search criteria fields and the available operators for vSphere connections: | Field | Operators | | -------------------- | ------------------------------------------------------------------------------------------ | | `cluster` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `data-center` | `is` ` is-not` | | `discovered-time` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `guest-os-family` | `contains` ` does-not-contain` | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `power-state` | `in` ` not-in` | | `resource-pool-path` | `contains` ` does-not-contain` | | `last-time-seen` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `vm` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Enumerated Properties (vSphere) The following fields have enumerated values: | Field | Acceptable Values | | ------------- | ------------------------------------ | | `power-state` | `poweredOn` `poweredOff` `suspended` | ## HATEOAS This API follows Hypermedia as the Engine of Application State (HATEOAS) principals and is therefore hypermedia friendly. Hyperlinks are returned in the `links` property of any given resource and contain a fully-qualified hyperlink to the corresponding resource. 
The format of the hypermedia link adheres to both the <a target=\"_blank\" href=\"http://jsonapi.org\">{json:api} v1</a> <a target=\"_blank\" href=\"http://jsonapi.org/format/#document-links\">\"Link Object\"</a> and <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html\">JSON Hyper-Schema</a> <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html#rfc.section.5.2\">\"Link Description Object\"</a> formats. For example: ```json \"links\": [{ \"rel\": \"<relation>\", \"href\": \"<href>\" ... }] ``` Where appropriate link objects may also contain additional properties than the `rel` and `href` properties, such as `id`, `type`, etc. See the [Root](#tag/Root) resources for the entry points into API discovery. # noqa: E501
OpenAPI spec version: 3
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class DiskFree(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'bytes': 'int',
'formatted': 'str'
}
attribute_map = {
'bytes': 'bytes',
'formatted': 'formatted'
}
def __init__(self, bytes=None, formatted=None): # noqa: E501
"""DiskFree - a model defined in Swagger""" # noqa: E501
self._bytes = None
self._formatted = None
self.discriminator = None
if bytes is not None:
self.bytes = bytes
if formatted is not None:
self.formatted = formatted
@property
def bytes(self):
"""Gets the bytes of this DiskFree. # noqa: E501
The raw value in bytes. # noqa: E501
:return: The bytes of this DiskFree. # noqa: E501
:rtype: int
"""
return self._bytes
@bytes.setter
def bytes(self, bytes):
"""Sets the bytes of this DiskFree.
The raw value in bytes. # noqa: E501
:param bytes: The bytes of this DiskFree. # noqa: E501
:type: int
"""
self._bytes = bytes
@property
def formatted(self):
"""Gets the formatted of this DiskFree. # noqa: E501
The value formatted in human-readable notation (e.g. GB, MB, KB, bytes). # noqa: E501
:return: The formatted of this DiskFree. # noqa: E501
:rtype: str
"""
return self._formatted
@formatted.setter
def formatted(self, formatted):
"""Sets the formatted of this DiskFree.
The value formatted in human-readable notation (e.g. GB, MB, KB, bytes). # noqa: E501
:param formatted: The formatted of this DiskFree. # noqa: E501
:type: str
"""
self._formatted = formatted
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DiskFree):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
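# A short usage sketch appended by the editor (not emitted by swagger-codegen);
# it only exercises the model class defined above. The byte count and the
# formatted string are illustrative assumptions, not values returned by a real
# console.
if __name__ == "__main__":
    example = DiskFree(bytes=536870912, formatted="512 MB")
    print(example.to_dict())  # {'bytes': 536870912, 'formatted': '512 MB'}
    print(example == DiskFree(bytes=536870912, formatted="512 MB"))  # True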
| [
"[email protected]"
] | |
853bed2cb752c48a90ca0259b3a04f2b8aee8684 | 6160586aa239eada16e735d40d57970dedbe1dfc | /modules/ifttt_manage/ifttt_scene_query/ifttt_scene_query_detail.py | 042e1159acc9f5254e74791f0a6593cdb8438812 | [] | no_license | showgea/AIOT | 7f9ffcd49da54836714b3342232cdba330d11e6c | fe8275aba1c4b5402c7c2c2987509c0ecf49f330 | refs/heads/master | 2020-07-23T10:19:37.478456 | 2019-09-23T12:25:59 | 2019-09-23T12:25:59 | 207,525,184 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 642 | py | import requests
from config import readcfg
header_Gary = readcfg.header_Gary
header_Jenny = readcfg.header_Jenny
url = readcfg.url
def ifttt_scene_query_detail(sceneId):
url_ = url + "/app/v1.0/lumi/ifttt/scene/query/detail"
params_ = {
"sceneId": sceneId
}
proxies = {'http': 'http://127.0.0.1:8888', 'https': 'http://127.0.0.1:8888'}
print("请求数据:%s" % params_)
r = requests.get(url=url_, params=params_, headers=header_Gary, proxies=proxies, verify=False)
return r
if __name__ == '__main__':
result_main = ifttt_scene_query_detail("AL.615944318139310080")
print(result_main.text)
| [
"[email protected]"
] | |
1206c9ef9057cf5c0c26e4daf0692b70bd167e82 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/plaudit.py | de18d7887743622f3d164216f2eedbb4c948086d | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 451 | py | ii = [('CookGHP3.py', 1), ('WilbRLW4.py', 1), ('CookGHP.py', 3), ('KembFJ1.py', 1), ('PettTHE.py', 1), ('ChalTPW2.py', 1), ('CarlTFR.py', 6), ('LyttELD.py', 1), ('AinsWRR3.py', 1), ('RoscTTI2.py', 1), ('ClarGE.py', 2), ('DibdTRL2.py', 1), ('BackGNE.py', 1), ('LeakWTI4.py', 1), ('LyttELD3.py', 1), ('RoscTTI.py', 1), ('ThomGLG.py', 2), ('KembFJ2.py', 1), ('HaliTBC.py', 2), ('AinsWRR2.py', 1), ('RogeSIP.py', 1), ('HogaGMM2.py', 3), ('WordWYR.py', 1)] | [
"[email protected]"
] | |
8c68e381114915acc331eee949fba0bca03c4ec5 | 754d39fbc163cb38bcff31d4b16bfc583242b759 | /Session20L.py | 81de6bb7d8ef42101807af128a981e9329e89bd2 | [] | no_license | MUSKANJASSAL/PythonTraining2019 | c68654b5548860c7b501252ce2289a48dbe575c3 | 0e3f226d2d7443759c92b3808d9d7f176c9a4a84 | refs/heads/master | 2020-06-22T14:39:29.491716 | 2019-07-21T10:46:10 | 2019-07-21T10:46:10 | 197,730,687 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,534 | py | import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
result = pd.read_csv('CityTemps.csv', delimiter = ',')
print(result)
print("Maximum Temparature of Ludhiana", result['Ludhiana'].max())
print("Maximum Temparature of Amritsar", result['Amritsar'].max())
print("Maximum Temparature of Chandigarh", result['Chandigarh'].max())
print("Minimum Temparature of Ludhiana", result['Ludhiana'].min())
print("Minimum Temparature of Amritsar", result['Amritsar'].min())
print("Minimum Temparature of Chandigarh", result['Chandigarh'].min())
# max_temp = {"Ldh":21.1, "Amr":22.0, "Chd":20.4}
# min_temp = {"Ldh":-8.6, "Amr":8.9, "Chd":10.3}
# for i, key in enumerate(max_temp):
# for i, key in enumerate(max_temp):
# # plt.bar(i, scores[key])
# plt.bar(key, min_temp[key])
# plt.bar(key, max_temp[key])
# plt.xlabel("Cities")
# plt.ylabel("Temp")
# plt.title("Temp_Cties")
#
# plt.show()
# data to plot
n_groups = 2
ldh = (21.1, 8.6)
amr = (22.0, 8.9)
chd = (20.4, 10.3)
# create plot
fig, ax = plt.subplots()
index = np.arange(n_groups)
bar_width = 0.05
opacity = 0.8
rects1 = plt.bar(index, ldh, bar_width,alpha=opacity,color='b',label='Ldh')
rects2 = plt.bar(index + bar_width, amr, bar_width,alpha=opacity,color='g',label='Amr')
rects3 = plt.bar(index + 2 * bar_width, chd, bar_width,alpha=opacity,color='m',label='Chd')
plt.xlabel('Measurement')
plt.ylabel('Temp')
plt.title('Max and min temperatures by city')
plt.xticks(index + bar_width, ('Max', 'Min'))
plt.legend()
plt.tight_layout()
plt.show()
| [
"[email protected]"
] | |
9c40c6ed010c2cd1f0ebcc2470ac11538c1ffa5f | 6e9c127bd6705a8b92f240ca663163504b86cd81 | /test/test_plants/test_plants/test_noFuelPlant.py | 15637666059d8880a95372b53df0b25bedf4bd63 | [
"MIT"
] | permissive | alexanderkell/elecsim | 239ffd539d1b04f24186ddaae20ac4ce6b258c03 | df9ea14cbc8dd3fd4302be9274cb6ea61c0cdb10 | refs/heads/master | 2023-04-06T10:03:35.367411 | 2023-04-05T16:52:16 | 2023-04-05T16:52:16 | 124,561,430 | 36 | 10 | MIT | 2022-12-08T01:57:45 | 2018-03-09T15:55:53 | Jupyter Notebook | UTF-8 | Python | false | false | 2,109 | py | """
File name: test_noFuelPlant
Date created: 28/11/2018
Feature: #Enter feature description here
"""
from unittest import TestCase
from pytest import approx
from elecsim.plants.plant_type.non_fuel_plant import NonFuelPlant
__author__ = "Alexander Kell"
__copyright__ = "Copyright 2018, Alexander Kell"
__license__ = "MIT"
__email__ = "[email protected]"
class TestNoFuelPlant(TestCase):
# def create_2018_biomass_power_plant(self):
# fuel_plant = FuelPlant(name="Test_Plant", plant_type="Biomass_wood", capacity_mw=1200, construction_year=2010,
# average_load_factor=0.93, efficiency=0.54, pre_dev_period=3, construction_period=3,
# operating_period=25, pre_dev_spend_years=[0.44, 0.44, 0.12],
# construction_spend_years=[0.4, 0.4, 0.2], pre_dev_cost_per_mw=1000,
# construction_cost_per_mw=500, infrastructure=15100, fixed_o_and_m_per_mw=12200,
# variable_o_and_m_per_mwh=3, insurance_cost_per_mw=2100, connection_cost_per_mw=3300)
# return fuel_plant
#
#
# def test_calculate_lcoe(self):
# power_plant = self.create_2018_biomass_power_plant()
# print("LCOE for biomass: {}".format(power_plant.calculate_lcoe(0.1)))
# # assert power_plant.calculate_lcoe() == 1
def test_small_hydro_plant_lcoe_calculation(self):
params = {'connection_cost_per_mw': 0.0, 'construction_cost_per_mw': 4103676.6103626275, 'fixed_o_and_m_per_mw': 37265.847352193756, 'infrastructure': 311.06133108680143, 'insurance_cost_per_mw': 0.0, 'pre_dev_cost_per_mw': 0, 'variable_o_and_m_per_mwh': 3.074841257793032, 'pre_dev_period': 0, 'operating_period': 35, 'construction_period': 0, 'efficiency': 1, 'average_load_factor': 0.4, 'construction_spend_years': [1.0], 'pre_dev_spend_years': []}
hydro_plant = NonFuelPlant(name="Hydro", plant_type="Hydro", capacity_mw=5, construction_year=2002, **params)
assert hydro_plant.calculate_lcoe(0.075) == approx(103.8260236534459)
| [
"[email protected]"
] | |
6335ca36003aa92af24d10cb4f58f016985db399 | 16546a94e9f078c3e7a39337d47b21d7b71b6799 | /siphon.py | 09a895c8d1f3ff55df5b9a8edae0e64a5d8af074 | [] | no_license | mdevaev/arduino-siphon | 7c7259bb87f116fa9c0eb65b2c1b8941c11dd06a | 56a7959a13a73311faeefcd54a9f3aeb9f7989ff | refs/heads/master | 2021-01-21T08:57:33.710400 | 2016-01-09T05:06:40 | 2016-01-09T05:06:40 | 15,296,940 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,073 | py | #!/usr/bin/env python3
import sys
import xmlrpc.client
import serial
import struct
import time
import logging
# =====
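# Wire protocol to the Arduino, summarized from the struct formats below
# (an editorial note, not an authoritative specification): every command is an
# 8-byte little-endian frame -- one command byte followed by a payload padded
# with NUL bytes. Commands 0x01/0x02 carry the current download/upload rate as
# an unsigned 16-bit value, 0x03/0x04 carry a one-byte activity flag, and
# 0x05/0x06 ask the device for its download/upload limit, which it answers
# with an unsigned 16-bit value.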
class Siphon:
def __init__(self, device):
self._tty = serial.Serial(device, 115200)
def send(self, download, upload, has_download, has_upload):
self._tty.write(struct.pack("<cHccccc", *((b"\x01", download) + (b"\x00",) * 5)))
self._tty.write(struct.pack("<cHccccc", *((b"\x02", upload) + (b"\x00",) * 5)))
self._tty.write(struct.pack("<cccccccc", *((b"\x03", self._make_byte(has_download)) + (b"\x00",) * 6)))
self._tty.write(struct.pack("<cccccccc", *((b"\x04", self._make_byte(has_upload)) + (b"\x00",) * 6)))
def _make_byte(self, value):
return bytes([int(value)])
def receive(self):
self._tty.write(struct.pack("<cccccccc", *((b"\x05",) + (b"\x00",) * 7)))
download = struct.unpack("<H", self._tty.read(2))[0]
self._tty.write(struct.pack("<cccccccc", *((b"\x06",) + (b"\x00",) * 7)))
upload = struct.unpack("<H", self._tty.read(2))[0]
return (download, upload)
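# Server wraps the torrent client's XML-RPC interface (get_down_rate/get_up_rate
# and set_download_rate/set_upload_rate are the method names rTorrent exposes).
# get_speed() converts the reported bytes per second into whole megabits per
# second for the display; set_speed_limits() converts megabits per second back
# into bytes per second and only issues calls when a value has actually changed.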
class Server:
def __init__(self, url) :
self._server = xmlrpc.client.ServerProxy(url)
self._prev_down = None
self._prev_up = None
def get_speed(self) :
multicall = xmlrpc.client.MultiCall(self._server)
multicall.get_down_rate()
multicall.get_up_rate()
return tuple(map(self._make_speed, multicall()))
def set_speed_limits(self, download, upload) :
if self._prev_down != download or self._prev_up != upload :
multicall = xmlrpc.client.MultiCall(self._server)
if self._prev_down != download :
multicall.set_download_rate(self._make_limit(download))
self._prev_down = download
if self._prev_up != upload :
multicall.set_upload_rate(self._make_limit(upload))
self._prev_up = upload
multicall()
return True
return False
def _make_speed(self, speed) :
return int(speed * 8.0 / (1024.0 ** 2))
def _make_limit(self, speed) :
return int(speed / 8.0 * (1024.0 ** 2))
# =====
def main():
assert len(sys.argv) == 3
logger = logging.getLogger("siphon")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s [%(levelname)s]: %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
server = Server(sys.argv[1])
siphon = Siphon(sys.argv[2])
while True :
(download, upload) = server.get_speed()
logger.info("siphon << server: speed: D:%d / U:%d", download, upload)
siphon.send(download, upload, download != 0, upload != 0)
(download, upload) = siphon.receive()
if server.set_speed_limits(download, upload):
logger.info("siphon >> server: limits: D:%d / U:%d", download, upload)
time.sleep(1)
if __name__ == "__main__" :
main()
| [
"[email protected]"
] | |
acc34bec7a83d214a82312b4dd86944cdfe77048 | bad62c2b0dfad33197db55b44efeec0bab405634 | /sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_07_01/models/__init__.py | 7095017a476bda7b22db2b89d133e5d222d228d9 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | test-repo-billy/azure-sdk-for-python | 20c5a2486456e02456de17515704cb064ff19833 | cece86a8548cb5f575e5419864d631673be0a244 | refs/heads/master | 2022-10-25T02:28:39.022559 | 2022-10-18T06:05:46 | 2022-10-18T06:05:46 | 182,325,031 | 0 | 0 | MIT | 2019-07-25T22:28:52 | 2019-04-19T20:59:15 | Python | UTF-8 | Python | false | false | 23,839 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._models_py3 import AccessUri
from ._models_py3 import AdditionalCapabilities
from ._models_py3 import AdditionalUnattendContent
from ._models_py3 import ApiEntityReference
from ._models_py3 import ApiError
from ._models_py3 import ApiErrorBase
from ._models_py3 import AutomaticOSUpgradePolicy
from ._models_py3 import AutomaticOSUpgradeProperties
from ._models_py3 import AutomaticRepairsPolicy
from ._models_py3 import AvailabilitySet
from ._models_py3 import AvailabilitySetListResult
from ._models_py3 import AvailabilitySetUpdate
from ._models_py3 import BillingProfile
from ._models_py3 import BootDiagnostics
from ._models_py3 import BootDiagnosticsInstanceView
from ._models_py3 import ComputeOperationListResult
from ._models_py3 import ComputeOperationValue
from ._models_py3 import CreationData
from ._models_py3 import DataDisk
from ._models_py3 import DataDiskImage
from ._models_py3 import DedicatedHost
from ._models_py3 import DedicatedHostAllocatableVM
from ._models_py3 import DedicatedHostAvailableCapacity
from ._models_py3 import DedicatedHostGroup
from ._models_py3 import DedicatedHostGroupListResult
from ._models_py3 import DedicatedHostGroupUpdate
from ._models_py3 import DedicatedHostInstanceView
from ._models_py3 import DedicatedHostListResult
from ._models_py3 import DedicatedHostUpdate
from ._models_py3 import DiagnosticsProfile
from ._models_py3 import DiffDiskSettings
from ._models_py3 import Disallowed
from ._models_py3 import Disk
from ._models_py3 import DiskEncryptionSet
from ._models_py3 import DiskEncryptionSetList
from ._models_py3 import DiskEncryptionSetParameters
from ._models_py3 import DiskEncryptionSetUpdate
from ._models_py3 import DiskEncryptionSettings
from ._models_py3 import DiskInstanceView
from ._models_py3 import DiskList
from ._models_py3 import DiskSku
from ._models_py3 import DiskUpdate
from ._models_py3 import Encryption
from ._models_py3 import EncryptionSetIdentity
from ._models_py3 import EncryptionSettingsCollection
from ._models_py3 import EncryptionSettingsElement
from ._models_py3 import Gallery
from ._models_py3 import GalleryApplication
from ._models_py3 import GalleryApplicationList
from ._models_py3 import GalleryApplicationUpdate
from ._models_py3 import GalleryApplicationVersion
from ._models_py3 import GalleryApplicationVersionList
from ._models_py3 import GalleryApplicationVersionPublishingProfile
from ._models_py3 import GalleryApplicationVersionUpdate
from ._models_py3 import GalleryArtifactPublishingProfileBase
from ._models_py3 import GalleryArtifactSource
from ._models_py3 import GalleryArtifactVersionSource
from ._models_py3 import GalleryDataDiskImage
from ._models_py3 import GalleryDiskImage
from ._models_py3 import GalleryIdentifier
from ._models_py3 import GalleryImage
from ._models_py3 import GalleryImageIdentifier
from ._models_py3 import GalleryImageList
from ._models_py3 import GalleryImageUpdate
from ._models_py3 import GalleryImageVersion
from ._models_py3 import GalleryImageVersionList
from ._models_py3 import GalleryImageVersionPublishingProfile
from ._models_py3 import GalleryImageVersionStorageProfile
from ._models_py3 import GalleryImageVersionUpdate
from ._models_py3 import GalleryList
from ._models_py3 import GalleryOSDiskImage
from ._models_py3 import GalleryUpdate
from ._models_py3 import GrantAccessData
from ._models_py3 import HardwareProfile
from ._models_py3 import Image
from ._models_py3 import ImageDataDisk
from ._models_py3 import ImageDisk
from ._models_py3 import ImageDiskReference
from ._models_py3 import ImageListResult
from ._models_py3 import ImageOSDisk
from ._models_py3 import ImagePurchasePlan
from ._models_py3 import ImageReference
from ._models_py3 import ImageStorageProfile
from ._models_py3 import ImageUpdate
from ._models_py3 import InnerError
from ._models_py3 import InstanceViewStatus
from ._models_py3 import KeyVaultAndKeyReference
from ._models_py3 import KeyVaultAndSecretReference
from ._models_py3 import KeyVaultKeyReference
from ._models_py3 import KeyVaultSecretReference
from ._models_py3 import LinuxConfiguration
from ._models_py3 import ListUsagesResult
from ._models_py3 import LogAnalyticsInputBase
from ._models_py3 import LogAnalyticsOperationResult
from ._models_py3 import LogAnalyticsOutput
from ._models_py3 import MaintenanceRedeployStatus
from ._models_py3 import ManagedArtifact
from ._models_py3 import ManagedDiskParameters
from ._models_py3 import NetworkInterfaceReference
from ._models_py3 import NetworkProfile
from ._models_py3 import OSDisk
from ._models_py3 import OSDiskImage
from ._models_py3 import OSProfile
from ._models_py3 import Plan
from ._models_py3 import ProximityPlacementGroup
from ._models_py3 import ProximityPlacementGroupListResult
from ._models_py3 import ProximityPlacementGroupUpdate
from ._models_py3 import PurchasePlan
from ._models_py3 import RecommendedMachineConfiguration
from ._models_py3 import RecoveryWalkResponse
from ._models_py3 import RegionalReplicationStatus
from ._models_py3 import ReplicationStatus
from ._models_py3 import RequestRateByIntervalInput
from ._models_py3 import Resource
from ._models_py3 import ResourceRange
from ._models_py3 import RollbackStatusInfo
from ._models_py3 import RollingUpgradePolicy
from ._models_py3 import RollingUpgradeProgressInfo
from ._models_py3 import RollingUpgradeRunningStatus
from ._models_py3 import RollingUpgradeStatusInfo
from ._models_py3 import RunCommandDocument
from ._models_py3 import RunCommandDocumentBase
from ._models_py3 import RunCommandInput
from ._models_py3 import RunCommandInputParameter
from ._models_py3 import RunCommandListResult
from ._models_py3 import RunCommandParameterDefinition
from ._models_py3 import RunCommandResult
from ._models_py3 import ScaleInPolicy
from ._models_py3 import ScheduledEventsProfile
from ._models_py3 import Sku
from ._models_py3 import Snapshot
from ._models_py3 import SnapshotList
from ._models_py3 import SnapshotSku
from ._models_py3 import SnapshotUpdate
from ._models_py3 import SourceVault
from ._models_py3 import SshConfiguration
from ._models_py3 import SshPublicKey
from ._models_py3 import StorageProfile
from ._models_py3 import SubResource
from ._models_py3 import SubResourceReadOnly
from ._models_py3 import SubResourceWithColocationStatus
from ._models_py3 import TargetRegion
from ._models_py3 import TerminateNotificationProfile
from ._models_py3 import ThrottledRequestsInput
from ._models_py3 import UpdateResource
from ._models_py3 import UpgradeOperationHistoricalStatusInfo
from ._models_py3 import UpgradeOperationHistoricalStatusInfoProperties
from ._models_py3 import UpgradeOperationHistoryStatus
from ._models_py3 import UpgradePolicy
from ._models_py3 import Usage
from ._models_py3 import UsageName
from ._models_py3 import UserArtifactManage
from ._models_py3 import UserArtifactSource
from ._models_py3 import UserAssignedIdentitiesValue
from ._models_py3 import VMScaleSetConvertToSinglePlacementGroupInput
from ._models_py3 import VaultCertificate
from ._models_py3 import VaultSecretGroup
from ._models_py3 import VirtualHardDisk
from ._models_py3 import VirtualMachine
from ._models_py3 import VirtualMachineAgentInstanceView
from ._models_py3 import VirtualMachineCaptureParameters
from ._models_py3 import VirtualMachineCaptureResult
from ._models_py3 import VirtualMachineExtension
from ._models_py3 import VirtualMachineExtensionHandlerInstanceView
from ._models_py3 import VirtualMachineExtensionImage
from ._models_py3 import VirtualMachineExtensionInstanceView
from ._models_py3 import VirtualMachineExtensionUpdate
from ._models_py3 import VirtualMachineExtensionsListResult
from ._models_py3 import VirtualMachineHealthStatus
from ._models_py3 import VirtualMachineIdentity
from ._models_py3 import VirtualMachineImage
from ._models_py3 import VirtualMachineImageResource
from ._models_py3 import VirtualMachineInstanceView
from ._models_py3 import VirtualMachineListResult
from ._models_py3 import VirtualMachineReimageParameters
from ._models_py3 import VirtualMachineScaleSet
from ._models_py3 import VirtualMachineScaleSetDataDisk
from ._models_py3 import VirtualMachineScaleSetExtension
from ._models_py3 import VirtualMachineScaleSetExtensionListResult
from ._models_py3 import VirtualMachineScaleSetExtensionProfile
from ._models_py3 import VirtualMachineScaleSetExtensionUpdate
from ._models_py3 import VirtualMachineScaleSetIPConfiguration
from ._models_py3 import VirtualMachineScaleSetIdentity
from ._models_py3 import VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue
from ._models_py3 import VirtualMachineScaleSetInstanceView
from ._models_py3 import VirtualMachineScaleSetInstanceViewStatusesSummary
from ._models_py3 import VirtualMachineScaleSetIpTag
from ._models_py3 import VirtualMachineScaleSetListOSUpgradeHistory
from ._models_py3 import VirtualMachineScaleSetListResult
from ._models_py3 import VirtualMachineScaleSetListSkusResult
from ._models_py3 import VirtualMachineScaleSetListWithLinkResult
from ._models_py3 import VirtualMachineScaleSetManagedDiskParameters
from ._models_py3 import VirtualMachineScaleSetNetworkConfiguration
from ._models_py3 import VirtualMachineScaleSetNetworkConfigurationDnsSettings
from ._models_py3 import VirtualMachineScaleSetNetworkProfile
from ._models_py3 import VirtualMachineScaleSetOSDisk
from ._models_py3 import VirtualMachineScaleSetOSProfile
from ._models_py3 import VirtualMachineScaleSetPublicIPAddressConfiguration
from ._models_py3 import VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings
from ._models_py3 import VirtualMachineScaleSetReimageParameters
from ._models_py3 import VirtualMachineScaleSetSku
from ._models_py3 import VirtualMachineScaleSetSkuCapacity
from ._models_py3 import VirtualMachineScaleSetStorageProfile
from ._models_py3 import VirtualMachineScaleSetUpdate
from ._models_py3 import VirtualMachineScaleSetUpdateIPConfiguration
from ._models_py3 import VirtualMachineScaleSetUpdateNetworkConfiguration
from ._models_py3 import VirtualMachineScaleSetUpdateNetworkProfile
from ._models_py3 import VirtualMachineScaleSetUpdateOSDisk
from ._models_py3 import VirtualMachineScaleSetUpdateOSProfile
from ._models_py3 import VirtualMachineScaleSetUpdatePublicIPAddressConfiguration
from ._models_py3 import VirtualMachineScaleSetUpdateStorageProfile
from ._models_py3 import VirtualMachineScaleSetUpdateVMProfile
from ._models_py3 import VirtualMachineScaleSetVM
from ._models_py3 import VirtualMachineScaleSetVMExtensionsSummary
from ._models_py3 import VirtualMachineScaleSetVMInstanceIDs
from ._models_py3 import VirtualMachineScaleSetVMInstanceRequiredIDs
from ._models_py3 import VirtualMachineScaleSetVMInstanceView
from ._models_py3 import VirtualMachineScaleSetVMListResult
from ._models_py3 import VirtualMachineScaleSetVMNetworkProfileConfiguration
from ._models_py3 import VirtualMachineScaleSetVMProfile
from ._models_py3 import VirtualMachineScaleSetVMProtectionPolicy
from ._models_py3 import VirtualMachineScaleSetVMReimageParameters
from ._models_py3 import VirtualMachineSize
from ._models_py3 import VirtualMachineSizeListResult
from ._models_py3 import VirtualMachineStatusCodeCount
from ._models_py3 import VirtualMachineUpdate
from ._models_py3 import WinRMConfiguration
from ._models_py3 import WinRMListener
from ._models_py3 import WindowsConfiguration
from ._compute_management_client_enums import AccessLevel
from ._compute_management_client_enums import AggregatedReplicationState
from ._compute_management_client_enums import AvailabilitySetSkuTypes
from ._compute_management_client_enums import CachingTypes
from ._compute_management_client_enums import DedicatedHostLicenseTypes
from ._compute_management_client_enums import DiffDiskOptions
from ._compute_management_client_enums import DiskCreateOption
from ._compute_management_client_enums import DiskCreateOptionTypes
from ._compute_management_client_enums import DiskEncryptionSetIdentityType
from ._compute_management_client_enums import DiskState
from ._compute_management_client_enums import DiskStorageAccountTypes
from ._compute_management_client_enums import EncryptionType
from ._compute_management_client_enums import GalleryApplicationVersionPropertiesProvisioningState
from ._compute_management_client_enums import GalleryImagePropertiesProvisioningState
from ._compute_management_client_enums import GalleryImageVersionPropertiesProvisioningState
from ._compute_management_client_enums import GalleryPropertiesProvisioningState
from ._compute_management_client_enums import HostCaching
from ._compute_management_client_enums import HyperVGeneration
from ._compute_management_client_enums import HyperVGenerationType
from ._compute_management_client_enums import HyperVGenerationTypes
from ._compute_management_client_enums import IPVersion
from ._compute_management_client_enums import IntervalInMins
from ._compute_management_client_enums import MaintenanceOperationResultCodeTypes
from ._compute_management_client_enums import OperatingSystemStateTypes
from ._compute_management_client_enums import OperatingSystemTypes
from ._compute_management_client_enums import ProtocolTypes
from ._compute_management_client_enums import ProximityPlacementGroupType
from ._compute_management_client_enums import ReplicationState
from ._compute_management_client_enums import ReplicationStatusTypes
from ._compute_management_client_enums import ResourceIdentityType
from ._compute_management_client_enums import RollingUpgradeActionType
from ._compute_management_client_enums import RollingUpgradeStatusCode
from ._compute_management_client_enums import SettingNames
from ._compute_management_client_enums import SnapshotStorageAccountTypes
from ._compute_management_client_enums import StatusLevelTypes
from ._compute_management_client_enums import StorageAccountType
from ._compute_management_client_enums import StorageAccountTypes
from ._compute_management_client_enums import UpgradeMode
from ._compute_management_client_enums import UpgradeOperationInvoker
from ._compute_management_client_enums import UpgradeState
from ._compute_management_client_enums import VirtualMachineEvictionPolicyTypes
from ._compute_management_client_enums import VirtualMachinePriorityTypes
from ._compute_management_client_enums import VirtualMachineScaleSetScaleInRules
from ._compute_management_client_enums import VirtualMachineScaleSetSkuScaleType
from ._compute_management_client_enums import VirtualMachineSizeTypes
from ._patch import __all__ as _patch_all
from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AccessUri",
"AdditionalCapabilities",
"AdditionalUnattendContent",
"ApiEntityReference",
"ApiError",
"ApiErrorBase",
"AutomaticOSUpgradePolicy",
"AutomaticOSUpgradeProperties",
"AutomaticRepairsPolicy",
"AvailabilitySet",
"AvailabilitySetListResult",
"AvailabilitySetUpdate",
"BillingProfile",
"BootDiagnostics",
"BootDiagnosticsInstanceView",
"ComputeOperationListResult",
"ComputeOperationValue",
"CreationData",
"DataDisk",
"DataDiskImage",
"DedicatedHost",
"DedicatedHostAllocatableVM",
"DedicatedHostAvailableCapacity",
"DedicatedHostGroup",
"DedicatedHostGroupListResult",
"DedicatedHostGroupUpdate",
"DedicatedHostInstanceView",
"DedicatedHostListResult",
"DedicatedHostUpdate",
"DiagnosticsProfile",
"DiffDiskSettings",
"Disallowed",
"Disk",
"DiskEncryptionSet",
"DiskEncryptionSetList",
"DiskEncryptionSetParameters",
"DiskEncryptionSetUpdate",
"DiskEncryptionSettings",
"DiskInstanceView",
"DiskList",
"DiskSku",
"DiskUpdate",
"Encryption",
"EncryptionSetIdentity",
"EncryptionSettingsCollection",
"EncryptionSettingsElement",
"Gallery",
"GalleryApplication",
"GalleryApplicationList",
"GalleryApplicationUpdate",
"GalleryApplicationVersion",
"GalleryApplicationVersionList",
"GalleryApplicationVersionPublishingProfile",
"GalleryApplicationVersionUpdate",
"GalleryArtifactPublishingProfileBase",
"GalleryArtifactSource",
"GalleryArtifactVersionSource",
"GalleryDataDiskImage",
"GalleryDiskImage",
"GalleryIdentifier",
"GalleryImage",
"GalleryImageIdentifier",
"GalleryImageList",
"GalleryImageUpdate",
"GalleryImageVersion",
"GalleryImageVersionList",
"GalleryImageVersionPublishingProfile",
"GalleryImageVersionStorageProfile",
"GalleryImageVersionUpdate",
"GalleryList",
"GalleryOSDiskImage",
"GalleryUpdate",
"GrantAccessData",
"HardwareProfile",
"Image",
"ImageDataDisk",
"ImageDisk",
"ImageDiskReference",
"ImageListResult",
"ImageOSDisk",
"ImagePurchasePlan",
"ImageReference",
"ImageStorageProfile",
"ImageUpdate",
"InnerError",
"InstanceViewStatus",
"KeyVaultAndKeyReference",
"KeyVaultAndSecretReference",
"KeyVaultKeyReference",
"KeyVaultSecretReference",
"LinuxConfiguration",
"ListUsagesResult",
"LogAnalyticsInputBase",
"LogAnalyticsOperationResult",
"LogAnalyticsOutput",
"MaintenanceRedeployStatus",
"ManagedArtifact",
"ManagedDiskParameters",
"NetworkInterfaceReference",
"NetworkProfile",
"OSDisk",
"OSDiskImage",
"OSProfile",
"Plan",
"ProximityPlacementGroup",
"ProximityPlacementGroupListResult",
"ProximityPlacementGroupUpdate",
"PurchasePlan",
"RecommendedMachineConfiguration",
"RecoveryWalkResponse",
"RegionalReplicationStatus",
"ReplicationStatus",
"RequestRateByIntervalInput",
"Resource",
"ResourceRange",
"RollbackStatusInfo",
"RollingUpgradePolicy",
"RollingUpgradeProgressInfo",
"RollingUpgradeRunningStatus",
"RollingUpgradeStatusInfo",
"RunCommandDocument",
"RunCommandDocumentBase",
"RunCommandInput",
"RunCommandInputParameter",
"RunCommandListResult",
"RunCommandParameterDefinition",
"RunCommandResult",
"ScaleInPolicy",
"ScheduledEventsProfile",
"Sku",
"Snapshot",
"SnapshotList",
"SnapshotSku",
"SnapshotUpdate",
"SourceVault",
"SshConfiguration",
"SshPublicKey",
"StorageProfile",
"SubResource",
"SubResourceReadOnly",
"SubResourceWithColocationStatus",
"TargetRegion",
"TerminateNotificationProfile",
"ThrottledRequestsInput",
"UpdateResource",
"UpgradeOperationHistoricalStatusInfo",
"UpgradeOperationHistoricalStatusInfoProperties",
"UpgradeOperationHistoryStatus",
"UpgradePolicy",
"Usage",
"UsageName",
"UserArtifactManage",
"UserArtifactSource",
"UserAssignedIdentitiesValue",
"VMScaleSetConvertToSinglePlacementGroupInput",
"VaultCertificate",
"VaultSecretGroup",
"VirtualHardDisk",
"VirtualMachine",
"VirtualMachineAgentInstanceView",
"VirtualMachineCaptureParameters",
"VirtualMachineCaptureResult",
"VirtualMachineExtension",
"VirtualMachineExtensionHandlerInstanceView",
"VirtualMachineExtensionImage",
"VirtualMachineExtensionInstanceView",
"VirtualMachineExtensionUpdate",
"VirtualMachineExtensionsListResult",
"VirtualMachineHealthStatus",
"VirtualMachineIdentity",
"VirtualMachineImage",
"VirtualMachineImageResource",
"VirtualMachineInstanceView",
"VirtualMachineListResult",
"VirtualMachineReimageParameters",
"VirtualMachineScaleSet",
"VirtualMachineScaleSetDataDisk",
"VirtualMachineScaleSetExtension",
"VirtualMachineScaleSetExtensionListResult",
"VirtualMachineScaleSetExtensionProfile",
"VirtualMachineScaleSetExtensionUpdate",
"VirtualMachineScaleSetIPConfiguration",
"VirtualMachineScaleSetIdentity",
"VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue",
"VirtualMachineScaleSetInstanceView",
"VirtualMachineScaleSetInstanceViewStatusesSummary",
"VirtualMachineScaleSetIpTag",
"VirtualMachineScaleSetListOSUpgradeHistory",
"VirtualMachineScaleSetListResult",
"VirtualMachineScaleSetListSkusResult",
"VirtualMachineScaleSetListWithLinkResult",
"VirtualMachineScaleSetManagedDiskParameters",
"VirtualMachineScaleSetNetworkConfiguration",
"VirtualMachineScaleSetNetworkConfigurationDnsSettings",
"VirtualMachineScaleSetNetworkProfile",
"VirtualMachineScaleSetOSDisk",
"VirtualMachineScaleSetOSProfile",
"VirtualMachineScaleSetPublicIPAddressConfiguration",
"VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings",
"VirtualMachineScaleSetReimageParameters",
"VirtualMachineScaleSetSku",
"VirtualMachineScaleSetSkuCapacity",
"VirtualMachineScaleSetStorageProfile",
"VirtualMachineScaleSetUpdate",
"VirtualMachineScaleSetUpdateIPConfiguration",
"VirtualMachineScaleSetUpdateNetworkConfiguration",
"VirtualMachineScaleSetUpdateNetworkProfile",
"VirtualMachineScaleSetUpdateOSDisk",
"VirtualMachineScaleSetUpdateOSProfile",
"VirtualMachineScaleSetUpdatePublicIPAddressConfiguration",
"VirtualMachineScaleSetUpdateStorageProfile",
"VirtualMachineScaleSetUpdateVMProfile",
"VirtualMachineScaleSetVM",
"VirtualMachineScaleSetVMExtensionsSummary",
"VirtualMachineScaleSetVMInstanceIDs",
"VirtualMachineScaleSetVMInstanceRequiredIDs",
"VirtualMachineScaleSetVMInstanceView",
"VirtualMachineScaleSetVMListResult",
"VirtualMachineScaleSetVMNetworkProfileConfiguration",
"VirtualMachineScaleSetVMProfile",
"VirtualMachineScaleSetVMProtectionPolicy",
"VirtualMachineScaleSetVMReimageParameters",
"VirtualMachineSize",
"VirtualMachineSizeListResult",
"VirtualMachineStatusCodeCount",
"VirtualMachineUpdate",
"WinRMConfiguration",
"WinRMListener",
"WindowsConfiguration",
"AccessLevel",
"AggregatedReplicationState",
"AvailabilitySetSkuTypes",
"CachingTypes",
"DedicatedHostLicenseTypes",
"DiffDiskOptions",
"DiskCreateOption",
"DiskCreateOptionTypes",
"DiskEncryptionSetIdentityType",
"DiskState",
"DiskStorageAccountTypes",
"EncryptionType",
"GalleryApplicationVersionPropertiesProvisioningState",
"GalleryImagePropertiesProvisioningState",
"GalleryImageVersionPropertiesProvisioningState",
"GalleryPropertiesProvisioningState",
"HostCaching",
"HyperVGeneration",
"HyperVGenerationType",
"HyperVGenerationTypes",
"IPVersion",
"IntervalInMins",
"MaintenanceOperationResultCodeTypes",
"OperatingSystemStateTypes",
"OperatingSystemTypes",
"ProtocolTypes",
"ProximityPlacementGroupType",
"ReplicationState",
"ReplicationStatusTypes",
"ResourceIdentityType",
"RollingUpgradeActionType",
"RollingUpgradeStatusCode",
"SettingNames",
"SnapshotStorageAccountTypes",
"StatusLevelTypes",
"StorageAccountType",
"StorageAccountTypes",
"UpgradeMode",
"UpgradeOperationInvoker",
"UpgradeState",
"VirtualMachineEvictionPolicyTypes",
"VirtualMachinePriorityTypes",
"VirtualMachineScaleSetScaleInRules",
"VirtualMachineScaleSetSkuScaleType",
"VirtualMachineSizeTypes",
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
| [
"[email protected]"
] | |
da97ca1fe888a2921cdd2db3c8eb5634445741f2 | 9f118ed377f62c84ff46710d15bfeb60ff43a514 | /11 - Unit 4/4.3.6.py | 17d9a7b0d37933163b49ee42961f2e80067563f1 | [] | no_license | srujanprophet/PythonPractice | 382f8bd5cc3f70504c3d62c1d8795c7451b344dc | ebc13e9f21a6b0f594d10b8524ef358c797979de | refs/heads/master | 2021-09-12T15:37:22.423484 | 2018-04-18T05:05:59 | 2018-04-18T05:05:59 | 67,717,033 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | c=dict()
n = input("Enter total number ")
i=1
while i<=n:
a=raw_input("enter place")
b=raw_input("enter number")
c[a]=b
i=i+1
print "place","\t","number"
for i in c:
print i,"\t",c[i]
| [
"[email protected]"
] | |
743e4f277a0e2c96bc6bd00fda0b9a9e8941dddb | bd06d7672673370fb6fde1e2f9c5364fb131f2c9 | /s02e03.py | b61732294b523dfd70d6bacddf59f9c3f4200712 | [] | no_license | tjmode/python | 12e327b644b1579af1f47de20dc579737f6be202 | a99c45e18d5f453747d0caaa9b96aedc84d23bc8 | refs/heads/master | 2020-08-30T10:49:27.190948 | 2019-10-29T18:32:52 | 2019-10-29T18:32:52 | 218,356,539 | 1 | 0 | null | 2019-10-29T18:32:09 | 2019-10-29T18:32:09 | null | UTF-8 | Python | false | false | 137 | py | a=int(input())
count=0
for i in range(2,a):
    if a%i==0 :
        count=count+1
if a>1 and count==0 :
    print("yes")
else :
    print("no")
| [
"[email protected]"
] | |
da51bd29e9ec5c024eccd93f2c07274810ce5075 | b24e993bfae0e530b7c6ee676b0efa1b2cbea33c | /rsopt/codes/__init__.py | caa06d213dc50724c2cf5cbbba9ae378f38c5a34 | [
"Apache-2.0"
] | permissive | tanxicccc/rsopt | f99d8d721ce37647717b41c08b44f69a065444ae | 8705e937f95a4bbe6ed3fb1a04b78f724a5f3931 | refs/heads/master | 2023-01-06T19:21:40.065806 | 2020-10-24T23:48:34 | 2020-10-24T23:48:34 | 288,584,476 | 0 | 0 | Apache-2.0 | 2020-08-18T23:19:55 | 2020-08-18T23:19:54 | null | UTF-8 | Python | false | false | 403 | py | # Templated codes have schema files that can be used to check input and create run files. Otherwise user
# must supply module containing inputs
_TEMPLATED_CODES = ['elegant', 'opal']
# Supported codes have defined Job class
# FUTURE: 'Unsupported' codes could become a class of supported codes that have expanded user input required to run
_SUPPORTED_CODES = ['python', 'genesis', *_TEMPLATED_CODES]
| [
"[email protected]"
] | |
6d4e8ab0a2d77c2dd96717365794b656c3c1a689 | 673f9b85708affe260b892a4eb3b1f6a0bd39d44 | /Botnets/App/App Web/PDG-env/lib/python3.6/site-packages/pyshark/capture/inmem_capture.py | 2a1f2d1370ad5eb6fc9c98cf1d87cdcfb0d0f0d9 | [
"MIT"
] | permissive | i2tResearch/Ciberseguridad_web | feee3fe299029bef96b158d173ce2d28ef1418e4 | e6cccba69335816442c515d65d9aedea9e7dc58b | refs/heads/master | 2023-07-06T00:43:51.126684 | 2023-06-26T00:53:53 | 2023-06-26T00:53:53 | 94,152,032 | 14 | 0 | MIT | 2023-09-04T02:53:29 | 2017-06-13T00:21:00 | Jupyter Notebook | UTF-8 | Python | false | false | 8,200 | py | import asyncio
import subprocess
import os
import struct
import time
import warnings
from distutils.version import LooseVersion
from pyshark.capture.capture import Capture, StopCapture
DEFAULT_TIMEOUT = 30
class LinkTypes(object):
NULL = 0
ETHERNET = 1
IEEE802_5 = 6
PPP = 9
IEEE802_11 = 105
class InMemCapture(Capture):
def __init__(self, bpf_filter=None, display_filter=None, only_summaries=False,
decryption_key=None, encryption_type='wpa-pwk', decode_as=None,
disable_protocol=None, tshark_path=None, override_prefs=None, use_json=False,
linktype=LinkTypes.ETHERNET, include_raw=False, eventloop=None, custom_parameters=None,
debug=False):
"""
Creates a new in-mem capture, a capture capable of receiving binary packets and parsing them using tshark.
Significantly faster if packets are added in a batch.
:param bpf_filter: BPF filter to use on packets.
:param display_filter: Display (wireshark) filter to use.
:param only_summaries: Only produce packet summaries, much faster but includes very little information
:param decryption_key: Key used to encrypt and decrypt captured traffic.
:param encryption_type: Standard of encryption used in captured traffic (must be either 'WEP', 'WPA-PWD',
or 'WPA-PWK'. Defaults to WPA-PWK).
:param decode_as: A dictionary of {decode_criterion_string: decode_as_protocol} that are used to tell tshark
to decode protocols in situations it wouldn't usually, for instance {'tcp.port==8888': 'http'} would make
it attempt to decode any port 8888 traffic as HTTP. See tshark documentation for details.
:param tshark_path: Path of the tshark binary
:param override_prefs: A dictionary of tshark preferences to override, {PREFERENCE_NAME: PREFERENCE_VALUE, ...}.
:param disable_protocol: Tells tshark to remove a dissector for a specifc protocol.
:param custom_parameters: A dict of custom parameters to pass to tshark, i.e. {"--param": "value"}
"""
super(InMemCapture, self).__init__(display_filter=display_filter, only_summaries=only_summaries,
decryption_key=decryption_key, encryption_type=encryption_type,
decode_as=decode_as, disable_protocol=disable_protocol,
tshark_path=tshark_path, override_prefs=override_prefs,
use_json=use_json, include_raw=include_raw, eventloop=eventloop,
custom_parameters=custom_parameters, debug=debug)
self.bpf_filter = bpf_filter
self._packets_to_write = None
self._current_linktype = linktype
self._current_tshark = None
def get_parameters(self, packet_count=None):
"""
Returns the special tshark parameters to be used according to the configuration of this class.
"""
params = super(InMemCapture, self).get_parameters(packet_count=packet_count)
params += ['-i', '-']
return params
async def _get_tshark_process(self, packet_count=None):
if self._current_tshark:
return self._current_tshark
proc = await super(InMemCapture, self)._get_tshark_process(packet_count=packet_count, stdin=subprocess.PIPE)
self._current_tshark = proc
# Create PCAP header
header = struct.pack("IHHIIII", 0xa1b2c3d4, 2, 4, 0, 0, 0x7fff, self._current_linktype)
proc.stdin.write(header)
return proc
def _get_json_separators(self):
""""Returns the separators between packets in a JSON output
Returns a tuple of (packet_separator, end_of_file_separator, characters_to_disregard).
The latter variable being the number of characters to ignore in order to pass the packet (i.e. extra newlines,
commas, parenthesis).
"""
if self._get_tshark_version() >= LooseVersion("2.6.7"):
return ("%s }" % os.linesep).encode(), ("}%s]" % os.linesep).encode(), 0
else:
return ("}%s%s" % (os.linesep, os.linesep)).encode(), ("}%s%s]" % (os.linesep, os.linesep)).encode(), 1
def _write_packet(self, packet):
# Write packet header
self._current_tshark.stdin.write(struct.pack("IIII", int(time.time()), 0, len(packet), len(packet)))
self._current_tshark.stdin.write(packet)
def parse_packet(self, binary_packet):
"""
Parses a single binary packet and returns its parsed version.
DOES NOT CLOSE tshark. It must be closed manually by calling close() when you're done
working with it.
Use parse_packets when parsing multiple packets for faster parsing
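        Example (illustrative sketch -- the raw ``packet_bytes`` are assumed to come
        from your own source, such as a socket or a pcap record):
            cap = InMemCapture()
            pkt = cap.parse_packet(packet_bytes)
            cap.close()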
"""
return self.parse_packets([binary_packet])[0]
def parse_packets(self, binary_packets):
"""
Parses binary packets and return a list of parsed packets.
DOES NOT CLOSE tshark. It must be closed manually by calling close() when you're done
working with it.
"""
return asyncio.get_event_loop().run_until_complete(self.parse_packets_async(binary_packets))
async def parse_packets_async(self, binary_packets):
"""
A coroutine which parses binary packets and return a list of parsed packets.
DOES NOT CLOSE tshark. It must be closed manually by calling close() when you're done
working with it.
"""
parsed_packets = []
if not self._current_tshark:
await self._get_tshark_process()
for binary_packet in binary_packets:
self._write_packet(binary_packet)
def callback(pkt):
parsed_packets.append(pkt)
if len(parsed_packets) == len(binary_packets):
raise StopCapture()
await self._get_parsed_packet_from_tshark(callback)
return parsed_packets
async def _get_parsed_packet_from_tshark(self, callback):
await self._current_tshark.stdin.drain()
try:
await asyncio.wait_for(self.packets_from_tshark(callback, close_tshark=False), DEFAULT_TIMEOUT)
except asyncio.TimeoutError:
await self.close_async()
raise asyncio.TimeoutError("Timed out while waiting for tshark to parse packet. "
"Try rerunning with cap.set_debug() to see tshark errors. "
"Closing tshark..")
async def close_async(self):
self._current_tshark = None
await super(InMemCapture, self).close_async()
def feed_packet(self, binary_packet, linktype=LinkTypes.ETHERNET):
"""
DEPRECATED. Use parse_packet instead.
This function adds the packet to the packets list, and also closes and reopens tshark for
each packet.
==============
Gets a binary (string) packet and parses & adds it to this capture.
Returns the added packet.
Use feed_packets if you have multiple packets to insert.
By default, assumes the packet is an ethernet packet. For another link type, supply the linktype argument (most
can be found in the class LinkTypes)
"""
warnings.warn("Deprecated method. Use InMemCapture.parse_packet() instead.")
self._current_linktype = linktype
pkt = self.parse_packet(binary_packet)
self.close()
self._packets.append(pkt)
return pkt
def feed_packets(self, binary_packets, linktype=LinkTypes.ETHERNET):
"""
Gets a list of binary packets, parses them using tshark and returns their parsed values.
Keeps the packets in the internal packet list as well.
By default, assumes the packets are ethernet packets. For another link type, supply the linktype argument (most
can be found in the class LinkTypes)
"""
self._current_linktype = linktype
parsed_packets = self.parse_packets(binary_packets)
self._packets.extend(parsed_packets)
self.close()
return parsed_packets
| [
"[email protected]"
] | |
f55bac2f84b841a6ce569f89983b6c04c82dbda8 | dc30b6ecea0a1ad2342244871817a5882f506fda | /Euler Challenge/Euler71.1.py | c3635d3278d68ee5699fad292243f03b338cd532 | [] | no_license | CasperHagenaars/WISB256 | 26e4b272a80d687b22ce7fd48a6040e45e152b85 | d0871a2221f71fe64d7aba4efcd3a1e276c22f34 | refs/heads/master | 2020-05-20T17:03:27.182900 | 2015-06-18T12:43:33 | 2015-06-18T12:43:33 | 34,313,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | import math
import fractions
from time import time
print(fractions.Fraction(3.0/7-0.000001).limit_denominator(1000000)) | [
"[email protected]"
] | |
ac1798e3eaec3b174772f1900d1e578f23c91dbd | 21183612ae42b6e84e010ead82f5ae5346890ac7 | /python/ccxt/braziliex.py | 31c183f7c4f299f3f90229487b6e8e0654596bfd | [
"MIT"
] | permissive | mpalamar/ccxt | f81cce56b9fd585f38ec96026c2fd64c18459934 | d1d71de475db83fd1cd38ed6b8b8873b7a98d5bd | refs/heads/master | 2021-05-13T15:38:19.086675 | 2018-01-09T05:23:07 | 2018-01-09T05:23:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,855 | py | # -*- coding: utf-8 -*-
from ccxt.base.exchange import Exchange
import hashlib
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import InvalidOrder
class braziliex (Exchange):
def describe(self):
return self.deep_extend(super(braziliex, self).describe(), {
'id': 'braziliex',
'name': 'Braziliex',
'countries': 'BR',
'rateLimit': 1000,
# obsolete metainfo interface
'hasFetchTickers': True,
'hasFetchOpenOrders': True,
'hasFetchMyTrades': True,
# new metainfo interface
'has': {
'fetchTickers': True,
'fetchOpenOrders': True,
'fetchMyTrades': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/34703593-c4498674-f504-11e7-8d14-ff8e44fb78c1.jpg',
'api': 'https://braziliex.com/api/v1',
'www': 'https://braziliex.com/',
'doc': 'https://braziliex.com/exchange/api.php',
'fees': 'https://braziliex.com/exchange/fees.php',
},
'api': {
'public': {
'get': [
'currencies',
'ticker',
'ticker/{market}',
'orderbook/{market}',
'tradehistory/{market}',
],
},
'private': {
'post': [
'balance',
'complete_balance',
'open_orders',
'trade_history',
'deposit_address',
'sell',
'buy',
'cancel_order',
],
},
},
'fees': {
'trading': {
'maker': 0.005,
'taker': 0.005,
},
},
'precision': {
'amount': 8,
'price': 8,
},
})
def fetch_currencies(self, params={}):
currencies = self.publicGetCurrencies(params)
ids = list(currencies.keys())
result = {}
for i in range(0, len(ids)):
id = ids[i]
currency = currencies[id]
precision = currency['decimal']
uppercase = id.upper()
code = self.common_currency_code(uppercase)
active = currency['active'] == 1
status = 'ok'
if currency['under_maintenance'] != 0:
active = False
status = 'maintenance'
canWithdraw = currency['is_withdrawal_active'] == 1
canDeposit = currency['is_deposit_active'] == 1
if not canWithdraw or not canDeposit:
active = False
result[code] = {
'id': id,
'code': code,
'name': currency['name'],
'active': active,
'status': status,
'precision': precision,
'wallet': {
'address': None,
'extra': None,
'withdraw': {
'active': canWithdraw,
'fee': currency['txWithdrawalFee'],
},
'deposit': {
'active': canDeposit,
'fee': currency['txDepositFee'],
},
},
'limits': {
'amount': {
'min': currency['minAmountTrade'],
'max': math.pow(10, precision),
},
'price': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'cost': {
'min': None,
'max': None,
},
'withdraw': {
'min': currency['MinWithdrawal'],
'max': math.pow(10, precision),
},
'deposit': {
'min': currency['minDeposit'],
'max': None,
},
},
'info': currency,
}
return result
def fetch_markets(self):
markets = self.publicGetTicker()
ids = list(markets.keys())
result = []
for i in range(0, len(ids)):
id = ids[i]
market = markets[id]
idUpperCase = id.upper()
base, quote = idUpperCase.split('_')
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = base + '/' + quote
active = market['active'] == 1
precision = {
'amount': 8,
'price': 8,
}
lot = math.pow(10, -precision['amount'])
result.append({
'id': id,
'symbol': symbol.upper(),
'base': base,
'quote': quote,
'active': active,
'lot': lot,
'precision': precision,
'limits': {
'amount': {
'min': lot,
'max': math.pow(10, precision['amount']),
},
'price': {
'min': math.pow(10, -precision['price']),
'max': math.pow(10, precision['price']),
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
def parse_ticker(self, ticker, market=None):
symbol = market['symbol']
timestamp = ticker['date']
ticker = ticker['ticker']
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': float(ticker['highestBid24']),
'low': float(ticker['lowestAsk24']),
'bid': float(ticker['highestBid']),
'ask': float(ticker['lowestAsk']),
'vwap': None,
'open': None,
'close': None,
'first': None,
'last': float(ticker['last']),
'change': float(ticker['percentChange']),
'percentage': None,
'average': None,
'baseVolume': float(ticker['baseVolume24']),
'quoteVolume': float(ticker['quoteVolume24']),
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
ticker = self.publicGetTickerMarket(self.extend({
'market': market['id'],
}, params))
ticker = {
'date': self.milliseconds(),
'ticker': ticker,
}
return self.parse_ticker(ticker, market)
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
tickers = self.publicGetTicker(params)
result = {}
timestamp = self.milliseconds()
ids = list(tickers.keys())
for i in range(0, len(ids)):
id = ids[i]
market = self.markets_by_id[id]
symbol = market['symbol']
ticker = {
'date': timestamp,
'ticker': tickers[id],
}
result[symbol] = self.parse_ticker(ticker, market)
return result
def fetch_order_book(self, symbol, params={}):
self.load_markets()
orderbook = self.publicGetOrderbookMarket(self.extend({
'market': self.market_id(symbol),
}, params))
return self.parse_order_book(orderbook, None, 'bids', 'asks', 'price', 'amount')
def parse_trade(self, trade, market=None):
timestamp = None
if 'date_exec' in trade:
timestamp = self.parse8601(trade['date_exec'])
else:
timestamp = self.parse8601(trade['date'])
price = float(trade['price'])
amount = float(trade['amount'])
symbol = market['symbol']
cost = float(trade['total'])
orderId = self.safe_string(trade, 'order_number')
return {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': None,
'order': orderId,
'type': 'limit',
'side': trade['type'],
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
'info': trade,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
trades = self.publicGetTradehistoryMarket(self.extend({
'market': market['id'],
}, params))
return self.parse_trades(trades, market, since, limit)
def fetch_balance(self, params={}):
self.load_markets()
balances = self.privatePostCompleteBalance(params)
result = {'info': balances}
currencies = list(balances.keys())
for i in range(0, len(currencies)):
id = currencies[i]
balance = balances[id]
currency = self.common_currency_code(id)
account = {
'free': float(balance['available']),
'used': 0.0,
'total': float(balance['total']),
}
account['used'] = account['total'] - account['free']
result[currency] = account
return self.parse_balance(result)
def parse_order(self, order, market=None):
symbol = None
if not market:
marketId = self.safe_string(order, 'market')
if marketId:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
if market:
symbol = market['symbol']
timestamp = self.safe_value(order, 'timestamp')
if not timestamp:
timestamp = self.parse8601(order['date'])
price = float(order['price'])
cost = self.safe_float(order, 'total', 0.0)
amount = self.safe_float(order, 'amount')
filledPercentage = self.safe_float(order, 'progress')
filled = amount * filledPercentage
remaining = self.amount_to_precision(symbol, amount - filled)
info = order
if 'info' in info:
info = order['info']
return {
'id': order['order_number'],
'datetime': self.iso8601(timestamp),
'timestamp': timestamp,
'status': 'open',
'symbol': symbol,
'type': 'limit',
'side': order['type'],
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'trades': None,
'fee': self.safe_value(order, 'fee'),
'info': info,
}
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
method = 'privatePost' + self.capitalize(side)
response = getattr(self, method)(self.extend({
'market': market['id'],
# 'price': self.price_to_precision(symbol, price),
# 'amount': self.amount_to_precision(symbol, amount),
'price': price,
'amount': amount,
}, params))
success = self.safe_integer(response, 'success')
if success != 1:
raise InvalidOrder(self.id + ' ' + self.json(response))
parts = response['message'].split(' / ')
parts = parts[1:]
feeParts = parts[5].split(' ')
order = self.parse_order({
'timestamp': self.milliseconds(),
'order_number': response['order_number'],
'type': parts[0].lower(),
'market': parts[0].lower(),
'amount': parts[2].split(' ')[1],
'price': parts[3].split(' ')[1],
'total': parts[4].split(' ')[1],
'fee': {
'cost': float(feeParts[1]),
'currency': feeParts[2],
},
'progress': '0.0',
'info': response,
}, market)
id = order['id']
self.orders[id] = order
return order
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
market = self.market(symbol)
result = self.privatePostCancelOrder(self.extend({
'order_number': id,
'market': market['id'],
}, params))
return result
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
orders = self.privatePostOpenOrders(self.extend({
'market': market['id'],
}, params))
return self.parse_orders(orders['order_open'], market, since, limit)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
trades = self.privatePostTradeHistory(self.extend({
'market': market['id'],
}, params))
return self.parse_trades(trades['trade_history'], market, since, limit)
def fetch_deposit_address(self, currencyCode, params={}):
self.load_markets()
currency = self.currency(currencyCode)
response = self.privatePostDepositAddress(self.extend({
'currency': currency['id'],
}, params))
address = self.safe_string(response['deposit_address'], 'address')
if not address:
raise ExchangeError(self.id + ' fetchDepositAddress failed: ' + self.last_http_response)
return {
'currency': currencyCode,
'address': address,
'status': 'ok',
'info': response,
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + api
query = self.omit(params, self.extract_params(path))
if api == 'public':
url += '/' + self.implode_params(path, params)
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
query = self.extend({
'command': path,
'nonce': self.nonce(),
}, query)
body = self.urlencode(query)
signature = self.hmac(self.encode(body), self.encode(self.secret), hashlib.sha512)
headers = {
'Content-type': 'application/x-www-form-urlencoded',
'Key': self.apiKey,
'Sign': self.decode(signature),
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
success = self.safe_integer(response, 'success')
if success == 0:
message = response['message']
if message == 'Invalid APIKey':
raise AuthenticationError(message)
raise ExchangeError(message)
return response
| [
"[email protected]"
] | |
6f7b4b4469eea1d72517868ec70829943fb7202a | df0df0aa366c323e61a397b2ea54d359bbee3469 | /forms.py | f203bdd474a96cb79067f297555d0cd69932099c | [
"Apache-2.0"
] | permissive | craigderington/snowy-owl-api | d8c3cd47bc64e9d96512a5bee49f5978ac500398 | b61141e8b4d28f82782ca30b7360ec7fe55bb8ba | refs/heads/master | 2023-02-05T00:38:41.008820 | 2020-12-23T22:49:41 | 2020-12-23T22:49:41 | 324,015,562 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | from wtforms import Form, BooleanField, StringField, PasswordField, validators
class LoginForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
    password = PasswordField('Password', [validators.Length(min=6, max=35)])
| [
"[email protected]"
] | |
858debd46c8c5246143b38a6c3044dfbbb758d0b | b08d42933ac06045905d7c005ca9c114ed3aecc0 | /src/learningCurve/ninetyPercent/lrClassifierC.py | a453c046d35a319d2fd070e68cd30511bf66bef3 | [] | no_license | TanemuraKiyoto/PPI-native-detection-via-LR | d148d53f5eb60a4dda5318b371a3048e3f662725 | 897e7188b0da94e87126a4acc0c9a6ff44a64574 | refs/heads/master | 2022-12-05T11:59:01.014309 | 2020-08-10T00:41:17 | 2020-08-10T00:41:17 | 225,272,083 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,133 | py | # 9 September 2019
# Kiyoto Aramis Tanemura
# I modified the rfClassifier.py script to implement a logistic regression classifier. This classifier runs faster than the random forest classifier and Jun previously observed comparable results between logistic regression and random forest classifiers for the protein folding system. Due to the lesser time cost, I may sample a greater hyperparameter space using the logistic regression classifier. If the sampling yields a region in which overfitting is not observed, then I can refine the search. If the results are similar to that of the random forest classifier, then I may have exhausted the dataset for generalizability.
# Modified 26 October 2019 by Kiyoto Aramis Tanemura. Apply logistic regression classifier to CASF-PPI dataset.
# Modified 2020-02-09 by KAT. Code generalized for public use on GitHub.
import pandas as pd
import numpy as np
import os
import json
import pickle
#from multiprocessing import Pool
from time import time
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import RandomizedSearchCV
from sklearn.preprocessing import StandardScaler
from random import shuffle, random
#os.chdir('/mnt/scratch/tanemur1/')
toc = time()
# Randomize input file orders
pathToInput = 'data/comparison_descriptors/'
pathToOutput = 'results/learningCurve/'
fileNames = [x for x in os.listdir(pathToInput) if '.csv' in x]
shuffle(fileNames) # note: shuffle is in-place. Do not assign to variable
# Specify training set fraction
train_fraction = 0.9
if len(fileNames) * train_fraction == int(len(fileNames) * train_fraction):
train_file_number = int(len(fileNames) * train_fraction)
else:
train_file_number = int(len(fileNames) * train_fraction + 1)
x_train = pd.DataFrame()
y_train = pd.DataFrame()
# Read individual csv for comparison descriptors, append to train_data, and partition to x_train, y_train
fileNamesWithPath = [pathToInput + fileName for fileName in fileNames]
def read_csv(filePath):
return pd.read_csv(filePath, index_col = 0)
print('begin read training set')
#with Pool(np.min([train_file_number, 28])) as p:
# train_dataList = list(p.map(read_csv, fileNamesWithPath[:train_file_number]))
train_dataList = list(map(read_csv, fileNamesWithPath[:train_file_number]))
print('begin append DF | ', (time() - toc) / 60, ' min')
# Append DataFrames into one. While loop used to reduce append operations. Iteratively, DFs in a list are appended
# to the following DF.
while len(train_dataList) != 1:
number = int(len(train_dataList) / 2)
for i in range(number):
train_dataList[2 * i] = train_dataList[2 * i].append(train_dataList[2 * i + 1], sort = True)
for j in range(number):
del train_dataList[j + 1]
x_train = train_dataList[0]
del train_dataList
print('train_data dimensions', x_train.shape, ' | ', (time() - toc) / 60, ' min')
y_train = x_train['class']
x_train = x_train.drop('class', axis = 1) # x_train contains only nonbonding descriptors
feature_names = x_train.columns
scaler = StandardScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
y_train = y_train.values
print('Dimensions x_train ', x_train.shape, ' | y_train', y_train.shape)
# Define a logistic regression classifier along with pertinent hyperparameters. Here, default values are used.
clf = LogisticRegression(penalty='l2', verbose = 1)
def sampleRationalVals(minVal, maxVal):
return 2 ** (random() * (np.log2(maxVal) - np.log2(minVal)) + np.log2(minVal))
def sampleRationalList(minVal, maxVal):
theList = []
for i in range(int(2 * np.log2(maxVal - minVal) + 1)):
theVal = sampleRationalVals(minVal, maxVal)
theList.append(theVal)
return theList
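# Note: sampleRationalVals draws a value log-uniformly (uniform in log2-space) between
# minVal and maxVal, so a range such as sampleRationalList(0.001, 1000) spreads candidate
# hyperparameter values across orders of magnitude instead of clustering near the upper bound.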
parameters = {
# include any hyperparameters to sample. Otherwise, leave empty to perform five fold cross validation with default values. For example:
# 'C': sampleRationalList(0.001, 1000),
# 'solver': ['newton-cg', 'lbfgs', 'sag','saga']
}
print('begin RandomizedSearchCV | ' + str((time() - toc)/60) + ' mins')
randomized_search = RandomizedSearchCV(estimator = clf, param_distributions = parameters, n_iter = 1, scoring = 'accuracy', refit = True, cv = 5, verbose = 1, n_jobs = 1, pre_dispatch = 'n_jobs', return_train_score=True)
randomized_search.fit(x_train, y_train)
print('begin output | ', (time() - toc) / 60 / 60, ' hours')
tic = time()
with open(pathToOutput + 'bestParamC.json', 'w') as g:
json.dump(randomized_search.best_estimator_.get_params(), g)
with open(pathToOutput + 'modelC.pkl', 'wb') as h:
pickle.dump(randomized_search, h)
with open(pathToOutput + 'trainingSetC.txt', 'w') as i:
i.write('Training set:\n')
for pdbID in fileNames[:train_file_number]:
i.write(pdbID + '\n')
i.write('\nJob time: ' + str((tic - toc) / 60 / 60) + ' hours')
with open(pathToOutput + 'standardScalerC.pkl', 'wb') as j:
pickle.dump(scaler, j)
bestCoefficient = randomized_search.best_estimator_.coef_
coefDf = pd.DataFrame(bestCoefficient, columns = feature_names)
with open(pathToOutput + 'coefficientsC.csv', 'w') as f:
coefDf.to_csv(f)
| [
"[email protected]"
] | |
bd66eed366baf1ce93deacad00fb25566d2e9611 | c85ec637dd7202eccbab3623f0e12608f2c58c73 | /redditdownloader/tests/integration/processing/handlers/test_ytdl.py | 54bdcc840f71ebbc42637314c6bf87935b3a79c1 | [] | no_license | shadowmoose/RedditDownloader | 2d7b8d68d3be7cd63614c5019e2935e25d8548f8 | ebcb791f78e5d761efcca28b5ebd5b7e1b61df85 | refs/heads/master | 2023-07-13T09:55:21.700858 | 2023-07-02T05:02:18 | 2023-07-02T05:02:18 | 93,103,288 | 1,134 | 120 | null | 2023-05-04T05:57:26 | 2017-06-01T22:03:41 | Python | UTF-8 | Python | false | false | 1,214 | py | from tests.mock import StagedTest, mock_handler_request
from processing.handlers import ytdl
class YTDLHandlerTest(StagedTest):
""" Test the YT_DL Handler's downloading capabilities """
def test_gfycat(self):
""" Attempt Gfycat.com download """
_task, _prog, _file = mock_handler_request(self.dir, 'https://gfycat.com/sarcasticfixedanemoneshrimp')
res = ytdl.handle(_task, _prog)
self.assertTrue(res, "Failed to download Gfycat video!")
self.assertTrue(_file.exists(), "Gfycat video was not downloaded! %s" % res.failure_reason)
self.assertTrue(_file.relative().endswith('.mp4'), 'Failed to use .mp4 extension for video file!')
def test_youtube(self):
""" Attempt Youtube download """
_task, _prog, _file = mock_handler_request(self.dir, 'https://www.youtube.com/watch?v=8URukvnUYTw')
res = ytdl.handle(_task, _prog)
self.assertTrue(res, "Failed to download YouTube video!")
self.assertTrue(_file.exists(), "YouTube video was not downloaded! %s" % res.failure_reason)
self.assertTrue('.' in _file.relative(), "YTDL failed to apply file extension! (%s)" % _file.absolute())
self.assertTrue('unknown' not in _file.relative(), 'Invalid name for video file! (%s)' % _file.absolute())
| [
"[email protected]"
] | |
6ac23f071d374644a614840e4e5d00c178a6e2d0 | d5d0daf16e3dd7621b0f1fefc31f4312d7871043 | /src/jmx4py/jolokia/client.py | 942152f906eed683020d0772327a3cbb3af16d20 | [
"Apache-2.0"
] | permissive | pombredanne/jmx4py | fb5aae878095855e7198d9cac9ec1a8058f9c65a | 7950c9bdbdbd09f9fc13377b2d4a42ddd757309f | refs/heads/master | 2021-01-15T16:17:41.289333 | 2013-09-27T18:21:12 | 2013-09-27T18:21:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,944 | py | # pylint: disable=C0321
""" JMX Client Proxy.
See http://www.jolokia.org/reference/html/protocol.html for a
detailed description of the Jolokia protocol and different ways
to query for information. The client API methods only describe
the major points, and specifics of the Python interface.
@author: jhe
"""
# Copyright 2011 Juergen Hermann
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import datetime
from contextlib import closing
from jmx4py.util import json, auxiliary
from jmx4py.jolokia.errors import JmxResponseError
from jmx4py.jolokia.connection import JmxConnection
_QUOTE_RE = re.compile(r"([!/])")
_UNQUOTE_RE = re.compile(r"!(.)")
def quote(path):
""" Escape a path according to Jolokia v6 rules.
"""
return _QUOTE_RE.sub(r"!\1", path) if path else path
def unquote(path):
""" Un-escape a path according to Jolokia v6 rules.
"""
if not path:
return path
if path.endswith('!') and (len(path) - len(path.rstrip('!'))) % 2:
raise ValueError("Invalid trailing escape in %r" % path)
return _UNQUOTE_RE.sub(r"\1", path)
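# Example (illustrative): Jolokia v6 escaping turns '!' and '/' inside a path part into
# escaped form, so
#   quote("inner/path")    -> "inner!/path"
#   unquote("inner!/path") -> "inner/path"
# and unquote() raises ValueError for a dangling, unpaired trailing '!'.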
#class JmxRequest(object):
# """ JMX Proxy Request.
# """
#
# def __init__(self, **kw):
# """ Populate request from given keyword arguments.
# """
class JmxResponse(dict):
""" JMX Proxy Response. Wraps responses into a fancy accessor.
"""
def __str__(self):
""" Return fully nested representation of JMX response (JSON dump).
"""
return json.dumps(self, indent=2)
def __repr__(self):
""" Return short representation of JMX response.
>>> jp.read("java.lang:type=Memory", "HeapMemoryUsage", path="used")
<JmxResponse type=read status=200 timestamp=2011-11-15T17:44:50 value=4573856>
"""
return "<%s type=%s status=%d timestamp=%s value=%s>" % (
self.__class__.__name__, self["request"]["type"], self["status"],
datetime.datetime.fromtimestamp(self["timestamp"]).isoformat(),
auxiliary.digest_repr(self["value"]),
)
def __getitem__(self, key):
try:
return super(JmxResponse, self).__getitem__(key)
except KeyError:
if '.' in key:
namespace = self["value"]
for part in key.split('.'):
namespace = namespace.get(part, None)
if not namespace:
break
return namespace
elif key in self["value"]:
return self["value"][key]
raise
def __getattr__(self, name):
try:
# TODO: Check if there's such a wrapper already out there!
# TODO: Need to handle nested access (response.foo.bar)
return self[name]
except KeyError:
return getattr(super(JmxResponse, self), name)
class JmxClientConfig(object):
""" JMX Client Proxy Configuration.
        TODO: actually pass these on to the connection!
user User name when authentication is used.
If not set, no authentication is used. If set, password must be set, too
password Password used for authentication. Only used when user is set.
TODO: more parameters?
timeout The timeout in seconds for network operations.
contentCharset Defines the charset to be used per default for encoding content body. ISO-8859-1
expectContinue Activates Expect: 100-Continue handshake for the entity enclosing methods. The purpose of the Expect: 100-Continue handshake to allow a client that is sending a request message with a request body to determine if the origin server is willing to accept the request (based on the request headers) before the client sends the request body. The use of the Expect: 100-continue handshake can result in noticable peformance improvement for entity enclosing requests that require the target server's authentication. true
"""
def __init__(self, url, **kw):
""" Store configuration as given in keyword parameters, mixing in default values.
"""
self.url = url
self.user = kw.get("user", None)
self.password = kw.get("password", None)
self.method = kw.get("method", "POST")
#self. = kw.get("", None)
class JmxClient(object):
""" JMX Client Proxy.
"""
# TODO: Historical values - http://www.jolokia.org/reference/html/protocol.html#history
# TODO: Support bulk requests - http://www.jolokia.org/reference/html/protocol.html#post-request
def __init__(self, url, **kw):
""" Open a proxy connection to a Jolokia agent.
"""
self.cfg = JmxClientConfig(url, **kw)
self.connection = JmxConnection.from_url(self.cfg.url)
def __repr__(self):
""" Return client proxy identification.
"""
return "%s(%r)" % (self.__class__.__name__, self.connection.url)
def _execute(self, **kw):
""" Execute a request as defined by the given keyword arguments.
TODO: Do we need this?
MAX_DEPTH Maximum traversal depth for serialization of complex objects. Use this with a "list" request to restrict the depth of the returned meta data tree.
MAX_COLLECTION_SIZE Maximum size of collections returned during serialization. If larger, a collection is truncated to this size.
MAX_OBJECTS Maximum number of objects returned in the response's value.
IGNORE_ERRORS Option for ignoring errors during JMX operations and JSON serialization. This works only for certain operations like pattern reads and should be either true or false.
"""
req = kw # pass on unchanged keyword params, for now
with closing(self.connection.open()) as handle:
resp = JmxResponse(handle.send(req))
if resp.status != 200:
raise JmxResponseError(resp, req)
return resp
def read(self, mbean, attribute=None, path=None, **kw):
""" A read request gets one or more attributes from one or more
MBeans within a single request.
Various call variants can be used to specify one or more
attributes along with the JMX ObjectName (which can be a pattern). A
path can be set as property for specifying an inner path, too.
A read request for multiple attributes on the same MBean is initiated
by giving a list of attributes to the request. If no attribute is
provided, then all attributes are fetched. The MBean name can be
given as a pattern in which case the attributes are read on all
matching MBeans. If a MBean pattern and multiple attributes are
            requested, then only the values of attributes which match both
are returned, the others are ignored. Paths cannot be used with
multi value reads, though.
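            Example calls (illustrative; they need a reachable Jolokia agent):
                jp.read("java.lang:type=Memory", "HeapMemoryUsage", path="used")
                jp.read("java.lang:type=Memory", ["HeapMemoryUsage", "NonHeapMemoryUsage"])
                jp.read("java.lang:type=*")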
"""
req = dict(type = "read", mbean = mbean)
if attribute: req["attribute"] = attribute
if path: req["path"] = quote(path)
if kw: req.update(kw)
resp = self._execute(**req)
return resp
def write(self):
""" TODO: Implement write()
J4pWriteRequest and J4pWriteResponse
A J4pWriteRequest is used to set the value of an MBean
attribute. Beside the mandatory object and attribute name the
value must be give in the constructor as well. Optionally a path
can be provided, too. Only certain types for the given value can
be serialized properly for calling the Jolokia agent as described
in Section 6.4.2, "Request parameter serialization".
The old value is returned as J4pWriteResponse's value.
J4pExecRequest and J4pExecResponse
"""
resp = self._execute(type = "write")
return resp
def invoke(self):
""" TODO: Implement invoke()
J4pExecRequests are used for executing operation on MBeans.
The constructor takes as mandatory arguments the MBean's object
name, the operation name and any arguments required by the
operation. Only certain types for the given arguments can be
serialized properly for calling the Jolokia agent as described in
Section 6.4.2, "Request parameter serialization".
The returned J4pExecResponse contains the return value of the
operation called.
J4pSearchRequest and J4pSearchResponse
"""
resp = self._execute(type = "invoke")
return resp
def search(self):
""" TODO: Implement search()
A J4pSearchRequest contains a valid single MBean object name
pattern which is used for searching MBeans.
The J4pSearchResponse holds a list of found object names.
J4pListRequest and J4pListResponse
For obtaining meta data on MBeans a J4pListRequest should be
used. It can be used with a inner path to obtain only a subtree
of the response, otherwise the whole tree as described in Section
6.2.5.3, "List response" is returned. With the query parameter
maxDepth can be used to restrict the depth of returned tree.
The single value of a J4pListResponse is a tree (or subtree)
as a JSON object, which has the format described in Section
6.2.5.3, "List response".
J4pVersionRequest
"""
resp = self._execute(type = "search")
return resp
def version(self):
""" Request the Jolokia agent's version information.
See JmxResponse for ways to access the result.
>>> import jmx4py.jolokia
>>> jp = jmx4py.jolokia.JmxClient(("localhost", 8089))
>>> jp
JmxClient('http://localhost:8089/jolokia/')
>>> jp.version().protocol_info[:1]
(6,)
>>> jp.version().agent_info[:2]
(1, 0)
"""
resp = self._execute(type = "version")
resp["value"]["protocol_info"] = tuple(int(i) for i in resp.protocol.split('.'))
resp["value"]["agent_info"] = tuple(int(i) for i in resp.agent.split('.'))
return resp
| [
"[email protected]"
] | |
5cc085f2bcede2101a78d224a389fcf4d5aedfc7 | 40da919c52cfdb9658b7400f26c48c11e124e315 | /ising3D.py | 0d08754fec2e3fa6bd878d34ac543fa856d3c52f | [] | no_license | bvillasen/isingModel | ae71f57c94db58ac35d3bc26b36c944b70fed2b0 | b4d47c8b563cf6f787fe9a764ccdbd560964acbe | refs/heads/master | 2021-01-19T14:07:01.017178 | 2019-11-23T22:36:08 | 2019-11-23T22:36:08 | 14,155,775 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,589 | py | # 3D Ising model simulation
# made by Bruno Villasenor
# contact me at: [email protected]
# personal web page: https://bvillasen.webs.com
# github: https://github.com/bvillasen
#To run you need these complementary files: CUDAising3D.cu, volumeRender.py, CUDAvolumeRender.cu, cudaTools.py
#you can find them in my github:
# https://github.com/bvillasen/volumeRender
# https://github.com/bvillasen/tools
import sys, time, os
import numpy as np
#import pylab as plt
import pycuda.driver as cuda
from pycuda.compiler import SourceModule
import pycuda.gpuarray as gpuarray
import pycuda.curandom as curandom
#Add Modules from other directories
currentDirectory = os.getcwd()
parentDirectory = currentDirectory[:currentDirectory.rfind("/")]
toolsDirectory = parentDirectory + "/tools"
volumeRenderDirectory = parentDirectory + "/volumeRender"
sys.path.extend( [toolsDirectory, volumeRenderDirectory] )
import volumeRender_old as volumeRender
from cudaTools import setCudaDevice, getFreeMemory, gpuArray3DtocudaArray
nPoints = 512
useDevice = 0
for option in sys.argv:
#if option == "128" or option == "256": nPoints = int(option)
if option.find("device=") != -1: useDevice = int(option[-1])
#set simulation volume dimensions
nWidth = nPoints
nHeight = nPoints
nDepth = nPoints
nData = nWidth*nHeight*nDepth
temp = 3
beta = np.float32( 1./temp)
plotVar = 1
upVal = 0.7
downVal = 0.4
#Initialize openGL
volumeRender.nWidth = nWidth
volumeRender.nHeight = nHeight
volumeRender.nDepth = nDepth
volumeRender.windowTitle = "Ising3D spins={0}x{1}x{2} T={3:.1f}".format(nHeight, nWidth, nDepth, float(temp))
volumeRender.initGL()
#set thread grid for CUDA kernels
block_size_x, block_size_y, block_size_z = 8,8,8 #hardcoded, tune to your needs
gridx = nWidth // block_size_x + 1 * ( nWidth % block_size_x != 0 )
gridy = nHeight // block_size_y + 1 * ( nHeight % block_size_y != 0 )
gridz = nDepth // block_size_z + 1 * ( nDepth % block_size_z != 0 )
block3D = (block_size_x, block_size_y, block_size_z)
grid3D = (gridx, gridy, gridz)
grid3D_ising = (gridx//2, gridy, gridz)
#initialize pyCUDA context
cudaDevice = setCudaDevice( devN=useDevice, usingAnimation=True )
#Read and compile CUDA code
print "\nCompiling CUDA code"
cudaCodeString_raw = open("CUDAising3D.cu", "r").read()
cudaCodeString = cudaCodeString_raw # % { "BLOCK_WIDTH":block2D[0], "BLOCK_HEIGHT":block2D[1], "BLOCK_DEPTH":block2D[2], }
cudaCode = SourceModule(cudaCodeString)
tex_spins = cudaCode.get_texref('tex_spinsIn')
surf_spins = cudaCode.get_surfref('surf_spinsOut')
isingKernel = cudaCode.get_function('ising_kernel')
########################################################################
from pycuda.elementwise import ElementwiseKernel
########################################################################
changeIntToFloat = ElementwiseKernel(arguments="float a, float b, int *input, float *output",
operation = "output[i] = a*input[i] + b;",
name = "intToFloat_kernel")
########################################################################
floatToUchar = ElementwiseKernel(arguments="float *input, unsigned char *output",
operation = "output[i] = (unsigned char) ( -255*(input[i]-1));",
name = "floatToUchar_kernel")
########################################################################
def sendToScreen( plotData ):
floatToUchar( plotDataFloat_d, plotData_d )
copyToScreenArray()
########################################################################
def swipe():
randomNumbers_d = curandom.rand((nData))
stepNumber = np.int32(0)
#saveEnergy = np.int32(0)
tex_spins.set_array( spinsInArray_d )
surf_spins.set_array( spinsInArray_d )
isingKernel( stepNumber, np.int32(nWidth), np.int32(nHeight), np.int32(nDepth), beta,
spinsOut_d, randomNumbers_d,
plotDataFloat_d, np.float32(upVal), np.float32(downVal), grid=grid3D_ising, block=block3D )
#copy3D_dtod()
stepNumber = np.int32(1)
#saveEnergy = np.int32(0)
tex_spins.set_array( spinsInArray_d )
surf_spins.set_array( spinsInArray_d )
isingKernel( stepNumber, np.int32(nWidth), np.int32(nHeight), np.int32(nDepth), beta,
spinsOut_d, randomNumbers_d,
plotDataFloat_d, np.float32(upVal), np.float32(downVal), grid=grid3D_ising, block=block3D )
#copy3D_dtod()
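# Note (descriptive; assumes CUDAising3D.cu implements the usual Metropolis update):
# swipe() launches the kernel twice per sweep, with stepNumber 0 and 1, on a grid of
# half width (grid3D_ising), i.e. a checkerboard decomposition so that neighbouring
# spins are never updated within the same pass.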
########################################################################
def stepFunction():
sendToScreen( spinsOut_d )
swipe()
########################################################################
def changePlotting():
global upVal, downVal
if plotVar == 1: upVal, downVal = 0.7, 0.4
if plotVar == 2: upVal, downVal = 0.7, 100.
if plotVar == 3: upVal, downVal = 0, 0.4
########################################################################
def specialKeyboardFunc( key, x, y ):
global temp, beta, plotVar
if key== volumeRender.GLUT_KEY_UP:
temp += 0.1
if key== volumeRender.GLUT_KEY_DOWN:
if temp > 0.1: temp -= 0.1
if key== volumeRender.GLUT_KEY_RIGHT:
plotVar += 1
if plotVar == 4: plotVar = 1
if key== volumeRender.GLUT_KEY_LEFT:
plotVar -= 1
if plotVar == 0: plotVar = 3
beta = np.float32(1./temp)
changePlotting()
volumeRender.windowTitle = "Ising3D spins={0}x{1}x{2} T={3:.1f}".format(nHeight, nWidth, nDepth, float(temp))
########################################################################
########################################################################
#Initialize all gpu data
print "\nInitializing Data"
initialMemory = getFreeMemory( show=True )
#Set initial random distribution
spins_h = (2*np.random.random_integers(0,1,[nDepth, nHeight, nWidth ]) - 1 ).astype(np.int32)
#spins_h = np.ones([nDepth, nHeight, nWidth ]).astype(np.int32)
spinsOut_d = gpuarray.to_gpu( spins_h )
randomNumbers_d = curandom.rand((nData))
#For texture version
spinsInArray_d, copy3D_dtod = gpuArray3DtocudaArray( spinsOut_d, allowSurfaceBind=True )
#For shared version
#memory for plotting
plotDataFloat_d = gpuarray.to_gpu(np.zeros_like(spins_h))
plotData_d = gpuarray.to_gpu(np.zeros([nDepth, nHeight, nWidth], dtype = np.uint8))
volumeRender.plotData_dArray, copyToScreenArray = gpuArray3DtocudaArray( plotData_d )
finalMemory = getFreeMemory( show=False )
print " Total Global Memory Used: {0} Mbytes\n".format(float(initialMemory-finalMemory)/1e6)
#configure volumeRender functions
volumeRender.stepFunc = stepFunction
volumeRender.specialKeys = specialKeyboardFunc
#stepFunction()
#run volumeRender animation
volumeRender.animate()
| [
"[email protected]"
] | |
eb699c07c9e6654200ed8d1ce223b385bfd7154d | bf72d3e5a22e4deaeeb2bbdf25efc942cfa4da08 | /2013spring/cd/w16_gearwidth2.py | 5bf0ac17632b819d1e0c3a750fc5f0746e8568f3 | [] | no_license | chiamingyen/mdeCourse | e9caf13ee9f701d4641e91c04963d60aec5d85e3 | 43dea5078df8ede58d8cfaa013b94d54750feead | refs/heads/master | 2021-01-16T21:22:01.906176 | 2013-10-25T14:25:28 | 2013-10-25T14:25:28 | 8,744,258 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 9,394 | py | #coding: utf-8
'''
"本程式的目的在輔助設計者選擇齒輪的尺寸大小,";
"由於相囓合的兩齒輪其徑節 (Diametral Pitch) 相同";
",齒的大小也相同。因徑節為每單位直徑的齒數,因此徑節愈大,則其齒的尺寸愈小";
";反之,徑節愈小,則齒的尺寸則愈大。";
"一般在設計齒輪對時,為避免使用過大的齒及過寬的齒面厚度,因此必須要就齒輪大小與強度與負載加以設計。";
"一般而言是希望齒輪面的寬度 (Face Width) 能大於3倍周節 (Circular Pitch),以避免選用太大的齒尺寸。";
"並且希望齒輪面的寬度 (Face Width) 能小於5倍周節,以便齒面傳遞負載時能有較為均勻的分佈,因此";
"設 d 為齒輪的節圓直徑(Pitch Diameter),單位為英吋";
"N 為齒數";
"P 為徑節, 即單位英吋的齒數";
"因此 d=N/P";
"設 V 為節線速度(Pitch Line Velocity),單位為英呎/分鐘";
"因此 V=(PI) * d * n/12";
"其中 n 為齒輪轉速,單位為 rpm";
"設傳輸負載大小為 W,單位為 pounds";
"因此 W=33000H/V";
"其中 H 為傳輸功率,單位為 hourse power";
"若設 K 為速度因子(Velocity Factor)";
"因此 K=1200/(1200+V)";
"最後可求出齒輪的齒面寬度(Face Width) F ,單位為英吋";
"即 F=WP/KYS";
"其中 S 為齒面的材料彎曲應力強度";
"設計要求:控制所選齒的尺寸大小,在滿足強度與傳輸負載的要求下,讓齒面厚度介於3倍周節與5倍周節之間。";
"設計者可以選擇的參數:";
"安全係數(建議值為3以上)";
"齒輪減速比";
"馬達傳輸功率,單位為 horse power";
"馬達轉速,單位為 rpm";
"齒制(Gear System)";
"齒輪材料與強度";
'''
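# Illustrative sketch of the formula chain in the docstring above (hypothetical helper,
# not called anywhere in this program; N, P, n, H, Y, S follow the docstring's notation
# and the safety factor multiplies the load the same way gear_width() below does):
def face_width_sketch(N, P, n, H, Y, S, safety=1.0):
    import math
    d = N / P                             # pitch diameter [inch]
    V = math.pi * d * n / 12.0            # pitch line velocity [ft/min]
    W = 33000.0 * H / V                   # transmitted load [pound]
    K = 1200.0 / (1200.0 + V)             # velocity factor
    return W * P * safety / (K * Y * S)   # face width F [inch]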
# This program computes the face width of a spur gear; the database is accessed through pybean with SQLite
# Import the pybean module and the Store and SQLiteWriter helpers used below
from pybean import Store, SQLiteWriter
import math
SQLite連結 = Store(SQLiteWriter("lewis.db", frozen=True))
# Function that interpolates the Lewis form factor
def interpolation(小齒輪齒數, 齒形):
global SQLite連結
    # interpolate the value
    # find the smallest tooth count above the target, i.e. the nearest larger table entry
lewis_factor = SQLite連結.find_one("lewis","gearno > ?",[小齒輪齒數])
if(齒形 == 1):
larger_formfactor = lewis_factor.type1
elif(齒形 == 2):
larger_formfactor = lewis_factor.type2
elif(齒形 == 3):
larger_formfactor = lewis_factor.type3
else:
larger_formfactor = lewis_factor.type4
larger_toothnumber = lewis_factor.gearno
    # find the largest tooth count below the target, i.e. the nearest smaller table entry
lewis_factor = SQLite連結.find_one("lewis","gearno < ? order by gearno DESC",[小齒輪齒數])
if(齒形 == 1):
smaller_formfactor = lewis_factor.type1
elif(齒形 == 2):
smaller_formfactor = lewis_factor.type2
elif(齒形 == 3):
smaller_formfactor = lewis_factor.type3
else:
smaller_formfactor = lewis_factor.type4
smaller_toothnumber = lewis_factor.gearno
    calculated_factor = larger_formfactor + (小齒輪齒數 - larger_toothnumber) * \
        (larger_formfactor - smaller_formfactor) / (larger_toothnumber - smaller_toothnumber)
    # return only five decimal places
return round(calculated_factor, 5)
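# Worked example (hypothetical table values): with Y=0.330 at 22 teeth and Y=0.346 at
# 26 teeth, a 24-tooth pinion interpolates to
#   0.346 + (24 - 26) * (0.346 - 0.330) / (26 - 22) = 0.338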
# Design parameters (defaults)
馬力 = 100
轉速 = 1120
減速比 = 4
齒形 = 4
安全係數 = 3
#unsno_treatment
材料 = "G10350_CD"
小齒輪齒數 = 18
# The design calculation rewritten as a face-width design function
def gear_width(馬力, 轉速, 減速比, 齒形, 安全係數, 材料, 小齒輪齒數):
global SQLite連結
    # the pressure angle follows from the selected tooth form
if(齒形 == 1 or 齒形 == 2):
壓力角 = 20
else:
壓力角 = 25
    # the minimum tooth count follows from the pressure angle
if(壓力角== 20):
最小齒數 = 18
else:
最小齒數 = 12
    # clamp to the minimum tooth count
if 小齒輪齒數 <= 最小齒數:
小齒輪齒數 = 最小齒數
    # tooth counts of 400 or more are treated as a rack
if 小齒輪齒數 >= 400:
小齒輪齒數 = 400
    # look up the strength of the selected material
    # query the steel table by the material's UNS number to get the yield strength S in kpsi, so the value read from the table has to be multiplied by 1000
    # use Store to map the database file to an object; frozen=True means dynamic table creation stays disabled
#SQLite連結 = Store(SQLiteWriter("lewis.db", frozen=True))
    # select the steel table
steel = SQLite連結.new("steel")
    # query the data
    # split the material string into unsno and treatment
unsno, treatment = 材料.split("_", 1)
#print(unsno, treatment)
treatment = treatment.replace("_", " ")
#print(treatment)
material = SQLite連結.find_one("steel","unsno=? and treatment=?",[unsno, treatment])
    # number of rows in the steel table (for checking)
#print(SQLite連結.count("steel"))
#print (material.yield_str)
strengthstress = material.yield_str*1000
    # look up the Lewis form factor from the pinion tooth count and the tooth form
    # first check whether the table holds a direct entry
on_table = SQLite連結.count("lewis","gearno=?",[小齒輪齒數])
if on_table == 1:
        # go straight to the design calculation
#print("直接運算")
#print(on_table)
lewis_factor = SQLite連結.find_one("lewis","gearno=?",[小齒輪齒數])
#print(lewis_factor.type1)
        # pick the formfactor column according to the tooth form
if(齒形 == 1):
formfactor = lewis_factor.type1
elif(齒形 == 2):
formfactor = lewis_factor.type2
elif(齒形 == 3):
formfactor = lewis_factor.type3
else:
formfactor = lewis_factor.type4
else:
        # no direct table entry, so interpolate between neighbouring entries before running the design calculation
#print("必須內插")
#print(interpolation(小齒輪齒數, 齒形))
formfactor = interpolation(小齒輪齒數, 齒形)
    # start the design calculation
ngear = 小齒輪齒數 * 減速比
    # key design choice --- prefer an integral diametralpitch where possible
    # try integer steps first; if nothing is found up to a diametralpitch of 100, switch to 0.25 increments, and if that still fails, declare failure
counter = 0
i = 0.1
facewidth = 0
circularpitch = 0
while (facewidth <= 3 * circularpitch or facewidth >= 5 * circularpitch):
diametralpitch = i
#circularpitch = 3.14159/diametralpitch
circularpitch = math.pi/diametralpitch
pitchdiameter = 小齒輪齒數/diametralpitch
#pitchlinevelocity = 3.14159*pitchdiameter*轉速/12
pitchlinevelocity = math.pi * pitchdiameter * 轉速/12
transmittedload = 33000 * 馬力/pitchlinevelocity
velocityfactor = 1200/(1200 + pitchlinevelocity)
# formfactor is Lewis form factor
        # formfactor needs to be taken from table 13-3 and is determined by tooth number and type of tooth
# formfactor = 0.293
# 90 is the value get from table corresponding to material type
facewidth = transmittedload * diametralpitch * 安全係數/velocityfactor/formfactor/strengthstress
if(counter>5000):
print("超過5000次的設計運算,仍無法找到答案!")
print("可能所選用的傳遞功率過大,或無足夠強度的材料可以使用!")
# 離開while迴圈
break
i += 0.1
counter += 1
facewidth = round(facewidth, 4)
if(counter<5000):
print("進行"+str(counter)+"次重複運算後,得到合用的facewidth值為:"+str(facewidth))
# Run the spur gear face-width design calculation
#gear_width(馬力, 轉速, 減速比, 齒形, 安全係數, 材料, 小齒輪齒數)
# Parse the input file
輸入檔案 = open('design_input.txt', encoding="UTF-8") # the default mode for open() is read
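# Judging from the parsing below, every line of design_input.txt holds seven
# tab-separated fields: power [hp], speed [rpm], reduction ratio, tooth form (1-4),
# factor of safety, material (unsno_treatment) and pinion tooth count, e.g. the
# hypothetical row:  100<TAB>1120<TAB>4<TAB>4<TAB>3<TAB>G10350_CD<TAB>18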
# first pack each line of the data file into a list
輸入= []
while True:
    # readline() reads a single line
    # readlines() would read every line into a list
各行資料 = 輸入檔案.readline()
#print(各行資料,end="")
    # the next two lines make sure the while loop exits after the last line, so it cannot run forever
    if len(各行資料) == 0: # a line of zero characters means the end of the file has been reached
break
    # strip the trailing newline from the line
各行資料 = 各行資料.rstrip()
#print(各行資料,end="")
    # split the line on the "\t" delimiter and store it as a list; from here on 各行資料 is a list
各行資料 = 各行資料.split("\t")
'''
# 取得設計參數
馬力 = 100
轉速 = 1120
減速比 = 4
齒形 = 4
安全係數 = 3
#unsno_treatment
材料 = "G10350_CD"
小齒輪齒數 = 18
'''
馬力 = int(各行資料[0])
轉速 = int(各行資料[1])
減速比 = float(各行資料[2])
齒形 = int(各行資料[3])
安全係數 = float(各行資料[4])
材料 = 各行資料[5]
小齒輪齒數 = int(各行資料[6])
gear_width(馬力, 轉速, 減速比, 齒形, 安全係數, 材料, 小齒輪齒數)
    # the parsed fields can be printed here for checking
#print(各行資料)
    # the per-line field lists could also be stacked into one list of numbers
#輸入.append(各行資料)
#print(輸入)
# after the values of each input line are read, gear_width is called to run the face-width design
輸入檔案.close()
| [
"[email protected]"
] | |
5cfb3d0d5e2118c2eb69149f2e71449e382566cd | 8d014a0120864b42748ef63dddfa3c733370118c | /layint_api/models/clair_layer.py | 930dd8c648cdb805eef50bded4091bd69bb4939c | [
"Apache-2.0",
"LicenseRef-scancode-unknown"
] | permissive | LayeredInsight/layint_api_python | 3a6cf0bf62219f09010b828d7e02c2f3852a6f6f | a5c9a5b24098bd823c5102b7ab9e4745432f19b4 | refs/heads/develop | 2020-03-27T05:43:35.831400 | 2018-10-15T22:28:54 | 2018-10-15T22:28:54 | 146,044,385 | 0 | 0 | Apache-2.0 | 2018-10-15T22:28:55 | 2018-08-24T22:11:08 | Python | UTF-8 | Python | false | false | 7,525 | py | # coding: utf-8
"""
Layered Insight Assessment, Compliance, Witness & Control
LI Assessment & Compliance performs static vulnerability analysis, license and package compliance. LI Witness provides deep insight and analytics into containerized applications. Control provides dynamic runtime security and analytics for containerized applications. You can find out more about the Layered Insight Suite at [http://layeredinsight.com](http://layeredinsight.com).
OpenAPI spec version: 0.10
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ClairLayer(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'namespace_name': 'str',
'path': 'str',
'parent_name': 'str',
'format': 'str',
'indexed_by_version': 'int',
'features': 'list[ClairFeature]'
}
attribute_map = {
'name': 'Name',
'namespace_name': 'NamespaceName',
'path': 'Path',
'parent_name': 'ParentName',
'format': 'Format',
'indexed_by_version': 'IndexedByVersion',
'features': 'Features'
}
def __init__(self, name=None, namespace_name=None, path=None, parent_name=None, format=None, indexed_by_version=None, features=None):
"""
ClairLayer - a model defined in Swagger
"""
self._name = None
self._namespace_name = None
self._path = None
self._parent_name = None
self._format = None
self._indexed_by_version = None
self._features = None
if name is not None:
self.name = name
if namespace_name is not None:
self.namespace_name = namespace_name
if path is not None:
self.path = path
if parent_name is not None:
self.parent_name = parent_name
if format is not None:
self.format = format
if indexed_by_version is not None:
self.indexed_by_version = indexed_by_version
if features is not None:
self.features = features
@property
def name(self):
"""
Gets the name of this ClairLayer.
Machine name of layer
:return: The name of this ClairLayer.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this ClairLayer.
Machine name of layer
:param name: The name of this ClairLayer.
:type: str
"""
self._name = name
@property
def namespace_name(self):
"""
Gets the namespace_name of this ClairLayer.
Name of namespace of this layer
:return: The namespace_name of this ClairLayer.
:rtype: str
"""
return self._namespace_name
@namespace_name.setter
def namespace_name(self, namespace_name):
"""
Sets the namespace_name of this ClairLayer.
Name of namespace of this layer
:param namespace_name: The namespace_name of this ClairLayer.
:type: str
"""
self._namespace_name = namespace_name
@property
def path(self):
"""
Gets the path of this ClairLayer.
:return: The path of this ClairLayer.
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""
Sets the path of this ClairLayer.
:param path: The path of this ClairLayer.
:type: str
"""
self._path = path
@property
def parent_name(self):
"""
Gets the parent_name of this ClairLayer.
Parent of this layer - reference to container
:return: The parent_name of this ClairLayer.
:rtype: str
"""
return self._parent_name
@parent_name.setter
def parent_name(self, parent_name):
"""
Sets the parent_name of this ClairLayer.
Parent of this layer - reference to container
:param parent_name: The parent_name of this ClairLayer.
:type: str
"""
self._parent_name = parent_name
@property
def format(self):
"""
Gets the format of this ClairLayer.
:return: The format of this ClairLayer.
:rtype: str
"""
return self._format
@format.setter
def format(self, format):
"""
Sets the format of this ClairLayer.
:param format: The format of this ClairLayer.
:type: str
"""
self._format = format
@property
def indexed_by_version(self):
"""
Gets the indexed_by_version of this ClairLayer.
:return: The indexed_by_version of this ClairLayer.
:rtype: int
"""
return self._indexed_by_version
@indexed_by_version.setter
def indexed_by_version(self, indexed_by_version):
"""
Sets the indexed_by_version of this ClairLayer.
:param indexed_by_version: The indexed_by_version of this ClairLayer.
:type: int
"""
self._indexed_by_version = indexed_by_version
@property
def features(self):
"""
Gets the features of this ClairLayer.
:return: The features of this ClairLayer.
:rtype: list[ClairFeature]
"""
return self._features
@features.setter
def features(self, features):
"""
Sets the features of this ClairLayer.
:param features: The features of this ClairLayer.
:type: list[ClairFeature]
"""
self._features = features
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, ClairLayer):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
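# Minimal usage sketch (illustrative only, not part of the generated module; the digest,
# namespace and format strings below are made-up example values):
#
#   layer = ClairLayer(name="sha256:example-digest",
#                      namespace_name="debian:10",
#                      format="Docker")
#   print(layer.to_dict())   # -> {'name': 'sha256:example-digest', 'namespace_name': 'debian:10', ...}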
| [
"Scott Oberg"
] | Scott Oberg |