blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0001b37bd0d1d6b08e473e5f1c41d7bc88ba50bd | 48a8430d19c4d8d6fdcecf1cb9875d74b5efce6a | /CycleGAN/data_loader.py | 7cf7d6a2fd0954c3313fa0ba7bc7a498ee9437a9 | []
| no_license | orange-eng/GAN | af00f469b763893b2e474f8adb83460164c843e0 | 7a7fafa4c6e9aac0da73791ca646b6503c39b24f | refs/heads/main | 2023-02-25T20:21:54.825164 | 2021-01-25T08:34:41 | 2021-01-25T08:34:41 | 324,327,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,628 | py | import cv2
from glob import glob
# glob是python自己带的一个文件操作相关模块,用它可以查找符合自己目的的文件,就类似于Windows下的文件搜索
# https://www.cnblogs.com/lovemo1314/archive/2011/04/26/2029556.html
import numpy as np
import os
import sys
# Absolute path of the directory containing the script being executed.
apath = os.path.abspath(os.path.dirname(sys.argv[0]))
# List every entry under the bundled monet2photo dataset directory.
path = glob(apath+"/datasets/monet2photo/*")
print(path)  # NOTE(review): debug output at import time — consider removing
class DataLoader():
    """Load and batch paired image domains (A/B) for CycleGAN training.

    Images are read from ``<apath>/datasets/<dataset_name>/{train,test}{A,B}``,
    resized to ``img_res`` and scaled to the range [-1, 1].
    """

    def __init__(self, dataset_name, img_res=(128, 128)):
        self.img_res = img_res          # (width, height) passed to cv2.resize
        self.dataset_name = dataset_name

    def load_data(self, domain, batch_size=1, is_testing=False):
        """Return ``batch_size`` random images from one domain ('A' or 'B').

        Each image is resized to ``img_res`` and normalised to [-1, 1].
        """
        data_type = "train%s" % domain if not is_testing else "test%s" % domain
        path = glob(apath + "/datasets/%s/%s/*" % (self.dataset_name, data_type))
        batch_images = np.random.choice(path, size=batch_size)
        imgs = []
        for img_path in batch_images:
            img = self.imread(img_path)
            img = cv2.resize(img, self.img_res)  # e.g. 128x128x3
            img = np.array(img) / 127.5 - 1      # scale [0,255] -> [-1,1]
            # BUG FIX: removed cv2.imshow/cv2.waitKey(0) debug calls that
            # blocked on a key press for every image loaded.
            imgs.append(img)
        return imgs

    def load_batch(self, batch_size=1, is_testing=False):
        """Yield ``(imgs_A, imgs_B)`` float32 batches, both scaled to [-1, 1].

        A generator: each ``next()`` resumes after the previous ``yield``.
        """
        data_type = "train" if not is_testing else "val"
        path_A = glob(apath + './datasets/%s/%sA/*' % (self.dataset_name, data_type))
        path_B = glob(apath + './datasets/%s/%sB/*' % (self.dataset_name, data_type))
        # Number of full batches is limited by the smaller domain.
        self.n_batches = int(min(len(path_A), len(path_B)) / batch_size)
        print("min:", int(min(len(path_A), len(path_B))))
        total_samples = self.n_batches * batch_size
        # Sample without replacement so no image repeats within an epoch.
        path_A = np.random.choice(path_A, total_samples, replace=False)
        path_B = np.random.choice(path_B, total_samples, replace=False)
        # NOTE: the final batch is intentionally skipped (keras-GAN convention).
        for i in range(self.n_batches - 1):
            batch_A = path_A[i * batch_size:(i + 1) * batch_size]
            batch_B = path_B[i * batch_size:(i + 1) * batch_size]
            imgs_A, imgs_B = [], []
            for img_A, img_B in zip(batch_A, batch_B):
                img_A = self.imread(img_A)
                img_B = self.imread(img_B)
                img_A = cv2.resize(img_A, self.img_res)
                img_B = cv2.resize(img_B, self.img_res)
                imgs_A.append(img_A)
                imgs_B.append(img_B)
            imgs_A = np.array(imgs_A, dtype=np.float32) / 127.5 - 1
            imgs_B = np.array(imgs_B, dtype=np.float32) / 127.5 - 1
            yield imgs_A, imgs_B

    def imread(self, path):
        """Read an image file and convert OpenCV's BGR layout to RGB."""
        img = cv2.imread(path)  # NOTE(review): returns None on unreadable file
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        return img
# if __name__ == "__main__":
# Data = DataLoader(dataset_name="monet2photo")
# for batch_i,(imgs_A,imgs_B) in enumerate(Data.load_batch(50)):
# print(batch_i)
| [
"[email protected]"
]
| |
5438edc9a22551e8091a4992b211263f519f6cce | 8e939e0f075a14377d87e0eb7729e4f0818f1df9 | /zarc/models_2017-08-04-06:42:45.py | 0831485aa5117c669efb76a2c672493a20217ba0 | [
"MIT"
]
| permissive | mimcomp/caseke | 072d076c9442c19916d8f71ec25fa45676031914 | 3c0749a431bb2e2c82bcb292d528c748bea8b1ba | refs/heads/master | 2020-06-02T15:42:24.159728 | 2019-06-03T16:34:09 | 2019-06-03T16:34:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53,466 | py | # coding: utf-8
# AUTOGENERATED BY gen_script.sh from kp2.py
# Copyright (C) Nyimbi Odero, Fri Aug 4 06:40:31 EAT 2017
from datetime import timedelta, datetime, date

from flask import Markup, url_for
from flask_appbuilder import Model
from flask_appbuilder.models.mixins import AuditMixin, FileColumn, ImageColumn, UserExtensionMixin
from flask_appbuilder.models.decorators import renders
from flask_appbuilder.filemanager import ImageManager
from sqlalchemy import func
from sqlalchemy import (Column, Integer, String, ForeignKey,
                        Sequence, Float, Text, BigInteger, Date,
                        DateTime, Time, Boolean, Index, CheckConstraint,
                        UniqueConstraint, ForeignKeyConstraint, Numeric, LargeBinary, Table)
from sqlalchemy.orm import relationship, query, defer, deferred
from sqlalchemy.dialects.postgresql import *
from sqlalchemy.sql import func
from sqlalchemy_utils import aggregated, force_auto_coercion

from .mixins import *
# Here is how to extend the User model
#class UserExtended(Model, UserExtensionMixin):
# contact_group_id = Column(Integer, ForeignKey('contact_group.id'), nullable=True)
# contact_group = relationship('ContactGroup')
# UTILITY CLASSES
import arrow, enum
import enum
# Initialize sqlalchemy_utils
#force_auto_coercion()
class Lawyer(PersonMixin, ContactMixin, AuditMixin, Model):
    """An advocate admitted to the bar, optionally attached to a law firm."""
    __tablename__ = 'Lawyers'

    def ViewName(self):
        """Return the conventional FAB view name for this model ('LawyerView')."""
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Render the full-size photo as a linked Bootstrap thumbnail."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        else:
            # No photo stored: emit an empty image placeholder.
            return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                          '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Render the thumbnail-size photo as a linked Bootstrap thumbnail."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        else:
            return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                          '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Render a print button linking to this record's view.

        (Earlier PDF-rendering experiments removed; kept as a plain link.)
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip"' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """Render an HTML5 audio player for this record's audio resource."""
        vn = self.ViewName()
        # BUG FIX: the closing '>' of the <source> tag was outside the string
        # literal ("...audio/mpeg"'> +'<i...'), which applied unary '+' to a
        # str and raised TypeError at runtime.
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    # edit_form_extra_fields = {'field2': TextField('field2',
    #                           widget=BS3TextFieldROWidget())}

    id = Column(Integer, primary_key=True, autoincrement=True)
    law_firm = Column(ForeignKey(u'lawfirm.id'), index=True)   # employing firm (optional)
    gender = Column(ForeignKey(u'gender.id'), index=True)
    barnumber = Column(String(20))                             # bar admission number
    admissiondate = Column(Date)                               # date admitted to the bar
    gender1 = relationship(u'Gender', primaryjoin='Lawyer.gender == Gender.id', backref=u'lawyers')
    lawfirm = relationship(u'Lawfirm', primaryjoin='Lawyer.law_firm == Lawfirm.id', backref=u'lawyers')
class Medevent(ActivityMixin, AuditMixin, Model):
    """A medical event; descriptive fields come from ActivityMixin/AuditMixin."""
    __tablename__ = 'Medevent'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Bail(AuditMixin, Model):
    """Bail granted to a defendant at a hearing, with payment status."""
    __tablename__ = 'bail'
    id = Column(Integer, primary_key=True, autoincrement=True)
    hearing = Column(ForeignKey(u'hearing.id'), nullable=False, index=True)
    defendant = Column(ForeignKey(u'defendant.id'), nullable=False, index=True)
    amountgranted = Column(Numeric(12, 2))      # bail amount set by the court
    noofsureties = Column(Integer, nullable=False)
    paid = Column(Boolean)
    paydate = Column(Date)
    defendant1 = relationship(u'Defendant', primaryjoin='Bail.defendant == Defendant.id', backref=u'bails')
    hearing1 = relationship(u'Hearing', primaryjoin='Bail.hearing == Hearing.id', backref=u'bails')
    # Many-to-many: sureties backing this bail (via bail_surety).
    surety = relationship(u'Surety', secondary='bail_surety', backref=u'bails')
# Association table: which sureties guarantee which bail.
bail_surety = Table(
    'bail_surety', Model.metadata,
    Column('bail', ForeignKey(u'bail.id'), primary_key=True, nullable=False),
    Column('surety', ForeignKey(u'surety.id'), primary_key=True, nullable=False, index=True)
)
class Case(AuditMixin, Model):
__tablename__ = 'case'
id = Column(Integer, primary_key=True, autoincrement=True)
casename = Column(String(200), nullable=False)
initialreport = Column(Text, nullable=False)
priority = Column(Integer, nullable=False)
investigationassigmentdate = Column(DateTime)
investigationassignmentnote = Column(Text, nullable=False)
investigationplan = Column(Text, nullable=False)
investigationsummary = Column(Text, nullable=False)
investigationreview = Column(Text)
agadvicerequested = Column(Boolean)
agadvicedate = Column(Date)
agadvice = Column(Text, nullable=False)
chargesheet = Column(Text, nullable=False)
sendtotrial = Column(Boolean, nullable=False)
docketnumber = Column(String(100))
nameofcase = Column(String(400))
chargedate = Column(DateTime)
judgement = Column(Text, nullable=False)
judgementdate = Column(DateTime)
sentencelengthyr = Column(Integer)
sentencelengthmnth = Column(Integer)
senetencelenghtdays = Column(Integer)
sentencestartdate = Column(Date)
sentenceexpirydate = Column(Date)
fineamount = Column(Numeric(12, 2))
caseappealed = Column(Boolean)
appealdate = Column(DateTime)
appealexpiry = Column(Date)
caseclosed = Column(Boolean)
closedate = Column(Date)
policestation = relationship(u'Policestation', secondary='case_policestation', backref=u'cases')
natureofsuit = relationship(u'Natureofsuit', secondary='case_natureofsuit', backref=u'cases')
plaintiff = relationship(u'Plaintiff', secondary='case_plaintiff', backref=u'cases')
witness = relationship(u'Witnes', secondary='case_witness', backref=u'cases')
prosecutor = relationship(u'Prosecutor', secondary='case_prosecutor', backref=u'prosecutor_cases')
policeofficer = relationship(u'Policeofficer', secondary='case_policeofficer', backref=u'policeofficer_cases')
policeofficer1 = relationship(u'Policeofficer', secondary='case_policeofficer_2', backref=u'policeofficer_cases_0')
casecategory = relationship(u'Casecategory', secondary='case_casecategory', backref=u'cases')
defendant = relationship(u'Defendant', secondary='case_defendant', backref=u'cases')
prosecutor1 = relationship(u'Prosecutor', secondary='case_prosecutor_2', backref=u'prosecutor_cases_0')
causeofaction = relationship(u'Causeofaction', secondary='case_causeofaction', backref=u'cases')
# Association tables linking Case to its many-to-many partners.
case_casecategory = Table(
    'case_casecategory', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('casecategory', ForeignKey(u'casecategory.id'), primary_key=True, nullable=False, index=True)
)

case_causeofaction = Table(
    'case_causeofaction', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('causeofaction', ForeignKey(u'causeofaction.id'), primary_key=True, nullable=False, index=True)
)

case_defendant = Table(
    'case_defendant', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('defendant', ForeignKey(u'defendant.id'), primary_key=True, nullable=False, index=True)
)

case_natureofsuit = Table(
    'case_natureofsuit', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('natureofsuit', ForeignKey(u'natureofsuit.id'), primary_key=True, nullable=False, index=True)
)

case_plaintiff = Table(
    'case_plaintiff', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('plaintiff', ForeignKey(u'plaintiff.id'), primary_key=True, nullable=False, index=True)
)

case_policeofficer = Table(
    'case_policeofficer', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('policeofficer', ForeignKey(u'policeofficer.id'), primary_key=True, nullable=False, index=True)
)

# NOTE(review): duplicates case_policeofficer — presumably two distinct officer
# roles on a case; confirm intent.
case_policeofficer_2 = Table(
    'case_policeofficer_2', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('policeofficer', ForeignKey(u'policeofficer.id'), primary_key=True, nullable=False, index=True)
)

case_policestation = Table(
    'case_policestation', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('policestation', ForeignKey(u'policestation.id'), primary_key=True, nullable=False, index=True)
)

case_prosecutor = Table(
    'case_prosecutor', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('prosecutor', ForeignKey(u'prosecutor.id'), primary_key=True, nullable=False, index=True)
)

case_prosecutor_2 = Table(
    'case_prosecutor_2', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('prosecutor', ForeignKey(u'prosecutor.id'), primary_key=True, nullable=False, index=True)
)

case_witness = Table(
    'case_witness', Model.metadata,
    Column('case', ForeignKey(u'case.id'), primary_key=True, nullable=False),
    Column('witness', ForeignKey(u'witness.id'), primary_key=True, nullable=False, index=True)
)
class Casecategory(RefTypeMixin, AuditMixin, Model):
    """Reference data: a category of case (name fields from RefTypeMixin)."""
    __tablename__ = 'casecategory'
    id = Column(Integer, primary_key=True, autoincrement=True)
    indictable = Column(Boolean)   # whether offences in this category are indictable
class Causeofaction(RefTypeMixin, AuditMixin, Model):
    """A cause of action; self-referential so causes can nest under a parent."""
    __tablename__ = 'causeofaction'
    id = Column(Integer, primary_key=True, autoincrement=True)
    criminal = Column(Boolean, nullable=False)   # criminal (vs. civil) cause
    parent_coa = Column(ForeignKey(u'causeofaction.id'), index=True)
    # Self-referential parent/children hierarchy.
    parent = relationship(u'Causeofaction', remote_side=[id], primaryjoin='Causeofaction.parent_coa == Causeofaction.id', backref=u'causeofactions')
    filing = relationship(u'Filing', secondary='causeofaction_filing', backref=u'causeofactions')
    hearing = relationship(u'Hearing', secondary='causeofaction_hearing', backref=u'causeofactions')
# Association tables linking Causeofaction to filings and hearings.
causeofaction_filing = Table(
    'causeofaction_filing', Model.metadata,
    Column('causeofaction', ForeignKey(u'causeofaction.id'), primary_key=True, nullable=False),
    Column('filing', ForeignKey(u'filing.id'), primary_key=True, nullable=False, index=True)
)

causeofaction_hearing = Table(
    'causeofaction_hearing', Model.metadata,
    Column('causeofaction', ForeignKey(u'causeofaction.id'), primary_key=True, nullable=False),
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False, index=True)
)
class Cell(RefTypeMixin, AuditMixin, Model):
    """A holding cell belonging to a prison."""
    __tablename__ = 'cell'
    id = Column(Integer, primary_key=True, autoincrement=True)
    prison = Column(ForeignKey(u'prison.id'), nullable=False, index=True)
    prison1 = relationship(u'Prison', primaryjoin='Cell.prison == Prison.id', backref=u'cells')
class Commitaltype(RefTypeMixin, AuditMixin, Model):
    """Reference data: a type of prison committal."""
    __tablename__ = 'commitaltype'
    id = Column(Integer, primary_key=True, autoincrement=True)
    prisoncommital = relationship(u'Prisoncommital', secondary='commitaltype_prisoncommital', backref=u'commitaltypes')
# Association table for Commitaltype <-> Prisoncommital; the committal side
# uses a composite key (prison, warrantno), hence the explicit FK constraint.
commitaltype_prisoncommital = Table(
    'commitaltype_prisoncommital', Model.metadata,
    Column('commitaltype', ForeignKey(u'commitaltype.id'), primary_key=True, nullable=False),
    Column('prisoncommital_prison', Integer, primary_key=True, nullable=False),
    Column('prisoncommital_warrantno', String(100), primary_key=True, nullable=False),
    ForeignKeyConstraint(['prisoncommital_prison', 'prisoncommital_warrantno'], [u'prisoncommital.prison', u'prisoncommital.warrantno']),
    Index('idx_commitaltype_prisoncommital', 'prisoncommital_prison', 'prisoncommital_warrantno')
)
class Constituency(RefTypeMixin, AuditMixin, Model):
    """An electoral constituency within a county, optionally tied to a town."""
    __tablename__ = 'constituency'
    id = Column(Integer, primary_key=True, autoincrement=True)
    county = Column(ForeignKey(u'county.id'), nullable=False, index=True)
    town = Column(ForeignKey(u'town.id'), index=True)
    county1 = relationship(u'County', primaryjoin='Constituency.county == County.id', backref=u'constituencies')
    town1 = relationship(u'Town', primaryjoin='Constituency.town == Town.id', backref=u'constituencies')
class County(AuditMixin, Model):
    """A county (administrative region); only the surrogate key is declared here."""
    __tablename__ = 'county'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Court(PlaceMixin, RefTypeMixin, AuditMixin, Model):
    """A physical courtroom attached to a court station."""
    __tablename__ = 'court'
    id = Column(Integer, primary_key=True, autoincrement=True)
    court_station = Column(ForeignKey(u'courtstation.id'), nullable=False, index=True)
    courtstation = relationship(u'Courtstation', primaryjoin='Court.court_station == Courtstation.id', backref=u'courts')
class Courtlevel(RefTypeMixin, AuditMixin, Model):
    """Reference data: the level of a court (fields from RefTypeMixin)."""
    __tablename__ = 'courtlevel'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Courtstation(PlaceMixin, AuditMixin, Model):
    """A court station: a located site hosting one or more courts."""
    __tablename__ = 'courtstation'
    id = Column(Integer, primary_key=True, autoincrement=True)
    residentmagistrate = Column(String(100))            # name of the resident magistrate (optional)
    registrar = Column(String(100), nullable=False)     # name of the registrar
    court_level = Column(ForeignKey(u'courtlevel.id'), nullable=False, index=True)
    num_of_courts = Column(Integer)
    town = Column(ForeignKey(u'town.id'), nullable=False, index=True)
    courtlevel = relationship(u'Courtlevel', primaryjoin='Courtstation.court_level == Courtlevel.id', backref=u'courtstations')
    town1 = relationship(u'Town', primaryjoin='Courtstation.town == Town.id', backref=u'courtstations')
class Defendant(PersonMedicalMixin, PersonDocMixin, BiometricMixin, EmploymentMixin, PersonMixin, ContactMixin, AuditMixin, Model):
    """A defendant in custody: personal, medical and biometric data via mixins."""
    __tablename__ = 'defendant'

    def ViewName(self):
        """Return the conventional FAB view name for this model ('DefendantView')."""
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Render the full-size photo as a linked Bootstrap thumbnail."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        else:
            # No photo stored: emit an empty image placeholder.
            return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                          '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Render the thumbnail-size photo as a linked Bootstrap thumbnail."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        else:
            return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                          '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Render a print button linking to this record's view.

        (Earlier PDF-rendering experiments removed; kept as a plain link.)
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip"' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """Render an HTML5 audio player for this record's audio resource."""
        vn = self.ViewName()
        # BUG FIX: the closing '>' of the <source> tag was outside the string
        # literal, which applied unary '+' to a str (TypeError at runtime).
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    # edit_form_extra_fields = {'field2': TextField('field2',
    #                           widget=BS3TextFieldROWidget())}

    id = Column(Integer, primary_key=True, autoincrement=True)
    juvenile = Column(Boolean)                                       # minor at time of charge
    gender = Column(ForeignKey(u'gender.id'), nullable=False, index=True)
    cell = Column(ForeignKey(u'cell.id'), nullable=False, index=True)  # current holding cell
    cell1 = relationship(u'Cell', primaryjoin='Defendant.cell == Cell.id', backref=u'defendants')
    gender1 = relationship(u'Gender', primaryjoin='Defendant.gender == Gender.id', backref=u'defendants')
    Medevent = relationship(u'Medevent', secondary='defendant_medevent', backref=u'defendants')
    hearing = relationship(u'Hearing', secondary='defendant_hearing', backref=u'defendants')
# Association tables linking Defendant to hearings and medical events.
defendant_hearing = Table(
    'defendant_hearing', Model.metadata,
    Column('defendant', ForeignKey(u'defendant.id'), primary_key=True, nullable=False),
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False, index=True)
)

defendant_medevent = Table(
    'defendant_medevent', Model.metadata,
    Column('defendant', ForeignKey(u'defendant.id'), primary_key=True, nullable=False),
    Column('medevent', ForeignKey(u'Medevent.id'), primary_key=True, nullable=False, index=True)
)
class Discipline(ActivityMixin, AuditMixin, Model):
    """A disciplinary record against a defendant (details via ActivityMixin)."""
    __tablename__ = 'discipline'
    id = Column(Integer, primary_key=True, autoincrement=True)
    defendant = Column(ForeignKey(u'defendant.id'), nullable=False, index=True)
    defendant1 = relationship(u'Defendant', primaryjoin='Discipline.defendant == Defendant.id', backref=u'disciplines')
class Doctemplate(DocMixin, RefTypeMixin, AuditMixin, Model):
    """A reusable document template (content fields from DocMixin)."""
    __tablename__ = 'doctemplate'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Document(DocMixin, AuditMixin, Model):
    """A document attached to a filing, optionally based on a template."""
    __tablename__ = 'document'
    id = Column(Integer, primary_key=True, autoincrement=True)
    doc_template = Column(ForeignKey(u'doctemplate.id'), index=True)
    confidential = Column(Boolean)
    pagecount = Column(Integer)
    filing = Column(ForeignKey(u'filing.id'), nullable=False, index=True)
    locked = Column(Boolean)   # locked against further edits
    doctemplate = relationship(u'Doctemplate', primaryjoin='Document.doc_template == Doctemplate.id', backref=u'documents')
    filing1 = relationship(u'Filing', primaryjoin='Document.filing == Filing.id', backref=u'documents')
class Filing(AuditMixin, Model):
    """A court filing on a case, with fee assessment and payment tracking."""
    __tablename__ = 'filing'
    id = Column(Integer, primary_key=True, autoincrement=True)
    uploaddate = Column(DateTime)
    pagecount = Column(Integer)
    totalfees = Column(Numeric(12, 2), nullable=False)
    filing_attorney = Column(ForeignKey(u'Lawyers.id'), nullable=False, index=True)
    filing_prosecutor = Column(ForeignKey(u'prosecutor.id'), nullable=False, index=True)
    assessedfees = Column(Numeric(12, 2))
    receiptverified = Column(Boolean)
    amountpaid = Column(Numeric(12, 2))
    feebalance = Column(Numeric(12, 2))
    paymenthistory = Column(Text, nullable=False)
    case = Column(ForeignKey(u'case.id'), nullable=False, index=True)
    urgent = Column(Boolean)                     # flagged for urgent handling
    urgentreason = Column(Text, nullable=False)  # justification for urgency
    case1 = relationship(u'Case', primaryjoin='Filing.case == Case.id', backref=u'filings')
    Lawyer = relationship(u'Lawyer', primaryjoin='Filing.filing_attorney == Lawyer.id', backref=u'filings')
    prosecutor = relationship(u'Prosecutor', primaryjoin='Filing.filing_prosecutor == Prosecutor.id', backref=u'filings')
    filingtype = relationship(u'Filingtype', secondary='filing_filingtype', backref=u'filings')
    payment = relationship(u'Payment', secondary='filing_payment', backref=u'filings')
# Association tables linking Filing to filing types and payments.
filing_filingtype = Table(
    'filing_filingtype', Model.metadata,
    Column('filing', ForeignKey(u'filing.id'), primary_key=True, nullable=False),
    Column('filingtype', ForeignKey(u'filingtype.id'), primary_key=True, nullable=False, index=True)
)

filing_payment = Table(
    'filing_payment', Model.metadata,
    Column('filing', ForeignKey(u'filing.id'), primary_key=True, nullable=False),
    Column('payment', ForeignKey(u'payment.id'), primary_key=True, nullable=False, index=True)
)
class Filingtype(RefTypeMixin, AuditMixin, Model):
    """Reference data: a filing type and its fee structure."""
    __tablename__ = 'filingtype'
    id = Column(Integer, primary_key=True, autoincrement=True)
    fees = Column(Numeric(12, 2))          # flat fee
    perpagecost = Column(Numeric(12, 2))   # per-page cost, when billed per page
    paid_per_page = Column(Boolean)        # whether fees scale with page count
class Gender(RefTypeMixin, AuditMixin, Model):
    """Reference data: gender lookup values."""
    __tablename__ = 'gender'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(20), nullable=False, unique=True)
class Hearing(ActivityMixin, AuditMixin, Model):
    """A court hearing on a case, including remand details and AV records."""
    __tablename__ = 'hearing'
    id = Column(Integer, primary_key=True, autoincrement=True)
    hearingdate = Column(DateTime, nullable=False)
    adjourned = Column(Boolean)
    case = Column(ForeignKey(u'case.id'), nullable=False, index=True)
    court = Column(ForeignKey(u'court.id'), nullable=False, index=True)
    remandwarrant = Column(Text)
    hearing_type = Column(ForeignKey(u'hearingtype.id'), nullable=False, index=True)
    remanddays = Column(Integer)
    remanddate = Column(Date)
    remandwarrantexpirydate = Column(Date)
    nexthearingdate = Column(Date)
    finalhearing = Column(Boolean, nullable=False)   # last hearing before judgement
    transcript = Column(Text)
    audio = Column(LargeBinary)    # raw audio recording of the hearing
    video = Column(LargeBinary)    # raw video recording of the hearing
    case1 = relationship(u'Case', primaryjoin='Hearing.case == Case.id', backref=u'hearings')
    court1 = relationship(u'Court', primaryjoin='Hearing.court == Court.id', backref=u'hearings')
    hearingtype = relationship(u'Hearingtype', primaryjoin='Hearing.hearing_type == Hearingtype.id', backref=u'hearings')
    # Many-to-many participants via the hearing_* association tables below.
    prosecutor = relationship(u'Prosecutor', secondary='hearing_prosecutor', backref=u'hearings')
    policeofficer = relationship(u'Policeofficer', secondary='hearing_policeofficer', backref=u'hearings')
    witness = relationship(u'Witnes', secondary='hearing_witness', backref=u'hearings')
    Lawyers = relationship(u'Lawyer', secondary='hearing_lawyer', backref=u'hearings')
    judicialofficer = relationship(u'Judicialofficer', secondary='hearing_judicialofficer', backref=u'hearings')
# Association tables linking Hearing to its participants.
hearing_judicialofficer = Table(
    'hearing_judicialofficer', Model.metadata,
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False),
    Column('judicialofficer', ForeignKey(u'judicialofficer.id'), primary_key=True, nullable=False, index=True)
)

hearing_lawyer = Table(
    'hearing_lawyer', Model.metadata,
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False),
    Column('lawyer', ForeignKey(u'Lawyers.id'), primary_key=True, nullable=False, index=True)
)

hearing_policeofficer = Table(
    'hearing_policeofficer', Model.metadata,
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False),
    Column('policeofficer', ForeignKey(u'policeofficer.id'), primary_key=True, nullable=False, index=True)
)

hearing_prosecutor = Table(
    'hearing_prosecutor', Model.metadata,
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False),
    Column('prosecutor', ForeignKey(u'prosecutor.id'), primary_key=True, nullable=False, index=True)
)

hearing_witness = Table(
    'hearing_witness', Model.metadata,
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False),
    Column('witness', ForeignKey(u'witness.id'), primary_key=True, nullable=False, index=True)
)
class Hearingtype(RefTypeMixin, AuditMixin, Model):
    """Reference data: a type of hearing (fields from RefTypeMixin)."""
    __tablename__ = 'hearingtype'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Investigation(PlaceMixin, AuditMixin, Model):
    """An investigation action taken on a case at a specific time and place."""
    __tablename__ = 'investigation'
    id = Column(Integer, primary_key=True, autoincrement=True)
    case = Column(ForeignKey(u'case.id'), nullable=False, index=True)
    actiondate = Column(DateTime, nullable=False)
    evidence = Column(Text, nullable=False)   # evidence collected
    narrative = Column(Text, nullable=False)  # investigator's narrative
    weather = Column(Text, nullable=False)    # conditions at the scene
    location = Column(Text, nullable=False)
    case1 = relationship(u'Case', primaryjoin='Investigation.case == Case.id', backref=u'investigations')
    policeofficer = relationship(u'Policeofficer', secondary='investigation_policeofficer', backref=u'investigations')
    witness = relationship(u'Witnes', secondary='investigation_witness', backref=u'investigations')
# Association tables linking Investigation to officers and witnesses.
investigation_policeofficer = Table(
    'investigation_policeofficer', Model.metadata,
    Column('investigation', ForeignKey(u'investigation.id'), primary_key=True, nullable=False),
    Column('policeofficer', ForeignKey(u'policeofficer.id'), primary_key=True, nullable=False, index=True)
)

investigation_witness = Table(
    'investigation_witness', Model.metadata,
    Column('investigation', ForeignKey(u'investigation.id'), primary_key=True, nullable=False),
    Column('witness', ForeignKey(u'witness.id'), primary_key=True, nullable=False, index=True)
)
class JoRank(RefTypeMixin, AuditMixin, Model):
    """Reference data: a judicial officer's rank and forms of address."""
    __tablename__ = 'jo_rank'
    id = Column(Integer, primary_key=True, autoincrement=True)
    appelation = Column(Text, nullable=False)       # formal form of address
    informaladdress = Column(Text, nullable=False)  # informal form of address
class Judicialofficer(PersonMixin, ContactMixin, AuditMixin, Model):
    """A judicial officer (judge/magistrate) attached to a court, with a rank."""
    __tablename__ = 'judicialofficer'

    def ViewName(self):
        """Return the conventional FAB view name for this model ('JudicialofficerView')."""
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Render the full-size photo as a linked Bootstrap thumbnail."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        else:
            # No photo stored: emit an empty image placeholder.
            return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                          '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Render the thumbnail-size photo as a linked Bootstrap thumbnail."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        else:
            return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                          '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Render a print button linking to this record's view.

        (Earlier PDF-rendering experiments removed; kept as a plain link.)
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip"' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """Render an HTML5 audio player for this record's audio resource."""
        vn = self.ViewName()
        # BUG FIX: the closing '>' of the <source> tag was outside the string
        # literal, which applied unary '+' to a str (TypeError at runtime).
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    # edit_form_extra_fields = {'field2': TextField('field2',
    #                           widget=BS3TextFieldROWidget())}

    id = Column(Integer, primary_key=True, autoincrement=True)
    j_o__rank = Column(ForeignKey(u'jo_rank.id'), nullable=False, index=True)
    gender = Column(ForeignKey(u'gender.id'), index=True)
    court = Column(ForeignKey(u'court.id'), nullable=False, index=True)
    court1 = relationship(u'Court', primaryjoin='Judicialofficer.court == Court.id', backref=u'judicialofficers')
    gender1 = relationship(u'Gender', primaryjoin='Judicialofficer.gender == Gender.id', backref=u'judicialofficers')
    jo_rank = relationship(u'JoRank', primaryjoin='Judicialofficer.j_o__rank == JoRank.id', backref=u'judicialofficers')
class Lawfirm(PlaceMixin, RefTypeMixin, AuditMixin, Model):
    """A law firm; name and location come from the mixins."""
    __tablename__ = 'lawfirm'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Natureofsuit(RefTypeMixin, AuditMixin, Model):
    """Reference data: the nature of a suit (fields from RefTypeMixin)."""
    __tablename__ = 'natureofsuit'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Payment(AuditMixin, Model):
    """A payment against a case (fees or bail), with confirmation details."""
    __tablename__ = 'payment'
    id = Column(Integer, primary_key=True, autoincrement=True)
    amountpaid = Column(Numeric(12, 2))
    datepaid = Column(DateTime)
    paymentreference = Column(String(80), nullable=False)
    paymentconfirmed = Column(Boolean)
    paidby = Column(Text, nullable=False)
    msisdn = Column(Text)                        # payer's mobile number (optional)
    receiptnumber = Column(String(100), nullable=False)
    ispartial = Column(Boolean)                  # partial payment flag
    bail = Column(ForeignKey(u'bail.id'), index=True)   # set when this pays bail
    billrefnumber = Column(Text, nullable=False)
    payment_method = Column(ForeignKey(u'paymentmethod.id'), nullable=False, index=True)
    paymentdescription = Column(Text, nullable=False)
    case = Column(ForeignKey(u'case.id'), nullable=False, index=True)
    bail1 = relationship(u'Bail', primaryjoin='Payment.bail == Bail.id', backref=u'payments')
    case1 = relationship(u'Case', primaryjoin='Payment.case == Case.id', backref=u'payments')
    paymentmethod = relationship(u'Paymentmethod', primaryjoin='Payment.payment_method == Paymentmethod.id', backref=u'payments')
class Paymentmethod(RefTypeMixin, AuditMixin, Model):
    # A payment channel/gateway configuration.
    # NOTE(review): 'key' and 'secret' look like API credentials stored in
    # plain-text columns - consider encryption or a secrets store.
    __tablename__ = 'paymentmethod'
    id = Column(Integer, primary_key=True, autoincrement=True)
    key = Column(Text, nullable=False)
    secret = Column(Text, nullable=False)
    portal = Column(Text, nullable=False)
    tillnumber = Column(Text, nullable=False)
    shortcode = Column(Text, nullable=False)
class Plaintiff(PersonMixin, ContactMixin, AuditMixin, Model):
    """A plaintiff in a case.

    Person/contact fields come from the mixins; this model adds the gender
    link, a juvenile flag, and the HTML helpers used by the list/show views.
    """
    __tablename__ = 'plaintiff'

    def ViewName(self):
        # Flask-AppBuilder convention: the view class is '<ModelName>View'.
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Full-size photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        # No photo stored: link with an empty-image placeholder.
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Thumbnail photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Button linking to this model's view (PDF export not yet wired up).

        Bug fix: a space now separates rel="tooltip" from title="Print";
        the literals previously concatenated into one malformed attribute.
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip" ' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """HTML5 audio player for this model's view URL.

        Bug fix: the '>' closing the <source> tag used to sit outside the
        string literal, so the expression applied unary '+' to a str and
        raised TypeError whenever this helper ran.
        """
        vn = self.ViewName()
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    id = Column(Integer, primary_key=True, autoincrement=True)
    gender = Column(ForeignKey(u'gender.id'), index=True)
    juvenile = Column(Boolean)
    gender1 = relationship(u'Gender', primaryjoin='Plaintiff.gender == Gender.id', backref=u'plaintiffs')
class Policeofficer(PersonMixin, ContactMixin, AuditMixin, Model):
    """A police officer, linked to a rank, gender and (many-to-many) roles."""
    __tablename__ = 'policeofficer'

    def ViewName(self):
        # Flask-AppBuilder convention: the view class is '<ModelName>View'.
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Full-size photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        # No photo stored: link with an empty-image placeholder.
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Thumbnail photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Button linking to this model's view (PDF export not yet wired up).

        Bug fix: a space now separates rel="tooltip" from title="Print";
        the literals previously concatenated into one malformed attribute.
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip" ' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """HTML5 audio player for this model's view URL.

        Bug fix: the '>' closing the <source> tag used to sit outside the
        string literal, so the expression applied unary '+' to a str and
        raised TypeError whenever this helper ran.
        """
        vn = self.ViewName()
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    id = Column(Integer, primary_key=True, autoincrement=True)
    police_rank = Column(ForeignKey(u'policerank.id'), nullable=False, index=True)
    gender = Column(ForeignKey(u'gender.id'), nullable=False, index=True)
    servicenumber = Column(Text)
    gender1 = relationship(u'Gender', primaryjoin='Policeofficer.gender == Gender.id', backref=u'policeofficers')
    policerank = relationship(u'Policerank', primaryjoin='Policeofficer.police_rank == Policerank.id', backref=u'policeofficers')
    policerole = relationship(u'Policerole', secondary='policeofficer_policerole', backref=u'policeofficers')
# Association table: many-to-many link between police officers and roles.
policeofficer_policerole = Table(
    'policeofficer_policerole', Model.metadata,
    Column('policeofficer', ForeignKey(u'policeofficer.id'), primary_key=True, nullable=False),
    Column('policerole', ForeignKey(u'policerole.id'), primary_key=True, nullable=False, index=True)
)
class Policerank(RefTypeMixin, AuditMixin, Model):
    # Reference/lookup table: police ranks (labels via RefTypeMixin).
    __tablename__ = 'policerank'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Policerole(RefTypeMixin, AuditMixin, Model):
    # Reference/lookup table: police roles (labels via RefTypeMixin).
    __tablename__ = 'policerole'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Policestation(PlaceMixin, AuditMixin, Model):
    # A police station located in a town, of a given station type.
    __tablename__ = 'policestation'
    id = Column(Integer, primary_key=True, autoincrement=True)
    town = Column(ForeignKey(u'town.id'), nullable=False, index=True)
    officercommanding = Column(String(100))  # free-text name of the officer commanding
    police_station_type = Column(ForeignKey(u'policestationtype.id'), nullable=False, index=True)
    policestationtype = relationship(u'Policestationtype', primaryjoin='Policestation.police_station_type == Policestationtype.id', backref=u'policestations')
    town1 = relationship(u'Town', primaryjoin='Policestation.town == Town.id', backref=u'policestations')
class Policestationtype(RefTypeMixin, AuditMixin, Model):
    # Reference/lookup table: police station types (labels via RefTypeMixin).
    __tablename__ = 'policestationtype'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Prison(PlaceMixin, AuditMixin, Model):
    # A prison facility with headline capacity/occupancy figures and the
    # security ranks it supports (many-to-many via prison_securityrank).
    __tablename__ = 'prison'
    id = Column(Integer, primary_key=True, autoincrement=True)
    town = Column(ForeignKey(u'town.id'), nullable=False, index=True)
    warden = Column(String(100))  # free-text name of the warden
    capacity = Column(Integer)
    population = Column(Integer)
    cellcount = Column(Integer)
    town1 = relationship(u'Town', primaryjoin='Prison.town == Town.id', backref=u'prisons')
    securityrank = relationship(u'Securityrank', secondary='prison_securityrank', backref=u'prisons')
# Association table: many-to-many link between prisons and security ranks.
prison_securityrank = Table(
    'prison_securityrank', Model.metadata,
    Column('prison', ForeignKey(u'prison.id'), primary_key=True, nullable=False),
    Column('securityrank', ForeignKey(u'securityrank.id'), primary_key=True, nullable=False, index=True)
)
class Prisoncommital(ActivityMixin, AuditMixin, Model):
    # Committal of a defendant to prison under a warrant. Keyed by the
    # composite (prison, warrantno) pair rather than a surrogate id.
    __tablename__ = 'prisoncommital'
    prison = Column(ForeignKey(u'prison.id'), primary_key=True, nullable=False)
    warrantno = Column(String(100), primary_key=True, nullable=False)
    defendant = Column(ForeignKey(u'defendant.id'), nullable=False, index=True)
    hearing = Column(ForeignKey(u'hearing.id'), nullable=False, index=True)
    warrantduration = Column(Integer, nullable=False)  # NOTE(review): units (days?) not evident here - confirm
    warrantdate = Column(DateTime)
    warrant = Column(Text, nullable=False)
    warrantexpiry = Column(DateTime, nullable=False)
    history = Column(Text, nullable=False)
    earliestrelease = Column(Date)
    releasedate = Column(DateTime)
    property = Column(Text)  # NOTE: shadows the builtin 'property' as a class attribute; kept for schema compatibility
    itemcount = Column(Integer)
    releasenotes = Column(Text)
    commitalnotes = Column(Text)
    police_officer_commiting = Column(ForeignKey(u'policeofficer.id'), nullable=False, index=True)
    defendant1 = relationship(u'Defendant', primaryjoin='Prisoncommital.defendant == Defendant.id', backref=u'prisoncommitals')
    hearing1 = relationship(u'Hearing', primaryjoin='Prisoncommital.hearing == Hearing.id', backref=u'prisoncommitals')
    policeofficer = relationship(u'Policeofficer', primaryjoin='Prisoncommital.police_officer_commiting == Policeofficer.id', backref=u'prisoncommitals')
    prison1 = relationship(u'Prison', primaryjoin='Prisoncommital.prison == Prison.id', backref=u'prisoncommitals')
    warder = relationship(u'Warder', secondary='prisoncommital_warder', backref=u'prisoncommitals')
# Association table: warders involved in a committal. Uses a composite
# foreign key back to prisoncommital's (prison, warrantno) primary key.
prisoncommital_warder = Table(
    'prisoncommital_warder', Model.metadata,
    Column('prisoncommital_prison', Integer, primary_key=True, nullable=False),
    Column('prisoncommital_warrantno', String(100), primary_key=True, nullable=False),
    Column('warder', ForeignKey(u'warder.id'), primary_key=True, nullable=False, index=True),
    ForeignKeyConstraint(['prisoncommital_prison', 'prisoncommital_warrantno'], [u'prisoncommital.prison', u'prisoncommital.warrantno'])
)
class Prosecutor(PersonMixin, ContactMixin, AuditMixin, Model):
    """A prosecutor; may belong to prosecution teams (many-to-many)."""
    __tablename__ = 'prosecutor'

    def ViewName(self):
        # Flask-AppBuilder convention: the view class is '<ModelName>View'.
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Full-size photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        # No photo stored: link with an empty-image placeholder.
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Thumbnail photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Button linking to this model's view (PDF export not yet wired up).

        Bug fix: a space now separates rel="tooltip" from title="Print";
        the literals previously concatenated into one malformed attribute.
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip" ' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """HTML5 audio player for this model's view URL.

        Bug fix: the '>' closing the <source> tag used to sit outside the
        string literal, so the expression applied unary '+' to a str and
        raised TypeError whenever this helper ran.
        """
        vn = self.ViewName()
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    id = Column(Integer, primary_key=True, autoincrement=True)
    gender = Column(ForeignKey(u'gender.id'), index=True)
    gender1 = relationship(u'Gender', primaryjoin='Prosecutor.gender == Gender.id', backref=u'prosecutors')
    prosecutorteam = relationship(u'Prosecutorteam', secondary='prosecutor_prosecutorteam', backref=u'prosecutors')
# Association table: many-to-many link between prosecutors and teams.
prosecutor_prosecutorteam = Table(
    'prosecutor_prosecutorteam', Model.metadata,
    Column('prosecutor', ForeignKey(u'prosecutor.id'), primary_key=True, nullable=False),
    Column('prosecutorteam', ForeignKey(u'prosecutorteam.id'), primary_key=True, nullable=False, index=True)
)
class Prosecutorteam(RefTypeMixin, AuditMixin, Model):
    # Reference/lookup table: prosecution teams (labels via RefTypeMixin).
    __tablename__ = 'prosecutorteam'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Securityrank(RefTypeMixin, AuditMixin, Model):
    # Reference/lookup table: prison security ranks (labels via RefTypeMixin).
    __tablename__ = 'securityrank'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Subcounty(AuditMixin, Model):
    # A sub-county within a county (note the plural backref 'subcounties').
    __tablename__ = 'subcounty'
    id = Column(Integer, primary_key=True, autoincrement=True)
    county = Column(ForeignKey(u'county.id'), nullable=False, index=True)
    county1 = relationship(u'County', primaryjoin='Subcounty.county == County.id', backref=u'subcounties')
class Surety(PersonMixin, ContactMixin, AuditMixin, Model):
    """A surety record (backref 'sureties' on Gender)."""
    __tablename__ = 'surety'

    def ViewName(self):
        # Flask-AppBuilder convention: the view class is '<ModelName>View'.
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Full-size photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        # No photo stored: link with an empty-image placeholder.
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Thumbnail photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Button linking to this model's view (PDF export not yet wired up).

        Bug fix: a space now separates rel="tooltip" from title="Print";
        the literals previously concatenated into one malformed attribute.
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip" ' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """HTML5 audio player for this model's view URL.

        Bug fix: the '>' closing the <source> tag used to sit outside the
        string literal, so the expression applied unary '+' to a str and
        raised TypeError whenever this helper ran.
        """
        vn = self.ViewName()
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    id = Column(Integer, primary_key=True, autoincrement=True)
    gender = Column(ForeignKey(u'gender.id'), index=True)
    gender1 = relationship(u'Gender', primaryjoin='Surety.gender == Gender.id', backref=u'sureties')
class Town(RefTypeMixin, AuditMixin, Model):
    # A town within a sub-county (name/labels via RefTypeMixin).
    __tablename__ = 'town'
    id = Column(Integer, primary_key=True, autoincrement=True)
    subcounty = Column(ForeignKey(u'subcounty.id'), nullable=False, index=True)
    subcounty1 = relationship(u'Subcounty', primaryjoin='Town.subcounty == Subcounty.id', backref=u'towns')
class Visit(ActivityMixin, AuditMixin, Model):
    # A visitor's visit to a defendant; composite key (vistors, defendants).
    # NOTE: the 'vistor'/'vistors' spelling is kept - it matches the
    # referenced table/column names elsewhere in the schema.
    __tablename__ = 'visit'
    vistors = Column(ForeignKey(u'vistor.id'), primary_key=True, nullable=False)
    defendants = Column(ForeignKey(u'defendant.id'), primary_key=True, nullable=False, index=True)
    visitdate = Column(DateTime)
    visitnotes = Column(Text)
    visitduration = Column(INTERVAL)  # PostgreSQL-specific interval type
    defendant = relationship(u'Defendant', primaryjoin='Visit.defendants == Defendant.id', backref=u'visits')
    vistor = relationship(u'Vistor', primaryjoin='Visit.vistors == Vistor.id', backref=u'visits')
class Vistor(AuditMixin, Model):
    # A visitor record (misspelling 'Vistor' kept for schema compatibility).
    __tablename__ = 'vistor'
    id = Column(Integer, primary_key=True, autoincrement=True)
    gender = Column(ForeignKey(u'gender.id'), nullable=False, index=True)
    gender1 = relationship(u'Gender', primaryjoin='Vistor.gender == Gender.id', backref=u'vistors')
class Warder(PersonMixin, ContactMixin, AuditMixin, Model):
    """A prison warder, attached to a prison and a warder rank."""
    __tablename__ = 'warder'

    def ViewName(self):
        # Flask-AppBuilder convention: the view class is '<ModelName>View'.
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Full-size photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        # No photo stored: link with an empty-image placeholder.
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Thumbnail photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Button linking to this model's view (PDF export not yet wired up).

        Bug fix: a space now separates rel="tooltip" from title="Print";
        the literals previously concatenated into one malformed attribute.
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip" ' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """HTML5 audio player for this model's view URL.

        Bug fix: the '>' closing the <source> tag used to sit outside the
        string literal, so the expression applied unary '+' to a str and
        raised TypeError whenever this helper ran.
        """
        vn = self.ViewName()
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    id = Column(Integer, primary_key=True, autoincrement=True)
    prison = Column(ForeignKey(u'prison.id'), nullable=False, index=True)
    warder_rank = Column(ForeignKey(u'warderrank.id'), nullable=False, index=True)
    prison1 = relationship(u'Prison', primaryjoin='Warder.prison == Prison.id', backref=u'warders')
    warderrank = relationship(u'Warderrank', primaryjoin='Warder.warder_rank == Warderrank.id', backref=u'warders')
class Warderrank(RefTypeMixin, AuditMixin, Model):
    # Reference/lookup table: warder ranks (labels via RefTypeMixin).
    __tablename__ = 'warderrank'
    id = Column(Integer, primary_key=True, autoincrement=True)
class Witnes(PersonMixin, ContactMixin, AuditMixin, Model):
    """A witness in a case.

    The class name 'Witnes' and backref 'witness' are kept as-is for
    compatibility with the existing schema and callers.
    """
    __tablename__ = 'witness'

    def ViewName(self):
        # Flask-AppBuilder convention: the view class is '<ModelName>View'.
        return self.__class__.__name__ + 'View'

    def photo_img(self):
        """Full-size photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        # No photo stored: link with an empty-image placeholder.
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def photo_img_thumbnail(self):
        """Thumbnail photo wrapped in a link to this record's show view."""
        im = ImageManager()
        vn = self.ViewName()
        if self.photo:
            return Markup('<a href="' + url_for(vn + '.show', pk=str(self.id)) +
                          '" class="thumbnail"><img src="' + im.get_url_thumbnail(self.photo) +
                          '" alt="Photo" class="img-rounded img-responsive"></a>')
        return Markup('<a href="' + url_for(vn, pk=str(self.id)) +
                      '" class="thumbnail"><img src="//:0" alt="Photo" class="img-responsive"></a>')

    def print_button(self):
        """Button linking to this model's view (PDF export not yet wired up).

        Bug fix: a space now separates rel="tooltip" from title="Print";
        the literals previously concatenated into one malformed attribute.
        """
        vn = self.ViewName()
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip" ' +
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')

    def audio_play(self):
        """HTML5 audio player for this model's view URL.

        Bug fix: the '>' closing the <source> tag used to sit outside the
        string literal, so the expression applied unary '+' to a str and
        raised TypeError whenever this helper ran.
        """
        vn = self.ViewName()
        return Markup(
            '<audio controls>' +
            '<source src="' + url_for(vn) + '" type="audio/mpeg">' +
            '<i class="fa fa-volume-up"></i>' +
            'Your browser does not support the audio element.' +
            '</audio>'
        )

    id = Column(Integer, primary_key=True, autoincrement=True)
    fordefense = Column(Boolean)  # True when the witness appears for the defense (inferred from name - verify)
    gender = Column(ForeignKey(u'gender.id'), nullable=False, index=True)
    gender1 = relationship(u'Gender', primaryjoin='Witnes.gender == Gender.id', backref=u'witness')
| [
"[email protected]"
]
| |
9a0f2585c8786ae68cdb437ea210b0230321d96c | f71aecb0e91fe877af3ec652c7f6753a1e7b5ccd | /RemoveComments_MID_722.py | 5fd160cab533466e6146d8361f29931987c5447a | []
| no_license | 953250587/leetcode-python | 036ad83154bf1fce130d41220cf2267856c7770d | 679a2b246b8b6bb7fc55ed1c8096d3047d6d4461 | refs/heads/master | 2020-04-29T12:01:47.084644 | 2019-03-29T15:50:45 | 2019-03-29T15:50:45 | 176,122,880 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,195 | py | """
Given a C++ program, remove comments from it. The program source is an array where source[i] is the i-th line of the source code. This represents the result of splitting the original source code string by the newline character \n.
In C++, there are two types of comments, line comments, and block comments.
The string // denotes a line comment, which represents that it and rest of the characters to the right of it in the same line should be ignored.
The string /* denotes a block comment, which represents that all characters until the next (non-overlapping) occurrence of */ should be ignored. (Here, occurrences happen in reading order: line by line from left to right.) To be clear, the string /*/ does not yet end the block comment, as the ending would be overlapping the beginning.
The first effective comment takes precedence over others: if the string // occurs in a block comment, it is ignored. Similarly, if the string /* occurs in a line or block comment, it is also ignored.
If a certain line of code is empty after removing comments, you must not output that line: each string in the answer list will be non-empty.
There will be no control characters, single quote, or double quote characters. For example, source = "string s = "/* Not a comment. */";" will not be a test case. (Also, nothing else such as defines or macros will interfere with the comments.)
It is guaranteed that every open block comment will eventually be closed, so /* outside of a line or block comment always starts a new comment.
Finally, implicit newline characters can be deleted by block comments. Please see the examples below for details.
After removing the comments from the source code, return the source code in the same format.
Example 1:
Input:
source = ["/*Test program */", "int main()", "{ ", " // variable declaration ", "int a, b, c;", "/* This is a test", " multiline ", " comment for ", " testing */", "a = b + c;", "}"]
The line by line code is visualized as below:
/*Test program */
int main()
{
// variable declaration
int a, b, c;
/* This is a test
multiline
comment for
testing */
a = b + c;
}
Output: ["int main()","{ "," ","int a, b, c;","a = b + c;","}"]
The line by line code is visualized as below:
int main()
{
int a, b, c;
a = b + c;
}
Explanation:
The string
/*
denotes a block comment, including line 1 and lines 6-9. The string
//
denotes line 4 as comments.
Example 2:
Input:
source = ["a/*comment", "line", "more_comment*/b"]
Output: ["ab"]
Explanation: The original source string is "a/*comment\nline\nmore_comment*/b", where we have bolded the newline characters. After deletion, the implicit newline characters are deleted, leaving the string "ab", which when delimited by newline characters becomes ["ab"].
Note:
The length of source is in the range [1, 100].
The length of source[i] is in the range [0, 80].
Every open block comment is eventually closed.
There are no single-quote, double-quote, or control characters in the source code.
"""
class Solution(object):
    def removeComments(self, source):
        """
        Remove C++ line ('//') and block ('/* ... */') comments.

        :type source: List[str]
        :rtype: List[str]

        Scans line by line, tracking whether we are inside a block comment
        (``block``) and whether the next emitted fragment must be joined
        onto the previous output line (``union`` - this models the
        implicit-newline removal of a multi-line ``/* ... */``).

        Note: ``source`` is used as a work queue - the remainder of a line
        after ``/*`` or ``*/`` is pushed back for re-scanning - so the
        caller's list is mutated.

        Improvement over the previous version: the leftover debug
        ``print(...)`` calls were removed; the algorithm is unchanged.
        """
        result = []
        block = False
        union = False
        start = 0
        while start < len(source):
            line = source[start]
            start += 1
            if not block and '//' in line and '/*' in line:
                # Both markers present: whichever occurs first wins.
                s = line.split('//', 1)
                if '/*' not in s[0]:
                    # '//' comes first: keep only the code before it.
                    s = line.split('//', 1)
                    if len(s[0]) >= 1:
                        if union:
                            result[-1] += s[0]
                            union = False
                        else:
                            result.append(s[0])
                            union = False
                else:
                    # '/*' comes first: enter a block comment and requeue
                    # the remainder so a closing '*/' on it is found.
                    block = True
                    s = line.split('/*', 1)
                    if len(s[0]) >= 1:
                        if union:
                            result[-1] += s[0]
                        else:
                            union = True
                            result.append(s[0])
                    source.insert(start, s[1])
            elif not block and '//' in line:
                s = line.split('//', 1)
                if len(s[0]) >= 1:
                    if union:
                        result[-1] += s[0]
                    else:
                        result.append(s[0])
                    union = False
            elif not block and '/*' in line:
                block = True
                s = line.split('/*', 1)
                if len(s[0]) >= 1:
                    if union:
                        result[-1] += s[0]
                    else:
                        union = True
                        result.append(s[0])
                source.insert(start, s[1])
            elif block and '*/' in line:
                # Close the block comment; requeue whatever follows '*/'.
                end = line.split('*/', 1)[1]
                source.insert(start, end)
                block = False
            elif not block:
                if union:
                    result[-1] += line
                    union = False
                else:
                    if len(line) >= 1:
                        result.append(line)
            # else: inside an unterminated block comment - drop the line.
        return result
# source = ["/*Test program */",
# "int main()",
# "{ ",
# " // variable declaration ",
# "int a, b, c;",
# "",
# "/* This is a test",
# " multiline ",
# " comment for ",
# " testing */",
# "a = b + c;",
# "}"]
# a = Solution().removeComments(source)
# for i in a:
# print(i)
#
# source = ["a/*comment", "line", "more_comment*/b"]
# a = Solution().removeComments(source)
# for i in a:
# print(i)
#
# source = ["a//*b//*c","blank","d/*/e*//f"]
# a = Solution().removeComments(source)
# for i in a:
# print(i)
#
# source = ["a/*/b//*c","blank","d/*/e*//f"]
# a = Solution().removeComments(source)
# for i in a:
# print(i)
#
# source = ["class test{",
# "public: ",
# " int x = 1;",
# " /*double y = 1;*/",
# " char c;", "};"]
# a = Solution().removeComments(source)
# for i in a:
# print(i)
# Quick manual check of the tricky '/*/' and '/**/' edge cases.
source = ['d/*/aee*//d/********/']
cleaned = Solution().removeComments(source)
for line in cleaned:
    print(line)
def _find_comment(line):
for i, ch in enumerate(line):
if ch == '/' and i + 1 < len(line):
ch = line[i + 1]
if ch == '/' or ch == '*':
return i
return -1
# O(n) time. O(1) space.
class Solution(object):
    def removeComments(self, source):
        """
        Remove C++ line ('//') and block ('/* ... */') comments in place.

        :type source: List[str]
        :rtype: List[str]

        Edits ``source`` destructively (lines are truncated, merged or
        popped) and returns the same list. Originally benchmarked at 36ms.
        """
        row = 0
        while row < len(source):
            line = source[row]
            lo = _find_comment(line)  # index of the first '//' or '/*', or -1
            if lo == -1:
                # No comment marker on this line - keep it as-is.
                row += 1
                continue
            if line[lo + 1] == '/':
                # Line comment: drop the whole line if it is pure comment,
                # otherwise keep only the code before '//'.
                if lo == 0:
                    source.pop(row)
                else:
                    source[row] = line[:lo]
                    row += 1
                continue
            # Block comment opening. Look for a close on the same line;
            # the search starts at lo + 2 so '/*/' does not self-close.
            hi = line.find('*/', lo + 2)
            if hi != -1:
                if lo == 0 and hi + 2 == len(line):
                    source.pop(row)
                else:
                    # Splice the comment out and re-scan the merged line.
                    source[row] = line[:lo] + line[hi + 2:]
                continue
            # Multi-line block comment: keep any code before '/*' ...
            if lo == 0:
                source.pop(row)
            else:
                source[row] = line[:lo]
                row += 1
            # ... then consume following lines until the closing '*/'.
            while row < len(source):
                line = source[row]
                hi = line.find('*/')
                if hi == -1:
                    source.pop(row)
                    continue
                if hi + 2 == len(line):
                    source.pop(row)
                else:
                    # 'lo' still refers to the opening line here: if the
                    # comment started at column 0 the remainder stands on
                    # its own line, otherwise it is glued back onto the
                    # truncated opening line (implicit newline removal).
                    if lo == 0:
                        source[row] = line[hi + 2:]
                    else:
                        source.pop(row)
                        row -= 1
                        source[row] += line[hi + 2:]
                break
        return source
"[email protected]"
]
| |
139ae4368f9dcc52c84dcbfbcab84a8112ca406a | 727987094c01eaf41343464567a52fbb705f6701 | /yabgp/message/attribute/nlri/__init__.py | c67e29f0a1e9105cce0eecd0e3eebb32ea38ff2a | [
"Apache-2.0"
]
| permissive | xinwu/yabgp | 1377d11e4e42f259dd66bb08060b74d0683a1796 | ae7cc871a4a8a67d08eef2abc82cf1397f2601c3 | refs/heads/master | 2021-01-13T03:05:33.031083 | 2016-12-14T16:27:23 | 2016-12-14T16:27:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,558 | py | # Copyright 2016 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import struct
import binascii
import netaddr
class NLRI(object):
    """Base class for NLRI (Network Layer Reachability Information) codecs."""

    @classmethod
    def parse(cls, *args):
        """Decode NLRI from wire bytes. Subclasses must override."""
        raise NotImplementedError

    @classmethod
    def construct(cls, *args):
        """Encode NLRI to wire bytes. Subclasses must override."""
        raise NotImplementedError

    @staticmethod
    def construct_prefix_v4(masklen, prefix_str):
        """Return the significant leading octets of an IPv4 prefix.

        Per the BGP UPDATE encoding, only ceil(masklen / 8) octets of the
        prefix are carried on the wire.
        """
        ip_hex = struct.pack('!I', netaddr.IPNetwork(prefix_str).value)
        if 16 < masklen <= 24:
            ip_hex = ip_hex[0:3]
        elif 8 < masklen <= 16:
            ip_hex = ip_hex[0:2]
        elif masklen <= 8:
            ip_hex = ip_hex[0:1]
        return ip_hex

    @staticmethod
    def construct_prefix_v6(prefix):
        """Return the significant leading octets of an IPv6 prefix string
        such as '2001:db8::/32'.

        Bug fixes over the previous implementation:
        * ``hex(...)[2:]`` could yield an odd-length (or 'L'-suffixed)
          string, which ``binascii.unhexlify`` rejects; a zero-padded
          '%032x' format is always 32 hex digits (16 octets).
        * the slice length was ``(mask // 8) + (mask % 8)``, which
          over-counts whenever ``mask % 8 > 1`` (e.g. a /20 produced
          6 octets instead of 3); the wire format needs exactly
          ceil(mask / 8) octets.
        """
        mask = int(prefix.split('/')[1])
        prefix_hex = binascii.unhexlify('%032x' % int(netaddr.IPNetwork(prefix).ip))
        n_octets, remainder = divmod(mask, 8)
        if remainder:
            n_octets += 1
        return prefix_hex[0:n_octets]
| [
"[email protected]"
]
| |
5aac12f50a7d3c33b9c2797a1d90192b97b9ea24 | c78e61ccee6ac695d3f71f72fc3212fdd2c1d193 | /cfed_timestep_comp.py | d2ac35905b1a4a0d651d5ae895ee6e77bc35a7ae | []
| no_license | bbw7561135/phd_code | 28557e84228119dd204f9e16ca27d7c7cef81188 | ef06c317115f0744a7941796c4092e489923ef4e | refs/heads/master | 2021-06-13T02:35:08.475474 | 2017-02-26T21:12:17 | 2017-02-26T21:12:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 55,751 | py | #------------------------------------------------------------------------------#
# #
# This code is a Python script that reads in arrays of synchrotron intensity #
# produced at different times in the evolution of the simulation, and #
# calculates the normalised correlation functions, structure functions, #
# and quadrupole ratios of the synchrotron intensity maps, for different #
# lines of sight. Plots are then produced of the normalised correlation #
# functions, structure functions, quadrupole ratios. This code is intended to #
# be used with simulations produced by Christoph Federrath. #
# #
# Author: Chris Herron #
# Start Date: 20/1/2016 #
# #
#------------------------------------------------------------------------------#
# First import numpy for array handling, matplotlib for plotting, astropy.io
# for fits manipulation, scipy.stats for calculating statistical quantities
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from scipy import stats
# Import the functions that calculate the structure and correlation functions
# using FFT, as well as the function that calculates the radially averaged
# structure or correlation functions. Also import the function that calculates
# multipoles of the 2D structure functions, and the function that calculates the
# magnitude and argument of the quadrupole ratio
from sf_fft import sf_fft
from cf_fft import cf_fft
from sfr import sfr
from calc_multipole_2D import calc_multipole_2D
from calc_quad_ratio import calc_quad_ratio
# Define a function that calculates the errors in statistics by breaking up
# synchrotron images into quarters, calculating statistics for each quarter, and
# then calculates the standard deviation of the statistics.
def calc_err_bootstrap(sync_map, log = False):
	'''
	Description
		This function divides the given image into quarters, and then
		calculates statistics for each quarter. The standard error of the
		mean of the statistics calculated for the four quarters is then
		returned, representing the error on each statistic.

	Required Input
		sync_map - The synchrotron intensity map. Should be a 2D Numpy array,
				   with even side lengths (each axis is split in half).
		log - A boolean value. If True, then the moments are calculated for
			  the logarithm of the PDF, and not the PDF itself. Note that,
			  unlike the main script below, the flattened image is NOT
			  normalised by its mean before the logarithm is taken.

	Output
		mean_err - The error calculated for the mean of synchrotron intensity
		stdev_err - The error calculated for the standard deviation of the
					synchrotron intensity
		skew_err - The error calculated for the skewness of synchrotron
				   intensity
		kurt_err - The error calculated for the kurtosis of synchrotron
				   intensity
		m_err - The error calculated for the structure function slope of the
				synchrotron intensity
		residual_err - The error calculated for the residual of the linear fit
					   to the structure function of synchrotron intensity
		int_quad_err - The error calculated for the integrated quadrupole ratio
					   modulus of the synchrotron intensity
	'''

	# Create an array that will hold the quarters of the synchrotron images.
	# Use floor division so that the shape entries are integers: modern numpy
	# raises a TypeError for float shapes, and // is identical to / here under
	# Python 2 integer semantics.
	quarter_arr = np.zeros((4,np.shape(sync_map)[0]//2,np.shape(sync_map)[1]//2))

	# Add the quarters of the images into the array. The image is split in
	# half along the vertical axis, and each half is split along the
	# horizontal axis.
	quarter_arr[0], quarter_arr[1] = np.split(np.split(sync_map,2,axis=0)[0],2,axis=1)
	quarter_arr[2], quarter_arr[3] = np.split(np.split(sync_map,2,axis=0)[1],2,axis=1)

	# Create arrays that will hold the calculated statistics for each quarter
	mean_val = np.zeros(np.shape(quarter_arr)[0])
	stdev_val = np.zeros(np.shape(quarter_arr)[0])
	skew_val = np.zeros(np.shape(quarter_arr)[0])
	kurt_val = np.zeros(np.shape(quarter_arr)[0])
	m_val = np.zeros(np.shape(quarter_arr)[0])
	resid_val = np.zeros(np.shape(quarter_arr)[0])
	int_quad_val = np.zeros(np.shape(quarter_arr)[0])

	# Loop over the quarters, to calculate statistics for each one
	for i in range(np.shape(quarter_arr)[0]):
		# Extract the current image quarter from the array
		image = quarter_arr[i]

		# Flatten the image, so that we can calculate the skewness and kurtosis
		flat_image = image.flatten()

		# If we are calculating moments of the log PDFs, then calculate the
		# logarithm of the flat image
		if log == True:
			# In this case we are calculating the moments of the log PDFs, so
			# calculate the log PDFs
			flat_image = np.log10(flat_image)

		# Calculate the mean of the synchrotron intensity map
		mean_val[i] = np.mean(flat_image, dtype=np.float64)

		# Calculate the standard deviation of the synchrotron intensity map
		stdev_val[i] = np.std(flat_image, dtype=np.float64)

		# Calculate the biased skewness of the synchrotron intensity map
		skew_val[i] = stats.skew(flat_image)

		# Calculate the biased Fisher kurtosis of the synchrotron intensity
		# maps
		kurt_val[i] = stats.kurtosis(flat_image)

		# Calculate the structure function (two-dimensional) of the synchrotron
		# intensity map. Note that no_fluct = True is set, because we are not
		# subtracting the mean from the synchrotron maps before calculating the
		# structure function.
		strfn = sf_fft(image, no_fluct = True)

		# Radially average the calculated 2D structure function, using the
		# specified number of bins (num_bins is a module-level global).
		rad_sf = sfr(strfn, num_bins, verbose = False)

		# Extract the calculated radially averaged structure function
		sf = rad_sf[1]

		# Extract the radius values used to calculate this structure function.
		sf_rad_arr = rad_sf[0]

		# Fit the spectral index of the structure function. Only bins 5-13 are
		# used in the fit, as this is the part of the structure function that
		# is close to a straight line in log-log space.
		spec_ind_data = np.polyfit(np.log10(\
			sf_rad_arr[5:14]),\
			np.log10(sf[5:14]), 1, full = True)

		# Extract the returned coefficients from the polynomial fit
		coeff = spec_ind_data[0]

		# Extract the sum of the residuals from the polynomial fit
		resid_val[i] = spec_ind_data[1]

		# Enter the value of m, the slope of the structure function minus 1,
		# into the corresponding array
		m_val[i] = coeff[0]-1.0

		# Calculate the normalised 2D structure function for this quarter,
		# which only takes values between 0 and 2. As above, no_fluct = True
		# because the mean is not subtracted first.
		norm_strfn = sf_fft(image, no_fluct = True, normalise = True)

		# Shift the 2D structure function so that the zero radial separation
		# entry is in the centre of the image.
		norm_strfn = np.fft.fftshift(norm_strfn)

		# Calculate the magnitude and argument of the quadrupole ratio
		quad_mod, quad_arg, quad_rad = calc_quad_ratio(norm_strfn, num_bins)

		# Integrate the magnitude of the quadrupole / monopole ratio over
		# radial separation bins 8 to 22 inclusive, with respect to log
		# separation (the points are equally separated in log space, so a
		# unit spacing is used). The integral is normalised by the number of
		# increments in log radial separation used in the calculation, to give
		# an average value. This matches the calculation in the main script.
		int_quad_val[i] = np.trapz(quad_mod[8:23], dx = 1.0)\
		 / (22 - 8)

	# At this point, the statistics have been calculated for each quarter
	# The next step is to calculate the standard error of the mean of each
	# statistic
	mean_err = np.std(mean_val) / np.sqrt(len(mean_val))
	stdev_err = np.std(stdev_val) / np.sqrt(len(stdev_val))
	skew_err = np.std(skew_val) / np.sqrt(len(skew_val))
	kurt_err = np.std(kurt_val) / np.sqrt(len(kurt_val))
	m_err = np.std(m_val) / np.sqrt(len(m_val))
	residual_err = np.std(resid_val) / np.sqrt(len(resid_val))
	int_quad_err = np.std(int_quad_val) / np.sqrt(len(int_quad_val))

	# Now that all of the calculations have been performed, return the
	# calculated errors
	return mean_err,stdev_err,skew_err,kurt_err,m_err,residual_err, int_quad_err
# Set a variable to hold the number of bins to use in calculating the
# correlation functions, structure functions, and quadrupole ratios.
# NOTE: this is also read as a global by calc_err_bootstrap above.
num_bins = 25

# Create a variable that controls whether the moments of the log PDFs are
# calculated (True), instead of the moments of the PDFs themselves
log = True

# Create a string for the directory that contains the simulated magnetic fields
# and synchrotron intensity maps to use. Output plots are also saved here.
simul_loc = '/Users/chrisherron/Documents/PhD/CFed_2016/'

# Create a string for the specific simulated data sets to use in calculations.
# The directories end in:
# 512sM5Bs5886_20 (Solenoidal turbulence, timestep 20)
# 512sM5Bs5886_25 (Solenoidal turbulence, timestep 25)
# 512sM5Bs5886_30 (Solenoidal turbulence, timestep 30)
# 512sM5Bs5886_35 (Solenoidal turbulence, timestep 35)
# 512sM5Bs5886_40 (Solenoidal turbulence, timestep 40)
# 512cM5Bs5886_20 (Compressive turbulence, timestep 20)
# 512cM5Bs5886_25 (Compressive turbulence, timestep 25)
# 512cM5Bs5886_30 (Compressive turbulence, timestep 30)
# 512cM5Bs5886_35 (Compressive turbulence, timestep 35)
# 512cM5Bs5886_40 (Compressive turbulence, timestep 40)
# The ordering matters: the first num_timestep entries are solenoidal, the
# rest compressive, and the plotting code below relies on this split.
spec_locs = ['512sM5Bs5886_20/', '512sM5Bs5886_25/', '512sM5Bs5886_30/',\
'512sM5Bs5886_35/', '512sM5Bs5886_40/', '512cM5Bs5886_20/', '512cM5Bs5886_25/',\
'512cM5Bs5886_30/', '512cM5Bs5886_35/', '512cM5Bs5886_40/']

# Create an array of strings, where each string gives the legend label for
# a corresponding simulation (same ordering as spec_locs)
sim_labels = ['Sol 20', 'Sol 25', 'Sol 30', 'Sol 35', 'Sol 40',\
'Comp 20', 'Comp 25', 'Comp 30', 'Comp 35', 'Comp 40']

# Create a variable that holds the number of timesteps being used per
# turbulence driving mode (solenoidal / compressive)
num_timestep = 5

# Create a variable that controls whether the line of sight is assumed to be
# along the x, y or z axis of the data cube when constructing the synchrotron
# maps. This can include 'x', 'y', or 'z'. Synchrotron maps are produced for
# each line of sight included in the array
line_o_sight = ['x', 'y', 'z']

# Create a variable that specifies the gamma values that will be used to produce
# these synchrotron emission maps (used only to build the FITS filenames and
# the output plot filenames)
gamma = 2.0

# The arrays below all share the same indexing convention: the first index
# gives the simulation (same order as spec_locs), the second gives the line
# of sight (0 = x, 1 = y, 2 = z), and where present the third index goes
# along radial separation bins.

# Normalised correlation function values for each simulation / LOS / radius
norm_corr_arr = np.zeros((len(spec_locs), 3, num_bins))

# Radius values corresponding to norm_corr_arr
corr_rad_arr = np.zeros((len(spec_locs), 3, num_bins))

# Radially averaged structure function values
sf_arr = np.zeros((len(spec_locs), 3, num_bins))

# Radius values corresponding to sf_arr
sf_rad_arr = np.zeros((len(spec_locs), 3, num_bins))

# Magnitude of the quadrupole ratio
quad_arr = np.zeros((len(spec_locs), 3, num_bins))

# Radius values corresponding to quad_arr
quad_rad_arr = np.zeros((len(spec_locs), 3, num_bins))

# Real part of the quadrupole ratio
quad_real_arr = np.zeros((len(spec_locs), 3, num_bins))

# Imaginary part of the quadrupole ratio
quad_imag_arr = np.zeros((len(spec_locs), 3, num_bins))

# Mean of the synchrotron intensity image (of the log intensity, if log is
# True) for each simulation and line of sight
mean_arr = np.zeros((len(spec_locs),3))

# Standard deviation of the synchrotron intensity image for each simulation
# and line of sight
stdev_arr = np.zeros((len(spec_locs),3))

# Skewness of the synchrotron intensity image for each simulation and line of
# sight. NOTE: We will calculate the biased skewness
skew_arr = np.zeros((len(spec_locs),3))

# Kurtosis of the synchrotron intensity image for each simulation and line of
# sight. NOTE: We will calculate the biased Fisher kurtosis
kurt_arr = np.zeros((len(spec_locs),3))

# Slope of the structure function of the synchrotron intensity image minus 1,
# for each simulation and line of sight
m_arr = np.zeros((len(spec_locs),3))

# Residuals of the linear fit to the structure function, for each simulation
# and line of sight
residual_arr = np.zeros((len(spec_locs),3))

# Integrated magnitude of the quadrupole / monopole ratio of the synchrotron
# intensity image, for each simulation and line of sight
int_quad_arr = np.zeros((len(spec_locs),3))

# Create error arrays for each of the statistics. These errors are calculated
# by calc_err_bootstrap, which takes the standard error of the mean of the
# statistics calculated for quarter sub-images of the synchrotron maps. Same
# indexing convention as the statistic arrays above.
mean_err_arr = np.zeros((len(spec_locs),3))
stdev_err_arr = np.zeros((len(spec_locs),3))
skew_err_arr = np.zeros((len(spec_locs),3))
kurt_err_arr = np.zeros((len(spec_locs),3))
m_err_arr = np.zeros((len(spec_locs),3))
residual_err_arr = np.zeros((len(spec_locs),3))
int_quad_err_arr = np.zeros((len(spec_locs),3))
# Loop over the different simulations, calculating all statistics (moments,
# correlation functions, structure functions, quadrupole ratios, and their
# bootstrap errors) for each simulation and each line of sight
for i in range(len(spec_locs)):
	# Create a string for the full directory path to use in this calculation
	data_loc = simul_loc + spec_locs[i]

	# Loop over the lines of sight (0 = x, 1 = y, 2 = z), to calculate the
	# correlation function, structure function and quadrupole ratio for each
	for j in range(3):
		# Open the FITS file that contains the synchrotron intensity map for
		# this simulation and line of sight
		sync_fits = fits.open(data_loc + 'synint_{}_gam{}.fits'.format(line_o_sight[j],gamma))

		# Extract the data for the simulated synchrotron intensities
		sync_data = sync_fits[0].data

		# Print a message to the screen to show that the data has been loaded
		print 'Synchrotron intensity loaded successfully'

		# Flatten the synchrotron intensity map, so that the moments can be
		# calculated from the 1D distribution of pixel values
		flat_sync = sync_data.flatten()

		# If we are calculating the moments of the log PDFs, then calculate the
		# logarithm of the synchrotron intensity values
		if log == True:
			# In this case we are calculating the moments of the log PDFs, so
			# calculate the log of the synchrotron intensities, normalised by
			# the mean intensity (NOTE: calc_err_bootstrap does not apply this
			# mean normalisation, so its mean errors are on a shifted scale)
			flat_sync = np.log10(flat_sync/ np.mean(flat_sync, dtype = np.float64))

		# Calculate the mean of the synchrotron intensity map, and store the
		# result in the corresponding array
		mean_arr[i,j] = np.mean(flat_sync, dtype=np.float64)

		# Calculate the standard deviation of the synchrotron intensity map, and
		# store the result in the corresponding array
		stdev_arr[i,j] = np.std(flat_sync, dtype=np.float64)

		# Calculate the biased skewness of the synchrotron intensity map, and store
		# the result in the corresponding array
		skew_arr[i,j] = stats.skew(flat_sync)

		# Calculate the biased Fisher kurtosis of the synchrotron intensity
		# map, and store the result in the corresponding array
		kurt_arr[i,j] = stats.kurtosis(flat_sync)

		# Calculate the 2D correlation function for this synchrotron intensity
		# map. Note that no_fluct = True is set, because we are not subtracting
		# the mean from the synchrotron maps before calculating the correlation
		# function
		corr = cf_fft(sync_data, no_fluct = True)

		# Radially average the calculated 2D correlation function, using the
		# specified number of bins
		rad_corr = sfr(corr, num_bins, verbose = False)

		# Calculate the square of the mean of the synchrotron intensity values
		sync_sq_mean = np.power( np.mean(sync_data, dtype = np.float64), 2.0 )

		# Calculate the mean of the synchrotron intensity values squared
		sync_mean_sq = np.mean( np.power(sync_data, 2.0), dtype = np.float64 )

		# Calculate the normalised, radially averaged correlation function,
		# i.e. (CF - <I>^2) / (<I^2> - <I>^2), which equals 1 at zero lag and
		# tends to 0 at large separations
		norm_rad_corr = (rad_corr[1] - sync_sq_mean) / (sync_mean_sq - sync_sq_mean)

		# Print a message to show that the correlation function of the
		# synchrotron intensity has been calculated for this line of sight
		print 'Correlation function of synchrotron intensity'\
		+ ' calculated for {} LOS'.format(line_o_sight[j])

		# Insert the calculated normalised, radially averaged correlation function
		# into the matrix that stores all of the calculated correlation functions
		norm_corr_arr[i,j] = norm_rad_corr

		# Insert the radius values used to calculate this correlation function
		# into the matrix that stores the radius values
		corr_rad_arr[i,j] = rad_corr[0]

		# Print a message to show that the correlation function has been calculated
		print 'Normalised, radially averaged correlation function calculated for'\
		+ ' {} LOS'.format(line_o_sight[j])

		# Calculate the structure function (two-dimensional) of the synchrotron
		# intensity map. Note that no_fluct = True is set, because we are not
		# subtracting the mean from the synchrotron maps before calculating the
		# structure function.
		strfn = sf_fft(sync_data, no_fluct = True)

		# Radially average the calculated 2D structure function, using the
		# specified number of bins.
		rad_sf = sfr(strfn, num_bins, verbose = False)

		# Extract the calculated radially averaged structure function
		sf_arr[i,j] = rad_sf[1]

		# Extract the radius values used to calculate this structure function
		sf_rad_arr[i,j] = rad_sf[0]

		# Fit the spectral index of the structure function. Only bins 5-13 are
		# used in the fit, matching the fit range used in calc_err_bootstrap.
		spec_ind_data = np.polyfit(np.log10(\
			sf_rad_arr[i,j,5:14]),\
			np.log10(sf_arr[i,j,5:14]), 1, full = True)

		# Extract the returned coefficients from the polynomial fit
		coeff = spec_ind_data[0]

		# Enter the value of m, the slope of the structure function minus 1,
		# into the corresponding array
		m_arr[i,j] = coeff[0]-1.0

		# Enter the value of the residuals into the corresponding array
		residual_arr[i,j] = spec_ind_data[1]

		# Calculate the normalised 2D structure function for this synchrotron
		# intensity map, which only takes values between 0 and 2. As above,
		# no_fluct = True because the mean is not subtracted first.
		norm_strfn = sf_fft(sync_data, no_fluct = True, normalise = True)

		# Shift the 2D structure function so that the zero radial separation
		# entry is in the centre of the image.
		norm_strfn = np.fft.fftshift(norm_strfn)

		# Calculate the magnitude and argument of the quadrupole ratio
		quad_mod, quad_arg, quad_rad = calc_quad_ratio(norm_strfn, num_bins)

		# Add the calculated modulus of the quadrupole ratio to the final array
		quad_arr[i,j] = quad_mod

		# Add the radius values used to calculate the quadrupole ratio to the
		# corresponding array
		quad_rad_arr[i,j] = quad_rad

		# Calculate the real part of the quadrupole ratio
		quad_real_arr[i,j] = quad_mod * np.cos(quad_arg)

		# Calculate the imaginary part of the quadrupole ratio
		quad_imag_arr[i,j] = quad_mod * np.sin(quad_arg)

		# Integrate the magnitude of the quadrupole / monopole ratio over
		# radial separation bins 8 to 22 inclusive, with respect to log
		# separation (the points are equally separated in log space, so a
		# unit spacing is used). The integral is normalised by the number of
		# increments in log radial separation used in the calculation, to
		# give an average value over that range.
		int_quad_arr[i,j] = np.trapz(quad_mod[8:23], dx = 1.0) / (22 - 8)

		# Calculate the bootstrap errors for each of the statistics, for this
		# simulation and line of sight. (NOTE: despite the original comment,
		# these errors are computed for every line of sight, not just y and z.)
		mean_err_arr[i,j], stdev_err_arr[i,j], skew_err_arr[i,j],\
		kurt_err_arr[i,j], m_err_arr[i,j],\
		residual_err_arr[i,j], int_quad_err_arr[i,j]\
		= calc_err_bootstrap(sync_data, log = log)

		# Close the fits files, to save memory
		sync_fits.close()

		# Print a message to show that the calculation has finished successfully
		# for this simulation
		print 'All statistics calculated for simulation {} LOS {}'.format(\
			spec_locs[i], line_o_sight[j])
# When the code reaches this point, the normalised correlation functions,
# structure functions, and quadrupole ratios have been saved for every
# simulation, and every line of sight, so start making the final plots.

# Create an array of marker symbols, so that the plot for each timestep has
# a different plot symbol (indexed by timestep within each driving mode)
symbol_arr = ['o','^','s','*','D']

# ----------------- Plots of normalised correlation functions ------------------

# Produce one figure with six subplots in a 2x3 grid. Columns are the x, y
# and z lines of sight; the top row shows the solenoidal simulations and the
# bottom row the compressive simulations, with one curve per timestep.
# The axes share limits (sharex/sharey) so panels are directly comparable.

# Create a figure to hold all of the subplots. (Figure number 1 is reused by
# later figures; this is safe because plt.close() is called after saving.)
fig = plt.figure(1, figsize=(10,6), dpi = 300)

# Subplot 1: x line of sight, solenoidal simulations
ax1 = fig.add_subplot(231)

# Plot the normalised correlation function for each solenoidal timestep
for i in range(num_timestep):
	# Plot the normalised correlation function for this simulation, for this
	# line of sight
	plt.plot(corr_rad_arr[i,0], norm_corr_arr[i,0], '-' + symbol_arr[i])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(corr_rad_arr[0,0])), \
	np.zeros(np.shape(corr_rad_arr[0,0])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax1.set_xscale('log')

# Make the x axis tick labels invisible (shared with the panel below)
plt.setp( ax1.get_xticklabels(), visible=False)

# Subplot 2: y line of sight, solenoidal simulations. Shares the y axis
# limits with the x line of sight panel.
ax2 = fig.add_subplot(232, sharey = ax1)

# Plot the normalised correlation function for each solenoidal timestep
for i in range(num_timestep):
	# Plot the normalised correlation function for this simulation, for this
	# line of sight
	plt.plot(corr_rad_arr[i,1], norm_corr_arr[i,1], '-' + symbol_arr[i])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(corr_rad_arr[0,1])), \
	np.zeros(np.shape(corr_rad_arr[0,1])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax2.set_xscale('log')

# Make the x axis tick labels invisible
plt.setp( ax2.get_xticklabels(), visible=False)

# Make the y axis tick labels invisible (shared with the leftmost panel)
plt.setp( ax2.get_yticklabels(), visible=False)

# Subplot 3: z line of sight, solenoidal simulations. Shares the y axis
# limits with the x line of sight panel.
ax3 = fig.add_subplot(233, sharey = ax1)

# Plot the normalised correlation function for each solenoidal timestep,
# with legend labels (only this panel carries the solenoidal legend)
for i in range(num_timestep):
	# Plot the normalised correlation function for this simulation, for this
	# line of sight
	plt.plot(corr_rad_arr[i,2], norm_corr_arr[i,2], '-' + symbol_arr[i],\
		label = '{}'.format(sim_labels[i]))

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(corr_rad_arr[0,2])), \
	np.zeros(np.shape(corr_rad_arr[0,2])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax3.set_xscale('log')

# Make the x axis tick labels invisible
plt.setp( ax3.get_xticklabels(), visible=False)

# Make the y axis tick labels invisible
plt.setp( ax3.get_yticklabels(), visible=False)

# Force the legend to appear on the plot
plt.legend(fontsize = 8, numpoints=1)

# Subplot 4: x line of sight, compressive simulations. Shares the x axis
# with the panel above.
ax4 = fig.add_subplot(234, sharex = ax1)

# Plot the normalised correlation function for each compressive timestep
# (indices num_timestep to 2*num_timestep - 1 of the simulation arrays)
for i in range(num_timestep, 2*num_timestep):
	# Plot the normalised correlation function for this simulation, for this
	# line of sight
	plt.plot(corr_rad_arr[i,0], norm_corr_arr[i,0], '-' + symbol_arr[i - num_timestep])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(corr_rad_arr[0,0])), \
	np.zeros(np.shape(corr_rad_arr[0,0])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax4.set_xscale('log')

# Subplot 5: y line of sight, compressive simulations. Shares axes with the
# panel above (x) and the panel to the left (y).
ax5 = fig.add_subplot(235, sharex = ax2, sharey = ax4)

# Plot the normalised correlation function for each compressive timestep
for i in range(num_timestep, 2*num_timestep):
	# Plot the normalised correlation function for this simulation, for this
	# line of sight
	plt.plot(corr_rad_arr[i,1], norm_corr_arr[i,1], '-' + symbol_arr[i - num_timestep])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(corr_rad_arr[0,1])), \
	np.zeros(np.shape(corr_rad_arr[0,1])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax5.set_xscale('log')

# Make the y axis tick labels invisible
plt.setp( ax5.get_yticklabels(), visible=False)

# Subplot 6: z line of sight, compressive simulations. Shares axes with the
# panel above (x) and the leftmost bottom panel (y).
ax6 = fig.add_subplot(236, sharex = ax3, sharey = ax4)

# Plot the normalised correlation function for each compressive timestep,
# with legend labels (only this panel carries the compressive legend)
for i in range(num_timestep, 2*num_timestep):
	# Plot the normalised correlation function for this simulation, for this
	# line of sight
	plt.plot(corr_rad_arr[i,2], norm_corr_arr[i,2], '-' + symbol_arr[i - num_timestep],\
		label = '{}'.format(sim_labels[i]))

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(corr_rad_arr[0,2])), \
	np.zeros(np.shape(corr_rad_arr[0,2])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax6.set_xscale('log')

# Make the y axis tick labels invisible
plt.setp( ax6.get_yticklabels(), visible=False)

# Force the legend to appear on the plot
plt.legend(fontsize = 8, numpoints=1)

# Add a label to the x-axis (one shared label for the whole figure)
plt.figtext(0.5, 0.0, 'Radial Separation [pixels]', ha = 'center', \
	va = 'bottom', fontsize = 20)

# Add a label to the y-axis (one shared label for the whole figure)
plt.figtext(0.03, 0.5, 'NCF Sync Intensity', ha = 'left', \
	va = 'center', fontsize = 20, rotation = 'vertical')

# Add some text to the figure, to label the left plot as figure a
plt.figtext(0.15, 0.94, 'a) Sol x-LOS', fontsize = 18)

# Add some text to the figure, to label the centre plot as figure b
plt.figtext(0.42, 0.94, 'b) Sol y-LOS', fontsize = 18)

# Add some text to the figure, to label the right plot as figure c
plt.figtext(0.7, 0.94, 'c) Sol z-LOS', fontsize = 18)

# Add some text to the figure, to label the left plot as figure d
plt.figtext(0.15, 0.475, 'd) Comp x-LOS', fontsize = 18)

# Add some text to the figure, to label the centre plot as figure e
plt.figtext(0.42, 0.475, 'e) Comp y-LOS', fontsize = 18)

# Add some text to the figure, to label the right plot as figure f
plt.figtext(0.7, 0.475, 'f) Comp z-LOS', fontsize = 18)

# Make sure that all of the labels are clearly visible in the plot
#plt.tight_layout()

# Save the figure using the given filename and format
plt.savefig(simul_loc + 'ncfs_all_sims_time_gam{}.eps'.format(gamma), format = 'eps')

# Close the figure so that it does not stay in memory
plt.close()
#--------------------------- Structure Functions -------------------------------

# Produce one figure with six subplots in a 2x3 grid, in the same layout as
# the normalised correlation function figure above: columns are the x, y and
# z lines of sight, the top row shows the solenoidal simulations and the
# bottom row the compressive simulations, with one curve per timestep. Both
# axes are logarithmic here, since the structure functions span decades.

# Create a figure to hold all of the subplots. (Figure number 1 is reused;
# this is safe because plt.close() was called on the previous figure.)
fig = plt.figure(1, figsize=(10,6), dpi = 300)

# Subplot 1: x line of sight, solenoidal simulations
ax1 = fig.add_subplot(231)

# Plot the structure function for each solenoidal timestep
for i in range(num_timestep):
	# Plot the structure function for this simulation, for this
	# line of sight
	plt.plot(sf_rad_arr[i,0], sf_arr[i,0], '-' + symbol_arr[i])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(sf_rad_arr[0,0])), \
	np.zeros(np.shape(sf_rad_arr[0,0])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax1.set_xscale('log')

# Make the y axis of the plot logarithmic
ax1.set_yscale('log')

# Make the x axis tick labels invisible (shared with the panel below)
plt.setp( ax1.get_xticklabels(), visible=False)

# Subplot 2: y line of sight, solenoidal simulations. Shares the y axis
# limits with the x line of sight panel.
ax2 = fig.add_subplot(232, sharey = ax1)

# Plot the structure function for each solenoidal timestep
for i in range(num_timestep):
	# Plot the structure function for this simulation, for this
	# line of sight
	plt.plot(sf_rad_arr[i,1], sf_arr[i,1], '-' + symbol_arr[i])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(sf_rad_arr[0,1])), \
	np.zeros(np.shape(sf_rad_arr[0,1])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax2.set_xscale('log')

# Make the y axis of the plot logarithmic
ax2.set_yscale('log')

# Make the x axis tick labels invisible
plt.setp( ax2.get_xticklabels(), visible=False)

# Make the y axis tick labels invisible
plt.setp( ax2.get_yticklabels(), visible=False)

# Subplot 3: z line of sight, solenoidal simulations. Shares the y axis
# limits with the x line of sight panel.
ax3 = fig.add_subplot(233, sharey = ax1)

# Plot the structure function for each solenoidal timestep, with legend
# labels (only this panel carries the solenoidal legend)
for i in range(num_timestep):
	# Plot the structure function for this simulation, for this
	# line of sight
	plt.plot(sf_rad_arr[i,2], sf_arr[i,2], '-' + symbol_arr[i],\
		label = '{}'.format(sim_labels[i]))

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(sf_rad_arr[0,2])), \
	np.zeros(np.shape(sf_rad_arr[0,2])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax3.set_xscale('log')

# Make the y axis of the plot logarithmic
ax3.set_yscale('log')

# Make the x axis tick labels invisible
plt.setp( ax3.get_xticklabels(), visible=False)

# Make the y axis tick labels invisible
plt.setp( ax3.get_yticklabels(), visible=False)

# Force the legend to appear on the plot (loc = 4 is the lower right corner)
plt.legend(loc = 4, fontsize = 8, numpoints=1)

# Subplot 4: x line of sight, compressive simulations. Shares the x axis
# with the panel above.
ax4 = fig.add_subplot(234, sharex = ax1)

# Plot the structure function for each compressive timestep (indices
# num_timestep to 2*num_timestep - 1 of the simulation arrays)
for i in range(num_timestep, 2*num_timestep):
	# Plot the structure function for this simulation, for this
	# line of sight
	plt.plot(sf_rad_arr[i,0], sf_arr[i,0], '-' + symbol_arr[i - num_timestep])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(sf_rad_arr[0,0])), \
	np.zeros(np.shape(sf_rad_arr[0,0])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax4.set_xscale('log')

# Make the y axis of the plot logarithmic
ax4.set_yscale('log')

# Subplot 5: y line of sight, compressive simulations. Shares axes with the
# panel above (x) and the panel to the left (y).
ax5 = fig.add_subplot(235, sharex = ax2, sharey = ax4)

# Plot the structure function for each compressive timestep
for i in range(num_timestep, 2*num_timestep):
	# Plot the structure function for this simulation, for this
	# line of sight
	plt.plot(sf_rad_arr[i,1], sf_arr[i,1], '-' + symbol_arr[i - num_timestep])

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(sf_rad_arr[0,1])), \
	np.zeros(np.shape(sf_rad_arr[0,1])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax5.set_xscale('log')

# Make the y axis of the plot logarithmic
ax5.set_yscale('log')

# Make the y axis tick labels invisible
plt.setp( ax5.get_yticklabels(), visible=False)

# Subplot 6: z line of sight, compressive simulations. Shares axes with the
# panel above (x) and the leftmost bottom panel (y).
ax6 = fig.add_subplot(236, sharex = ax3, sharey = ax4)

# Plot the structure function for each compressive timestep, with legend
# labels (only this panel carries the compressive legend)
for i in range(num_timestep, 2*num_timestep):
	# Plot the structure function for this simulation, for this
	# line of sight
	plt.plot(sf_rad_arr[i,2], sf_arr[i,2], '-' + symbol_arr[i - num_timestep],\
		label = '{}'.format(sim_labels[i]))

# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(sf_rad_arr[0,2])), \
	np.zeros(np.shape(sf_rad_arr[0,2])), 'k--', alpha = 0.5)

# Make the x axis of the plot logarithmic
ax6.set_xscale('log')

# Make the y axis of the plot logarithmic
ax6.set_yscale('log')

# Make the y axis tick labels invisible
plt.setp( ax6.get_yticklabels(), visible=False)

# Force the legend to appear on the plot (loc = 4 is the lower right corner)
plt.legend(loc = 4, fontsize = 8, numpoints=1)

# Add a label to the x-axis (one shared label for the whole figure)
plt.figtext(0.5, 0.0, 'Radial Separation [pixels]', ha = 'center', \
	va = 'bottom', fontsize = 20)

# Add a label to the y-axis (one shared label for the whole figure)
plt.figtext(0.03, 0.5, 'Structure Function Amplitude', ha = 'left', \
	va = 'center', fontsize = 20, rotation = 'vertical')

# Add some text to the figure, to label the left plot as figure a
plt.figtext(0.15, 0.94, 'a) Sol x-LOS', fontsize = 18)

# Add some text to the figure, to label the centre plot as figure b
plt.figtext(0.42, 0.94, 'b) Sol y-LOS', fontsize = 18)

# Add some text to the figure, to label the right plot as figure c
plt.figtext(0.7, 0.94, 'c) Sol z-LOS', fontsize = 18)

# Add some text to the figure, to label the left plot as figure d
plt.figtext(0.15, 0.475, 'd) Comp x-LOS', fontsize = 18)

# Add some text to the figure, to label the centre plot as figure e
plt.figtext(0.42, 0.475, 'e) Comp y-LOS', fontsize = 18)

# Add some text to the figure, to label the right plot as figure f
plt.figtext(0.7, 0.475, 'f) Comp z-LOS', fontsize = 18)

# Make sure that all of the labels are clearly visible in the plot
#plt.tight_layout()

# Save the figure using the given filename and format
plt.savefig(simul_loc + 'sfs_all_sims_time_gam{}.eps'.format(gamma), format = 'eps')

# Close the figure so that it does not stay in memory
plt.close()
#----------------------------- Quadrupole Ratios -------------------------------
# Here we want to produce one plot with six subplots. There should be two rows
# of subplots, with three subplots in each row. The left subplot will be the
# quadrupole ratio modulus for a line of sight along the x axis, the centre plot
# will be for the y axis, and the right subplot will be the quadrupole ratio
# modulus for the z axis. In each plot the solenoidal and compressive
# simulations will be compared for different timesteps. The top row is for the
# solenoidal simulation, and the bottom row for the compressive simulation.
# Create a figure to hold all of the subplots
fig = plt.figure(1, figsize=(10,6), dpi = 300)
# Create an axis for the first subplot to be produced, which is for the
# x line of sight, solenoidal simulation
ax1 = fig.add_subplot(231)
# Loop over the simulations to produce plots for each simulation
for i in range(num_timestep):
# Plot the quadrupole ratio modulus for this simulation, for this
# line of sight
plt.plot(quad_rad_arr[i,0], quad_arr[i,0], '-' + symbol_arr[i])
# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,0])), \
np.zeros(np.shape(quad_rad_arr[0,0])), 'k--', alpha = 0.5)
# Make the x axis of the plot logarithmic
ax1.set_xscale('log')
# Make the x axis tick labels invisible
plt.setp( ax1.get_xticklabels(), visible=False)
# Create an axis for the second subplot to be produced, which is for the
# y line of sight, solenoidal simulation. Make the y axis limits the same as for
# the x axis plot
ax2 = fig.add_subplot(232, sharey = ax1)
# Loop over the simulations to produce plots for each simulation
for i in range(num_timestep):
# Plot the quadrupole ratio modulus for this simulation, for this
# line of sight
plt.plot(quad_rad_arr[i,1], quad_arr[i,1], '-' + symbol_arr[i])
# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,1])), \
np.zeros(np.shape(quad_rad_arr[0,1])), 'k--', alpha = 0.5)
# Make the x axis of the plot logarithmic
ax2.set_xscale('log')
# Make the x axis tick labels invisible
plt.setp( ax2.get_xticklabels(), visible=False)
# Make the y axis tick labels invisible
plt.setp( ax2.get_yticklabels(), visible=False)
# Create an axis for the third subplot to be produced, which is for the
# z line of sight, solenoidal simulation. Make the y axis limits the same as for
# the x axis plot
ax3 = fig.add_subplot(233, sharey = ax1)
# Loop over the simulations to produce plots for each simulation
for i in range(num_timestep):
# Plot the quadrupole ratio modulus for this simulation, for this
# line of sight
plt.plot(quad_rad_arr[i,2], quad_arr[i,2], '-' + symbol_arr[i],\
label = '{}'.format(sim_labels[i]))
# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,2])), \
np.zeros(np.shape(quad_rad_arr[0,2])), 'k--', alpha = 0.5)
# Make the x axis of the plot logarithmic
ax3.set_xscale('log')
# Make the x axis tick labels invisible
plt.setp( ax3.get_xticklabels(), visible=False)
# Make the y axis tick labels invisible
plt.setp( ax3.get_yticklabels(), visible=False)
# Force the legend to appear on the plot
plt.legend(fontsize = 8, numpoints=1)
# Create an axis for the fourth subplot to be produced, which is for the
# x line of sight, compressive simulation
ax4 = fig.add_subplot(234, sharex = ax1)
# Loop over the simulations to produce plots for each simulation
for i in range(num_timestep, 2*num_timestep):
# Plot the quadrupole ratio modulus for this simulation, for this
# line of sight
plt.plot(quad_rad_arr[i,0], quad_arr[i,0], '-' + symbol_arr[i - num_timestep])
# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,0])), \
np.zeros(np.shape(quad_rad_arr[0,0])), 'k--', alpha = 0.5)
# Make the x axis of the plot logarithmic
ax4.set_xscale('log')
# Create an axis for the fifth subplot to be produced, which is for the
# y line of sight, compressive simulation. Make the y axis limits the same as for
# the x axis plot
ax5 = fig.add_subplot(235, sharex = ax2, sharey = ax4)
# Loop over the simulations to produce plots for each simulation
for i in range(num_timestep, 2*num_timestep):
# Plot the quadrupole ratio modulus for this simulation, for this
# line of sight
plt.plot(quad_rad_arr[i,1], quad_arr[i,1], '-' + symbol_arr[i - num_timestep])
# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,1])), \
np.zeros(np.shape(quad_rad_arr[0,1])), 'k--', alpha = 0.5)
# Make the x axis of the plot logarithmic
ax5.set_xscale('log')
# Make the y axis tick labels invisible
plt.setp( ax5.get_yticklabels(), visible=False)
# Create an axis for the sixth subplot to be produced, which is for the
# z line of sight, compressive simulation. Make the y axis limits the same as for
# the x axis plot
ax6 = fig.add_subplot(236, sharex = ax3, sharey = ax4)
# Loop over the simulations to produce plots for each simulation
for i in range(num_timestep, 2*num_timestep):
# Plot the quadrupole ratio modulus for this simulation, for this
# line of sight
plt.plot(quad_rad_arr[i,2], quad_arr[i,2], '-' + symbol_arr[i - num_timestep],\
label = '{}'.format(sim_labels[i]))
# Plot a faded dashed line to represent the line y = 0
plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,2])), \
np.zeros(np.shape(quad_rad_arr[0,2])), 'k--', alpha = 0.5)
# Make the x axis of the plot logarithmic
ax6.set_xscale('log')
# Make the y axis tick labels invisible
plt.setp( ax6.get_yticklabels(), visible=False)
# Force the legend to appear on the plot
plt.legend(fontsize = 8, numpoints=1)
# Add a label to the x-axis
plt.figtext(0.5, 0.0, 'Radial Separation [pixels]', ha = 'center', \
va = 'bottom', fontsize = 20)
# Add a label to the y-axis
plt.figtext(0.03, 0.5, 'Quadrupole Ratio', ha = 'left', \
va = 'center', fontsize = 20, rotation = 'vertical')
# Add some text to the figure, to label the left plot as figure a
plt.figtext(0.15, 0.94, 'a) Sol x-LOS', fontsize = 18)
# Add some text to the figure, to label the centre plot as figure b
plt.figtext(0.42, 0.94, 'b) Sol y-LOS', fontsize = 18)
# Add some text to the figure, to label the right plot as figure c
plt.figtext(0.7, 0.94, 'c) Sol z-LOS', fontsize = 18)
# Add some text to the figure, to label the left plot as figure d
plt.figtext(0.15, 0.475, 'd) Comp x-LOS', fontsize = 18)
# Add some text to the figure, to label the centre plot as figure e
plt.figtext(0.42, 0.475, 'e) Comp y-LOS', fontsize = 18)
# Add some text to the figure, to label the right plot as figure f
plt.figtext(0.7, 0.475, 'f) Comp z-LOS', fontsize = 18)
# Make sure that all of the labels are clearly visible in the plot
#plt.tight_layout()
# Save the figure using the given filename and format
plt.savefig(simul_loc + 'quad_ratio_all_sims_time_gam{}.eps'.format(gamma), format = 'eps')
# Close the figure so that it does not stay in memory
plt.close()
# #----------------------- Real and Imaginary Parts of Quad Ratio ----------------
# # Here we want to produce one plot with six subplots. There should be two rows
# # of subplots, with three subplots in each row. The top row will be the real
# # part of the quadrupole ratio, and the bottom row will be the imaginary part.
# # The left column will be for a line of sight along the x axis, the centre
# # column for a line of sight along the y axis, and the right column will be for
# # a line of sight along the z axis.
# # Create a figure to hold all of the subplots
# fig = plt.figure(1, figsize=(10,6), dpi = 300)
# # Create an axis for the first subplot to be produced, which is for the real
# # part of the quadrupole ratio for a line of sight along the x axis
# ax1 = fig.add_subplot(231)
# # Loop over the simulations to produce plots for each simulation
# for i in range(len(spec_locs)):
# # Plot the quadrupole ratio for this simulation, for this line of sight
# plt.plot(quad_rad_arr[i,0], quad_real_arr[i,0], '-' + symbol_arr[i])
# # Plot a faded dashed line to represent the line y = 0
# plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,0])), \
# np.zeros(np.shape(quad_rad_arr[0,0])), 'k--', alpha = 0.5)
# # Make the x axis of the plot logarithmic
# ax1.set_xscale('log')
# # Make the x axis tick labels invisible
# plt.setp( ax1.get_xticklabels(), visible=False)
# # Create an axis for the second subplot to be produced, which is for the real
# # part of the quadrupole ratio for a line of sight along the y axis. Make the y
# # axis limits the same as for the x axis plot
# ax2 = fig.add_subplot(232, sharey = ax1)
# # Loop over the simulations to produce plots for each simulation
# for i in range(len(spec_locs)):
# # Plot the quadrupole ratio for this simulation, for this line of sight
# plt.plot(quad_rad_arr[i,1], quad_real_arr[i,1], '-' + symbol_arr[i])
# # Plot a faded dashed line to represent the line y = 0
# plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,1])), \
# np.zeros(np.shape(quad_rad_arr[0,1])), 'k--', alpha = 0.5)
# # Make the x axis of the plot logarithmic
# ax2.set_xscale('log')
# # Make the x axis tick labels invisible
# plt.setp( ax2.get_xticklabels(), visible=False)
# # Make the y axis tick labels invisible
# plt.setp( ax2.get_yticklabels(), visible=False)
# # Create an axis for the third subplot to be produced, which is for the real
# # part of the quadrupole ratio for a line of sight along the z axis. Make the y
# # axis limits the same as for the x axis plot
# ax3 = fig.add_subplot(233, sharey = ax1)
# # Loop over the simulations to produce plots for each simulation
# for i in range(len(spec_locs)):
# # Plot the quadrupole ratio for this simulation, for this line of sight
# plt.plot(quad_rad_arr[i,2], quad_real_arr[i,2], '-' + symbol_arr[i],\
# label = '{}'.format(sim_labels[i]))
# # Plot a faded dashed line to represent the line y = 0
# plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,2])), \
# np.zeros(np.shape(quad_rad_arr[0,2])), 'k--', alpha = 0.5)
# # Make the x axis of the plot logarithmic
# ax3.set_xscale('log')
# # Make the x axis tick labels invisible
# plt.setp( ax3.get_xticklabels(), visible=False)
# # Make the y axis tick labels invisible
# plt.setp( ax3.get_yticklabels(), visible=False)
# # Force the legend to appear on the plot
# plt.legend(loc=4, fontsize = 9, numpoints=1)
# # Create an axis for the fourth subplot to be produced, which is for the
# # imaginary part of the quadrupole ratio for a line of sight along the x axis.
# # Make the x axis limits the same as for the first plot
# ax4 = fig.add_subplot(234, sharex = ax1, sharey = ax1)
# # Loop over the simulations to produce plots for each simulation
# for i in range(len(spec_locs)):
# # Plot the quadrupole ratio for this simulation, for this line of sight
# plt.plot(quad_rad_arr[i,0], quad_imag_arr[i,0], '-' + symbol_arr[i])
# # Plot a faded dashed line to represent the line y = 0
# plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,0])), \
# np.zeros(np.shape(quad_rad_arr[0,0])), 'k--', alpha = 0.5)
# # Make the x axis of the plot logarithmic
# ax4.set_xscale('log')
# # Create an axis for the fifth subplot to be produced, which is for the
# # imaginary part of the quadrupole ratio for a line of sigth along the y axis.
# # Make the x axis limits the same as for the second plot, and the y axis limits
# # the same as for the fourth plot
# ax5 = fig.add_subplot(235, sharex = ax2, sharey = ax4)
# # Loop over the simulations to produce plots for each simulation
# for i in range(len(spec_locs)):
# # Plot the quadrupole ratio for this simulation, for this line of sight
# plt.plot(quad_rad_arr[i,1], quad_imag_arr[i,1], '-' + symbol_arr[i])
# # Plot a faded dashed line to represent the line y = 0
# plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,1])), \
# np.zeros(np.shape(quad_rad_arr[0,1])), 'k--', alpha = 0.5)
# # Make the x axis of the plot logarithmic
# ax5.set_xscale('log')
# # Make the y axis tick labels invisible
# plt.setp( ax5.get_yticklabels(), visible=False)
# # Create an axis for the sixth subplot to be produced, which is for the
# # imaginary part of the quadrupole ratio for a line of sigth along the z axis.
# # Make the x axis limits the same as for the third plot, and the y axis limits
# # the same as for the fourth plot
# ax6 = fig.add_subplot(236, sharex = ax3, sharey = ax4)
# # Loop over the simulations to produce plots for each simulation
# for i in range(len(spec_locs)):
# # Plot the quadrupole ratio for this simulation, for this line of sight
# plt.plot(quad_rad_arr[i,2], quad_imag_arr[i,2], '-' + symbol_arr[i])
# # Plot a faded dashed line to represent the line y = 0
# plt.plot(np.linspace(0,1000,len(quad_rad_arr[0,2])), \
# np.zeros(np.shape(quad_rad_arr[0,2])), 'k--', alpha = 0.5)
# # Make the x axis of the plot logarithmic
# ax6.set_xscale('log')
# # Make the y axis tick labels invisible
# plt.setp( ax6.get_yticklabels(), visible=False)
# # Add a label to the x-axis
# plt.figtext(0.5, 0.0, 'Radial Separation [pixels]', ha = 'center', \
# va = 'bottom', fontsize = 20)
# # Add a label to the y-axis
# plt.figtext(0.03, 0.5, 'Quadrupole Ratio', ha = 'left', \
# va = 'center', fontsize = 20, rotation = 'vertical')
# # Add some text to the figure, to label the left plot as figure a
# plt.figtext(0.15, 0.94, 'a) x-LOS Real', fontsize = 18)
# # Add some text to the figure, to label the centre plot as figure b
# plt.figtext(0.42, 0.94, 'b) y-LOS Real', fontsize = 18)
# # Add some text to the figure, to label the right plot as figure c
# plt.figtext(0.7, 0.94, 'c) z-LOS Real', fontsize = 18)
# # Add some text to the figure, to label the left plot as figure d
# plt.figtext(0.15, 0.475, 'd) x-LOS Imag', fontsize = 18)
# # Add some text to the figure, to label the centre plot as figure e
# plt.figtext(0.42, 0.475, 'e) y-LOS Imag', fontsize = 18)
# # Add some text to the figure, to label the right plot as figure f
# plt.figtext(0.7, 0.475, 'f) z-LOS Imag', fontsize = 18)
# # Make sure that all of the labels are clearly visible in the plot
# #plt.tight_layout()
# # Save the figure using the given filename and format
# plt.savefig(simul_loc + 'real_imag_quad_all_sims_time_gam{}.eps'.format(gamma), format = 'eps')
# # Close the figure so that it does not stay in memory
# plt.close()
#-------------------------------------------------------------------------------
# Now that all of the statistics have been calculated, print them out to the
# screen. Loop over all of the lines of sight, and the different simulations,
# and print out results for the simulations
for j in range(3):
# For this line of sight, loop over the simulations
for i in range(len(spec_locs)):
# Print out the value of the mean for this line of sight
print "{} {} LOS Mean: {} Error: {}".format(sim_labels[i], line_o_sight[j],\
mean_arr[i,j], mean_err_arr[i,j])
for j in range(3):
# For this line of sight, loop over the simulations
for i in range(len(spec_locs)):
# Print out the value of the standard deviation for this line of sight
print "{} {} LOS St Dev: {} Error: {}".format(sim_labels[i], line_o_sight[j],\
stdev_arr[i,j], stdev_err_arr[i,j])
for j in range(3):
# For this line of sight, loop over the simulations
for i in range(len(spec_locs)):
# Print out the value of skewness for this line of sight
print "{} {} LOS Skewness: {} Error: {}".format(sim_labels[i], line_o_sight[j],\
skew_arr[i,j], skew_err_arr[i,j])
for j in range(3):
# For this line of sight, loop over the simulations
for i in range(len(spec_locs)):
# Print out the value of kurtosis for this line of sight
print "{} {} LOS Kurtosis: {} Error: {}".format(sim_labels[i], line_o_sight[j],\
kurt_arr[i,j], kurt_err_arr[i,j])
for j in range(3):
# For this line of sight, loop over the simulations
for i in range(len(spec_locs)):
# Print out the structure function slope for this line of sight
print "{} {} LOS SF Slope: {} Error: {}".format(sim_labels[i], line_o_sight[j],\
m_arr[i,j], m_err_arr[i,j])
for j in range(3):
# For this line of sight, loop over the simulations
for i in range(len(spec_locs)):
# Print out the residuals for this line of sight
print "{} {} LOS Residuals: {} Error: {}".format(sim_labels[i], line_o_sight[j],\
residual_arr[i,j], residual_err_arr[i,j])
for j in range(3):
# For this line of sight, loop over the simulations
for i in range(len(spec_locs)):
# Print out the value of the quadrupole ratio for this line of sight
print "{} {} LOS Quad Ratio: {} Error: {}".format(sim_labels[i], line_o_sight[j],\
int_quad_arr[i,j], int_quad_err_arr[i,j]) | [
"[email protected]"
]
| |
1d71cdd16103283b54ddbfae586dbd58e635dea8 | 7c28640e152dad3843423d04c96a3a37015bd9ba | /Examples/Game Tutorial/Tutorial Part 6.py | 0b571e28753411ea66a52d103d7f671bc5c1d42d | []
| no_license | DocVaughan/Pythonista | 251bbfd69203cf91f3d6a6bf20d478efd74a61a0 | 7d482c7db2c7b4daae10289b765f09a4f348a50c | refs/heads/master | 2021-01-20T20:48:32.603993 | 2017-12-31T10:33:48 | 2017-12-31T10:33:48 | 61,178,643 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,233 | py | # coding: utf-8
'''
Part 6 -- Meteors Incoming! ☄️
Collecting coins is fun, but did you notice the distinct lack of... challenge?
Let's change that now, and add some meteors to the mix. The mechanism is essentially the same as with the coins, but when the alien collides with a meteor, the game is over.
To make the game a bit harder, the speed at which coins and meteors fall to the ground now increases slightly over time.
'''
from scene import *
import sound
import random
A = Action
def cmp(a, b):
    """Three-way comparison shim for the builtin removed in Python 3.

    Returns -1 if a < b, 1 if a > b, and 0 when the two are equal.
    """
    if a < b:
        return -1
    if a > b:
        return 1
    return 0
# Sprite textures for the alien: the standing pose and a two-frame walk cycle.
standing_texture = Texture('plf:AlienGreen_front')
walk_textures = [Texture('plf:AlienGreen_walk1'), Texture('plf:AlienGreen_walk2')]
# ---[1]
# Because the alien can be hit by a meteor, we need one additional texture
# for the unhappy ("hit") alien:
hit_texture = Texture('plf:AlienGreen_hit')
class Coin (SpriteNode):
    """A collectible gold-coin sprite; colliding with it scores points."""

    def __init__(self, **kwargs):
        super().__init__('plf:Item_CoinGold', **kwargs)
# ---[2]
# As with the coins, we use a custom subclass of SpriteNode to represent the meteors. For some variety, the texture of the meteor is chosen randomly.
class Meteor (SpriteNode):
    """A falling meteor obstacle; one of two textures is picked at random."""

    def __init__(self, **kwargs):
        texture_name = random.choice(('spc:MeteorBrownBig1', 'spc:MeteorBrownBig2'))
        super().__init__(texture_name, **kwargs)
class Game (Scene):
    """Main scene: tilt the device to move the alien, catch coins, dodge meteors."""

    def setup(self):
        # One-time scene construction: background, ground tiles, the player
        # sprite and the score label. Per-round state is set in new_game().
        self.background_color = '#004f82'
        self.ground = Node(parent=self)
        tile_x = 0
        while tile_x <= self.size.w + 64:
            tile = SpriteNode('plf:Ground_PlanetHalf_mid', position=(tile_x, 0))
            self.ground.add_child(tile)
            tile_x += 64
        self.player = SpriteNode(standing_texture)
        self.player.anchor_point = (0.5, 0)
        self.add_child(self.player)
        score_font = ('Futura', 40)
        self.score_label = LabelNode('0', score_font, parent=self)
        self.score_label.position = (self.size.w/2, self.size.h - 70)
        self.score_label.z_position = 1
        self.items = []
        self.new_game()

    def new_game(self):
        # Reset everything to its initial state so a round can restart in place.
        for item in self.items:
            item.remove_from_parent()
        self.items = []
        self.score = 0
        self.score_label.text = '0'
        self.walk_step = -1
        self.player.position = (self.size.w/2, 32)
        self.player.texture = standing_texture
        self.speed = 1.0
        # Set to True when a meteor hits the alien; while True, update() stops
        # moving the player and checking collisions.
        self.game_over = False

    def update(self):
        if self.game_over:
            return
        self.update_player()
        self.check_item_collisions()
        # The spawn probability scales with the current game speed.
        if random.random() < 0.05 * self.speed:
            self.spawn_item()

    def update_player(self):
        g = gravity()
        if abs(g.x) > 0.05:
            # Significant tilt: face the direction of movement, step sideways
            # (clamped to the screen), and animate the two-frame walk cycle.
            self.player.x_scale = cmp(g.x, 0)
            x = self.player.position.x
            max_speed = 40
            x = max(0, min(self.size.w, x + g.x * max_speed))
            self.player.position = x, 32
            step = int(self.player.position.x / 40) % 2
            if step != self.walk_step:
                self.player.texture = walk_textures[step]
                sound.play_effect('rpg:Footstep00', 0.05, 1.0 + 0.5 * step)
                self.walk_step = step
        else:
            # No significant tilt: show the standing pose.
            self.player.texture = standing_texture
            self.walk_step = -1

    def check_item_collisions(self):
        # Approximate the alien with a fixed rectangle and hit-test every
        # falling item against it: coins score, meteors end the game.
        player_hitbox = Rect(self.player.position.x - 20, 32, 40, 65)
        for item in list(self.items):
            if item.frame.intersects(player_hitbox):
                if isinstance(item, Coin):
                    self.collect_item(item)
                elif isinstance(item, Meteor):
                    self.player_hit()
            elif not item.parent:
                # The item's action sequence removed it from the scene
                # (it fell off-screen), so forget about it.
                self.items.remove(item)

    def player_hit(self):
        # Called from check_item_collisions() when a meteor strikes: the alien
        # drops off the screen, and after 2 seconds a new round begins. The
        # wait is multiplied by the current speed so it always lasts two real
        # seconds regardless of how fast the game is running.
        self.game_over = True
        sound.play_effect('arcade:Explosion_1')
        self.player.texture = hit_texture
        self.player.run_action(A.move_by(0, -150))
        self.run_action(A.sequence(A.wait(2*self.speed), A.call(self.new_game)))

    def spawn_item(self):
        # 30% of spawns are meteors, the rest coins. Meteors may drift in at
        # an angle (their target x is random); coins fall straight down.
        if random.random() < 0.3:
            meteor = Meteor(parent=self)
            meteor.position = (random.uniform(20, self.size.w-20), self.size.h + 30)
            fall_time = random.uniform(2.0, 4.0)
            actions = [A.move_to(random.uniform(0, self.size.w), -100, fall_time), A.remove()]
            meteor.run_action(A.sequence(actions))
            self.items.append(meteor)
        else:
            coin = Coin(parent=self)
            coin.position = (random.uniform(20, self.size.w-20), self.size.h + 30)
            fall_time = random.uniform(2.0, 4.0)
            actions = [A.move_by(0, -(self.size.h + 60), fall_time), A.remove()]
            coin.run_action(A.sequence(actions))
            self.items.append(coin)
        # Every spawn nudges the global game speed up slightly, capped at 3x.
        self.speed = min(3, self.speed + 0.005)

    def collect_item(self, item, value=10):
        # A coin was caught: play a sound, drop the coin, bump the score.
        sound.play_effect('digital:PowerUp7')
        item.remove_from_parent()
        self.items.remove(item)
        self.score += value
        self.score_label.text = str(self.score)
# Script entry point: run the game scene in portrait with an FPS readout.
if __name__ == '__main__':
    run(Game(), PORTRAIT, show_fps=True)
"[email protected]"
]
| |
165dab7e57e2a352300f0576717c1cdae8927d4b | 5e2dddce9c67d5b54d203776acd38d425dbd3398 | /spacy/lang/es/syntax_iterators.py | 869f404e040edf4e143bf6e80dab2eaac4390688 | [
"MIT"
]
| permissive | yuxuan2015/spacy_zh_model | 8164a608b825844e9c58d946dcc8698853075e37 | e89e00497ab3dad0dd034933e25bc2c3f7888737 | refs/heads/master | 2020-05-15T11:07:52.906139 | 2019-08-27T08:28:11 | 2019-08-27T08:28:11 | 182,213,671 | 1 | 0 | null | 2019-04-19T06:27:18 | 2019-04-19T06:27:17 | null | UTF-8 | Python | false | false | 1,695 | py | # coding: utf8
from __future__ import unicode_literals
from ...symbols import NOUN, PROPN, PRON, VERB, AUX
def noun_chunks(obj):
    """Yield (start, end, label) spans of base noun phrases in *obj*'s doc.

    A chunk is grown around each nominal head (NOUN, PROPN or PRON) by
    absorbing permitted left and right dependents; expansion to the right
    stops as soon as a verb/auxiliary or a stop label (punctuation) would
    fall inside the span.
    """
    doc = obj.doc
    np_label = doc.vocab.strings.add('NP')
    # Dependency labels that may extend a chunk leftwards / rightwards,
    # and labels that terminate the expansion.
    left_labels = ['det', 'fixed', 'neg']  # ['nunmod', 'det', 'appos', 'fixed']
    right_labels = ['flat', 'fixed', 'compound', 'neg']
    stop_labels = ['punct']
    np_left_deps = [doc.vocab.strings[label] for label in left_labels]
    np_right_deps = [doc.vocab.strings[label] for label in right_labels]
    stop_deps = [doc.vocab.strings[label] for label in stop_labels]

    def noun_bounds(root):
        # Scan the left dependents (rightmost first) for the chunk start.
        left_bound = root
        for token in reversed(list(root.lefts)):
            if token.dep in np_left_deps:
                left_bound = token
        # Extend to the right while no verb or stop label interrupts the span.
        right_bound = root
        for token in root.rights:
            if token.dep in np_right_deps:
                left, right = noun_bounds(token)
                blocked = any(
                    is_verb_token(t) or t.dep in stop_deps
                    for t in doc[left_bound.i: right.i]
                )
                if blocked:
                    break
                right_bound = right
        return left_bound, right_bound

    token = doc[0]
    while token and token.i < len(doc):
        if token.pos in (PROPN, NOUN, PRON):
            left, right = noun_bounds(token)
            yield left.i, right.i + 1, np_label
            token = right
        token = next_token(token)
def is_verb_token(token):
    """Return True when *token* is tagged as a verb or an auxiliary."""
    return token.pos == VERB or token.pos == AUX
def next_token(token):
    """Return the token immediately following *token*, or None at the end.

    Token.nbor() raises IndexError when the token has no neighbour (it is
    the last token of the doc); catch exactly that instead of the original
    bare ``except:``, which silently swallowed every error (including bugs
    such as AttributeError on a wrong argument type).
    """
    try:
        return token.nbor()
    except IndexError:
        return None
# Registry consumed by the language machinery: maps each syntax-iterator
# name to its implementation for this language.
SYNTAX_ITERATORS = {
    'noun_chunks': noun_chunks
}
| [
"[email protected]"
]
| |
4450db57e64db6586c682bfbdf846ffb456d9e4e | 4d718292ec9f90444eeda13d18febb10757da894 | /mission 11/classement.py | e50dc9fbba32e99394191730b85603be4aa7080f | []
| no_license | rverschuren/Info | b40fb04a6260dacfc95d12e63c99abd82b140e06 | c9aa0bdc1b026c8ba8134b878b5fae7d49d75e19 | refs/heads/master | 2020-04-16T07:29:49.847812 | 2019-01-14T14:50:18 | 2019-01-14T14:50:18 | 165,389,281 | 1 | 2 | null | 2019-01-12T18:56:01 | 2019-01-12T13:12:46 | Python | UTF-8 | Python | false | false | 3,812 | py | class Classement :
"""
Une implémentation primitive de classement, non ordonnée et de capacité fixe.
@author Kim Mens
@version 02 Décembre 2018
"""
__maxcapacity = 10
def __init__(self):
"""
@pre: -
@post: un classement vide de taille 0 a été créé
"""
self.__resultats = {} # dictionnaire de résultats actuelle (clé = coureur; valeur = résultat)
self.__size = 0 # nombre de résultats actuel (initialement 0, maximum __maxcapacity)
def size(self):
"""
Méthode accesseur.
Retourne la taille de ce classement.
@pre: -
@post: Le nombre de résultats actuellement stockés dans ce classement a été retourné.
"""
return self.__size
def add(self,r):
"""
Ajoute un résultat r dans ce classement.
@pre: r est une instance de la classe Resultat
@post: Le résultat r a été inséré selon l'ordre du classement.
En cas d'ex-aequo, r est inséré après les autres résultats de même ordre.
ATTENTION : L'implémentation actuelle ne respecte pas encore la post-condition!
Le résultat est simplement ajouté à la dictionnaire, sans tenir compte de l'ordre.
Une dictionnaire ne donne pas de garanties sur l'ordre des éléments.
"""
if self.size() >= self.__maxcapacity :
raise Error("Capacity of classement exceeded")
else :
self.__size += 1
self.__resultats[r.coureur()] = r
def get(self,c):
"""
Retourne le résultat d'un coureur donné.
@pre c est un Coureur
@post retourne le premier (meilleur) Resultat r du coureur c dans le
classement. Retourne None si le coureur ne figure pas (encore)
dans le classement.
"""
return self.__resultats.get(c)
def get_position(self,c):
"""
Retourne la meilleure position d'un coureur dans ce classement.
@pre c est un Coureur
@post retourne un entier représentant la position du coureur c dans ce classement,
à partir de 1 pour la tête de ce classement. Si le coureur figure plusieurs fois
dans le classement, la première (meilleure) position est retournée.
Retourne -1 si le coureur ne figure pas dans le classement.
ATTENTION : L'implémentation actuelle ne respecte pas encore la post-condition!
Etant donné que la dictionnaire de résultats ne connaît pas de position,
pour le moment cette méthode retourne toujours "***position inconnue***".
A vous de la corriger en utilisant une liste chaînée ordonnée
comme structure de données, plutôt qu'une simple dictionnaire.
"""
return "***position inconnue***"
def remove(self,c):
"""
Retire un résultat du classement.
@pre c est un Coureur
@post retire le premier (meilleur) résultat pour le coureur c du classement.
c est comparé au sens de __eq__. Retourne c si un résultat a été retiré,
of False si c n'est pas trouvé dans la liste.
"""
self.__size -= 1
return self.__resultats.pop(c,False)
def __str__(self):
"""
Méthode magique
Retourne une représentation string de cet objet.
@pre: -
@post: Retourne une représentation de ce classement sous forme d'un string,
avec une ligne par résultat.
"""
s = ""
d = self.__resultats
for c in d:
s += " " + str(self.get_position(c)) + " > " + str(d[c]) + "\n"
return s | [
"[email protected]"
]
| |
0864a55af4f109e92a6b1185d04837dc723f87a7 | e5d130e183b5dea1b7aad23a047c703fa0d2b3bf | /lightbus/transports/__init__.py | d0a7d70c722ce0c3f6df21091b256155c18899f5 | [
"Apache-2.0"
]
| permissive | adamcharnock/lightbus | 4a86428b8203bfe98f77a32375ac961ef398ce16 | cf892779a9a9a8f69c789ffa83c24acfb7f9a336 | refs/heads/master | 2023-08-26T04:19:39.395735 | 2023-08-23T11:07:44 | 2023-08-23T11:07:44 | 94,617,214 | 193 | 22 | Apache-2.0 | 2023-08-10T21:21:51 | 2017-06-17T10:39:23 | Python | UTF-8 | Python | false | false | 534 | py | from lightbus.transports.base import (
RpcTransport,
ResultTransport,
EventTransport,
SchemaTransport,
Transport,
)
from lightbus.transports.debug import (
DebugRpcTransport,
DebugResultTransport,
DebugEventTransport,
DebugSchemaTransport,
)
from lightbus.transports.redis.rpc import RedisRpcTransport
from lightbus.transports.redis.result import RedisResultTransport
from lightbus.transports.redis.event import RedisEventTransport
from lightbus.transports.redis.schema import RedisSchemaTransport
| [
"[email protected]"
]
| |
23f3a9b619600c2c45f879384f3a51dda94f5c3e | 38466811d0e12a8f755bae58d7244622ef5f4d9b | /leetcode/200/141_linked_list_cycle.py | 9e5262a7110cf85407e3ce7e9183543e977219f0 | []
| no_license | andysitu/algo-problems | 4ab5a2b6591f0c0d84174b69598f30bc354ff8aa | 35c88dc747e7afa4fdd51d538bc80c4712eb1172 | refs/heads/master | 2023-06-24T15:55:39.019652 | 2021-02-26T20:31:07 | 2021-02-26T20:31:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def hasCycle(self, head: ListNode) -> bool:
        """Return True if the singly linked list starting at *head* has a cycle.

        Floyd's tortoise-and-hare: a fast pointer advances two nodes per
        iteration while a slow pointer advances one. In an acyclic list the
        fast pointer reaches None; in a cyclic list the two pointers must
        eventually meet. O(n) time, O(1) extra space.

        Bug fix: the original source was truncated after the final
        ``if`` and never returned True on pointer meeting.
        """
        if head is None:
            return False
        slow = head
        fast = head
        while True:
            # Advance the fast pointer twice, bailing out at the list's end.
            fast = fast.next
            if fast is None:
                return False
            fast = fast.next
            if fast is None:
                return False
            slow = slow.next
            # The pointers meeting proves a cycle exists.
            if slow is fast:
                return True
"[email protected]"
]
| |
9551d519b20dfcc4061ff956e357f5bdb2481c6d | 7cd8ee14711eaf33cee0d9e06e78a974fc579242 | /Linkedin/Linkedin/spiders/linkedin_distinct_12logincount.py | 73e600bb84ee61402c91c2f94e2db905b9cf883c | []
| no_license | Chandler-Song/pi | c618117dfdd9a7496a57c69f029851e94787f591 | aebc6d65b79ed43c66e7e1bf16d6d9f31b470372 | refs/heads/master | 2022-03-13T02:44:30.452673 | 2019-02-19T09:38:45 | 2019-02-19T09:38:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,482 | py | from linkedin_voyager_functions import *
class Companylidsurls(object):
    """Export LinkedIn connection profile URLs to a dated CSV file.

    Selects connection rows scraped on/after 2017-08-20, excludes members
    that already appeared in the pre-cutoff metadata, de-duplicates the
    remaining (profile_url, member_id) pairs and writes them out as CSV.
    NOTE(review): Python 2 code; relies on helpers (get_mysql_connection,
    fetchall, normalize, DB_HOST, ...) star-imported from
    linkedin_voyager_functions -- confirm their signatures against that module.
    """
    def __init__(self, *args, **kwargs):
        # Open a connection/cursor on the FACEBOOK database.
        self.con, self.cur = get_mysql_connection(DB_HOST, 'FACEBOOK', '')
        #self.qu1 = 'select profile_sk , connections_profile_url from linkedin_connections where date(modified_at)>"2017-04-17" and date(modified_at)<"2017-08-21" and member_id = "%s"'
        # Members already known before the 2017-08-20 cutoff.
        self.qu1 = "select distinct member_id from linkedin_meta where date(modified_at) < '2017-08-20'"
        # Members scraped after the cutoff.
        self.qu2 = "select distinct member_id from linkedin_connections where date(modified_at) > '2017-08-20'"
        # Full connection rows scraped on/after the cutoff.
        self.query2 = "select connections_profile_url, member_id, sk from FACEBOOK.linkedin_connections where date(modified_at) >= '2017-08-20'"
        # Output CSV named after today's date; delete any stale copy first.
        self.excel_file_name = 'linkedin_connections_profiles_%s.csv'%str(datetime.datetime.now().date())
        if os.path.isfile(self.excel_file_name):
            os.system('rm %s'%self.excel_file_name)
        oupf = open(self.excel_file_name, 'ab+')
        self.todays_excel_file = csv.writer(oupf)
        self.headers = ['Linkedin_Profile_url', 'member_id']
        self.todays_excel_file.writerow(self.headers)
    def main(self):
        """with open('duplicate_members', 'r') as f:
        rows = f.readlines()
        for inde, row in enumerate(rows):
        row = row.strip('\n')
        one_ = fetchmany(self.cur, self.qu1 % row)
        pf_sk = '<>'.join([i[0] for i in one_])
        pf_url = '<>'.join([i[0] for i in one_])
        file("duplicate_member_info","ab+").write("%s, %s, %s\n" % (row, pf_sk, pf_url))"""
        # Fetch member ids from before and after the cutoff date.
        re1 = fetchall(self.cur, self.qu1)
        re2 = fetchall(self.cur, self.qu2)
        re2 = [str(i[0]) for i in re2]
        re1 = [str(i[0]) for i in re1]
        # Members present in both sets are excluded from the export below.
        new_list = []
        for i in re1:
            if i in re2:
                new_list.append(i)
        print len(new_list)
        total_distinct_list = []
        total_connection_records = fetchall(self.cur, self.query2)
        for tocr in total_connection_records:
            linkedin_profilef, member_id, connection_sk = tocr
            if member_id in new_list:
                continue
            total_distinct_list.append((linkedin_profilef, member_id))
        print len(total_distinct_list), 'total_length'
        print len(set(total_distinct_list)), 'total_distinct_lenth'
        # De-duplicate and write one CSV row per (profile_url, member_id).
        total_distinct_list = set(total_distinct_list)
        for tdl in total_distinct_list:
            lk_url, mem_id = tdl
            values = [lk_url, mem_id]
            values = [normalize(i) for i in values]
            self.todays_excel_file.writerow(values)
if __name__ == '__main__':
Companylidsurls().main()
| [
"[email protected]"
]
| |
a7e316b3e4294deab2c4be72af3994d2504b8d49 | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/test/test_update_creative_response_wrapper.py | cca6a9dbe84cb7e4f9ee1164d8a578b819dd20ce | [
"Apache-2.0"
]
| permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 1,076 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import baiduads
from baiduads.common.model.api_response_header import ApiResponseHeader
from baiduads.creative.model.update_creative_response_wrapper_body import UpdateCreativeResponseWrapperBody
globals()['ApiResponseHeader'] = ApiResponseHeader
globals()['UpdateCreativeResponseWrapperBody'] = UpdateCreativeResponseWrapperBody
from baiduads.creative.model.update_creative_response_wrapper import UpdateCreativeResponseWrapper
class TestUpdateCreativeResponseWrapper(unittest.TestCase):
    """Unit-test stub for the UpdateCreativeResponseWrapper model."""

    def setUp(self):
        """No fixtures are needed for this stub."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testUpdateCreativeResponseWrapper(self):
        """Placeholder test for constructing UpdateCreativeResponseWrapper."""
        # FIXME: construct object with mandatory attributes with example values
        # model = UpdateCreativeResponseWrapper()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
ebbc23d30dbea2dafb4b6a71b92a5ccb4c9bb341 | d1c352676563b2decacfad19120001959b043f05 | /superset/migrations/versions/a33a03f16c4a_add_extra_column_to_savedquery.py | 07e0b05a1c0c6e035dd0a4931949130430b03579 | [
"Apache-2.0",
"CC-BY-4.0",
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
]
| permissive | Affirm/incubator-superset | c9a09a10289b4ebf8a09284a483bca93725a4b51 | 421183d3f46c48215e88e9d7d285f2dc6c7ccfe6 | refs/heads/master | 2023-07-06T11:34:38.538178 | 2019-05-22T23:39:01 | 2019-05-22T23:39:01 | 128,005,001 | 1 | 3 | Apache-2.0 | 2023-03-20T19:49:14 | 2018-04-04T04:02:42 | JavaScript | UTF-8 | Python | false | false | 2,090 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add extra column to SavedQuery
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Revision ID: a33a03f16c4a
Revises: fb13d49b72f9
Create Date: 2019-01-14 16:00:26.344439
"""
# revision identifiers, used by Alembic.
revision = 'a33a03f16c4a'
down_revision = 'fb13d49b72f9'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable ``extra_json`` TEXT column to ``saved_query``.

    ``batch_alter_table`` is used so the migration also works on backends
    such as SQLite that cannot ALTER TABLE in place.
    """
    with op.batch_alter_table('saved_query') as batch_op:
        batch_op.add_column(sa.Column('extra_json', sa.Text(), nullable=True))
def downgrade():
    """Revert :func:`upgrade` by dropping ``extra_json`` from ``saved_query``."""
    with op.batch_alter_table('saved_query') as batch_op:
        batch_op.drop_column('extra_json')
| [
"[email protected]"
]
| |
49d99d025201045a3100ae9ab2515e297887e22a | 9b10d8482a7af9c90766747f5f2ddc343871d5fa | /Gemtek/AutoTest/Sprinkler-Auto-Test/appium/modules/android/main_screen.py | 53dfab1b87c290967aa74c0b91465fb24e0c9366 | []
| no_license | DarcyChang/MyProjects | 86d33f5cf8bdfd4b21e64922e4eb25c1afc3c135 | 47efb2dfe13ace264f8943b59b701f39f23c4c17 | refs/heads/master | 2021-05-12T12:43:39.255082 | 2020-09-23T06:42:03 | 2020-09-23T06:42:03 | 117,419,269 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,525 | py | import unittest
from time import sleep
from appium import webdriver
import android.verify.exist
import android.verify.next_page
from appium.webdriver.common.touch_action import TouchAction
def add_device(self):
    """Tap the '+' (add sprinkler) button on the main screen."""
    add_button = self.driver.find_element_by_id("com.blackloud.wetti:id/ivAddSprinkler")
    self.assertIsNotNone(add_button)
    add_button.click()
    sleep(1)
# TODO timeout 30 seconds
el = self.driver.find_element_by_id("com.blackloud.wetti:id/ivThum")
# "com.blackloud.wetti:id/tvName" is too.
self.assertIsNotNone(el)
action = TouchAction(self.driver)
i = 1
while(1):
try:
action.tap(el).perform()
# el.click()
sleep(1)
try:
android.verify.next_page.verify_binging_success(self)
except:
android.verify.next_page.verify_binging_network_success(self)
break
except:
sleep(1)
i += 1
if(i == 30):
print("[Gemtek] choose device TIME OUT !")
break
sleep(1)
# TODO : 1. There are four point can touch that choose sprinkler function.
# two are recourse-id, another are class.
# Maybe we can do it by random.
# 2. Binging two or more devices.
def my_account(self):
el = self.driver.find_element_by_id("com.blackloud.wetti:id/imvAbLeft")
self.assertIsNotNone(el)
el.click()
sleep(2)
def buy_blackloud_sprinkler(self):
el = self.driver.find_element_by_id("com.blackloud.wetti:id/tvBuyNetti")
self.assertIsNotNone(el)
el.click()
sleep(5)
def user_manual(self):
el = self.driver.find_element_by_id("com.blackloud.wetti:id/tvUserManual")
self.assertIsNotNone(el)
el.click()
sleep(5)
def feature_introduction(self):
el = self.driver.find_element_by_id("com.blackloud.wetti:id/tvTourGuide")
self.assertIsNotNone(el)
el.click()
sleep(1)
def contact_us(self):
el = self.driver.find_element_by_id("com.blackloud.wetti:id/tvContactUs")
self.assertIsNotNone(el)
el.click()
sleep(5)
def about_blackloud(self):
el = self.driver.find_element_by_id("com.blackloud.wetti:id/tvAbout")
self.assertIsNotNone(el)
el.click()
sleep(5)
def legal_and_privacy_policy(self):
el = self.driver.find_element_by_id("com.blackloud.wetti:id/tvUnderText")
self.assertIsNotNone(el)
el.click()
sleep(5)
if __name__ == '__main__':
print("[Gemtek] main_screen.py")
| [
"[email protected]"
]
| |
ef7dcf27560b561e80bb4f4a68f159d63bf00127 | bbf1ae079309eca11270422d3f0d259d1515d430 | /numerical-tours/python/nt_solutions/ml_3_classification/exo5.py | 3c0f3f3cda5068fd794b1a41a27c032ac538f66e | [
"BSD-2-Clause"
]
| permissive | ZichaoDi/Di_MATLABTool | 5e6a67b613c4bcf4d904ddc47c2744b4bcea4885 | c071291c63685c236f507b2cb893c0316ab6415c | refs/heads/master | 2021-08-11T07:28:34.286526 | 2021-08-04T18:26:46 | 2021-08-04T18:26:46 | 149,222,333 | 9 | 5 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | sigma_list = np.array( [.1, .5, 1, 4] )
niter = 4000
plt.clf
for io in np.arange(0, np.size(sigma_list)):
sigma = sigma_list[io]
# grad descent
K = kappa(X,X,sigma)
tau = .5
if io==4:
tau = .05
h = np.zeros((n,1))
for i in np.arange(0,niter):
h = h - tau * nablaF(h,K,y)
# evaluate on a grid
K1 = kappa(G,X,sigma)
Theta = theta( K1.dot(h) )
Theta = Theta.reshape((q,q))
# Display the classification probability.
plt.subplot(2,2,io+1)
plt.imshow(Theta.transpose(), origin="lower", extent=[-tmax, tmax, -tmax, tmax])
plt.plot(X[I,0], X[I,1], '.')
plt.plot(X[J,0], X[J,1], '.')
plt.axis('equal')
plt.axis('off')
plt.title('$\sigma=' + str(sigma) + '$')
| [
"[email protected]"
]
| |
54f9c26e1c1a8e2e8d1f6c35b715163db168de74 | ac3227ef8da077bfb871f8e515bda92ac96385f9 | /pressure17.py | 3631628a3c32772c9b545461956a5c1f062cb4a9 | []
| no_license | anlitsai/ngc2146-moment | 07dac25a981b404a55e00b5a17f314861324efd0 | 6060239f1919bb7c316dfe93465d1f5fa240bd56 | refs/heads/master | 2022-05-20T18:05:26.343243 | 2020-04-12T21:15:30 | 2020-04-12T21:15:30 | 255,170,762 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,365 | py | #!/usr/bin/env python
# NGC 2146
# --- import constant ------------------------- #
import math
#import pyfits
# --- constant -------------------------------- #
c=3.0e10 # light speed
pc=3.26*c*365*86400
kpc=pc*1.0e3
ev=1.6e-12
G=6.67e-8 # gravitational constant
k_B=1.38e-16 # boltzmann constant
Msun=1.99e33 # solar mass
Msun_pc2=Msun/pc**2
Lsun=3.9e33 # solar luminosity
N_A=6.02e23 # Avogadro constant
m_p=1.67e-24
E_ion_ev=13.6 # [eV]
E_bnd_ev=4.52 # [eV]
# --- parameter ------------------------------- #
print "============"
print "NGC 2146"
print "============"
print "parameters"
print "------------"
i_deg=70.0
i_pi=i_deg/180.0*math.pi
sin_i=math.sin(i_pi)
cos_i=math.cos(i_pi)
D_Mpc=17.2
X_CO2H2=1.4/3.0
H22mol=1.36
XH=X_CO2H2*H22mol
print "inclination",i_pi,sin_i,cos_i
v_motf_kms=200.0 # km/s (Tsai et al. 2009)
v_sound_kms=1.0e3
T_mol=30.0 # K (Tsai et al. 2009)
T_xotf=1.0e6 # K
EI=8.0e62 # cm^-3 (Inui et al. 2005)
R_xotf_pc=6.3*1000*4.8/3.5 # (Inui et al. 2005)
R_xotf=R_xotf_pc*pc
V_xotf=(4.0/3.0*math.pi)*R_xotf**3 # (Inui et al. 2005)
V_xotf_Inui=(4.0/3.0*math.pi)*(6.3e3*pc)**3 # (Inui et al. 2005)
fill=0.01 # (Inui et al. 2005)
print "filling factor =", fill
n_mol=100.0 # cm^-3 (Tsai et al. 2009)
rho_mol=n_mol/N_A
n_xotf=(EI/V_xotf/fill)**(0.5) # cm^-3 (Iuni et al. 2005)
print "n_xotf =", n_xotf, "[cm^-3]"
print "n_xotf (Inui) =", 5.1e-3*fill**(-0.5), "[cm^-3]"
print "n_mol =", n_mol, "[cm^-3]"
print "rho_mol", '%.2e' %(rho_mol), "[g cm^-3]"
kT_xotf_ev=0.5e3
kT_xotf=kT_xotf_ev*ev
print "kT [ev] of xray outflow =", kT_xotf_ev,"ev"
print "kT [erg] of xray outflow =", kT_xotf,"erg"
print "V_xotf =", V_xotf
print "V_xotf (Inui) =", V_xotf_Inui
M_xotf=n_xotf*m_p*V_xotf*fill
M_xotf_Msun=M_xotf/Msun
M_xotf_Msun_Inui=1.3e8*fill**0.5
M_xotf_Inui=M_xotf_Msun_Inui*Msun
M_galdsk_Msun=8.67e10
M_galdsk_Msun=2.18e11
M_galdsk=M_galdsk_Msun*Msun
L_xotf=1.3e40 # (Inui et al. 2005)
E_xotf=1.0e56 # (Inui et al. 2005)
print "L_xotf =", L_xotf, "[erg/s]"
print "E_xotf =", E_xotf, "[erg]"
effi_x_thrm=0.3 # (Strickland & Stevens 2000)
effi_x_mech=0.01 # (Strickland & Stevens 2000)
R_motf=2.0e3*pc # (Tsai et al. 2009)
SFR=10.0 # Msun/yr (Greve et al. 2000)
SNrate= 0.15 # yr^-1 (Tarchi et al. 2000)
print "SN rate =", SNrate, "[yr^-1]"
effi_mass2rad=0.001 # (Thompson et al. 2005)
as2pc=80.0
px2as=0.2
bm2as2=3.3*2.8
R_starburst_pc=700.0 # (@tau=1# Thompson et al. 2005)
# R_starburst_pc=1000.0 # (our data)
R_starburst_pc_greve=1250.0 # (Greve 2000)
R_conti_pc=2560.0/2 # (our data) (/iapetus/data/satoki_data/ngc2146/20100130.conti89GHz )
V_starburst_pc3_greve=2.0e10 # (Greve 2000)
z_starburst_pc=40.0 # (our calculation) (iapetus:/iapetus/thesis/phd/calculation/veldisp13.sh)
z_starburst_pc_greve=500.0 # (Greve 2000)
#z_starburst_pc_greve=500.0 # (Greve 2000)
tau=10.0
d_mpc=17.2 # Mpc
alpha=1.0 # (Chevalier 1985)
beta=1.0 # (Chevalier 1985)
a1=0.3 # Tsai 2009
b1=0.32 # Tsai 2009
c1=256.39 # Tsai 2009
timescale=1.0e7 # [yr] ; ourdata
v_rms_kms=11.16
v_rms=v_rms_kms*1.0e5
def surf_den_dyn(r_kpc):
v_kms=r_kpc*c1/(r_kpc**a1+r_kpc**(1-b1))/sin_i
v=v_kms*1.0e5
r=r_kpc*kpc
r_pc=r_kpc*1000
m_dyn=r*v**2/G # Orange book p.958
m_dyn_Msun=m_dyn/Msun
sd_dyn=m_dyn/(math.pi*r**2)
sd_dyn_Msunpc2=m_dyn_Msun/(math.pi*r_pc**2)
# sd_dyn_Msunpc2=sd_dyn/Msun_pc2
z_pc=(v_rms**2/(2*math.pi*G*sd_dyn))/pc
print '%.2f' %(sd_dyn_Msunpc2),"[Msun/pc2] (r <",r_kpc,"kpc ; z =",'%.2f' %(z_pc),"pc)",'%.2f' %v_kms,"[km/s]", '%.2e' %(m_dyn_Msun),"[Msun]"
return sd_dyn_Msunpc2
sd05=surf_den_dyn(0.5)
sd08=surf_den_dyn(0.8)
sd10=surf_den_dyn(1.0)
sd12=surf_den_dyn(1.2)
sd_dyn=sd12
sd15=surf_den_dyn(1.5)
sd24=surf_den_dyn(2.4)
sd28=surf_den_dyn(2.8)
sd30=surf_den_dyn(3.0)
sd32=surf_den_dyn(3.2)
#sd35=surf_den_dyn(3.5)
#sd50=surf_den_dyn(5)
#sd100=surf_den_dyn(10)
# --- gravitational pressure (Gunn & Gott 1972) ------- #
#def surf_den_kazushi(S_CO_JyBmMSpx,px,i_deg,D_Mpc):
def surf_den_kazushi(S_CO_JyBmMS,px,type):
    """Gas surface density and mass from a summed CO flux measurement.

    S_CO_JyBmMS: summed CO flux in mJy/beam * m/s over `px` map pixels.
    `type` is only a label for the printed log line.
    Returns (surface density [Msun/pc^2], gas mass [g]).
    Uses module globals px2as, bm2as2, cos_i, XH, D_Mpc, Msun.
    """
    n_as=px*px2as**2             # pixel count -> arcsec^2
    n_bm=n_as/bm2as2             # arcsec^2 -> number of beams
    S_CO=S_CO_JyBmMS/1000*n_bm   # total flux [Jy km/s]
    I_CO=S_CO/n_as               # mean intensity [Jy km/s arcsec^-2]
    # NOTE(review): 5.0e2 and 1.18e4 look like standard CO-to-gas conversion
    # prefactors (cf. the Sakamoto 1995 reference listed below) -- confirm.
    sd_Msunpc2=(5.0e2*cos_i*I_CO)*XH
    M_gas_Msun=(1.18e4*D_Mpc**2*S_CO)*XH#*cos_i
    M_gas=M_gas_Msun*Msun
    print '%.2f' %I_CO,"[Jy km/s as-2]", '%.2e' %S_CO,"[Jy km/s]",'%.0f' %n_as,"[as2]",'%.0f' %n_bm,"[beam]",'%.2f' %sd_Msunpc2,"[Msun/pc2]",'%.2e' %M_gas_Msun,"[Msun]",type
    return sd_Msunpc2,M_gas
sd=surf_den_kazushi(4.1472E+02,11022,"motf")
sd=surf_den_kazushi(1.429E+03,84370,"motf")
sd=surf_den_kazushi(7.481E+02,46830,"motf")
sd=surf_den_kazushi(1.327E+03,69990,"motf")
sd=surf_den_kazushi(6.669E+02,57300,"motf")
sd_motf_Msunpc2=sd[0]
M_motf=sd[1]
sd=surf_den_kazushi(1.1243E+04,38685,"unkown")
sd=surf_den_kazushi(1.3559E+04,30802,"45as")
sd=surf_den_kazushi(1.3182E+04,31100,"mdsk")
sd=surf_den_kazushi(2.054E+04,186790,"mdsk")
sd=surf_den_kazushi(2.111E+04,180400,"mdsk")
sd=surf_den_kazushi(3.730E+03,53030,"mbbl")
sd=surf_den_kazushi(3.356E+04,64050,"0.8kpc")
sd=surf_den_kazushi(3.652E+04,54540,"0.8kpc")
sd=surf_den_kazushi(3.049E+04,93940,"1.2kpc")
sd=surf_den_kazushi(2.431E+04,128520,"1.2kpc")
sd=surf_den_kazushi(2.139E+04,166460,"1.6kpc")
sd=surf_den_kazushi(1.723E+04,214640,"1.6kpc")
sd=surf_den_kazushi(1.579E+04,256930,"2.4kpc")
sd=surf_den_kazushi(1.381E+04,301520,"2.4kpc")
sd=surf_den_kazushi(1.466E+04,281700,"2.4kpc")
sd=surf_den_kazushi(1.364E+04,306950,"2.8kpc")
sd=surf_den_kazushi(1.364E+04,306950,"tvbox;30as")
sd_motf=sd_motf_Msunpc2*Msun_pc2
sd_xotf=sd_motf*(n_xotf/n_mol)*(M_xotf/M_motf)
sd_xotf_Msunpc2=sd_xotf*Msun_pc2
print "------------"
print "calculation results"
print "------------"
print "+[ grav P = 2*pi*G*(surf_den_xry)*(surf_den_dynamical_mass) ]+"
def p_grav(sd,type):
    """Gravitational pressure 2*pi*G*sd*sd_dyn on gas of surface density sd.

    `sd_dyn` is the module-global dynamical surface density (set to the
    1.2 kpc value above, in Msun/pc^2; Msun_pc2 converts it to cgs).
    `type` is only a label for the printed log line.
    """
    gravP=2*math.pi*G*sd*sd_dyn*Msun_pc2
    print " o", type, "grav P:", '%.2e' %(gravP)
    return gravP
p_grav_motf=p_grav(sd_motf,"mol. outflow")
p_grav_xotf=p_grav(sd_xotf, "xray outflow")
# unit checked
# --- ram pressure (Gunn & Gott 1972) ----------------- #
print "------------"
print "+[ ram P = rho*(relative velocity)^2 ]+"
def v_esc(M_Msun,R_kpc):
v=math.sqrt(2*G*M_Msun*Msun/(R_kpc*1.0e3*pc))/1.0e5
print "escape velocity",v,"[km/s]"
return v
v_2kpc=v_esc(8.67e10,2)
v_1kpc=v_esc(8.67e10,1)
#E_bnd_ion_particle=(E_bnd_ev+E_ion_ev)*ev
#E_bnd_ion_mass=E_bnd_ion_particle*N_A*M_xotf_g
def ramP(effi_x1,effi_x2,type1,type2):
    """Ram pressure [dyne cm^-2] of the X-ray outflow on the molecular gas.

    Works backwards from the observed X-ray energy E_xotf (module global):
    total injected energy is E_xotf / (effi_x1*effi_x2); the mechanical
    remainder sets the X-ray wind speed via E = (1/2) M v^2, and the ram
    pressure follows from rho_mol * v_rel^2. type1/type2 are log labels.
    """
    print "  ", type1, effi_x1
    print "  ", type2, effi_x2
    effi_x=effi_x1*effi_x2
    E_ttl=E_xotf/effi_x
    print "  total Energy =",'%.2f' %(E_ttl), "[erg]"
    # mechanical energy: total minus the fraction that came out as X-rays
    E_mech=E_ttl*(1-effi_x)
    # wind speed from E_mech = (1/2) M_xotf v^2, converted to km/s
    v_xry_kms=math.sqrt(E_mech*2/M_xotf)/1.0e5
    print "  v_xry = ", '%.2f' %(v_xry_kms), "[km/s]"
    # speed relative to the slower molecular outflow
    v_rel_kms=v_xry_kms-v_motf_kms
    v_rel=v_rel_kms*1.0e5
    p=rho_mol*v_rel**2
    p_K=p/k_B  # same pressure expressed as n*T [K cm^-3]
    print "  v_rel = ", '%.2f' %(v_rel_kms), "[km/s]"
    print " o ram P: ", '%.2e' %(p),"[dyne cm^-2]"
    print " o ram P: ", '%.2e' %(p_K), "[K cm^-3]"
    return p
# unit checked
type2="Lx increasing factor"
print "the total energy released by SNe and stellar winds (doesn't include the K.E)"
p_ram1=ramP(0.1,1,"thermalization efficiency (lower)",type2)
p_ram1=ramP(0.3,1,"thermalization efficiency",type2)
p_ram1=ramP(0.5,1,"thermalization efficiency",type2)
p_ram1=ramP(1,1,"thermalization efficiency (upper)",type2)
print "radiating the mechanical energy supplied by the starburst"
p_ram1=ramP(0.01,10,"starburst energy injection rate (thin-disk)",type2)
p_ram1=ramP(0.05,3,"starburst energy injection rate (thick-disk, lower)",type2)
p_ram1=ramP(0.2,3,"starburst energy injection rate (thick-disk, upper)",type2)
# --- SN explosions pressure effect on hot ISM (Thompson et al 2005) -- #
print "------------"
print "+[ shock P = 10^-12*E_xry^(17/14)*n_mol^(-4/7)*SNrate/Vol ]+"
def A_pc2(R):
    """Disk area [pc^2] for radius R [pc]."""
    # (the original kept a commented-out debug print of the starburst area)
    return math.pi * R ** 2
def V_pc3(R, z):
    """Cylinder volume [pc^3] for radius R [pc] and height z [pc]."""
    return math.pi * R ** 2 * z
# SNrate_V=SNrate/V_starburst
V_conti=V_pc3(R_conti_pc,z_starburst_pc)
V_greve1=V_pc3(R_starburst_pc_greve,z_starburst_pc_greve)
V_greve2=2.0e10 # (Greve 2000)
E_init=E_xotf
n_ambi=n_mol
def p_SNhot(V,type):
    """Pressure of SN shock-heated hot ISM for a starburst volume V [pc^3].

    Implements P = 1e-12 * (E/1e51)^(17/14) * (n/0.01)^(-4/7)
    * (SN rate per volume / 1e-13), using module globals SNrate, E_init
    and n_ambi. `type` labels the data source in the printed log.
    """
    rate_V=SNrate/V  # SN rate per unit volume [yr^-1 pc^-3]
    P=1.0e-12*(E_init/1.0e51)**(17.0/14)*(n_ambi/0.01)**(-4.0/7)*(rate_V/1.0e-13) # [erg cm^-3]
    print "  data from", type
    print "  starburst Volume = ", '%.2e' %(V), "[pc^3]"
    print " o SN shock-heated P: ", '%.2e' %(P)
    # print "SN explosion P (shock-heated hot ISM): ", P
    return P
# E_init [erg]
# n_ambi [cm^-3]
# SNrate_V [yr^-1 pc^-3]
# SNrate_V=SNrateate per volume
p_SNhot_conti=p_SNhot(V_conti,"89GHz Continuum")
p_SNhot_greve1=p_SNhot(V_greve1,"Greve 2000 (1)")
p_SNhot_greve2=p_SNhot(V_greve2, "Greve 2000 (2)")
# unit checked
# --- thermal pressure -------------------------------- #
print "------------"
print "+[ thermal P = n*k*T ]+"
p_thm_mol=n_mol*k_B*T_mol
p1=1000*k_B*100
p2=100*k_B*10
print "pppp",p1,p2
p_thm_xotf=2*n_xotf*kT_xotf
p_thm_xotf_inui=4.9e-12*fill**(-0.5)
print " o molecular gas thermal P: ", p_thm_mol
print " o ionized gas thermal P: ", '%.2e' %(p_thm_xotf)
print " o ionized gas thermal P (Inui): ", p_thm_xotf_inui
# unit checked
# --- radiation pressure (Thompson et al 2005) -------- #
print "------------"
print "+[ radiation P = effic*SFR/Area*c ]+"
# SFR_A=SFR/(A_starburst)
A_starburst=A_pc2(R_conti_pc)*pc**2
SFR_A=SFR/(A_starburst)
# when tau << 1
p_rad_thin=c*effi_mass2rad*SFR_A
# when tau >= 1
p_rad_thick=c*effi_mass2rad*SFR_A*(1.0/2)*tau
#print " o optically thin radiation P: ", '%.2e' %(p_rad_thin)
print " o optically thick radiation P: ", '%.2e' %(p_rad_thick)
# unit checked
# --- SN explosion pressure affect on cold ISM (Thompson et al 2005) -- #
print "------------"
print "+[ SN explosion P = 5*n_mol^(-1/4)*E_xry^(13/14)*P_rad ]+"
# E_init=E_xotf
# n_ambi=n_mol
p_SN_cold=5*(n_ambi/1.0)**(-1.0/4)*(E_init/1.0e51)**(13.0/14)*p_rad_thick
print " o SN explosion P (cold ISM): ", '%.2e' %(p_SN_cold)
print "------------"
# unit checked
# --------------------------------------------- #
# --- reference ------------------------------- #
# Chevalier et al. 2001, ApJ, 558, L27
# Chevalier et al. 1985, Nature, 317, 44
# Greve et al. 2000, AA, 364, 409
# Gunn & Gott 1972, ApJ, 176, 1
# Inui et al. 2005, P: ASJ, 57, 135
# Sakamoto et al. 1995, AJ, 110, 2075
# Strickland & Stevens 2000, MNRAS, 314, 511
# Tarchi et al. 2000, 358, 95
# Tsai et al. 2009, P: ASJ, 61, 237
# Thompson et al. 2005, ApJ, 630, 167
# Vollmer et al. 2001, ApJ, 561, 708
# Vollmer et al. 2005, AA, 441, 473
exit
| [
"[email protected]"
]
| |
47e266d665db77c973d48ba03cb937966bfcbd41 | c733e6b433914a8faba256c7853f5cf2cd39c62a | /Python/Leetcode Daily Practice/unclassified/647. Palindromic Substrings.py | a75c67380d9fa7090153a83f1116d883ea245643 | []
| no_license | YaqianQi/Algorithm-and-Data-Structure | 3016bebcc1f1356b6e5f3c3e588f3d46c276a805 | 2e1751263f484709102f7f2caf18776a004c8230 | refs/heads/master | 2021-10-27T16:29:18.409235 | 2021-10-14T13:57:36 | 2021-10-14T13:57:36 | 178,946,803 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 795 | py | """
Given a string, your task is to count how many palindromic substrings in this string.
The substrings with different start indexes or end indexes are counted as different
substrings even they consist of same characters.
Input: "abc"
Output: 3
Explanation: Three palindromic strings: "a", "b", "c".
"""
class Solution(object):
    """Count palindromic substrings with interval dynamic programming."""

    def countSubstrings_dp(self, s):
        """Return the number of palindromic substrings of s.

        is_pal[start][end] is True when s[start..end] is a palindrome: its
        end characters match and the interior is a palindrome (spans of
        length <= 3 only need matching ends).
        """
        length = len(s)
        is_pal = [[False] * length for _ in range(length)]
        count = 0
        for end in range(length):
            for start in range(end + 1):
                if s[start] != s[end]:
                    continue
                if end - start <= 2 or is_pal[start + 1][end - 1]:
                    is_pal[start][end] = True
                    count += 1
        return count
s = "aaa"
print(Solution().countSubstrings_dfs(s))
| [
"[email protected]"
]
| |
4084a64ffe7d52b14cb8b756e1efe29d46730493 | 8784a3a9d4054d1aca752ec742902abb51a9de80 | /python_stack/python_OOP/arithmetic_module/main.py | 785338b9b6b6a9481506f9e74ad051b34a087637 | []
| no_license | MichaelKirkaldyV/mean-deploy-2 | 25eaf7cc430ac095f5327c04be84b9212314c7f2 | f30b8ea14ccbaecfe62929948f2a84191d316c22 | refs/heads/master | 2023-01-13T07:20:28.984728 | 2019-05-23T16:42:15 | 2019-05-23T16:42:15 | 151,123,880 | 0 | 0 | null | 2022-12-30T09:47:11 | 2018-10-01T16:54:09 | TypeScript | UTF-8 | Python | false | false | 313 | py | #imports arithmetic module within the same folder.
#Then uses the module as a variable and calls its functions using the .method
#Adds parameters.
#prints out the solutions that each function returns.
import arithmetic
print arithmetic.add(5, 8)
print arithmetic.subtract(10, 5)
print arithmetic.multiply(12, 6)
| [
"[email protected]"
]
| |
83d88a5ed0bdcad629a6e3815dd75d21cc5a72e0 | e61e664d95af3b93150cda5b92695be6551d2a7c | /vega/networks/pytorch/customs/modnas/arch_space/construct/torch/model_init.py | 63a2eea20c488ff5f7c5cdf7026be84854afb40b | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"BSD-3-Clause",
"MIT"
]
| permissive | huawei-noah/vega | 44aaf8bb28b45f707ed6cd4e871ba70fc0c04846 | 12e37a1991eb6771a2999fe0a46ddda920c47948 | refs/heads/master | 2023-09-01T20:16:28.746745 | 2023-02-15T09:36:59 | 2023-02-15T09:36:59 | 273,667,533 | 850 | 184 | NOASSERTION | 2023-02-15T09:37:01 | 2020-06-20T08:20:06 | Python | UTF-8 | Python | false | false | 5,623 | py | # -*- coding:utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model weight initializer."""
import copy
import math
import torch.nn.init as init
from modnas.registry.construct import register
# Per-tensor initializers. NOTE: every function must keep the `_t_init_`
# name prefix -- the module builds its `_tensor_init_fn` dispatch table by
# scanning globals() for that prefix.
def _t_init_he_normal_fout(t, gain, fan_in, fan_out):
    """He (Kaiming) normal init scaled by fan-out."""
    init.normal_(t, 0, gain / math.sqrt(fan_out))


def _t_init_he_normal_fin(t, gain, fan_in, fan_out):
    """He (Kaiming) normal init scaled by fan-in."""
    init.normal_(t, 0, gain / math.sqrt(fan_in))


def _t_init_he_uniform_fout(t, gain, fan_in, fan_out):
    """He (Kaiming) uniform init scaled by fan-out."""
    bound = math.sqrt(3.) * gain / math.sqrt(fan_out)
    init.uniform_(t, -bound, bound)


def _t_init_he_uniform_fin(t, gain, fan_in, fan_out):
    """He (Kaiming) uniform init scaled by fan-in."""
    bound = math.sqrt(3.) * gain / math.sqrt(fan_in)
    init.uniform_(t, -bound, bound)


def _t_init_xavier_uniform(t, gain, fan_in, fan_out):
    """Xavier (Glorot) uniform init over fan-in + fan-out."""
    bound = math.sqrt(6.) * gain / math.sqrt(fan_in + fan_out)
    init.uniform_(t, -bound, bound)


def _t_init_xavier_normal(t, gain, fan_in, fan_out):
    """Xavier (Glorot) normal init over fan-in + fan-out."""
    init.normal_(t, 0, math.sqrt(2.) * gain / math.sqrt(fan_in + fan_out))


def _t_init_uniform_fin(t, gain, fan_in, fan_out):
    """Uniform init with bound 1/sqrt(fan_in); gain is ignored."""
    bound = 1.0 / math.sqrt(fan_in)
    init.uniform_(t, -bound, bound)


def _t_init_uniform_fout(t, gain, fan_in, fan_out):
    """Uniform init with bound 1/sqrt(fan_out); gain is ignored."""
    bound = 1.0 / math.sqrt(fan_out)
    init.uniform_(t, -bound, bound)


def _t_init_uniform(t, gain, fan_in, fan_out):
    """Plain uniform init on [0, 1); all scaling arguments are ignored."""
    init.uniform_(t)


def _t_init_normal(t, gain, fan_in, fan_out):
    """Plain standard-normal init; all scaling arguments are ignored."""
    init.normal_(t)


def _t_init_zeros(t, gain, fan_in, fan_out):
    """Fill with zeros; all scaling arguments are ignored."""
    init.zeros_(t)


def _t_init_ones(t, gain, fan_in, fan_out):
    """Fill with ones; all scaling arguments are ignored."""
    init.ones_(t)
def _init_tensor(init_type, t, gain, fan_in, fan_out):
    """Dispatch to the registered tensor initializer.

    No-op when init_type has no registered handler or the tensor is None.
    """
    fn = _tensor_init_fn.get(init_type)
    if fn is not None and t is not None:
        fn(t, gain, fan_in, fan_out)
def _m_init_conv(m, config):
    """Initialize a Conv2d module's weight (and bias, if present) per config."""
    conv_cfg = config['conv']
    weight_type = conv_cfg['type']
    bias_type = config['bias']['type']
    gain = config['gain']
    if weight_type is None:
        return
    receptive = m.kernel_size[0] * m.kernel_size[1]
    fan_in = receptive * m.in_channels
    fan_out = receptive * m.out_channels
    if conv_cfg.get('div_groups', True):
        # Grouped convolutions see only 1/groups of the channels.
        fan_in /= m.groups
        fan_out /= m.groups
    _init_tensor(weight_type, m.weight, gain, fan_in, fan_out)
    if m.bias is not None:
        _init_tensor(bias_type, m.bias, gain, fan_in, fan_out)
def _m_init_norm(m, config):
    """Reset a normalization module's statistics and initialize its affine params.

    Running statistics are reset (and momentum/eps overridden when given in
    the config) even when no weight initializer is configured.
    """
    init_type = config['norm']['type']
    bias_init_type = config['bias']['type']
    momentum = config['norm'].get('momentum')
    eps = config['norm'].get('eps')
    gain = config['gain']
    # NOTE(review): nn.GroupNorm (also mapped to 'norm' in the module map)
    # has no running statistics -- confirm reset_running_stats exists for
    # every module type routed here.
    m.reset_running_stats()
    if momentum is not None:
        m.momentum = momentum
    if eps is not None:
        m.eps = eps
    if not m.affine:
        # No learnable weight/bias to initialize.
        return
    fan_in = fan_out = m.num_features
    _init_tensor(init_type, m.weight, gain, fan_in, fan_out)
    _init_tensor(bias_init_type, m.bias, gain, fan_in, fan_out)
def _m_init_fc(m, config):
    """Initialize a Linear module's weight (and bias, if present) per config."""
    weight_type = config['fc']['type']
    bias_type = config['bias']['type']
    gain = config['gain']
    if weight_type is None:
        return
    fan_in = m.in_features
    fan_out = m.out_features
    _init_tensor(weight_type, m.weight, gain, fan_in, fan_out)
    if m.bias is not None:
        _init_tensor(bias_type, m.bias, gain, fan_in, fan_out)
_tensor_init_fn = {k[8:]: v for (k, v) in globals().items() if k.startswith('_t_init_')}
_module_init_fn = {k[8:]: v for (k, v) in globals().items() if k.startswith('_m_init_')}
_default_init_config = {
'conv': {
'type': None,
'div_groups': True,
},
'norm': {
'type': None,
},
'fc': {
'type': None,
},
'bias': {
'type': None,
},
}
_default_module_map = {
'Conv2d': 'conv',
'BatchNorm2d': 'norm',
'GroupNorm': 'norm',
'Linear': 'fc',
}
@register
class DefaultModelInitializer():
    """Default model weight initializer.

    Walks every module of a model and initializes weights/biases via the
    per-module-type handlers ('conv' / 'norm' / 'fc'), falling back to
    ``default_init_type`` for parameters of unrecognized leaf modules.
    """
    def __init__(self,
                 init_config=None,
                 module_init_map=None,
                 default_init_type=None,
                 neg_slope=math.sqrt(5),
                 nonlinear='leaky_relu'):
        """Build the merged init configuration.

        init_config: overrides merged onto the module-level default config
            (keys 'conv', 'norm', 'fc', 'bias'; 'gain' is computed here).
        module_init_map: extra module-class-name -> handler-key entries
            merged onto the module-level default map.
        default_init_type: initializer name for parameters of leaf modules
            with no mapped handler.
        neg_slope / nonlinear: forwarded to init.calculate_gain to compute
            the gain stored in the config.
        """
        self.init_config = copy.deepcopy(_default_init_config)
        self.init_config['gain'] = init.calculate_gain(nonlinear, neg_slope)
        self.init_config.update(init_config or {})
        self.module_init_map = _default_module_map.copy()
        self.module_init_map.update(module_init_map or {})
        self.default_init_type = default_init_type
    def __call__(self, model):
        """Initialize all weights of ``model`` in place and return it."""
        for m in model.modules():
            m_init_type = self.module_init_map.get(type(m).__name__)
            if m_init_type is not None:
                # Registered module type: use its dedicated handler.
                _module_init_fn[m_init_type](m, self.init_config)
            elif len(list(m.children())) == 0:
                # Unregistered leaf module: apply the default initializer,
                # deriving fans from each parameter's shape (1-D params use
                # dim 0 for both fans; 0-D params use 1).
                for p in m.parameters():
                    sz = p.shape
                    fan_out = sz[0] if len(sz) else 1
                    fan_in = sz[min(1, len(sz) - 1)] if len(sz) else 1
                    _init_tensor(self.default_init_type, p, self.init_config['gain'], fan_in, fan_out)
        return model
| [
"[email protected]"
]
| |
1d629c3f80bdea998e1edcc704dadcb450ca56ed | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/nlp/BERT-ITPT-FiT_ID0340_for_PyTorch/baseline_main.py | c8f1c9fc7e3f924283508a3653a8b1f69083fc34 | [
"BSD-3-Clause",
"MIT",
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 8,638 | py | #
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
#
"""
Script for training, testing, and saving baseline, binary classification models for the IMDB
dataset.
"""
import logging
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
# !pip install pytorch_transformers
from pytorch_transformers import AdamW # Adam's optimization w/ fixed weight decay
from models.baseline_models import SimpleRNN, SimpleRNNWithBERTEmbeddings
from bert_utils.data_utils import IMDBDataset
from bert_utils.model_utils import train, test
# Disable unwanted warning messages from pytorch_transformers
# NOTE: Run once without the line below to check if anything is wrong, here we target to eliminate
# the message "Token indices sequence length is longer than the specified maximum sequence length"
# since we already take care of it within the tokenize() function through fixing sequence length
logging.getLogger('pytorch_transformers').setLevel(logging.CRITICAL)

# 'npu' selects a Huawei Ascend build of PyTorch; falls back to CPU otherwise.
# NOTE(review): this looks like a port of a CUDA script -- confirm `torch.npu`
# exists in the target environment, else this line raises AttributeError.
DEVICE = torch.device('npu' if torch.npu.is_available() else 'cpu')
print("DEVICE FOUND: %s" % DEVICE)

# Set seeds for reproducibility
SEED = 42
torch.manual_seed(seed=SEED)
# NOTE(review): cudnn is CUDA-specific; presumably a leftover from the CUDA
# version of this script -- confirm it has any effect when running on NPU.
torch.backends.cudnn.deterministic = True
# Define hyperparameters
USE_BERT_EMBEDDING_PARAMETERS = True     # True: reuse BERT's embedding layer; False: train embeddings from scratch
PRETRAINED_MODEL_NAME = 'bert-base-cased'
NUM_EPOCHS = 50
BATCH_SIZE = 32
MAX_VOCABULARY_SIZE = 25000              # only used by the from-scratch embedding model (see SimpleRNN branch)
MAX_TOKENIZATION_LENGTH = 512            # BERT's maximum input sequence length
EMBEDDING_DIM = 100                      # embedding size for the from-scratch model
NUM_CLASSES = 2                          # binary sentiment: negative / positive
NUM_RECURRENT_LAYERS = 1
HIDDEN_SIZE = 128
USE_BIDIRECTIONAL = True
DROPOUT_RATE = 0.20
# Initialize model
if USE_BERT_EMBEDDING_PARAMETERS:
    # RNN classifier built on top of BERT's embedding parameters.
    model = SimpleRNNWithBERTEmbeddings(pretrained_model_name_for_embeddings=PRETRAINED_MODEL_NAME,
                                        max_tokenization_length=MAX_TOKENIZATION_LENGTH,
                                        num_classes=NUM_CLASSES,
                                        num_recurrent_layers=NUM_RECURRENT_LAYERS,
                                        use_bidirectional=USE_BIDIRECTIONAL,
                                        hidden_size=HIDDEN_SIZE,
                                        dropout_rate=DROPOUT_RATE,
                                        use_gpu=True if torch.npu.is_available() else False)

    # IMPORTANT NOTE: Maximum vocabulary size should be set to be equal or larger than the maximum
    # encoded (embedded) index used for any token, else the embedding matrix will not capture that token
else:
    # Plain RNN with its own trainable embedding matrix; BERT is used only as
    # the tokenizer here.
    # NOTE(review): the vocabulary is quadrupled (MAX_VOCABULARY_SIZE*4 = 100k),
    # presumably to cover the tokenizer's full id space per the note above --
    # confirm against the tokenizer's actual vocabulary size.
    model = SimpleRNN(pretrained_model_name_for_tokenizer=PRETRAINED_MODEL_NAME,
                      max_vocabulary_size=MAX_VOCABULARY_SIZE*4,
                      max_tokenization_length=MAX_TOKENIZATION_LENGTH,
                      embedding_dim=EMBEDDING_DIM,
                      num_classes=NUM_CLASSES,
                      num_recurrent_layers=NUM_RECURRENT_LAYERS,
                      hidden_size=HIDDEN_SIZE,
                      use_bidirectional=USE_BIDIRECTIONAL,
                      dropout_rate=DROPOUT_RATE,
                      use_gpu=True if torch.npu.is_available() else False)
# Initialize train & test datasets
# Both splits truncate long reviews with 'head-only', i.e. they keep the
# first MAX_TOKENIZATION_LENGTH tokens of each review.
train_dataset = IMDBDataset(input_directory='aclImdb/train',
                            tokenizer=model.get_tokenizer(),
                            apply_cleaning=False,
                            max_tokenization_length=MAX_TOKENIZATION_LENGTH,
                            truncation_method='head-only',
                            device=DEVICE)
test_dataset = IMDBDataset(input_directory='aclImdb/test',
                           tokenizer=model.get_tokenizer(),
                           apply_cleaning=False,
                           max_tokenization_length=MAX_TOKENIZATION_LENGTH,
                           truncation_method='head-only',
                           device=DEVICE)

# Acquire iterators through data loaders
train_loader = DataLoader(dataset=train_dataset,
                          batch_size=BATCH_SIZE,
                          shuffle=True)   # shuffle only the training split
test_loader = DataLoader(dataset=test_dataset,
                         batch_size=BATCH_SIZE,
                         shuffle=False)
# Define loss function
criterion = nn.CrossEntropyLoss()

# Define identifiers & group model parameters accordingly (check README.md for the intuition)
if USE_BERT_EMBEDDING_PARAMETERS:
    # Discriminative fine-tuning: BERT embedding parameters get a small LR,
    # the newly initialized layers a larger one, and bias/LayerNorm weights
    # are exempt from weight decay (the standard BERT fine-tuning recipe).
    bert_learning_rate = 3e-5
    custom_learning_rate = 1e-3
    bert_identifiers = ['embeddings']
    no_weight_decay_identifiers = ['bias', 'LayerNorm.weight']
    grouped_model_parameters = [
        # Group 1: BERT embedding parameters that DO receive weight decay.
        {'params': [param for name, param in model.named_parameters()
                    if any(identifier in name for identifier in bert_identifiers) and
                    not any(identifier_ in name for identifier_ in no_weight_decay_identifiers)],
         'lr': bert_learning_rate,
         'betas': (0.9, 0.999),
         'weight_decay': 0.01,
         'eps': 1e-8},
        # Group 2: BERT bias / LayerNorm parameters -- no weight decay.
        {'params': [param for name, param in model.named_parameters()
                    if any(identifier in name for identifier in bert_identifiers) and
                    any(identifier_ in name for identifier_ in no_weight_decay_identifiers)],
         'lr': bert_learning_rate,
         'betas': (0.9, 0.999),
         'weight_decay': 0.0,
         'eps': 1e-8},
        # Group 3: all remaining (custom, non-BERT) parameters.
        {'params': [param for name, param in model.named_parameters()
                    if not any(identifier in name for identifier in bert_identifiers)],
         'lr': custom_learning_rate,
         'betas': (0.9, 0.999),
         'weight_decay': 0.0,
         'eps': 1e-8}
    ]
    # Define optimizer
    optimizer = AdamW(grouped_model_parameters)
else:
    # Define optimizer
    optimizer = optim.Adam(params=model.parameters(),
                           lr=1e-3,
                           betas=(0.9, 0.999),
                           eps=1e-8)
import os  # local import: only needed for the checkpoint-directory fix below

# Place model & loss function on the selected device (NPU or CPU)
model, criterion = model.to(DEVICE), criterion.to(DEVICE)

# Start actual training, check test loss after each epoch and keep only the
# checkpoint with the best (lowest) test loss seen so far.
best_test_loss = float('inf')
for epoch in range(NUM_EPOCHS):
    print("EPOCH NO: %d" % (epoch + 1))
    train_loss, train_acc = train(model=model,
                                  iterator=train_loader,
                                  criterion=criterion,
                                  optimizer=optimizer,
                                  device=DEVICE,
                                  include_bert_masks=True)
    test_loss, test_acc = test(model=model,
                               iterator=test_loader,
                               criterion=criterion,
                               device=DEVICE,
                               include_bert_masks=True)
    if test_loss < best_test_loss:
        best_test_loss = test_loss
        # torch.save() does not create parent directories; on a fresh clone
        # the original line raised FileNotFoundError and killed the run.
        os.makedirs('saved_models', exist_ok=True)
        torch.save(model.state_dict(), 'saved_models/simple-lstm-model.pt')
    print(f'\tTrain Loss: {train_loss:.3f} | Train Accuracy: {train_acc * 100:.2f}%')
    print(f'\tTest Loss: {test_loss:.3f} | Test Accuracy: {test_acc * 100:.2f}%')
| [
"[email protected]"
]
| |
59817d4f4915dfc4c470c6d51b0592362187ec0b | 350d6b7246d6ef8161bdfccfb565b8671cc4d701 | /Binary Tree Vertical Order Traversal.py | da22a1ddbb5aca8b4d6f3dbd14fa43d4a483c554 | []
| no_license | YihaoGuo2018/leetcode_python_2 | 145d5fbe7711c51752b2ab47a057b37071d2fbf7 | 2065355198fd882ab90bac6041c1d92d1aff5c65 | refs/heads/main | 2023-02-14T14:25:58.457991 | 2021-01-14T15:57:10 | 2021-01-14T15:57:10 | 329,661,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    """Vertical-order traversal of a binary tree (LeetCode 314 variant).

    Columns are indexed left-to-right: the left child of a node in column d
    lives in column d - 1 and the right child in column d + 1.  Within a
    column, values appear in preorder (root, left, right) visit order.
    """
    # Kept for backward compatibility; overwritten per call in verticalOrder().
    dic = {}

    def verticalOrder(self, root):
        """Return node values grouped by column, leftmost column first.

        Bug fix: ``dic`` used to be a class-level dict that was never
        cleared, so a second call (or a second Solution instance) returned
        the accumulated results of every previous call.  The mapping is now
        reset on every invocation.
        """
        self.dic = {}
        self.help(root, 1)
        save = []
        for k in sorted(self.dic.keys()):
            save.append(self.dic[k])
        return save

    def help(self, root, depth):
        """Preorder DFS appending ``root.val`` to the bucket for *depth*."""
        if root is None:
            return
        self.dic.setdefault(depth, []).append(root.val)
        self.help(root.left, depth - 1)
        self.help(root.right, depth + 1)
        return
| [
"[email protected]"
]
| |
b778f0bcd27786a4be937fba9a023d8f4c35c68c | 9923e30eb99716bfc179ba2bb789dcddc28f45e6 | /openapi-generator/python/test/test_hos_logs_summary_response_drivers.py | 5733eeb8fc3d9ded047880b3b5940e1ba43f6fd4 | []
| no_license | silverspace/samsara-sdks | cefcd61458ed3c3753ac5e6bf767229dd8df9485 | c054b91e488ab4266f3b3874e9b8e1c9e2d4d5fa | refs/heads/master | 2020-04-25T13:16:59.137551 | 2019-03-01T05:49:05 | 2019-03-01T05:49:05 | 172,804,041 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,564 | py | # coding: utf-8
"""
Samsara API
# Introduction Samsara provides API endpoints for interacting with Samsara Cloud, so that you can build powerful applications and custom solutions with sensor data. Samsara has endpoints available to track and analyze sensors, vehicles, and entire fleets. The Samsara Cloud API is a [RESTful API](https://en.wikipedia.org/wiki/Representational_state_transfer) accessed by an [HTTP](https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol) client such as wget or curl, or HTTP libraries of most modern programming languages including python, ruby, java. We use built-in HTTP features, like HTTP authentication and HTTP verbs, which are understood by off-the-shelf HTTP clients. We allow you to interact securely with our API from a client-side web application (though you should never expose your secret API key). [JSON](http://www.json.org/) is returned by all API responses, including errors. If you’re familiar with what you can build with a REST API, the following API reference guide will be your go-to resource. API access to the Samsara cloud is available to all Samsara administrators. To start developing with Samsara APIs you will need to [obtain your API keys](#section/Authentication) to authenticate your API requests. If you have any questions you can reach out to us on [[email protected]](mailto:[email protected]) # Endpoints All our APIs can be accessed through HTTP requests to URLs like: ```curl https://api.samsara.com/<version>/<endpoint> ``` All our APIs are [versioned](#section/Versioning). If we intend to make breaking changes to an API which either changes the response format or request parameter, we will increment the version. # Authentication To authenticate your API request you will need to include your secret token. You can manage your API tokens in the [Dashboard](https://cloud.samsara.com). They are visible under `Settings->Organization->API Tokens`. Your API tokens carry many privileges, so be sure to keep them secure. 
Do not share your secret API tokens in publicly accessible areas such as GitHub, client-side code, and so on. Authentication to the API is performed via [HTTP Basic Auth](https://en.wikipedia.org/wiki/Basic_access_authentication). Provide your API token as the basic access_token value in the URL. You do not need to provide a password. ```curl https://api.samsara.com/<version>/<endpoint>?access_token={access_token} ``` All API requests must be made over [HTTPS](https://en.wikipedia.org/wiki/HTTPS). Calls made over plain HTTP or without authentication will fail. # Request Methods Our API endpoints use [HTTP request methods](https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Request_methods) to specify the desired operation to be performed. The documentation below specified request method supported by each endpoint and the resulting action. ## GET GET requests are typically used for fetching data (like data for a particular driver). ## POST POST requests are typically used for creating or updating a record (like adding new tags to the system). With that being said, a few of our POST requests can be used for fetching data (like current location data of your fleet). ## PUT PUT requests are typically used for updating an existing record (like updating all devices associated with a particular tag). ## PATCH PATCH requests are typically used for modifying an existing record (like modifying a few devices associated with a particular tag). ## DELETE DELETE requests are used for deleting a record (like deleting a tag from the system). # Response Codes All API requests will respond with appropriate [HTTP status code](https://en.wikipedia.org/wiki/List_of_HTTP_status_codes). Your API client should handle each response class differently. ## 2XX These are successful responses and indicate that the API request returned the expected response. ## 4XX These indicate that there was a problem with the request like a missing parameter or invalid values. 
Check the response for specific [error details](#section/Error-Responses). Requests that respond with a 4XX status code, should be modified before retrying. ## 5XX These indicate server errors when the server is unreachable or is misconfigured. In this case, you should retry the API request after some delay. # Error Responses In case of a 4XX status code, the body of the response will contain information to briefly explain the error reported. To help debugging the error, you can refer to the following table for understanding the error message. | Status Code | Message | Description | |-------------|----------------|-------------------------------------------------------------------| | 401 | Invalid token | The API token is invalid and could not be authenticated. Please refer to the [authentication section](#section/Authentication). | | 404 | Page not found | The API endpoint being accessed is invalid. | | 400 | Bad request | Default response for an invalid request. Please check the request to make sure it follows the format specified in the documentation. | # Versioning All our APIs are versioned. Our current API version is `v1` and we are continuously working on improving it further and provide additional endpoints. If we intend to make breaking changes to an API which either changes the response format or request parameter, we will increment the version. Thus, you can use our current API version worry free. # FAQs Check out our [responses to FAQs here](https://kb.samsara.com/hc/en-us/sections/360000538054-APIs). Don’t see an answer to your question? Reach out to us on [[email protected]](mailto:[email protected]). # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import openapi_client
from openapi_client.models.hos_logs_summary_response_drivers import HosLogsSummaryResponseDrivers # noqa: E501
from openapi_client.rest import ApiException
class TestHosLogsSummaryResponseDrivers(unittest.TestCase):
    """Unit-test scaffold for the ``HosLogsSummaryResponseDrivers`` model."""

    def setUp(self):
        """No fixtures are required for this generated stub."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testHosLogsSummaryResponseDrivers(self):
        """Construction smoke test for HosLogsSummaryResponseDrivers.

        FIXME: build the model with example values for its mandatory
        attributes, e.g.::

            model = openapi_client.models.hos_logs_summary_response_drivers.HosLogsSummaryResponseDrivers()  # noqa: E501
        """
        pass


if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
]
| |
0780bc486c4355eaef2a4df385fc503799cbf3eb | 79e19819aec49b500825f82a7de149eb6a0ba81d | /leetcode/1018.py | 632dc46703f709c5e2bf6b31ac1d966e91cbfa8c | []
| no_license | seoyeonhwng/algorithm | 635e5dc4a2e9e1c50dc0c75d9a2a334110bb8e26 | 90406ee75de69996e666ea505ff5d9045c2ad941 | refs/heads/master | 2023-05-03T16:51:48.454619 | 2021-05-26T00:54:40 | 2021-05-26T00:54:40 | 297,548,218 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | class Solution:
def prefixesDivBy5(self, A: List[int]) -> List[bool]:
answer = [False] * len(A)
answer[0], prev = (A[0] == 0), A[0]
for i in range(1, len(A)):
answer[i] = ((prev * 2 + A[i]) % 5 == 0)
prev = prev * 2 + A[i]
return answer
"""
- 왼쪽으로 shift = 2를 곱한다
""" | [
"[email protected]"
]
| |
c7984ce328339a910916d87e8b897f8b81df8ac6 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02659/s866773123.py | 485fb9b9a423580f938055c9b85b90aa424797da | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | AB = input().split()
A = int(AB[0])
B = AB[1].split('.')
B = int(B[0]) * 100 + int(B[1])
print(A * B // 100)
| [
"[email protected]"
]
| |
64db72079dc2438f42dcc5f4e3ecafa46502073d | b306aab9dcea2dd83dda700bc9f7b9f1a32cff3a | /CAIL2020/ydljy/data.py | be956ba1641be5f4507b7e05e32f92376548b540 | [
"Apache-2.0"
]
| permissive | Tulpen/CAIL | d6ca9981c7ea2603ae61675ba330a9614cd9398d | c4cfa98ab4ecedbce34a7a5a186830486047540c | refs/heads/master | 2023-04-23T20:07:56.774530 | 2021-04-16T13:18:36 | 2021-04-16T13:18:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,078 | py | """Data processor for SMP-CAIL2020-Argmine.
Author: Tsinghuaboy ([email protected])
In data file, each line contains 1 sc sentence and 5 bc sentences.
The data processor convert each line into 5 samples,
each sample with 1 sc sentence and 1 bc sentence.
Usage:
1. Tokenizer (used for RNN model):
from data import Tokenizer
vocab_file = 'vocab.txt'
sentence = '我饿了,想吃东西了。'
tokenizer = Tokenizer(vocab_file)
tokens = tokenizer.tokenize(sentence)
# ['我', '饿', '了', ',', '想', '吃', '东西', '了', '。']
ids = tokenizer.convert_tokens_to_ids(tokens)
2. Data:
from data import Data
# For training, load train and valid set
# For BERT model
data = Data('model/bert/vocab.txt', model_type='bert')
datasets = data.load_train_and_valid_files(
'SMP-CAIL2020-train.csv', 'SMP-CAIL2020-valid.csv')
train_set, valid_set_train, valid_set_valid = datasets
# For RNN model
data = Data('model/rnn/vocab.txt', model_type='rnn')
datasets = data.load_all_files(
'SMP-CAIL2020-train.csv', 'SMP-CAIL2020-valid.csv')
train_set, valid_set_train, valid_set_valid = datasets
# For testing, load test set
data = Data('model/bert/vocab.txt', model_type='bert')
test_set = data.load_file('SMP-CAIL2020-test.csv', train=False)
"""
from typing import List
import jieba
import torch
import pandas as pd
from torch.utils.data import TensorDataset
from transformers import BertTokenizer
# from pytorch_pretrained_bert import BertTokenizer
from tqdm import tqdm
class Tokenizer:
    """Word-level tokenizer for Chinese text backed by a vocab.txt file.

    Attributes:
        dictionary: Dict[str, int] mapping each word to its integer id;
            id 0 is reserved for '[PAD]' and id 1 for '[UNK]'.
    """

    def __init__(self, vocab_file='vocab.txt'):
        """Build the word->id mapping from a vocabulary file.

        Args:
            vocab_file: path to a text file with one word per line
        """
        self.dictionary = {'[PAD]': 0, '[UNK]': 1}
        with open(vocab_file, encoding='utf-8') as handle:
            # Ids 0 and 1 are reserved, so file lines start at id 2.
            for index, line in enumerate(handle, start=2):
                self.dictionary[line.strip()] = index

    def __len__(self):
        """Number of known words, including the two reserved symbols."""
        return len(self.dictionary)

    @staticmethod
    def tokenize(sentence: str) -> List[str]:
        """Segment *sentence* into a list of words."""
        return jieba.lcut(sentence)

    def convert_tokens_to_ids(self, tokens_list: List[str]) -> List[int]:
        """Map each token to its id, falling back to 1 ('[UNK]')."""
        lookup = self.dictionary.get
        return [lookup(token, 1) for token in tokens_list]
class Data:
    """Data processor for BERT and RNN model for SMP-CAIL2020-Argmine.

    Attributes:
        model_type: 'bert' or 'rnn'
        max_seq_len: int, default: 512
        tokenizer:  BertTokenizer for bert
                    Tokenizer for rnn
    """
    def __init__(self,
                 vocab_file='',
                 max_seq_len: int = 512,
                 model_type: str = 'bert', config=None):
        """Initialize data processor for SMP-CAIL2020-Argmine.

        Args:
            vocab_file: one word each line (only used by the rnn tokenizer)
            max_seq_len: max sequence length, default: 512
            model_type: 'bert' or 'rnn'
                If model_type == 'bert', use BertTokenizer as tokenizer
                Otherwise, use Tokenizer as tokenizer
            config: expected to expose ``bert_model_path`` for the bert
                branch.  NOTE(review): the default ``config=None`` raises
                AttributeError below -- confirm callers always pass a config
                when model_type == 'bert'.
        """
        self.model_type = model_type
        if self.model_type == 'bert':
            self.tokenizer = BertTokenizer.from_pretrained(config.bert_model_path)#BertTokenizer(vocab_file)
        else:  # rnn
            self.tokenizer = Tokenizer(vocab_file)
        self.max_seq_len = max_seq_len

    def load_file(self,
                  file_path='SMP-CAIL2020-train.csv',
                  train=True) -> TensorDataset:
        """Load SMP-CAIL2020-Argmine train file and construct TensorDataset.

        Args:
            file_path: train file with last column as label
            train:
                If True, train file with last column as label
                Otherwise, test file without last column as label

        Returns:
            BERT model:
            Train:
                torch.utils.data.TensorDataset
                    each record: (input_ids, input_mask, segment_ids, label)
            Test:
                torch.utils.data.TensorDataset
                    each record: (input_ids, input_mask, segment_ids)
            RNN model:
            Train:
                torch.utils.data.TensorDataset
                    each record: (s1_ids, s2_ids, s1_length, s2_length, label)
            Test:
                torch.utils.data.TensorDataset
                    each record: (s1_ids, s2_ids, s1_length, s2_length)
        """
        sc_list, bc_list, label_list = self._load_file(file_path, train)
        if self.model_type == 'bert':
            dataset = self._convert_sentence_pair_to_bert_dataset(
                sc_list, bc_list, label_list)
        else:  # rnn
            dataset = self._convert_sentence_pair_to_rnn_dataset(
                sc_list, bc_list, label_list)
        return dataset

    def load_train_and_valid_files(self, train_file, valid_file):
        """Load all files for SMP-CAIL2020-Argmine.

        Args:
            train_file, valid_file: files for SMP-CAIL2020-Argmine

        Returns:
            train_set, valid_set_train, valid_set_valid
            all are torch.utils.data.TensorDataset
        """
        print('Loading train records for train...')
        train_set = self.load_file(train_file, True)
        print(len(train_set), 'training records loaded.')
        print('Loading train records for valid...')
        # The training file is re-loaded without labels so it can be scored
        # the same way as the validation split.
        valid_set_train = self.load_file(train_file, False)
        print(len(valid_set_train), 'train records loaded.')
        print('Loading valid records...')
        valid_set_valid = self.load_file(valid_file, False)
        print(len(valid_set_valid), 'valid records loaded.')
        return train_set, valid_set_train, valid_set_valid

    def _load_file(self, filename, train: bool = True):
        """Load SMP-CAIL2020-Argmine train/test file.

        For train file,
        The ratio between positive samples and negative samples is 1:4
        Copy positive 3 times so that positive:negative = 1:1
        NOTE(review): the oversampling described above is the commented-out
        loop below; the active code appends each row exactly once.

        Args:
            filename: SMP-CAIL2020-Argmine file
            train:
                If True, train file with last column as label
                Otherwise, test file without last column as label

        Returns:
            sc_list, bc_list, label_list with the same length
            sc_list, bc_list: List[List[str]], list of word tokens list
            label_list: List[int], list of labels (empty when train=False)
        """
        data_frame = pd.read_csv(filename)
        sc_list, bc_list, label_list = [], [], []
        for row in data_frame.itertuples(index=False):
            # candidates = row[0:2]
            # Label comes from the truthiness of the last column (train only).
            answer = bool(row[-1]) if train else None
            sc_tokens = self.tokenizer.tokenize(row[0])
            bc_tokens = self.tokenizer.tokenize(row[1])
            label = 1 if answer else 0
            sc_list.append(sc_tokens)
            bc_list.append(bc_tokens)
            if train:
                label_list.append(label)
            # for i, _ in enumerate(candidates):
            #     bc_tokens = self.tokenizer.tokenize(candidates[i])
            #     if train:
            #         if i + 1 == answer:
            #             # Copy positive sample 4 times
            #             for _ in range(len(candidates) - 1):
            #                 sc_list.append(sc_tokens)
            #                 bc_list.append(bc_tokens)
            #                 label_list.append(1)
            #         else:
            #             sc_list.append(sc_tokens)
            #             bc_list.append(bc_tokens)
            #             label_list.append(0)
            #     else:  # test
            #         sc_list.append(sc_tokens)
            #         bc_list.append(bc_tokens)
        return sc_list, bc_list, label_list

    def _convert_sentence_pair_to_bert_dataset(
            self, s1_list, s2_list, label_list=None):
        """Convert sentence pairs to dataset for BERT model.

        Args:
            sc_list, bc_list: List[List[str]], list of word tokens list
            label_list: train: List[int], list of labels
                        test: []

        Returns:
            Train:
                torch.utils.data.TensorDataset
                    each record: (input_ids, input_mask, segment_ids, label)
            Test:
                torch.utils.data.TensorDataset
                    each record: (input_ids, input_mask, segment_ids)
        """
        all_input_ids, all_input_mask, all_segment_ids = [], [], []
        for i, _ in tqdm(enumerate(s1_list), ncols=80):
            # Standard BERT pair encoding: [CLS] s1 [SEP] s2 [SEP], with
            # segment id 0 for the first sentence and 1 for the second.
            tokens = ['[CLS]'] + s1_list[i] + ['[SEP]']
            segment_ids = [0] * len(tokens)
            tokens += s2_list[i] + ['[SEP]']
            segment_ids += [1] * (len(s2_list[i]) + 1)
            if len(tokens) > self.max_seq_len:
                # NOTE(review): hard truncation can drop the trailing '[SEP]'
                # token -- confirm downstream models tolerate that.
                tokens = tokens[:self.max_seq_len]
                segment_ids = segment_ids[:self.max_seq_len]
            input_ids = self.tokenizer.convert_tokens_to_ids(tokens)
            input_mask = [1] * len(input_ids)
            tokens_len = len(input_ids)
            # Zero-pad ids, segment ids and attention mask up to max_seq_len.
            input_ids += [0] * (self.max_seq_len - tokens_len)
            segment_ids += [0] * (self.max_seq_len - tokens_len)
            input_mask += [0] * (self.max_seq_len - tokens_len)
            all_input_ids.append(input_ids)
            all_input_mask.append(input_mask)
            all_segment_ids.append(segment_ids)
        all_input_ids = torch.tensor(all_input_ids, dtype=torch.long)
        all_input_mask = torch.tensor(all_input_mask, dtype=torch.long)
        all_segment_ids = torch.tensor(all_segment_ids, dtype=torch.long)
        if label_list:  # train
            all_label_ids = torch.tensor(label_list, dtype=torch.long)
            return TensorDataset(
                all_input_ids, all_input_mask, all_segment_ids, all_label_ids)
        # test
        return TensorDataset(
            all_input_ids, all_input_mask, all_segment_ids)

    def _convert_sentence_pair_to_rnn_dataset(
            self, s1_list, s2_list, label_list=None):
        """Convert sentences pairs to dataset for RNN model.

        Args:
            sc_list, bc_list: List[List[str]], list of word tokens list
            label_list: train: List[int], list of labels
                        test: []

        Returns:
            Train:
                torch.utils.data.TensorDataset
                    each record: (s1_ids, s2_ids, s1_length, s2_length, label)
            Test:
                torch.utils.data.TensorDataset
                    each record: (s1_ids, s2_ids, s1_length, s2_length, label)
        """
        all_s1_ids, all_s2_ids = [], []
        all_s1_lengths, all_s2_lengths = [], []
        for i in tqdm(range(len(s1_list)), ncols=80):
            tokens_s1, tokens_s2 = s1_list[i], s2_list[i]
            # Record the unpadded lengths, capped at max_seq_len.
            all_s1_lengths.append(min(len(tokens_s1), self.max_seq_len))
            all_s2_lengths.append(min(len(tokens_s2), self.max_seq_len))
            if len(tokens_s1) > self.max_seq_len:
                tokens_s1 = tokens_s1[:self.max_seq_len]
            if len(tokens_s2) > self.max_seq_len:
                tokens_s2 = tokens_s2[:self.max_seq_len]
            s1_ids = self.tokenizer.convert_tokens_to_ids(tokens_s1)
            s2_ids = self.tokenizer.convert_tokens_to_ids(tokens_s2)
            # Zero-pad both sequences to the fixed length.
            if len(s1_ids) < self.max_seq_len:
                s1_ids += [0] * (self.max_seq_len - len(s1_ids))
            if len(s2_ids) < self.max_seq_len:
                s2_ids += [0] * (self.max_seq_len - len(s2_ids))
            all_s1_ids.append(s1_ids)
            all_s2_ids.append(s2_ids)
        all_s1_ids = torch.tensor(all_s1_ids, dtype=torch.long)
        all_s2_ids = torch.tensor(all_s2_ids, dtype=torch.long)
        all_s1_lengths = torch.tensor(all_s1_lengths, dtype=torch.long)
        all_s2_lengths = torch.tensor(all_s2_lengths, dtype=torch.long)
        if label_list:  # train
            all_label_ids = torch.tensor(label_list, dtype=torch.long)
            return TensorDataset(
                all_s1_ids, all_s2_ids, all_s1_lengths, all_s2_lengths,
                all_label_ids)
        # test
        return TensorDataset(
            all_s1_ids, all_s2_ids, all_s1_lengths, all_s2_lengths)
def test_data():
    """Smoke-test the Data processor for both supported model types."""
    for model_type, vocab_path in (('bert', 'model/bert/vocab.txt'),
                                   ('rnn', 'model/rnn/vocab.txt')):
        processor = Data(vocab_path, model_type=model_type)
        # Return values (the three datasets) are discarded; we only care
        # that loading runs without raising.
        processor.load_train_and_valid_files(
            'SMP-CAIL2020-train.csv',
            'SMP-CAIL2020-test1.csv')


if __name__ == '__main__':
    test_data()
| [
"[email protected]"
]
| |
b284f9b10b8c572c65a64f1f9b88cde920a8b781 | d0cb58e1658d4b5b88bdc07e497dc8092707ae02 | /2021/01january/24specify_data.py | 6381a461e0645467957c5e23c467055af3ce9fb7 | []
| no_license | June-fu/python365 | 27f9b753d38ade549d59aa8f2d8bda0fb8b1e20c | 242033a4b644a7566fbfa4dba9b60f60aa31fe91 | refs/heads/master | 2021-07-02T21:42:28.454091 | 2021-05-04T15:08:44 | 2021-05-04T15:08:44 | 233,629,713 | 0 | 0 | null | 2020-01-13T15:52:58 | 2020-01-13T15:36:53 | null | UTF-8 | Python | false | false | 466 | py | #!/usr/bin/python
'''
# @ Author: june-fu
# @ Create Time: 2021-02-22 23:59:17
# @ Modified by: june-fu
# @ Modified time: 2021-02-22 23:59:19
# @ Description:arguments parse_dates
'''
import pandas as pd
from io import StringIO
data =('date,A,B,C\n'
'20090101,a,1,2\n'
'20090102,b,3,4\n'
'20090103,c,4,5')
# arguments parse_dates
df = pd.read_csv(StringIO(data), index_col=0, parse_dates=True)
print(df)
# These are Python datetime objects
print(df.index) | [
"[email protected]"
]
| |
8e4439a5213755463643b9a98d6b098eb3614207 | 92e26b93057723148ecb8ca88cd6ad755f2e70f1 | /cov_exp/plain30_orth/plt.py | 15145016643831e2908e2041dc913dd1c9a66851 | []
| no_license | lyuyanyii/CIFAR | 5906ad9fbe1377edf5b055098709528e06b5ace2 | d798834942d6a9d4e3295cda77488083c1763962 | refs/heads/master | 2021-08-30T20:09:52.819883 | 2017-12-19T08:37:37 | 2017-12-19T08:37:37 | 112,701,370 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 315 | py | import matplotlib.pyplot as plt
import pickle
import numpy as np
import scipy.signal as signal
# Load the recorded training history and plot a median-filtered loss curve.
with open("hisloss.data", "rb") as f:
    history = pickle.load(f)
# Column 1 of each record holds the loss; smooth with a width-9 median filter.
losses = signal.medfilt(np.array(history)[:, 1], 9)
plt.plot(range(len(losses)), losses)
plt.show()
| [
"[email protected]"
]
| |
cc6fd9d2d1e1189cd0cf54388ff2f5212a84b77f | d7797ec584d722d5d15cb7355a63ecbdc5200123 | /EducationSystem/course_system/apps.py | fa1a9fd70ac8a24a15454501040abc0d654bf4de | []
| no_license | Rositsazz/course_system | f9790890802450b82663b4b01640c3ccdf87d721 | 5f67336a2298452efbea4cd0f7d3bfd992fd1d34 | refs/heads/master | 2021-01-22T07:23:09.956371 | 2017-06-28T10:46:56 | 2017-06-28T10:46:56 | 81,813,140 | 0 | 0 | null | 2017-06-28T10:37:04 | 2017-02-13T10:31:03 | Python | UTF-8 | Python | false | false | 100 | py | from django.apps import AppConfig
class CourseSystemConfig(AppConfig):
    """Django application configuration for the ``course_system`` app."""
    # Dotted module path Django uses to locate the application.
    name = 'course_system'
| [
"[email protected]"
]
| |
e86af748470270a3bd18fbbcd3dc8e992712cb17 | 8cf0cf9b71b7c5fbaa150e9893bf461ef661045e | /ownblock/ownblock/apps/accounts/views.py | 77aad64c0c1af2bb8b440208af2f015e13b0a50a | [
"MIT"
]
| permissive | danjac/ownblock | 676b27a5aa0d4ce2ac2cd924a632489cd6fc21ee | ac662fb7efb2f04567e2f85638c1250286452611 | refs/heads/master | 2016-08-02T21:51:56.055598 | 2015-05-02T12:54:47 | 2015-05-02T12:54:47 | 34,940,828 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,442 | py | from django.db.models import Q
from rest_framework import status, viewsets, permissions
from rest_framework.response import Response
from rest_framework.views import APIView
from ..storage.models import Item
from ..parking.models import Vehicle
from ..messaging.models import Message
from ..notices.models import Notice
from ..amenities.models import Booking
from .models import User
from .serializers import (
UserSerializer,
RestrictedUserSerializer,
AuthUserSerializer,
)
class UserViewSet(viewsets.ModelViewSet):
    """CRUD endpoint for users, with an enriched profile payload on retrieve."""

    model = User

    def get_serializer_class(self):
        """Managers get the restricted serializer; everyone else the full one."""
        if self.request.user.role == 'manager':
            return RestrictedUserSerializer
        return UserSerializer

    def retrieve(self, request, *args, **kwargs):
        """Return the user's serialized profile plus related content.

        The base serializer payload is augmented with: the gravatar URL,
        notices authored by the user, the message history between the viewed
        user and the requesting user (only when viewing someone else), and
        -- for residents -- their storage items, vehicles and amenity
        bookings.
        """
        self.object = self.get_object()
        data = self.get_serializer(self.object).data
        data['gravatar'] = self.object.get_gravatar_url(size=40)
        notices = Notice.objects.filter(author=self.object)
        data['notices'] = []
        for notice in notices.iterator():
            data['notices'].append({
                'id': notice.id,
                'title': notice.title,
                'details': notice.details,
                'created': notice.created,
            })
        if self.object != self.request.user:
            # All messages exchanged between the two users, newest first.
            messages = Message.objects.filter(
                Q(sender=self.object) | Q(recipient=self.object)).filter(
                Q(sender=self.request.user) | Q(recipient=self.request.user)
            ).order_by('-created')
            data['sent_messages'] = []
            data['received_messages'] = []
            for message in messages.iterator():
                message_data = {
                    'id': message.id,
                    'header': message.header,
                    'details': message.details,
                    'created': message.created,
                }
                # 'sent' / 'received' is from the viewed user's perspective.
                if message.sender_id == self.object.id:
                    data['sent_messages'].append(message_data)
                else:
                    data['received_messages'].append(message_data)
        if self.object.role == 'resident':
            items = Item.objects.filter(
                resident=self.object
            ).select_related('place')
            data['items'] = []
            for item in items.iterator():
                data['items'].append({
                    'id': item.id,
                    'place_id': item.place_id,
                    'place_name': item.place.name,
                    'description': item.description,
                })
            vehicles = Vehicle.objects.filter(
                resident=self.object
            )
            data['vehicles'] = []
            for vehicle in vehicles.iterator():
                data['vehicles'].append({
                    'id': vehicle.id,
                    'description': vehicle.description,
                    'registration_number': vehicle.registration_number,
                })
            bookings = Booking.objects.filter(
                resident=self.object
            ).select_related('amenity')
            data['bookings'] = []
            for booking in bookings:
                data['bookings'].append({
                    'id': booking.id,
                    'amenity': {
                        'id': booking.amenity.id,
                        'name': booking.amenity.name,
                    },
                    'reserved_from': booking.reserved_from,
                    'reserved_to': booking.reserved_to,
                })
        return Response(data)

    def get_queryset(self, *args, **kwargs):
        """Active users visible to the requester, ordered by name.

        ``?residents=...`` limits to residents of the current building;
        ``?managers=...`` limits to managers of the building's site;
        otherwise users from both groups are returned.
        """
        qs = super().get_queryset(*args, **kwargs).select_related(
            'apartment'
        ).filter(is_active=True).order_by('last_name', 'first_name')
        if self.request.GET.get('residents'):
            return qs.filter(apartment__building=self.request.building)
        elif self.request.GET.get('managers'):
            return qs.filter(role='manager', site=self.request.building.site)
        return qs.filter(
            Q(
                Q(apartment__building=self.request.building) |
                Q(site=self.request.building.site)
            ),
        )
class AuthView(APIView):
    """Read/update endpoint for the currently authenticated user."""

    permission_classes = (permissions.IsAuthenticated, )

    def get_user_response(self, request):
        """Serialize ``request.user`` and wrap the payload in a Response."""
        serializer = AuthUserSerializer(request.user,
                                        context={'request': request})
        return Response(serializer.data)

    def get(self, request, *args, **kwargs):
        """Return the current user's profile."""
        return self.get_user_response(request)

    def put(self, request, *args, **kwargs):
        """Update the current user's profile from the request payload."""
        serializer = AuthUserSerializer(request.user, data=request.DATA)
        if serializer.is_valid():
            serializer.save(force_update=True)
            return Response(serializer.data)
        return Response(serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST)

    def patch(self, request, *args, **kwargs):
        """Change the current user's password."""
        password = request.DATA.get('password')
        if not password:
            return Response('Password is missing',
                            status=status.HTTP_400_BAD_REQUEST)
        user = request.user
        user.set_password(password)
        user.save()
        return Response()
| [
"[email protected]"
]
| |
b77cd80c0c1fbc74c1487f9da2d71f3e83e1b0ec | 54de64c1bd866c2cd1ef7f23dff20019a87ae408 | /src/bio2bel_drugbank/patent_utils.py | ea41eb5ec3822be8c47b5a766041d5e8125fa9e7 | [
"MIT"
]
| permissive | AspirinCode/drugbank-1 | 83fc8bfb3b275df085423ac53c698bc0a8bc9c27 | 1b842ed7a9de7904e8a11fd19ad35164ffb781bf | refs/heads/master | 2020-04-07T20:29:55.925875 | 2018-11-20T18:26:38 | 2018-11-20T18:26:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,704 | py | # -*- coding: utf-8 -*-
"""Utilities for downloading patents from Google.
Code modified from original work by Alexander Esser.
"""
import os
import re
from typing import Optional, Set
import requests
from bs4 import BeautifulSoup
LINK_PATTERN = "https?:\/\/patentimages\.storage\.googleapis\.com\/.+\/([A-z0-9]+\.pdf)"
LINK_RE = re.compile(LINK_PATTERN, re.IGNORECASE)
prefix_map = {
'United States': 'US',
'Canada': 'CA',
}
def download_google_patents(url: str, directory: str) -> Set[str]:
"""Crawls a list of URLs at patent.google.com and downloads the attached PDF documents
:param url: The url (e.g., https://patents.google.com/patent/US5972916)
:param directory: The output directory
"""
rv = set()
try:
r = requests.get(url)
data = r.text
soup = BeautifulSoup(data, "html.parser")
for link in soup.find_all("a"):
target = link.get("href")
link = _process_link(target, directory)
if link:
rv.add(link)
except Exception as e:
print("Could not download patent from {}: {}".format(url, str(e)))
return rv
def _process_link(target, directory: str) -> Optional[str]:
"""Download the link if it fits the description and return it if it works."""
m = LINK_RE.search(target)
if not m:
return
outfile = os.path.join(directory, m.group(1))
if os.path.exists(outfile):
return target
print(f"Downloading {target} to {outfile}")
r2 = requests.get(target, stream=True)
if r2.status_code != 200:
return
with open(outfile, 'wb') as f:
for chunk in r2:
f.write(chunk)
return target
| [
"[email protected]"
]
| |
bfca6c0531a704417241810a33f46ee4c038afad | 2b167e29ba07e9f577c20c54cb943861d0ccfa69 | /numerical_analysis_backup/small-scale-multiobj/pod50_milp/throughput/runsimu11_throughput.py | 89588a3c2132dc6081ea0222defc8c77da4d7d2d | []
| no_license | LiYan1988/kthOld_OFC | 17aeeed21e195d1a9a3262ec2e67d6b1d3f9ff0f | b1237577ea68ad735a65981bf29584ebd889132b | refs/heads/master | 2021-01-11T17:27:25.574431 | 2017-01-23T05:32:35 | 2017-01-23T05:32:35 | 79,773,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,440 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 4 15:15:10 2016
@author: li
optimize throughput
"""
#import sys
#sys.path.insert(0, '/home/li/Dropbox/KTH/numerical_analysis/ILPs')
import csv
from gurobipy import *
import numpy as np
from arch4_decomposition import Arch4_decompose
from arch1 import ModelSDM_arch1
from arch2_decomposition import Arch2_decompose
from arch5_decomposition import Arch5_decompose
np.random.seed(2010)
num_cores=3
num_slots=60
n_sim = 1 # number of simulations
n_start = 11 # index of start
n_end = n_start+n_sim # index of end
time_limit_routing = 1000 # 1000
time_limit_sa = 18000
alpha = 0
beta = 0.01
result = np.zeros((n_sim, 15))
total_cnk = []
for i in range(n_start, n_end):
filename = 'traffic_matrix__matrix_'+str(i)+'.csv'
# print filename
tm = []
with open(filename) as f:
reader = csv.reader(f)
for idx, row in enumerate(reader):
if idx>11:
row.pop()
row = [int(u) for u in row]
tm.append(row)
tm = np.array(tm)*25
total_cnk.append(tm.flatten().astype(bool).sum())
result[i-n_start, 14] = tm.flatten().astype(bool).sum()
print "\n"
print total_cnk
print "\n"
#%% arch4
print "Architecture 4"
m = Arch4_decompose(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model_routing(mipfocus=1,timelimit=time_limit_routing,mipgap=0.01)
m.create_model_sa(mipfocus=1,timelimit=time_limit_sa)
result[i-n_start, 0] = m.connections_lb
result[i-n_start, 1] = m.connections_ub
result[i-n_start, 2] = m.throughput_lb
result[i-n_start, 3] = m.throughput_ub
#%% arch1
print "Architecutre 1"
m = ModelSDM_arch1(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model(mipfocus=1, timelimit=time_limit_routing,mipgap=0.01)
result[i-n_start, 4] = m.connections
result[i-n_start, 5] = m.throughput
#%% arch2
print "Architecture 2"
m = Arch2_decompose(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model_routing(mipfocus=1,timelimit=time_limit_routing,mipgap=0.01)
m.create_model_sa(mipfocus=1,timelimit=time_limit_sa)
result[i-n_start, 6] = m.connections_lb
result[i-n_start, 7] = m.connections_ub
result[i-n_start, 8] = m.throughput_lb
result[i-n_start, 9] = m.throughput_ub
#%% arch5
print "Architecture 5"
m = Arch5_decompose(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model_routing(mipfocus=1, timelimit=time_limit_routing, mipgap=0.01)
m.create_model_sa(mipfocus=1, timelimit=time_limit_sa)
result[i-n_start, 10] = m.connections_lb
result[i-n_start, 11] = m.connections_ub
result[i-n_start, 12] = m.throughput_lb
result[i-n_start, 13] = m.throughput_ub
file_name = "result_throughput_{}to{}.csv".format(n_start, n_end)
with open(file_name, 'w') as f:
writer = csv.writer(f, delimiter=',')
writer.writerow(['arch4_connections_lb', 'arch4_connections_ub',
'arch4_throughput_lb', 'arch4_throughput_ub',
'arch1_connections', 'arch1_throughput',
'arch2_connections_lb', 'arch2_connections_ub',
'arch2_throughput_lb', 'arch2_throughput_ub',
'arch5_connections_lb', 'arch5_connections_ub',
'arch5_throughput_lb', 'arch5_throughput_ub',
'total_cnk'])
writer.writerows(result) | [
"[email protected]"
]
| |
8c37577beb948a84c1017887ad0ff113575583c4 | 87b7d7948aa51fdb4a27540240579788896369ea | /code/runs_sacred/model_4_classes/_sources/main_0d7ea3a13b62ec2b4e0ed10b9b965fe4.py | 721ea09321b607fc28b8b2985a463f302725e990 | []
| no_license | Samuel-Levesque/Projet_GLO7030 | 6f13accd63b52107ec3e3a0b9b5f52edccda7c8d | 557bce3235f09723900f65c6e3b44a0ed9d2b519 | refs/heads/master | 2022-01-16T12:49:22.884798 | 2019-05-05T18:38:35 | 2019-05-05T18:38:35 | 177,038,991 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,978 | py | from sacred import Experiment
from sacred.observers import FileStorageObserver
from data_set_file import create_huge_data_set,create_encoding_deconding_dict
from model_creation import create_model
from trainning import train_model,load_model_weights,create_scheduler
from test_metrics import calcul_metric_concours
import torch
import torch.optim as optim
import torch.nn as nn
import numpy as np
import random
from torch.utils.data import DataLoader
#Trucs sacred
experiment_sacred=Experiment("Doodle_Boys")
experiment_sacred.observers.append(FileStorageObserver.create('runs_sacred/model_4_classes'))
#Configs
@experiment_sacred.config
def configuration():
path_data = 'D:/User/William/Documents/Devoir/Projet Deep/data/mini_train/'
path_save_model="saves_model/model_4_classes.tar"
path_load_existing_model=None
# path_load_existing_model = "saves_model/model_4_classes.tar"
path_model_weights_test = "saves_model/model_4_classes.tar"
use_gpu = True
do_training=True
do_testing=True
nb_row_per_classe=300
nb_epoch = 3
batch_size = 32
learning_rate = 0.1
type_schedule="constant"
seed=123 #marche paas
torch.manual_seed(123)
np.random.seed(123)
random.seed(123)
#Main
@experiment_sacred.automain
def main_program(path_data,path_save_model,path_load_existing_model,path_model_weights_test,
use_gpu,do_training,do_testing,
nb_row_per_classe,
nb_epoch,batch_size,
learning_rate,type_schedule,
seed
):
#Seed
# torch.manual_seed(123)
# np.random.seed(123)
# random.seed(123)
# Label encoding and decoding dicts
enc_dict, dec_dict = create_encoding_deconding_dict(path_data)
#Data_set
size_image_train = 224
data_train=create_huge_data_set(path_data,nb_rows=nb_row_per_classe,size_image=size_image_train,encoding_dict=enc_dict)
data_valid=create_huge_data_set(path_data,nb_rows=100,size_image=size_image_train,skip_rows=range(1,nb_row_per_classe),encoding_dict=enc_dict)
# Model
model = create_model(use_gpu)
if use_gpu:
model.cuda()
#Loss
criterion = nn.CrossEntropyLoss()
#Optimiser
optimizer = optim.SGD(model.parameters(), lr=learning_rate)
# Scheduler LR
scheduler = create_scheduler(start_lr=learning_rate,type=type_schedule,optimizer=optimizer)
#Data loader
train_loader=DataLoader(data_train,batch_size=batch_size,shuffle=True)
valid_loader=DataLoader(data_valid,batch_size=batch_size,shuffle=True)
#Train
if do_training:
train_model(model,train_loader,valid_loader,nb_epoch,
scheduler,optimizer,criterion,use_gpu,
path_save=path_save_model,path_start_from_existing_model=path_load_existing_model)
#Test
if do_testing:
data_test = create_huge_data_set(path_data, nb_rows=100, size_image=size_image_train,
skip_rows=range(1, nb_row_per_classe + 100), encoding_dict=enc_dict)
test_loader = DataLoader(data_test, batch_size=batch_size)
model_final,history=load_model_weights(model,path_model_weights_test,type="best",use_gpu=use_gpu,get_history=True)
# history.display()
acc,loss,score_top3,conf_mat,acc_per_class=calcul_metric_concours(model_final,test_loader,use_gpu=use_gpu,show_acc_per_class=True)
print("Accuracy test: {}".format(acc))
print("Score top 3 concours: {}".format(score_top3))
print(acc_per_class)
#Log experiment
experiment_sacred.log_scalar("Test accuracy",acc)
experiment_sacred.log_scalar("Test loss", loss)
experiment_sacred.log_scalar("Test score top3", score_top3)
experiment_sacred.log_scalar("Test confusion matrix", conf_mat)
experiment_sacred.log_scalar("Test accuracy per class", acc_per_class)
| [
"[email protected]"
]
| |
81a54439253dce29241c49928fd05e2c8db9e060 | ac7f2369cf136cef946ee6eb89c5be1edda27769 | /hare_turtle_algorithm/scratch_4.py | d209eb062e4425d63c53283db7cf6454fa6fc968 | []
| no_license | iluvjava/Silly_Python_Stuff | d244a94a6c8236713123815ccd1f1f6c27b1cb98 | eb12a67c060de783e6b00d6030668f8d32630dad | refs/heads/master | 2021-04-22T23:14:14.215801 | 2021-04-05T05:25:42 | 2021-04-05T05:25:42 | 249,879,410 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | import numpy as np
y = x = np.array([np.arange(0, 10)]).T
print(x.T.shape)
print(y.shape)
print([email protected])
print(np.linspace(0, 100, 400)[np.newaxis, :].T)
| [
"[email protected]"
]
| |
8fcc9b9fcb2d3773828fcb001c5e5282e5601c8e | 22cec5da2b1fb83dcc9cf7c888f1e2078b05b62e | /flora/wallet/sign_coin_solutions.py | e1848b04de272fc1cbdb5bc12e37e82971b93c6b | [
"Apache-2.0"
]
| permissive | JuEnPeHa/flora-blockchain | 649d351e096e73222ab79759c71e191e42da5d34 | 656b5346752d43edb89d7f58aaf35b1cacc9a366 | refs/heads/main | 2023-07-18T08:52:51.353754 | 2021-09-07T08:13:35 | 2021-09-07T08:13:35 | 399,297,784 | 0 | 0 | Apache-2.0 | 2021-08-24T01:30:45 | 2021-08-24T01:30:44 | null | UTF-8 | Python | false | false | 2,037 | py | import inspect
from typing import List, Any
import blspy
from blspy import AugSchemeMPL
from flora.types.coin_solution import CoinSolution
from flora.types.spend_bundle import SpendBundle
from flora.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
async def sign_coin_solutions(
coin_solutions: List[CoinSolution],
secret_key_for_public_key_f: Any, # Potentially awaitable function from G1Element => Optional[PrivateKey]
additional_data: bytes,
max_cost: int,
) -> SpendBundle:
signatures: List[blspy.G2Element] = []
pk_list: List[blspy.G1Element] = []
msg_list: List[bytes] = []
for coin_solution in coin_solutions:
# Get AGG_SIG conditions
err, conditions_dict, cost = conditions_dict_for_solution(
coin_solution.puzzle_reveal, coin_solution.solution, max_cost
)
if err or conditions_dict is None:
error_msg = f"Sign transaction failed, con:{conditions_dict}, error: {err}"
raise ValueError(error_msg)
# Create signature
for pk, msg in pkm_pairs_for_conditions_dict(
conditions_dict, bytes(coin_solution.coin.name()), additional_data
):
pk_list.append(pk)
msg_list.append(msg)
if inspect.iscoroutinefunction(secret_key_for_public_key_f):
secret_key = await secret_key_for_public_key_f(pk)
else:
secret_key = secret_key_for_public_key_f(pk)
if secret_key is None:
e_msg = f"no secret key for {pk}"
raise ValueError(e_msg)
assert bytes(secret_key.get_g1()) == bytes(pk)
signature = AugSchemeMPL.sign(secret_key, msg)
assert AugSchemeMPL.verify(pk, msg, signature)
signatures.append(signature)
# Aggregate signatures
aggsig = AugSchemeMPL.aggregate(signatures)
assert AugSchemeMPL.aggregate_verify(pk_list, msg_list, aggsig)
return SpendBundle(coin_solutions, aggsig)
| [
"[email protected]"
]
| |
970e032873598b577c478df4bda72a6d70df2593 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_328/ch45_2020_04_11_19_23_00_469501.py | 654fc6cd7bedadc387007d4d63a90c312e9a584c | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | lista= []
while True:
x= int(input('Digite algum número: '))
if x>0:
lista.append(x)
elif x <= 0:
lista.reverse()
print(lista) | [
"[email protected]"
]
| |
ef76fce18c4d75abc69a31441786b2d3465aaad6 | 5ac40dd0907f6b5a7adff338465c7c41fffc4348 | /src/jukeboxcore/gui/widgets/guerilla/shotcreator_ui.py | a94b8806cff4c0c262fcc729863f846a82ed3722 | []
| permissive | JukeboxPipeline/jukebox-core | 8effaf675c8a3b39d043bb69e40b75e591bb4a21 | bac2280ca49940355270e4b69400ce9976ab2e6f | refs/heads/master | 2021-07-22T13:50:58.168148 | 2015-06-01T16:20:56 | 2015-06-01T16:20:56 | 24,540,320 | 2 | 0 | BSD-3-Clause | 2021-06-10T19:34:28 | 2014-09-27T19:06:31 | Python | UTF-8 | Python | false | false | 2,282 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'h:\projects\jukebox-core\src\jukeboxcore\gui\widgets\guerilla\shotcreator.ui'
#
# Created: Tue Jan 13 18:54:57 2015
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_shotcreator_dialog(object):
def setupUi(self, shotcreator_dialog):
shotcreator_dialog.setObjectName("shotcreator_dialog")
shotcreator_dialog.resize(694, 398)
self.gridLayout = QtGui.QGridLayout(shotcreator_dialog)
self.gridLayout.setObjectName("gridLayout")
self.name_lb = QtGui.QLabel(shotcreator_dialog)
self.name_lb.setObjectName("name_lb")
self.gridLayout.addWidget(self.name_lb, 0, 0, 1, 1)
self.name_le = QtGui.QLineEdit(shotcreator_dialog)
self.name_le.setObjectName("name_le")
self.gridLayout.addWidget(self.name_le, 0, 1, 1, 1)
self.desc_lb = QtGui.QLabel(shotcreator_dialog)
self.desc_lb.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.desc_lb.setObjectName("desc_lb")
self.gridLayout.addWidget(self.desc_lb, 1, 0, 1, 1)
self.desc_pte = QtGui.QPlainTextEdit(shotcreator_dialog)
self.desc_pte.setObjectName("desc_pte")
self.gridLayout.addWidget(self.desc_pte, 1, 1, 1, 1)
self.create_pb = QtGui.QPushButton(shotcreator_dialog)
self.create_pb.setObjectName("create_pb")
self.gridLayout.addWidget(self.create_pb, 2, 1, 1, 1)
self.retranslateUi(shotcreator_dialog)
QtCore.QMetaObject.connectSlotsByName(shotcreator_dialog)
def retranslateUi(self, shotcreator_dialog):
shotcreator_dialog.setWindowTitle(QtGui.QApplication.translate("shotcreator_dialog", "Create Shot", None, QtGui.QApplication.UnicodeUTF8))
self.name_lb.setText(QtGui.QApplication.translate("shotcreator_dialog", "Name", None, QtGui.QApplication.UnicodeUTF8))
self.desc_lb.setText(QtGui.QApplication.translate("shotcreator_dialog", "Description", None, QtGui.QApplication.UnicodeUTF8))
self.create_pb.setText(QtGui.QApplication.translate("shotcreator_dialog", "Create", None, QtGui.QApplication.UnicodeUTF8))
| [
"[email protected]"
]
| |
327bfffa563cbbdd3435fd1eb8bb852e1a0cf97b | 910d4dd8e56e9437cf09dd8b9c61167673140a1f | /dd5_Stack2_연습문제/부분집합.py | b02d1c3ead323fdbff993e93b8f47ded822adf39 | []
| no_license | nopasanadamindy/Algorithms | 10825b212395680401b200a37ab4fde9085bc61f | 44b82d2f129c4cc6e811b651c0202a18719689cb | refs/heads/master | 2022-09-28T11:39:54.630487 | 2020-05-29T09:49:56 | 2020-05-29T09:49:56 | 237,923,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | # {1,2,3} 모든 부분 집합 출력하기
N = 3
A = [0 for _ in range(N)] # 원소의 포함여부 저장 (0, 1)
data = [1, 2, 3]
def printSet(n):
for i in range(n): # 각 부분 배열의 원소 출력
if A[i] == 1: # A[i]가 1이면 포함된 것이므로 출력.
print(data[i], end="")
print()
def powerset(n, k): # n: 원소의 갯수, k: 현재depth
if n == k: # Basis Part
printSet(n)
else: # Inductive Part
A[k] = 1 # k번 요소 O
powerset(n, k + 1) # 다음 요소 포함 여부 결정
A[k] = 0 # k번 요소 X
powerset(n, k + 1) # 다음 요소 포함 여부 결정
powerset(N, 0)
| [
"[email protected]"
]
| |
a02bee8748891f92e694a53f4c42f3fb36df9143 | 46244bb6af145cb393846505f37bf576a8396aa0 | /leetcode/066.plus_one.py | 4afb73916cebf2c50af6b1e5aa64b0ec40b737b8 | []
| no_license | aoeuidht/homework | c4fabfb5f45dbef0874e9732c7d026a7f00e13dc | 49fb2a2f8a78227589da3e5ec82ea7844b36e0e7 | refs/heads/master | 2022-10-28T06:42:04.343618 | 2022-10-15T15:52:06 | 2022-10-15T15:52:06 | 18,726,877 | 4 | 3 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
"""
"""
class Solution:
# @param digits, a list of integer digits
# @return a list of integer digits
def plusOne(self, digits):
if not digits:
return [1]
carry = 0
for i in range(len(digits)-1, -1, -1):
print i
r = digits[i] + 1
digits[i] = (r % 10)
carry = r / 10
if carry < 1:
break
return ([carry] + digits) if (carry > 0) else digits
if __name__ == '__main__':
s = Solution()
v = lambda st: (st, s.plusOne(st))
print v([0])
| [
"[email protected]"
]
| |
cdbbd4c40b880cbbffc579c8ac2750e95e75bb71 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp42_7000.py | 7498f9768395d5a4d3115ae4ba0a49d57c335108 | []
| no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,978 | py | ITEM: TIMESTEP
7000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
7.2345940138227149e-01 4.6476540598614875e+01
7.2345940138227149e-01 4.6476540598614875e+01
7.2345940138227149e-01 4.6476540598614875e+01
ITEM: ATOMS id type xs ys zs
8 1 0.128552 0.0630246 0.0619357
35 1 0.0601396 0.125834 0.0579319
130 1 0.0620239 0.0611971 0.118196
165 1 0.121561 0.127058 0.122569
389 1 0.122801 7.85632e-05 0.373726
1423 1 0.440806 0.503865 0.436085
4 1 0.00145638 0.0660019 0.0574981
161 1 0.0018685 0.126967 0.120668
61 1 0.878707 0.12832 -0.000217188
509 1 0.873791 0.375808 0.370374
2 1 0.0606639 0.057574 -0.00141547
510 1 0.936259 0.439689 0.379218
12 1 0.246629 0.059001 0.0580648
39 1 0.183194 0.123133 0.0624847
43 1 0.311763 0.117562 0.0645861
134 1 0.189522 0.0648692 0.118709
138 1 0.309126 0.0546212 0.124301
169 1 0.251353 0.122474 0.125306
10 1 0.317942 0.0595849 0.000554554
1437 1 0.873931 0.498099 0.378169
143 1 0.43594 -0.00534059 0.185141
16 1 0.376082 0.059471 0.063345
47 1 0.440619 0.123451 0.058817
142 1 0.437309 0.0639194 0.122341
173 1 0.370194 0.120437 0.123993
177 1 0.503774 0.117399 0.122794
20 1 0.497881 0.0599086 0.0634801
1181 1 0.869593 0.497704 0.124042
24 1 0.622682 0.0627586 0.0613091
51 1 0.560709 0.125569 0.0578838
146 1 0.565034 0.0587568 0.126662
181 1 0.622477 0.122264 0.128479
1171 1 0.56661 0.497058 0.187624
1431 1 0.691599 0.500284 0.440763
28 1 0.74877 0.0616595 0.0627112
55 1 0.687452 0.125176 0.0597502
59 1 0.809676 0.125273 0.057304
150 1 0.692701 0.0637766 0.124013
154 1 0.80358 0.0654529 0.126697
185 1 0.745165 0.130014 0.124371
511 1 0.936169 0.372248 0.434261
1545 1 0.250192 0.497895 0.500736
512 1 0.876242 0.435194 0.439084
32 1 0.872674 0.0631448 0.0528145
63 1 0.938453 0.129104 0.058489
158 1 0.937772 0.0658637 0.120985
189 1 0.868243 0.123613 0.127033
399 1 0.438347 0.00121822 0.43826
40 1 0.123344 0.190071 0.0624893
67 1 0.061922 0.25435 0.0595683
72 1 0.133622 0.312737 0.0624685
162 1 0.0613279 0.188186 0.12354
194 1 0.0579134 0.316056 0.125646
197 1 0.125838 0.250344 0.1246
193 1 0.000868717 0.25327 0.125516
36 1 0.997218 0.19095 0.0645919
1435 1 0.814979 0.497984 0.435272
1281 1 1.00149 0.49997 0.250178
1429 1 0.627468 0.499332 0.386837
44 1 0.252754 0.187648 0.0625167
71 1 0.193338 0.246197 0.065619
75 1 0.31301 0.242155 0.0679734
76 1 0.252183 0.310784 0.0611341
166 1 0.188771 0.18211 0.123487
170 1 0.312798 0.190317 0.128483
198 1 0.192784 0.310755 0.121206
201 1 0.254954 0.251877 0.124522
202 1 0.314553 0.306996 0.124304
37 1 0.122934 0.127788 0.00153675
54 1 0.685453 0.191815 0.00013005
279 1 0.689414 0.00444724 0.314638
48 1 0.375741 0.18364 0.0641309
79 1 0.436483 0.245963 0.0594011
80 1 0.377986 0.309758 0.0622608
174 1 0.438238 0.183728 0.120081
205 1 0.379909 0.245674 0.124493
206 1 0.431713 0.315859 0.126658
52 1 0.502132 0.184773 0.0556531
1553 1 0.500981 0.497773 0.502565
209 1 0.494878 0.252142 0.120503
84 1 0.495935 0.311758 0.0608669
56 1 0.623663 0.183738 0.0663643
83 1 0.559196 0.244661 0.0641338
88 1 0.623248 0.308722 0.0632706
178 1 0.556537 0.183699 0.125315
210 1 0.558979 0.312292 0.120671
213 1 0.623702 0.252075 0.124608
60 1 0.751316 0.190625 0.0595086
87 1 0.6843 0.253515 0.0617974
91 1 0.809441 0.253359 0.0601402
92 1 0.751254 0.317498 0.0631962
182 1 0.682531 0.185316 0.122869
186 1 0.810428 0.189535 0.122964
214 1 0.686531 0.319619 0.122417
217 1 0.744962 0.25211 0.127427
218 1 0.811189 0.315369 0.12395
1169 1 0.500988 0.499828 0.123902
58 1 0.811208 0.189449 0.000213794
68 1 -0.0030746 0.310442 0.0629016
64 1 0.872593 0.188861 0.0671485
95 1 0.9296 0.251326 0.0692817
96 1 0.872728 0.311167 0.0643383
190 1 0.932789 0.187715 0.123904
221 1 0.871726 0.250878 0.126301
222 1 0.936077 0.314049 0.128202
53 1 0.626523 0.122997 0.00239837
481 1 -0.00233232 0.379308 0.371507
99 1 0.0698692 0.370332 0.0629588
104 1 0.132725 0.435108 0.0643846
226 1 0.0575469 0.436034 0.122689
229 1 0.124783 0.375395 0.127106
100 1 -0.00238494 0.442914 0.0633281
225 1 -0.00447408 0.373053 0.129089
135 1 0.186565 0.000400529 0.184849
62 1 0.934627 0.197249 0.0037252
1557 1 0.631374 0.499806 0.499968
281 1 0.753278 0.0020219 0.253043
103 1 0.193817 0.370276 0.0634747
107 1 0.316543 0.368993 0.0635862
108 1 0.254407 0.433809 0.0609888
230 1 0.192594 0.432589 0.121703
233 1 0.253361 0.370163 0.124159
234 1 0.310614 0.434772 0.122718
1303 1 0.68494 0.499832 0.313981
57 1 0.74889 0.124971 -0.00479295
111 1 0.434137 0.371804 0.0640754
112 1 0.379526 0.43471 0.0590255
237 1 0.372144 0.377514 0.125438
238 1 0.437321 0.434043 0.124914
241 1 0.498001 0.368891 0.121795
116 1 0.497304 0.436309 0.0607125
277 1 0.625793 0.000628152 0.249755
502 1 0.685319 0.440824 0.3764
503 1 0.685713 0.377063 0.438696
505 1 0.746164 0.371623 0.375539
411 1 0.816512 0.00157497 0.438578
6 1 0.183284 0.0638795 -0.00197856
115 1 0.557357 0.374096 0.0609541
120 1 0.624987 0.435852 0.062274
242 1 0.559459 0.43416 0.119013
245 1 0.619542 0.374346 0.122522
1305 1 0.748577 0.501699 0.252
119 1 0.686413 0.375393 0.0604713
123 1 0.813632 0.377248 0.06223
124 1 0.749588 0.43348 0.0602707
246 1 0.682314 0.430593 0.125754
249 1 0.750122 0.378295 0.128234
250 1 0.81313 0.433156 0.126269
22 1 0.685842 0.058761 -0.00122662
46 1 0.437046 0.188562 -0.00156703
127 1 0.937209 0.378517 0.0599529
128 1 0.872609 0.441509 0.061635
253 1 0.879914 0.378305 0.124592
254 1 0.936245 0.440139 0.125389
1409 1 0.00116811 0.498174 0.37569
506 1 0.813886 0.435297 0.375083
507 1 0.81274 0.37594 0.433099
508 1 0.75235 0.435584 0.43348
136 1 0.130429 0.0633251 0.18346
163 1 0.0642844 0.12429 0.185431
258 1 0.0626707 0.0626388 0.247322
264 1 0.122467 0.0627412 0.306706
291 1 0.0673849 0.128648 0.305536
293 1 0.126393 0.123899 0.24655
289 1 0.998768 0.12333 0.251277
140 1 0.249732 0.0599185 0.189703
167 1 0.191698 0.123987 0.189693
171 1 0.313517 0.12353 0.189509
262 1 0.183233 0.0589461 0.246267
266 1 0.314834 0.0637815 0.249472
268 1 0.250061 0.0621561 0.311939
295 1 0.186613 0.126324 0.31215
297 1 0.246649 0.126435 0.255237
299 1 0.314374 0.119777 0.31392
497 1 0.503502 0.374056 0.372282
144 1 0.379654 0.0579709 0.1919
175 1 0.436289 0.121758 0.18678
270 1 0.441094 0.0637015 0.252388
272 1 0.376906 0.0686017 0.317739
301 1 0.377446 0.119876 0.253432
303 1 0.440069 0.128176 0.316893
276 1 0.497579 0.0637862 0.313563
499 1 0.560995 0.377338 0.439845
148 1 0.50105 0.0618843 0.1883
305 1 0.499461 0.130923 0.255709
152 1 0.627963 0.0644428 0.192913
179 1 0.564038 0.127673 0.187498
274 1 0.559456 0.0743504 0.250611
280 1 0.626151 0.0650221 0.319268
307 1 0.559634 0.133156 0.312291
309 1 0.623981 0.129246 0.255201
504 1 0.626374 0.436987 0.438067
156 1 0.749008 0.0601101 0.189288
183 1 0.685492 0.129157 0.190995
187 1 0.809609 0.124112 0.186662
278 1 0.689394 0.0642617 0.257972
282 1 0.8052 0.0663072 0.250196
284 1 0.751253 0.0658281 0.315226
311 1 0.688663 0.13078 0.317425
313 1 0.751228 0.128691 0.251998
315 1 0.813729 0.131315 0.313511
626 1 0.56668 0.43788 0.498761
415 1 0.934626 -0.00282726 0.43551
498 1 0.560171 0.43641 0.378076
501 1 0.626643 0.377951 0.377553
260 1 1.00084 0.064715 0.310442
132 1 1.00368 0.0634479 0.184217
160 1 0.868685 0.0661395 0.188977
191 1 0.935907 0.126375 0.181639
286 1 0.932168 0.0640314 0.251438
288 1 0.866742 0.0625634 0.310761
317 1 0.873402 0.127423 0.243852
319 1 0.927993 0.131207 0.307613
3 1 0.0581158 -0.00190146 0.0619134
1177 1 0.751377 0.494014 0.125438
168 1 0.131123 0.184282 0.182038
195 1 0.0619186 0.251811 0.183098
200 1 0.12648 0.314769 0.182843
290 1 0.0682699 0.193436 0.243153
296 1 0.125708 0.197806 0.306016
322 1 0.0627085 0.323108 0.244657
323 1 0.0611777 0.252611 0.309632
325 1 0.124 0.258409 0.246446
328 1 0.123175 0.315897 0.311894
321 1 0.00261834 0.252948 0.246918
292 1 1.0036 0.189879 0.313072
172 1 0.252467 0.187329 0.189602
199 1 0.190551 0.247671 0.18132
203 1 0.314936 0.251209 0.187796
204 1 0.253621 0.313072 0.190657
294 1 0.189229 0.192158 0.247511
298 1 0.310573 0.184121 0.250842
300 1 0.250667 0.187152 0.31468
326 1 0.190392 0.315595 0.245044
327 1 0.187356 0.249585 0.314412
329 1 0.254179 0.247527 0.254073
330 1 0.310352 0.313613 0.252407
331 1 0.314816 0.250537 0.316564
332 1 0.246705 0.314151 0.312047
176 1 0.374258 0.185664 0.191951
207 1 0.435957 0.246665 0.186958
208 1 0.374461 0.313458 0.19019
302 1 0.432612 0.188601 0.25042
304 1 0.374425 0.182642 0.316039
333 1 0.369893 0.245776 0.249567
334 1 0.438595 0.308344 0.248895
335 1 0.433205 0.250308 0.311236
336 1 0.374491 0.313021 0.315189
212 1 0.501217 0.308275 0.184892
180 1 0.499941 0.18775 0.189727
308 1 0.499901 0.198606 0.311885
337 1 0.504367 0.254397 0.250366
340 1 0.497735 0.308694 0.311539
184 1 0.620302 0.185567 0.18841
211 1 0.560284 0.24965 0.182716
216 1 0.624408 0.314685 0.186641
306 1 0.564943 0.187647 0.252178
312 1 0.626893 0.192262 0.317594
338 1 0.565502 0.3151 0.247957
339 1 0.56419 0.256323 0.318893
341 1 0.625277 0.247575 0.249511
344 1 0.626406 0.316473 0.319034
188 1 0.745194 0.188593 0.188862
215 1 0.685737 0.251487 0.188383
219 1 0.81075 0.25178 0.189911
220 1 0.747784 0.3166 0.184695
310 1 0.684747 0.187144 0.256345
314 1 0.81079 0.188943 0.248082
316 1 0.749085 0.191064 0.313833
342 1 0.682474 0.314296 0.247686
343 1 0.688598 0.251542 0.314451
345 1 0.749651 0.250412 0.254062
346 1 0.810808 0.310493 0.248272
347 1 0.810164 0.251835 0.310499
348 1 0.744454 0.312693 0.310947
164 1 1.00328 0.184529 0.184832
324 1 1.00033 0.318132 0.309838
196 1 0.998697 0.313816 0.187448
192 1 0.872146 0.188689 0.183754
223 1 0.940535 0.250959 0.194518
224 1 0.877115 0.312882 0.190439
318 1 0.942039 0.184671 0.246444
320 1 0.875249 0.19185 0.312871
349 1 0.872674 0.252178 0.249771
350 1 0.93938 0.312645 0.253081
351 1 0.942492 0.250996 0.309285
352 1 0.878816 0.311622 0.311129
141 1 0.374978 3.92766e-05 0.122342
495 1 0.431885 0.3731 0.440483
227 1 0.0610198 0.374336 0.184984
232 1 0.126975 0.436617 0.18747
354 1 0.062513 0.437497 0.246811
355 1 0.0567612 0.38038 0.31601
357 1 0.12795 0.374827 0.242928
360 1 0.123694 0.438657 0.31007
356 1 0.992778 0.438884 0.310326
1283 1 0.0658573 0.501334 0.308944
273 1 0.506477 0.00427065 0.246629
494 1 0.441121 0.431742 0.378643
275 1 0.564042 0.00585184 0.308686
231 1 0.190993 0.374897 0.183494
235 1 0.311153 0.37696 0.185077
236 1 0.251098 0.434731 0.190255
358 1 0.188591 0.437991 0.247593
359 1 0.186018 0.37192 0.310038
361 1 0.252116 0.377792 0.251082
362 1 0.319875 0.438181 0.248636
363 1 0.314823 0.373693 0.313649
364 1 0.247823 0.43407 0.315161
493 1 0.373908 0.371581 0.377474
239 1 0.43849 0.369392 0.189544
240 1 0.377445 0.4337 0.183779
365 1 0.376758 0.37242 0.250733
366 1 0.433668 0.435656 0.248918
367 1 0.436666 0.374441 0.311694
368 1 0.376764 0.434919 0.310584
372 1 0.495942 0.434176 0.309584
244 1 0.50491 0.433899 0.187062
1439 1 0.942286 0.503028 0.43994
496 1 0.370026 0.438406 0.436723
287 1 0.936656 0.00398446 0.311895
369 1 0.500541 0.370402 0.247066
243 1 0.560679 0.371185 0.183311
248 1 0.624468 0.435352 0.186798
370 1 0.561684 0.432782 0.251939
371 1 0.562493 0.374042 0.313589
373 1 0.620893 0.376883 0.252521
376 1 0.624071 0.436007 0.314989
30 1 0.936737 0.0623645 0.000342756
129 1 0.00269962 0.000235982 0.121504
500 1 0.50152 0.437805 0.442839
247 1 0.68836 0.377992 0.190801
251 1 0.811435 0.374581 0.18448
252 1 0.753016 0.437142 0.193963
374 1 0.683446 0.436022 0.252517
375 1 0.683907 0.37497 0.314856
377 1 0.745761 0.373462 0.25147
378 1 0.81527 0.437948 0.252875
379 1 0.812838 0.37478 0.309425
380 1 0.747196 0.434409 0.311165
487 1 0.187698 0.37479 0.439347
1179 1 0.811322 0.499754 0.190289
1307 1 0.810746 0.505592 0.312752
263 1 0.183463 -0.00171005 0.312065
153 1 0.749312 0.00464983 0.125944
353 1 -0.00250144 0.379131 0.24883
228 1 -0.000391314 0.437336 0.1879
255 1 0.934083 0.377709 0.191298
256 1 0.871654 0.435546 0.19066
381 1 0.873295 0.37573 0.248809
382 1 0.935019 0.440722 0.248507
383 1 0.931914 0.380095 0.308505
384 1 0.874068 0.441369 0.315725
492 1 0.254135 0.439013 0.43624
1153 1 0.000331684 0.498357 0.124853
21 1 0.620344 0.000766715 -0.00146579
386 1 0.0662106 0.0684488 0.367382
392 1 0.126811 0.0619125 0.433824
419 1 0.0557541 0.124532 0.432022
421 1 0.12483 0.127573 0.369494
45 1 0.378547 0.124434 0.00133342
417 1 0.993974 0.123545 0.369929
1055 1 0.940165 0.504438 0.0616815
390 1 0.186223 0.0581618 0.371363
394 1 0.313466 0.0616664 0.375598
396 1 0.249939 0.0590612 0.437839
423 1 0.182974 0.122141 0.435049
425 1 0.250685 0.12319 0.376158
427 1 0.305581 0.119276 0.439374
491 1 0.311575 0.375249 0.437182
27 1 0.808756 0.00399316 0.0608145
486 1 0.188179 0.437942 0.371363
398 1 0.44097 0.0645954 0.376025
400 1 0.377356 0.0581341 0.434185
429 1 0.374444 0.130744 0.382271
431 1 0.443105 0.126526 0.434328
404 1 0.500676 0.0635706 0.440855
433 1 0.502201 0.12964 0.376101
1167 1 0.442553 0.497192 0.188607
617 1 0.250893 0.379023 0.499613
489 1 0.251782 0.374845 0.376108
1043 1 0.55871 0.497263 0.0568525
402 1 0.557668 0.066272 0.375141
408 1 0.626015 0.06475 0.434459
435 1 0.561246 0.124332 0.441317
437 1 0.62323 0.127912 0.376177
613 1 0.123415 0.374029 0.497484
159 1 0.934087 0.00619878 0.187281
562 1 0.564433 0.190248 0.499584
406 1 0.689208 0.06333 0.377409
410 1 0.812277 0.0631291 0.369976
412 1 0.749065 0.0633817 0.437206
439 1 0.686039 0.122799 0.444428
441 1 0.754034 0.123997 0.371065
443 1 0.809814 0.131784 0.432745
147 1 0.562587 0.000727164 0.181478
388 1 -0.00667217 0.0631736 0.434461
155 1 0.808683 -0.00127362 0.185638
414 1 0.93414 0.0650846 0.365536
416 1 0.875739 0.0701361 0.431391
445 1 0.875632 0.129289 0.369882
447 1 0.938803 0.12923 0.438796
1183 1 0.931201 0.49678 0.189292
1291 1 0.313711 0.498161 0.310527
418 1 0.06194 0.187106 0.376438
424 1 0.123853 0.186693 0.437777
450 1 0.0594206 0.317453 0.371678
451 1 0.0646468 0.248481 0.431968
453 1 0.123632 0.251669 0.367846
456 1 0.127553 0.313207 0.434796
452 1 0.994775 0.312803 0.435383
449 1 1.00268 0.253946 0.372113
391 1 0.185279 -0.00283091 0.433333
422 1 0.187366 0.189477 0.373049
426 1 0.309395 0.185385 0.381033
428 1 0.248359 0.182331 0.440471
454 1 0.186034 0.319678 0.371947
455 1 0.18237 0.249334 0.440373
457 1 0.246698 0.253643 0.373445
458 1 0.313812 0.314132 0.375606
459 1 0.307098 0.24968 0.436605
460 1 0.247253 0.312552 0.439602
1301 1 0.623926 0.499971 0.253137
546 1 0.0569969 0.190214 0.494085
1311 1 0.935712 0.502896 0.317711
430 1 0.439075 0.190808 0.373474
432 1 0.37508 0.192366 0.439258
461 1 0.378866 0.253051 0.377676
462 1 0.438708 0.316425 0.375089
463 1 0.441406 0.249369 0.437825
464 1 0.376712 0.312293 0.441147
436 1 0.498526 0.189639 0.442379
468 1 0.498721 0.317384 0.438036
465 1 0.501899 0.250892 0.376591
434 1 0.566026 0.190338 0.377444
440 1 0.626881 0.187723 0.444149
466 1 0.563432 0.311309 0.381162
467 1 0.559754 0.247968 0.438728
469 1 0.63032 0.248573 0.382204
472 1 0.620388 0.313928 0.441874
271 1 0.439033 -0.00106042 0.318222
473 1 0.753229 0.250453 0.37312
438 1 0.689598 0.1874 0.38137
442 1 0.809045 0.186526 0.371863
474 1 0.815001 0.313861 0.369948
444 1 0.753241 0.189608 0.440551
475 1 0.811177 0.247323 0.436242
476 1 0.751959 0.314415 0.437781
470 1 0.684813 0.315197 0.380985
471 1 0.685407 0.251678 0.446317
420 1 0.997909 0.189232 0.435278
479 1 0.939295 0.252653 0.434769
478 1 0.938104 0.314663 0.369391
477 1 0.873111 0.254639 0.37348
446 1 0.938719 0.19003 0.371371
448 1 0.873682 0.189067 0.431966
480 1 0.876965 0.312689 0.437697
483 1 0.0569922 0.371938 0.438989
484 1 -0.00134206 0.435792 0.440835
485 1 0.122074 0.372907 0.377218
488 1 0.126285 0.436041 0.434475
1549 1 0.381121 0.49942 0.498838
490 1 0.313301 0.436939 0.376498
482 1 0.064173 0.439183 0.372623
157 1 0.868307 0.0069701 0.126431
1433 1 0.753198 0.501021 0.372849
269 1 0.376982 0.00081037 0.251643
285 1 0.870084 0.000646785 0.245479
1419 1 0.316831 0.500513 0.435092
1049 1 0.749367 0.499365 -8.06914e-05
1421 1 0.376461 0.495069 0.373614
1295 1 0.439645 0.498139 0.315578
15 1 0.438754 0.00303228 0.065206
1297 1 0.507068 0.495652 0.248928
1175 1 0.683844 0.495873 0.185368
403 1 0.566975 0.00452225 0.440572
395 1 0.3123 -0.000585075 0.439513
610 1 0.0609063 0.437441 0.498717
593 1 0.497808 0.250396 0.499175
614 1 0.187232 0.440176 0.500463
9 1 0.245108 0.00202299 -0.00365606
1287 1 0.188464 0.500764 0.311751
139 1 0.313057 0.00157957 0.190137
1031 1 0.191881 0.494702 0.0663062
1035 1 0.311949 0.501131 0.0634568
1161 1 0.254855 0.496256 0.122763
29 1 0.870721 -0.000234753 0.000450807
413 1 0.875065 -0.000707176 0.373339
1285 1 0.129312 0.496968 0.250386
133 1 0.122837 0.00111593 0.1228
1045 1 0.621874 0.500224 -0.000807479
283 1 0.812789 -0.00129986 0.312839
1299 1 0.565038 0.497543 0.318349
131 1 0.0663741 0.00684952 0.184368
586 1 0.308124 0.31267 0.496695
514 1 0.0661114 0.0592995 0.497838
126 1 0.936719 0.43723 -0.000539188
618 1 0.315415 0.444037 0.500092
530 1 0.558248 0.0639541 0.503641
573 1 0.880604 0.126956 0.499553
17 1 0.500793 0.000173482 2.98394e-06
525 1 0.375497 -0.00123303 0.497529
1053 1 0.876359 0.502476 0.00249757
605 1 0.871831 0.247236 0.496698
125 1 0.872662 0.373497 -0.00103968
558 1 0.433649 0.195764 0.50476
1025 1 0.999089 0.501939 0.00247765
578 1 0.0633985 0.30943 0.500574
526 1 0.436419 0.0653712 0.500581
1 1 1.00082 -0.00223193 -0.00420077
561 1 0.49684 0.123359 0.500739
101 1 0.133281 0.379761 0.0047027
109 1 0.373151 0.373592 0.00135549
1561 1 0.748435 0.504914 0.502589
566 1 0.688869 0.18698 0.499233
117 1 0.622944 0.368307 0.00276264
622 1 0.436778 0.436051 0.498559
14 1 0.433657 0.0579452 -0.00298516
625 1 0.500982 0.375712 0.500169
634 1 0.810848 0.444363 0.499293
638 1 0.936604 0.436084 0.501887
518 1 0.182865 0.0582678 0.50336
49 1 0.496425 0.125832 -0.00902918
74 1 0.311832 0.311072 0.000372142
38 1 0.189616 0.191698 0.00335144
1041 1 0.496416 0.497982 -0.00655761
34 1 0.0634845 0.187676 -0.00239472
521 1 0.249008 -0.00660895 0.500631
520 1 0.126301 0.0622904 0.562213
547 1 0.0637536 0.122087 0.559946
642 1 0.0597411 0.0575779 0.62446
677 1 0.128431 0.124784 0.622987
516 1 0.000300178 0.063029 0.559377
907 1 0.311435 -0.00124977 0.939829
513 1 -0.00163344 -0.000334918 0.498526
581 1 0.123125 0.248136 0.502039
1943 1 0.687658 0.490706 0.931849
524 1 0.244747 0.0617972 0.562151
551 1 0.189595 0.126865 0.562738
555 1 0.308629 0.128502 0.552369
646 1 0.187072 0.0671946 0.624397
650 1 0.31117 0.0652671 0.620275
681 1 0.249288 0.126762 0.619581
909 1 0.373105 -8.40608e-05 0.870463
527 1 0.437495 0.00415031 0.560414
641 1 -0.00426936 -0.00101645 0.623458
528 1 0.372371 0.0663369 0.564949
559 1 0.434526 0.124524 0.562057
654 1 0.434501 0.0624754 0.627552
685 1 0.375361 0.123588 0.630649
532 1 0.498787 0.0609038 0.566987
601 1 0.753871 0.246728 0.501564
1951 1 0.938844 0.501539 0.937489
689 1 0.491391 0.13202 0.627178
536 1 0.625442 0.0583403 0.564089
563 1 0.559668 0.126134 0.571048
658 1 0.560138 0.0636245 0.628744
693 1 0.623675 0.122774 0.625306
1021 1 0.87753 0.374464 0.869492
669 1 0.879409 0.00673035 0.625305
540 1 0.750762 0.0658896 0.56392
567 1 0.692765 0.127084 0.56069
571 1 0.813331 0.123383 0.565094
662 1 0.686999 0.0633361 0.624982
666 1 0.813392 0.0613319 0.627102
697 1 0.753659 0.121914 0.62917
522 1 0.315828 0.0614469 0.50067
515 1 0.0614112 -0.00214769 0.5607
1022 1 0.943811 0.434816 0.878991
570 1 0.814508 0.187234 0.50202
519 1 0.181519 -0.00444905 0.559169
673 1 -0.0025037 0.125391 0.625122
544 1 0.880289 0.0630278 0.562581
575 1 0.933886 0.126384 0.56237
670 1 0.938856 0.0646148 0.621931
701 1 0.872952 0.130417 0.626798
911 1 0.434057 -0.00551882 0.936008
645 1 0.126139 -6.22041e-06 0.622581
569 1 0.755751 0.126882 0.499361
1929 1 0.25134 0.500876 0.869416
787 1 0.562019 0.00108128 0.808233
552 1 0.120141 0.185039 0.559269
579 1 0.0605794 0.247573 0.561744
584 1 0.127518 0.313878 0.562735
674 1 0.063762 0.184231 0.627348
706 1 0.0645617 0.311034 0.627859
709 1 0.125825 0.248681 0.626123
580 1 0.00375345 0.309811 0.566473
705 1 0.00471667 0.248082 0.626877
647 1 0.185766 0.000853534 0.687077
543 1 0.934572 -0.00209961 0.562554
556 1 0.247262 0.186428 0.554287
583 1 0.185095 0.246007 0.561617
587 1 0.310663 0.250313 0.559886
588 1 0.248719 0.314738 0.560482
678 1 0.191963 0.184628 0.630398
682 1 0.311257 0.196608 0.624712
710 1 0.187606 0.310189 0.625493
713 1 0.248311 0.250239 0.622945
714 1 0.306411 0.315873 0.623844
667 1 0.816034 -8.07411e-05 0.683572
779 1 0.314674 -0.000742022 0.809021
565 1 0.623402 0.120924 0.509187
560 1 0.372191 0.187739 0.562663
591 1 0.436149 0.256897 0.569148
592 1 0.371225 0.312626 0.561652
686 1 0.434821 0.190923 0.622524
717 1 0.373253 0.252869 0.62533
718 1 0.441138 0.311267 0.628673
564 1 0.498065 0.186202 0.560351
721 1 0.504203 0.243046 0.625252
1949 1 0.880026 0.496081 0.876367
899 1 0.0596275 -0.00265895 0.933129
606 1 0.939622 0.310775 0.501205
596 1 0.504397 0.313089 0.56426
568 1 0.628052 0.18754 0.56002
595 1 0.564288 0.249971 0.561845
600 1 0.627396 0.312996 0.56288
690 1 0.563334 0.187674 0.625169
722 1 0.563894 0.309739 0.621208
725 1 0.628234 0.251519 0.623003
538 1 0.810274 0.0665569 0.497744
1947 1 0.818948 0.496339 0.933981
917 1 0.619495 -0.00758049 0.869356
534 1 0.687672 0.0639608 0.50471
553 1 0.242712 0.124325 0.496831
572 1 0.75623 0.186571 0.563757
599 1 0.689184 0.254521 0.560172
603 1 0.812323 0.254262 0.567951
604 1 0.751509 0.31413 0.563518
694 1 0.687257 0.194308 0.624667
698 1 0.814787 0.188258 0.626842
726 1 0.690383 0.31539 0.622634
729 1 0.751656 0.249156 0.627397
730 1 0.816775 0.317287 0.625228
781 1 0.37196 -0.000487247 0.75122
1555 1 0.566193 0.499576 0.566721
548 1 1.00355 0.184696 0.560827
576 1 0.873346 0.189107 0.556639
607 1 0.935946 0.249639 0.559766
608 1 0.875126 0.317565 0.563922
702 1 0.942737 0.191188 0.623407
733 1 0.876628 0.250058 0.623399
734 1 0.93926 0.312204 0.623448
1691 1 0.813175 0.495108 0.685383
1927 1 0.193531 0.500823 0.936511
1921 1 1.00142 0.498997 0.876526
1023 1 0.942281 0.373286 0.943807
611 1 0.0622056 0.373226 0.561208
616 1 0.127972 0.435877 0.558709
738 1 0.0643782 0.435046 0.623199
741 1 0.126444 0.377903 0.626166
612 1 1.00071 0.43367 0.559662
737 1 1.00461 0.374136 0.627225
1665 1 0.999982 0.504268 0.62481
1813 1 0.623152 0.502683 0.757126
615 1 0.184781 0.375875 0.560492
619 1 0.315107 0.381726 0.559613
620 1 0.24927 0.438362 0.56388
742 1 0.18533 0.435363 0.623237
745 1 0.247463 0.375231 0.62415
746 1 0.314461 0.438759 0.627755
1024 1 0.878779 0.435075 0.936839
557 1 0.374978 0.128469 0.496047
623 1 0.440265 0.374161 0.566352
624 1 0.37689 0.439938 0.563171
749 1 0.373267 0.373671 0.626459
750 1 0.436179 0.437938 0.627138
753 1 0.50213 0.377951 0.623058
628 1 0.495838 0.440374 0.562026
602 1 0.810549 0.310486 0.500802
609 1 0.000101122 0.37223 0.501596
793 1 0.751631 0.00468845 0.750142
1014 1 0.689711 0.433178 0.872506
1803 1 0.313972 0.503967 0.810229
627 1 0.561524 0.376224 0.56169
632 1 0.624465 0.438702 0.562261
754 1 0.56103 0.437785 0.626842
757 1 0.625462 0.378826 0.624365
1015 1 0.683745 0.369765 0.940963
1695 1 0.942558 0.499325 0.687239
651 1 0.310134 0.000469171 0.687503
1017 1 0.752565 0.370502 0.871178
1689 1 0.752204 0.498967 0.627151
1563 1 0.812794 0.495945 0.562941
631 1 0.690756 0.380956 0.559491
635 1 0.81278 0.378319 0.560965
636 1 0.752246 0.441226 0.561838
758 1 0.688141 0.434503 0.63175
761 1 0.74688 0.374752 0.624112
762 1 0.813417 0.435558 0.624606
1817 1 0.753055 0.495136 0.754604
65 1 0.00200859 0.250385 1.00042
639 1 0.940187 0.37743 0.562002
640 1 0.871539 0.431237 0.555974
765 1 0.877642 0.379393 0.620556
766 1 0.941883 0.437871 0.624064
1018 1 0.809939 0.436945 0.879502
102 1 0.196933 0.434943 1.00391
1019 1 0.8094 0.372837 0.936057
590 1 0.436266 0.31096 0.506536
1793 1 0.00398661 0.500974 0.752631
1020 1 0.746994 0.431292 0.937424
791 1 0.688489 -0.00277478 0.81048
648 1 0.121043 0.0583957 0.686025
675 1 0.0684466 0.12349 0.685356
770 1 0.063205 0.0587692 0.747939
776 1 0.11747 0.0659846 0.813527
803 1 0.0606525 0.127154 0.809193
805 1 0.123429 0.122972 0.748939
644 1 0.00016984 0.0627566 0.685988
652 1 0.245703 0.0641332 0.686682
679 1 0.185201 0.124277 0.691537
683 1 0.311525 0.125601 0.686411
774 1 0.186385 0.0600975 0.752037
778 1 0.308117 0.0603319 0.749428
780 1 0.251825 0.0585692 0.811563
807 1 0.190514 0.118517 0.813226
809 1 0.251877 0.118137 0.747862
811 1 0.316811 0.122962 0.810243
656 1 0.372819 0.0618856 0.685787
687 1 0.435908 0.12572 0.689402
782 1 0.437846 0.0634155 0.748734
784 1 0.375794 0.0601246 0.812172
813 1 0.370368 0.121438 0.749003
815 1 0.436191 0.127933 0.81464
817 1 0.499408 0.128584 0.75207
788 1 0.500078 0.0618522 0.813826
1551 1 0.436938 0.501767 0.563826
660 1 0.498956 0.0653694 0.687908
664 1 0.628042 0.060037 0.689149
691 1 0.561481 0.122729 0.68379
786 1 0.559025 0.0693465 0.754228
792 1 0.621361 0.0597747 0.811365
819 1 0.565278 0.127886 0.812955
821 1 0.623156 0.126837 0.744307
1567 1 0.941386 0.49989 0.562061
66 1 0.0660371 0.313985 0.999076
668 1 0.749737 0.0610649 0.690401
695 1 0.683013 0.125866 0.686436
699 1 0.817169 0.1315 0.689272
790 1 0.683204 0.0622763 0.751394
794 1 0.812258 0.0627159 0.752424
796 1 0.748102 0.0602179 0.81532
823 1 0.689909 0.123188 0.817338
825 1 0.748606 0.125376 0.750518
827 1 0.80876 0.125655 0.817942
1821 1 0.874825 0.495872 0.750699
69 1 0.125249 0.248129 0.999698
86 1 0.685142 0.314141 1.00111
122 1 0.810043 0.433249 0.997818
772 1 1.00059 0.0594568 0.81091
801 1 1.00431 0.12983 0.746355
672 1 0.876569 0.0629857 0.690109
703 1 0.9387 0.122611 0.696418
798 1 0.940537 0.0564948 0.750826
800 1 0.876128 0.0585347 0.808556
829 1 0.870163 0.127308 0.753362
831 1 0.939635 0.125853 0.809723
1805 1 0.376562 0.498733 0.748155
1687 1 0.68367 0.500508 0.688349
680 1 0.12669 0.1856 0.688286
707 1 0.0633253 0.25204 0.687688
712 1 0.126177 0.314266 0.691734
802 1 0.0664631 0.187561 0.745994
808 1 0.122924 0.186519 0.808223
834 1 0.0619484 0.312386 0.747285
835 1 0.065297 0.251352 0.815673
837 1 0.125487 0.245806 0.753187
840 1 0.124664 0.314769 0.81313
708 1 0.99977 0.31157 0.686638
804 1 1.00139 0.189776 0.811906
833 1 0.00138796 0.244494 0.745417
684 1 0.251274 0.18382 0.68715
711 1 0.184333 0.250916 0.692773
715 1 0.305842 0.25309 0.68687
716 1 0.244893 0.314483 0.686449
806 1 0.188705 0.185398 0.748699
810 1 0.311688 0.184688 0.747696
812 1 0.25195 0.187161 0.807969
838 1 0.186702 0.312494 0.751026
839 1 0.186741 0.248435 0.812802
841 1 0.251791 0.250015 0.748527
842 1 0.313746 0.311862 0.74877
843 1 0.316652 0.251032 0.810324
844 1 0.246749 0.309719 0.813681
688 1 0.372296 0.18701 0.687647
719 1 0.438833 0.250695 0.689864
720 1 0.375556 0.314452 0.685963
814 1 0.434657 0.184714 0.751711
816 1 0.372406 0.188964 0.809005
845 1 0.374838 0.251208 0.745043
846 1 0.434971 0.313388 0.749753
847 1 0.435982 0.24786 0.814117
848 1 0.374752 0.313901 0.815865
849 1 0.503129 0.249545 0.749249
724 1 0.500913 0.307833 0.684878
852 1 0.496462 0.309394 0.813795
820 1 0.499022 0.187023 0.81994
692 1 0.496368 0.18828 0.688687
696 1 0.623857 0.18751 0.682375
723 1 0.566801 0.250268 0.688464
728 1 0.624837 0.314039 0.685603
818 1 0.564782 0.186901 0.745163
824 1 0.623079 0.190147 0.81495
850 1 0.563185 0.314082 0.746421
851 1 0.563701 0.247312 0.807662
853 1 0.621871 0.250029 0.747968
856 1 0.623759 0.312894 0.80908
700 1 0.751874 0.185875 0.689576
727 1 0.686644 0.253752 0.686105
731 1 0.817292 0.253832 0.689158
732 1 0.753364 0.316069 0.685942
822 1 0.686188 0.186782 0.750925
826 1 0.814761 0.186579 0.753176
828 1 0.748174 0.186009 0.81581
854 1 0.687887 0.31412 0.746905
855 1 0.687655 0.254763 0.808153
857 1 0.750548 0.247 0.75398
858 1 0.819442 0.312286 0.753793
859 1 0.814024 0.250077 0.814526
860 1 0.752366 0.312122 0.807777
676 1 1.00697 0.183799 0.689788
836 1 1.00579 0.315527 0.808823
704 1 0.884745 0.188548 0.689552
735 1 0.942048 0.25044 0.685493
736 1 0.876494 0.311842 0.683221
830 1 0.93726 0.184917 0.753346
832 1 0.877538 0.185339 0.812909
861 1 0.884386 0.250874 0.751769
862 1 0.943427 0.312663 0.749472
863 1 0.940148 0.250085 0.815218
864 1 0.878583 0.313626 0.813606
739 1 0.0622092 0.373534 0.688091
744 1 0.124357 0.438936 0.68881
866 1 0.0650629 0.436417 0.75478
867 1 0.0701104 0.379289 0.816857
869 1 0.126206 0.377829 0.749614
872 1 0.129499 0.445246 0.81258
868 1 1.00061 0.442453 0.816866
1671 1 0.188744 0.503331 0.689532
743 1 0.187158 0.378057 0.685469
747 1 0.313026 0.375264 0.690381
748 1 0.250494 0.439247 0.688676
870 1 0.186415 0.442522 0.747624
871 1 0.187845 0.378197 0.811114
873 1 0.250426 0.374033 0.746647
874 1 0.305004 0.441481 0.750714
875 1 0.311699 0.372381 0.810329
876 1 0.250113 0.438763 0.807056
1012 1 0.498439 0.437637 0.935513
751 1 0.437973 0.376309 0.689113
752 1 0.372933 0.436553 0.69199
877 1 0.377224 0.374173 0.750503
878 1 0.439105 0.437632 0.751027
879 1 0.438175 0.37547 0.81251
880 1 0.374 0.435036 0.811447
881 1 0.498929 0.371236 0.751713
1009 1 0.497567 0.379988 0.874053
884 1 0.501037 0.439479 0.813179
756 1 0.498519 0.435553 0.689372
755 1 0.562164 0.373563 0.6853
760 1 0.626924 0.434824 0.687532
882 1 0.563475 0.435785 0.750447
883 1 0.561982 0.372679 0.814654
885 1 0.623516 0.372877 0.747323
888 1 0.620134 0.438167 0.813877
1010 1 0.560817 0.433473 0.876421
93 1 0.868443 0.252651 0.998477
1016 1 0.617944 0.438089 0.93688
759 1 0.689522 0.374028 0.691348
763 1 0.819722 0.376308 0.688517
764 1 0.753774 0.433904 0.685774
886 1 0.688874 0.444246 0.750171
887 1 0.683944 0.379166 0.806586
889 1 0.749103 0.376316 0.750122
890 1 0.81143 0.436833 0.750431
891 1 0.816325 0.376516 0.81578
892 1 0.751739 0.435666 0.811919
865 1 0.00356826 0.376961 0.752409
740 1 0.00682604 0.438569 0.68975
767 1 0.938433 0.374441 0.685172
768 1 0.879194 0.439719 0.690243
893 1 0.883159 0.377043 0.749671
894 1 0.942622 0.440634 0.750491
895 1 0.941523 0.379622 0.811505
896 1 0.877823 0.438165 0.811263
1011 1 0.56077 0.373741 0.935524
898 1 0.0609698 0.0615097 0.875382
904 1 0.120447 0.0581093 0.942375
931 1 0.0636595 0.124825 0.939188
933 1 0.123957 0.123088 0.875454
900 1 0.000386377 0.0590057 0.937003
929 1 0.00271309 0.125172 0.876828
902 1 0.187206 0.061552 0.878412
906 1 0.312871 0.0541039 0.874158
908 1 0.249489 0.0652397 0.939658
935 1 0.188785 0.125157 0.933246
937 1 0.261406 0.118855 0.873699
939 1 0.316576 0.120855 0.937873
910 1 0.436552 0.0570089 0.872924
912 1 0.374345 0.0552077 0.934071
941 1 0.375888 0.117649 0.876264
943 1 0.438097 0.125375 0.934396
945 1 0.499955 0.124359 0.872807
995 1 0.0685415 0.377886 0.944965
633 1 0.752546 0.374752 0.503503
997 1 0.131813 0.379726 0.875133
1013 1 0.628702 0.371961 0.87699
916 1 0.501656 0.0568814 0.936075
914 1 0.563876 0.0614942 0.872777
920 1 0.62427 0.0585178 0.934028
947 1 0.565645 0.126302 0.936471
949 1 0.623355 0.122167 0.871268
82 1 0.560916 0.312546 0.999533
1667 1 0.0600651 0.500149 0.689391
994 1 0.0627763 0.436605 0.874542
1008 1 0.372654 0.440828 0.94024
50 1 0.564523 0.186358 0.996303
996 1 1.00125 0.43778 0.940209
918 1 0.686881 0.0608146 0.875695
922 1 0.812824 0.0620989 0.880622
924 1 0.743826 0.0616869 0.935371
951 1 0.68006 0.123081 0.938462
953 1 0.74698 0.12228 0.877994
955 1 0.818305 0.130552 0.937804
795 1 0.806589 -0.00104722 0.813652
1007 1 0.434619 0.375897 0.93433
926 1 0.937618 0.0651643 0.873001
928 1 0.879134 0.0602827 0.935861
957 1 0.876136 0.124457 0.867919
959 1 0.933601 0.129011 0.942167
960 1 0.876117 0.189599 0.934769
930 1 0.0654225 0.192273 0.873897
936 1 0.127544 0.185882 0.939264
962 1 0.0676279 0.31782 0.881195
963 1 0.0673427 0.248322 0.935481
965 1 0.12535 0.24935 0.8765
968 1 0.131094 0.319385 0.935034
932 1 0.000722945 0.18932 0.932045
961 1 0.00405908 0.257291 0.870709
964 1 1.00093 0.315465 0.941237
993 1 1.00394 0.379577 0.872604
1004 1 0.254338 0.44225 0.933187
934 1 0.191397 0.184664 0.866218
938 1 0.314213 0.186622 0.872318
940 1 0.250644 0.182045 0.935574
966 1 0.188468 0.312967 0.876532
967 1 0.189554 0.245098 0.931564
969 1 0.251171 0.248454 0.872866
970 1 0.307875 0.312742 0.871282
971 1 0.310955 0.247725 0.936203
972 1 0.252218 0.313019 0.93388
1005 1 0.369061 0.377314 0.875801
998 1 0.188793 0.441007 0.877128
942 1 0.43498 0.187172 0.874811
944 1 0.375273 0.187141 0.933489
973 1 0.371727 0.251523 0.875585
974 1 0.435227 0.313417 0.877166
975 1 0.434344 0.249781 0.933501
976 1 0.373777 0.313785 0.935506
980 1 0.496952 0.313709 0.939395
948 1 0.501578 0.190916 0.936855
977 1 0.502456 0.249591 0.870085
1003 1 0.31368 0.375996 0.939798
1002 1 0.314949 0.439872 0.868943
946 1 0.560274 0.187733 0.874198
952 1 0.626441 0.194318 0.934311
978 1 0.564867 0.310884 0.874651
979 1 0.565392 0.251714 0.927466
981 1 0.625241 0.251134 0.872851
984 1 0.624693 0.312486 0.933649
1006 1 0.429391 0.437771 0.87494
1001 1 0.250399 0.374313 0.874991
1000 1 0.129054 0.437194 0.942808
988 1 0.748066 0.313303 0.934697
987 1 0.810337 0.254823 0.936739
986 1 0.818498 0.31344 0.870634
985 1 0.749409 0.252409 0.876624
983 1 0.690082 0.252305 0.933825
982 1 0.687305 0.314678 0.871239
950 1 0.687563 0.186777 0.875912
954 1 0.810699 0.192221 0.878795
956 1 0.7482 0.186216 0.93898
999 1 0.191952 0.381417 0.942198
990 1 0.941229 0.319355 0.878351
991 1 0.94042 0.249364 0.933077
958 1 0.941957 0.186134 0.872053
989 1 0.87638 0.249539 0.874672
992 1 0.875025 0.315512 0.935102
771 1 0.0631879 0.000258648 0.812841
661 1 0.622953 0.000149141 0.630295
1807 1 0.434668 0.499424 0.812502
1811 1 0.560257 0.501111 0.814438
925 1 0.872534 0.00104581 0.873342
897 1 0.00178828 -0.00420825 0.87658
643 1 0.0557384 0.00117441 0.68985
1941 1 0.623209 0.497903 0.870316
1677 1 0.378172 0.501107 0.631196
915 1 0.569654 -0.0019318 0.936163
637 1 0.876363 0.369817 0.500739
799 1 0.936832 -0.00502061 0.814754
923 1 0.809612 0.00155874 0.938856
90 1 0.808423 0.317483 1.00115
1559 1 0.688949 0.501999 0.557994
81 1 0.502191 0.249465 0.999055
1799 1 0.188658 0.503169 0.811006
769 1 0.996272 -0.00292914 0.75051
98 1 0.0691753 0.439219 1.00298
1933 1 0.375847 0.503282 0.874504
927 1 0.934333 -0.00324277 0.937027
671 1 0.939331 0.00281526 0.686074
1925 1 0.127275 0.502585 0.878281
1669 1 0.121566 0.501075 0.627358
1693 1 0.87703 0.497016 0.620874
919 1 0.686309 -0.000328338 0.934096
1937 1 0.49023 0.498245 0.873764
913 1 0.507089 -0.00674374 0.874027
78 1 0.43491 0.310474 0.994507
77 1 0.371041 0.249063 1.00079
33 1 -0.00165645 0.133325 1.00004
110 1 0.435972 0.438737 0.99627
113 1 0.494602 0.377584 1
94 1 0.936303 0.307356 0.999617
41 1 0.251755 0.124592 1.00533
118 1 0.683616 0.436094 0.998945
97 1 0.00560698 0.378789 1.00806
106 1 0.312882 0.438717 1
70 1 0.189944 0.304892 0.999267
26 1 0.805038 0.0647419 0.996867
621 1 0.376948 0.377028 0.503238
18 1 0.559888 0.0599769 0.998551
598 1 0.681122 0.317578 0.501327
89 1 0.747523 0.252204 1.00062
42 1 0.316564 0.182952 1.00244
85 1 0.622137 0.255068 0.997024
630 1 0.695816 0.439623 0.498825
73 1 0.253373 0.247813 0.998789
585 1 0.243574 0.244869 0.498151
114 1 0.561095 0.43401 1.00093
629 1 0.627539 0.378052 0.502852
121 1 0.747578 0.375263 0.99736
105 1 0.253632 0.374592 0.998494
545 1 0.00224336 0.120531 0.499839
554 1 0.310558 0.191986 0.499791
589 1 0.368939 0.251449 0.497076
577 1 1.00296 0.252804 0.501549
597 1 0.62344 0.249805 0.500152
574 1 0.940322 0.191154 0.498889
549 1 0.122273 0.122462 0.498432
582 1 0.190704 0.312002 0.50105
594 1 0.557103 0.311726 0.498403
550 1 0.181297 0.186305 0.496279
542 1 0.93355 0.0589477 0.501781
537 1 0.756154 0.0039511 0.502387
1032 1 0.129447 0.564322 0.0679548
1059 1 0.065999 0.626819 0.064
1154 1 0.0638823 0.559838 0.129117
1189 1 0.12774 0.627736 0.127726
1028 1 0.00184071 0.564103 0.0669725
1185 1 -0.00300657 0.623766 0.132565
1122 1 0.0568578 0.939076 -0.00352094
1036 1 0.247358 0.563132 0.0622339
1063 1 0.185701 0.633572 0.0612891
1067 1 0.307523 0.622013 0.0648938
1158 1 0.192615 0.557573 0.124547
1162 1 0.313206 0.560507 0.12716
1193 1 0.249082 0.626718 0.122836
1411 1 0.0642662 0.494766 0.435891
1137 1 0.495695 0.87408 0.00136876
1138 1 0.562007 0.936185 0.00246663
11 1 0.312865 0.998492 0.0580585
1415 1 0.188445 0.494647 0.436692
1040 1 0.375836 0.562654 0.0657666
1071 1 0.437671 0.627839 0.0610853
1166 1 0.439196 0.562685 0.127524
1197 1 0.378144 0.620969 0.120022
1044 1 0.498533 0.566671 0.0601403
1201 1 0.502375 0.62264 0.122422
387 1 0.0617609 1.00075 0.433522
149 1 0.627658 1.00479 0.126844
1048 1 0.627123 0.559681 0.0571268
1075 1 0.560693 0.621334 0.0593583
1170 1 0.562692 0.563241 0.122342
1205 1 0.627603 0.62232 0.122191
1052 1 0.752065 0.563119 0.0576874
1079 1 0.684753 0.628129 0.0630683
1083 1 0.810035 0.628497 0.0625762
1174 1 0.691674 0.563976 0.128284
1178 1 0.808608 0.558432 0.124335
1209 1 0.747243 0.623346 0.123094
1056 1 0.871603 0.564614 0.06195
1087 1 0.936127 0.622234 0.070371
1182 1 0.939459 0.556904 0.127333
1213 1 0.866679 0.629124 0.123817
13 1 0.375001 0.998116 0.00145028
259 1 0.0617089 1.0039 0.311156
1064 1 0.123668 0.689696 0.0661475
1091 1 0.0605113 0.747805 0.0638264
1096 1 0.123586 0.811312 0.0577846
1186 1 0.0615333 0.684045 0.129313
1218 1 0.0612658 0.805914 0.128762
1221 1 0.123651 0.748079 0.126999
1060 1 1.00241 0.683239 0.0669203
1092 1 1.00335 0.806189 0.0681945
1082 1 0.808654 0.691063 0.00570046
409 1 0.753507 0.999067 0.376979
137 1 0.245964 1.0013 0.128529
1068 1 0.246454 0.688429 0.0651924
1095 1 0.184921 0.752953 0.0634163
1099 1 0.310777 0.747783 0.0649566
1100 1 0.249842 0.811737 0.0632794
1190 1 0.186622 0.689196 0.129986
1194 1 0.307283 0.683845 0.127434
1222 1 0.187791 0.806994 0.129375
1225 1 0.252474 0.749581 0.127371
1226 1 0.309681 0.81007 0.120234
1101 1 0.371576 0.744219 -0.00363087
1293 1 0.378664 0.496974 0.246086
265 1 0.247646 0.996727 0.249661
1072 1 0.369296 0.690693 0.0619638
1103 1 0.433613 0.754171 0.0581964
1104 1 0.371264 0.80984 0.0589139
1198 1 0.440144 0.688379 0.12279
1229 1 0.374687 0.746731 0.120167
1230 1 0.436119 0.815155 0.124276
1076 1 0.502079 0.685228 0.0593723
1108 1 0.500755 0.813179 0.0634006
1233 1 0.499304 0.752537 0.125264
1080 1 0.621357 0.686178 0.0637758
1107 1 0.559168 0.752617 0.0645046
1112 1 0.626793 0.810939 0.0684798
1202 1 0.55926 0.686201 0.123938
1234 1 0.564318 0.805598 0.128323
1237 1 0.625215 0.74691 0.128546
1537 1 0.00274122 0.500286 0.499164
393 1 0.251358 0.998638 0.371931
1047 1 0.689216 0.498782 0.0665484
1051 1 0.809238 0.496261 0.0564394
1084 1 0.743047 0.685321 0.0674463
1111 1 0.684792 0.746714 0.0678823
1115 1 0.809065 0.757232 0.0670527
1116 1 0.745065 0.81423 0.0567814
1206 1 0.684954 0.685768 0.128552
1210 1 0.80764 0.697231 0.120688
1238 1 0.688746 0.819188 0.125594
1241 1 0.745725 0.756149 0.125884
1242 1 0.812065 0.811817 0.125665
1106 1 0.56675 0.813522 0.00640689
1217 1 0.997848 0.742219 0.126993
1427 1 0.561194 0.499119 0.438817
1088 1 0.873145 0.690694 0.0640473
1119 1 0.937519 0.745661 0.0669316
1120 1 0.876238 0.808928 0.0605601
1214 1 0.934427 0.684117 0.127624
1245 1 0.878051 0.755956 0.125821
1246 1 0.943115 0.810458 0.125687
1098 1 0.309169 0.811021 -0.00131428
1123 1 0.0579373 0.869975 0.0618113
1128 1 0.113657 0.936044 0.0618047
1250 1 0.0648534 0.938921 0.126193
1253 1 0.124408 0.868991 0.123931
1173 1 0.619513 0.499267 0.123325
1249 1 0.999987 0.87313 0.12364
1159 1 0.193651 0.494553 0.184425
1155 1 0.0640484 0.496249 0.183193
1127 1 0.185348 0.876413 0.0609179
1131 1 0.314633 0.87663 0.063058
1132 1 0.253834 0.934252 0.0645236
1254 1 0.178287 0.938467 0.121771
1257 1 0.250128 0.876505 0.124719
1258 1 0.317783 0.938107 0.127037
401 1 0.506645 0.998148 0.380388
1577 1 0.25055 0.617234 0.498654
1105 1 0.508277 0.750279 -0.000585374
1536 1 0.875048 0.937378 0.432266
151 1 0.690986 1.00547 0.190727
1535 1 0.94019 0.874544 0.439765
1135 1 0.438441 0.871127 0.0615206
1136 1 0.376324 0.937716 0.0621543
1261 1 0.372431 0.869888 0.12177
1262 1 0.435393 0.938659 0.124905
1140 1 0.495048 0.935859 0.0664102
145 1 0.499576 0.99803 0.123386
1534 1 0.941382 0.939124 0.376772
1089 1 0.999522 0.749082 0.00761306
1265 1 0.496639 0.875521 0.126324
1139 1 0.559927 0.878274 0.0649687
1144 1 0.628963 0.939348 0.0593487
1266 1 0.566951 0.940735 0.123855
1269 1 0.630127 0.879887 0.124949
1542 1 0.188165 0.559374 0.496681
1533 1 0.871457 0.877662 0.372772
1143 1 0.69543 0.880059 0.0648538
1147 1 0.81296 0.871707 0.0636934
1148 1 0.754757 0.940969 0.0682134
1270 1 0.691778 0.939808 0.125388
1273 1 0.756782 0.87571 0.129919
1274 1 0.814875 0.936912 0.126764
1538 1 0.065502 0.56284 0.503035
1585 1 0.501206 0.627062 0.503062
1124 1 1.00253 0.93757 0.0629476
1151 1 0.938267 0.876952 0.0599486
1152 1 0.873696 0.932712 0.0563983
1277 1 0.873313 0.873037 0.12424
1278 1 0.934328 0.939965 0.124381
1505 1 0.998236 0.876582 0.373352
31 1 0.940728 0.997651 0.0627973
1289 1 0.253961 0.49215 0.251077
1134 1 0.436736 0.935434 0.00108923
1130 1 0.310389 0.937918 -0.000371015
1160 1 0.126102 0.55731 0.183333
1187 1 0.0616289 0.623805 0.193741
1282 1 0.0627952 0.559055 0.246778
1288 1 0.128266 0.562475 0.318863
1315 1 0.067002 0.620112 0.317738
1317 1 0.12865 0.616831 0.246088
1532 1 0.7459 0.940293 0.439789
7 1 0.190957 0.995196 0.0612206
1164 1 0.254693 0.560065 0.183935
1191 1 0.188451 0.619426 0.182868
1195 1 0.316795 0.622385 0.190532
1286 1 0.184161 0.555544 0.249397
1290 1 0.315888 0.5546 0.246638
1292 1 0.250128 0.558142 0.304946
1319 1 0.193831 0.619631 0.311499
1321 1 0.248279 0.619908 0.248559
1323 1 0.312458 0.614506 0.307686
1531 1 0.807644 0.878728 0.43354
1168 1 0.375935 0.56493 0.184865
1199 1 0.435925 0.622812 0.18377
1294 1 0.436437 0.560536 0.247765
1296 1 0.377725 0.557376 0.310427
1325 1 0.378765 0.620165 0.251528
1327 1 0.434371 0.626753 0.313945
1300 1 0.502022 0.567076 0.311005
1329 1 0.496665 0.620482 0.248171
1172 1 0.504268 0.554552 0.192208
1176 1 0.623497 0.560414 0.188936
1203 1 0.561815 0.621642 0.184865
1298 1 0.564878 0.565347 0.250857
1304 1 0.624547 0.560472 0.312018
1331 1 0.571353 0.620591 0.313301
1333 1 0.621637 0.627023 0.248765
1180 1 0.750223 0.560981 0.188907
1207 1 0.686732 0.631335 0.196063
1211 1 0.807162 0.627987 0.188304
1302 1 0.685908 0.559729 0.249207
1306 1 0.81333 0.561511 0.249241
1308 1 0.74785 0.55916 0.311064
1335 1 0.682686 0.623901 0.314958
1337 1 0.7479 0.622689 0.247766
1339 1 0.807263 0.621769 0.312551
1530 1 0.815519 0.93791 0.371811
1529 1 0.744429 0.874888 0.377365
1284 1 1.00325 0.560132 0.313919
1313 1 0.99444 0.619386 0.247625
1156 1 0.997862 0.556929 0.186536
1184 1 0.870073 0.561268 0.187015
1215 1 0.932409 0.624627 0.185824
1310 1 0.937144 0.55467 0.253493
1312 1 0.867744 0.561356 0.313158
1341 1 0.869964 0.624458 0.250174
1343 1 0.935687 0.62515 0.312486
1192 1 0.124319 0.68462 0.195546
1219 1 0.0611993 0.746689 0.193226
1224 1 0.123311 0.80656 0.190648
1314 1 0.0602744 0.68823 0.256714
1320 1 0.121298 0.681717 0.317325
1346 1 0.0603237 0.810089 0.248326
1347 1 0.0574385 0.74881 0.316868
1349 1 0.116419 0.747347 0.251498
1352 1 0.117493 0.813202 0.311719
1188 1 0.994317 0.68335 0.194437
1196 1 0.255397 0.682745 0.190157
1223 1 0.191332 0.753857 0.191537
1227 1 0.316263 0.744404 0.182329
1228 1 0.251226 0.817824 0.188002
1318 1 0.188203 0.686135 0.251245
1322 1 0.311831 0.686582 0.256861
1324 1 0.252606 0.682484 0.314437
1350 1 0.186078 0.813149 0.251171
1351 1 0.184771 0.749112 0.310438
1353 1 0.25257 0.74883 0.251256
1354 1 0.312481 0.810752 0.255281
1355 1 0.314234 0.748812 0.313507
1356 1 0.248734 0.813998 0.310918
1200 1 0.374697 0.682923 0.179267
1231 1 0.432552 0.751237 0.184516
1232 1 0.373883 0.810593 0.190891
1326 1 0.436553 0.688016 0.245245
1328 1 0.373936 0.686144 0.312932
1357 1 0.369405 0.750514 0.250051
1358 1 0.435435 0.819907 0.250203
1359 1 0.43957 0.747358 0.311792
1360 1 0.37884 0.813227 0.317067
1332 1 0.499735 0.685709 0.311179
1236 1 0.496699 0.813909 0.18815
1361 1 0.49925 0.752695 0.243971
1204 1 0.498963 0.688948 0.187917
1364 1 0.507286 0.807512 0.310867
1208 1 0.625603 0.68742 0.186852
1235 1 0.567012 0.741454 0.18801
1240 1 0.625887 0.817558 0.186774
1330 1 0.559222 0.681315 0.251794
1336 1 0.626335 0.688338 0.314945
1362 1 0.568381 0.805431 0.246666
1363 1 0.568148 0.747584 0.311156
1365 1 0.628372 0.745542 0.250498
1368 1 0.623919 0.810968 0.313899
1212 1 0.750964 0.689023 0.183238
1239 1 0.689738 0.747947 0.181266
1243 1 0.813361 0.753195 0.184423
1244 1 0.753384 0.811166 0.188671
1334 1 0.690376 0.686501 0.252511
1338 1 0.809832 0.693499 0.246861
1340 1 0.747506 0.685297 0.315755
1366 1 0.688863 0.809906 0.244947
1367 1 0.687346 0.74916 0.318959
1369 1 0.746833 0.745273 0.249756
1370 1 0.809847 0.810257 0.254713
1371 1 0.813035 0.75098 0.314072
1372 1 0.745088 0.811869 0.312869
1345 1 -0.00171173 0.74735 0.253822
1316 1 -0.00204793 0.680046 0.31296
1220 1 0.997541 0.805535 0.188902
1348 1 0.000347573 0.811331 0.308845
1216 1 0.870122 0.685188 0.189318
1247 1 0.935158 0.751788 0.187414
1248 1 0.875802 0.815454 0.185604
1342 1 0.930317 0.688627 0.251715
1344 1 0.86458 0.684699 0.313957
1373 1 0.873267 0.753055 0.246676
1374 1 0.936566 0.811708 0.251651
1375 1 0.932638 0.74658 0.311733
1376 1 0.870324 0.811682 0.316764
1527 1 0.684956 0.874413 0.43845
1649 1 0.500266 0.876218 0.503793
1251 1 0.0596337 0.881204 0.190585
1256 1 0.125533 0.936617 0.186781
1378 1 0.0602787 0.936733 0.249866
1379 1 0.0619643 0.87677 0.310825
1381 1 0.128514 0.870631 0.252717
1384 1 0.123104 0.937454 0.31408
1142 1 0.688242 0.943279 -0.00430617
19 1 0.564948 1.00108 0.0625408
1255 1 0.188193 0.874447 0.183769
1259 1 0.312233 0.872397 0.186621
1260 1 0.250139 0.934915 0.183279
1382 1 0.186265 0.937355 0.250154
1383 1 0.187695 0.878919 0.312222
1385 1 0.246977 0.874594 0.249211
1386 1 0.3083 0.935437 0.250487
1387 1 0.313978 0.874054 0.312282
1388 1 0.25045 0.932903 0.311418
1425 1 0.497616 0.505532 0.372742
1578 1 0.320517 0.690088 0.500429
267 1 0.314801 1.0004 0.30963
1565 1 0.877383 0.500338 0.497095
1027 1 0.0636521 0.501528 0.0633553
1263 1 0.436487 0.879462 0.188967
1264 1 0.37378 0.937284 0.192624
1389 1 0.374658 0.876513 0.253277
1390 1 0.439902 0.942582 0.253061
1391 1 0.440308 0.876136 0.309845
1392 1 0.378832 0.934785 0.312458
1393 1 0.501311 0.878068 0.250363
1268 1 0.50057 0.937092 0.187358
1121 1 0.000931298 0.876775 -0.000444487
1526 1 0.685774 0.939056 0.374488
1396 1 0.506081 0.938747 0.31376
1267 1 0.560467 0.87086 0.188108
1272 1 0.628189 0.942257 0.187773
1394 1 0.563297 0.936517 0.253209
1395 1 0.56591 0.871063 0.313836
1397 1 0.622549 0.874006 0.250888
1400 1 0.626937 0.936152 0.306482
1524 1 0.504095 0.938557 0.439916
1521 1 0.503583 0.872555 0.377931
1271 1 0.683921 0.882536 0.18677
1275 1 0.813579 0.872706 0.186269
1276 1 0.753689 0.938944 0.189248
1398 1 0.69271 0.938314 0.250709
1399 1 0.688244 0.875862 0.314422
1401 1 0.748666 0.873279 0.245147
1402 1 0.809533 0.937687 0.250261
1403 1 0.810485 0.879447 0.311076
1404 1 0.749929 0.936819 0.312378
1377 1 0.998229 0.868202 0.246924
1380 1 0.998769 0.944859 0.313537
1252 1 0.996026 0.943444 0.184854
1279 1 0.939775 0.875221 0.185382
1280 1 0.874878 0.93962 0.192931
1405 1 0.87272 0.874657 0.246657
1406 1 0.937086 0.940356 0.249485
1407 1 0.940817 0.874847 0.307201
1408 1 0.876614 0.935037 0.309486
1525 1 0.62693 0.873325 0.380199
1410 1 0.0645601 0.560975 0.377657
1416 1 0.123823 0.558842 0.442744
1443 1 0.0635709 0.62838 0.436311
1445 1 0.126159 0.622809 0.380208
1528 1 0.6258 0.945438 0.439142
385 1 -0.000680605 1.00753 0.375186
1414 1 0.194005 0.559137 0.376235
1418 1 0.314431 0.559106 0.370721
1420 1 0.253536 0.55804 0.437865
1447 1 0.191181 0.619439 0.437357
1449 1 0.248675 0.624266 0.376787
1451 1 0.311855 0.619832 0.437469
1645 1 0.380315 0.875416 0.500985
1417 1 0.251869 0.500601 0.371844
1422 1 0.434192 0.566885 0.376307
1424 1 0.373821 0.559184 0.439906
1453 1 0.367867 0.622561 0.373549
1455 1 0.437766 0.630434 0.441122
1428 1 0.499676 0.567707 0.444763
1457 1 0.505572 0.620497 0.374826
1641 1 0.24709 0.877911 0.501195
1309 1 0.872251 0.497569 0.251179
1522 1 0.569118 0.935721 0.378685
1426 1 0.562405 0.558817 0.379512
1432 1 0.624465 0.564353 0.445163
1459 1 0.561808 0.621251 0.440574
1461 1 0.626663 0.618346 0.383709
1566 1 0.939501 0.562537 0.499474
1523 1 0.565538 0.876709 0.438157
257 1 -0.00153054 1.00149 0.246902
1430 1 0.68494 0.558457 0.380119
1434 1 0.817147 0.56225 0.376574
1436 1 0.753579 0.564423 0.435929
1463 1 0.68955 0.621136 0.438966
1465 1 0.746646 0.618687 0.374636
1467 1 0.815276 0.622406 0.434169
1412 1 1.00151 0.56062 0.43659
1441 1 1.00086 0.624247 0.377991
1438 1 0.937741 0.56248 0.376085
1440 1 0.876753 0.562304 0.441087
1469 1 0.874524 0.622354 0.376173
1471 1 0.938048 0.625026 0.437387
1039 1 0.434953 0.49796 0.0619928
1558 1 0.689146 0.569193 0.505054
1442 1 0.0613448 0.689095 0.378236
1448 1 0.127679 0.684015 0.438593
1474 1 0.0619438 0.809021 0.376606
1475 1 0.06491 0.745193 0.436822
1477 1 0.124914 0.749643 0.37164
1480 1 0.129076 0.812442 0.437572
1589 1 0.6232 0.629576 0.503599
1550 1 0.44053 0.568371 0.502282
1446 1 0.188227 0.684199 0.371806
1450 1 0.307469 0.690968 0.37656
1452 1 0.250593 0.690687 0.442655
1478 1 0.189877 0.816978 0.372272
1479 1 0.188878 0.752188 0.437476
1481 1 0.248725 0.75104 0.374027
1482 1 0.311486 0.81487 0.373052
1483 1 0.312466 0.750173 0.438185
1484 1 0.251418 0.812381 0.442054
407 1 0.688793 1.00338 0.437579
5 1 0.128202 0.998861 0.00527175
1519 1 0.437286 0.872613 0.432885
1520 1 0.379077 0.935246 0.431521
1454 1 0.440806 0.686829 0.376899
1456 1 0.376788 0.68987 0.43371
1485 1 0.378974 0.750195 0.375201
1486 1 0.439672 0.813312 0.368887
1487 1 0.436297 0.754861 0.438439
1488 1 0.370794 0.812649 0.440496
1460 1 0.494308 0.691536 0.44553
1518 1 0.445184 0.937741 0.375389
1492 1 0.502697 0.813982 0.442039
1489 1 0.502637 0.753648 0.377926
1491 1 0.561929 0.749732 0.437765
1493 1 0.623656 0.745901 0.379897
1458 1 0.564386 0.684176 0.381251
1464 1 0.625446 0.683829 0.44346
1496 1 0.6262 0.815004 0.442584
1490 1 0.564581 0.810939 0.377853
1495 1 0.685901 0.753305 0.441093
1497 1 0.744769 0.752087 0.378547
1498 1 0.80651 0.814722 0.376882
1499 1 0.809532 0.750574 0.437487
1500 1 0.747672 0.811507 0.438746
1494 1 0.68701 0.810803 0.37746
1468 1 0.748458 0.684994 0.434757
1466 1 0.806011 0.687297 0.372401
1462 1 0.686108 0.685306 0.379704
1546 1 0.314491 0.554289 0.498041
261 1 0.126306 0.993448 0.245125
1473 1 0.997465 0.739704 0.374786
1476 1 -0.00209529 0.809441 0.4334
1444 1 0.00130467 0.687016 0.440269
1470 1 0.935126 0.681205 0.377165
1501 1 0.87306 0.745096 0.372396
1503 1 0.930531 0.754356 0.433891
1472 1 0.868859 0.687697 0.438036
1504 1 0.871419 0.815089 0.431671
1502 1 0.940629 0.81356 0.372717
1517 1 0.379293 0.875828 0.373803
397 1 0.374158 0.998187 0.372268
1508 1 0.00220164 0.936949 0.437895
1509 1 0.11544 0.874937 0.375653
1506 1 0.0575626 0.935292 0.373989
1507 1 0.0607239 0.870375 0.435649
1512 1 0.121771 0.932397 0.437666
1614 1 0.439896 0.813154 0.498865
1157 1 0.125848 0.49379 0.123785
1515 1 0.313101 0.872877 0.438802
1165 1 0.377949 0.496537 0.127674
1516 1 0.24878 0.935245 0.434197
1586 1 0.559486 0.688992 0.502592
1514 1 0.314256 0.938003 0.374854
1513 1 0.251454 0.875638 0.371058
1510 1 0.18372 0.935901 0.375796
1511 1 0.188791 0.87382 0.433454
1621 1 0.623913 0.749788 0.502039
1569 1 1.00663 0.627446 0.497558
405 1 0.62405 0.996209 0.373036
1541 1 0.123932 0.496277 0.503142
1413 1 0.127822 0.498446 0.373081
529 1 0.497253 1.00346 0.497979
23 1 0.685591 1.00287 0.0607515
1163 1 0.315524 0.499005 0.187079
1573 1 0.12772 0.623099 0.499646
1642 1 0.314607 0.93604 0.497585
1646 1 0.439438 0.939413 0.49591
1602 1 0.0642046 0.803644 0.494233
1625 1 0.751892 0.753175 0.49965
1653 1 0.628623 0.878066 0.503309
1593 1 0.756251 0.627023 0.498618
1594 1 0.804427 0.688925 0.491615
1598 1 0.937688 0.687348 0.499568
1618 1 0.564214 0.814061 0.500653
25 1 0.748045 0.997944 -0.000893659
1554 1 0.562791 0.563282 0.502278
1066 1 0.311601 0.685443 0.00358461
1601 1 -0.00155632 0.749701 0.493638
1141 1 0.626821 0.870343 -0.00167437
1038 1 0.432873 0.561138 0.00277681
1658 1 0.81004 0.937559 0.496847
1606 1 0.185462 0.812422 0.493559
1613 1 0.37493 0.75436 0.503365
1581 1 0.375191 0.621435 0.499298
1094 1 0.190503 0.810796 0.00348395
1605 1 0.126915 0.745867 0.498692
1145 1 0.748883 0.873709 -0.000404696
1630 1 0.933621 0.809562 0.495843
1133 1 0.370208 0.873378 -0.00333735
1657 1 0.746851 0.87284 0.497704
1113 1 0.753165 0.749935 0.00339678
1102 1 0.434825 0.808964 -0.00652327
1070 1 0.44369 0.690203 -0.00219257
1046 1 0.687189 0.566405 -0.00365099
1074 1 0.563923 0.688823 0.00304393
541 1 0.875814 0.998728 0.499912
1073 1 0.498899 0.626645 0.000501698
1058 1 0.0654355 0.689991 0.00233811
1544 1 0.128267 0.557403 0.56296
1571 1 0.0647708 0.627006 0.562034
1666 1 0.0632546 0.566131 0.62109
1701 1 0.123763 0.624237 0.625193
1570 1 0.0635621 0.691062 0.501707
1697 1 1.00022 0.626657 0.621903
2020 1 0.00111242 0.93416 0.934626
1629 1 0.871764 0.753973 0.499508
1931 1 0.311613 0.505599 0.938439
1548 1 0.248756 0.5575 0.56539
1575 1 0.18652 0.623414 0.559859
1579 1 0.313261 0.622004 0.562715
1670 1 0.183381 0.561781 0.624348
1674 1 0.317303 0.563302 0.625214
1705 1 0.250133 0.625633 0.626532
653 1 0.369985 0.994037 0.621647
2048 1 0.873826 0.937941 0.941499
1033 1 0.250352 0.503705 1.00317
1552 1 0.37735 0.56284 0.559121
1583 1 0.439484 0.627604 0.564187
1678 1 0.440191 0.560179 0.629382
1709 1 0.37418 0.62789 0.625082
1556 1 0.497106 0.564433 0.563956
1574 1 0.185975 0.687048 0.498914
2047 1 0.941492 0.875606 0.935807
1683 1 0.566198 0.495454 0.691795
663 1 0.688589 0.99922 0.690688
901 1 0.122427 0.996111 0.8746
1539 1 0.0636449 0.49923 0.564726
1713 1 0.494158 0.629252 0.629501
1560 1 0.623646 0.563719 0.564265
1587 1 0.559159 0.626575 0.567512
1682 1 0.561992 0.561164 0.63118
1717 1 0.622122 0.624541 0.623652
1543 1 0.186324 0.497754 0.568693
2046 1 0.937915 0.93615 0.878747
1564 1 0.751703 0.564966 0.565339
1591 1 0.688081 0.625924 0.567393
1595 1 0.812524 0.622883 0.567664
1686 1 0.687735 0.562401 0.62913
1690 1 0.8107 0.560581 0.628074
1721 1 0.748882 0.621875 0.626774
789 1 0.62481 0.993454 0.750602
1662 1 0.935356 0.939786 0.495991
1685 1 0.627914 0.495355 0.627807
1679 1 0.439105 0.498285 0.68837
1540 1 1.00064 0.562935 0.557946
1568 1 0.873184 0.557642 0.564558
1599 1 0.941323 0.624183 0.561641
1694 1 0.941273 0.565594 0.6242
1725 1 0.881206 0.626583 0.628153
1795 1 0.0707284 0.506592 0.810313
2045 1 0.879993 0.873367 0.873813
783 1 0.433794 0.999041 0.813123
657 1 0.501665 1.00294 0.627803
1576 1 0.12413 0.686464 0.562323
1603 1 0.0620242 0.747406 0.560956
1608 1 0.12743 0.81191 0.55552
1698 1 0.0640901 0.690112 0.626264
1730 1 0.0607429 0.810556 0.61798
1733 1 0.125828 0.750866 0.626737
1150 1 0.939342 0.941724 0.99939
1057 1 1.00263 0.622405 1.00202
523 1 0.307814 1.00171 0.560076
1580 1 0.254982 0.689713 0.564061
1607 1 0.185649 0.749571 0.565857
1611 1 0.315654 0.752761 0.562865
1612 1 0.249339 0.813328 0.557007
1702 1 0.189049 0.690819 0.628358
1706 1 0.310552 0.693173 0.627405
1734 1 0.190257 0.817613 0.620359
1737 1 0.251863 0.751977 0.624066
1738 1 0.311017 0.823536 0.623071
1935 1 0.429215 0.506066 0.936547
1584 1 0.372768 0.692006 0.567188
1615 1 0.434667 0.751371 0.562891
1616 1 0.375799 0.817364 0.56796
1710 1 0.437381 0.691062 0.623682
1741 1 0.376969 0.753531 0.625763
1742 1 0.437531 0.813738 0.625048
1620 1 0.503863 0.812149 0.563391
1588 1 0.498972 0.684805 0.565802
1634 1 0.0568619 0.937156 0.499749
1745 1 0.500764 0.74869 0.621847
1592 1 0.622676 0.686876 0.561059
1619 1 0.565332 0.754401 0.562108
1624 1 0.623776 0.813767 0.559977
1714 1 0.563498 0.689224 0.626289
1746 1 0.562211 0.813932 0.621747
1749 1 0.630992 0.746431 0.620598
797 1 0.872334 0.995697 0.746016
1797 1 0.126027 0.505319 0.747877
1626 1 0.812084 0.815561 0.499215
517 1 0.122024 0.997309 0.494579
1596 1 0.751867 0.68513 0.560607
1623 1 0.688688 0.744577 0.559662
1627 1 0.814692 0.741527 0.565024
1628 1 0.752294 0.814808 0.560357
1718 1 0.691427 0.691181 0.625475
1722 1 0.812927 0.685457 0.630786
1750 1 0.686203 0.809766 0.619882
1753 1 0.751609 0.751838 0.623601
1754 1 0.809685 0.814484 0.626529
1675 1 0.314068 0.501167 0.691822
1923 1 0.0641867 0.499931 0.937523
1086 1 0.938028 0.685088 1.00399
1114 1 0.814085 0.812619 0.998566
1604 1 0.000766879 0.808042 0.555862
1729 1 0.99931 0.750243 0.621992
1572 1 0.999567 0.689635 0.563801
1600 1 0.878781 0.684301 0.563663
1631 1 0.937131 0.751425 0.559956
1632 1 0.869328 0.80902 0.567703
1726 1 0.940029 0.69376 0.620957
1757 1 0.876301 0.746548 0.624817
1758 1 0.937136 0.812806 0.622941
1681 1 0.501542 0.500583 0.628065
1635 1 0.0591774 0.870762 0.5602
1640 1 0.119684 0.936389 0.562442
1762 1 0.0595234 0.930512 0.628851
1765 1 0.124688 0.87314 0.622061
1819 1 0.818065 0.497584 0.815387
1639 1 0.187322 0.880105 0.561622
1643 1 0.31508 0.879081 0.559159
1644 1 0.247067 0.936043 0.559836
1766 1 0.182612 0.93785 0.626513
1769 1 0.245014 0.875203 0.623647
1770 1 0.306942 0.935897 0.621289
1647 1 0.44094 0.87529 0.565986
1648 1 0.378489 0.941886 0.557966
1773 1 0.373657 0.876911 0.628972
1774 1 0.438338 0.942692 0.626403
1652 1 0.50219 0.942826 0.565665
2044 1 0.752845 0.930396 0.937235
2043 1 0.8153 0.871907 0.939484
1777 1 0.503445 0.87585 0.627756
1651 1 0.556605 0.874723 0.562324
1656 1 0.618355 0.940472 0.565631
1778 1 0.559743 0.940383 0.630015
1781 1 0.62485 0.87695 0.62185
2042 1 0.817581 0.934008 0.874239
775 1 0.184183 0.995274 0.815038
2041 1 0.755083 0.872502 0.870203
921 1 0.753318 0.995086 0.878424
773 1 0.121045 0.998253 0.749215
1655 1 0.68948 0.876492 0.568242
1659 1 0.813011 0.875639 0.561105
1660 1 0.752678 0.936191 0.558843
1782 1 0.69133 0.940241 0.630532
1785 1 0.75634 0.879863 0.624484
1786 1 0.813135 0.940993 0.618432
1061 1 0.126841 0.628537 1.00293
1809 1 0.500953 0.500894 0.751555
1761 1 -0.0036927 0.876751 0.625008
1636 1 -0.00520314 0.936623 0.558063
1663 1 0.933199 0.876217 0.560128
1664 1 0.875133 0.936529 0.560846
1789 1 0.871458 0.873088 0.623752
1790 1 0.935729 0.935134 0.621615
1026 1 0.0611657 0.557841 1.00253
1801 1 0.252979 0.507725 0.750177
1672 1 0.122986 0.565119 0.684335
1699 1 0.0648323 0.627566 0.681649
1794 1 0.0620733 0.567082 0.750237
1800 1 0.127841 0.568566 0.810627
1827 1 0.0642382 0.630401 0.813336
1829 1 0.123843 0.630029 0.750878
1668 1 -0.0028643 0.560193 0.690268
2039 1 0.691408 0.871519 0.934363
1825 1 -0.000511279 0.626152 0.753659
1085 1 0.870826 0.624861 0.999185
1823 1 0.937065 0.502152 0.814921
659 1 0.560328 0.999568 0.688695
2038 1 0.694599 0.935543 0.878476
1676 1 0.249587 0.563498 0.685817
1703 1 0.183013 0.628899 0.686887
1707 1 0.309378 0.623341 0.690449
1798 1 0.184945 0.569185 0.747311
1802 1 0.314556 0.562149 0.749336
1804 1 0.243855 0.566259 0.813631
1831 1 0.186066 0.625445 0.815553
1833 1 0.252579 0.629979 0.751357
1835 1 0.310318 0.626785 0.81357
1633 1 -0.00120256 0.870745 0.501141
1815 1 0.691852 0.497308 0.815306
649 1 0.25158 1.00032 0.623572
1680 1 0.374472 0.56378 0.690574
1711 1 0.431949 0.62908 0.68796
1806 1 0.434648 0.565497 0.749906
1808 1 0.375005 0.562407 0.809611
1837 1 0.370382 0.629309 0.743347
1839 1 0.439759 0.6271 0.810485
1050 1 0.810704 0.556488 0.997536
2036 1 0.4989 0.937825 0.942668
1812 1 0.494266 0.565287 0.808873
1841 1 0.495414 0.627931 0.747062
1684 1 0.499157 0.565543 0.688681
1688 1 0.628544 0.567243 0.692071
1715 1 0.566291 0.625608 0.693315
1810 1 0.561237 0.564924 0.749902
1816 1 0.62182 0.56581 0.809693
1843 1 0.561843 0.631268 0.805981
1845 1 0.629004 0.629128 0.753953
1673 1 0.248252 0.495746 0.628653
2035 1 0.5619 0.872711 0.944071
777 1 0.251758 0.999479 0.750274
1692 1 0.74751 0.558706 0.693507
1719 1 0.691554 0.624935 0.686495
1723 1 0.817225 0.626587 0.68767
1814 1 0.693001 0.559049 0.756688
1818 1 0.812023 0.559996 0.747456
1820 1 0.753969 0.559971 0.814184
1847 1 0.688207 0.626411 0.814552
1849 1 0.752422 0.629087 0.747186
1851 1 0.811112 0.622278 0.809422
2034 1 0.565833 0.933188 0.885204
2037 1 0.629721 0.877048 0.879863
1661 1 0.87271 0.876355 0.49852
1796 1 1.00158 0.558625 0.813887
1696 1 0.878167 0.561214 0.686643
1727 1 0.936147 0.62736 0.690463
1822 1 0.941084 0.559634 0.749096
1824 1 0.867854 0.562438 0.809962
1853 1 0.875574 0.622484 0.747989
1855 1 0.939053 0.622764 0.815566
535 1 0.685448 1.00217 0.566666
1704 1 0.124396 0.690414 0.68725
1731 1 0.0600697 0.749649 0.68578
1736 1 0.126513 0.812626 0.683227
1826 1 0.0623773 0.687825 0.755068
1832 1 0.130106 0.689385 0.813438
1858 1 0.0677303 0.816495 0.750695
1859 1 0.0611663 0.749792 0.809624
1861 1 0.122603 0.752826 0.752373
1864 1 0.124502 0.808826 0.815651
1860 1 1.00252 0.8158 0.813279
1828 1 0.996849 0.692801 0.813453
1700 1 0.998833 0.690905 0.689003
1708 1 0.25048 0.694062 0.685282
1735 1 0.190446 0.753661 0.689717
1739 1 0.313428 0.754332 0.686338
1740 1 0.250142 0.816176 0.681768
1830 1 0.189411 0.687926 0.751892
1834 1 0.311952 0.693336 0.746938
1836 1 0.249294 0.682764 0.812493
1862 1 0.184259 0.814287 0.746255
1863 1 0.190015 0.755817 0.815966
1865 1 0.251193 0.752783 0.751304
1866 1 0.316564 0.8139 0.747659
1867 1 0.30942 0.749374 0.811283
1868 1 0.251889 0.811246 0.814408
1712 1 0.373345 0.690677 0.68393
1743 1 0.437318 0.751061 0.689745
1744 1 0.374457 0.809272 0.684953
1838 1 0.439049 0.688612 0.753365
1840 1 0.369471 0.68344 0.811557
1869 1 0.378663 0.748179 0.747492
1870 1 0.437783 0.811686 0.750833
1871 1 0.439247 0.750337 0.811928
1872 1 0.375879 0.808438 0.813521
1876 1 0.503619 0.815498 0.813329
1873 1 0.50518 0.751908 0.745108
1716 1 0.50326 0.689422 0.687743
1748 1 0.502954 0.809048 0.682524
1844 1 0.500556 0.687111 0.807391
1720 1 0.625915 0.682626 0.690077
1747 1 0.564988 0.753487 0.686867
1752 1 0.621753 0.81201 0.683374
1842 1 0.566945 0.691178 0.752208
1848 1 0.62797 0.687449 0.812749
1874 1 0.560369 0.815862 0.752785
1875 1 0.565863 0.756191 0.813189
1877 1 0.628229 0.752978 0.752509
1880 1 0.631122 0.817981 0.816291
1724 1 0.750903 0.6903 0.686085
1751 1 0.686228 0.754142 0.690421
1755 1 0.815978 0.753082 0.689727
1756 1 0.74718 0.814677 0.686836
1846 1 0.693976 0.686951 0.744782
1850 1 0.810528 0.684409 0.747285
1852 1 0.753223 0.687661 0.807949
1878 1 0.688063 0.814195 0.751328
1879 1 0.691321 0.753094 0.815087
1881 1 0.752654 0.747793 0.746822
1882 1 0.808653 0.814382 0.751287
1883 1 0.814301 0.74941 0.813512
1884 1 0.749385 0.810999 0.809484
1857 1 0.999601 0.748262 0.747681
1732 1 -0.00318708 0.813371 0.687474
1728 1 0.877512 0.691617 0.685992
1759 1 0.938074 0.754163 0.68973
1760 1 0.879165 0.811651 0.682614
1854 1 0.935843 0.684759 0.750626
1856 1 0.876907 0.682739 0.81607
1885 1 0.87413 0.752772 0.747487
1886 1 0.940993 0.812529 0.753799
1887 1 0.938133 0.752142 0.809856
1888 1 0.877761 0.808628 0.815433
1763 1 0.0628944 0.873781 0.686998
1768 1 0.117575 0.937796 0.68779
1890 1 0.0605841 0.93845 0.753026
1891 1 0.0640167 0.877062 0.811483
1893 1 0.124848 0.873237 0.753692
1896 1 0.127006 0.93539 0.810665
2040 1 0.628887 0.933626 0.941318
1889 1 1.00024 0.87532 0.745928
1764 1 -0.000103747 0.938615 0.687104
2033 1 0.498396 0.874364 0.871117
1547 1 0.321582 0.500636 0.565975
655 1 0.436207 0.999784 0.68721
1767 1 0.181771 0.87969 0.690067
1771 1 0.31341 0.876048 0.690904
1772 1 0.245481 0.938478 0.686387
1894 1 0.18421 0.93812 0.748346
1895 1 0.186219 0.870628 0.810948
1897 1 0.245573 0.873049 0.753412
1898 1 0.310171 0.931073 0.753036
1899 1 0.314073 0.87004 0.81217
1900 1 0.246803 0.932517 0.818979
785 1 0.501089 1.00464 0.753179
1030 1 0.185404 0.562545 1.00731
1775 1 0.440276 0.874133 0.690021
1776 1 0.374997 0.940386 0.686857
1901 1 0.376813 0.876326 0.745108
1902 1 0.433225 0.940169 0.751725
1903 1 0.435073 0.872907 0.808125
1904 1 0.373655 0.929475 0.811432
1780 1 0.500356 0.938291 0.688801
1908 1 0.494566 0.936206 0.810972
1945 1 0.751935 0.499134 0.875538
1081 1 0.749128 0.631106 0.997922
539 1 0.816087 1.00502 0.563176
1905 1 0.496074 0.873225 0.751004
1779 1 0.563432 0.875571 0.688027
1784 1 0.620456 0.936734 0.689661
1906 1 0.559667 0.937363 0.753651
1907 1 0.562155 0.880138 0.811494
1909 1 0.626213 0.87071 0.74883
1912 1 0.630837 0.935446 0.808255
1034 1 0.314626 0.56283 1.00367
1984 1 0.874641 0.686618 0.933599
1065 1 0.249816 0.626391 1.005
2013 1 0.875156 0.749388 0.87537
1783 1 0.683939 0.876963 0.683571
1787 1 0.814954 0.877421 0.68627
1788 1 0.748532 0.941491 0.689834
1910 1 0.686667 0.937685 0.74927
1911 1 0.690886 0.875884 0.814634
1913 1 0.748312 0.878654 0.74866
1914 1 0.813973 0.938407 0.75095
1915 1 0.816965 0.870161 0.814139
1916 1 0.75021 0.934754 0.812865
1892 1 0.00252703 0.938438 0.815303
1791 1 0.937998 0.874117 0.686094
1792 1 0.878128 0.937876 0.678219
1917 1 0.877463 0.871518 0.745239
1918 1 0.938077 0.937826 0.747695
1919 1 0.941929 0.877104 0.812181
1920 1 0.875774 0.934451 0.811622
2021 1 0.123944 0.871713 0.870539
1922 1 0.064199 0.564956 0.872877
1928 1 0.129366 0.562738 0.946117
1955 1 0.0628347 0.628637 0.944884
1957 1 0.124884 0.626399 0.879115
1953 1 0.00128432 0.627183 0.880253
1924 1 0.998543 0.561405 0.939576
1029 1 0.128022 0.500294 1.00134
2024 1 0.120867 0.935207 0.941832
2018 1 0.0678895 0.935873 0.87097
2019 1 0.0610592 0.874221 0.934794
1077 1 0.626599 0.62601 0.992381
905 1 0.249899 1.0008 0.871605
1926 1 0.188912 0.55981 0.878483
1930 1 0.308037 0.564358 0.870549
1932 1 0.247292 0.570683 0.943528
1959 1 0.181448 0.629739 0.939671
1961 1 0.244746 0.630171 0.877192
1963 1 0.311764 0.625354 0.935874
2029 1 0.373302 0.87578 0.877701
1934 1 0.43802 0.564622 0.871491
1936 1 0.370005 0.565163 0.936471
1965 1 0.376781 0.627414 0.871162
1967 1 0.440388 0.623486 0.934166
1940 1 0.49665 0.559087 0.935391
2025 1 0.244412 0.872582 0.874489
2030 1 0.436532 0.933609 0.87335
1969 1 0.503305 0.626638 0.873505
1938 1 0.557087 0.563022 0.871677
1944 1 0.627476 0.556477 0.934269
1971 1 0.558459 0.626418 0.936964
1973 1 0.620045 0.618432 0.874172
2032 1 0.371751 0.937978 0.935522
2014 1 0.938708 0.812106 0.873908
665 1 0.749583 1.00009 0.62481
1093 1 0.124498 0.74677 1.0032
1942 1 0.684831 0.561273 0.869178
1946 1 0.81286 0.561762 0.881876
1948 1 0.750441 0.561438 0.939637
1975 1 0.691802 0.627215 0.934346
1977 1 0.750429 0.626403 0.874075
1979 1 0.813983 0.627098 0.932803
531 1 0.561981 1.00584 0.565599
2031 1 0.442845 0.872362 0.935502
2026 1 0.311842 0.937102 0.874798
1980 1 0.750784 0.690392 0.938442
1978 1 0.811078 0.684079 0.874314
1950 1 0.934113 0.564598 0.876965
1952 1 0.874314 0.564543 0.938344
1981 1 0.873476 0.621788 0.875196
1983 1 0.93329 0.626549 0.936912
1062 1 0.185898 0.688543 1.00012
2017 1 0.000789519 0.872979 0.871064
1954 1 0.0654754 0.694454 0.869947
1960 1 0.120537 0.690411 0.939805
1986 1 0.0569147 0.810834 0.875663
1987 1 0.0600832 0.750739 0.937046
1989 1 0.125468 0.749713 0.875837
1992 1 0.126337 0.811834 0.942028
1988 1 0.999589 0.813748 0.939663
1985 1 0.996221 0.751877 0.876919
1956 1 1.00093 0.690338 0.939271
2015 1 0.941556 0.748165 0.937499
1958 1 0.185959 0.688736 0.876814
1962 1 0.31002 0.687976 0.87634
1964 1 0.250237 0.691955 0.940272
1990 1 0.185864 0.814056 0.876935
1991 1 0.186907 0.753193 0.939531
1993 1 0.253965 0.749537 0.873255
1994 1 0.312438 0.812927 0.876549
1995 1 0.309757 0.749601 0.936156
1996 1 0.248014 0.807961 0.938691
1939 1 0.561724 0.497994 0.928553
2016 1 0.879926 0.810229 0.934355
2027 1 0.311423 0.873622 0.938075
2000 1 0.371102 0.811004 0.939887
1999 1 0.438348 0.750557 0.936143
1998 1 0.436384 0.810181 0.874797
1997 1 0.367492 0.744998 0.874093
1968 1 0.373845 0.683995 0.935353
1966 1 0.440277 0.687646 0.868235
2001 1 0.507421 0.752297 0.872178
2023 1 0.187132 0.869992 0.938829
2022 1 0.181335 0.936902 0.88102
1982 1 0.9386 0.686962 0.8742
2004 1 0.50317 0.814585 0.933672
1972 1 0.500345 0.690162 0.931307
1970 1 0.568562 0.693663 0.870272
1976 1 0.626077 0.689623 0.93889
2002 1 0.568997 0.815 0.873522
2003 1 0.564644 0.750749 0.939156
2005 1 0.625919 0.75247 0.877993
2008 1 0.623737 0.813233 0.938647
1078 1 0.691098 0.688778 1.00386
2028 1 0.246837 0.939782 0.934915
2007 1 0.68301 0.74681 0.940434
2006 1 0.693655 0.808747 0.875041
2011 1 0.814019 0.748575 0.940721
2010 1 0.810555 0.810902 0.870424
2009 1 0.75164 0.745386 0.879836
2012 1 0.751059 0.807891 0.938505
1974 1 0.690773 0.68671 0.879135
903 1 0.184022 1.0005 0.940226
1110 1 0.684367 0.809874 0.998653
1126 1 0.187926 0.934981 0.999344
1146 1 0.808634 0.936296 0.999228
1069 1 0.380232 0.625572 0.99928
1090 1 0.0650927 0.811223 0.999598
1042 1 0.559075 0.561875 0.996322
1054 1 0.937724 0.563778 0.999069
1149 1 0.875377 0.865551 0.996475
1118 1 0.936208 0.809426 1.00374
1129 1 0.250485 0.871142 0.998272
1109 1 0.625134 0.748055 1.00497
533 1 0.626158 1.00544 0.499128
1654 1 0.691841 0.940983 0.502253
1125 1 0.125046 0.875711 0.99938
1650 1 0.560688 0.941457 0.502554
1117 1 0.877896 0.748421 0.998781
1097 1 0.248137 0.745956 1.0039
1609 1 0.247081 0.750711 0.502024
1617 1 0.503393 0.750585 0.506692
1622 1 0.683955 0.815722 0.502367
1597 1 0.87367 0.619843 0.504828
1637 1 0.125716 0.873128 0.503309
1638 1 0.181913 0.939225 0.49496
1582 1 0.434857 0.688173 0.507096
1562 1 0.807792 0.562784 0.506123
1590 1 0.691694 0.684579 0.501257
1037 1 0.372563 0.496608 1.00207
1610 1 0.313542 0.817933 0.506286
| [
"[email protected]"
]
| |
66d97cf320481774462c902df971c3063f758013 | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYBBHToTauTau_M-100_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467523/HTT_24Jul_newTES_manzoni_Up_Jobs/base_cfg.py | 0293621aeda87a45bbf66fd504f3590c52ec1173 | []
| no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60,473 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("H2TAUTAU")
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-100_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_9_1_kFk.root')
)
# SVFit mass reconstruction run on the recoil-corrected di-tau candidates,
# one producer per channel (tau-tau, tau-e, tau-mu).  Each consumes the
# matching recoilCorMET* collection defined further below.
# SVFitVersion = 2 selects which SVFit implementation the producer uses
# (see the producer's C++ code for the exact meaning — not visible here).
process.cmgDiTauCorSVFitPreSel = cms.EDProducer("TauTauWithSVFitProducer",
    diTauSrc = cms.InputTag("recoilCorMETDiTau"),
    SVFitVersion = cms.int32(2),
    verbose = cms.untracked.bool(False)
)
process.cmgTauEleCorSVFitPreSel = cms.EDProducer("TauEleWithSVFitProducer",
    diTauSrc = cms.InputTag("recoilCorMETTauEle"),
    SVFitVersion = cms.int32(2),
    verbose = cms.untracked.bool(False)
)
process.cmgTauMuCorSVFitPreSel = cms.EDProducer("TauMuWithSVFitProducer",
    diTauSrc = cms.InputTag("recoilCorMETTauMu"),
    SVFitVersion = cms.int32(2),
    verbose = cms.untracked.bool(False)
)
# Same SVFit producer, but run on the uncorrected pre-selected di-tau
# candidates (cmgDiTauCorPreSel) instead of the recoil-corrected ones.
process.diTauSVFit = cms.EDProducer("TauTauWithSVFitProducer",
    diTauSrc = cms.InputTag("cmgDiTauCorPreSel"),
    SVFitVersion = cms.int32(2),
    verbose = cms.untracked.bool(False)
)
# Prune the generator particle list down to status-3 bosons: W+/-, Z,
# photon, SM Higgs (h0) and the heavy/pseudoscalar Higgs PDG ids 35/36.
# The result feeds genBosonSrc of the recoil-correction producers below.
process.genWorZ = cms.EDProducer("GenParticlePruner",
    src = cms.InputTag("genParticlesPruned"),
    select = cms.vstring('keep status()==3 & pdgId = {W+}',
        'keep status()==3 & pdgId = {W-}',
        'keep status()==3 & pdgId = {Z0}',
        'keep status()==3 & pdgId = {gamma}',
        'keep status()==3 & pdgId = {h0}',
        'keep status()==3 & pdgId = 35',
        'keep status()==3 & pdgId = 36')
)
# MVA (regression) MET producers, one per channel.  The three modules are
# configured identically except for the module class and recBosonSrc
# (the channel's pre-selected di-tau collection).  Inputs are the various
# MET flavours (pf/track/no-PU/PU/PU-charged), rho, jets and the vertex
# collection; the weights_* GBR files hold the trained regressions for the
# MET magnitude, phi and the two covariance terms (53X, Dec2012 training).
# NOTE(review): weight files are read from a user's AFS home area — not
# portable; confirm these paths are valid where this config is deployed.
process.mvaMETDiTau = cms.EDProducer("MVAMETProducerDiTau",
    pucmetSrc = cms.InputTag("pcMet"),
    enable = cms.bool(True),
    tkmetSrc = cms.InputTag("tkMet"),
    verbose = cms.untracked.bool(False),
    nopumetSrc = cms.InputTag("nopuMet"),
    rhoSrc = cms.InputTag("kt6PFJets","rho"),
    pfmetSrc = cms.InputTag("pfMetForRegression"),
    nJetsPtGt1Src = cms.InputTag("nJetsPtGt1"),
    pumetSrc = cms.InputTag("puMet"),
    weights_gbrmetu1cov = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbru1cov_53_Dec2012.root'),
    weights_gbrmetu2cov = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbru2cov_53_Dec2012.root'),
    vertexSrc = cms.InputTag("goodPVFilter"),
    jetSrc = cms.InputTag("cmgPFJetSel"),
    leadJetSrc = cms.InputTag("cmgPFBaseJetLead"),
    recBosonSrc = cms.InputTag("cmgDiTauPreSel"),
    weights_gbrmetphi = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbrmetphi_53_Dec2012.root'),
    weights_gbrmet = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbrmet_53_Dec2012.root'),
    puJetIdLabel = cms.string('met53x')
)
# tau-e channel: identical configuration, recBosonSrc = cmgTauElePreSel.
process.mvaMETTauEle = cms.EDProducer("MVAMETProducerTauEle",
    pucmetSrc = cms.InputTag("pcMet"),
    enable = cms.bool(True),
    tkmetSrc = cms.InputTag("tkMet"),
    verbose = cms.untracked.bool(False),
    nopumetSrc = cms.InputTag("nopuMet"),
    rhoSrc = cms.InputTag("kt6PFJets","rho"),
    pfmetSrc = cms.InputTag("pfMetForRegression"),
    nJetsPtGt1Src = cms.InputTag("nJetsPtGt1"),
    pumetSrc = cms.InputTag("puMet"),
    weights_gbrmetu1cov = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbru1cov_53_Dec2012.root'),
    weights_gbrmetu2cov = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbru2cov_53_Dec2012.root'),
    vertexSrc = cms.InputTag("goodPVFilter"),
    jetSrc = cms.InputTag("cmgPFJetSel"),
    leadJetSrc = cms.InputTag("cmgPFBaseJetLead"),
    recBosonSrc = cms.InputTag("cmgTauElePreSel"),
    weights_gbrmetphi = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbrmetphi_53_Dec2012.root'),
    weights_gbrmet = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbrmet_53_Dec2012.root'),
    puJetIdLabel = cms.string('met53x')
)
# tau-mu channel: identical configuration, recBosonSrc = cmgTauMuPreSel.
process.mvaMETTauMu = cms.EDProducer("MVAMETProducerTauMu",
    pucmetSrc = cms.InputTag("pcMet"),
    enable = cms.bool(True),
    tkmetSrc = cms.InputTag("tkMet"),
    verbose = cms.untracked.bool(False),
    nopumetSrc = cms.InputTag("nopuMet"),
    rhoSrc = cms.InputTag("kt6PFJets","rho"),
    pfmetSrc = cms.InputTag("pfMetForRegression"),
    nJetsPtGt1Src = cms.InputTag("nJetsPtGt1"),
    pumetSrc = cms.InputTag("puMet"),
    weights_gbrmetu1cov = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbru1cov_53_Dec2012.root'),
    weights_gbrmetu2cov = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbru2cov_53_Dec2012.root'),
    vertexSrc = cms.InputTag("goodPVFilter"),
    jetSrc = cms.InputTag("cmgPFJetSel"),
    leadJetSrc = cms.InputTag("cmgPFBaseJetLead"),
    recBosonSrc = cms.InputTag("cmgTauMuPreSel"),
    weights_gbrmetphi = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbrmetphi_53_Dec2012.root'),
    weights_gbrmet = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/mvaMET/gbrmet_53_Dec2012.root'),
    puJetIdLabel = cms.string('met53x')
)
# Recoil-corrected MET, correctionType = 1, one producer per channel.
# All three use the generator boson from genWorZ, the Z->mumu recoil fits
# for MC and data as reference, and correct to the H->tautau target file
# (recoilfit_htt53X_20pv_njet.root).  They differ only in recBosonSrc
# (the pT-selected di-tau collection of each channel).  leptonLeg = 0
# presumably means no individual lepton leg is treated specially here —
# contrast with leptonLeg = 2 in the type-2 producers below; confirm in
# the producer's C++ code.
process.recoilCorMETDiTau = cms.EDProducer("RecoilCorrectedMETProducerDiTau",
    leptonLeg = cms.int32(0),
    force = cms.bool(False),
    verbose = cms.untracked.bool(False),
    genBosonSrc = cms.InputTag("genWorZ"),
    fileZmmMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_zmm53XRR_2012_njet.root'),
    fileZmmData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_datamm53XRR_2012_njet.root'),
    fileCorrectTo = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection//recoilfit_htt53X_20pv_njet.root'),
    enable = cms.bool(True),
    correctionType = cms.int32(1),
    jetSrc = cms.InputTag("cmgPFJetForRecoil"),
    recBosonSrc = cms.InputTag("cmgDiTauPtSel")
)
# tau-e channel: same files/settings, recBosonSrc = cmgTauEleTauPtSel.
process.recoilCorMETTauEle = cms.EDProducer("RecoilCorrectedMETProducerTauEle",
    leptonLeg = cms.int32(0),
    force = cms.bool(False),
    verbose = cms.untracked.bool(False),
    genBosonSrc = cms.InputTag("genWorZ"),
    fileZmmMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_zmm53XRR_2012_njet.root'),
    fileZmmData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_datamm53XRR_2012_njet.root'),
    fileCorrectTo = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection//recoilfit_htt53X_20pv_njet.root'),
    enable = cms.bool(True),
    correctionType = cms.int32(1),
    jetSrc = cms.InputTag("cmgPFJetForRecoil"),
    recBosonSrc = cms.InputTag("cmgTauEleTauPtSel")
)
# tau-mu channel: same files/settings, recBosonSrc = cmgTauMuTauPtSel.
process.recoilCorMETTauMu = cms.EDProducer("RecoilCorrectedMETProducerTauMu",
    leptonLeg = cms.int32(0),
    force = cms.bool(False),
    verbose = cms.untracked.bool(False),
    genBosonSrc = cms.InputTag("genWorZ"),
    fileZmmMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_zmm53XRR_2012_njet.root'),
    fileZmmData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_datamm53XRR_2012_njet.root'),
    fileCorrectTo = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection//recoilfit_htt53X_20pv_njet.root'),
    enable = cms.bool(True),
    correctionType = cms.int32(1),
    jetSrc = cms.InputTag("cmgPFJetForRecoil"),
    recBosonSrc = cms.InputTag("cmgTauMuTauPtSel")
)
# Alternative recoil-correction producers, correctionType = 2.
# The di-tau one corrects to the Z->tautau target (recoilfit_ztt...) with
# leptonLeg = 0; the mu-e / tau-e / tau-mu ones correct to the W+jets
# target (recoilfit_wjets...) with leptonLeg = 2.  They also differ from
# the type-1 producers above in the Zmm reference files used
# (zmm53XRR_2012 vs zmm53X_20pv) and in recBosonSrc (the *Sel candidate
# collections instead of the pT-selected ones).
process.recoilCorrectedMETDiTau = cms.EDProducer("RecoilCorrectedMETProducerDiTau",
    leptonLeg = cms.int32(0),
    force = cms.bool(False),
    verbose = cms.untracked.bool(False),
    genBosonSrc = cms.InputTag("genWorZ"),
    fileZmmMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_zmm53XRR_2012_njet.root'),
    fileZmmData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_datamm53XRR_2012_njet.root'),
    fileCorrectTo = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_ztt53X_20pv_njet.root'),
    enable = cms.bool(True),
    correctionType = cms.int32(2),
    jetSrc = cms.InputTag("cmgPFJetForRecoil"),
    recBosonSrc = cms.InputTag("cmgDiTauSel")
)
# mu-e channel: note this is the only producer in the group with an
# explicit metSrc (cmgPFMET) and with fileCorrectTo listed last.
process.recoilCorrectedMETMuEle = cms.EDProducer("RecoilCorrectedMETProducerMuEle",
    leptonLeg = cms.int32(2),
    force = cms.bool(False),
    verbose = cms.untracked.bool(False),
    genBosonSrc = cms.InputTag("genWorZ"),
    fileZmmMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_zmm53X_20pv_njet.root'),
    fileZmmData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_datamm53X_20pv_njet.root'),
    metSrc = cms.InputTag("cmgPFMET"),
    enable = cms.bool(True),
    correctionType = cms.int32(2),
    jetSrc = cms.InputTag("cmgPFJetForRecoil"),
    recBosonSrc = cms.InputTag("cmgMuEleSel"),
    fileCorrectTo = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_wjets53X_20pv_njet.root')
)
# tau-e channel, corrected to the W+jets target.
process.recoilCorrectedMETTauEle = cms.EDProducer("RecoilCorrectedMETProducerTauEle",
    leptonLeg = cms.int32(2),
    force = cms.bool(False),
    verbose = cms.untracked.bool(False),
    genBosonSrc = cms.InputTag("genWorZ"),
    fileZmmMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_zmm53X_20pv_njet.root'),
    fileZmmData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_datamm53X_20pv_njet.root'),
    fileCorrectTo = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_wjets53X_20pv_njet.root'),
    enable = cms.bool(True),
    correctionType = cms.int32(2),
    jetSrc = cms.InputTag("cmgPFJetForRecoil"),
    recBosonSrc = cms.InputTag("cmgTauEleSel")
)
# tau-mu channel, corrected to the W+jets target.
process.recoilCorrectedMETTauMu = cms.EDProducer("RecoilCorrectedMETProducerTauMu",
    leptonLeg = cms.int32(2),
    force = cms.bool(False),
    verbose = cms.untracked.bool(False),
    genBosonSrc = cms.InputTag("genWorZ"),
    fileZmmMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_zmm53X_20pv_njet.root'),
    fileZmmData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_datamm53X_20pv_njet.root'),
    fileCorrectTo = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/Utilities/data/metRecoilCorrection/recoilfit_wjets53X_20pv_njet.root'),
    enable = cms.bool(True),
    correctionType = cms.int32(2),
    jetSrc = cms.InputTag("cmgPFJetForRecoil"),
    recBosonSrc = cms.InputTag("cmgTauMuSel")
)
# SVFit producers for the tau-e and tau-mu channels, run on the
# recoil-corrected pre-selected candidates (cmgTauEleCorPreSel /
# cmgTauMuCorPreSel); same SVFitVersion = 2 as the producers above.
process.tauEleSVFit = cms.EDProducer("TauEleWithSVFitProducer",
    diTauSrc = cms.InputTag("cmgTauEleCorPreSel"),
    SVFitVersion = cms.int32(2),
    verbose = cms.untracked.bool(False)
)
process.tauMuSVFit = cms.EDProducer("TauMuWithSVFitProducer",
    diTauSrc = cms.InputTag("cmgTauMuCorPreSel"),
    SVFitVersion = cms.int32(2),
    verbose = cms.untracked.bool(False)
)
# Pile-up reweighting producers: each compares the simulated pile-up
# (addPileupInfo) against a data pile-up histogram for a given run period
# and the common Summer11 MC pile-up histogram.  type = 1 selects the
# weighting mode (see PileUpWeightProducer's C++ code for its meaning).
# 05Aug2011 ReReco period (runs 170249-172619).
process.vertexWeight05AugReReco = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_170249-172619_7TeV_ReReco5Aug_Collisions11_JSON_v2.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
# Full 2011 A+B dataset (runs 160404-180252, 4.6/fb).
process.vertexWeight2011AB = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_160404-180252_4.6invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
# 2011B period only.
process.vertexWeight2011B = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2011B.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
# First 2.1/fb of 2011 (runs 160404-173692).
process.vertexWeight2invfb = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_160404-173692_2.1invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
# 3D pile-up reweighting producers. Unlike the 1D variants these take no
# "src"/"type" parameters and use fine-binned "pileupTruth" data histograms.
# Naming convention (from the payloads): "3D<period>" uses the Summer11 MC
# histogram, "3DFall11<period>" the Fall11 MC histogram.
process.vertexWeight3D05AugReReco = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_170249-172619_7TeV_ReReco5Aug_Collisions11_JSON_v2.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Summer11MC.root')
)
process.vertexWeight3D2011AB = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_160404-180252_4.6invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Summer11MC.root')
)
process.vertexWeight3D2011B = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_2011B.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Summer11MC.root')
)
process.vertexWeight3D2invfb = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_160404-173692_2.1invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Summer11MC.root')
)
# Fall11-MC variants of the same periods.
process.vertexWeight3DFall1105AugReReco = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_170249-172619_7TeV_ReReco5Aug_Collisions11_JSON_v2.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Fall11MC.root')
)
process.vertexWeight3DFall112011AB = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_160404-180252_4.6invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Fall11MC.root')
)
process.vertexWeight3DFall112011B = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_2011B.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Fall11MC.root')
)
process.vertexWeight3DFall112invfb = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_160404-173692_2.1invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Fall11MC.root')
)
process.vertexWeight3DFall11May10ReReco = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_160404-163869_7TeV_May10ReReco_Collisions11_JSON_v3.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Fall11MC.root')
)
process.vertexWeight3DFall11PromptRecov4 = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_165088-167913_7TeV_PromptReco_JSON.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Fall11MC.root')
)
process.vertexWeight3DFall11PromptRecov6 = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_172620-173692_PromptReco_JSON.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Fall11MC.root')
)
# Remaining Summer11-MC variants.
process.vertexWeight3DMay10ReReco = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_160404-163869_7TeV_May10ReReco_Collisions11_JSON_v3.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Summer11MC.root')
)
process.vertexWeight3DPromptRecov4 = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_165088-167913_7TeV_PromptReco_JSON.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Summer11MC.root')
)
process.vertexWeight3DPromptRecov6 = cms.EDProducer("PileUpWeight3DProducer",
    verbose = cms.untracked.bool(False),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_172620-173692_PromptReco_JSON.pileupTruth_v2_finebin.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup3D_Summer11MC.root')
)
# More 1D pile-up reweighting producers (type=1). As above, the plain
# "vertexWeight<period>" names use the Summer11 MC histogram and the
# "vertexWeightFall11<period>" names the Fall11 MC histogram; only the
# data-period histogram changes between variants.
process.vertexWeightEPSJul8 = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Pileup_2011_EPS_8_jul.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
process.vertexWeightFall1105AugReReco = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_170249-172619_7TeV_ReReco5Aug_Collisions11_JSON_v2.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall112011AB = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_160404-180252_4.6invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall112011B = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2011B.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall112invfb = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_160404-173692_2.1invfb.pileup.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall11EPSJul8 = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Pileup_2011_EPS_8_jul.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall11LeptonPhoton = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Pileup_2011_to_172802_LP_LumiScale.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall11May10ReReco = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_160404-163869_7TeV_May10ReReco_Collisions11_JSON_v3.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall11PromptRecov4 = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_165088-167913_7TeV_PromptReco_JSON.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightFall11PromptRecov6 = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_172620-173692_PromptReco_JSON.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Fall11MC.root')
)
process.vertexWeightLeptonPhoton = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Pileup_2011_to_172802_LP_LumiScale.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
process.vertexWeightMay10ReReco = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_160404-163869_7TeV_May10ReReco_Collisions11_JSON_v3.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
process.vertexWeightPromptRecov4 = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_165088-167913_7TeV_PromptReco_JSON.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
process.vertexWeightPromptRecov6 = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(1),
    inputHistData = cms.string('/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/PileUp/Cert_172620-173692_PromptReco_JSON.pileup_v2.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer11MC.root')
)
# 2012 pile-up reweighting producers. These use type=2 (unlike the 2011
# type=1 producers above — the meaning of "type" is defined inside
# PileUpWeightProducer, not visible here) and "true" pileup histograms
# for Summer12 MC against various 2012 data periods.
process.vertexWeightSummer12MC53X2012ABCDData = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(2),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2012ABCD.true.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer12MC53X.true.root')
)
process.vertexWeightSummer12MC53X2012BCDData = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(2),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2012BCD.true.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer12MC53X.true.root')
)
process.vertexWeightSummer12MC53X2012D6fbData = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(2),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2012D6fb_203894_207898.true.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer12MC53X.true.root')
)
process.vertexWeightSummer12MC53XHCPData = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(2),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2012HCP_190456_203002.true.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer12MC53X.true.root')
)
process.vertexWeightSummer12MC53XICHEPData = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(2),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2012ICHEP_start_196509.true.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer12MC53X.true.root')
)
# Same ICHEP data period but against the 52X Summer12 MC histogram.
process.vertexWeightSummer12MCICHEPData = cms.EDProducer("PileUpWeightProducer",
    src = cms.InputTag("addPileupInfo"),
    verbose = cms.untracked.bool(False),
    type = cms.int32(2),
    inputHistData = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_2012ICHEP_start_196509.true.root'),
    inputHistMC = cms.string('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/RootTools/data/vertexWeight/Pileup_Summer12MC52X.true.root')
)
# Wraps the reco "pfMet" collection into a CMG BaseMET object.
# ptThreshold=-1 disables any pt cut; the "cuts" PSet is intentionally empty.
process.cmgBaseMETFromPFMET = cms.EDFilter("PFMETPOProducer",
    cfg = cms.PSet(
        ptThreshold = cms.double(-1.0),
        inputCollection = cms.InputTag("pfMet")
    ),
    cuts = cms.PSet(
    )
)
# Builds tau-tau candidates from the selected tau collection (both legs are
# "cmgTauSel") paired with PF MET. The "baseline" cut set records, per leg:
# a loose 3-hit combined isolation (< 10, effectively a bookkeeping cut),
# kinematics (|eta|<2.1, pt>35) and the decayModeFinding ID; plus mass>10.
process.cmgDiTau = cms.EDFilter("DiTauPOProducer",
    cfg = cms.PSet(
        leg2Collection = cms.InputTag("cmgTauSel"),
        leg1Collection = cms.InputTag("cmgTauSel"),
        metsigCollection = cms.InputTag(""),
        metCollection = cms.InputTag("cmgPFMET")
    ),
    cuts = cms.PSet(
        baseline = cms.PSet(
            tau1Leg = cms.PSet(
                iso = cms.string('leg1().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
                kinematics = cms.PSet(
                    eta = cms.string('abs(leg1().eta())<2.1'),
                    pt = cms.string('leg1().pt()>35.')
                ),
                id = cms.PSet(
                    decay = cms.string('leg1().tauID("decayModeFinding")')
                )
            ),
            mass = cms.string('mass()>10'),
            tau2Leg = cms.PSet(
                iso = cms.string('leg2().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
                kinematics = cms.PSet(
                    eta = cms.string('abs(leg2().eta())<2.1'),
                    pt = cms.string('leg2().pt()>35.')
                ),
                id = cms.PSet(
                    decay = cms.string('leg2().tauID("decayModeFinding")')
                )
            )
        )
    )
)
# Applies tau energy-scale shifts to di-tau candidates from the MVA MET
# step: 1.2% shift for 1-prong+pi0 and 3-prong decays, none for 1-prong
# without pi0; MET is shifted consistently (shiftMet=True), at nSigma=1
# with a 3% uncertainty parameter.
process.cmgDiTauCor = cms.EDFilter("DiTauUpdatePOProducer",
    cfg = cms.PSet(
        shift1Prong1Pi0 = cms.double(0.012),
        diObjectCollection = cms.InputTag("mvaMETDiTau"),
        leg1Collection = cms.InputTag(""),
        shiftMet = cms.bool(True),
        shiftTaus = cms.bool(True),
        uncertainty = cms.double(0.03),
        nSigma = cms.double(1),
        shift3Prong = cms.double(0.012),
        shift1ProngNoPi0 = cms.double(0.0),
        leg2Collection = cms.InputTag(""),
        ptDependence1Pi0 = cms.double(0.0),
        ptDependence3Prong = cms.double(0.0)
    ),
    cuts = cms.PSet(
    )
)
# Di-tau selection chain.
# Full selection after SVFit: empty cut string, i.e. pass-through selector.
process.cmgDiTauCorSVFitFullSel = cms.EDFilter("CmgDiTauSelector",
    src = cms.InputTag("cmgDiTauCorSVFitPreSel"),
    cut = cms.string('')
)
# Require at least one selected di-tau candidate in the event.
process.cmgDiTauCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgDiTauSel"),
    minNumber = cms.uint32(1)
)
# Preselection: both legs pt>38 and loose (<10) 3-hit combined isolation.
process.cmgDiTauPreSel = cms.EDFilter("CmgDiTauSelector",
    src = cms.InputTag("cmgDiTau"),
    cut = cms.string('leg1().pt()>38. && leg2().pt()>38. && leg1().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10. && leg2().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.')
)
# Tighter pt cut (45 GeV per leg) applied after the energy-scale correction.
process.cmgDiTauPtSel = cms.EDFilter("CmgDiTauSelector",
    src = cms.InputTag("cmgDiTauCor"),
    cut = cms.string('leg1().pt()>45. && leg2().pt()>45.')
)
# Trivial pt()>0 selection (keeps everything; materializes the collection).
process.cmgDiTauSel = cms.EDFilter("CmgDiTauSelector",
    src = cms.InputTag("cmgDiTau"),
    cut = cms.string(' pt()>0 ')
)
# Mu-ele channel: build muon+electron pairs (no MET attached), keep all
# (pt()>0), and require at least one pair per event.
process.cmgMuEle = cms.EDFilter("MuElePOProducer",
    cfg = cms.PSet(
        leg2Collection = cms.InputTag("cmgElectronSel"),
        leg1Collection = cms.InputTag("cmgMuonSel"),
        metCollection = cms.InputTag("")
    ),
    cuts = cms.PSet(
    )
)
process.cmgMuEleCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgMuEleSel"),
    minNumber = cms.uint32(1)
)
process.cmgMuEleSel = cms.EDFilter("CmgMuEleSelector",
    src = cms.InputTag("cmgMuEle"),
    cut = cms.string('pt()>0')
)
# PF-jet selections. The jets feeding the MET recoil correction are first
# preselected (pt>30, |eta|<4.7, loose jet ID) and then required to pass
# the "full53x" pile-up jet ID working points; the same PU-ID parameters
# are reused for the generic cmgPFJetPUIDSel selector.
process.cmgPFJetForRecoil = cms.EDFilter("CMGJetPUIDSelector",
    src = cms.InputTag("cmgPFJetForRecoilPresel"),
    cut = cms.string(''),
    # PU JetID discriminator thresholds in bins of pt (rows) and |eta|
    # (columns, edges 2.5/2.75/3.0/5.0).
    puJetIDParams = cms.VPSet(cms.PSet(
        minDiscs = cms.vdouble(-0.95, -0.96, -0.94, -0.95),
        maxPt = cms.double(20.0),
        minPt = cms.double(0.0),
        maxEtas = cms.vdouble(2.5, 2.75, 3.0, 5.0)
    ),
    cms.PSet(
        minDiscs = cms.vdouble(-0.63, -0.6, -0.55, -0.45),
        maxPt = cms.double(99999.0),
        minPt = cms.double(20.0),
        maxEtas = cms.vdouble(2.5, 2.75, 3.0, 5.0)
    )),
    puIDName = cms.string('full53x')
)
process.cmgPFJetForRecoilPresel = cms.EDFilter("CmgPFJetSelector",
    src = cms.InputTag("cmgPFJetSel"),
    cut = cms.string('pt()>30 && abs(eta)<4.7 && getSelection("cuts_looseJetId")')
)
# Same PU jet ID applied directly to the full selected-jet collection.
process.cmgPFJetPUIDSel = cms.EDFilter("CMGJetPUIDSelector",
    src = cms.InputTag("cmgPFJetSel"),
    cut = cms.string(''),
    puJetIDParams = cms.VPSet(cms.PSet(
        minDiscs = cms.vdouble(-0.95, -0.96, -0.94, -0.95),
        maxPt = cms.double(20.0),
        minPt = cms.double(0.0),
        maxEtas = cms.vdouble(2.5, 2.75, 3.0, 5.0)
    ),
    cms.PSet(
        minDiscs = cms.vdouble(-0.63, -0.6, -0.55, -0.45),
        maxPt = cms.double(99999.0),
        minPt = cms.double(20.0),
        maxEtas = cms.vdouble(2.5, 2.75, 3.0, 5.0)
    )),
    puIDName = cms.string('full53x')
)
# Trivial pt()>0 selection of the base PF jet collection.
process.cmgPFJetSel = cms.EDFilter("CmgPFJetSelector",
    src = cms.InputTag("cmgPFJet"),
    cut = cms.string('pt()>0')
)
# Builds tau+electron candidates paired with PF MET. Baseline cuts: tau leg
# with loose isolation, |eta|<2.3, pt>15 and decayModeFinding; electron leg
# with |eta|<2.1, pt>20, zero missing hits, eta-dependent MVA-ID working
# points on mvaNonTrigV0, and the conversion veto.
process.cmgTauEle = cms.EDFilter("TauElePOProducer",
    cfg = cms.PSet(
        leg2Collection = cms.InputTag("cmgElectronSel"),
        leg1Collection = cms.InputTag("cmgTauSel"),
        metsigCollection = cms.InputTag(""),
        metCollection = cms.InputTag("cmgPFMET")
    ),
    cuts = cms.PSet(
        baseline = cms.PSet(
            tauLeg = cms.PSet(
                iso = cms.string('leg1().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
                kinematics = cms.PSet(
                    eta = cms.string('abs(leg1().eta())<2.3'),
                    pt = cms.string('leg1().pt()>15.0')
                ),
                id = cms.PSet(
                    decay = cms.string('leg1().tauID("decayModeFinding")')
                )
            ),
            eleLeg = cms.PSet(
                kinematics = cms.PSet(
                    eta = cms.string('abs(leg2().eta())<2.1'),
                    pt = cms.string('leg2().pt()>20.0')
                ),
                ID = cms.PSet(
                    hitsnum = cms.string('leg2().numberOfHits==0'),
                    mvaID = cms.string('(abs(leg2().sourcePtr().superCluster().eta())<0.8 && leg2().mvaNonTrigV0() > 0.925) || (abs(leg2().sourcePtr().superCluster().eta())>0.8 && abs(leg2().sourcePtr().superCluster().eta())<1.479 && leg2().mvaNonTrigV0() > 0.975) || (abs(leg2().sourcePtr().superCluster().eta())>1.479 && leg2().mvaNonTrigV0() > 0.985)'),
                    convVeto = cms.string('leg2().passConversionVeto()!=0')
                )
            )
        )
    )
)
# Tau-ele updater on the MVA-MET output. All energy-scale shifts are 0 and
# nSigma=0, so this applies no scale variation; it swaps in the recoil-
# corrected MET ("recoilCorrectedMET").
process.cmgTauEleCor = cms.EDFilter("TauEleUpdatePOProducer",
    cfg = cms.PSet(
        shift1Prong1Pi0 = cms.double(0.0),
        diObjectCollection = cms.InputTag("mvaMETTauEle"),
        leg1Collection = cms.InputTag(""),
        metCollection = cms.InputTag("recoilCorrectedMET"),
        uncertainty = cms.double(0.03),
        nSigma = cms.double(0),
        shift3Prong = cms.double(0.0),
        shift1ProngNoPi0 = cms.double(0.0),
        leg2Collection = cms.InputTag(""),
        ptDependence1Pi0 = cms.double(0.0),
        ptDependence3Prong = cms.double(0.0)
    ),
    cuts = cms.PSet(
    )
)
# Pass-through full selection after SVFit.
process.cmgTauEleCorSVFitFullSel = cms.EDFilter("CmgTauEleSelector",
    src = cms.InputTag("cmgTauEleCorSVFitPreSel"),
    cut = cms.string('')
)
# Require at least one selected tau-ele candidate.
process.cmgTauEleCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgTauEleSel"),
    minNumber = cms.uint32(1)
)
# Same updater applied to the preselected (non-MVA-MET) collection.
process.cmgTauEleMVAPreSel = cms.EDFilter("TauEleUpdatePOProducer",
    cfg = cms.PSet(
        shift1Prong1Pi0 = cms.double(0.0),
        diObjectCollection = cms.InputTag("cmgTauElePreSel"),
        leg1Collection = cms.InputTag(""),
        metCollection = cms.InputTag("recoilCorrectedMET"),
        uncertainty = cms.double(0.03),
        nSigma = cms.double(0),
        shift3Prong = cms.double(0.0),
        shift1ProngNoPi0 = cms.double(0.0),
        leg2Collection = cms.InputTag(""),
        ptDependence1Pi0 = cms.double(0.0),
        ptDependence3Prong = cms.double(0.0)
    ),
    cuts = cms.PSet(
    )
)
# Keep candidates passing the "baseline" cut set defined in cmgTauEle.
process.cmgTauElePreSel = cms.EDFilter("CmgTauEleSelector",
    src = cms.InputTag("cmgTauEle"),
    cut = cms.string('getSelection("cuts_baseline")')
)
# Trivial pt()>0 selection.
process.cmgTauEleSel = cms.EDFilter("CmgTauEleSelector",
    src = cms.InputTag("cmgTauEle"),
    cut = cms.string('pt()>0')
)
# Tau-leg pt > 18 GeV after correction.
process.cmgTauEleTauPtSel = cms.EDFilter("CmgTauEleSelector",
    src = cms.InputTag("cmgTauEleCor"),
    cut = cms.string('leg1().pt()>18.')
)
# Builds tau+muon candidates paired with PF MET. Baseline cuts: tau leg with
# loose isolation, |eta|<2.3, pt>15, decayModeFinding and the tight
# anti-muon discriminator; muon leg with |eta|<2.1, pt>17; pair mass>10.
# "caloMuVeto" additionally records an E/p > 0.2 requirement on the tau leg.
process.cmgTauMu = cms.EDFilter("TauMuPOProducer",
    cfg = cms.PSet(
        leg2Collection = cms.InputTag("cmgMuonSel"),
        leg1Collection = cms.InputTag("cmgTauSel"),
        metsigCollection = cms.InputTag(""),
        metCollection = cms.InputTag("cmgPFMET")
    ),
    cuts = cms.PSet(
        caloMuVeto = cms.string('leg1().eOverP()>0.2'),
        baseline = cms.PSet(
            tauLeg = cms.PSet(
                iso = cms.string('leg1().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
                kinematics = cms.PSet(
                    eta = cms.string('abs(leg1().eta())<2.3'),
                    pt = cms.string('leg1().pt()>15.0')
                ),
                id = cms.PSet(
                    muRejection = cms.string('leg1().tauID("againstMuonTight") > 0.5'),
                    decay = cms.string('leg1().tauID("decayModeFinding")')
                )
            ),
            muLeg = cms.PSet(
                kinematics = cms.PSet(
                    eta = cms.string('abs(leg2().eta())<2.1'),
                    pt = cms.string('leg2().pt()>17.0')
                )
            ),
            mass = cms.string('mass()>10')
        )
    )
)
# Tau-mu updater on the MVA-MET output; mirrors the tau-ele chain above.
# All energy-scale shifts are 0 and nSigma=0 (no scale variation); the MET
# is replaced by the recoil-corrected one.
process.cmgTauMuCor = cms.EDFilter("TauMuUpdatePOProducer",
    cfg = cms.PSet(
        shift1Prong1Pi0 = cms.double(0.0),
        diObjectCollection = cms.InputTag("mvaMETTauMu"),
        leg1Collection = cms.InputTag(""),
        metCollection = cms.InputTag("recoilCorrectedMET"),
        uncertainty = cms.double(0.03),
        nSigma = cms.double(0),
        shift3Prong = cms.double(0.0),
        shift1ProngNoPi0 = cms.double(0.0),
        leg2Collection = cms.InputTag(""),
        ptDependence1Pi0 = cms.double(0.0),
        ptDependence3Prong = cms.double(0.0)
    ),
    cuts = cms.PSet(
    )
)
# Pass-through full selection after SVFit.
process.cmgTauMuCorSVFitFullSel = cms.EDFilter("CmgTauMuSelector",
    src = cms.InputTag("cmgTauMuCorSVFitPreSel"),
    cut = cms.string('')
)
# Require at least one selected tau-mu candidate.
process.cmgTauMuCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgTauMuSel"),
    minNumber = cms.uint32(1)
)
# Same updater applied to the preselected (non-MVA-MET) collection.
process.cmgTauMuMVAPreSel = cms.EDFilter("TauMuUpdatePOProducer",
    cfg = cms.PSet(
        shift1Prong1Pi0 = cms.double(0.0),
        diObjectCollection = cms.InputTag("cmgTauMuPreSel"),
        leg1Collection = cms.InputTag(""),
        metCollection = cms.InputTag("recoilCorrectedMET"),
        uncertainty = cms.double(0.03),
        nSigma = cms.double(0),
        shift3Prong = cms.double(0.0),
        shift1ProngNoPi0 = cms.double(0.0),
        leg2Collection = cms.InputTag(""),
        ptDependence1Pi0 = cms.double(0.0),
        ptDependence3Prong = cms.double(0.0)
    ),
    cuts = cms.PSet(
    )
)
# Keep candidates passing the "baseline" cut set defined in cmgTauMu.
process.cmgTauMuPreSel = cms.EDFilter("CmgTauMuSelector",
    src = cms.InputTag("cmgTauMu"),
    cut = cms.string('getSelection("cuts_baseline")')
)
# Trivial pt()>0 selection.
process.cmgTauMuSel = cms.EDFilter("CmgTauMuSelector",
    src = cms.InputTag("cmgTauMu"),
    cut = cms.string('pt()>0')
)
# Tau-leg pt > 18 GeV after correction.
process.cmgTauMuTauPtSel = cms.EDFilter("CmgTauMuSelector",
    src = cms.InputTag("cmgTauMuCor"),
    cut = cms.string('leg1().pt()>18.')
)
# Skim gatekeepers: each requires >=1 candidate surviving the corresponding
# pre- or full-selection stage, per channel (di-tau, mu-ele, tau-ele, tau-mu).
process.diTauFullSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgDiTauCorSVFitFullSel"),
    minNumber = cms.uint32(1)
)
process.diTauPreSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgDiTauCorSVFitPreSel"),
    minNumber = cms.uint32(1)
)
# Standard good-primary-vertex filter: non-fake, ndof>4, |z|<=24 cm, rho<=2 cm.
process.goodPVFilter = cms.EDFilter("VertexSelector",
    filter = cms.bool(True),
    src = cms.InputTag("offlinePrimaryVertices"),
    cut = cms.string('!isFake && ndof > 4 && abs(z) <= 24 && position.Rho <= 2')
)
process.muEleFullSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgMuEleCorSVFitFullSel"),
    minNumber = cms.uint32(1)
)
process.muElePreSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgMuEleCorSVFitPreSel"),
    minNumber = cms.uint32(1)
)
process.tauEleFullSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgTauEleCorSVFitFullSel"),
    minNumber = cms.uint32(1)
)
process.tauElePreSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgTauEleCorSVFitPreSel"),
    minNumber = cms.uint32(1)
)
process.tauMuFullSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgTauMuCorSVFitFullSel"),
    minNumber = cms.uint32(1)
)
process.tauMuPreSelCount = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("cmgTauMuCorSVFitPreSel"),
    minNumber = cms.uint32(1)
)
# EDM output module for the fully-selected di-tau skim: drops everything,
# then keeps generator info, pile-up, rho collections, trigger results,
# CMG physics objects and the H2TAUTAU products. Only events accepted by
# the 'diTauPath' path are written.
process.diTau_fullsel_tree_CMG = cms.OutputModule("PoolOutputModule",
    outputCommands = cms.untracked.vstring('drop *',
        'drop *',
        'keep *_source_*_*',
        'keep *_generator_*_*',
        'keep *_TriggerResults__*',
        'keep *_addPileupInfo__HLT',
        'keep *_genJetSel__PAT',
        'keep *_tauGenJetsSelectorAllHadrons__PAT',
        'keep *_genParticlesPruned__PAT',
        'keep *_vertexWeight*__*',
        'keep *_ak5CaloJets_rho_RECO',
        'keep *_ak5PFJets_rho_RECO',
        'keep *_ak5TrackJets_rho_RECO',
        'keep *_ak7BasicJets_rho_RECO',
        'keep *_ak7CaloJets_rho_RECO',
        'keep *_ak7PFJets_rho_RECO',
        'keep *_kt4CaloJets_rho_RECO',
        'keep *_kt4PFJets_rho_RECO',
        'keep *_kt6CaloJets_rho_RECO',
        'keep *_kt6CaloJetsCentral_rho_RECO',
        'keep *_kt6PFJets_rho_RECO',
        'keep *_kt6PFJetsCentralChargedPileUp_rho_RECO',
        'keep *_kt6PFJetsCentralNeutral_rho_RECO',
        'keep *_kt6PFJetsCentralNeutralTight_rho_RECO',
        'keep *_TriggerResults__RECO',
        'keep *_offlinePrimaryVertices__RECO',
        'keep *_pfMetSignificance__PAT',
        'keep *_ak5PFJetsCHS_rho_PAT',
        'keep *_ak5PFJetsCHSpruned_rho_PAT',
        'keep *_kt6PFJetsCHSForIso_rho_PAT',
        'keep *_kt6PFJetsForIso_rho_PAT',
        'keep *_kt6PFJetsForRhoComputationVoronoi_rho_PAT',
        'keep *_TriggerResults__PAT',
        'keep *_nJetsPtGt1__PAT',
        'keep *_cmgPFBaseJetLead__PAT',
        'keep *_cmgPFBaseJetLeadCHS__PAT',
        'keep *_cmgPFMET__PAT',
        'keep *_cmgPFMETRaw__PAT',
        'keep *_cmgDiElectronSel__PAT',
        'keep *_cmgDiMuonSel__PAT',
        'keep *_cmgElectronSel__PAT',
        'keep *_cmgMuonSel__PAT',
        'keep *_cmgPFJetLooseJetIdFailed__PAT',
        'keep *_cmgPFJetMediumJetIdFailed__PAT',
        'keep *_cmgPFJetSel__PAT',
        'keep *_cmgPFJetSelCHS__PAT',
        'keep *_cmgPFJetTightJetIdFailed__PAT',
        'keep *_cmgPFJetVeryLooseJetId95Failed__PAT',
        'keep *_cmgPFJetVeryLooseJetId95gammaFailed__PAT',
        'keep *_cmgPFJetVeryLooseJetId95h0Failed__PAT',
        'keep *_cmgPFJetVeryLooseJetId99Failed__PAT',
        'keep *_cmgPhotonSel__PAT',
        'keep *_cmgStructuredPFJetSel__PAT',
        'keep *_cmgTriggerObjectListSel__PAT',
        'keep *_cmgTriggerObjectSel__PAT',
        'keep *_patElectronsWithTrigger__PAT',
        'keep *_patMuonsWithTrigger__PAT',
        'keep *_nopuMet__PAT',
        'keep *_pcMet__PAT',
        'keep *_pfMetForRegression__PAT',
        'keep *_puMet__PAT',
        'keep *_tkMet__PAT',
        'keep *_TriggerResults__H2TAUTAU',
        'keep *_cmgDiTauCorSVFitFullSel__H2TAUTAU',
        'keep *_mvaMETdiTau__H2TAUTAU',
        'keep *_goodPVFilter__H2TAUTAU',
        'keep *_genParticles_*_*'),
    SelectEvents = cms.untracked.PSet(
        SelectEvents = cms.vstring('diTauPath')
    ),
    fileName = cms.untracked.string('diTau_fullsel_tree_CMG.root')
)
# Sequences wiring the modules above into runnable chains: skim counters,
# per-channel MVA-MET + correction + recoil chains, the standard candidate
# builders, the MET recoil-correction inputs, and the SVFit chains.
process.diTauPreSelSkimSequence = cms.Sequence(process.diTauPreSelCount)
process.muEleFullSelSkimSequence = cms.Sequence(process.muEleFullSelCount)
process.tauEleMvaMETRecoilSequence = cms.Sequence(process.goodPVFilter+process.mvaMETTauEle+process.cmgTauEleCor+process.cmgTauEleTauPtSel+process.recoilCorMETTauEle)
process.tauEleFullSelSkimSequence = cms.Sequence(process.tauEleFullSelCount)
process.mvaMETSequence = cms.Sequence(process.goodPVFilter+process.mvaMETDiTau+process.cmgDiTauCor+process.cmgDiTauPtSel+process.recoilCorMETDiTau)
process.tauMuStdSequence = cms.Sequence(process.cmgTauMu+process.cmgTauMuPreSel)
process.tauEleStdSequence = cms.Sequence(process.cmgTauEle+process.cmgTauElePreSel)
process.tauMuMvaMETrecoilSequence = cms.Sequence(process.goodPVFilter+process.mvaMETTauMu+process.cmgTauMuCor+process.cmgTauMuTauPtSel+process.recoilCorMETTauMu)
process.diTauFullSelSkimSequence = cms.Sequence(process.diTauFullSelCount)
process.metRecoilCorrectionInputSequence = cms.Sequence(process.cmgPFJetForRecoilPresel+process.cmgPFJetForRecoil+process.genWorZ)
process.metRecoilCorrectionSequence = cms.Sequence(process.metRecoilCorrectionInputSequence+process.recoilCorrectedMETTauMu+process.recoilCorrectedMETTauEle+process.recoilCorrectedMETMuEle)
process.diTauCorSVFitSequence = cms.Sequence(process.mvaMETSequence+process.cmgDiTauCorSVFitPreSel+process.cmgDiTauCorSVFitFullSel)
process.tauElePreSelSkimSequence = cms.Sequence(process.tauElePreSelCount)
process.muElePreSelSkimSequence = cms.Sequence(process.muElePreSelCount)
process.tauEleCorSVFitSequence = cms.Sequence(process.tauEleMvaMETRecoilSequence+process.cmgTauEleCorSVFitPreSel+process.cmgTauEleCorSVFitFullSel)
# Runs every pile-up weight producer defined above, so all weight variants
# are available downstream (and kept via the 'keep *_vertexWeight*__*' rule).
process.vertexWeightSequence = cms.Sequence(process.vertexWeightEPSJul8+process.vertexWeightLeptonPhoton+process.vertexWeightMay10ReReco+process.vertexWeightPromptRecov4+process.vertexWeight05AugReReco+process.vertexWeightPromptRecov6+process.vertexWeight2invfb+process.vertexWeight2011B+process.vertexWeight2011AB+process.vertexWeightFall11EPSJul8+process.vertexWeightFall11LeptonPhoton+process.vertexWeightFall11May10ReReco+process.vertexWeightFall11PromptRecov4+process.vertexWeightFall1105AugReReco+process.vertexWeightFall11PromptRecov6+process.vertexWeightFall112invfb+process.vertexWeightFall112011B+process.vertexWeightFall112011AB+process.vertexWeight3DMay10ReReco+process.vertexWeight3DPromptRecov4+process.vertexWeight3D05AugReReco+process.vertexWeight3DPromptRecov6+process.vertexWeight3D2invfb+process.vertexWeight3D2011B+process.vertexWeight3D2011AB+process.vertexWeight3DFall11May10ReReco+process.vertexWeight3DFall11PromptRecov4+process.vertexWeight3DFall1105AugReReco+process.vertexWeight3DFall11PromptRecov6+process.vertexWeight3DFall112invfb+process.vertexWeight3DFall112011B+process.vertexWeight3DFall112011AB+process.vertexWeightSummer12MCICHEPData+process.vertexWeightSummer12MC53XICHEPData+process.vertexWeightSummer12MC53XHCPData+process.vertexWeightSummer12MC53X2012D6fbData+process.vertexWeightSummer12MC53X2012ABCDData+process.vertexWeightSummer12MC53X2012BCDData)
process.diTauStdSequence = cms.Sequence(process.cmgDiTau+process.cmgDiTauPreSel)
process.tauMuPreSelSkimSequence = cms.Sequence(process.tauMuPreSelCount)
process.tauMuFullSelSkimSequence = cms.Sequence(process.tauMuFullSelCount)
process.genSequence = cms.Sequence(process.metRecoilCorrectionInputSequence+process.vertexWeightSequence)
process.tauEleSequence = cms.Sequence(process.tauEleStdSequence+process.tauEleCorSVFitSequence)
process.tauMuCorSVFitSequence = cms.Sequence(process.tauMuMvaMETrecoilSequence+process.cmgTauMuCorSVFitPreSel+process.cmgTauMuCorSVFitFullSel)
process.tauMuSequence = cms.Sequence(process.tauMuStdSequence+process.tauMuCorSVFitSequence)
process.diTauSequence = cms.Sequence(process.diTauStdSequence+process.diTauCorSVFitSequence)
process.diTauPath = cms.Path(process.genSequence+process.diTauSequence+process.diTauFullSelSkimSequence)
process.tauElePath = cms.Path(process.genSequence+process.tauEleSequence+process.tauEleFullSelSkimSequence)
process.tauMuPath = cms.Path(process.genSequence+process.tauMuSequence+process.tauMuFullSelSkimSequence)
process.outpath = cms.EndPath(process.diTau_fullsel_tree_CMG)
process.MessageLogger = cms.Service("MessageLogger",
suppressInfo = cms.untracked.vstring(),
debugs = cms.untracked.PSet(
placeholder = cms.untracked.bool(True)
),
suppressDebug = cms.untracked.vstring(),
cout = cms.untracked.PSet(
placeholder = cms.untracked.bool(True)
),
cerr_stats = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
output = cms.untracked.string('cerr'),
threshold = cms.untracked.string('WARNING')
),
warnings = cms.untracked.PSet(
placeholder = cms.untracked.bool(True)
),
default = cms.untracked.PSet(
),
errors = cms.untracked.PSet(
placeholder = cms.untracked.bool(True)
),
cerr = cms.untracked.PSet(
INFO = cms.untracked.PSet(
limit = cms.untracked.int32(0)
),
noTimeStamps = cms.untracked.bool(False),
FwkReport = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
reportEvery = cms.untracked.int32(5000),
limit = cms.untracked.int32(10000000)
),
default = cms.untracked.PSet(
limit = cms.untracked.int32(10000000)
),
Root_NoDictionary = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
limit = cms.untracked.int32(0)
),
optionalPSet = cms.untracked.bool(True),
FwkJob = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
limit = cms.untracked.int32(0)
),
FwkSummary = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
reportEvery = cms.untracked.int32(1),
limit = cms.untracked.int32(10000000)
),
threshold = cms.untracked.string('INFO')
),
FrameworkJobReport = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
default = cms.untracked.PSet(
limit = cms.untracked.int32(0)
),
FwkJob = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
limit = cms.untracked.int32(10000000)
)
),
suppressWarning = cms.untracked.vstring(),
statistics = cms.untracked.vstring('cerr_stats'),
infos = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
Root_NoDictionary = cms.untracked.PSet(
optionalPSet = cms.untracked.bool(True),
limit = cms.untracked.int32(0)
),
placeholder = cms.untracked.bool(True)
),
destinations = cms.untracked.vstring('warnings',
'errors',
'infos',
'debugs',
'cout',
'cerr'),
debugModules = cms.untracked.vstring(),
categories = cms.untracked.vstring('FwkJob',
'FwkReport',
'FwkSummary',
'Root_NoDictionary'),
fwkJobReports = cms.untracked.vstring('FrameworkJobReport')
)
process.HepPDTESSource = cms.ESSource("HepPDTESSource",
pdtFileName = cms.FileInPath('SimGeneral/HepPDTESSource/data/pythiaparticle.tbl')
)
process.diObjectFactory = cms.PSet(
leg2Collection = cms.InputTag("dummy"),
leg1Collection = cms.InputTag("dummy"),
metCollection = cms.InputTag("")
)
process.diTauCuts = cms.PSet(
baseline = cms.PSet(
tau1Leg = cms.PSet(
iso = cms.string('leg1().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
kinematics = cms.PSet(
eta = cms.string('abs(leg1().eta())<2.3'),
pt = cms.string('leg1().pt()>15.0')
),
id = cms.PSet(
decay = cms.string('leg1().tauID("decayModeFinding")')
)
),
mass = cms.string('mass()>10'),
tau2Leg = cms.PSet(
iso = cms.string('leg2().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
kinematics = cms.PSet(
eta = cms.string('abs(leg2().eta())<2.3'),
pt = cms.string('leg2().pt()>15.0')
),
id = cms.PSet(
decay = cms.string('leg2().tauID("decayModeFinding")')
)
)
)
)
process.ditauFactory = cms.PSet(
leg2Collection = cms.InputTag("cmgTauSel"),
leg1Collection = cms.InputTag("cmgTauSel"),
metsigCollection = cms.InputTag(""),
metCollection = cms.InputTag("cmgPFMET")
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
process.maxLuminosityBlocks = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
process.muEleFactory = cms.PSet(
leg2Collection = cms.InputTag("cmgElectronSel"),
leg1Collection = cms.InputTag("cmgMuonSel"),
metCollection = cms.InputTag("")
)
process.options = cms.untracked.PSet(
wantSummary = cms.untracked.bool(False)
)
process.puJetIdAlgo = cms.PSet(
tmvaVariables = cms.vstring('nvtx',
'jetPt',
'jetEta',
'jetPhi',
'dZ',
'beta',
'betaStar',
'nCharged',
'nNeutrals',
'dR2Mean',
'ptD',
'frac01',
'frac02',
'frac03',
'frac04',
'frac05'),
tmvaMethod = cms.string('JetIDMVAMET'),
cutBased = cms.bool(False),
tmvaWeights = cms.string('CMGTools/External/data/TMVAClassificationCategory_JetID_MET_53X_Dec2012.weights.xml'),
tmvaSpectators = cms.vstring(),
label = cms.string('met53x'),
version = cms.int32(-1),
JetIdParams = cms.PSet(
Pt2030_Tight = cms.vdouble(-2, -2, -2, -2, -2),
Pt2030_Loose = cms.vdouble(-2, -2, -2, -2, -2),
Pt3050_Medium = cms.vdouble(-2, -2, -2, -2, -2),
Pt1020_MET = cms.vdouble(-0.2, -0.2, -0.5, -0.3),
Pt2030_Medium = cms.vdouble(-2, -2, -2, -2, -2),
Pt010_Tight = cms.vdouble(-2, -2, -2, -2, -2),
Pt1020_Tight = cms.vdouble(-2, -2, -2, -2, -2),
Pt3050_MET = cms.vdouble(-0.2, -0.2, 0.0, 0.2),
Pt010_MET = cms.vdouble(-0.2, -0.3, -0.5, -0.5),
Pt1020_Loose = cms.vdouble(-2, -2, -2, -2, -2),
Pt010_Medium = cms.vdouble(-2, -2, -2, -2, -2),
Pt1020_Medium = cms.vdouble(-2, -2, -2, -2, -2),
Pt2030_MET = cms.vdouble(-0.2, -0.2, -0.2, 0.1),
Pt010_Loose = cms.vdouble(-2, -2, -2, -2, -2),
Pt3050_Loose = cms.vdouble(-2, -2, -2, -2, -2),
Pt3050_Tight = cms.vdouble(-2, -2, -2, -2, -2)
),
impactParTkThreshold = cms.double(1.0)
)
process.tauEFactory = cms.PSet(
leg2Collection = cms.InputTag("cmgElectronSel"),
leg1Collection = cms.InputTag("cmgTauSel"),
metCollection = cms.InputTag("cmgPFMET")
)
process.tauEleCuts = cms.PSet(
baseline = cms.PSet(
tauLeg = cms.PSet(
iso = cms.string('leg1().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
kinematics = cms.PSet(
eta = cms.string('abs(leg1().eta())<2.3'),
pt = cms.string('leg1().pt()>15.0')
),
id = cms.PSet(
decay = cms.string('leg1().tauID("decayModeFinding")')
)
),
eleLeg = cms.PSet(
kinematics = cms.PSet(
eta = cms.string('abs(leg2().eta())<2.1'),
pt = cms.string('leg2().pt()>20.0')
),
ID = cms.PSet(
hitsnum = cms.string('leg2().numberOfHits==0'),
mvaID = cms.string('(abs(leg2().sourcePtr().superCluster().eta())<0.8 && leg2().mvaNonTrigV0() > 0.925) || (abs(leg2().sourcePtr().superCluster().eta())>0.8 && abs(leg2().sourcePtr().superCluster().eta())<1.479 && leg2().mvaNonTrigV0() > 0.975) || (abs(leg2().sourcePtr().superCluster().eta())>1.479 && leg2().mvaNonTrigV0() > 0.985)'),
convVeto = cms.string('leg2().passConversionVeto()!=0')
)
)
)
)
process.tauMuCuts = cms.PSet(
caloMuVeto = cms.string('leg1().eOverP()>0.2'),
baseline = cms.PSet(
tauLeg = cms.PSet(
iso = cms.string('leg1().tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 10.0'),
kinematics = cms.PSet(
eta = cms.string('abs(leg1().eta())<2.3'),
pt = cms.string('leg1().pt()>15.0')
),
id = cms.PSet(
muRejection = cms.string('leg1().tauID("againstMuonTight") > 0.5'),
decay = cms.string('leg1().tauID("decayModeFinding")')
)
),
muLeg = cms.PSet(
kinematics = cms.PSet(
eta = cms.string('abs(leg2().eta())<2.1'),
pt = cms.string('leg2().pt()>17.0')
)
),
mass = cms.string('mass()>10')
)
)
process.tauMuFactory = cms.PSet(
leg2Collection = cms.InputTag("cmgMuonSel"),
leg1Collection = cms.InputTag("cmgTauSel"),
metCollection = cms.InputTag("cmgPFMET")
)
process.schedule = cms.Schedule(*[ process.diTauPath, process.outpath ])
| [
"[email protected]"
]
| |
0fdfbfd787cc27d2b00a01b2ccef13060671930d | 81835671049fd32f173d438ca85a8e81482bc76a | /src/muypicky/settings/local.py | 233d240db6568b445a0afe234a1df87000ee4b57 | []
| no_license | laura8857/django_restaurant | acd344423bd71194f6763e899edaf94955bf06ce | f9882d3d2d8998c1e99a7ecb706be66fab8c4425 | refs/heads/master | 2021-09-09T07:42:25.636140 | 2017-12-31T07:03:36 | 2017-12-31T07:03:36 | 111,422,444 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,119 | py | """
Django settings for muypicky project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'p!a0#ndo^-lp14=odpiw=cs@(+6a-k67#y&5hw5wnsk$px#--h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'muypicky.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'muypicky.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
ccd33ffca33b1eb475dfbf98b4abf2ae6d4a4905 | 4d6af0afb8c4a9521838dd4f8cd09a57fd2ece08 | /onlineShop/venv/bin/flask | 2d4a8d96bf7aaecf88e21fa2443f666e6cf8c0b2 | []
| no_license | candyer/web-exercise | a8ff8dbc1511343e29716fd932480a5c00751de5 | cab628e67bbf7230a1bd27eca871ed8353f992fe | refs/heads/master | 2023-01-12T13:30:06.323234 | 2019-08-18T20:29:43 | 2019-08-18T20:29:43 | 53,623,102 | 1 | 1 | null | 2023-01-04T15:13:14 | 2016-03-10T22:55:48 | Python | UTF-8 | Python | false | false | 259 | #!/Users/candy/Documents/web-exercise/online_shop/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
ad99634d0137842d7042a688010637b825ee29ce | 46ae8264edb9098c9875d2a0a508bc071201ec8b | /res/scripts/client/gui/scaleform/daapi/view/metapromopremiumigrwindowmeta.py | 814c7e9466ff495e9f3a447d32cb6863c4598fcd | []
| no_license | Difrex/wotsdk | 1fc6156e07e3a5302e6f78eafdea9bec4c897cfb | 510a34c67b8f4c02168a9830d23f5b00068d155b | refs/heads/master | 2021-01-01T19:12:03.592888 | 2016-10-08T12:06:04 | 2016-10-08T12:06:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 899 | py | # Embedded file name: scripts/client/gui/Scaleform/daapi/view/meta/PromoPremiumIgrWindowMeta.py
from gui.Scaleform.framework.entities.abstract.AbstractWindowView import AbstractWindowView
class PromoPremiumIgrWindowMeta(AbstractWindowView):
"""
DO NOT MODIFY!
Generated with yaml.
__author__ = 'yaml_processor'
@extends AbstractWindowView
"""
def as_setTitleS(self, value):
if self._isDAAPIInited():
return self.flashObject.as_setTitle(value)
def as_setTextS(self, value):
if self._isDAAPIInited():
return self.flashObject.as_setText(value)
def as_setWindowTitleS(self, value):
if self._isDAAPIInited():
return self.flashObject.as_setWindowTitle(value)
def as_setApplyButtonLabelS(self, value):
if self._isDAAPIInited():
return self.flashObject.as_setApplyButtonLabel(value) | [
"[email protected]"
]
| |
cfed4084fec538d16981ee31a7f600850dcf0d86 | bc23dd0952a7235d2a63f59c83a4a283bbfa49f8 | /backend/manage.py | 5f19f0c6d16370616739c0eaa3ae0a8a5c26e630 | []
| no_license | crowdbotics-apps/dee-world-originals-3621 | 94e8af26153de836e9e313e84f3a7a39f21deb66 | 833a299efb37158bde7446d287ffccaf57f3c829 | refs/heads/master | 2020-05-25T11:09:59.009295 | 2019-05-21T06:24:38 | 2019-05-21T06:24:38 | 187,774,566 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 822 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dee_world_originals_3621.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"[email protected]"
]
| |
f665485f49d5e9f3218ee0ee9cc87b2b65a9a74d | 8ce70bf719616200f623629e1c3fca20e9f3d369 | /GetSharedExternallyDriveACLs.py | 06736e36300dc2838dfc0a696d4501ec625e9bf1 | []
| no_license | glmyers/GAM-Scripts3 | 055f1f398971d2aa630372e1594aeea960f7b104 | 115e1ae811b72570c674a0b0284494f57660ad79 | refs/heads/master | 2023-08-02T01:41:09.853011 | 2021-09-15T19:36:37 | 2021-09-15T19:36:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,387 | py | #!/usr/bin/env python3
"""
# Purpose: For a Google Drive User(s), show all drive file ACLs for files shared outside of a list of specified domains
# You specify a list of domains, DOMAIN_LIST, and indicate whether this list is exclusive/inclusive
# EXCLUSIVE_DOMAINS = True: exclude domains in DOMAIN_LIST from the output
# EXCLUSIVE_DOMAINS = False: include domains in DOMAIN_LIST in the output
# You can include/exclude shares to anyone in the ouput
# INCLUDE_ANYONE = True: include shares to anyone in the output
# INCLUDE_ANYONE = False: exclude shares to anyone from the output
# Note: This script can use Basic or Advanced GAM:
# https://github.com/jay0lee/GAM
# https://github.com/taers232c/GAMADV-XTD3
# Customize: Set DOMAIN_LIST, EXCLUSIVE_DOMAINS, INCLUDE_ANYONE
# Python: Use python or python3 below as appropriate to your system; verify that you have version 3
# $ python -V or python3 -V
# Python 3.x.y
# Usage:
# 1: Get ACLs for all files, if you don't want all users, replace all users with your user selection in the command below
# $ Basic GAM: gam all users print filelist id title permissions owners > filelistperms.csv
# $ Advanced GAM: You can have GAM do some pre-filtering
# $ EXCLUSIVE_DOMAINS = True:
# Add the following clause to the command for each domain in DOMAIN_LIST: pm not domain domainx.com em
# $ EXCLUSIVE_DOMAINS = False:
# Add the following clause to the command for each domain in DOMAIN_LIST: pm domain domainx.com em
# $ INCLUDE_ANYONE = True
# Add the following clause to the command: pm type anyone em
# $ gam config auto_batch_min 1 redirect csv ./filelistperms.csv multiprocess all users print filelist fields id,name,permissions,owners.emailaddress <pm clauses>
# 2: From that list of ACLs, output a CSV file with headers "Owner,driveFileId,driveFileTitle,permissionId,role,type,emailAddress,domain"
# that lists the driveFileIds and permissionIds for all ACLs shared with the selected domains.
# (n.b., role, type, emailAddress, domain and driveFileTitle are not used in the next step, they are included for documentation purposes)
# $ python3 GetSharedExternallyDriveACLs.py filelistperms.csv deleteperms.csv
# 3: Inspect deleteperms.csv, verify that it makes sense and then proceed
# 4: If desired, delete the ACLs
# $ gam csv ./deleteperms.csv gam user "~Owner" delete drivefileacl "~driveFileId" "~permissionId"
"""
import csv
import re
import sys
FILE_NAME = 'name'
ALT_FILE_NAME = 'title'
# Substitute your domain(s) in the list below, e.g., DOMAIN_LIST = ['domain.com',] DOMAIN_LIST = ['domain1.com', 'domain2.com',]
DOMAIN_LIST = ['domain.com']
# EXCLUSIVE_DOMAINS = True: You're interested only in domains not in DOMAIN_LIST which would typically be your internal domains
# EXCLUSIVE_DOMAINS = False: You're interested only in domains in DOMAIN_LIST which would typically be external domains
# Indicate whether the list is exclusive or inclusive
EXCLUSIVE_DOMAINS = True
# Indicate whether shares to anyone should be included
INCLUDE_ANYONE = True
QUOTE_CHAR = '"' # Adjust as needed
LINE_TERMINATOR = '\n' # On Windows, you probably want '\r\n'
PERMISSIONS_N_TYPE = re.compile(r"permissions.(\d+).type")
if (len(sys.argv) > 2) and (sys.argv[2] != '-'):
outputFile = open(sys.argv[2], 'w', encoding='utf-8', newline='')
else:
outputFile = sys.stdout
outputCSV = csv.DictWriter(outputFile, ['Owner', 'driveFileId', 'driveFileTitle',
'permissionId', 'role', 'type', 'emailAddress', 'domain'],
lineterminator=LINE_TERMINATOR, quotechar=QUOTE_CHAR)
outputCSV.writeheader()
if (len(sys.argv) > 1) and (sys.argv[1] != '-'):
inputFile = open(sys.argv[1], 'r', encoding='utf-8')
else:
inputFile = sys.stdin
for row in csv.DictReader(inputFile, quotechar=QUOTE_CHAR):
for k, v in iter(row.items()):
mg = PERMISSIONS_N_TYPE.match(k)
if mg and v:
permissions_N = mg.group(1)
if v == 'domain':
emailAddress = ''
domain = row[f'permissions.{permissions_N}.domain']
elif v in ['user', 'group']:
if row.get(f'permissions.{permissions_N}.deleted') == 'True':
continue
emailAddress = row[f'permissions.{permissions_N}.emailAddress']
domain = emailAddress[emailAddress.find('@')+1:]
else: #anyone
if not INCLUDE_ANYONE:
continue
emailAddress = ''
domain = ''
if ((row[f'permissions.{permissions_N}.role'] != 'owner') and
((v == 'anyone') or # Can only be true if INCLUDE_ANYONE = True
(EXCLUSIVE_DOMAINS and domain not in DOMAIN_LIST) or
(not EXCLUSIVE_DOMAINS and domain in DOMAIN_LIST))):
outputCSV.writerow({'Owner': row['owners.0.emailAddress'],
'driveFileId': row['id'],
'driveFileTitle': row.get(FILE_NAME, row.get(ALT_FILE_NAME, 'Unknown')),
'permissionId': f'id:{row[f"permissions.{permissions_N}.id"]}',
'role': row[f'permissions.{permissions_N}.role'],
'type': v,
'emailAddress': emailAddress,
'domain': domain})
if inputFile != sys.stdin:
inputFile.close()
if outputFile != sys.stdout:
outputFile.close()
| [
"[email protected]"
]
| |
bf7de811bfea6dda3995b659cf1eefa05341ded2 | 74472ae20fa049a82b20b8ba7ea80394c43d5a01 | /messenger/urls.py | 14204e888b18dbed10f155dcb11b12b8c5abf853 | []
| no_license | Adelgh/Projet | ad2d02e92f7ab3adef4c2646ba0c0838bc2e799e | 1e1918f5ee47312dce47e2ae384c0168ffce7664 | refs/heads/master | 2021-01-15T08:27:42.502063 | 2017-08-28T14:34:16 | 2017-08-28T14:34:16 | 99,567,683 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 686 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.inbox, name='inbox'),
url(r'^new/$', views.new, name='new_message'),
url(r'^send/$', views.send, name='send_message'),
url(r'^send1/$', views.send1, name='send_message1'),
url(r'^delete/$', views.delete, name='delete_message'),
url(r'^users/$', views.users, name='users_message'),
url(r'^check/$', views.check, name='check_message'),
url(r'^filter/$', views.filter, name='filter'),
url(r'^latest/$', views.latest, name='latest_message'),
url(r'^upload/$', views.upload, name='upload'),
url(r'^(?P<username>[^/]+)/$', views.messages, name='messages'),
] | [
"[email protected]"
]
| |
69a5cd6ca0707fa39f1ba4e4b1b627696dc2efe1 | fe6f6d11dde2a3205ae9758c7d4eb1f824b84102 | /venv/lib/python2.7/site-packages/flaskext/script.py | dbe746c0a81ccab2e592f3b48707ca177a080587 | [
"MIT"
]
| permissive | mutaihillary/mycalculator | ebf12a5ac90cb97c268b05606c675d64e7ccf8a6 | 55685dd7c968861f18ae0701129f5af2bc682d67 | refs/heads/master | 2023-01-10T14:56:11.780045 | 2016-09-20T12:30:21 | 2016-09-20T12:30:21 | 68,580,251 | 0 | 0 | MIT | 2022-12-26T20:15:21 | 2016-09-19T07:27:48 | Python | UTF-8 | Python | false | false | 20,546 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
import os
import sys
import code
import string
import getpass
import inspect
import warnings
import argparse
from flask import Flask, _request_ctx_stack
__all__ = ["Command", "Shell", "Server", "Manager", "Option",
"prompt", "prompt_pass", "prompt_bool", "prompt_choices"]
def prompt(name, default=None):
"""
Grab user input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = raw_input(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_pass(name, default=None):
"""
Grabs hidden (password) input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = getpass.getpass(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_bool(name, default=False, yes_choices=None, no_choices=None):
"""
Grabs user input from command line and converts to boolean
value.
:param name: prompt text
:param default: default value if no input provided.
:param yes_choices: default 'y', 'yes', '1', 'on', 'true', 't'
:param no_choices: default 'n', 'no', '0', 'off', 'false', 'f'
"""
yes_choices = yes_choices or ('y', 'yes', '1', 'on', 'true', 't')
no_choices = no_choices or ('n', 'no', '0', 'off', 'false', 'f')
while True:
rv = prompt(name + '?', default and yes_choices[0] or no_choices[0])
if not rv:
return default
if rv.lower() in yes_choices:
return True
elif rv.lower() in no_choices:
return False
def prompt_choices(name, choices, default=None,
resolve=string.lower, no_choice=('none',)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices. Choices may be
single strings or (key, value) tuples.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
_choices = []
options = []
for choice in choices:
if isinstance(choice, basestring):
options.append(choice)
else:
options.append("%s [%s]" % (choice[1], choice[0]))
choice = choice[0]
_choices.append(choice)
while True:
rv = prompt(name + '? - (%s)' % ', '.join(options), default)
if not rv:
return default
rv = resolve(rv)
if rv in no_choice:
return None
if rv in _choices:
return rv
class Option(object):
"""
Stores positional and optional arguments for `ArgumentParser.add_argument
<http://argparse.googlecode.com/svn/trunk/doc/add_argument.html>`_.
:param name_or_flags: Either a name or a list of option strings,
e.g. foo or -f, --foo
:param action: The basic type of action to be taken when this argument
is encountered at the command-line.
:param nargs: The number of command-line arguments that should be consumed.
:param const: A constant value required by some action and nargs selections.
:param default: The value produced if the argument is absent from
the command-line.
:param type: The type to which the command-line arg should be converted.
:param choices: A container of the allowable values for the argument.
:param required: Whether or not the command-line option may be omitted
(optionals only).
:param help: A brief description of what the argument does.
:param metavar: A name for the argument in usage messages.
:param dest: The name of the attribute to be added to the object
returned by parse_args().
"""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class Command(object):
"""
Base class for creating commands.
"""
option_list = []
@property
def description(self):
description = self.__doc__ or ''
return description.strip()
def add_option(self, option):
"""
Adds Option to option list.
"""
self.option_list.append(option)
def get_options(self):
"""
By default, returns self.option_list.Override if you
need to do instance-specific configuration.
"""
return self.option_list
def create_parser(self, prog):
parser = argparse.ArgumentParser(prog=prog,
description=self.description)
for option in self.get_options():
parser.add_argument(*option.args, **option.kwargs)
return parser
def handle(self, app, *args, **kwargs):
"""
Handles the command with given app. Default behaviour is to call within
a test request context.
"""
with app.test_request_context():
self.run(*args, **kwargs)
def run(self):
"""
Runs a command. This must be implemented by the subclass. Should take
arguments as configured by the Command options.
"""
raise NotImplementedError
def prompt(self, name, default=None):
warnings.warn_explicit(
"Command.prompt is deprecated, use prompt() function instead")
prompt(name, default)
def prompt_pass(self, name, default=None):
warnings.warn_explicit(
"Command.prompt_pass is deprecated, use prompt_pass() function instead")
prompt_pass(name, default)
def prompt_bool(self, name, default=False):
warnings.warn_explicit(
"Command.prompt_bool is deprecated, use prompt_bool() function instead")
prompt_bool(name, default)
def prompt_choices(self, name, choices, default=None):
warnings.warn_explicit(
"Command.choices is deprecated, use prompt_choices() function instead")
prompt_choices(name, choices, default)
class Shell(Command):
"""
Runs a Python shell inside Flask application context.
:param banner: banner appearing at top of shell when started
:param make_context: a callable returning a dict of variables
used in the shell namespace. By default
returns a dict consisting of just the app.
:param use_ipython: use IPython shell if available, ignore if not.
The IPython shell can be turned off in command
line by passing the **--no-ipython** flag.
"""
banner = ''
description = 'Runs a Python shell inside Flask application context.'
def __init__(self, banner=None, make_context=None, use_ipython=True):
self.banner = banner or self.banner
self.use_ipython = use_ipython
if make_context is None:
make_context = lambda: dict(app=_request_ctx_stack.top.app)
self.make_context = make_context
def get_options(self):
return (
Option('--no-ipython',
action="store_true",
dest='no_ipython',
default=not(self.use_ipython)),)
def get_context(self):
"""
Returns a dict of context variables added to the shell namespace.
"""
return self.make_context()
def run(self, no_ipython):
"""
Runs the shell. Unless no_ipython is True or use_python is False
then runs IPython shell if that is installed.
"""
context = self.get_context()
if not no_ipython:
try:
import IPython
try:
sh = IPython.Shell.IPShellEmbed(banner=self.banner)
except AttributeError:
sh = IPython.frontend.terminal.embed.InteractiveShellEmbed(banner1=self.banner)
sh(global_ns=dict(), local_ns=context)
return
except ImportError:
pass
code.interact(self.banner, local=context)
class Server(Command):
    """
    Runs the Flask development server i.e. app.run()
    :param host: server host
    :param port: server port
    :param use_debugger: if False, will no longer use Werkzeug debugger.
                         This can be overriden in the command line
                         by passing the **-d** flag.
    :param use_reloader: if False, will no longer use auto-reloader.
                         This can be overriden in the command line by
                         passing the **-r** flag.
    :param options: :func:`werkzeug.run_simple` options.
    """
    description = 'Runs the Flask development server i.e. app.run()'
    def __init__(self, host='127.0.0.1', port=5000, use_debugger=True,
        use_reloader=True, **options):
        self.port = port
        self.host = host
        self.use_debugger = use_debugger
        self.use_reloader = use_reloader
        # Extra keyword arguments are forwarded verbatim to app.run().
        self.server_options = options
    def get_options(self):
        # Host/port are always configurable from the command line.
        options = (
            Option('-t', '--host',
                   dest='host',
                   default=self.host),
            Option('-p', '--port',
                   dest='port',
                   type=int,
                   default=self.port),
        )
        # The debugger/reloader flags are generated so that the command-line
        # switch always *toggles* the constructor default: if the feature is
        # on by default, a --no-* flag is offered, otherwise an enabling flag.
        if self.use_debugger:
            options += (Option('-d', '--no-debug',
                               action='store_false',
                               dest='use_debugger',
                               default=self.use_debugger),)
        else:
            options += (Option('-d', '--debug',
                               action='store_true',
                               dest='use_debugger',
                               default=self.use_debugger),)
        if self.use_reloader:
            options += (Option('-r', '--no-reload',
                               action='store_false',
                               dest='use_reloader',
                               default=self.use_reloader),)
        else:
            options += (Option('-r', '--reload',
                               action='store_true',
                               dest='use_reloader',
                               default=self.use_reloader),)
        return options
    def handle(self, app, host, port, use_debugger, use_reloader):
        # we don't need to run the server in request context
        # so just run it directly
        app.run(host=host,
                port=port,
                debug=use_debugger,
                use_debugger=use_debugger,
                use_reloader=use_reloader,
                **self.server_options)
class InvalidCommand(Exception):
    """Raised when the command line names a command that is not registered."""
class Manager(object):
    """
    Controller class for handling a set of commands.
    Typical usage::
        class Print(Command):
            def run(self):
                print "hello"
        app = Flask(__name__)
        manager = Manager(app)
        manager.add_command("print", Print())
        if __name__ == "__main__":
            manager.run()
    On command line::
        python manage.py print
        > hello
    :param app: Flask instance or callable returning a Flask instance.
    :param with_default_commands: load commands **runserver** and **shell**
                                  by default.
    :param usage: optional usage string prepended to the command listing.
    """
    def __init__(self, app, with_default_commands=True, usage=None):
        self.app = app
        # name -> Command instance registry.
        self._commands = dict()
        # Application-wide Option instances (see add_option).
        self._options = list()
        if with_default_commands:
            self.add_default_commands()
        self.usage = usage
    def add_default_commands(self):
        """
        Adds the shell and runserver default commands. To override these
        simply add your own equivalents using add_command or decorators.
        """
        self.add_command("shell", Shell())
        self.add_command("runserver", Server())
    def create_app(self, **kwargs):
        # Returns the app either directly (instance) or via the factory
        # callable the manager was constructed with.
        if isinstance(self.app, Flask):
            return self.app
        return self.app(**kwargs)
    def create_parser(self, prog):
        """
        Creates an ArgumentParser instance from options returned
        by get_options(), and a subparser for the given command.
        """
        prog = os.path.basename(prog)
        parser = argparse.ArgumentParser(prog=prog)
        for option in self.get_options():
            parser.add_argument(*option.args, **option.kwargs)
        return parser
    def get_options(self):
        # Application-wide options registered via add_option().
        return self._options
    def add_option(self, *args, **kwargs):
        """
        Adds an application-wide option. This is useful if you want to set
        variables applying to the application setup, rather than individual
        commands.
        For this to work, the manager must be initialized with a factory
        function rather than an instance. Otherwise any options you set will
        be ignored.
        The arguments are then passed to your function, e.g.::
            def create_app(config=None):
                app = Flask(__name__)
                if config:
                    app.config.from_pyfile(config)
                return app
            manager = Manager(create_app)
            manager.add_option("-c", "--config", dest="config", required=False)
        and are evoked like this::
            > python manage.py -c dev.cfg mycommand
        Any manager options passed in the command line will not be passed to
        the command.
        Arguments for this function are the same as for the Option class.
        """
        self._options.append(Option(*args, **kwargs))
    def command(self, func):
        """
        Decorator that adds a command function to the registry; command-line
        options are derived from the function's signature.
        :param func: command function. Arguments depend on the options.
        """
        args, varargs, keywords, defaults = inspect.getargspec(func)
        options = []
        # first arg is always "app" : ignore
        defaults = defaults or []
        # Map trailing positional args onto their defaults: defaults align
        # with the *end* of the argument list, hence the double reversal.
        kwargs = dict(zip(*[reversed(l) for l in (args, defaults)]))
        for arg in args:
            if arg in kwargs:
                # Argument has a default -> exposed as an optional flag.
                default=kwargs[arg]
                if isinstance(default, bool):
                    options.append(Option('-%s' % arg[0],
                                          '--%s' % arg,
                                          action="store_true",
                                          dest=arg,
                                          required=False,
                                          default=default))
                else:
                    options.append(Option('-%s' % arg[0],
                                          '--%s' % arg,
                                          dest=arg,
                                          type=unicode,
                                          required=False,
                                          default=default))
            else:
                # No default -> required positional argument.
                options.append(Option(arg, type=unicode))
        command = Command()
        command.run = func
        command.__doc__ = func.__doc__
        command.option_list = options
        self.add_command(func.__name__, command)
        return func
    def shell(self, func):
        """
        Decorator that wraps function in shell command. This is equivalent to::
            def _make_context(app):
                return dict(app=app)
            manager.add_command("shell", Shell(make_context=_make_context))
        The decorated function should take a single "app" argument, and return
        a dict.
        For more sophisticated usage use the Shell class.
        """
        self.add_command('shell', Shell(make_context=func))
        return func
    def option(self, *args, **kwargs):
        """
        Decorator to add an option to a function. Automatically registers the
        function - do not use together with ``@command``. You can add as many
        ``@option`` calls as you like, for example::
            @option('-n', '--name', dest='name')
            @option('-u', '--url', dest='url')
            def hello(name, url):
                print "hello", name, url
        Takes the same arguments as the ``Option`` constructor.
        """
        option = Option(*args, **kwargs)
        def decorate(func):
            name = func.__name__
            # First @option call for this function registers the command;
            # later calls only append their option.
            if name not in self._commands:
                command = Command()
                command.run = func
                command.__doc__ = func.__doc__
                command.option_list = []
                self.add_command(name, command)
            self._commands[name].option_list.append(option)
            return func
        return decorate
    def add_command(self, name, command):
        """
        Adds command to registry.
        :param name: command name used on the command line.
        :param command: Command instance
        """
        self._commands[name] = command
    def get_usage(self):
        """
        Returns string consisting of all commands and their
        descriptions.
        """
        # Pad command names so descriptions line up in a column.
        pad = max(map(len, self._commands.iterkeys())) + 2
        format = ' %%- %ds%%s' % pad
        rv = []
        if self.usage:
            rv.append(self.usage)
        for name, command in self._commands.iteritems():
            # NOTE(review): this first assignment is dead -- it is always
            # overwritten by the formatted line two statements below.
            usage = name
            description = command.description or ''
            usage = format % (name, description)
            rv.append(usage)
        return "\n".join(rv)
    def print_usage(self):
        """
        Prints result of get_usage()
        """
        print self.get_usage()
    def handle(self, prog, name, args=None):
        # Dispatches a single command: resolve it, parse application-wide
        # options first, then the command's own options, then run it.
        args = list(args or [])
        try:
            command = self._commands[name]
        except KeyError:
            raise InvalidCommand, "Command %s not found" % name
        help_args = ('-h', '--help')
        # remove -h from args if present, and add to remaining args
        # (so --help is handled by the command parser, not the app parser)
        app_args = [a for a in args if a not in help_args]
        app_parser = self.create_parser(prog)
        app_namespace, remaining_args = app_parser.parse_known_args(app_args)
        for arg in help_args:
            if arg in args:
                remaining_args.append(arg)
        command_parser = command.create_parser(prog + " " + name)
        if getattr(command, 'capture_all_args', False):
            # Command wants unparsed leftovers as a single positional list.
            command_namespace, unparsed_args = \
                command_parser.parse_known_args(remaining_args)
            positional_args = [unparsed_args]
        else:
            command_namespace = command_parser.parse_args(remaining_args)
            positional_args = []
        app = self.create_app(**app_namespace.__dict__)
        command.handle(app, *positional_args, **command_namespace.__dict__)
    def run(self, commands=None, default_command=None):
        """
        Prepares manager to receive command line input. Usually run
        inside "if __name__ == "__main__" block in a Python script.
        :param commands: optional dict of commands. Appended to any commands
                         added using add_command().
        :param default_command: name of default command to run if no
                                arguments passed.
        """
        if commands:
            self._commands.update(commands)
        try:
            try:
                command = sys.argv[1]
            except IndexError:
                command = default_command
            if command is None:
                raise InvalidCommand, "Please provide a command"
            self.handle(sys.argv[0], command, sys.argv[2:])
            sys.exit(0)
        except InvalidCommand, e:
            # Unknown/missing command: show the error plus the usage listing.
            print e
            self.print_usage()
            sys.exit(1)
| [
"[email protected]"
]
| |
8c9e26db66935091b5ff391425547f99e9a0a6e4 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /9AMT6SC4Jz8tExihs_23.py | 306863290370f89d947dad424359f11ee3c866c3 | []
| no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py |
from itertools import product
def generate_nonconsecutive(n):
    """Return all n-bit binary strings with no two consecutive 1s.

    The strings are space-separated and in ascending binary order,
    e.g. ``generate_nonconsecutive(3) == "000 001 010 100 101"``.

    :param n: number of bits per string (n == 0 yields the empty string).
    """
    # product("01", repeat=n) yields all bit tuples in ascending order;
    # join each tuple directly (no intermediate list needed), then keep
    # only those without the forbidden "11" substring.
    bitstrings = ("".join(bits) for bits in product("01", repeat=n))
    return " ".join(s for s in bitstrings if "11" not in s)
| [
"[email protected]"
]
| |
6ddca35b1612d57330fd4fc592c1f7de0f2633d3 | fffbf9e1ac40fdbd77f5b6baf34662478da8162e | /library/setup.py | fc56d3fee0faa0c1dde3eb53f3b7cd88eb98ddf7 | [
"MIT"
]
| permissive | yorkrobotlab/inky | aa4c41ce17e8e47c3f6b2a16368560be6c66f051 | 65f9abb7cb09e2a9d9b31e484a576d230d8c28a1 | refs/heads/master | 2022-10-03T04:43:19.189473 | 2020-03-11T11:21:04 | 2020-03-11T11:21:04 | 259,630,799 | 1 | 0 | MIT | 2020-04-28T12:29:16 | 2020-04-28T12:29:15 | null | UTF-8 | Python | false | false | 2,147 | py | #!/usr/bin/env python
"""
Copyright (c) 2017 Pimoroni.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from setuptools import setup
# Trove classifiers describing project maturity, platform and supported
# Python versions (see https://pypi.org/classifiers/).
classifiers = [
    'Development Status :: 5 - Production/Stable',
    'Operating System :: POSIX :: Linux',
    'License :: OSI Approved :: MIT License',
    'Intended Audience :: Developers',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Topic :: Software Development',
    'Topic :: System :: Hardware'
]
# Package metadata and build configuration for the Inky pHAT driver.
setup(
    name='inky',
    version='0.0.6',
    author='Philip Howard',
    author_email='[email protected]',
    description='Inky pHAT Driver',
    # Long description is assembled from the README and changelog at build time.
    long_description=open('README.rst').read() + '\n' + open('CHANGELOG.txt').read(),
    license='MIT',
    keywords='Raspberry Pi e-paper display driver',
    url='http://www.pimoroni.com',
    project_urls={'GitHub': 'https://www.github.com/pimoroni/inky'},
    classifiers=classifiers,
    py_modules=[],
    packages=['inky'],
    include_package_data=True,
    install_requires=['numpy'],
    # Optional hardware backend dependencies (only needed on a Raspberry Pi).
    extras_require={
        'rpi-gpio-output': ['spidev', 'RPi.GPIO', 'smbus2']
    }
)
| [
"[email protected]"
]
| |
6ec3ed2310d22d5053b1b70b6f71702a2d754566 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2622/60761/241627.py | eeb5762e38fb07e7a8d25eeacf6434a9ae034580 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | numlist=input("")
numset=set(numlist)
for num in numset:
if numlist.count(num)>(len(numlist)//2):
print(num) | [
"[email protected]"
]
| |
8c73ab259674a5071d772e7bf8fe594b8cc04ed7 | 531904363a322e5c9df323254213d1d66d5ef9a5 | /lib/c4dtools/utils.py | 021f462b384eba0a0e85f1fe8020e79b5c0842db | [
"MIT"
]
| permissive | AlbertAlomar/c4ddev | 0cf0e29c0b8d3396f578b595ad1145be08ab5562 | 3c97f00198b734d3af5609f11e291d313abedb4c | refs/heads/master | 2021-05-09T02:32:40.502947 | 2018-01-28T00:02:36 | 2018-01-28T00:02:36 | 119,212,951 | 0 | 0 | null | 2018-01-27T23:57:38 | 2018-01-27T23:57:37 | null | UTF-8 | Python | false | false | 21,642 | py | # -*- coding: utf8; -*-
#
# Copyright (C) 2015 Niklas Rosenstein
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
''' Common utility functions related to the Cinema 4D API. '''
import c4d
import warnings
OrderedDict = require('./structures/ordereddict')
# General Helpers
def serial_info():
  ''' Return ``(sinfo, is_multi)`` describing the serial of the running
  Cinema 4D instance.

  :return: A tuple of ``(sinfo, is_multi)`` where ``sinfo`` is the
    dictionary returned by :func:`c4d.GeGetSerialInfo` and ``is_multi``
    is True when a multi-license serial is in use.
  '''
  # Prefer the multi-license serial; fall back to the single-seat one
  # when no multi-license number is present.
  multi = c4d.GeGetSerialInfo(c4d.SERIALINFO_MULTILICENSE)
  if multi['nr']:
    return multi, True
  return c4d.GeGetSerialInfo(c4d.SERIALINFO_CINEMA4D), False
def flush_console():
  ''' Flush the Cinema 4D scripting console. '''
  # NOTE(review): 13957 is assumed to be the built-in "Console Clear"
  # command id -- confirm against the C4D command id list.
  c4d.CallCommand(13957)
def update_viewport():
  ''' Shortcut for using :func:`c4d.DrawViews` to update the Cinema 4D
  viewport. Check the source code for the flags that are passed to the
  function. '''
  # Redraw only the active view, synchronously (NO_THREAD), without
  # level-of-detail reduction, and allow interruption (STATICBREAK).
  return c4d.DrawViews(
    c4d.DRAWFLAGS_ONLY_ACTIVE_VIEW | c4d.DRAWFLAGS_NO_THREAD |
    c4d.DRAWFLAGS_NO_REDUCTION | c4d.DRAWFLAGS_STATICBREAK)
# Iterators
def walk_hierarchy(node, yield_depth=False, _depth=0):
  ''' Iterator for walking over the hierarchy of a :class:`c4d.BaseList2D`
  node. *node* can also be a list, in which case all items of the list are
  walked. If *yield_depth* is True, a tuple with the second item being
  the index of the depth is yielded instead of only the nodes.
  .. code-block:: python
    for obj in walk_hierarchy(doc.GetObjects()):
      print(obj.GetName())
  :param node: A :class:`c4d.BaseList2D` object or :class:`list` of such.
  :param yield_depth: If True, the generator yields tuples of
    ``(node, depth)`` instead of only the current node.
  :return: A generator yielding the nodes of the hierarchy, or tuples of
    such.
  '''
  if isinstance(node, c4d.C4DAtom):
    if yield_depth:
      yield node, _depth
    else:
      yield node
    # Recurse into children one level deeper.
    # (Fixed: recursive calls previously referenced an undefined name
    # ``walk``, raising a NameError for any node with children.)
    for child in node.GetChildren():
      for __ in walk_hierarchy(child, yield_depth, _depth + 1):
        yield __
  else:
    # *node* is a list of nodes: each item is a root at the *current*
    # depth (fixed: previously started list items at depth + 1).
    for item in node:
      for __ in walk_hierarchy(item, yield_depth, _depth):
        yield __
def walk_timeline(doc, start, end, update=True):
  ''' Iterate over each frame in the document from *start* to *end*
  (inclusive) and yield the current frame number while redrawing the
  viewport if *update* is True. The document time will be reset to the
  original time at the end of the iteration.
  .. code-block:: python
    for frame in walk_timeline(doc, 0, 100):
      pass # process current frame here
  :param doc: The :class:`c4d.BaseDocument` to iterate in.
  :param start: The start time, either :class:`c4d.BaseTime` or a frame
    number.
  :param end: The end time, either :class:`c4d.BaseTime` or a frame
    number.
  :param update: If True, the viewport is updated with
    :func:`update_viewport` and :func:`c4d.GeSyncMessage` before the
    current frame number is passed to the caller. This is usually desired.
  '''
  fps = doc.GetFps()
  # Remember the current time so it can be restored afterwards.
  time = doc.GetTime()
  # Normalize BaseTime arguments to plain frame numbers.
  if isinstance(start, c4d.BaseTime):
    start = start.GetFrame(fps)
  if isinstance(end, c4d.BaseTime):
    end = end.GetFrame(fps)
  for frame in xrange(start, end + 1):  # end frame is inclusive
    doc.SetTime(c4d.BaseTime(frame, fps))
    if update:
      update_viewport()
      c4d.GeSyncMessage(c4d.EVMSG_TIMECHANGED)
    yield frame
  # Restore the original document time and refresh once more.
  doc.SetTime(time)
  if update:
    update_viewport()
    c4d.GeSyncMessage(c4d.EVMSG_TIMECHANGED)
def walk_container(bc):
  ''' Iterate over ``(id, value)`` pairs of a :class:`c4d.BaseContainer`.
  Unlike :meth:`~c4d.BaseContainer.__iter__`, this walks via
  :meth:`~c4d.BaseContainer.GetIndexId`, which avoids copying
  sub-containers and skips entries whose datatype can not be represented
  in Python (instead of raising :class:`AttributeError`).
  Restriction: containers holding multiple entries under the same ID only
  yield the first value for that ID. '''
  index = 0
  while True:
    ident = bc.GetIndexId(index)
    if ident == c4d.NOTOK:
      break
    try:
      yield ident, bc[ident]
    except AttributeError:
      # Datatype not representable in Python -- skip this entry.
      pass
    index += 1
def walk_shaders(node):
  ''' Iterate over the shader-list of a :class:`c4d.BaseList2D` node.
  The successor is fetched *before* each yield, so the caller may safely
  remove the current shader during iteration. '''
  current = node.GetFirstShader()
  while current:
    successor = current.GetNext()
    yield current
    current = successor
# Document Helpers
def remove_document(doc, new_active_doc=None):
  ''' The Cinema 4D API only provides a :class:`~c4d.documents.KillDocument`
  function that not only removes the specified :class:`c4d.BaseDocument`
  from the document list, but really *kills* it, ie. it can not be used
  anymore aftert the function is called.
  This function *only* removes the document from the Cinema 4D document
  list so that it is still valid and can be accessed from Python.
  :param doc: The :class:`c4d.BaseDocument` to remove.
  :param new_active_doc: If specified, this will become the new
    active Cinema 4D document. Otherwise, the next document of *doc*
    will be used (C4D default behaviour) or a new empty document is
    created if none exists.
  :raise TypeError: If *doc* or *new_active_doc* is not exactly a
    :class:`c4d.documents.BaseDocument`.
  '''
  # Exact type checks (not isinstance) -- presumably deliberate to reject
  # subclasses; TODO confirm that subclasses really must be excluded.
  if type(doc) is not c4d.documents.BaseDocument:
    raise TypeError("doc must be a BaseDocument object")
  if new_active_doc is not None and \
      type(new_active_doc) is not c4d.documents.BaseDocument:
    raise TypeError("new_active_doc must be a BaseDocument object")
  successor = new_active_doc or doc.GetPred() or doc.GetNext()
  doc.Remove()
  # Note: The document will be removed before eventually inserting
  # a new document because if *doc* is the active document and is
  # empty, InsertBaseDocument will actually kill it before inserting
  # the new document.
  if not successor:
    successor = c4d.documents.BaseDocument()
  c4d.documents.InsertBaseDocument(successor)
  c4d.documents.SetActiveDocument(successor)
class TemporaryDocument(object):
  ''' The *TemporaryDocument* provides, as the name implies, a temporary
  `c4d.documents.BaseDocument` that can be used to perform operations in
  an isolated environment such as calling modeling commands or
  `c4d.CallCommand()`.
  When the *TemporaryDocument* is created, it is not immediately
  activated. To do so, one must call the `attach()` method or use
  it as a context-manager. When the document is no longer needed, the
  context-manager will close the document and remove it from the
  Cinema 4D document list or `detach()` must be called manually.
  The *TemporaryDocument* can be re-used after it has been closed.
  Use the `get()` method to obtain the wrapped *BaseDocument* or catch
  the return value of the context-manager.
  .. note:: If `detach()` was not called after `attach()` and the
    *TemporaryDocument* is being deleted via the garbage collector,
    a `RuntimeWarning` will be issued but the document will not be
    detached.
  .. important:: The *TemporaryDocument* will not expect that the
    internal `BaseDocument` might actually be removed by any other
    mechanism but the :meth:`detach` method. '''
  __slots__ = ('_bdoc', '_odoc')
  def __init__(self):
    super(TemporaryDocument, self).__init__()
    # The wrapped temporary document.
    self._bdoc = c4d.documents.BaseDocument()
    # The document that was active before attach(); None while detached.
    self._odoc = None
  def __del__(self):
    if self._odoc is not None:
      warnings.warn(
        "TemporaryDocument not detached before being "
        "garbage collected", RuntimeWarning)
  def __enter__(self):
    ''' Attaches the real temporary document and returns it. '''
    self.attach()
    return self.get()
  def __exit__(self, *args):
    self.detach()
  def attach(self):
    ''' Attaches the temporary document to the Cinema 4D document list.
    It will also be promoted to be the active document. A call to
    `detach()` must be paired with `attach()`.
    The document that is active before this method is called will
    be saved and promoted back to being the active document with
    calling :meth:`detach`.
    Returns *self* for method-chaining.
    :raise RuntimeError: If the document is already attached. '''
    if self._odoc is not None:
      # Fixed: previously raised via the misspelled name ``RuntimeErrorn``,
      # which itself caused a NameError.
      raise RuntimeError("attach() has already been called")
    self._odoc = c4d.documents.GetActiveDocument()
    c4d.documents.InsertBaseDocument(self._bdoc)
    c4d.documents.SetActiveDocument(self._bdoc)
    return self
  def detach(self, do_recall=True):
    ''' Detaches the temporary document from the Cinema 4D document
    list and promotes the previous active document back to its original
    status unless *do_recall* is False.
    Returns *self* for method-chaining.
    :raise RuntimeError: If the document is not currently attached. '''
    if self._odoc is None:
      raise RuntimeError("attach() has not been called before")
    # Fixed: ``self._odoc`` is a BaseDocument, not a callable -- the
    # previous code invoked ``self._odoc()`` which raised a TypeError.
    remove_document(self._bdoc, self._odoc if do_recall else None)
    self._odoc = None
    return self
  def get(self):
    ''' Returns the internal *BaseDocument* object. '''
    return self._bdoc
  def is_attached(self):
    ''' Returns `True` if this *TemporaryDocument* is attached, that is,
    inside the Cinema 4D document list, and `False` if it's not. '''
    return self._odoc is not None
class UndoHandler(object):
  ''' The *UndoHandler* is a useful class to temporarily apply changes
  to components of Cinema 4D objects, tags, materials, nodes, documents
  etc. and revert them at a specific point.
  Internally, the *UndoHandler* simply stores a list of callables
  that are called upon `revert()`. All methods that store the
  original state of a node simply append a callable to it. Custom
  callables can be added with `custom()`. '''
  __slots__ = ('_flist',)
  def __init__(self):
    super(UndoHandler, self).__init__()
    # Ordered list of revert callables; executed in reverse on revert().
    self._flist = []
  def __enter__(self):
    return self
  def __exit__(self, *args):
    self.revert()
  def revert(self):
    ''' Reverts back to the original states that have been kept track
    of with this *UndoHandler* and flushes these states. '''
    # Swap the list out first so revert callbacks can register new ones.
    flist, self._flist = self._flist, []
    # NOTE(review): list comprehension used purely for side effects; a
    # plain for-loop would be the clearer idiom.
    [f() for f in reversed(flist)]
  def custom(self, target):
    ''' Adds a custom callable object that is invoked when :meth:`revert`
    is called. It must accept no arguments.
    :raise TypeError: If *target* is not callable. '''
    if not callable(target):
      raise TypeError("<target> must be callable", type(target))
    self._flist.append(target)
  def matrix(self, op):
    ''' Restores ops current matrix upon :meth:`revert`. '''
    ml = op.GetMl()
    def revert_matrix():
      op.SetMl(ml)
    self._flist.append(revert_matrix)
  def location(self, node):
    ''' Tracks the hierarchical location of *node* and restores it upon
    :meth:`revert`. This method only supports materials, tags and objects.
    This will also remove nodes that were not inserted any where before.
    :raise TypeError: If a root-level *node* is neither a material nor
      an object. '''
    # Capture all neighbours now; the closure below uses them to decide
    # where to re-insert the node.
    pred_node = node.GetPred()
    next_node = node.GetNext()
    parent = node.GetUp()
    tag_host = node.GetObject() if isinstance(node, c4d.BaseTag) else None
    doc = node.GetDocument()
    if not any([pred_node, next_node, parent, tag_host]) and doc:
      # Node sits at the document root; only materials/objects can be
      # re-inserted there.
      supported_classes = (c4d.BaseMaterial, c4d.BaseObject)
      if not isinstance(node, supported_classes):
        raise TypeError(
          "only materials and objects are supported when "
          "located at their root", type(node))
    def revert_hierarchy():
      node.Remove()
      # Prefer re-inserting next to a sibling that is still in the same
      # parent; fall back to parent / tag host / document root.
      if pred_node and pred_node.GetUp() == parent:
        node.InsertAfter(pred_node)
      elif next_node and next_node.GetUp() == parent:
        node.InsertBefore(next_node)
      elif parent:
        node.InsertUnder(parent)
      elif tag_host:
        tag_host.InsertTag(node)
      elif doc:
        if isinstance(node, c4d.BaseMaterial):
          doc.InsertMaterial(node)
        elif isinstance(node, c4d.BaseObject):
          doc.InsertObject(node)
        else:
          raise RuntimeError("unexpected type of <node>", type(node))
    self._flist.append(revert_hierarchy)
  def container(self, node):
    ''' Grabs a copy of the nodes :class:`c4d.BaseContainer` and
    restores it upon :meth:`revert`. '''
    data = node.GetData()
    def revert_container():
      node.SetData(data)
    self._flist.append(revert_container)
  def full(self, node):
    ''' Gets a complete copy of *node* and restores its complete state
    upon :meth:`revert`. This is like using :data:`c4d.UNDOTYPE_CHANGE`
    with :meth:`c4d.documents.BaseDocument.AddUndo` except that it does not
    include the hierarchical location. For that, you can use the
    :meth:`location`. '''
    flags = c4d.COPYFLAGS_NO_HIERARCHY | c4d.COPYFLAGS_NO_BRANCHES
    clone = node.GetClone(flags)
    def revert_node():
      clone.CopyTo(node, c4d.COPYFLAGS_0)
    self._flist.append(revert_node)
# Object Helpers
def duplicate_object(obj, n=None):
  ''' Duplicate *obj* using the Cinema 4D "Duplicate" tool and return the
  duplicates as a list. If *n* is None, a single duplicate is created
  (the returned list has one element); otherwise *n* duplicates are
  created.
  In many cases this is more desirable than using
  :class:`~c4d.C4DAtom.GetClone` since the :class:`c4d.AliasTrans`
  class is only available since R17.
  .. note:: The function always returns a list, also for ``n=None``
    (the original docstring promised a bare object in that case, which
    the implementation never did).
  :param obj: The :class:`c4d.BaseObject` to clone.
  :param n: None for a single duplicate, or the number of duplicates.
  :return: A list of the cloned objects.
  :raise RuntimeError: If *obj* is not inserted in a `c4d.BaseDocument`
    or if the objects could not be cloned.
  '''
  doc = obj.GetDocument()
  if not doc:
    raise RuntimeError("obj must be in a c4d.BaseDocument")
  # Configure the Duplicate tool: plain copies (no instances), arranged
  # in "select mode" so they stack on the original.
  bc = c4d.BaseContainer()
  bc[c4d.MDATA_DUPLICATE_COPIES] = 1 if n is None else n
  bc[c4d.MDATA_DUPLICATE_INSTANCES] = c4d.MDATA_DUPLICATE_INSTANCES_OFF
  bc[c4d.MDATA_ARRANGE_MODE] = c4d.MDATA_ARRANGE_MODE_SELECTMODE
  result = c4d.utils.SendModelingCommand(
    c4d.ID_MODELING_DUPLICATE_TOOL, [obj], bc=bc, doc=doc)
  if not result:
    raise RuntimeError("could not duplicate object")
  name = obj.GetName()
  # The tool inserts a Null right after *obj* that groups the copies.
  root = obj.GetNext()
  assert root.CheckType(c4d.Onull)
  clones = []
  for child in root.GetChildren():
    # Rename "obj.1", "obj.2", ... back to the original name and detach
    # the copy from the temporary group.
    child.SetName(name)
    child.Remove()
    clones.append(child)
  root.Remove()
  return clones
def move_axis(obj, new_axis):
  ''' Simulate the "Axis Move" mode in Cinema 4D. This function moves
  the axis of a :class:`c4d.BaseObject` to the specified *new_axis* in
  *local space*. Child objects will remain at their original position
  relative to *global space*. If *obj* is a :class:`c4d.PointObject`,
  same applies for the object's points.
  .. code-block:: python
    import c4d
    from nr.c4d.utils import move_axis
    # Rotate the axis of an object by 45 Degrees around the X axis.
    doc.AddUndo(c4d.UNDOTYPE_HIERARCHY_PSR, op)
    mat = op.GetMl() * c4d.utils.MatrixRotX(c4d.utils.Rad(45))
    move_axis(op, mat)
  :param obj: :class:`c4d.BaseObject`
  :param new_axis: :class:`c4d.Matrix` -- The new object axis. '''
  # Compensation matrix: maps coordinates from the old local frame into
  # the new one, so points/children keep their global position.
  mat = ~new_axis * obj.GetMl()
  if obj.CheckType(c4d.Opoint):
    points = [p * mat for p in obj.GetAllPoints()]
    obj.SetAllPoints(points)
    # Notify the object that its point data changed.
    obj.Message(c4d.MSG_UPDATE)
  for child in obj.GetChildren():
    child.SetMl(mat * child.GetMl())
  obj.SetMl(new_axis)
class PolygonObjectInfo(object):
  ''' This class stores the points and polygons of a :class:`c4d.PolygonObject`
  and computes the normals and polygon middle points.
  :param op: The :class:`c4d.PolygonObject` to initialize the object for.
  :param points: True if the object points should be stored.
  :param polygons: True if the object polygons should be stored.
  :param normals: True if the object normals should be computed.
  :param midpoints: True if the polygon midpoints should be computed.
  :param vertex_normals: True if the vertex normals should be computed
    (implies the *normals* parameter).
  .. attribute:: points
    The points of the object.
  .. attribute:: polygons
    The polygons of the object.
  .. attribute:: normals
    The polygon normals, if enabled.
  .. attribute:: vertex_normals
    The per-vertex normals (normalized average of the adjacent face
    normals), if enabled.
  .. attribute:: midpoints
    The polygon midpoints, if enabled.
  .. attribute:: pointcount
  .. attribute:: polycount
  '''
  def __init__(self, op, points=False, polygons=False, normals=False,
               midpoints=False, vertex_normals=False):
    super(PolygonObjectInfo, self).__init__()
    self.points = None
    self.polygons = None
    self.normals = None
    self.vertex_normals = None
    self.midpoints = None
    self.pointcount = op.GetPointCount()
    self.polycount = op.GetPolygonCount()
    # Points/polygons are needed temporarily for any derived computation.
    if points or normals or vertex_normals or midpoints:
      self.points = op.GetAllPoints()
    if polygons or normals or vertex_normals or midpoints:
      self.polygons = op.GetAllPolygons()
    if normals or vertex_normals:
      self.normals = [None] * self.polycount
    if vertex_normals:
      # Accumulators: [summed adjacent face normal, contributing face count].
      self.vertex_normals = [
        [c4d.Vector(), 0] for __ in xrange(self.pointcount)]
    if midpoints:
      self.midpoints = [None] * self.polycount
    if normals or vertex_normals or midpoints:
      m3 = 1.0 / 3.0
      m4 = 1.0 / 4.0
      for i, p in enumerate(self.polygons):
        a, b, c, d = self.points[p.a], self.points[p.b], self.points[p.c], self.points[p.d]
        if normals or vertex_normals:
          n = (a - b).Cross(a - d)
          n.Normalize()
          self.normals[i] = n
        if midpoints:
          # Triangles store the same index in c and d.
          m = a + b + c
          if p.c == p.d:
            m *= m3
          else:
            m += d
            m *= m4
          self.midpoints[i] = m
    if vertex_normals:
      def _add(index, n):
        data = self.vertex_normals[index]
        data[0] += n
        # Fixed: previously ``data[1] += index`` accumulated the vertex
        # index instead of counting the contributing faces.
        data[1] += 1
      # Fixed: previously used ``itertools.izip`` although ``itertools``
      # was never imported (NameError); the builtin zip behaves the same.
      for n, p in zip(self.normals, self.polygons):
        _add(p.a, n)
        _add(p.b, n)
        _add(p.c, n)
        if p.c != p.d:
          _add(p.d, n)
      # Fixed: normalize the accumulated *vector* -- the previous code
      # called GetNormalized() on the [vector, count] list itself.
      self.vertex_normals[:] = (
        v.GetNormalized() for v, _count in self.vertex_normals)
    # Drop temporaries the caller did not ask to keep.
    if not points:
      self.points = None
    if not polygons:
      self.polygons = None
def assoc_mats_with_objects(doc):
  ''' Map every material in *doc* to the objects that reference it via a
  texture tag. The complete object hierarchy is visited depth-first.
  The result is an :class:`OrderedDict` keyed by the materials in the
  document; each value is a list of :class:`c4d.BaseObject`. An object
  *can* occur twice in the same list when it carries two texture tags
  with the same material.
  :param doc: :class:`c4d.BaseDocument`
  :return: :class:`OrderedDict`
  '''
  mapping = OrderedDict()
  def visit(op):
    for tag in op.GetTags():
      if not tag.CheckType(c4d.Ttexture):
        continue
      material = tag[c4d.TEXTURETAG_MATERIAL]
      if material:
        mapping.setdefault(material, []).append(op)
    for child in op.GetChildren():
      visit(child)
  for root in doc.GetObjects():
    visit(root)
  return mapping
# Bitmaps
def load_bitmap(filename):
  ''' Load a `c4d.bitmaps.BaseBitmap` from *filename*.
  :param filename: :class:`str` -- The file to load the image from.
  :return: :class:`c4d.BaseBitmap` -- The loaded bitmap, or None if the
    file could not be loaded. '''
  bitmap = c4d.bitmaps.BaseBitmap()
  # InitWith() returns a tuple whose first element is the result code.
  status = bitmap.InitWith(filename)[0]
  return bitmap if status == c4d.IMAGERESULT_OK else None
# Other
def find_root(node):
  ''' Return the top-most node of *node*'s hierarchy. Note that this can
  very well be the *node* passed to this function.
  :param node: A :class:`c4d.BaseList2D` object or an object that
    implements the hierarchy interface.
  :return: The node at the root of the hierarchy. '''
  current = node
  while True:
    parent = current.GetUp()
    if not parent:
      return current
    current = parent
# Cinema 4D SDK
def candidates(value, obj, callback=lambda vref, vcmp, kcmp: vref == vcmp):
  ''' Searches for *value* in the attributes of *obj* and returns a list
  of all attribute names for which the callback returns True. The
  callback is passed *value* as the first argument, the attribute value
  to compare it with as the second argument and the name of the
  attribute as the third.
  :param value: The reference value to search for.
  :param obj: Any object with a ``__dict__``.
  :param callback: Predicate ``(vref, vcmp, kcmp) -> bool``; defaults to
    an equality check on the values.
  :return: list of str
  '''
  # .items() instead of the Python-2-only .iteritems(): identical
  # behaviour on Python 2 and also works on Python 3.
  return [key for key, attr in vars(obj).items()
          if callback(value, attr, key)]
| [
"[email protected]"
]
| |
3a2dbda0d6edea8b04c5c326afe5c8171c834539 | f3bd271bf00325881fb5b2533b9ef7f7448a75ec | /xcp2k/classes/_restart2.py | 4de1a3a5a71bd0926db09c8a34da1ed829325acb | []
| no_license | obaica/xcp2k | 7f99fc9d494859e16b9b0ea8e217b0493f4b2f59 | 6e15c2c95658f545102595dc1783f5e03a9e6916 | refs/heads/master | 2020-07-15T17:27:43.378835 | 2019-02-11T16:32:24 | 2019-02-11T16:32:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | from xcp2k.inputsection import InputSection
from _each11 import _each11
class _restart2(InputSection):
    """Wrapper for a CP2K ``RESTART`` input section.

    NOTE(review): this class appears to be auto-generated (xcp2k input
    wrapper); attribute names presumably map 1:1 onto CP2K keywords via
    ``_keywords`` -- confirm before editing by hand.
    """
    def __init__(self):
        InputSection.__init__(self)
        # Value of the section parameter (the text after "&RESTART").
        self.Section_parameters = None
        # Keyword values; None means "not set / use CP2K default".
        self.Add_last = None
        self.Common_iteration_levels = None
        self.Filename = None
        self.Log_print_key = None
        # Nested EACH subsection.
        self.EACH = _each11()
        self._name = "RESTART"
        # Attribute name -> CP2K keyword name mapping used for serialization.
        self._keywords = {'Common_iteration_levels': 'COMMON_ITERATION_LEVELS', 'Log_print_key': 'LOG_PRINT_KEY', 'Add_last': 'ADD_LAST', 'Filename': 'FILENAME'}
        self._subsections = {'EACH': 'EACH'}
        self._attributes = ['Section_parameters']
| [
"[email protected]"
]
| |
960d2fbde5d08095542b53926dcab3915b657c1b | 5f6019aefd4b940451ae81fb0e430e97d19626cb | /2016/martian/get_lines.py | 85a7a3481b29fc18839f72a0420d73a8b1eefc05 | []
| no_license | cligs/projects | 7cee393ccdd5fdf8477a89f07ae7a93fe78511e6 | d8a60564d3436a207ce4d94dbdefed9bf5402a9c | refs/heads/master | 2022-04-28T20:38:27.267358 | 2022-04-12T09:08:05 | 2022-04-12T09:08:05 | 42,662,737 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,207 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Filename: get_lines.py
# Authors: #cf
# 2016-05-21
import re
import os
import glob
import pandas as pd
# Input locations: the diff classification table and the wdiff'ed text,
# plus the four edit-type labels extracted below.
WorkDir = "/media/christof/data/Dropbox/0-Analysen/2016/martians/diffs5/"
DiffTable = WorkDir+"DiffTable_2016-04-29.csv"
DiffedText = WorkDir+"martians_wdiffed-prep.txt"
Types = ["deletion-major", "deletion-minor", "expansion-major", "expansion-minor"]
def get_lines(DiffTable, DiffedText, Types):
    """
    Collect line IDs with expansions / deletions, get lines from diffed text,
    and write each edit type's lines (with one line of context before and
    after) into a separate file under ./lines/.

    DiffTable  -- path to a tab-separated table; its index holds item IDs of
                  the form "<lineno>_x" and it has a "type" column.
    DiffedText -- path to the diffed text, one record per line.
    Types      -- list of edit-type labels to extract.
    Author: #cf.
    """
    print("get_lines...")
    # Open and read the DiffTable.
    # pandas.DataFrame.from_csv was deprecated and removed in pandas 1.0;
    # read_csv with index_col=0 is the documented replacement.
    with open(DiffTable, "r") as InFile:
        Diffs = pd.read_csv(InFile, sep="\t", index_col=0)
    with open(DiffedText, "r") as InFile:
        Text = InFile.read()
    Text = re.split("\n", Text)
    # Make sure the output directory exists (the original assumed it did).
    os.makedirs("./lines", exist_ok=True)
    # For each type of edit, get the line IDs.
    for Type in Types:
        Items = Diffs.loc[Diffs['type'] == Type]
        ItemIDs = Items.index.values
        LineIDs = []
        for ItemID in ItemIDs:
            # IDs look like "00123_a": drop the 2-char suffix, keep the number.
            LineID = int(ItemID[:-2])
            LineIDs.append(LineID)
        Lines = []
        for LineID in LineIDs:
            # LineIDs are 1-based, Text is 0-based; emit previous line,
            # the edited line (marked "=>"), and the following line.
            Lines.append("-- " + '{:05d}'.format(LineID-1) + ": " + Text[LineID-2])
            Lines.append("=> " + '{:05d}'.format(LineID) + ": " + Text[LineID-1])
            Lines.append("-- " + '{:05d}'.format(LineID+1) + ": " + Text[LineID-0] + "\n")
        Lines = "\n".join(Lines)
        LinesFile = "./lines/lines_"+str(Type)+".txt"
        with open(LinesFile, "w") as OutFile:
            OutFile.write(Lines)
    print("Done.")
# Run the extraction for all four edit types.
get_lines(DiffTable, DiffedText, Types)
| [
"[email protected]"
]
| |
3f25fb7ce6da69d951596e88ada26bf2a14bd5d8 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_285/ch5_2019_06_06_18_54_46_963120.py | 985b9e61a770d1502eea4003eb618e39ff03abfa | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | def eh_primo(n):
primo=True
if n<=1:
primo=False
for e in range(2,n):
if n%e==0 and e!=n:
primo=False
return primo
# Kept for backward compatibility with code that may import this name.
# The function below no longer appends to it: the original accumulated
# primes across calls, so after a first call the list was never reset,
# and for n in (0, 1) the function either raised IndexError (first call)
# or returned a stale prime from a previous call.
lista_primos=[]
def maior_primo_menor_que(n):
    """Return the largest prime <= n, or -1 if there is none.

    Returns -1 for n < 2 (the original returned -1 only for n < 0 and
    crashed or returned stale state for n in (0, 1)).  Searches downward
    from n and stops at the first prime, testing divisors up to sqrt.
    """
    if n < 2:
        return -1
    for cand in range(n, 1, -1):
        # cand is prime iff no divisor in [2, sqrt(cand)] divides it.
        if all(cand % d for d in range(2, int(cand ** 0.5) + 1)):
            return cand
    return -1  # unreachable for n >= 2 (2 itself is prime); kept defensive
| [
"[email protected]"
]
| |
4a95b21c810a8139cdf6848ac7d6fbe6c2f553ff | 4b3ae6048ced0d7f88a585af29fa3a7b15005749 | /Python/Python_Fundamentals/makingTuples.py | 826647c1102aa86bdc343efe54ec68cda094a6db | []
| no_license | ajag408/DojoAssignments | a6320856466ac21d38e8387bdcbbe2a02009e418 | 03baa0ff5261aee6ffedf724657b3a8c7cdffe47 | refs/heads/master | 2022-12-11T15:50:46.839881 | 2021-06-07T20:57:17 | 2021-06-07T20:57:17 | 79,872,914 | 0 | 0 | null | 2022-12-08T00:35:09 | 2017-01-24T02:58:15 | Python | UTF-8 | Python | false | false | 209 | py | def dictToTuple(dict):
return dict.items()
# my_dict = {
# "Speros": "(555) 555-5555",
# "Michael": "(999) 999-9999",
# "Jay": "(777) 777-7777"
# }
#
# answer = dictToTuple(my_dict)
# print answer
| [
"[email protected]"
]
| |
518f8410f8bc49ab48576f99926b7c130acc5de7 | 177338a720f904f63926da055364cc0e2c0a850c | /python_stu/s11_22_pager/app01/migrations/0001_initial.py | e1df1b6e1002b6b8fc5e9aec9c576e3d2b84b7e1 | []
| no_license | xuefenga616/mygit | 60ef7bf7201603e13d4621cf7a39dea8ec92e0b7 | be3b8003fcc900ce7ca6616a9ddebb0edcbc1407 | refs/heads/master | 2020-09-13T11:50:55.448041 | 2017-08-27T10:59:00 | 2017-08-27T10:59:00 | 67,042,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema migration: creates the ``UserList`` table."""
    # First migration for this app — nothing to depend on.
    dependencies = [
    ]
    operations = [
        # UserList: auto-increment 'id' primary key plus username/age columns.
        migrations.CreateModel(
            name='UserList',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('username', models.CharField(max_length=32)),
                ('age', models.IntegerField()),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
a7aaea88c780b8bd4794ae81c8be3b058b2d5c5d | c4b8e1e09dedbccd37ca008ecaaca4438610bbaf | /z3/building_a_house.py | 9d5015a6cc2a0dd406b4c96667b91c8ae531df7d | [
"MIT"
]
| permissive | hakank/hakank | 4806598b98cb36dd51b24b0ab688f52dadfe9626 | c337aaf8187f15dcdc4d5b09cd2ed0dbdb2e72c2 | refs/heads/master | 2023-08-15T00:21:52.750270 | 2023-07-27T16:21:40 | 2023-07-27T16:21:40 | 11,933,517 | 336 | 97 | MIT | 2023-07-27T11:19:42 | 2013-08-06T20:12:10 | JavaScript | UTF-8 | Python | false | false | 3,932 | py | #!/usr/bin/python -u
# -*- coding: latin-1 -*-
#
# Building a house, simple scheduling problem in Z3
#
# This model is adapted OPL model sched_intro.mod (examples).
# """
# This is a basic problem that involves building a house. The masonry,
# roofing, painting, etc. must be scheduled. Some tasks must
# necessarily take place before others, and these requirements are
# expressed through precedence constraints.
# """
#
# The OPL solution is
# """
# Masonry : 0..35
# Carpentry: 35..50
# Plumbing : 35..75
# Ceiling : 35..50
# Roofing : 50..55
# Painting : 50..60
# Windows : 55..60
# Facade : 75..85
# Garden : 75..80
# Moving : 85..90
# """
#
# With the extra objective (from the OPL model sched_time.mod) the result is
#
# masonry : [20 -- 35 --> 55]
# carpentry: [75 -- 15 --> 90]
# plumbing : [55 -- 40 --> 95]
# ceiling : [75 -- 15 --> 90]
# roofing : [90 -- 5 --> 95]
# painting : [90 -- 10 --> 100]
# windows : [95 -- 5 --> 100]
# facade : [95 -- 10 --> 105]
# garden : [95 -- 5 --> 100]
# moving : [105 -- 5 --> 110]
#
#
# This Z3 model was written by Hakan Kjellerstrand ([email protected])
# See also my Z3 page: http://hakank.org/z3/
#
from z3_utils_hakank import *
# handle the precedences
# the task x must be finished before task y begin
def prec(sol, x, y, s, d):
    """Add a precedence constraint to *sol*: task x must finish before task y
    starts, given start times *s* and durations *d*."""
    end_of_x = s[x] + d[x]
    sol.add(end_of_x <= s[y])
sol = SolverFor("LIA")
# data
num_tasks = 10
# for the precedences
masonry,carpentry,plumbing,ceiling,roofing,painting,windows,facade,garden,moving = range(num_tasks)
tasks = [masonry,carpentry,plumbing,ceiling,roofing,painting,windows,facade,garden,moving]
tasks_s = ["masonry","carpentry","plumbing","ceiling","roofing","painting","windows","facade","garden","moving"]
# Durations and resource usage (one unit of work capacity) per task.
duration = [35,15,40,15, 5,10, 5,10, 5, 5];
height = [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1];
total_duration = sum(duration)
# precendeces
num_precedences = 14;
precedences = [
    [masonry, carpentry],
    [masonry, plumbing],
    [masonry, ceiling],
    [carpentry, roofing],
    [ceiling, painting],
    [roofing, windows],
    [roofing, facade],
    [plumbing, facade],
    [roofing, garden],
    [plumbing, garden],
    [windows, moving],
    [facade, moving],
    [garden, moving],
    [painting, moving]
]
# variables
start = makeIntVector(sol,"start",num_tasks, 0, total_duration)
end = makeIntVector(sol,"end",num_tasks, 0, total_duration)
limitx = makeIntVar(sol,"limitx",1,3)
makespan = makeIntVar(sol,"makespan", 0,total_duration)
# the extra objective z (see above)
z = makeIntVar(sol,"z", 0, 10000)
# select which variable we should minimize: makespan or z
min_val = makespan # (then we ignore the z part)
# min_val = z
# constraints
# This takes a long time to calculate
# print("before cumulative")
# Resource constraint: at most `limitx` tasks running at once.
# NOTE(review): cumulative/maximum/maximum2/getLessSolution come from
# z3_utils_hakank (star-imported above) — signatures assumed from usage.
cumulative(sol, start, duration, height, limitx, 0, total_duration)
# print("after cumulative")
if min_val == z:
    # Weighted tardiness/earliness objective from OPL's sched_time.mod.
    sol.add(z ==
            400 * maximum2(sol,[end[moving]- 100, 0]) +
            200 * maximum2(sol,[25 - start[masonry], 0]) +
            300 * maximum2(sol,[75 - start[carpentry], 0]) +
            100 * maximum2(sol,[75 - start[ceiling], 0]))
else:
    sol.add(z == 0)
# Channel start/duration/end for every task.
for t in range(num_tasks):
    sol.add(end[t] == start[t] + duration[t])
# makespan is the end time of the last task
maximum(sol, makespan, end)
# precedences
for p in range(num_precedences):
    prec(sol,precedences[p][0], precedences[p][1], start, duration)
# minimize makespan;
# Iterative improvement: keep re-solving, each time constraining the
# objective to be strictly smaller than the last model's value.
while sol.check() == sat:
    mod = sol.model()
    print("makespan:", mod[makespan])
    if min_val == z:
        print("z:", mod[z])
    print("start:", [mod[start[t]] for t in range(num_tasks)])
    print("end  :", [mod[end[t]] for t in range(num_tasks)])
    for i in range(num_tasks):
        print("%-10s: %3i..(%3i)..%3i" % (tasks_s[i], mod[start[i]].as_long(), duration[i], mod[end[i]].as_long()))
    print()
    getLessSolution(sol,mod,min_val)
| [
"[email protected]"
]
| |
6f0e96c1993a1b210e4d7c1365b69706190d11d7 | 60814a33c10069ac92f2621463bfa0acfed16f7e | /StarmerxSpider/pool.py | 017a3a649a0fdb4e6467d2b191a5ff4a54083268 | []
| no_license | ijt0walle/LiuFan_Spider | 967138c79bb4f6097fb8d898892a02c5fd6a454c | 25c07e7d594a835d123530bb49bce77a5bd7f662 | refs/heads/master | 2021-01-25T13:18:28.306502 | 2017-08-15T02:32:08 | 2017-08-15T02:32:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,839 | py | #!/usr/bin/python
# coding=utf8
from Queue import Queue
import threading
import contextlib
# Sentinel job object: a worker thread that pulls this off the queue exits.
WorkerStop = object()


class ThreadPool:
    """A minimal thread pool fed from an unbounded FIFO job queue.

    Jobs are queued with call_in_thread()/call_in_thread_with_callback()
    and executed once start() has spawned the worker threads.  stop()
    shuts the workers down by queueing one WorkerStop sentinel per worker.
    """

    workers = 0                        # live worker count (instance attr after first spawn)
    thread_factory = threading.Thread  # overridable for testing
    # threading.currentThread is a deprecated alias (removed warnings since
    # Python 3.10); current_thread is the canonical name on 2.6+ and 3.x.
    current_thread = staticmethod(threading.current_thread)

    def __init__(self, max_threads=32, name=None):
        self.queue = Queue(0)      # unbounded job queue
        self.max_threads = max_threads
        self.name = name
        self.waiters = []          # workers currently blocked on queue.get()
        self.working = []          # workers currently running a job

    def start(self):
        """Spawn workers, up to min(max_threads, number of queued jobs)."""
        need_size = self.queue.qsize()
        while self.workers < min(self.max_threads, need_size):
            self.start_a_worker()

    def start_a_worker(self):
        """Spawn one worker thread running the _worker loop."""
        self.workers += 1
        new_thread = self.thread_factory(target=self._worker, name='New Worker')
        new_thread.start()

    def call_in_thread(self, func, *args, **kwargs):
        """Queue func(*args, **kwargs) for execution with no result callback."""
        self.call_in_thread_with_callback(None, func, *args, **kwargs)

    def call_in_thread_with_callback(self, on_result, func, *args, **kwargs):
        """Queue func(*args, **kwargs); afterwards on_result(success, result)
        is invoked from the worker thread (success is False if func raised)."""
        job = (func, args, kwargs, on_result)
        self.queue.put(job)

    @contextlib.contextmanager
    def _work_state(self, states, worker_thread):
        """Track worker_thread in the *states* list for the block's duration."""
        assert isinstance(states, list)
        states.append(worker_thread)
        try:
            yield
        finally:
            states.remove(worker_thread)

    def _worker(self):
        """Worker loop: execute jobs until a WorkerStop sentinel arrives."""
        ct = self.current_thread()
        job = self.queue.get()
        while job is not WorkerStop:
            with self._work_state(self.working, ct):
                func, args, kwargs, on_result = job
                del job
                # Pre-bind result so the callback below cannot hit a
                # NameError when func raises (bug in the original: the
                # NameError was then silently swallowed and the callback
                # never received the failure).
                result = None
                try:
                    result = func(*args, **kwargs)
                    success = True
                except Exception:
                    # Was a bare "except:", which also trapped
                    # SystemExit/KeyboardInterrupt.
                    success = False
                del func, args, kwargs
                if on_result is not None:
                    try:
                        on_result(success, result)
                    except Exception:
                        pass  # a broken callback must not kill the worker
                del on_result, result
            with self._work_state(self.waiters, ct):
                job = self.queue.get()

    def stop(self):
        """
        Close threads
        :return:
        """
        # One sentinel per worker; each worker exits after consuming one.
        while self.workers:
            self.queue.put(WorkerStop)
            self.workers -= 1
if __name__ == '__main__':
    # Smoke test (Python 2 print syntax): queue 100 jobs, then start the
    # pool — only max_threads (10) workers are spawned to drain them.
    def show_timestamp(name):
        import time
        print '%s: %s' % (name, time.time())
        time.sleep(1)
    pool = ThreadPool(10)
    for i in range(100):
        pool.call_in_thread(show_timestamp, i)
    print '# Before start()'
    pool.start()
    print '# After start()'
    # stop() only queues the sentinels; already-queued jobs finish first.
    pool.stop()
    print '# After stop()'
| [
"[email protected]"
]
| |
440008a7a36ecaef1ea45f372d64494846621011 | 6669b132eb482f95c1f40d35ecae14a544fe9197 | /dp/no70.py | f978622a8425456080815f3c7ee609f8abec503a | []
| no_license | markdannel/leetcode | 94dade2e5a286d04075e70e48015459ea6ac383a | 6a2ac436599ecebc527efe0d6bfe0f6f825311fb | refs/heads/master | 2021-06-06T20:56:34.868122 | 2020-10-21T12:16:56 | 2020-10-21T12:16:56 | 140,668,176 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 934 | py | # 假设你正在爬楼梯。需要 n 阶你才能到达楼顶。
# 每次你可以爬 1 或 2 个台阶。你有多少种不同的方法可以爬到楼顶呢?
# 注意:给定 n 是一个正整数。
# 示例 1:
# 输入: 2
# 输出: 2
# 解释: 有两种方法可以爬到楼顶。
# 1. 1 阶 + 1 阶
# 2. 2 阶
# 示例 2:
# 输入: 3
# 输出: 3
# 解释: 有三种方法可以爬到楼顶。
# 1. 1 阶 + 1 阶 + 1 阶
# 2. 1 阶 + 2 阶
# 3. 2 阶 + 1 阶
# 明确「状态」 -> 定义 dp 数组/函数的含义 -> 明确「选择」-> 明确 base case
class Solution:
    """LeetCode 70 — Climbing Stairs."""

    def climbStairs(self, n: int) -> int:
        """Return the number of distinct ways to climb n steps, taking
        1 or 2 steps at a time.

        Bottom-up iteration replaces the original memoised recursion:
        ways(k) = ways(k-1) + ways(k-2), with ways(1)=1 and ways(2)=2.
        """
        if n <= 2:
            return n
        prev, cur = 1, 2
        for _ in range(3, n + 1):
            prev, cur = cur, prev + cur
        return cur
# Smoke test: 5 steps can be climbed in 8 distinct ways.
s=Solution()
print(s.climbStairs(5))
| [
"[email protected]"
]
| |
e73305264df6b1aea70f4552a91dc35e2b2d9d40 | 159d2b827db0ae748b739378cab43a24e1ebaa38 | /buildtools/scons-local-3.0.0/scons-local-3.0.0/SCons/Platform/sunos.py | 3279fb9c5b725417a732aa469ae0a7a65daf1880 | [
"MIT",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
]
| permissive | mqnc/c-sick | 2ef474f5626fcf47b5ee0793220dd7693656b488 | 65b54b21d9492fae7c7cac299f56c8e6583ef555 | refs/heads/master | 2020-03-23T15:02:03.057094 | 2019-10-18T13:51:33 | 2019-10-18T13:51:33 | 141,716,128 | 1 | 1 | BSD-3-Clause | 2019-07-24T06:30:00 | 2018-07-20T13:34:48 | Python | UTF-8 | Python | false | false | 1,919 | py | """engine.SCons.Platform.sunos
Platform-specific initialization for Sun systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001 - 2017 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/sunos.py rel_3.0.0:4395:8972f6a2f699 2017/09/18 12:59:24 bdbaddog"
from . import posix
def generate(env):
    """Populate *env* with SunOS (Solaris) platform construction variables."""
    # Start from the generic POSIX settings, then layer Solaris specifics.
    posix.generate(env)
    # Based on sunSparc 8:32bit — ARG_MAX=1048320, minus 3000 reserved for
    # environment expansion.
    env['MAXLINELENGTH'] = 1045320
    for key, value in (('PKGINFO', 'pkginfo'), ('PKGCHK', '/usr/sbin/pkgchk')):
        env[key] = value
    # Solaris compiler/toolchain locations appended to the execution PATH.
    env['ENV']['PATH'] += ':/opt/SUNWspro/bin:/usr/ccs/bin'
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| [
"[email protected]"
]
| |
f114cc7a55d5cfd56927c7da8e0c7f5d3752c94f | d7016f69993570a1c55974582cda899ff70907ec | /sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/aio/_event_hub_management_client.py | b45cc43bc478f9a8629155aa12abd9c16f8499af | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 5,134 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from ..._serialization import Deserializer, Serializer
from ._configuration import EventHubManagementClientConfiguration
from .operations import ConsumerGroupsOperations, EventHubsOperations, NamespacesOperations, Operations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
# NOTE: AutoRest-generated client (see the header at the top of this file);
# manual edits here will be lost when the code is regenerated.
class EventHubManagementClient: # pylint: disable=client-accepts-api-version-keyword
    """Azure Event Hubs client.
    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.eventhub.v2015_08_01.aio.operations.Operations
    :ivar namespaces: NamespacesOperations operations
    :vartype namespaces: azure.mgmt.eventhub.v2015_08_01.aio.operations.NamespacesOperations
    :ivar event_hubs: EventHubsOperations operations
    :vartype event_hubs: azure.mgmt.eventhub.v2015_08_01.aio.operations.EventHubsOperations
    :ivar consumer_groups: ConsumerGroupsOperations operations
    :vartype consumer_groups:
     azure.mgmt.eventhub.v2015_08_01.aio.operations.ConsumerGroupsOperations
    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: Subscription credentials that uniquely identify a Microsoft Azure
     subscription. The subscription ID forms part of the URI for every service call. Required.
    :type subscription_id: str
    :param base_url: Service URL. Default value is "https://management.azure.com".
    :type base_url: str
    :keyword api_version: Api Version. Default value is "2015-08-01". Note that overriding this
     default value may result in unsupported behavior.
    :paramtype api_version: str
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    """
    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        **kwargs: Any
    ) -> None:
        self._config = EventHubManagementClientConfiguration(
            credential=credential, subscription_id=subscription_id, **kwargs
        )
        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
        # Collect every generated model class so the serializer/deserializer
        # can resolve type names appearing in service payloads.
        client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        # Requests are not validated client-side; the service validates them.
        self._serialize.client_side_validation = False
        # One operation group per REST resource collection exposed by the API.
        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
        self.namespaces = NamespacesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.event_hubs = EventHubsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.consumer_groups = ConsumerGroupsOperations(self._client, self._config, self._serialize, self._deserialize)
    def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
        """Runs the network request through the client's chained policies.
        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = await client._send_request(request)
        <AsyncHttpResponse: 200 OK>
        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.AsyncHttpResponse
        """
        # Copy so the caller's request object is not mutated by URL formatting.
        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)
    async def close(self) -> None:
        # Release the underlying transport/session resources.
        await self._client.close()
    async def __aenter__(self) -> "EventHubManagementClient":
        await self._client.__aenter__()
        return self
    async def __aexit__(self, *exc_details: Any) -> None:
        await self._client.__aexit__(*exc_details)
| [
"[email protected]"
]
| |
6fdbd5b5071b2d1bf7b6d51cb3afd1714a662463 | c0caed81b5b3e1498cbca4c1627513c456908e38 | /src/python/bindings/app/pyrosetta_toolkit/window_main/IO/GUIInput.py | 5a129fd75f03c94253db0a23872b7a99486f5d71 | [
"LicenseRef-scancode-other-permissive"
]
| permissive | malaifa/source | 5b34ac0a4e7777265b291fc824da8837ecc3ee84 | fc0af245885de0fb82e0a1144422796a6674aeae | refs/heads/master | 2021-01-19T22:10:22.942155 | 2017-04-19T14:13:07 | 2017-04-19T14:13:07 | 88,761,668 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 11,930 | py |
#!/usr/bin/python
# (c) Copyright Rosetta Commons Member Institutions.
# (c) This file is part of the Rosetta software suite and is made available under license.
# (c) The Rosetta software is developed by the contributing members of the Rosetta Commons.
# (c) For more information, see http://www.rosettacommons.org. Questions about this can be
# (c) addressed to University of Washington UW TechTransfer, email: [email protected].
## @file /GUIs/pyrosetta_toolkit/window_main/IO/GUIInput.py
## @brief Class responsible for managing input variables of the GUI.
## @author Jared Adolf-Bryfogle ([email protected])
## @author Steven Combs ([email protected])
#Rosetta Imports
from rosetta import *
#Python Imports
import urllib2
import os.path
import re
#Tkinter Imports
from Tkinter import *
from Tkinter import StringVar
import tkFileDialog
import tkSimpleDialog
#Toolkit Imports
from app.pyrosetta_toolkit.window_main import global_variables
from app.pyrosetta_toolkit.modules.RegionalScoring import RegionalScoring
from app.pyrosetta_toolkit.modules.Region import Region
from app.pyrosetta_toolkit.modules.Region import Regions
from app.pyrosetta_toolkit.modules.tools import input as input_tools
from app.pyrosetta_toolkit.window_modules.clean_pdb.FixPDBWindow import FixPDBWindow
from app.pyrosetta_toolkit.window_modules.options_system.OptionSystemManager import OptionSystemManager
#from app.pyrosetta_toolkit import main_window
class GUIInput:
def __init__(self, toolkit):
self.toolkit = toolkit; #Basically an AP of the toolkit
self.pose = self.toolkit.pose
self.pdb_path = StringVar(); self.pdb_path.set("");
self.PDBLIST = StringVar(); self.PDBLIST.set("")
self.region_start=StringVar(); #Start of Region
self.region_end=StringVar(); #End of Region
self.region_chain=StringVar(); #Chain of Region
self.region_sequence=StringVar(); #Sequence in Entry
self.loops_as_strings = []; #Array of Regions: start:end:chain
self.regions = Regions(); #This will replace loops_as_strings
self.loops = Loops()
#These are set if a user selects a residue from the sequence
self.residue_string = StringVar(); #String that is displayed when individual reside is selected
self.residue_resnum = StringVar();
self.residue_rosetta_resnum = StringVar();
self.residue_chain = StringVar()
self.constraint_file_paths = []; #Path to constraint file if loaded through GUI (Not options system).
self.param_pathlist_file = ""; #Path to a file which lists paths to all params to use. One on each line
self.param_paths = []; #Array of parameter paths.
self.loaded_paths = []; #Since ResidueTypeSet is a singleton, with horrible exception handling, WE need to keep track of it.
self.nonstandard_ResidueTypeSet = ""; #This is set through the ncaa window or loading a param path file.
self.options_manager= OptionSystemManager(); #This is due to Protocols needing Rosetta to be reinitialized without loosing already set options - to set the seed up before multiprocessing runs!
self.options_manager= OptionSystemManager(); #This is due to Protocols needing Rosetta to be reinitialized without loosing already set options - to set the seed up before multiprocessing runs!
self.pdb_url = "http://www.rcsb.org/pdb/files"
#if 0: self.toolkit = main_window()
######### Functions that cannot be put in input_tools or do not belong in the main frame, as they set a variable within this class. ################
def choose_load_pose(self, message="Load Pose"):
"""
Loads a Pose through the tk File Dialog
"""
infilename = tkFileDialog.askopenfilename(initialdir=global_variables.current_directory, title=message)
if not infilename:return
global_variables.current_directory= os.path.dirname(infilename)
print global_variables.current_directory
self.load_pose(infilename)
def fetch_pdb(self):
"""
Fetches the PDB, opens FixPDBWindow to allow the user to clean the PDB before trying to load it.
"""
#Create default directory
outpath = self.toolkit.toolkit_home+"/PDBs"
if not os.path.exists(outpath):os.mkdir(outpath)
global_variables.current_directory = outpath
#Ask for PDB id.
pdbID = tkSimpleDialog.askstring(title="Fetch pdb", prompt="Please enter PDB ID.")
if not pdbID: return
#Open and Write the PDB
FILE = urllib2.urlopen(self.pdb_url+'/'+pdbID.lower()+'.pdb')
OUTFILE = open(outpath+'/'+pdbID.upper()+'.pdb', 'w')
for line in FILE:
OUTFILE.write(line)
OUTFILE.close()
fetched_pdb = outpath+'/'+pdbID.upper()+'.pdb'
print "PDB saved to pyrosetta_toolkit/PDBs"
cleaner = FixPDBWindow(self, self.toolkit.score_class, self.toolkit.pose, fetched_pdb)
cleaner.runfixPDBWindow(self.toolkit._tk_, 0, 0)
def select_pose_then_launch_fixpdb(self):
"""
This way of loading a pose asks the user to open a PDB, then launches the fixPDBWindow as per Labonte's suggestion.
"""
infilename = tkFileDialog.askopenfilename(initialdir=global_variables.current_directory, title="Select PDB file")
if not infilename:return
global_variables.current_directory= os.path.dirname(infilename)
print global_variables.current_directory
cleaner = FixPDBWindow(self, self.toolkit.score_class, self.toolkit.pose, infilename)
cleaner.cleaned_pdb_path.set(infilename)
cleaner.runfixPDBWindow(self.toolkit._tk_, 0, 0)
cleaner.enable_load()
def load_pose(self, path):
"""
Load a pose into the toolkit.pose variable. Setup nessessary variables/etc for objects and window objects of the toolkit. Can have NCAA that have been enabled.
Please use this when loading the main pose into the toolkit.
"""
if not os.path.exists(path):
print "PDB path does not exist. Cannot load pose."
return
self.pdb_path.set(path)
print self.pdb_path.get()
#Turn off PyMOL Observer if on - It will try to update on new pose.
observer_on = self.toolkit.pymol_class.auto_send.get()
if observer_on:
self.toolkit.pymol_class.auto_send.set(False)
#Load Pose
if self.nonstandard_ResidueTypeSet:
self.toolkit.pose.assign(pose_from_file(self.nonstandard_ResidueTypeSet, path))
else:
pose_from_file(self.toolkit.pose, self.pdb_path.get())
self.toolkit.native_pose.assign(self.toolkit.pose); #Set native pose for RMSD.
print self.toolkit.pose
#Reinitialize PyMOL
self.toolkit.pymol_class.SendNewPose()
if observer_on:
self.toolkit.pymol_class.auto_send.set(True)
self.regional_score_class = RegionalScoring(self.toolkit.pose, self.toolkit.score_class.score);
#Reinitialize Output
pdbname = os.path.basename(self.pdb_path.get())
pdbname = pdbname.split(".")[0]
self.toolkit.output_class.outname.set(pdbname)
self.toolkit.output_class.outdir.set(os.path.dirname(self.pdb_path.get()))
#Reinitialize Sequence + Regions
self.reinit_regions_on_new_pose()
self.region_sequence.set(self.toolkit.pose.sequence())
self.residue_rosetta_resnum.set("")
#Reinitialize Design
self.toolkit.DesignDic = dict()
def return_loaded_pose(self, path):
"""
Load and return a pose. Can have NCAA that have been enabled.
"""
p = Pose()
if self.nonstandard_ResidueTypeSet:
p.assign(pose_from_file(self.nonstandard_ResidueTypeSet, path))
else:
pose_from_file(p, self.pdb_path.get())
return p
def set_PDBLIST(self):
infilename = tkFileDialog.askopenfilename(initialdir=global_variables.current_directory,title='Open PDBLIST')
if not infilename:return
global_variables.current_directory =os.path.dirname(infilename)
print "PDBLIST set"
self.PDBLIST.set(infilename)
def load_param_list(self, infilename=False):
"""
Loads paths from param path files into an array. Creates a residue type set from the params.
"""
if not infilename:
infilename = tkFileDialog.askopenfilename(initialdir=global_variables.current_directory,title='Open param pathList file')
if not infilename: return
self.param_pathlist_file = infilename
global_variables.current_directory =os.path.dirname(infilename)
FILE = open(infilename, 'r')
for line in FILE:
if re.search("#", line): continue
line = line.strip()
self.param_paths.append(line)
self.nonstandard_ResidueTypeSet, self.loaded_paths = input_tools.get_residuetypeset_from_path_array(self.param_paths, self.loaded_paths)
FILE.close()
def load_loop(self, infilename=False):
"""
Returns a loops_as_strings array to be used by the InputFrame
"""
if not self.toolkit.pose.total_residue():
print "\nNumbering conversion requires a pose to be loaded...please load a pose.."
return
if not infilename:
infilename = tkFileDialog.askopenfilename(initialdir=global_variables.current_directory,title='Open Loop file')
if not infilename: return
global_variables.current_directory =os.path.dirname(infilename)
FILE = open(infilename, 'r')
loops_as_strings = []
for line in FILE:
print line
line = line.strip()
lineSP = line.split()
start = lineSP[1]
end = lineSP[2]
chain_start = self.toolkit.pose.pdb_info().chain(int(start))
chain_end = self.toolkit.pose.pdb_info().chain(int(end))
if chain_start != chain_end:
print "Invalid loop for GUI. Start and end residues are on different chains.\n"
return
loop_string = repr(self.toolkit.pose.pdb_info().number(int(start)))+":"+repr(self.toolkit.pose.pdb_info().number(int(end)))+":"+chain_start
loops_as_strings.append(loop_string)
FILE.close()
return loops_as_strings
def reinit_regions_on_new_pose(self):
if not self.regions:return #If regions not yet set
for region in self.regions:
if not region.region_exists(self.pose):
loop_string = region.get_region_string()
self.loops_as_strings.remove(loop_string)
self.regions.remove_region(loop_string)
else:
loop_string = region.get_region_string()
print loop_string +" found in new Pose"
#Here is where we have to actually interact with the frame.
self.toolkit.input_frame.loops_listbox.delete(0, END)
for loop_string in self.loops_as_strings:
self.toolkit.input_frame.loops_listbox.insert(END, loop_string)
###Region Setting + Residue Setting ####
def set_residue_of_interest(self, resnum, chain, rosetta_resnum):
"""
Sets current individual residue information.
All Strings.
"""
self.residue_resnum.set(resnum)
self.residue_chain.set(chain)
self.residue_rosetta_resnum.set(rosetta_resnum)
| [
"[email protected]"
]
| |
24d00706810b45332650e6f2373530e74e5de2fa | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03624/s778098430.py | 55d87c556857c9b5ceb9dae5cfda1de24006c1e7 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | s = input()
ans = 'None'
for i in range(26):
c = chr(ord('a') + i)
if c not in s:
ans = c
break
print(ans)
| [
"[email protected]"
]
| |
f08fa45d2f7184da8a83e99c2fa82e9f5560718c | 7456c190ac67e9bf383c645839ac210f6f87f626 | /Scattering_Analysis.py | 17b0643b6d1db264200c9e075b5f6124f10e2277 | []
| no_license | joebatley/PythonCode | 6a8b9d775577b4ba5b48a43b839576b1a861464e | de2748fdd40a0c21f7292c7188b8873f95bc759a | refs/heads/master | 2021-01-11T11:03:42.894488 | 2014-09-29T13:35:02 | 2014-09-29T13:35:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,338 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Jun 6 10:44:51 2014
@author: Joe
"""
import numpy
import pylab as plt
from scipy import interpolate
from scipy.interpolate import splrep, splev
from scipy.integrate import quad
import Stoner
from Stoner.Folders import DataFolder
import Stoner.Analysis as Analysis
import scipy.optimize
from lmfit import minimize, Parameters, Parameter, report_fit
import Stoner.PlotFormats as SPF
import Stoner.Plot as SP
from Stoner.Util import format_error
def BG(params, t, rho):
    """Residual of a Bloch-Grueneisen fit to resistivity-vs-temperature data.

    Only the phonon (T/Dt)^5 term enters the returned model (the original
    code also evaluated (T/Dt)^3 "s-d" and (T/Dt)^2 "e-e" integrals and two
    alternative models, but never used them; that dead work is removed).

    :param params: mapping with keys 'K', 'Dt' and 'rho_0', each exposing a
        ``.value`` attribute (e.g. an lmfit ``Parameters`` object).
    :param t: array of temperatures (K); must not contain zeros.
    :param rho: array of measured resistivities, same length as ``t``.
    :return: model resistivity minus ``rho`` (array), the residual form
        expected by ``lmfit.minimize``.
    """
    K = params['K'].value
    Dt = params['Dt'].value
    rho_0 = params['rho_0'].value
    # Bloch-Grueneisen phonon integrand; integration upper limit is Dt/T.
    func_ph = lambda x: (x ** 5) / ((numpy.exp(x) - 1) * (1 - numpy.exp(-x)))
    a = numpy.ones(len(t))
    for i in range(len(t)):
        a[i] = quad(func_ph, 0, Dt / t[i])[0]
    model1 = rho_0 + K * ((t / Dt) ** 5) * a
    return model1 - rho
################ IMPORT FILE #######################
# Sample ID and location of the measured data files.
sample = 'SC021'
datadir = '/Volumes/data/Projects/Spincurrents/Joe Batley/Measurements/SC021/Transport/Scattering Analysis/'
# R: Cu strip resistivity vs temperature; L: spin diffusion length vs temperature.
R = Analysis.AnalyseFile(datadir+'SC021_1_B_Cu_resistivity_vs_T.txt')
L = Analysis.AnalyseFile(datadir+'SC021_Spindiffusion_length_vs_T.txt')
################ FIT RESISTANCE DATA #######################
# create a set of Parameters for the Bloch-Grueneisen fit
# (K: prefactor, Dt: Debye temperature, rho_0: residual resistivity)
params = Parameters()
params.add('K', value= 9e-8,min=0.5e-8,max=5e-7)
params.add('Dt', value= 190,min=100,max=500)
params.add('rho_0', value= 2.9e-8,min=0.5e-8,max=10e-8)
# do fit, here with leastsq model
result = minimize(BG, params, args=(R.column('T (K)'), R.column('res')))
# calculate final result: fitted model = measured data + fit residual
final = R.column('res') + result.residual
R.add_column(final,column_header='BG')
# write error report
report_fit(params)
print params['K']
################ GET SCATTERING TIME #######################
# Interpolate resistivity onto the temperature points of the spin-length data.
rho = R.interpolate(L.column('T'))
print R.column_headers
# Spin relaxation time from the spin diffusion length and resistivity.
# NOTE(review): constants 1.6e-19 and 1.81e28 look like the electron charge
# and a carrier density -- confirm units before reuse.
tsf = L.column('Lam_Cu')**2*rho[:,2]*1.6e-19*1.81e28
tau = Analysis.AnalyseFile()
tau.add_column(L.column('T'),'T (K)')
tau.add_column(1/tsf,r'1/$\tau_{sf}$')
# Propagate the relative error on lambda into the 1/tau column.
tau_err = (L.column('Lam_err')/L.column('Lam_Cu'))/tsf
tau.add_column(tau_err,'1/t_err')
################ FIT SCATTERING TIME #######################
def phonon(sc_params,t,tau):
    """Residual of the phonon-mediated spin-flip-rate model.

    Reads the module-level ``params`` (result of the Bloch-Grueneisen
    resistivity fit) for K and Dt; ``sc_params`` supplies 'epsilon'
    (phonon spin-flip scale) and 'imp' (temperature-independent offset).

    :param t: array of temperatures (K).
    :param tau: measured 1/tau_sf values to subtract.
    :return: model spin-flip rate minus ``tau`` (residual for lmfit).
    """
    # Bloch-Grueneisen phonon integrand; integration upper limit is Dt/T.
    func_ph = lambda x:(x**5)/((numpy.exp(x)-1)*(1-numpy.exp(-x)))
    K = params['K'].value
    Dt = params['Dt'].value
    e = sc_params['epsilon'].value
    i = sc_params['imp'].value
    a=numpy.ones(len(t))
    for j in range(len(t)):
        ph = quad(func_ph,0,(Dt/t[j]))
        a[j] = ph[0]
    # Phonon contribution to the resistivity at each temperature.
    rho_ph = K * ((t/Dt)**5) * numpy.array(a)
    # Convert resistivity to a scattering rate, plus the impurity offset i.
    # NOTE(review): 8.45e28 and 9.1e-31 look like Cu carrier density and the
    # electron mass -- confirm.
    tau_ph_sf = ((e*8.45e28*(1.6e-19**2)*rho_ph)/9.1e-31)+i
    return (tau_ph_sf-tau)
# create a set of Parameters for the spin-flip scattering fit
# (epsilon: phonon spin-flip scale, imp: temperature-independent offset)
sc_params = Parameters()
sc_params.add('epsilon', value= 9e20)
sc_params.add('imp', value= 1e9)
# do fit, here with leastsq model
q=SP.PlotFile(tau.clone)
d = Analysis.AnalyseFile(tau.clone)
# NOTE(review): 'x<100 and x>230' is always False, so del_rows removes
# nothing; 'x<100 or x>230' was probably intended -- confirm before changing.
d.del_rows('T (K)',lambda x,y:x<100 and x>230)
sc_result = minimize(phonon, sc_params, args=(d.column('T (K)'), d.column(r'1/$\tau_{sf}$')))
# calculate final result: fitted rate = measured rate + fit residual
sc_final = (d.column(r'1/$\tau_{sf}$')) + sc_result.residual
d.add_column(sc_final,column_header='fit')
# write error report
report_fit(sc_params)
# Phonon spin-flip probability and its standard error from the fit.
e_ph = sc_params['epsilon'].value
e_ph_err = sc_params['epsilon'].stderr
print r'$\epsilon_ph$ = ' + str(e_ph) + '$\pm$' + str(e_ph_err)
print format_error(e_ph,e_ph_err,latex=True)
# Impurity spin-flip probability derived from the fitted offset and rho_0.
e_imp = sc_params['imp'].value*9.1e-31/(8.45e28*(1.6e-19**2)*params['rho_0'].value)
e_imp_err = e_imp*numpy.sqrt((sc_params['imp'].stderr/sc_params['imp'].value)**2 + (params['rho_0'].stderr/params['rho_0'].value)**2)
print r'$\epsilon_imp$ = ' + str(e_imp) + '$\pm$' + str(e_imp_err)
print format_error(e_imp,e_imp_err,latex=True)
################ PLOT SCATTERING DATA #######################
fit=SP.PlotFile(d.clone)
fit.template=SPF.JTBPlotStyle
t=SP.PlotFile(tau.clone)
t.template=SPF.JTBPlotStyle
# NOTE(review): this rebinds the name BG (previously the fit function);
# harmless here because the function is no longer called, but confusing.
BG=SP.PlotFile(R.clone)
BG.template=SPF.JTBPlotStyle
fit.figure()
t.figure()
BG.figure()
f=plt.gcf()
f.set_size_inches((6.5,3.75),forward=True) # Set for A4 - will make wrapper for this someday
# Left panel: 1/tau_sf data with the phonon-model fit.
plt.subplot2grid((1,2),(0,0))
# Legend text carrying the two fitted spin-flip probabilities.
l = r'$\epsilon_{ph}$ = ' + format_error(e_ph,e_ph_err,latex=True) + '\n' + r'$\epsilon_{imp}$ = ' + format_error(e_imp,e_imp_err,latex=True)
t.plot_xy('T (K)',r'1/$\tau_{sf}$',yerr='1/t_err',linestyle='',linewidth=1,marker='o')
fit.plot_xy('T (K)','fit',label=l,linestyle='-',linewidth=2,marker='')
t.ylabel = r'1/$\tau_{sf}$'
t.title = sample
################ PLOT B-G DATA #######################
#label_fit='B-G fit\n K = ' + str(params['K'].value) + '\n'+r'$\theta_D$ = ' + str(params['Dt'].value) + '\n'+r'$\rho_0$ = ' + str(params['rho_0'].value)
#label = 'data'
# Right panel: measured resistivity with the Bloch-Grueneisen fit.
plt.subplot2grid((1,2),(0,1))
BG.plot_xy('T (K)','res',linestyle='',linewidth=3,marker='o',label = r'Cu spacer $\rho$')
BG.plot_xy('T (K)','BG',linestyle='-',linewidth=2,marker='',label = 'B-G fit')
BG.ylabel = r'$\rho (\Omega m)$'
plt.tight_layout()
| [
"[email protected]"
]
| |
b73b9a5fa18f5955b3b7c1c96e2cc8c872c9b97c | 1eab574606dffb14a63195de994ee7c2355989b1 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/protocolstack/iptvglobals/igmpgrouprange/igmpgrouprange.py | ae57a679eaf427beb79281ee834097062c2960a6 | [
"MIT"
]
| permissive | steiler/ixnetwork_restpy | 56b3f08726301e9938aaea26f6dcd20ebf53c806 | dd7ec0d311b74cefb1fe310d57b5c8a65d6d4ff9 | refs/heads/master | 2020-09-04T12:10:18.387184 | 2019-11-05T11:29:43 | 2019-11-05T11:29:43 | 219,728,796 | 0 | 0 | null | 2019-11-05T11:28:29 | 2019-11-05T11:28:26 | null | UTF-8 | Python | false | false | 8,422 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class IgmpGroupRange(Base):
    """
    The IgmpGroupRange class encapsulates a list of igmpGroupRange resources that is managed by the user.
    A list of resources can be retrieved from the server using the IgmpGroupRange.find() method.
    The list can be managed by the user by using the IgmpGroupRange.add() and IgmpGroupRange.remove() methods.

    NOTE: this accessor appears auto-generated from the IxNetwork data model;
    all attribute access is delegated to Base._get_attribute/_set_attribute.
    """

    __slots__ = ()
    _SDM_NAME = 'igmpGroupRange'  # node name in the server-side data model

    def __init__(self, parent):
        super(IgmpGroupRange, self).__init__(parent)

    @property
    def Count(self):
        """The count of multicast groups in a range.

        Returns:
            number
        """
        return self._get_attribute('count')
    @Count.setter
    def Count(self, value):
        self._set_attribute('count', value)

    @property
    def FilterMode(self):
        """Define the Group Record type included in the Report messages.

        Returns:
            str
        """
        return self._get_attribute('filterMode')
    @FilterMode.setter
    def FilterMode(self, value):
        self._set_attribute('filterMode', value)

    @property
    def Increment(self):
        """The value used to enumerate all the addresses in the range.

        Returns:
            str
        """
        return self._get_attribute('increment')
    @Increment.setter
    def Increment(self, value):
        self._set_attribute('increment', value)

    @property
    def IpAddress(self):
        """The IP address of the first multicast group in the range.

        Returns:
            str
        """
        return self._get_attribute('ipAddress')
    @IpAddress.setter
    def IpAddress(self, value):
        self._set_attribute('ipAddress', value)

    @property
    def Name(self):
        """The name of the range containing multicast groups.

        Returns:
            str
        """
        return self._get_attribute('name')
    @Name.setter
    def Name(self, value):
        self._set_attribute('name', value)

    @property
    def ObjectId(self):
        """Unique identifier for this object (read-only; no setter).

        Returns:
            str
        """
        return self._get_attribute('objectId')

    @property
    def SourceCount(self):
        """The count of sources in the range.

        Returns:
            number
        """
        return self._get_attribute('sourceCount')
    @SourceCount.setter
    def SourceCount(self, value):
        self._set_attribute('sourceCount', value)

    @property
    def SourceIncrement(self):
        """The value used to enumerate all the source addresses in the range.

        Returns:
            str
        """
        return self._get_attribute('sourceIncrement')
    @SourceIncrement.setter
    def SourceIncrement(self, value):
        self._set_attribute('sourceIncrement', value)

    @property
    def SourceIpAddress(self):
        """The starting IP address of a range of sources.

        Returns:
            str
        """
        return self._get_attribute('sourceIpAddress')
    @SourceIpAddress.setter
    def SourceIpAddress(self, value):
        self._set_attribute('sourceIpAddress', value)

    @property
    def Type(self):
        """The type of the multicast group range.

        Returns:
            str
        """
        return self._get_attribute('type')
    @Type.setter
    def Type(self, value):
        self._set_attribute('type', value)

    def update(self, Count=None, FilterMode=None, Increment=None, IpAddress=None, Name=None, SourceCount=None, SourceIncrement=None, SourceIpAddress=None, Type=None):
        """Updates a child instance of igmpGroupRange on the server.

        Args:
            Count (number): The count of multicast groups in a range.
            FilterMode (str): Define the Group Record type included in the Report messages.
            Increment (str): The value used to enumerate all the addresses in the range.
            IpAddress (str): The IP address of the first multicast group in the range.
            Name (str): The name of the range containing multicast groups.
            SourceCount (number): The count of sources in the range.
            SourceIncrement (str): The value used to enumerate all the source addresses in the range.
            SourceIpAddress (str): The starting IP address of a range of sources.
            Type (str): The type of the multicast group range.

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() hands every named argument (plus self) to the base updater.
        self._update(locals())

    def add(self, Count=None, FilterMode=None, Increment=None, IpAddress=None, Name=None, SourceCount=None, SourceIncrement=None, SourceIpAddress=None, Type=None):
        """Adds a new igmpGroupRange node on the server and retrieves it in this instance.

        Args:
            Count (number): The count of multicast groups in a range.
            FilterMode (str): Define the Group Record type included in the Report messages.
            Increment (str): The value used to enumerate all the addresses in the range.
            IpAddress (str): The IP address of the first multicast group in the range.
            Name (str): The name of the range containing multicast groups.
            SourceCount (number): The count of sources in the range.
            SourceIncrement (str): The value used to enumerate all the source addresses in the range.
            SourceIpAddress (str): The starting IP address of a range of sources.
            Type (str): The type of the multicast group range.

        Returns:
            self: This instance with all currently retrieved igmpGroupRange data using find and the newly added igmpGroupRange data available through an iterator or index

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._create(locals())

    def remove(self):
        """Deletes all the igmpGroupRange data in this instance from server.

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()

    def find(self, Count=None, FilterMode=None, Increment=None, IpAddress=None, Name=None, ObjectId=None, SourceCount=None, SourceIncrement=None, SourceIpAddress=None, Type=None):
        """Finds and retrieves igmpGroupRange data from the server.

        All named parameters support regex and can be used to selectively retrieve igmpGroupRange data from the server.
        By default the find method takes no parameters and will retrieve all igmpGroupRange data from the server.

        Args:
            Count (number): The count of multicast groups in a range.
            FilterMode (str): Define the Group Record type included in the Report messages.
            Increment (str): The value used to enumerate all the addresses in the range.
            IpAddress (str): The IP address of the first multicast group in the range.
            Name (str): The name of the range containing multicast groups.
            ObjectId (str): Unique identifier for this object
            SourceCount (number): The count of sources in the range.
            SourceIncrement (str): The value used to enumerate all the source addresses in the range.
            SourceIpAddress (str): The starting IP address of a range of sources.
            Type (str): The type of the multicast group range.

        Returns:
            self: This instance with matching igmpGroupRange data retrieved from the server available through an iterator or index

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(locals())

    def read(self, href):
        """Retrieves a single instance of igmpGroupRange data from the server.

        Args:
            href (str): An href to the instance to be retrieved

        Returns:
            self: This instance with the igmpGroupRange data from the server available through an iterator or index

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
| [
"[email protected]"
]
| |
05ef3ed3def4fa3a9dce401fbc1e16f639f43e18 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/123/usersdata/28/25531/submittedfiles/testes.py | 09cbf99930f122c284e846161c34c4f872f93fa3 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | b = -3
# abs(b - 0) simplified: the distance of b from zero is just abs(b).
a = abs(b)
print(a)
#def distancia(p0, p):
# d = 0
# for i in range(0, len(a), 1):
# d = abs( | [
"[email protected]"
]
| |
9803533659323737846d4ea744ff2a7ff8d98bde | 02508aa773dcbd9939eb879952ee2cb3dd90bcad | /torch/distributed/nn/__init__.py | b3cedcc6011169161bed18af8797397420af528b | [
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
]
| permissive | dhivyadharshin/pytorch | d8a3b7f3c03e21e776ea34788d13743467b738c8 | 6a170011876bb8bd1909e8f60fba1270ac7a5577 | refs/heads/master | 2023-07-18T07:31:52.918955 | 2021-08-17T18:12:01 | 2021-08-17T18:12:01 | 397,330,616 | 5 | 0 | NOASSERTION | 2021-08-17T18:12:02 | 2021-08-17T16:57:16 | null | UTF-8 | Python | false | false | 84 | py | from .api.remote_module import RemoteModule
from .functional import * # noqa: F403
| [
"[email protected]"
]
| |
e54ff071f98fe853abfa4d2a1d3dfda418acb12f | 55647a80c8b412af9df0ba3f50595cc2f29c25e6 | /res/scripts/common/Lib/plat-mac/Carbon/File.py | c0c25c19dcfc7e1e788e857d3bcbd6b39a4c0c21 | []
| no_license | cnsuhao/WOT-0.9.17-CT | 0035eb6070fb4fab8d8ee9f8bbc676c10d511cfb | d1f932d8cabaf8aa21708622e87f83c8d24d6451 | refs/heads/master | 2021-06-08T18:11:07.039293 | 2016-11-19T19:12:37 | 2016-11-19T19:12:37 | null | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 358 | py | # 2016.11.19 20:00:52 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/Lib/plat-mac/Carbon/File.py
from _File import *
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\common\Lib\plat-mac\Carbon\File.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.11.19 20:00:52 Střední Evropa (běžný čas)
| [
"[email protected]"
]
| |
1905be51f9c00ebc3b098c76678348ffeb71035e | b7888fb921abeb2ad44ce6409bf62ecef77a458e | /src/djanban/apps/members/migrations/0019_remove_member_spent_time_factor.py | 2ab10ee0796f693207d2b06d3d54d558aeeb0a74 | [
"MIT"
]
| permissive | my-favorite-repositories/djanban | 303ce59f821d01f727536068b83f8e8485b7d649 | 6451688d49cf235d03c604b19a6a8480b33eed87 | refs/heads/master | 2021-03-01T14:23:19.745085 | 2018-05-15T17:12:01 | 2018-05-15T17:12:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-02-24 23:58
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Generated by Django (see file header); removes the 'spent_time_factor'
    # field from the 'member' model.

    dependencies = [
        ('members', '0018_auto_20170225_0040'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='member',
            name='spent_time_factor',
        ),
    ]
| [
"[email protected]"
]
| |
796ab06892c97d5436ecff7b9232792d22646d92 | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.4/tests/modeltests/empty/models.py | a2bbbad12f05aa2f32123e6f738a7f5a884c9e63 | []
| no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.4/tests/modeltests/empty/models.py | [
"[email protected]"
]
| |
cd5c850731be449ad2ef5c6e65eca0f57007320f | d4f1bd5e52fe8d85d3d0263ede936928d5811bff | /Python/Problem Solving/BOJ/boj2743.py | 88dc5176dd1699438bbf486e4a340f3bfb6c415e | []
| no_license | ambosing/PlayGround | 37f7d071c4402599995a50cac1e7f1a85c6d10dd | 0d5262dbb2fa2128ecb3fd969244fa647b104928 | refs/heads/master | 2023-04-08T04:53:31.747838 | 2023-03-23T06:32:47 | 2023-03-23T06:32:47 | 143,112,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | s = input()
# The character count of a string is its length; the manual counting loop
# (len_s = 0; for i in s: len_s += 1) just reimplemented the built-in len().
print(len(s))
| [
"[email protected]"
]
| |
ddcfb84625fbf00abc35ea2d697ae18e14dca3fa | b9d648a7cb56412f367492f93bb9acd27ab53e84 | /baralho_renzo.py | 0cd602729055a5e63d8d39cf19c97486e0b24d49 | [
"MIT"
]
| permissive | maribedran/oopy | 0d9a34ab820f427f0b6738fa49e434d780e7bf27 | 3f0629afee10f60f214cff04d07a27daa2fc8208 | refs/heads/master | 2020-12-26T13:02:16.938449 | 2016-04-17T15:40:54 | 2016-04-17T15:40:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 783 | py | from collections import namedtuple
from itertools import chain, product
from random import shuffle
Carta = namedtuple('Carta', 'valor naipe')
class Baralho():
    """A 52-card deck supporting len(), indexing, item assignment and repr.

    Implementing __len__/__getitem__/__setitem__ makes the deck usable with
    slicing, iteration and random.shuffle.
    """

    def __init__(self):
        ranks = [str(n) for n in range(2, 11)] + list('JQKA')
        suits = '♣♡♠♢'
        # One Carta per (rank, suit) pair, grouped by suit, same order as
        # product(naipes, valores) would produce.
        self.cartas = [Carta(rank, suit) for suit in suits for rank in ranks]

    def __repr__(self):
        return repr(self.cartas)

    def __getitem__(self, posicao):
        return self.cartas[posicao]

    def __len__(self):
        return len(self.cartas)

    def __setitem__(self, posicao, carta):
        self.cartas[posicao] = carta
# Demo: the sequence protocol makes the deck sliceable, measurable,
# shuffleable (shuffle needs __len__ + __getitem__ + __setitem__) and iterable.
baralho = Baralho()
print(baralho)
mao = baralho[:5]
print(mao)
print(len(baralho))
shuffle(baralho)
print(baralho)
for carta in baralho:
    print(carta)
| [
"[email protected]"
]
| |
6a24ee0acfd7a5e70f4e6b359e81a3a6662bbc34 | d6be053915c065fe6da71afddd28429d144fee68 | /streamlit_tutorial/main_concepts/app_02_st-write.py | b0292b32740b780363ad49a71892cadeb280fa04 | []
| no_license | DrShushen/practice_py | 61bc28f52783f8304cce1d834def4934ba6ee8e1 | cf40ec43ccd73aa835c4e65e6a4b41408b90a3ea | refs/heads/master | 2023-01-08T06:57:10.852157 | 2023-01-03T22:58:11 | 2023-01-03T22:58:11 | 211,668,464 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | import streamlit as st
import pandas as pd
# st.write is polymorphic: a plain string is rendered as text, a pandas
# DataFrame is rendered as a table.
st.write("Here's our first attempt at using data to create a table:")
st.write(pd.DataFrame({"first column": [1, 2, 3, 4], "second column": [10, 20, 30, 40]}))
| [
"[email protected]"
]
| |
e719eb80d4457db6ea99dc3821c5929866765f80 | e311664619d469addd2c77566ec97d24affcbfd9 | /src/apps/sistema/admin.py | e984d54edcb131f638648697eb5a1205922b2c8b | []
| no_license | danielhuamani/Proyecto-taller-base-datos | 361dc8c915dff36a9ce96a7147c11f0af9d51227 | 5d791383f77f8042a2890db4cfd31079c6d1dc7b | refs/heads/master | 2016-08-11T13:47:03.169317 | 2015-12-22T04:28:52 | 2015-12-22T04:28:52 | 46,673,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 193 | py | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Usuario
@admin.register(Usuario)
class UsuarioAdmin(admin.ModelAdmin):
list_display = ('email', 'password')
| [
"[email protected]"
]
| |
7e3a908b15a1ae21c5a415ad9a3bd556966e7eed | b21abd3873c76739ceefd1b4613a343ba2b454d1 | /jwst/associations/__init__.py | d301f9c7e12a2e7fee6c0d7ec0e7ed537bfa1211 | [
"BSD-2-Clause"
]
| permissive | rij/jwst | 96a7baf95de953c51bbe67f3cdd459c114c47eef | 1d3acecb28d9a3dcb44b993e451b69da9856187d | refs/heads/master | 2020-12-24T09:56:21.784342 | 2016-06-09T19:17:01 | 2016-06-09T19:17:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | from __future__ import absolute_import
from .association import (Association, AssociationRegistry)
from .pool import AssociationPool
from .generate import generate
| [
"[email protected]"
]
| |
4abb0b3416a912f00495bdac12ea344e0e5c4234 | 6490638f15a2dfbe0cec9725186f9784d57c92f0 | /UnitTest/testSEGMENT.py | b39851c1efe3ad9480414b8fea7c6e6a7eb3a621 | [
"MIT"
]
| permissive | khawatkom/SpacePyLibrary | af9c490ef796b9d37a13298c41df1fb5bf6b3cee | c94415e9d85519f345fc56938198ac2537c0c6d0 | refs/heads/master | 2020-05-14T21:52:39.388979 | 2019-04-17T17:06:04 | 2019-04-17T17:06:04 | 181,970,668 | 1 | 0 | null | 2019-04-17T21:26:44 | 2019-04-17T21:26:44 | null | UTF-8 | Python | false | false | 2,419 | py | #!/usr/bin/env python3
#******************************************************************************
# (C) 2018, Stefan Korner, Austria *
# *
# The Space Python Library is free software; you can redistribute it and/or *
# modify it under under the terms of the MIT License as published by the *
# Massachusetts Institute of Technology. *
# *
# The Space Python Library is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *
# for more details. *
#******************************************************************************
# CCSDS Stack - Unit Tests *
#******************************************************************************
import CCSDS.SEGMENT, testData
#############
# functions #
#############
def test_SEGMENT_DUoperations():
  """Verify decoding of TC transfer segment data units.

  For each test segment the sequence flags and MAP ID decoded by
  CCSDS.SEGMENT.TCsegment are compared against the expected values from
  testData. Returns True on success, False (with a diagnostic print) on
  the first mismatch. The two previously duplicated check blocks are
  folded into one data-driven loop.
  """
  testCases = [
    ("tcSegment1", testData.TC_SEGMENT_01,
     testData.TC_SEGMENT_01_sequenceFlags, testData.TC_SEGMENT_01_mapId),
    ("tcSegment2", testData.TC_SEGMENT_02,
     testData.TC_SEGMENT_02_sequenceFlags, testData.TC_SEGMENT_02_mapId)]
  for name, binaryData, expectedFlags, expectedMapId in testCases:
    segment = CCSDS.SEGMENT.TCsegment(binaryData)
    if segment.sequenceFlags != expectedFlags:
      print(name, "sequenceFlags wrong:", segment.sequenceFlags, "- should be", expectedFlags)
      return False
    if segment.mapId != expectedMapId:
      print(name, "mapId wrong:", segment.mapId, "- should be", expectedMapId)
      return False
  return True
########
# main #
########
if __name__ == "__main__":
  # run the unit test standalone and report the overall verdict
  print("***** test_SEGMENT_DUoperations() start")
  retVal = test_SEGMENT_DUoperations()
  print("***** test_SEGMENT_DUoperations() done:", retVal)
| [
"[email protected]"
]
| |
68b54e70382cdd4a0198bd401b8281a41aedd8bf | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03835/s488234349.py | e56e10d8295dca46ab608cfccce640f2972f7441 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | k,s = map(int, input().split())
# Count pairs (i, j) in [0, k]^2 whose complement t = s - (i + j)
# also lies in [0, k].
ans = sum(
    1
    for i in range(k + 1)
    for j in range(k + 1)
    if 0 <= s - (i + j) <= k
)
print(ans) | [
"[email protected]"
]
| |
706418248d2b6c25981b3d2197b0838bed81d752 | 15ed3ab4510677e6df9b11af8fd7a36fc6d826fc | /rado/2014-10/mc2/nice-res-2014-10-29/mc2.py | 3188ce25fe60f46da2959be84ae326f91d4945c8 | []
| no_license | pe-ge/Computational-analysis-of-memory-capacity-in-echo-state-networks | 929347575538de7015190d35a7c2f5f0606235f2 | 85873d8847fb2876cc8a6a2073c2d1779ea1b20b | refs/heads/master | 2020-04-02T08:08:38.595974 | 2018-01-17T08:12:26 | 2018-01-17T08:12:26 | 61,425,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,473 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
mc2.py
Created 17.10.2014
Based on mc.py
Goal: Measuring Memory Capacity for random matrices
"""
from numpy import *
import matplotlib.pyplot as plt
# Network dimensions used throughout this module.
p = 1 # one input node
q = 100 # 100 reservoir nodes
r = 200 # 200 output nodes

# Experiment parameters shared by memory_capacity() and the plot routines.
params = {
    'MEMORY_MAX': 200,  # longest input delay tested
    'ITERATIONS': 2000,  # total driving steps per training run
    'ITERATIONS_SKIPPED': 1000,  # washout steps before recording states
    'ITERATIONS_COEF_MEASURE': 1000,  # steps used to measure correlations
    'RUNS': 1,  # evaluation runs per trained network
    'NETS': 100,  # number of random networks averaged over
}

sigma = 0.10  # std. dev. of the reservoir weight distribution

# Weight/input distributions; they read q, p and params from this module.
dist_WI = lambda: random.uniform(-0.1,0.1,[q,p])
dist_input = lambda: random.uniform(-1., 1., params['ITERATIONS']) # maybe [1,1] ?
#dist_W = lambda sigma: random.uniform(-.13, .13, [q,q])
dist_W = lambda sigma: random.normal(0., sigma, [q,q])
def memory_capacity(W, WI, params):
    """Calculates memory capacity of a NN
    [given by its input weights WI and reservoir weights W].

    W = q x q matrix storing hidden reservoir weights
    WI = q x p matrix storing input weights

    Also reads the module-level globals ``q`` (reservoir size) and
    ``dist_input`` (input sequence generator).

    Returns: a tuple (MC, std)
    MC: memory capacity for history 0..(MEMORY_MAX - 1)
        [a vector of length MEMORY_MAX]
    std: standard error for each value of MC (std / sqrt(RUNS))
    """
    # load parameters to local variables for better readability
    MEMORY_MAX = params['MEMORY_MAX']
    ITERATIONS = params['ITERATIONS']
    ITERATIONS_SKIPPED = params['ITERATIONS_SKIPPED']
    ITERATIONS_MEASURED = ITERATIONS - ITERATIONS_SKIPPED
    ITERATIONS_COEF_MEASURE = params['ITERATIONS_COEF_MEASURE']
    RUNS = params['RUNS']

    # vector initialization
    X = zeros([q, 1])                     # reservoir activations
    S = zeros([q, ITERATIONS_MEASURED])   # recorded reservoir states

    # generate random input; dimension: [ITERATIONS, 1]
    u = dist_input()

    # drive the reservoir and record its states into S after the washout
    for it in range(ITERATIONS):
        X = tanh(dot(W, X) + dot(WI, u[it]))
        if it >= ITERATIONS_SKIPPED:
            S[:, it - ITERATIONS_SKIPPED] = X[:, 0]

    # prepare matrix D of desired values (that is, shifted inputs)
    assert MEMORY_MAX < ITERATIONS_SKIPPED
    D = zeros([MEMORY_MAX, ITERATIONS_MEASURED])
    for h in range(MEMORY_MAX):  # fill each row with input delayed by h
        D[h, :] = u[ITERATIONS_SKIPPED - h : ITERATIONS - h]

    # train output weights via the pseudoinverse: WO = D . S+
    S_PINV = linalg.pinv(S)
    WO = dot(D, S_PINV)

    # do a new run for an unbiased test of quality of our newly trained WO;
    # we skip MEMORY_MAX iterations to have a large enough window
    MC = zeros([RUNS, MEMORY_MAX])  # memory capacity per run and delay
    for run in range(RUNS):
        u = dist_input()
        X = zeros([q, 1])
        o = zeros([MEMORY_MAX, ITERATIONS_COEF_MEASURE])
        for it in range(ITERATIONS_COEF_MEASURE + MEMORY_MAX):
            X = tanh(dot(W, X) + dot(WI, u[it]))
            if it >= MEMORY_MAX:
                # output nodes computed from the trained readout WO
                o[:, it - MEMORY_MAX] = dot(WO, X)[:, 0]
        # correlate each output with the correspondingly delayed input
        # (the old unused local 'k = h + 1' has been removed)
        for h in range(MEMORY_MAX):
            cc = corrcoef(u[MEMORY_MAX - h : MEMORY_MAX + ITERATIONS_COEF_MEASURE - h], o[h, :])[0, 1]
            MC[run, h] = cc * cc
    return (average(MC, axis=0), std(MC, axis=0) / sqrt(RUNS))
def kindofvector(vec):
    """Print a short human-readable description of vec's shape."""
    shp = vec.shape
    if len(shp) == 1:
        print('vector of length %d' % shp[0])
        return
    rows = shp[0]
    cols = shp[1]
    if rows == 1:
        print('a long row (with %d columns)' % cols)
    elif cols == 1:
        print('a long column (with %d rows)' % rows)
    elif rows > cols:
        print('a tall rectangle matrix (%d x %d)' % shp)
    elif rows < cols:
        print('a wide rectangle matrix (%d x %d)' % shp)
    elif rows == cols:
        print('a square matrix (%d x %d)' % shp)
    else:
        # unreachable for integer dimensions; kept for parity with the original
        print('an alien matrix of shape: %s' % str(shp))
def main_plot_MCk():
    """Average MC(k) over many random networks and plot it versus delay k.

    Reads the module-level ``params``, ``sigma`` and the ``dist_*``
    generators. Shows the capacity curve with 3-sigma error bars, then a
    second figure with its discrete derivative. Uses a local NETS = 100
    rather than params['NETS'].
    """
    # plot it
    #sigma = 0.10
    NETS = 100
    # initial setup
    MCs = zeros([NETS, params['MEMORY_MAX']])
    for net in range(NETS):
        W = dist_W(sigma)
        WI = dist_WI()
        # calculate MC for history 0..199
        MC, _ = memory_capacity(W, WI, params)
        MC.shape = (1, MC.size)
        MCs[net, :] = MC
    x = array(range(params['MEMORY_MAX']))
    y = average(MCs, axis=0)
    error = std(MCs, axis=0) / sqrt(NETS)  # standard error of the mean
    print("MC = %f" % sum(y))
    # with open('cc-vs-history-values.txt', 'w') as f:
    #     print(y, file=f)
    # with open('cc-vs-history-derivatives.txt', 'w') as f:
    #     print(diff(y), file=f)
    plt.errorbar(x, y, yerr=(error * 3))
    #plt.plot(range(params['MEMORY_MAX']-1), diff(y))
    plt.grid(True)
    plt.ylabel('correlation coefficient')
    plt.xlabel('memory size')
    plt.ylim([0,1])
    plt.title('Memory capacity ($\sigma_{W^R}$ = %.3f) (confidence = $3\sigma$) (runs = %d) ' % (sigma, params['RUNS']))
    plt.show()
    # second figure: discrete derivative of the MC curve
    plt.plot(range(params['MEMORY_MAX']-1), diff(y))
    plt.show()
def main_plot_MC_sigma():
    """Plot total memory capacity as a function of the reservoir sigma.

    For each of POINTS sigma values, averages sum(MC) over params['NETS']
    random networks. Side effect: mutates the module-level params['RUNS'].
    """
    # 0.13s na iteraciu (tu 4000)
    POINTS = 100
    NETS = params['NETS']
    sigmas = linspace(0.001, 0.2, POINTS)
    params['RUNS'] = 1
    y = zeros([NETS, POINTS])
    for i, sigma in enumerate(sigmas):
        for net in range(NETS):
            W = dist_W(sigma)
            WI = dist_WI()
            MC, _ = memory_capacity(W, WI, params)
            y[net, i] = sum(MC)  # total capacity = area under MC(k)
            # progress indicator on one line (\r rewinds the cursor)
            print("\rsigma: %.3f (%d of %d), net: (%d of %d)" % (sigma, i, POINTS, net, NETS), end="")
    y, error = (average(y, axis=0), std(y, axis=0) / sqrt(NETS))
    x = sigmas
    plt.errorbar(x, y, yerr=(error * 3))
    plt.plot(sigmas, y)
    plt.show()
def main():
    """Entry point: run the MC-vs-delay experiment (MC-vs-sigma is disabled)."""
    #main_plot_MC_sigma()
    main_plot_MCk()

if __name__ == '__main__':
    main()
"[email protected]"
]
| |
c6ebae4c39070dc4f956e0aa6d460198565a6724 | 8e01f8c0c6ae1ab1f2cd34408577dc8affb8288e | /slingsby/articles/migrations/0002_auto_20150305_2339.py | e7d8f839fe8f0fac077f1092d88772230280b0ed | []
| no_license | TelemarkAlpint/slingsby | b8122f0a367d81c2cf8809a91827426de9e93e2c | e480ebf12f7d5dddeca242b1c0ed508631a6674c | refs/heads/master | 2021-01-10T15:12:28.205841 | 2017-09-13T21:20:01 | 2017-09-13T21:20:25 | 8,419,417 | 2 | 0 | null | 2020-03-17T21:19:35 | 2013-02-25T22:11:16 | Python | UTF-8 | Python | false | false | 512 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('articles', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='subpagearticle',
name='title',
field=models.CharField(help_text=b'Hva skal undersiden hete?', unique=True, max_length=200, verbose_name=b'tittel'),
preserve_default=True,
),
]
| [
"[email protected]"
]
| |
86158ba972b588256136d51e4ae6672785f5eee1 | b4afb44b8f483c048716fe12d778186ce68ac846 | /AutoFrameworkForAppiumPy/com/qa/automation/appium/cases/ios/ffan/common/check_network.py | 47a35ee729958f64215c24e69771b597aaf20c60 | []
| no_license | liu111xiao111/UItest | 64309b2c85f6d2334d64bb0875ba9ced459ebb1e | 67e2acc9a99da81022e286e8d8ec7ccb12636ff3 | refs/heads/master | 2021-09-01T18:30:28.044296 | 2017-12-28T04:36:46 | 2017-12-28T04:36:46 | 115,585,226 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 670 | py | # -*- coding: utf-8 -*-
from subprocess import Popen, PIPE
# Result codes returned by CheckNetworkStatus.checkNetwork().
CONNECTION = 0
NOCONNECTION = -1
# Sentinel: str.find() returns -1 when the substring is absent.
NOFIND = -1
class CheckNetworkStatus:
    '''
    usage : Check the network status of the attached Android device via adb.
    '''

    def __init__(self):
        pass

    def checkNetwork(self):
        """Ping baidu.com from the device through adb.

        :return: CONNECTION if the host name resolved (no 'unknown' in the
            ping output), NOCONNECTION otherwise (stderr is printed).
        """
        cmd = 'adb shell ping -w 3 baidu.com'
        ret = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
        out, err = ret.communicate()
        # BUG FIX: on Python 3 Popen pipes yield bytes, so the original
        # out.find('unknown') with a str argument raised TypeError.
        # Decode before searching for the "unknown host" marker.
        if out.decode(errors='replace').find('unknown') == NOFIND:
            return CONNECTION
        else:
            print(err)
            return NOCONNECTION
if __name__ == '__main__':
    # Manual smoke test: ping once through adb and report the result.
    checkNetworkStatus = CheckNetworkStatus()
    checkNetworkStatus.checkNetwork()
"[email protected]"
]
| |
9e90df95e59de10a43e6085045adb77c5d333abc | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.5/django/conf/locale/et/__init__.py | 1de82d1d47675d85c8bc8f0ca94a0c8ea68b3dc2 | []
| no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.5/django/conf/locale/et/__init__.py | [
"[email protected]"
]
| |
364728cd83b127d6eeb34938d1d4dd9be8dd794e | e993c53e4e1a52acc8279129c67feb0d3a1b9cbc | /catkin_ws/src/o2as_cad_matching/cfg/CadMatching.cfg | 2ffd7a6a9e0af0082bdd6b1054b9a6e0b92447db | [
"MIT"
]
| permissive | DevwratJoshi/ur-o2as | 134ec87d371a7d9f9b64cbeb4030b23cf114812d | 265249c27908a79a301014168394db0c0dc2204c | refs/heads/master | 2021-01-03T16:03:57.344339 | 2020-02-17T03:58:39 | 2020-02-17T03:58:39 | 240,143,319 | 0 | 0 | MIT | 2020-02-13T00:21:52 | 2020-02-13T00:21:51 | null | UTF-8 | Python | false | false | 2,602 | cfg | #!/usr/bin/env python
PACKAGE = "o2as_cad_matching"
from math import pi
from dynamic_reconfigure.parameter_generator_catkin import *
# Build the dynamic_reconfigure parameter description for the CAD matching
# node. Each gen.const integer ID must stay in sync with the CAD model
# database used by the matcher.
gen = ParameterGenerator()

object_id = gen.enum([
    gen.const("01_BASE"                     , int_t, 1, "Base Panel"),
    gen.const("02_PANEL"                    , int_t, 2, "Output shaft fixing plate"),
    gen.const("03_PANEL2"                   , int_t, 3, "Motor fixing plate"),
    gen.const("04_37D_GEARMOTOR_50_70"      , int_t, 4, "Geared motor (Gear ratio 1:70)"),
    gen.const("05_MBRFA30_2_P6"             , int_t, 5, "Pulley for Round Belt (4mm) - Setscrew P.D. 30mm"),
    gen.const("06_MBT4_400"                 , int_t, 6, "Polyurethane round belt (welded joint product) P.D. 4mm L=400mm"),
    gen.const("07_SBARB6200ZZ_30"           , int_t, 7, "Bearings with Housings (Double Bearings)"),
    gen.const("08_KZAF1075NA4WA55GA20AA0"   , int_t, 8, "Drive shaft (Straight) D10h6"),
    gen.const("09_EDCS10"                   , int_t, 9, "End Cap for Shaft"),
    gen.const("10_CLBPS10_17_4"             , int_t, 10, "Bearing Spacers For Inner Ring (output pulley)"),
    gen.const("11_MBRAC60_2_10"             , int_t, 11, "Pulley for Round Belts Clamping Type P.D. 60mm"),
    gen.const("12_CLBUS6_9_9_5"             , int_t, 12, "Bearing Spacers For Inner Ring (tension pulley)"),
    gen.const("13_MBGA30_2"                 , int_t, 13, "Idler for Round Belt - Wide"),
    gen.const("14_BGPSL6_9_L30_F8"          , int_t, 14, "Bearing Shaft Screw"),
    gen.const("15_SLBNR6"                   , int_t, 15, "M6 Hex Nut (Fixing for idler shaft)"),
    gen.const("16_SPWF6"                    , int_t, 16, "M6 Flat Washer (Fixing for idler shaft)"),
    gen.const("17_SCB4_10"                  , int_t, 17, "10mm M4 Socket Head Cap Screw (metric coarse thread)"),
    gen.const("100_robotiq_calib_marker"    , int_t, 100, "Aruco calibration board for robotiq gripper"),
    gen.const("102_CalTarget15"             , int_t, 102, "3D calibration target")],
    "An enum to set object id")

# Camera selection enum kept for reference; currently disabled.
# camera = gen.enum([
#     gen.const("phoxi"           , int_t, 1, "PhoXi camera."),
#     gen.const("b_bot_camera"    , int_t, 2, "RealSense camera attached on the b_bot.")],
#     "An enum to set camera name")

# Reconfigurable parameter: which part to detect (default 7, range 1-1000).
gen.add("object_id" , int_t , 1 << 0, "id of the object to be detected." , 7, 1, 1000, edit_method=object_id)
# gen.add("camera"    , int_t , 1 << 0, "id of the camera to be used to detect object." , 1, 1, 2, edit_method=camera)

exit(gen.generate(PACKAGE, "o2as_cad_matching", "CadMatching"))
| [
"[email protected]"
]
| |
96818b870c57707871eaa9aaa64013c4dddb882d | e483b0515cca39f4ddac19645f03fc1695d1939f | /google/ads/google_ads/v1/proto/errors/mutate_error_pb2.py | 7d4341f914f9ea47ea5cab80d378555a1d3b6cf3 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
]
| permissive | BrunoWMello/google-ads-python | 0af63d2ca273eee96efd8a33252d27112c049442 | 9b074a037d10f0c1208a00d5d41a8e5e25405f28 | refs/heads/master | 2020-05-27T04:37:47.669144 | 2019-05-24T17:07:31 | 2019-05-24T17:07:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 4,602 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v1/proto/errors/mutate_error.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v1/proto/errors/mutate_error.proto',
package='google.ads.googleads.v1.errors',
syntax='proto3',
serialized_options=_b('\n\"com.google.ads.googleads.v1.errorsB\020MutateErrorProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v1/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V1.Errors\312\002\036Google\\Ads\\GoogleAds\\V1\\Errors\352\002\"Google::Ads::GoogleAds::V1::Errors'),
serialized_pb=_b('\n7google/ads/googleads_v1/proto/errors/mutate_error.proto\x12\x1egoogle.ads.googleads.v1.errors\x1a\x1cgoogle/api/annotations.proto\"\xee\x01\n\x0fMutateErrorEnum\"\xda\x01\n\x0bMutateError\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12\x16\n\x12RESOURCE_NOT_FOUND\x10\x03\x12!\n\x1dID_EXISTS_IN_MULTIPLE_MUTATES\x10\x07\x12\x1d\n\x19INCONSISTENT_FIELD_VALUES\x10\x08\x12\x16\n\x12MUTATE_NOT_ALLOWED\x10\t\x12\x1e\n\x1aRESOURCE_NOT_IN_GOOGLE_ADS\x10\n\x12\x1b\n\x17RESOURCE_ALREADY_EXISTS\x10\x0b\x42\xeb\x01\n\"com.google.ads.googleads.v1.errorsB\x10MutateErrorProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v1/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V1.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V1\\Errors\xea\x02\"Google::Ads::GoogleAds::V1::Errorsb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_MUTATEERRORENUM_MUTATEERROR = _descriptor.EnumDescriptor(
name='MutateError',
full_name='google.ads.googleads.v1.errors.MutateErrorEnum.MutateError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RESOURCE_NOT_FOUND', index=2, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ID_EXISTS_IN_MULTIPLE_MUTATES', index=3, number=7,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INCONSISTENT_FIELD_VALUES', index=4, number=8,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MUTATE_NOT_ALLOWED', index=5, number=9,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RESOURCE_NOT_IN_GOOGLE_ADS', index=6, number=10,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RESOURCE_ALREADY_EXISTS', index=7, number=11,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=142,
serialized_end=360,
)
_sym_db.RegisterEnumDescriptor(_MUTATEERRORENUM_MUTATEERROR)
_MUTATEERRORENUM = _descriptor.Descriptor(
name='MutateErrorEnum',
full_name='google.ads.googleads.v1.errors.MutateErrorEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_MUTATEERRORENUM_MUTATEERROR,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=122,
serialized_end=360,
)
_MUTATEERRORENUM_MUTATEERROR.containing_type = _MUTATEERRORENUM
DESCRIPTOR.message_types_by_name['MutateErrorEnum'] = _MUTATEERRORENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
MutateErrorEnum = _reflection.GeneratedProtocolMessageType('MutateErrorEnum', (_message.Message,), dict(
DESCRIPTOR = _MUTATEERRORENUM,
__module__ = 'google.ads.googleads_v1.proto.errors.mutate_error_pb2'
,
__doc__ = """Container for enum describing possible mutate errors.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v1.errors.MutateErrorEnum)
))
_sym_db.RegisterMessage(MutateErrorEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
]
| |
53c338a2bf6735812b6fac338fbbd370974fc717 | 45a0434de7cb5aaf51f372a9ea39c2e62528e8d7 | /models/hier_seq2seq_sampled_softmax_v2.py | 57077006e62eeedc48d15c7745215cf8d05261ee | []
| no_license | hongtaowutj/Seq2Seq-Keyphrase-Generation | 44b5b24f3af7a85c24fc5ef231c53c1dac7e48ff | 6f2d08222b108b543b7628b32e98480f2e3a32b0 | refs/heads/master | 2020-03-27T10:43:09.941194 | 2018-07-23T07:21:35 | 2018-07-23T07:21:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,712 | py | # -*- coding: utf-8 -*-
# author: @inimah
# date: 25.04.2018
import os
import sys
sys.path.append(os.getcwd())
sys.path.insert(0,'..')
import numpy as np
import tensorflow as tf
from keras.models import Model
from keras.layers import Input, Embedding, Dropout
from keras.layers import LSTM, GRU, MaxPooling1D, Conv1D, GlobalMaxPool1D
from keras.layers import Dense, Lambda, Reshape, TimeDistributed, concatenate, Activation, Add
import keras.backend as K
from keras.models import load_model
from keras.utils import to_categorical
from keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard, Callback
from utils.sampled_softmax import SamplingLayer
from utils.predict_softmax import PredictionLayer
from utils.data_iterator import Dataiterator
from utils.data_connector import DataConnector
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
class HierarchySampledSoftmax():
    """Hierarchical seq2seq keyphrase generator trained with sampled softmax.

    Encoder: per-sentence CNN n-gram features + bidirectional GRU, composed
    over sentences by a document-level bidirectional GRU. Decoder: a single
    GRU whose per-step loss comes from a sampled-softmax layer. Words get a
    frozen pretrained embedding plus a trainable OOV embedding added on top.
    """

    def __init__(self, encoder_length, decoder_length, max_sents, embedding_dim, birnn_dim, rnn_dim, vocab_size, num_samples, filepath, filename, batch_train_iter, batch_val_iter, batch_size, steps_epoch, val_steps, epochs):
        # Sequence geometry / model sizes.
        self.encoder_length = encoder_length
        self.decoder_length = decoder_length
        self.max_sents = max_sents
        self.vocab_size = vocab_size
        self.embedding_dim = embedding_dim
        self.birnn_dim = birnn_dim
        self.rnn_dim = rnn_dim
        self.num_samples = num_samples
        # Checkpoint / plot output location.
        self.filepath = filepath
        self.filename = filename
        # Batch iterators and training schedule.
        self.batch_train_iter = batch_train_iter
        self.batch_val_iter = batch_val_iter
        self.batch_size = batch_size
        self.steps_epoch = steps_epoch
        self.val_steps = val_steps
        self.epochs = epochs
        # Set from oov_embedding.shape[0] when the graph is built.
        self.oov_size = 0

        # for storing trained graph models
        self.in_document = None
        self.in_decoder = None
        self.oov_lambda = None
        self.oov_activator = None
        self.encoder_model = None
        self.decoder_model = None
        self.sent_encoder = None
        self.dec_embedded_sequences = None
        self.embed_decoder = None
        self.oov_embed_decoder = None
        self.labels = None
        self.fwd_decoder = None
        self.pred_softmax = None
        self.train_model = None
        self.history = None
        self.eval_model = None
        self.perplexity_model = None
        self.prediction_model = None
        # NOTE(review): duplicate assignment (pred_softmax set above too).
        self.pred_softmax = None

    def train_hier_sampled_softmax(self, pretrained_embedding, oov_embedding):
        """Build the trainable graph (encoder + teacher-forced decoder).

        Args:
            pretrained_embedding: frozen embedding matrix; its first
                dimension redefines self.vocab_size.
            oov_embedding: trainable embedding for OOV rows, assumed to
                occupy the LAST oov_size ids of the vocabulary.

        Returns:
            Keras Model mapping [document, decoder tokens, initial state,
            labels] to one sampled-softmax loss tensor per decoder step.
        """
        ### Encoder model
        self.vocab_size = pretrained_embedding.shape[0]
        self.oov_size = oov_embedding.shape[0]
        # Ids below valid_words are in-vocabulary; ids >= valid_words are OOV.
        valid_words = self.vocab_size - self.oov_size

        # sentence input
        in_sentence = Input(shape=(self.encoder_length,), name='sent-input', dtype='int32')
        # Shift OOV ids into the OOV-embedding range; relu clamps in-vocab
        # (negative) ids to OOV index 0 — presumably a neutral/zero row,
        # since its vector is ADDED to the pretrained one (verify).
        oov_in_sentence = Lambda(lambda x: x - valid_words)(in_sentence)
        oov_in_sentence = Activation('relu')(oov_in_sentence)
        # document input
        in_document = Input(shape=(self.max_sents, self.encoder_length), name='doc-input', dtype='int32')

        # embedding layer: frozen pretrained + trainable OOV, summed.
        embed_encoder = Embedding(self.vocab_size, self.embedding_dim, input_length=self.encoder_length, weights = [pretrained_embedding], trainable = False, name='embedding-encoder')
        oov_embed_encoder = Embedding(self.oov_size, self.embedding_dim, input_length=self.encoder_length, weights = [oov_embedding], trainable = True, name='oov_embedding_encoder')

        in_enc_embedded = embed_encoder(in_sentence)
        oov_in_enc_embedded = oov_embed_encoder(oov_in_sentence)

        # Add the embedding matrices
        enc_embedded_sequences = Add()([in_enc_embedded, oov_in_enc_embedded])

        # CNN Block to capture N-grams features: three conv/dropout/pool
        # stages with shrinking kernels and growing filter counts.
        filter_length = [5,3,2]
        nb_filter = [16, 32, 64]
        pool_length = 2
        for i in range(len(nb_filter)):
            enc_embedded_sequences = Conv1D(filters=nb_filter[i],
                                            kernel_size=filter_length[i],
                                            padding='valid',
                                            activation='relu',
                                            kernel_initializer='glorot_normal',
                                            strides=1, name='conv_%s'%str(i+1))(enc_embedded_sequences)
            enc_embedded_sequences = Dropout(0.1, name='dropout_%s'%str(i+1))(enc_embedded_sequences)
            enc_embedded_sequences = MaxPooling1D(pool_size=pool_length, name='maxpool_%s'%str(i+1))(enc_embedded_sequences)

        # Bidirectional GRU over the CNN features (two GRUs, one reversed).
        fwd_encoder = GRU(self.birnn_dim, name='fwd-sent-encoder')
        bwd_encoder = GRU(self.birnn_dim, name='bwd-sent-encoder', go_backwards=True)
        out_encoder_1 = fwd_encoder(enc_embedded_sequences)
        out_encoder_2 = bwd_encoder(enc_embedded_sequences)
        out_bidir_encoder = concatenate([out_encoder_1, out_encoder_2], axis=-1, name='bidir-sent-encoder')

        #### 1. Sentence Encoder
        sent_encoder = Model(inputs=in_sentence, outputs=out_bidir_encoder)
        self.sent_encoder = sent_encoder

        #### 2. Document Encoder: apply the sentence encoder to every
        # sentence of the document, then run a bidirectional GRU over them.
        encoded = TimeDistributed(sent_encoder, name='sent-doc-encoded')(in_document)

        fwd_doc_encoder = GRU(self.birnn_dim, return_state=True, name='fwd-doc-encoder')
        bwd_doc_encoder = GRU(self.birnn_dim, return_state=True, name='bwd-doc-encoder', go_backwards=True)
        out_encoder_doc_1, doc_eh_1 = fwd_doc_encoder(encoded)
        out_encoder_doc_2, doc_eh_2 = bwd_doc_encoder(encoded)
        # Concatenated document summary; also serves as the decoder's
        # initial state at t == 0 (so 2*birnn_dim must equal rnn_dim).
        out_bidir_doc_encoder = concatenate([out_encoder_doc_1, out_encoder_doc_2],axis=-1)

        encoder_model = Model(inputs=in_document, outputs=out_bidir_doc_encoder)
        self.encoder_model = encoder_model

        ### Decoder model
        # input placeholder for teacher forcing (link ground truth to decoder input)
        in_decoder = Input(shape=(None, ), name='decoder_input', dtype='int32')
        # Same in-vocab/OOV split as the encoder side.
        oov_lambda = Lambda(lambda x: x - valid_words)
        oov_activator = Activation('relu')
        oov_in_decoder = oov_lambda(in_decoder)
        oov_in_decoder = oov_activator(oov_in_decoder)

        embed_decoder = Embedding(self.vocab_size, self.embedding_dim, weights = [pretrained_embedding], trainable = False, name='embedding_decoder')
        oov_embed_decoder = Embedding(self.oov_size, self.embedding_dim, weights = [oov_embedding], trainable = True, name='oov_embedding_decoder')

        in_dec_embedded = embed_decoder(in_decoder)
        oov_in_dec_embedded = oov_embed_decoder(oov_in_decoder)

        # Add the embedding matrices
        dec_embedded_sequences = Add()([in_dec_embedded, oov_in_dec_embedded])

        # Ground-truth target ids, one per decoder step (incl. end token).
        labels = Input((self.decoder_length+1,1), dtype='int32', name='labels_')

        fwd_decoder = GRU(self.rnn_dim, return_state=True)
        sampling_softmax = SamplingLayer(self.num_samples, self.vocab_size, mode='train')

        s0 = Input(shape=(self.rnn_dim,), name='s0')
        s = [s0]
        losses = []
        # Unrolled decoder: at each step feed the t-th teacher-forced token
        # embedding, advance the GRU state, and emit a sampled-softmax loss.
        for t in range(self.decoder_length+1):
            label_t = Lambda(lambda x: labels[:,t,:], name='label-%s'%t)(labels)
            x_dec = Lambda(lambda x: dec_embedded_sequences[:,t,:], name='dec_embedding-%s'%t)(dec_embedded_sequences)
            x_dec = Reshape((1, self.embedding_dim))(x_dec)
            if t==0:
                # Seed the decoder with the document encoding; the s0 input
                # is therefore unused after graph construction.
                s = out_bidir_doc_encoder
            s, _ = fwd_decoder(x_dec, initial_state=s)
            loss = sampling_softmax([s, label_t])
            losses.append(loss)
            s = [s]

        model = Model(inputs=[in_document, in_decoder, s0, labels], outputs=losses)
        self.train_model = model

        # Keep the graph pieces needed later by eval/predict/decoder builds.
        self.in_document = in_document
        self.out_bidir_doc_encoder = out_bidir_doc_encoder
        self.in_decoder = in_decoder
        self.oov_lambda = oov_lambda
        self.oov_activator = oov_activator
        self.embed_decoder = embed_decoder
        self.oov_embed_decoder = oov_embed_decoder
        self.dec_embedded_sequences = dec_embedded_sequences
        self.labels = labels
        self.fwd_decoder = fwd_decoder

        return self.train_model

    def compile_(self):
        """Compile the training model.

        The sampled-softmax layers already output per-step losses, so the
        Keras loss function just passes them through.
        """
        self.train_model.compile(loss=lambda y_true, loss: loss, optimizer='rmsprop')
        print("\n--- Hierarchical Seq2Seq (CNN + Bidirectional-GRU) with sampled softmax: trainable model --- \n")
        self.train_model.summary()

    def train_(self):
        """Fit via generators over the batch iterators, with early stopping,
        best-weights checkpointing, and TensorBoard logging."""
        earlystop_callbacks = [EarlyStopping(monitor='val_loss', patience=5),
                    ModelCheckpoint(filepath=os.path.join(self.filepath,'%s.{epoch:02d}-{val_loss:.2f}.check'%(self.filename)), monitor='val_loss', save_best_only=True, save_weights_only=True),
                    TensorBoard(log_dir='./Graph', histogram_freq=0, batch_size=self.batch_size, write_graph=True, write_grads=False, write_images=True)]

        def train_gen(batch_size):
            # Endlessly replay training batches as [inputs, targets] pairs.
            while True:
                train_batches = [[[X, y_in, states, labels], y_output] for X, y_in, states, labels, y_output in self.batch_train_iter]
                for train_batch in train_batches:
                    yield train_batch

        def val_gen(batch_size):
            # Same shape as train_gen, over the validation iterator.
            while True:
                val_batches = [[[X, y_in, states, labels], y_output] for X, y_in, states, labels, y_output in self.batch_val_iter]
                for val_batch in val_batches:
                    yield val_batch

        self.history = self.train_model.fit_generator(train_gen(self.batch_size), validation_data=val_gen(self.batch_size), validation_steps=self.val_steps, steps_per_epoch=self.steps_epoch, epochs = self.epochs, callbacks = earlystop_callbacks)

    def plot_(self):
        """Save train/validation loss curves to loss_<filename>.png."""
        plt.clf()
        plt.plot(self.history.history['loss'])
        plt.plot(self.history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['training', 'validation'], loc='upper right')
        plt.savefig(os.path.join(self.filepath,'loss_%s.png'%(self.filename)))

    def plot_time(self):
        """Save per-epoch running time to time_<filename>.png.

        NOTE(review): assumes some callback recorded history.history['times'];
        none of the callbacks registered in train_() does — confirm.
        """
        plt.clf()
        plt.plot(self.history.history['times'])
        plt.title('running time')
        plt.ylabel('time')
        plt.xlabel('epoch')
        plt.legend(['training'], loc='upper right')
        plt.savefig(os.path.join(self.filepath,'time_%s.png'%(self.filename)))

    def eval_sampled_softmax(self, stored_weights):
        """Rebuild the decoder loop with an evaluation softmax layer and load
        trained weights.

        Must be called after train_hier_sampled_softmax() so the shared
        graph pieces (self.labels, self.fwd_decoder, ...) exist.
        """
        eval_softmax = SamplingLayer(self.num_samples, self.vocab_size, mode='eval')

        s0 = Input(shape=(self.rnn_dim,), name='s0')
        s = [s0]
        eval_losses = []
        # Same unrolled loop as training, but emitting evaluation losses.
        for t in range(self.decoder_length+1):
            label_t = Lambda(lambda x: self.labels[:,t,:], name='label-%s'%t)(self.labels)
            x_dec = Lambda(lambda x: self.dec_embedded_sequences[:,t,:], name='dec_embedding-%s'%t)(self.dec_embedded_sequences)
            x_dec = Reshape((1, self.embedding_dim))(x_dec)
            if t==0:
                s = self.out_bidir_doc_encoder
            s, _ = self.fwd_decoder(x_dec, initial_state=s)
            loss = eval_softmax([s, label_t])
            eval_losses.append(loss)
            s = [s]

        eval_model = Model(inputs=[self.in_document, self.in_decoder, s0, self.labels], outputs=eval_losses)
        eval_model.compile(loss=lambda y_true, loss: loss, optimizer='rmsprop')
        eval_model.load_weights(os.path.join(self.filepath, '%s'%(stored_weights)))
        eval_model.summary()
        self.eval_model = eval_model
        return self.eval_model

    def predict_sampled_softmax(self, stored_weights):
        """Rebuild the decoder loop to output softmax probabilities per step
        and load trained weights; also refreshes self.encoder_model."""
        pred_softmax = PredictionLayer(self.num_samples, self.vocab_size, mode='predict')

        s0 = Input(shape=(self.rnn_dim,), name='s0')
        s = [s0]
        probs = []
        # Same unrolled loop, but collecting probability tensors.
        for t in range(self.decoder_length+1):
            label_t = Lambda(lambda x: self.labels[:,t,:], name='label-%s'%t)(self.labels)
            x_dec = Lambda(lambda x: self.dec_embedded_sequences[:,t,:], name='dec_embedding-%s'%t)(self.dec_embedded_sequences)
            x_dec = Reshape((1, self.embedding_dim))(x_dec)
            if t==0:
                s = self.out_bidir_doc_encoder
            s, _ = self.fwd_decoder(x_dec, initial_state=s)
            softmax_prob = pred_softmax([s, label_t])
            probs.append(softmax_prob)
            s = [s]

        prediction_model = Model(inputs=[self.in_document, self.in_decoder, s0, self.labels], outputs=probs)
        prediction_model.compile(loss=lambda y_true, loss: loss, optimizer='rmsprop')
        prediction_model.load_weights(os.path.join(self.filepath, '%s'%(stored_weights)))
        prediction_model.summary()

        encoder_model = Model(inputs=self.in_document, outputs=self.out_bidir_doc_encoder)
        self.encoder_model = encoder_model
        self.prediction_model = prediction_model
        self.pred_softmax = pred_softmax
        return self.prediction_model

    def create_decoder_model(self):
        """Build the step-wise inference decoder.

        Given the previous token, decoder state, and a label input, returns
        [probabilities, new state]. Requires predict_sampled_softmax() to
        have run first (uses self.pred_softmax).
        """
        in_state_decoder = Input(shape=(self.rnn_dim,))
        in_label = Input(shape=(None,))

        # Reuse the exact embedding path from training for consistency.
        oov_in_decoder = self.oov_lambda(self.in_decoder)
        oov_in_decoder = self.oov_activator(oov_in_decoder)

        in_dec_embedded = self.embed_decoder(self.in_decoder)
        oov_in_dec_embedded = self.oov_embed_decoder(oov_in_decoder)

        dec_embedded_sequences = Add()([in_dec_embedded, oov_in_dec_embedded])

        s, _ = self.fwd_decoder(dec_embedded_sequences, initial_state=[in_state_decoder])
        softmax_prob = self.pred_softmax([s, in_label])
        decoder_states = [s]

        decoder_model = Model([self.in_decoder] + [in_state_decoder] + [in_label], [softmax_prob] + decoder_states)
        self.decoder_model = decoder_model
        return self.decoder_model
| [
"[email protected]"
]
| |
1b211e2af8bd47e80d621774445befeff125077b | 068d271e241d8cdb46dbf4243166e4b8ee7025b2 | /day10/day10/5.进程_server.py | 6bcdd06a147b6b6b577b373066d3137b7a7fe994 | []
| no_license | caiqinxiong/python | f6e226e76cb62aac970bcfbcb6c8adfc64858b60 | 9029f6c528d2cb742b600af224e803baa74cbe6a | refs/heads/master | 2023-05-26T19:41:34.911885 | 2020-05-15T09:02:08 | 2020-05-15T09:02:08 | 195,261,757 | 1 | 0 | null | 2021-06-10T23:33:33 | 2019-07-04T15:01:42 | JavaScript | UTF-8 | Python | false | false | 509 | py | import socket
from multiprocessing import Process
def func(conn):
    """Serve one client in a child process: repeatedly send a greeting and
    print whatever the client replies.

    Exits (and closes the socket) when the peer disconnects, instead of
    looping forever on a dead connection as the original did.
    """
    try:
        while True:
            conn.send(b'hello')  # payload must be bytes
            message = conn.recv(1024)  # 1024 = max bytes per read
            if not message:
                # recv() returning b'' means the client closed the connection.
                break
            print(message)
    finally:
        conn.close()
if __name__ == '__main__':
    sk = socket.socket()
    sk.bind(('127.0.0.1',9001))  # serve on localhost:9001
    sk.listen()
    while True:
        # Accept a client connection (TCP three-way handshake) and hand it
        # to a child process; the parent then closes its own copy of the
        # socket so the child holds the only open reference.
        conn,addr = sk.accept()
        Process(target=func,args=(conn,)).start()
        conn.close()
| [
"[email protected]"
]
| |
2d742d0514e2f6b3d504abf2415972f06a362098 | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/client/gui/shared/utils/decorators.py | fd6a83b489698ea258530d5b9b8f79437a048a1b | []
| no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 5,312 | py | # 2017.05.04 15:26:22 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/shared/utils/decorators.py
import time
import adisp
import BigWorld
from debug_utils import LOG_DEBUG
from gui.Scaleform.Waiting import Waiting
from debug_utils import LOG_WARNING
from string import join
class process(object):
    """Decorator that shows a sequence of Waiting screens while an
    adisp-driven coroutine runs.

    Each positional argument is a waiting-message key; one message is
    shown per coroutine step and everything is hidden (and the queue
    reset) when the process stops.
    """

    def __init__(self, *kargs):
        self.__currentMessage = None  # message currently on screen, if any
        self.__messages = kargs  # immutable template of message keys
        self.__messages2Show = list(self.__messages)  # queue for this run
        return

    def __hideWaiting(self):
        # Hide the active waiting screen, if one is shown.
        if self.__currentMessage is not None:
            Waiting.hide(self.__currentMessage)
            self.__currentMessage = None
        return

    def __nextWaiting(self):
        # Replace the current waiting screen with the next queued message.
        if len(self.__messages2Show):
            self.__hideWaiting()
            self.__currentMessage = self.__messages2Show.pop(0)
            Waiting.show(self.__currentMessage)

    def __stepCallback(self, isStop):
        # adisp step callback: advance to the next message while running;
        # on stop, hide everything and reset the queue for the next call.
        if not isStop:
            return self.__nextWaiting()
        self.__hideWaiting()
        self.__messages2Show = list(self.__messages)

    def __call__(self, func):
        def wrapper(*kargs, **kwargs):
            self.__nextWaiting()
            return adisp.process(func, self.__stepCallback)(*kargs, **kwargs)

        return wrapper
def async(func, cbname = 'callback', cbwrapper = lambda x: x):
    """Wrap *func* with adisp.async when the call site did not already
    supply the callback argument; otherwise call *func* directly.

    NOTE(review): Python 2 only — relies on func.func_code, and `async`
    became a reserved keyword in Python 3.7.
    """

    def wrapper(*kargs, **kwargs):
        # Defer to adisp only when func declares `cbname` and the caller
        # has not bound it positionally or by keyword.
        if cbname in func.func_code.co_varnames:
            idx = func.func_code.co_varnames.index(cbname)
            if idx >= len(kargs) and cbname not in kwargs:
                return adisp.async(func, cbname, cbwrapper)(*kargs, **kwargs)
        return func(*kargs, **kwargs)

    return wrapper
def dialog(func):
    """Decorator for dialog coroutines: suspends the global Waiting overlay
    for the dialog's lifetime and resumes it just before the dialog's
    callback fires with its result.
    """

    def wrapper(*kargs, **kwargs):
        Waiting.suspend()

        def cbwrapper(cb):
            # Wrap the real callback so Waiting resumes before delivery.
            def callback(result):
                Waiting.resume()
                cb(result)

            return callback

        return async(func, 'callback', cbwrapper)(*kargs, **kwargs)

    return wrapper
def debugTime(func):
    """Decorator that logs how long each call to *func* takes.

    Fix over the original: the wrapped function's return value is now
    propagated to the caller instead of being silently dropped.
    """

    def wrapper(*args, **kwargs):
        startTime = time.time()
        result = func(*args, **kwargs)
        LOG_DEBUG("Method '%s' measuring time: %.10f" % (func.__name__, time.time() - startTime))
        return result

    return wrapper
IS_DEVELOPMENT = True
class _TrackFrameEnabled(object):
    """Decorator that brackets each call with BigWorld profiler frame
    markers (development builds only)."""

    def __init__(self, logID):
        super(_TrackFrameEnabled, self).__init__()
        self.__logID = logID  # profiler frame label

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            BigWorld.PFbeginFrame(self.__logID)
            try:
                # Fixes over the original: the wrapped function's return
                # value is propagated, and the profiler frame is closed
                # even if the call raises.
                return func(*args, **kwargs)
            finally:
                BigWorld.PFendFrame()

        return wrapper
class _TrackFrameDisabled(object):
    """No-op stand-in for _TrackFrameEnabled used in release builds:
    accepts (and ignores) the log identifier and returns the decorated
    function unchanged."""

    def __init__(self, logID):
        # logID intentionally discarded: no profiling in release builds.
        super(_TrackFrameDisabled, self).__init__()

    def __call__(self, func):
        # Identity decorator.
        return func
# Development builds record profiler frames; release builds use the no-op.
if IS_DEVELOPMENT:
    trackFrame = _TrackFrameEnabled
else:
    trackFrame = _TrackFrameDisabled
def makeArr(obj):
    """Normalize *obj* into a two-element list [attribute-name, display-name].

    A tuple contributes its first two items (a 1-tuple repeats its only
    item); any non-tuple value is simply duplicated.
    """
    if not isinstance(obj, tuple):
        return [obj, obj]
    first = obj[0]
    second = obj[1] if len(obj) > 1 else first
    return [first, second]
class ReprInjector(object):
    """Factory for __repr__-injecting class decorators.

    withParent() merges the given attribute names with any __repr_params__
    the decorated class inherited; simple() uses only the names given here.
    """

    @classmethod
    def withParent(cls, *argNames):
        # Inherit and extend the parent's repr parameter list.
        return InternalRepresenter(True, argNames)

    @classmethod
    def simple(cls, *argNames):
        # Ignore any inherited __repr_params__; use exactly these names.
        return InternalRepresenter(False, argNames)
class InternalRepresenter(object):
    """Class decorator that synthesizes a __repr__ from a list of attribute
    or method names.

    Each entry in argNames is either a name or a (name, display_name)
    tuple (normalized via makeArr). Python 2 code: uses xrange and
    string.join.
    """

    def __init__(self, reprParentFlag, argNames):
        self.argNames = argNames  # names to render in the repr
        self.reprParentFlag = reprParentFlag  # merge with parent's params?

    def __call__(self, clazz):
        # Decide the final __repr_params__: merge with inherited params
        # (dropping dunder-prefixed inherited names) or replace outright.
        if '__repr__' in dir(clazz):
            if hasattr(clazz, '__repr_params__') and self.reprParentFlag is not False:
                clazz.__repr_params__ = tuple((arg for arg in self.argNames if arg not in clazz.__repr_params__)) + tuple((arg for arg in clazz.__repr_params__ if arg[0:2] != '__'))
            else:
                clazz.__repr_params__ = self.argNames
        else:
            clazz.__repr_params__ = self.argNames
        representation = []
        attrMethNames = []
        for i in xrange(len(clazz.__repr_params__)):
            attrMethNames.append(makeArr(clazz.__repr_params__[i]))
            # Dunder names are name-mangled by Python; prepend the class
            # name (with a leading underscore unless it already has one) so
            # getattr finds the mangled attribute.
            if attrMethNames[-1][0][:2] == '__':
                if clazz.__name__[0] != '_':
                    attrMethNames[-1][0] = join(['_', clazz.__name__, attrMethNames[-1][0]], sep='')
                else:
                    attrMethNames[-1][0] = join([clazz.__name__, attrMethNames[-1][0]], sep='')
            representation.append('{0} = {{{1}}}'.format(attrMethNames[-1][1], i))
        # Format template: "ClassName(a = {0}, b = {1}, ...)".
        representation = join([clazz.__name__,
         '(',
         join(representation, sep=', '),
         ')'], sep='')

        def __repr__(self):
            formatedArgs = []
            for attrMethName, reprName in attrMethNames:
                attr = getattr(self, attrMethName, 'N/A')
                # Methods are called to obtain the value to display.
                if callable(attr):
                    attr = getattr(self, attrMethName, 'N/A')()
                formatedArgs.append(attr)

            return representation.format(*formatedArgs)

        clazz.__repr__ = __repr__
        return clazz
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\shared\utils\decorators.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:26:23 Střední Evropa (letní čas)
| [
"[email protected]"
]
| |
b694a28ffa1f63fc562066d77f3e7125197de3a4 | 28e8ff11d8d4d633e9bd69390eb8504dc52d34ac | /python/paddle/distributed/fleet/runtime/parameter_server_runtime.py | 0d4c3944c72a2edaae4d97be1b7587969285cba1 | [
"Apache-2.0"
]
| permissive | zhupengyang/Paddle | 1cda899a18de8725bfbbcedfcf45bce0bc2706a7 | 226b4a95df0c992dfaf37cca2c0e89eb730dd298 | refs/heads/develop | 2023-08-30T22:09:01.452182 | 2023-03-01T09:35:50 | 2023-03-01T09:35:50 | 186,746,928 | 0 | 0 | Apache-2.0 | 2023-07-19T01:56:42 | 2019-05-15T04:05:53 | C++ | UTF-8 | Python | false | false | 28,201 | py | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import warnings
import paddle
from paddle.framework import core
from paddle.static import (
CompiledProgram,
Executor,
ParallelExecutor,
Program,
Variable,
default_main_program,
default_startup_program,
save_inference_model,
)
from ..base.private_helper_function import wait_server_ready
from .runtime_base import RuntimeBase
__all__ = []
class ParameterServerRuntime(RuntimeBase):
    def __init__(self):
        super().__init__()
        # Communicator is created lazily when the worker is initialized.
        self._communicator = None
    def _set_basic_info(self, context):
        """Cache the fleet context and derive the runtime strategies.

        Order matters: the async strategy must be computed before the
        compiled strategy, which is built from it.
        """
        self.context = context
        self.role_maker = context["role_maker"]
        self.origin_main_program = context["origin_main_program"]
        self.origin_startup_program = context["origin_startup_program"]
        self.async_strategy = self._get_distributed_strategy()
        self.compiled_strategy = self.build_compiled_startegy()
def _get_distributed_strategy(self):
strategy = None
from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
StrategyFactory,
)
dist_strategy = self.context["valid_strategy"]
k_steps = dist_strategy.a_sync_configs["k_steps"]
if not dist_strategy.a_sync and k_steps == 0:
strategy = StrategyFactory.create_sync_strategy()
if dist_strategy.a_sync and k_steps == 0:
strategy = StrategyFactory.create_async_strategy()
if dist_strategy.a_sync and k_steps > 0:
strategy = StrategyFactory.create_geo_strategy(k_steps)
if not strategy:
raise ValueError("k_steps must be invalid value, please check")
return strategy
    def build_compiled_startegy(self):
        """Build the CompileTimeStrategy used by the PS transpiler.

        NOTE(review): the misspelled name ("startegy") is kept because it
        is the public API used by callers.
        """
        from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
            CompileTimeStrategy,
        )

        # Both program arguments are the origin main program here.
        compiled_config = CompileTimeStrategy(
            self.origin_main_program,
            self.origin_main_program,
            self.async_strategy,
            self.role_maker,
        )
        return compiled_config
def _load_sparse_params(
self, executor, dirname, varnames, main_program=None
):
assert vars is not None
check_vars = []
load_prog = Program()
load_block = load_prog.global_block()
def _in_varnames(var):
return var.name in varnames
load_vars = list(
filter(_in_varnames, default_main_program().list_vars())
)
if main_program is None:
main_program = self.origin_main_program
from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_varname_parts,
)
for each_var in load_vars:
assert isinstance(each_var, Variable)
origin_varname, _, _ = _get_varname_parts(each_var.name)
new_var = paddle.static.io._clone_var_in_block(load_block, each_var)
var_path = os.path.join(dirname, origin_varname)
if not os.path.exists(var_path):
raise ValueError(
"SelectedRows var {} can not find at {}".format(
new_var.name, var_path
)
)
if os.path.isfile(var_path):
load_block.append_op(
type='sparse_tensor_load',
inputs={},
outputs={'Out': [new_var]},
attrs={
'file_path': os.path.join(dirname, origin_varname),
'node_index': self.role_maker._server_index(),
'node_num': self.role_maker._server_num(),
'shape': each_var.shape,
},
)
check_vars.append(each_var)
executor.run(load_prog)
def _load_distributed_params(self, dirname, varnames):
from paddle.distributed.communicator import LargeScaleKV
from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_varname_parts,
)
scale_kv = LargeScaleKV()
for varname in varnames:
origin_varname, _, _ = _get_varname_parts(varname)
sparse_dir = os.path.join(dirname, origin_varname, varname)
scale_kv.load(varname, sparse_dir)
@staticmethod
def __exclude_vars(exclude_var_names=[]):
def is_valid(var):
if var.name in exclude_var_names:
return False
from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_varname_parts,
)
origin_varname, _, _ = _get_varname_parts(var.name)
if origin_varname.endswith("@GRAD"):
return False
if origin_varname == "learning_rate_0":
return False
if (
var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH
or var.desc.type() == core.VarDesc.VarType.FETCH_LIST
or var.desc.type() == core.VarDesc.VarType.READER
):
return False
return var.persistable
return is_valid
def _init_worker(self):
def sync_strategy_envs():
kwargs = {}
kwargs[
"pserver_endpoints"
] = self.role_maker._get_pserver_endpoints()
kwargs["trainer_id"] = self.role_maker._worker_index()
return kwargs
def geo_strategy_envs():
from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
get_sparse_tablenames,
)
def get_sparse_attrs():
opt_init_map = {}
opt_init_map["gaussian_random"] = ["seed", "mean", "std"]
opt_init_map["fill_constant"] = ["value"]
opt_init_map["uniform_random"] = ["seed", "min", "max"]
opt_init_map["truncated_gaussian_random"] = [
"seed",
"mean",
"std",
]
dist_varnames = get_sparse_tablenames(
self.origin_main_program, True
)
sparse_varnames = get_sparse_tablenames(
self.origin_main_program, False
)
if len(dist_varnames) != 0:
raise ValueError(
"GeoStrategy can not support large scale embeding now, please use paddle.static.nn.embedding"
)
init_attrs = []
for value_name in sparse_varnames:
value_var = self.origin_main_program.global_block().vars[
value_name
]
value_attr = [
value_name,
",".join([str(dim) for dim in value_var.shape]),
]
for op in self.origin_startup_program.global_block().ops:
if (
op.type in opt_init_map.keys()
and value_name == op.output("Out")[0]
):
init_attr = [op.type]
for attr in opt_init_map[op.type]:
init_attr.append(str(op.attr(attr)))
value_attr.append("&".join(init_attr))
init_attrs.append(":".join(value_attr))
break
return "#".join(init_attrs)
kwargs = {}
kwargs["trainers"] = self.role_maker._worker_num()
kwargs["sparse_attrs"] = get_sparse_attrs()
return kwargs
from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
GeoStrategy,
SyncStrategy,
)
from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_lr_ops,
_has_global_step,
)
trainer_config = self.async_strategy.get_trainer_runtime_config()
print(trainer_config)
dist_strategy = self.context["valid_strategy"]
launch_barrier = dist_strategy.a_sync_configs["launch_barrier"]
if launch_barrier:
# for trainer wait server ready
wait_server_ready(self.role_maker._get_pserver_endpoints())
# for ps-heter mode, wait heter worker ready
if (
self.role_maker._is_heter_parameter_server_mode
and self.role_maker._is_worker()
):
wait_server_ready(self.role_maker._get_heter_worker_endpoints())
lrs = _has_global_step(_get_lr_ops(self.origin_main_program))
if lrs:
kwargs = {"need_global_step": "1"}
else:
kwargs = {"need_global_step": "0"}
if isinstance(self.async_strategy, GeoStrategy):
geo_kwargs = geo_strategy_envs()
kwargs.update(geo_kwargs)
if isinstance(self.async_strategy, SyncStrategy):
sync_kwargs = sync_strategy_envs()
kwargs.update(sync_kwargs)
kwargs = kwargs if kwargs else None
send_ctx = self.compiled_strategy.get_communicator_send_context()
if self.compiled_strategy.is_geo_mode():
recv_ctx = self.compiled_strategy.get_communicator_recv_context(
recv_type=4
)
else:
recv_ctx = self.compiled_strategy.get_communicator_recv_context(
recv_type=1
)
from paddle.distributed.communicator import Communicator
self._communicator = Communicator(
trainer_config.mode, kwargs, trainer_config.get_communicator_flags()
)
self._communicator.init_with_ctx(send_ctx, recv_ctx)
if not self._communicator.is_running():
self._communicator.start()
else:
warnings.warn("communicator has been initialized, skip")
def _get_executor(self):
executor = Executor(paddle.CPUPlace())
if self.role_maker._is_heter_parameter_server_mode:
heter_worker_device_guard = (
self.context["valid_strategy"]
.a_sync_configs["heter_worker_device_guard"]
.upper()
)
if heter_worker_device_guard not in ["GPU", "XPU", "CPU"]:
raise ValueError(
"Heter Worker Not Support Device {}".format(
heter_worker_device_guard
)
)
if self.role_maker._is_heter_worker():
if heter_worker_device_guard == "GPU":
executor = Executor(
paddle.CUDAPlace(
int(os.getenv("FLAGS_selected_gpus", "0"))
)
)
elif heter_worker_device_guard == "XPU":
executor = Executor(
paddle.XPUPlace(
int(os.getenv("FLAGS_selected_xpus", "0"))
)
)
return executor
def _init_server(self, *args, **kwargs):
if len(args) > 1:
raise ValueError("init server can only accept 1 args: `dirname`")
elif len(args) == 1:
model_dirname = args[0]
else:
model_dirname = None
executor = self._get_executor()
if (
self.role_maker._is_heter_worker()
and self.context["valid_strategy"].a_sync_configs["launch_barrier"]
):
# for heter trainer wait server ready
wait_server_ready(self.role_maker._get_pserver_endpoints())
executor.run(default_startup_program())
if self.role_maker._is_heter_worker():
self._init_worker()
return
sparse_varnames = self.compiled_strategy.get_sparse_varname_on_ps(False)
sparse_related_optimize_varnames = []
for var_name in sparse_varnames:
sparse_related_optimize_varnames += (
self.compiled_strategy.get_optimize_varname_on_ps(var_name)
)
sparse_related_optimize_varnames = list(
set(sparse_related_optimize_varnames)
)
distribtued_varnames = self.compiled_strategy.get_sparse_varname_on_ps(
True
)
distributed_related_optimize_varnames = []
for var_name in distribtued_varnames:
distributed_related_optimize_varnames += (
self.compiled_strategy.get_optimize_varname_on_ps(var_name)
)
distributed_related_optimize_varnames = list(
set(distributed_related_optimize_varnames)
)
remaining_vars = list(
filter(
ParameterServerRuntime.__exclude_vars(
sparse_varnames
+ distribtued_varnames
+ sparse_related_optimize_varnames
+ distributed_related_optimize_varnames
),
default_main_program().list_vars(),
)
)
if not model_dirname:
return
if not os.path.isdir(model_dirname):
raise ValueError("There is no directory named '%s'", model_dirname)
# load dense
paddle.static.load_vars(
executor,
main_program=default_main_program(),
dirname=model_dirname,
vars=remaining_vars,
)
# load sparse
self._load_sparse_params(
executor=executor,
dirname=model_dirname,
varnames=sparse_varnames + sparse_related_optimize_varnames,
)
# load large scale
self._load_distributed_params(
dirname=model_dirname,
varnames=distribtued_varnames
+ distributed_related_optimize_varnames,
)
def _run_server(self):
executor = self._get_executor()
executor.run(default_main_program())
def _stop_worker(self):
self._communicator.stop()
executor = self._get_executor()
executor.close()
def _get_optimizer_status(self, op, param_name):
supported_opts = [
"sgd",
"adam",
"adagrad",
"adamax",
"momentum",
"lars_momentum",
"rmsprop",
"decayed_adagrad",
"ftrl",
]
reshaped_val_map = {}
reshaped_val_map["sgd"] = []
reshaped_val_map["adam"] = ["moment1_0", "moment2_0"]
reshaped_val_map["adagrad"] = ["moment_0"]
reshaped_val_map["adamax"] = ["moment_0", "inf_norm_0"]
reshaped_val_map["momentum"] = ["velocity_0"]
reshaped_val_map["lars_momentum"] = ["velocity_0"]
reshaped_val_map["rmsprop"] = [
"momentum_0",
"mean_square_0",
"mean_grad_0",
]
reshaped_val_map["decayed_adagrad"] = ["moment_0"]
reshaped_val_map["ftrl"] = ["squared_0", "linear_0"]
orishaped_val_map = {}
orishaped_val_map["adam"] = ["beta1_pow_acc_0", "beta2_pow_acc_0"]
orishaped_val_map["adamax"] = ["beta1_pow_acc_0"]
if op not in supported_opts:
raise ValueError(
"fleet can not support optimizer: {}, only this can be supported: {}".format(
op, supported_opts
)
)
reshaped_names = [
param_name + "_" + val for val in reshaped_val_map[op]
]
if op not in orishaped_val_map:
origin_names = []
else:
origin_names = [
param_name + "_" + val for val in orishaped_val_map[op]
]
return reshaped_names, origin_names
def _get_optimizer_op(self, param_name):
from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_optimize_ops,
)
opts = _get_optimize_ops(self.origin_main_program)
for op in opts:
if (
"Param" in op.input_names
and "LearningRate" in op.input_names
and op.input("Param")[0] == param_name
):
return op
def _save_dense_params(self, executor, dirname, context, main_program):
self._communicator.recv()
prog = Program()
block = prog.global_block()
local_vars = []
for name, var_ctx in context.items():
if len(var_ctx.origin_varnames()) != 1:
raise ValueError("Dense can not support split now.")
varname = var_ctx.origin_varnames()[0]
local_vars.append(varname)
optimizer = self._get_optimizer_op(varname)
reshaped_varnames, origin_varnames = self._get_optimizer_status(
optimizer.type, varname
)
for var_name in [varname] + reshaped_varnames + origin_varnames:
var = self.origin_main_program.global_block().vars[var_name]
block.append_op(
type='recv_save',
attrs={
"trainer_id": self.role_maker._worker_index(),
"shape": var.shape,
"slice_shapes": [",".join([str(i) for i in var.shape])],
"slice_varnames": [var.name],
"remote_varnames": [var.name],
"is_sparse": False,
"endpoints": var_ctx.split_endpoints(),
"file_path": os.path.join(dirname, var.name),
},
)
executor.run(prog)
return local_vars
def _save_sparse_params(self, executor, dirname, context, main_program):
prog = Program()
block = prog.global_block()
local_vars = []
for name, var_ctx in context.items():
if len(var_ctx.origin_varnames()) != 1:
raise ValueError("Dense can not support split now.")
varname = var_ctx.origin_varnames()[0]
local_vars.append(varname)
optimizer = self._get_optimizer_op(varname)
reshaped_varnames, origin_varnames = self._get_optimizer_status(
optimizer.type, varname
)
var = self.origin_main_program.global_block().vars[varname]
slice_shapes = []
dims1 = ",".join([str(i) for i in var.shape[1:]])
for section in var_ctx.sections():
slice_shapes.append(str(section) + dims1)
block.append_op(
type='recv_save',
attrs={
"trainer_id": self.role_maker._worker_index(),
"shape": var.shape,
"slice_shapes": slice_shapes,
"slice_varnames": var_ctx.split_varnames(),
"remote_varnames": var_ctx.split_varnames(),
"is_sparse": True,
"endpoints": var_ctx.split_endpoints(),
"pserver_num": len(
self.role_maker._get_pserver_endpoints()
),
"file_path": os.path.join(dirname, var.name),
},
)
for reshaped_varname in reshaped_varnames:
var = self.origin_main_program.global_block().vars[
reshaped_varname
]
slice_varnames = []
remote_varnames = []
for i in range(len(var_ctx.split_varnames())):
slice_varnames.append(
"{}.block{}".format(reshaped_varname, i)
)
remote_varnames.append(reshaped_varname)
block.append_op(
type='recv_save',
attrs={
"trainer_id": self.role_maker._worker_index(),
"shape": var.shape,
"slice_shapes": slice_shapes,
"slice_varnames": slice_varnames,
"remote_varnames": remote_varnames,
"is_sparse": True,
"endpoints": var_ctx.split_endpoints(),
"pserver_num": len(
self.role_maker._get_pserver_endpoints()
),
"file_path": os.path.join(dirname, var.name),
},
)
for origin_varname in origin_varnames:
var = self.origin_main_program.global_block().vars[
origin_varname
]
block.append_op(
type='recv_save',
attrs={
"trainer_id": self.role_maker._worker_index(),
"shape": var.shape,
"slice_shapes": [",".join([str(i) for i in var.shape])],
"slice_varnames": [origin_varname],
"remote_varnames": [origin_varname],
"is_sparse": False,
"endpoints": var_ctx.split_endpoints()[:1],
"file_path": os.path.join(dirname, var.name),
},
)
executor.run(prog)
return context.keys()
def _save_distributed_params(self, executor, dirname, context, mode):
prog = Program()
block = prog.global_block()
for name, var_ctx in context.items():
block.append_op(
type='checkpoint_notify',
attrs={
"varname": name,
"mode": mode,
"slice_varnames": var_ctx.split_varnames(),
"remote_varnames": var_ctx.split_varnames(),
"endpoints": var_ctx.split_endpoints(),
"dirname": dirname,
},
)
executor.run(prog)
return context.keys()
def _save_distributed_persistables(
self, executor, dirname, main_program, mode
):
dense_ctx = self.compiled_strategy.get_communicator_recv_context(
recv_type=1, use_origin_program=True
)
sparse_ctx = self.compiled_strategy.get_communicator_recv_context(
recv_type=2, use_origin_program=True
)
distributed_ctx = self.compiled_strategy.get_communicator_recv_context(
recv_type=3, use_origin_program=True
)
recv_dense_varnames = self._save_dense_params(
executor, dirname, dense_ctx, main_program
)
recv_sparse_varnames = self._save_sparse_params(
executor, dirname, sparse_ctx, main_program
)
recv_distributed_varnames = self._save_distributed_params(
executor, dirname, distributed_ctx, mode
)
saved_varnames = (
recv_dense_varnames
+ list(recv_sparse_varnames)
+ list(recv_distributed_varnames)
)
remaining_vars = list(
filter(
ParameterServerRuntime.__exclude_vars(saved_varnames),
main_program.list_vars(),
)
)
paddle.static.save_vars(
executor,
main_program=main_program,
dirname=dirname,
vars=remaining_vars,
)
def _ps_inference_save_persistables(
self, executor, dirname, main_program=None, mode=0, **kwargs
):
"""
This function filters out all variables with `persistable==True` from the
give `main_program` and then saves these variables to the folder `dirname`
or file `filename`.
The `dirname` is used to specify the folder where persistable variables
are going to be saved. If you would like to save variables in separate
files, set `filename` None; if you would like to save all variables in a
single file, use `filename` to specify the file name.
"""
if isinstance(executor, ParallelExecutor):
raise TypeError(
"in fleet.save_persistables() function, executor must be as Executor type, ParallelExecutor is not allowed"
)
if not isinstance(executor, Executor):
raise TypeError(
"in fleet.save_persistables() function, executor must be as Executor type"
)
if main_program is None:
main_program = self.compiled_strategy.get_origin_ps_main_program()
if isinstance(main_program, CompiledProgram):
raise TypeError(
"in fleet.save_persistables() function, main_program must be as Program type, CompiledProgram is not allowed"
)
self._save_distributed_persistables(
executor, dirname, main_program, mode
)
def _ps_inference_save_inference_model(
self,
executor,
dirname,
feeded_var_names,
target_vars,
main_program=None,
export_for_deployment=True,
):
"""
Prune the given `main_program` to build a new program especially for inference,
and then save it and all related parameters to given `dirname` by the `executor`.
"""
if isinstance(executor, ParallelExecutor):
raise TypeError(
"in fleet.save_inference_model() function, executor must be as Executor type, ParallelExecutor is not allowed"
)
if not isinstance(executor, Executor):
raise TypeError(
"in fleet.save_inference_model() function, executor must be as Executor type"
)
if main_program is not None:
if isinstance(main_program, CompiledProgram):
raise TypeError(
"in fleet.save_inference_model() function, main_program must be as Program type, CompiledProgram is not allowed"
)
save_inference_model(
dirname,
feeded_var_names,
target_vars,
executor,
main_program,
None,
None,
export_for_deployment,
)
else:
save_inference_model(
dirname,
feeded_var_names,
target_vars,
executor,
self.origin_main_program,
None,
None,
export_for_deployment,
True,
)
model_basename = "__model__"
model_filename = os.path.join(dirname, model_basename)
with open(model_filename, "rb") as f:
program_desc_str = f.read()
program = Program.parse_from_string(program_desc_str)
program._copy_dist_param_info_from(default_main_program())
self._ps_inference_save_persistables(
executor, dirname, program, mode=0
)
def _save_inference_model(self, *args, **kwargs):
self._ps_inference_save_inference_model(*args, **kwargs)
def _save_persistables(self, *args, **kwargs):
self._ps_inference_save_persistables(*args, **kwargs)
| [
"[email protected]"
]
| |
71bae5b57bd978555acd8f94f55a75779c4e5c5a | a4e59c4f47873daf440374367a4fb0383194d2ce | /Python/129.py | 73d145f89ea0e90f96faa9bfc9b762b6c30ecb3d | []
| no_license | maxjing/LeetCode | e37cbe3d276e15775ae028f99cf246150cb5d898 | 48cb625f5e68307390d0ec17b1054b10cc87d498 | refs/heads/master | 2021-05-23T17:50:18.613438 | 2021-04-02T17:14:55 | 2021-04-02T17:14:55 | 253,406,966 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 617 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def sumNumbers(self, root: TreeNode) -> int:
return self.pathsSum(root, 0)
def pathsSum(self, node, pathSum):
if node is None:
return 0
#2020.05.15 totally forgot the following step
pathSum = 10 * pathSum + node.val
if node.left is None and node.right is None:
return pathSum
return self.pathsSum(node.left, pathSum) + self.pathsSum(node.right, pathSum)
| [
"[email protected]"
]
| |
2f4db38a3a1591db3042a3d16dbd30478a312b0e | 07b37ca45d38edea112895049acf76d96ff07eff | /3.Processing&UnderstadingText/recommended_dependency_parser.py | 2ced66cfba94395977f015428833ccba515d6df6 | []
| no_license | KRBhavaniSankar/NLTK | e335944de346be72a01c92221b0bf58d85475fb9 | 4b228338566996fbccee72cb6afaa199a6496787 | refs/heads/master | 2020-03-12T23:03:59.981112 | 2018-05-11T01:15:28 | 2018-05-11T01:15:28 | 130,858,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,000 | py |
sentence = 'The brown fox is quick and he is jumping over the lazy dog'
#Load dependencies
from spacy.lang.en import English
parser = English()
parsed_sent = parser(unicode(sentence))
print(parsed_sent) ,type(parsed_sent)
dependency_pattern = '{left}<---{word}[{w_type}]--->{right}\n--------'
for token in parsed_sent:
print dependency_pattern.format(word=token.orth_,
w_type=token.dep_,
left=[t.orth_
for t
in token.lefts],
right=[t.orth_
for t
in token.rights])
from nltk.parse.stanford import StanfordDependencyParser
sdp = StanfordDependencyParser(path_to_jar='/home/bhavani/work/Python/programs/NLTK/stanford-parser-full-2018-02-27/stanford-parser.jar',
path_to_models_jar='/home/bhavani/work/Python/programs/NLTK/stanford-parser-full-2018-02-27/stanford-parser-3.9.1-models.jar')
result = list(sdp.raw_parse(sentence))
#print(result[0])
#print(type(result[0]))
dep_tree = [parse.tree() for parse in result][0]
print dep_tree
#dep_tree.draw()
# generation of annotated dependency tree shown in Figure 3-4
from graphviz import Source
dep_tree_dot_repr = [parse for parse in result][0].to_dot()
source = Source(dep_tree_dot_repr, filename="dep_tree", format="png")
source.view()
#Building our own dependecny parsers
import nltk
tokens = nltk.word_tokenize(sentence)
dependency_rules = """
'fox' -> 'The' | 'brown'
'quick' -> 'fox' | 'is' | 'and' | 'jumping'
'jumping' -> 'he' | 'is' | 'dog'
'dog' -> 'over' | 'the' | 'lazy'
"""
dependency_grammar = nltk.grammar.DependencyGrammar.fromstring(dependency_rules)
print dependency_grammar
dp = nltk.ProjectiveDependencyParser(dependency_grammar)
res = [item for item in dp.parse(tokens)]
tree = res[0]
print tree
tree.draw() | [
"[email protected]"
]
| |
d018f1d0babe1ace6fc29381446346cddfd4e2a2 | 39e1320c74bcf0bbebb855645b4f538e9ef361f4 | /src/genui/projects/models.py | 352d603101f8e355490bba68659b5203e196e5ba | [
"MIT"
]
| permissive | Tontolda/genui | 4c684e08e78b848e5afa7e4333bbea46c30d9d51 | c5b7da7c5a99fc16d34878e2170145ac7c8e31c4 | refs/heads/master | 2023-04-14T12:57:31.774323 | 2021-01-29T08:01:30 | 2021-01-29T08:01:30 | 344,443,814 | 0 | 0 | NOASSERTION | 2021-04-24T14:56:35 | 2021-03-04T11:00:54 | null | UTF-8 | Python | false | false | 2,454 | py | from django.conf import settings
from django.db import models
from polymorphic.models import PolymorphicModel
from abc import ABCMeta, abstractmethod
from django.utils import timezone
class PolymorphicAbstractModelMeta(ABCMeta, type(PolymorphicModel)):
pass
class PolymorphicAbstractModel(PolymorphicModel):
__metaclass__ = PolymorphicAbstractModelMeta
class Meta:
abstract = True
class BaseProject(PolymorphicAbstractModel):
name = models.CharField(max_length=256, blank=False)
description = models.TextField(max_length=10000, blank=True)
created = models.DateTimeField(blank=True)
updated = models.DateTimeField(blank=True, verbose_name="Last Update")
owner = models.ForeignKey(settings.AUTH_USER_MODEL, null=False, on_delete=models.CASCADE)
class Meta:
abstract = True
@abstractmethod
def update(self):
pass
class BaseDataSet(PolymorphicAbstractModel):
project = models.ForeignKey(BaseProject, on_delete=models.CASCADE)
name = models.CharField(max_length=256, blank=False)
description = models.TextField(max_length=10000, blank=True)
created = models.DateTimeField()
updated = models.DateTimeField('last_updated')
class Meta:
abstract = True
@abstractmethod
def update(self):
pass
class Project(BaseProject):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.pk is None:
self.created = timezone.now()
self.update()
def update(self):
self.updated = timezone.now()
def save(self, *args, **kwargs):
self.update()
super().save(*args, **kwargs)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class DataSet(BaseDataSet):
project = models.ForeignKey(Project, on_delete=models.CASCADE, blank=False)
class Meta:
abstract = True
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.pk is None:
self.created = timezone.now()
self.update()
def update(self):
if self.pk is not None:
self.project.update()
self.updated = timezone.now()
def save(self, *args, **kwargs):
self.update()
self.project.save()
super().save(*args, **kwargs)
| [
"[email protected]"
]
| |
35b0b17ca058a4213445abfdb3222aa67dceb8e9 | 2ff7e53d5e512cd762217ca54317982e07a2bb0c | /eve-8.51.857815/carbonui/camera/behaviors/cameraBehavior.py | 0e12ce3324f0f6b154ae49f02a86cd143e92084a | []
| no_license | nanxijw/Clara-Pretty-One-Dick | 66d3d69426642b79e8fd4cc8e0bec23adeeca6d6 | 50de3488a2140343c364efc2615cf6e67f152be0 | refs/heads/master | 2021-01-19T09:25:07.555284 | 2015-02-17T21:49:33 | 2015-02-17T21:49:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,472 | py | #Embedded file name: carbonui/camera/behaviors\cameraBehavior.py
"""
Simple base class for camera behaviors.
Contains base functionality and Corified versions of common functions needed.
"""
class CameraBehavior(object):
__guid__ = 'cameras.CameraBehavior'
def __init__(self):
self.gameWorldClient = sm.GetService('gameWorldClient')
self.gameWorld = None
self._LoadGameWorld()
def _LoadGameWorld(self):
if self.gameWorldClient.HasGameWorld(session.worldspaceid):
self.gameWorld = self.gameWorldClient.GetGameWorld(session.worldspaceid)
def ProcessCameraUpdate(self, camera, now, frameTime):
"""
Implemented in derived classes, what do I do when the camera tells me to update?
"""
pass
def _GetEntity(self, entID):
return sm.GetService('entityClient').FindEntityByID(entID)
def _GetEntityModel(self, entID):
entity = sm.GetService('entityClient').FindEntityByID(entID)
if entity and entity.HasComponent('paperdoll'):
return entity.GetComponent('paperdoll').doll.avatar
def Reset(self):
"""
Implemented in derived classes.
Used for when changing the scene and values need to be reset
"""
pass
def OnBehaviorAdded(self, camera):
"""
Implemented in derived classes.
Used for custom behavior for when this behavior is added to a camera
"""
pass
| [
"[email protected]"
]
| |
e1ba925a4435f433489b2055d93863fc11d8fbdc | dea198896f679e577a3fd0923e3fa4470da4b9cc | /journal/pyfakefs_mutants/BOR_BitOr_mutant_1507055613.py | 24e4baeeed3757779ce893133b9ecc68ff79cce6 | []
| no_license | naustarg/cbmcmutate | f138ab2b04b4be70d735de90815ac670ae6042ce | a6ee6fd395338bb2dfd6bdffabb2dc484cb303f1 | refs/heads/master | 2020-04-04T08:10:15.913309 | 2018-05-21T18:23:58 | 2018-05-21T18:23:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219,331 | py | # line: 91
'A fake filesystem implementation for unit testing.\n\n:Includes:\n * FakeFile: Provides the appearance of a real file.\n * FakeDirectory: Provides the appearance of a real directory.\n * FakeFilesystem: Provides the appearance of a real directory hierarchy.\n * FakeOsModule: Uses FakeFilesystem to provide a fake os module replacement.\n * FakePathModule: Faked os.path module replacement.\n * FakeFileOpen: Faked file() and open() function replacements.\n\n:Usage:\n\n>>> from pyfakefs import fake_filesystem\n>>> filesystem = fake_filesystem.FakeFilesystem()\n>>> os_module = fake_filesystem.FakeOsModule(filesystem)\n>>> pathname = \'/a/new/dir/new-file\'\n\nCreate a new file object, creating parent directory objects as needed:\n\n>>> os_module.path.exists(pathname)\nFalse\n>>> new_file = filesystem.CreateFile(pathname)\n\nFile objects can\'t be overwritten:\n\n>>> os_module.path.exists(pathname)\nTrue\n>>> try:\n... filesystem.CreateFile(pathname)\n... except IOError as e:\n... assert e.errno == errno.EEXIST, \'unexpected errno: %d\' % e.errno\n... assert e.strerror == \'File already exists in fake filesystem\'\n\nRemove a file object:\n\n>>> filesystem.RemoveObject(pathname)\n>>> os_module.path.exists(pathname)\nFalse\n\nCreate a new file object at the previous path:\n\n>>> beatles_file = filesystem.CreateFile(pathname,\n... contents=\'Dear Prudence\\nWon\\\'t you come out to play?\\n\')\n>>> os_module.path.exists(pathname)\nTrue\n\nUse the FakeFileOpen class to read fake file objects:\n\n>>> file_module = fake_filesystem.FakeFileOpen(filesystem)\n>>> for line in file_module(pathname):\n... 
print(line.rstrip())\n...\nDear Prudence\nWon\'t you come out to play?\n\nFile objects cannot be treated like directory objects:\n\n>>> os_module.listdir(pathname) #doctest: +NORMALIZE_WHITESPACE\nTraceback (most recent call last):\n File "fake_filesystem.py", line 291, in listdir\n raise OSError(errno.ENOTDIR,\nOSError: [Errno 20] Fake os module: not a directory: \'/a/new/dir/new-file\'\n\nThe FakeOsModule can list fake directory objects:\n\n>>> os_module.listdir(os_module.path.dirname(pathname))\n[\'new-file\']\n\nThe FakeOsModule also supports stat operations:\n\n>>> import stat\n>>> stat.S_ISREG(os_module.stat(pathname).st_mode)\nTrue\n>>> stat.S_ISDIR(os_module.stat(os_module.path.dirname(pathname)).st_mode)\nTrue\n'
# line: 92
import codecs
# line: 93
import errno
# line: 94
import heapq
# line: 95
import io
# line: 96
import locale
# line: 97
import platform
# line: 98
import os
# line: 99
import sys
# line: 100
import time
# line: 101
import warnings
# line: 103
from collections import namedtuple
# line: 105
import stat
# line: 106
from copy import copy
# line: 108
# Suppress pychecker's re-import warning (CopyModule deliberately re-imports).
__pychecker__ = 'no-reimportself'

__version__ = '3.3'

# Permission bit masks for fake file modes (octal equivalents given for clarity).
PERM_READ = 256        # 0o400: read permission for owner
PERM_WRITE = 128       # 0o200: write permission for owner
PERM_EXE = 64          # 0o100: execute permission for owner
PERM_DEF = 511         # 0o777: default permission bits
PERM_DEF_FILE = 438    # 0o666: default file permission bits
PERM_ALL = 4095        # 0o7777: all permission bits incl. setuid/setgid/sticky

# Capability flags describing what an open() mode string allows.
_OpenModes = namedtuple('open_modes', 'must_exist can_read can_write truncate append must_not_exist')

# Maps an open() mode string to its capability flags
# (must_exist, can_read, can_write, truncate, append, must_not_exist).
_OPEN_MODE_MAP = {'r': (True, True, False, False, False, False), 'w': (False, False, True, True, False, False), 'a': (False, False, True, False, True, False), 'r+': (True, True, True, False, False, False), 'w+': (False, True, True, True, False, False), 'a+': (False, True, True, False, True, False), }

# Python 2 on non-Windows platforms additionally accepts the 'rw' mode.
if ((sys.version_info[0] < 3) and (sys.platform != 'win32')):
    _OPEN_MODE_MAP['rw'] = (True, True, True, False, False, False)

# Exclusive-creation modes ('x', 'x+') exist since Python 3.3.
if (sys.version_info >= (3, 3)):
    _OPEN_MODE_MAP['x'] = (False, False, True, False, False, True)
    _OPEN_MODE_MAP['x+'] = (False, True, True, False, False, True)

# Link-resolution depth limit: 40 on Linux, 32 elsewhere
# (presumably mirroring the respective kernel limits -- TODO confirm).
if sys.platform.startswith('linux'):
    _MAX_LINK_DEPTH = 40
else:
    _MAX_LINK_DEPTH = 32

FAKE_PATH_MODULE_DEPRECATION = 'Do not instantiate a FakePathModule directly; let FakeOsModule instantiate it. See the FakeOsModule docstring for details.'

# On Windows, alias OSError to WindowsError so `except OSError` in this module
# also covers the platform-specific exception type.
if (sys.platform == 'win32'):
    OSError = WindowsError
# line: 160
class FakeLargeFileIoException(Exception):
    """Raised for unsupported I/O on fake large files.

    A fake large file only records a size and carries no actual content,
    so reading from or writing to it cannot be supported.
    """

    def __init__(self, file_path):
        message = ('Read and write operations not supported for '
                   'fake large file: %s' % file_path)
        super(FakeLargeFileIoException, self).__init__(message)
# line: 171
def CopyModule(old):
    """Recompile and return a fresh copy of a module object.

    The module is temporarily removed from ``sys.modules`` so that
    ``__import__`` re-executes it, after which the module cache is restored
    to its previous state.

    Args:
        old: the module object to copy.

    Returns:
        A newly imported module object with the same name as `old`.
    """
    saved = sys.modules.pop(old.__name__, None)
    new = __import__(old.__name__)
    if saved is not None:
        # Restore the original cache entry so other importers are unaffected.
        sys.modules[old.__name__] = saved
    else:
        # The module was not cached before; remove the fresh entry instead of
        # storing None, which would break subsequent imports of this name.
        sys.modules.pop(old.__name__, None)
    return new
# line: 179
class _FakeStatResult(object):
# line: 183
'Mimics os.stat_result for use as return type of `stat()` and similar.\n This is needed as `os.stat_result` has no possibility to set\n nanosecond times directly.\n '
# line: 184
long_type = (long if (sys.version_info < (3,)) else int)
# line: 186
def __init__(self, initial_time=None):
# line: 187
self.use_float = FakeOsModule.stat_float_times
# line: 188
self.st_mode = None
# line: 189
self.st_ino = None
# line: 190
self.st_dev = None
# line: 191
self.st_nlink = 0
# line: 192
self.st_uid = None
# line: 193
self.st_gid = None
# line: 194
self.st_size = None
# line: 195
if (initial_time is not None):
# line: 196
self._st_atime_ns = self.long_type((initial_time * 1000000000.0))
else:
# line: 198
self._st_atime_ns = None
# line: 199
self._st_mtime_ns = self._st_atime_ns
# line: 200
self._st_ctime_ns = self._st_atime_ns
# line: 202
def __eq__(self, other):
# line: 203
return (isinstance(other, _FakeStatResult) and (self._st_atime_ns == other._st_atime_ns) and (self._st_ctime_ns == other._st_ctime_ns) and (self._st_mtime_ns == other._st_mtime_ns) and (self.st_size == other.st_size) and (self.st_gid == other.st_gid) and (self.st_uid == other.st_uid) and (self.st_nlink == other.st_nlink) and (self.st_dev == other.st_dev) and (self.st_ino == other.st_ino) and (self.st_mode == other.st_mode))
# line: 217
def __ne__(self, other):
# line: 218
return (not (self == other))
# line: 220
def copy(self):
# line: 223
'Return a copy where the float usage is hard-coded to mimic the behavior\n of the real os.stat_result.\n '
# line: 224
use_float = self.use_float()
# line: 225
stat_result = copy(self)
# line: 226
stat_result.use_float = (lambda : use_float)
# line: 227
return stat_result
# line: 229
def set_from_stat_result(self, stat_result):
# line: 233
'Set values from a real os.stat_result.\n Note: values that are controlled by the fake filesystem are not set.\n This includes st_ino, st_dev and st_nlink.\n '
# line: 234
self.st_mode = stat_result.st_mode
# line: 235
self.st_uid = stat_result.st_uid
# line: 236
self.st_gid = stat_result.st_gid
# line: 237
self.st_size = stat_result.st_size
# line: 238
if (sys.version_info < (3, 3)):
# line: 239
self._st_atime_ns = self.long_type((stat_result.st_atime * 1000000000.0))
# line: 240
self._st_mtime_ns = self.long_type((stat_result.st_mtime * 1000000000.0))
# line: 241
self._st_ctime_ns = self.long_type((stat_result.st_ctime * 1000000000.0))
else:
# line: 243
self._st_atime_ns = stat_result.st_atime_ns
# line: 244
self._st_mtime_ns = stat_result.st_mtime_ns
# line: 245
self._st_ctime_ns = stat_result.st_ctime_ns
# line: 247
@property
# line: 247
def st_ctime(self):
# line: 249
'Return the creation time in seconds.'
# line: 250
ctime = (self._st_ctime_ns / 1000000000.0)
# line: 251
return (ctime if self.use_float() else int(ctime))
# line: 253
@property
# line: 253
def st_atime(self):
# line: 255
'Return the access time in seconds.'
# line: 256
atime = (self._st_atime_ns / 1000000000.0)
# line: 257
return (atime if self.use_float() else int(atime))
# line: 259
@property
# line: 259
def st_mtime(self):
# line: 261
'Return the modification time in seconds.'
# line: 262
mtime = (self._st_mtime_ns / 1000000000.0)
# line: 263
return (mtime if self.use_float() else int(mtime))
# line: 265
@st_ctime.setter
# line: 265
def st_ctime(self, val):
# line: 267
'Set the creation time in seconds.'
# line: 268
self._st_ctime_ns = self.long_type((val * 1000000000.0))
# line: 270
@st_atime.setter
# line: 270
def st_atime(self, val):
# line: 272
'Set the access time in seconds.'
# line: 273
self._st_atime_ns = self.long_type((val * 1000000000.0))
# line: 275
@st_mtime.setter
# line: 275
def st_mtime(self, val):
# line: 277
'Set the modification time in seconds.'
# line: 278
self._st_mtime_ns = self.long_type((val * 1000000000.0))
# line: 280
def __getitem__(self, item):
# line: 281
'Implement item access to mimic `os.stat_result` behavior.'
# line: 282
if (item == stat.ST_MODE):
# line: 283
return self.st_mode
# line: 284
if (item == stat.ST_INO):
# line: 285
return self.st_ino
# line: 286
if (item == stat.ST_DEV):
# line: 287
return self.st_dev
# line: 288
if (item == stat.ST_NLINK):
# line: 289
return self.st_nlink
# line: 290
if (item == stat.ST_UID):
# line: 291
return self.st_uid
# line: 292
if (item == stat.ST_GID):
# line: 293
return self.st_gid
# line: 294
if (item == stat.ST_SIZE):
# line: 295
return self.st_size
# line: 296
if (item == stat.ST_ATIME):
# line: 298
return int(self.st_atime)
# line: 299
if (item == stat.ST_MTIME):
# line: 300
return int(self.st_mtime)
# line: 301
if (item == stat.ST_CTIME):
# line: 302
return int(self.st_ctime)
# line: 304
    if (sys.version_info >= (3, 3)):
        # Python 3.3+ added nanosecond-resolution timestamps to
        # os.stat_result; mirror them here, backed directly by the
        # internal *_ns fields used by the second-resolution properties.

        @property
        def st_atime_ns(self):
            """Return the access time in nanoseconds."""
            return self._st_atime_ns

        @property
        def st_mtime_ns(self):
            """Return the modification time in nanoseconds."""
            return self._st_mtime_ns

        @property
        def st_ctime_ns(self):
            """Return the creation time in nanoseconds."""
            return self._st_ctime_ns

        @st_atime_ns.setter
        def st_atime_ns(self, val):
            """Set the access time in nanoseconds."""
            self._st_atime_ns = val

        @st_mtime_ns.setter
        def st_mtime_ns(self, val):
            """Set the modification time of the fake file in nanoseconds."""
            self._st_mtime_ns = val

        @st_ctime_ns.setter
        def st_ctime_ns(self, val):
            """Set the creation time of the fake file in nanoseconds."""
            self._st_ctime_ns = val
# line: 337
class FakeFile(object):
    """Provides the appearance of a real file.

    Attributes currently faked out:
      st_mode: user-specified, otherwise S_IFREG
      st_ctime: the time.time() timestamp of the file change time (updated
        each time a file's attributes is modified).
      st_atime: the time.time() timestamp when the file was last accessed.
      st_mtime: the time.time() timestamp when the file was last modified.
      st_size: the size of the file
      st_nlink: the number of hard links to the file
      st_ino: the inode number - a unique number identifying the file
      st_dev: a unique number identifying the (fake) file system device
        the file belongs to

    Other attributes needed by os.stat are assigned default value of None
    these include: st_uid, st_gid
    """

    def __init__(self, name, st_mode=(stat.S_IFREG | PERM_DEF_FILE), contents=None, filesystem=None, encoding=None, errors=None):
        """init.

        Args:
          name: name of the file/directory, without parent path information
          st_mode: the stat.S_IF* constant representing the file type (i.e.
            stat.S_IFREG, stat.S_IFDIR)
          contents: the contents of the filesystem object; should be a string
            or byte object for regular files, and a list of other FakeFile
            or FakeDirectory objects for FakeDirectory objects
          filesystem: the fake filesystem where the file is created.
          encoding: if contents is a unicode string, the encoding used
            for serialization
          errors: the error mode used for encoding/decoding errors

        Raises:
          ValueError: if no filesystem is given.
        """
        self.name = name
        # All stat values live on a separate stat_result object; attribute
        # access is forwarded to it via __getattr__/__setattr__ below.
        self.stat_result = _FakeStatResult(time.time())
        self.stat_result.st_mode = st_mode
        self.encoding = encoding
        self.errors = (errors or 'strict')
        self._byte_contents = self._encode_contents(contents)
        self.stat_result.st_size = (len(self._byte_contents) if (self._byte_contents is not None) else 0)
        if (filesystem is None):
            raise ValueError('filesystem shall not be None')
        self.filesystem = filesystem
        # epoch is incremented on every content change so open file handles
        # can detect that the underlying data was modified.
        self.epoch = 0
        # Set by FakeDirectory.AddEntry when the file is placed in a tree.
        self.parent_dir = None

    @property
    def byte_contents(self):
        # Raw contents as bytes; None for "large" files created via
        # SetLargeFileSize (size without contents).
        return self._byte_contents

    @property
    def contents(self):
        """Return the contents as string with the original encoding."""
        if ((sys.version_info >= (3, 0)) and isinstance(self.byte_contents, bytes)):
            return self.byte_contents.decode((self.encoding or locale.getpreferredencoding(False)), errors=self.errors)
        return self.byte_contents

    def SetLargeFileSize(self, st_size):
        """Sets the self.st_size attribute and replaces self.content with None.

        Provided specifically to simulate very large files without regards
        to their content (which wouldn't fit in memory).
        Note that read/write operations with such a file raise
        FakeLargeFileIoException.

        Args:
          st_size: (int) The desired file size

        Raises:
          IOError: if the st_size is not a non-negative integer,
            or if st_size exceeds the available file system space
        """
        self._check_positive_int(st_size)
        if self.st_size:
            # Release the currently accounted disk space first.
            self.SetSize(0)
        self.filesystem.ChangeDiskUsage(st_size, self.name, self.st_dev)
        self.st_size = st_size
        self._byte_contents = None

    def _check_positive_int(self, size):
        # Validate a size argument; on Python 2 both int and long qualify.
        int_types = ((int, long) if (sys.version_info < (3, 0)) else int)
        if ((not isinstance(size, int_types)) or (size < 0)):
            raise IOError(errno.ENOSPC, ('Fake file object: size must be a non-negative integer, but is %s' % size), self.name)

    def IsLargeFile(self):
        """Return True if this file was initialized with size but no contents."""
        return (self._byte_contents is None)

    def _encode_contents(self, contents):
        # Serialize unicode contents to bytes using the configured (or the
        # locale-preferred) encoding; bytes and None pass through unchanged.
        if ((sys.version_info >= (3, 0)) and isinstance(contents, str)):
            contents = bytes(contents, (self.encoding or locale.getpreferredencoding(False)), self.errors)
        elif ((sys.version_info < (3, 0)) and isinstance(contents, unicode)):
            contents = contents.encode((self.encoding or locale.getpreferredencoding(False)), self.errors)
        return contents

    def _set_initial_contents(self, contents):
        """Sets the file contents and size.
        Called internally after initial file creation.

        Args:
          contents: string, new content of file.

        Raises:
          IOError: if the st_size is not a non-negative integer,
            or if st_size exceeds the available file system space
        """
        contents = self._encode_contents(contents)
        st_size = len(contents)
        if self._byte_contents:
            # Free the space used by the previous contents first.
            self.SetSize(0)
        current_size = (self.st_size or 0)
        self.filesystem.ChangeDiskUsage((st_size - current_size), self.name, self.st_dev)
        self._byte_contents = contents
        self.st_size = st_size
        self.epoch += 1

    def SetContents(self, contents, encoding=None):
        """Sets the file contents and size and increases the modification time.

        Args:
          contents: (str, bytes, unicode) new content of file.
          encoding: (str) the encoding to be used for writing the contents
            if they are a unicode string.
            If not given, the locale preferred encoding is used.

        Raises:
          IOError: if the st_size is not a non-negative integer,
            or if st_size exceeds the available file system space.
        """
        self.encoding = encoding
        self._set_initial_contents(contents)
        current_time = time.time()
        self.st_ctime = current_time
        self.st_mtime = current_time

    def GetSize(self):
        """Returns the size in bytes of the file contents."""
        return self.st_size

    def GetPath(self):
        """Return the full path of the current object."""
        names = []
        obj = self
        # Walk up the parent chain collecting path components.
        while obj:
            names.insert(0, obj.name)
            obj = obj.parent_dir
        sep = self.filesystem._path_separator(self.name)
        # Skip the root entry's name; NormalizePath re-anchors the result.
        return self.filesystem.NormalizePath(sep.join(names[1:]))

    def SetSize(self, st_size):
        """Resizes file content, padding with nulls if new size exceeds the old.

        Args:
          st_size: The desired size for the file.

        Raises:
          IOError: if the st_size arg is not a non-negative integer
            or if st_size exceeds the available file system space
        """
        self._check_positive_int(st_size)
        current_size = (self.st_size or 0)
        self.filesystem.ChangeDiskUsage((st_size - current_size), self.name, self.st_dev)
        if self._byte_contents:
            if (st_size < current_size):
                # Shrink: truncate the stored bytes.
                self._byte_contents = self._byte_contents[:st_size]
            elif (sys.version_info < (3, 0)):
                # Grow (Python 2): pad with null characters.
                self._byte_contents = ('%s%s' % (self._byte_contents, ('\x00' * (st_size - current_size))))
            else:
                # Grow (Python 3): pad with null characters.
                self._byte_contents += ('\x00' * (st_size - current_size))
        self.st_size = st_size
        self.epoch += 1

    def SetATime(self, st_atime):
        """Set the self.st_atime attribute.

        Args:
          st_atime: The desired access time.
        """
        self.st_atime = st_atime

    def SetMTime(self, st_mtime):
        """Set the self.st_mtime attribute.

        Args:
          st_mtime: The desired modification time.
        """
        self.st_mtime = st_mtime

    def SetCTime(self, st_ctime):
        """Set the self.st_ctime attribute.

        Args:
          st_ctime: The desired creation time.
        """
        self.st_ctime = st_ctime

    def __getattr__(self, item):
        """Forward some properties to stat_result."""
        return getattr(self.stat_result, item)

    def __setattr__(self, key, value):
        """Forward some properties to stat_result."""
        if (key in ('st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid', 'st_gid', 'st_size', 'st_atime', 'st_mtime', 'st_ctime', 'st_atime_ns', 'st_mtime_ns', 'st_ctime_ns')):
            return setattr(self.stat_result, key, value)
        return super(FakeFile, self).__setattr__(key, value)

    def __str__(self):
        # e.g. "filename(100644)" - name plus octal mode.
        return ('%s(%o)' % (self.name, self.st_mode))

    def SetIno(self, st_ino):
        """Set the self.st_ino attribute.
        Note that a unique inode is assigned automatically to a new fake
        file.  Using this function does not guarantee uniqueness and should
        be used with caution.

        Args:
          st_ino: (int) The desired inode.
        """
        self.st_ino = st_ino
# line: 575
class FakeFileFromRealFile(FakeFile):
    """Represents a fake file copied from the real file system.

    The contents of the file are read on demand only.
    """

    def __init__(self, file_path, filesystem, read_only=True):
        """init.

        Args:
          file_path: path to the existing file.
          filesystem: the fake filesystem where the file is created.
          read_only: if set, the file is treated as read-only, e.g. a write
            access raises an exception; otherwise, writing to the file
            changes the fake file only as usually.

        Raises:
          OSError: if the file does not exist in the real file system.
        """
        real_stat = os.stat(file_path)
        # Note: contents are intentionally not passed here - they are read
        # lazily on first access of byte_contents below.
        super(FakeFileFromRealFile, self).__init__(name=os.path.basename(file_path), filesystem=filesystem)
        self.stat_result.set_from_stat_result(real_stat)
        if read_only:
            # 261924 == 0o777444: keep the file-type bits and the read
            # permission bits, clearing all write/execute bits.
            self.st_mode &= 261924
        self.file_path = file_path
        self.contents_read = False

    @property
    def byte_contents(self):
        # Lazily read the real file's bytes on first access.
        if (not self.contents_read):
            self.contents_read = True
            with io.open(self.file_path, 'rb') as f:
                self._byte_contents = f.read()
        # Mirror the real file's access time (reading may have updated it).
        self.st_atime = os.stat(self.file_path).st_atime
        return self._byte_contents

    def IsLargeFile(self):
        """The contents are never faked."""
        return False
# line: 619
class FakeDirectory(FakeFile):
    """Provides the appearance of a real directory."""

    def __init__(self, name, perm_bits=PERM_DEF, filesystem=None):
        """init.

        Args:
          name: name of the file/directory, without parent path information
          perm_bits: permission bits. defaults to 0o777.
          filesystem: if set, the fake filesystem where the directory
            is created
        """
        # BUGFIX: combine the directory type flag with the permission bits
        # using bitwise OR.  The previous expression used AND
        # (stat.S_IFDIR & perm_bits), which evaluates to 0 for the default
        # permissions (0o40000 & 0o777 == 0), stripping both the S_IFDIR
        # type bit and all permission bits from the directory's mode.
        FakeFile.__init__(self, name, (stat.S_IFDIR | perm_bits), {}, filesystem=filesystem)
        # A directory has an extra link to itself (the '.' entry).
        self.st_nlink += 1

    def SetContents(self, contents, encoding=None):
        """Raise an error - directory contents cannot be set directly."""
        error_class = (OSError if self.filesystem.is_windows_fs else IOError)
        raise error_class(errno.EISDIR, 'Trying to write to directory')

    @property
    def contents(self):
        """Return the dict of contained directory entries (name -> object)."""
        return self.byte_contents

    @property
    def ordered_dirs(self):
        """Return the list of contained directory entry names ordered by creation order."""
        return [item[0] for item in sorted(self.byte_contents.items(), key=(lambda entry: entry[1].st_ino))]

    def AddEntry(self, path_object):
        """Adds a child FakeFile to this directory.

        Args:
          path_object: FakeFile instance to add as a child of this directory.

        Raises:
          OSError: if the directory has no write permission (Posix only)
          OSError: if the file or directory to be added already exists
        """
        if ((not (self.st_mode & PERM_WRITE)) and (not self.filesystem.is_windows_fs)):
            raise OSError(errno.EACCES, 'Permission Denied', self.GetPath())
        if (path_object.name in self.contents):
            raise OSError(errno.EEXIST, 'Object already exists in fake filesystem', self.GetPath())
        self.contents[path_object.name] = path_object
        path_object.parent_dir = self
        # The new entry adds a link both to this directory and to itself.
        self.st_nlink += 1
        path_object.st_nlink += 1
        path_object.st_dev = self.st_dev
        if (path_object.st_nlink == 1):
            # Account disk usage only for the first hard link of the entry.
            self.filesystem.ChangeDiskUsage(path_object.GetSize(), path_object.name, self.st_dev)

    def GetEntry(self, pathname_name):
        """Retrieves the specified child file or directory entry.

        Args:
          pathname_name: basename of the child object to retrieve.

        Returns:
          fake file or directory object.

        Raises:
          KeyError: if no child exists by the specified name.
        """
        return self.contents[pathname_name]

    def RemoveEntry(self, pathname_name, recursive=True):
        """Removes the specified child file or directory.

        Args:
          pathname_name: basename of the child object to remove.
          recursive: if True (default), the entries in contained directories
            are deleted first.  Needed to propagate removal errors
            (e.g. permission problems) from contained entries.

        Raises:
          KeyError: if no child exists by the specified name.
          OSError: if user lacks permission to delete the file,
            or (Windows only) the file is open.
        """
        entry = self.contents[pathname_name]
        if ((entry.st_mode & PERM_WRITE) == 0):
            raise OSError(errno.EACCES, 'Trying to remove object without write permission', pathname_name)
        if (self.filesystem.is_windows_fs and self.filesystem.HasOpenFile(entry)):
            raise OSError(errno.EACCES, 'Trying to remove an open file', pathname_name)
        if (recursive and isinstance(entry, FakeDirectory)):
            # Delete children first so their errors surface before this
            # directory's bookkeeping is touched.
            while entry.contents:
                entry.RemoveEntry(list(entry.contents)[0])
        elif (entry.st_nlink == 1):
            # Last hard link: give back the entry's disk usage.
            self.filesystem.ChangeDiskUsage((- entry.GetSize()), pathname_name, entry.st_dev)
        self.st_nlink -= 1
        entry.st_nlink -= 1
        assert (entry.st_nlink >= 0)
        del self.contents[pathname_name]

    def GetSize(self):
        """Return the total size of all files contained in this directory tree."""
        return sum([item[1].GetSize() for item in self.contents.items()])

    def HasParentObject(self, dir_object):
        """Return `True` if dir_object is a direct or indirect parent
        directory, or if both are the same object."""
        obj = self
        while obj:
            if (obj == dir_object):
                return True
            obj = obj.parent_dir
        return False

    def __str__(self):
        # Recursive tree rendering: own entry, then children indented.
        description = (super(FakeDirectory, self).__str__() + ':\n')
        for item in self.contents:
            item_desc = self.contents[item].__str__()
            for line in item_desc.split('\n'):
                if line:
                    description = (((description + ' ') + line) + '\n')
        return description
# line: 746
class FakeDirectoryFromRealDirectory(FakeDirectory):
    """Represents a fake directory copied from the real file system.

    The contents of the directory are read on demand only.
    """

    def __init__(self, dir_path, filesystem, read_only):
        """init.

        Args:
          dir_path: full directory path
          filesystem: the fake filesystem where the directory is created
          read_only: if set, all files under the directory are treated as
            read-only, e.g. a write access raises an exception; otherwise,
            writing to the files changes the fake files only as usually.

        Raises:
          OSError: if the directory does not exist in the real file system
        """
        real_stat = os.stat(dir_path)
        super(FakeDirectoryFromRealDirectory, self).__init__(name=os.path.split(dir_path)[1], perm_bits=real_stat.st_mode, filesystem=filesystem)
        # Copy times and ownership from the real directory.
        self.st_ctime = real_stat.st_ctime
        self.st_atime = real_stat.st_atime
        self.st_mtime = real_stat.st_mtime
        self.st_gid = real_stat.st_gid
        self.st_uid = real_stat.st_uid
        self.dir_path = dir_path
        self.read_only = read_only
        self.contents_read = False

    @property
    def contents(self):
        """Return the list of contained directory entries, loading them if not already loaded."""
        if (not self.contents_read):
            self.contents_read = True
            self.filesystem.add_real_paths([os.path.join(self.dir_path, entry) for entry in os.listdir(self.dir_path)], read_only=self.read_only)
        return self.byte_contents

    def GetSize(self):
        # The size of a directory is zero until its contents are loaded.
        if (not self.contents_read):
            return 0
        return super(FakeDirectoryFromRealDirectory, self).GetSize()
# line: 798
class FakeFilesystem(object):
# line: 809
'Provides the appearance of a real directory tree for unit testing.\n\n Attributes:\n path_separator: The path separator, corresponds to `os.path.sep`.\n alternative_path_separator: Corresponds to `os.path.altsep`.\n is_windows_fs: `True` in a Windows file system, `False` otherwise.\n is_case_sensitive: `True` if a case-sensitive file system is assumed.\n root: The root `FakeDirectory` entry of the file system.\n cwd: The current working directory path.\n umask: The umask used for newly created files, see `os.umask`.\n '
# line: 811
    def __init__(self, path_separator=os.path.sep, total_size=None):
        """init.

        Args:
          path_separator: optional substitute for os.path.sep
          total_size: if not None, the total size in bytes of the
            root filesystem.

        Example usage to emulate real file systems:
          filesystem = FakeFilesystem(
              alt_path_separator='/' if _is_windows else None)
        """
        self.path_separator = path_separator
        self.alternative_path_separator = os.path.altsep
        if (path_separator != os.sep):
            # A custom separator makes the real OS's altsep meaningless.
            self.alternative_path_separator = None
        self.is_windows_fs = (sys.platform == 'win32')
        self.is_case_sensitive = (sys.platform not in ['win32', 'cygwin', 'darwin'])
        self.root = FakeDirectory(self.path_separator, filesystem=self)
        self.cwd = self.root.name
        # Read the current umask without changing it: os.umask(18) (== 0o022)
        # returns the previous value, which is immediately restored.
        self.umask = os.umask(18)
        os.umask(self.umask)
        # Open file objects, indexed by file descriptor number.
        self.open_files = []
        # Min-heap of descriptors freed by closed files, reused first.
        self._free_fd_heap = []
        # Last assigned inode and device numbers (for unique assignment).
        self._last_ino = 0
        self._last_dev = 0
        self.mount_points = {}
        self.AddMountPoint(self.root.name, total_size)
# line: 854
@staticmethod
# line: 854
def _matching_string(matched, string):
# line: 858
'Return the string as byte or unicode depending \n on the type of matched, assuming string is an ASCII string.\n '
# line: 859
if (string is None):
# line: 860
return string
# line: 861
if (sys.version_info < (3,)):
# line: 862
if isinstance(matched, unicode):
# line: 863
return unicode(string)
else:
# line: 865
return string
elif isinstance(matched, bytes):
# line: 868
return bytes(string, 'ascii')
else:
# line: 870
return string
# line: 872
    def _path_separator(self, path):
        """Return the path separator as the same type (str/bytes) as `path`."""
        return self._matching_string(path, self.path_separator)
# line: 876
    def _alternative_path_separator(self, path):
        """Return the alternative path separator as the same type as `path`."""
        return self._matching_string(path, self.alternative_path_separator)
# line: 880
    def _IsLinkSupported(self):
        # Hard/symbolic links are supported everywhere except on Windows
        # with Python versions before 3.2.
        return ((not self.is_windows_fs) or (sys.version_info >= (3, 2)))
# line: 884
    def AddMountPoint(self, path, total_size=None):
        """Add a new mount point for a filesystem device.
        The mount point gets a new unique device number.

        Args:
          path: The root path for the new mount path.
          total_size: The new total size of the added filesystem device
            in bytes. Defaults to infinite size.

        Returns:
          The newly created mount point dict.

        Raises:
          OSError: if trying to mount an existing mount point again.
        """
        path = self.NormalizePath(path)
        if (path in self.mount_points):
            raise OSError(errno.EEXIST, 'Mount point cannot be added twice', path)
        self._last_dev += 1
        self.mount_points[path] = {'idev': self._last_dev, 'total_size': total_size, 'used_size': 0, }
        # The mount point's root directory carries the new device number.
        root_dir = (self.root if (path == self.root.name) else self.CreateDirectory(path))
        root_dir.st_dev = self._last_dev
        return self.mount_points[path]
# line: 913
    def _AutoMountDriveIfNeeded(self, path, force=False):
        # Windows only: transparently create a mount point for the drive
        # of `path` if none exists yet (or unconditionally with `force`).
        # Returns the new mount point dict, or None if nothing was mounted.
        if (self.is_windows_fs and (force or (not self._MountPointForPath(path)))):
            drive = self.SplitDrive(path)[0]
            if drive:
                return self.AddMountPoint(path=drive)
# line: 919
    def _MountPointForPath(self, path):
        """Return the mount point dict responsible for `path`.

        The registered mount path that is the longest prefix of `path`
        wins; if none matches, a drive mount point is auto-created
        (Windows behavior).
        """
        def to_str(string):
            """Convert the str, unicode or byte object to a str using the default encoding."""
            if ((string is None) or isinstance(string, str)):
                return string
            if (sys.version_info < (3, 0)):
                return string.encode(locale.getpreferredencoding(False))
            else:
                return string.decode(locale.getpreferredencoding(False))
        path = self.NormalizePath(self.NormalizeCase(path))
        if (path in self.mount_points):
            # Exact match - fast path.
            return self.mount_points[path]
        mount_path = self._matching_string(path, '')
        drive = self.SplitDrive(path)[:1]
        for root_path in self.mount_points:
            root_path = self._matching_string(path, root_path)
            if (drive and (not root_path.startswith(drive))):
                # Mount points on other drives cannot match.
                continue
            if (path.startswith(root_path) and (len(root_path) > len(mount_path))):
                # Keep the longest (most specific) matching mount path.
                mount_path = root_path
        if mount_path:
            return self.mount_points[to_str(mount_path)]
        mount_point = self._AutoMountDriveIfNeeded(path, force=True)
        assert mount_point
        return mount_point
# line: 946
def _MountPointForDevice(self, idev):
# line: 947
for mount_point in self.mount_points.values():
# line: 948
if (mount_point['idev'] == idev):
# line: 949
return mount_point
# line: 951
    def GetDiskUsage(self, path=None):
        """Return the total, used and free disk space in bytes as named tuple,
        or placeholder values simulating unlimited space if not set.
        Note: This matches the return value of shutil.disk_usage().

        Args:
          path: The disk space is returned for the file system device where
            path resides.
            Defaults to the root path (e.g. '/' on Unix systems).
        """
        DiskUsage = namedtuple('usage', 'total, used, free')
        if (path is None):
            mount_point = self.mount_points[self.root.name]
        else:
            mount_point = self._MountPointForPath(path)
        if (mount_point and (mount_point['total_size'] is not None)):
            return DiskUsage(mount_point['total_size'], mount_point['used_size'], (mount_point['total_size'] - mount_point['used_size']))
        # No size limit configured: report 1 TiB total, all of it free.
        return DiskUsage((((1024 * 1024) * 1024) * 1024), 0, (((1024 * 1024) * 1024) * 1024))
# line: 972
    def SetDiskUsage(self, total_size, path=None):
        """Changes the total size of the file system, preserving the used space.
        Example usage: set the size of an auto-mounted Windows drive.

        Args:
          total_size: The new total size of the filesystem in bytes.
          path: The disk space is changed for the file system device where
            path resides.
            Defaults to the root path (e.g. '/' on Unix systems).

        Raises:
          IOError: if the new space is smaller than the used size.
        """
        if (path is None):
            path = self.root.name
        mount_point = self._MountPointForPath(path)
        if ((mount_point['total_size'] is not None) and (mount_point['used_size'] > total_size)):
            raise IOError(errno.ENOSPC, ('Fake file system: cannot change size to %r bytes - used space is larger' % total_size), path)
        mount_point['total_size'] = total_size
# line: 996
    def ChangeDiskUsage(self, usage_change, file_path, st_dev):
        """Change the used disk space by the given amount.

        Args:
          usage_change: Number of bytes added to the used space.
            If negative, the used space will be decreased.
          file_path: The path of the object needing the disk space.
          st_dev: The device ID for the respective file system.

        Raises:
          IOError: if usage_change exceeds the free file system space
        """
        mount_point = self._MountPointForDevice(st_dev)
        if mount_point:
            if (mount_point['total_size'] is not None):
                # Only a size-limited filesystem can run out of space.
                if ((mount_point['total_size'] - mount_point['used_size']) < usage_change):
                    raise IOError(errno.ENOSPC, ('Fake file system: disk is full, failed to add %r bytes' % usage_change), file_path)
            mount_point['used_size'] += usage_change
# line: 1020
    def GetStat(self, entry_path, follow_symlinks=True):
        """Return the os.stat-like tuple for the FakeFile object of entry_path.

        Args:
          entry_path: path to filesystem object to retrieve.
          follow_symlinks: if False and entry_path points to a symlink,
            the link itself is inspected instead of the linked object.

        Returns:
          the FakeStatResult object corresponding to entry_path.

        Raises:
          OSError: if the filesystem object doesn't exist.
        """
        try:
            file_object = self.ResolveObject(entry_path, follow_symlinks, allow_fd=True)
            # Return a copy so callers cannot mutate the stored stat values.
            return file_object.stat_result.copy()
        except IOError as io_error:
            # Translate internal IOError to the OSError expected by os.stat.
            raise OSError(io_error.errno, io_error.strerror, entry_path)
# line: 1042
    def ChangeMode(self, path, mode, follow_symlinks=True):
        """Change the permissions of a file as encoded in integer mode.

        Args:
          path: (str) Path to the file.
          mode: (int) Permissions.
          follow_symlinks: if False and entry_path points to a symlink,
            the link itself is affected instead of the linked object.

        Raises:
          OSError: if the filesystem object doesn't exist.
        """
        try:
            file_object = self.ResolveObject(path, follow_symlinks, allow_fd=True)
        except IOError as io_error:
            if (io_error.errno == errno.ENOENT):
                raise OSError(errno.ENOENT, 'No such file or directory in fake filesystem', path)
            raise
        # Replace only the permission bits; keep the file-type bits intact.
        file_object.st_mode = ((file_object.st_mode & (~ PERM_ALL)) | (mode & PERM_ALL))
        file_object.st_ctime = time.time()
# line: 1064
    def UpdateTime(self, path, times=None, ns=None, follow_symlinks=True):
        """Change the access and modified times of a file.

        Args:
          path: (str) Path to the file.
          times: 2-tuple of int or float numbers, of the form (atime, mtime)
            which is used to set the access and modified times in seconds.
            If None, both times are set to the current time.
          ns: 2-tuple of int numbers, of the form (atime, mtime) which is
            used to set the access and modified times in nanoseconds.
            If None, both times are set to the current time.
          follow_symlinks: If `False` and entry_path points to a symlink,
            the link itself is queried instead of the linked object.

        Raises:
          TypeError: If anything other than the expected types is
            specified in the passed `times` or `ns` tuple,
            or if the tuple length is not equal to 2.
          ValueError: If both times and ns are specified.
        """
        if ((times is not None) and (ns is not None)):
            raise ValueError("utime: you may specify either 'times' or 'ns' but not both")
        if ((times is not None) and (len(times) != 2)):
            raise TypeError("utime: 'times' must be either a tuple of two ints or None")
        if ((ns is not None) and (len(ns) != 2)):
            raise TypeError("utime: 'ns' must be a tuple of two ints")
        try:
            file_object = self.ResolveObject(path, follow_symlinks, allow_fd=True)
        except IOError as io_error:
            if (io_error.errno == errno.ENOENT):
                raise OSError(errno.ENOENT, 'No such file or directory in fake filesystem', path)
            raise
        if (times is not None):
            for file_time in times:
                if (not isinstance(file_time, (int, float))):
                    raise TypeError('atime and mtime must be numbers')
            file_object.st_atime = times[0]
            file_object.st_mtime = times[1]
        elif (ns is not None):
            for file_time in ns:
                if (not isinstance(file_time, int)):
                    raise TypeError('atime and mtime must be ints')
            file_object.st_atime_ns = ns[0]
            file_object.st_mtime_ns = ns[1]
        else:
            # Neither given: touch both times with "now".
            current_time = time.time()
            file_object.st_atime = current_time
            file_object.st_mtime = current_time
# line: 1121
    def SetIno(self, path, st_ino):
        """Set the st_ino attribute of the file at 'path'.
        Note that a unique inode is assigned automatically to a new fake
        file; this does not guarantee uniqueness - use with caution.

        Args:
          path: Path to the file.
          st_ino: The desired inode.
        """
        self.GetObject(path).SetIno(st_ino)
# line: 1132
    def AddOpenFile(self, file_obj):
        """Add file_obj to the list of open files on the filesystem.

        The position in the self.open_files array is the file descriptor
        number.

        Args:
          file_obj: file object to be added to open files list.

        Returns:
          File descriptor number for the file object.
        """
        if self._free_fd_heap:
            # Reuse the lowest previously-freed descriptor first.
            open_fd = heapq.heappop(self._free_fd_heap)
            self.open_files[open_fd] = file_obj
            return open_fd
        self.open_files.append(file_obj)
        return (len(self.open_files) - 1)
# line: 1151
def CloseOpenFile(self, file_des):
# line: 1158
'Remove file object with given descriptor from the list of open files.\n\n Sets the entry in open_files to None.\n\n Args:\n file_des: descriptor of file object to be removed from open files list.\n '
# line: 1159
self.open_files[file_des] = None
# line: 1160
heapq.heappush(self._free_fd_heap, file_des)
# line: 1162
def GetOpenFile(self, file_des):
# line: 1174
'Return an open file.\n\n Args:\n file_des: file descriptor of the open file.\n\n Raises:\n OSError: an invalid file descriptor.\n TypeError: filedes is not an integer.\n\n Returns:\n Open file object.\n '
# line: 1175
if (not isinstance(file_des, int)):
# line: 1176
raise TypeError('an integer is required')
# line: 1177
if ((file_des >= len(self.open_files)) or (self.open_files[file_des] is None)):
# line: 1179
raise OSError(errno.EBADF, 'Bad file descriptor', file_des)
# line: 1180
return self.open_files[file_des]
# line: 1182
def HasOpenFile(self, file_object):
# line: 1191
'Return True if the given file object is in the list of open files.\n New in pyfakefs 2.9.\n\n Args:\n file_object: The FakeFile object to be checked.\n\n Returns:\n True if the file is open.\n '
# line: 1192
return (file_object in [wrapper.GetObject() for wrapper in self.open_files if wrapper])
# line: 1194
def NormalizePathSeparator(self, path):
# line: 1204
'Replace all appearances of alternative path separator with path separator.\n Do nothing if no alternative separator is set.\n New in pyfakefs 2.9.\n\n Args:\n path: the path to be normalized.\n\n Returns:\n The normalized path that will be used internally.\n '
# line: 1205
if (sys.version_info >= (3, 6)):
# line: 1206
path = os.fspath(path)
# line: 1207
if ((self.alternative_path_separator is None) or (not path)):
# line: 1208
return path
# line: 1209
return path.replace(self._alternative_path_separator(path), self._path_separator(path))
# line: 1211
    def CollapsePath(self, path):
        """Mimic os.path.normpath using the specified path_separator.

        Mimics os.path.normpath using the path_separator that was specified
        for this FakeFilesystem. Normalizes the path, but unlike the method
        NormalizePath, does not make it absolute. Eliminates dot components
        (. and ..) and combines repeated path separators (//). Initial ..
        components are left in place for relative paths. If the result is
        an empty path, '.' is returned instead.

        This also replaces the alternative path separator with the path
        separator. That is, it behaves like the real os.path.normpath on
        Windows if initialized with '\\\\' as path separator and '/' as
        alternative separator.

        Args:
            path: (str) The path to normalize.

        Returns:
            (str) A copy of path with empty components and dot components
            removed.
        """
        path = self.NormalizePathSeparator(path)
        (drive, path) = self.SplitDrive(path)
        sep = self._path_separator(path)
        is_absolute_path = path.startswith(sep)
        path_components = path.split(sep)
        collapsed_path_components = []
        # Build '.'/'..' literals of the same string type (str/bytes) as `path`.
        dot = self._matching_string(path, '.')
        dotdot = self._matching_string(path, '..')
        for component in path_components:
            if ((not component) or (component == dot)):
                # Skip empty components (from '//') and '.' components.
                continue
            if (component == dotdot):
                if (collapsed_path_components and (collapsed_path_components[(-1)] != dotdot)):
                    # Remove an up-reference: directory/..
                    collapsed_path_components.pop()
                    continue
                elif is_absolute_path:
                    # Ignore leading '..' components when starting from the root.
                    continue
            collapsed_path_components.append(component)
        collapsed_path = sep.join(collapsed_path_components)
        if is_absolute_path:
            collapsed_path = (sep + collapsed_path)
        # An empty result collapses to '.', matching os.path.normpath.
        return ((drive + collapsed_path) or dot)
# line: 1257
    def NormalizeCase(self, path):
        """Return a normalized-case version of the given path for
        case-insensitive file systems. For case-sensitive file systems,
        return the path unchanged.
        New in pyfakefs 2.9.

        Args:
            path: The file path to be transformed.

        Returns:
            A version of path matching the case of existing path elements.
        """
        def components_to_path():
            # Re-assemble the case-normalized components; any components
            # past the point where traversal stopped are kept as given.
            if (len(path_components) > len(normalized_components)):
                normalized_components.extend(path_components[len(normalized_components):])
            sep = self._path_separator(path)
            normalized_path = sep.join(normalized_components)
            if (path.startswith(sep) and (not normalized_path.startswith(sep))):
                normalized_path = (sep + normalized_path)
            return normalized_path

        if (self.is_case_sensitive or (not path)):
            return path
        path_components = self.GetPathComponents(path)
        normalized_components = []
        current_dir = self.root
        for component in path_components:
            if (not isinstance(current_dir, FakeDirectory)):
                # Cannot descend further; keep the remaining components as-is.
                return components_to_path()
            (dir_name, current_dir) = self._DirectoryContent(current_dir, component)
            if ((current_dir is None) or (isinstance(current_dir, FakeDirectory) and (current_dir._byte_contents is None) and (current_dir.st_size == 0))):
                # Component not found (or an empty placeholder directory);
                # stop normalizing here.
                return components_to_path()
            normalized_components.append(dir_name)
        return components_to_path()
# line: 1294
    def NormalizePath(self, path):
        """Absolutize and minimalize the given path.

        Forces all relative paths to be absolute, and normalizes the path
        to eliminate dot and empty components.

        Args:
            path: Path to normalize.

        Returns:
            The normalized path relative to the current working directory,
            or the root directory if path is empty.
        """
        path = self.NormalizePathSeparator(path)
        if (not path):
            path = self.path_separator
        elif (not self._StartsWithRootPath(path)):
            # A relative path is interpreted against the current working
            # directory (unless cwd is the root, which would double the
            # separator).
            root_name = self._matching_string(path, self.root.name)
            empty = self._matching_string(path, '')
            path = self._path_separator(path).join(((((self.cwd != root_name) and self.cwd) or empty), path))
        if (path == self._matching_string(path, '.')):
            path = self.cwd
        return self.CollapsePath(path)
# line: 1320
    def SplitPath(self, path):
        """Mimic os.path.split using the specified path_separator.

        Mimics os.path.split using the path_separator that was specified
        for this FakeFilesystem.

        Args:
            path: (str) The path to split.

        Returns:
            (str) A duple (pathname, basename) for which pathname does not
            end with a slash, and basename does not contain a slash.
        """
        (drive, path) = self.SplitDrive(path)
        path = self.NormalizePathSeparator(path)
        sep = self._path_separator(path)
        path_components = path.split(sep)
        if (not path_components):
            return ('', '')
        basename = path_components.pop()
        if (not path_components):
            # A bare file name without any separator.
            return ('', basename)
        for component in path_components:
            if component:
                # The dirname is not entirely separators: strip trailing
                # empty components so it does not end with a separator.
                while (not path_components[(-1)]):
                    path_components.pop()
                return ((drive + sep.join(path_components)), basename)
        # The dirname consists of separators only, i.e. the root.
        return ((drive or sep), basename)
# line: 1352
    def SplitDrive(self, path):
        """Split the path into the drive part and the rest of the path.
        New in pyfakefs 2.9.

        Taken from the Windows-specific implementation in Python 3.5
        (ntpath.splitdrive) and slightly adapted.

        Args:
            path: The full path to be split.

        Returns:
            A tuple of the drive part and the rest of the path, or of an
            empty string and the full path if drive letters are not
            supported or no drive is present.
        """
        if (sys.version_info >= (3, 6)):
            path = os.fspath(path)
        if self.is_windows_fs:
            if (len(path) >= 2):
                path = self.NormalizePathSeparator(path)
                sep = self._path_separator(path)
                # UNC path handling matches Python >= 2.7.8 behavior only.
                if (sys.version_info >= (2, 7, 8)):
                    if ((path[0:2] == (sep * 2)) and (path[2:3] != sep)):
                        # UNC path: \\machine\mountpoint\directory\etc.
                        # The drive part is \\machine\mountpoint.
                        sep_index = path.find(sep, 2)
                        if (sep_index == (-1)):
                            return (path[:0], path)
                        sep_index2 = path.find(sep, (sep_index + 1))
                        if (sep_index2 == (sep_index + 1)):
                            # A UNC path cannot have two separators in a row
                            # right after the machine name.
                            return (path[:0], path)
                        if (sep_index2 == (-1)):
                            sep_index2 = len(path)
                        return (path[:sep_index2], path[sep_index2:])
                if (path[1:2] == self._matching_string(path, ':')):
                    # Classic drive letter, e.g. 'C:'.
                    return (path[:2], path[2:])
        return (path[:0], path)
# line: 1388
    def _JoinPathsWithDriveSupport(self, *all_paths):
        """Join paths with Windows drive-letter awareness.

        Taken from Python 3.5 os.path.join() code in ntpath.py and
        slightly adapted.
        """
        base_path = all_paths[0]
        paths_to_add = all_paths[1:]
        sep = self._path_separator(base_path)
        seps = [sep, self._alternative_path_separator(base_path)]
        (result_drive, result_path) = self.SplitDrive(base_path)
        for path in paths_to_add:
            (drive_part, path_part) = self.SplitDrive(path)
            if (path_part and (path_part[:1] in seps)):
                # Second path is absolute: it replaces the result so far.
                if (drive_part or (not result_drive)):
                    result_drive = drive_part
                result_path = path_part
                continue
            elif (drive_part and (drive_part != result_drive)):
                if (self.is_case_sensitive or (drive_part.lower() != result_drive.lower())):
                    # Different drives: ignore the first path entirely.
                    result_drive = drive_part
                    result_path = path_part
                    continue
                # Same drive in different case: keep the new spelling.
                result_drive = drive_part
            # Second path is relative to the first.
            if (result_path and (result_path[(-1):] not in seps)):
                result_path = (result_path + sep)
            result_path = (result_path + path_part)
        # Add a separator between UNC drive and a non-absolute path.
        colon = self._matching_string(base_path, ':')
        if (result_path and (result_path[:1] not in seps) and result_drive and (result_drive[(-1):] != colon)):
            return ((result_drive + sep) + result_path)
        return (result_drive + result_path)
# line: 1422
    def JoinPaths(self, *paths):
        """Mimic os.path.join using the specified path_separator.

        Args:
            *paths: (str) Zero or more paths to join.

        Returns:
            (str) The paths joined by the path separator, starting with
            the last absolute path in paths.
        """
        if (sys.version_info >= (3, 6)):
            paths = [os.fspath(path) for path in paths]
        if (len(paths) == 1):
            return paths[0]
        if self.is_windows_fs:
            # Windows joining must honor drive letters and UNC prefixes.
            return self._JoinPathsWithDriveSupport(*paths)
        joined_path_segments = []
        sep = self._path_separator(paths[0])
        for path_segment in paths:
            if self._StartsWithRootPath(path_segment):
                # An absolute segment discards everything joined so far.
                joined_path_segments = [path_segment]
            else:
                if (joined_path_segments and (not joined_path_segments[(-1)].endswith(sep))):
                    joined_path_segments.append(sep)
                if path_segment:
                    joined_path_segments.append(path_segment)
        return self._matching_string(paths[0], '').join(joined_path_segments)
# line: 1452
    def GetPathComponents(self, path):
        """Break the path into a list of component names.

        Does not include the root directory as a component, as all paths
        are considered relative to the root directory for the
        FakeFilesystem. Callers should basically follow this pattern:

        >>> file_path = self.NormalizePath(file_path)
        >>> path_components = self.GetPathComponents(file_path)
        >>> current_dir = self.root
        >>> for component in path_components:
        >>>     if component not in current_dir.contents:
        >>>         raise IOError
        >>>     DoStuffWithComponent(current_dir, component)
        >>>     current_dir = current_dir.GetEntry(component)

        Args:
            path: Path to tokenize.

        Returns:
            The list of names split from path.
        """
        if ((not path) or (path == self._path_separator(path))):
            return []
        (drive, path) = self.SplitDrive(path)
        path_components = path.split(self._path_separator(path))
        assert (drive or path_components)
        if (not path_components[0]):
            # This is an absolute path: drop the empty leading component.
            path_components = path_components[1:]
        if drive:
            # The drive (e.g. 'C:') counts as the first component.
            path_components.insert(0, drive)
        return path_components
# line: 1486
def StartsWithDriveLetter(self, file_path):
# line: 1496
'Return True if file_path starts with a drive letter.\n New in pyfakefs 2.9.\n\n Args:\n file_path: the full path to be examined.\n\n Returns:\n True if drive letter support is enabled in the filesystem and\n the path starts with a drive letter.\n '
# line: 1497
colon = self._matching_string(file_path, ':')
# line: 1498
return (self.is_windows_fs and (len(file_path) >= 2) and file_path[:1].isalpha and (file_path[1:2] == colon))
# line: 1501
def _StartsWithRootPath(self, file_path):
# line: 1502
root_name = self._matching_string(file_path, self.root.name)
# line: 1503
return (file_path.startswith(root_name) or ((not self.is_case_sensitive) and file_path.lower().startswith(root_name.lower())) or self.StartsWithDriveLetter(file_path))
# line: 1508
def _IsRootPath(self, file_path):
# line: 1509
root_name = self._matching_string(file_path, self.root.name)
# line: 1510
return ((file_path == root_name) or ((not self.is_case_sensitive) and (file_path.lower() == root_name.lower())) or ((len(file_path) == 2) and self.StartsWithDriveLetter(file_path)))
# line: 1514
def _EndsWithPathSeparator(self, file_path):
# line: 1515
return (file_path and (file_path.endswith(self._path_separator(file_path)) or ((self.alternative_path_separator is not None) and file_path.endswith(self._alternative_path_separator(file_path)))))
# line: 1519
def _DirectoryContent(self, directory, component):
# line: 1520
if (not isinstance(directory, FakeDirectory)):
# line: 1521
return (None, None)
# line: 1522
if (component in directory.contents):
# line: 1523
return (component, directory.contents[component])
# line: 1524
if (not self.is_case_sensitive):
# line: 1525
matching_content = [(subdir, directory.contents[subdir]) for subdir in directory.contents if (subdir.lower() == component.lower())]
# line: 1528
if matching_content:
# line: 1529
return matching_content[0]
# line: 1531
return (None, None)
# line: 1533
def Exists(self, file_path):
# line: 1544
'Return true if a path points to an existing file system object.\n\n Args:\n file_path: path to examine.\n\n Returns:\n (bool) True if the corresponding object exists.\n\n Raises:\n TypeError: if file_path is None.\n '
# line: 1545
if (sys.version_info >= (3, 6)):
# line: 1546
file_path = os.fspath(file_path)
# line: 1547
if (file_path is None):
# line: 1548
raise TypeError
# line: 1549
if (not file_path):
# line: 1550
return False
# line: 1551
try:
# line: 1552
file_path = self.ResolvePath(file_path)
# line: 1553
except (IOError, OSError):
# line: 1554
return False
# line: 1555
if (file_path == self.root.name):
# line: 1556
return True
# line: 1557
path_components = self.GetPathComponents(file_path)
# line: 1558
current_dir = self.root
# line: 1559
for component in path_components:
# line: 1560
current_dir = self._DirectoryContent(current_dir, component)[1]
# line: 1561
if (not current_dir):
# line: 1562
return False
# line: 1563
return True
# line: 1565
    def ResolvePath(self, file_path, allow_fd=False, raw_io=True):
        """Follow a path, resolving symlinks.

        ResolvePath traverses the filesystem along the specified file
        path, resolving file names and symbolic links until all elements
        of the path are exhausted, or we reach a file which does not
        exist. If all the elements are not consumed, they just get
        appended to the path resolved so far. This gives us the path
        which is as resolved as it can be, even if the file does not
        exist.

        This behavior mimics Unix semantics, and is best shown by
        example. Given a file system that looks like this:

              /a/b/
              /a/b/c -> /a/b2          c is a symlink to /a/b2
              /a/b2/x
              /a/c -> ../d
              /a/x -> y

        Then:
              /a/b/x      =>  /a/b/x
              /a/c        =>  /a/d
              /a/x        =>  /a/y
              /a/b/c/d/e  =>  /a/b2/d/e

        Args:
            file_path: Path to examine.
            allow_fd: If `True`, `file_path` may be an open file descriptor.
            raw_io: `True` if called from low-level I/O functions.

        Returns:
            resolved_path (string) or None.

        Raises:
            TypeError: if file_path is None.
            IOError: if file_path is '' or a part of the path doesn't exist.
        """

        def _ComponentsToPath(component_folders):
            # Join components back into an absolute path string.
            sep = (self._path_separator(component_folders[0]) if component_folders else self.path_separator)
            path = sep.join(component_folders)
            if (not self._StartsWithRootPath(path)):
                path = (sep + path)
            return path

        def _ValidRelativePath(file_path):
            # Each parent of every '..' reference must itself exist.
            slash_dotdot = self._matching_string(file_path, '/..')
            while (file_path and (slash_dotdot in file_path)):
                file_path = file_path[:file_path.rfind(slash_dotdot)]
                if (not self.Exists(self.NormalizePath(file_path))):
                    return False
            return True

        def _FollowLink(link_path_components, link):
            """Follow a link w.r.t. a path resolved so far.

            The component is either a real file, which is a no-op, or a
            symlink. In the case of a symlink, we have to modify the path
            as built up so far:
              /a/b => ../c   should yield /a/../c (normalizes to /a/c)
              /a/b => x      should yield /a/x
              /a/b => /x/y/z should yield /x/y/z
            The modified path may land us in a new spot which is itself a
            link, so we may repeat the process.

            Args:
                link_path_components: The resolved path built up to the
                    link so far.
                link: The link object itself.

            Returns:
                (string) the updated path resolved after following the link.

            Raises:
                IOError: if there are too many levels of symbolic link.
            """
            link_path = link.contents
            sep = self._path_separator(link_path)
            alt_sep = self._alternative_path_separator(link_path)
            if ((not link_path.startswith(sep)) and ((alt_sep is None) or (not link_path.startswith(alt_sep)))):
                # Relative link target: interpret it relative to the
                # directory containing the link.
                components = link_path_components[:(-1)]
                components.append(link_path)
                link_path = sep.join(components)
            # Use CollapsePath (not NormalizePath) so cwd is not prepended.
            return self.CollapsePath(link_path)

        if (allow_fd and (sys.version_info >= (3, 3)) and isinstance(file_path, int)):
            # An open file descriptor was passed instead of a path.
            return self.GetOpenFile(file_path).GetObject().GetPath()

        if (sys.version_info >= (3, 6)):
            file_path = os.fspath(file_path)
        if (file_path is None):
            raise TypeError('Expected file system path string, received None')
        if ((not file_path) or (not _ValidRelativePath(file_path))):
            # file.open('') raises IOError; mimic that here.
            raise IOError(errno.ENOENT, ("No such file or directory: '%s'" % file_path))
        file_path = self.NormalizePath(self.NormalizeCase(file_path))
        if self._IsRootPath(file_path):
            return file_path

        current_dir = self.root
        path_components = self.GetPathComponents(file_path)

        resolved_components = []
        link_depth = 0
        while path_components:
            component = path_components.pop(0)
            resolved_components.append(component)
            current_dir = self._DirectoryContent(current_dir, component)[1]
            if (current_dir is None):
                # The component does not exist. It is legal to link to a
                # file that does not yet exist, so rather than raising,
                # append the remaining components to the path resolved so
                # far and return that.
                resolved_components.extend(path_components)
                break
            # Resolve any symlink at the current component.
            if stat.S_ISLNK(current_dir.st_mode):
                if (link_depth > _MAX_LINK_DEPTH):
                    # Guard against symlink cycles.
                    error_class = (OSError if raw_io else IOError)
                    raise error_class(errno.ELOOP, ("Too many levels of symbolic links: '%s'" % _ComponentsToPath(resolved_components)))
                link_path = _FollowLink(resolved_components, current_dir)
                # Following the link may replace the whole path resolved
                # so far, so restart traversal from the root with the
                # link target prepended to the remaining components.
                target_components = self.GetPathComponents(link_path)
                path_components = (target_components + path_components)
                resolved_components = []
                current_dir = self.root
                link_depth += 1
        return _ComponentsToPath(resolved_components)
# line: 1715
    def GetObjectFromNormalizedPath(self, file_path):
        """Search for the specified filesystem object within the fake
        filesystem.

        Args:
            file_path: Specifies target FakeFile object to retrieve, with
                a path that has already been normalized/resolved.

        Returns:
            The FakeFile object corresponding to file_path.

        Raises:
            IOError: if the object is not found.
        """
        if (sys.version_info >= (3, 6)):
            file_path = os.fspath(file_path)
        if (file_path == self.root.name):
            return self.root
        path_components = self.GetPathComponents(file_path)
        target_object = self.root
        try:
            for component in path_components:
                if stat.S_ISLNK(target_object.st_mode):
                    # Resolve intermediate symlinks before descending.
                    target_object = self.ResolveObject(target_object.contents)
                if (not stat.S_ISDIR(target_object.st_mode)):
                    # An intermediate component is not a directory; POSIX
                    # reports ENOTDIR, Windows reports ENOENT.
                    if (not self.is_windows_fs):
                        raise IOError(errno.ENOTDIR, 'Not a directory in fake filesystem', file_path)
                    raise IOError(errno.ENOENT, 'No such file or directory in fake filesystem', file_path)
                target_object = target_object.GetEntry(component)
        except KeyError:
            raise IOError(errno.ENOENT, 'No such file or directory in fake filesystem', file_path)
        return target_object
# line: 1753
def GetObject(self, file_path):
# line: 1764
'Search for the specified filesystem object within the fake filesystem.\n\n Args:\n file_path: specifies target FakeFile object to retrieve.\n\n Returns:\n the FakeFile object corresponding to file_path.\n\n Raises:\n IOError: if the object is not found.\n '
# line: 1765
if (sys.version_info >= (3, 6)):
# line: 1766
file_path = os.fspath(file_path)
# line: 1767
file_path = self.NormalizePath(self.NormalizeCase(file_path))
# line: 1768
return self.GetObjectFromNormalizedPath(file_path)
# line: 1770
def ResolveObject(self, file_path, follow_symlinks=True, allow_fd=False):
# line: 1784
'Search for the specified filesystem object, resolving all links.\n\n Args:\n file_path: Specifies target FakeFile object to retrieve.\n follow_symlinks: If `False`, the link itself is resolved,\n otherwise the object linked to.\n allow_fd: If `True`, `file_path` may be open file descriptor\n\n Returns:\n the FakeFile object corresponding to file_path.\n\n Raises:\n IOError: if the object is not found.\n '
# line: 1785
if (allow_fd and (sys.version_info >= (3, 3)) and isinstance(file_path, int)):
# line: 1786
return self.GetOpenFile(file_path).GetObject()
# line: 1788
if follow_symlinks:
# line: 1789
if (sys.version_info >= (3, 6)):
# line: 1790
file_path = os.fspath(file_path)
# line: 1791
return self.GetObjectFromNormalizedPath(self.ResolvePath(file_path))
# line: 1792
return self.LResolveObject(file_path)
# line: 1794
    def LResolveObject(self, path):
        """Search for the specified object, resolving only parent links.

        This is analogous to the stat/lstat difference. This resolves
        links *to* the object but not of the final object itself.

        Args:
            path: Specifies target FakeFile object to retrieve.

        Returns:
            The FakeFile object corresponding to path.

        Raises:
            IOError: if the object is not found.
        """
        if (sys.version_info >= (3, 6)):
            path = os.fspath(path)
        if (path == self.root.name):
            # The root directory is its own parent.
            return self.root
        # Remove a trailing path separator, if any.
        sep = self._path_separator(path)
        alt_sep = self._alternative_path_separator(path)
        if (path.endswith(sep) or (alt_sep and path.endswith(alt_sep))):
            path = path[:(-1)]
        (parent_directory, child_name) = self.SplitPath(path)
        if (not parent_directory):
            # A bare name is looked up in the current working directory.
            parent_directory = self.cwd
        try:
            parent_obj = self.ResolveObject(parent_directory)
            assert parent_obj
            if (not isinstance(parent_obj, FakeDirectory)):
                # POSIX reports ENOTDIR for a file parent; Windows ENOENT.
                if ((not self.is_windows_fs) and isinstance(parent_obj, FakeFile)):
                    raise IOError(errno.ENOTDIR, 'The parent object is not a directory', path)
                raise IOError(errno.ENOENT, 'No such file or directory in fake filesystem', path)
            return parent_obj.GetEntry(child_name)
        except KeyError:
            raise IOError(errno.ENOENT, 'No such file or directory in the fake filesystem', path)
# line: 1840
def AddObject(self, file_path, file_object, error_class=OSError):
# line: 1851
'Add a fake file or directory into the filesystem at file_path.\n\n Args:\n file_path: the path to the file to be added relative to self.\n file_object: file or directory to add.\n error_class: the error class to be thrown if file_path does\n not correspond to a directory (used internally(\n\n Raises:\n IOError or OSError: if file_path does not correspond to a directory.\n '
# line: 1852
if (not file_path):
# line: 1853
target_directory = self.root
else:
# line: 1855
target_directory = self.ResolveObject(file_path)
# line: 1856
if (not stat.S_ISDIR(target_directory.st_mode)):
# line: 1857
raise error_class(errno.ENOTDIR, 'Not a directory in the fake filesystem', file_path)
# line: 1860
target_directory.AddEntry(file_object)
# line: 1862
    def RenameObject(self, old_file_path, new_file_path, force_replace=False):
        """Rename a FakeFile object at old_file_path to new_file_path,
        preserving all properties.

        Args:
            old_file_path: Path to filesystem object to rename.
            new_file_path: Path to where the filesystem object will live
                after this call.
            force_replace: If set and destination is an existing file, it
                will be replaced even under Windows if the user has
                permissions, otherwise replacement happens under Unix only.

        Raises:
            OSError: if old_file_path does not exist.
            OSError: if new_file_path is an existing directory
                (Windows, or Posix if old_file_path points to a regular file).
            OSError: if old_file_path is a directory and new_file_path a file.
            OSError: if new_file_path is an existing file and force_replace
                not set (Windows only).
            OSError: if new_file_path is an existing file and could not be
                removed (Posix, or Windows with force_replace set).
            OSError: if dirname(new_file_path) does not exist.
            OSError: if the file would be moved to another filesystem
                (e.g. mount point).
        """
        old_file_path = self.NormalizePath(old_file_path)
        new_file_path = self.NormalizePath(new_file_path)
        if ((not self.Exists(old_file_path)) and (not self.IsLink(old_file_path))):
            raise OSError(errno.ENOENT, 'Fake filesystem object: can not rename nonexistent file', old_file_path)
        old_object = self.LResolveObject(old_file_path)
        if (not self.is_windows_fs):
            # POSIX forbids renaming a directory onto a symlink and vice versa.
            if (self.IsDir(old_file_path, follow_symlinks=False) and self.IsLink(new_file_path)):
                raise OSError(errno.ENOTDIR, 'Cannot rename directory to symlink', new_file_path)
            if (self.IsDir(new_file_path, follow_symlinks=False) and self.IsLink(old_file_path)):
                raise OSError(errno.EISDIR, 'Cannot rename symlink to directory', new_file_path)
        if (self.Exists(new_file_path) or self.IsLink(new_file_path)):
            if (old_file_path == new_file_path):
                # Renaming a file onto itself is a no-op.
                return
            new_object = self.GetObject(new_file_path)
            if (old_object == new_object):
                if (old_file_path.lower() == new_file_path.lower()):
                    # Only the case differs (case-insensitive filesystem):
                    # fall through and perform the rename.
                    pass
                else:
                    # Hardlink to the same object: rename is a no-op.
                    return
            elif (stat.S_ISDIR(new_object.st_mode) or stat.S_ISLNK(new_object.st_mode)):
                if self.is_windows_fs:
                    if force_replace:
                        raise OSError(errno.EACCES, 'Fake filesystem object: can not replace existing directory', new_file_path)
                    else:
                        raise OSError(errno.EEXIST, 'Fake filesystem object: can not rename to existing directory', new_file_path)
                if (not stat.S_ISLNK(new_object.st_mode)):
                    if new_object.contents:
                        raise OSError(errno.EEXIST, 'Fake filesystem object: can not rename to non-empty directory', new_file_path)
                    if stat.S_ISREG(old_object.st_mode):
                        raise OSError(errno.EISDIR, 'Fake filesystem object: cannot rename file to directory', new_file_path)
            elif stat.S_ISDIR(old_object.st_mode):
                raise OSError(errno.ENOTDIR, 'Fake filesystem object: cannot rename directory to file', new_file_path)
            elif (self.is_windows_fs and (not force_replace)):
                raise OSError(errno.EEXIST, 'Fake filesystem object: can not rename to existing file', new_file_path)
            else:
                # Existing target file is removed before the rename.
                try:
                    self.RemoveObject(new_file_path)
                except IOError as exc:
                    raise OSError(exc.errno, exc.strerror, exc.filename)
        (old_dir, old_name) = self.SplitPath(old_file_path)
        (new_dir, new_name) = self.SplitPath(new_file_path)
        if (not self.Exists(new_dir)):
            raise OSError(errno.ENOENT, 'No such fake directory', new_dir)
        old_dir_object = self.ResolveObject(old_dir)
        new_dir_object = self.ResolveObject(new_dir)
        if (old_dir_object.st_dev != new_dir_object.st_dev):
            # Mount points simulate separate devices; cross-device rename fails.
            raise OSError(errno.EXDEV, 'Fake filesystem object: cannot rename across file systems', old_file_path)
        if (not stat.S_ISDIR(new_dir_object.st_mode)):
            raise OSError((errno.EACCES if self.is_windows_fs else errno.ENOTDIR), 'Fake filesystem object: target parent is not a directory', new_file_path)
        if new_dir_object.HasParentObject(old_object):
            # Cannot move a directory into one of its own subdirectories.
            raise OSError(errno.EINVAL, 'Fake filesystem object: invalid target for rename', new_file_path)
        # Detach the entry from its old parent, rename it, and re-attach.
        object_to_rename = old_dir_object.GetEntry(old_name)
        old_dir_object.RemoveEntry(old_name, recursive=False)
        object_to_rename.name = new_name
        if (new_name in new_dir_object.contents):
            # In case of a case-only rename on a case-insensitive system,
            # remove the existing entry before adding the renamed one.
            new_dir_object.RemoveEntry(new_name)
        new_dir_object.AddEntry(object_to_rename)
# line: 1977
    def RemoveObject(self, file_path):
        """Remove an existing file or directory.

        Args:
            file_path: The path to the file relative to self.

        Raises:
            IOError: if file_path does not correspond to an existing file,
                or if part of the path refers to something other than a
                directory.
            OSError: if the directory is in use (eg, if it is '/').
        """
        file_path = self.NormalizePath(self.NormalizeCase(file_path))
        if self._IsRootPath(file_path):
            # The root directory can never be removed.
            raise OSError(errno.EBUSY, 'Fake device or resource busy', file_path)
        try:
            (dirname, basename) = self.SplitPath(file_path)
            target_directory = self.ResolveObject(dirname)
            target_directory.RemoveEntry(basename)
        except KeyError:
            raise IOError(errno.ENOENT, 'No such file or directory in the fake filesystem', file_path)
        except AttributeError:
            # The parent resolved to a non-directory object.
            raise IOError(errno.ENOTDIR, 'Not a directory in the fake filesystem', file_path)
# line: 2005
def CreateDirectory(self, directory_path, perm_bits=PERM_DEF):
# line: 2019
'Create directory_path, and all the parent directories.\n\n Helper method to set up your test faster.\n\n Args:\n directory_path: The full directory path to create.\n perm_bits: The permission bits as set by `chmod`.\n\n Returns:\n the newly created FakeDirectory object.\n\n Raises:\n OSError: if the directory already exists.\n '
# line: 2020
directory_path = self.NormalizePath(directory_path)
# line: 2021
self._AutoMountDriveIfNeeded(directory_path)
# line: 2022
if self.Exists(directory_path):
# line: 2023
raise OSError(errno.EEXIST, 'Directory exists in fake filesystem', directory_path)
# line: 2026
path_components = self.GetPathComponents(directory_path)
# line: 2027
current_dir = self.root
# line: 2029
new_dirs = []
# line: 2030
for component in path_components:
# line: 2031
directory = self._DirectoryContent(current_dir, component)[1]
# line: 2032
if (not directory):
# line: 2033
new_dir = FakeDirectory(component, filesystem=self)
# line: 2034
new_dirs.append(new_dir)
# line: 2035
current_dir.AddEntry(new_dir)
# line: 2036
current_dir = new_dir
else:
# line: 2038
if stat.S_ISLNK(directory.st_mode):
# line: 2039
directory = self.ResolveObject(directory.contents)
# line: 2040
current_dir = directory
# line: 2041
if ((directory.st_mode & stat.S_IFDIR) != stat.S_IFDIR):
# line: 2042
raise OSError(errno.ENOTDIR, 'Not a directory', current_dir.GetPath())
# line: 2046
for new_dir in new_dirs:
# line: 2047
new_dir.st_mode = (stat.S_IFDIR | perm_bits)
# line: 2049
self._last_ino += 1
# line: 2050
current_dir.SetIno(self._last_ino)
# line: 2051
return current_dir
# line: 2053
def CreateFile(self, file_path, st_mode=(stat.S_IFREG | PERM_DEF_FILE), contents='', st_size=None, create_missing_dirs=True, apply_umask=False, encoding=None, errors=None):
# line: 2079
'Create file_path, including all the parent directories along the way.\n\n This helper method can be used to set up tests more easily.\n\n Args:\n file_path: The path to the file to create.\n st_mode: The stat constant representing the file type.\n contents: The contents of the file.\n st_size: The file size; only valid if contents not given.\n create_missing_dirs: If `True`, auto create missing directories.\n apply_umask: `True` if the current umask must be applied on st_mode.\n encoding: Ff contents is a unicode string, the encoding used\n for serialization.\n New in pyfakefs 2.9.\n errors: The error mode used for encoding/decoding errors.\n New in pyfakefs 3.2.\n\n Returns:\n the newly created FakeFile object.\n\n Raises:\n IOError: if the file already exists.\n IOError: if the containing directory is required and missing.\n '
# line: 2080
return self.CreateFileInternally(file_path, st_mode, contents, st_size, create_missing_dirs, apply_umask, encoding, errors)
# line: 2084
def add_real_file(self, file_path, read_only=True):
# line: 2109
"Create file_path, including all the parent directories along the way, for an existing\n real file. The contents of the real file are read only on demand.\n New in pyfakefs 3.2.\n\n Args:\n file_path: Path to an existing file in the real file system\n read_only: If `True` (the default), writing to the fake file\n raises an exception. Otherwise, writing to the file changes\n the fake file only.\n\n Returns:\n the newly created FakeFile object.\n\n Raises:\n OSError: if the file does not exist in the real file system.\n IOError: if the file already exists in the fake file system.\n\n .. note:: On MacOS and BSD, accessing the fake file's contents will update both the real and fake files' `atime.` (access time). In this particular case, `add_real_file()` violates the rule that `pyfakefs` must not modify the real file system. Further, Windows offers the option to enable atime, and older versions of Linux may also modify atime.\n "
# line: 2110
return self.CreateFileInternally(file_path, read_from_real_fs=True, read_only=read_only)
# line: 2114
def add_real_directory(self, dir_path, read_only=True, lazy_read=True):
# line: 2139
'Create a fake directory corresponding to the real directory at the specified\n path. Add entries in the fake directory corresponding to the entries in the\n real directory.\n New in pyfakefs 3.2.\n\n Args:\n dir_path: The path to the existing directory.\n read_only: If set, all files under the directory are treated as\n read-only, e.g. a write access raises an exception;\n otherwise, writing to the files changes the fake files only\n as usually.\n lazy_read: If set (default), directory contents are only read when\n accessed, and only until the needed subdirectory level.\n *Note:* this means that the file system size is only updated\n at the time the directory contents are read; set this to\n `False` only if you are dependent on accurate file system\n size in your test\n\n Returns:\n the newly created FakeDirectory object.\n\n Raises:\n OSError: if the directory does not exist in the real file system.\n IOError: if the directory already exists in the fake file system.\n '
# line: 2140
if (not os.path.exists(dir_path)):
# line: 2141
raise IOError(errno.ENOENT, 'No such directory', dir_path)
# line: 2142
if lazy_read:
# line: 2143
parent_path = os.path.split(dir_path)[0]
# line: 2144
if self.Exists(parent_path):
# line: 2145
parent_dir = self.GetObject(parent_path)
else:
# line: 2147
parent_dir = self.CreateDirectory(parent_path)
# line: 2148
new_dir = FakeDirectoryFromRealDirectory(dir_path, filesystem=self, read_only=read_only)
# line: 2149
parent_dir.AddEntry(new_dir)
# line: 2150
self._last_ino += 1
# line: 2151
new_dir.SetIno(self._last_ino)
else:
# line: 2153
new_dir = self.CreateDirectory(dir_path)
# line: 2154
for (base, _, files) in os.walk(dir_path):
# line: 2155
for fileEntry in files:
# line: 2156
self.add_real_file(os.path.join(base, fileEntry), read_only)
# line: 2157
return new_dir
# line: 2159
def add_real_paths(self, path_list, read_only=True, lazy_dir_read=True):
# line: 2176
'This convenience method adds multiple files and/or directories from the\n real file system to the fake file system. See `add_real_file()` and\n `add_real_directory()`.\n New in pyfakefs 3.2.\n\n Args:\n path_list: List of file and directory paths in the real file system.\n read_only: If set, all files and files under under the directories are treated as read-only,\n e.g. a write access raises an exception;\n otherwise, writing to the files changes the fake files only as usually.\n lazy_dir_read: Uses lazy reading of directory contents if set\n (see `add_real_directory`)\n\n Raises:\n OSError: if any of the files and directories in the list does not exist in the real file system.\n OSError: if any of the files and directories in the list already exists in the fake file system.\n '
# line: 2177
for path in path_list:
# line: 2178
if os.path.isdir(path):
# line: 2179
self.add_real_directory(path, read_only, lazy_dir_read)
else:
# line: 2181
self.add_real_file(path, read_only)
# line: 2183
def CreateFileInternally(self, file_path, st_mode=(stat.S_IFREG | PERM_DEF_FILE), contents='', st_size=None, create_missing_dirs=True, apply_umask=False, encoding=None, errors=None, read_from_real_fs=False, read_only=True, raw_io=False):
# line: 2203
'Internal fake file creator that supports both normal fake files and fake\n files based on real files.\n\n Args:\n file_path: path to the file to create.\n st_mode: the stat.S_IF constant representing the file type.\n contents: the contents of the file.\n st_size: file size; only valid if contents not given.\n create_missing_dirs: if True, auto create missing directories.\n apply_umask: whether or not the current umask must be applied on st_mode.\n encoding: if contents is a unicode string, the encoding used for serialization.\n errors: the error mode used for encoding/decoding errors\n read_from_real_fs: if True, the contents are reaf from the real file system on demand.\n read_only: if set, the file is treated as read-only, e.g. a write access raises an exception;\n otherwise, writing to the file changes the fake file only as usually.\n raw_io: `True` if called from low-level API (`os.open`)\n '
# line: 2204
error_class = (OSError if raw_io else IOError)
# line: 2205
file_path = self.NormalizePath(file_path)
# line: 2208
if (self.Exists(file_path) or self.IsLink(file_path)):
# line: 2209
raise OSError(errno.EEXIST, 'File already exists in fake filesystem', file_path)
# line: 2212
(parent_directory, new_file) = self.SplitPath(file_path)
# line: 2213
if (not parent_directory):
# line: 2214
parent_directory = self.cwd
# line: 2215
self._AutoMountDriveIfNeeded(parent_directory)
# line: 2216
if (not self.Exists(parent_directory)):
# line: 2217
if (not create_missing_dirs):
# line: 2218
raise error_class(errno.ENOENT, 'No such fake directory', parent_directory)
# line: 2219
self.CreateDirectory(parent_directory)
else:
# line: 2221
parent_directory = self.NormalizeCase(parent_directory)
# line: 2222
if apply_umask:
# line: 2223
st_mode &= (~ self.umask)
# line: 2224
if read_from_real_fs:
# line: 2225
file_object = FakeFileFromRealFile(file_path, filesystem=self, read_only=read_only)
else:
# line: 2227
file_object = FakeFile(new_file, st_mode, filesystem=self, encoding=encoding, errors=errors)
# line: 2229
self._last_ino += 1
# line: 2230
file_object.SetIno(self._last_ino)
# line: 2231
self.AddObject(parent_directory, file_object, error_class)
# line: 2233
if ((not read_from_real_fs) and ((contents is not None) or (st_size is not None))):
# line: 2234
try:
# line: 2235
if (st_size is not None):
# line: 2236
file_object.SetLargeFileSize(st_size)
else:
# line: 2238
file_object._set_initial_contents(contents)
# line: 2239
except IOError:
# line: 2240
self.RemoveObject(file_path)
# line: 2241
raise
# line: 2243
return file_object
# line: 2246
def CreateLink(self, file_path, link_target, create_missing_dirs=True):
# line: 2261
'Create the specified symlink, pointed at the specified link target.\n\n Args:\n file_path: path to the symlink to create\n link_target: the target of the symlink\n create_missing_dirs: If `True`, any missing parent directories of\n file_path will be created\n\n Returns:\n the newly created FakeFile object.\n\n Raises:\n OSError: if the symlink could not be created (see `CreateFile`).\n OSError: if on Windows before Python 3.2.\n '
# line: 2262
if (not self._IsLinkSupported()):
# line: 2263
raise OSError('Symbolic links are not supported on Windows before Python 3.2')
# line: 2265
if (not self.IsLink(file_path)):
# line: 2266
file_path = self.ResolvePath(file_path)
# line: 2267
if (sys.version_info >= (3, 6)):
# line: 2268
link_target = os.fspath(link_target)
# line: 2269
return self.CreateFileInternally(file_path, st_mode=(stat.S_IFLNK | PERM_DEF), contents=link_target, create_missing_dirs=create_missing_dirs, raw_io=True)
# line: 2273
def CreateHardLink(self, old_path, new_path):
# line: 2289
"Create a hard link at new_path, pointing at old_path.\n New in pyfakefs 2.9.\n\n Args:\n old_path: an existing link to the target file.\n new_path: the destination path to create a new link at.\n\n Returns:\n the FakeFile object referred to by old_path.\n\n Raises:\n OSError: if something already exists at new_path.\n OSError: if old_path is a directory.\n OSError: if the parent directory doesn't exist.\n OSError: if on Windows before Python 3.2.\n "
# line: 2290
if (not self._IsLinkSupported()):
# line: 2291
raise OSError('Links are not supported on Windows before Python 3.2')
# line: 2292
new_path_normalized = self.NormalizePath(new_path)
# line: 2293
if self.Exists(new_path_normalized):
# line: 2294
raise OSError(errno.EEXIST, 'File already exists in fake filesystem', new_path)
# line: 2298
(new_parent_directory, new_basename) = self.SplitPath(new_path_normalized)
# line: 2299
if (not new_parent_directory):
# line: 2300
new_parent_directory = self.cwd
# line: 2302
if (not self.Exists(new_parent_directory)):
# line: 2303
raise OSError(errno.ENOENT, 'No such fake directory', new_parent_directory)
# line: 2307
try:
# line: 2308
old_file = self.ResolveObject(old_path)
# line: 2309
except:
# line: 2310
raise OSError(errno.ENOENT, 'No such file or directory in fake filesystem', old_path)
# line: 2314
if (old_file.st_mode & stat.S_IFDIR):
# line: 2315
raise OSError((errno.EACCES if self.is_windows_fs else errno.EPERM), 'Cannot create hard link to directory', old_path)
# line: 2320
old_file.name = new_basename
# line: 2321
self.AddObject(new_parent_directory, old_file)
# line: 2322
return old_file
# line: 2324
def ReadLink(self, path):
# line: 2338
'Read the target of a symlink.\n New in pyfakefs 3.0.\n\n Args:\n path: symlink to read the target of.\n\n Returns:\n the string representing the path to which the symbolic link points.\n\n Raises:\n TypeError: if path is None\n OSError: (with errno=ENOENT) if path is not a valid path, or\n (with errno=EINVAL) if path is valid, but is not a symlink.\n '
# line: 2339
if (path is None):
# line: 2340
raise TypeError
# line: 2341
try:
# line: 2342
link_obj = self.LResolveObject(path)
# line: 2343
except IOError as exc:
# line: 2344
raise OSError(exc.errno, 'Fake path does not exist', path)
# line: 2345
if (stat.S_IFMT(link_obj.st_mode) != stat.S_IFLNK):
# line: 2346
raise OSError(errno.EINVAL, 'Fake filesystem: not a symlink', path)
# line: 2347
return link_obj.contents
# line: 2349
def MakeDirectory(self, dir_name, mode=PERM_DEF):
# line: 2362
"Create a leaf Fake directory.\n New in pyfakefs 3.0.\n\n Args:\n dir_name: (str) Name of directory to create. Relative paths are assumed\n to be relative to '/'.\n mode: (int) Mode to create directory with. This argument defaults to\n 0o777. The umask is applied to this mode.\n\n Raises:\n OSError: if the directory name is invalid or parent directory is read only\n or as per `FakeFilesystem.AddObject()`.\n "
# line: 2363
if (sys.version_info >= (3, 6)):
# line: 2364
dir_name = os.fspath(dir_name)
# line: 2365
if self._EndsWithPathSeparator(dir_name):
# line: 2366
dir_name = dir_name[:(-1)]
# line: 2367
if (not dir_name):
# line: 2368
raise OSError(errno.ENOENT, 'Empty directory name')
# line: 2370
(parent_dir, _) = self.SplitPath(dir_name)
# line: 2371
if parent_dir:
# line: 2372
base_dir = self.CollapsePath(parent_dir)
# line: 2373
ellipsis = self._matching_string(parent_dir, (self.path_separator + '..'))
# line: 2374
if parent_dir.endswith(ellipsis):
# line: 2375
(base_dir, dummy_dotdot, _) = parent_dir.partition(ellipsis)
# line: 2376
if (not self.Exists(base_dir)):
# line: 2377
raise OSError(errno.ENOENT, 'No such fake directory', base_dir)
# line: 2379
dir_name = self.NormalizePath(dir_name)
# line: 2380
if self.Exists(dir_name):
# line: 2381
raise OSError(errno.EEXIST, 'Fake object already exists', dir_name)
# line: 2382
(head, tail) = self.SplitPath(dir_name)
# line: 2384
self.AddObject(head, FakeDirectory(tail, (mode & (~ self.umask)), filesystem=self))
# line: 2387
def MakeDirectories(self, dir_name, mode=PERM_DEF, exist_ok=False):
# line: 2402
'Create a leaf Fake directory and create any non-existent parent dirs.\n New in pyfakefs 3.0.\n\n Args:\n dir_name: (str) Name of directory to create.\n mode: (int) Mode to create directory (and any necessary parent\n directories) with. This argument defaults to 0o777. The umask is\n applied to this mode.\n exist_ok: (boolean) If exist_ok is False (the default), an OSError is\n raised if the target directory already exists. New in Python 3.2.\n\n Raises:\n OSError: if the directory already exists and exist_ok=False, or as per\n `FakeFilesystem.CreateDirectory()`.\n '
# line: 2403
dir_name = self.NormalizePath(dir_name)
# line: 2404
path_components = self.GetPathComponents(dir_name)
# line: 2408
current_dir = self.root
# line: 2409
for component in path_components:
# line: 2410
if ((component not in current_dir.contents) or (not isinstance(current_dir.contents, dict))):
# line: 2412
break
else:
# line: 2414
current_dir = current_dir.contents[component]
# line: 2415
try:
# line: 2416
self.CreateDirectory(dir_name, (mode & (~ self.umask)))
# line: 2417
except (IOError, OSError) as e:
# line: 2418
if ((not exist_ok) or (not isinstance(self.ResolveObject(dir_name), FakeDirectory))):
# line: 2420
if isinstance(e, OSError):
# line: 2421
raise
# line: 2422
raise OSError(e.errno, e.strerror, e.filename)
# line: 2424
def _IsType(self, path, st_flag, follow_symlinks=True):
# line: 2438
"Helper function to implement isdir(), islink(), etc.\n\n See the stat(2) man page for valid stat.S_I* flag values\n\n Args:\n path: path to file to stat and test\n st_flag: the stat.S_I* flag checked for the file's st_mode\n\n Returns:\n boolean (the st_flag is set in path's st_mode)\n\n Raises:\n TypeError: if path is None\n "
# line: 2439
if (sys.version_info >= (3, 6)):
# line: 2440
path = os.fspath(path)
# line: 2441
if (path is None):
# line: 2442
raise TypeError
# line: 2443
try:
# line: 2444
obj = self.ResolveObject(path, follow_symlinks)
# line: 2445
if obj:
# line: 2446
return (stat.S_IFMT(obj.st_mode) == st_flag)
# line: 2447
except (IOError, OSError):
# line: 2448
return False
# line: 2449
return False
# line: 2451
def IsDir(self, path, follow_symlinks=True):
# line: 2463
'Determine if path identifies a directory.\n New in pyfakefs 3.0.\n\n Args:\n path: path to filesystem object.\n\n Returns:\n True if path points to a directory (following symlinks).\n\n Raises:\n TypeError: if path is None.\n '
# line: 2464
return self._IsType(path, stat.S_IFDIR, follow_symlinks)
# line: 2466
def IsFile(self, path, follow_symlinks=True):
# line: 2478
'Determine if path identifies a regular file.\n New in pyfakefs 3.0.\n\n Args:\n path: path to filesystem object.\n\n Returns:\n True if path points to a regular file (following symlinks).\n\n Raises:\n TypeError: if path is None.\n '
# line: 2479
return self._IsType(path, stat.S_IFREG, follow_symlinks)
# line: 2481
def IsLink(self, path):
# line: 2493
'Determine if path identifies a symbolic link.\n New in pyfakefs 3.0.\n\n Args:\n path: path to filesystem object.\n\n Returns:\n True if path points to a symlink (S_IFLNK set in st_mode)\n\n Raises:\n TypeError: if path is None.\n '
# line: 2494
return self._IsType(path, stat.S_IFLNK, follow_symlinks=False)
# line: 2496
def ConfirmDir(self, target_directory):
# line: 2508
'Test that the target is actually a directory, raising OSError if not.\n New in pyfakefs 3.0.\n\n Args:\n target_directory: path to the target directory within the fake filesystem.\n\n Returns:\n the FakeDirectory object corresponding to target_directory.\n\n Raises:\n OSError: if the target is not a directory.\n '
# line: 2509
try:
# line: 2510
directory = self.ResolveObject(target_directory)
# line: 2511
except IOError as exc:
# line: 2512
raise OSError(exc.errno, exc.strerror, target_directory)
# line: 2513
if (not (directory.st_mode & stat.S_IFDIR)):
# line: 2514
raise OSError(errno.ENOTDIR, 'Fake os module: not a directory', target_directory)
# line: 2517
return directory
# line: 2519
def RemoveFile(self, path):
# line: 2530
'Remove the FakeFile object at the specified file path.\n New in pyfakefs 3.0.\n\n Args:\n path: path to file to be removed.\n\n Raises:\n OSError: if path points to a directory.\n OSError: if path does not exist.\n OSError: if removal failed.\n '
# line: 2531
path = self.NormalizePath(path)
# line: 2532
if self.Exists(path):
# line: 2533
obj = self.ResolveObject(path)
# line: 2534
if (stat.S_IFMT(obj.st_mode) == stat.S_IFDIR):
# line: 2535
link_obj = self.LResolveObject(path)
# line: 2536
if (stat.S_IFMT(link_obj.st_mode) != stat.S_IFLNK):
# line: 2537
raise OSError(errno.EISDIR, ("Is a directory: '%s'" % path))
# line: 2539
try:
# line: 2540
self.RemoveObject(path)
# line: 2541
except IOError as exc:
# line: 2542
raise OSError(exc.errno, exc.strerror, exc.filename)
# line: 2544
def RemoveDirectory(self, target_directory, allow_symlink=False):
# line: 2557
"Remove a leaf Fake directory.\n New in pyfakefs 3.0.\n\n Args:\n target_directory: (str) Name of directory to remove.\n allow_symlink: (bool) if `target_directory` is a symlink,\n the function just returns, otherwise it raises (Posix only)\n\n Raises:\n OSError: if target_directory does not exist.\n OSError: if target_directory does not point to a directory.\n OSError: if removal failed per FakeFilesystem.RemoveObject. Cannot remove '.'.\n "
# line: 2558
if (target_directory in ('.', u'.')):
# line: 2559
raise OSError(errno.EINVAL, "Invalid argument: '.'")
# line: 2560
target_directory = self.NormalizePath(target_directory)
# line: 2561
if self.ConfirmDir(target_directory):
# line: 2562
if ((not self.is_windows_fs) and self.IsLink(target_directory)):
# line: 2563
if allow_symlink:
# line: 2564
return
# line: 2565
raise OSError(errno.ENOTDIR, 'Cannot remove symlink', target_directory)
# line: 2567
dir_object = self.ResolveObject(target_directory)
# line: 2568
if dir_object.contents:
# line: 2569
raise OSError(errno.ENOTEMPTY, 'Fake Directory not empty', target_directory)
# line: 2571
try:
# line: 2572
self.RemoveObject(target_directory)
# line: 2573
except IOError as exc:
# line: 2574
raise OSError(exc.errno, exc.strerror, exc.filename)
# line: 2576
def ListDir(self, target_directory):
# line: 2588
'Return a list of file names in target_directory.\n New in pyfakefs 3.0.\n\n Args:\n target_directory: path to the target directory within the fake filesystem.\n\n Returns:\n a list of file names within the target directory in arbitrary order.\n\n Raises:\n OSError: if the target is not a directory.\n '
# line: 2589
target_directory = self.ResolvePath(target_directory, allow_fd=True)
# line: 2590
directory = self.ConfirmDir(target_directory)
# line: 2591
directory_contents = directory.contents
# line: 2592
return list(directory_contents.keys())
# line: 2594
if (sys.version_info >= (3, 5)):
# line: 2595
class DirEntry:
# line: 2596
'Emulates os.DirEntry. Note that we did not enforce keyword only arguments.'
# line: 2598
def __init__(self, filesystem):
# line: 2603
'Initialize the dir entry with unset values.\n\n Args:\n filesystem: the fake filesystem used for implementation.\n '
# line: 2604
self._filesystem = filesystem
# line: 2605
self.name = ''
# line: 2606
self.path = ''
# line: 2607
self._inode = None
# line: 2608
self._islink = False
# line: 2609
self._isdir = False
# line: 2610
self._statresult = None
# line: 2611
self._statresult_symlink = None
# line: 2613
def inode(self):
# line: 2614
'Return the inode number of the entry.'
# line: 2615
if (self._inode is None):
# line: 2616
self.stat(follow_symlinks=False)
# line: 2617
return self._inode
# line: 2619
def is_dir(self, follow_symlinks=True):
# line: 2629
'Return True if this entry is a directory entry.\n\n Args:\n follow_symlinks: If True, also return True if this entry is a symlink\n pointing to a directory.\n\n Returns:\n True if this entry is an existing directory entry, or if\n follow_symlinks is set, and this entry points to an existing directory entry.\n '
# line: 2630
return (self._isdir and (follow_symlinks or (not self._islink)))
# line: 2632
def is_file(self, follow_symlinks=True):
# line: 2642
'Return True if this entry is a regular file entry.\n\n Args:\n follow_symlinks: If True, also return True if this entry is a symlink\n pointing to a regular file.\n\n Returns:\n True if this entry is an existing file entry, or if\n follow_symlinks is set, and this entry points to an existing file entry.\n '
# line: 2643
return ((not self._isdir) and (follow_symlinks or (not self._islink)))
# line: 2645
def is_symlink(self):
# line: 2646
'Return True if this entry is a symbolic link (even if broken).'
# line: 2647
return self._islink
# line: 2649
def stat(self, follow_symlinks=True):
# line: 2655
'Return a stat_result object for this entry.\n\n Args:\n follow_symlinks: If False and the entry is a symlink, return the\n result for the symlink, otherwise for the object it points to.\n '
# line: 2656
if follow_symlinks:
# line: 2657
if (self._statresult_symlink is None):
# line: 2658
file_object = self._filesystem.ResolveObject(self.path)
# line: 2659
if self._filesystem.is_windows_fs:
# line: 2662
file_object.st_ino = 0
# line: 2663
file_object.st_dev = 0
# line: 2664
file_object.st_nlink = 0
# line: 2665
self._statresult_symlink = file_object.stat_result.copy()
# line: 2666
return self._statresult_symlink
# line: 2668
if (self._statresult is None):
# line: 2669
file_object = self._filesystem.LResolveObject(self.path)
# line: 2670
self._inode = file_object.st_ino
# line: 2671
if self._filesystem.is_windows_fs:
# line: 2672
file_object.st_ino = 0
# line: 2673
file_object.st_dev = 0
# line: 2674
file_object.st_nlink = 0
# line: 2675
self._statresult = file_object.stat_result.copy()
# line: 2676
return self._statresult
# line: 2678
class ScanDirIter:
# line: 2681
'Iterator for DirEntry objects returned from `scandir()` function.\n New in pyfakefs 3.0.\n '
# line: 2683
def __init__(self, filesystem, path):
# line: 2684
self.filesystem = filesystem
# line: 2685
self.path = self.filesystem.ResolvePath(path)
# line: 2686
contents = {}
# line: 2687
try:
# line: 2688
contents = self.filesystem.ConfirmDir(path).contents
# line: 2689
except OSError:
# line: 2690
pass
# line: 2691
self.contents_iter = iter(contents)
# line: 2693
def __iter__(self):
# line: 2694
return self
# line: 2696
def __next__(self):
# line: 2697
entry = self.contents_iter.__next__()
# line: 2698
dir_entry = self.filesystem.DirEntry(self.filesystem)
# line: 2699
dir_entry.name = entry
# line: 2700
dir_entry.path = self.filesystem.JoinPaths(self.path, dir_entry.name)
# line: 2701
dir_entry._isdir = self.filesystem.IsDir(dir_entry.path)
# line: 2702
dir_entry._islink = self.filesystem.IsLink(dir_entry.path)
# line: 2703
return dir_entry
# line: 2705
if (sys.version_info >= (3, 6)):
# line: 2706
def __enter__(self):
# line: 2707
return self
# line: 2709
def __exit__(self, exc_type, exc_val, exc_tb):
# line: 2710
self.close()
# line: 2712
def close(self):
# line: 2713
pass
# line: 2715
def ScanDir(self, path=''):
# line: 2728
'Return an iterator of DirEntry objects corresponding to the entries\n in the directory given by path.\n New in pyfakefs 3.0.\n\n Args:\n path: path to the target directory within the fake filesystem.\n\n Returns:\n an iterator to an unsorted list of os.DirEntry objects for each entry in path.\n\n Raises:\n OSError: if the target is not a directory.\n '
# line: 2729
return self.ScanDirIter(self, path)
# line: 2731
def __str__(self):
# line: 2732
return str(self.root)
class FakePathModule(object):
    """Faked os.path module replacement.

    FakePathModule should *only* be instantiated by FakeOsModule. See the
    FakeOsModule docstring for details.
    """
    # Snapshot copy of the real os.path module, shared by all instances;
    # pure string-based helpers (e.g. relpath) are delegated to it.
    _OS_PATH_COPY = CopyModule(os.path)
# line: 2743
def __init__(self, filesystem, os_module=None):
# line: 2749
'Init.\n\n Args:\n filesystem: FakeFilesystem used to provide file system information\n os_module: (deprecated) FakeOsModule to assign to self.os\n '
# line: 2750
self.filesystem = filesystem
# line: 2751
self._os_path = self._OS_PATH_COPY
# line: 2752
if (os_module is None):
# line: 2753
warnings.warn(FAKE_PATH_MODULE_DEPRECATION, DeprecationWarning, stacklevel=2)
# line: 2755
self._os_path.os = self.os = os_module
# line: 2756
self.sep = self.filesystem.path_separator
# line: 2757
self.altsep = self.filesystem.alternative_path_separator
# line: 2759
def exists(self, path):
# line: 2767
'Determine whether the file object exists within the fake filesystem.\n\n Args:\n path: path to the file object.\n\n Returns:\n bool (if file exists).\n '
# line: 2768
return self.filesystem.Exists(path)
# line: 2770
def lexists(self, path):
# line: 2778
'Test whether a path exists. Returns True for broken symbolic links.\n\n Args:\n path: path to the symlink object.\n\n Returns:\n bool (if file exists).\n '
# line: 2779
return (self.exists(path) or self.islink(path))
# line: 2781
def getsize(self, path):
# line: 2789
'Return the file object size in bytes.\n\n Args:\n path: path to the file object.\n\n Returns:\n file size in bytes.\n '
# line: 2790
try:
# line: 2791
file_obj = self.filesystem.ResolveObject(path)
# line: 2792
return file_obj.st_size
# line: 2793
except IOError as exc:
# line: 2794
raise os.error(exc.errno, exc.strerror)
# line: 2796
def isabs(self, path):
# line: 2797
'Return True if path is an absolute pathname.'
# line: 2798
if self.filesystem.is_windows_fs:
# line: 2799
path = self.splitdrive(path)[1]
# line: 2800
if (sys.version_info >= (3, 6)):
# line: 2801
path = os.fspath(path)
# line: 2802
sep = self.filesystem._path_separator(path)
# line: 2803
altsep = self.filesystem._alternative_path_separator(path)
# line: 2804
if self.filesystem.is_windows_fs:
# line: 2805
return ((len(path) > 0) and (path[:1] in (sep, altsep)))
else:
# line: 2807
return (path.startswith(sep) or ((altsep is not None) and path.startswith(altsep)))
# line: 2809
def isdir(self, path):
# line: 2810
'Determine if path identifies a directory.'
# line: 2811
return self.filesystem.IsDir(path)
# line: 2813
def isfile(self, path):
# line: 2814
'Determine if path identifies a regular file.'
# line: 2815
return self.filesystem.IsFile(path)
# line: 2817
def islink(self, path):
# line: 2828
'Determine if path identifies a symbolic link.\n\n Args:\n path: path to filesystem object.\n\n Returns:\n True if path points to a symbolic link.\n\n Raises:\n TypeError: if path is None.\n '
# line: 2829
return self.filesystem.IsLink(path)
# line: 2831
def getmtime(self, path):
# line: 2843
'Returns the modification time of the fake file.\n\n Args:\n path: the path to fake file.\n\n Returns:\n (int, float) the modification time of the fake file\n in number of seconds since the epoch.\n\n Raises:\n OSError: if the file does not exist.\n '
# line: 2844
try:
# line: 2845
file_obj = self.filesystem.ResolveObject(path)
# line: 2846
except IOError as exc:
# line: 2847
raise OSError(errno.ENOENT, str(exc))
# line: 2848
return file_obj.st_mtime
# line: 2850
def getatime(self, path):
# line: 2863
'Returns the last access time of the fake file.\n\n Note: Access time is not set automatically in fake filesystem on access.\n\n Args:\n path: the path to fake file.\n\n Returns:\n (int, float) the access time of the fake file in number of seconds since the epoch.\n\n Raises:\n OSError: if the file does not exist.\n '
# line: 2864
try:
# line: 2865
file_obj = self.filesystem.ResolveObject(path)
# line: 2866
except IOError as exc:
# line: 2867
raise OSError(errno.ENOENT, str(exc))
# line: 2868
return file_obj.st_atime
# line: 2870
def getctime(self, path):
# line: 2881
'Returns the creation time of the fake file.\n\n Args:\n path: the path to fake file.\n\n Returns:\n (int, float) the creation time of the fake file in number of seconds since the epoch.\n\n Raises:\n OSError: if the file does not exist.\n '
# line: 2882
try:
# line: 2883
file_obj = self.filesystem.ResolveObject(path)
# line: 2884
except IOError as exc:
# line: 2885
raise OSError(errno.ENOENT, str(exc))
# line: 2886
return file_obj.st_ctime
# line: 2888
def abspath(self, path):
# line: 2889
'Return the absolute version of a path.'
# line: 2891
def getcwd():
# line: 2892
'Return the current working directory.'
# line: 2894
if ((sys.version_info < (3,)) and isinstance(path, unicode)):
# line: 2895
return self.os.getcwdu()
elif ((sys.version_info >= (3,)) and isinstance(path, bytes)):
# line: 2897
return self.os.getcwdb()
else:
# line: 2899
return self.os.getcwd()
# line: 2901
if (sys.version_info >= (3, 6)):
# line: 2902
path = os.fspath(path)
# line: 2904
sep = self.filesystem._path_separator(path)
# line: 2905
altsep = self.filesystem._alternative_path_separator(path)
# line: 2906
if (not self.isabs(path)):
# line: 2907
path = self.join(getcwd(), path)
elif ((self.filesystem.is_windows_fs and path.startswith(sep)) or ((altsep is not None) and path.startswith(altsep))):
# line: 2911
cwd = getcwd()
# line: 2912
if self.filesystem.StartsWithDriveLetter(cwd):
# line: 2913
path = self.join(cwd[:2], path)
# line: 2914
return self.normpath(path)
# line: 2916
def join(self, *p):
# line: 2917
'Return the completed path with a separator of the parts.'
# line: 2918
return self.filesystem.JoinPaths(*p)
# line: 2920
def split(self, path):
# line: 2923
'Split the path into the directory and the filename of the path.\n New in pyfakefs 3.0.\n '
# line: 2924
return self.filesystem.SplitPath(path)
# line: 2926
def splitdrive(self, path):
# line: 2929
'Split the path into the drive part and the rest of the path, if supported.\n New in pyfakefs 2.9.\n '
# line: 2930
return self.filesystem.SplitDrive(path)
# line: 2932
def normpath(self, path):
# line: 2933
'Normalize path, eliminating double slashes, etc.'
# line: 2934
return self.filesystem.CollapsePath(path)
# line: 2936
def normcase(self, path):
# line: 2939
'Convert to lower case under windows, replaces additional path separator.\n New in pyfakefs 2.9.\n '
# line: 2940
path = self.filesystem.NormalizePathSeparator(path)
# line: 2941
if self.filesystem.is_windows_fs:
# line: 2942
path = path.lower()
# line: 2943
return path
# line: 2945
    def relpath(self, path, start=None):
        """Return *path* made relative to *start* (default: fake cwd).

        Relies on the real os.path.relpath after mapping the fake path
        separators to the native one, then maps the result back.

        Raises:
            ValueError: if *path* is empty.
        """
        if (not path):
            raise ValueError('no path specified')
        if (sys.version_info >= (3, 6)):
            # Accept os.PathLike arguments (PEP 519).
            path = os.fspath(path)
            if (start is not None):
                start = os.fspath(start)
        if (start is None):
            start = self.filesystem.cwd
        if (self.filesystem.alternative_path_separator is not None):
            # Map the alternative separator first so that both spellings end
            # up as the native separator before calling the real relpath.
            path = path.replace(self.filesystem.alternative_path_separator, self._os_path.sep)
            start = start.replace(self.filesystem.alternative_path_separator, self._os_path.sep)
        path = path.replace(self.filesystem.path_separator, self._os_path.sep)
        start = start.replace(self.filesystem.path_separator, self._os_path.sep)
        path = self._os_path.relpath(path, start)
        # Translate the native separators back to the fake filesystem's.
        return path.replace(self._os_path.sep, self.filesystem.path_separator)
# line: 2963
    def realpath(self, filename):
        """Return the canonical path of *filename*, resolving symlinks.

        On a fake Windows filesystem no symlink resolution is attempted;
        the method falls back to `abspath`.
        New in pyfakefs 3.0.
        """
        if self.filesystem.is_windows_fs:
            return self.abspath(filename)
        if (sys.version_info >= (3, 6)):
            # Accept os.PathLike arguments (PEP 519).
            filename = os.fspath(filename)
        # filename[:0] yields '' or b'' matching the argument's string type.
        (path, ok) = self._joinrealpath(filename[:0], filename, {})
        return self.abspath(path)
# line: 2975
    # os.path.samefile exists on Windows only since Python 3.2.
    if ((sys.platform != 'win32') or (sys.version_info >= (3, 2))):
        def samefile(self, path1, path2):
            """Return True if *path1* and *path2* point to the same file.

            Compares the inode and device numbers of both stat results.
            New in pyfakefs 3.3.

            Raises:
                OSError: if one of the paths does not point to an existing
                    file system object.
            """
            stat1 = self.filesystem.GetStat(path1)
            stat2 = self.filesystem.GetStat(path2)
            return ((stat1.st_ino == stat2.st_ino) and (stat1.st_dev == stat2.st_dev))
# line: 2992
    def _joinrealpath(self, path, rest, seen):
        """Join *path* and *rest*, resolving any symlinks found in *rest*.

        Taken from the CPython posixpath source and adapted to the fake
        filesystem.

        Args:
            path: already-resolved path prefix (same string type as *rest*).
            rest: remaining path to resolve, component by component.
            seen: dict mapping symlink paths to their resolved targets;
                a value of None marks a link that is currently being
                resolved (used for loop detection).

        Returns:
            Tuple (resolved_path, ok); ok is False if a symlink loop was
            detected, in which case resolved_path is only partially resolved.
        """
        curdir = self.filesystem._matching_string(path, '.')
        pardir = self.filesystem._matching_string(path, '..')
        sep = self.filesystem._path_separator(path)
        if self.isabs(rest):
            rest = rest[1:]
            path = sep
        while rest:
            (name, _, rest) = rest.partition(sep)
            if ((not name) or (name == curdir)):
                # Skip empty components and '.'.
                continue
            if (name == pardir):
                # Handle '..' purely textually, without touching the fs.
                if path:
                    (path, name) = self.filesystem.SplitPath(path)
                    if (name == pardir):
                        path = self.filesystem.JoinPaths(path, pardir, pardir)
                else:
                    path = pardir
                continue
            newpath = self.filesystem.JoinPaths(path, name)
            if (not self.filesystem.IsLink(newpath)):
                path = newpath
                continue
            # newpath is a symlink: resolve it.
            if (newpath in seen):
                path = seen[newpath]
                if (path is not None):
                    # This link was already fully resolved earlier.
                    continue
                # The link is being resolved right now: symlink loop.
                # Return the partially resolved path.
                return (self.filesystem.JoinPaths(newpath, rest), False)
            seen[newpath] = None
            (path, ok) = self._joinrealpath(path, self.filesystem.ReadLink(newpath), seen)
            if (not ok):
                return (self.filesystem.JoinPaths(path, rest), False)
            # Remember the fully resolved target of this symlink.
            seen[newpath] = path
        return (path, True)
# line: 3040
def dirname(self, path):
# line: 3043
'Returns the first part of the result of `split()`.\n New in pyfakefs 3.0.\n '
# line: 3044
return self.split(path)[0]
# line: 3046
def expanduser(self, path):
# line: 3049
"Return the argument with an initial component of ~ or ~user\n replaced by that user's home directory.\n "
# line: 3050
return self._os_path.expanduser(path).replace(self._os_path.sep, self.sep)
# line: 3052
    def ismount(self, path):
        """Return True if the given path is a mount point.

        New in pyfakefs 2.9.

        Args:
            path: path to the filesystem object to be checked.

        Returns:
            True if *path* is a mount point registered in the fake file
            system. On a fake Windows filesystem, also True for drive and
            UNC roots (independent of their existence).
        """
        if (sys.version_info >= (3, 6)):
            # Accept os.PathLike arguments (PEP 519).
            path = os.fspath(path)
        if (not path):
            return False
        normed_path = self.filesystem.NormalizePath(path)
        sep = self.filesystem._path_separator(path)
        if self.filesystem.is_windows_fs:
            if (self.filesystem.alternative_path_separator is not None):
                path_seps = (sep, self.filesystem._alternative_path_separator(path))
            else:
                path_seps = (sep,)
            (drive, rest) = self.filesystem.SplitDrive(normed_path)
            if (drive and (drive[:1] in path_seps)):
                # UNC path like '\\\\host\\share': a root iff nothing or only
                # a separator follows.
                return ((not rest) or (rest in path_seps))
            if (rest in path_seps):
                # Drive root like 'C:\\'.
                return True
        for mount_point in self.filesystem.mount_points:
            if (normed_path.rstrip(sep) == mount_point.rstrip(sep)):
                return True
        return False
# line: 3086
    # os.path.walk only exists in Python 2 (removed in Python 3).
    if (sys.version_info < (3, 0)):
        def walk(self, top, func, arg):
            """Directory tree walk with callback function (Python 2 only).

            New in pyfakefs 3.0.

            Args:
                top: root path to traverse. The root itself is not included
                    in the called elements.
                func: function called for each visited path node as
                    func(arg, dirname, names).
                arg: first argument passed through to *func*.
            """
            try:
                names = self.filesystem.ListDir(top)
            except os.error:
                # Unreadable/nonexistent directory: silently stop, as the
                # real os.path.walk does.
                return
            func(arg, top, names)
            for name in names:
                name = self.filesystem.JoinPaths(top, name)
                if self.filesystem.is_windows_fs:
                    if self.filesystem.IsDir(name):
                        self.walk(name, func, arg)
                else:
                    # POSIX: do not follow symlinks into directories.
                    try:
                        st = self.filesystem.GetStat(name, follow_symlinks=False)
                    except os.error:
                        continue
                    if stat.S_ISDIR(st.st_mode):
                        self.walk(name, func, arg)
# line: 3114
def __getattr__(self, name):
# line: 3115
'Forwards any non-faked calls to the real os.path.'
# line: 3116
return getattr(self._os_path, name)
# line: 3119
class FakeOsModule(object):
# line: 3130
'Uses FakeFilesystem to provide a fake os module replacement.\n\n Do not create os.path separately from os, as there is a necessary circular\n dependency between os and os.path to replicate the behavior of the standard\n Python modules. What you want to do is to just let FakeOsModule take care of\n os.path setup itself.\n\n # You always want to do this.\n filesystem = fake_filesystem.FakeFilesystem()\n my_os_module = fake_filesystem.FakeOsModule(filesystem)\n '
# line: 3132
_stat_float_times = (sys.version_info >= (2, 5))
# line: 3134
    def __init__(self, filesystem, os_path_module=None):
        """Initialize the fake os module; also exposes self.path (fake os.path).

        Args:
            filesystem: FakeFilesystem used to provide file system information.
            os_path_module: (deprecated) optional FakePathModule instance.
        """
        self.filesystem = filesystem
        # Mirror os.sep / os.altsep from the fake filesystem's separators.
        self.sep = filesystem.path_separator
        self.altsep = filesystem.alternative_path_separator
        # Keep a handle on the real os module for pass-through use.
        self._os_module = os
        if (os_path_module is None):
            self.path = FakePathModule(self.filesystem, self)
        else:
            warnings.warn(FAKE_PATH_MODULE_DEPRECATION, DeprecationWarning, stacklevel=2)
            self.path = os_path_module
        if (sys.version_info < (3, 0)):
            # Python 2 fdopen has a different signature/behavior.
            self.fdopen = self._fdopen_ver2
        else:
            self.fdopen = self._fdopen
# line: 3156
def _fdopen(self, *args, **kwargs):
# line: 3168
'Redirector to open() builtin function.\n\n Args:\n *args: pass through args\n **kwargs: pass through kwargs\n\n Returns:\n File object corresponding to file_des.\n\n Raises:\n TypeError: if file descriptor is not an integer.\n '
# line: 3169
if (not isinstance(args[0], int)):
# line: 3170
raise TypeError('an integer is required')
# line: 3171
return FakeFileOpen(self.filesystem)(*args, **kwargs)
# line: 3173
    def _fdopen_ver2(self, file_des, mode='r', bufsize=None):
        """Return an open file object connected to *file_des* (Python 2).

        Args:
            file_des: integer file descriptor of the requested file object.
            mode: additional file flags; checked against the mode of the
                requested file object.
            bufsize: ignored (kept for signature compliance with
                __builtin__.fdopen).

        Returns:
            File object corresponding to *file_des*.

        Raises:
            OSError: on a bad file descriptor or an incompatible mode.
            TypeError: if the file descriptor is not an integer.
        """
        if (not isinstance(file_des, int)):
            raise TypeError('an integer is required')
        try:
            return FakeFileOpen(self.filesystem).Call(file_des, mode=mode)
        except IOError as exc:
            # Python 2 fdopen raises OSError rather than IOError.
            raise OSError(exc)
# line: 3197
def _umask(self):
# line: 3198
'Return the current umask.'
# line: 3199
if self.filesystem.is_windows_fs:
# line: 3201
return 0
# line: 3202
if (sys.platform == 'win32'):
# line: 3204
return 2
else:
# line: 3209
mask = os.umask(0)
# line: 3210
os.umask(mask)
# line: 3211
return mask
# line: 3214
    def open(self, file_path, flags, mode=None, dir_fd=None):
        """Return a file descriptor for a fake file.

        Args:
            file_path: the path to the file.
            flags: low-level bits to indicate the io operation.
            mode: bits to define default permissions; only basic modes are
                supported, OS-specific modes are ignored.
            dir_fd: if not None, the file descriptor of a directory, with
                *file_path* being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Returns:
            A file descriptor.

        Raises:
            IOError: if the path cannot be found.
            ValueError: if an invalid mode is given.
            NotImplementedError: if os.O_EXCL is used without os.O_CREAT.
        """
        file_path = self._path_with_dir_fd(file_path, self.open, dir_fd)
        if (mode is None):
            if self.filesystem.is_windows_fs:
                # 438 == 0o666
                mode = 438
            else:
                # 511 == 0o777; apply the umask.
                mode = (511 & (~ self._umask()))
        # Decode the os.O_* flag bits into the fake open-mode tuple.
        open_modes = _OpenModes(must_exist=(not (flags & os.O_CREAT)), can_read=(not (flags & os.O_WRONLY)), can_write=(flags & (os.O_RDWR | os.O_WRONLY)), truncate=(flags & os.O_TRUNC), append=(flags & os.O_APPEND), must_not_exist=(flags & os.O_EXCL))
        if (open_modes.must_not_exist and open_modes.must_exist):
            raise NotImplementedError('O_EXCL without O_CREAT mode is not supported')
        if ((not self.filesystem.is_windows_fs) and (not open_modes.can_write) and self.filesystem.Exists(file_path)):
            # POSIX allows opening a directory read-only; wrap it so it
            # gets a descriptor like a regular file.
            obj = self.filesystem.ResolveObject(file_path)
            if isinstance(obj, FakeDirectory):
                dir_wrapper = FakeDirWrapper(obj, file_path, self.filesystem)
                file_des = self.filesystem.AddOpenFile(dir_wrapper)
                dir_wrapper.filedes = file_des
                return file_des
        # Low-level open is always binary.
        str_flags = 'b'
        delete_on_close = False
        if hasattr(os, 'O_TEMPORARY'):
            # Windows-only flag: remove the file when it is closed.
            delete_on_close = ((flags & os.O_TEMPORARY) == os.O_TEMPORARY)
        fake_file = FakeFileOpen(self.filesystem, delete_on_close=delete_on_close, raw_io=True)(file_path, str_flags, open_modes=open_modes)
        self.chmod(file_path, mode)
        return fake_file.fileno()
# line: 3274
def close(self, file_des):
# line: 3283
'Close a file descriptor.\n\n Args:\n file_des: An integer file descriptor for the file object requested.\n\n Raises:\n OSError: bad file descriptor.\n TypeError: if file descriptor is not an integer.\n '
# line: 3284
file_handle = self.filesystem.GetOpenFile(file_des)
# line: 3285
file_handle.close()
# line: 3287
def read(self, file_des, num_bytes):
# line: 3300
'Read number of bytes from a file descriptor, returns bytes read.\n\n Args:\n file_des: An integer file descriptor for the file object requested.\n num_bytes: Number of bytes to read from file.\n\n Returns:\n Bytes read from file.\n\n Raises:\n OSError: bad file descriptor.\n TypeError: if file descriptor is not an integer.\n '
# line: 3301
file_handle = self.filesystem.GetOpenFile(file_des)
# line: 3302
file_handle.raw_io = True
# line: 3303
return file_handle.read(num_bytes)
# line: 3305
def write(self, file_des, contents):
# line: 3318
'Write string to file descriptor, returns number of bytes written.\n\n Args:\n file_des: An integer file descriptor for the file object requested.\n contents: String of bytes to write to file.\n\n Returns:\n Number of bytes written.\n\n Raises:\n OSError: bad file descriptor.\n TypeError: if file descriptor is not an integer.\n '
# line: 3319
file_handle = self.filesystem.GetOpenFile(file_des)
# line: 3320
file_handle.raw_io = True
# line: 3321
file_handle._sync_io()
# line: 3322
file_handle.write(contents)
# line: 3323
file_handle.flush()
# line: 3324
return len(contents)
# line: 3326
@classmethod
# line: 3326
def stat_float_times(cls, newvalue=None):
# line: 3337
"Determine whether a file's time stamps are reported as floats or ints.\n New in pyfakefs 2.9.\n\n Calling without arguments returns the current value. The value is shared\n by all instances of FakeOsModule.\n\n Args:\n newvalue: if True, mtime, ctime, atime are reported as floats.\n Else, as ints (rounding down).\n "
# line: 3338
if (newvalue is not None):
# line: 3339
cls._stat_float_times = bool(newvalue)
# line: 3340
return cls._stat_float_times
# line: 3342
def fstat(self, file_des):
# line: 3353
"Return the os.stat-like tuple for the FakeFile object of file_des.\n\n Args:\n file_des: file descriptor of filesystem object to retrieve.\n\n Returns:\n the FakeStatResult object corresponding to entry_path.\n\n Raises:\n OSError: if the filesystem object doesn't exist.\n "
# line: 3355
file_object = self.filesystem.GetOpenFile(file_des).GetObject()
# line: 3356
return file_object.stat_result.copy()
# line: 3358
def umask(self, new_mask):
# line: 3369
'Change the current umask.\n\n Args:\n new_mask: An integer.\n\n Returns:\n The old mask.\n\n Raises:\n TypeError: new_mask is of an invalid type.\n '
# line: 3370
if (not isinstance(new_mask, int)):
# line: 3371
raise TypeError('an integer is required')
# line: 3372
old_umask = self.filesystem.umask
# line: 3373
self.filesystem.umask = new_mask
# line: 3374
return old_umask
# line: 3376
def chdir(self, target_directory):
# line: 3385
'Change current working directory to target directory.\n\n Args:\n target_directory: path to new current working directory.\n\n Raises:\n OSError: if user lacks permission to enter the argument directory or if\n the target is not a directory\n '
# line: 3386
target_directory = self.filesystem.ResolvePath(target_directory, allow_fd=True)
# line: 3387
self.filesystem.ConfirmDir(target_directory)
# line: 3388
directory = self.filesystem.ResolveObject(target_directory)
# line: 3390
if (not (directory.st_mode | PERM_EXE)):
# line: 3391
raise OSError(errno.EACCES, 'Fake os module: permission denied', directory)
# line: 3393
self.filesystem.cwd = target_directory
# line: 3395
def getcwd(self):
# line: 3396
'Return current working directory.'
# line: 3397
return self.filesystem.cwd
# line: 3399
    # getcwdu exists only in Python 2; getcwdb only in Python 3.
    if (sys.version_info < (3,)):
        def getcwdu(self):
            """Return the current working directory as unicode (Python 2 only)."""
            return unicode(self.filesystem.cwd)
    else:
        def getcwdb(self):
            """Return the current working directory as bytes (Python 3 only)."""
            return bytes(self.filesystem.cwd, locale.getpreferredencoding(False))
# line: 3409
def listdir(self, target_directory):
# line: 3421
'Return a list of file names in target_directory.\n\n Args:\n target_directory: path to the target directory within the fake\n filesystem.\n\n Returns:\n a list of file names within the target directory in arbitrary order.\n\n Raises:\n OSError: if the target is not a directory.\n '
# line: 3422
return self.filesystem.ListDir(target_directory)
# line: 3424
    # os.listxattr exists only on Linux since Python 3.3.
    if (sys.platform.startswith('linux') and (sys.version_info >= (3, 3))):
        def listxattr(self, path=None, follow_symlinks=True):
            """Dummy implementation that returns an empty list - used by shutil."""
            return []
# line: 3429
    # os.scandir is new in Python 3.5.
    if (sys.version_info >= (3, 5)):
        def scandir(self, path=''):
            """Return an iterator of DirEntry objects for the entries of *path*.

            Args:
                path: path to the target directory within the fake filesystem.

            Returns:
                An iterator over an unsorted list of os.DirEntry-like objects
                for each entry in *path*.

            Raises:
                OSError: if the target is not a directory.
            """
            return self.filesystem.ScanDir(path)
# line: 3445
def _ClassifyDirectoryContents(self, root):
# line: 3460
'Classify contents of a directory as files/directories.\n\n Args:\n root: (str) Directory to examine.\n\n Returns:\n (tuple) A tuple consisting of three values: the directory examined, a\n list containing all of the directory entries, and a list containing all\n of the non-directory entries. (This is the same format as returned by\n the os.walk generator.)\n\n Raises:\n Nothing on its own, but be ready to catch exceptions generated by\n underlying mechanisms like os.listdir.\n '
# line: 3461
dirs = []
# line: 3462
files = []
# line: 3463
for entry in self.listdir(root):
# line: 3464
if self.path.isdir(self.path.join(root, entry)):
# line: 3465
dirs.append(entry)
else:
# line: 3467
files.append(entry)
# line: 3468
return (root, dirs, files)
# line: 3470
    def walk(self, top, topdown=True, onerror=None, followlinks=False):
        """Perform an os.walk operation over the fake filesystem.

        Args:
            top: root directory from which to begin the walk.
            topdown: if True, yield each tuple before its children's tuples;
                if False, after them.
            onerror: if not None, a function called with the os.error
                instance when listing a directory fails.
            followlinks: if True, symbolic links are followed.
                New in pyfakefs 2.9.

        Yields:
            (path, directories, nondirectories) for *top* and each of its
            subdirectories, as documented for the builtin os.walk.
        """
        def do_walk(top, topMost=False):
            top = self.path.normpath(top)
            if ((not topMost) and (not followlinks) and self.path.islink(top)):
                # Do not descend into symlinked subtrees unless asked to.
                return
            try:
                top_contents = self._ClassifyDirectoryContents(top)
            except OSError as exc:
                # Report the failure via onerror and skip this subtree.
                top_contents = None
                if (onerror is not None):
                    onerror(exc)
            if (top_contents is not None):
                if topdown:
                    yield top_contents
                # Recurse into each subdirectory.
                for directory in top_contents[1]:
                    if ((not followlinks) and self.path.islink(directory)):
                        continue
                    for contents in do_walk(self.path.join(top, directory)):
                        yield contents
                if (not topdown):
                    yield top_contents
        return do_walk(top, topMost=True)
# line: 3514
def readlink(self, path, dir_fd=None):
# line: 3530
'Read the target of a symlink.\n\n Args:\n path: Symlink to read the target of.\n dir_fd: If not `None`, the file descriptor of a directory,\n with `path` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Returns:\n the string representing the path to which the symbolic link points.\n\n Raises:\n TypeError: if `path` is None\n OSError: (with errno=ENOENT) if path is not a valid path, or\n (with errno=EINVAL) if path is valid, but is not a symlink.\n '
# line: 3531
path = self._path_with_dir_fd(path, self.readlink, dir_fd)
# line: 3532
return self.filesystem.ReadLink(path)
# line: 3534
    def stat(self, entry_path, dir_fd=None, follow_symlinks=None):
        """Return the os.stat-like result for the fake file at *entry_path*.

        Args:
            entry_path: path of the filesystem object to stat.
            dir_fd: (int) if not None, the file descriptor of a directory,
                with *entry_path* being relative to it. New in Python 3.3.
                New in pyfakefs 3.3.
            follow_symlinks: (bool) if False and *entry_path* is a symlink,
                the link itself is statted instead of its target.
                New in Python 3.3. New in pyfakefs 3.0.

        Returns:
            The FakeStatResult object corresponding to *entry_path*.

        Raises:
            OSError: if the filesystem object does not exist.
            TypeError: if follow_symlinks is given on Python < 3.3.
        """
        if (follow_symlinks is None):
            follow_symlinks = True
        elif (sys.version_info < (3, 3)):
            # follow_symlinks was introduced in Python 3.3.
            raise TypeError("stat() got an unexpected keyword argument 'follow_symlinks'")
        entry_path = self._path_with_dir_fd(entry_path, self.stat, dir_fd)
        return self.filesystem.GetStat(entry_path, follow_symlinks)
# line: 3559
def lstat(self, entry_path, dir_fd=None):
# line: 3573
"Return the os.stat-like tuple for entry_path, not following symlinks.\n\n Args:\n entry_path: path to filesystem object to retrieve.\n dir_fd: If not `None`, the file descriptor of a directory, with `entry_path`\n being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Returns:\n the FakeStatResult object corresponding to `entry_path`.\n\n Raises:\n OSError: if the filesystem object doesn't exist.\n "
# line: 3575
entry_path = self._path_with_dir_fd(entry_path, self.lstat, dir_fd)
# line: 3576
return self.filesystem.GetStat(entry_path, follow_symlinks=False)
# line: 3578
def remove(self, path, dir_fd=None):
# line: 3591
'Remove the FakeFile object at the specified file path.\n\n Args:\n path: Path to file to be removed.\n dir_fd: If not `None`, the file descriptor of a directory,\n with `path` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Raises:\n OSError: if path points to a directory.\n OSError: if path does not exist.\n OSError: if removal failed.\n '
# line: 3592
path = self._path_with_dir_fd(path, self.remove, dir_fd)
# line: 3593
self.filesystem.RemoveFile(path)
# line: 3595
def unlink(self, path, dir_fd=None):
# line: 3608
'Remove the FakeFile object at the specified file path.\n\n Args:\n path: Path to file to be removed.\n dir_fd: If not `None`, the file descriptor of a directory,\n with `path` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Raises:\n OSError: if path points to a directory.\n OSError: if path does not exist.\n OSError: if removal failed.\n '
# line: 3609
path = self._path_with_dir_fd(path, self.unlink, dir_fd)
# line: 3610
self.filesystem.RemoveFile(path)
# line: 3612
def rename(self, old_file_path, new_file_path, dir_fd=None):
# line: 3631
'Rename a FakeFile object at old_file_path to new_file_path,\n preserving all properties.\n Also replaces existing new_file_path object, if one existed (Unix only).\n\n Args:\n old_file_path: Path to filesystem object to rename.\n new_file_path: Path to where the filesystem object will live after this call.\n dir_fd: If not `None`, the file descriptor of a directory,\n with `old_file_path` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Raises:\n OSError: if old_file_path does not exist.\n OSError: if new_file_path is an existing directory.\n OSError: if new_file_path is an existing file (Windows only)\n OSError: if new_file_path is an existing file and could not be removed (Unix)\n OSError: if `dirname(new_file)` does not exist\n OSError: if the file would be moved to another filesystem (e.g. mount point)\n '
# line: 3632
old_file_path = self._path_with_dir_fd(old_file_path, self.rename, dir_fd)
# line: 3633
self.filesystem.RenameObject(old_file_path, new_file_path)
# line: 3635
    # os.replace is new in Python 3.3.
    if (sys.version_info >= (3, 3)):
        def replace(self, old_file_path, new_file_path):
            """Rename *old_file_path* to *new_file_path*, keeping all
            properties and replacing an existing target if present.

            New in pyfakefs 3.0.

            Args:
                old_file_path: path of the filesystem object to rename.
                new_file_path: destination path.

            Raises:
                OSError: if the source does not exist, the target is an
                    existing directory, an existing target file could not be
                    removed, the target's parent directory does not exist, or
                    the move would cross a mount point.
            """
            self.filesystem.RenameObject(old_file_path, new_file_path, force_replace=True)
# line: 3655
def rmdir(self, target_directory, dir_fd=None):
# line: 3667
"Remove a leaf Fake directory.\n\n Args:\n target_directory: (str) Name of directory to remove.\n dir_fd: If not `None`, the file descriptor of a directory,\n with `target_directory` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Raises:\n OSError: if target_directory does not exist or is not a directory,\n or as per FakeFilesystem.RemoveObject. Cannot remove '.'.\n "
# line: 3668
target_directory = self._path_with_dir_fd(target_directory, self.rmdir, dir_fd)
# line: 3669
self.filesystem.RemoveDirectory(target_directory)
# line: 3671
    def removedirs(self, target_directory):
        """Remove a leaf fake directory and all empty intermediate ones.

        Args:
            target_directory: the directory to be removed.

        Raises:
            OSError: if *target_directory* does not exist, is not a
                directory, or is not empty.
        """
        target_directory = self.filesystem.NormalizePath(target_directory)
        directory = self.filesystem.ConfirmDir(target_directory)
        if directory.contents:
            raise OSError(errno.ENOTEMPTY, 'Fake Directory not empty', self.path.basename(target_directory))
        else:
            self.rmdir(target_directory)
        (head, tail) = self.path.split(target_directory)
        if (not tail):
            # Trailing separator: split once more to get the real tail.
            (head, tail) = self.path.split(head)
        # Walk upwards, removing each ancestor while it is empty.
        while (head and tail):
            head_dir = self.filesystem.ConfirmDir(head)
            if head_dir.contents:
                break
            self.filesystem.RemoveDirectory(head, allow_symlink=True)
            (head, tail) = self.path.split(head)
# line: 3699
def mkdir(self, dir_name, mode=PERM_DEF, dir_fd=None):
# line: 3714
"Create a leaf Fake directory.\n\n Args:\n dir_name: (str) Name of directory to create.\n Relative paths are assumed to be relative to '/'.\n mode: (int) Mode to create directory with. This argument defaults to\n 0o777. The umask is applied to this mode.\n dir_fd: If not `None`, the file descriptor of a directory,\n with `dir_name` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Raises:\n OSError: if the directory name is invalid or parent directory is read only\n or as per FakeFilesystem.AddObject.\n "
# line: 3715
dir_name = self._path_with_dir_fd(dir_name, self.mkdir, dir_fd)
# line: 3716
self.filesystem.MakeDirectory(dir_name, mode)
# line: 3718
def makedirs(self, dir_name, mode=PERM_DEF, exist_ok=None):
# line: 3733
'Create a leaf Fake directory + create any non-existent parent dirs.\n\n Args:\n dir_name: (str) Name of directory to create.\n mode: (int) Mode to create directory (and any necessary parent\n directories) with. This argument defaults to 0o777. The umask is\n applied to this mode.\n exist_ok: (boolean) If exist_ok is False (the default), an OSError is\n raised if the target directory already exists. New in Python 3.2.\n New in pyfakefs 2.9.\n\n Raises:\n OSError: if the directory already exists and exist_ok=False, or as per\n `FakeFilesystem.CreateDirectory()`.\n '
# line: 3734
if (exist_ok is None):
# line: 3735
exist_ok = False
elif (sys.version_info < (3, 2)):
# line: 3737
raise TypeError("makedir() got an unexpected keyword argument 'exist_ok'")
# line: 3738
self.filesystem.MakeDirectories(dir_name, mode, exist_ok)
# line: 3740
    def _path_with_dir_fd(self, path, fct, dir_fd):
        """Return the path considering dir_fd. Raise on invalid parameters.

        Args:
            path: the (possibly relative) path argument of the caller.
            fct: the calling fake os function; its name is used to look up
                the real os counterpart and for error messages.
            dir_fd: descriptor of the anchor directory, or None.
        """
        if (dir_fd is not None):
            if (sys.version_info < (3, 3)):
                # dir_fd support was introduced in Python 3.3.
                raise TypeError(("%s() got an unexpected keyword argument 'dir_fd'" % fct.__name__))
            # Check whether the real os supports dir_fd for this function.
            real_fct = getattr(os, fct.__name__)
            if (real_fct not in self.supports_dir_fd):
                raise NotImplementedError('dir_fd unavailable on this platform')
            if isinstance(path, int):
                raise ValueError(("%s: Can't specify dir_fd without matching path" % fct.__name__))
            if (not self.path.isabs(path)):
                # Anchor relative paths at the directory open at dir_fd.
                return self.path.join(self.filesystem.GetOpenFile(dir_fd).GetObject().GetPath(), path)
        return path
# line: 3758
    def access(self, path, mode, dir_fd=None, follow_symlinks=None):
        """Check if a fake file exists and has the requested permissions.

        Args:
            path: (str) path to the file.
            mode: (int) permissions as a bitwise-OR combination of os.F_OK,
                os.R_OK, os.W_OK, and os.X_OK.
            dir_fd: if not None, the file descriptor of a directory, with
                *path* being relative to it. New in Python 3.3.
                New in pyfakefs 3.3.
            follow_symlinks: (bool) if False and *path* is a symlink, the
                link itself is queried instead of its target.
                New in Python 3.3. New in pyfakefs 3.0.

        Returns:
            bool, True if the file is accessible, False otherwise.
        """
        if ((follow_symlinks is not None) and (sys.version_info < (3, 3))):
            raise TypeError("access() got an unexpected keyword argument 'follow_symlinks'")
        path = self._path_with_dir_fd(path, self.access, dir_fd)
        try:
            stat_result = self.stat(path, follow_symlinks=follow_symlinks)
        except OSError as os_error:
            if (os_error.errno == errno.ENOENT):
                # Missing file: not accessible, but not an error either.
                return False
            raise
        # Only the owner permission bits (st_mode >> 6) are checked.
        return ((mode & ((stat_result.st_mode >> 6) & 7)) == mode)
# line: 3786
def chmod(self, path, mode, dir_fd=None, follow_symlinks=None):
# line: 3798
'Change the permissions of a file as encoded in integer mode.\n\n Args:\n path: (str) Path to the file.\n mode: (int) Permissions.\n dir_fd: If not `None`, the file descriptor of a directory, with `path`\n being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n follow_symlinks: (bool) If `False` and `path` points to a symlink,\n the link itself is queried instead of the linked object.\n New in Python 3.3. New in pyfakefs 3.0.\n '
# line: 3799
if (follow_symlinks is None):
# line: 3800
follow_symlinks = True
elif (sys.version_info < (3, 3)):
# line: 3802
raise TypeError("chmod() got an unexpected keyword argument 'follow_symlinks'")
# line: 3803
path = self._path_with_dir_fd(path, self.chmod, dir_fd)
# line: 3804
self.filesystem.ChangeMode(path, mode, follow_symlinks)
# line: 3806
def lchmod(self, path, mode):
# line: 3813
'Change the permissions of a file as encoded in integer mode.\n If the file is a link, the permissions of the link are changed.\n\n Args:\n path: (str) Path to the file.\n mode: (int) Permissions.\n '
# line: 3814
if self.filesystem.is_windows_fs:
# line: 3815
raise (NameError, "name 'lchmod' is not defined")
# line: 3816
self.filesystem.ChangeMode(path, mode, follow_symlinks=False)
# line: 3818
def utime(self, path, times=None, ns=None, dir_fd=None, follow_symlinks=None):
# line: 3842
'Change the access and modified times of a file.\n\n Args:\n path: (str) Path to the file.\n times: 2-tuple of int or float numbers, of the form (atime, mtime) \n which is used to set the access and modified times in seconds. \n If None, both times are set to the current time.\n ns: 2-tuple of int numbers, of the form (atime, mtime) which is \n used to set the access and modified times in nanoseconds. \n If None, both times are set to the current time.\n New in Python 3.3. New in pyfakefs 3.3.\n dir_fd: If not `None`, the file descriptor of a directory, with `path`\n being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n follow_symlinks: (bool) If `False` and `path` points to a symlink,\n the link itself is queried instead of the linked object.\n New in Python 3.3. New in pyfakefs 3.0.\n \n Raises:\n TypeError: If anything other than the expected types is \n specified in the passed `times` or `ns` tuple, \n or if the tuple length is not equal to 2.\n ValueError: If both times and ns are specified.\n '
# line: 3843
if (follow_symlinks is None):
# line: 3844
follow_symlinks = True
elif (sys.version_info < (3, 3)):
# line: 3846
raise TypeError("utime() got an unexpected keyword argument 'follow_symlinks'")
# line: 3847
path = self._path_with_dir_fd(path, self.utime, dir_fd)
# line: 3848
if ((ns is not None) and (sys.version_info < (3, 3))):
# line: 3849
raise TypeError("utime() got an unexpected keyword argument 'ns'")
# line: 3851
self.filesystem.UpdateTime(path, times, ns, follow_symlinks)
# line: 3853
def chown(self, path, uid, gid, dir_fd=None, follow_symlinks=None):
# line: 3872
'Set ownership of a faked file.\n\n Args:\n path: (str) Path to the file or directory.\n uid: (int) Numeric uid to set the file or directory to.\n gid: (int) Numeric gid to set the file or directory to.\n dir_fd: (int) If not `None`, the file descriptor of a directory,\n with `path` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n follow_symlinks: (bool) If `False` and path points to a symlink,\n the link itself is changed instead of the linked object.\n New in Python 3.3. New in pyfakefs 3.0.\n\n Raises:\n OSError: if path does not exist.\n\n `None` is also allowed for `uid` and `gid`. This permits `os.rename` to\n use `os.chown` even when the source file `uid` and `gid` are `None` (unset).\n '
# line: 3873
if (follow_symlinks is None):
# line: 3874
follow_symlinks = True
elif (sys.version_info < (3, 3)):
# line: 3876
raise TypeError("chown() got an unexpected keyword argument 'follow_symlinks'")
# line: 3877
path = self._path_with_dir_fd(path, self.chown, dir_fd)
# line: 3878
try:
# line: 3879
file_object = self.filesystem.ResolveObject(path, follow_symlinks, allow_fd=True)
# line: 3880
except IOError as io_error:
# line: 3881
if (io_error.errno == errno.ENOENT):
# line: 3882
raise OSError(errno.ENOENT, 'No such file or directory in fake filesystem', path)
# line: 3885
raise
# line: 3886
if (not ((isinstance(uid, int) or (uid is None)) and (isinstance(gid, int) or (gid is None)))):
# line: 3888
raise TypeError('An integer is required')
# line: 3889
if (uid != (-1)):
# line: 3890
file_object.st_uid = uid
# line: 3891
if (gid != (-1)):
# line: 3892
file_object.st_gid = gid
# line: 3894
def mknod(self, filename, mode=None, device=None, dir_fd=None):
# line: 3914
"Create a filesystem node named 'filename'.\n\n Does not support device special files or named pipes as the real os\n module does.\n\n Args:\n filename: (str) Name of the file to create\n mode: (int) Permissions to use and type of file to be created.\n Default permissions are 0o666. Only the stat.S_IFREG file type\n is supported by the fake implementation. The umask is applied\n to this mode.\n device: not supported in fake implementation\n dir_fd: If not `None`, the file descriptor of a directory,\n with `filename` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Raises:\n OSError: if called with unsupported options or the file can not be\n created.\n "
# line: 3915
if self.filesystem.is_windows_fs:
# line: 3916
raise (AttributeError, "module 'os' has no attribute 'mknode'")
# line: 3917
if (mode is None):
# line: 3918
mode = (stat.S_IFREG | PERM_DEF_FILE)
# line: 3919
if (device or (not (mode & stat.S_IFREG))):
# line: 3920
raise OSError(errno.ENOENT, 'Fake os mknod implementation only supports regular files.')
# line: 3924
filename = self._path_with_dir_fd(filename, self.mknod, dir_fd)
# line: 3925
(head, tail) = self.path.split(filename)
# line: 3926
if (not tail):
# line: 3927
if self.filesystem.Exists(head):
# line: 3928
raise OSError(errno.EEXIST, ('Fake filesystem: %s: %s' % (os.strerror(errno.EEXIST), filename)))
# line: 3930
raise OSError(errno.ENOENT, ('Fake filesystem: %s: %s' % (os.strerror(errno.ENOENT), filename)))
# line: 3932
if (tail in ('.', u'.', '..', u'..')):
# line: 3933
raise OSError(errno.ENOENT, ('Fake fileystem: %s: %s' % (os.strerror(errno.ENOENT), filename)))
# line: 3935
if self.filesystem.Exists(filename):
# line: 3936
raise OSError(errno.EEXIST, ('Fake fileystem: %s: %s' % (os.strerror(errno.EEXIST), filename)))
# line: 3938
try:
# line: 3939
self.filesystem.AddObject(head, FakeFile(tail, (mode & (~ self.filesystem.umask)), filesystem=self.filesystem))
# line: 3942
except IOError as e:
# line: 3943
raise OSError(e.errno, ('Fake filesystem: %s: %s' % (os.strerror(e.errno), filename)))
# line: 3946
def symlink(self, link_target, path, dir_fd=None):
# line: 3958
'Creates the specified symlink, pointed at the specified link target.\n\n Args:\n link_target: The target of the symlink.\n path: Path to the symlink to create.\n dir_fd: If not `None`, the file descriptor of a directory,\n with `link_target` being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Raises:\n OSError: if the file already exists.\n '
# line: 3959
link_target = self._path_with_dir_fd(link_target, self.symlink, dir_fd)
# line: 3960
self.filesystem.CreateLink(path, link_target, create_missing_dirs=False)
# line: 3962
def link(self, oldpath, newpath, dir_fd=None):
# line: 3980
"Create a hard link at new_path, pointing at old_path.\n New in pyfakefs 2.9.\n\n Args:\n old_path: An existing link to the target file.\n new_path: The destination path to create a new link at.\n dir_fd: If not `None`, the file descriptor of a directory, with `oldpath`\n being relative to this directory.\n New in Python 3.3. New in pyfakefs 3.3.\n\n Returns:\n the FakeFile object referred to by `oldpath`.\n\n Raises:\n OSError: if something already exists at new_path.\n OSError: if the parent directory doesn't exist.\n OSError: if on Windows before Python 3.2.\n "
# line: 3981
oldpath = self._path_with_dir_fd(oldpath, self.link, dir_fd)
# line: 3982
self.filesystem.CreateHardLink(oldpath, newpath)
# line: 3984
def fsync(self, file_des):
# line: 3994
'Perform fsync for a fake file (in other words, do nothing).\n New in pyfakefs 2.9.\n\n Args:\n file_des: file descriptor of the open file.\n\n Raises:\n OSError: file_des is an invalid file descriptor.\n TypeError: file_des is not an integer.\n '
# line: 3996
self.filesystem.GetOpenFile(file_des)
# line: 3998
def fdatasync(self, file_des):
# line: 4008
'Perform fdatasync for a fake file (in other words, do nothing).\n New in pyfakefs 2.9.\n\n Args:\n file_des: file descriptor of the open file.\n\n Raises:\n OSError: file_des is an invalid file descriptor.\n TypeError: file_des is not an integer.\n '
# line: 4010
self.filesystem.GetOpenFile(file_des)
# line: 4012
def __getattr__(self, name):
# line: 4013
'Forwards any unfaked calls to the standard os module.'
# line: 4014
return getattr(self._os_module, name)
# line: 4017
class FakeIoModule(object):
# line: 4026
'Uses FakeFilesystem to provide a fake io module replacement.\n New in pyfakefs 2.9.\n\n Currently only used to wrap `io.open()` which is an alias to `open()`.\n\n You need a fake_filesystem to use this:\n filesystem = fake_filesystem.FakeFilesystem()\n my_io_module = fake_filesystem.FakeIoModule(filesystem)\n '
# line: 4028
def __init__(self, filesystem):
# line: 4032
'\n Args:\n filesystem: FakeFilesystem used to provide file system information\n '
# line: 4033
self.filesystem = filesystem
# line: 4034
self._io_module = io
# line: 4036
def open(self, file_path, mode='r', buffering=(-1), encoding=None, errors=None, newline=None, closefd=True, opener=None):
# line: 4040
'Redirect the call to FakeFileOpen.\n See FakeFileOpen.Call() for description.\n '
# line: 4041
if ((opener is not None) and (sys.version_info < (3, 3))):
# line: 4042
raise TypeError("open() got an unexpected keyword argument 'opener'")
# line: 4043
fake_open = FakeFileOpen(self.filesystem, use_io=True)
# line: 4044
return fake_open(file_path, mode, buffering, encoding, errors, newline, closefd, opener)
# line: 4046
def __getattr__(self, name):
# line: 4047
'Forwards any unfaked calls to the standard io module.'
# line: 4048
return getattr(self._io_module, name)
# line: 4051
class FakeFileWrapper(object):
# line: 4056
'Wrapper for a stream object for use by a FakeFile object.\n\n If the wrapper has any data written to it, it will propagate to\n the FakeFile object on close() or flush().\n '
# line: 4057
def __init__(self, file_object, file_path, update=False, read=False, append=False, delete_on_close=False, filesystem=None, newline=None, binary=True, closefd=True, encoding=None, errors=None, raw_io=False, is_stream=False, use_io=True):
# line: 4061
self._file_object = file_object
# line: 4062
self._file_path = file_path
# line: 4063
self._append = append
# line: 4064
self._read = read
# line: 4065
self.allow_update = update
# line: 4066
self._closefd = closefd
# line: 4067
self._file_epoch = file_object.epoch
# line: 4068
self.raw_io = raw_io
# line: 4069
self._binary = binary
# line: 4070
self.is_stream = is_stream
# line: 4071
contents = file_object.byte_contents
# line: 4072
self._encoding = encoding
# line: 4073
errors = (errors or 'strict')
# line: 4074
if encoding:
# line: 4075
file_wrapper = FakeFileWrapper(file_object, file_path, update, read, append, delete_on_close=False, filesystem=filesystem, newline=None, binary=True, closefd=closefd, is_stream=True)
# line: 4079
codec_info = codecs.lookup(encoding)
# line: 4080
self._io = codecs.StreamReaderWriter(file_wrapper, codec_info.streamreader, codec_info.streamwriter, errors)
else:
# line: 4083
if ((not binary) and (sys.version_info >= (3, 0))):
# line: 4084
io_class = io.StringIO
else:
# line: 4086
io_class = io.BytesIO
# line: 4087
io_args = ({} if binary else {'newline': newline, })
# line: 4088
if (contents and (not binary)):
# line: 4089
contents = contents.decode((encoding or locale.getpreferredencoding(False)), errors=errors)
# line: 4091
if (contents and (not update)):
# line: 4092
self._io = io_class(contents, **io_args)
else:
# line: 4094
self._io = io_class(**io_args)
# line: 4096
if contents:
# line: 4097
if update:
# line: 4098
if (not encoding):
# line: 4099
self._io.write(contents)
# line: 4100
if (not append):
# line: 4101
self._io.seek(0)
else:
# line: 4103
self._read_whence = 0
# line: 4104
if (read and (not use_io)):
# line: 4105
self._read_seek = 0
else:
# line: 4107
self._read_seek = self._io.tell()
else:
# line: 4109
self._read_whence = 0
# line: 4110
self._read_seek = 0
# line: 4112
if delete_on_close:
# line: 4113
assert filesystem, 'delete_on_close=True requires filesystem'
# line: 4114
self._filesystem = filesystem
# line: 4115
self.delete_on_close = delete_on_close
# line: 4118
self.name = file_object.opened_as
# line: 4119
self.filedes = None
# line: 4121
def __enter__(self):
# line: 4122
"To support usage of this fake file with the 'with' statement."
# line: 4123
return self
# line: 4125
def __exit__(self, type, value, traceback):
# line: 4126
"To support usage of this fake file with the 'with' statement."
# line: 4127
self.close()
# line: 4129
def _raise(self, message):
# line: 4130
if self.raw_io:
# line: 4131
raise OSError(errno.EBADF, message)
# line: 4132
if (sys.version_info < (3, 0)):
# line: 4133
raise IOError(message)
# line: 4134
raise io.UnsupportedOperation(message)
# line: 4136
def GetObject(self):
# line: 4137
'Return the FakeFile object that is wrapped by the current instance.'
# line: 4138
return self._file_object
# line: 4140
def fileno(self):
# line: 4141
'Return the file descriptor of the file object.'
# line: 4142
return self.filedes
# line: 4144
def close(self):
# line: 4145
'Close the file.'
# line: 4147
if (self not in self._filesystem.open_files):
# line: 4148
return
# line: 4150
if (self.allow_update and (not self.raw_io)):
# line: 4151
self._file_object.SetContents(self._io.getvalue(), self._encoding)
# line: 4152
if self._closefd:
# line: 4153
self._filesystem.CloseOpenFile(self.filedes)
# line: 4154
if self.delete_on_close:
# line: 4155
self._filesystem.RemoveObject(self.GetObject().GetPath())
# line: 4157
def flush(self):
# line: 4158
"Flush file contents to 'disk'."
# line: 4159
self._check_open_file()
# line: 4160
if self.allow_update:
# line: 4161
self._io.flush()
# line: 4162
self._file_object.SetContents(self._io.getvalue(), self._encoding)
# line: 4163
self._file_epoch = self._file_object.epoch
# line: 4165
def seek(self, offset, whence=0):
# line: 4166
"Move read/write pointer in 'file'."
# line: 4167
self._check_open_file()
# line: 4168
if (not self._append):
# line: 4169
self._io.seek(offset, whence)
else:
# line: 4171
self._read_seek = offset
# line: 4172
self._read_whence = whence
# line: 4173
if (not self.is_stream):
# line: 4174
self.flush()
# line: 4176
def tell(self):
# line: 4181
"Return the file's current position.\n\n Returns:\n int, file's current position in bytes.\n "
# line: 4182
self._check_open_file()
# line: 4183
self._flush_for_read()
# line: 4184
if (not self._append):
# line: 4185
return self._io.tell()
# line: 4186
if self._read_whence:
# line: 4187
write_seek = self._io.tell()
# line: 4188
self._io.seek(self._read_seek, self._read_whence)
# line: 4189
self._read_seek = self._io.tell()
# line: 4190
self._read_whence = 0
# line: 4191
self._io.seek(write_seek)
# line: 4192
return self._read_seek
# line: 4194
def _flush_for_read(self):
# line: 4196
if self._flushes_after_read():
# line: 4197
self.flush()
# line: 4199
def _flushes_after_read(self):
# line: 4200
return ((not self.is_stream) and ((not self._filesystem.is_windows_fs) or (sys.version_info[0] > 2)))
# line: 4204
def _sync_io(self):
# line: 4205
'Update the stream with changes to the file object contents.'
# line: 4206
if (self._file_epoch == self._file_object.epoch):
# line: 4207
return
# line: 4209
if isinstance(self._io, io.BytesIO):
# line: 4210
contents = self._file_object.byte_contents
else:
# line: 4212
contents = self._file_object.contents
# line: 4214
is_stream_reader_writer = isinstance(self._io, codecs.StreamReaderWriter)
# line: 4215
if is_stream_reader_writer:
# line: 4216
self._io.stream.allow_update = True
# line: 4217
whence = self._io.tell()
# line: 4218
self._io.seek(0)
# line: 4219
self._io.truncate()
# line: 4220
self._io.write(contents)
# line: 4221
if self._append:
# line: 4222
self._io.seek(0, os.SEEK_END)
else:
# line: 4224
self._io.seek(whence)
# line: 4226
if is_stream_reader_writer:
# line: 4227
self._io.stream.allow_update = False
# line: 4228
self._file_epoch = self._file_object.epoch
# line: 4230
def _ReadWrapper(self, name):
# line: 4241
'Wrap a stream attribute in a read wrapper.\n\n Returns a read_wrapper which tracks our own read pointer since the\n stream object has no concept of a different read and write pointer.\n\n Args:\n name: the name of the attribute to wrap. Should be a read call.\n\n Returns:\n either a read_error or read_wrapper function.\n '
# line: 4242
io_attr = getattr(self._io, name)
# line: 4244
def read_wrapper(*args, **kwargs):
# line: 4256
"Wrap all read calls to the stream object.\n\n We do this to track the read pointer separate from the write\n pointer. Anything that wants to read from the stream object\n while we're in append mode goes through this.\n\n Args:\n *args: pass through args\n **kwargs: pass through kwargs\n Returns:\n Wrapped stream object method\n "
# line: 4257
self._io.seek(self._read_seek, self._read_whence)
# line: 4258
ret_value = io_attr(*args, **kwargs)
# line: 4259
self._read_seek = self._io.tell()
# line: 4260
self._read_whence = 0
# line: 4261
self._io.seek(0, 2)
# line: 4262
return ret_value
# line: 4264
return read_wrapper
# line: 4266
def _OtherWrapper(self, name, writing):
# line: 4274
'Wrap a stream attribute in an other_wrapper.\n\n Args:\n name: the name of the stream attribute to wrap.\n\n Returns:\n other_wrapper which is described below.\n '
# line: 4275
io_attr = getattr(self._io, name)
# line: 4277
def other_wrapper(*args, **kwargs):
# line: 4289
'Wrap all other calls to the stream Object.\n\n We do this to track changes to the write pointer. Anything that\n moves the write pointer in a file open for appending should move\n the read pointer as well.\n\n Args:\n *args: pass through args\n **kwargs: pass through kwargs\n Returns:\n Wrapped stream object method\n '
# line: 4290
write_seek = self._io.tell()
# line: 4291
ret_value = io_attr(*args, **kwargs)
# line: 4292
if (write_seek != self._io.tell()):
# line: 4293
self._read_seek = self._io.tell()
# line: 4294
self._read_whence = 0
# line: 4295
if ((not writing) or (sys.version_info >= (3,))):
# line: 4296
return ret_value
# line: 4298
return other_wrapper
# line: 4300
def _TruncateWrapper(self):
# line: 4305
'Wrap truncate() to allow flush after truncate.\n\n Returns:\n wrapper which is described below.\n '
# line: 4306
io_attr = getattr(self._io, 'truncate')
# line: 4308
def truncate_wrapper(*args, **kwargs):
# line: 4309
'Wrap truncate call to call flush after truncate.'
# line: 4310
if self._append:
# line: 4311
self._io.seek(self._read_seek, self._read_whence)
# line: 4312
size = io_attr(*args, **kwargs)
# line: 4313
self.flush()
# line: 4314
if (not self.is_stream):
# line: 4315
self._file_object.SetSize(size)
# line: 4316
buffer_size = len(self._io.getvalue())
# line: 4317
if (buffer_size < size):
# line: 4318
self._io.seek(buffer_size)
# line: 4319
self._io.write(('\x00' * (size - buffer_size)))
# line: 4320
self._file_object.SetContents(self._io.getvalue(), self._encoding)
# line: 4321
if (sys.version_info >= (3,)):
# line: 4322
return size
# line: 4324
return truncate_wrapper
# line: 4326
def _WriteWrapper(self, name):
# line: 4331
'Wrap write() to adapt return value for Python 2.\n\n Returns:\n wrapper which is described below.\n '
# line: 4332
io_attr = getattr(self._io, name)
# line: 4334
def write_wrapper(*args, **kwargs):
# line: 4335
'Wrap trunctae call to call flush after truncate.'
# line: 4336
ret_value = io_attr(*args, **kwargs)
# line: 4337
if (sys.version_info >= (3,)):
# line: 4338
return ret_value
# line: 4340
return write_wrapper
# line: 4342
def Size(self):
# line: 4343
'Return the content size in bytes of the wrapped file.'
# line: 4344
return self._file_object.st_size
# line: 4346
def __getattr__(self, name):
# line: 4347
if self._file_object.IsLargeFile():
# line: 4348
raise FakeLargeFileIoException(self._file_path)
# line: 4350
reading = (name.startswith('read') or (name == 'next'))
# line: 4351
truncate = (name == 'truncate')
# line: 4352
writing = (name.startswith('write') or truncate)
# line: 4353
if (reading or writing):
# line: 4354
self._check_open_file()
# line: 4355
if ((not self._read) and reading):
# line: 4356
def read_error(*args, **kwargs):
# line: 4357
'Throw an error unless the argument is zero.'
# line: 4358
if (args and (args[0] == 0)):
# line: 4359
if (self._filesystem.is_windows_fs and self.raw_io):
# line: 4360
return ('' if self._binary else u'')
# line: 4361
self._raise('File is not open for reading.')
# line: 4363
return read_error
# line: 4365
if ((not self.allow_update) and writing):
# line: 4366
def write_error(*args, **kwargs):
# line: 4367
'Throw an error.'
# line: 4368
if self.raw_io:
# line: 4369
if (self._filesystem.is_windows_fs and args and (len(args[0]) == 0)):
# line: 4370
return 0
# line: 4371
self._raise('File is not open for writing.')
# line: 4373
return write_error
# line: 4375
if reading:
# line: 4376
self._sync_io()
# line: 4377
self._flush_for_read()
# line: 4378
if truncate:
# line: 4379
return self._TruncateWrapper()
# line: 4380
if self._append:
# line: 4381
if reading:
# line: 4382
return self._ReadWrapper(name)
else:
# line: 4384
return self._OtherWrapper(name, writing)
# line: 4385
if writing:
# line: 4386
return self._WriteWrapper(name)
# line: 4388
return getattr(self._io, name)
# line: 4390
def _check_open_file(self):
# line: 4391
if ((not self.is_stream) and (not (self in self._filesystem.open_files))):
# line: 4392
raise ValueError('I/O operation on closed file')
# line: 4394
def __iter__(self):
# line: 4395
if (not self._read):
# line: 4396
self._raise('File is not open for reading')
# line: 4397
return self._io.__iter__()
# line: 4400
class FakeDirWrapper(object):
# line: 4402
'Wrapper for a FakeDirectory object to be used in open files list.\n '
# line: 4403
def __init__(self, file_object, file_path, filesystem):
# line: 4404
self._file_object = file_object
# line: 4405
self._file_path = file_path
# line: 4406
self._filesystem = filesystem
# line: 4407
self.filedes = None
# line: 4409
def GetObject(self):
# line: 4410
'Return the FakeFile object that is wrapped by the current instance.'
# line: 4411
return self._file_object
# line: 4413
def fileno(self):
# line: 4414
'Return the file descriptor of the file object.'
# line: 4415
return self.filedes
# line: 4417
def close(self):
# line: 4418
'Close the directory.'
# line: 4419
self._filesystem.CloseOpenFile(self.filedes)
# line: 4422
class FakeFileOpen(object):
# line: 4427
'Faked `file()` and `open()` function replacements.\n\n Returns FakeFile objects in a FakeFilesystem in place of the `file()`\n or `open()` function.\n '
# line: 4428
__name__ = 'FakeFileOpen'
# line: 4430
def __init__(self, filesystem, delete_on_close=False, use_io=False, raw_io=False):
# line: 4438
'init.\n\n Args:\n filesystem: FakeFilesystem used to provide file system information\n delete_on_close: optional boolean, deletes file on close()\n use_io: if True, the io.open() version is used (ignored for Python 3,\n where io.open() is an alias to open() )\n '
# line: 4439
self.filesystem = filesystem
# line: 4440
self._delete_on_close = delete_on_close
# line: 4441
self._use_io = (use_io or (sys.version_info >= (3, 0)) or (platform.python_implementation() == 'PyPy'))
# line: 4443
self.raw_io = raw_io
# line: 4445
def __call__(self, *args, **kwargs):
# line: 4446
'Redirects calls to file() or open() to appropriate method.'
# line: 4447
if self._use_io:
# line: 4448
return self.Call(*args, **kwargs)
else:
# line: 4450
return self._call_ver2(*args, **kwargs)
# line: 4452
def _call_ver2(self, file_path, mode='r', buffering=(-1), flags=None, open_modes=None):
# line: 4453
'Limits args of open() or file() for Python 2.x versions.'
# line: 4455
mode = (flags or mode)
# line: 4456
return self.Call(file_path, mode, buffering, open_modes=open_modes)
# line: 4458
def Call(self, file_, mode='r', buffering=(-1), encoding=None, errors=None, newline=None, closefd=True, opener=None, open_modes=None):
# line: 4484
"Return a file-like object with the contents of the target file object.\n\n Args:\n file_: path to target file or a file descriptor.\n mode: additional file modes. All r/w/a/x r+/w+/a+ modes are supported.\n 't', and 'U' are ignored, e.g., 'wU' is treated as 'w'. 'b' sets\n binary mode, no end of line translations in StringIO.\n buffering: ignored. (Used for signature compliance with __builtin__.open)\n encoding: the encoding used to encode unicode strings / decode bytes.\n New in pyfakefs 2.9.\n errors: ignored, this relates to encoding.\n newline: controls universal newlines, passed to stream object.\n closefd: if a file descriptor rather than file name is passed, and set\n to false, then the file descriptor is kept open when file is closed.\n opener: not supported.\n open_modes: Modes for opening files if called from low-level API\n\n Returns:\n a file-like object containing the contents of the target file.\n\n Raises:\n IOError: if the target object is a directory, the path is invalid or\n permission is denied.\n "
# line: 4485
orig_modes = mode
# line: 4487
binary = ((sys.version_info < (3, 0)) or ('b' in mode))
# line: 4489
mode = mode.replace('t', '').replace('b', '')
# line: 4490
mode = mode.replace('rU', 'r').replace('U', 'r')
# line: 4492
if (not self.raw_io):
# line: 4493
if (mode not in _OPEN_MODE_MAP):
# line: 4494
raise ValueError(('Invalid mode: %r' % orig_modes))
# line: 4495
open_modes = _OpenModes(*_OPEN_MODE_MAP[mode])
# line: 4497
file_object = None
# line: 4498
filedes = None
# line: 4500
if isinstance(file_, int):
# line: 4501
filedes = file_
# line: 4502
wrapper = self.filesystem.GetOpenFile(filedes)
# line: 4503
self._delete_on_close = wrapper.delete_on_close
# line: 4504
file_object = self.filesystem.GetOpenFile(filedes).GetObject()
# line: 4505
file_path = file_object.name
else:
# line: 4507
file_path = file_
# line: 4508
real_path = self.filesystem.ResolvePath(file_path, raw_io=self.raw_io)
# line: 4509
if self.filesystem.Exists(file_path):
# line: 4510
file_object = self.filesystem.GetObjectFromNormalizedPath(real_path)
# line: 4511
closefd = True
# line: 4513
error_class = (OSError if self.raw_io else IOError)
# line: 4514
if (open_modes.must_not_exist and (file_object or self.filesystem.IsLink(file_path))):
# line: 4515
raise error_class(errno.EEXIST, 'File exists', file_path)
# line: 4516
if file_object:
# line: 4517
if ((open_modes.can_read and (not (file_object.st_mode & PERM_READ))) or (open_modes.can_write and (not (file_object.st_mode & PERM_WRITE)))):
# line: 4519
raise error_class(errno.EACCES, 'Permission denied', file_path)
# line: 4520
if open_modes.can_write:
# line: 4521
if open_modes.truncate:
# line: 4522
file_object.SetContents('')
else:
# line: 4524
if open_modes.must_exist:
# line: 4525
raise error_class(errno.ENOENT, 'No such file or directory', file_path)
# line: 4526
file_object = self.filesystem.CreateFileInternally(real_path, create_missing_dirs=False, apply_umask=True, raw_io=self.raw_io)
# line: 4529
if stat.S_ISDIR(file_object.st_mode):
# line: 4530
if self.filesystem.is_windows_fs:
# line: 4531
raise OSError(errno.EPERM, 'Fake file object: is a directory', file_path)
else:
# line: 4533
raise error_class(errno.EISDIR, 'Fake file object: is a directory', file_path)
# line: 4537
file_object.opened_as = file_path
# line: 4539
fakefile = FakeFileWrapper(file_object, file_path, update=open_modes.can_write, read=open_modes.can_read, append=open_modes.append, delete_on_close=self._delete_on_close, filesystem=self.filesystem, newline=newline, binary=binary, closefd=closefd, encoding=encoding, errors=errors, raw_io=self.raw_io, use_io=self._use_io)
# line: 4553
if (filedes is not None):
# line: 4554
fakefile.filedes = filedes
# line: 4556
self.filesystem.open_files[filedes] = fakefile
else:
# line: 4558
fakefile.filedes = self.filesystem.AddOpenFile(fakefile)
# line: 4559
return fakefile
# line: 4562
def _RunDoctest():
# line: 4563
import doctest
# line: 4564
from pyfakefs import fake_filesystem
# line: 4565
return doctest.testmod(fake_filesystem)
# line: 4568
if (__name__ == '__main__'):
# line: 4569
_RunDoctest() | [
"[email protected]"
]
| |
c8dd68af4b22458adfed98f16219e4ab50e39b6b | adad318902262dffa68023ef1be3f9290d6b7ded | /ppci/irutils/io.py | c0d177ba761e624bff0dc95c3391b99b4100b7bb | [
"BSD-2-Clause"
]
| permissive | ForceBru/ppci | e2324b772ed7486f1154bbf85c3a4469515059ab | 7bca8fa0c7adce37c62716186c786ce5cbd9d189 | refs/heads/master | 2022-11-07T19:18:11.907546 | 2020-06-24T12:38:12 | 2020-06-24T12:38:12 | 271,379,816 | 0 | 0 | BSD-2-Clause | 2020-06-24T12:38:13 | 2020-06-10T20:35:58 | Python | UTF-8 | Python | false | false | 23,140 | py | """ Module to serialize and deserialize IR-code.
Take an IR-module, and turn it into a dict or json.
Then, this item can be persistet or send via e-mail.
Next up, take this dict / json and reconstruct the
identical IR-module from it.
This can be useful in these scenario's:
- Compilation caching: store the IR-code and load from disk when required later on.
- Distributed compilation: transfer IR-code across processes.
.. doctest::
>>> import io
>>> from ppci.api import c_to_ir
>>> from ppci.irutils import to_json, from_json
>>> c_src = "int add(int a, int b) { return a + b; }" # Define some C-code
>>> mod = c_to_ir(io.StringIO(c_src), "x86_64") # turn C-code into IR-code
>>> mod.stats()
'functions: 1, blocks: 2, instructions: 11'
>>> json_txt = to_json(mod) # Turn module into JSON
>>> mod2 = from_json(json_txt) # Load module from JSON.
>>> mod2.stats()
'functions: 1, blocks: 2, instructions: 11'
"""
import json
from .. import ir
from ..utils.binary_txt import bin2asc, asc2bin
def to_json(module):
""" Convert an IR-module to json format.
Args:
module: the IR-module intended for serialization.
Returns:
A JSON string representing the module.
"""
d = to_dict(module)
return json.dumps(d, indent=2, sort_keys=True)
def to_dict(module):
w = DictWriter()
d = w.write_module(module)
return d
def from_json(json_txt):
""" Construct a module from valid json.
Args:
json_txt: A string with valid JSON.
Returns:
The IR-module as represented by JSON.
"""
return from_dict(json_txt)
def from_dict(d):
r = DictReader()
return r.construct(d)
class DictWriter:
    """ Serialize an IR-module as a dict.

    Walks a module and turns every external, variable, subroutine, block
    and instruction into plain dicts / lists / scalars suitable for
    json.dumps.  DictReader performs the inverse transformation, so the
    key names used here form the de-facto schema.
    """

    def __init__(self):
        # Stateless writer; nothing to initialize.
        pass

    def write_module(self, module):
        """ Serialize a whole ir.Module into a json-ready dict. """
        json_externals = []
        for external in module.externals:
            json_external = self.write_external(external)
            json_externals.append(json_external)
        json_variables = []
        for variable in module.variables:
            json_variable = self.write_variable(variable)
            json_variables.append(json_variable)
        json_functions = []
        for function in module.functions:
            json_function = self.write_subroutine(function)
            json_functions.append(json_function)
        return {
            "name": module.name,
            "externals": json_externals,
            "variables": json_variables,
            "subroutines": json_functions,
        }

    def write_external(self, external):
        """ Serialize one external symbol; "kind" tags the variant. """
        if isinstance(external, ir.ExternalVariable):
            json_external = {
                "kind": "variable",
                "name": external.name,
            }
        elif isinstance(external, ir.ExternalFunction):
            json_external = {
                "kind": "function",
                "name": external.name,
                "parameter_types": [
                    self.write_type(ty) for ty in external.argument_types
                ],
                "return_type": self.write_type(external.return_ty),
            }
        elif isinstance(external, ir.ExternalProcedure):
            # Like a function, but without a return type.
            json_external = {
                "kind": "procedure",
                "name": external.name,
                "parameter_types": [
                    self.write_type(ty) for ty in external.argument_types
                ],
            }
        else:  # pragma: no cover
            raise NotImplementedError(str(external))
        return json_external

    def write_binding(self, binding):
        """ Serialize a binding (local/global) as its string form. """
        return str(binding)

    def write_variable(self, variable):
        """ Serialize a module-level ir.Variable. """
        if isinstance(variable, ir.Variable):
            json_variable = {
                "name": variable.name,
                "binding": self.write_binding(variable.binding),
                "amount": variable.amount,
                "alignment": variable.alignment,
            }
        else:  # pragma: no cover
            raise NotImplementedError(str(variable))
        return json_variable

    def write_subroutine(self, subroutine):
        """ Serialize a function or procedure with its blocks. """
        json_binding = self.write_binding(subroutine.binding)
        json_parameters = []
        for parameter in subroutine.arguments:
            json_parameter = {
                "name": parameter.name,
                "type": self.write_type(parameter.ty),
            }
            json_parameters.append(json_parameter)
        json_blocks = []
        for block in subroutine.blocks:
            json_block = self.write_block(block)
            json_blocks.append(json_block)
        json_subroutine = {
            "binding": json_binding,
            "name": subroutine.name,
            "parameters": json_parameters,
            "blocks": json_blocks,
        }
        # Only functions carry a return type; procedures do not.
        if isinstance(subroutine, ir.Function):
            json_subroutine["kind"] = "function"
            json_subroutine["return_type"] = self.write_type(
                subroutine.return_ty
            )
        else:
            assert isinstance(subroutine, ir.Procedure)
            json_subroutine["kind"] = "procedure"
        return json_subroutine

    def write_block(self, block):
        """ Serialize one basic block and its instructions, in order. """
        json_instructions = []
        for instruction in block.instructions:
            json_instruction = self.write_instruction(instruction)
            json_instructions.append(json_instruction)
        json_block = {
            "name": block.name,
            "instructions": json_instructions,
        }
        return json_block

    def write_instruction(self, instruction):
        """ Serialize a single instruction; "kind" selects the variant.

        Values and blocks are referenced by name (see write_value_ref /
        write_block_ref); DictReader resolves those names back to objects.
        """
        if isinstance(instruction, ir.Load):
            json_instruction = {
                "kind": "load",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "address": self.write_value_ref(instruction.address),
                "volatile": instruction.volatile,
            }
        elif isinstance(instruction, ir.Store):
            json_instruction = {
                "kind": "store",
                "address": self.write_value_ref(instruction.address),
                "value": self.write_value_ref(instruction.value),
                "volatile": instruction.volatile,
            }
        elif isinstance(instruction, ir.Alloc):
            # NOTE(review): "type" is written here but DictReader's alloc
            # branch does not read it back -- confirm whether it is needed.
            json_instruction = {
                "kind": "alloc",
                "type": self.write_type(instruction.ty),
                "size": instruction.amount,
                "alignment": instruction.alignment,
                "name": instruction.name,
            }
        elif isinstance(instruction, ir.Binop):
            json_instruction = {
                "kind": "binop",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "a": self.write_value_ref(instruction.a),
                "operation": instruction.operation,
                "b": self.write_value_ref(instruction.b),
            }
        elif isinstance(instruction, ir.Unop):
            json_instruction = {
                "kind": "unop",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "a": self.write_value_ref(instruction.a),
                "operation": instruction.operation,
            }
        elif isinstance(instruction, ir.AddressOf):
            json_instruction = {
                "kind": "addressof",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "src": self.write_value_ref(instruction.src),
            }
        elif isinstance(instruction, ir.Exit):
            json_instruction = {
                "kind": "exit",
            }
        elif isinstance(instruction, ir.Return):
            json_instruction = {
                "kind": "return",
                "result": self.write_value_ref(instruction.result),
            }
        elif isinstance(instruction, ir.Jump):
            json_instruction = {
                "kind": "jump",
                "target": self.write_block_ref(instruction.target),
            }
        elif isinstance(instruction, ir.CJump):
            json_instruction = {
                "kind": "cjump",
                "a": self.write_value_ref(instruction.a),
                "b": self.write_value_ref(instruction.b),
                "condition": instruction.cond,
                "yes_block": self.write_block_ref(instruction.lab_yes),
                "no_block": self.write_block_ref(instruction.lab_no),
            }
        elif isinstance(instruction, ir.Cast):
            json_instruction = {
                "kind": "cast",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "value": self.write_value_ref(instruction.src),
            }
        elif isinstance(instruction, ir.Const):
            json_instruction = {
                "kind": "const",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "value": instruction.value,
            }
        elif isinstance(instruction, ir.LiteralData):
            # Raw bytes are not json-safe; encode via bin2asc.
            json_instruction = {
                "kind": "literaldata",
                "name": instruction.name,
                "data": bin2asc(instruction.data),
            }
        elif isinstance(instruction, ir.ProcedureCall):
            json_arguments = [
                self.write_value_ref(argument)
                for argument in instruction.arguments
            ]
            json_instruction = {
                "kind": "procedurecall",
                "callee": self.write_value_ref(instruction.callee),
                "arguments": json_arguments,
            }
        elif isinstance(instruction, ir.FunctionCall):
            json_arguments = [
                self.write_value_ref(argument)
                for argument in instruction.arguments
            ]
            json_instruction = {
                "kind": "functioncall",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "callee": self.write_value_ref(instruction.callee),
                "arguments": json_arguments,
            }
        elif isinstance(instruction, ir.Phi):
            json_phi_inputs = []
            for phi_input_block, phi_input_value in instruction.inputs.items():
                json_phi_input = {
                    "block": self.write_block_ref(phi_input_block),
                    "value": self.write_value_ref(phi_input_value),
                }
                json_phi_inputs.append(json_phi_input)
            json_instruction = {
                "kind": "phi",
                "name": instruction.name,
                "type": self.write_type(instruction.ty),
                "inputs": json_phi_inputs,
            }
        else:  # pragma: no cover
            raise NotImplementedError(str(instruction))
        return json_instruction

    def write_type(self, ty):
        """ Serialize a type: basic/pointer types by name, blobs by layout. """
        if isinstance(ty, (ir.BasicTyp, ir.PointerTyp)):
            json_type = {
                "kind": "basic",
                "name": ty.name,
            }
        elif isinstance(ty, ir.BlobDataTyp):
            json_type = {
                "kind": "blob",
                "size": ty.size,
                "alignment": ty.alignment,
            }
        else:  # pragma: no cover
            raise NotImplementedError(str(ty))
        return json_type

    def write_value_ref(self, value):
        """ Values are referenced by name only. """
        return value.name

    def write_block_ref(self, block):
        """ Blocks are referenced by name only. """
        return block.name
class Scope:
    """ A single naming scope used while rebuilding a module.

    Keeps two independent name tables: one for values and one for
    basic blocks.
    """

    def __init__(self):
        self.value_map = {}  # name -> value (externals, variables, instructions)
        self.block_map = {}  # name -> basic block
class DictReader:
    """ Construct IR-module from given json.

    Inverse of DictWriter.  Name references may point at values or
    blocks that have not been constructed yet; those are tracked as
    placeholders (undefined_values / pre-created blocks) and patched
    once the real object is registered.
    """

    def __init__(self):
        # self.subroutines = []
        # Stack of Scope objects: module scope, then one per subroutine.
        self.scopes = []
        # Forward references: name -> placeholder ir.Undefined value.
        self.undefined_values = {}

    def construct(self, json_txt):
        """ Build and return an ir.Module from JSON text. """
        d = json.loads(json_txt)
        name = d["name"]
        json_externals = d["externals"]
        json_variables = d["variables"]
        json_subroutines = d["subroutines"]
        self.enter_scope()
        module = ir.Module(name)
        for json_external in json_externals:
            external = self.construct_external(json_external)
            module.add_external(external)
        for json_variable in json_variables:
            variable = self.construct_variable(json_variable)
            module.add_variable(variable)
        for json_subroutine in json_subroutines:
            subroutine = self.construct_subroutine(json_subroutine)
            module.add_function(subroutine)
        # Every forward reference must have been resolved by now.
        assert not self.undefined_values
        self.leave_scope()
        return module

    def construct_external(self, json_external):
        """ Rebuild one external symbol from its "kind"-tagged dict. """
        etype = json_external["kind"]
        name = json_external["name"]
        if etype == "variable":
            external = ir.ExternalVariable(name)
        elif etype == "function":
            argument_types = [
                self.get_type(pt) for pt in json_external["parameter_types"]
            ]
            return_type = self.get_type(json_external["return_type"])
            external = ir.ExternalFunction(name, argument_types, return_type)
        elif etype == "procedure":
            argument_types = [
                self.get_type(pt) for pt in json_external["parameter_types"]
            ]
            external = ir.ExternalProcedure(name, argument_types)
        else:  # pragma: no cover
            raise NotImplementedError(etype)
        self.register_value(external)
        return external

    def construct_binding(self, json_binding):
        """ Map the serialized binding string back to the enum value. """
        bindings = {
            "local": ir.Binding.LOCAL,
            "global": ir.Binding.GLOBAL,
        }
        binding = bindings[json_binding]
        return binding

    def construct_variable(self, json_variable):
        """ Rebuild a module-level variable. """
        name = json_variable["name"]
        binding = self.construct_binding(json_variable["binding"])
        amount = json_variable["amount"]
        alignment = json_variable["alignment"]
        variable = ir.Variable(name, binding, amount, alignment)
        self.register_value(variable)
        return variable

    def construct_subroutine(self, json_subroutine):
        """ Rebuild a function/procedure, its parameters and blocks. """
        name = json_subroutine["name"]
        json_blocks = json_subroutine["blocks"]
        json_parameters = json_subroutine["parameters"]
        binding = self.construct_binding(json_subroutine["binding"])
        stype = json_subroutine["kind"]
        if stype == "function":
            return_type = self.get_type(json_subroutine["return_type"])
            subroutine = ir.Function(name, binding, return_type)
        elif stype == "procedure":
            subroutine = ir.Procedure(name, binding)
        else:  # pragma: no cover
            raise NotImplementedError(stype)
        # Register the subroutine in the *module* scope, then open a new
        # scope for its parameters, blocks and instructions.
        self.register_value(subroutine)
        # self.subroutines.append(subroutine)
        self.enter_scope()
        for json_parameter in json_parameters:
            name = json_parameter["name"]
            ty = self.get_type(json_parameter["type"])
            parameter = ir.Parameter(name, ty)
            self.register_value(parameter)
            subroutine.add_parameter(parameter)
        for json_block in json_blocks:
            block = self.construct_block(json_block, subroutine)
            # First block encountered becomes the entry block.
            if subroutine.entry is None:
                subroutine.entry = block
            subroutine.add_block(block)
        self.leave_scope()
        # self.subroutines.pop()
        return subroutine

    def construct_block(self, json_block, subroutine):
        """ Rebuild one basic block and append its instructions. """
        name = json_block["name"]
        json_instructions = json_block["instructions"]
        block = self.new_block(name, subroutine)
        for json_instruction in json_instructions:
            instruction = self.construct_instruction(json_instruction)
            block.add_instruction(instruction)
        return block

    def construct_instruction(self, json_instruction):
        """ Rebuild a single instruction from its "kind"-tagged dict.

        Value-producing instructions are registered by name so later
        (or earlier, via placeholders) references resolve to them.
        """
        itype = json_instruction["kind"]
        if itype == "load":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            address = self.get_value_ref(json_instruction["address"])
            instruction = ir.Load(address, name, ty)
            self.register_value(instruction)
        elif itype == "store":
            value = self.get_value_ref(json_instruction["value"])
            address = self.get_value_ref(json_instruction["address"])
            instruction = ir.Store(value, address)
        elif itype == "alloc":
            name = json_instruction["name"]
            amount = json_instruction["size"]
            alignment = json_instruction["alignment"]
            instruction = ir.Alloc(name, amount, alignment)
            self.register_value(instruction)
        elif itype == "addressof":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            src = self.get_value_ref(json_instruction["src"])
            instruction = ir.AddressOf(src, name)
            self.register_value(instruction)
        elif itype == "binop":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            a = self.get_value_ref(json_instruction["a"])
            operation = json_instruction["operation"]
            b = self.get_value_ref(json_instruction["b"])
            instruction = ir.Binop(a, operation, b, name, ty)
            self.register_value(instruction)
        elif itype == "unop":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            a = self.get_value_ref(json_instruction["a"])
            operation = json_instruction["operation"]
            instruction = ir.Unop(operation, a, name, ty)
            self.register_value(instruction)
        elif itype == "cast":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            value = self.get_value_ref(json_instruction["value"])
            instruction = ir.Cast(value, name, ty)
            self.register_value(instruction)
        elif itype == "const":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            value = json_instruction["value"]
            instruction = ir.Const(value, name, ty)
            self.register_value(instruction)
        elif itype == "literaldata":
            # Payload was ascii-encoded by DictWriter; decode back to bytes.
            name = json_instruction["name"]
            data = asc2bin(json_instruction["data"])
            instruction = ir.LiteralData(data, name)
            self.register_value(instruction)
        elif itype == "phi":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            instruction = ir.Phi(name, ty)
            for json_phi_input in json_instruction["inputs"]:
                # Pass ty so a forward-referenced value placeholder gets
                # the phi's type instead of the default pointer type.
                instruction.set_incoming(
                    self.get_block_ref(json_phi_input["block"]),
                    self.get_value_ref(json_phi_input["value"], ty=ty),
                )
            self.register_value(instruction)
        elif itype == "jump":
            target = self.get_block_ref(json_instruction["target"])
            instruction = ir.Jump(target)
        elif itype == "cjump":
            a = self.get_value_ref(json_instruction["a"])
            cond = json_instruction["condition"]
            b = self.get_value_ref(json_instruction["b"])
            lab_yes = self.get_block_ref(json_instruction["yes_block"])
            lab_no = self.get_block_ref(json_instruction["no_block"])
            instruction = ir.CJump(a, cond, b, lab_yes, lab_no)
        elif itype == "procedurecall":
            callee = self.get_value_ref(json_instruction["callee"])
            arguments = []
            for json_argument in json_instruction["arguments"]:
                arguments.append(self.get_value_ref(json_argument))
            instruction = ir.ProcedureCall(callee, arguments)
        elif itype == "functioncall":
            name = json_instruction["name"]
            ty = self.get_type(json_instruction["type"])
            callee = self.get_value_ref(json_instruction["callee"])
            arguments = []
            for json_argument in json_instruction["arguments"]:
                arguments.append(self.get_value_ref(json_argument))
            instruction = ir.FunctionCall(callee, arguments, name, ty)
            self.register_value(instruction)
        elif itype == "exit":
            instruction = ir.Exit()
        elif itype == "return":
            result = self.get_value_ref(json_instruction["result"])
            instruction = ir.Return(result)
        else:  # pragma: no cover
            raise NotImplementedError(itype)
        return instruction

    def get_type(self, json_type):
        """ Resolve a serialized type: basic types by name, blobs by layout. """
        tkind = json_type["kind"]
        if tkind == "basic":
            typ_map = {ty.name: ty for ty in ir.all_types}
            typ = typ_map[json_type["name"]]
        elif tkind == "blob":
            size = json_type["size"]
            alignment = json_type["alignment"]
            typ = ir.BlobDataTyp(size, alignment)
        else:  # pragma: no cover
            raise NotImplementedError(tkind)
        return typ

    def register_value(self, value):
        """ Bind *value* to its name; patch any earlier forward reference. """
        if value.name in self.undefined_values:
            old_value = self.undefined_values.pop(value.name)
            old_value.replace_by(value)
        assert value.name not in self.scopes[-1].value_map
        self.scopes[-1].value_map[value.name] = value

    def get_value_ref(self, name, ty=ir.ptr):
        """ Retrieve reference to a value.

        Searches enclosing scopes innermost-first; when the name is not
        known yet, returns (and remembers) an ir.Undefined placeholder
        that register_value will later replace.
        """
        for scope in reversed(self.scopes):
            if name in scope.value_map:
                value = scope.value_map[name]
                break
        else:
            if name in self.undefined_values:
                value = self.undefined_values[name]
            else:
                value = ir.Undefined(name, ty)
                self.undefined_values[name] = value
        return value

    def enter_scope(self):
        # Push a fresh naming scope (module- or subroutine-level).
        self.scopes.append(Scope())

    def leave_scope(self):
        self.scopes.pop()

    # def register_block(self, block):
    #    if block.name in self.block_map:
    #        #block.replace_incoming()
    #        old_block = self.block_map[block.name]
    #        old_block.replace_by(block)
    #    self.block_map[block.name] = block

    def new_block(self, name, subroutine):
        """ Create (or claim a forward-referenced) block for *subroutine*. """
        if name in self.scopes[-1].block_map:
            # Block was pre-created by an earlier jump/phi reference;
            # it must not already belong to a subroutine.
            block = self.scopes[-1].block_map[name]
            assert block.function is None
        else:
            block = ir.Block(name)
            self.scopes[-1].block_map[name] = block
        block.function = subroutine
        return block

    def get_block_ref(self, name):
        """ Resolve a block by name, pre-creating it when not yet built. """
        if name in self.scopes[-1].block_map:
            block = self.scopes[-1].block_map[name]
        else:
            block = ir.Block(name)
            self.scopes[-1].block_map[name] = block
        return block
| [
"[email protected]"
]
| |
da4a32962100827a0d1787dccf3a4722e5168197 | c7bde9b78057cc60c9ccd03145b5baf55284c875 | /blah.py | 356e579ee69aaab75cf027fcac689390897a3106 | []
| no_license | sseaver/tic_tac_toe | cb38f1cd11d879ee94eb91fbcd9b40a5736aaea8 | 7607123435419b0862d7e36373263f3592eeca9e | refs/heads/master | 2021-01-17T19:13:53.498545 | 2016-09-21T21:58:10 | 2016-09-21T21:58:10 | 68,764,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,016 | py |
def winning(board=None):
    """Report whether either player has three-in-a-row.

    Bug fixes vs. the original version:
    - the early ``else: return False`` made every check after column 0
      unreachable, so wins in other columns/rows were never detected;
    - every ``print`` sat *after* a ``return`` and was dead code.

    NOTE(review): diagonals were not checked in the original and are
    still not checked here -- TODO confirm the intended rules before
    adding them.

    Args:
        board: 3x3 list of lists of "X"/"O"/other marks.  Defaults to
            the module-level ``game_matrix`` used by the game loop, so
            existing ``winning()`` callers are unaffected.

    Returns:
        True when any column or row is all "X" or all "O", else False.
    """
    if board is None:
        board = game_matrix
    # All candidate lines: the three columns, then the three rows.
    lines = [[row[col] for row in board] for col in range(3)]
    lines.extend(board)
    for line in lines:
        if line == ["X", "X", "X"]:
            print("X has won the game!")
            return True
        if line == ["O", "O", "O"]:
            print("O has won the game!")
            return True
    return False
########################################################
# Interactive game loop.  Bug fix vs. the original: after X's move the
# result was computed but ignored, so O was still prompted to move on an
# already-won board; we now break out immediately when X wins.
game_over = winning()
draw_board(game_matrix)
print("X goes first!")
while not game_over:
    print("Pick a coordinate to place your 'X'")
    x_move(int(input("Row: ")), int(input("Column: ")))
    draw_board(game_matrix)
    game_over = winning()
    if game_over:
        break  # X won; do not let O move on a finished board.
    print("Pick a coordinate to place your 'O'")
    o_move(int(input("Row: ")), int(input("Column: ")))
    draw_board(game_matrix)
    game_over = winning()
| [
"[email protected]"
]
| |
fd8b773813e15c6655ea2f1fa0dd72bbe07d2e9c | 2d5171ac7f2640ed73b48aebf4b96e29d5cad818 | /ABC164/D.py | 1d6040a7c8e43dbb4d68df474ac22cf5856aacee | []
| no_license | kentahoriuchi/Atcorder | d7b8308424175f32d47f24bb15303695780e1611 | f6449d4e9dc7d92210497e3445515fe95b74c659 | refs/heads/master | 2023-06-06T09:26:46.963642 | 2021-06-13T15:08:04 | 2021-06-13T15:08:04 | 255,396,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | S = input()
def count_multiples_of_2019(s):
    """Count substrings of the digit string *s* whose value is a multiple of 2019.

    Classic suffix-mod trick (gcd(10, 2019) == 1): two suffixes with the
    same value mod 2019 delimit a substring divisible by 2019, so the
    answer is the sum of C(count, 2) over all residue classes.  The
    sentinel count for residue 0 represents the empty suffix.

    Runs in O(len(s)) time and O(1) extra space (2019 counters).
    """
    mod = 2019
    suffix_counts = [0] * mod
    suffix_counts[0] = 1  # empty suffix
    acc = 0
    for i in range(1, len(s) + 1):
        # acc = value of the suffix s[len(s)-i:] taken mod 2019.
        acc = (acc + int(s[len(s) - i]) * pow(10, i - 1, mod)) % mod
        suffix_counts[acc] += 1
    # c*(c-1)//2 is zero for c < 2, so no filtering is needed.
    return sum(c * (c - 1) // 2 for c in suffix_counts)


if __name__ == "__main__":
    # S is read from stdin at the top of the script; guarding the print
    # keeps the module importable (and testable) without stdin.
    print(count_multiples_of_2019(S))
| [
"[email protected]"
]
| |
a1453f77a8fd8eb705cbce7eeabba2f607626caa | ec0b8bfe19b03e9c3bb13d9cfa9bd328fb9ca3f1 | /res/packages/scripts/scripts/client/gui/prb_control/entities/sandbox/pre_queue/ctx.py | 72525ca7d0d926af5be80b5ae0c31108efc663d7 | []
| no_license | webiumsk/WOT-0.9.20.0 | de3d7441c5d442f085c47a89fa58a83f1cd783f2 | 811cb4e1bca271372a1d837a268b6e0e915368bc | refs/heads/master | 2021-01-20T22:11:45.505844 | 2017-08-29T20:11:38 | 2017-08-29T20:11:38 | 101,803,045 | 0 | 1 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,055 | py | # 2017.08.29 21:45:38 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/prb_control/entities/sandbox/pre_queue/ctx.py
from constants import QUEUE_TYPE
from gui.prb_control.entities.base.pre_queue.ctx import QueueCtx
from gui.shared.utils.decorators import ReprInjector
@ReprInjector.withParent(('getVehicleInventoryID', 'vInvID'), ('getWaitingID', 'waitingID'))
class SandboxQueueCtx(QueueCtx):
    """
    Sandbox enqueue context: a QueueCtx specialized for the sandbox
    queue that additionally carries the selected vehicle's inventory ID.
    """

    def __init__(self, vInventoryID, waitingID = ''):
        # Route this request to the sandbox queue on the base context.
        super(SandboxQueueCtx, self).__init__(entityType=QUEUE_TYPE.SANDBOX, waitingID=waitingID)
        # Inventory ID of the vehicle chosen for this enqueue request.
        self.__vInventoryID = vInventoryID

    def getVehicleInventoryID(self):
        """
        Gets the selected vehicle inventory ID
        """
        return self.__vInventoryID
# okay decompyling c:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\prb_control\entities\sandbox\pre_queue\ctx.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.08.29 21:45:38 Střední Evropa (letní čas)
| [
"[email protected]"
]
| |
78fc991a1014c50f1012814c34bcd40c58596c95 | 1a194118c60a22b0b4e039e3949403d00b2df8ea | /others/find_planned_roll.py | 3083ee0ac915357f643152f702fcf8eb5377049f | []
| no_license | tisobe/Ocat | 645e62cbd05a4b050c6ca45966271e4108d2fe74 | a1d66ee8163f73a23ce3964f1347365c8a4e36ae | refs/heads/master | 2020-12-24T06:47:07.753060 | 2016-11-17T18:24:54 | 2016-11-17T18:24:54 | 73,399,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,737 | py | #!/usr/bin/env /proj/sot/ska/bin/python
#############################################################################################################
# #
# find_planned_roll.py: find roll angle and the range from currently planned table #
# #
# author: t. isobe ([email protected]) #
# #
# Last Update: Nov 01, 2016 #
# #
#############################################################################################################
import sys
import os
import string
import re
import copy
import math
import Cookie
import unittest
import time
from os.path import join, dirname, realpath
BASE_DIR = dirname(dirname(realpath(__file__)))
#
#--- reading directory list
#
#   Each line is split on "::"; the second field is a variable name and
#   the first field its value, bound at module level via exec (this is
#   where base_dir and mta_dir come from).
#
path = '/proj/web-r2d2-v/lib/python2.7/site-packages/r2d2-v/ocatsite/static/dir_list_py'
f = open(path, 'r')
data = [line.strip() for line in f.readlines()]
f.close()
for ent in data:
    atemp = re.split('::', ent)
    var = atemp[1].strip()
    line = atemp[0].strip()
    # SECURITY NOTE(review): exec of file contents -- the directory list
    # file must stay trusted; never point `path` at user-writable data.
    exec "%s = %s" %(var, line)
#
#--- append path to a private folders
#
sys.path.append(base_dir)
sys.path.append(mta_dir)
import mta_common_functions as mcf
import convertTimeFormat as tcnv
#------------------------------------------------------------------------------------------------------
#-- find_planned_roll: find roll angle and the range from currently planned table --
#------------------------------------------------------------------------------------------------------
def find_planned_roll():
    """
    find roll angle and the range from currently planned table

    input:  none, but reads /proj/web-icxc/htdocs/mp/lts/lts-current.html
    output: <BASE_DIR>/ocatsite/data_save/mp_long_term_roll --- one
            "obsid:roll:range" line per observation

    Fix vs. the original: file handles are now managed with ``with`` so
    they are closed even when parsing raises (the originals leaked on
    error); the roll/range extraction is performed only for lines that
    actually contain an obsid link, so a stale obsid is never re-written.
    """
    #
    #--- read the currently planned long-term schedule page
    #
    with open('/proj/web-icxc/htdocs/mp/lts/lts-current.html', 'r') as f:
        data = [line.strip() for line in f.readlines()]

    ofile = BASE_DIR + '/ocatsite/data_save/mp_long_term_roll'
    with open(ofile, 'w') as fo:
        for ent in data:
            #
            #--- after "LTS changes", the file lists different information
            #
            if re.search('LTS changes', ent) is not None:
                break
            #
            #--- find obsid; only such lines carry roll/range data
            #
            mc = re.search('target.cgi', ent)
            if mc is not None:
                atemp = re.split('target_param.cgi\?', ent)
                btemp = re.split('">', atemp[1])
                obsid = btemp[0]
                #
                #--- locate the instrument column (ACIS/HRC) in the row
                #
                btemp = re.split('\s+', ent)
                bcnt = 0
                for test in btemp:
                    mc1 = re.search('ACIS', test)
                    mc2 = re.search('HRC', test)
                    if (mc1 is not None) or (mc2 is not None):
                        break
                    else:
                        bcnt += 1
                #
                #--- count back from the instrument column to find the
                #--- planned roll and its range
                #
                pl_roll = btemp[bcnt - 4]
                pl_range = btemp[bcnt - 3]
                fo.write(obsid + ':' + pl_roll + ':' + pl_range + '\n')
#------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
    # Run the extraction when invoked as a script (no command-line args).
    find_planned_roll()
| [
"[email protected]"
]
| |
b16a7d2303c6584ceaae6c79e8bd71faad1e197f | ba0cbdae81c171bd4be7b12c0594de72bd6d625a | /MyToontown/py2/toontown/effects/FireworkSparkles.pyc.py | 7c201e409d069ab5b1d3220e537f0c082007c3e2 | []
| no_license | sweep41/Toontown-2016 | 65985f198fa32a832e762fa9c59e59606d6a40a3 | 7732fb2c27001264e6dd652c057b3dc41f9c8a7d | refs/heads/master | 2021-01-23T16:04:45.264205 | 2017-06-04T02:47:34 | 2017-06-04T02:47:34 | 93,279,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,693 | py | # 2013.08.22 22:19:53 Pacific Daylight Time
# Embedded file name: toontown.effects.FireworkSparkles
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.particles import ParticleEffect, Particles, ForceGroup
from EffectController import EffectController
from PooledEffect import PooledEffect
import random
class FireworkSparkles(PooledEffect, EffectController):
    """Pooled particle effect: a short burst of fading sparkle sprites.

    NOTE(review): this module is decompiled source; the comments below
    are reviewer annotations, not original documentation.
    """
    __module__ = __name__

    def __init__(self):
        PooledEffect.__init__(self)
        EffectController.__init__(self)
        # Sprite card every sparkle particle is rendered with.
        model = loader.loadModel('phase_4/models/props/tt_m_efx_ext_fireworkCards')
        self.card = model.find('**/tt_t_efx_ext_particleSpark_sharp')
        self.cardScale = 16.0
        self.setDepthWrite(0)
        self.setColorScaleOff()
        self.setLightOff()
        self.startDelay = 0.0
        self.effectScale = 1.0
        self.effectColor = Vec4(1, 1, 1, 1)
        # One particle system ("p0") inside one effect node ("f").
        self.f = ParticleEffect.ParticleEffect('Sparkles')
        self.f.reparentTo(self)
        self.p0 = Particles.Particles('particles-2')
        self.p0.setFactory('PointParticleFactory')
        self.p0.setRenderer('SpriteParticleRenderer')
        self.p0.setEmitter('SphereVolumeEmitter')
        self.f.addParticles(self.p0)
        # Constant downward force so sparkles arc like falling debris.
        f0 = ForceGroup.ForceGroup('Gravity')
        force0 = LinearVectorForce(Vec3(0.0, 0.0, -15.0), 1.0, 0)
        force0.setVectorMasks(1, 1, 1)
        force0.setActive(1)
        f0.addForce(force0)
        self.f.addForceGroup(f0)
        # --- particle system parameters ---
        self.p0.setPoolSize(64)
        self.p0.setBirthRate(0.02)
        self.p0.setLitterSize(10)
        self.p0.setLitterSpread(0)
        self.p0.setSystemLifespan(0.0)
        self.p0.setLocalVelocityFlag(1)
        self.p0.setSystemGrowsOlderFlag(0)
        self.p0.factory.setLifespanBase(1.5)
        self.p0.factory.setLifespanSpread(1.0)
        self.p0.factory.setMassBase(1.0)
        self.p0.factory.setMassSpread(0.0)
        self.p0.factory.setTerminalVelocityBase(400.0)
        self.p0.factory.setTerminalVelocitySpread(0.0)
        # --- renderer: additive-blended sprites fading out over life ---
        self.p0.renderer.setAlphaMode(BaseParticleRenderer.PRALPHAOUT)
        self.p0.renderer.setUserAlpha(1.0)
        self.p0.renderer.setColorBlendMode(ColorBlendAttrib.MAdd, ColorBlendAttrib.OIncomingAlpha, ColorBlendAttrib.OOne)
        self.p0.renderer.setFromNode(self.card)
        self.p0.renderer.setColor(Vec4(1.0, 1.0, 1.0, 1.0))
        self.p0.renderer.setXScaleFlag(1)
        self.p0.renderer.setYScaleFlag(1)
        self.p0.renderer.setAnimAngleFlag(1)
        self.p0.renderer.setNonanimatedTheta(0.0)
        self.p0.renderer.setAlphaBlendMethod(BaseParticleRenderer.PPBLENDLINEAR)
        self.p0.renderer.setAlphaDisable(0)
        # Ramp color from black to effectColor over the first 10% of life.
        self.p0.renderer.getColorInterpolationManager().addLinear(0.0, 0.1, Vec4(0, 0, 0, 0), self.effectColor, 1)
        # --- emitter: radiate outward from the origin ---
        self.p0.emitter.setEmissionType(BaseParticleEmitter.ETRADIATE)
        self.p0.emitter.setAmplitudeSpread(0.0)
        self.p0.emitter.setOffsetForce(Vec3(0.0, 0.0, 0.0))
        self.p0.emitter.setExplicitLaunchVector(Vec3(1.0, 0.0, 0.0))
        self.p0.emitter.setRadiateOrigin(Point3(0.0, 0.0, 0.0))
        self.setEffectScale(self.effectScale)
        self.setEffectColor(self.effectColor)

    def createTrack(self):
        # Burst for 0.3s, then raise the birth interval to 100s --
        # effectively stopping emission (presumably; birth rate here is
        # seconds between litters -- confirm Panda3D semantics) -- and
        # let live particles fade before returning to the pool.
        self.track = Sequence(Wait(self.startDelay), Func(self.p0.setBirthRate, 0.03), Func(self.p0.clearToInitial), Func(self.f.start, self, self), Wait(0.3), Func(self.p0.setBirthRate, 100.0), Wait(2.5), Func(self.cleanUpEffect))

    def setEffectScale(self, scale):
        """Scale sprite size, launch amplitude and emitter radius together."""
        self.effectScale = scale
        self.p0.renderer.setInitialXScale(1.2 * self.cardScale * scale)
        self.p0.renderer.setFinalXScale(1.5 * self.cardScale * scale)
        self.p0.renderer.setInitialYScale(1.5 * self.cardScale * scale)
        self.p0.renderer.setFinalYScale(1.2 * self.cardScale * scale)
        self.p0.emitter.setAmplitude(25.0 * scale)
        self.p0.emitter.setRadius(400.0 * scale)

    def setRadius(self, radius):
        """Override the emitter radius independently of effect scale."""
        self.p0.emitter.setRadius(radius)

    def setEffectColor(self, color):
        """Set the sparkle tint used by the renderer."""
        self.effectColor = color
        self.p0.renderer.setColor(self.effectColor)

    def cleanUpEffect(self):
        # Stop the effect and return this instance to its pool when pooled.
        EffectController.cleanUpEffect(self)
        if self.pool and self.pool.isUsed(self):
            self.pool.checkin(self)

    def destroy(self):
        EffectController.destroy(self)
        PooledEffect.destroy(self)
# okay decompyling C:\Users\Maverick\Documents\Visual Studio 2010\Projects\Unfreezer\py2\toontown\effects\FireworkSparkles.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2013.08.22 22:19:53 Pacific Daylight Time
| [
"[email protected]"
]
| |
6ea4fb9c6455a858013bd542634687b28ef21118 | 90c6262664d013d47e9a3a9194aa7a366d1cabc4 | /tests/storage/cases/test_KT1CM1g1o9RKDdtDKgcBWE59X2KgTc2TcYtC_alpha.py | 7fd7dede51a72fd1062c3b8743dc3d392f7fa5e8 | [
"MIT"
]
| permissive | tqtezos/pytezos | 3942fdab7aa7851e9ea81350fa360180229ec082 | a4ac0b022d35d4c9f3062609d8ce09d584b5faa8 | refs/heads/master | 2021-07-10T12:24:24.069256 | 2020-04-04T12:46:24 | 2020-04-04T12:46:24 | 227,664,211 | 1 | 0 | MIT | 2020-12-30T16:44:56 | 2019-12-12T17:47:53 | Python | UTF-8 | Python | false | false | 1,160 | py | from unittest import TestCase
from tests import get_data
from pytezos.michelson.converter import build_schema, decode_micheline, encode_micheline, micheline_to_michelson
class StorageTestKT1CM1g1o9RKDdtDKgcBWE59X2KgTc2TcYtC_alpha(TestCase):
    """Storage round-trip checks for contract KT1CM1g1o9RKDdtDKgcBWE59X2KgTc2TcYtC.

    Auto-generated test: the fixture holds the contract's script
    (code + storage) captured from mainnet.
    """

    @classmethod
    def setUpClass(cls):
        cls.maxDiff = None
        # Load the captured contract script once for all tests.
        cls.contract = get_data('storage/mainnet/KT1CM1g1o9RKDdtDKgcBWE59X2KgTc2TcYtC_alpha.json')

    def test_storage_encoding_KT1CM1g1o9RKDdtDKgcBWE59X2KgTc2TcYtC_alpha(self):
        """Decoding then re-encoding the storage must reproduce the original expression."""
        type_expr = self.contract['script']['code'][1]
        val_expr = self.contract['script']['storage']
        schema = build_schema(type_expr)
        decoded = decode_micheline(val_expr, type_expr, schema)
        actual = encode_micheline(decoded, schema)
        self.assertEqual(val_expr, actual)

    def test_storage_schema_KT1CM1g1o9RKDdtDKgcBWE59X2KgTc2TcYtC_alpha(self):
        """The type section must build into a schema without raising."""
        _ = build_schema(self.contract['script']['code'][0])

    def test_storage_format_KT1CM1g1o9RKDdtDKgcBWE59X2KgTc2TcYtC_alpha(self):
        """Micheline -> Michelson pretty-printing must succeed for code and storage."""
        _ = micheline_to_michelson(self.contract['script']['code'])
        _ = micheline_to_michelson(self.contract['script']['storage'])
| [
"[email protected]"
]
| |
ff838d42226d27d960c29df551b9804f4672fa7b | a3b749c69f9ed3d10e1013754428b3a07ef49137 | /presupuestos/urls_presupuestos.py | 6fc26f4e85dda9c6aa5b0fe7ea4920f69b929b79 | []
| no_license | adrian052/SPSD | 7dd8b4aece3ad2e3ece34624f86d488c0f368dcf | f93dee58ada43abe0e3cc06ca3e4ef2d17006791 | refs/heads/main | 2023-05-27T16:40:09.995245 | 2021-06-08T22:32:59 | 2021-06-08T22:32:59 | 315,161,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | from django.urls import path
from presupuestos.views import lista_presupuestos, nuevo_presupuesto, \
eliminar_presupuesto, editar_presupuesto
# Namespace used when reversing, e.g. "presupuestos:lista".
app_name = 'presupuestos'

urlpatterns = [
    # List all budgets.
    path('lista/', lista_presupuestos, name='lista'),
    # Create a new budget.
    path('nuevo/', nuevo_presupuesto, name='nuevo'),
    # Delete / edit the budget identified by its year (anio).
    path('eliminar/<int:anio>', eliminar_presupuesto, name='eliminar'),
    path('editar/<int:anio>', editar_presupuesto, name="editar"),
]
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.