blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
33dc3dfc91acdf0546b4037e41f01b34a182023c | ffeacff13af906bf5e7a02018a2543902f5dc8ef | /01-Python核心编程/代码/01-Python基础入门/02-变量.py | 7bc1c7bc22e85be68c5320b18334c3329a42044d | [
"MIT"
] | permissive | alikslee/Python-itheima-2019 | 457080ee83d0f5f7eaba426da0ea86405d2d5248 | 691035d5ff0e362139c7dbe82f730ec0e060fd2e | refs/heads/main | 2023-01-01T16:27:20.062463 | 2020-10-22T16:20:29 | 2020-10-22T16:20:29 | 305,959,901 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | """
1. 定义变量
语法:变量名 = 值
2. 使用变量
3. 看变量的特点
"""
# 定义变量:存储数据TOM
my_name = 'TOM'
print(my_name)
# 定义变量:存储数据 黑马程序员
schoolName = '我是黑马程序员,我爱Python'
print(schoolName)
| [
"[email protected]"
] | |
a6eea0af306527465a1dc805ddefe392e87b3795 | ed7a9902168b0a5340e1bd293d3fa93cedd73149 | /hr/__init__.py | e9904d4c48e398373c63c30b70ccd20917a48a73 | [] | no_license | srikanthpragada/PYTHON_07_SEP_2018_DEMO | ec15129de5118d0765dfe920942f78f5b903ab79 | fe3782186d71a425df5f99f93e398bdc30716808 | refs/heads/master | 2020-03-28T10:10:26.637869 | 2018-10-16T03:31:51 | 2018-10-16T03:31:51 | 148,088,136 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 64 | py |
# Package initialiser for ``hr``: announces the import (side effect) and
# restricts ``from hr import *`` to the two public submodules.
print("Importing package hr")
__all__ = ['jobfuns', 'empfuns']
"[email protected]"
] | |
9d0128dc70eb0120c6494ed71e98e88372a65f88 | add5790098575fc81f774605944a682c7b301b2a | /scripts/chaos.py | 4206aab19593cd518631462d0e19aadb3ba05638 | [] | no_license | firemark/python-interpreters-benchmark | 85135df95568866527a7cfadea3a26d62f6baad2 | eb0c7ec3153579010c1e215f6030a0fc9f39dd91 | refs/heads/master | 2016-09-06T11:47:54.585586 | 2015-08-23T23:06:22 | 2015-08-23T23:06:22 | 35,060,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 824 | py | #from https://gist.github.com/aliles/1087520#file-chaos-py
"Logistic map iteration timing"
from itertools import islice
import time
def logistic_map(r, x):
    """Generate successive iterates of the logistic map x -> r*x*(1-x).

    ``r`` must be positive and the seed ``x`` strictly between 0 and 1;
    yields an endless stream of iterates starting *after* the seed.
    """
    assert r > 0, 'R must be a positive number'
    assert 0 < x < 1, 'X must be a number between 0 and 1'
    current = x
    while True:
        current = r * current * (1 - current)
        yield current
if __name__ == '__main__':
    # Benchmark driver (Python 2 only: relies on ``xrange``, ``time.clock``
    # and print statements).  Times 15 batches of 500 logistic-map steps,
    # tracking the overall min/max iterate, and reports per-batch times
    # converted to milliseconds.
    times = []
    minimum = float("inf")
    maximum = float("-inf")
    iterator = logistic_map(3.65, 0.01)
    for loop in xrange(15):
        start = time.clock()
        for x in islice(iterator, 500):
            minimum = min(minimum, x)
            maximum = max(maximum, x)
        stop = time.clock()
        times.append(stop - start)
    print "Iteration times"
    print ",".join(str(i * 10**3) for i in times)
    print "X range"
    print minimum, maximum
| [
"[email protected]"
] | |
9a88300c76be9816dc8f4201016a2df5e67a2647 | fe5200ebd83028745f646643a086b144a54b5bbe | /source/chap7/files/graphdfs.py | a644ec6b83042855ab3813d3913ab69936d2979f | [] | no_license | kentdlee/CS2Plus | 7561220fd43b611f62a9fdc25b86ae731aa35de9 | 2056f47a6c9e7c0d3f25a093011c3bb84c909a2f | refs/heads/master | 2023-01-27T22:03:33.116038 | 2023-01-19T03:58:49 | 2023-01-19T03:58:49 | 228,526,240 | 3 | 4 | null | 2023-01-19T00:47:25 | 2019-12-17T03:36:30 | HTML | UTF-8 | Python | false | false | 7,224 | py | from xml.dom import minidom
import turtle
import math
from xml.dom import minidom
import turtle
def drawArrow(turtle):
    # Draw a small filled triangular arrowhead at the turtle's current
    # position, pointing along its current heading (used to mark the
    # direction of a directed edge).  NB: the parameter shadows the
    # ``turtle`` module name inside this function.
    x1 = turtle.xcor()          # arrow tip
    y1 = turtle.ycor()
    turtle.right(180)
    turtle.forward(8)           # back up along the edge to the arrow base
    turtle.left(90)
    turtle.forward(3)           # one base corner, 3 units to the side
    x2 = turtle.xcor()
    y2 = turtle.ycor()
    turtle.backward(6)          # opposite base corner
    x3 = turtle.xcor()
    y3 = turtle.ycor()
    # Fill the triangle (tip, corner, corner).
    turtle.begin_fill()
    turtle.goto(x1,y1)
    turtle.goto(x2,y2)
    turtle.goto(x3,y3)
    turtle.end_fill()
class Vertex:
    """A graph node with screen coordinates, an integer display label and
    its incident edges; ``previous``/``previousEdge`` are filled in by the
    search algorithms in ``main`` to record the discovered path."""
    def __init__(self,vertexId,x,y,label):
        self.vertexId = vertexId
        self.x = x
        self.y = y
        self.label = int(label)
        self.edges = []             # Edge objects attached in main()
        self.previous = None        # predecessor Vertex set during traversal
        self.previousEdge = None    # edge back to the predecessor (Dijkstra)
    def draw(self,turtle,color="white"):
        # Render the vertex as a filled circle of radius 20 with the label
        # written just above centre.
        x = self.x
        y = self.y
        turtle.penup()
        turtle.goto(x,y-20)         # turtle.circle draws from the bottom edge
        turtle.pendown()
        turtle.color("black")
        turtle.fillcolor(color)
        turtle.begin_fill()
        turtle.circle(20)
        turtle.end_fill()
        turtle.penup()
        turtle.goto(x+2,y+12)
        turtle.write(self.label,align="center",font=("Arial",12,"bold"))
    def __str__(self):
        return "Vertex: " + "\n label: " + str(self.label) + "\n id: " + str(self.vertexId) + "\n x: " + str(self.x) + "\n y: " + str(self.y)
class Edge:
    """A directed, optionally weighted edge between two vertex ids
    (``v1`` = head, ``v2`` = tail, as read from the XML in ``main``)."""
    def __init__(self,v1,v2,weight=0):
        self.v1 = v1
        self.v2 = v2
        self.weight = weight        # 0 means "unweighted"; not drawn
    def __lt__(self,other):
        # Order edges by weight (e.g. for priority queues).
        return self.weight < other.weight
    def __str__(self):
        return "Edge: " + "\n v1: " + str(self.v1) + "\n v2: " + str(self.v2)
    def draw(self,turtle,vertexDict,color="grey",width=1):
        # Draw the edge from v2's circle towards v1, stopping 20 units
        # short so the arrowhead sits on the vertex circle's rim.
        # NB: the ``turtle`` parameter shadows the turtle module here.
        turtle.color(color)
        turtle.width(width)
        x1 = float(vertexDict[self.v1].x)
        y1 = float(vertexDict[self.v1].y)
        x2 = float(vertexDict[self.v2].x)
        y2 = float(vertexDict[self.v2].y)
        x = x1-x2
        y = y1-y2
        d = math.sqrt(y**2 + x**2)  # distance between the two vertices
        # Heading from v2 to v1; acos gives 0..180, so reflect for the
        # lower half-plane (screen y grows downward in this world coords).
        angle = (math.acos(x/d)/math.pi)*180
        if y1 < y2:
            angle = angle + 2 * (180-angle)
        turtle.penup()
        turtle.goto(x2,y2)
        turtle.pendown()
        turtle.color(color)
        turtle.setheading(angle)
        turtle.penup()
        turtle.forward(0)
        turtle.pendown()
        turtle.forward(d-20)
        drawArrow(turtle)
        turtle.width(1)
        if self.weight != 0:
            # Write the weight at the edge's midpoint.
            x = (x1 + x2) / 2
            y = (y1 + y2) / 2
            turtle.penup()
            turtle.goto(x+1,y+12)
            turtle.color("black")
            turtle.write(str(self.weight),align="center",font=("Arial",12,"bold"))
        turtle.setheading(0)
def minCost(unvisited,vertexDict):
    """Return the id of the unvisited vertex with the smallest ``cost``
    attribute, or -1 when ``unvisited`` is empty.

    Fix: the original initialised ``minVal`` from the undefined name
    ``infinity`` and would raise NameError on the first call; use the
    standard float infinity instead.
    """
    minVal = float("inf")
    minId = -1
    for ident in unvisited:
        if vertexDict[ident].cost < minVal:
            minId = ident
            minVal = vertexDict[ident].cost
    return minId
def main():
    """Load a directed, weighted graph from neiowagraph.xml, draw it with
    turtle graphics, run an iterative depth-first search from the vertex
    labelled 0 to vertex id 9, and colour the discovered path blue.

    Python 2 script.  A commented-out Dijkstra implementation (which uses
    ``minCost`` and the vertices' ``previousEdge`` links) is kept below
    for reference.
    """
    # --- parse the XML description of the graph ---
    xmldoc = minidom.parse("neiowagraph.xml")
    graph = xmldoc.getElementsByTagName("Graph")[0]
    vertices = graph.getElementsByTagName("Vertices")[0].getElementsByTagName("Vertex")
    edges = graph.getElementsByTagName("Edges")[0].getElementsByTagName("Edge")
    width = float(graph.attributes["width"].value)
    height = float(graph.attributes["height"].value)
    # --- set up the turtle screen; world coords put (0,0) top-left ---
    turtle.setup(0.65*width,0.65*height)
    t = turtle.Turtle()
    screen = t.getscreen()
    screen.setworldcoordinates(0,height,width,0)
    screen.title("A Weighted, Directed Graph")
    screen.tracer(0)
    t.speed(100)
    t.ht()
    # --- build the vertex dictionary keyed by vertexId ---
    vertexDict = {}
    vCount = 0
    for vertex in vertices:
        vertexId = int(vertex.attributes["vertexId"].value)
        x = float(vertex.attributes["x"].value)
        y = float(vertex.attributes["y"].value)
        label = vertex.attributes["label"].value
        v = Vertex(vertexId, x, y, label)
        vertexDict[vertexId] = v
        vCount += 1
    # --- build edges; each edge is attached to its tail vertex (v2) ---
    edgeList = []
    for edgeNode in edges:
        edge = Edge(int(edgeNode.attributes["head"].value), int(edgeNode.attributes["tail"].value))
        if "weight" in edgeNode.attributes:
            edge.weight = float(edgeNode.attributes["weight"].value)
        vertexDict[edge.v2].edges.append(edge)
        edgeList.append(edge)
    for edge in edgeList:
        edge.draw(t,vertexDict)
    for vertexId in vertexDict:
        vertex = vertexDict[vertexId]
        vertex.draw(t,(0.8,1,0.4))
    # Run Depth First Search
    visited = []
    stack = []
    target = vertexDict[9]
    # Source is the vertex whose label is 0 (assumed to exist in the XML).
    for ident in vertexDict:
        vertex = vertexDict[ident]
        if vertex.label == 0:
            source = vertex
    stack.append(source)
    found = False
    while (len(stack) > 0) and not found:
        current = stack.pop()
        print(current)
        visited.append(current.vertexId)
        if current.vertexId == target.vertexId:
            found = True
        else:
            for edge in current.edges:
                vId = edge.v1
                vertex = vertexDict[vId]
                vertex.previous = current
                if not vId in visited:
                    stack.append(vertex)
    if found:
        # Walk the ``previous`` links back from target to source and
        # redraw each path edge in blue.
        print("Found target")
        current = target
        while current.vertexId != source.vertexId:
            next = current
            current = current.previous
            print("Coloring edge:", current)
            for edge in current.edges:
                if edge.v1 == next.vertexId:
                    print("found edge: ", edge)
                    edge.draw(t,vertexDict,"blue",2)
    ## Run Dijkstra's Algorithm
    #previous = list(range(30))
    #visited = []
    #source.cost = 0
    #unvisited = [source.vertexId]
    #while len(unvisited) > 0:
    #currentId = minCost(unvisited,vertexDict)
    #current = vertexDict[currentId]
    #print("Examining: ", current)
    #visited.append(currentId)
    #unvisited.remove(currentId)
    #for edge in current.edges:
    #if edge.v1 == currentId:
    #adjacentId = edge.v2
    #else:
    #adjacentId = edge.v1
    #if not adjacentId in visited:
    #adjacent = vertexDict[adjacentId]
    #if current.cost + edge.weight < adjacent.cost:
    #adjacent.cost = current.cost + edge.weight
    #adjacent.previous = currentId
    #adjacent.previousEdge = edge
    #if not adjacentId in unvisited:
    #unvisited.append(adjacentId)
    #for vertexId in vertexDict:
    #vertex = vertexDict[vertexId]
    #if vertex.previousEdge != None:
    #vertex.previousEdge.draw(t,vertexDict,"purple",2)
    # Redraw the vertices on top of the (possibly recoloured) edges.
    for vertexId in vertexDict:
        vertex = vertexDict[vertexId]
        vertex.draw(t,(0.8,1,0.4))
        #vertex.drawCost(t,"orange")
    screen.update()
    screen.exitonclick()
| [
"[email protected]"
] | |
cc44e29031cd48478c4008a214d3ab556eb48bc9 | acff427a36d6340486ff747ae9e52f05a4b027f2 | /main/multimedia/misc/gd/actions.py | 042228023565d91f338fabc969069644018d6479 | [] | no_license | jeremie1112/pisilinux | 8f5a03212de0c1b2453132dd879d8c1556bb4ff7 | d0643b537d78208174a4eeb5effeb9cb63c2ef4f | refs/heads/master | 2020-03-31T10:12:21.253540 | 2018-10-08T18:53:50 | 2018-10-08T18:53:50 | 152,126,584 | 2 | 1 | null | 2018-10-08T18:24:17 | 2018-10-08T18:24:17 | null | UTF-8 | Python | false | false | 790 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import get
def setup():
    # Regenerate the autotools build system, then configure GD with the
    # PNG/JPEG/FreeType/Fontconfig backends enabled and XPM disabled.
    shelltools.system("./bootstrap.sh")
    autotools.configure("--disable-static \
                         --with-fontconfig \
                         --with-png \
                         --with-freetype \
                         --with-jpeg \
                         --without-xpm")
def build():
    # Compile the configured sources with make.
    autotools.make()
def install():
    # Install into the package staging directory and ship docs/HTML.
    autotools.rawInstall("DESTDIR=%s" % get.installDIR())
    pisitools.dohtml(".")
    pisitools.dodoc("COPYING", "README*")
| [
"[email protected]"
] | |
0b6fc9ada6e11aede244fdf4656ba71c235dc6d2 | e1efc8e0b0e4629dea61504fbc816c0527691bd9 | /19.操作系统/6-计算机组成.py | 599824c990306f94405ce44162e609db550b9291 | [] | no_license | xiongmengmeng/xmind-technology | 2bb67a0bf92cfd660cac01f8ab3a2454423ccba5 | e2fdb6987ef805a65f0a4feb52d84383853f4b77 | refs/heads/main | 2023-07-31T07:10:29.868120 | 2021-09-11T08:18:17 | 2021-09-11T08:18:17 | 307,636,242 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,463 | py | import os,sys
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0,parentdir)
import xmind
xmind_name="操作系统"
w = xmind.load(os.path.dirname(os.path.abspath(__file__))+"\\"+xmind_name+".xmind")
s2=w.createSheet()
s2.setTitle("计算机组成")
r2=s2.getRootTopic()
r2.setTitle("计算机组成")
content={
'操作系统提供了几种抽象模型':[
{'文件':[
'对I/O设备的抽象'
]},
{'虚拟内存':[
'对程序存储器的抽象'
]},
{'进程':[
'对一个正在运行程序的抽象'
]},
{'虚拟机':[
'对整个操作系统的抽象'
]}
],
'计算机系统组成':[
{'硬件':[
'芯片、电路板、磁盘、键盘、显示器'
]},
{'操作系统':[
'为用户层和硬件提供各自的接口,屏蔽了不同应用和硬件之间的差异,达到统一标准的作用'
]},
'软件'
],
'计算机的两种运行模式':[
{'内核态':[
'操作系统具有硬件的访问权,可以执行机器能够运行的任何指令',
'软件中最基础的部分是操作系统,运行在内核态中'
]},
{'用户态':[
'软件的其余部分运行在用户态下'
]}
],
'计算机硬件(五部分)':[
{'运算器':[
'功能:对数据和信息进行加工和运算'
'组成:算数逻辑单元+寄存器',
'基本运算包括加、减、乘、除、移位等操作'
]},
{'控制器':[
'功能:按照指定顺序改变主电路或控制电路的部件,控制命令执行',
'组成:程序计数器、指令寄存器、解码译码器'
]},
{'存储器':[
'保存信息',
{'两种':[
{'主存(内存):':[
'一个临时存储设备,CPU 主要交互对象',
'物理组成上说:内存是由一系列 DRAM(dynamic random access memory) 动态随机存储构成的集合',
'逻辑上说:内存是一个线性的字节数组,有它唯一的地址编号,从0开始'
]},
'外存:硬盘软盘'
]}
]},
{'输入设备':[
'给计算机获取外部信息的设备',
'组成:键盘和鼠标'
]},
{'输出设备':[
'给用户呈现根据输入设备获取的信息经过一系列的计算后得到显示的设备',
'组成:显示器、打印机'
]},
{'注':[
{'处理器(Processor)/CPU(central processing unit)':[
'运算器+控制器',
'解释(并执行)存储在主存储器中的指令的引擎'
]},
{'I/O设备':[
'系统和外部世界的连接',
{'四类':[
'用于用户输入的键盘',
'用于用户输入的鼠标',
'用于用户输出的显示器',
'磁盘驱动:用来长时间的保存数据和程序,刚开始的时候,可执行程序就保存在磁盘上',
]}
]},
]}
],
'计算机硬件(其它)':[
{'总线(Buses)':[
'在组件之间来回传输字节信息,通常被设计成传送定长(4或8字节)的字节块'
]},
{'控制器(controller) /适配器(Adapter)':[
'I/O设备连接I/O总线'
]}
]
}
#构建xmind
xmind.build(content,r2)
#保存xmind
xmind.save(w,os.path.dirname(os.path.abspath(__file__))+"\\"+xmind_name+".xmind") | [
"[email protected]"
] | |
152a6146d2345f4c3caf0ae61e212e199ced1eb1 | 34f3cfeac7fd5a7bbbc5e362bef8bc316f81c1d0 | /examples/hello_world.py | a949333bb699c9d60f65578b4953d65ee7b47ca1 | [
"MIT"
] | permissive | eerimoq/asn1tools | 860b3623955c12dfb9763ff4e20a805beb7436ba | de25657f7c79100d1ba5312dd7474ff3e0d0ad2e | refs/heads/master | 2023-03-16T09:28:11.924274 | 2023-03-10T20:24:34 | 2023-03-10T20:24:34 | 99,156,277 | 272 | 98 | MIT | 2023-01-03T13:40:36 | 2017-08-02T20:05:05 | Python | UTF-8 | Python | false | false | 758 | py | #!/usr/bin/env python
"""The asn1tools hello world example.
Example execution:
$ ./hello_world.py
Message: {'number': 2, 'text': 'Hi!'}
Encoded: 010203486921
Decoded: {'number': 2, 'text': 'Hi!'}
$
"""
# Python 2/3 compatible demo: compile an inline ASN.1 specification and
# round-trip one message through the unaligned PER ('uper') codec.
from __future__ import print_function
from binascii import hexlify
import asn1tools
# ASN.1 module defining a single SEQUENCE type called Message.
SPECIFICATION = '''
HelloWorld DEFINITIONS ::= BEGIN
Message ::= SEQUENCE {
number INTEGER,
text UTF8String
}
END
'''
# Compile the spec for the 'uper' codec, then encode and decode a sample.
hello_world = asn1tools.compile_string(SPECIFICATION, 'uper')
message = {'number': 2, 'text': u'Hi!'}
encoded = hello_world.encode('Message', message)
decoded = hello_world.decode('Message', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
| [
"[email protected]"
] | |
fc48e39bb0aee373153cef35813f955f5833f74f | 1aa6e732645f4603c05a1c9262f6fbb1af76b056 | /patchinfo/Sample.py | cf14f4943aea03a06512c40ef63dc666be04ea36 | [] | no_license | nauddin257/DualBootPatcher | f2831bdc72d8f94787a1d3ad94d0d85103316dd5 | 024af7ecb38ba6b4e3f1ae16ab81e32cd213864f | refs/heads/master | 2020-12-11T07:26:46.916515 | 2013-11-14T03:54:41 | 2013-11-14T03:54:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,179 | py | # This file describes the ROM/Kernel/etc with the following information:
# - Pattern in file name
# - Patch to use
# - Type of ramdisk (if any)
# - Message to display to user
#
# Please copy this file to a new one before editing.
from fileinfo import FileInfo
import re
file_info = FileInfo()
# This is the regular expression for detecting a ROM using the filename. Here
# are some basic rules for how regex's work:
# Pattern | Meaning
# -----------------------------------------------
# ^ | Beginning of filename
# $ | End of filename
# . | Any character
# \. | Period
# [a-z] | Lowercase English letters
# [a-zA-Z] | All English letters
# [0-9] | Numbers
# * | 0 or more of previous pattern
# + | 1 or more of previous pattern
# We'll use the SuperSU zip as an example.
#
# Filename: UPDATE-SuperSU-v1.65.zip
# Pattern: ^UPDATE-SuperSU-v[0-9\.]+\.zip$
#
# So, when we go patch a file, if we see "UPDATE-SuperSU-v" at the beginning,
# followed by 1 or more of either numbers or a period and then ".zip", then we
# know it's a SuperSU zip. Of course, a simpler pattern like ^.*SuperSU.*\.zip$
# would work just as well.
filename_regex = r"^.*SuperSU.*\.zip$"
# This is the type of ramdisk. Run the 'list-ramdisks' file in the useful/
# folder to see what choices are available. (It's pretty obvious, you'll see)
file_info.ramdisk = 'jflte/AOSP/AOSP.def'
# If the zip file you're patching does not have a kernel, set this to false.
file_info.has_boot_image = True
# If the boot image has a different name or is in a subfolder, change this.
file_info.bootimg = 'boot.img'
# This is the patch file you generated. Just copy the patch into a subfolder in
# patches/ and put the path here.
file_info.patch = 'jflte/AOSP/YourROM.patch'
def print_message():
    # This is the message that is shown if the file to be patched is this one.
    # Announce which ROM/zip this patchinfo module recognised.
    print("Detected The Name of Some ROM")
###
def matches(filename):
    """Report whether *filename* fits this ROM's filename pattern."""
    found = re.search(filename_regex, filename)
    return found is not None
def get_file_info():
    # Accessor used by the patcher framework to obtain this ROM's FileInfo.
    return file_info
| [
"[email protected]"
] | |
d513893828bbf80619ff10477ea3f18b0cdf6215 | cc578cec7c485e2c1060fd075ccc08eb18124345 | /cs15211/PerfectSquares.py | f097d8aa0569feaba85550e5a0ea219fa94bdfa0 | [
"Apache-2.0"
] | permissive | JulyKikuAkita/PythonPrac | 18e36bfad934a6112f727b4906a5e4b784182354 | 0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c | refs/heads/master | 2021-01-21T16:49:01.482561 | 2019-02-07T06:15:29 | 2019-02-07T06:15:29 | 91,907,704 | 1 | 1 | Apache-2.0 | 2019-02-07T06:15:30 | 2017-05-20T18:12:53 | Python | UTF-8 | Python | false | false | 6,693 | py | __source__ = 'https://leetcode.com/problems/perfect-squares/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/perfect-squares.py
# Time: O(n * sqrt(n))
# Space: O(n)
#
# Description: Leetcode # 279. Perfect Squares
#
# Given a positive integer n, find the least number of perfect
# square numbers (for example, 1, 4, 9, 16, ...) which sum to n.
#
# For example, given n = 12, return 3 because 12 = 4 + 4 + 4;
# given n = 13, return 2 because 13 = 4 + 9.
#
# Companies
# Google
# Related Topics
# Math Dynamic Programming Breadth-first Search
# Similar Questions
# Count Primes Ugly Number II
#
#dp
# http://bookshadow.com/weblog/2015/09/09/leetcode-perfect-squares/
# O(n * sqrt n)
# @Not getting dp yet
import unittest
class Solution(object):
    """DP with a memo shared across calls: O(n*sqrt(n)) time, O(n) space.

    Fix: ``xrange`` is Python-2-only (NameError under Python 3); ``range``
    behaves identically here on both interpreters.  The obscure
    ``num += expr,`` tuple-append is replaced by an explicit append.
    """
    # _num[k] holds the least number of perfect squares summing to k;
    # the list grows lazily and persists between calls/instances.
    _num = [0]
    def numSquares(self, n):
        """
        :type n: int
        :rtype: int
        """
        num = self._num
        while len(num) <= n:
            # Best count for len(num): 1 + the best over all squares
            # i*i <= len(num) (num[-i*i] indexes from the end).
            num.append(min(num[-i*i] for i in range(1, int(len(num)**0.5+1))) + 1)
        return num[n]
#Recursion
class Solution2(object):
    """Recursive greedy-over-quotients search.

    Fix: the original used ``/``, which is float (true) division under
    Python 3 and silently produces wrong, non-integer quotients; ``//``
    is floor division on both Python 2 and 3.
    """
    def numSquares(self, n):
        """
        :type n: int
        :rtype: int
        """
        num, res = 2, n
        while num * num <= n:
            a = n // (num * num)   # how many whole squares of size num*num fit
            b = n % (num * num)    # remainder to solve recursively
            res = min(res, a + self.numSquares(b))
            num += 1
        return res
# Lagrange's Four-Square Theorem
# http://bookshadow.com/weblog/2015/09/09/leetcode-perfect-squares/
# O (sqrt n )
import math
class Solution3(object):
    """O(sqrt n) solution using Legendre's three-square theorem.

    Fixes: the original's ``return ~~a + ~~b`` raised TypeError because
    ``b`` was a float (unary ~ is undefined for floats), so every input
    whose answer is 1 or 2 crashed; and ``n /= 4`` made ``n`` a float
    under Python 3.  Also guard n <= 0, which previously looped forever.
    """
    def numSquares(self, n):
        """
        :type n: int
        :rtype: int
        """
        if n <= 0:
            return 0
        # Factors of 4 do not change the answer.
        while n % 4 == 0:
            n //= 4
        # Numbers of the form 4^k(8m+7) need exactly four squares.
        if n % 8 == 7:
            return 4
        # Look for a representation as a sum of at most two squares.
        a = 0
        while a * a <= n:
            b = int(math.sqrt(n - a * a))
            if a * a + b * b == n:
                # Count the non-zero squares in the representation.
                return int(a > 0) + int(b > 0)
            a += 1
        return 3
class SolutionDFS(object):
    """Exhaustive DFS over non-decreasing combinations of squares.

    Exponential; only suitable for small n.  Fixes: removed the leftover
    Python-2-only debug ``print`` statement (a SyntaxError under Python
    3), and ``numSquares`` now resets ``self.cnt`` so a reused instance
    does not return stale minima from earlier calls.
    """
    def __init__(self):
        # Sentinel "infinity"; tracks the smallest count found so far.
        self.cnt = 0xF7777777
    def numSquares(self, n):
        """
        :type n: int
        :rtype: int
        """
        if n <= 0:
            return 0
        if n == 1:
            return 1
        self.cnt = 0xF7777777  # reset per call so the instance is reusable
        self.dfs(n, 0, [], 1)
        return self.cnt
    def dfs(self, n, sum, tmp, idx):
        # NB: parameter name ``sum`` shadows the builtin; kept for
        # signature compatibility with existing callers.
        if sum > n or idx * idx > n:
            return
        if sum == n:
            self.cnt = min(self.cnt, len(tmp))
            return
        # Try every candidate square idx*idx <= n, keeping idx
        # non-decreasing along a path to avoid duplicate combinations.
        while idx * idx <= n:
            tmp.append(idx)
            self.dfs(n, sum + idx * idx, tmp, idx)
            tmp.pop()
            idx += 1
class SolutionDP(object):
    """Bottom-up DP: dp[k] = least number of perfect squares summing to k.

    Fixes: the original inner loop ``xrange(1, n - i)`` skipped valid
    states — e.g. numSquares(2) left dp[2] at the 0xF7777777 sentinel and
    returned it — and ``xrange`` is Python-2-only.  The rewrite relaxes
    dp over every square explicitly, which is both correct and O(n*sqrt n).
    """
    def numSquares(self, n):
        """
        :type n: int
        :rtype: int
        """
        if n <= 0:
            return 0
        if n == 1:
            return 1
        INF = 0xF7777777  # sentinel larger than any attainable count
        dp = [INF] * (n + 1)
        dp[0] = 0
        i = 1
        while i * i <= n:
            sq = i * i
            # Relax every amount reachable by adding one square of size sq.
            for j in range(sq, n + 1):
                if dp[j - sq] + 1 < dp[j]:
                    dp[j] = dp[j - sq] + 1
            i += 1
        return dp[n]
class TestMethods(unittest.TestCase):
    # Smoke tests (Python 2 print syntax).  Expected console output:
    # numSquares(12) == 3 (12 = 4+4+4) and numSquares(10) == 2 (10 = 9+1).
    def test_Local(self):
        self.assertEqual(1, 1)
        #print Solution().numSquares(12)
        #print Solution2().numSquares(12)
        print Solution3().numSquares(12)
        print SolutionDFS().numSquares(10)
Java = '''
# Thought:
#
dp[n] indicates that the perfect squares count of the given n, and we have:
dp[0] = 0
dp[1] = dp[0]+1 = 1
dp[2] = dp[1]+1 = 2
dp[3] = dp[2]+1 = 3
dp[4] = Min{ dp[4-1*1]+1, dp[4-2*2]+1 }
= Min{ dp[3]+1, dp[0]+1 }
= 1
dp[5] = Min{ dp[5-1*1]+1, dp[5-2*2]+1 }
= Min{ dp[4]+1, dp[1]+1 }
= 2
.
.
.
dp[13] = Min{ dp[13-1*1]+1, dp[13-2*2]+1, dp[13-3*3]+1 }
= Min{ dp[12]+1, dp[9]+1, dp[4]+1 }
= 2
.
.
.
dp[n] = Min{ dp[n - i*i] + 1 }, n - i*i >=0 && i >= 1
# 15ms 97.04%
class Solution {
public int numSquares(int n) {
int[] dp = new int[n + 1];
Arrays.fill(dp, Integer.MAX_VALUE);
dp[0] = 0;
int cur = 0;
for (int j = 1; (cur = j * j) <= n; j++) {
for (int i = cur; i <= n; i++) {
dp[i] = Math.min(dp[i], dp[i - cur] + 1);
}
}
return dp[n];
}
}
#Note
dp arr for n = 5 will be:
0, MAX, MAX, MAX, MAX, MAX
0, 1 , MAX, MAX, MAX, MAX
0, 1, 2, MAX, MAX, MAX
0, 1, 2, 3, MAX, MAX
0, 1, 2, 3, 4, , MAX
0, 1, 2, 3, 4, 2
# Recurstion case:
# dp[n] = Math.min(dp[n], dp[n - i*i] + 1 ), n - i*i >=0 && i >= 1
# 21ms 94.70%
class Solution {
public int numSquares(int n) {
int[] dp = new int[n + 1];
Arrays.fill(dp, Integer.MAX_VALUE);
dp[0] = 0;
for (int i = 1; i * i <= n ;i++) {
int cur = i * i;
for (int j = cur; j <= n; j++) {
dp[j] = Math.min(dp[j], dp[j - cur] + 1);
}
}
return dp[n];
}
}
3.Mathematical Solution
# 1ms 100%
class Solution {
public int numSquares(int n) {
//base case
if(n < 4) return n;
// If n is a perfect square, return 1.
if (isSquare(n)) return 1;
// The result is 4 if and only if n can be written in the
// form of 4^k*(8*m + 7). Please refer to
// Legendre's three-square theorem.
while ((n & 3) == 0) { // n % 4 == 0
n >>= 2;
}
if ((n & 7) == 7) { // n % 8 == 7
return 4;
}
// Check whether 2 is the result.
int sqrtN = (int)(Math.sqrt(n));
for(int i = 1; i <= sqrtN; i++) {
if (isSquare(n - i * i)) return 2;
}
return 3;
}
public boolean isSquare(int n) {
int sqrtN = (int)Math.sqrt(n);
return sqrtN * sqrtN == n;
}
}
//https://www.cnblogs.com/grandyang/p/4800552.html
//there are only 4 possible result 1,2,3,4
//check if a * a + b * b == n
# 98.70% 2ms
class Solution {
public int numSquares(int n) {
//base case
if (n < 4) return n;
// The result is 4 if and only if n can be written in the
// form of 4^k*(8*m + 7). // Legendre's three-square theorem.
while (n % 4 == 0) n /= 4; //to make it smaller
if (n % 8 == 7) return 4;
int a, b;
for (int i = 0; i < n; i++) {
a = i;
b = (int) Math.sqrt(n - a * a);
if (a * a + b * b == n) //perfect square -> a or b == 0
return a == 0 || b == 0 ? 1 : 2;
}
return 3;
}
}
''' | [
"[email protected]"
] | |
4b89a17ac220b090f950e378b29399758610f9db | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /UraniaDev_v6r1/PIDCalib/PIDPerfScripts/scripts/python/MCResampling/MakePIDdistributionsRunRange.py | b824a21ad3c2ed644ac8c5f12215c65dd3166258 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,453 | py | #!/bin/env python
from PIDPerfScripts.StartScreen import *
import ROOT
import sys
import os.path
from array import *
import argparse
#make projections
def makeprojections(hs,projlist,axes=[]):
    # Project the THnSparse ``hs`` onto the dimensions NOT named in
    # ``axes``: for every bin combination of the named axes (1, 2 or 3 of
    # them, including under/overflow bins 0 and N+1) a projection histogram
    # is created and stored in ``projlist`` keyed by a composed name.
    # NB: mutable default ``axes=[]`` is read-only here, so it is safe.
    dim=hs.GetNdimensions()
    # Map axis index -> axis name for the axes we iterate over.
    axs={}
    for a in range(0,dim):
        #print hs.GetAxis(a).GetName()
        if str(hs.GetAxis(a).GetName()) in axes:
            axs[a]=hs.GetAxis(a).GetName()
    # The remaining dimensions are the ones we project onto.
    projdim=[]
    for d in range(0,dim):
        if d not in axs.keys():
            projdim.append(d)
    if len(axs)==1:
        for xbin in range(0,hs.GetAxis(axs.keys()[0]).GetNbins()+2):
            hs.GetAxis(axs.keys()[0]).SetRange(xbin,xbin)
            tmpname=str(hs.GetName())+"_proj_"+axs[axs.keys()[0]]+"bin"+str(xbin)
            projlist[str(hs.GetName())+tmpname]=hs.Projection(len(projdim),array("i",projdim))
            projlist[str(hs.GetName())+tmpname].SetName(str(hs.GetName())+tmpname)
            projlist[str(hs.GetName())+tmpname].SetTitle(str(hs.GetName())+tmpname)
    if len(axs)==2:
        for xbin in range(0,hs.GetAxis(axs.keys()[0]).GetNbins()+2):
            hs.GetAxis(axs.keys()[0]).SetRange(xbin,xbin)
            for ybin in range(0,hs.GetAxis(axs.keys()[1]).GetNbins()+2):
                hs.GetAxis(axs.keys()[1]).SetRange(ybin,ybin)
                tmpname=str(hs.GetName())+"_proj_"+axs[axs.keys()[0]]+"bin"+str(xbin)+"_"+axs[axs.keys()[1]]+"bin"+str(ybin)
                projlist[str(hs.GetName())+tmpname]=hs.Projection(len(projdim),array("i",projdim))
                projlist[str(hs.GetName())+tmpname].SetName(str(hs.GetName())+tmpname)
                projlist[str(hs.GetName())+tmpname].SetTitle(str(hs.GetName())+tmpname)
    if len(axs)==3:
        for xbin in range(0,hs.GetAxis(axs.keys()[0]).GetNbins()+2):
            hs.GetAxis(axs.keys()[0]).SetRange(xbin,xbin)
            for ybin in range(0,hs.GetAxis(axs.keys()[1]).GetNbins()+2):
                hs.GetAxis(axs.keys()[1]).SetRange(ybin,ybin)
                for zbin in range(0,hs.GetAxis(axs.keys()[2]).GetNbins()+2):
                    hs.GetAxis(axs.keys()[2]).SetRange(zbin,zbin)
                    tmpname=str(hs.GetName())+"_proj_"+axs[axs.keys()[0]]+"bin"+str(xbin)+"_"+axs[axs.keys()[1]]+"bin"+str(ybin)+"_"+axs[axs.keys()[2]]+"bin"+str(zbin)
                    projlist[str(hs.GetName())+tmpname]=hs.Projection(len(projdim),array("i",projdim))
                    projlist[str(hs.GetName())+tmpname].SetName(str(hs.GetName())+tmpname)
                    projlist[str(hs.GetName())+tmpname].SetTitle(str(hs.GetName())+tmpname)
#function to fill THnSparse with dataset
def mcreweight_datasettothsparse(ds,Part,hsparse,varnames,weight,h1d_list=[]):
    # Fill the THnSparse ``hsparse`` (and optionally one TH1 per variable
    # in ``h1d_list``) from the RooDataSet ``ds``, using the per-entry
    # dataset weight.  ``varnames`` maps axis index -> dataset variable
    # name.  The 1-D histograms are only filled when one is supplied per
    # variable.  NB: ``Part`` and ``weight`` are currently unused; the
    # mutable default ``h1d_list=[]`` is never mutated, so it is safe.
    if len(h1d_list)==len(varnames):
        for i in range(0,ds.numEntries()):
            Dset = ds.get(i)
            tmparray=[]
            tmpwgt=ds.weight()   # sWeight of the current entry
            #print tmpwgt
            for k in range(len(varnames)):
                tmparray.append(Dset.getRealValue(varnames[k]))
                h1d_list[k].Fill(float(Dset.getRealValue(varnames[k])),tmpwgt)
            hsparse.Fill(array("d",tmparray),tmpwgt)
    else:
        for i in range(0,ds.numEntries()):
            Dset = ds.get(i)
            tmparray=[]
            tmpwgt=ds.weight()
            #print tmpwgt
            for k in range(len(varnames)):
                tmparray.append(Dset.getRealValue(varnames[k]))
            hsparse.Fill(array("d",tmparray),tmpwgt)
#function to fill THnSpare with dataset list
def mcreweight_dslisttothsparse(StripVersion, MagPolarity, RunMin, RunMax, verbose, allowMissing,Part,nm,totalbinning,weight,cuts="",kinbinning=[]):
    # Build a THnSparse named ``nm`` whose axes are defined by
    # ``totalbinning`` (list of [axis-name, dataset-variable, bin-edges]),
    # fill it from every calibration file for the given stripping/polarity
    # and run range (optionally after applying ``cuts``), and produce
    # projections over the kinematic axes listed in ``kinbinning``.
    # Returns a dict: per-axis TH1Ds at keys 0..N-1, the THnSparse at key
    # N, plus all projection histograms.  NB: the mutable default
    # ``kinbinning=[]`` is only read, so it is safe.
    #
    # The axis ranges are placeholders; the real bin edges are set below.
    dummynbins=[]
    dummymins=[]
    dummymaxs=[]
    for b in range(len(totalbinning)):
        dummynbins.append(len((totalbinning[b])[2])-1)
        dummymins.append(-100.)
        dummymaxs.append(100.)
    #defining histograms
    from ROOT import THnSparseD, TH1D
    varnames={}
    kinvarnames=[]
    hs = THnSparseD(nm,nm,len(totalbinning),array("i",dummynbins),array("d",dummymins),array("d",dummymaxs))
    h1d_list = {}
    hprojections_list={}
    for i in range(0,len(totalbinning)):
        h1d_list[i]= TH1D("H1D_"+(totalbinning[i])[0],(totalbinning[i])[0],len((totalbinning[i])[2])-1,array("d",(totalbinning[i])[2]))
        hs.GetAxis(i).Set(len((totalbinning[i])[2])-1,array("d",(totalbinning[i])[2]))
        hs.GetAxis(i).SetTitle((totalbinning[i])[0])
        hs.GetAxis(i).SetName((totalbinning[i])[0])
        varnames[i]=(totalbinning[i])[1]
    for i in range(0,len(kinbinning)):
        kinvarnames.append((kinbinning[i])[0])
    #DataDict = GetRunDictionary(StripVersion, Part)
    #IndexDict = GetMinMaxFileDictionary(DataDict, MagPolarity, RunMin, RunMax, -1)
    print "Filling Histogram"
    #for i in xrange(IndexDict['minIndex'], IndexDict['maxIndex']+1):
    #    #ds = GetDataSet(StripVersion, MagPolarity, Part, 'runNumber>='+str(RunMin)+' && runNumber<='+str(RunMax), i, verbose, allowMissing)
    #    ds = GetDataSet(StripVersion, MagPolarity, Part, 'runNumber>='+str(RunMin)+' && runNumber<='+str(RunMax), opts.pidVars, opts.xVarName, opts.yVarName, opts.zVarName, i, verbose, allowMissing)
    #    if ds is not None:
    #        if cuts!="":
    #            cutds=ds.reduce(cuts)
    #            mcreweight_datasettothsparse(cutds,Part,hs,varnames,weight,h1d_list)
    #            cutds.IsA().Destructor(cutds) #free memory from cut dataset
    #        else:
    #            mcreweight_datasettothsparse(ds,Part,hs,varnames,weight,h1d_list)
    #        ds.Delete() #needed to free memory
    # Run 1 strippings use run-indexed files; Run 2 Turbo uses WGP files.
    if "Turbo" not in StripVersion:
        files = GetFiles(StripVersion,MagPolarity,Part,RunMin,RunMax,100000,opts.verbose)
    elif "Turbo" in StripVersion:
        files = GetWGPFiles(StripVersion,MagPolarity,opts.verbose)
    for file in files:
        #ds = GetDataSet(StripVersion, MagPolarity, Part, 'runNumber>='+str(RunMin)+' && runNumber<='+str(RunMax), i, verbose, allowMissing)
        ds = GetDataSet(StripVersion, MagPolarity, Part, 'runNumber>='+str(RunMin)+' && runNumber<='+str(RunMax), opts.pidVars, opts.xVarName, opts.yVarName, opts.zVarName, file, verbose, allowMissing)
        if ds is not None:
            if cuts!="":
                cutds=ds.reduce(cuts)
                mcreweight_datasettothsparse(cutds,Part,hs,varnames,weight,h1d_list)
                cutds.IsA().Destructor(cutds) #free memory from cut dataset
            else:
                mcreweight_datasettothsparse(ds,Part,hs,varnames,weight,h1d_list)
            ds.Delete() #needed to free memory
    #making projections
    print "Filling projections for kinematic variables"
    #print kinvarnames
    makeprojections(hs,hprojections_list,kinvarnames)
    h1d_list[len(totalbinning)]=hs
    h1d_list.update(hprojections_list)
    return h1d_list
class ShowArgumentsParserMCreweight(argparse.ArgumentParser):
    """ArgumentParser that, on error, prints the usage line followed by
    the parser's full description before exiting with status 2.

    Fix: the original called ``parser.print_usage`` on the module-level
    global ``parser`` instead of ``self``, so the class only worked when
    a global named ``parser`` happened to exist; behaviour is otherwise
    identical because that global was this very instance.
    """
    def error(self, message):
        sys.stderr.write('error: %s\n\n' %message)
        self.print_usage(sys.stderr)
        sys.stderr.write('\n'+self.description)
        sys.exit(2)
if '__main__' == __name__:
start()
print ""
parser = ShowArgumentsParserMCreweight(
formatter_class=argparse.RawDescriptionHelpFormatter,
prog=os.path.basename(sys.argv[0]),
description=("""Make pid distributions for a given:
a) Sample version <sampleVersion> (e.g. \"20\" for Run 1 Stripping 20, \"Turbo16\" for Run 2 WGP)
b) magnet polarity <magPol> (\"MagUp\" or \"MagDown\")
c) particle type <partName> (\"K\", \"P\", \"Pi\", \"e\" or \"Mu\")
d) PID variable, <pidVar>
Multiple PID variables can be specified if necessary, e.g. \"[DLLK,DLLp]\".
Note that there is no space after the comma.
For a full list of arguments, do: 'python {0} -h'
""").format(os.path.basename(sys.argv[0])),
epilog ="""To use the 'MuonUnBiased' hadron samples for muon misID studies, one of the
following tracks types should be used instead: \"K_MuonUnBiased\", \"Pi_MuonUnBiased\"
or \"P_MuonUnBiased\"."""
)
## add the positional arguments
parser.add_argument('sampleVersion', metavar='<sampleVersion>', help="Sets the stripping version for Run I data, or the Turbo WGP production version for Run II")
parser.add_argument('magPol', metavar='<magPol>', help="Sets the magnet polarity")
parser.add_argument('partName', metavar='<partName>', help="Sets the particle type")
parser.add_argument('pidVars', metavar='<pidVars>', help="Sets the PID variables to store")
## add the optional arguments
parser.add_argument('-x', '--minRun', dest="runMin", metavar="NUM",default=0, help="Sets the minimum run number to process (if applicable)")
parser.add_argument('-y', '--maxRun', dest="runMax", metavar="NUM",default=9999999, help="Sets the maximum run number to process (if applicable)")
parser.add_argument('-c', '--cuts', dest='cuts', metavar='CUTS', default='DEFAULT:ALL', help=("Sets the list of cuts to apply to the calibration "
"sample(s) prior to determine the PID efficiencies "
"(default: (default)s). "
"NB. It is up to the user to ensure that their reference "
"sample has the same cuts applied."
))
parser.add_argument("-o", "--outputDir", dest="outputDir", metavar="DIR",
help="Save the performance histograms to directory DIR "
"(default: current directory)")
binGroup = parser.add_argument_group("binning options")
binGroup.add_argument("-X", "--xVarName", dest="xVarName", metavar="NAME", default="P", help="Sets the NAME of the 1st (x) bin variable "
"(default: %(default)s)")
binGroup.add_argument("-Y", "--yVarName", dest="yVarName", metavar="NAME", default="ETA", help="Sets the NAME of the 2nd (y) bin variable "
"(default: %(default)s). "
"If 1D binning is required, then this option should "
"be set to an empty string")
binGroup.add_argument("-Z", "--zVarName", dest="zVarName", metavar="NAME", default="nTracks", help="Sets the NAME of the 3rd (z) bin variable "
"(default: %(default)s). "
"If 2D binning is required, then this option should "
"be set to an empty string")
binGroup.add_argument("-s", "--schemeName", dest="schemeName", metavar="NAME", default=None, help="Sets the NAME of the binning scheme, "
"as defined in the module 'PIDPerfScripts.binning'. "
"If this option is not set, the default "
"binning scheme is used.")
binGroup.add_argument("-b", "--binSchemeFile", dest="binSchemeFile",
metavar="NAME", default=None,
help="Sets the NAME of the python file containing "
"user-defined binning schema. Without this option, "
"the script will only look for binning schema in "
"the 'PIDPerfScripts.binning' module")
addGroup = parser.add_argument_group("further options")
addGroup.add_argument("-q", "--quiet", dest="verbose", action="store_false",
default=True,
help="Suppresses the printing of verbose information")
addGroup.add_argument("-M", "--allow-missing", dest="allowMissing",
action="store_true", default=False,
help="Allow missing calibration subsamples. "
"N.B. You should only use this option if requested to "
"do so by the PIDCalib authors")
opts = parser.parse_args()
# PIDPerfScripts supplies the sample definitions, dataset access helpers
# and performance-calculation functions used below (star imports follow
# the package's own convention).
from PIDPerfScripts.Definitions import *
from PIDPerfScripts.DataFuncs import *
from PIDPerfScripts.PerfResults import *
from PIDPerfScripts.PerfCalcFuncs import *

# NOTE(review): this is a Python 2 script (print statements, iteritems).
print "checking options"
# print opts.cuts

StripVersion = None
MagPolarity = None
PartName = None
YourPart = None
RunMin = 0
RunMax = 9999999

# set the stripping version
StripVersion=opts.sampleVersion
CheckStripVer(StripVersion)

# set the magnet polarity
MagPolarity=opts.magPol
CheckMagPol(MagPolarity)

# set the particle name
PartName=opts.partName
# CheckPartType(PartName)
YourPart = opts.cuts
YourPart_dict={"DEFAULT":"ALL"}

# set run range: either both limits are given, or neither
RunMin = opts.runMin
RunMax = opts.runMax
if RunMin is not None:
    try:
        int(RunMin)
    except ValueError:
        parser.error(
            "Argument to --minRun ('%s') is not an integer'." %RunMin)
    if RunMax is None:
        parser.error(
            "Min run was specified as %s, but no max run was given." %RunMin)
if RunMax is not None:
    try:
        int(RunMax)
    except ValueError:
        parser.error(
            "Argument to --maxRun ('%s') is not an integer'." %RunMax)
    if RunMin is None:
        parser.error(
            "Max run was specified as %s, but no min run was given." %RunMax)

# set particle types: accept a bare NAME, "[N1,N2,...]" or "(N1,N2,...)"
if PartName.startswith("["):
    if not PartName.endswith("]"):
        parser.error("Invalid Particles string %s" %PartName)
    PartName = PartName[1:-1].split(',')
elif PartName.startswith("("):
    if not PartName.endswith(")"):
        parser.error("Invalid Particles string %s" %PartName)
    PartName = PartName[1:-1].split(',')
else:
    PartName = (PartName,)
for tmpp in PartName:
    CheckPartType(tmpp)
# set cuts of particles you want to get the weighted pid;
# same "[...]"/"(...)" list syntax as the particle names, each entry
# being "LABEL:CUT"
if YourPart.startswith("["):
    if not YourPart.endswith("]"):
        parser.error("Invalid Particles string %s" %YourPart)
    YourPart = YourPart[1:-1].split(',')
elif YourPart.startswith("("):
    if not YourPart.endswith(")"):
        parser.error("Invalid Particles string %s" %YourPart)
    YourPart = YourPart[1:-1].split(',')
else:
    YourPart = (YourPart,)
if len(YourPart)>0:
    YourPart_dict={}
    for ypt in YourPart:
        tmps=ypt.split(':')
        YourPart_dict[tmps[0]]=tmps[1]
print "setting cuts as"
print YourPart_dict

# set pid variables to store
pidVars = opts.pidVars
if pidVars.startswith("["):
    if not pidVars.endswith("]"):
        parser.error("Invalid pid variables string %s" %pidVars)
    pidVars = pidVars[1:-1].split(',')
elif pidVars.startswith("("):
    if not pidVars.endswith(")"):
        parser.error("Invalid pid variables string %s" %pidVars)
    pidVars = pidVars[1:-1].split(',')
else:
    pidVars = (pidVars,)
# index -> variable name, preserving command-line order
ct=0
pidVars_dict={}
for v in pidVars:
    pidVars_dict[ct]=v
    ct=ct+1

XVarName = opts.xVarName
if XVarName=='':
    parser.error("Argument to --xBinVarName is an empty string.")
YVarName = opts.yVarName
ZVarName = opts.zVarName
# a z variable without a y variable is almost certainly a swapped pair
if ZVarName!='' and YVarName=='':
    parser.error("Argument to --yVarName is an empty string,"
                 "but argument to --zVarName. If you planned to"
                 "include only two binning variables. Did you"
                 "mean to do --yVarName='{0}' --zVarName='{1}'?".format(
                     ZVarName, YVarName))
SchemeName=opts.schemeName
#=============================================================================
# Declare Binning Schema (RooBinnings)
#=============================================================================
from PIDPerfScripts.Binning import GetBinScheme

# Optionally load user-defined binning schema before any scheme lookup.
if opts.binSchemeFile is not None:
    import imp
    try:
        imp.load_source('userbinning', opts.binSchemeFile)
    except IOError:
        msg="Failed to load binning scheme file '{0}'".format(opts.binSchemeFile)
        raise IOError(msg)

# Load the RooStats and PIDPerfTools shared libraries.
# NOTE(review): `ROOT` itself appears to come in via the PIDPerfScripts
# star imports above — confirm.
ROOT.gSystem.Load('libRooStats.so')
# ROOT.gSystem.Load('libCintex.so')
# cintex=ROOT.Cintex
# cintex.Enable()
ROOT.gSystem.Load('libPIDPerfToolsLib.so')
ROOT.gSystem.Load('libPIDPerfToolsDict.so')
# help(ROOT.PerfCalculator)

#======================================================================
# Open file to write TH1Fs to
#======================================================================
fnameSuffix=''
if SchemeName is not None:
    fnameSuffix+='_{0}'.format(SchemeName)
# "Turbo" (WGP) samples carry the version directly in the file name.
if "Turbo" not in StripVersion:
    fname = "PIDHists_Strip{strp}_{pol}{suf}.root".format(
        strp=StripVersion, pol=MagPolarity,
        suf=fnameSuffix)
elif "Turbo" in StripVersion:
    fname = "PIDHists_{strp}_{pol}{suf}.root".format(
        strp=StripVersion, pol=MagPolarity,
        suf=fnameSuffix)
if opts.outputDir is not None:
    fname = "%s/%s" %(opts.outputDir, fname)
print "Saving histograms to %s" %fname
f_Out = ROOT.TFile.Open(fname, "RECREATE")
if not f_Out:
    raise IOError("Failed to open file %s for writing" %fname)
#======================================================================
# Getting Datasets
#======================================================================
print "Getting datasets and filling histograms"

# Create directory structure: one TDirectory per cut label, with a
# subdirectory per particle type.
for k,yp in YourPart_dict.iteritems():
    f_Out.cd()
    labdir=f_Out.mkdir(k)
    labdir.cd()
    for Part in PartName:
        if labdir:
            part_dir=labdir.mkdir(Part)
        else:
            part_dir=f_Out.mkdir(Part)
#f_Out.ls()

for Part in PartName:
    f_Out.cd()
    _tmpcuts=opts.cuts
    if "Turbo" not in StripVersion:
        files = GetFiles(StripVersion,MagPolarity,Part,RunMin,RunMax,100000,opts.verbose)
    elif "Turbo" in StripVersion:
        files = GetWGPFiles(StripVersion,MagPolarity,opts.verbose)
    # Only the first file is needed here: it is used to resolve the
    # dataset-internal parameter names for the requested variables.
    _tmpDataSet = GetDataSet(StripVersion, MagPolarity, Part, "", opts.pidVars, opts.xVarName, opts.yVarName, opts.zVarName, files[0], opts.verbose, opts.allowMissing)
    #dsl=GetDataSets(StripVersion, MagPolarity, Part, "", RunMin, RunMax,opts.verbose,opts.allowMissing)

    X_Bin = GetBinScheme(Part, XVarName, SchemeName)
    Y_Bin = None
    if YVarName!='':
        Y_Bin = GetBinScheme(Part, YVarName, SchemeName)
    Z_Bin = None
    if ZVarName!='':
        Z_Bin = GetBinScheme(Part, ZVarName, SchemeName)
    # Translate user-facing variable names to dataset parameter names.
    if X_Bin:
        XVarNamec=_tmpDataSet.Get_ParamName(XVarName)
    if Y_Bin:
        YVarNamec=_tmpDataSet.Get_ParamName(YVarName)
    if Z_Bin:
        ZVarNamec=_tmpDataSet.Get_ParamName(ZVarName)

    # Flatten each RooBinning into an ordered list of bin edges.
    XBinning=[]
    YBinning=[]
    ZBinning=[]
    for i in range(X_Bin.numBins()):
        XBinning.append(X_Bin.binLow(i))
    XBinning.append(X_Bin.binHigh(X_Bin.numBins()-1))
    if Y_Bin:
        for i in range(Y_Bin.numBins()):
            YBinning.append(Y_Bin.binLow(i))
        YBinning.append(Y_Bin.binHigh(Y_Bin.numBins()-1))
    if Z_Bin:
        for i in range(Z_Bin.numBins()):
            ZBinning.append(Z_Bin.binLow(i))
        ZBinning.append(Z_Bin.binHigh(Z_Bin.numBins()-1))

    #=============================================================================
    # Declare Binning Schema (RooBinnings)
    #=============================================================================
    if opts.verbose:
        print('========== Binning Schema ==========')
        if SchemeName is None:
            print "Default binning schema"
        else:
            print "Using binning schema '{0}'".format(SchemeName)
        print('====================================')

    # Histogram axes: kinematic binning first, then one axis per PID
    # variable (ProbNN-style variables in [0,1], DLLs in unit bins).
    _nprobnn=1000
    _ndlls=300
    totalbinning={}
    kinbinning={}
    _nvars=0
    totalbinning[_nvars]=(XVarName,XVarNamec,XBinning)
    kinbinning[_nvars]=(XVarName,XVarNamec,XBinning)
    _nvars=1
    if Y_Bin:
        totalbinning[_nvars]=(YVarName,YVarNamec,YBinning)
        kinbinning[_nvars]=(YVarName,YVarNamec,YBinning)
        _nvars= _nvars+1
    if Z_Bin:
        totalbinning[_nvars]=(ZVarName,ZVarNamec,ZBinning)
        kinbinning[_nvars]=(ZVarName,ZVarNamec,ZBinning)
        _nvars= _nvars+1
    for vpid in range(len(pidVars_dict)):
        _tmpbinning=[]
        # add underflow bin
        _tmpbinning.append(-1001.)
        _tmpbinning.append(-999.)
        if "DLL" not in pidVars_dict[vpid]:
            for nb in range(_nprobnn+1):
                _tmpbinning.append(0.+float(nb)/float(_nprobnn))
        else:
            for nb in range(_ndlls+1):
                _tmpbinning.append(-150.+float(nb))
        print vpid
        #print _tmpbinning
        totalbinning[_nvars]=(pidVars_dict[vpid],_tmpDataSet.Get_ParamName(pidVars_dict[vpid]),_tmpbinning)
        _nvars= _nvars+1

    # Fill one set of sparse histograms per cut label and write them
    # into the matching "<label>/<particle>" directory.
    for k,yp in YourPart_dict.iteritems():
        _tmpcuts=""
        print "Filling histograms for " +k
        if "ALL" not in yp:
            _tmpcuts=yp
            print "Applying cut "+_tmpcuts
        hs=mcreweight_dslisttothsparse(StripVersion, MagPolarity, RunMin, RunMax,opts.verbose,opts.allowMissing,Part,"histo_PID",totalbinning,"nsig_sw",_tmpcuts,kinbinning)
        f_Out.cd()
        if len(YourPart_dict)>0:
            f_Out.cd(k+"/"+Part)
        else:
            f_Out.cd(Part)
        for th in hs.itervalues():
            th.Write()

#======================================================================
# Close file
#======================================================================
f_Out.Close()
print "Done filling PID distributions"
| [
"[email protected]"
] | |
0b5ca29eaae9763b72fa05cf02e3700b7b140027 | 0912be54934d2ac5022c85151479a1460afcd570 | /Ch02_Code/GUI_tabbed_two_mighty.py | 96aa5a91f4e767b625a86a078a5ffa0a9a25fa21 | [
"MIT"
] | permissive | actuarial-tools/Python-GUI-Programming-Cookbook-Third-Edition | 6d9d155663dda4450d0b180f43bab46c24d18d09 | 8c9fc4b3bff8eeeda7f18381faf33c19e98a14fe | refs/heads/master | 2023-01-31T13:11:34.315477 | 2020-12-15T08:21:06 | 2020-12-15T08:21:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 989 | py | '''
Created on May 1, 2019
@author: Burkhard A. Meier
'''
#======================
# imports
#======================
import tkinter as tk
from tkinter import ttk
# Create instance
win = tk.Tk()
# Add a title
win.title("Python GUI")

tabControl = ttk.Notebook(win)          # Create Tab Control
tab1 = ttk.Frame(tabControl)            # Create a tab
tabControl.add(tab1, text='Tab 1')      # Add the tab
tab2 = ttk.Frame(tabControl)            # Add a second tab
tabControl.add(tab2, text='Tab 2')      # Make second tab visible
tabControl.pack(expand=1, fill="both")  # Pack to make visible

# LabelFrame using tab1 as the parent
mighty = ttk.LabelFrame(tab1, text=' Mighty Python ')
mighty.grid(column=0, row=0, padx=8, pady=4)

# Label using mighty as the parent
a_label = ttk.Label(mighty, text="Enter a name:")
a_label.grid(column=0, row=0, sticky='W')

#======================
# Start GUI
#======================
win.mainloop()  # blocks here until the window is closed
| [
"[email protected]"
] | |
b20cc0a645c622379c65505f264a6d4367a0bd51 | c9094a4ed256260bc026514a00f93f0b09a5d60c | /tests/components/nest/test_events.py | 692507d6ff905025d88fe3a713fd7366e14966e3 | [
"Apache-2.0"
] | permissive | turbokongen/home-assistant | 824bc4704906ec0057f3ebd6d92788e096431f56 | 4ab0151fb1cbefb31def23ba850e197da0a5027f | refs/heads/dev | 2023-03-12T05:49:44.508713 | 2021-02-17T14:06:16 | 2021-02-17T14:06:16 | 50,231,140 | 4 | 1 | Apache-2.0 | 2023-02-22T06:14:30 | 2016-01-23T08:55:09 | Python | UTF-8 | Python | false | false | 9,106 | py | """Test for Nest binary sensor platform for the Smart Device Management API.
These tests fake out the subscriber/devicemanager, and are not using a real
pubsub subscriber.
"""
from google_nest_sdm.device import Device
from google_nest_sdm.event import EventMessage
from homeassistant.util.dt import utcnow
from .common import async_setup_sdm_platform
from tests.common import async_capture_events
DOMAIN = "nest"
DEVICE_ID = "some-device-id"
PLATFORM = "camera"
NEST_EVENT = "nest_event"
EVENT_SESSION_ID = "CjY5Y3VKaTZwR3o4Y19YbTVfMF..."
EVENT_ID = "FWWVQVUdGNUlTU2V4MGV2aTNXV..."
async def async_setup_devices(hass, device_type, traits=None):
    """Set up the camera platform with a single fake SDM device.

    ``traits`` previously defaulted to a mutable ``{}``; the
    None-sentinel idiom is used instead so the default cannot be
    shared or mutated across calls.
    """
    devices = {
        DEVICE_ID: Device.MakeDevice(
            {
                "name": DEVICE_ID,
                "type": device_type,
                "traits": traits if traits is not None else {},
            },
            auth=None,
        ),
    }
    return await async_setup_sdm_platform(hass, PLATFORM, devices=devices)
def create_device_traits(event_trait):
    """Build a fake trait dict for a device exposing ``event_trait``."""
    live_stream_trait = {
        "maxVideoResolution": {
            "width": 640,
            "height": 480,
        },
        "videoCodecs": ["H264"],
        "audioCodecs": ["AAC"],
    }
    # Keep the same key order as before: Info, the event trait, then the
    # live-stream trait.
    return {
        "sdm.devices.traits.Info": {"customName": "Front"},
        event_trait: {},
        "sdm.devices.traits.CameraLiveStream": live_stream_trait,
    }
def create_event(event_type, device_id=DEVICE_ID, timestamp=None):
    """Create an EventMessage holding a single event of ``event_type``.

    Bug fix: ``timestamp`` used to be accepted but never forwarded, so a
    caller's explicit timestamp was silently replaced by ``utcnow()``
    inside ``create_events`` — tests comparing against the caller's
    timestamp only passed when both clock reads landed in the same
    second.
    """
    events = {
        event_type: {
            "eventSessionId": EVENT_SESSION_ID,
            "eventId": EVENT_ID,
        },
    }
    return create_events(events=events, device_id=device_id, timestamp=timestamp)
def create_events(events, device_id=DEVICE_ID, timestamp=None):
    """Create an EventMessage carrying the given raw event payloads."""
    event_time = timestamp if timestamp else utcnow()
    message = {
        "eventId": "some-event-id",
        "timestamp": event_time.isoformat(timespec="seconds"),
        "resourceUpdate": {
            "name": device_id,
            "events": events,
        },
    }
    return EventMessage(message, auth=None)
async def test_doorbell_chime_event(hass):
    """Test a pubsub message for a doorbell chime event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits("sdm.devices.traits.DoorbellChime"),
    )

    # Entity created by the camera platform for the fake device.
    registry = await hass.helpers.entity_registry.async_get_registry()
    entry = registry.async_get("camera.front")
    assert entry is not None
    assert entry.unique_id == "some-device-id-camera"
    assert entry.original_name == "Front"
    assert entry.domain == "camera"

    # Device registry entry backing that entity.
    device_registry = await hass.helpers.device_registry.async_get_registry()
    device = device_registry.async_get(entry.device_id)
    assert device.name == "Front"
    assert device.model == "Doorbell"
    assert device.identifiers == {("nest", DEVICE_ID)}

    # Deliver a chime event through the fake subscriber.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.DoorbellChime.Chime", timestamp=timestamp)
    )
    await hass.async_block_till_done()

    # Timestamps on the bus are truncated to whole seconds.
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "doorbell_chime",
        "timestamp": event_time,
    }
async def test_camera_motion_event(hass):
    """Test a pubsub message for a camera motion event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.CAMERA",
        create_device_traits("sdm.devices.traits.CameraMotion"),
    )
    registry = await hass.helpers.entity_registry.async_get_registry()
    entry = registry.async_get("camera.front")
    assert entry is not None

    # Deliver a motion event and flush the event loop.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.CameraMotion.Motion", timestamp=timestamp)
    )
    await hass.async_block_till_done()

    # Timestamps on the bus are truncated to whole seconds.
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_motion",
        "timestamp": event_time,
    }
async def test_camera_sound_event(hass):
    """Test a pubsub message for a camera sound event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.CAMERA",
        create_device_traits("sdm.devices.traits.CameraSound"),
    )
    registry = await hass.helpers.entity_registry.async_get_registry()
    entry = registry.async_get("camera.front")
    assert entry is not None

    # Deliver a sound event and flush the event loop.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.CameraSound.Sound", timestamp=timestamp)
    )
    await hass.async_block_till_done()

    # Timestamps on the bus are truncated to whole seconds.
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_sound",
        "timestamp": event_time,
    }
async def test_camera_person_event(hass):
    """Test a pubsub message for a camera person event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits("sdm.devices.traits.CameraEventImage"),
    )
    registry = await hass.helpers.entity_registry.async_get_registry()
    entry = registry.async_get("camera.front")
    assert entry is not None

    # Deliver a person-detected event and flush the event loop.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.CameraPerson.Person", timestamp=timestamp)
    )
    await hass.async_block_till_done()

    # Timestamps on the bus are truncated to whole seconds.
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_person",
        "timestamp": event_time,
    }
async def test_camera_multiple_event(hass):
    """Test a single pubsub message carrying two camera events."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits("sdm.devices.traits.CameraEventImage"),
    )
    registry = await hass.helpers.entity_registry.async_get_registry()
    entry = registry.async_get("camera.front")
    assert entry is not None

    # Two events sharing the same session/id, delivered in one message.
    event_map = {
        "sdm.devices.events.CameraMotion.Motion": {
            "eventSessionId": EVENT_SESSION_ID,
            "eventId": EVENT_ID,
        },
        "sdm.devices.events.CameraPerson.Person": {
            "eventSessionId": EVENT_SESSION_ID,
            "eventId": EVENT_ID,
        },
    }
    timestamp = utcnow()
    await subscriber.async_receive_event(create_events(event_map, timestamp=timestamp))
    await hass.async_block_till_done()

    # One bus event per SDM event, both with the truncated timestamp.
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 2
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_motion",
        "timestamp": event_time,
    }
    assert events[1].data == {
        "device_id": entry.device_id,
        "type": "camera_person",
        "timestamp": event_time,
    }
async def test_unknown_event(hass):
    """Test a pubsub message for an unknown event type."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits("sdm.devices.traits.DoorbellChime"),
    )
    # "some-event-id" is not a recognized SDM event type, so nothing
    # should be published on the bus.
    await subscriber.async_receive_event(create_event("some-event-id"))
    await hass.async_block_till_done()

    assert len(events) == 0
async def test_unknown_device_id(hass):
    """Test that an event for an unrecognized device id is ignored."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits("sdm.devices.traits.DoorbellChime"),
    )
    # Valid event type, but the device id does not match any known device.
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.DoorbellChime.Chime", "invalid-device-id")
    )
    await hass.async_block_till_done()

    assert len(events) == 0
async def test_event_message_without_device_event(hass):
    """Test that an event message lacking a resourceUpdate is ignored."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits("sdm.devices.traits.DoorbellChime"),
    )
    # Build a raw EventMessage with no "resourceUpdate" payload at all.
    timestamp = utcnow()
    event = EventMessage(
        {
            "eventId": "some-event-id",
            "timestamp": timestamp.isoformat(timespec="seconds"),
        },
        auth=None,
    )
    await subscriber.async_receive_event(event)
    await hass.async_block_till_done()

    assert len(events) == 0
| [
"[email protected]"
] | |
509fcbdd523fa11e40a849845f97193b811db41c | 8d2a124753905fb0455f624b7c76792c32fac070 | /pytnon-month01/month01-shibw-notes/day15-shibw/project/student_system/usl.py | 4ad416702de887a3725f0e5f9c0bd4ae8cdd8769 | [] | no_license | Jeremy277/exercise | f38e4f19aae074c804d265f6a1c49709fd2cae15 | a72dd82eb2424e4ae18e2f3e9cc66fc4762ec8fa | refs/heads/master | 2020-07-27T09:14:00.286145 | 2019-09-17T11:31:44 | 2019-09-17T11:31:44 | 209,041,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,952 | py | from project.student_system.bll import StudentManagerController
from project.student_system.model import StudentModel
class StudentManagerView:
    # View layer of the student manager: renders the menu, reads console
    # input and delegates all data operations to the controller (bll).

    def __init__(self):
        # Controller instance this view drives.
        self.__manager = StudentManagerController()

    def __display_menu(self):
        # Print the fixed main menu (options 1-5).
        print('+---------------------+')
        print('| 1)添加学生信息 |')
        print('| 2)显示学生信息 |')
        print('| 3)删除学生信息 |')
        print('| 4)修改学生信息 |')
        print('| 5)按照成绩升序排序 |')
        print('+---------------------+')

    def __select_menu(self):
        # Dispatch the chosen menu option to the matching handler.
        option = input('请输入:')
        if option == '1':
            self.__input_students()
        elif option == '2':
            self.__output_students(self.__manager.stu_list)
        elif option == '3':
            self.__delete_student()
        elif option == '4':
            self.__modify_student()
        elif option == '5':
            self.__output_student_by_socre()

    def main(self):
        '''
        View entry point: loop forever, redisplaying the menu and
        handling one selection per iteration.
        :return:
        '''
        while True:
            self.__display_menu()
            self.__select_menu()

    # Read one student from the console (__input_students)
    def __input_students(self):
        # Collect the student's name, age and score, build a
        # StudentModel and pass it to the controller's add_student.
        name = input('请输入学生姓名:')
        age = int(input('请输入学生年龄:'))
        score = int(input('请输入学生成绩:'))
        stu = StudentModel(name,age,score)
        self.__manager.add_student(stu)

    # Print the given students (__output_students)
    def __output_students(self,list):
        # NOTE(review): the parameter shadows the builtin `list`;
        # kept unchanged in this documentation-only pass.
        for item in list:
            print(item.name,item.age,item.score,item.id)

    # Delete a student by id (__delete_student)
    def __delete_student(self):
        # Ask for the student's id, have the controller remove it and
        # report success or failure.
        id = int(input('请输入要删除学生的编号:'))
        if self.__manager.remove_student(id):
            print('删除成功')
        else:
            print('删除失败')

    # Modify an existing student (__modify_student)
    def __modify_student(self):
        # Collect the new field values into a StudentModel (keyed by id)
        # and hand it to the controller's update method.
        id = int(input('请输入要修改学生的编号:'))
        name = input('请输入新的学生姓名:')
        age = int(input('请输入新的学生年龄:'))
        score = int(input('请输入新的学生成绩:'))
        stu = StudentModel(name,age,score,id)
        if self.__manager.update_student(stu):
            print('修改成功')
        else:
            print('修改失败')

    def __output_student_by_socre(self):
        # Sort ascending by score, then reuse the plain list printer.
        # ("socre" typo kept: the name is referenced in __select_menu.)
        self.__manager.order_by_score()
        self.__output_students(self.__manager.stu_list)
"[email protected]"
] | |
78d406860f54317f57d41d8f3bdb89b4bed931b9 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/cone/colorbar/_thicknessmode.py | 780825a965633045eb793b2aedd2476e4bbd835e | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 508 | py | import _plotly_utils.basevalidators
class ThicknessmodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Enumerated validator for ``cone.colorbar.thicknessmode``.

    Accepts either "fraction" or "pixels"; edits of this property are
    classified as "colorbars" unless the caller overrides the defaults.
    """

    def __init__(
        self, plotly_name="thicknessmode", parent_name="cone.colorbar", **kwargs
    ):
        # Pull the two known options out of kwargs (falling back to the
        # schema defaults) and forward everything else untouched.
        edit_type = kwargs.pop("edit_type", "colorbars")
        allowed_values = kwargs.pop("values", ["fraction", "pixels"])
        super(ThicknessmodeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            values=allowed_values,
            **kwargs
        )
| [
"[email protected]"
] | |
831330dcefafc3860800291775c941a2014c0554 | f1cb02057956e12c352a8df4ad935d56cb2426d5 | /LeetCode/1426. Counting Elements/Solution.py | d1562d8f530d1dc240ef0657eef54bb2fb1ca1c0 | [] | no_license | nhatsmrt/AlgorithmPractice | 191a6d816d98342d723e2ab740e9a7ac7beac4ac | f27ba208b97ed2d92b4c059848cc60f6b90ce75e | refs/heads/master | 2023-06-10T18:28:45.876046 | 2023-05-26T07:46:42 | 2023-05-26T07:47:10 | 147,932,664 | 15 | 2 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | from collections import Counter
class Solution:
    def countElements(self, arr: "List[int]") -> int:
        """Count, with multiplicity, the elements x of arr for which
        x + 1 also occurs in arr.

        The annotation is a string on purpose: ``List`` is injected by
        the LeetCode judge and never imported in this file, so an
        evaluated annotation would raise NameError at import time.
        """
        counts = Counter(arr)  # value -> number of occurrences
        return sum(mult for value, mult in counts.items() if value + 1 in counts)
| [
"[email protected]"
] | |
4941db21d1801a4b32c8b425abfa53ba39a2daf7 | 0e7aed5eef2e1d132a7e75dd8f439ae76c87639c | /python/99_recovery_binary_search_tree.py | f33ff8d9a0ddc717ee9257ba29adfbe15bb56a73 | [
"MIT"
] | permissive | liaison/LeetCode | 2a93df3b3ca46b34f922acdbc612a3bba2d34307 | bf03743a3676ca9a8c107f92cf3858b6887d0308 | refs/heads/master | 2022-09-05T15:04:19.661298 | 2022-08-19T19:29:19 | 2022-08-19T19:29:19 | 52,914,957 | 17 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,097 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def recoverTree(self, root: "Optional[TreeNode]") -> None:
        """Repair a BST in which exactly two nodes' values were swapped.

        Modifies the tree in place and returns nothing.

        An inorder walk of a valid BST is strictly increasing, so the
        two swapped nodes appear as inversions: the first inversion's
        left element and the last inversion's right element are the
        culprits.

        The annotation is a string on purpose: neither ``Optional`` nor
        ``TreeNode`` is imported/defined in this file (LeetCode injects
        them), so an evaluated annotation would raise NameError at
        import time.
        """
        first_switch, second_switch = None, None
        prev_node = None

        def dfs(curr_node):
            # Recursive inorder traversal, tracking the previous node.
            nonlocal first_switch, second_switch, prev_node
            if not curr_node:
                return
            dfs(curr_node.left)
            if prev_node:
                if curr_node.val < prev_node.val:
                    second_switch = curr_node
                    if not first_switch:
                        first_switch = prev_node
                    else:
                        # Second inversion found: both culprits known.
                        return
            prev_node = curr_node
            dfs(curr_node.right)

        dfs(root)
        # Swap the two offending values back into place.
        first_switch.val, second_switch.val = second_switch.val, first_switch.val
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class SolutionIteration:
    def recoverTree(self, root: "Optional[TreeNode]") -> None:
        """Iterative version of recoverTree: explicit-stack inorder walk.

        Modifies the tree in place and returns nothing.

        The annotation is kept as a string because ``Optional`` and
        ``TreeNode`` are not imported in this file; evaluating the
        annotation would raise NameError at import time.
        """
        first_switch, second_switch = None, None
        prev = None
        stack = []
        curr = root
        while stack or curr:
            # Descend to the leftmost unvisited node.
            while curr:
                stack.append(curr)
                curr = curr.left
            curr = stack.pop()
            if prev and prev.val > curr.val:
                second_switch = curr
                if not first_switch:
                    first_switch = prev
                else:
                    # Second inversion: both swapped nodes identified.
                    break
            prev = curr
            curr = curr.right
        # Swap the two offending values back into place.
        first_switch.val, second_switch.val = second_switch.val, first_switch.val
| [
"[email protected]"
] | |
9e07b5398d27a117cd0715e83244ef70c8effa19 | 425db5a849281d333e68c26a26678e7c8ce11b66 | /LeetCodeSolutions/LeetCode_0077.py | 3acdf7b01e3c58d9d3ca83d70ec1fcf15e98ba77 | [
"MIT"
class Solution:
    def combine(self, n: int, k: int) -> "List[List[int]]":
        """Return every k-element combination of 1..n in lexicographic order.

        The annotation is a string because ``List`` (injected by the
        LeetCode judge) is never imported in this file; an evaluated
        annotation would raise NameError at import time.
        """
        # Preserve the original guard behaviour: empty result for k == 0
        # or impossible/invalid inputs.
        if n < k or k == 0 or n < 1:
            return []
        from itertools import combinations
        # combinations() emits tuples in exactly the lexicographic order
        # the previous hand-rolled recursion produced.
        return [list(combo) for combo in combinations(range(1, n + 1), k)]
| [
"[email protected]"
] | |
9fc53681ad817b7fa251631d75071089db5d2263 | 3aafaa865594aa58d056a79fdae4d0658774d3ab | /lpot/policy/policy.py | 7c0681ec514a8231ce3bcfe65665bfd8becfff2b | [
"Apache-2.0",
"MIT",
"Intel"
] | permissive | asamarah1/lpot | 56aac0d46692e1864de2f06390ab435cd079e741 | 881bde402db387b04c2f33cc96fb817f47c4d623 | refs/heads/master | 2023-01-20T15:55:39.088923 | 2020-12-01T13:22:59 | 2020-12-01T14:25:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,827 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
POLICIES = {}
# Registry mapping a lowercase policy name (class name minus the
# 'PrunePolicy' suffix) to the policy class itself.


def policy_registry(cls):
    """Class decorator registering a PrunePolicy subclass under its short name.

    Args:
        cls (class): The class being registered; its name must end with
            'PrunePolicy'.

    Returns:
        cls: The same class, unchanged, so the decorator is transparent.
    """
    suffix = 'PrunePolicy'
    assert cls.__name__.endswith(suffix), \
        "The name of subclass of PrunePolicy should end with 'PrunePolicy' substring."
    short_name = cls.__name__[:-len(suffix)].lower()
    if short_name in POLICIES:
        raise ValueError('Cannot have two policies with the same name')
    POLICIES[short_name] = cls
    return cls
class PrunePolicy:
    def __init__(self, model, local_config, global_config, adaptor):
        """The base class of Prune policies

        Args:
            model (object): The original model (currently torch.nn.module
                instance).
            local_config (Conf): configs specific for this pruning instance
            global_config (Conf): global configs which may be overwritten by
                local_config
            adaptor (Adaptor): Correspond adaptor for current framework
        """
        self.model = model
        self.adaptor = adaptor
        # Only 4-D weight tensors are considered by default.
        self.tensor_dims = [4]

        # NOTE(review): the truthiness checks below fall back to the
        # global/default value whenever the local one is falsy, so an
        # explicit local value of 0/0.0 would be ignored — confirm that
        # is the intended Conf semantics.
        if local_config.method:
            self.method = local_config.method
        else:
            self.method = "per_tensor"

        if local_config.init_sparsity:
            self.init_sparsity = local_config["init_sparsity"]
        else:
            self.init_sparsity = global_config.pruning["init_sparsity"]
        if local_config.target_sparsity:
            self.target_sparsity = local_config.target_sparsity
        else:
            self.target_sparsity = global_config.pruning.target_sparsity
        # Pruning schedule: active between start_epoch and end_epoch,
        # advancing every `freq` epochs.
        self.start_epoch = global_config.pruning["start_epoch"]
        self.end_epoch = global_config.pruning["end_epoch"]
        self.freq = global_config.pruning["frequency"]
        # Weights to prune; defaults to every weight the adaptor reports.
        if local_config.weights:
            self.weights = local_config.weights
        else:
            self.weights = self.adaptor.get_all_weight_names(self.model)
        self.is_last_epoch = False
        # weight name -> pruning mask, populated by subclasses.
        self.masks = {}

    def on_epoch_begin(self, epoch):
        # Hook called at the start of each epoch; subclasses must implement.
        raise NotImplementedError

    def on_batch_begin(self, batch_id):
        # Hook called before each batch; subclasses must implement.
        raise NotImplementedError

    def on_epoch_end(self):
        # Hook called at the end of each epoch; subclasses must implement.
        raise NotImplementedError

    def on_batch_end(self):
        # Hook called after each batch; subclasses must implement.
        raise NotImplementedError

    def update_sparsity(self, epoch):
        """ update sparsity goals according to epoch numbers

        Linear ramp from init_sparsity to target_sparsity between
        start_epoch and end_epoch, advancing only every `freq` epochs.

        Args:
            epoch (int): the epoch number

        Returns:
            sparsity (float): sparsity target in this epoch
        """
        if self.start_epoch == self.end_epoch:
            return self.init_sparsity
        if epoch < self.start_epoch:
            return 0
        if epoch > self.end_epoch:
            return self.target_sparsity
        return self.init_sparsity + (self.target_sparsity - self.init_sparsity) * (
            (epoch - self.start_epoch) // self.freq) * self.freq / \
            (self.end_epoch - self.start_epoch)
| [
"[email protected]"
] | |
f54e17f1039b967e95884b5985520df3dfd65637 | e71ecfe679dd8c800e8b0960d4ba68e19401a4fc | /stepik_lesson/course_512/24462_step_7/stepik_lesson_24462_step_7.py | 63695a2af1d67f45221fa836de778932c8006dcf | [] | no_license | igizm0/SimplePyScripts | 65740038d36aab50918ca5465e21c41c87713630 | 62c8039fbb92780c8a7fbb561ab4b86cc2185c3d | refs/heads/master | 2021-04-12T10:48:17.769548 | 2017-06-15T18:53:04 | 2017-06-15T18:53:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,536 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
"""
Вам дано описание наследования классов в следующем формате.
<имя класса 1> : <имя класса 2> <имя класса 3> ... <имя класса k>
Это означает, что класс 1 отнаследован от класса 2, класса 3, и т. д.
Или эквивалентно записи:
class Class1(Class2, Class3 ... ClassK):
pass
Класс A является прямым предком класса B, если B отнаследован от A:
class B(A):
pass
Класс A является предком класса B, если
A = B;
A - прямой предок B
существует такой класс C, что C - прямой предок B и A - предок C
Например:
class B(A):
pass
class C(B):
pass
# A -- предок С
Вам необходимо отвечать на запросы, является ли один класс предком другого класса
Важное примечание:
Создавать классы не требуется.
Мы просим вас промоделировать этот процесс, и понять существует ли путь от одного класса до другого.
Формат входных данных
В первой строке входных данных содержится целое число n - число классов.
В следующих n строках содержится описание наследования классов. В i-й строке указано от каких классов наследуется
i-й класс. Обратите внимание, что класс может ни от кого не наследоваться. Гарантируется, что класс не наследуется
сам от себя (прямо или косвенно), что класс не наследуется явно от одного класса более одного раза.
В следующей строке содержится число q - количество запросов.
В следующих q строках содержится описание запросов в формате <имя класса 1> <имя класса 2>.
Имя класса – строка, состоящая из символов латинского алфавита, длины не более 50.
Формат выходных данных
Для каждого запроса выведите в отдельной строке слово "Yes", если класс 1 является предком класса 2, и "No",
если не является.
Sample Input:
4
A
B : A
C : A
D : B C
4
A B
B D
C D
D A
Sample Output:
Yes
Yes
Yes
No
"""
# Пример использования. В консоли:
# > python stepik_lesson_24462_step_7.py < in
# Yes
# Yes
# Yes
# No
if __name__ == '__main__':
    # Sample input:
    # A
    # B : A
    # C : A
    # D : B C
    #
    # A B
    # B D
    # C D
    # D A

    # TODO: the task does not require classes to be modelled with classes;
    # this class could be replaced by a dict like {'name': '...', 'parents': [...]},
    # with has_parent moved out as a function working on that dict.
    class Class:
        def __init__(self, name):
            self.name = name
            self.list_parent_class = list()

        def has_parent(self, name):
            # Search the direct ancestors of the current class.
            for parent in self.list_parent_class:
                if parent.name == name:
                    return True
            # Recursively search the ancestors of the ancestors.
            for parent in self.list_parent_class:
                if parent.has_parent(name):
                    return True
            return False

        def __str__(self):
            return 'Class <"{}": {}>'.format(self.name, [cls.name for cls in self.list_parent_class])

        def __repr__(self):
            return self.__str__()

    from collections import OrderedDict, defaultdict
    class_dict = OrderedDict()

    # Maps a Class object to the list of names (strings) of the classes
    # it inherits from.
    class_line_dict = defaultdict(list)

    # Algorithm:
    # * find every class and add it to class_dict
    # * if a class declares bases, record the base names (strings) in class_line_dict
    # * after all classes are known, walk class_line_dict to fill in each class's
    #   parent list; by then every class object already exists in class_dict

    n = int(input())
    for _ in range(n):
        s = input()
        # print(s)

        clsn = s.split(' : ')
        cls1_name = clsn[0]

        # Register the class.
        cls = Class(cls1_name)
        class_dict[cls1_name] = cls

        # The description declares inheritance.
        if len(clsn) == 2:
            class_line_dict[cls] += clsn[1].split()

    for cls, names_cls_list in class_line_dict.items():
        for name_cls in names_cls_list:
            cls.list_parent_class.append(class_dict[name_cls])

    n = int(input())
    for _ in range(n):
        # a -- the ancestor, b -- the class,
        # i.e. check whether class b has ancestor a.
        a, b = input().split()

        # Quirky requirement: every class counts as its own ancestor.
        # This case is exercised from the second test onward.
        if a == b:
            print('Yes')
        else:
            print('Yes' if class_dict[b].has_parent(a) else 'No')
| [
"[email protected]"
] | |
bd9832d0c46aeb06f3e1be823cd8ddeeeff12f6b | 658e2e3cb8a4d5343a125f7deed19c9ebf06fa68 | /archived/Iris/predict_MLib.py | ad961a61d650da3d0598048dca22187b70f6101d | [] | no_license | yennanliu/analysis | 3f0018809cdc2403f4fbfe4b245df1ad73fa08a5 | 643ad3fed41961cddd006fadceb0e927f1db1f23 | refs/heads/master | 2021-01-23T21:48:58.572269 | 2020-10-13T22:47:12 | 2020-10-13T22:47:12 | 57,648,676 | 11 | 9 | null | null | null | null | UTF-8 | Python | false | false | 666 | py | # python 2.7
# import pyspark library
from pyspark import SparkConf, SparkContext
# spark_sklearn provides the same API as sklearn but uses Spark MLLib
# under the hood to perform the actual computations in a distributed way
# (passed in via the SparkContext instance).
from spark_sklearn import GridSearchCV
# import ML library
from sklearn import svm, grid_search, datasets
# Create the SparkContext that spark_sklearn will use to distribute work.
sc =SparkContext()
# Load the classic Iris dataset bundled with scikit-learn.
iris = datasets.load_iris()
# Hyper-parameter grid for the SVC search.
parameters = {'kernel':('linear', 'rbf'), 'C':[1, 10]}
svr = svm.SVC()
# spark_sklearn's GridSearchCV keeps scikit-learn's estimator API but runs
# the grid search across the cluster via the SparkContext.
clf = GridSearchCV(sc, svr, parameters)
clf.fit(iris.data, iris.target)
# NOTE(review): predicting on the training data only sanity-checks the fit;
# it is not a generalisation estimate.
print ("==================")
print (clf.predict(iris.data))
print ("==================") | [
"[email protected]"
] | |
d24fae521ef6c2f8b0e3cf7b13a138e1f4046455 | 8dbb2a3e2286c97b1baa3ee54210189f8470eb4d | /kubernetes-stubs/client/models/v1_sysctl.pyi | e638c63d5d823971ab9ff9df1684ff0940b88d6c | [] | no_license | foodpairing/kubernetes-stubs | e4b0f687254316e6f2954bacaa69ff898a88bde4 | f510dc3d350ec998787f543a280dd619449b5445 | refs/heads/master | 2023-08-21T21:00:54.485923 | 2021-08-25T03:53:07 | 2021-08-25T04:45:17 | 414,555,568 | 0 | 0 | null | 2021-10-07T10:26:08 | 2021-10-07T10:26:08 | null | UTF-8 | Python | false | false | 288 | pyi | import datetime
import typing
import kubernetes.client
class V1Sysctl:
    # Type stub for the Kubernetes V1Sysctl API object: a kernel parameter
    # name/value pair applied to a pod.  Keyword-only constructor mirrors the
    # generated client model.
    name: str
    value: str
    def __init__(self, *, name: str, value: str) -> None: ...
    def to_dict(self) -> V1SysctlDict: ...
class V1SysctlDict(typing.TypedDict, total=False):
    # Plain-dict (TypedDict) form of V1Sysctl as returned by to_dict();
    # total=False makes both keys optional.
    name: str
    value: str
| [
"[email protected]"
] | |
673d91a849350b9e67aade6bec505c7dab3c0e0c | 110044654f706e920380dad2779bb32a77f1f26f | /test/scons-time/run/option/prefix.py | df13cd03a7471c885ba2284a7fdb2d97f3999c1f | [
"MIT",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | SCons/scons | 89327bb9635cee6e7cc59249edca9cd859d7d1ff | b2a7d7066a2b854460a334a5fe737ea389655e6e | refs/heads/master | 2023-09-01T19:37:03.603772 | 2023-08-28T04:32:42 | 2023-08-28T04:32:42 | 104,670,160 | 1,827 | 342 | MIT | 2023-09-14T15:13:21 | 2017-09-24T19:23:46 | Python | UTF-8 | Python | false | false | 1,756 | py | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify specifying an alternate file prefix with the --prefix option.
"""
import TestSCons_time
# Build the harness: it provides a sandboxed directory plus a fake scons
# executable so scons-time can be exercised hermetically.
test = TestSCons_time.TestSCons_time()
test.write_fake_scons_py()
test.write_sample_project('foo.tar.gz')
# 'scons-time run --prefix bar' should name every output file 'bar-*'.
test.run(arguments = 'run --prefix bar foo.tar.gz')
# The run is expected to produce three timing iterations (000-0..000-2),
# each with a .log and a .prof carrying the requested prefix.
test.must_exist('bar-000-0.log',
                'bar-000-0.prof',
                'bar-000-1.log',
                'bar-000-1.prof',
                'bar-000-2.log',
                'bar-000-2.prof')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| [
"[email protected]"
] | |
03a9ddfa73884e7bfecba1c93affc417bf62874c | a1119965e2e3bdc40126fd92f4b4b8ee7016dfca | /trunk/geoip_server/geoip_server.py | 694cfbf4361cb21ff3247af3076efb976476662d | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | SeattleTestbed/attic | 0e33211ddf39efdbcf5573d4fc7fa5201aa7310d | f618a962ce2fd3c4838564e8c62c10924f5df45f | refs/heads/master | 2021-06-10T23:10:47.792847 | 2017-05-15T12:05:43 | 2017-05-15T12:05:43 | 20,154,061 | 0 | 1 | null | 2014-10-16T17:21:06 | 2014-05-25T12:34:00 | Python | UTF-8 | Python | false | false | 3,070 | py | #!/usr/bin/env python
"""
<Author>
Evan Meagher
<Start Date>
Nov 26, 2009
<Description>
Starts an XML-RPC server that allows remote clients to execute geolocation
queries using the pygeoip library.
<Usage>
python geoip_server.py /path/to/GeoIP.dat PORT
Where /path/to/GeoIP.dat is the path to a legal GeoIP database and PORT is
the port on which to host the server. Databases can be downloaded at
http://www.maxmind.com/app/ip-location.
More information on pygeoip at http://code.google.com/p/pygeoip/.
More information on geoip_server.py at
http://seattle.poly.edu/wiki/GeoIPServer
"""
import sys
sys.path.append("./seattle/seattle_repy")
import pygeoip
import repyportability
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
class SafeGeoIPServer(pygeoip.GeoIP):
  """
  <Purpose>
    Provides safe wrappers around the GeoIP server method calls.
    This allows us to check that each request is well-formatted before
    executing them on pygeoip.
    This class does not introduce any new methods; it only overrides
    existing methods in pygeoip.GeoIP.
  """
  def record_by_addr(self, addr):
    """
    <Purpose>
      Returns the GeoIP record for the specified IP address.
    <Arguments>
      addr: A public IPv4 address, as a dotted-quad string.
    <Side Effects>
      None
    <Exceptions>
      None
    <Return>
      A dictionary containing GeoIP information for the address
      specified, if valid.
      Returns False on errors.
    """
    # Reject non-public addresses before touching the GeoIP database.
    if not _is_public_ipv4(addr):
      # NOTE(review): the Fault is *returned*, not raised.  SimpleXMLRPCServer
      # only converts *raised* Fault instances into XML-RPC fault responses;
      # confirm that clients actually receive a fault on this path.
      return xmlrpclib.Fault(xmlrpclib.INVALID_METHOD_PARAMS, "Not a public IP address")
    return super(SafeGeoIPServer, self).record_by_addr(addr)
def _is_public_ipv4(addr):
"""
<Purpose>
Determines if an IPv4 address is public or not.
<Arguments>
addr: An IPv4 address.
<Side Effects>
None
<Exceptions>
None, assuming that the provided value is a valid IPv4 address.
<Returns>
True if it is a public IP address, False otherwise.
"""
# We need to do some range comparisons for Class B and C addresses,
# so preprocess them into ints.
ip_int_tokens = [int(token) for token in addr.split('.')]
if ip_int_tokens[0] == 10:
# Class A private address is in the form 10.*.*.*
return False
# Class B private addresses are in the range 172.16.0.0/16 to
# 172.31.255.255/16
elif ip_int_tokens[0] == 172:
if 16 <= ip_int_tokens[1] and ip_int_tokens[1] < 32:
return False
# Class C private addresses are in the form 192.168.*.*
elif ip_int_tokens[0:2] == [192, 168]:
return False
return True
# Handle arguments
if len(sys.argv) < 3:
  # NOTE: Python 2 print statement -- this script targets Python 2.
  print "Usage: python geoip_server.py /path/to/GeoIP.dat PORT"
  raise RuntimeError
geoipdb_filename = sys.argv[1]
port = int(sys.argv[2])
# Get external IP so the XML-RPC server is reachable from other hosts.
ext_ip = repyportability.getmyip()
# Create server
server = SimpleXMLRPCServer((ext_ip, port), allow_none=True)
# Initialize and register geoip object; all of its public methods become
# remotely callable XML-RPC methods.
gic = SafeGeoIPServer(geoipdb_filename)
server.register_instance(gic)
# Run the server's main loop (blocks forever).
server.serve_forever()
| [
"USER@DOMAIN"
] | USER@DOMAIN |
f2d0bdcee3ce59d545e749c749eb9687f69c787a | e58f8258837fdf559cde6008ca46ecedb1735347 | /scripts/extract_changelog.py | 7cba17577c9a58cf1e5f59f53cb4544824cdc2a4 | [
"MIT"
] | permissive | Dotnester/hyperqueue | 5a4bbd4966dc5c79e48b0f2012576e418a2b8230 | cf5227c2157ab431ee7018a40bbbf0558afe4f27 | refs/heads/main | 2023-07-08T22:22:15.992034 | 2021-08-08T09:38:06 | 2021-08-08T09:38:06 | 379,584,453 | 0 | 0 | MIT | 2021-06-23T11:47:10 | 2021-06-23T11:47:09 | null | UTF-8 | Python | false | false | 1,033 | py | import sys
from os.path import dirname, abspath, join
CURRENT_DIR = dirname(abspath(__file__))
CHANGELOG_PATH = join(dirname(CURRENT_DIR), "CHANGELOG.md")
def normalize(version: str) -> str:
    """Canonicalise a version/tag string: trim whitespace, drop any leading
    lowercase 'v' characters, then lowercase the remainder."""
    trimmed = version.strip()
    without_prefix = trimmed.lstrip("v")
    return without_prefix.lower()
def get_matching_lines(text: str, tag: str):
    """Yield the changelog body lines under the level-1 heading for *tag*.

    Scans *text* for a line of the form ``# <version>`` whose normalised
    version equals *tag*, then yields every following line up to (but not
    including) the next level-1 heading.  Yields nothing when *tag* has no
    section.
    """
    # str.splitlines() already returns a list; the original wrapped it in a
    # redundant list() call.
    lines = text.splitlines(keepends=False)
    for index, line in enumerate(lines):
        if not line.startswith("# "):
            continue
        if normalize(line.lstrip("# ")) != tag:
            continue
        for matching_line in lines[index + 1:]:
            if matching_line.startswith("# "):
                return
            yield matching_line
        # The matched section ran to end-of-file; stop instead of rescanning
        # the already-yielded lines as the original loop did.
        return
if __name__ == "__main__":
    # CLI entry point: `python extract_changelog.py <tag>` prints the
    # changelog section for that release tag.
    if len(sys.argv) < 2:
        print("Usage: python extract_changelog <tag>")
        exit(1)
    tag = normalize(sys.argv[1])
    with open(CHANGELOG_PATH) as f:
        text = f.read()
    # Re-emit the matched section under a fresh "# HyperQueue <tag>" heading.
    output = f"# HyperQueue {tag}\n"
    for line in get_matching_lines(text, tag):
        output += f"{line}\n"
    print(output)
| [
"[email protected]"
] | |
839eed4612014b865c80342615cde0f6ddac614d | 58afefdde86346760bea40690b1675c6639c8b84 | /leetcode/minimum-moves-to-equal-array-elements/286144821.py | dc0c0055a3678c35f3e5333e2f8ffd941978ff97 | [] | no_license | ausaki/data_structures_and_algorithms | aaa563f713cbab3c34a9465039d52b853f95548e | 4f5f5124534bd4423356a5f5572b8a39b7828d80 | refs/heads/master | 2021-06-21T10:44:44.549601 | 2021-04-06T11:30:21 | 2021-04-06T11:30:21 | 201,942,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | # title: minimum-moves-to-equal-array-elements
# detail: https://leetcode.com/submissions/detail/286144821/
# datetime: Sun Dec 15 23:17:51 2019
# runtime: 300 ms
# memory: 14 MB
class Solution:
    def minMoves(self, nums: List[int]) -> int:
        """Minimum moves to make all elements of *nums* equal, where one move
        increments n-1 elements by 1.

        Incrementing every element but one is equivalent to decrementing a
        single element, so the answer is the total distance from each element
        down to the minimum: sum(nums) - min(nums) * len(nums).  Replaces the
        original hand-rolled running-sum/running-min loop with the builtins.
        """
        return sum(nums) - min(nums) * len(nums)
| [
"[email protected]"
] | |
b809857ce9e48ae6ea3743c92b3a80ee7ad9cbd3 | c886b04cdbe32e0997d9bc0259b90575ebb2d084 | /system/vhost/zkeys_database_conf.py | 351ec5b296da0e45d04398b1a40109dfa5125e71 | [] | no_license | bmhxbai/AngelSword | 3ce7b9f9f9e6114f8d4cff15e17d1fd8225a786c | a048dbfcbcf0097c6cf683ab9cd5ce975bddcf68 | refs/heads/master | 2020-06-25T07:23:15.505146 | 2017-07-01T17:57:34 | 2017-07-01T17:57:34 | 96,964,744 | 2 | 1 | null | 2017-07-12T04:20:20 | 2017-07-12T04:20:20 | null | UTF-8 | Python | false | false | 1,256 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: 宏杰Zkeys虚拟主机默认数据库漏洞
referer: http://www.wooyun.org/bugs/wooyun-2014-048350
author: Lucifer
description: 宏杰Zkeys虚拟主机默认开启999端口,默认数据库密码zkeys可连接root。
'''
import sys
import pymysql
import warnings
from termcolor import cprint
from urllib.parse import urlparse
class zkeys_database_conf_BaseVerify:
    """Checks a target for the ZKEYS virtual-host default MySQL credentials
    (user ``root``, password ``zkeys``) on port 3306."""
    def __init__(self, url):
        self.url = url
    def run(self):
        # Try a MySQL login with the vendor default credentials and report the
        # target as vulnerable if the connection answers.
        if r"http" in self.url:
            # Extract the host part of the URL (netloc), dropping any port.
            host = urlparse(self.url)[1]
            flag = host.find(":")
            if flag != -1:
                host = host[:flag]
            else:
                # NOTE(review): this falls back to the *full* URL rather than
                # the netloc when no port is present -- looks like a bug; verify.
                host = self.url
        # NOTE(review): if the URL does not contain "http", `host` is never
        # assigned; the resulting NameError is swallowed by the bare except.
        try:
            conn = pymysql.connect(host=host, user="root", passwd="zkeys", port=3306, connect_timeout=6)
            # NOTE(review): modern pymysql Connection.ping() returns None, so
            # this attribute access would raise; confirm against the pinned
            # pymysql version.
            if conn.ping().server_status == 0:
                cprint("[+]存在宏杰Zkeys虚拟主机默认数据库漏洞...(高危)\tpayload: "+host+":3306"+" root:zkeys", "red")
        except:
            cprint("[-] "+__file__+"====>连接超时", "cyan")
if __name__ == "__main__":
warnings.filterwarnings("ignore")
testVuln = zkeys_database_conf_BaseVerify(sys.argv[1])
testVuln.run() | [
"[email protected]"
] | |
0e02481e8375d280ec68fb8d4c16894dc62b6acb | 0bf833b379210656214b4064d0a71850352e014e | /vsCode/DL_rawLevel/DL_rawLevel/4.ANN/test.py | 3bf3a6f55736816652323c2826993e413b759991 | [
"MIT"
] | permissive | pimier15/pyDLBasic | f10168e352b94b9d77fda3c1b89a294b66fd8e4e | 80336cddbcb3ec1f70106b74ff0b3172510b769e | refs/heads/master | 2021-07-20T00:37:15.363651 | 2017-10-28T02:32:47 | 2017-10-28T02:32:47 | 107,188,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | import numpy as np
# Scratch script: step-by-step expansion of a cross-entropy-style computation
# for one-hot targets, printing the intermediates.
ys = np.array([[1,2] , [3,4]])
#ys = np.array([1,2 , 3,4])
ts = np.array([[0,1] , [0,1]])
#ts = np.array([0,1 , 0,1])
IsOneHot = True
res = None
batch_size = ys.shape[0]
#res = -np.sum(ts*np.log(ys + 1e-7) ) / batch_size
#res = -np.sum(np.log(ys[np.arange(batch_size) , t]) ) / batch_size
# 1e-7 guards against log(0).
ly = np.log(ys + 1e-7)
# One-hot mask: keeps only the log value of the "true" class per row.
lymulti = ts * ly
print(ly)
print(lymulti)
print()
res1 = np.sum(lymulti , axis = 0)
# NOTE(review): res2 is immediately overwritten by the next line, so the
# axis=1 sum is dead code.
res2 =np.sum(lymulti , axis = 1)
res2 =np.sum(lymulti )
print()
| [
"[email protected]"
] | |
fc2fa0fbaa66103a9e382ebb8547fd0c14538125 | dce4cfdae2d1e7285b92f657f2509143595f4cc3 | /loadsbroker/client/base.py | faf44a100b06fc418c09f1db7c7545768e417185 | [
"Apache-2.0"
] | permissive | loads/loads-broker | ed8a0c0b79b132a0b4e73da59a391193e9e99147 | b6134690e1bd7b07e226bee16e6f779ef0f170d9 | refs/heads/master | 2021-01-17T13:46:16.539082 | 2017-03-29T22:06:20 | 2017-03-29T22:06:20 | 23,458,608 | 7 | 1 | Apache-2.0 | 2021-04-27T19:20:00 | 2014-08-29T09:02:14 | Python | UTF-8 | Python | false | false | 599 | py |
class BaseCommand(object):
    """Base class for CLI commands bound to an HTTP session and an API root.

    Subclasses describe the command-line options they accept in
    ``arguments`` (keys may carry a ``--`` prefix).
    """

    # Option name -> option metadata; subclasses override this mapping.
    arguments = {}

    def __init__(self, session, root):
        self.session = session
        self.root = root

    def __call__(self, args):
        response = self.session.get(self.root)
        return response.json()

    def args2options(self, args):
        """Collect the values of declared options that are present on *args*."""
        options = {}
        for name in self.arguments:
            bare = name[2:] if name.startswith('--') else name
            key = bare.replace('-', '_')
            if key in args:
                options[key] = getattr(args, key)
        return options
| [
"[email protected]"
] | |
ae8ffa89543123c6624d4622af045eca5333796a | 15a961eee6a1b49aa9ec8efb3acc80e1913015f7 | /tfx/tools/cli/handler/beam_handler_test.py | 75406b979462f07fd4e6c8938a24503eebb03caa | [
"Apache-2.0"
] | permissive | tbligny/tfx | a9414824031bd7d1f6618f583fe2b7d2ef3a9a36 | e4e6e6ec271cc54f88dacb707bf0e3a8366d4b78 | refs/heads/master | 2022-05-25T13:31:57.460735 | 2020-02-11T05:03:18 | 2020-02-11T05:04:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,109 | py | # Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.tools.cli.handler.beam_handler."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import sys
import click
import mock
import tensorflow.compat.v1 as tf
from tfx.tools.cli import labels
from tfx.tools.cli.handler import beam_handler
def _MockSubprocess(cmd, env):  # pylint: disable=invalid-name, unused-argument
  # Stands in for subprocess.call during DSL compilation: instead of running
  # the pipeline DSL, write a fixed pipeline_args dict to the JSON path that
  # the handler passes through the environment.
  # Store pipeline_args in a json file
  pipeline_args_path = env[labels.TFX_JSON_EXPORT_PIPELINE_ARGS_PATH]
  pipeline_name = 'chicago_taxi_beam'
  pipeline_root = os.path.join(os.environ['HOME'], 'tfx', 'pipelines',
                               pipeline_name)
  pipeline_args = {
      'pipeline_name': pipeline_name,
      'pipeline_root': pipeline_root
  }
  with open(pipeline_args_path, 'w') as f:
    json.dump(pipeline_args, f)
  return 0  # Mimic a successful subprocess exit code.
def _MockSubprocess2(cmd, env):  # pylint: disable=invalid-name, unused-argument
  # Variant of _MockSubprocess that writes an *empty* pipeline_args dict,
  # simulating a DSL that yields no pipeline arguments.
  # Store pipeline_args in a json file
  pipeline_args_path = env[labels.TFX_JSON_EXPORT_PIPELINE_ARGS_PATH]
  pipeline_args = {}
  with open(pipeline_args_path, 'w') as f:
    json.dump(pipeline_args, f)
  return 0  # Mimic a successful subprocess exit code.
def _MockSubprocess3(cmd, env):  # pylint: disable=unused-argument
  # Echo the command line so tests can assert on what would have been run.
  click.echo(cmd)
  return 0  # Mimic a successful subprocess exit code.
class BeamHandlerTest(tf.test.TestCase):
def setUp(self):
super(BeamHandlerTest, self).setUp()
self._tmp_dir = os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR',
self.get_temp_dir())
self._home = os.path.join(self._tmp_dir, self._testMethodName)
self._olddir = os.getcwd()
os.chdir(self._tmp_dir)
self._original_home_value = os.environ.get('HOME', '')
os.environ['HOME'] = self._home
self._original_beam_home_value = os.environ.get('BEAM_HOME', '')
os.environ['BEAM_HOME'] = os.path.join(os.environ['HOME'], 'beam')
self._beam_home = os.environ['BEAM_HOME']
# Flags for handler.
self.engine = 'beam'
self.chicago_taxi_pipeline_dir = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'testdata')
self.pipeline_path = os.path.join(self.chicago_taxi_pipeline_dir,
'test_pipeline_beam_1.py')
self.pipeline_name = 'chicago_taxi_beam'
self.pipeline_root = os.path.join(self._home, 'tfx', 'pipelines',
self.pipeline_name)
self.run_id = 'dummyID'
# Pipeline args for mocking subprocess
self.pipeline_args = {
'pipeline_name': 'chicago_taxi_beam',
'pipeline_dsl_path': self.pipeline_path
}
def tearDown(self):
super(BeamHandlerTest, self).tearDown()
if self._home:
os.environ['HOME'] = self._original_home_value
if self._beam_home:
os.environ['BEAM_HOME'] = self._original_beam_home_value
os.chdir(self._olddir)
@mock.patch('subprocess.call', _MockSubprocess)
def testSavePipeline(self):
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
pipeline_args = handler._extract_pipeline_args()
handler._save_pipeline(pipeline_args)
self.assertTrue(
tf.io.gfile.exists(
os.path.join(handler._handler_home_dir,
self.pipeline_args[labels.PIPELINE_NAME])))
@mock.patch('subprocess.call', _MockSubprocess)
def testCreatePipeline(self):
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
handler.create_pipeline()
handler_pipeline_path = os.path.join(
handler._handler_home_dir, self.pipeline_args[labels.PIPELINE_NAME], '')
self.assertTrue(
tf.io.gfile.exists(
os.path.join(handler_pipeline_path, 'pipeline_args.json')))
@mock.patch('subprocess.call', _MockSubprocess)
def testCreatePipelineExistentPipeline(self):
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
handler.create_pipeline()
# Run create_pipeline again to test.
with self.assertRaises(SystemExit) as err:
handler.create_pipeline()
self.assertEqual(
str(err.exception), 'Pipeline "{}" already exists.'.format(
self.pipeline_args[labels.PIPELINE_NAME]))
@mock.patch('subprocess.call', _MockSubprocess)
def testUpdatePipeline(self):
# First create pipeline with test_pipeline.py
pipeline_path_1 = os.path.join(self.chicago_taxi_pipeline_dir,
'test_pipeline_beam_1.py')
flags_dict_1 = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: pipeline_path_1
}
handler = beam_handler.BeamHandler(flags_dict_1)
handler.create_pipeline()
# Update test_pipeline and run update_pipeline
pipeline_path_2 = os.path.join(self.chicago_taxi_pipeline_dir,
'test_pipeline_beam_2.py')
flags_dict_2 = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: pipeline_path_2
}
handler = beam_handler.BeamHandler(flags_dict_2)
handler.update_pipeline()
handler_pipeline_path = os.path.join(
handler._handler_home_dir, self.pipeline_args[labels.PIPELINE_NAME], '')
self.assertTrue(
tf.io.gfile.exists(
os.path.join(handler_pipeline_path, 'pipeline_args.json')))
@mock.patch('subprocess.call', _MockSubprocess)
def testUpdatePipelineNoPipeline(self):
# Update pipeline without creating one.
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
with self.assertRaises(SystemExit) as err:
handler.update_pipeline()
self.assertEqual(
str(err.exception), 'Pipeline "{}" does not exist.'.format(
self.pipeline_args[labels.PIPELINE_NAME]))
@mock.patch('subprocess.call', _MockSubprocess)
def testCompilePipeline(self):
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.compile_pipeline()
self.assertIn('Pipeline compiled successfully', captured.contents())
@mock.patch('subprocess.call', _MockSubprocess2)
def testCompilePipelineNoPipelineArgs(self):
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
with self.assertRaises(SystemExit) as err:
handler.compile_pipeline()
self.assertEqual(
str(err.exception),
'Unable to compile pipeline. Check your pipeline dsl.')
@mock.patch('subprocess.call', _MockSubprocess)
def testDeletePipeline(self):
# First create a pipeline.
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
handler.create_pipeline()
# Now delete the pipeline created aand check if pipeline folder is deleted.
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_NAME: self.pipeline_name
}
handler = beam_handler.BeamHandler(flags_dict)
handler.delete_pipeline()
handler_pipeline_path = os.path.join(
handler._handler_home_dir, self.pipeline_args[labels.PIPELINE_NAME], '')
self.assertFalse(tf.io.gfile.exists(handler_pipeline_path))
@mock.patch('subprocess.call', _MockSubprocess)
def testDeletePipelineNonExistentPipeline(self):
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_NAME: self.pipeline_name
}
handler = beam_handler.BeamHandler(flags_dict)
with self.assertRaises(SystemExit) as err:
handler.delete_pipeline()
self.assertEqual(
str(err.exception), 'Pipeline "{}" does not exist.'.format(
flags_dict[labels.PIPELINE_NAME]))
def testListPipelinesNonEmpty(self):
# First create two pipelines in the dags folder.
handler_pipeline_path_1 = os.path.join(os.environ['BEAM_HOME'],
'pipeline_1')
handler_pipeline_path_2 = os.path.join(os.environ['BEAM_HOME'],
'pipeline_2')
tf.io.gfile.makedirs(handler_pipeline_path_1)
tf.io.gfile.makedirs(handler_pipeline_path_2)
# Now, list the pipelines
flags_dict = {labels.ENGINE_FLAG: self.engine}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.list_pipelines()
self.assertIn('pipeline_1', captured.contents())
self.assertIn('pipeline_2', captured.contents())
def testListPipelinesEmpty(self):
flags_dict = {labels.ENGINE_FLAG: self.engine}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.list_pipelines()
self.assertIn('No pipelines to display.', captured.contents())
@mock.patch('subprocess.call', _MockSubprocess)
def testPipelineSchemaNoPipelineRoot(self):
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
handler.create_pipeline()
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_NAME: self.pipeline_name,
}
handler = beam_handler.BeamHandler(flags_dict)
with self.assertRaises(SystemExit) as err:
handler.get_schema()
self.assertEqual(
str(err.exception),
'Create a run before inferring schema. If pipeline is already running, then wait for it to successfully finish.'
)
@mock.patch('subprocess.call', _MockSubprocess)
def testPipelineSchemaNoSchemaGenOutput(self):
# First create a pipeline.
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
handler.create_pipeline()
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_NAME: self.pipeline_name,
}
handler = beam_handler.BeamHandler(flags_dict)
tf.io.gfile.makedirs(self.pipeline_root)
with self.assertRaises(SystemExit) as err:
handler.get_schema()
self.assertEqual(
str(err.exception),
'Either SchemaGen component does not exist or pipeline is still running. If pipeline is running, then wait for it to successfully finish.'
)
@mock.patch('subprocess.call', _MockSubprocess)
def testPipelineSchemaSuccessfulRun(self):
# First create a pipeline.
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_DSL_PATH: self.pipeline_path
}
handler = beam_handler.BeamHandler(flags_dict)
handler.create_pipeline()
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_NAME: self.pipeline_name,
}
handler = beam_handler.BeamHandler(flags_dict)
# Create fake schema in pipeline root.
schema_path = os.path.join(self.pipeline_root, 'SchemaGen', 'schema', '3')
tf.io.gfile.makedirs(schema_path)
with open(os.path.join(schema_path, 'schema.pbtxt'), 'w') as f:
f.write('SCHEMA')
with self.captureWritesToStream(sys.stdout) as captured:
handler.get_schema()
curr_dir_path = os.path.join(os.getcwd(), 'schema.pbtxt')
self.assertIn('Path to schema: {}'.format(curr_dir_path),
captured.contents())
self.assertIn(
'*********SCHEMA FOR {}**********'.format(self.pipeline_name.upper()),
captured.contents())
self.assertTrue(tf.io.gfile.exists(curr_dir_path))
@mock.patch('subprocess.call', _MockSubprocess3)
def testCreateRun(self):
# Create a pipeline in dags folder.
handler_pipeline_path = os.path.join(
os.environ['BEAM_HOME'], self.pipeline_args[labels.PIPELINE_NAME])
tf.io.gfile.makedirs(handler_pipeline_path)
with open(os.path.join(handler_pipeline_path, 'pipeline_args.json'),
'w') as f:
json.dump(self.pipeline_args, f)
# Now run the pipeline
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_NAME: self.pipeline_name
}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.create_run()
self.assertIn("[%s, '%s']" % (sys.executable, self.pipeline_path),
captured.contents())
def testCreateRunNoPipeline(self):
# Run pipeline without creating one.
flags_dict = {
labels.ENGINE_FLAG: self.engine,
labels.PIPELINE_NAME: self.pipeline_name
}
handler = beam_handler.BeamHandler(flags_dict)
with self.assertRaises(SystemExit) as err:
handler.create_run()
self.assertEqual(
str(err.exception), 'Pipeline "{}" does not exist.'.format(
flags_dict[labels.PIPELINE_NAME]))
def testDeleteRun(self):
# Create a pipeline in beam home.
handler_pipeline_path = os.path.join(
os.environ['BEAM_HOME'], self.pipeline_args[labels.PIPELINE_NAME])
tf.io.gfile.makedirs(handler_pipeline_path)
# Now run the pipeline
flags_dict = {labels.ENGINE_FLAG: self.engine, labels.RUN_ID: self.run_id}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.delete_run()
self.assertIn('Not supported for Beam.', captured.contents())
def testTerminateRun(self):
# Create a pipeline in beam home.
handler_pipeline_path = os.path.join(
os.environ['BEAM_HOME'], self.pipeline_args[labels.PIPELINE_NAME])
tf.io.gfile.makedirs(handler_pipeline_path)
# Now run the pipeline
flags_dict = {labels.ENGINE_FLAG: self.engine, labels.RUN_ID: self.run_id}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.terminate_run()
self.assertIn('Not supported for Beam.', captured.contents())
def testListRuns(self):
# Create a pipeline in beam home.
handler_pipeline_path = os.path.join(
os.environ['BEAM_HOME'], self.pipeline_args[labels.PIPELINE_NAME])
tf.io.gfile.makedirs(handler_pipeline_path)
# Now run the pipeline
flags_dict = {labels.ENGINE_FLAG: self.engine, labels.RUN_ID: self.run_id}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.list_runs()
self.assertIn('Not supported for Beam.', captured.contents())
def testGetRun(self):
# Create a pipeline in beam home.
handler_pipeline_path = os.path.join(
os.environ['BEAM_HOME'], self.pipeline_args[labels.PIPELINE_NAME])
tf.io.gfile.makedirs(handler_pipeline_path)
# Now run the pipeline
flags_dict = {labels.ENGINE_FLAG: self.engine, labels.RUN_ID: self.run_id}
handler = beam_handler.BeamHandler(flags_dict)
with self.captureWritesToStream(sys.stdout) as captured:
handler.get_run()
self.assertIn('Not supported for Beam.', captured.contents())
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
1900f05d4886154e7a4d78aa476492ce0db95e71 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02381/s946222912.py | 98a2c0e51de4dd78dcdf7cf46b06bc31ed0bae32 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | import math
while True:
n = int(input())
if n == 0:
break
a = 0
st = list(map(int, input().split()))
m = sum(st)/len(st)
for x in st:
a += (x-m)**2
print(math.sqrt(a/n))
| [
"[email protected]"
] | |
43283f9c8816b12d1b6dc5433408d6622dba55ee | 0b6966a75a4c62393a38a73df5a779228639c42c | /datasets/transforms.py | a3a47f2b8e7e0c32541ca65e91e0e49875957f43 | [] | no_license | Shuai-Xie/DEAL | 7cbec778bcc83b633a1c3319d9b00c8b0f98aa00 | 06ff3ba29196e276376a9cf8d868d54fd2db2680 | refs/heads/master | 2023-08-29T20:03:44.867280 | 2021-11-12T07:50:28 | 2021-11-12T07:50:28 | 300,126,893 | 25 | 2 | null | null | null | null | UTF-8 | Python | false | false | 9,018 | py | import torch
import random
import numpy as np
from PIL import Image, ImageEnhance, ImageOps
import constants
def mapbg(bg_idx):
    """
    Build a mapper that converts the label image's background index *bg_idx*
    to constants.BG_INDEX, so the real classes occupy [0, .., C-1].
    """
    # bg at the head (index 0): shift real classes down by one, then tag the
    # old background (now -1) with the sentinel.
    def map_headbg(target):
        target = target.astype(int)
        target -= 1  # 1->0
        target[target == -1] = constants.BG_INDEX
        return target.astype('uint8')
    # bg elsewhere (e.g. at the tail): simply replace it with the sentinel.
    def map_other(target):
        target = target.astype(int)
        target[target == bg_idx] = constants.BG_INDEX
        return target.astype('uint8')
    if bg_idx == 0:
        return map_headbg
    else:
        return map_other
def remap(bg_idx):
    """
    Build the inverse of mapbg: map a segmentation result back to the
    original background index *bg_idx*, for easy visualization.
    """
    # Undo the head-background shift: classes move back up by one, and the
    # sentinel (which became BG_INDEX + 1) returns to bg_idx.
    def remap_headbg(target):
        target = target.astype(int)
        target += 1
        target[target == constants.BG_INDEX + 1] = bg_idx
        return target.astype('uint8')
    # Non-zero background: just swap the sentinel back to bg_idx.
    def remap_other(target):
        target = target.astype(int)
        target[target == constants.BG_INDEX] = bg_idx
        return target.astype('uint8')
    if bg_idx == 0:
        return remap_headbg
    else:
        return remap_other
class Compose:
    """Chain several sample transforms; each is applied in order."""

    def __init__(self, trans_list):
        self.trans_list = trans_list

    def __call__(self, sample):
        result = sample
        for transform in self.trans_list:
            result = transform(result)
        return result

    def __repr__(self):
        body = ''.join('\n    {0}'.format(t) for t in self.trans_list)
        return self.__class__.__name__ + '(' + body + '\n)'
class RandomHorizontalFlip:
    """Mirror image and mask left-right (PIL transpose mode 0) with p=0.5."""

    def __call__(self, sample):
        image, mask = sample['img'], sample['target']
        flip = random.random() < 0.5
        if flip:
            image = image.transpose(0)
            mask = mask.transpose(0)
        sample['img'], sample['target'] = image, mask
        return sample
class RandomVerticalFlip:
    """With probability 0.5, flip img and target top-bottom (PIL code 1)."""

    def __call__(self, sample):
        if random.random() < 0.5:
            sample['img'] = sample['img'].transpose(1)
            sample['target'] = sample['target'].transpose(1)
        return sample
class RandomRightAngle:
    """With probability 0.5, rotate by a random right angle (90/180/270;
    PIL transpose codes 2-4), applied identically to img and target."""

    def __call__(self, sample):
        if random.random() < 0.5:
            code = random.randint(2, 4)
            sample['img'] = sample['img'].transpose(code)
            sample['target'] = sample['target'].transpose(code)
        return sample
class RandomDiagnoal:
    """With probability 0.5, transpose the sample across a random diagonal
    (main/anti; PIL transpose codes 5 and 6)."""

    def __call__(self, sample):
        # Bug fix: the original tested `random.random() < 10`, which is
        # always true, so this "random" transform fired unconditionally.
        # Use 0.5 for consistency with the other Random* transforms and
        # with the class's stated intent.
        if random.random() < 0.5:
            code = random.randint(5, 6)  # closed interval: 5 or 6
            sample['img'] = sample['img'].transpose(code)
            sample['target'] = sample['target'].transpose(code)
        return sample
class RandomScaleCrop:
    """Training augmentation: random short-side rescale, pad if needed,
    then a random crop_size x crop_size crop of img and target."""

    def __init__(self, base_size, crop_size, scales=(0.8, 1.2)):
        # base_size / crop_size are (w, h) pairs; only the short side is used.
        self.base_size = min(base_size)
        self.crop_size = min(crop_size)
        self.scales = scales

    def __call__(self, sample):
        img, target = sample['img'], sample['target']
        # Rescale the original image: pick a random short-side length
        # within [base * scales[0], base * scales[1]].
        short_size = random.randint(int(self.base_size * self.scales[0]), int(self.base_size * self.scales[1]))
        w, h = img.size
        if h > w:
            ow = short_size
            oh = int(1.0 * h * ow / w)
        else:
            oh = short_size
            ow = int(1.0 * w * oh / h)
        # Apply the random scale (bilinear for the image, nearest for labels
        # so class ids are never interpolated).
        img = img.resize((ow, oh), Image.BILINEAR)
        target = target.resize((ow, oh), Image.NEAREST)
        # If the scaled short side is smaller than the crop size, pad the
        # bottom/right so a full crop window fits.
        if short_size < self.crop_size:
            padh = self.crop_size - oh if oh < self.crop_size else 0
            padw = self.crop_size - ow if ow < self.crop_size else 0
            img = ImageOps.expand(img, border=(0, 0, padw, padh), fill=0) # img padded with 0; normalization happens later
            target = ImageOps.expand(target, border=(0, 0, padw, padh), fill=constants.BG_INDEX) # target padded with the bg index
        # Random crop: pick the top-left corner uniformly.
        w, h = img.size
        x1 = random.randint(0, w - self.crop_size)
        y1 = random.randint(0, h - self.crop_size)
        img = img.crop((x1, y1, x1 + self.crop_size, y1 + self.crop_size))
        target = target.crop((x1, y1, x1 + self.crop_size, y1 + self.crop_size))
        sample['img'], sample['target'] = img, target
        return sample
class ColorJitter:
    """Randomly jitter brightness/contrast/saturation of sample['img'].

    Each enhancement factor is drawn uniformly from
    [max(1 - v, 0), 1 + v].  Passing None (or a non-positive value) for a
    channel disables that jitter (factor range collapses to [1.0, 1.0],
    the identity).
    """

    def __init__(self, brightness=None, contrast=None, saturation=None):
        # Bug fix: the original only set each attribute when the argument
        # was a positive number, so ColorJitter(None, ...) crashed in
        # __call__ with AttributeError.  [1.0, 1.0] is the identity range
        # and keeps behavior unchanged for jittered channels.
        self.brightness = self._factor_range(brightness)
        self.contrast = self._factor_range(contrast)
        self.saturation = self._factor_range(saturation)

    @staticmethod
    def _factor_range(value):
        # Uniform sampling range for one enhancement channel.
        if value is not None and value > 0:
            return [max(1 - value, 0), 1 + value]
        return [1.0, 1.0]

    def __call__(self, sample):
        img, target = sample['img'], sample['target']
        r_brightness = random.uniform(self.brightness[0], self.brightness[1])
        r_contrast = random.uniform(self.contrast[0], self.contrast[1])
        r_saturation = random.uniform(self.saturation[0], self.saturation[1])
        img = ImageEnhance.Brightness(img).enhance(r_brightness)
        img = ImageEnhance.Contrast(img).enhance(r_contrast)
        img = ImageEnhance.Color(img).enhance(r_saturation)
        sample['img'], sample['target'] = img, target
        return sample
class FixScaleCrop:
    """Deterministic eval-time crop: scale the short side to crop_size
    (keeping aspect ratio), then take the centered square window."""

    def __init__(self, crop_size):
        self.crop_size = min(crop_size)

    def __call__(self, sample):
        img, target = sample['img'], sample['target']
        w, h = img.size
        # Scale so the SHORT side equals crop_size.
        if w > h:
            oh = self.crop_size
            ow = int(1.0 * w * oh / h)
        else:
            ow = self.crop_size
            oh = int(1.0 * h * ow / w)
        img = img.resize((ow, oh), Image.BILINEAR)
        target = target.resize((ow, oh), Image.NEAREST)
        # Centered crop window on the rescaled image.
        left = int(round((ow - self.crop_size) / 2.))
        top = int(round((oh - self.crop_size) / 2.))
        box = (left, top, left + self.crop_size, top + self.crop_size)
        sample['img'] = img.crop(box)
        sample['target'] = target.crop(box)
        return sample
class FixedResize:
    """Resize img (bilinear) and target (nearest) to a fixed (w, h)."""

    def __init__(self, size):
        self.size = size

    def __call__(self, sample):
        sample['img'] = sample['img'].resize(self.size, Image.BILINEAR)
        sample['target'] = sample['target'].resize(self.size, Image.NEAREST)
        return sample
class Normalize:
    """Scale sample['img'] to [0, 1] then apply per-channel
    (x - mean) / std; convert sample['target'] to float32."""

    def __init__(self, mean=(0., 0., 0.), std=(1., 1., 1.)):
        self.mean = mean
        self.std = std

    def __call__(self, sample):
        image = np.array(sample['img']).astype(np.float32)
        labels = np.array(sample['target']).astype(np.float32)
        # In-place ops keep the float32 dtype while broadcasting the
        # per-channel mean/std tuples.
        image /= 255.0
        image -= self.mean
        image /= self.std
        sample['img'], sample['target'] = image, labels
        return sample
class ToTensor:
    """Convert sample['img'] (HWC) to a CHW float tensor and
    sample['target'] to a long tensor."""

    def __call__(self, sample):
        img_arr = np.array(sample['img']).astype(np.float32).transpose((2, 0, 1))
        target_arr = np.array(sample['target']).astype(np.float32)
        sample['img'] = torch.from_numpy(img_arr).float()
        sample['target'] = torch.from_numpy(target_arr).long()
        return sample
def get_transform(split, base_size, crop_size=None):
    """Build the transform pipeline for a dataset split.

    'train' gets random scale/crop, horizontal flip and color jitter;
    'val' and 'test' get a fixed resize.  All splits end with ImageNet
    normalization and tensor conversion.  Unknown splits return None.
    """
    tail = [
        Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
        ToTensor(),
    ]
    if split == 'train':
        augmentation = [
            RandomScaleCrop(base_size, crop_size, scales=(0.8, 1.2)),  # sampler
            RandomHorizontalFlip(),                                    # flip
            ColorJitter(brightness=0.5, contrast=0.5, saturation=0.5), # color
        ]
        return Compose(augmentation + tail)
    elif split in ('val', 'test'):
        return Compose([FixedResize(base_size)] + tail)
import torchvision.transforms as transforms
def get_img_transfrom(base_size):
    """Torchvision pipeline for a raw image: resize to (h, w) =
    (base_size[1], base_size[0]), convert to tensor, ImageNet-normalize.

    NOTE(review): the name keeps the original "transfrom" typo because
    external callers may import it under this name.
    """
    return transforms.Compose([
        transforms.Resize((base_size[1], base_size[0])),
        transforms.ToTensor(),
        transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225))
    ])
| [
"[email protected]"
] | |
fefc83938d27fd019badb525867387ffddc88103 | 47471b8424715c1a1bc9e6745f44b89bec3add5f | /apps/account/admin.py | 536398f3e9367c65f9ffeb70845899b24cc3d01e | [] | no_license | Dimasuz/6.6_Ln_with_docker | 5f34db291afe243e8e571067e17e7b3d92145437 | d1fee864816174c0e03acc13a6849ee3277f6cec | refs/heads/master | 2023-05-27T07:09:05.187935 | 2021-06-11T20:07:14 | 2021-06-11T20:07:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as DefaultUserAdmin
from django.utils.translation import gettext_lazy as _
from apps.account.models import User
@admin.register(User)
class UserAdmin(DefaultUserAdmin):
    """Admin configuration for the custom User model.

    Extends Django's stock UserAdmin, adding the custom `avatar` field to
    the personal-info fieldset.
    """
    # Columns shown in the user change-list page.
    list_display = (
        'username', 'first_name', 'last_name', 'is_superuser', 'is_staff', 'is_active',
    )
    # Grouped field layout of the user change form.
    fieldsets = (
        (None, {'fields': ('username', 'password')}),
        (_('Personal info'), {'fields': ('first_name', 'last_name', 'email', 'avatar')}),
        (_('Permissions'), {
            'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'),
        }),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
| [
"[email protected]"
] | |
29040bbe393c5645cee310429de8117d60756676 | 19b82e3a7610696a70d2d3fac70cf5425e61853d | /python_learning_liaoxuefeng/function_arguments.py | 432646b83e51e0d5ccf25a6b96f4f5ece8a3fc25 | [] | no_license | xieyipeng/python | 9ef5c3e3865e78aa4948c7a89aa7d2c6713122c6 | 1f4e23369e76fc0739d4c2213b6470bba9fa288c | refs/heads/master | 2021-04-15T13:09:30.338025 | 2020-03-11T03:36:03 | 2020-03-11T03:36:03 | 126,497,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,047 | py | # 函数参数:
# 位置参数:
# def power(x):
# return x * x
# print(power(5))
def power(x, n=2):
    """Return x multiplied by itself n times (default n=2 squares x)."""
    result = 1
    while n > 0:
        result *= x
        n -= 1
    return result
print(power(5, 4))
# 默认参数:
print(power(5)) # 必选参数在前,默认参数在后
def enroll(name, gender, age=6, city='beijing'):
    """Print one enrollment record; age and city have defaults."""
    fields = (('name:', name), ('gender:', gender), ('age:', age), ('city:', city))
    for label, value in fields:
        print(label, value)
enroll('Sarach', 'F')
enroll('Sarach', 'F', city='Tianjin') # 默认参数的调用
# def add_end(L=[]): # 默认参数的坑
# L.append('end')
# return L
#
#
# print(add_end())
# print(add_end())
# print(add_end()) # 所以默认参数的定义必须是只想不可变对象
def add_end(L=None):
    """Append 'end' to L and return it; a fresh list is used when L is None
    (the None sentinel avoids the shared-mutable-default pitfall)."""
    target = [] if L is None else L
    target.append('end')
    return target
print(add_end())
print(add_end())
print(add_end())
# 可变参数:参数个数可变
def calc(*numbers):
    """Return the sum of squares of the given numbers (0 for no args)."""
    return sum(n * n for n in numbers)
print(calc(1, 2, 3, 4))
nums = [1, 2, 3]
print(calc(*nums))
# 关键字参数:扩展函数的功能
# def person(name, age, **kw): # 命名关键字参数:
# if 'city' in kw:
# pass
# if 'job' in kw:
# pass
# print('name:', name, 'age:', age, 'other:', kw)
#
#
# print(person('Michael', 30))
# print(person('Bob', 35, city='Beijing')) # 可以组装一个dict,person('Jack',24,**extra)
# 命名关键字参数:
# def person(name, age, *, city, job): # 命名关键字参数:在*之后的参数都是命名关键字参数
# print(name, age, city, job)
#
#
# print(person('Jack', 24, city='Beijing', job='English')) # 参数必须传入参数名
def person(name, age, *, city='Beijing', job):
    """Print a one-line summary; city and job are keyword-only."""
    details = (name, age, city, job)
    print(*details)
print(person('Jack', 24, job='English'))
# 参数组合-顺序:必选参数->默认参数->可变参数->命名参数/命名关键字参数->关键字参数
| [
"[email protected]"
] | |
d0011a7ba2c397fd29dfb45dc2ba6c8850c56dfa | fd994f57661fc7960a2a47cb70283db0a4f0145f | /lampost/gameops/display.py | dded84469844a7faca54b223e24a586173763318 | [
"MIT"
] | permissive | NancyR/Lampost-Mud | 24d6ad979e99b27dd05546646694f1727af73bc1 | 7d313c34af0eadb2707242ca351013c97c720f51 | refs/heads/master | 2021-01-14T12:57:32.719005 | 2013-05-05T22:14:37 | 2013-05-05T22:14:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | DEFAULT_COLOR = 'default'
# Symbolic names for the client-side display color channels.
SYSTEM_COLOR = 'system'
ROOM_TITLE_COLOR = 'room_title'
ROOM_COLOR = 'room'
EXIT_COLOR = 'exit'
TELL_FROM_COLOR = 'tell_from'
TELL_TO_COLOR = 'tell_to'
SAY_COLOR = 'say'
# channel id -> (human-readable description, default 24-bit RGB value)
COLOR_DATA = {DEFAULT_COLOR: ("Default", 0x00000),
              SYSTEM_COLOR: ("System messages", 0x002288),
              ROOM_TITLE_COLOR: ("Room titles", 0x6b306b),
              ROOM_COLOR: ("Rooms", 0xAD419A),
              EXIT_COLOR: ("Exit descriptions", 0x808000),
              TELL_FROM_COLOR: ("Tells from other players", 0x00a2e8),
              TELL_TO_COLOR: ("Tells to other players", 0x0033f8),
              SAY_COLOR: ("Say", 0xe15a00)}
| [
"[email protected]"
] | |
987cbeb106b713ef40e6d165b790ddb7ead12987 | 5327317139867617bf9faff600b07e9404d68126 | /data_generate_single.py | 853f78603a9082b74101b86b581f3a9b27753c9b | [] | no_license | Doreenruirui/BCI | 9091b9b4ceb021387d199a90ed001c9f8258761a | a8c5e7e3c0ed9cbef16ddc0c5b534b210f32d400 | refs/heads/master | 2021-07-14T07:19:07.715054 | 2018-12-04T10:54:52 | 2018-12-04T10:54:52 | 131,056,144 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,971 | py | import random
from os.path import join as pjoin
from data_simulate import *
import datetime
def tokenize(string):
    """Split a whitespace-separated id string into a list of ints."""
    return list(map(int, string.split()))
def load_vocabulary(path_data):
    """Read the `vocab` file under path_data (one word per line) and
    return (word -> id, id -> word) dictionaries, ids assigned by line."""
    vocab = {}
    rev_vocab = {}
    with open(pjoin(path_data, 'vocab')) as f_in:
        for idx, line in enumerate(f_in):
            word = line.strip('\n')
            vocab[word] = idx
            rev_vocab[idx] = word
    return vocab, rev_vocab
def refill(batches, fx, batch_size, start=0, end=-1, max_seq_len=300, sort_and_shuffle=True):
    """Read id-lines [start, end) from file handle fx and append
    (x_batch, y_batch) mini-batches to `batches`.

    For each non-empty line: y is the token line truncated to max_seq_len,
    x is the same line shifted right by one with a <sos> prefix (teacher
    forcing).  With sort_and_shuffle, pairs are shuffled, then sorted by
    input length so batches hold similarly sized sequences, and the final
    batch order is shuffled again.
    """
    line_pairs = []
    linex = fx.readline()
    line_id = 0
    while linex:
        if line_id >= start:
            newline = linex.strip()
            tokens = tokenize(newline)
            if len(tokens) >= 1:
                # decoder input: <sos> followed by the target shifted right
                tok_x = [char2id['<sos>']] + tokens[:max_seq_len][:-1]
                tok_y = tokens[:max_seq_len]
                line_pairs.append((tok_x, tok_y))
        linex = fx.readline()
        line_id += 1
        if line_id == end:  # end == -1 means "read to EOF"
            break
    if sort_and_shuffle:
        random.shuffle(line_pairs)
    # Group similar lengths together to reduce padding per batch.
    line_pairs = sorted(line_pairs, key=lambda e:len(e[0]))
    for batch_start in range(0, len(line_pairs), batch_size):
        x_batch = [ele[0] for ele in line_pairs[batch_start:batch_start + batch_size]]
        y_batch = [ele[1] for ele in line_pairs[batch_start:batch_start + batch_size]]
        batches.append((x_batch, y_batch))
    if sort_and_shuffle:
        random.shuffle(batches)
    return
def refill_noisy(batches, fx, fy, batch_size, start=0, end=-1, max_seq_len=300, sort_and_shuffle=True):
    """Like refill, but reads line-aligned noisy inputs from fx and clean
    targets from fy: x comes from the noisy line (with <sos> prefix, shifted
    right), y from the corresponding clean line.  Appends (x_batch, y_batch)
    mini-batches to `batches`.
    """
    line_pairs = []
    linex = fx.readline()
    liney = fy.readline()
    line_id = 0
    while linex:
        if line_id >= start:
            newline_x = linex.strip()
            newline_y = liney.strip()
            tokens_x = tokenize(newline_x)
            tokens_y = tokenize(newline_y)
            if len(tokens_x) >= 1:
                # decoder input from the noisy stream, target from the clean one
                tok_x = [char2id['<sos>']] + tokens_x[:max_seq_len][:-1]
                tok_y = tokens_y[:max_seq_len]
                line_pairs.append((tok_x, tok_y))
        linex = fx.readline()
        liney = fy.readline()
        line_id += 1
        if line_id == end:  # end == -1 means "read to EOF"
            break
    if sort_and_shuffle:
        random.shuffle(line_pairs)
    # Group similar lengths together to reduce padding per batch.
    line_pairs = sorted(line_pairs, key=lambda e:len(e[0]))
    for batch_start in range(0, len(line_pairs), batch_size):
        x_batch = [ele[0] for ele in line_pairs[batch_start:batch_start + batch_size]]
        y_batch = [ele[1] for ele in line_pairs[batch_start:batch_start + batch_size]]
        batches.append((x_batch, y_batch))
    if sort_and_shuffle:
        random.shuffle(batches)
    return
def padded(tokens, pad_v=None):
    """Right-pad every token list to the length of the longest one.

    Args:
        tokens: list of lists of token ids.
        pad_v: padding value; defaults to char2id['<pad>'], now resolved
            lazily (the original evaluated it at def time, so the module
            could not even be imported without char2id).

    Returns:
        A new list of equal-length lists.  An empty `tokens` returns []
        (previously max() raised ValueError on an empty sequence).
    """
    if pad_v is None:
        pad_v = char2id['<pad>']
    if not tokens:
        return []
    maxlen = max(len(token_list) for token_list in tokens)
    return [token_list + [pad_v] * (maxlen - len(token_list)) for token_list in tokens]
def load_data(batches, file_data, dev, batch_size=128, max_seq_len=300,
              prob_high=1.0, start=0, end=-1, sort_and_shuffle=False):
    """Fill `batches` with mini-batches for split `dev` from the dataset
    rooted at file_data.

    prob_high == 1.0 loads the clean '0.0' ids file as both input and
    target; otherwise the noisy file named '%.1f' % (1 - prob_high)
    supplies inputs while the clean file supplies targets.
    """
    if prob_high == 1.0:
        fx = open(pjoin(file_data, '0.0', dev + '.ids'))
        refill(batches, fx, batch_size, max_seq_len=max_seq_len,
               sort_and_shuffle=sort_and_shuffle, start=start, end=end)
    else:
        fy = open(pjoin(file_data, '0.0', dev + '.ids'))  # clean targets
        fx = open(pjoin(file_data, '%.1f' % (1 - prob_high), dev + '.ids'))  # noisy inputs
        refill_noisy(batches, fx, fy, batch_size, max_seq_len=max_seq_len,
                     sort_and_shuffle=sort_and_shuffle, start=start, end=end)
        fy.close()
    fx.close()  # fx is opened on both paths; closed once here
def iter_data(batch):
    """Pad one (x_tokens, y_tokens) batch with 0 and return time-major
    arrays: (source_tokens, source_mask, target_tokens)."""
    x_tokens, y_tokens = batch
    # Transpose to time-major layout: (seq_len, batch).
    source_tokens = np.transpose(np.array(padded(x_tokens, 0)), (1, 0))
    # Mask is 1 wherever a real (non-pad) token sits.
    source_mask = (source_tokens > 0).astype(np.int32)
    target_tokens = np.array(padded(y_tokens, 0)).T
    return (source_tokens, source_mask, target_tokens)
| [
"[email protected]"
] | |
2387e28c6307a5777130aedd6c83a0115cb3b4e6 | 0e7cdded06b219e20382edc8b855e4902c16bd1b | /task/download.py | dcacda001148129c45fb8111d59eebe14cc6e073 | [
"MIT"
] | permissive | cpausmit/FiBS | 71828b9fce4025a548bbeb8647ecfe5c37719f41 | 2a49bb3bea53201f1933dcf5a8d43e4774bcf9b8 | refs/heads/master | 2022-09-13T06:43:53.736788 | 2022-08-30T21:43:38 | 2022-08-30T21:43:38 | 55,325,041 | 0 | 1 | MIT | 2022-08-30T21:43:39 | 2016-04-03T01:57:53 | Python | UTF-8 | Python | false | false | 5,111 | py | #!/usr/bin/env python
#---------------------------------------------------------------------------------------------------
# Download exactly one file from a given xrootd location to another xrootd location.
#
# Ch.Paus (Mar 25, 2021)
#---------------------------------------------------------------------------------------------------
import os,sys,re,socket,datetime,time
OVERWRITE = True
# define source and target
SOURCE_SERVER = "t3serv017.mit.edu"
TARGET_SERVER = "xrootd18.cmsaf.mit.edu"
#---
SOURCE = "/data/submit/cms"
#SOURCE = "root://%s/"%(SOURCE_SERVER)
TARGET = "root://%s/"%(TARGET_SERVER)
#---------------------------------------------------------------------------------------------------
# H E L P E R S
#---------------------------------------------------------------------------------------------------
def showSetup(status):
    """Print a run banner: the full who/where header for 'start', the end
    time for 'end', anything else as a timestamped status value."""
    if status == 'start':
        print("\n=-=-=-= Show who and where we are =-=-=-=\n")
        print(f" Script: {os.path.basename(__file__)}")
        print(f" Arguments: {' '.join(sys.argv[1:])}")
        print(" ")
        print(" user executing: " + os.getenv('USER','unknown user'))
        print(f" running on : {socket.gethostname()}")
        print(f" running in : {os.getcwd()}")
        print(f" start time : {datetime.datetime.now()}")
    elif status == 'end':
        print(f" end time : {datetime.datetime.now()}")
    else:
        print(f" now time : {datetime.datetime.now()} ({status})")
    print(" ")
    return
def exeCmd(cmd, debug=0):
    """Run a shell command via os.system and return its exit status,
    echoing the command (debug>0) and banners (debug>1) as requested."""
    if debug > 1:
        print(' =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=')
    if debug > 0:
        print(' =Execute: %s'%(cmd))
    status = os.system(cmd)
    if debug > 1:
        print(' =E=N=D=-=-=-=-=-=-=-=-=-=-=-=-=-=-=\n')
    return status
def extractLfn(fullFile, debug=0):
    """Return the logical file name: the substring of fullFile from the
    first '/store' onward.  Raises ValueError if '/store' is absent."""
    if debug > 1:
        print(" File: %s"%(fullFile))
    # (a one-off username rewrite for borrowed files used to live here)
    marker = fullFile.index('/store')
    return fullFile[marker:]
def downloadFile(file_xrootd,lfn,debug=0):
    """Copy the file with logical name `lfn` from SOURCE to TARGET via
    xrdcp and return the command's exit status (0 on success).

    NOTE(review): the file_xrootd parameter is unused here — presumably a
    leftover from the peer-to-peer variant below; confirm with callers.
    """
    # execute the file download
    # peer-to-peer copy fails for now with redirector limit reached?!
    #cmd = "xrdcp -T first %s%s %s%s"%(SOURCE,lfn,TARGET,lfn)
    cmd = "xrdcp --path %s%s %s%s"%(SOURCE,lfn,TARGET,lfn)
    print("CMD: %s"%(cmd))
    rc = exeCmd(cmd,debug)
    if rc == 0:
        print(" download worked (%s)."%(lfn))
    else:
        print(" download FAILED with %d (%s)."%(rc,lfn))
    return rc
def removeRemainder(lfn,debug=0):
    """Delete a partially-downloaded file `lfn` from the target server
    (cleanup after a failed transfer); return the command's exit status."""
    cmd = "xrdfs %s rm %s"%(TARGET_SERVER,lfn)
#    cmd = "rm %s%s >& /dev/null"%(TARGET,lfn)
    rc = exeCmd(cmd,debug)
    if rc == 0:
        print(" removed remainder: %s%s."%(TARGET,lfn))
    else:
        print(" removing remainder FAILED (rc=%s): %s."%(rc,lfn))
    return rc
def existFile(lfn,debug=0):
    """Return 0 when `lfn` already exists on the target, non-zero when a
    download is needed.

    With the module-level OVERWRITE flag set, always returns 1 so the
    caller re-downloads.  When the xrootd listing fails, the target
    directory is created as a side effect so the upcoming copy can succeed.
    """
    if OVERWRITE: # force overwrite
        return 1
    cmd = "xrdfs %s ls %s >& /dev/null"%(TARGET_SERVER,lfn)
#    cmd = "ls -l %s%s >& /dev/null"%(TARGET,lfn)
    rc = exeCmd(cmd,debug)
    if rc == 0:
        print(" file listed successfully: %s."%(lfn))
    else:
        print(" file listing FAILED (rc=%s) so we need to download: %s."%(rc,lfn))
        # Make sure the parent directory exists on the target.
        dir = "/".join(lfn.split("/")[:-1])
        print("DIR: %s%s"%(TARGET,dir))
        cmd = "ls -l %s%s >& /dev/null"%(TARGET,dir)
        tmprc = exeCmd(cmd,debug)
        if tmprc == 0:
            print(" directory exists: %s."%(lfn))
        else:
            cmd = "mkdir -p %s%s >& /dev/null"%(TARGET,dir)
            tmprc = exeCmd(cmd,debug)
            print(" directory created (RC=%d): %s."%(int(tmprc),lfn))
    return rc
#---------------------------------------------------------------------------------------------------
# M A I N
#---------------------------------------------------------------------------------------------------
# Script body: validate arguments, check the target, copy the file,
# clean up on failure.  Exit status 0 = success or already present.
debug = 2
# make announcement
showSetup('start')
# make sure we have at least one parameter
if len(sys.argv)<2:
    print('\n ERROR - Missing file name as parameter.\n')
    sys.exit(1)  # bug fix: original called showExit(1), which is defined nowhere
# read command line parameters
fullFile = " ".join(sys.argv[1:])
# make sure to trim the input file if needed (want to go back to lfn = /store/...)
lfn = extractLfn(fullFile,debug)
# show the certificate
exeCmd("voms-proxy-init --valid 168:00 -voms cms",debug)
exeCmd("voms-proxy-info -all",debug)
# does the file exist already?
rc = existFile(lfn,debug)
if rc == 0:
    print("\n Our work is done, file exists already.\nEXIT\n")
    showSetup(rc)
    sys.exit(rc)
# download the file to local
rc = downloadFile(fullFile,lfn,debug)
if rc != 0:
    print("\n File download failed. EXIT!\n Cleanup potential remainders.")
    removeRemainder(lfn,debug)
    showSetup(rc)
    sys.exit(rc)
# make announcement
showSetup('end')
sys.exit(0)
| [
"[email protected]"
] | |
7295512a52631127cac7a9bac633c1f3e19e03f8 | 6e423cddd8698bc662bcc3208eb7a8fdb2eb0d72 | /mlcomp/parallelm/model/constants.py | 5e8296f20d50f1acfb3eaec9e0c114645176ed6f | [
"Apache-2.0"
] | permissive | theromis/mlpiper | 7d435343af7b739767f662b97a988c2ccc7665ed | 738356ce6d5e691a5d813acafa3f0ff730e76136 | refs/heads/master | 2020-05-05T04:44:00.494105 | 2019-04-03T19:53:01 | 2019-04-03T22:02:53 | 179,722,926 | 0 | 0 | Apache-2.0 | 2019-04-05T17:06:02 | 2019-04-05T17:06:01 | null | UTF-8 | Python | false | false | 394 | py |
# Provides information about the model file path that should be fetched and used by the pipeline
METADATA_FILENAME = 'metadata.json'
# A helper file that is used to signal the uWSGI workers about new models
SYNC_FILENAME = 'sync'
# A dedicated extension that is used to avoid model file paths collisions between the
# pipeline model fetch and the agent
PIPELINE_MODEL_EXT = '.last_approved'
| [
"[email protected]"
] | |
8fa5731878c882842bf2a145cc3010af0d34ccae | fe9e19372790f3f79f134b008852c028d497c7fb | /blog/migrations/0001_initial.py | 55c531ce6369adcbed4ab5de943b193fff831878 | [] | no_license | Mohamed-awad/django_blog | 9f73cbb598c4e71670f7546150fa257d494d3041 | 40b198b53dd023ee63112189863e49f2e28d19dc | refs/heads/master | 2021-05-03T16:32:04.262701 | 2018-03-24T16:06:58 | 2018-03-24T16:06:58 | 120,438,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,707 | py | # Generated by Django 2.0.1 on 2018-02-06 07:26
import datetime
from django.conf import settings
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Initial schema for the blog app: Post and Comment models plus the
    Comment -> Post foreign key.

    NOTE(review): `on_delete=True` is not valid — Django expects a callable
    such as models.CASCADE; this will fail at FK resolution.  Likewise
    `DateTimeField(default=True)` on published_date and the hard-baked
    datetime defaults (frozen at makemigrations time, not per-row) look
    like bugs in the originating models — confirm before deploying.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('author', models.CharField(max_length=100)),
                ('text', models.TextField()),
                # NOTE(review): constant default captured at generation time.
                ('create_date', models.DateTimeField(default=datetime.datetime(2018, 2, 6, 7, 26, 23, 225451, tzinfo=utc))),
                ('approved_comment', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                # NOTE(review): constant default captured at generation time.
                ('create_date', models.DateTimeField(default=datetime.datetime(2018, 2, 6, 7, 26, 23, 224974, tzinfo=utc))),
                # NOTE(review): default=True is not a datetime.
                ('published_date', models.DateTimeField(default=True, null=True)),
                # NOTE(review): on_delete must be a callable (models.CASCADE).
                ('author', models.ForeignKey(on_delete=True, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='comment',
            name='post',
            # NOTE(review): on_delete must be a callable (models.CASCADE).
            field=models.ForeignKey(on_delete=True, related_name='comments', to='blog.Post'),
        ),
    ]
| [
"[email protected]"
] | |
6d58b6cfcfe5f6bdddc5b38f1fa6e583aebad983 | f281d0d6431c1b45c6e5ebfff5856c374af4b130 | /DAY001~099/DAY87-BOJ2217-로프/smlee.py | 5bab04654a25b085dadf331133df46f33e52a89b | [] | no_license | tachyon83/code-rhino | ec802dc91dce20980fac401b26165a487494adb4 | b1af000f5798cd12ecdab36aeb9c7a36f91c1101 | refs/heads/master | 2022-08-13T09:10:16.369287 | 2022-07-30T11:27:34 | 2022-07-30T11:27:34 | 292,142,812 | 5 | 6 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | n = int(input())
# BOJ 2217 (ropes): using the k strongest ropes, the maximum liftable
# weight is (strength of the weakest chosen rope) * k.  Sorting in
# descending order makes arr[i] the weakest of the first i+1 ropes, so the
# answer is the best arr[i] * (i+1) over all prefixes.
arr = [int(input()) for _ in range(n)]
arr.sort(reverse=True)
result=[]
for i in range(len(arr)):
    result.append(arr[i] * (i+1))
print(max(result))
| [
"[email protected]"
] | |
77f992b5ee77c422b6c3b1c8bd56c87d95f7d6c4 | c6053ad14e9a9161128ab43ced5604d801ba616d | /Lemon/Python_Base/api_auto_4/__init__.py | 591e89f6ae29685aa744ea28b9493c1bb8296095 | [] | no_license | HesterXu/Home | 0f6bdace39f15e8be26031f88248f2febf33954d | ef8fa0becb687b7b6f73a7167bdde562b8c539be | refs/heads/master | 2020-04-04T00:56:35.183580 | 2018-12-25T02:48:51 | 2018-12-25T02:49:05 | 155,662,403 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | # -*- coding: utf-8 -*-
# @Time : 2018/12/10/19:49
# @Author : Hester Xu
# Email : [email protected]
# @File : __init__.py.py
# @Software : PyCharm
"""
代码--放在package里 数据--放在文件夹里
1.package:
common
2.directory:
datas
test_result: log report
configs
""" | [
"[email protected]"
] | |
7e1f7eb16d48fbf502259148e251576c19aa8f53 | d35813d7e9ef6c606591ae1eb4ed3b2d5156633b | /python-daily/list_extend.py | 3e1ee4598d757d52be70a3c5d44b0daa750289cc | [] | no_license | JeremiahZhang/gopython | eb6f598c16c8a00c86245e6526261b1b2d1321f1 | ef13f16d2330849b19ec5daa9f239bf1558fa78c | refs/heads/master | 2022-08-13T22:38:12.416404 | 2022-05-16T02:32:04 | 2022-05-16T02:32:04 | 42,239,933 | 13 | 6 | null | 2022-08-01T08:13:54 | 2015-09-10T11:14:43 | Jupyter Notebook | UTF-8 | Python | false | false | 128 | py | animals = ['Python', 'Viper', 'Cobra']
def add_snake(snake_type):
    """Add one snake name to the module-level `animals` list and print it.

    Bug fix: the original used list.extend, which iterates its argument,
    so a string like 'Boa' was added as the characters 'B', 'o', 'a'.
    append adds the whole name as a single element.
    """
    animals.append(snake_type)
    print(animals)
add_snake('Boa') | [
"[email protected]"
] | |
f42e215f64f793c923ad2ebc2a3027e6e38fe807 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03796/s497366765.py | 3301405c3e00a707add21602658cfa1546569d62 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | N = int(input())
# Compute N! modulo 1e9+7, reducing the running product each step so the
# intermediate integers stay small.
power = 1
for n in range(1, N+1):
    power = power % (10**9+7) * n
print(power % (10**9+7))
| [
"[email protected]"
] | |
6fc6c184d8c93782a2b1661cc99dcafccc772bb1 | a89dbc6d2d9c44b7c4a19f297b0885e8127a3cdb | /tests/test_xvnc.py | cf04b338505f7b2d82a17bbbe6d3134bc5ccefd4 | [
"BSD-2-Clause"
] | permissive | ponty/PyVirtualDisplay | 79a6e53eaaa146665409aaa144a67faf12c38d72 | 8368c4138a09efb5bba45cc7c3ff95f602d21881 | refs/heads/master | 2023-07-05T00:33:21.167789 | 2023-03-12T07:54:41 | 2023-03-12T07:54:41 | 1,354,580 | 554 | 82 | BSD-2-Clause | 2021-04-02T20:56:40 | 2011-02-11T10:41:09 | Python | UTF-8 | Python | false | false | 1,917 | py | import tempfile
from pathlib import Path
from tutil import has_xvnc, rfbport, worker
from vncdotool import api
from pyvirtualdisplay import Display
from pyvirtualdisplay.xvnc import XvncDisplay
if has_xvnc():
    # Only define the test when an Xvnc binary is available on this host.

    def test_xvnc():
        """Smoke-test the Xvnc backend: start displays via the generic
        Display wrapper and via XvncDisplay directly, with the default
        port, an explicit rfbport, and password (rfbauth) protection,
        capturing a screenshot over VNC each time."""
        with tempfile.TemporaryDirectory() as temp_dir:
            vnc_png = Path(temp_dir) / "vnc.png"
            password = "123456"
            passwd_file = Path(temp_dir) / "pwd.txt"
            # Pre-computed vncpasswd hash of "123456" (avoids running vncpasswd).
            vncpasswd_generated = b"\x49\x40\x15\xf9\xa3\x5e\x8b\x22"
            passwd_file.write_bytes(vncpasswd_generated)
            # Default display :0 — only safe on the first test worker,
            # otherwise parallel workers would collide on the port.
            if worker() == 0:
                with Display(backend="xvnc"):
                    with api.connect("localhost:0") as client:
                        client.timeout = 1
                        client.captureScreen(vnc_png)
                with XvncDisplay():
                    with api.connect("localhost:0") as client:
                        client.timeout = 1
                        client.captureScreen(vnc_png)
            # Worker-specific port; vncdotool addresses displays as
            # "localhost:<rfbport - 5900>".
            sconnect = "localhost:%s" % (rfbport() - 5900)
            with Display(backend="xvnc", rfbport=rfbport()):
                with api.connect(sconnect) as client:
                    client.timeout = 1
                    client.captureScreen(vnc_png)
            with XvncDisplay(rfbport=rfbport()):
                with api.connect(sconnect) as client:
                    client.timeout = 1
                    client.captureScreen(vnc_png)
            # Same again with password authentication enabled.
            with Display(backend="xvnc", rfbport=rfbport(), rfbauth=passwd_file):
                with api.connect(sconnect, password=password) as client:
                    client.timeout = 1
                    client.captureScreen(vnc_png)
            with XvncDisplay(rfbport=rfbport(), rfbauth=passwd_file):
                with api.connect(sconnect, password=password) as client:
                    client.timeout = 1
                    client.captureScreen(vnc_png)
| [
"ponty@home"
] | ponty@home |
22ee0ba721abb697347257b1a5d2542edcf4c1e9 | 797e83cd492c22c8b7e456b76ae9efb45e102e30 | /chapter3_ScriptExectionContext/redirect.py | def595d1368afa8dfb1bf48215eba992e256569d | [] | no_license | skyaiolos/ProgrammingPython4th | 013e2c831a6e7836826369d55aa9435fe91c2026 | a6a98077440f5818fb0bd430a8f9a5d8bf0ce6d7 | refs/heads/master | 2021-01-23T11:20:38.292728 | 2017-07-20T03:22:59 | 2017-07-20T03:22:59 | 93,130,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,316 | py | """
file-like objects that save standard output text in a string and provide
standard input text from a string; redirect runs a passed-in function
with its output and input streams reset to these file-like class objects;
"""
import sys # get built-in modules
class Output:                                    # simulated output file
    """In-memory stand-in for sys.stdout: accumulates all writes in .text."""

    def __init__(self):
        self.text = ''                           # nothing written yet

    def write(self, string):
        self.text = self.text + string

    def writelines(self, lines):
        for entry in lines:
            self.write(entry)
class Input:                                     # simulated input file
    """In-memory stand-in for sys.stdin, fed from a string supplied at
    construction time; read/readline consume the string."""

    def __init__(self, input=''):
        self.text = input

    def read(self, size=None):
        """Consume and return `size` characters, or everything when size
        is omitted."""
        if size is None:
            res = self.text
            self.text = ''
        else:
            res = self.text[:size]
            self.text = self.text[size:]
        return res

    def readline(self):
        """Consume and return one line including its newline; without a
        newline, return whatever remains."""
        eoln = self.text.find('\n')
        if eoln == -1:
            return self.read()
        res = self.text[:eoln+1]
        self.text = self.text[eoln+1:]
        return res
def redirect(function, pargs, kargs, input):          # redirect stdin/out
    """Run function(*pargs, **kargs) with sys.stdin fed from `input` and
    sys.stdout captured in memory; return (result, captured stdout text).
    The real streams are restored whether or not the call raises."""
    saved_streams = sys.stdin, sys.stdout
    sys.stdin = Input(input)
    sys.stdout = Output()
    try:
        result = function(*pargs, **kargs)
        output = sys.stdout.text
    finally:
        sys.stdin, sys.stdout = saved_streams
    return (result, output)
# Output
# Provides the write method interface (a.k.a. protocol) expected of output files but
# saves all output in an in-memory string as it is written.
# Input
# Provides the interface expected of input files, but provides input on demand from
# an in-memory string passed in at object construction time. | [
"[email protected]"
] | |
dbbeccc73dc6022533c406f9b0cb7b945674c7d3 | 21e177a4d828f4e0a003e9424c4952dbc0b47d29 | /testlints/test_lint_subject_locality_name_max_length.py | 43b55f38acbf73df4543e4b07ebccf054dd11684 | [] | no_license | 846468230/Plint | 1071277a55144bb3185347a58dd9787562fc0538 | c7e7ca27e5d04bbaa4e7ad71d8e86ec5c9388987 | refs/heads/master | 2020-05-15T12:11:22.358000 | 2019-04-19T11:46:05 | 2019-04-19T11:46:05 | 182,255,941 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,257 | py | import sys
sys.path.append("..")
from lints import base
from lints import lint_subject_locality_name_max_length
import unittest
import os
from cryptography import x509
from cryptography.hazmat.backends import default_backend
class TestSubjectLocalityNameLengthGood(unittest.TestCase):
    '''test lint_subject_locality_name_max_length.py'''

    def test_SubjectLocalityNameLengthGood(self):
        """A certificate whose subject locality fits the limit passes."""
        certPath ='..\\testCerts\\subjectLocalityNameLengthGood.pem'
        lint_subject_locality_name_max_length.init()
        with open(certPath, "rb") as f:
            cert = x509.load_pem_x509_certificate(f.read(), default_backend())
        out = base.Lints["e_subject_locality_name_max_length"].Execute(cert)
        self.assertEqual(base.LintStatus.Pass,out.Status)

    def test_SubjectLocalityNameLong(self):
        """A certificate with an over-long subject locality is flagged."""
        certPath ='..\\testCerts\\subjectLocalityNameLong.pem'
        lint_subject_locality_name_max_length.init()
        with open(certPath, "rb") as f:
            cert = x509.load_pem_x509_certificate(f.read(), default_backend())
        out = base.Lints["e_subject_locality_name_max_length"].Execute(cert)
        self.assertEqual(base.LintStatus.Error,out.Status)
if __name__=="__main__":
unittest.main(verbosity=2) | [
"[email protected]"
] | |
3e6b65965404c9a88926cd2f245c924f11869101 | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/VBF_HToTauTau_M-120_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0_1377467448/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_66/run_cfg.py | 09d33bfcf02f1d13dc3723c60dd660d9b368a10a | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,483 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/VBF_HToTauTau_M-120_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0_1377467448/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
# Override the input source from base_cfg with this job's share of the
# dataset: five CMG-tuple files of the VBF H->tautau (M=120) Summer12 sample.
process.source = cms.Source("PoolSource",
    noEventSort = cms.untracked.bool(True),
    inputCommands = cms.untracked.vstring('keep *',
        'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
    duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
    fileNames = cms.untracked.vstring('/store/cmst3/user/cmgtools/CMG/VBF_HToTauTau_M-120_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_392.root',
    '/store/cmst3/user/cmgtools/CMG/VBF_HToTauTau_M-120_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_393.root',
    '/store/cmst3/user/cmgtools/CMG/VBF_HToTauTau_M-120_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_394.root',
    '/store/cmst3/user/cmgtools/CMG/VBF_HToTauTau_M-120_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_395.root',
    '/store/cmst3/user/cmgtools/CMG/VBF_HToTauTau_M-120_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_396.root')
)
| [
"[email protected]"
] | |
29a9940ae1963530c38cc81ee7feb8f6627c38e0 | f1d49f5155d63172e58547e706a4f11a2dcd2cbc | /lib/crypto/receipt.py | 5b563ba67b2fd1aee5396523b8a817bfa46ca171 | [] | no_license | rtnpro/zamboni | a831f308c994025b38616fcd039ceb856d2cafaf | 6a8cb8b5e81ed0ad72200a1586af54d9c3865d17 | refs/heads/master | 2021-01-16T19:09:07.937849 | 2012-11-06T17:19:49 | 2012-11-08T18:49:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,163 | py | import json
import urllib2
from django.conf import settings
from django_statsd.clients import statsd
import commonware.log
import jwt
log = commonware.log.getLogger('z.crypto')
class SigningError(Exception):
    """Raised when the remote receipt-signing service fails or returns
    an unexpected response."""
def sign(receipt):
    """
    Send the receipt to the signing service.

    This could possibly be made async via celery.

    Returns the signed receipt extracted from the service's JSON
    response, or None when no SIGNING_SERVER is configured.  Raises
    SigningError on any transport failure or non-200 response.

    NOTE: Python 2 code (urllib2, basestring, "except X, e" syntax).
    """
    destination = settings.SIGNING_SERVER
    # If no destination is set. Just ignore this request.
    if not destination:
        return
    destination += '/1.0/sign'
    timeout = settings.SIGNING_SERVER_TIMEOUT
    receipt_json = json.dumps(receipt)
    log.info('Calling service: %s' % destination)
    log.info('Receipt contents: %s' % receipt_json)
    headers = {'Content-Type': 'application/json'}
    # Pass a pre-serialized receipt through untouched; serialize anything else.
    data = receipt if isinstance(receipt, basestring) else receipt_json
    request = urllib2.Request(destination, data, headers)
    try:
        # Time the round trip to the signing service via statsd.
        with statsd.timer('services.sign.receipt'):
            response = urllib2.urlopen(request, timeout=timeout)
    except urllib2.HTTPError, error:
        # Will occur when a 3xx or greater code is returned
        log.error('Posting to signing failed: %s, %s'
                  % (error.code, error.read().strip()))
        raise SigningError
    except:
        # Will occur when some other error occurs.
        log.error('Posting to signing failed', exc_info=True)
        raise SigningError
    if response.getcode() != 200:
        log.error('Posting to signing failed: %s'
                  % (response.getcode()))
        raise SigningError
    return json.loads(response.read())['receipt']
def decode(receipt):
    """Decode and verify that the receipt is cryptographically sound.

    Would raise on an invalid receipt and return its contents when
    valid; currently unimplemented and always raises
    NotImplementedError.
    """
    raise NotImplementedError
def crack(receipt):
    """Open the receipt WITHOUT validating its crypto.

    The receipt is a '~'-separated sequence of JWT segments (by
    default: cert, receipt); each segment is decoded unverified and
    the payloads are returned in order.
    """
    segments = receipt.split('~')
    return [jwt.decode(segment.encode('ascii'), verify=False)
            for segment in segments]
| [
"[email protected]"
] | |
44e112a36cf0d7b8baf551bc5d5d8a4c9a9f32a3 | 82b536adad8c7fb2661e901af5936a81358d91dd | /test/__init__.py | a58a0123eec160adafdbf401495360ab57c118a2 | [
"MIT"
] | permissive | nickstenning/tagalog | 65fb9e6c670f9c6aef87775820143f1a5fd04a18 | c6847a957dc4f96836a5cf13c4eb664fccafaac2 | refs/heads/master | 2023-09-05T03:57:41.940595 | 2016-06-18T21:36:28 | 2016-06-18T21:36:28 | 8,113,627 | 3 | 0 | MIT | 2023-03-31T14:38:52 | 2013-02-09T19:10:08 | Python | UTF-8 | Python | false | false | 89 | py | import os
# Use UTF8 for stdin, stdout, stderr
# Forces the standard streams to UTF-8 regardless of locale so test
# output containing non-ASCII characters cannot crash with an
# encoding error.
os.environ['PYTHONIOENCODING'] = 'utf-8'
| [
"[email protected]"
] | |
172982b7a6506280c67c338a2ed05e734620f7ea | cc4424bcbc6ca3d04c1be2effa043076816b3a12 | /Spider/VideoDownloading/shipin.py | ea67b404f6573e189a7fa5ca9ae0d133ef2b47f7 | [] | no_license | IronmanJay/Python_Project | a65a47c64b7121993be1ef38a678ffd4771ffaf1 | 91293b05eb28697f5dec7f99a0f608904f6a0b1f | refs/heads/master | 2023-06-11T07:56:18.121706 | 2023-06-07T02:31:06 | 2023-06-07T02:31:06 | 253,717,731 | 15 | 4 | null | 2022-11-21T20:51:58 | 2020-04-07T07:22:35 | Python | UTF-8 | Python | false | false | 621 | py | import requests
# Direct CDN URL of the ixigua video to download.
# NOTE(review): these links carry a signature and expire, so the
# request will start failing once the link goes stale.
url = 'http://v3-default.ixigua.com/dd48e529360eaf527c9a929e8622e8d8/5d4d0a65/video/m/220c097cb8f27af496eaddb07c5d42edc601162e796e0000acb22a2eb570/?rc=M2psOGZoZ3NrbzMzZTczM0ApdSk6OjUzNTg0NDgzNTw7PDNAKTk8OTU8OzY8PDdnODM6NzNnKXUpQGczdSlAZjN1KTk0ZHIwajItcnIuMF8tLTQtMHNzOmkvNDIvNS0yLS0tMi4tLS4vaS1gY2BgLzFjYl8zLjQyMDA6YzpiMHAjOmEtcCM6YDU0Og%3D%3D'
# Spoof a desktop browser User-Agent so the CDN serves the file.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763',
}
# Fetch the whole video into memory, then dump the raw bytes to disk.
r = requests.get(url=url,headers=headers)
with open('1.mp4','wb') as fp:
    fp.write(r.content)
"[email protected]"
] | |
9c5a9a552daa857baa5ab25b8dea7fccee69eada | 372c618e3abf56f59027ba1cbfce8102a6ea2903 | /sugargame/canvas.py | 82992b3e5d2e511ef049fe38d9e16e7d2546c7d8 | [] | no_license | sugar-activities/4259-activity | 0bb728d1f49fcc01ba91c4c07a15eca3b267868e | db87efad848f073b1c45d2a5debb2d4ca8eab729 | refs/heads/master | 2021-01-19T23:15:31.307314 | 2017-04-21T05:09:21 | 2017-04-21T05:09:21 | 88,938,120 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,039 | py | import os
from gi.repository import Gtk
from gi.repository import GLib
import pygame
import event
CANVAS = None  # module-level singleton; set by the first (only) PygameCanvas
class PygameCanvas(Gtk.EventBox):
    """
    GTK widget that hosts a Pygame display inside a Gtk.Socket.

    mainwindow is the activity itself.
    """
    def __init__(self, mainwindow, pointer_hint = True):
        Gtk.EventBox.__init__(self)
        global CANVAS
        assert CANVAS == None, "Only one PygameCanvas can be created, ever."
        CANVAS = self
        # Initialize Events translator before widget gets "realized".
        self.translator = event.Translator(mainwindow, self)
        self._mainwindow = mainwindow
        self.set_can_focus(True)
        self._socket = Gtk.Socket()
        self.add(self._socket)
        self.show_all()
    def run_pygame(self, main_fn):
        # Run the main loop after a short delay. The reason for the delay is that the
        # Sugar activity is not properly created until after its constructor returns.
        # If the Pygame main loop is called from the activity constructor, the
        # constructor never returns and the activity freezes.
        GLib.idle_add(self._run_pygame_cb, main_fn)
    def _run_pygame_cb(self, main_fn):
        # Deferred body of run_pygame; runs once from the GTK idle loop.
        assert pygame.display.get_surface() is None, "PygameCanvas.run_pygame can only be called once."
        # Preinitialize Pygame with the X window ID.
        assert pygame.display.get_init() == False, "Pygame must not be initialized before calling PygameCanvas.run_pygame."
        os.environ['SDL_WINDOWID'] = str(self._socket.get_id())
        pygame.init()
        # Restore the default cursor.
        self._socket.get_window().set_cursor(None)
        # Initialize the Pygame window.
        r = self.get_allocation()
        pygame.display.set_mode((r.width, r.height), pygame.RESIZABLE)
        # Hook certain Pygame functions with GTK equivalents.
        self.translator.hook_pygame()
        # Run the Pygame main loop.
        main_fn()
        # Returned to GLib.idle_add: False means "do not reschedule".
        return False
    def get_pygame_widget(self):
        # Expose the Gtk.Socket that embeds the SDL/Pygame window.
        return self._socket
| [
"[email protected]"
] | |
c1baadb28696c7b692331bd49fd54779350c9199 | ea0928f988c87c4ac04bf7dc4e9fe0b25495d22c | /append.py | 22dfbaf3ece8ee4b802feff98a716ac58f3fe026 | [] | no_license | opasha/Python | 8c7c9aa7c363d27a7628714543e674a4edc8ae13 | b3559e92c6c06753717cec5181868d0cecc9a2ac | refs/heads/master | 2021-01-19T23:54:01.438772 | 2017-05-05T04:07:56 | 2017-05-05T04:07:56 | 89,051,109 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py | numbers = [1, 2, 3, 4, 5] # .append means to add something to the end of the list
# NOTE: this demo uses Python 2 print statements throughout.
print numbers
numbers.append(6)  # list.append adds a single item at the end
print numbers
numbers.append(7)
print numbers
print list(range(10))  # list() is redundant here: Python 2 range() returns a list
print range(10)
for number in range (8, 21): # for i in range (): statement
    numbers.append(number)  # append 8..20 one at a time
print numbers
print numbers[::-1]  # slice with step -1 prints a reversed copy
| [
"[email protected]"
] | |
07491e39d8215bad8fc61f2dfe20894595808718 | bc25195db1151a867343b5991fe51096ce2d57a8 | /tmp/portmidi/i.py | cff73824a778636fcd9c351feb4b491d5cb7f67c | [
"MIT"
] | permissive | ptone/protomidi | 04b78139c499518440afe5faecba9664ca068226 | 3dd169e66359ab17319880581771172a5867b261 | refs/heads/master | 2020-04-18T06:22:15.835383 | 2012-11-19T05:40:40 | 2012-11-19T05:40:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | import time
import pprint
import protomidi.portmidi as io
# Open the Roland SH-201 as a MIDI input port.
# NOTE(review): this rebinding shadows the built-in input().
input = io.Input('SH-201 MIDI 1')
# Poll forever: print every pending MIDI message, then sleep 1 ms so
# the loop does not busy-spin a CPU core.
while 1:
    for msg in input:
        print(msg)
    time.sleep(0.001)
| [
"[email protected]"
] | |
4568bdca6024375029e24fe57f3dd21cc996b16d | 0d560495d4e5be2004a5a08d696a1b4cb2b91742 | /backend/folk_games_25344/urls.py | 8d4f66d6cd8dc7ef11318eb019a291a2b54ca1f2 | [] | no_license | crowdbotics-apps/folk-games-25344 | 54fa818b69c50867425814ff5133c1bf87d3d2d4 | a00bf5a15f184fcfe138e84c9e386688e5121cd8 | refs/heads/master | 2023-03-29T14:24:41.695249 | 2021-03-29T15:12:24 | 2021-03-29T15:12:24 | 352,686,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,223 | py | """folk_games_25344 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
# Core URL routing: app includes, admin, and auth endpoints.
urlpatterns = [
    path("", include("home.urls")),
    path("accounts/", include("allauth.urls")),
    path("modules/", include("modules.urls")),
    path("api/v1/", include("home.api.v1.urls")),
    path("admin/", admin.site.urls),
    path("users/", include("users.urls", namespace="users")),
    path("rest-auth/", include("rest_auth.urls")),
    # Override email confirm to use allauth's HTML view instead of rest_auth's API view
    path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
    path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
# Branding strings for the Django admin site.
admin.site.site_header = "Folk games"
admin.site.site_title = "Folk games Admin Portal"
admin.site.index_title = "Folk games Admin"
# swagger
api_info = openapi.Info(
    title="Folk games API",
    default_version="v1",
    description="API documentation for Folk games App",
)
# Swagger/OpenAPI schema view, restricted to authenticated users.
schema_view = get_schema_view(
    api_info,
    public=True,
    permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
    path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
# Serve the SPA shell at the root and as a catch-all for any unmatched
# path, so client-side routing can take over.
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
                TemplateView.as_view(template_name='index.html'))]
| [
"[email protected]"
] | |
3868c0327babcaf1a3a4450e0841ecd55612000f | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /hGzNSr5CSEpTsmy5W_21.py | 6ca84b106469f4b5d4ea74e03f6c62e56bbcb055 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py |
def not_good_math(n, k):
    """Apply k rounds of the rule: if n's last digit is non-zero,
    subtract 1; otherwise divide n by 10.  Return the final n.

    Iterative rewrite of the original recursion: large k can no longer
    hit Python's recursion limit, and non-positive k now returns n
    unchanged (the original recursion never terminated for negative k).
    """
    for _ in range(k):
        # n % 10 is truthy exactly when the last digit is non-zero.
        n = n - 1 if n % 10 else n // 10
    return n
| [
"[email protected]"
] | |
d13de3fc39daba7cccce1b46249e9f50ee05cfda | 5b771c11e8967038025376c6ec31962ca90748dd | /django_by_example_book/01_building_blog/blog/models.py | f61f39b725aead3d787f5b675f2d303f20eee4b3 | [] | no_license | AsemAntar/Django_Projects | 7135eca3b4bcb656fc88e0838483c97d7f1746e1 | 4141c2c7e91845eec307f6dd6c69199302eabb16 | refs/heads/master | 2022-12-10T06:32:35.787504 | 2020-05-26T14:43:01 | 2020-05-26T14:43:01 | 216,863,494 | 0 | 0 | null | 2022-12-05T13:31:53 | 2019-10-22T16:47:28 | Python | UTF-8 | Python | false | false | 2,066 | py | from django.db import models
from django.utils import timezone
from django.urls import reverse
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
# create custom manager
class PublishedManager(models.Manager):
    """Model manager that narrows the queryset to posts whose status
    is 'published'."""
    def get_queryset(self):
        # Filter the base manager's queryset down to published posts.
        return super(PublishedManager, self).get_queryset().filter(status='published')
class Post(models.Model):
    """A blog post with a draft/published workflow, tags, and a
    date-based canonical URL."""
    STATUS_CHOICES = (
        ('draft', 'Draft'),
        ('published', 'Published'),
    )
    title = models.CharField(max_length=250, verbose_name='Post Title')
    # Unique per publish date so the slug can be part of the dated URL.
    slug = models.SlugField(max_length=250, unique_for_date='publish')
    author = models.ForeignKey(
        User, on_delete=models.CASCADE, related_name='blog_posts')
    body = models.TextField(verbose_name='Post Body')
    publish = models.DateTimeField(default=timezone.now)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    status = models.CharField(
        max_length=10, choices=STATUS_CHOICES, default='draft')
    objects = models.Manager() # The default manager
    published = PublishedManager() # Custom manager
    tags = TaggableManager()
    class Meta:
        ordering = ('-publish',)  # newest posts first
    def __str__(self):
        return self.title
    # create a url linked to each post with the help of post_detail view to be used in templates
    def get_absolute_url(self):
        return reverse(
            'blog:post_detail', args=[self.publish.year, self.publish.month, self.publish.day, self.slug]
        )
class Comment(models.Model):
    """A reader comment attached to a Post."""
    post = models.ForeignKey(
        Post, on_delete=models.CASCADE, related_name='comments')
    name = models.CharField(max_length=80)
    email = models.EmailField()
    body = models.TextField()
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    active = models.BooleanField(default=True)  # toggled to hide a comment without deleting it
    class Meta:
        ordering = ('-created',)  # newest comments first
    def __str__(self):
        return 'Comment by {} on {}'.format(self.name, self.post)
| [
"[email protected]"
] | |
08b738fec65ee11ea20d6fb76dbae3a4d57f27dc | 01932366dd322ec3459db9dd85a2fd8d22a82fcb | /keras/keras41_cnn2_diabet.py | 8cfb63b0328f7edc365337ec0a562d64604f6de0 | [] | no_license | Jeong-Kyu/A_study | 653f5fd695109639badfa9e99fd5643d2e9ff1ac | 6866c88fcc25841ceae2cd278dcb5ad5654c2a69 | refs/heads/master | 2023-06-11T02:44:20.574147 | 2021-07-05T08:59:43 | 2021-07-05T08:59:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,007 | py | #실습 : 19_1, 2, 3, 4, 5, EarlyStopping까지 총 6개의 파일을 완성하시오.
import numpy as np
from sklearn.datasets import load_diabetes
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Dense, Input, Conv2D,MaxPool2D, Flatten
#1. Data
dataset = load_diabetes()
x = dataset.data
y = dataset.target
#print(dataset.DESCR)
#x = x / np.max(x)
#print(np.max(x), np.min(x)) # normalization
#1_2. Data preprocessing
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, train_size = 0.8, shuffle = True, random_state = 66 )
x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, train_size= 0.8, shuffle = True, random_state = 66, )
# Scale features to [0, 1]; fit only on the training split to avoid leakage.
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
x_test = scaler.transform(x_test)
x_val = scaler.transform(x_val)
'''
print(x_train.shape)
print(x_test.shape)
print(x_val.shape)
'''
# Reshape the 10 tabular features into (10, 1, 1) "images" for Conv2D.
x_train = x_train.reshape(282,10,1,1)
x_test = x_test.reshape(89,10,1,1)
x_val = x_val.reshape(71,10,1,1)
#2. Modeling
model = Sequential()
model.add(Conv2D(filters = 10,kernel_size=(1,1), strides=1, padding='same', input_shape = (10,1,1))) # (input_dim * kernel_size + bias)*filter
#strides = how far the window moves between crops, e.g. 2 / (2,3)
model.add(MaxPool2D(pool_size=(1,1))) # 2 / 3 / (2,3)
# model.add(Conv2D(9,(2,3)))
# model.add(Conv2D(8,2))
model.add(Flatten())
model.add(Dense(1))
#3. Compile, train
model.compile(loss = 'mse', optimizer = 'adam', metrics = ['mae'])
'''
EarlyStopping
'''
from tensorflow.keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='loss', patience=20, mode='auto')
model.fit(x_train, y_train, epochs=1000, batch_size=7, validation_data= (x_val, y_val), callbacks = [early_stopping])
#4. Evaluate, predict
loss, mae = model.evaluate(x_test, y_test)
print("loss : ", loss)
print("mae : ", mae)
y_predict = model.predict(x_test)
#RMSE
from sklearn.metrics import mean_squared_error
def RMSE(y_test, y_predict) :
    return np.sqrt(mean_squared_error(y_test, y_predict))
print("RMSE : ", RMSE(y_test, y_predict))
from sklearn.metrics import r2_score
r2 = r2_score(y_test, y_predict)
print("R2 : ", r2)
"""
**Data Set Characteristics:**
:Number of Instances: 442
:Number of Attributes: First 10 columns are numeric predictive values
:Target: Column 11 is a quantitative measure of disease progression one year after baseline
:Attribute Information:
- age age in years
- sex
- bmi body mass index
- bp average blood pressure
- s1 tc, T-Cells (a type of white blood cells)
- s2 ldl, low-density lipoproteins
- s3 hdl, high-density lipoproteins
- s4 tch, thyroid stimulating hormone
- s5 ltg, lamotrigine
- s6 glu, blood sugar level
"""
#데이터 전처리 전
# loss : 3317.64599609375
# mae : 47.06387710571289
# RMSE : 57.59901189718801
# R2 : 0.488809627121195
#데이터 엉망 처리 후
# loss : 3379.458984375
# mae : 47.35618591308594
# RMSE : 58.13311275393621
# R2 : 0.47928539874511966
#데이터 x를 전처리한 후
# loss : 3291.452880859375
# mae : 46.496116638183594
# RMSE : 57.37118551454562
# R2 : 0.49284554101046385
#데이터 x_train잡아서 전처리한 후....
# loss : 3421.5537109375
# mae : 47.82155227661133
# RMSE : 58.49405010020266
# R2 : 0.47279929140593635
#발리데이션 test분리
# loss : 3369.262451171875
# mae : 48.33604431152344
# RMSE : 58.04534944194592
# R2 : 0.5128401315682825
#Earlystopping 적용
# loss : 57.708213806152344
# mae : 5.144794464111328
# RMSE : 7.596591897809446
# R2 : 0.991656001135139
# loss : 6162.50048828125
# mae : 62.28391647338867
# RMSE : 78.50159664503691
# R2 : 0.05046805612688887'''
# cnn
# loss : 3194.252197265625
# mae : 46.7601318359375
# RMSE : 56.517714046839565
# R2 : 0.507822478023054 | [
"[email protected]"
] | |
71510ac89df23b353d808a6ad31c17a177af9447 | 52d324c6c0d0eb43ca4f3edc425a86cdc1e27d78 | /scripts/current/stage4_today_total.py | 494801c0c903240ab3698afb779c3c93ce742f6b | [
"MIT"
] | permissive | deenacse/iem | 992befd6d95accfdadc34fb7928d6b69d661d399 | 150512e857ca6dca1d47363a29cc67775b731760 | refs/heads/master | 2021-02-04T04:20:14.330527 | 2020-02-26T21:11:32 | 2020-02-26T21:11:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,997 | py | """
Sum up the hourly precipitation from NCEP stage IV and produce maps
"""
from __future__ import print_function
import datetime
import os
import sys
import pygrib
from pyiem.datatypes import distance
from pyiem.plot import MapPlot, nwsprecip
import pytz
def doday(ts, realtime):
    """
    Create a plot of precipitation stage4 estimates for some day
    We should total files from 1 AM to midnight local time
    """
    # Sum hourly grids from 1 AM local through the following 1 AM.
    sts = ts.replace(hour=1)
    ets = sts + datetime.timedelta(hours=24)
    interval = datetime.timedelta(hours=1)
    now = sts
    total = None  # running precipitation sum (grib "values" array)
    lts = None    # timestamp of the last hourly file actually found
    while now < ets:
        gmt = now.astimezone(pytz.utc)
        fn = gmt.strftime(
            ("/mesonet/ARCHIVE/data/%Y/%m/%d/" "stage4/ST4.%Y%m%d%H.01h.grib")
        )
        if os.path.isfile(fn):
            lts = now
            grbs = pygrib.open(fn)
            if total is None:
                # First grid found: seed the sum and capture coordinates.
                total = grbs[1]["values"]
                lats, lons = grbs[1].latlons()
            else:
                total += grbs[1]["values"]
            grbs.close()
        now += interval
    if lts is None:
        # No files at all: only complain once enough of the day has passed.
        if ts.hour > 1:
            print(("stage4_today_total.py found no data for date: %s") % (ts,))
        return
    lts = lts - datetime.timedelta(minutes=1)
    subtitle = "Total between 12:00 AM and %s" % (lts.strftime("%I:%M %p %Z"),)
    # NOTE(review): 'c' appears to mark current/realtime routing while
    # archive reruns use only 'a' — confirm against the pqact config.
    routes = "ac"
    if not realtime:
        routes = "a"
    for sector in ["iowa", "midwest", "conus"]:
        pqstr = ("plot %s %s00 %s_stage4_1d.png %s_stage4_1d.png png") % (
            routes,
            ts.strftime("%Y%m%d%H"),
            sector,
            sector,
        )
        mp = MapPlot(
            sector=sector,
            title=("%s NCEP Stage IV Today's Precipitation")
            % (ts.strftime("%-d %b %Y"),),
            subtitle=subtitle,
        )
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
        # Grids are in millimeters; plot in inches.
        mp.pcolormesh(
            lons,
            lats,
            distance(total, "MM").value("IN"),
            clevs,
            cmap=nwsprecip(),
            units="inch",
        )
        # map.drawstates(zorder=2)
        if sector == "iowa":
            mp.drawcounties()
        mp.postprocess(pqstr=pqstr)
        mp.close()
def main(argv):
    """Entry point.

    The past hour's Stage IV file lands roughly 50 minutes after the
    hour, so the realtime run targets a moment 90 minutes in the past.
    Passing an explicit ``year month day`` on the command line forces a
    non-realtime (archive) rerun for that date.
    """
    realtime = len(argv) != 4
    if realtime:
        ts = datetime.datetime.now() - datetime.timedelta(minutes=90)
        ts = ts.replace(hour=12, minute=0, second=0, microsecond=0)
    else:
        year, month, day = [int(value) for value in argv[1:4]]
        ts = datetime.datetime(year, month, day, 12, 0)
    # Stamp the naive timestamp as UTC, then convert to Central time.
    ts = ts.replace(tzinfo=pytz.utc)
    ts = ts.astimezone(pytz.timezone("America/Chicago"))
    doday(ts, realtime)
if __name__ == "__main__":
    # Allow running as a script (cron job or a manual archive rerun).
    main(sys.argv)
| [
"[email protected]"
] | |
723c6f7bef028baa67703a89851fee5669b1e438 | 1a2bf34d7fc1d227ceebf05edf00287de74259c5 | /Django/Day09/AXF/APP/views.py | 410ab6a0ae6c42ec968dfac9b50b15f473ffb049 | [] | no_license | lzn9423362/Django- | de69fee75160236e397b3bbc165281eadbe898f0 | 8c1656d20dcc4dfc29fb942b2db54ec07077e3ae | refs/heads/master | 2020-03-29T18:03:47.323734 | 2018-11-28T12:07:12 | 2018-11-28T12:07:12 | 150,192,771 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,795 | py | from django.shortcuts import render
from .models import *
# Create your views here.
def home(request):
    """Render the storefront home page."""
    # Gather the home-page data.
    # Carousel (wheel) entries.
    wheel = MainWheel.objects.all()
    nav = MainNav.objects.all()
    mustbuy = MainMustbuy.objects.all()
    shop = MainShop.objects.all()
    shop1 = MainShop.objects.get(id=1)
    mainlist = MainShow.objects.all()
    # Slice the shop entries into the layout groups the template expects.
    data = {
        'wheels': wheel,
        'navs': nav,
        'mustbuys': mustbuy,
        'shop1': shop1,
        'shop2': shop[1:3],
        'shop3': shop[3:7],
        'shop4': shop[7:11],
        'main_list': mainlist,
    }
    return render(request, 'home/home.html', data)
def cart(request):
    """Render the (static) shopping-cart page."""
    return render(request, 'cart/cart.html')
def mine(request):
    """Render the (static) "my account" page."""
    return render(request, 'mine/mine.html')
def market(request, categoryid, cid, sortid):
    """Render the market (category listing) page.

    categoryid -- top-level food-type id used for the left slider
    cid        -- child-category id; '0' means "all children"
    sortid     -- '1' sorts by productnum desc, '2' price desc, '3' price asc
    """
    leftSlider = MainFoodTpye.objects.all()
    # '0' selects the whole category; anything else narrows to one child.
    if cid == '0':
        productList = Goods.objects.filter(categoryid=categoryid)
    else:
        productList = Goods.objects.filter(categoryid=categoryid, childcid=cid)
    if sortid == '1':
        productList = productList.order_by('-productnum')
    elif sortid == '2':
        productList = productList.order_by('-price')
    elif sortid == '3':
        productList = productList.order_by('price')
    group = leftSlider.get(typeid=categoryid)
    # childtypenames is encoded as "name1:id1#name2:id2#..."; unpack it
    # into one dict per child for the template.
    childList = []
    childnames = group.childtypenames
    arr1 =childnames.split('#')
    for str in arr1:  # NOTE(review): loop variable shadows the built-in str
        arr2 = str.split(':')
        obj = {'childName': arr2[0], 'childId': arr2[1]}
        childList.append(obj)
    data = {
        'leftSlider': leftSlider,
        'productList': productList,
        'childList': childList,
        'categoryid': categoryid,
        'cid': cid,
    }
    return render(request, 'market/market.html', data)
"[email protected]"
] | |
31ecf624d4ad0a53984e4da6628073af9caec17b | 476415b07a8ab773ac240989b481464961119b6a | /Funcionalidades/Diseño Clases/orden.py | 7406d7ba1af43e7054866541d4af804d132f09ac | [] | no_license | rtejada/Universidad-Python | c4d46a91eee2a15d3c72035aced9d28f1527fb69 | a2c8035f57f5ae269b1b968ef187180879d14163 | refs/heads/master | 2022-12-16T21:32:47.181638 | 2020-09-17T17:42:49 | 2020-09-17T17:42:49 | 284,633,477 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py |
class Order:
    """A customer order: an auto-assigned sequential id plus a list of
    products.

    Each product is expected to expose a numeric ``price`` attribute.
    """

    # Class-level counter used to hand out sequential order ids.
    count_order = 0

    def __init__(self):
        Order.count_order += 1
        self.__id_order = Order.count_order  # private sequential id
        self.product_list = []

    def add_product(self, product):
        """Append *product* to this order."""
        self.product_list.append(product)

    def calc_total(self):
        """Return the sum of the prices of all products (0 when empty).

        Uses sum() over the list instead of the original
        range(len(...)) index loop.
        """
        return sum(product.price for product in self.product_list)

    def get_products(self):
        """Return the (mutable) internal product list."""
        return self.product_list
| [
"[email protected]"
] | |
676934452bd736024c690192d1c1131a2e64e5b2 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_7/knnoth001/question1.py | 3099688c159642da489155736d68fff2d6c0ea97 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | '''Program that prints list of unique string of a string in the same order
Othniel KONAN
KNNOTH001
2014/04/27
'''
#VARIABLES
unik_list = []  # unique strings, kept in first-seen order
#PROMPT THE USER TO ENTER A STRING
st = input('Enter strings (end with DONE):\n')
# Read lines until the sentinel 'DONE'; keep only first occurrences.
while st != 'DONE':
    if not st in unik_list: #If it's a new string
        unik_list.append(st) #Add it to the unique list
    st = input() #Ask the user again
print('\nUnique list:')
#PRINT THE LIST
for l in unik_list:
    print(l)
| [
"[email protected]"
] | |
ffe39281999c87161f8b9dfbad8a984c251fd2be | 3199331cede4a22b782f945c6a71150a10c61afc | /20210519PythonAdvacned/02-metaclass/hook03/user.py | b3f9c6d008e28f42933539ae1b06a6dd3ab8d1dd | [] | no_license | AuroraBoreas/language-review | 6957a3cde2ef1b6b996716addaee077e70351de8 | 2cb0c491db7d179c283dba205b4d124a8b9a52a3 | refs/heads/main | 2023-08-19T23:14:24.981111 | 2021-10-11T12:01:47 | 2021-10-11T12:01:47 | 343,345,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | "#Python is a protocol orientated lang; every top-level function or syntax has a corresponding dunder method implemented;"
from library import Base
class Derived(Base):
    """Demo subclass of the library-provided Base.

    NOTE(review): assumes Base (or its metaclass hook) supplies
    ``foo``; that definition is not visible from this file.
    """
    def bar(self):
        return self.foo()
class A(Base):
    """Second demo subclass; like Derived, delegates to the inherited
    ``foo`` (assumed to come from Base — confirm in library.py)."""
    def a(self):
        return self.foo()
"[email protected]"
] | |
a7ddfca6859091ee1aa52cd9919e8345c630a6e9 | 065d4a661b39f4d747f1747ad6eaf4eb4144f368 | /deid/logger/message.py | 1248eece60713fe088582879714d1fd084146951 | [
"MIT"
] | permissive | howardpchen/deid | 4c52c3a6b7016b112027af530cd47fb18822a16d | 8ff05a43764669673968ed3b8b54ca11886d8b96 | refs/heads/master | 2020-04-13T06:27:29.299506 | 2019-01-12T04:03:38 | 2019-01-12T04:03:38 | 163,021,242 | 0 | 0 | MIT | 2018-12-24T20:18:05 | 2018-12-24T20:18:05 | null | UTF-8 | Python | false | false | 10,649 | py | '''
logger/message.py: Python logger base
Copyright (c) 2016-2018 Vanessa Sochat
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import os
import sys
from .spinner import Spinner
# Numeric message levels.  A message is emitted when its level is <=
# the configured level (see DeidMessage.isEnabledFor); QUIET (0)
# suppresses stdout output entirely.
ABORT = -5
FLAG = -4
ERROR = -3
WARNING = -2
LOG = -1
INFO = 1
CUSTOM = 1
QUIET = 0
VERBOSE = VERBOSE1 = 2
VERBOSE2 = 3
VERBOSE3 = 4
DEBUG = 5
# ANSI escape sequences used for terminal coloring.
PURPLE = "\033[95m"
YELLOW = "\033[93m"
RED = "\033[91m"
DARKRED = "\033[31m"
CYAN = "\033[36m"
class DeidMessage:
    """Console logger with optional ANSI coloring.

    The active verbosity comes from the MESSAGELEVEL environment
    variable (via get_logging_level()) at construction time; every
    emitted message is also appended to ``self.history``.
    """
    def __init__(self, MESSAGELEVEL=None):
        # NOTE(review): the MESSAGELEVEL argument is currently unused;
        # the level always comes from the environment.
        self.level = get_logging_level()
        self.history = []
        self.errorStream = sys.stderr
        self.outputStream = sys.stdout
        self.colorize = self.useColor()
        # Map message levels (and a few color names) to ANSI sequences.
        self.colors = {ABORT: DARKRED,
                       FLAG: RED,
                       ERROR: RED,
                       WARNING: YELLOW,
                       LOG: PURPLE,
                       CUSTOM: PURPLE,
                       DEBUG: CYAN,
                       'OFF': "\033[0m", # end sequence
                       'CYAN':CYAN,
                       'PURPLE':PURPLE,
                       'RED':RED,
                       'DARKRED':DARKRED,
                       'YELLOW':YELLOW}
    # Colors --------------------------------------------
    def useColor(self):
        '''useColor will determine if color should be added
        to a print. Will check if being run in a terminal, and
        if has support for asci'''
        # An explicit DEID_COLORIZE preference always wins.
        COLORIZE = get_user_color_preference()
        if COLORIZE is not None:
            return COLORIZE
        # Otherwise only colorize when both streams are real TTYs.
        streams = [self.errorStream, self.outputStream]
        for stream in streams:
            if not hasattr(stream, 'isatty'):
                return False
            if not stream.isatty():
                return False
        return True
    def addColor(self, level, text):
        '''addColor to the prompt (usually prefix) if terminal
        supports, and specified to do so'''
        if self.colorize:
            if level in self.colors:
                text = "%s%s%s" % (self.colors[level],
                                   text,
                                   self.colors["OFF"])
        return text
    def emitError(self, level):
        '''determine if a level should print to
        stderr, includes all levels but INFO and QUIET'''
        if level in [ABORT,
                     ERROR,
                     WARNING,
                     VERBOSE,
                     VERBOSE1,
                     VERBOSE2,
                     VERBOSE3,
                     DEBUG]:
            return True
        return False
    def emitOutput(self, level):
        '''determine if a level should print to stdout
        only includes INFO'''
        if level in [LOG,
                     INFO,
                     CUSTOM]:
            return True
        return False
    def isEnabledFor(self, messageLevel):
        '''check if a messageLevel is enabled to emit a level
        '''
        if messageLevel <= self.level:
            return True
        return False
    def emit(self, level, message, prefix=None, color=None):
        '''emit is the main function to print the message
        optionally with a prefix
        :param level: the level of the message
        :param message: the message to print
        :param prefix: a prefix for the message
        '''
        if color is None:
            color = level
        if prefix is not None:
            prefix = self.addColor(color, "%s " % (prefix))
        else:
            prefix = ""
        message = self.addColor(color, message)
        # Add the prefix
        message = "%s%s" % (prefix, message)
        if not message.endswith('\n'):
            message = "%s\n" % message
        # If the level is quiet, only print to error
        if self.level == QUIET:
            pass
        # Otherwise if in range print to stdout and stderr
        elif self.isEnabledFor(level):
            if self.emitError(level):
                self.write(self.errorStream, message)
            else:
                self.write(self.outputStream, message)
        # Add all log messages to history
        self.history.append(message)
    def write(self, stream, message):
        '''write will write a message to a stream,
        first checking the encoding
        '''
        if isinstance(message, bytes):
            message = message.decode('utf-8')
        stream.write(message)
    def get_logs(self, join_newline=True):
        '''get_logs will return the complete history, joined by newline
        (default) or as is.
        '''
        if join_newline:
            return '\n'.join(self.history)
        return self.history
    def show_progress(
            self,
            iteration,
            total,
            length=40,
            min_level=0,
            prefix=None,
            carriage_return=True,
            suffix=None,
            symbol=None):
        '''create a terminal progress bar, default bar shows for verbose+
        :param iteration: current iteration (Int)
        :param total: total iterations (Int)
        :param length: character length of bar (Int)
        '''
        percent = 100 * (iteration / float(total))
        progress = int(length * iteration // total)
        if suffix is None:
            suffix = ''
        if prefix is None:
            prefix = 'Progress'
        # Download sizes can be imperfect, setting carriage_return to False
        # and writing newline with caller cleans up the UI
        if percent >= 100:
            percent = 100
            progress = length
        if symbol is None:
            symbol = "="
        if progress < length:
            bar = symbol * progress + '|' + '-' * (length - progress - 1)
        else:
            bar = symbol * progress + '-' * (length - progress)
        # Only show progress bar for level > min_level
        if self.level > min_level:
            percent = "%5s" % ("{0:.1f}").format(percent)
            output = '\r' + prefix + \
                " |%s| %s%s %s" % (bar, percent, '%', suffix)
            # NOTE(review): the trailing comma below builds a throwaway
            # one-element tuple (a Python 2 leftover); it is harmless.
            sys.stdout.write(output),
            if iteration == total and carriage_return:
                sys.stdout.write('\n')
            sys.stdout.flush()
    # Logging ------------------------------------------
    def abort(self, message):
        self.emit(ABORT, message, 'ABORT')
    def flag(self, message):
        self.emit(FLAG, message, 'FLAGGED')
    def error(self, message):
        self.emit(ERROR, message, 'ERROR')
    def warning(self, message):
        self.emit(WARNING, message, 'WARNING')
    def log(self, message):
        self.emit(LOG, message, 'LOG')
    def custom(self, prefix, message, color=PURPLE):
        self.emit(CUSTOM, message, prefix, color)
    def info(self, message):
        self.emit(INFO, message)
    def newline(self):
        return self.info("")
    def verbose(self, message):
        self.emit(VERBOSE, message, "VERBOSE")
    def verbose1(self, message):
        # VERBOSE1 == VERBOSE, so emitting VERBOSE here is equivalent.
        self.emit(VERBOSE, message, "VERBOSE1")
    def verbose2(self, message):
        self.emit(VERBOSE2, message, 'VERBOSE2')
    def verbose3(self, message):
        self.emit(VERBOSE3, message, 'VERBOSE3')
    def debug(self, message):
        self.emit(DEBUG, message, 'DEBUG')
    def is_quiet(self):
        '''is_quiet returns true if the level is under 1
        '''
        # NOTE(review): the implementation returns True when level >= 1,
        # the opposite of both the name and the docstring — confirm
        # against callers before relying on it.
        if self.level < 1:
            return False
        return True
    # Terminal ------------------------------------------
    def table(self, rows, col_width=2):
        '''table will print a table of entries. If the rows is
        a dictionary, the keys are interpreted as column names. if
        not, a numbered list is used.
        '''
        labels = [str(x) for x in range(1,len(rows)+1)]
        if isinstance(rows, dict):
            labels = list(rows.keys())
            rows = list(rows.values())
        for row in rows:
            label = labels.pop(0)
            label = label.ljust(col_width)
            message = "\t".join(row)
            self.custom(prefix=label,
                        message=message)
def get_logging_level():
    '''Return the logging level selected by the user.

    Reads the MESSAGELEVEL environment variable (defaulting to DEBUG,
    i.e. all messages). Numeric values are returned as ints; otherwise
    known level names are mapped to their numeric constants. Unknown
    strings are returned unchanged.

    Fixes over the previous version: a numeric level no longer falls
    through to the string comparisons (which crashed with AttributeError
    at ``level.startswith`` on an int), and the duplicate, unreachable
    ``level == "LOG"`` branch was removed.
    '''
    raw = os.environ.get("MESSAGELEVEL", DEBUG)
    try:
        # Numeric levels (including the int DEBUG default) are used directly.
        return int(raw)
    except (TypeError, ValueError):
        level = raw
    if level == "CRITICAL":
        return FLAG
    elif level == "ABORT":
        return ABORT
    elif level == "ERROR":
        return ERROR
    elif level == "WARNING":
        return WARNING
    elif level == "LOG":
        return LOG
    elif level == "INFO":
        return INFO
    elif level == "QUIET":
        return QUIET
    elif level.startswith("VERBOSE"):
        # VERBOSE, VERBOSE1, VERBOSE2, VERBOSE3 all map to the most verbose.
        return VERBOSE3
    elif level == "DEBUG":
        return DEBUG
    # Unrecognized string: return it unchanged, as before.
    return level
def get_user_color_preference():
    '''Return the DEID_COLORIZE environment preference as a boolean,
    or None when the variable is unset.'''
    preference = os.environ.get('DEID_COLORIZE', None)
    if preference is None:
        return None
    return convert2boolean(preference)
def convert2boolean(arg):
    '''Coerce an environment-variable value to a boolean.

    Booleans pass through unchanged; strings are truthy when they are one
    of the usual affirmative spellings ("yes", "true", "t", "1", "y"),
    case-insensitively.
    '''
    if isinstance(arg, bool):
        return arg
    return arg.lower() in {"yes", "true", "t", "1", "y"}
# Shared spinner instance attached to the class for all loggers.
DeidMessage.spinner = Spinner()
# Module-level default logger used throughout the package.
bot = DeidMessage()
| [
"[email protected]"
] | |
00f71d34637a4cb4036d4283d721b653b33e0aae | 0725ed7ab6be91dfc0b16fef12a8871c08917465 | /dp/binomial_cof.py | abe4fe8f6815852764ae85642a87766712dd4896 | [] | no_license | siddhism/leetcode | 8cb194156893fd6e9681ef50c84f0355d09e9026 | 877933424e6d2c590d6ac53db18bee951a3d9de4 | refs/heads/master | 2023-03-28T08:14:12.927995 | 2021-03-24T10:46:20 | 2021-03-24T10:46:20 | 212,151,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 622 | py | def binomial_cof(n, k):
if cache[n][k]:
print ('returning from cache')
return cache[n][k]
print ('Calling binomial for n: ', n, ' k : ', k)
if k ==0 or n ==0 or k > n:
return 1
numerator = 1
for i in range(0, k):
# have to do n. n-1. n-2
numerator = numerator * (n-i)
denominator = 1
for i in range(1, k+1):
denominator = denominator * i
answer = numerator / denominator
cache[n][k] = answer
return cache[n][k]
# Demo driver: table dimensions and a memo table indexed as cache[n][k].
n = 10
k = 10
cache = [[0 for i in range(k+1)] for j in range(n+1)]
# Print C(5, k) for k = 0..9 (Python 2 print statement).
for k in range(0, 10):
    print binomial_cof(5, k)
| [
"[email protected]"
] | |
542f2773fc9b93136a8fae627f7e6a3a755e16c1 | 6df5ef4e82b506f965912df2f993c2bd59c1866d | /backend/manage.py | 6bf7a41ccbd1138324341618f40bc8566e588fa2 | [] | no_license | crowdbotics-apps/testfigma-dev-22683 | e08f3fd39b841cb48fb07f472154be32a662dff1 | 1bc3c125cbce3c7b5cc64951fb6c9f44c9251200 | refs/heads/master | 2023-06-06T02:07:57.886897 | 2021-07-01T14:14:01 | 2021-07-01T14:14:01 | 382,055,850 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's command-line utility for this project's settings."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'testfigma_dev_22683.settings')
    # Import lazily so a missing Django produces a helpful message.
    try:
        from django.core.management import execute_from_command_line as run_cli
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    run_cli(sys.argv)
# Standard Django entry point: run the CLI only when executed directly.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
c18a02ec9e309f0fce8c4230e1121427c9e9d638 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /GZhSDwwLwakxjpnoB_15.py | 6d7fd2f9496b057ace6ef5ae1b644de30829771d | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py |
def thirdthird(lst):
    """Return the third element of the third sub-list of *lst*,
    or False when either the list or that sub-list is too short."""
    if len(lst) > 2 and len(lst[2]) > 2:
        return lst[2][2]
    return False
| [
"[email protected]"
] | |
e4accbc4ed7fd607ec786c88a75538e19d8293a6 | 6e57bdc0a6cd18f9f546559875256c4570256c45 | /cts/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py | e8c6d19516ba3d0ff8dda59c7cfb6feba17b6d21 | [] | no_license | dongdong331/test | 969d6e945f7f21a5819cd1d5f536d12c552e825c | 2ba7bcea4f9d9715cbb1c4e69271f7b185a0786e | refs/heads/master | 2023-03-07T06:56:55.210503 | 2020-12-07T04:15:33 | 2020-12-07T04:15:33 | 134,398,935 | 2 | 1 | null | 2022-11-21T07:53:41 | 2018-05-22T10:26:42 | null | UTF-8 | Python | false | false | 13,608 | py | # Copyright 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import its.device
import its.caps
import its.objects
import its.image
import os.path
from matplotlib import pylab
import matplotlib
import matplotlib.pyplot as plt
import math
import textwrap
import time
import numpy as np
import scipy.stats
import scipy.signal
# Convert a 2D array a to a 4D array with dimensions [tile_size,
# tile_size, row, col] where row, col are tile indices.
def tile(a, tile_size):
    """Convert a 2D array into a 4D view indexed [tile_size, tile_size,
    tile_row, tile_col].

    The first two axes index the position *within* a tile; the last two
    index which tile. *a*'s dimensions must be multiples of *tile_size*
    (callers crop beforehand).

    Fix: use floor division so the reshape dimensions stay ints on
    Python 3 (``/`` produced floats there); behavior is unchanged on
    Python 2.
    """
    tile_rows, tile_cols = a.shape[0] // tile_size, a.shape[1] // tile_size
    a = a.reshape([tile_rows, tile_size, tile_cols, tile_size])
    a = a.transpose([1, 3, 0, 2])
    return a
def main():
    """Capture a set of raw images with increasing gains and measure the noise.

    Sweeps sensor sensitivity from the device minimum to maximum, capturing
    exposure-bracketed RAW frames at each step, measures per-tile mean and
    (high-pass filtered) variance for each Bayer plane, fits a linear
    noise model per sensitivity, then fits the global DNG noise model
    parameters (S, O per plane) by least squares. Saves diagnostic plots
    and writes generated C code implementing the model to noise_model.c.
    """
    NAME = os.path.basename(__file__).split(".")[0]
    # NOTE(review): BAYER_LIST appears unused below.
    BAYER_LIST = ['R', 'GR', 'GB', 'B']
    # How many sensitivities per stop to sample.
    steps_per_stop = 2
    # How large of tiles to use to compute mean/variance.
    tile_size = 64
    # Exposure bracketing range in stops
    bracket_stops = 4
    # How high to allow the mean of the tiles to go.
    max_signal_level = 0.5
    # Colors used for plotting the data for each exposure.
    colors = 'rygcbm'
    # Define a first order high pass filter to eliminate low frequency
    # signal content when computing variance.
    f = np.array([-1, 1]).astype('float32')
    # Make it a higher order filter by convolving the first order
    # filter with itself a few times.
    f = np.convolve(f, f)
    f = np.convolve(f, f)
    # Compute the normalization of the filter to preserve noise
    # power. Let a be the normalization factor we're looking for, and
    # Let X and X' be the random variables representing the noise
    # before and after filtering, respectively. First, compute
    # Var[a*X']:
    #
    # Var[a*X'] = a^2*Var[X*f_0 + X*f_1 + ... + X*f_N-1]
    # = a^2*(f_0^2*Var[X] + f_1^2*Var[X] + ... + (f_N-1)^2*Var[X])
    # = sum(f_i^2)*a^2*Var[X]
    #
    # We want Var[a*X'] to be equal to Var[X]:
    #
    # sum(f_i^2)*a^2*Var[X] = Var[X] -> a = sqrt(1/sum(f_i^2))
    #
    # We can just bake this normalization factor into the high pass
    # filter kernel.
    f /= math.sqrt(np.dot(f, f))
    bracket_factor = math.pow(2, bracket_stops)
    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        # Get basic properties we need.
        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
        sens_max_analog = props['android.sensor.maxAnalogSensitivity']
        white_level = props['android.sensor.info.whiteLevel']
        print "Sensitivity range: [%f, %f]" % (sens_min, sens_max)
        print "Max analog sensitivity: %f" % (sens_max_analog)
        # Do AE to get a rough idea of where we are.
        s_ae, e_ae, _, _, _ = \
            cam.do_3a(get_results=True, do_awb=False, do_af=False)
        # Underexpose to get more data for low signal levels.
        auto_e = s_ae*e_ae/bracket_factor
        # Focus at zero to intentionally blur the scene as much as possible.
        f_dist = 0.0
        # If the auto-exposure result is too bright for the highest
        # sensitivity or too dark for the lowest sensitivity, report
        # an error.
        min_exposure_ns, max_exposure_ns = \
            props['android.sensor.info.exposureTimeRange']
        if auto_e < min_exposure_ns*sens_max:
            raise its.error.Error("Scene is too bright to properly expose \
                at the highest sensitivity")
        if auto_e*bracket_factor > max_exposure_ns*sens_min:
            raise its.error.Error("Scene is too dark to properly expose \
                at the lowest sensitivity")
        # Start the sensitivities at the minimum.
        s = sens_min
        # samples/measured_models are indexed by Bayer plane (R, Gr, Gb, B).
        samples = [[], [], [], []]
        plots = []
        measured_models = [[], [], [], []]
        # Sweep sensitivity in steps of 1/steps_per_stop stop.
        while s <= sens_max + 1:
            print "ISO %d" % round(s)
            fig = plt.figure()
            plt_s = fig.gca()
            plt_s.set_title("ISO %d" % round(s))
            plt_s.set_xlabel("Mean signal level")
            plt_s.set_ylabel("Variance")
            samples_s = [[], [], [], []]
            for b in range(0, bracket_stops + 1):
                # Get the exposure for this sensitivity and exposure time.
                e = int(math.pow(2, b)*auto_e/float(s))
                req = its.objects.manual_capture_request(round(s), e, f_dist)
                cap = cam.do_capture(req, cam.CAP_RAW)
                planes = its.image.convert_capture_to_planes(cap, props)
                for (pidx, p) in enumerate(planes):
                    p = p.squeeze()
                    # Crop the plane to be a multiple of the tile size.
                    p = p[0:p.shape[0] - p.shape[0]%tile_size,
                          0:p.shape[1] - p.shape[1]%tile_size]
                    # convert_capture_to_planes normalizes the range
                    # to [0, 1], but without subtracting the black
                    # level.
                    black_level = its.image.get_black_level(
                        pidx, props, cap["metadata"])
                    p *= white_level
                    p = (p - black_level)/(white_level - black_level)
                    # Use our high pass filter to filter this plane.
                    hp = scipy.signal.sepfir2d(p, f, f).astype('float32')
                    means_tiled = \
                        np.mean(tile(p, tile_size), axis=(0, 1)).flatten()
                    vars_tiled = \
                        np.var(tile(hp, tile_size), axis=(0, 1)).flatten()
                    samples_e = []
                    for (mean, var) in zip(means_tiled, vars_tiled):
                        # Don't include the tile if it has samples that might
                        # be clipped.
                        if mean + 2*math.sqrt(var) < max_signal_level:
                            samples_e.append([mean, var])
                    if len(samples_e) > 0:
                        means_e, vars_e = zip(*samples_e)
                        plt_s.plot(means_e, vars_e, colors[b%len(colors)] + ',')
                        samples_s[pidx].extend(samples_e)
            for (pidx, p) in enumerate(samples_s):
                # NOTE(review): 'p' below is rebound by the unpack, shadowing
                # the loop variable; linregress returns (slope, intercept,
                # rvalue, pvalue, stderr).
                [S, O, R, p, stderr] = scipy.stats.linregress(samples_s[pidx])
                measured_models[pidx].append([round(s), S, O])
                print "Sensitivity %d: %e*y + %e (R=%f)" % (round(s), S, O, R)
                # Add the samples for this sensitivity to the global samples list.
                samples[pidx].extend([(round(s), mean, var) for (mean, var) in samples_s[pidx]])
                # Add the linear fit to the plot for this sensitivity.
                plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'rgkb'[pidx]+'--',
                           label="Linear fit")
            # NOTE(review): max() raises ValueError if any plane collected no
            # samples at this sensitivity.
            xmax = max([max([x for (x, _) in p]) for p in samples_s])*1.25
            ymax = max([max([y for (_, y) in p]) for p in samples_s])*1.25
            plt_s.set_xlim(xmin=0, xmax=xmax)
            plt_s.set_ylim(ymin=0, ymax=ymax)
            fig.savefig("%s_samples_iso%04d.png" % (NAME, round(s)))
            plots.append([round(s), fig])
            # Move to the next sensitivity.
            s *= math.pow(2, 1.0/steps_per_stop)
    (fig, (plt_S, plt_O)) = plt.subplots(2, 1)
    plt_S.set_title("Noise model")
    plt_S.set_ylabel("S")
    plt_S.legend(loc=2)
    plt_O.set_xlabel("ISO")
    plt_O.set_ylabel("O")
    # Global model coefficients, one entry per Bayer plane:
    # S = A*sens + B ; O = C*sens^2 + D*digital_gain^2.
    A = []
    B = []
    C = []
    D = []
    for (pidx, p) in enumerate(measured_models):
        # Grab the sensitivities and line parameters from each sensitivity.
        S_measured = [e[1] for e in measured_models[pidx]]
        O_measured = [e[2] for e in measured_models[pidx]]
        sens = np.asarray([e[0] for e in measured_models[pidx]])
        sens_sq = np.square(sens)
        # Use a global linear optimization to fit the noise model.
        gains = np.asarray([s[0] for s in samples[pidx]])
        means = np.asarray([s[1] for s in samples[pidx]])
        vars_ = np.asarray([s[2] for s in samples[pidx]])
        # Define digital gain as the gain above the max analog gain
        # per the Camera2 spec. Also, define a corresponding C
        # expression snippet to use in the generated model code.
        digital_gains = np.maximum(gains/sens_max_analog, 1)
        digital_gain_cdef = "(sens / %d.0) < 1.0 ? 1.0 : (sens / %d.0)" % \
            (sens_max_analog, sens_max_analog)
        # Find the noise model parameters via least squares fit.
        ad = gains*means
        bd = means
        cd = gains*gains
        dd = digital_gains*digital_gains
        a = np.asarray([ad, bd, cd, dd]).T
        b = vars_
        # To avoid overfitting to high ISOs (high variances), divide the system
        # by the gains.
        a /= (np.tile(gains, (a.shape[1], 1)).T)
        b /= gains
        # NOTE(review): np.linalg.lstsq without rcond= warns on newer NumPy.
        [A_p, B_p, C_p, D_p], _, _, _ = np.linalg.lstsq(a, b)
        A.append(A_p)
        B.append(B_p)
        C.append(C_p)
        D.append(D_p)
        # Plot the noise model components with the values predicted by the
        # noise model.
        S_model = A_p*sens + B_p
        O_model = \
            C_p*sens_sq + D_p*np.square(np.maximum(sens/sens_max_analog, 1))
        plt_S.loglog(sens, S_measured, 'rgkb'[pidx]+'+', basex=10, basey=10,
                     label="Measured")
        plt_S.loglog(sens, S_model, 'rgkb'[pidx]+'x', basex=10, basey=10, label="Model")
        plt_O.loglog(sens, O_measured, 'rgkb'[pidx]+'+', basex=10, basey=10,
                     label="Measured")
        plt_O.loglog(sens, O_model, 'rgkb'[pidx]+'x', basex=10, basey=10, label="Model")
    fig.savefig("%s.png" % (NAME))
    for [s, fig] in plots:
        plt_s = fig.gca()
        dg = max(s/sens_max_analog, 1)
        for (pidx, p) in enumerate(measured_models):
            S = A[pidx]*s + B[pidx]
            O = C[pidx]*s*s + D[pidx]*dg*dg
            plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'rgkb'[pidx]+'-',
                       label="Model")
        plt_s.legend(loc=2)
        plt.figure(fig.number)
        # Re-save the plot with the global model.
        fig.savefig("%s_samples_iso%04d.png" % (NAME, round(s)))
    # Generate the noise model implementation.
    A_array = ",".join([str(i) for i in A])
    B_array = ",".join([str(i) for i in B])
    C_array = ",".join([str(i) for i in C])
    D_array = ",".join([str(i) for i in D])
    noise_model_code = textwrap.dedent("""\
    /* Generated test code to dump a table of data for external validation
    * of the noise model parameters.
    */
    #include <stdio.h>
    #include <assert.h>
    double compute_noise_model_entry_S(int plane, int sens);
    double compute_noise_model_entry_O(int plane, int sens);
    int main(void) {
    for (int plane = 0; plane < %d; plane++) {
    for (int sens = %d; sens <= %d; sens += 100) {
    double o = compute_noise_model_entry_O(plane, sens);
    double s = compute_noise_model_entry_S(plane, sens);
    printf("%%d,%%d,%%lf,%%lf\\n", plane, sens, o, s);
    }
    }
    return 0;
    }
    /* Generated functions to map a given sensitivity to the O and S noise
    * model parameters in the DNG noise model. The planes are in
    * R, Gr, Gb, B order.
    */
    double compute_noise_model_entry_S(int plane, int sens) {
    static double noise_model_A[] = { %s };
    static double noise_model_B[] = { %s };
    double A = noise_model_A[plane];
    double B = noise_model_B[plane];
    double s = A * sens + B;
    return s < 0.0 ? 0.0 : s;
    }
    double compute_noise_model_entry_O(int plane, int sens) {
    static double noise_model_C[] = { %s };
    static double noise_model_D[] = { %s };
    double digital_gain = %s;
    double C = noise_model_C[plane];
    double D = noise_model_D[plane];
    double o = C * sens * sens + D * digital_gain * digital_gain;
    return o < 0.0 ? 0.0 : o;
    }
    """ % (len(A), sens_min, sens_max, A_array, B_array, C_array, D_array, digital_gain_cdef))
    print noise_model_code
    text_file = open("noise_model.c", "w")
    text_file.write("%s" % noise_model_code)
    text_file.close()
# Run the ITS test when executed directly.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
6bd6130c6ed40039761f0f71d944d545d53d6817 | 63e3e22fd46c07dbd18b74952b460cb7dc347466 | /peripteras/kiosks/migrations/0003_brand_category_item.py | da8d3e4017a041b0f37e0b99d2806d30435f2c9b | [
"MIT"
] | permissive | sm2x/e-peripteras | 0104dad6a4a0e2765c403c0b81dd34b2fefa847b | 39634ca07de535c6a1188af636e394a8966672ca | refs/heads/master | 2021-10-24T01:28:21.987338 | 2019-03-21T10:45:30 | 2019-03-21T10:45:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,812 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Brand, Category and Item models for the kiosks app."""

    dependencies = [
        ('kiosks', '0002_auto_20160905_1531'),
    ]
    operations = [
        # Brand: a simple lookup table of brand names.
        migrations.CreateModel(
            name='Brand',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(default=b'', max_length=255, verbose_name='Brand name')),
            ],
        ),
        # Category: slugged product category; plural display name overridden.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(default=b'', max_length=255, verbose_name='Category title')),
                ('slug', models.SlugField(unique=True, max_length=255, blank=True)),
            ],
            options={
                'verbose_name_plural': 'Categories',
            },
        ),
        # Item: a priced product linked to a brand, a category and a kiosk.
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('price', models.DecimalField(max_digits=6, decimal_places=2)),
                ('slug', models.SlugField(unique=True, max_length=255, blank=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(auto_now=True, null=True)),
                ('brand', models.ForeignKey(to='kiosks.Brand')),
                ('category', models.ForeignKey(to='kiosks.Category')),
                ('kiosk', models.ForeignKey(to='kiosks.Kiosk')),
            ],
        ),
    ]
| [
"[email protected]"
] | |
6709fd566ee0ff20b661b59581d20d4021135340 | 942ee5e8d54e8ebe9c5c841fbfdd1da652946944 | /1001-1500/1268.Search Suggestions System.py | decc52676b284d49aeebcc99154207205fe08d97 | [] | no_license | kaiwensun/leetcode | 0129c174457f32887fbca078fb448adce46dd89d | 6b607f4aae3a4603e61f2e2b7480fdfba1d9b947 | refs/heads/master | 2023-08-31T07:30:50.459062 | 2023-08-27T07:59:16 | 2023-08-27T07:59:16 | 57,526,914 | 69 | 9 | null | 2023-08-20T06:34:41 | 2016-05-01T05:37:29 | Python | UTF-8 | Python | false | false | 1,033 | py | import collections
class Solution(object):
    def suggestedProducts(self, products, searchWord):
        """Return, for each successive prefix of searchWord, up to three
        lexicographically smallest products sharing that prefix.

        Builds a trie over products (duplicates counted at the terminal
        '#' entry), then for each prefix walks the trie and DFS-collects
        suggestions in sorted key order, stopping at three.

        Fix: use range() instead of the Python-2-only xrange() (identical
        iteration results, now also valid on Python 3).

        :type products: List[str]
        :type searchWord: str
        :rtype: List[List[str]]
        """
        def collect(node, res):
            # Emit words stored at this node first (duplicates up to 3 total).
            if '#' in node:
                word, count = node['#']
                for _ in range(count):
                    if len(res) == 3:
                        break
                    res.append(word)
            if len(res) == 3:
                return res
            # Recurse into children in lexicographic order; '#' sorts before
            # letters but is skipped explicitly.
            for key in sorted(node.keys()):
                if key != "#" and len(collect(node[key], res)) == 3:
                    return res
            return res

        # Autovivifying trie: each node is a defaultdict of child nodes.
        T = lambda: collections.defaultdict(T)
        trie = T()
        for product in products:
            node = trie
            for c in product:
                node = node[c]
            # Terminal entry: (word, occurrence count).
            node["#"] = (product, node["#"][1] + 1 if "#" in node else 1)
        result = []
        node = trie
        for c in searchWord:
            # Missing children autovivify to empty nodes, yielding [] below.
            node = node[c]
            result.append(collect(node, []))
        return result
| [
"[email protected]"
] | |
8955a3578716804771d062c9949cb5bf9b88be34 | e51c261f76ecb86d85c1f7f93c0c3e4953284233 | /setup.py | c49e76f060f460899c14fea338caf86f58519c53 | [
"MIT"
] | permissive | adodo1/pyminitouch | 6f0194f3c25ab78abc89fabe9e8cd6bc19b20f54 | cefeab762277626b5a8167454467971a4c376e31 | refs/heads/master | 2020-05-20T09:48:45.430918 | 2019-03-25T06:39:26 | 2019-03-25T06:39:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | from setuptools import setup, find_packages
# Package metadata for pyminitouch; consumed by pip / setuptools.
setup(
    name='pyminitouch',
    version='0.2.5',
    description='python wrapper of minitouch, for better experience',
    author='williamfzc',
    author_email='[email protected]',
    url='https://github.com/williamfzc/pyminitouch',
    packages=find_packages(),
    # Runtime dependencies: loguru for logging, requests for HTTP calls.
    install_requires=[
        'loguru',
        'requests',
    ]
)
| [
"[email protected]"
] | |
be040983dab80ea1a7023c6ca738096ddb61864d | 0facb323be8a76bb4c168641309972fa77cbecf2 | /Configurations/TTSemiLep/scripts/diffNuisances/diffNuisances.py | c039bed58979b8c0265f3553c0db8f4b52d7615c | [] | no_license | bhoh/SNuAnalytics | ef0a1ba9fa0d682834672a831739dfcfa1e7486b | 34d1fc062e212da152faa83be50561600819df0e | refs/heads/master | 2023-07-06T03:23:45.343449 | 2023-06-26T12:18:28 | 2023-06-26T12:18:28 | 242,880,298 | 0 | 1 | null | 2020-02-25T01:17:50 | 2020-02-25T01:17:49 | null | UTF-8 | Python | false | false | 22,375 | py | #!/usr/b::in/env python
import re
from sys import argv, stdout, stderr, exit
import datetime
from optparse import OptionParser
import HiggsAnalysis.CombinedLimit.calculate_pulls as CP
import ROOT
ROOT.gStyle.SetOptStat(0)
# tool to compare fitted nuisance parameters to prefit values.
#
# Also used to check for potential problems in RooFit workspaces to be used with combine
# (see https://twiki.cern.ch/twiki/bin/viewauth/CMS/HiggsWG/HiggsPAGPreapprovalChecks)
# import ROOT with a fix to get batch mode (http://root.cern.ch/phpBB3/viewtopic.php?t=3198)
hasHelp = False
for X in ("-h", "-?", "--help"):
if X in argv:
hasHelp = True
argv.remove(X)
argv.append( '-b-' )
import ROOT
ROOT.gROOT.SetBatch(True)
#ROOT.gSystem.Load("libHiggsAnalysisCombinedLimit")
argv.remove( '-b-' )
if hasHelp: argv.append("-h")
parser = OptionParser(usage="usage: %prog [options] in.root \nrun with --help to get list of options")
parser.add_option("--vtol", "--val-tolerance", dest="vtol", default=0.30, type="float", help="Report nuisances whose value changes by more than this amount of sigmas")
parser.add_option("--stol", "--sig-tolerance", dest="stol", default=0.10, type="float", help="Report nuisances whose sigma changes by more than this amount")
parser.add_option("--vtol2", "--val-tolerance2", dest="vtol2", default=2.0, type="float", help="Report severely nuisances whose value changes by more than this amount of sigmas")
parser.add_option("--stol2", "--sig-tolerance2", dest="stol2", default=0.50, type="float", help="Report severely nuisances whose sigma changes by more than this amount")
parser.add_option("-a", "--all", dest="show_all_parameters", default=False, action="store_true", help="Print all nuisances, even the ones which are unchanged w.r.t. pre-fit values.")
parser.add_option("-A", "--abs", dest="absolute_values", default=False, action="store_true", help="Report also absolute values of nuisance values and errors, not only the ones normalized to the input sigma")
parser.add_option("-p", "--poi", dest="poi", default="r", type="string", help="Name of signal strength parameter (default is 'r' as per text2workspace.py)")
parser.add_option("-f", "--format", dest="format", default="text", type="string", help="Output format ('text', 'latex', 'twiki'")
parser.add_option("-g", "--histogram", dest="plotfile", default=None, type="string", help="If true, plot the pulls of the nuisances to the given file.")
parser.add_option("", "--pullDef", dest="pullDef", default="", type="string", help="Choose the definition of the pull, see python/calculate_pulls.py for options")
parser.add_option("", "--skipFitS", dest="skipFitS", default=False, action='store_true', help="skip the S+B fit, instead the B-only fit will be repeated")
parser.add_option("", "--skipFitB", dest="skipFitB", default=False, action='store_true', help="skip the B-only fit, instead the S+B fit will be repeated")
parser.add_option("", "--sortBy", dest="sortBy", default="correlation", type='string', help="choose 'correlation' or 'impact' to sort rows by correlation with or impact on --poi (largest to smallest absolute)")
(options, args) = parser.parse_args()
if len(args) == 0:
parser.print_usage()
exit(1)
if options.pullDef!="" and options.pullDef not in CP.allowed_methods(): exit("Method %s not allowed, choose one of [%s]"%(options.pullDef,",".join(CP.allowed_methods())))
if options.pullDef and options.absolute_values :
print "Pulls are always defined as absolute, will modify --absolute_values to False for you"
options.absolute_values = False
if options.pullDef : options.show_all_parameters=True
if options.sortBy not in ['correlation','impact']: exit("choose one of [ %s ] for --sortBy"%(",".join()['correlation','impact']))
setUpString = "diffNuisances run on %s, at %s with the following options ... "%(args[0],datetime.datetime.utcnow())+str(options)
file = ROOT.TFile(args[0])
if file == None: raise RuntimeError, "Cannot open file %s" % args[0]
fit_s = file.Get("fit_s") if not options.skipFitS else file.Get("fit_b")
fit_b = file.Get("fit_b") if not options.skipFitB else file.Get("fit_s")
prefit = file.Get("nuisances_prefit")
if fit_s == None or fit_s.ClassName() != "RooFitResult": raise RuntimeError, "File %s does not contain the output of the signal fit 'fit_s'" % args[0]
if fit_b == None or fit_b.ClassName() != "RooFitResult": raise RuntimeError, "File %s does not contain the output of the background fit 'fit_b'" % args[0]
if prefit == None or prefit.ClassName() != "RooArgSet": raise RuntimeError, "File %s does not contain the prefit nuisances 'nuisances_prefit'" % args[0]
isFlagged = {}
# maps from nuisance parameter name to the row to be printed in the table
table = {}
# get the fitted parameters
fpf_b = fit_b.floatParsFinal()
fpf_s = fit_s.floatParsFinal()
pulls = []
nuis_p_i=0
title = "pull" if options.pullDef else "#theta"
bin_size = 50
total_size = prefit.getSize()
# Also make histograms for pull distributions:
hist_fit_b = ROOT.TH1F("fit_b" ,"B-only fit Nuisances;;%s "%title,total_size,0,total_size)
hist_fit_s = ROOT.TH1F("fit_s" ,"S+B fit Nuisances ;;%s "%title,total_size,0,total_size)
hist_prefit = ROOT.TH1F("prefit_nuisancs","Prefit Nuisances ;;%s "%title,total_size,0,total_size)
# Store also the *asymmetric* uncertainties
gr_fit_b = ROOT.TGraphAsymmErrors(); gr_fit_b.SetTitle("fit_b_g")
gr_fit_s = ROOT.TGraphAsymmErrors(); gr_fit_s.SetTitle("fit_b_s")
error_poi = fpf_s.find(options.poi).getError()
# loop over all fitted parameters
for i in range(prefit.getSize()):
nuis_s = fpf_s.at(i)
name = nuis_s.GetName();
nuis_b = fpf_b.find(name)
nuis_p = prefit.find(name)
# keeps information to be printed about the nuisance parameter
row = []
flag = False;
mean_p, sigma_p, sigma_pu, sigma_pd = 0,0,0,0
if nuis_p == None:
# nuisance parameter NOT present in the prefit result
if not options.absolute_values and not (options.pullDef=="unconstPullAsym"): continue
row += [ "[%.2f, %.2f]" % (nuis_s.getMin(), nuis_s.getMax()) ]
else:
# get best-fit value and uncertainty at prefit for this
# nuisance parameter
if nuis_p.getErrorLo()==0 : nuis_p.setError(nuis_p.getErrorHi())
mean_p, sigma_p, sigma_pu,sigma_pd = (nuis_p.getVal(), nuis_p.getError(),nuis_p.getErrorHi(),nuis_p.getErrorLo())
if not sigma_p > 0: sigma_p = (nuis_p.getMax()-nuis_p.getMin())/2
nuisIsSymm = abs(abs(nuis_p.getErrorLo())-abs(nuis_p.getErrorHi()))<0.01 or nuis_p.getErrorLo() == 0
if options.absolute_values:
if nuisIsSymm : row += [ "%.6f +/- %.6f" % (nuis_p.getVal(), nuis_p.getError()) ]
else: row += [ "%.6f +%.6f %.6f" % (nuis_p.getVal(), nuis_p.getErrorHi(), nuis_p.getErrorLo()) ]
for fit_name, nuis_x in [('b', nuis_b), ('s',nuis_s)]:
if nuis_x == None:
row += [ " n/a " ]
else:
nuisIsSymm = abs(abs(nuis_x.getErrorLo())-abs(nuis_x.getErrorHi()))<0.01 or nuis_x.getErrorLo() == 0
if nuisIsSymm : nuis_x.setError(nuis_x.getErrorHi())
nuiselo = abs(nuis_x.getErrorLo()) if nuis_x.getErrorLo()>0 else nuis_x.getError()
nuisehi = nuis_x.getErrorHi()
if options.pullDef and nuis_p!=None:
nx,ned,neu = CP.returnPullAsym(options.pullDef,nuis_x.getVal(),mean_p,nuisehi,sigma_pu,abs(nuiselo),abs(sigma_pd))
else:
nx,ned,neu = nuis_x.getVal(), nuiselo, nuisehi
if nuisIsSymm : row += [ "%+.2f +/- %.2f" % (nx, (abs(ned)+abs(neu))/2) ]
else: row += [ "%+.2f +%.2f %.2f" % (nx, neu, ned) ]
if nuis_p != None:
if options.plotfile:
if fit_name=='b':
nuis_p_i+=1
if options.pullDef and nuis_p!=None:
#nx,ned,neu = CP.returnPullAsym(options.pullDef,nuis_x.getVal(),mean_p,nuis_x.getErrorHi(),sigma_pu,abs(nuis_x.getErrorLo()),abs(sigma_pd))
gr_fit_b.SetPoint(nuis_p_i-1,nuis_p_i-0.5+0.1,nx)
gr_fit_b.SetPointError(nuis_p_i-1,0,0,ned,neu)
else:
gr_fit_b.SetPoint(nuis_p_i-1,nuis_p_i-0.5+0.1,nuis_x.getVal())
gr_fit_b.SetPointError(nuis_p_i-1,0,0,abs(nuis_x.getErrorLo()),nuis_x.getErrorHi())
hist_fit_b.SetBinContent(nuis_p_i,nuis_x.getVal())
hist_fit_b.SetBinError(nuis_p_i,nuis_x.getError())
hist_fit_b.GetXaxis().SetBinLabel(nuis_p_i,name)
gr_fit_b.GetXaxis().SetBinLabel(nuis_p_i,name)
if fit_name=='s':
if options.pullDef and nuis_p!=None:
#nx,ned,neu = CP.returnPullAsym(options.pullDef,nuis_x.getVal(),mean_p,nuis_x.getErrorHi(),sigma_pu,abs(nuis_x.getErrorLo()),abs(sigma_pd))
gr_fit_s.SetPoint(nuis_p_i-1,nuis_p_i-0.5-0.1,nx)
gr_fit_s.SetPointError(nuis_p_i-1,0,0,ned,neu)
else:
gr_fit_s.SetPoint(nuis_p_i-1,nuis_p_i-0.5-0.1,nuis_x.getVal())
gr_fit_s.SetPointError(nuis_p_i-1,0,0,abs(nuis_x.getErrorLo()),nuis_x.getErrorHi())
hist_fit_s.SetBinContent(nuis_p_i,nuis_x.getVal())
hist_fit_s.SetBinError(nuis_p_i,nuis_x.getError())
hist_fit_s.GetXaxis().SetBinLabel(nuis_p_i,name)
gr_fit_s.GetXaxis().SetBinLabel(nuis_p_i,name)
hist_prefit.SetBinContent(nuis_p_i,mean_p)
hist_prefit.SetBinError(nuis_p_i,sigma_p)
hist_prefit.GetXaxis().SetBinLabel(nuis_p_i,name)
if sigma_p>0:
if options.pullDef:
valShift = nx
sigShift = 1
else:
# calculate the difference of the nuisance parameter
# w.r.t to the prefit value in terms of the uncertainty
# on the prefit value
valShift = (nuis_x.getVal() - mean_p)/sigma_p
# ratio of the nuisance parameter's uncertainty
# w.r.t the prefit uncertainty
sigShift = nuis_x.getError()/sigma_p
else :
#print "No definition for prefit uncertainty %s. Printing absolute shifts"%(nuis_p.GetName())
valShift = (nuis_x.getVal() - mean_p)
sigShift = nuis_x.getError()
if options.pullDef:
row[-1] += ""
elif options.absolute_values:
row[-1] += " (%+4.2fsig, %4.2f)" % (valShift, sigShift)
else:
row[-1] = " %+4.2f, %4.2f" % (valShift, sigShift)
if fit_name == 'b':
pulls.append(valShift)
if (abs(valShift) > options.vtol2 or abs(sigShift-1) > options.stol2):
# severely report this nuisance:
#
# the best fit moved by more than 2.0 sigma or the uncertainty (sigma)
# changed by more than 50% (default thresholds) w.r.t the prefit values
isFlagged[(name,fit_name)] = 2
flag = True
elif (abs(valShift) > options.vtol or abs(sigShift-1) > options.stol):
# report this nuisance:
#
# the best fit moved by more than 0.3 sigma or the uncertainty (sigma)
# changed by more than 10% (default thresholds) w.r.t the prefit values
if options.show_all_parameters: isFlagged[(name,fit_name)] = 1
flag = True
elif options.show_all_parameters:
flag = True
# end of loop over s and b
row += [ "%+4.2f" % fit_s.correlation(name, options.poi) ]
row += [ "%+4.3f" % (nuis_x.getError()*fit_s.correlation(name, options.poi)*error_poi) ]
if flag or options.show_all_parameters: table[name] = row
#end of loop over all fitted parameters
#----------
# print the results
#----------
#print details
print setUpString
print
fmtstring = "%-40s %15s %15s %10s %10s"
highlight = "*%s*"
morelight = "!%s!"
pmsub, sigsub = None, None
if options.format == 'text':
if options.skipFitS: print " option '--skipFitS' set true. s+b Fit is just a copy of the b-only fit"
if options.skipFitB: print " option '--skipFitB' set true. b-only Fit is just a copy of the s+b fit"
if options.pullDef:
fmtstring = "%-40s %30s %30s %10s %10s"
print fmtstring % ('name', 'b-only fit pull', 's+b fit pull', 'rho', 'approx impact')
elif options.absolute_values:
fmtstring = "%-40s %15s %30s %30s %10s %10s"
print fmtstring % ('name', 'pre fit', 'b-only fit', 's+b fit', 'rho', 'approx impact')
else:
print fmtstring % ('name', 'b-only fit', 's+b fit', 'rho', 'approx impact')
elif options.format == 'latex':
pmsub = (r"(\S+) \+/- (\S+)", r"$\1 \\pm \2$")
sigsub = ("sig", r"$\\sigma$")
highlight = "\\textbf{%s}"
morelight = "{{\\color{red}\\textbf{%s}}}"
if options.skipFitS: print " option '--skipFitS' set true. $s+b$ Fit is just a copy of the $b$-only fit"
if options.skipFitB: print " option '--skipFitB' set true. $b$-only Fit is just a copy of the $s+b$ fit"
if options.pullDef:
fmtstring = "%-40s & %30s & %30s & %6s & %6s \\\\"
print "\\begin{tabular}{|l|r|r|r|r|} \\hline ";
print (fmtstring % ('name', '$b$-only fit pull', '$s+b$ fit pull', r'$\rho(\theta, \mu)$', r'I(\theta, \mu)')), " \\hline"
elif options.absolute_values:
fmtstring = "%-40s & %15s & %30s & %30s & %6s & %6s \\\\"
print "\\begin{tabular}{|l|r|r|r|r|r|} \\hline ";
print (fmtstring % ('name', 'pre fit', '$b$-only fit', '$s+b$ fit', r'$\rho(\theta, \mu)$', r'I(\theta, \mu)')), " \\hline"
else:
fmtstring = "%-40s & %15s & %15s & %6s & %6s \\\\"
print "\\begin{tabular}{|l|r|r|r|r|} \\hline ";
#what = r"$(x_\text{out} - x_\text{in})/\sigma_{\text{in}}$, $\sigma_{\text{out}}/\sigma_{\text{in}}$"
what = r"\Delta x/\sigma_{\text{in}}$, $\sigma_{\text{out}}/\sigma_{\text{in}}$"
print fmtstring % ('', '$b$-only fit', '$s+b$ fit', '', '')
print (fmtstring % ('name', what, what, r'$\rho(\theta, \mu)$', r'I(\theta, \mu)')), " \\hline"
elif options.format == 'twiki':
pmsub = (r"(\S+) \+/- (\S+)", r"\1 ± \2")
sigsub = ("sig", r"σ")
highlight = "<b>%s</b>"
morelight = "<b style='color:red;'>%s</b>"
if options.skipFitS: print " option '--skipFitS' set true. $s+b$ Fit is just a copy of the $b$-only fit"
if options.skipFitB: print " option '--skipFitB' set true. $b$-only Fit is just a copy of the $s+b$ fit"
if options.pullDef:
fmtstring = "| <verbatim>%-40s</verbatim> | %-30s | %-30s | %-15s | %-15s | %-15s |"
print "| *name* | *b-only fit pull* | *s+b fit pull* | *corr.* | *approx. impact* |"
elif options.absolute_values:
fmtstring = "| <verbatim>%-40s</verbatim> | %-15s | %-30s | %-30s | %-15s | %-15s |"
print "| *name* | *pre fit* | *b-only fit* | *s+b fit* | *corr.* | *approx. impact* |"
else:
fmtstring = "| <verbatim>%-40s</verbatim> | %-15s | %-15s | %-15s | %-15s |"
print "| *name* | *b-only fit* | *s+b fit* | *corr.* | *approx. impact* |"
elif options.format == 'html':
pmsub = (r"(\S+) \+/- (\S+)", r"\1 ± \2")
sigsub = ("sig", r"σ")
highlight = "<b>%s</b>"
morelight = "<strong>%s</strong>"
print """
<html><head><title>Comparison of nuisances</title>
<style type="text/css">
td, th { border-bottom: 1px solid black; padding: 1px 1em; }
td { font-family: 'Consolas', 'Courier New', courier, monospace; }
strong { color: red; font-weight: bolder; }
</style>
</head><body style="font-family: 'Verdana', sans-serif; font-size: 10pt;"><h1>Comparison of nuisances</h1>
<table>
"""
if options.pullDef:
print "<tr><th>nuisance</th><th>background fit pull </th><th>signal fit pull</th><th>ρ(μ, θ)</th><th>I(μ, θ)</th></tr>"
fmtstring = "<tr><td><tt>%-40s</tt> </td><td> %-30s </td><td> %-30s </td><td> %-15s </td><td> %-15s </td></tr>"
elif options.absolute_values:
print "<tr><th>nuisance</th><th>pre fit</th><th>background fit </th><th>signal fit</th><th>correlation</th></tr>"
fmtstring = "<tr><td><tt>%-40s</tt> </td><td> %-15s </td><td> %-30s </td><td> %-30s </td><td> %-15s </td><td> %-15s </td></tr>"
else:
what = "Δx/σ<sub>in</sub>, σ<sub>out</sub>/σ<sub>in</sub>";
print "<tr><th>nuisance</th><th>background fit<br/>%s </th><th>signal fit<br/>%s</th><th>ρ(μ, θ)<th>I(μ, θ)</th></tr>" % (what,what)
fmtstring = "<tr><td><tt>%-40s</tt> </td><td> %-15s </td><td> %-15s </td><td> %-15s </td><td> %-15s </td></tr>"
names = table.keys()
names.sort()
if options.sortBy == "correlation":
names = [[abs(float(table[t][-2])),t] for t in table.keys()]
names.sort(); names.reverse(); names=[n[1] for n in names]
elif options.sortBy == "impact":
names = [[abs(float(table[t][-1])),t] for t in table.keys()]
names.sort(); names.reverse(); names=[n[1] for n in names]
highlighters = { 1:highlight, 2:morelight };
for n in names:
v = table[n]
if pmsub != None: v = [ re.sub(pmsub[0], pmsub[1], i) for i in v ]
if sigsub != None: v = [ re.sub(sigsub[0], sigsub[1], i) for i in v ]
if (n,'b') in isFlagged: v[-3] = highlighters[isFlagged[(n,'b')]] % v[-3]
if (n,'s') in isFlagged: v[-2] = highlighters[isFlagged[(n,'s')]] % v[-2]
if options.format == "latex": n = n.replace(r"_", r"\_")
if options.absolute_values:
print fmtstring % (n, v[0], v[1], v[2], v[3],v[4])
else:
print fmtstring % (n, v[0], v[1], v[2],v[3])
if options.format == "latex":
print " \\hline\n\end{tabular}"
elif options.format == "html":
print "</table></body></html>"
if options.plotfile:
fout = ROOT.TFile(options.plotfile,"UPDATE")
ROOT.gROOT.SetStyle("Plain")
ROOT.gStyle.SetOptFit(0000)
histogram = ROOT.TH1F("pulls", "Pulls", 60, -3, 3)
for pull in pulls:
histogram.Fill(pull)
canvas = ROOT.TCanvas("asdf", "asdf", 800, 800)
if options.pullDef : histogram.GetXaxis().SetTitle("pull")
else: histogram.GetXaxis().SetTitle("(#theta-#theta_{0})/#sigma_{pre-fit}")
histogram.SetTitle("Post-fit nuisance pull distribution")
histogram.SetMarkerStyle(20)
histogram.SetMarkerSize(2)
histogram.Draw("pe")
fout.WriteTObject(canvas, "asdf")
canvas_nuis = ROOT.TCanvas("nuisances", "nuisances", 900, 600)
canvas_nuis.SetBottomMargin(0.55)
canvas_nuis.Print(options.plotfile.replace(".root",".pdf") + "[")
canvas_pferrs = ROOT.TCanvas("post_fit_errs", "post_fit_errs", 900, 600)
n_iteration = total_size/bin_size + 1
for part_idx in range(1, n_iteration+1):
if (part_idx)*bin_size < total_size:
curr_bin_size = bin_size
else:
curr_bin_size = total_size - (part_idx-1)*bin_size
#iteration range (part_idx-1)*bin_size ~ (part_idx)*bin_size
canvas_nuis.cd()
hist_fit_e_s = hist_fit_s.Clone("errors_s")
hist_fit_e_b = hist_fit_b.Clone("errors_b")
#gr_fit_s = getGraph(hist_fit_s,-0.1)
#gr_fit_b = getGraph(hist_fit_b, 0.1)
gr_fit_s.SetLineColor(ROOT.kRed)
gr_fit_s.SetMarkerColor(ROOT.kRed)
gr_fit_b.SetLineColor(ROOT.kBlue)
gr_fit_b.SetMarkerColor(ROOT.kBlue)
gr_fit_b.SetMarkerStyle(20)
gr_fit_s.SetMarkerStyle(20)
gr_fit_b.SetMarkerSize(1.0)
gr_fit_s.SetMarkerSize(1.0)
gr_fit_b.SetLineWidth(2)
gr_fit_s.SetLineWidth(2)
hist_prefit.SetLineWidth(2)
hist_prefit.SetTitle("Nuisance Parameters")
hist_prefit.SetLineColor(ROOT.kBlack)
hist_prefit.SetFillColor(ROOT.kGray)
hist_prefit.SetMaximum(3)
hist_prefit.SetMinimum(-3)
hist_prefit.LabelsOption("v", "X")
#XXX
hist_prefit.GetXaxis().SetRangeUser((part_idx-1)*bin_size, (part_idx-1)*bin_size + curr_bin_size)
hist_prefit.Draw("E2")
hist_prefit.Draw("histsame")
if not options.skipFitB: gr_fit_b.Draw("EPsame")
if not options.skipFitS: gr_fit_s.Draw("EPsame")
canvas_nuis.SetGridx()
canvas_nuis.RedrawAxis()
canvas_nuis.RedrawAxis('g')
leg=ROOT.TLegend(0.7,0.8,0.89,0.89)
leg.SetFillColor(0)
leg.SetTextFont(42)
leg.AddEntry(hist_prefit,"Prefit","FL")
if not options.skipFitB:leg.AddEntry(gr_fit_b,"B-only fit","EPL")
if not options.skipFitS:leg.AddEntry(gr_fit_s,"S+B fit" ,"EPL")
leg.Draw()
#fout.WriteTObject(canvas_nuis, 'nuisances_{IDX}'.format(IDX=part_idx))
canvas_nuis.Print(options.plotfile.replace(".root",".pdf"))
canvas_pferrs.cd()
for b in range(1,hist_fit_e_s.GetNbinsX()+1):
if hist_prefit.GetBinError(b) < 0.000001: continue
hist_fit_e_s.SetBinContent(b,hist_fit_s.GetBinError(b)/hist_prefit.GetBinError(b))
hist_fit_e_b.SetBinContent(b,hist_fit_b.GetBinError(b)/hist_prefit.GetBinError(b))
hist_fit_e_s.SetBinError(b,0)
hist_fit_e_b.SetBinError(b,0)
hist_fit_e_s.SetFillColor(ROOT.kRed)
hist_fit_e_b.SetFillColor(ROOT.kBlue)
hist_fit_e_s.SetBarWidth(0.4)
hist_fit_e_b.SetBarWidth(0.4)
hist_fit_e_b.SetBarOffset(0.45)
hist_fit_e_b.GetYaxis().SetTitle("#sigma_{#theta}/(#sigma_{#theta} prefit)")
hist_fit_e_b.SetTitle("Nuisance Parameter Uncertainty Reduction")
hist_fit_e_b.SetMaximum(1.5)
hist_fit_e_b.SetMinimum(0)
hist_fit_e_b.Draw("bar")
hist_fit_e_s.Draw("barsame")
leg_rat=ROOT.TLegend(0.6,0.7,0.89,0.89)
leg_rat.SetFillColor(0)
leg_rat.SetTextFont(42)
leg_rat.AddEntry(hist_fit_e_b,"B-only fit","F")
leg_rat.AddEntry(hist_fit_e_s,"S+B fit" ,"F")
leg_rat.Draw()
line_one = ROOT.TLine(0,1,hist_fit_e_s.GetXaxis().GetXmax(),1)
line_one.SetLineColor(1); line_one.SetLineStyle(2); line_one.SetLineWidth(2)
line_one.Draw()
canvas_pferrs.RedrawAxis()
#fout.WriteTObject(canvas_pferrs, 'post_fit_errs_{IDX}'.format(IDX=part_idx))
canvas_pferrs.Clear()
canvas_nuis.Print(options.plotfile.replace(".root",".pdf") + "]")
| [
"[email protected]"
] | |
eb57c92acb99b68b2959e86a7d6a967376b975be | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/pybites/beginner/254/scoping.py | 34decf08d03878474c2328fa00d3a32c654a37e8 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 502 | py | # num_hundreds -1
#
#
# ___ sum_numbers numbers l.. __ i..
# """Sums passed in numbers returning the total, also
# update the global variable num_hundreds with the amount
# of times 100 fits in total"""
# sumlist s.. ?
# gl.. 'num_hundreds' +_ ? //100
# r.. ?
#
#
# """ numlists = [[],[1, 2, 3],[40, 50, 60],[140, 50, 60],[140, 150, 160],[1140, 150, 160]]
#
# for numlist in numlists:
# print(numlist)
# print(sum_numbers(numlist))
# print(num_hundreds) """ | [
"[email protected]"
] | |
ea2a8410b431daa5b5ae063281210a409f3e2dc2 | 931ae36e876b474a5343d0608ef41da6b33f1048 | /python100_sourceCode/089.py | c81f732407e3b1cf928528d280322cf2dcc05199 | [] | no_license | mucollabo/py100 | 07fc10164b1335ad45a55b6af4767948cf18ee28 | 6361398e61cb5b014d2996099c3acfe604ee457c | refs/heads/master | 2023-01-27T13:48:57.807514 | 2020-12-10T12:49:10 | 2020-12-10T12:49:10 | 267,203,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | import sqlite3
# Open a connection to the SQLite database file.
conn = sqlite3.connect('./output/sample.db')
cur = conn.cursor()
# Update the row with id=1 in the Product table: rename it and set price to 7000.
cur.execute('UPDATE Product set title="새 제품", price=7000 where id=1')
conn.commit()
# Read the row back to confirm the change.
cur.execute('SELECT * from Product where id=1')
rows = cur.fetchall()
for row in rows:
    print(row)
# Close the database connection.
conn.close()
| [
"[email protected]"
] | |
e525cb78a6ad0651eeef1a726b80d42425cb8a17 | fe6b6d2253a9efc50571e1d1339ba5134306e978 | /AOJ/1611.py | 4a3e9abfc835b6997ea05081a0d81b344a4dbbd5 | [] | no_license | tehhuu/Atcoder | d6642dea8e92c9d721a914c9bbc208ca4fb484d0 | 3ff8b9890ac5a2b235ec0fbc9e1ef5f9653d956e | refs/heads/master | 2020-12-07T09:02:22.422854 | 2020-08-15T16:20:16 | 2020-08-15T16:20:16 | 232,689,873 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,435 | py | import sys
# Allow deep recursion (competitive-programming boilerplate).
sys.setrecursionlimit(10**8)
# Read one line from stdin as a single int.
def ii(): return int(sys.stdin.readline())
# Read one line as a lazy map of ints.
def mi(): return map(int, sys.stdin.readline().split())
# Read one line as a tuple of ints.
def ti(): return tuple(map(int, sys.stdin.readline().split()))
# Read N lines, each as a list of ints.
def li2(N): return [list(map(int, sys.stdin.readline().split())) for _ in range(N)]
def dp2(ini, i, j): return [[ini]*i for _ in range(j)]
def dp3(ini, i, j, k): return [[[ini]*i for _ in range(j)] for _ in range(k)]
#import bisect #bisect.bisect_left(B, a)
#from collections import defaultdict #d = defaultdict(int) d[key] += value
#from itertools import accumulate #list(accumulate(A))
## DP
def solve(N):
    """Interval DP over one test case of N values read from stdin.

    dp[i][j] appears to hold the maximum number of elements removable
    from the interval A[i..j] under the adjacent-difference-< 2 pairing
    rule (AOJ "block" style problem) -- TODO confirm against the problem
    statement; the recurrence below is preserved unchanged.
    """
    A = ti()
    dp = dp2(0, N, N)
    # Base case: adjacent pairs differing by less than 2 can be removed.
    for i in range(N-1):
        if abs(A[i]-A[i+1]) < 2:
            dp[i][i+1] = 2
    for h in range(2, N): # h is (interval length) - 1
        for i in range(N):
            if i+h >= N:
                break
            j = i+h
            # Interval length is even (h odd):
            if h % 2:
                # Outer pair matches and the inside clears completely.
                if abs(A[i]-A[j]) < 2 and dp[i+1][j-1] == h-1:
                    dp[i][j] = h + 1
                    continue
                # Otherwise split the interval at every k and combine.
                else:
                    dp[i][j] = max(tuple(dp[i][k]+dp[k+1][j] for k in range(i, j)))
            # Interval length is odd (h even): shrink from either end.
            else:
                dp[i][j] = max(dp[i+1][j], dp[i][j-1])
        else:
            # for-else with a bare continue: no-op, kept for fidelity.
            continue
    print(dp[0][N-1])
# Process test cases until a sentinel N == 0 terminates the run.
while True:
    N = ii()
    if N == 0:
        exit()
solve(N) | [
"[email protected]"
] | |
e60a211880a23654d745ef6116f44ceb0a6b2b21 | bf7ceda28eacc4e68dadff5d35224a13e7467d4d | /save_pages_as_pickle_from_db.py | 7da88716bcfdb8ba5e8e88cc411a951fa68dcdf0 | [] | no_license | katryo/task_search | 83983d61618d7755c474a9ed9c39ec6ca5621641 | 20c1985ca27253f64f4f9c124b36166c86bcf2d0 | refs/heads/master | 2020-05-09T10:04:11.155557 | 2014-03-25T04:10:25 | 2014-03-25T04:10:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 935 | py | # -*- coding: utf-8 -*-
import constants
import pdb
from pickle_file_saver_for_original import PickleFileSaverForOriginal
from page_data_loader import PageDataLoader
from web_page import WebPage
if __name__ == '__main__':
    # For each configured query, load all of its pages from the DB and
    # pickle them as WebPage objects.
    queries = constants.QUERIES_4
    saver = PickleFileSaverForOriginal()
    with PageDataLoader() as page_loader:
        for query in queries:
            pages = []
            page_ids = page_loader.page_ids_with_query(query)
            for page_id in page_ids:
                # NOTE(review): the inline comment below says the row is
                # (id, url, snippet, body, rank), but the indices used here
                # map 0->url, 1->query, 2->snippet, 3->rank.  One of the two
                # is wrong -- confirm against PageDataLoader.pagedata_with_id.
                pagedata = page_loader.pagedata_with_id(page_id) # (id, url, snippet, body, rank)
                page = WebPage(id=page_id,
                               url=pagedata[0],
                               query=pagedata[1],
                               snippet=pagedata[2],
                               rank=pagedata[3])
                pages.append(page)
            saver.save_pages_with_query(pages=pages, query=query)
| [
"[email protected]"
] | |
979b67f9de6aee4f55cd9ac3f8fa6994dfbe7fbe | d5ad13232e3f1ced55f6956bc4cbda87925c8085 | /cc_mcc_seq/20sViruses/6.heatmap.py | 52b1d01453d24e84bc8ab093eb29173973a0ce4b | [] | no_license | arvin580/SIBS | c0ba9a8a41f59cb333517c286f7d80300b9501a2 | 0cc2378bf62359ec068336ea4de16d081d0f58a4 | refs/heads/master | 2021-01-23T21:57:35.658443 | 2015-04-09T23:11:34 | 2015-04-09T23:11:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,232 | py | from PyPlot.PyPlotClass import *
import sys
import math
import numpy
# RefSeq accession -> virus name lookup table (a subset of these
# accessions is aggregated by gene_heatmap below).
V = {}
V['NC_001669.1']='Simian virus'
V['NC_006273.2']='Human herpesvirus 5'
V['NC_003287.2']='Enterobacteria phage M13'
V['NC_000898.1']='Human herpesvirus 6B'
V['NC_007605.1']='Human herpesvirus 4 type 1'
V['NC_009334.1']='Human herpesvirus 4'
V['NC_003977.1']='Hepatitis B virus'
# Read-count threshold used by foo() to binarize per-sample counts.
Score = 20
def foo(x):
    """Binarize a read count: 1 when it reaches the module-level Score
    threshold, else 0."""
    return 1 if x >= Score else 0
def gene_heatmap(sampleNameList, ouF, figsize=0, rowList=[]):
    """Aggregate per-sample viral read counts and draw a presence heatmap.

    Reads '<sample>.unmapped.sam.mapped.fa.fa.blasted.top.num' for each of
    the 20 samples, writes raw counts to '<ouF>.data', and plots a binary
    (count >= Score) heatmap via the project's PyPlot wrapper.

    NOTE(review): `rowList` is never used, and its mutable `[]` default is
    an accident waiting to happen if it ever is.  `ouFile` is never closed.
    """
    # D/D2: accession -> list of 20 per-sample counts (D2 is an unused copy).
    D = dict()
    D2 = dict()
    for i,inF in enumerate(sampleNameList):
        inFile = open(inF + '.unmapped.sam.mapped.fa.fa.blasted.top.num')
        for line in inFile:
            line = line.strip()
            fields =line.split('\t')
            D.setdefault(fields[0],[0]*20)
            D2.setdefault(fields[0],[0]*20)
            #D[fields[0]][i] = int(math.log(int(fields[1])+1,2))
            D[fields[0]][i] = int(fields[1])
            D2[fields[0]][i] = int(fields[1])
        inFile.close()
    # D3: binarized (via foo) presence per group; D4: raw summed counts.
    D3 = {}
    D4 = {}
    D3['Human herpesvirus']=[foo(x) for x in (numpy.array(D['NC_006273.2'])+numpy.array(D['NC_000898.1'])+numpy.array(D['NC_007605.1'])+numpy.array(D['NC_009334.1']))]
    D3['Hepatitis B virus']=[foo(x) for x in D['NC_003977.1']]
    D3['Enterobacteria phage M13']=[foo(x) for x in D['NC_003287.2']]
    D4['Human herpesvirus']=numpy.array(D['NC_006273.2'])+numpy.array(D['NC_000898.1'])+numpy.array(D['NC_007605.1'])+numpy.array(D['NC_009334.1'])
    D4['Hepatitis B virus']=D['NC_003977.1']
    D4['Enterobacteria phage M13']=D['NC_003287.2']
    # Dump the raw per-sample counts as tab-separated rows.
    ouFile = open(ouF+'.data','w')
    for k in D4 :
        ouFile.write(k+'\t'+'\t'.join([str(x)for x in D4[k]])+'\n')
    # Keep only groups detected in at least one sample.
    LD = []
    geneList = []
    for key in D3 :
        if max(D3[key])>0:
            LD.append(D3[key])
            geneList.append(key)
    print(LD)
    pp=PyPlot(ouF)
    pp.heatmap(LD,col=False,xLabel=sampleNameList,yLabel=geneList,xLabelVertical=True,grid=True,figsize=figsize,colorTickets=True)
gene_heatmap(['ICC1A','ICC2A','ICC3A','ICC4A','ICC5A','ICC6A','ICC7A','ICC8A','ICC9A','ICC10A','CHC1A','CHC2A','CHC3A','CHC4A','CHC5A','CHC6A','CHC7A','CHC8A','CHC9A','CHC10A'],'viruses.heatmap3.pdf')
| [
"[email protected]"
] | |
d83aba75ffff502d1e1d4ac11c23a729564cdd4c | 26c5f6bb53331f19e2a0ef0797b752aca9a89b19 | /caffe2/python/operator_test/elementwise_logical_ops_test.py | fb886a1845571f85cf8624a289fb54f3ae7c6bc2 | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | Maratyszcza/caffe2 | 4c68baedbdaf5378f9da0ebf58b232478f689ae4 | f4794ac7629e6825b2c8be99950ea130b69c4840 | refs/heads/master | 2023-06-20T18:23:06.774651 | 2018-03-26T07:41:33 | 2018-03-26T18:22:53 | 122,715,434 | 1 | 0 | Apache-2.0 | 2018-02-24T07:28:21 | 2018-02-24T07:28:21 | null | UTF-8 | Python | false | false | 5,258 | py | # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
from hypothesis import given
import hypothesis.strategies as st
import caffe2.python.hypothesis_test_util as hu
import numpy as np
import unittest
def mux(select, left, right):
return [np.vectorize(lambda c, x, y: x if c else y)(select, left, right)]
def rowmux(select_vec, left, right):
select = [[s] * len(left) for s in select_vec]
return mux(select, left, right)
class TestWhere(hu.HypothesisTestCase):
    """Tests for the elementwise Where operator against the mux reference."""
    def test_reference(self):
        """Sanity-check the mux reference itself on fixed 1-D/2-D inputs."""
        self.assertTrue((
            np.array([1, 4]) == mux([True, False],
                                    [1, 2],
                                    [3, 4])[0]
        ).all())
        self.assertTrue((
            np.array([[1], [4]]) == mux([[True], [False]],
                                        [[1], [2]],
                                        [[3], [4]])[0]
        ).all())
    @given(N=st.integers(min_value=1, max_value=10),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs_cpu_only)
    def test_where(self, N, gc, dc, engine):
        """Where on random 1-D inputs matches mux across devices."""
        C = np.random.rand(N).astype(bool)
        X = np.random.rand(N).astype(np.float32)
        Y = np.random.rand(N).astype(np.float32)
        op = core.CreateOperator("Where", ["C", "X", "Y"], ["Z"], engine=engine)
        self.assertDeviceChecks(dc, op, [C, X, Y], [0])
        self.assertReferenceChecks(gc, op, [C, X, Y], mux)
    @given(N=st.integers(min_value=1, max_value=10),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs_cpu_only)
    def test_where_dim2(self, N, gc, dc, engine):
        """Where on random square 2-D inputs matches mux across devices."""
        C = np.random.rand(N, N).astype(bool)
        X = np.random.rand(N, N).astype(np.float32)
        Y = np.random.rand(N, N).astype(np.float32)
        op = core.CreateOperator("Where", ["C", "X", "Y"], ["Z"], engine=engine)
        self.assertDeviceChecks(dc, op, [C, X, Y], [0])
        self.assertReferenceChecks(gc, op, [C, X, Y], mux)
class TestRowWhere(hu.HypothesisTestCase):
    """Tests for Where with broadcast_on_rows (one condition per row)."""
    def test_reference(self):
        """Sanity-check the rowmux reference on fixed inputs."""
        self.assertTrue((
            np.array([1, 2]) == rowmux([True],
                                       [1, 2],
                                       [3, 4])[0]
        ).all())
        self.assertTrue((
            np.array([[1, 2], [7, 8]]) == rowmux([True, False],
                                                 [[1, 2], [3, 4]],
                                                 [[5, 6], [7, 8]])[0]
        ).all())
    @given(N=st.integers(min_value=1, max_value=10),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs_cpu_only)
    def test_rowwhere(self, N, gc, dc, engine):
        """1-D case: with 1-D data, row broadcast degenerates to plain mux."""
        C = np.random.rand(N).astype(bool)
        X = np.random.rand(N).astype(np.float32)
        Y = np.random.rand(N).astype(np.float32)
        op = core.CreateOperator(
            "Where",
            ["C", "X", "Y"],
            ["Z"],
            broadcast_on_rows=True,
            engine=engine,
        )
        self.assertDeviceChecks(dc, op, [C, X, Y], [0])
        self.assertReferenceChecks(gc, op, [C, X, Y], mux)
    @given(N=st.integers(min_value=1, max_value=10),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs_cpu_only)
    def test_rowwhere_dim2(self, N, gc, dc, engine):
        """2-D case: an N-vector of conditions selects whole N-wide rows."""
        C = np.random.rand(N).astype(bool)
        X = np.random.rand(N, N).astype(np.float32)
        Y = np.random.rand(N, N).astype(np.float32)
        op = core.CreateOperator(
            "Where",
            ["C", "X", "Y"],
            ["Z"],
            broadcast_on_rows=True,
            engine=engine,
        )
        self.assertDeviceChecks(dc, op, [C, X, Y], [0])
        self.assertReferenceChecks(gc, op, [C, X, Y], rowmux)
class TestIsMemberOf(hu.HypothesisTestCase):
    """Tests for the IsMemberOf operator (elementwise set membership)."""
    @given(N=st.integers(min_value=1, max_value=10),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs_cpu_only)
    def test_is_member_of(self, N, gc, dc, engine):
        """Random int64 input checked against a fixed value set."""
        X = np.random.randint(10, size=N).astype(np.int64)
        values = [0, 3, 4, 6, 8]
        op = core.CreateOperator(
            "IsMemberOf",
            ["X"],
            ["Y"],
            value=np.array(values),
            engine=engine,
        )
        self.assertDeviceChecks(dc, op, [X], [0])
        # Reference: elementwise `x in values` (rebound to a set for O(1) lookup).
        values = set(values)
        def test(x):
            return [np.vectorize(lambda x: x in values)(x)]
        self.assertReferenceChecks(gc, op, [X], test)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
2da8e86c83b20b30243df47ef601477d5c095ae6 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/428/usersdata/281/102513/submittedfiles/jogoDaVelha.py | f0e243249430cf1433ee1d4ee186ce27cbbfc621 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | # -*- coding: utf-8 -*-
from jogoDaVelha_BIB import *
import random
# Place your program below this point.
print('Bem vindo ao Jogo da Velha da equipe ESSI')
nome=str(input('Qual o seu nome (ou apelido)? '))
simb ()
sorteio ()
# NOTE(review): `sorteio` below is the imported function object itself, so
# `sorteio==1` / `sorteio==0` can never be true and the result of the
# `sorteio()` call above is discarded.  Likely intended:
# `resultado = sorteio()` and compare `resultado` -- confirm against
# jogoDaVelha_BIB's API.
if sorteio==1:
    print('Vencedor do sorteio para inicio do jogo: Computador' )
elif sorteio==0:
    print('Vencedor do sorteio para inicio do jogo: ' +nome)
| [
"[email protected]"
] | |
d3b35b564dfbdedf90d2e01ce3eef7f3c2366590 | d2b42c7a82229b02498ec9ba3bb49bb78857beca | /common.py | c0b5f07acc611a74bb0135b56d8cdd884e802e8b | [] | no_license | flyingbird93/NIMA | 8dd41d0bb10a21b7b9afeca59f26f70c626e9dec | 4c75458d31762d6236b931f0bc66d1784a2ea003 | refs/heads/master | 2022-11-06T10:47:30.404174 | 2018-12-14T07:05:57 | 2018-12-14T12:28:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,488 | py | import os
import numpy as np
import requests
from torchvision import transforms
# Per-channel RGB statistics fed to transforms.Normalize below.
IMAGE_NET_MEAN = [0.485, 0.456, 0.406]
IMAGE_NET_STD = [0.229, 0.224, 0.225]
class Transform:
    """Bundles the torchvision preprocessing pipelines for train and val.

    Both pipelines end with ToTensor + Normalize(IMAGE_NET_MEAN/STD); the
    training pipeline additionally augments with a random flip and crop.
    """
    def __init__(self):
        normalize = transforms.Normalize(
            mean=IMAGE_NET_MEAN,
            std=IMAGE_NET_STD)
        # Train: resize to 256, random flip, random 224 crop, normalize.
        self._train_transform = transforms.Compose([
            transforms.Resize((256, 256)),
            transforms.RandomHorizontalFlip(),
            transforms.RandomCrop((224, 224)),
            transforms.ToTensor(),
            normalize])
        # Validation: deterministic resize straight to 224, no augmentation.
        self._val_transform = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor(),
            normalize])
    @property
    def train_transform(self):
        """Augmenting pipeline for training images."""
        return self._train_transform
    @property
    def val_transform(self):
        """Deterministic pipeline for validation/inference images."""
        return self._val_transform
def get_mean_score(score):
    """Mean of a 10-bin score distribution (bin values 1..10)."""
    bins = np.arange(1, 11)
    return (bins * score).sum()


def get_std_score(scores):
    """Standard deviation of a 10-bin score distribution (bins 1..10)."""
    bins = np.arange(1, 11)
    mu = get_mean_score(scores)
    return np.sqrt((((bins - mu) ** 2) * scores).sum())
def download_file(url, local_filename, chunk_size=1024):
    """Download `url` to `local_filename` in streamed chunks.

    Skips the download entirely when the file already exists.  Returns
    the local path either way.
    """
    if os.path.exists(local_filename):
        return local_filename
    # Use the response as a context manager: the original left the
    # streamed connection open (a socket/connection-pool leak).
    with requests.get(url, stream=True) as r:
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=chunk_size):
                # Filter out keep-alive chunks, which arrive empty.
                if chunk:
                    f.write(chunk)
    return local_filename
| [
"[email protected]"
] | |
07e8edc00895d0ef29ad6acd8dcaad2abc9c07cb | 6fc5882ad4c38f32162ed30e60c3423ef8da5b7b | /fake_faces/models/baseline_batchnorm.py | e364734eb462c9da412e3e12f9385e20b72190ee | [
"MIT"
] | permissive | alexkyllo/fake-faces | f7334c798fc90eab94657dc18df88c19ec8c052c | 95d7467598bc1275e6c6c0bea331e036da4e625e | refs/heads/main | 2023-01-31T15:43:53.212202 | 2020-12-17T02:11:15 | 2020-12-17T02:11:15 | 304,217,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,931 | py | """baseline.py
A baseline CNN with 3 Conv2D layers"""
import os
from tensorflow.keras import Sequential
from tensorflow.keras.layers import (
Conv2D,
MaxPool2D,
Flatten,
Dense,
Dropout,
BatchNormalization,
)
from tensorflow.keras.optimizers import Adam
from fake_faces.models.model import Model
from fake_faces import SHAPE, BATCH_SIZE, CLASS_MODE
class BaselineBatchNorm(Model):
    """A simple 3-layer CNN (32/64/128 filters) with batch normalization."""

    def build(self, shape=SHAPE, color_channels=1, momentum=0.99,
              normalize_fc=False, optimizer=None):
        """Build and compile the model with the given hyperparameter values.

        Args:
            shape: (height, width) of the input images.
            color_channels: number of image channels (1 = grayscale).
            momentum: momentum for every BatchNormalization layer.
            normalize_fc: also batch-normalize around the dense head.
            optimizer: Keras optimizer; a fresh Adam() is used when None.

        Returns:
            self, with the compiled model stored in self.model.
        """
        if optimizer is None:
            # Create a fresh optimizer per call.  The previous
            # `optimizer=Adam()` default was instantiated once at import
            # time and shared (with its internal state) across every model
            # built from this class.
            optimizer = Adam()
        model = Sequential()
        model.add(
            Conv2D(
                filters=32,
                kernel_size=(3, 3),
                # Bug fix: this previously used the module constant SHAPE,
                # silently ignoring the `shape` parameter.
                input_shape=(*shape, color_channels),
                activation="relu",
                padding="same",
            )
        )
        model.add(BatchNormalization(momentum=momentum))
        model.add(MaxPool2D(pool_size=(2, 2)))
        model.add(
            Conv2D(filters=64, kernel_size=(3, 3), activation="relu", padding="same")
        )
        model.add(BatchNormalization(momentum=momentum))
        model.add(MaxPool2D(pool_size=(2, 2)))
        model.add(
            Conv2D(filters=128, kernel_size=(3, 3), activation="relu", padding="same")
        )
        model.add(BatchNormalization(momentum=momentum))
        model.add(MaxPool2D(pool_size=(2, 2)))
        model.add(Flatten())
        if normalize_fc:
            model.add(BatchNormalization(momentum=momentum))
        model.add(Dense(units=128, activation="relu"))
        if normalize_fc:
            model.add(BatchNormalization(momentum=momentum))
        model.add(Dense(units=1, activation="sigmoid"))
        model.compile(
            optimizer=optimizer, loss="binary_crossentropy", metrics=["accuracy"]
        )
        self.model = model
        return self
| [
"[email protected]"
] | |
57affb3c8abbefac1bd893146ceb7506392bad76 | d35813d7e9ef6c606591ae1eb4ed3b2d5156633b | /python4everybody/ch12_network_programming/socket1.py | 3e7484b486f0d5399dec90354d6b926489059e4a | [] | no_license | JeremiahZhang/gopython | eb6f598c16c8a00c86245e6526261b1b2d1321f1 | ef13f16d2330849b19ec5daa9f239bf1558fa78c | refs/heads/master | 2022-08-13T22:38:12.416404 | 2022-05-16T02:32:04 | 2022-05-16T02:32:04 | 42,239,933 | 13 | 6 | null | 2022-08-01T08:13:54 | 2015-09-10T11:14:43 | Jupyter Notebook | UTF-8 | Python | false | false | 333 | py | import socket
# Minimal raw-HTTP client: fetch one document over a plain TCP socket.
mysock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
mysock.connect(('data.pr4e.org', 80))
# HTTP/1.0 GET request; the blank line (\r\n\r\n) ends the headers.
cmd = 'GET http://data.pr4e.org/intro-short.txt HTTP/1.0\r\n\r\n'.encode()
mysock.send(cmd)
# Read the response (headers + body) 512 bytes at a time until EOF.
while True:
    data = mysock.recv(512)
    if (len(data) < 1):
        break
    print(data.decode(), end='')
mysock.close()
| [
"[email protected]"
] | |
cdf1db92901c592528e93cf76a2b6650207ac886 | 311578520b84f1649124cdfa335028ee66e9d88d | /scapy_python2/bridge/scapy_bridge_ThreadPoolExecutor.py | b6adcf9046fa57b1f12222f453747858c4178415 | [
"Unlicense"
] | permissive | thinkAmi-sandbox/syakyo-router_jisaku | b8070494a5fc62b576200cae96eb9c09599f4f41 | 295e4df3cf65ce0275f40884027ff8aaff381dd4 | refs/heads/master | 2021-09-03T07:32:08.995618 | 2018-01-07T03:19:15 | 2018-01-07T03:19:15 | 115,960,241 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 930 | py | # -*- coding: utf-8 -*-
from scapy.all import conf
import concurrent.futures
def bridge_from_eth0_to_eth1():
    """Forward every frame received on eth0 out of eth1, forever."""
    eth0_socket = conf.L2socket(iface='eth0')
    eth1_socket = conf.L2socket(iface='eth1')
    while True:
        p = eth0_socket.recv()
        if p:
            # Re-send the captured frame's original raw bytes unchanged.
            eth1_socket.send(p.original)
def bridge_from_eth1_to_eth0():
    """Forward every frame received on eth1 out of eth0, forever."""
    eth0_socket = conf.L2socket(iface='eth0')
    eth1_socket = conf.L2socket(iface='eth1')
    while True:
        p = eth1_socket.recv()
        if p:
            # Re-send the captured frame's original raw bytes unchanged.
            eth0_socket.send(p.original)
def bridge():
    """Run both one-way forwarding loops concurrently in a thread pool."""
    try:
        # Prepare worker threads (only two tasks are ever submitted).
        executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
        executor.submit(bridge_from_eth0_to_eth1)
        executor.submit(bridge_from_eth1_to_eth0)
    except KeyboardInterrupt:
        # Unlike plain threading, Ctrl+C needs no extra handling here.
        pass
if __name__ == '__main__':
    bridge()
| [
"[email protected]"
] | |
0ea6344fca0f9b96f2ed31bb0dbbd94842d83e03 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03486/s871112697.py | ad62fedae08b65cbd4a367acdee0c6ff80030ad4 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | s = sorted(input())
# s (read above) sorted ascending is its lexicographically smallest
# permutation; t sorted descending is its largest.  Some arrangement of
# s can be strictly smaller than some arrangement of t iff these compare
# as s < t.
t = sorted(input(),reverse = True)
if s < t:
    print('Yes')
else:
print('No') | [
"[email protected]"
] | |
61576ee9ff5f75ea088469328816acfcc2fd48b8 | f0d583a064cc53510d8b00b42ac869832e70bf41 | /nearl/convolutions.py | 8904603be480f804d66ed61426d0d836ea09e435 | [] | no_license | PaulZoni/nn | 918d543b4b2d955ff991da70ce4e88d4d94d13c8 | 25a81579499c893584b040f536ddbef254197f4e | refs/heads/master | 2020-04-27T19:05:10.968050 | 2019-06-27T12:22:16 | 2019-06-27T12:22:16 | 174,564,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,153 | py | from skimage.exposure import rescale_intensity
import numpy as np
import argparse
import cv2
def convolve(image, K):
    """Correlate a 2-D grayscale image with kernel K, returning a uint8 image.

    Replicate-padding keeps the output the same spatial size as the input;
    intensities are rescaled into [0, 255] at the end.
    """
    (iH, iW) = image.shape[:2]
    (kH, kW) = K.shape[:2]
    # Padding is derived from the kernel width only -- assumes a square,
    # odd-sized kernel (true for every kernel defined in this script).
    pad = (kW - 1) // 2
    image = cv2.copyMakeBorder(image, pad, pad, pad, pad,
        cv2.BORDER_REPLICATE)
    output = np.zeros((iH, iW), dtype="float")
    # Slide the kernel over every pixel of the padded image.
    for y in np.arange(pad, iH + pad):
        for x in np.arange(pad, iW + pad):
            roi = image[y - pad:y + pad + 1, x - pad:x + pad + 1]
            # Element-wise multiply-accumulate = correlation at (x, y).
            k = (roi * K).sum()
            output[y - pad, x - pad] = k
    # Rescale into [0, 1], then convert to 8-bit for display.
    output = rescale_intensity(output, in_range=(0, 255))
    output = (output * 255).astype("uint8")
    return output
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=False, help="path to the input image",
                default="/home/pavel/Изображения/dog.jpg")
args = vars(ap.parse_args())
# Box-blur kernels: uniform weights summing to 1.
smallBlur = np.ones((7, 7), dtype="float") * (1.0 / (7 * 7))
largeBlur = np.ones((21, 21), dtype="float") * (1.0 / (21 * 21))
# Sharpening kernel.
sharpen = np.array((
    [0, -1, 0],
    [-1, 5, -1],
    [0, -1, 0]), dtype="int")
# Laplacian (second-derivative) kernel.
laplacian = np.array((
    [0, 1, 0],
    [1, -4, 1],
    [0, 1, 0]), dtype="int")
# Sobel gradient kernels (x and y directions).
sobelX = np.array((
    [-1, 0, 1],
    [-2, 0, 2],
    [-1, 0, 1]), dtype="int")
sobelY = np.array((
    [-1, -2, -1],
    [0, 0, 0],
    [1, 2, 1]), dtype="int")
emboss = np.array((
    [-2, -1, 0],
    [-1, 1, 1],
    [0, 1, 2]), dtype="int")
# (name, kernel) pairs applied below with both the hand-rolled convolve()
# and OpenCV's filter2D for side-by-side comparison.
kernelBank = (
    ("small_blur", smallBlur),
    ("large_blur", largeBlur),
    ("sharpen", sharpen),
    ("laplacian", laplacian),
    ("sobel_x", sobelX),
    ("sobel_y", sobelY),
    ("emboss", emboss))
image = cv2.imread(args["image"])
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
for (kernelName, K) in kernelBank:
    print("[INFO] applying {} kernel".format(kernelName))
    convolveOutput = convolve(gray, K)
    opencvOutput = cv2.filter2D(gray, -1, K)
    cv2.imshow("Original", gray)
    cv2.imshow("{} - convole".format(kernelName), convolveOutput)
    cv2.imshow("{} - opencv".format(kernelName), opencvOutput)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
| [
"[email protected]"
] | |
3f0f1dcc0fbb8c2b8add0c757387a982bca2cbaf | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /iBL3eDRWzpxgfQyHx_3.py | e27280148842b79a6e873bc4c39cde8d6161a12a | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | """
Fix the code in the **Code** tab so the function returns `true` _if and only
if_ `x` is equal to `7`. Try to debug code and pass all the tests.
### Examples
is_seven(4) ➞ False
is_seven(9) ➞ False
is_seven(7) ➞ True
### Notes
The bug can be subtle, so look closely!
"""
def is_seven(x):
    """Return True if and only if x equals 7.

    Replaces the convoluted `False if x!=7 else True` with the direct
    comparison; behavior is identical (including 7.0 == 7 -> True).
    """
    return x == 7
| [
"[email protected]"
] | |
cb9259575f3671b729def2abae33d43a65b303dd | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/aio/operations/_express_route_ports_operations.py | e763f8dc244c70b4ef6e04a55b3d5c47f13bd7ba | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 30,188 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRoutePortsOperations:
    """ExpressRoutePortsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_11_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to issue HTTP requests.
        self._client = client
        # msrest serializer/deserializer pair for request and response bodies.
        self._serialize = serializer
        self._deserialize = deserializer
        # Client configuration (subscription id, polling interval, ...).
        self._config = config
    async def _delete_initial(
        self,
        resource_group_name: str,
        express_route_port_name: str,
        **kwargs: Any
    ) -> None:
        """Issue the initial DELETE request of the long-running delete operation."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202 = deletion accepted (async), 204 = nothing to delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        express_route_port_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes the specified ExpressRoutePort resource.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param express_route_port_name: The name of the ExpressRoutePort resource.
        :type express_route_port_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: fire the initial DELETE request now.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                express_route_port_name=express_route_port_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete returns no body; just honor a custom `cls` callback if given.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        express_route_port_name: str,
        **kwargs: Any
    ) -> "_models.ExpressRoutePort":
        """Retrieves the requested ExpressRoutePort resource.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param express_route_port_name: The name of ExpressRoutePort.
        :type express_route_port_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRoutePort, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_11_01.models.ExpressRoutePort
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePort"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRoutePort', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'}  # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        express_route_port_name: str,
        parameters: "_models.ExpressRoutePort",
        **kwargs: Any
    ) -> "_models.ExpressRoutePort":
        """Issue the initial PUT request of the create-or-update long-running operation."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePort"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ExpressRoutePort')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 = existing resource updated, 201 = new resource created; both carry the same payload shape.
        if response.status_code == 200:
            deserialized = self._deserialize('ExpressRoutePort', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ExpressRoutePort', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        express_route_port_name: str,
        parameters: "_models.ExpressRoutePort",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.ExpressRoutePort"]:
        """Creates or updates the specified ExpressRoutePort resource.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param express_route_port_name: The name of the ExpressRoutePort resource.
        :type express_route_port_name: str
        :param parameters: Parameters supplied to the create ExpressRoutePort operation.
        :type parameters: ~azure.mgmt.network.v2018_11_01.models.ExpressRoutePort
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ExpressRoutePort or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.ExpressRoutePort]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePort"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: fire the initial PUT request now.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                express_route_port_name=express_route_port_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final GET of the LRO returns the resource body.
            deserialized = self._deserialize('ExpressRoutePort', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'}  # type: ignore
    async def _update_tags_initial(
        self,
        resource_group_name: str,
        express_route_port_name: str,
        parameters: "_models.TagsObject",
        **kwargs: Any
    ) -> "_models.ExpressRoutePort":
        """Issue the initial PATCH request that updates the resource's tags."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePort"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._update_tags_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRoutePort', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'}  # type: ignore
    async def begin_update_tags(
        self,
        resource_group_name: str,
        express_route_port_name: str,
        parameters: "_models.TagsObject",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.ExpressRoutePort"]:
        """Update ExpressRoutePort tags.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param express_route_port_name: The name of the ExpressRoutePort resource.
        :type express_route_port_name: str
        :param parameters: Parameters supplied to update ExpressRoutePort resource tags.
        :type parameters: ~azure.mgmt.network.v2018_11_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ExpressRoutePort or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.ExpressRoutePort]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePort"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: fire the initial PATCH request now.
            raw_result = await self._update_tags_initial(
                resource_group_name=resource_group_name,
                express_route_port_name=express_route_port_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final response of the LRO carries the updated resource body.
            deserialized = self._deserialize('ExpressRoutePort', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'}  # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.ExpressRoutePortListResult"]:
        """List all the ExpressRoutePort resources in the specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRoutePortListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.ExpressRoutePortListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePortListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for the first page (from the route template)
            # or for a follow-up page (next_link is a complete URL).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds its query string, so no extra parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (next-page link, items).
            deserialized = self._deserialize('ExpressRoutePortListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts'}  # type: ignore
    def list(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.ExpressRoutePortListResult"]:
        """List all the ExpressRoutePort resources in the specified subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRoutePortListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.ExpressRoutePortListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePortListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for the first page (from the route template)
            # or for a follow-up page (next_link is a complete URL).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds its query string, so no extra parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (next-page link, items).
            deserialized = self._deserialize('ExpressRoutePortListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePorts'}  # type: ignore
| [
"[email protected]"
] | |
fa17aaf115da2b9fef1951a840b2557fca760856 | 9645bdfbb15742e0d94e3327f94471663f32061a | /Python/844 - Backspace String Compare/844_backspace-string-compare.py | e1471d752b7544301a87dd7b7b8a38438f4cdf93 | [] | no_license | aptend/leetcode-rua | f81c080b2260adb2da677612e5c437eda256781d | 80e44f4e9d3a5b592fdebe0bf16d1df54e99991e | refs/heads/master | 2023-06-22T00:40:05.533424 | 2021-03-17T13:51:28 | 2021-03-17T13:51:28 | 186,434,133 | 2 | 0 | null | 2023-06-21T22:12:51 | 2019-05-13T14:17:27 | HTML | UTF-8 | Python | false | false | 1,715 | py | from leezy import Solution, solution
class Q844(Solution):
    # LeetCode 844 - Backspace String Compare: '#' in the input acts as a backspace key.

    @solution
    def backspaceCompare(self, S, T):
        """Stack-based solution: render each string, then compare.

        O(len(S) + len(T)) time and space.
        """
        # 8ms 99.44%
        def print_s(s):
            # Simulate typing `s`: push printable chars, pop one on each '#'.
            stack = []
            for ch in s:
                if ch == '#':
                    if stack:
                        stack.pop()
                else:
                    stack.append(ch)
            # str comp is somehow faster
            return ''.join(stack)
        return print_s(S) == print_s(T)

    @solution
    def backspace_compare(self, S, T):
        """O(1)-space variant: scan both strings backwards.

        Every '#' only affects earlier positions, so walking from the end we
        can count pending backspaces (`sback`/`tback`) to locate the next
        character that would really be printed, and compare those pairwise.
        """
        # 12ms 95.69%
        i, j = len(S)-1, len(T)-1
        sback = tback = 0
        while True:
            # find the char that will REALLY be printed on the paper
            while i >= 0 and S[i] == '#':
                i -= 1
                sback += 1
            while i >= 0 and sback:
                # a '#' adds one pending delete; a normal char consumes one
                sback += 1 if S[i] == '#' else -1
                i -= 1
            while j >= 0 and T[j] == '#':
                j -= 1
                tback += 1
            while j >= 0 and tback:
                tback += 1 if T[j] == '#' else -1
                j -= 1
            if i >= 0 and j >= 0:
                if S[i] == T[j]:
                    i -= 1
                    j -= 1
                else:
                    return False
            elif i < 0 and j < 0:
                # both strings exhausted at the same time -> equal
                return True
            else:
                # exactly one string still has printable characters left
                return False
def main():
    """Run Q844 against a few backspace-string test pairs."""
    problem = Q844()
    cases = [
        ('ab#c', 'ad#c'),
        ('ab##', 'c#d##'),
        ('z#ab###c', 'ad#c'),
        ('z####c##', 'ad#c'),
    ]
    for first, second in cases:
        problem.add_args(first, second)
    problem.run()


if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
c76d6d2e9cca7e7d52dfb5b95ee5a097aa6b684e | 70aa3b9b80a42930234315f335ca7ab06e87c15d | /chapter6/code/vgg.py | 82c411216bb94b89000d79007c91b9a3edd8525e | [
"Apache-2.0"
] | permissive | gvenus/BookSource | c0e6c4fbbf5bc69d3dd2bf437f3fdfa734b83234 | f18fd11f64ac5b400175a0d80a6dd4a393476673 | refs/heads/master | 2020-05-27T06:14:46.789134 | 2018-10-08T11:26:46 | 2018-10-08T11:26:46 | 188,516,561 | 1 | 0 | null | 2019-05-25T03:37:08 | 2019-05-25T03:37:08 | null | UTF-8 | Python | false | false | 1,691 | py | # coding:utf-8
import paddle.v2 as paddle
# *********************** Define the VGG convolutional neural network model ***************************************
def vgg_bn_drop(datadim, type_size):
    """Build a VGG-style network with batch-norm and dropout (PaddlePaddle v2 API).

    :param datadim: flattened input image dimension fed to the data layer
    :param type_size: number of output classes for the final softmax layer
    :return: the softmax classifier output layer
    """
    image = paddle.layer.data(name="image",
                              type=paddle.data_type.dense_vector(datadim))

    def conv_block(ipt, num_filter, groups, dropouts, num_channels=None):
        # One VGG stage: `groups` 3x3 conv layers (ReLU + batch-norm, with the
        # given per-layer dropout rates) followed by 2x2 max-pooling.
        return paddle.networks.img_conv_group(
            input=ipt,
            num_channels=num_channels,
            pool_size=2,
            pool_stride=2,
            conv_num_filter=[num_filter] * groups,
            conv_filter_size=3,
            conv_act=paddle.activation.Relu(),
            conv_with_batchnorm=True,
            conv_batchnorm_drop_rate=dropouts,
            pool_type=paddle.pooling.Max())

    # The last argument is the number of image channels (1 here).
    conv1 = conv_block(image, 64, 2, [0.0, 0], 1)
    conv2 = conv_block(conv1, 128, 2, [0.0, 0])
    conv3 = conv_block(conv2, 256, 3, [0.0, 0.0, 0])
    conv4 = conv_block(conv3, 512, 3, [0.0, 0.0, 0])
    conv5 = conv_block(conv4, 512, 3, [0.0, 0.0, 0])

    drop = paddle.layer.dropout(input=conv5, dropout_rate=0.5)
    fc1 = paddle.layer.fc(input=drop, size=512, act=paddle.activation.Linear())
    bn = paddle.layer.batch_norm(input=fc1,
                                 act=paddle.activation.Relu(),
                                 layer_attr=paddle.attr.Extra(drop_rate=0.0))
    fc2 = paddle.layer.fc(input=bn, size=512, act=paddle.activation.Linear())
    # Obtain the classifier through Softmax.
    out = paddle.layer.fc(input=fc2,
                          size=type_size,
                          act=paddle.activation.Softmax())
    return out
"[email protected]"
] | |
6b00caf4c7ffc8ff1fbd1bd7e0184ef186d20125 | 7a004d2b4508004e40fac8981e38f160d7a6f263 | /cn.fc/py/T4310.格兰赛尔.py | 2073ac4d61028e263b6b687f279b4befa491154b | [] | no_license | LastSongz/SoraVoiceScripts | 0cd1dd6a36eccdf00bd772ceb6d1cfd73b7ab8cd | 35e465669571e562aa78fe8524b6f69c6f14f000 | refs/heads/master | 2020-06-16T05:24:11.381802 | 2019-06-12T11:55:03 | 2019-06-12T11:55:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90,814 | py | from ED6ScenarioHelper import *
def main():
# 格兰赛尔
CreateScenaFile(
FileName = 'T4310 ._SN',
MapName = 'Grancel',
Location = 'T4310.x',
MapIndex = 1,
MapDefaultBGM = "ed60089",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'特务兵', # 9
'特务兵', # 10
'特务兵', # 11
'特务兵', # 12
'中队长', # 13
'莉安妮', # 14
'基库', # 15
'奈尔', # 16
'科洛蒂娅公主', # 17
'尤莉亚中尉', # 18
'雪拉扎德', # 19
'奥利维尔', # 20
'卡露娜', # 21
'亚妮拉丝', # 22
'库拉茨', # 23
'克鲁茨', # 24
'亲卫队员', # 25
'亲卫队员', # 26
'亲卫队员', # 27
'亲卫队员', # 28
'亲卫队员', # 29
'亲卫队员', # 30
'贵族老奶奶', # 31
'贵族中年男子', # 32
'贵族女孩', # 33
'贵族青年', # 34
'贵族中年女子', # 35
'贵族老人', # 36
'贵族小孩', # 37
'男性学者2', # 38
'管家', # 39
'青年市民', # 40
' ', # 41
)
DeclEntryPoint(
Unknown_00 = 0,
Unknown_04 = 0,
Unknown_08 = 6000,
Unknown_0C = 4,
Unknown_0E = 0,
Unknown_10 = 0,
Unknown_14 = 9500,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 2800,
Unknown_2C = 262,
Unknown_30 = 45,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 0,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT07/CH00100 ._CH', # 00
'ED6_DT07/CH00101 ._CH', # 01
'ED6_DT07/CH00110 ._CH', # 02
'ED6_DT07/CH00111 ._CH', # 03
'ED6_DT07/CH00170 ._CH', # 04
'ED6_DT07/CH00171 ._CH', # 05
'ED6_DT07/CH00340 ._CH', # 06
'ED6_DT07/CH00341 ._CH', # 07
'ED6_DT07/CH00510 ._CH', # 08
'ED6_DT07/CH01480 ._CH', # 09
'ED6_DT07/CH02320 ._CH', # 0A
'ED6_DT07/CH02060 ._CH', # 0B
'ED6_DT06/CH20143 ._CH', # 0C
'ED6_DT07/CH02090 ._CH', # 0D
'ED6_DT07/CH00020 ._CH', # 0E
'ED6_DT07/CH00030 ._CH', # 0F
'ED6_DT07/CH00122 ._CH', # 10
'ED6_DT07/CH00514 ._CH', # 11
'ED6_DT07/CH00513 ._CH', # 12
'ED6_DT07/CH01240 ._CH', # 13
'ED6_DT07/CH01630 ._CH', # 14
'ED6_DT07/CH01260 ._CH', # 15
'ED6_DT07/CH01620 ._CH', # 16
'ED6_DT07/CH01320 ._CH', # 17
'ED6_DT07/CH00040 ._CH', # 18
'ED6_DT06/CH20042 ._CH', # 19
'ED6_DT07/CH01180 ._CH', # 1A
'ED6_DT07/CH01200 ._CH', # 1B
'ED6_DT07/CH01210 ._CH', # 1C
'ED6_DT07/CH01220 ._CH', # 1D
'ED6_DT07/CH01230 ._CH', # 1E
'ED6_DT07/CH01490 ._CH', # 1F
'ED6_DT07/CH01470 ._CH', # 20
'ED6_DT07/CH01190 ._CH', # 21
'ED6_DT07/CH01560 ._CH', # 22
'ED6_DT07/CH01220 ._CH', # 23
'ED6_DT06/CH20113 ._CH', # 24
'ED6_DT07/CH00440 ._CH', # 25
'ED6_DT07/CH00441 ._CH', # 26
'ED6_DT07/CH01790 ._CH', # 27
'ED6_DT07/CH00500 ._CH', # 28
'ED6_DT07/CH00501 ._CH', # 29
'ED6_DT07/CH00444 ._CH', # 2A
'ED6_DT07/CH00443 ._CH', # 2B
'ED6_DT06/CH20114 ._CH', # 2C
'ED6_DT06/CH20115 ._CH', # 2D
)
AddCharChipPat(
'ED6_DT07/CH00100P._CP', # 00
'ED6_DT07/CH00101P._CP', # 01
'ED6_DT07/CH00110P._CP', # 02
'ED6_DT07/CH00111P._CP', # 03
'ED6_DT07/CH00170P._CP', # 04
'ED6_DT07/CH00171P._CP', # 05
'ED6_DT07/CH00340P._CP', # 06
'ED6_DT07/CH00341P._CP', # 07
'ED6_DT07/CH00510P._CP', # 08
'ED6_DT07/CH01480P._CP', # 09
'ED6_DT07/CH02320P._CP', # 0A
'ED6_DT07/CH02060P._CP', # 0B
'ED6_DT06/CH20143P._CP', # 0C
'ED6_DT07/CH02090P._CP', # 0D
'ED6_DT07/CH00020P._CP', # 0E
'ED6_DT07/CH00030P._CP', # 0F
'ED6_DT07/CH00122P._CP', # 10
'ED6_DT07/CH00514P._CP', # 11
'ED6_DT07/CH00513P._CP', # 12
'ED6_DT07/CH01240P._CP', # 13
'ED6_DT07/CH01630P._CP', # 14
'ED6_DT07/CH01260P._CP', # 15
'ED6_DT07/CH01620P._CP', # 16
'ED6_DT07/CH01320P._CP', # 17
'ED6_DT07/CH00040P._CP', # 18
'ED6_DT06/CH20042P._CP', # 19
'ED6_DT07/CH01180P._CP', # 1A
'ED6_DT07/CH01200P._CP', # 1B
'ED6_DT07/CH01210P._CP', # 1C
'ED6_DT07/CH01220P._CP', # 1D
'ED6_DT07/CH01230P._CP', # 1E
'ED6_DT07/CH01490P._CP', # 1F
'ED6_DT07/CH01470P._CP', # 20
'ED6_DT07/CH01190P._CP', # 21
'ED6_DT07/CH01560P._CP', # 22
'ED6_DT07/CH01220P._CP', # 23
'ED6_DT06/CH20113P._CP', # 24
'ED6_DT07/CH00440P._CP', # 25
'ED6_DT07/CH00441P._CP', # 26
'ED6_DT07/CH01790P._CP', # 27
'ED6_DT07/CH00500P._CP', # 28
'ED6_DT07/CH00501P._CP', # 29
'ED6_DT07/CH00444P._CP', # 2A
'ED6_DT07/CH00443P._CP', # 2B
'ED6_DT06/CH20114P._CP', # 2C
'ED6_DT06/CH20115P._CP', # 2D
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 6,
ChipIndex = 0x6,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 37,
ChipIndex = 0x25,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 6,
ChipIndex = 0x6,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 37,
ChipIndex = 0x25,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 8,
ChipIndex = 0x8,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 9,
ChipIndex = 0x9,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 10,
ChipIndex = 0xA,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 11,
ChipIndex = 0xB,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 12,
ChipIndex = 0xC,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 13,
ChipIndex = 0xD,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 14,
ChipIndex = 0xF,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 15,
ChipIndex = 0xF,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 19,
ChipIndex = 0x13,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 20,
ChipIndex = 0x14,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 21,
ChipIndex = 0x15,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 22,
ChipIndex = 0x16,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 4420,
Z = 250,
Y = 72560,
Direction = 201,
Unknown2 = 0,
Unknown3 = 26,
ChipIndex = 0x1A,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 5090,
Z = 0,
Y = 70990,
Direction = 254,
Unknown2 = 0,
Unknown3 = 27,
ChipIndex = 0x1B,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 3560,
Z = 250,
Y = 71090,
Direction = 208,
Unknown2 = 0,
Unknown3 = 28,
ChipIndex = 0x1C,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = -4630,
Z = 250,
Y = 72900,
Direction = 165,
Unknown2 = 0,
Unknown3 = 29,
ChipIndex = 0x1D,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = -3480,
Z = 250,
Y = 72300,
Direction = 150,
Unknown2 = 0,
Unknown3 = 30,
ChipIndex = 0x1E,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = -4870,
Z = 0,
Y = 70280,
Direction = 162,
Unknown2 = 0,
Unknown3 = 31,
ChipIndex = 0x1F,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 6280,
Z = 0,
Y = 66790,
Direction = 237,
Unknown2 = 0,
Unknown3 = 32,
ChipIndex = 0x20,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 6740,
Z = 0,
Y = 65120,
Direction = 257,
Unknown2 = 0,
Unknown3 = 33,
ChipIndex = 0x21,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 8300,
Z = 0,
Y = 63060,
Direction = 241,
Unknown2 = 0,
Unknown3 = 34,
ChipIndex = 0x22,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 6540,
Z = 0,
Y = 69410,
Direction = 239,
Unknown2 = 0,
Unknown3 = 35,
ChipIndex = 0x23,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclEvent(
X = -57400,
Y = 1000,
Z = 2550,
Range = -43640,
Unknown_10 = 0xFFFFFC18,
Unknown_14 = 0xFFFFFCCC,
Unknown_18 = 0x0,
Unknown_1C = 4,
)
ScpFunction(
"Function_0_65A", # 00, 0
"Function_1_70A", # 01, 1
"Function_2_718", # 02, 2
"Function_3_72E", # 03, 3
"Function_4_D67", # 04, 4
"Function_5_1203", # 05, 5
"Function_6_142A", # 06, 6
"Function_7_4438", # 07, 7
"Function_8_4460", # 08, 8
"Function_9_4C64", # 09, 9
)
def Function_0_65A(): pass
label("Function_0_65A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 2)), scpexpr(EXPR_END)), "loc_66D")
SetMapFlags(0x10000000)
OP_A3(0x3FA)
Event(0, 3)
label("loc_66D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 3)), scpexpr(EXPR_END)), "loc_67B")
OP_A3(0x3FB)
Event(0, 8)
label("loc_67B")
Switch(
(scpexpr(EXPR_PUSH_VALUE_INDEX, 0x0), scpexpr(EXPR_END)),
(105, "loc_687"),
(SWITCH_DEFAULT, "loc_69D"),
)
label("loc_687")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCB, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCB, 0)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_69A")
OP_A2(0x659)
Event(0, 6)
label("loc_69A")
Jump("loc_69D")
label("loc_69D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 4)), scpexpr(EXPR_END)), "loc_6F8")
SetChrSubChip(0x8, 0)
SetChrSubChip(0x9, 0)
SetChrPos(0x8, -48300, 0, 18410, 90)
SetChrPos(0x9, -48500, 0, 17000, 135)
ClearChrFlags(0x8, 0x1)
ClearChrFlags(0x9, 0x1)
ClearChrFlags(0x8, 0x80)
ClearChrFlags(0x9, 0x80)
SetChrFlags(0x8, 0x800)
SetChrFlags(0x9, 0x800)
SetChrChipByIndex(0x8, 25)
SetChrChipByIndex(0x9, 25)
label("loc_6F8")
OP_51(0xE, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_OR), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Return()
# Function_0_65A end
def Function_1_70A(): pass
label("Function_1_70A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCB, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_717")
OP_1B(0x0, 0x0, 0x9)
label("loc_717")
Return()
# Function_1_70A end
def Function_2_718(): pass
label("Function_2_718")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_72D")
OP_99(0xFE, 0x0, 0x7, 0x5DC)
Jump("Function_2_718")
label("loc_72D")
Return()
# Function_2_718 end
    # Scenario function 3 (event): the party (0x101 Estelle, 0x102 Joshua,
    # 0x108 Zin) arrives at the Erbe Royal Villa, is spotted by three
    # soldiers (0x8/0x9/0xB), and a forced battle (id 0x3AD) starts.
    # Battle result 1 -> OP_B4(0x0) and return (presumably the game-over /
    # retry path — TODO confirm OP_B4); otherwise the post-battle scene
    # plays and normal control resumes via EventEnd.
    # OP_6D/OP_67/OP_6B/OP_6C/OP_6E are assumed to be camera opcodes.
    def Function_3_72E(): pass
    label("Function_3_72E")
    EventBegin(0x0)
    # Opening camera + party placement near the gate.
    OP_6D(19290, 0, 360, 0)
    OP_67(0, 6000, -10000, 0)
    OP_6B(5020, 0)
    OP_6C(45000, 0)
    OP_6E(280, 0)
    SetChrChipByIndex(0x101, 0)
    SetChrChipByIndex(0x102, 2)
    SetChrChipByIndex(0x108, 4)
    SetChrPos(0x108, 190, 0, -7530, 0)
    SetChrPos(0x101, -1330, 0, -8480, 0)
    SetChrPos(0x102, 570, 0, -8760, 0)
    FadeToBright(1000, 0)
    OP_6D(640, 0, -4630, 4000)
    Fade(1000)
    OP_67(0, 6000, -10000, 0)
    OP_6B(3000, 0)
    OP_6C(45000, 0)
    OP_6E(280, 0)
    OP_0D()
    ChrTalk( #0
        0x101,
        (
            "#004F这里就是『艾尔贝离宫』……\x02\x03",
            "唔~与城里相比,\x01",
            "也是同样那么的典雅豪华啊。\x02",
        )
    )
    CloseMessageWindow()
    ChrTalk( #1
        0x102,
        "#010F啊,因为是王家的建筑嘛。\x02",
    )
    CloseMessageWindow()
    # Reveal the three soldiers and play the "!" balloon + SE on Zin.
    ClearChrFlags(0x8, 0x80)
    ClearChrFlags(0x9, 0x80)
    ClearChrFlags(0xB, 0x80)
    SetChrPos(0x8, 11820, 0, -6220, 250)
    SetChrPos(0x9, 12550, 0, -5100, 250)
    SetChrPos(0xB, 14020, 0, -5780, 250)
    OP_62(0x108, 0x0, 2300, 0x2, 0x7, 0x50, 0x1)
    OP_22(0x27, 0x0, 0x64)
    Sleep(1000)
    OP_8C(0x108, 100, 400)
    ChrTalk( #2
        0x108,
        "#076F好的,冲进去!\x02",
    )
    CloseMessageWindow()
    # Parallel worker threads: party turns toward guard 0x8 while the
    # camera pans in (QueueWorkItem slots run concurrently per character).
    def lambda_947():
        TurnDirection(0xFE, 0x8, 400)
        ExitThread()
    QueueWorkItem(0x102, 1, lambda_947)
    def lambda_955():
        TurnDirection(0xFE, 0x8, 400)
        ExitThread()
    QueueWorkItem(0x101, 1, lambda_955)
    def lambda_963():
        OP_6D(6340, 0, -6950, 1500)
        ExitThread()
    QueueWorkItem(0x102, 2, lambda_963)
    def lambda_97B():
        OP_67(0, 4710, -10000, 1500)
        ExitThread()
    QueueWorkItem(0x101, 2, lambda_97B)
    def lambda_993():
        OP_6C(68000, 1500)
        ExitThread()
    QueueWorkItem(0x101, 3, lambda_993)
    WaitChrThread(0x101, 0x2)
    ChrTalk( #3
        0x8,
        "你、你们是什么人!?\x02",
    )
    CloseMessageWindow()
    # Estelle runs forward (OP_8E = move-to, coords are signed 32-bit hex).
    OP_8E(0x101, 0xFFFFFD44, 0x0, 0xFFFFE4F8, 0x1388, 0x0)
    OP_8E(0x101, 0x316, 0x0, 0xFFFFE82C, 0x1388, 0x0)
    OP_8C(0x101, 100, 0)
    ChrTalk( #4
        0x101,
        "#005F#5P你们这些坏人不配知道!\x02",
    )
    CloseMessageWindow()
    ChrTalk( #5
        0x102,
        (
            "#012F#5P不必多言,我们上!\x01",
            " \x02",
        )
    )
    CloseMessageWindow()
    # Both sides charge each other (OP_92 presumably "run toward target").
    def lambda_A59():
        OP_6D(10400, 0, -6130, 700)
        ExitThread()
    QueueWorkItem(0x101, 2, lambda_A59)
    def lambda_A71():
        OP_92(0xFE, 0x8, 0x3E8, 0x1B58, 0x0)
        ExitThread()
    QueueWorkItem(0x101, 1, lambda_A71)
    Sleep(50)
    def lambda_A8B():
        OP_92(0xFE, 0x8, 0x3E8, 0x1B58, 0x0)
        ExitThread()
    QueueWorkItem(0x102, 1, lambda_A8B)
    Sleep(50)
    def lambda_AA5():
        OP_92(0xFE, 0x8, 0x3E8, 0x1B58, 0x0)
        ExitThread()
    QueueWorkItem(0x108, 1, lambda_AA5)
    SetChrChipByIndex(0x8, 7)
    SetChrFlags(0x8, 0x1000)
    def lambda_AC4():
        OP_92(0xFE, 0x108, 0x3E8, 0x1770, 0x0)
        ExitThread()
    QueueWorkItem(0x8, 1, lambda_AC4)
    Sleep(50)
    SetChrChipByIndex(0x9, 38)
    SetChrFlags(0x9, 0x1000)
    def lambda_AE8():
        OP_92(0xFE, 0x108, 0x3E8, 0x1770, 0x0)
        ExitThread()
    QueueWorkItem(0x9, 1, lambda_AE8)
    Sleep(50)
    SetChrChipByIndex(0xB, 38)
    SetChrFlags(0xB, 0x1000)
    def lambda_B0C():
        OP_92(0xFE, 0x108, 0x3E8, 0x1770, 0x0)
        ExitThread()
    QueueWorkItem(0xB, 1, lambda_B0C)
    WaitChrThread(0x101, 0x2)
    # Cancel all running worker threads before handing off to battle.
    OP_44(0x101, 0xFF)
    OP_44(0x102, 0xFF)
    OP_44(0x108, 0xFF)
    OP_44(0x8, 0xFF)
    OP_44(0x9, 0xFF)
    OP_44(0xB, 0xFF)
    ClearChrFlags(0x8, 0x1)
    ClearChrFlags(0x9, 0x1)
    ClearChrFlags(0xB, 0x1)
    ClearChrFlags(0x8, 0x1000)
    ClearChrFlags(0x9, 0x1000)
    ClearChrFlags(0xB, 0x1000)
    Battle(0x3AD, 0x0, 0x0, 0x0, 0xFF)
    # Value index 3 holds the battle result; 1 -> abort path.
    Switch(
        (scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_END)),
        (1, "loc_B6F"),
        (SWITCH_DEFAULT, "loc_B72"),
    )
    label("loc_B6F")
    OP_B4(0x0)
    Return()
    label("loc_B72")
    # Post-battle: defeated soldiers sprawled on the ground, party restored
    # to normal sprites (chip 65535 == default), short wrap-up dialogue.
    EventBegin(0x0)
    SetChrChipByIndex(0x8, 25)
    SetChrChipByIndex(0x9, 25)
    SetChrChipByIndex(0xB, 25)
    SetChrSubChip(0x8, 0)
    SetChrSubChip(0x9, 0)
    SetChrSubChip(0xB, 0)
    SetChrFlags(0x8, 0x800)
    SetChrFlags(0x9, 0x800)
    SetChrFlags(0xB, 0x800)
    SetChrPos(0x8, 11700, 0, -9160, 176)
    SetChrPos(0x9, 12780, 0, -10830, 90)
    SetChrPos(0xB, 10700, 0, -11180, 296)
    OP_6D(10320, 0, -5900, 0)
    OP_67(0, 8000, -10000, 0)
    OP_6B(2800, 0)
    OP_6C(45000, 0)
    OP_6E(262, 0)
    SetChrChipByIndex(0x101, 65535)
    SetChrChipByIndex(0x102, 65535)
    SetChrChipByIndex(0x108, 65535)
    SetChrPos(0x101, 7960, 0, -6540, 90)
    SetChrPos(0x108, 9450, 0, -5900, 270)
    SetChrPos(0x102, 8270, 0, -5050, 90)
    FadeToBright(1000, 0)
    OP_6D(8640, 0, -5700, 1500)
    OP_0D()
    ChrTalk( #6
        0x101,
        (
            "#002F嗯,公主殿下他们被关在哪里呢?\x01",
            " \x02",
        )
    )
    CloseMessageWindow()
    TurnDirection(0x102, 0x101, 400)
    ChrTalk( #7
        0x102,
        (
            "#012F肯定在这个巨大的建筑里面。\x02\x03",
            "我们要进行地毯式的调查才行。\x01",
            " \x02",
        )
    )
    CloseMessageWindow()
    ChrTalk( #8
        0x108,
        (
            "#072F如果再磨磨蹭蹭的话,\x01",
            "前庭的那些家伙就会赶来了。\x02\x03",
            "尽快行动。\x02",
        )
    )
    CloseMessageWindow()
    EventEnd(0x0)
    Return()
    # Function_3_72E end
    # Scenario function 4 (event, fired from the DeclEvent region): two
    # gate guards (0x8/0x9) recognize the party as the arena champions
    # ("bracers!") and attack — battle id 0x3AE. Guarded by flag 0xCA.4
    # (set via OP_A2(0x654) == 0xCA*8+4) so it runs only once.
    # Battle result 1 -> OP_B4(0x0) abort path (presumably game over —
    # TODO confirm); otherwise the scene is restored and play resumes.
    def Function_4_D67(): pass
    label("Function_4_D67")
    # Already fought here -> no-op.
    Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 4)), scpexpr(EXPR_END)), "loc_D6F")
    Return()
    label("loc_D6F")
    OP_A2(0x654)
    EventBegin(0x0)
    # Reveal the two guards, surprise balloon + SE, party turns north.
    ClearChrFlags(0x8, 0x80)
    ClearChrFlags(0x9, 0x80)
    SetChrPos(0x8, -52180, 0, 20500, 180)
    SetChrPos(0x9, -50170, 0, 20530, 180)
    SetChrChipByIndex(0x8, 39)
    SetChrChipByIndex(0x9, 39)
    OP_62(0x0, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
    OP_22(0x27, 0x0, 0x64)
    def lambda_DC7():
        OP_8C(0xFE, 0, 400)
        ExitThread()
    QueueWorkItem(0x108, 1, lambda_DC7)
    def lambda_DD5():
        OP_8C(0xFE, 0, 400)
        ExitThread()
    QueueWorkItem(0x101, 1, lambda_DD5)
    def lambda_DE3():
        OP_8C(0xFE, 0, 400)
        ExitThread()
    QueueWorkItem(0x102, 1, lambda_DE3)
    OP_6D(-50570, 0, 17760, 2000)
    # Re-stage the party south of the guards and walk them up in parallel.
    SetChrChipByIndex(0x101, 0)
    SetChrChipByIndex(0x102, 2)
    SetChrChipByIndex(0x108, 4)
    SetChrPos(0x108, -50910, 0, 8080, 0)
    SetChrPos(0x102, -50140, 0, 6930, 0)
    SetChrPos(0x101, -52160, 0, 7020, 0)
    def lambda_E44():
        OP_8E(0xFE, 0xFFFF38C8, 0x0, 0x31EC, 0x1388, 0x0)
        ExitThread()
    QueueWorkItem(0x108, 1, lambda_E44)
    def lambda_E5F():
        OP_8E(0xFE, 0xFFFF34FE, 0x0, 0x2CF6, 0x1388, 0x0)
        ExitThread()
    QueueWorkItem(0x101, 1, lambda_E5F)
    def lambda_E7A():
        OP_8E(0xFE, 0xFFFF3BDE, 0x0, 0x2E90, 0x1388, 0x0)
        ExitThread()
    QueueWorkItem(0x102, 1, lambda_E7A)
    ChrTalk( #9
        0x8,
        "#5P你们是什么人……\x02",
    )
    CloseMessageWindow()
    ChrTalk( #10
        0x9,
        "#5P好像在哪儿见过……\x02",
    )
    CloseMessageWindow()
    OP_62(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
    OP_22(0x27, 0x0, 0x64)
    Sleep(1000)
    # Quick fades while swapping the guards to their "alarmed" sprite.
    Fade(250)
    OP_22(0x1F9, 0x0, 0x64)
    SetChrChipByIndex(0x9, 40)
    OP_0D()
    ChrTalk( #11
        0x9,
        (
            "#5P是他们!\x01",
            "武术大会取得优胜的……\x02",
        )
    )
    CloseMessageWindow()
    Fade(250)
    OP_22(0x1F9, 0x0, 0x64)
    SetChrChipByIndex(0x8, 40)
    OP_0D()
    ChrTalk( #12
        0x8,
        "#5P游击士那些家伙!?\x02",
    )
    CloseMessageWindow()
    ChrTalk( #13
        0x108,
        "#070F#2P啊,知道就好。\x02",
    )
    CloseMessageWindow()
    ChrTalk( #14
        0x101,
        (
            "#006F#2P老老实实让我们过去的话,\x01",
            "或许还可以饶你们一命。\x02",
        )
    )
    CloseMessageWindow()
    ChrTalk( #15
        0x8,
        "#5P不、不要小看了我们!\x02",
    )
    CloseMessageWindow()
    ChrTalk( #16
        0x9,
        (
            "#5P我们的防守坚如铁壁,\x01",
            "能破的了就破来试试看!\x02",
        )
    )
    CloseMessageWindow()
    # Both sides charge (OP_92 presumably "run toward target character").
    def lambda_1051():
        OP_6D(-50570, 0, 20000, 1000)
        ExitThread()
    QueueWorkItem(0x101, 2, lambda_1051)
    SetChrChipByIndex(0x8, 41)
    def lambda_106E():
        OP_92(0xFE, 0x101, 0x3E8, 0x1770, 0x0)
        ExitThread()
    QueueWorkItem(0x8, 1, lambda_106E)
    Sleep(50)
    SetChrChipByIndex(0x8, 41)
    def lambda_108D():
        OP_92(0xFE, 0x102, 0x3E8, 0x1770, 0x0)
        ExitThread()
    QueueWorkItem(0x9, 1, lambda_108D)
    def lambda_10A2():
        OP_8E(0xFE, 0xFFFF3878, 0x0, 0x9A92, 0x1B58, 0x0)
        ExitThread()
    QueueWorkItem(0x108, 1, lambda_10A2)
    Sleep(50)
    def lambda_10C2():
        OP_92(0xFE, 0x8, 0x3E8, 0x1B58, 0x0)
        ExitThread()
    QueueWorkItem(0x101, 1, lambda_10C2)
    Sleep(50)
    def lambda_10DC():
        OP_92(0xFE, 0x9, 0x3E8, 0x1B58, 0x0)
        ExitThread()
    QueueWorkItem(0x102, 1, lambda_10DC)
    Sleep(300)
    # Cancel worker threads, then run the fight.
    OP_44(0x101, 0xFF)
    OP_44(0x108, 0xFF)
    OP_44(0x102, 0xFF)
    OP_44(0x8, 0xFF)
    OP_44(0x9, 0xFF)
    Battle(0x3AE, 0x0, 0x0, 0x0, 0xFF)
    Switch(
        (scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_END)),
        (1, "loc_111D"),
        (SWITCH_DEFAULT, "loc_1120"),
    )
    label("loc_111D")
    OP_B4(0x0)
    Return()
    label("loc_1120")
    # Victory: same guard fix-up as Function_0's 0xCA.4 branch, party
    # collapsed onto one tile and restored to field state before fade-in.
    SetChrSubChip(0x8, 0)
    SetChrSubChip(0x9, 0)
    SetChrPos(0x8, -48300, 0, 18410, 90)
    SetChrPos(0x9, -48500, 0, 17000, 135)
    ClearChrFlags(0x8, 0x1)
    ClearChrFlags(0x9, 0x1)
    ClearChrFlags(0x8, 0x80)
    ClearChrFlags(0x9, 0x80)
    SetChrFlags(0x8, 0x800)
    SetChrFlags(0x9, 0x800)
    SetChrChipByIndex(0x8, 25)
    SetChrChipByIndex(0x9, 25)
    SetChrChipByIndex(0x101, 65535)
    SetChrChipByIndex(0x102, 65535)
    SetChrChipByIndex(0x108, 65535)
    SetChrPos(0x101, -50450, 0, 17110, 0)
    SetChrPos(0x108, -50450, 0, 17110, 0)
    SetChrPos(0x102, -50450, 0, 17110, 0)
    OP_51(0x101, 0xB, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
    OP_51(0x102, 0xB, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
    OP_51(0x108, 0xB, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
    SetChrSubChip(0x101, 0)
    SetChrSubChip(0x102, 0)
    SetChrSubChip(0x108, 0)
    OP_6D(-50450, 0, 17110, 0)
    FadeToBright(1000, 0)
    EventEnd(0x0)
    Return()
    # Function_4_D67 end
def Function_5_1203(): pass
label("Function_5_1203")
SetMapFlags(0x80)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_139C")
OP_A2(0x655)
EventBegin(0x0)
OP_8B(0x0, 0xFFFF38FA, 0x5604, 0x190)
OP_8B(0x1, 0xFFFF38FA, 0x5604, 0x190)
OP_8B(0x2, 0xFFFF38FA, 0x5604, 0x190)
FadeToDark(300, 0, 100)
AnonymousTalk( #17
"\x07\x05门上着锁,无法打开。\x02",
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
ChrTalk( #18
0x101,
"#000F唉~怎么会这样!\x02",
)
CloseMessageWindow()
ChrTalk( #19
0x102,
(
"#010F真是相当坚固的锁呢……\x01",
"得先找到钥匙才行呢。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_134E")
ChrTalk( #20
0x108,
(
"#070F唔,\x01",
"那就只能暂时先到别的地方看看了。\x02",
)
)
CloseMessageWindow()
Jump("loc_1397")
label("loc_134E")
ChrTalk( #21
0x108,
(
"#070F唔,\x01",
"去问问那个年轻的管家吧。\x02",
)
)
CloseMessageWindow()
OP_28(0x4C, 0x1, 0x8)
label("loc_1397")
EventEnd(0x1)
Jump("loc_1424")
label("loc_139C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 7)), scpexpr(EXPR_END)), "loc_13E9")
OP_8B(0x0, 0xFFFF38FA, 0x5604, 0x190)
OP_A2(0x658)
OP_71(0x1, 0x10)
FadeToDark(300, 0, 100)
AnonymousTalk( #22
"\x07\x05使用了备用钥匙。\x02",
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_64(0x0, 0x1)
Jump("loc_1424")
label("loc_13E9")
OP_8B(0x0, 0xFFFF38FA, 0x5604, 0x190)
FadeToDark(300, 0, 100)
AnonymousTalk( #23
"\x07\x05门上着锁,无法打开。\x02",
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_1424")
ClearMapFlags(0x80)
Return()
# Function_5_1203 end
def Function_6_142A(): pass
label("Function_6_142A")
EventBegin(0x0)
OP_6D(-20, 0, 54380, 0)
OP_67(0, 6000, -10000, 0)
OP_6B(1760, 0)
OP_6C(57000, 0)
OP_6E(500, 0)
ClearChrFlags(0x10, 0x80)
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xF, 0x80)
SetChrPos(0x10, 50, 250, 68860, 180)
SetChrPos(0xD, 6240, 0, 63940, 11)
SetChrPos(0xF, 3070, 0, 58560, 0)
SetChrChipByIndex(0x101, 0)
SetChrChipByIndex(0x102, 2)
SetChrChipByIndex(0x108, 4)
OP_9F(0x101, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_9F(0x102, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_9F(0x108, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
SetChrPos(0x101, -110, 0, 50960, 0)
SetChrPos(0x102, -110, 0, 50960, 0)
SetChrPos(0x108, -110, 0, 50960, 0)
SetChrPos(0xE, -110, 0, 50960, 0)
SetChrPos(0x13, -110, 0, 50960, 0)
SetChrPos(0x11, -110, 0, 50960, 0)
SetChrPos(0x12, -110, 0, 50960, 0)
ClearChrFlags(0x1E, 0x80)
ClearChrFlags(0x1F, 0x80)
ClearChrFlags(0x20, 0x80)
ClearChrFlags(0x21, 0x80)
ClearChrFlags(0x22, 0x80)
ClearChrFlags(0x23, 0x80)
ClearChrFlags(0x24, 0x80)
ClearChrFlags(0x25, 0x80)
ClearChrFlags(0x26, 0x80)
ClearChrFlags(0x27, 0x80)
def lambda_158A():
label("loc_158A")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_158A")
QueueWorkItem2(0xD, 1, lambda_158A)
def lambda_159B():
label("loc_159B")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_159B")
QueueWorkItem2(0x1E, 1, lambda_159B)
def lambda_15AC():
label("loc_15AC")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_15AC")
QueueWorkItem2(0x1F, 1, lambda_15AC)
def lambda_15BD():
label("loc_15BD")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_15BD")
QueueWorkItem2(0x20, 1, lambda_15BD)
def lambda_15CE():
label("loc_15CE")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_15CE")
QueueWorkItem2(0x21, 1, lambda_15CE)
def lambda_15DF():
label("loc_15DF")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_15DF")
QueueWorkItem2(0x22, 1, lambda_15DF)
def lambda_15F0():
label("loc_15F0")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_15F0")
QueueWorkItem2(0x23, 1, lambda_15F0)
def lambda_1601():
label("loc_1601")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_1601")
QueueWorkItem2(0x24, 1, lambda_1601)
def lambda_1612():
label("loc_1612")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_1612")
QueueWorkItem2(0x25, 1, lambda_1612)
def lambda_1623():
label("loc_1623")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_1623")
QueueWorkItem2(0x26, 1, lambda_1623)
def lambda_1634():
label("loc_1634")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_1634")
QueueWorkItem2(0x27, 1, lambda_1634)
OP_1F(0x50, 0x12C)
FadeToBright(1000, 0)
def lambda_1654():
OP_6D(750, 0, 56890, 2000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_1654)
def lambda_166C():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_166C)
def lambda_167E():
OP_8E(0xFE, 0xFFFFFFC4, 0x0, 0xDFFC, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_167E)
Sleep(500)
def lambda_169E():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 2, lambda_169E)
def lambda_16B0():
OP_8E(0xFE, 0x302, 0x0, 0xDBEC, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_16B0)
Sleep(500)
OP_62(0xF, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
def lambda_16E7():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x108, 2, lambda_16E7)
def lambda_16F9():
OP_8E(0xFE, 0xFFFFFC4A, 0x0, 0xDA34, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x108, 1, lambda_16F9)
WaitChrThread(0x101, 0x1)
SetChrChipByIndex(0x101, 65535)
def lambda_171E():
TurnDirection(0xFE, 0xF, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_171E)
WaitChrThread(0x102, 0x1)
SetChrChipByIndex(0x102, 65535)
def lambda_1736():
TurnDirection(0xFE, 0xF, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_1736)
WaitChrThread(0x108, 0x1)
SetChrChipByIndex(0x108, 65535)
def lambda_174E():
TurnDirection(0xFE, 0xF, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_174E)
WaitChrThread(0x101, 0x3)
def lambda_1761():
OP_6D(2460, 0, 58180, 1000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_1761)
TurnDirection(0xF, 0x101, 400)
WaitChrThread(0x101, 0x1)
ChrTalk( #24
0xF,
"#143F你、你们……!?\x02",
)
CloseMessageWindow()
ChrTalk( #25
0x101,
"#006F呀呵~我们来救你们了!\x02",
)
CloseMessageWindow()
ChrTalk( #26
0x102,
(
"#010F奈尔先生,\x01",
"看起来您安然无恙啊。\x02",
)
)
CloseMessageWindow()
ChrTalk( #27
0xF,
"#144F来救我们的,真的!?\x02",
)
CloseMessageWindow()
NpcTalk( #28
0x10,
"女孩的声音",
"艾丝蒂尔、约修亚。\x02",
)
CloseMessageWindow()
NpcTalk( #29
0x10,
"女孩的声音",
"没想到能在这里相会……\x02",
)
CloseMessageWindow()
def lambda_187A():
TurnDirection(0xFE, 0x10, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_187A)
Sleep(100)
def lambda_188D():
TurnDirection(0xFE, 0x10, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_188D)
Sleep(100)
TurnDirection(0x101, 0x10, 400)
ChrTalk( #30
0x101,
"#004F……咦?\x02",
)
CloseMessageWindow()
def lambda_18BC():
OP_6D(70, 250, 68760, 3000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_18BC)
def lambda_18D4():
OP_67(0, 4420, -10000, 3000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_18D4)
def lambda_18EC():
OP_6C(21000, 3000)
ExitThread()
QueueWorkItem(0x102, 2, lambda_18EC)
Sleep(1500)
def lambda_1901():
OP_8E(0xFE, 0xFFFFFDE4, 0x0, 0x1041E, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_1901)
Sleep(100)
def lambda_1921():
OP_8E(0xFE, 0x1EA, 0x0, 0x1041E, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_1921)
Sleep(300)
def lambda_1941():
OP_8E(0xFE, 0xFFFFF6E6, 0x0, 0x10266, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x108, 2, lambda_1941)
Sleep(100)
def lambda_1961():
OP_8E(0xFE, 0x8AC, 0x0, 0x10568, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xF, 2, lambda_1961)
def lambda_197C():
label("loc_197C")
TurnDirection(0xFE, 0x10, 0)
OP_48()
Jump("loc_197C")
QueueWorkItem2(0x101, 0, lambda_197C)
def lambda_198D():
label("loc_198D")
TurnDirection(0xFE, 0x10, 0)
OP_48()
Jump("loc_198D")
QueueWorkItem2(0x102, 0, lambda_198D)
def lambda_199E():
label("loc_199E")
TurnDirection(0xFE, 0x10, 0)
OP_48()
Jump("loc_199E")
QueueWorkItem2(0x108, 1, lambda_199E)
def lambda_19AF():
label("loc_19AF")
TurnDirection(0xFE, 0x10, 0)
OP_48()
Jump("loc_19AF")
QueueWorkItem2(0xF, 1, lambda_19AF)
WaitChrThread(0x101, 0x1)
ChrTalk( #31
0x101,
(
"#501F您、您就是公主殿下吧。\x02\x03",
"#001F初次见面。\x01",
"我们是游击士协会的……\x02",
)
)
CloseMessageWindow()
NpcTalk( #32
0x10,
"身着礼服的女孩",
(
"#406F不是初次见面呢。\x02\x03",
"#401F艾丝蒂尔、约修亚。\x01",
"终于按照约定再会了呢。\x02",
)
)
CloseMessageWindow()
ChrTalk( #33
0x101,
(
"#505F咦……\x02\x03",
"……………………………\x02",
)
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
ChrTalk( #34
0x101,
"#004F#3S啊啊,你不是科洛丝吗!\x02",
)
OP_7C(0x0, 0xC8, 0xBB8, 0x64)
CloseMessageWindow()
NpcTalk( #35
0x10,
"科洛丝",
(
"#405F艾丝蒂尔你真是的。\x02\x03",
"虽然没有立刻察觉,\x01",
"但也不至于那么惊讶嘛。\x02",
)
)
CloseMessageWindow()
ChrTalk( #36
0x101,
(
"#506F话、话虽这么说,\x01",
"可是身着礼服、长发披肩……\x02\x03",
"#501F究竟是怎么回事呢?\x02",
)
)
CloseMessageWindow()
ChrTalk( #37
0x102,
(
"#017F……对不起呢,科洛丝。\x02\x03",
"艾丝蒂尔这个人思想比较单纯。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #38
0x101,
(
"#509F我说!\x01",
"你那是什~么意思!\x02",
)
)
CloseMessageWindow()
NpcTalk( #39
0x10,
"科洛丝",
(
"#466F呵呵……\x01",
"我认为那是艾丝蒂尔的一个优点哦。\x02\x03",
"#401F对了,约修亚。\x02\x03",
"你还称呼我……那个名字啊。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #40
0x102,
(
"#010F嗯,我觉得你也是这么希望的吧。\x01",
" \x02\x03",
"如果你介意的话,我还是称呼本名吧?\x02",
)
)
CloseMessageWindow()
NpcTalk( #41
0x10,
"科洛丝",
(
"#408F怎么会呢……\x01",
"谢谢呢,我真的好开心。\x02",
)
)
CloseMessageWindow()
ChrTalk( #42
0x101,
(
"#505F???\x02\x03",
"话说回来……\x01",
"为什么科洛丝会在这里呢?\x02\x03",
"还有,公主殿下不是应该在这的吗?\x01",
"为什么到处都没有看见呢……\x02",
)
)
CloseMessageWindow()
ChrTalk( #43
0xF,
(
"#145F我说啊,不就在你的面前吗。\x02\x03",
"这位就是女王陛下的孙女,\x01",
"科洛蒂娅公主殿下。\x02",
)
)
CloseMessageWindow()
ChrTalk( #44
0x101,
"#501F…………哦。\x02",
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_20(0x7D0)
OP_44(0x101, 0x0)
OP_44(0x102, 0x0)
OP_44(0xD, 0x0)
def lambda_1E52():
OP_67(0, 6000, -10000, 1500)
ExitThread()
QueueWorkItem(0x101, 2, lambda_1E52)
OP_6C(45000, 1500)
Sleep(500)
OP_63(0x101)
Sleep(400)
ChrTalk( #45
0x101,
"#005F#5S#2P啊啊啊啊啊啊?\x02",
)
OP_7C(0x0, 0xC8, 0xBB8, 0x64)
OP_6E(480, 0)
OP_44(0x108, 0xFF)
OP_44(0xF, 0xFF)
CloseMessageWindow()
OP_1D(0x11)
NpcTalk( #46
0x10,
"科洛蒂娅公主",
(
"#466F对不起呢,我一直没说……\x02\x03",
"#405F我本来打算和艾丝蒂尔你们\x01",
"在王都再会的时候告诉你们的……\x01",
" \x02\x03",
"结果被理查德上校掳走了……\x02",
)
)
CloseMessageWindow()
ChrTalk( #47
0x101,
(
"#580F可、可是,为什么?\x02\x03",
"为什么公主殿下会隐藏身份\x01",
"在王立学院念书呢……!?\x02\x03",
"而、而且我们称呼你科洛丝,\x01",
"这样可以吗……\x02",
)
)
CloseMessageWindow()
NpcTalk( #48
0x10,
"科洛蒂娅公主",
(
"#406F以后也请一如既往地叫我科洛丝。\x01",
" \x02\x03",
"科洛蒂娅·冯·奥赛雷丝……\x02\x03",
"#401F其实,我的全名的开始和末尾相结合\x01",
"就是我的爱称了。\x02",
)
)
CloseMessageWindow()
ChrTalk( #49
0x101,
(
"#008F是这样的啊……\x02\x03",
"嗯,那么头发呢?\x02",
)
)
CloseMessageWindow()
NpcTalk( #50
0x10,
"科洛蒂娅公主",
(
"#400F啊,这是假发。\x02\x03",
"如果真的是这种发型,\x01",
"在学院里面读书就不太方便了……\x02",
)
)
CloseMessageWindow()
ChrTalk( #51
0xF,
(
"#141F我也有够粗心的了……\x02\x03",
"虽然以前看过您的照片,\x01",
"但在市长官邸事件中见到您时\x01",
"竟然完全没有注意到……\x02",
)
)
CloseMessageWindow()
NpcTalk( #52
0x10,
"科洛蒂娅公主",
(
"#466F呵呵,对不起。\x02\x03",
"杜南王叔和戴尔蒙市长都没有察觉,\x01",
"真是有些意外的效果呢。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #53
0x101,
(
"#007F哦,说起来,\x01",
"那个公爵还是你的亲戚呢。\x02\x03",
"#004F嗯,对了。\x01",
"最重要的事情反而忘记了。\x02",
)
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
SetMessageWindowPos(72, 320, 56, 3)
SetChrName("")
AnonymousTalk( #54
(
"\x07\x05艾丝蒂尔他们把至今为止的事情经过一一道来,\x01",
"也说明了接受女王的委托前来营救的事情。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
NpcTalk( #55
0x10,
"科洛蒂娅公主",
(
"#404F是这样啊……\x02\x03",
"#403F艾丝蒂尔、约修亚,\x01",
"还有那位金先生……\x02\x03",
"#406F你们能来营救我们,\x01",
"我发自内心地感谢你们的恩德。\x02",
)
)
CloseMessageWindow()
ChrTalk( #56
0x101,
(
"#001F啊哈哈,怎么又客气起来了呢。\x02\x03",
"如果知道被掳走的是科洛丝的话,\x01",
"就算不委托我们也会来的。\x01",
" \x02",
)
)
CloseMessageWindow()
NpcTalk( #57
0x10,
"科洛蒂娅公主",
"#405F艾丝蒂尔……\x02",
)
CloseMessageWindow()
ChrTalk( #58
0x102,
(
"#019F的确如此呢。\x02\x03",
"#010F不过,相比之下,\x01",
"我觉得你要感谢的应该是陛下才对。\x02\x03",
"她不顾自己所处的不利境况,\x01",
"也要委托我们来营救你。\x02",
)
)
CloseMessageWindow()
ChrTalk( #59
0x108,
(
"#074F的确,公主殿下既然已经平安无事,\x01",
"那么陛下就可以拒绝上校的要求了……\x02\x03",
"#072F也许陛下已经视死如归了。\x02",
)
)
CloseMessageWindow()
TurnDirection(0x10, 0x108, 400)
NpcTalk( #60
0x10,
"科洛蒂娅公主",
(
"#403F是的……\x01",
"祖母大人就是那样的。\x02\x03",
"无论如何也不会妥协,\x01",
"可是这样祖母大人她……\x02",
)
)
CloseMessageWindow()
SetChrPos(0xD, -230, 0, 55310, 346)
SetChrChipByIndex(0x8, 37)
SetChrFlags(0x8, 0x1)
ClearChrFlags(0xC, 0x80)
ClearChrFlags(0x8, 0x80)
SetChrPos(0xC, 1020, 0, 56140, 0)
SetChrPos(0x8, 50, 0, 54770, 0)
OP_20(0x5DC)
NpcTalk( #61
0xC,
"男人的声音",
(
"#1P所谓闹剧,\x01",
"就是这个样子的吗……\x02",
)
)
CloseMessageWindow()
OP_21()
def lambda_2707():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_2707)
def lambda_2715():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_2715)
def lambda_2723():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_2723)
def lambda_2731():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0xF, 1, lambda_2731)
def lambda_273F():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x10, 1, lambda_273F)
def lambda_274D():
OP_6D(680, 0, 60840, 2000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_274D)
def lambda_2765():
OP_6E(500, 2000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_2765)
Sleep(500)
OP_1D(0x56)
Sleep(1500)
ChrTalk( #62
0xD,
"#6P公、公主殿下~……\x02",
)
CloseMessageWindow()
NpcTalk( #63
0x10,
"科洛蒂娅公主",
"#407F小莉安妮!?\x02",
)
CloseMessageWindow()
def lambda_27BA():
OP_6D(850, 0, 60760, 2000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_27BA)
def lambda_27D2():
OP_6E(450, 2000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_27D2)
SetChrChipByIndex(0x101, 0)
def lambda_27E7():
OP_8E(0xFE, 0xFFFFFDEE, 0x0, 0xEF4C, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_27E7)
Sleep(200)
SetChrChipByIndex(0x102, 2)
def lambda_280C():
OP_8E(0xFE, 0x258, 0x0, 0xF19A, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_280C)
Sleep(100)
SetChrChipByIndex(0x108, 4)
def lambda_2831():
OP_8E(0xFE, 0xFFFFF754, 0x0, 0xF1B8, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x108, 1, lambda_2831)
Sleep(200)
def lambda_2851():
OP_8E(0xFE, 0xA, 0x0, 0xF820, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x10, 1, lambda_2851)
WaitChrThread(0x101, 0x2)
WaitChrThread(0x101, 0x1)
ChrTalk( #64
0x101,
"#580F那、那个小女孩是谁!?\x02",
)
CloseMessageWindow()
WaitChrThread(0x10, 0x1)
NpcTalk( #65
0x10,
"科洛蒂娅公主",
(
"#403F是摩尔根将军的孙女……\x02\x03",
"为了威逼被软禁在哈肯大门的将军就范,\x01",
"小莉安妮也被带到这里来了……\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #66
0x102,
(
"#012F为了要挟陛下而将公主殿下带到这里,\x01",
"你们对付所有掌权者都是用同一种手段。\x02",
)
)
CloseMessageWindow()
ChrTalk( #67
0xC,
(
"#2P你说得完全没错……\x01",
"不过,别以为这是单纯的威胁哦……\x02",
)
)
CloseMessageWindow()
ChrTalk( #68
0xC,
(
"#2P我们情报部的队员,为了理想,\x01",
"就算化成鬼、化成修罗也在所不惜!\x02",
)
)
CloseMessageWindow()
ChrTalk( #69
0x101,
(
"#005F这、这种事还有脸自吹自擂!\x01",
" \x02",
)
)
CloseMessageWindow()
NpcTalk( #70
0x10,
"科洛蒂娅公主",
(
"#402F中队长,我想和你做个交易。\x02\x03",
"请让我代替那个孩子,作为人质。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #71
0xC,
(
"#2P哦……\x01",
"我才不会上当呢。\x02",
)
)
CloseMessageWindow()
ChrTalk( #72
0xC,
(
"#2P对于我们这些人而言,\x01",
"是没有亲手杀死王族成员的勇气的。\x02",
)
)
CloseMessageWindow()
ChrTalk( #73
0xC,
(
"#2P与之相比,\x01",
"摩尔根将军的孙女就要好办得多了。\x02",
)
)
CloseMessageWindow()
ChrTalk( #74
0xC,
(
"#2P既有作为人质的价值,\x01",
"要打伤她又不会很难下手。\x02",
)
)
CloseMessageWindow()
NpcTalk( #75
0x10,
"科洛蒂娅公主",
"#407F……你们……\x02",
)
CloseMessageWindow()
ChrTalk( #76
0x101,
"#509F……无耻~\x02",
)
CloseMessageWindow()
ChrTalk( #77
0x108,
"#075F哎呀呀,无可救药的家伙。\x02",
)
CloseMessageWindow()
ChrTalk( #78
0xC,
"#2P哼,随便你们怎么胡说。\x02",
)
CloseMessageWindow()
ChrTalk( #79
0xC,
(
"#2P到王都执勤的巡回部队\x01",
"很快就要从空中庭园归来了。\x02",
)
)
CloseMessageWindow()
ChrTalk( #80
0xC,
(
"#2P到时候会把亲卫队还有游击士\x01",
"在这儿一网打尽!\x02",
)
)
CloseMessageWindow()
NpcTalk( #81
0x12,
"女性的声音",
"#1P啊~那已经不可能了哦。\x02",
)
CloseMessageWindow()
NpcTalk( #82
0x12,
"女性的声音",
(
"#1P他们在来这里的途中\x01",
"就已经被我们全部消灭了。\x02",
)
)
CloseMessageWindow()
OP_20(0x3E8)
OP_62(0xC, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
def lambda_2DF8():
TurnDirection(0xFE, 0x12, 400)
ExitThread()
QueueWorkItem(0xC, 1, lambda_2DF8)
def lambda_2E06():
TurnDirection(0xFE, 0x12, 400)
ExitThread()
QueueWorkItem(0x8, 1, lambda_2E06)
def lambda_2E14():
TurnDirection(0xFE, 0x12, 400)
ExitThread()
QueueWorkItem(0xD, 1, lambda_2E14)
def lambda_2E22():
OP_6D(500, 0, 59390, 800)
ExitThread()
QueueWorkItem(0x101, 2, lambda_2E22)
OP_6E(500, 800)
def lambda_2E43():
label("loc_2E43")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_2E43")
QueueWorkItem2(0xC, 1, lambda_2E43)
def lambda_2E54():
label("loc_2E54")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_2E54")
QueueWorkItem2(0x8, 1, lambda_2E54)
def lambda_2E65():
label("loc_2E65")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_2E65")
QueueWorkItem2(0xD, 1, lambda_2E65)
SetChrFlags(0x8, 0x20)
SetChrFlags(0xC, 0x20)
SetChrFlags(0x12, 0x20)
OP_51(0x12, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrChipByIndex(0x12, 16)
ClearChrFlags(0x12, 0x80)
OP_1D(0x2F)
def lambda_2E9C():
label("loc_2E9C")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_2E9C")
QueueWorkItem2(0x12, 1, lambda_2E9C)
OP_96(0x12, 0xFFFFF7A4, 0x0, 0xD5C0, 0x3E8, 0x1F40)
OP_22(0x1F6, 0x0, 0x64)
OP_99(0x12, 0x2, 0x4, 0xBB8)
PlayEffect(0x8, 0xFF, 0xFF, 50, 1000, 54770, 0, 0, 0, 400, 400, 400, 0xFF, 0, 0, 0, 0)
TurnDirection(0x8, 0x12, 0)
OP_51(0x8, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrChipByIndex(0x8, 43)
def lambda_2F1E():
OP_94(0x1, 0xFE, 0xB4, 0xBB8, 0x2AF8, 0x0)
ExitThread()
QueueWorkItem(0x8, 1, lambda_2F1E)
OP_99(0x12, 0x4, 0x9, 0xBB8)
OP_44(0xD, 0xFF)
OP_44(0x8, 0xFF)
ChrTalk( #83 op#A op#5
0x8,
"#10A啊!\x05\x02",
)
WaitChrThread(0x8, 0x1)
SetChrChipByIndex(0x8, 42)
OP_22(0x20C, 0x0, 0x64)
def lambda_2F65():
OP_99(0xFE, 0x0, 0x3, 0x7D0)
ExitThread()
QueueWorkItem(0x8, 2, lambda_2F65)
Sleep(500)
def lambda_2F7A():
OP_8E(0xFE, 0xFFFFF4DE, 0x0, 0xD912, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0xD, 1, lambda_2F7A)
Sleep(100)
OP_8F(0xC, 0x6CC, 0x0, 0xDACA, 0x1388, 0x0)
WaitChrThread(0xD, 0x1)
OP_96(0x12, 0xFFFFF858, 0x0, 0xDA16, 0x1F4, 0x1F40)
TurnDirection(0xD, 0x12, 400)
Sleep(200)
ChrTalk( #84
0xC,
"#2P什么……!?\x02",
)
CloseMessageWindow()
ChrTalk( #85
0xD,
"#3P呜……呜呜……\x02",
)
CloseMessageWindow()
OP_62(0xD, 0x0, 1700, 0x28, 0x2B, 0x64, 0x3)
ChrTalk( #86
0xD,
"#3S#3P呜哇哇啊啊啊啊啊啊!\x02",
)
OP_7C(0x0, 0xC8, 0xBB8, 0x64)
CloseMessageWindow()
def lambda_3037():
label("loc_3037")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_3037")
QueueWorkItem2(0x101, 1, lambda_3037)
def lambda_3048():
label("loc_3048")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_3048")
QueueWorkItem2(0x102, 1, lambda_3048)
def lambda_3059():
label("loc_3059")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_3059")
QueueWorkItem2(0x108, 1, lambda_3059)
def lambda_306A():
label("loc_306A")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_306A")
QueueWorkItem2(0xF, 1, lambda_306A)
def lambda_307B():
label("loc_307B")
TurnDirection(0xFE, 0x12, 0)
OP_48()
Jump("loc_307B")
QueueWorkItem2(0x10, 1, lambda_307B)
ChrTalk( #87
0x12,
"#021F#5P乖~乖~已经没事了哦。\x02",
)
CloseMessageWindow()
OP_44(0x12, 0xFF)
OP_8C(0x12, 45, 400)
ChrTalk( #88
0x12,
(
"#020F艾丝蒂尔、约修亚。\x01",
"真是好久不见了呢。\x02",
)
)
CloseMessageWindow()
ChrTalk( #89
0x101,
"#004F雪、雪拉姐!?\x02",
)
CloseMessageWindow()
ChrTalk( #90
0x102,
"#014F终于来了吗……\x02",
)
CloseMessageWindow()
ChrTalk( #91
0xC,
(
"#2P哪、哪里有这么\x01",
"慢条斯理的打招呼的!\x02",
)
)
CloseMessageWindow()
NpcTalk( #92
0x13,
"青年的声音",
"#1P哈·哈·哈。简直不解风情呢。\x02",
)
CloseMessageWindow()
LoadEffect(0x0, "map\\\\mp008_00.eff")
SetChrPos(0x28, 1590, 1000, 54930, 0)
PlayEffect(0x0, 0xFF, 0x13, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0x28, 0, 0, 0, 0)
Sleep(200)
PlayEffect(0x8, 0xFF, 0xFF, 1590, 1000, 54930, 0, 0, 0, 400, 400, 400, 0xFF, 0, 0, 0, 0)
OP_51(0xC, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrChipByIndex(0xC, 18)
def lambda_3240():
OP_96(0xFE, 0xBD6, 0x0, 0xDFFC, 0x7D0, 0xBB8)
ExitThread()
QueueWorkItem(0xC, 1, lambda_3240)
ChrTalk( #93 op#A op#5
0xC,
"#10A呜哦……\x05\x02",
)
Sleep(400)
def lambda_3273():
label("loc_3273")
TurnDirection(0xFE, 0xC, 0)
OP_48()
Jump("loc_3273")
QueueWorkItem2(0x12, 1, lambda_3273)
OP_51(0x12, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_8C(0x12, 90, 0)
def lambda_3296():
OP_96(0xFE, 0x442, 0x0, 0xDDCC, 0x3E8, 0x1388)
ExitThread()
QueueWorkItem(0x12, 1, lambda_3296)
Sleep(200)
OP_22(0x1F6, 0x0, 0x64)
OP_99(0x12, 0x2, 0x4, 0xFA0)
PlayEffect(0x8, 0xFF, 0xFF, 3180, 1500, 56940, 0, 0, 0, 400, 400, 400, 0xFF, 0, 0, 0, 0)
SetChrFlags(0xC, 0x4)
def lambda_3301():
OP_6D(6320, 0, 57730, 1000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_3301)
def lambda_3319():
OP_8F(0xFE, 0x2508, 0x1F4, 0xDCE6, 0x2EE0, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_3319)
OP_99(0x12, 0x4, 0x9, 0x7D0)
WaitChrThread(0xC, 0x1)
OP_22(0x8E, 0x0, 0x64)
OP_51(0xC, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrChipByIndex(0xC, 17)
ChrTalk( #94 op#A op#5
0xC,
"#10A呜啊!\x05\x02",
)
PlayEffect(0x12, 0xFF, 0xC, 0, 0, -500, 0, 0, 0, 2000, 2000, 2000, 0xFF, 0, 0, 0, 0)
OP_6B(1800, 0)
OP_6B(1760, 80)
Sleep(500)
OP_8F(0xC, 0x2512, 0x0, 0xDCE6, 0x3E8, 0x0)
OP_22(0x20C, 0x0, 0x64)
OP_99(0xC, 0x0, 0x3, 0x7D0)
ChrTalk( #95
0x12,
"#027F#5P刚才那是附赠品哦。\x02",
)
CloseMessageWindow()
OP_6D(280, 0, 59100, 2000)
ChrTalk( #96
0x101,
(
"#509F好、好狠啊~\x02\x03",
"#004F咦,刚才发起攻击的是……\x02",
)
)
CloseMessageWindow()
ChrTalk( #97
0x102,
"#014F……奥利维尔吗?\x02",
)
CloseMessageWindow()
OP_51(0x12, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrChipByIndex(0x12, 14)
TurnDirection(0x12, 0x13, 400)
OP_22(0xA6, 0x0, 0x64)
NpcTalk( #98
0x13,
"青年的声音",
"#1PBingo⊙\x02",
)
CloseMessageWindow()
def lambda_34B6():
label("loc_34B6")
TurnDirection(0xFE, 0x13, 0)
OP_48()
Jump("loc_34B6")
QueueWorkItem2(0x12, 1, lambda_34B6)
def lambda_34C7():
label("loc_34C7")
TurnDirection(0xFE, 0x13, 0)
OP_48()
Jump("loc_34C7")
QueueWorkItem2(0xD, 1, lambda_34C7)
def lambda_34D8():
label("loc_34D8")
TurnDirection(0xFE, 0x13, 0)
OP_48()
Jump("loc_34D8")
QueueWorkItem2(0x101, 1, lambda_34D8)
def lambda_34E9():
label("loc_34E9")
TurnDirection(0xFE, 0x13, 0)
OP_48()
Jump("loc_34E9")
QueueWorkItem2(0x102, 1, lambda_34E9)
def lambda_34FA():
label("loc_34FA")
TurnDirection(0xFE, 0x13, 0)
OP_48()
Jump("loc_34FA")
QueueWorkItem2(0x108, 1, lambda_34FA)
def lambda_350B():
label("loc_350B")
TurnDirection(0xFE, 0x13, 0)
OP_48()
Jump("loc_350B")
QueueWorkItem2(0xF, 1, lambda_350B)
def lambda_351C():
label("loc_351C")
TurnDirection(0xFE, 0x13, 0)
OP_48()
Jump("loc_351C")
QueueWorkItem2(0x10, 1, lambda_351C)
ClearChrFlags(0x13, 0x80)
OP_9F(0x13, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
def lambda_353D():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x13, 2, lambda_353D)
def lambda_354F():
OP_8E(0xFE, 0x1E, 0x0, 0xD7F0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x13, 1, lambda_354F)
Sleep(100)
OP_6D(550, 0, 58110, 2000)
ChrTalk( #99
0x13,
(
"#031F哎呀呀。主角华丽登场了。\x01",
" \x02",
)
)
CloseMessageWindow()
SetChrChipByIndex(0x101, 65535)
def lambda_35FD():
OP_8E(0xFE, 0xFFFFFC18, 0x0, 0xDF34, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0x101, 2, lambda_35FD)
Sleep(200)
SetChrChipByIndex(0x102, 65535)
def lambda_3622():
OP_8E(0xFE, 0xF0, 0x0, 0xE2C2, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x102, 2, lambda_3622)
Sleep(200)
SetChrChipByIndex(0x108, 65535)
def lambda_3647():
OP_8E(0xFE, 0xFFFFF830, 0x0, 0xE25E, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x108, 2, lambda_3647)
Sleep(300)
def lambda_3667():
OP_8E(0xFE, 0x78, 0x0, 0xE90C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x10, 2, lambda_3667)
Sleep(200)
def lambda_3687():
OP_8E(0xFE, 0x9A6, 0x0, 0xE902, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0xF, 2, lambda_3687)
OP_44(0x12, 0xFF)
OP_20(0x5DC)
OP_21()
OP_1D(0x11)
ChrTalk( #100
0x108,
(
"#071F哈哈哈……\x01",
"这不是那位怪腔怪调的兄弟吗。\x02\x03",
"#070F对了,雪拉扎德,\x01",
"真是好久不见了啊。\x02",
)
)
CloseMessageWindow()
OP_8C(0x12, 270, 400)
ChrTalk( #101
0x12,
(
"#021F#2P你好,久疏问候了。\x02\x03",
"#020F没想到金先生你也到利贝尔来了呢。\x01",
" \x02\x03",
"听说你和艾丝蒂尔他们在一起时,\x01",
"我就没有那么担心了。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #102
0x108,
(
"#070F哈哈,你真是太抬举我了。\x01",
" \x02\x03",
"#071F不过我说你啊……\x01",
"没见一段日子,越来越有魅力了呢。\x02\x03",
"说实话,我都有些认不出来了。\x02",
)
)
CloseMessageWindow()
ChrTalk( #103
0x12,
"#520F#2P哎、哎呀,真的吗?\x02",
)
CloseMessageWindow()
TurnDirection(0x13, 0x12, 400)
Sleep(200)
TurnDirection(0x13, 0x108, 400)
Sleep(200)
TurnDirection(0x13, 0x12, 400)
ChrTalk( #104
0x13,
(
"#032F哼·哼·哼,我好生嫉妒。\x01",
" \x02\x03",
"#034F在把我尽情地享用完之后,\x01",
"又像垃圾一样抛弃了。\x01",
" \x02",
)
)
CloseMessageWindow()
TurnDirection(0x12, 0x13, 400)
ChrTalk( #105
0x12,
(
"#027F#2P哎哟,我说奥利维尔,\x01",
"你不是已经和爱娜她搭上了吗?\x02\x03",
"还想一脚踏两只船啊?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x13, 0x102, 400)
ChrTalk( #106
0x13,
(
"#034F哈·哈·哈,对不起~啦。\x01",
"人家~开玩笑的~啦。\x02",
)
)
CloseMessageWindow()
ChrTalk( #107
0x101,
(
"#506F还真是的……\x01",
"大家都还是老样子呢。\x02",
)
)
CloseMessageWindow()
ChrTalk( #108
0x102,
(
"#014F可是雪拉姐姐怎么来到王都的呢?\x01",
" \x02\x03",
"王国军不是把关所全部封锁了吗?\x01",
" \x02",
)
)
CloseMessageWindow()
OP_44(0x101, 0xFF)
def lambda_3AC0():
TurnDirection(0xFE, 0x12, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_3AC0)
TurnDirection(0x12, 0x102, 400)
ChrTalk( #109
0x12,
(
"#021F#2P嗯,所以我们是乘着小船\x01",
"从瓦雷利亚湖渡过来的。\x02\x03",
"然后在王都的码头上岸。\x02",
)
)
CloseMessageWindow()
ChrTalk( #110
0x102,
"#010F原来如此,真是深思熟虑……\x02",
)
CloseMessageWindow()
ChrTalk( #111
0x101,
(
"#505F可是可是,你为什么又会\x01",
"和这个骗吃骗喝的大赖皮蛋在一起呢?\x02",
)
)
CloseMessageWindow()
def lambda_3BBD():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x12, 1, lambda_3BBD)
ChrTalk( #112
0x12,
(
"#025F#2P我刚踏出王都的协会就撞见他了。\x01",
" \x02\x03",
"他死皮赖脸地跟着我,甩都甩不掉,\x01",
"没办法之下,我就只有带他来了……\x02",
)
)
CloseMessageWindow()
ChrTalk( #113
0x13,
(
"#031F哈·哈·哈。\x02\x03",
"如此有趣好玩的事情,\x01",
"怎能缺少了我这位天才演奏家的参与呢。\x02\x03",
"#030F对了,那位小姐是……\x02",
)
)
CloseMessageWindow()
OP_44(0x102, 0xFF)
OP_44(0x101, 0xFF)
def lambda_3CFF():
TurnDirection(0xFE, 0x10, 400)
ExitThread()
QueueWorkItem(0x12, 1, lambda_3CFF)
OP_8E(0x102, 0x3AC, 0x0, 0xE36C, 0x7D0, 0x0)
TurnDirection(0x102, 0x10, 400)
TurnDirection(0x101, 0x10, 400)
ChrTalk( #114
0x101,
(
"#006F啊,我给大家介绍一下。\x02\x03",
"她是女王陛下的孙女科洛蒂娅公主殿下。\x01",
" \x02\x03",
"是我和约修亚的朋友。\x02",
)
)
CloseMessageWindow()
def lambda_3DB2():
TurnDirection(0xFE, 0x13, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_3DB2)
def lambda_3DC0():
TurnDirection(0xFE, 0x13, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_3DC0)
OP_44(0x10, 0xFF)
NpcTalk( #115
0x10,
"科洛蒂娅公主",
(
"#401F两位,初次见面。\x02\x03",
"非常感谢你们两位刚才的协助。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #116
0x12,
(
"#021F#2P别客气,这也是游击士的义务嘛。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk( #117
0x13,
(
"#035F哈·哈·哈。\x01",
"拯救美丽的公主是绅士的无上荣誉呢。\x02\x03",
"#030F能见到公主您,是我的光荣。\x02",
)
)
CloseMessageWindow()
SetChrPos(0x11, -110, 0, 50960, 0)
SetChrPos(0xE, -110, 0, 50960, 0)
OP_9F(0x11, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_9F(0xE, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
NpcTalk( #118
0x11,
"女性的声音",
"#1P科洛丝,你没事吧!\x02",
)
CloseMessageWindow()
def lambda_3FA9():
TurnDirection(0xFE, 0x11, 400)
ExitThread()
QueueWorkItem(0x12, 1, lambda_3FA9)
def lambda_3FB7():
TurnDirection(0xFE, 0x11, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_3FB7)
def lambda_3FC5():
TurnDirection(0xFE, 0x11, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_3FC5)
def lambda_3FD3():
TurnDirection(0xFE, 0x11, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_3FD3)
def lambda_3FE1():
TurnDirection(0xFE, 0x11, 400)
ExitThread()
QueueWorkItem(0x13, 1, lambda_3FE1)
def lambda_3FEF():
OP_8F(0xFE, 0x4A6, 0x0, 0xD9EE, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x13, 2, lambda_3FEF)
def lambda_400A():
OP_8F(0xFE, 0xFFFFF948, 0x0, 0xDEF8, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x101, 2, lambda_400A)
Sleep(500)
def lambda_402A():
label("loc_402A")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_402A")
QueueWorkItem2(0x101, 1, lambda_402A)
def lambda_403B():
label("loc_403B")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_403B")
QueueWorkItem2(0x102, 1, lambda_403B)
def lambda_404C():
label("loc_404C")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_404C")
QueueWorkItem2(0x108, 1, lambda_404C)
def lambda_405D():
label("loc_405D")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_405D")
QueueWorkItem2(0x13, 1, lambda_405D)
def lambda_406E():
label("loc_406E")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_406E")
QueueWorkItem2(0x12, 1, lambda_406E)
ClearChrFlags(0x11, 0x80)
SetChrFlags(0x11, 0x1000)
SetChrChipByIndex(0x11, 44)
def lambda_408E():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x11, 2, lambda_408E)
def lambda_40A0():
OP_8E(0xFE, 0xFFFFFEA2, 0x0, 0xDF7A, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x11, 1, lambda_40A0)
Sleep(500)
OP_22(0x8C, 0x0, 0x64)
ClearChrFlags(0xE, 0x80)
SetChrFlags(0xE, 0x40)
SetChrFlags(0xE, 0x4)
def lambda_40D4():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0xE, 2, lambda_40D4)
def lambda_40E6():
OP_8E(0xFE, 0x33E, 0x0, 0xE8C6, 0x1B58, 0x0)
ExitThread()
QueueWorkItem(0xE, 1, lambda_40E6)
WaitChrThread(0x11, 0x1)
SetChrChipByIndex(0x11, 45)
SetChrSubChip(0x11, 2)
WaitChrThread(0xE, 0x1)
OP_43(0xE, 0x1, 0x0, 0x7)
OP_A2(0x0)
NpcTalk( #119
0x10,
"科洛蒂娅公主",
"#409F尤莉亚,基库!\x02",
)
CloseMessageWindow()
ChrTalk( #120
0xE,
"啾!\x02",
)
CloseMessageWindow()
OP_A2(0x1)
OP_A5(0x0)
OP_97(0xE, 0x82, 0xE8C6, 0xFFFEA070, 0x1388, 0x1)
OP_97(0xE, 0x82, 0xE8C6, 0xFFFEA070, 0xBB8, 0x1)
SetChrFlags(0xE, 0x20)
def lambda_418E():
label("loc_418E")
OP_99(0xFE, 0x0, 0x7, 0x1388)
OP_48()
Jump("loc_418E")
QueueWorkItem2(0xE, 2, lambda_418E)
def lambda_41A1():
OP_8F(0xFE, 0xFFFFFD26, 0x258, 0xE0C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xE, 1, lambda_41A1)
OP_8C(0xE, 45, 100)
WaitChrThread(0xE, 0x1)
def lambda_41C8():
OP_8F(0xFE, 0xFFFFFD26, 0x0, 0xE0C4, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0xE, 1, lambda_41C8)
WaitChrThread(0xE, 0x1)
OP_44(0xE, 0x2)
Fade(500)
SetChrFlags(0xE, 0x80)
SetChrFlags(0x11, 0x20)
SetChrChipByIndex(0x11, 36)
SetChrSubChip(0x11, 1)
OP_0D()
ChrTalk( #121
0xE,
(
"#311F#5P啾啾!\x02\x03",
"啾——啾!\x02",
)
)
CloseMessageWindow()
NpcTalk( #122
0x10,
"科洛蒂娅公主",
(
"#408F呵呵,太好了。\x01",
"你们也平安无事。\x02",
)
)
CloseMessageWindow()
ChrTalk( #123
0x11,
(
"#171F#2P殿下,您平安无事就好……\x02\x03",
"真的……真的太好了……\x02",
)
)
CloseMessageWindow()
NpcTalk( #124
0x10,
"科洛蒂娅公主",
(
"#401F尤莉亚你也是……\x01",
"还是那么精神焕发呢。\x02",
)
)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
Sleep(500)
SetMessageWindowPos(72, 320, 56, 3)
SetChrName("")
AnonymousTalk( #125
(
"\x07\x05之后,艾丝蒂尔他们带着科洛丝\x01",
"和伪装行动的游击士还有亲卫队队员汇合了。\x02",
)
)
CloseMessageWindow()
AnonymousTalk( #126
(
"\x07\x05安顿好其他的人质之后,\x01",
"众人决定一起商讨对策以确认当前的状况。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
OP_1B(0x0, 0x0, 0xFFFF)
OP_28(0x4B, 0x4, 0x10)
OP_28(0x4C, 0x4, 0x10)
OP_28(0x4C, 0x1, 0x40)
OP_28(0x4C, 0x1, 0x80)
OP_28(0x4C, 0x1, 0x100)
OP_28(0x4D, 0x4, 0x2)
OP_28(0x4D, 0x4, 0x4)
OP_28(0x4D, 0x1, 0x1)
OP_28(0x4D, 0x1, 0x2)
OP_20(0x5DC)
OP_21()
OP_A2(0x3FB)
NewScene("ED6_DT01/T4300 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_6_142A end
# Auto-generated helper thread (decompiled ED6 scene bytecode).
# Keeps the running entity (0xFE = "self") stepping an OP_97 move until
# scenario flag (0x0, bit 1) is raised by another thread, then returns.
def Function_7_4438(): pass
label("Function_7_4438")
OP_A6(0x0)  # NOTE(review): OP_A6/OP_A3 look like a signal/ack pair on channel 0 — confirm opcode semantics
label("loc_443B")
# Loop body runs while scenario flag (0x0, bit 1) is still clear (EXPR_EQUZ).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_445C")
OP_97(0xFE, 0x82, 0xE8C6, 0xFFFA81C0, 0x1388, 0x1)  # NOTE(review): presumably an arc/orbit movement step (same opcode circles the bird 0xE earlier in this file) — confirm
OP_48()  # NOTE(review): appears to yield a frame, as in the spin-wait lambdas above — confirm
Jump("loc_443B")
label("loc_445C")
OP_A3(0x0)
Return()
# Function_7_4438 end
# Auto-generated event function (decompiled ED6 scene bytecode):
# the operation briefing before retaking Grancel castle.  Julia (0x11)
# explains the three-stage plan, the party members acknowledge, and the
# script transitions to map T4300.  All dialogue strings are runtime
# data and are preserved byte-for-byte.
def Function_8_4460(): pass
label("Function_8_4460")
EventBegin(0x0)
# -- camera setup for the briefing shot --
OP_6D(1040, 130, 67720, 0)
OP_67(0, 6000, -10000, 0)
OP_6B(3000, 0)
OP_6C(44000, 0)
OP_6E(280, 0)
# -- make every participant visible (clear hidden flag 0x80) --
ClearChrFlags(0x11, 0x80)
ClearChrFlags(0x13, 0x80)
ClearChrFlags(0x10, 0x80)
ClearChrFlags(0x12, 0x80)
ClearChrFlags(0x14, 0x80)
ClearChrFlags(0x16, 0x80)
ClearChrFlags(0x15, 0x80)
ClearChrFlags(0x17, 0x80)
ClearChrFlags(0x18, 0x80)
ClearChrFlags(0x19, 0x80)
ClearChrFlags(0x1A, 0x80)
ClearChrFlags(0x1B, 0x80)
ClearChrFlags(0x1C, 0x80)
ClearChrFlags(0x1D, 0x80)
# -- place the cast around the briefing table --
SetChrPos(0x11, -40, 250, 70880, 180)
SetChrPos(0x102, -2410, 0, 67020, 45)
SetChrPos(0x13, -3610, 0, 67070, 61)
SetChrPos(0x108, -3890, 0, 68290, 80)
SetChrPos(0x101, 2540, 0, 67490, 320)
SetChrPos(0x10, 1280, 0, 67070, 2)
SetChrPos(0x12, 2980, 0, 66110, 297)
SetChrPos(0x14, -730, 0, 65269, 0)
SetChrPos(0x16, 180, 0, 64510, 0)
SetChrPos(0x15, -950, 0, 63940, 0)
SetChrPos(0x17, -2170, 0, 64410, 0)
# NOTE(review): the six calls below all target id 0x18, so only the last
# position (1840, 0, 59790) takes effect — looks like generated output
# for a row of soldiers sharing one template; confirm against the
# original bytecode.
SetChrPos(0x18, -1770, 0, 61620, 0)
SetChrPos(0x18, 70, 0, 61620, 0)
SetChrPos(0x18, 1840, 0, 61620, 0)
SetChrPos(0x18, -1770, 0, 59790, 0)
SetChrPos(0x18, 70, 0, 59790, 0)
SetChrPos(0x18, 1840, 0, 59790, 0)
SetChrChipByIndex(0x10, 24)
# Julia: stage 1 — Joshua's trio enters via the underground waterway
# and opens the castle gate from the duty room.
ChrTalk(	#127
0x11,
(
"#170F现在我就对解放格兰赛尔城\x01",
"和营救女王陛下的作战进行说明。\x02\x03",
"首先,由约修亚君等\x01",
"三人为一组从地下水路\x01",
"攻入格兰赛尔城的地下。\x02\x03",
"然后迅速赶往亲卫队值勤室\x01",
"将城门的开关装置启动。\x02",
)
)
CloseMessageWindow()
ChrTalk(	#128
0x102,
"#010F明白了。\x02",
)
CloseMessageWindow()
ChrTalk(	#129
0x108,
(
"#070F嗯,巨大的烟花\x01",
"就要开始燃放了啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(	#130
0x13,
(
"#030F哼哼……不管怎样,\x01",
"最后一幕终于开演了。\x02",
)
)
CloseMessageWindow()
# Julia: stage 2 — the royal guard and four bracers storm in from the
# city once the gate opens, drawing the enemy into the castle.
ChrTalk(	#131
0x11,
(
"#170F在城门打开的同时,\x01",
"全体亲卫队员以及四名\x01",
"游击士就从市街区冲进城内。\x02\x03",
"尽量制造草木皆兵的效果,\x01",
"将敌人全部引入城内集中。\x02",
)
)
CloseMessageWindow()
ChrTalk(	#132
0x17,
"好的,交给我们去办吧。\x02",
)
CloseMessageWindow()
ChrTalk(	#133
0x16,
"太好了,我已经跃跃欲试了!\x02",
)
CloseMessageWindow()
# Julia turns to the princess (0x10) to question her joining the fight.
TurnDirection(0x11, 0x10, 400)
ChrTalk(	#134
0x11,
(
"#170F最后还要说的是……\x02\x03",
"……殿下,您真的\x01",
"下决心要参战吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(	#135
0x10,
(
"#040F抱歉……\x01",
"我一定要救出祖母大人。\x02\x03",
"而且,\x01",
"我还会操纵飞行艇……\x02\x03",
"没有不让我\x01",
"参战的道理吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(	#136
0x11,
(
"#170F唉……\x02\x03",
"如果早知道会发生这样的事情,\x01",
"当初就不会教你操纵飞艇的方法了……\x02",
)
)
CloseMessageWindow()
ChrTalk(	#137
0x101,
(
"#000F不用担心啦,尤莉亚中尉。\x02\x03",
"科洛丝就交给\x01",
"我们来照顾吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(	#138
0x12,
(
"#020F我以『银闪』之名作为赌注,\x01",
"发誓一定会保护公主的安全。\x02",
)
)
CloseMessageWindow()
# Julia: stage 3 — Estelle's trio lands on the sky garden by airship
# and rescues Queen Alicia from the royal residence.
ChrTalk(	#139
0x11,
(
"#170F我知道了……拜托你们了。\x02\x03",
"在将敌人的兵力集中于城内之后,\x01",
"艾丝蒂尔等三人为一组就乘坐\x01",
"特务飞行艇在空中庭园强行着陆。\x02\x03",
"然后就冲入女王宫\x01",
"救出艾莉茜雅女王陛下。\x02",
)
)
CloseMessageWindow()
OP_8C(0x11, 180, 400)
ChrTalk(	#140
0x11,
(
"#170F正午钟响的同时开始作战——\x01",
"在此之前请在各自的地点等候。\x02\x03",
"……全体听命,行动开始!\x02",
)
)
CloseMessageWindow()
ChrTalk(	#141
0x18,
"明白!\x02",
)
CloseMessageWindow()
# NOTE(review): OP_31 over slots 0x0–0x7 — presumably per-party-member
# bookkeeping/reset before the assault chapter; confirm opcode semantics.
OP_31(0x0, 0xFE, 0x0)
OP_31(0x1, 0xFE, 0x0)
OP_31(0x2, 0xFE, 0x0)
OP_31(0x3, 0xFE, 0x0)
OP_31(0x4, 0xFE, 0x0)
OP_31(0x5, 0xFE, 0x0)
OP_31(0x6, 0xFE, 0x0)
OP_31(0x7, 0xFE, 0x0)
OP_A2(0x3FD)  # NOTE(review): presumably raises scenario flag 0x3FD ("briefing done") — confirm
NewScene("ED6_DT01/T4300 ._SN", 100, 0, 0)  # transition to map T4300
IdleLoop()
Return()
# Function_8_4460 end
# Auto-generated barrier event (decompiled ED6 scene bytecode): fires
# when the player tries to leave before the objective is complete.
# Branches on value-index 0xA — NOTE(review): presumably the current
# party-leader slot (0 = Estelle, 1 = Joshua, else Zin); confirm.
# One character speaks a reminder line, then the party is pushed back
# (OP_90) and the event ends.
def Function_9_4C64(): pass
label("Function_9_4C64")
EventBegin(0x1)
# Leader slot 0 → Estelle's line.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_4CD0")
ChrTalk(	#142
0x101,
(
"#002F还没有完成女王陛下的委托呢。\x01",
" \x02\x03",
"快点把公主殿下找出来吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_4DAB")
label("loc_4CD0")
# Leader slot 1 → Joshua's line.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_4D3B")
ChrTalk(	#143
0x102,
(
"#012F等把人质都解放了,\x01",
"再离开这里吧。\x02\x03",
"总之要先把里面彻底调查一番。\x01",
" \x02",
)
)
CloseMessageWindow()
Jump("loc_4DAB")
label("loc_4D3B")
# Any other leader → Zin's line.
ChrTalk(	#144
0x108,
(
"#072F还没有找到公主殿下和其他人质呢。\x01",
" \x02\x03",
"先把那些坏家伙们\x01",
"一个不留地干掉吧。\x02",
)
)
CloseMessageWindow()
label("loc_4DAB")
OP_90(0x0, 0x0, 0x0, 0x5DC, 0xBB8, 0x0)  # NOTE(review): appears to nudge the party back from the exit — confirm
Sleep(50)
EventEnd(0x4)
Return()
# Function_9_4C64 end
SaveToFile()
Try(main)
| [
"[email protected]"
] | |
c1f48344496f1629bed80011ff2628c78bb13632 | 2902526147f807514560a963b49a028b4dacd1a1 | /models/conceptmap_tests.py | 4afaf16f92daf99b1666c4c8e5e4ef9ad5b2c507 | [] | no_license | Healthedata1/R4Pyfhir_models | abc8cb4bc3545a9385699c7c1cb7f67d494531f8 | 06472663dfdd4cb0856838d73c5460aa95806467 | refs/heads/master | 2020-04-16T08:39:25.202315 | 2019-01-12T20:23:41 | 2019-01-12T20:23:41 | 165,432,352 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,349 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.6.0-bd605d07 on 2018-12-23.
# 2018, SMART Health IT.
import os
import io
import unittest
import json
from . import conceptmap
from .fhirdate import FHIRDate
class ConceptMapTests(unittest.TestCase):
def instantiate_from(self, filename):
    """Load *filename* from the unit-test data directory and build a ConceptMap.

    The directory is taken from the FHIR_UNITTEST_DATADIR environment
    variable (falling back to the working directory).  Fails the test if
    the JSON is not a ConceptMap resource.
    """
    base = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
    path = os.path.join(base, filename)
    with io.open(path, 'r', encoding='utf-8') as fp:
        payload = json.load(fp)
    self.assertEqual("ConceptMap", payload["resourceType"])
    return conceptmap.ConceptMap(payload)
def testConceptMap1(self):
    """Round-trip conceptmap-example.json: parse, check, serialize, re-parse, re-check."""
    parsed = self.instantiate_from("conceptmap-example.json")
    self.assertIsNotNone(parsed, "Must have instantiated a ConceptMap instance")
    self.implConceptMap1(parsed)
    serialized = parsed.as_json()
    self.assertEqual("ConceptMap", serialized["resourceType"])
    self.implConceptMap1(conceptmap.ConceptMap(serialized))
def implConceptMap1(self, inst):
    """Assert the expected field values for conceptmap-example.json.

    Called twice by testConceptMap1: once on the instance parsed from
    disk and once on the instance rebuilt from its own as_json() output,
    so the assertions double as a serialization round-trip check.
    """
    # Publication metadata.
    self.assertEqual(inst.contact[0].name, "FHIR project team (example)")
    self.assertEqual(inst.contact[0].telecom[0].system, "url")
    self.assertEqual(inst.contact[0].telecom[0].value, "http://hl7.org/fhir")
    self.assertEqual(inst.copyright, "Creative Commons 0")
    self.assertEqual(inst.date.date, FHIRDate("2012-06-13").date)
    self.assertEqual(inst.date.as_json(), "2012-06-13")
    self.assertEqual(inst.description, "A mapping between the FHIR and HL7 v3 AddressUse Code systems")
    self.assertTrue(inst.experimental)
    # group[0]: FHIR address-use -> HL7 v3 AddressUse element mappings.
    self.assertEqual(inst.group[0].element[0].code, "home")
    self.assertEqual(inst.group[0].element[0].display, "home")
    self.assertEqual(inst.group[0].element[0].target[0].code, "H")
    self.assertEqual(inst.group[0].element[0].target[0].display, "home")
    self.assertEqual(inst.group[0].element[0].target[0].equivalence, "equivalent")
    self.assertEqual(inst.group[0].element[1].code, "work")
    self.assertEqual(inst.group[0].element[1].display, "work")
    self.assertEqual(inst.group[0].element[1].target[0].code, "WP")
    self.assertEqual(inst.group[0].element[1].target[0].display, "work place")
    self.assertEqual(inst.group[0].element[1].target[0].equivalence, "equivalent")
    self.assertEqual(inst.group[0].element[2].code, "temp")
    self.assertEqual(inst.group[0].element[2].display, "temp")
    self.assertEqual(inst.group[0].element[2].target[0].code, "TMP")
    self.assertEqual(inst.group[0].element[2].target[0].display, "temporary address")
    self.assertEqual(inst.group[0].element[2].target[0].equivalence, "equivalent")
    # "old" has no direct v3 equivalent, hence "disjoint" plus a comment.
    self.assertEqual(inst.group[0].element[3].code, "old")
    self.assertEqual(inst.group[0].element[3].display, "old")
    self.assertEqual(inst.group[0].element[3].target[0].code, "BAD")
    self.assertEqual(inst.group[0].element[3].target[0].comment, "In the HL7 v3 AD, old is handled by the usablePeriod element, but you have to provide a time, there's no simple equivalent of flagging an address as old")
    self.assertEqual(inst.group[0].element[3].target[0].display, "bad address")
    self.assertEqual(inst.group[0].element[3].target[0].equivalence, "disjoint")
    self.assertEqual(inst.group[0].source, "http://hl7.org/fhir/address-use")
    self.assertEqual(inst.group[0].target, "http://terminology.hl7.org/CodeSystem/v3-AddressUse")
    # Fixed-code rule for source codes with no explicit mapping.
    self.assertEqual(inst.group[0].unmapped.code, "temp")
    self.assertEqual(inst.group[0].unmapped.display, "temp")
    self.assertEqual(inst.group[0].unmapped.mode, "fixed")
    # Resource-level identifiers and context.
    self.assertEqual(inst.id, "101")
    self.assertEqual(inst.identifier.system, "urn:ietf:rfc:3986")
    self.assertEqual(inst.identifier.value, "urn:uuid:53cd62ee-033e-414c-9f58-3ca97b5ffc3b")
    self.assertEqual(inst.jurisdiction[0].coding[0].code, "US")
    self.assertEqual(inst.jurisdiction[0].coding[0].system, "urn:iso:std:iso:3166")
    self.assertEqual(inst.name, "FHIR-v3-Address-Use")
    self.assertEqual(inst.publisher, "HL7, Inc")
    self.assertEqual(inst.purpose, "To help implementers map from HL7 v3/CDA to FHIR")
    self.assertEqual(inst.sourceUri, "http://hl7.org/fhir/ValueSet/address-use")
    self.assertEqual(inst.status, "draft")
    self.assertEqual(inst.targetUri, "http://terminology.hl7.org/ValueSet/v3-AddressUse")
    self.assertEqual(inst.text.status, "generated")
    self.assertEqual(inst.title, "FHIR/v3 Address Use Mapping")
    self.assertEqual(inst.url, "http://hl7.org/fhir/ConceptMap/101")
    self.assertEqual(inst.useContext[0].code.code, "venue")
    self.assertEqual(inst.useContext[0].code.system, "http://terminology.hl7.org/CodeSystem/usage-context-type")
    self.assertEqual(inst.useContext[0].valueCodeableConcept.text, "for CCDA Usage")
    self.assertEqual(inst.version, "3.6.0")
def testConceptMap2(self):
    """Round-trip conceptmap-example-2.json: parse, check, serialize, re-parse, re-check."""
    parsed = self.instantiate_from("conceptmap-example-2.json")
    self.assertIsNotNone(parsed, "Must have instantiated a ConceptMap instance")
    self.implConceptMap2(parsed)
    serialized = parsed.as_json()
    self.assertEqual("ConceptMap", serialized["resourceType"])
    self.implConceptMap2(conceptmap.ConceptMap(serialized))
def implConceptMap2(self, inst):
self.assertEqual(inst.contact[0].name, "FHIR project team (example)")
self.assertEqual(inst.contact[0].telecom[0].system, "url")
self.assertEqual(inst.contact[0].telecom[0].value, "http://hl7.org/fhir")
self.assertEqual(inst.date.date, FHIRDate("2012-06-13").date)
self.assertEqual(inst.date.as_json(), "2012-06-13")
self.assertEqual(inst.description, "An example mapping")
self.assertTrue(inst.experimental)
self.assertEqual(inst.group[0].element[0].code, "code")
self.assertEqual(inst.group[0].element[0].display, "Example Code")
self.assertEqual(inst.group[0].element[0].target[0].code, "code2")
self.assertEqual(inst.group[0].element[0].target[0].dependsOn[0].display, "Something Coded")
self.assertEqual(inst.group[0].element[0].target[0].dependsOn[0].property, "http://example.org/fhir/property-value/example")
self.assertEqual(inst.group[0].element[0].target[0].dependsOn[0].system, "http://example.org/fhir/example3")
self.assertEqual(inst.group[0].element[0].target[0].dependsOn[0].value, "some-code")
self.assertEqual(inst.group[0].element[0].target[0].display, "Some Example Code")
self.assertEqual(inst.group[0].element[0].target[0].equivalence, "equivalent")
self.assertEqual(inst.group[0].source, "http://example.org/fhir/example1")
self.assertEqual(inst.group[0].target, "http://example.org/fhir/example2")
self.assertEqual(inst.group[0].unmapped.mode, "other-map")
self.assertEqual(inst.group[0].unmapped.url, "http://example.org/fhir/ConceptMap/map2")
self.assertEqual(inst.id, "example2")
self.assertEqual(inst.name, "FHIR-exanple-2")
self.assertEqual(inst.publisher, "HL7, Inc")
self.assertEqual(inst.purpose, "To illustrate mapping features")
self.assertEqual(inst.sourceUri, "http://example.org/fhir/example1")
self.assertEqual(inst.status, "draft")
self.assertEqual(inst.targetUri, "http://example.org/fhir/example2")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.title, "FHIR Example 2")
self.assertEqual(inst.url, "http://hl7.org/fhir/ConceptMap/example2")
self.assertEqual(inst.version, "3.6.0")
    def testConceptMap3(self):
        """Parse conceptmap-example-specimen-type.json, verify, and round-trip
        through as_json() to confirm serialization is lossless."""
        inst = self.instantiate_from("conceptmap-example-specimen-type.json")
        self.assertIsNotNone(inst, "Must have instantiated a ConceptMap instance")
        self.implConceptMap3(inst)
        js = inst.as_json()
        self.assertEqual("ConceptMap", js["resourceType"])
        inst2 = conceptmap.ConceptMap(js)
        self.implConceptMap3(inst2)
    def implConceptMap3(self, inst):
        """Field-level assertions for the v2-0487 -> SNOMED CT specimen-type
        mapping example; applied to both parse passes."""
        # Contact / publication metadata.
        self.assertEqual(inst.contact[0].telecom[0].system, "url")
        self.assertEqual(inst.contact[0].telecom[0].value, "http://hl7.org/fhir")
        self.assertEqual(inst.contact[1].telecom[0].system, "url")
        self.assertEqual(inst.contact[1].telecom[0].value, "http://www.phconnect.org/group/laboratorymessagingcommunityofpractice/forum/attachment/download?id=3649725%3AUploadedFile%3A145786")
        self.assertEqual(inst.date.date, FHIRDate("2013-07-25").date)
        self.assertEqual(inst.date.as_json(), "2013-07-25")
        self.assertFalse(inst.experimental)
        # Mapping elements: v2 table 0487 codes mapped to SNOMED CT concepts,
        # some with product qualifiers, some explicitly unmatched.
        self.assertEqual(inst.group[0].element[0].code, "ACNE")
        self.assertEqual(inst.group[0].element[0].target[0].code, "309068002")
        self.assertEqual(inst.group[0].element[0].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[1].code, "ACNFLD")
        self.assertEqual(inst.group[0].element[1].target[0].code, "119323008")
        self.assertEqual(inst.group[0].element[1].target[0].comment, "HL7 term is a historical term. mapped to Pus")
        self.assertEqual(inst.group[0].element[1].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[1].target[0].product[0].property, "TypeModifier")
        self.assertEqual(inst.group[0].element[1].target[0].product[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.group[0].element[1].target[0].product[0].value, "47002008")
        self.assertEqual(inst.group[0].element[2].code, "AIRS")
        self.assertEqual(inst.group[0].element[2].target[0].code, "446302006")
        self.assertEqual(inst.group[0].element[2].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[3].code, "ALL")
        self.assertEqual(inst.group[0].element[3].target[0].code, "119376003")
        self.assertEqual(inst.group[0].element[3].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[3].target[0].product[0].property, "TypeModifier")
        self.assertEqual(inst.group[0].element[3].target[0].product[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.group[0].element[3].target[0].product[0].value, "7970006")
        self.assertEqual(inst.group[0].element[4].code, "AMP")
        self.assertEqual(inst.group[0].element[4].target[0].code, "408654003")
        self.assertEqual(inst.group[0].element[4].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[4].target[0].product[0].property, "http://snomed.info/id/246380002")
        self.assertEqual(inst.group[0].element[4].target[0].product[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.group[0].element[4].target[0].product[0].value, "81723002")
        self.assertEqual(inst.group[0].element[5].code, "ANGI")
        self.assertEqual(inst.group[0].element[5].target[0].code, "119312009")
        self.assertEqual(inst.group[0].element[5].target[0].comment, "TBD in detail")
        self.assertEqual(inst.group[0].element[5].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[6].code, "ARTC")
        self.assertEqual(inst.group[0].element[6].target[0].code, "119312009")
        self.assertEqual(inst.group[0].element[6].target[0].comment, "TBD in detail")
        self.assertEqual(inst.group[0].element[6].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[7].code, "ASERU")
        self.assertEqual(inst.group[0].element[7].target[0].comment, "pending")
        self.assertEqual(inst.group[0].element[7].target[0].equivalence, "unmatched")
        self.assertEqual(inst.group[0].element[8].code, "ASP")
        self.assertEqual(inst.group[0].element[8].target[0].code, "119295008")
        self.assertEqual(inst.group[0].element[8].target[0].equivalence, "equivalent")
        self.assertEqual(inst.group[0].element[8].target[0].product[0].property, "http://snomed.info/id/246380002")
        self.assertEqual(inst.group[0].element[8].target[0].product[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.group[0].element[8].target[0].product[0].value, "14766002")
        self.assertEqual(inst.group[0].element[9].code, "ATTE")
        self.assertEqual(inst.group[0].element[9].target[0].comment, "TBD")
        self.assertEqual(inst.group[0].element[9].target[0].equivalence, "unmatched")
        self.assertEqual(inst.group[0].source, "http://terminology.hl7.org/CodeSystem/v2-0487")
        self.assertEqual(inst.group[0].target, "http://snomed.info/sct")
        # Resource-level metadata.
        self.assertEqual(inst.id, "102")
        self.assertEqual(inst.name, "Specimen mapping from v2 table 0487 to SNOMED CT")
        self.assertEqual(inst.publisher, "FHIR project team (original source: LabMCoP)")
        self.assertEqual(inst.sourceCanonical, "http://terminology.hl7.org/ValueSet/v2-0487")
        self.assertEqual(inst.status, "draft")
        self.assertEqual(inst.targetCanonical, "http://snomed.info/id?fhir_vs")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.url, "http://hl7.org/fhir/ConceptMap/102")
        self.assertEqual(inst.version, "3.6.0")
| [
"[email protected]"
] | |
1be3f37b927eb931b4e39d39885b1e9c66cc025f | d9a45783aa0dc1fd7528b57fbe73bfd3f1a277cd | /08. Exams/Mid Exam - 2 November 2019 Group 2/venv/Scripts/pip-script.py | 6e6f746c1433b351df37825d7f2fb3e2f75ed566 | [] | no_license | miaviles/Python-Fundamentals | 630b9553cbe768b344e199f890e91a5a41d5e141 | 9d6ab06fe9fbdc181bc7871f18447708cb8a33fe | refs/heads/master | 2022-08-22T19:15:18.423296 | 2020-05-25T17:53:16 | 2020-05-25T17:53:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | py | #!"D:\User\Desktop\PythonProjects\Exams\Mid Exam - 2 November 2019 Group 2\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
# Auto-generated setuptools launcher: do not edit by hand.
if __name__ == '__main__':
    # Strip the "-script.py(w)"/".exe" launcher suffix so pip sees a clean argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Delegate to pip's console_scripts entry point; its return value is the exit code.
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
    )
| [
"[email protected]"
] | |
ac5de98114ba10d946ab377a66b11b5ea13312d7 | 6e1c0e2e6713af19d5d3c05b432c908137b62a7a | /riboviria/mqtt-ping.py | ebf838f1ad9e9912ee6284847c868b1074b0a737 | [
"Apache-2.0"
] | permissive | craigderington/westnile | 75e81158b2e6494c2717edb1c9046a9461885297 | 0dbc496dfda816cf85b50ea590fa5e693b735c7e | refs/heads/master | 2022-12-13T01:06:57.751396 | 2021-04-08T22:37:28 | 2021-04-08T22:37:28 | 205,703,695 | 0 | 0 | Apache-2.0 | 2022-12-08T06:06:07 | 2019-09-01T16:34:02 | Python | UTF-8 | Python | false | false | 5,948 | py | #! c:\python34\python.exe
#!/usr/bin/env python
##demo code provided by Steve Cope at www.steves-internet-guide.com
##email [email protected]
##Free to use for any purpose
##If you like and use this code you can
##buy me a drink here https://www.paypal.me/StepenCope
"""
mqtt pinger
"""
import paho.mqtt.client as mqtt
import time
import json
##user can edit this data
mqttclient_log=True
max_connection_time=6 # if not connected after this time assume failure
cname="pinger"
msg="ping test:" +cname
topic="pingtest"
broker="loopback"
port=1883
inputs={"broker":broker,"port":port,"topic":"pingtest","loops":\
4,"loop_delay":1,"silent_flag":False,"username":"",\
"password":""}
mqttclient_log=False
##end user editable data
responses=[]
sent=[]
####
def on_connect(client, userdata, flags, rc):
    """CONNACK callback: set the connected/bad-connection flags from result code *rc*.

    rc == 0 means success; rc == 5 means the broker refused for lack of
    credentials. The main loop polls these flags to decide how to proceed.
    """
    if rc == 0:
        client.connected_flag = True
        print("connected sending ", inputs["loops"], " messages ", inputs["loop_delay"], " second Intervals")
    else:
        client.bad_connection_flag = True
        if rc == 5:
            print("broker requires authentication")
def on_disconnect(client, userdata, rc):
    """Disconnect callback: report the reason and update the client flags.

    BUG FIXES vs. original:
    - the original set ``client.connect_flag`` (written nowhere else in this
      file) instead of ``connected_flag``, the attribute initialised in
      ``Initialise_client_object`` and polled by the main loop — so the
      client still looked connected after a disconnect;
    - ``m = "disconnecting reason ", str(rc)`` built an unused tuple; the
      intended message is now actually printed, matching the other callbacks.
    """
    print("disconnecting reason ", str(rc))
    client.connected_flag = False
    client.disconnect_flag = True
def on_subscribe(client, userdata, mid, granted_qos):
    """SUBACK callback: mark the subscription as acknowledged for the main loop."""
    # print("subscribed ok ")
    client.suback_flag = True


def on_publish(client, userdata, mid):
    """PUBACK callback: mark the last publish as acknowledged."""
    client.puback_flag = True


def on_message(client, userdata, message):
    """Message callback: decode the JSON payload and queue it in the module-level
    ``responses`` list, bumping the received-message counter and flag."""
    topic = message.topic
    msgr = str(message.payload.decode("utf-8"))
    responses.append(json.loads(msgr))
    client.rmsg_count += 1
    client.rmsg_flagset = True


def on_log(client, userdata, level, buf):
    """Paho log callback: echo client log lines to stdout (enabled by mqttclient_log)."""
    print("log: ", buf)
def Initialise_client_object():
    """Attach the flag/counter attributes used by the callbacks directly to the
    ``mqtt.Client`` class, so every instance starts with known values."""
    mqtt.Client.bad_connection_flag = False
    mqtt.Client.suback_flag = False
    mqtt.Client.connected_flag = False
    mqtt.Client.disconnect_flag = False
    mqtt.Client.disconnect_time = 0.0
    mqtt.Client.disconnect_flagset = False
    mqtt.Client.rmsg_flagset = False
    mqtt.Client.rmsg_count = 0
    mqtt.Client.display_msg_count = 0


def Initialise_clients(cname):
    """Create an ``mqtt.Client`` named *cname* and wire up all callbacks.

    Logging callbacks are only attached when ``mqttclient_log`` is True.
    """
    # flags set
    client = mqtt.Client(cname)
    if mqttclient_log:  # enable mqqt client logging
        client.on_log = on_log
    client.on_connect = on_connect  # attach function to callback
    client.on_message = on_message  # attach function to callback
    client.on_disconnect = on_disconnect
    client.on_subscribe = on_subscribe
    client.on_publish = on_publish
    return client
def get_input(argv):
    """Parse command-line options into the module-level ``inputs`` dict.

    NOTE(review): results are communicated solely by mutating ``inputs``;
    the locals broker_in/port_in/topics_in are never updated and the
    returned tuple is ignored by the caller.
    """
    broker_in = ""
    port_in = 0
    topics_in = ""
    try:
        opts, args = getopt.getopt(argv, "h:p:t:c:d:su:P:")
    except getopt.GetoptError:
        print (sys.argv[0]," -h <broker> -p <port> -t <topic> -c <count> \
-d <delay> -u <username> -P <pasword>-s <silent True>" )
        sys.exit(2)
    for opt, arg in opts:
        if opt == '--help':
            # NOTE(review): this branch looks unreachable — getopt() above was
            # given no long options, so '--help' raises GetoptError; confirm.
            print (sys.argv[0]," -h <broker> -p <port> -t <topic> -c <count> \
-d <delay> -u <username> -P <pasword>-s <silent True>" )
            sys.exit()
        elif opt == "-h":
            inputs["broker"] = str(arg)
        elif opt == "-t":
            inputs["topic"] = str(arg)
        elif opt == "-p":
            inputs["port"] = int(arg)
        elif opt == "-c":
            inputs["loops"] = int(arg)
        elif opt == "-u":
            inputs["username"] = str(arg)
        elif opt == "-P":
            inputs["password"] = str(arg)
        elif opt == "-d":
            inputs["loop_delay"] = int(arg)
        elif opt == "-s":
            inputs["silent_flag"] = True
    return ((broker_in, port_in, topics_in))
#start
if __name__ == "__main__":
    import sys, getopt

    # Apply any command-line overrides to the defaults in ``inputs``.
    if len(sys.argv) >= 2:
        get_input(sys.argv[1:])
    ###
    Initialise_client_object()  # create object flags
    client = Initialise_clients(cname)  # create client object and set callbacks
    if inputs["username"] != "":  # set username/password
        client.username_pw_set(username=inputs["username"], password=inputs["password"])
    print("connecting to broker ", inputs["broker"], "on port ", inputs["port"], " topic", inputs["topic"])
    try:
        res = client.connect(inputs["broker"], inputs["port"])  # establish connection
    except:
        print("can't connect to broker", inputs["broker"])
        sys.exit()
    client.loop_start()
    tstart = time.time()
    # Spin until the on_connect callback resolves the connection attempt.
    while not client.connected_flag and not client.bad_connection_flag:
        time.sleep(.25)
    if client.bad_connection_flag:
        print("connection failure to broker ", inputs["broker"])
        client.loop_stop()
        sys.exit()
    if inputs["silent_flag"]:
        print ("Silent Mode is on")
    client.subscribe(inputs["topic"])
    while not client.suback_flag:  # wait for subscribe to be acknowledged
        time.sleep(.25)
    count = 0
    tbegin = time.time()
    try:
        # Publish ``loops`` ping messages and time each round trip: the broker
        # echoes our own publish back on the subscribed topic, where
        # on_message queues it into ``responses``.
        while count < inputs["loops"]:
            wait_response_flag = True
            client.rmsg_flagset = False
            count += 1
            m_out = json.dumps((msg, count))
            sent.append(m_out)
            if not inputs["silent_flag"]:
                print("sending:", m_out)
            client.publish(inputs["topic"], m_out)  # publish
            tstart = time.time()
            # print("flags " ,wait_response_flag,client.rmsg_flagset)
            while wait_response_flag:  # busy-wait for the echoed message
                if responses and client.rmsg_flagset:
                    ttrip = time.time() - tstart
                    if not inputs["silent_flag"]:
                        print("received:", responses.pop(0), "time= %2.3f" % ttrip)
                    wait_response_flag = False
            time.sleep(inputs["loop_delay"])
    except KeyboardInterrupt:
        print("interrrupted by keyboard")
    # Summary statistics and orderly shutdown.
    tt = time.time() - tbegin
    print("Total time= %2.2f" % tt)
    print("Total sent=", count)
    print("Total received=", client.rmsg_count)
    time.sleep(2)
    client.disconnect()
    client.loop_stop()
    time.sleep(2)
| [
"[email protected]"
] | |
6a6fbc3b7e6a3a9760a73b117d20d87c3e9de4df | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part000147.py | e8bd4521a04948dde809492ba35369e65d1ee11b | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,530 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher40837(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({0: 1}), [
(VariableWithCount('i2.2.1.2.2.2.1.0', 1, 1, S(1)), Mul)
]),
1: (1, Multiset({}), [
(VariableWithCount('i2.2.1.2.2.2.1.0_1', 1, 1, S(1)), Mul),
(VariableWithCount('i2.2.1.2.2.2.1.1', 1, 1, None), Mul)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Mul
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher40837._instance is None:
CommutativeMatcher40837._instance = CommutativeMatcher40837()
return CommutativeMatcher40837._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 40836
if len(subjects) >= 1 and isinstance(subjects[0], Pow):
tmp1 = subjects.popleft()
subjects2 = deque(tmp1._args)
# State 40838
if len(subjects2) >= 1:
tmp3 = subjects2.popleft()
subst1 = Substitution(subst0)
try:
subst1.try_add_variable('i2.2.1.2.2.2.1.1', tmp3)
except ValueError:
pass
else:
pass
# State 40839
if len(subjects2) >= 1 and subjects2[0] == Integer(2):
tmp5 = subjects2.popleft()
# State 40840
if len(subjects2) == 0:
pass
# State 40841
if len(subjects) == 0:
pass
# 0: x**2
yield 0, subst1
subjects2.appendleft(tmp5)
subjects2.appendleft(tmp3)
subjects.appendleft(tmp1)
return
yield
from collections import deque | [
"[email protected]"
] | |
401203770cc2f93e7faaa29cf9e1fd36cbbaa32f | 6e373b40393fb56be4437c37b9bfd218841333a8 | /Level_18/Level_3/Level_3/asgi.py | f8742930a072809c0359d740818761b659f2518a | [] | no_license | mahto4you/Django-Framework | 6e56ac21fc76b6d0352f004a5969f9d4331defe4 | ee38453d9eceea93e2c5f3cb6895eb0dce24dc2b | refs/heads/master | 2023-01-22T01:39:21.734613 | 2020-12-04T03:01:17 | 2020-12-04T03:01:17 | 318,383,854 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
ASGI config for Level_3 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Default settings module for the ASGI server process (overridable via env).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Level_3.settings')

# Module-level ASGI callable that servers (daphne/uvicorn/...) import and serve.
application = get_asgi_application()
| [
"[email protected]"
] | |
136d4440722f6be8b2f774e356379319276d55fb | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /ds_read/trust_list.py | d26d454ed09c7e02a796d81fb15423168bae8639 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,243 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import read_no_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ds/describe-trusts.html
if __name__ == '__main__':
    """
    create-trust : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ds/create-trust.html
    delete-trust : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ds/delete-trust.html
    update-trust : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ds/update-trust.html
    verify-trust : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ds/verify-trust.html
    """
    # Extra AWS CLI options can be injected via this dict (see examples below).
    add_option_dict = {}
    #######################################################################
    # setting option use
    # ex: add_option_dict["setting_matching_parameter"] = "--owners"
    # ex: add_option_dict["setting_key"] = "owner_id"
    #######################################################################
    # single parameter
    # ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
    # List all Directory Service trusts for the account (no required params).
    read_no_parameter("ds", "describe-trusts", add_option_dict)
"[email protected]"
] | |
a167b927b1efb642f9bccd4b282cf263379f6776 | 395e06560c7b794a965add40c586684cb0b4e59c | /terrascript/data/cobbler.py | f0cc4d039084639e80cae2575f494d24f08893e8 | [
"BSD-2-Clause",
"Python-2.0"
] | permissive | alanyee/python-terrascript | f01edef3f6e21e5b18bc3295efef1657be17e3ca | e880e7650a7c3a88603d5429dafbacd28cd26c7e | refs/heads/develop | 2023-03-09T07:33:32.560816 | 2020-09-21T07:11:09 | 2020-09-21T07:11:09 | 300,696,024 | 0 | 0 | BSD-2-Clause | 2021-02-18T00:33:30 | 2020-10-02T17:57:18 | null | UTF-8 | Python | false | false | 45 | py | # terrascript/data/cobbler.py
__all__ = []
| [
"[email protected]"
] | |
4f39da7e6a56aec71dd90546a3eaa8dd922c328a | 3d76e2e4e1abc66ab0d55e9fe878da12fd6255ab | /rhinoscripts/GetMap.py | f117e93c148239aaf8b3e56a99d7409bc6443b1c | [] | no_license | localcode/localcode | f863aaefa3e0ef25862be05c8ec2bd935805775e | 3089aee227d257adc856021a4e1eb678d9527f50 | refs/heads/master | 2021-01-22T11:38:36.801103 | 2011-12-07T20:52:09 | 2011-12-07T20:52:09 | 2,935,478 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,081 | py | # Written by Lorenz Lachauer, 28.6.2011
# License: CC-BY-NC-SA
# eat-a-bug.blogspot.com
# This script imports a static map form OpenStreetMaps,
# based on an adress entered by the user
import urllib,urllib2,time
import os, Rhino, scriptcontext
import rhinoscriptsyntax as rs
import re, socket, math
def GetMap():
socket.setdefaulttimeout(10)
filename='c:\\map.jpg' # you migth hve to change this path
street = rs.GetString('Street')
city = rs.GetString('City')
country = rs.GetString('Country')
zoom = rs.GetInteger('Zoom', 17, 1, 19)
rs.UnitSystem(4, True)
url='http://nominatim.openstreetmap.org/search?q='+street+','+city+','+country+'&format=xml'
rs.CurrentView('Top')
try:
xml = urllib.urlopen(url).read()
except:
print 'http://nominatim.openstreetmap.org produced an error'
return
temp = xml[xml.find("lat=")+5:-1]
lat= temp[0:temp.find("'")]
temp = xml[xml.find("lon=")+5:-1]
lng= temp[0:temp.find("'")]
print 'Latitude, Longitude: '+lat+", "+lng
picture_page = 'http://osm-tah-cache.firefishy.com/MapOf/?lat='+lat+'&long='+lng+'&z='+str(zoom)+'&w=1000&h=1000&format=jpeg'
opener1 = urllib2.build_opener()
try:
page1 = opener1.open(picture_page)
my_picture = page1.read()
except:
print 'http://osm-tah-cache.firefishy.com produced an error'
return
try:
fout = open(filename, 'wb')
fout.write(my_picture)
fout.close()
except:
print 'writing of '+path+' produced an error'
return
res = 40075017 * math.cos(float(lat)/180*math.pi) / (256 * 2 ** zoom) *1000
rs.Command('_-BackgroundBitmap Remove _Enter',False)
rs.Command('_-BackgroundBitmap '+filename+' '+str(-res/2)+','+str(-res/2)+',0 '+str(res/2)+','+str(res/2)+',0 _Enter',True)
rs.Command('_-BackgroundBitmap Grayscale=No _Enter', False)
rs.Command('_-EarthAnchorPoint Latitude '+lat+' Longitude '+lng+' _Enter _Enter _Enter _Enter _Enter', False)
GetMap() | [
"[email protected]"
] | |
3f7d946761d3c6cbf9cd03516fc3dda61a38ef6e | ce7cba4a7faea3fdd3bfb7bd96e0bbb77042e091 | /tests/metadata/__init__.py | d4673f0bdd7f94be7e7a1fbf2dfb31db0728616b | [
"MIT"
] | permissive | pombredanne/cattrs | 02f74ce620b66140eb32c926207a72dc849c9947 | e99720dac0bc0843f1e037b27e712380d54332eb | refs/heads/master | 2021-09-22T15:19:30.657447 | 2021-09-20T07:11:23 | 2021-09-20T07:11:23 | 77,737,410 | 0 | 0 | MIT | 2021-06-06T08:17:25 | 2016-12-31T11:21:59 | Python | UTF-8 | Python | false | false | 18,398 | py | """Tests for metadata functionality."""
import sys
from collections import OrderedDict
from collections.abc import MutableSequence as AbcMutableSequence
from collections.abc import MutableSet as AbcMutableSet
from collections.abc import Sequence as AbcSequence
from collections.abc import Set as AbcSet
from dataclasses import field, make_dataclass
from functools import partial
from typing import (
Any,
Callable,
Dict,
List,
MutableSequence,
Sequence,
Tuple,
Type,
TypeVar,
)
import attr
from attr import NOTHING, Factory
from attr._make import _CountingAttr
from hypothesis.strategies import (
SearchStrategy,
booleans,
composite,
dictionaries,
floats,
frozensets,
integers,
just,
lists,
recursive,
sets,
text,
tuples,
)
from .. import gen_attr_names, make_class
is_39_or_later = sys.version_info[:2] >= (3, 9)
PosArg = Any
PosArgs = Tuple[Any]
T = TypeVar("T")
def simple_typed_classes(defaults=None, min_attrs=0, frozen=False):
    """Strategy yielding tuples of (attrs class, tuple of init values)."""
    return lists_of_typed_attrs(
        defaults, min_size=min_attrs, for_frozen=frozen
    ).flatmap(partial(_create_hyp_class, frozen=frozen))


def simple_typed_dataclasses(defaults=None, min_attrs=0, frozen=False):
    """Strategy yielding tuples of (dataclass, tuple of init values).

    Mutable defaults are disallowed because dataclasses reject them directly
    (they require ``default_factory``).
    """
    return lists_of_typed_attrs(
        defaults,
        min_size=min_attrs,
        for_frozen=frozen,
        allow_mutable_defaults=False,
    ).flatmap(partial(_create_dataclass, frozen=frozen))


def simple_typed_classes_and_strats(
    defaults=None, min_attrs=0
) -> SearchStrategy[Tuple[Type, SearchStrategy[PosArgs]]]:
    """Strategy yielding tuples of (attrs class, strategy of init-value tuples)."""
    return lists_of_typed_attrs(defaults, min_size=min_attrs).flatmap(
        _create_hyp_class_and_strat
    )
def lists_of_typed_attrs(
    defaults=None, min_size=0, for_frozen=False, allow_mutable_defaults=True
) -> SearchStrategy[List[Tuple[_CountingAttr, SearchStrategy[PosArg]]]]:
    """Strategy for lists of (attribute, value-strategy) pairs, with defaulted
    attributes sorted last so the resulting class signature is valid."""
    # Python functions support up to 255 arguments.
    return lists(
        simple_typed_attrs(
            defaults,
            for_frozen=for_frozen,
            allow_mutable_defaults=allow_mutable_defaults,
        ),
        min_size=min_size,
        max_size=50,
    ).map(lambda l: sorted(l, key=lambda t: t[0]._default is not NOTHING))
def simple_typed_attrs(
    defaults=None, for_frozen=False, allow_mutable_defaults=True
) -> SearchStrategy[Tuple[_CountingAttr, SearchStrategy[PosArgs]]]:
    """Union strategy over all simple attribute kinds.

    Pre-3.9 interpreters get only ``typing``-based generics; 3.9+ adds
    builtin-generic (PEP 585) variants. Mutable-container attributes are
    excluded when generating for frozen classes (``for_frozen=True``),
    except the hashable frozenset/tuple kinds.
    """
    if not is_39_or_later:
        res = (
            bare_typed_attrs(defaults)
            | int_typed_attrs(defaults)
            | str_typed_attrs(defaults)
            | float_typed_attrs(defaults)
        )
        if not for_frozen:
            res = (
                res
                | dict_typed_attrs(defaults, allow_mutable_defaults)
                | mutable_seq_typed_attrs(defaults, allow_mutable_defaults)
                | seq_typed_attrs(defaults, allow_mutable_defaults)
            )
    else:
        res = (
            bare_typed_attrs(defaults)
            | int_typed_attrs(defaults)
            | str_typed_attrs(defaults)
            | float_typed_attrs(defaults)
            | frozenset_typed_attrs(defaults)
            | homo_tuple_typed_attrs(defaults)
        )
        if not for_frozen:
            res = (
                res
                | dict_typed_attrs(defaults, allow_mutable_defaults)
                | new_dict_typed_attrs(defaults, allow_mutable_defaults)
                | set_typed_attrs(defaults, allow_mutable_defaults)
                | list_typed_attrs(defaults, allow_mutable_defaults)
                | mutable_seq_typed_attrs(defaults, allow_mutable_defaults)
                | seq_typed_attrs(defaults, allow_mutable_defaults)
            )
    return res
def _create_hyp_class(
    attrs_and_strategy: List[Tuple[_CountingAttr, SearchStrategy[PosArgs]]],
    frozen=False,
) -> SearchStrategy[Tuple[Type, PosArgs]]:
    """
    A helper function for Hypothesis to generate attrs classes.

    The result is a tuple: an attrs class, and a tuple of values to
    instantiate it.
    """

    def key(t):
        # Defaulted attributes must come after non-defaulted ones.
        return t[0]._default is not attr.NOTHING

    attrs_and_strat = sorted(attrs_and_strategy, key=key)
    attrs = [a[0] for a in attrs_and_strat]
    for i, a in enumerate(attrs):
        # Renumber counters so attribute order matches the sorted order.
        a.counter = i
    vals = tuple((a[1]) for a in attrs_and_strat)
    return tuples(
        just(
            make_class(
                "HypClass",
                OrderedDict(zip(gen_attr_names(), attrs)),
                frozen=frozen,
            )
        ),
        tuples(*vals),
    )
def _create_dataclass(
    attrs_and_strategy: List[Tuple[_CountingAttr, SearchStrategy[PosArgs]]],
    frozen=False,
) -> SearchStrategy[Tuple[Type, PosArgs]]:
    """
    A helper function for Hypothesis to generate dataclasses.

    The result is a tuple: a dataclass, and a tuple of values to
    instantiate it. ``attr.Factory`` defaults are translated to dataclass
    ``field(default_factory=...)``.
    """

    def key(t):
        # Defaulted fields must come after non-defaulted ones.
        return t[0]._default is not attr.NOTHING

    attrs_and_strat = sorted(attrs_and_strategy, key=key)
    attrs = [a[0] for a in attrs_and_strat]
    for i, a in enumerate(attrs):
        a.counter = i
    vals = tuple((a[1]) for a in attrs_and_strat)
    return tuples(
        just(
            make_dataclass(
                "HypDataclass",
                [
                    (n, a.type)
                    if a._default is NOTHING
                    else (
                        (n, a.type, field(default=a._default))
                        if not isinstance(a._default, Factory)
                        else (
                            n,
                            a.type,
                            field(default_factory=a._default.factory),
                        )
                    )
                    for n, a in zip(gen_attr_names(), attrs)
                ],
                frozen=frozen,
            )
        ),
        tuples(*vals),
    )
def _create_hyp_class_and_strat(
    attrs_and_strategy: List[Tuple[_CountingAttr, SearchStrategy[PosArg]]]
) -> SearchStrategy[Tuple[Type, SearchStrategy[PosArgs]]]:
    """Generate a strategy of (attrs class, strategy of init-value tuples).

    Like ``_create_hyp_class``, but returns the value strategy itself instead
    of already-drawn values, so callers can draw from it repeatedly.
    """

    def key(t):
        # BUG FIX: sort on the private ``_default`` slot of ``_CountingAttr``,
        # as the sibling ``_create_hyp_class``/``_create_dataclass`` helpers
        # do; ``_CountingAttr`` exposes no public ``default`` attribute.
        return t[0]._default is not attr.NOTHING

    attrs_and_strat = sorted(attrs_and_strategy, key=key)
    attrs = [a[0] for a in attrs_and_strat]
    for i, a in enumerate(attrs):
        # Renumber counters so attribute order matches the sorted order.
        a.counter = i
    vals = tuple((a[1]) for a in attrs_and_strat)
    return tuples(
        just(
            make_class("HypClass", OrderedDict(zip(gen_attr_names(), attrs)))
        ),
        just(tuples(*vals)),
    )
@composite
def bare_typed_attrs(draw, defaults=None):
    """
    Generate a tuple of an attribute and a strategy that yields values
    appropriate for that attribute.

    ``defaults`` is tri-state: True forces a default, None makes it random,
    anything else omits it. The ``Any``-typed attribute only ever holds None.
    """
    default = attr.NOTHING
    if defaults is True or (defaults is None and draw(booleans())):
        default = None
    return (attr.ib(type=Any, default=default), just(None))


@composite
def int_typed_attrs(draw, defaults=None):
    """
    Generate a tuple of an attribute and a strategy that yields ints for that
    attribute.
    """
    default = attr.NOTHING
    if defaults is True or (defaults is None and draw(booleans())):
        default = draw(integers())
    return (attr.ib(type=int, default=default), integers())


@composite
def str_typed_attrs(draw, defaults=None):
    """
    Generate a tuple of an attribute and a strategy that yields strs for that
    attribute.
    """
    default = NOTHING
    if defaults is True or (defaults is None and draw(booleans())):
        default = draw(text())
    return (attr.ib(type=str, default=default), text())


@composite
def float_typed_attrs(draw, defaults=None):
    """
    Generate a tuple of an attribute and a strategy that yields floats for that
    attribute.
    """
    default = attr.NOTHING
    if defaults is True or (defaults is None and draw(booleans())):
        default = draw(floats())
    return (attr.ib(type=float, default=default), floats())
@composite
def dict_typed_attrs(
    draw, defaults=None, allow_mutable_defaults=True
) -> SearchStrategy[Tuple[_CountingAttr, SearchStrategy]]:
    """
    Generate a tuple of an attribute and a strategy that yields dictionaries
    for that attribute. The dictionaries map strings to integers.

    When a default is drawn and ``allow_mutable_defaults`` is False (or at
    random), it is wrapped in ``attr.Factory`` to avoid a shared mutable
    default.
    """
    default = attr.NOTHING
    val_strat = dictionaries(keys=text(), values=integers())
    if defaults is True or (defaults is None and draw(booleans())):
        default_val = draw(val_strat)
        if not allow_mutable_defaults or draw(booleans()):
            default = Factory(lambda: default_val)
        else:
            default = default_val
    return (
        attr.ib(
            type=Dict[str, int] if draw(booleans()) else Dict, default=default
        ),
        val_strat,
    )


@composite
def new_dict_typed_attrs(draw, defaults=None, allow_mutable_defaults=True):
    """
    Generate a tuple of an attribute and a strategy that yields dictionaries
    for that attribute. The dictionaries map strings to integers.

    Uses the new 3.9 dict annotation.
    """
    default_val = attr.NOTHING
    val_strat = dictionaries(keys=text(), values=integers())
    if defaults is True or (defaults is None and draw(booleans())):
        default_val = draw(val_strat)
        if not allow_mutable_defaults or draw(booleans()):
            default = Factory(lambda: default_val)
        else:
            default = default_val
    else:
        default = default_val
    type = (
        dict[str, int] if draw(booleans()) else dict
    )  # We also produce bare dicts.
    return (attr.ib(type=type, default=default), val_strat)
@composite
def set_typed_attrs(draw, defaults=None, allow_mutable_defaults=True):
    """
    Generate a tuple of an attribute and a strategy that yields sets
    for that attribute. The sets contain integers.

    The annotation is randomly one of ``set[int]``, ``AbcSet[int]`` or
    ``AbcMutableSet[int]`` (PEP 585 generics, 3.9+ only).
    """
    default_val = attr.NOTHING
    val_strat = sets(integers())
    if defaults is True or (defaults is None and draw(booleans())):
        default_val = draw(val_strat)
        if not allow_mutable_defaults or draw(booleans()):
            # Wrap mutable defaults in a Factory to avoid sharing.
            default = Factory(lambda: default_val)
        else:
            default = default_val
    else:
        default = default_val
    return (
        attr.ib(
            type=set[int]
            if draw(booleans())
            else (AbcSet[int] if draw(booleans()) else AbcMutableSet[int]),
            default=default,
        ),
        val_strat,
    )


@composite
def frozenset_typed_attrs(draw, defaults=None):
    """
    Generate a tuple of an attribute and a strategy that yields frozensets
    for that attribute. The frozensets contain integers.

    Frozensets are immutable, so the default never needs a Factory.
    """
    default = attr.NOTHING
    val_strat = frozensets(integers())
    if defaults is True or (defaults is None and draw(booleans())):
        default = draw(val_strat)
    return (attr.ib(type=frozenset[int], default=default), val_strat)
@composite
def list_typed_attrs(draw, defaults=None, allow_mutable_defaults=True):
    """
    Generate a tuple of an attribute and a strategy that yields lists
    for that attribute. The lists contain floats.

    NaN/inf are excluded so equality comparisons in tests stay reliable.
    """
    default_val = attr.NOTHING
    val_strat = lists(floats(allow_infinity=False, allow_nan=False))
    if defaults is True or (defaults is None and draw(booleans())):
        default_val = draw(val_strat)
        if not allow_mutable_defaults or draw(booleans()):
            # Wrap mutable defaults in a Factory to avoid sharing.
            default = Factory(lambda: default_val)
        else:
            default = default_val
    else:
        default = default_val
    return (
        attr.ib(
            type=list[float] if draw(booleans()) else List[float],
            default=default,
        ),
        val_strat,
    )


@composite
def seq_typed_attrs(draw, defaults=None, allow_mutable_defaults=True):
    """
    Generate a tuple of an attribute and a strategy that yields lists
    for that attribute, annotated as Sequence[int] (or the 3.9+
    collections.abc variant).
    """
    default_val = attr.NOTHING
    val_strat = lists(integers())
    if defaults is True or (defaults is None and draw(booleans())):
        default_val = draw(val_strat)
        if not allow_mutable_defaults or draw(booleans()):
            default = Factory(lambda: default_val)
        else:
            default = default_val
    else:
        default = default_val
    return (
        attr.ib(
            type=Sequence[int]
            if not is_39_or_later or draw(booleans())
            else AbcSequence[int],
            default=default,
        ),
        val_strat,
    )
@composite
def mutable_seq_typed_attrs(draw, defaults=None, allow_mutable_defaults=True):
    """
    Generate an (attribute, strategy) pair where the strategy yields lists
    of floats for that attribute, typed as a MutableSequence.
    """
    strat = lists(floats(allow_infinity=False, allow_nan=False))
    default = attr.NOTHING
    if defaults is True or (defaults is None and draw(booleans())):
        value = draw(strat)
        if not allow_mutable_defaults or draw(booleans()):
            # Wrap the mutable default in a Factory so instances don't share it.
            default = Factory(lambda: value)
        else:
            default = value
    if not is_39_or_later:
        seq_type = MutableSequence[float]
    else:
        seq_type = AbcMutableSequence[float]
    return (attr.ib(type=seq_type, default=default), strat)
@composite
def homo_tuple_typed_attrs(draw, defaults=None):
    """
    Generate an (attribute, strategy) pair where the strategy yields
    homogeneous 3-tuples of strings for that attribute.
    """
    strat = tuples(text(), text(), text())
    wants_default = defaults is True or (defaults is None and draw(booleans()))
    default = draw(strat) if wants_default else attr.NOTHING
    tup_type = tuple[str, ...] if draw(booleans()) else Tuple[str, ...]
    return (attr.ib(type=tup_type, default=default), strat)
def just_class(
    tup: Tuple[
        List[Tuple[_CountingAttr, SearchStrategy]], Tuple[Type, PosArgs]
    ],
    defaults: PosArgs,
):
    """Extend the attribute list with a plain nested-class attribute."""
    attrs, (nested_cl, nested_args) = tup
    combined = list(attrs)
    combined.append(
        (
            attr.ib(
                type=nested_cl,
                default=attr.Factory(lambda: nested_cl(*defaults)),
            ),
            just(nested_cl(*nested_args)),
        )
    )
    return _create_hyp_class_and_strat(combined)
def list_of_class(
    tup: Tuple[
        List[Tuple[_CountingAttr, SearchStrategy]], Tuple[Type, PosArgs]
    ],
    defaults: PosArgs,
) -> SearchStrategy[Tuple[Type, SearchStrategy[PosArgs]]]:
    """Extend the attribute list with a ``List[nested_cl]`` attribute."""
    attrs, (nested_cl, nested_args) = tup
    combined = list(attrs)
    combined.append(
        (
            attr.ib(
                type=List[nested_cl],
                default=attr.Factory(lambda: [nested_cl(*defaults)]),
            ),
            just([nested_cl(*nested_args)]),
        )
    )
    return _create_hyp_class_and_strat(combined)
def new_list_of_class(
    tup: Tuple[
        List[Tuple[_CountingAttr, SearchStrategy]], Tuple[Type, PosArgs]
    ],
    defaults: PosArgs,
):
    """Like list_of_class, but using the PEP 585 ``list[...]`` annotation."""
    attrs, (nested_cl, nested_args) = tup
    combined = list(attrs)
    combined.append(
        (
            attr.ib(
                type=list[nested_cl],
                default=attr.Factory(lambda: [nested_cl(*defaults)]),
            ),
            just([nested_cl(*nested_args)]),
        )
    )
    return _create_hyp_class_and_strat(combined)
def dict_of_class(
    tup: Tuple[
        List[Tuple[_CountingAttr, SearchStrategy]], Tuple[Type, PosArgs]
    ],
    defaults: PosArgs,
):
    """Extend the attribute list with a ``Dict[str, nested_cl]`` attribute."""
    attrs, (nested_cl, nested_args) = tup
    combined = list(attrs)
    combined.append(
        (
            attr.ib(
                type=Dict[str, nested_cl],
                default=attr.Factory(lambda: {"cls": nested_cl(*defaults)}),
            ),
            just({"cls": nested_cl(*nested_args)}),
        )
    )
    return _create_hyp_class_and_strat(combined)
def _create_hyp_nested_strategy(
    simple_class_strategy: SearchStrategy,
) -> SearchStrategy[Tuple[Type, SearchStrategy[PosArgs]]]:
    """
    Create a recursive attrs class strategy.

    Given a strategy for building (simpler) classes, return a strategy for
    building classes that embed the simpler class as an attribute: directly,
    inside a list, or inside a dict keyed by the string "cls".
    """
    # Pair each draw of typed attributes with a draw of the simpler class.
    pairs = tuples(lists_of_typed_attrs(), simple_class_strategy)
    return nested_classes(pairs)
@composite
def nested_classes(
    draw: Callable[[SearchStrategy[T]], T],
    attrs_and_classes: SearchStrategy[
        Tuple[
            List[Tuple[_CountingAttr, SearchStrategy]],
            Tuple[Type, SearchStrategy[PosArgs]],
        ]
    ],
) -> SearchStrategy[Tuple[Type, SearchStrategy[PosArgs]]]:
    """Draw a class that nests a previously generated class as an attribute."""
    attrs, (cls, strat) = draw(attrs_and_classes)
    defaults = tuple(draw(strat))
    init_vals = tuple(draw(strat))
    payload = (attrs, (cls, init_vals))
    if is_39_or_later:
        combined = (
            list_of_class(payload, defaults)
            | new_list_of_class(payload, defaults)
            | dict_of_class(payload, defaults)
            | just_class(payload, defaults)
        )
    else:
        combined = (
            list_of_class(payload, defaults)
            | dict_of_class(payload, defaults)
            | just_class(payload, defaults)
        )
    return draw(combined)
def nested_typed_classes_and_strat(
    defaults=None, min_attrs=0
) -> SearchStrategy[Tuple[Type, SearchStrategy[PosArgs]]]:
    """Recursively wrap simple typed classes inside nested ones."""
    base = simple_typed_classes_and_strats(defaults=defaults, min_attrs=min_attrs)
    return recursive(base, _create_hyp_nested_strategy)
@composite
def nested_typed_classes(draw, defaults=None, min_attrs=0):
    """Draw a nested typed class together with a drawn tuple of init values."""
    pair = draw(
        nested_typed_classes_and_strat(defaults=defaults, min_attrs=min_attrs)
    )
    cl, strat = pair
    return cl, draw(strat)
| [
"[email protected]"
] | |
acf3731d1ba76d907697201fbe3d10aef30551e5 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /all-gists/9d7288f70414cd170901/snippet.py | 4ef0297206b75218d8477b5d7156f3ab7eb75bde | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 777 | py | #! /usr/bin/env python3
'''(1.196 x STE at 60 ms after the J-point in V3 in mm) + (0.059 x computerized QTc) - (0.326 x R-wave Amplitude in V4 in mm).
Use the calculator below. A value greater than 23.4 is quite sensitive and specific for LAD occlusion.
http://hqmeded-ecg.blogspot.com/2013/06/here-is-link-to-full-text-of-article-in.html'''
import sys

try:
    import console  # Pythonista UI module; only available on iOS.
    ios = True
except ImportError:
    ios = False

# Three positional CLI args: ST elevation at 60 ms after the J-point in V3
# (mm), computerized QTc (ms), and R-wave amplitude in V4 (mm).
ste, qtc, rwave = (float(arg) for arg in sys.argv[1:])

# Subtract the 23.4 cut-off so that a positive score flags LAD occlusion.
score = 1.196 * ste + 0.059 * qtc - 0.326 * rwave - 23.4
score_string = 'Score: {}'.format(score)
score_message = 'Positive scores are sensitive and specific for LAD occlusion.'

if ios:
    console.alert(score_string, score_message)
else:
    print(score_string, '\n', score_message)
| [
"[email protected]"
] | |
8bdcc4f7cfa967553a81250d695dd32e60f6eb07 | 9e27f91194541eb36da07420efa53c5c417e8999 | /twilio/rest/pricing/v2/__init__.py | e41e349328afb7633a4ea4c2ed3e2fbcbf2f2b60 | [] | no_license | iosmichael/flask-admin-dashboard | 0eeab96add99430828306b691e012ac9beb957ea | 396d687fd9144d3b0ac04d8047ecf726f7c18fbd | refs/heads/master | 2020-03-24T05:55:42.200377 | 2018-09-17T20:33:42 | 2018-09-17T20:33:42 | 142,508,888 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 943 | py | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from admin.twilio.base.version import Version
from admin.twilio.rest.pricing.v2.voice import VoiceList
class V2(Version):
    def __init__(self, domain):
        """
        Build the V2 version of Pricing bound to the given domain.

        :returns: V2 version of Pricing
        :rtype: twilio.rest.pricing.v2.V2.V2
        """
        super(V2, self).__init__(domain)
        self.version = 'v2'
        # Lazily-created VoiceList, cached after first access.
        self._voice = None

    @property
    def voice(self):
        """
        Return the VoiceList for this version, creating it on first access.

        :rtype: twilio.rest.pricing.v2.voice.VoiceList
        """
        voice = self._voice
        if voice is None:
            voice = VoiceList(self)
            self._voice = voice
        return voice

    def __repr__(self):
        """
        Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Pricing.V2>'
| [
"[email protected]"
] | |
b6babe73387bcd92f6acd42b396184cb5ff20f3d | 6551ec86d1e3ea2cc1ad60e16fefe0e1759f3c0a | /d2go/data/gans.py | eaf28eb031399f16e15fd4436915c7786b24d324 | [
"Apache-2.0"
] | permissive | guowenbin90/d2go | b633b03cbfc17e9373f644f29519995503b13686 | bfc08c534859358a5ee87d1091e2b5d661c937e7 | refs/heads/main | 2023-08-18T01:39:42.906859 | 2021-10-16T01:36:50 | 2021-10-16T01:38:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,160 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
This file contains utilities to load GANs datasets.
Similar to how COCO dataset is represented in Detectron2, a GANs dataset is represented
as a list of dicts, where each dict is in "standard dataset dict" format, which contains
raw data with fields such as:
- input_path (str): filename of input image
- fg_path (str): filename to the GT
...
"""
import json
import logging
import os
import tempfile
from pathlib import Path
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.utils.file_io import PathManager
logger = logging.getLogger(__name__)
IMG_EXTENSIONS = [".jpg", ".JPG", ".png", ".PNG", ".ppm", ".PPM", ".bmp", ".BMP"]
def is_image_file(filename):
return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
def load_pix2pix_image_folder(image_root, input_folder="input", gt_folder="gt"):
"""
Args:
image_root (str): the directory where the images exist.
gt_postfix (str): the postfix for the ground truth images
Returns:
list[dict]: a list of dicts in argos' "standard dataset dict" format
"""
data = []
# gt_postfix = "%s." % (gt_postfix)
input_root = os.path.join(image_root, input_folder)
for root, _, fnames in sorted(os.walk(input_root)):
for fname in sorted(fnames):
if is_image_file(fname):
gt_fname = fname.replace("/%s/" % (gt_folder))
input_path = os.path.join(root, fname)
gt_path = os.path.join(root, gt_fname)
if not os.path.isfile(gt_path):
logger.warning("{} is not exist".format(gt_fname))
continue
# if len(gt_postfix) > 1 and fname.rfind(gt_postfix) != -1: # skip GT file
# continue
# gt_fname = fname[:-4] + gt_postfix + fname[-3:]
# assert gt_fname in fnames, (
# "gt file %s is not exist in %s" % (gt_fname, root))
f = {
"file_name": fname[:-4],
"input_path": input_path,
"gt_path": gt_path,
}
data.append(f)
if image_root.rfind("test") != -1 and len(data) == 5000:
logger.info("Reach maxinum of test data: {} ".format(len(data)))
return data
logger.info("Total number of data dicts: {} ".format(len(data)))
return data
def load_pix2pix_json(
    json_path,
    input_folder,
    gt_folder,
    mask_folder,
    real_json_path=None,
    real_folder=None,
    max_num=1e10,
):
    """
    Build dataset dicts from a json file mapping filenames to labels.

    Args:
        json_path (str): path to the json file that maps each input filename
            to its label, or to a ``(real_name, label)`` pair for resampling.
        input_folder (str): the directory for the input/source images.
        gt_folder (str): the directory for the ground-truth/target images.
        mask_folder (str): the directory for the masks.
        real_json_path (str): optional json of "real" image filenames; when
            given, each output dict also gets a "real_file_name" assigned
            round-robin from its keys.
        real_folder (str): the directory for the "real" images.
        max_num (int): upper bound on the number of dicts produced.

    Returns:
        list[dict]: a list of dicts carrying file names, folder paths and
            the input label.
    """
    real_filenames = {}
    if real_json_path is not None:
        with PathManager.open(real_json_path, "r") as f:
            real_filenames = json.load(f)
    data = []
    with PathManager.open(json_path, "r") as f:
        filenames = json.load(f)
        in_len = len(filenames)
        real_len = len(real_filenames)
        # Iterate long enough to cover the larger of the two json files,
        # but never emit more than max_num entries.
        total_len = min(max(in_len, real_len), max_num)
        real_keys = [*real_filenames.keys()]
        in_keys = [*filenames.keys()]
        cnt = 0
        # NOTE(review): if ``filenames`` is empty while ``real_filenames`` is
        # not, ``cnt % in_len`` divides by zero -- confirm callers always pass
        # a non-empty input json.
        # for fname in filenames.keys():
        while cnt < total_len:
            # Wrap around so the shorter key list is recycled (resampling).
            fname = in_keys[cnt % in_len]
            input_label = filenames[fname]
            if isinstance(input_label, tuple) or isinstance(input_label, list):
                assert (
                    len(input_label) == 2
                ), "Save (real_name, label) as the value of the json dict for resampling"
                fname, input_label = input_label
            # NOTE: ``f`` shadows the open file handle above; from here on it
            # is only used as the record dict.
            f = {
                "file_name": fname,
                "input_folder": input_folder,
                "gt_folder": gt_folder,
                "mask_folder": mask_folder,
                "input_label": input_label,
                "real_folder": real_folder,
            }
            if real_len > 0:
                real_fname = real_keys[cnt % real_len]
                f["real_file_name"] = real_fname
            data.append(f)
            cnt += 1
        # 5000 is the general number of images used to calculate FID in GANs
        # if max_num > 0 and len(data) == max_num:
        #     logger.info("Reach maxinum of test data: {} ".format(len(data)))
        #     return data
    logger.info("Total number of data dicts: {} ".format(len(data)))
    return data
def register_folder_dataset(
    name,
    json_path,
    input_folder,
    gt_folder=None,
    mask_folder=None,
    input_src_path=None,
    gt_src_path=None,
    mask_src_path=None,
    real_json_path=None,
    real_folder=None,
    real_src_path=None,
    max_num=1e10,
):
    """Register a pix2pix-style folder dataset and its metadata under *name*."""

    def _load():
        # Deferred so the json is only read when the dataset is requested.
        return load_pix2pix_json(
            json_path,
            input_folder,
            gt_folder,
            mask_folder,
            real_json_path,
            real_folder,
            max_num,
        )

    DatasetCatalog.register(name, _load)
    MetadataCatalog.get(name).set(
        input_src_path=input_src_path,
        gt_src_path=gt_src_path,
        mask_src_path=mask_src_path,
        real_src_path=real_src_path,
        input_folder=input_folder,
        gt_folder=gt_folder,
        mask_folder=mask_folder,
        real_folder=real_folder,
    )
def load_lmdb_keys(max_num):
    """
    Build index-only dataset dicts for an LMDB-backed dataset.

    Args:
        max_num (int): total number of entries to generate.

    Returns:
        list[dict]: dicts of the form ``{"index": i}`` for i in [0, max_num).
    """
    data = [{"index": idx} for idx in range(max_num)]
    logger.info("Total number of data dicts: {} ".format(len(data)))
    return data
def register_lmdb_dataset(
    name,
    data_folder,
    src_data_folder,
    max_num,
):
    """Register an LMDB dataset and its metadata under *name*."""
    DatasetCatalog.register(name, lambda: load_lmdb_keys(max_num))
    MetadataCatalog.get(name).set(
        data_folder=data_folder,
        src_data_folder=src_data_folder,
        max_num=max_num,
    )
def inject_gan_datasets(cfg):
    """
    Register GAN train/test datasets from ``cfg.D2GO_DATA.DATASETS.GAN_INJECTION``
    and append their names to ``cfg.DATASETS.TRAIN`` / ``cfg.DATASETS.TEST``.

    No-op when ``GAN_INJECTION.ENABLE`` is False. Mutates ``cfg`` in place.
    """
    if cfg.D2GO_DATA.DATASETS.GAN_INJECTION.ENABLE:
        name = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.NAME
        # Append "<name>_train"/"<name>_test" to the configured dataset lists.
        cfg.merge_from_list(
            [
                "DATASETS.TRAIN",
                list(cfg.DATASETS.TRAIN) + [name + "_train"],
                "DATASETS.TEST",
                list(cfg.DATASETS.TEST) + [name + "_test"],
            ]
        )
        json_path = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.JSON_PATH
        # NOTE(review): these asserts are stripped under ``python -O``;
        # consider raising explicitly if validation must always run.
        assert PathManager.isfile(json_path), "{} is not valid!".format(json_path)
        # Local working dir for extracted images; fall back to a fresh tempdir.
        if len(cfg.D2GO_DATA.DATASETS.GAN_INJECTION.LOCAL_DIR) > 0:
            image_dir = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.LOCAL_DIR
        else:
            image_dir = Path(tempfile.mkdtemp())
        input_src_path = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.INPUT_SRC_DIR
        assert PathManager.isfile(input_src_path), "{} is not valid!".format(
            input_src_path
        )
        input_folder = os.path.join(image_dir, name, "input")
        # GT images are optional: clear both path and folder when the source
        # archive is absent.
        gt_src_path = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.GT_SRC_DIR
        if PathManager.isfile(gt_src_path):
            gt_folder = os.path.join(image_dir, name, "gt")
        else:
            gt_src_path = None
            gt_folder = None
        # Masks are optional as well.
        mask_src_path = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.MASK_SRC_DIR
        if PathManager.isfile(mask_src_path):
            mask_folder = os.path.join(image_dir, name, "mask")
        else:
            mask_src_path = None
            mask_folder = None
        # "Real" images (for resampling) require their own json when present.
        real_src_path = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.REAL_SRC_DIR
        if PathManager.isfile(real_src_path):
            real_folder = os.path.join(image_dir, name, "real")
            real_json_path = cfg.D2GO_DATA.DATASETS.GAN_INJECTION.REAL_JSON_PATH
            assert PathManager.isfile(real_json_path), "{} is not valid!".format(
                real_json_path
            )
        else:
            real_src_path = None
            real_folder = None
            real_json_path = None
        # Train and test splits share the same sources; only the test split
        # caps the number of images (MAX_TEST_IMAGES).
        register_folder_dataset(
            name + "_train",
            json_path,
            input_folder,
            gt_folder,
            mask_folder,
            input_src_path,
            gt_src_path,
            mask_src_path,
            real_json_path,
            real_folder,
            real_src_path,
        )
        register_folder_dataset(
            name + "_test",
            json_path,
            input_folder,
            gt_folder,
            mask_folder,
            input_src_path,
            gt_src_path,
            mask_src_path,
            real_json_path,
            real_folder,
            real_src_path,
            max_num=cfg.D2GO_DATA.DATASETS.GAN_INJECTION.MAX_TEST_IMAGES,
        )
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.